| code (string, lengths 2 to 1.05M) | repo_name (string, lengths 5 to 104) | path (string, lengths 4 to 251) | language (1 class) | license (15 classes) | size (int32, 2 to 1.05M) |
|---|---|---|---|---|---|
import web
import time
import timeit
from collections import deque
from sys import exit, exc_info, argv
import math
import os

localport = os.environ.get('PORT', 8080)

from cStringIO import StringIO

clock = timeit.default_timer
web.config.debug = False

urls = (
    '/u', 'controller',
    '/u2', 'controller2',
    '/error', 'controller3',
    '/stop', 'closecontroller'
)
app = web.application(urls, globals())
def sign(val):
    return 1 if val >= 0 else -1

class closecontroller:
    def GET(self):
        return exit(0)

    def POST(self):
        return exit(0)

class controller:
    def GET(self):
        return self.process()

    def POST(self):
        return self.process()

    def process(self):
        i = web.input()  # print i
        f = "-1929291"
        try:
            if hasattr(i, 'data'):
                PC = plantcontroller(i.data.split())
                # time.sleep(5)
                f = PC.evaluate()[0]
        except:
            print exc_info(), i
        web.header("Content-Type", "text/plain")  # Set the Header
        return str(f)
class controller2:
    def GET(self):
        return self.process()

    def POST(self):
        return self.process()

    def process(self):
        i = web.input()  # print i
        f = "-1929291"
        try:
            if hasattr(i, 'data'):
                PC = plantcontroller(i.data.split())
                # time.sleep(5)
                PC.evaluate()
                f = PC.file_str.getvalue()  # the state trace logged during evaluate(); reading it here avoids a NameError when PC was never created
        except:
            print exc_info(), i
        web.header("Content-Type", "text/plain")  # Set the Header
        return str(f)
class controller3:
    def GET(self):
        return self.process()

    def POST(self):
        return self.process()

    def process(self):
        i = web.input()  # print i
        f = "-1929291"
        try:
            if hasattr(i, 'data'):
                PC = plantcontroller(i.data.split())
                # time.sleep(5)
                f = PC.evaluate()[1]
        except:
            print exc_info(), i
        web.header("Content-Type", "text/plain")  # Set the Header
        return str(f)

# set up the best clock that can be accessed on this machine
clock = timeit.default_timer
# get the current time (the time the controller was started)
time0 = clock()
class plantcontroller:
    def __init__(self, data):
        try:
            self.duration = float(data[0])
            self.h = float(data[1])
            KpR = float(data[2])
            KiR = float(data[3])
            KdR = float(data[4])
            KpM = float(data[5])
            KiM = float(data[6])
            KdM = float(data[7])
        except:
            print exc_info()
            self.duration = 0
            self.h = .02
            (KpM, KiM, KdM, KpR, KiR, KdR) = (0, 0, 0, 0, 0, 0)
        KiM, KiR = (0, 0)
        self.R_ref = .5
        self.w = 2*3.14159*1/100.*1
        self.CumulativeError = 0.
        self.Count = 0.
        self.R = deque([(0, 0), (0, 0), (0, 0)], 10)
        self.Theta = deque([(0, 0), (0, 0), (0, 0)], 10)
        self.StateTime = 0
        self.angle_max = 3.14/180.0*(32+20)
        '''
        #------------------------------------------
        # PID controller _0 for ball!
        # http://www.forkosh.com/mimetex.cgi?P(s)=\frac{Y(s)}{X(s)}=\frac{a_2s^2+a_1 s+a_0}{b_2s^2+b_1s+b_0}
        a2_0 = KdR
        a1_0 = KpR
        a0_0 = KiR
        b2_0 = 0
        b1_0 = 1
        b0_0 = 0
        #------------------------------------------
        A2_0 = a2_0 + a1_0 * self.h + a0_0 * self.h * self.h
        A1_0 = -2*a2_0 - a1_0 * self.h
        A0_0 = a2_0
        B2_0 = b2_0 + b1_0 * self.h + b0_0 * self.h * self.h
        B1_0 = -2*b2_0 - b1_0 * self.h
        B0_0 = b2_0
        self.eta0_0 = -B0_0/B2_0
        self.eta1_0 = -B1_0/B2_0
        self.gamma0_0 = A0_0/B2_0
        self.gamma1_0 = A1_0/B2_0
        self.gamma2_0 = A2_0/B2_0
        self.Y0 = deque([(0, 0), (0, 0), (0, 0)], 3)
        self.X0 = deque([(0, 0), (0, 0), (0, 0)], 3)
        #------------------------------------------
        # PID controller _1 for beam!
        # http://www.forkosh.com/mimetex.cgi?P(s)=\frac{Y(s)}{X(s)}=\frac{a_2s^2+a_1 s+a_0}{b_2s^2+b_1s+b_0}
        a2_1 = KdM
        a1_1 = KpM
        a0_1 = KiM
        b2_1 = 0
        b1_1 = 1
        b0_1 = 0
        #------------------------------------------
        A2_1 = a2_1 + a1_1 * self.h + a0_1 * self.h * self.h
        A1_1 = -2*a2_1 - a1_1 * self.h
        A0_1 = a2_1
        B2_1 = b2_1 + b1_1 * self.h + b0_1 * self.h * self.h
        B1_1 = -2*b2_1 - b1_1 * self.h
        B0_1 = b2_1
        self.eta0_1 = -B0_1/B2_1
        self.eta1_1 = -B1_1/B2_1
        self.gamma0_1 = A0_1/B2_1
        self.gamma1_1 = A1_1/B2_1
        self.gamma2_1 = A2_1/B2_1
        self.X1 = deque([(0, 0), (0, 0), (0, 0)], 3)
        self.Y1 = deque([(0, 0), (0, 0), (0, 0)], 3)
        '''
        self.AR = KpR
        self.BR = KdR/self.h
        self.Y0 = deque([(0, 0), (0, 0), (0, 0)], 3)
        self.X0 = deque([(0, 0), (0, 0), (0, 0)], 3)
        self.AM = KpM
        self.BM = KdM/self.h
        self.X1 = deque([(0, 0), (0, 0), (0, 0)], 3)
        self.Y1 = deque([(0, 0), (0, 0), (0, 0)], 3)
        m = 0.111
        R = 0.015
        g = -9.8
        L = 1.0
        d = 0.03
        J = 9.99e-6
        H = -m*g*d/L/(J/(R*R) + m)  # fixed: J/R*R parsed as (J/R)*R; the transfer function below gives J/R^2 + m
        # http://www.forkosh.com/mimetex.cgi?P(s)=\frac{-m*g*d/L/(J/R^2+m)}{s^2}
        # http://www.forkosh.com/mimetex.cgi?s=\frac{z-1}{zh}
        # http://www.forkosh.com/mimetex.cgi?r[n]=2r[n-1]-r[n-2]+Hh^2\theta[n]
        self.Dist = deque([(0, 0), (0, 0), (0, 0)], 10)
        self.Theta_plant = deque([(0, 0), (0, 0), (0, 0)], 10)
        self.U = deque([(0, 0), (0, 0), (0, 0)])
        self.h_plant = self.h/10
        self.Count_plant = 0
        # http://www.forkosh.com/mimetex.cgi?P(s)=\frac{\Theta(z)}{V_{in}(z)}=\frac{A_2^2z^2}{B_2^2z^2 + B_1z + B_0}
        alpha = 0.01176
        beta = 0.58823
        # http://www.forkosh.com/mimetex.cgi?P(s)=\frac{\Theta(s)}{V_{in}(s)}=\frac{1}{s(\alpha s+\beta)}
        A12 = self.h_plant*self.h_plant
        B12 = alpha
        B11 = (beta*self.h_plant - 2*alpha)
        B10 = alpha
        self.P = A12/B12
        self.Q = B11/B12
        self.R = B10/B12
        self.theta_high = 3.14/180.0*(32+20)
        self.r_high = 1.1
        # 2.2.6
        # http://www.forkosh.com/mimetex.cgi?P(s)=\frac{X(s)}{A(s)}=\frac{-7}{s^2}
        A22 = -7*self.h_plant*self.h_plant
        B22 = 1
        B21 = -2
        B20 = 1
        self.L = A22/B22
        self.M = B21/B22
        self.N = B20/B22
        self.file_str = StringIO()
    # in the future could base the time on the plant, and pass it as a parameter to this method
    def referencex(self, t):
        return self.R_ref*sign(math.cos(self.w*t))

    def referencey(self, t):
        return self.R_ref*sign(math.sin(self.w*t))
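    # Added note (not in the original): sign(cos(wt)) and sign(sin(wt)) are
    # square waves, so the two references together step the setpoint through
    # the four corners (+/-R_ref, +/-R_ref) once per period 2*pi/w.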
    def updatePID2(self):
        # global X0, X1, Y0, Y1, Theta, StateTime, CumulativeError, Count
        # Update the time and iteration number
        self.Count += 1
        t = self.Count*self.h
        try:
            self.X0[-3] = self.X0[-2]; self.X0[-2] = self.X0[-1]; self.X0[-1] = (self.Dist[-1][0], self.Dist[-1][1])
            self.X1[-3] = self.X1[-2]; self.X1[-2] = self.X1[-1]; self.X1[-1] = self.Theta_plant[-1]
            StateTime = self.Count_plant*self.h_plant
        except:
            print exc_info(), "error"
            self.CumulativeError = self.CumulativeError + 10  # /(duration/(h*h))
        '''
        Determine the desired beam angle based on the ball position
        '''
        x_d = self.referencex(t)  # ref(t1, xkernel, xamplitude, xfrequency)
        e_x = x_d - self.X0[-1][0]
        angle_d = self.AR * (e_x) + self.BR * (self.X0[-1][0] - self.X0[-2][0])
        if angle_d > self.angle_max:
            angle_d = self.angle_max
        elif angle_d < -self.angle_max:
            angle_d = -self.angle_max
        u_x = self.AM*(angle_d*16 - self.X1[-1][0]) + self.BM * (self.X1[-1][0] - self.X1[-2][0])
        y_d = self.referencey(t)  # ref(t1, ykernel, yamplitude, yfrequency)
        e_y = y_d - self.X0[-1][1]
        angle_d1 = self.AR * (e_y) + self.BR * (self.X0[-1][1] - self.X0[-2][1])
        if angle_d1 > self.angle_max:
            angle_d1 = self.angle_max
        elif angle_d1 < -self.angle_max:
            angle_d1 = -self.angle_max
        u_y = self.AM*(angle_d1*16 - self.X1[-1][1]) + self.BM * (self.X1[-1][1] - self.X1[-2][1])
        self.Y1[-3] = self.Y1[-2]; self.Y1[-2] = self.Y1[-1]; self.Y1[-1] = (u_x, u_y,)
        self.file_str.write("%s %s %s 0\n" % ("p", self.Dist[-1][0], self.Dist[-1][1]))
        self.CumulativeError = self.CumulativeError + abs(e_x)  # /(duration/h)
        self.CumulativeError = self.CumulativeError + abs(e_y)  # /(duration/h)
    def updatePID(self):
        # NOTE: this variant relies on the eta*/gamma* coefficients from the
        # commented-out PID setup in __init__ and is not called by evaluate(),
        # which uses updatePID2() instead.
        # global X0, X1, Y0, Y1, Theta, StateTime, CumulativeError, Count
        self.Count += 1
        t = self.Count*self.h
        '''
        Determine the desired beam angle based on the ball position
        '''
        self.CumulativeError = self.CumulativeError + abs(self.X0[-1][0])  # /(duration/h)
        self.CumulativeError = self.CumulativeError + abs(self.X0[-1][1])  # /(duration/h)
        angle0 = self.eta1_0 * self.Y0[-1][0] + self.eta0_0 * self.Y0[-2][0] + self.gamma2_0 * self.X0[-1][0] + self.gamma1_0 * self.X0[-2][0] + self.gamma0_0 * self.X0[-3][0]
        angle1 = self.eta1_0 * self.Y0[-1][1] + self.eta0_0 * self.Y0[-2][1] + self.gamma2_0 * self.X0[-1][1] + self.gamma1_0 * self.X0[-2][1] + self.gamma0_0 * self.X0[-3][1]
        # probably should get the old values of Y0 from X1... Right now I'm remembering the requested values
        self.Y0[-3] = self.Y0[-2]; self.Y0[-2] = self.Y0[-1]; self.Y0[-1] = (angle0, angle1)
        if angle0 > self.angle_max:
            angle0 = self.angle_max
        elif angle0 < -self.angle_max:
            angle0 = -self.angle_max
        if angle1 > self.angle_max:
            angle1 = self.angle_max
        elif angle1 < -self.angle_max:
            angle1 = -self.angle_max
        for i in range(3):
            self.X1[i] = (self.X1[i][0] - angle0*16, self.X1[i][1] - angle1*16)
        '''
        Determine the desired control value based on the current (and the desired) beam angle
        '''
        u0u2 = self.eta1_1 * self.Y1[-1][0] + self.eta0_1 * self.Y1[-2][0] + self.gamma2_1 * self.X1[-1][0] + self.gamma1_1 * self.X1[-2][0] + self.gamma0_1 * self.X1[-3][0]
        u1u2 = self.eta1_1 * self.Y1[-1][1] + self.eta0_1 * self.Y1[-2][1] + self.gamma2_1 * self.X1[-1][1] + self.gamma1_1 * self.X1[-2][1] + self.gamma0_1 * self.X1[-3][1]
        self.Y1[-3] = self.Y1[-2]; self.Y1[-2] = self.Y1[-1]; self.Y1[-1] = (-u0u2, -u1u2)
        try:
            self.X0[-3] = self.X0[-2]; self.X0[-2] = self.X0[-1]; self.X0[-1] = (self.Dist[-1][0] - self.referencex(t), self.Dist[-1][1] - self.referencey(t))
            self.X1[-3] = self.X1[-2]; self.X1[-2] = self.X1[-1]; self.X1[-1] = self.Theta_plant[-1]
            StateTime = self.Count_plant*self.h_plant
        except:
            print exc_info(), "error"
            self.CumulativeError = self.CumulativeError + 10  # /(duration/(h*h))
    def updatePlant(self):
        # global Theta_plant, Dist, U, Count_plant
        self.Count_plant += 1
        self.U.append(self.Y1[-1])
        theta0 = self.P * self.U[-1][0] - self.Q * self.Theta_plant[-1][0] - self.R * self.Theta_plant[-2][0]
        if theta0 > self.theta_high:
            theta0 = self.theta_high
        elif theta0 < -self.theta_high:
            theta0 = -self.theta_high
        theta1 = self.P * self.U[-1][1] - self.Q * self.Theta_plant[-1][1] - self.R * self.Theta_plant[-2][1]
        if theta1 > self.theta_high:
            theta1 = self.theta_high
        elif theta1 < -self.theta_high:
            theta1 = -self.theta_high
        self.Theta_plant.append((theta0, theta1))
        x0 = self.L * self.Theta_plant[-1][0]/16.0 - self.M * self.Dist[-1][0] - self.N * self.Dist[-2][0]  # alpha = theta/16, eqn 2.2.2 EEE490
        if x0 > self.r_high:
            x0 = self.r_high
        elif x0 < -self.r_high:
            x0 = -self.r_high
        x1 = self.L * self.Theta_plant[-1][1]/16.0 - self.M * self.Dist[-1][1] - self.N * self.Dist[-2][1]  # alpha = theta/16, eqn 2.2.2 EEE490
        if x1 > self.r_high:
            x1 = self.r_high
        elif x1 < -self.r_high:
            x1 = -self.r_high
        self.Dist.append((x0, x1))
        # print str(repr(Count_plant*h_plant)) + "," + str(Dist[-1][0]) + "," + str(Dist[-1][1]) + "," + str(Theta[-1][0]) + "," + str(Theta[-1][1]) + "," + str(U[-1][0]) + "," + str(U[-1][1]) + "," + str(repr(Count*h)) + "," + str(repr((Count_plant-1)*h_plant)) + ",sekou"
    def evaluate(self):
        value = -99
        try:
            if self.h < self.duration/3.0 and self.h > 0.001:
                for i in range(int(self.duration/0.001)):
                    if i % int(self.h/.001) == 0:
                        self.updatePID2()
                    if i % int(self.h_plant/.001) == 0:
                        self.updatePlant()
                if 0.0000000000000001 != self.CumulativeError:
                    value = 1/pow(self.CumulativeError/self.Count, 5.0)
        except:
            print exc_info()
            print "syntax is duration STEPSIZE KPball KIball KDball KPbeam KIbeam KDbeam"
        return [value, self.CumulativeError/self.Count]
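# A minimal client sketch (an added illustration, not part of the original
# file; the gain values are arbitrary). The eight space-separated numbers
# follow the "syntax is duration STEPSIZE KPball KIball KDball KPbeam KIbeam
# KDbeam" message printed by evaluate(), POSTed as the form field 'data':
#
#   import urllib, urllib2
#   payload = urllib.urlencode({'data': '10 0.02 1.0 0.0 0.5 2.0 0.0 0.8'})
#   print urllib2.urlopen('http://localhost:8080/u', payload).read()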
if __name__ == "__main__":
    wsgifunc = app.wsgifunc()
    wsgifunc = web.httpserver.StaticMiddleware(wsgifunc)
    server = web.httpserver.WSGIServer(("0.0.0.0", int(localport)), wsgifunc)
    print "http://%s:%s/" % ("0.0.0.0", localport)
    try:
        server.start()
    except (KeyboardInterrupt, SystemExit):
        server.stop()
        print "Shutting down service"
| slremy/bioinspiredbackend | websingleton.py | Python | gpl-3.0 | 12,276 |
# coding: utf-8
natural_language = [
    'Natural Language :: Portuguese (Brazilian)',
]
| hiatobr/Paloma | src/setup/classifiers/natural_language.py | Python | gpl-3.0 | 90 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This file is part of XBMC Mega Pack Addon.
Copyright (C) 2014 Wolverine (xbmcmegapack@gmail.com)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see http://www.gnu.org/licenses/gpl-3.0.html
"""
import os
import stat
import sys
import copy
import xbmc
import config
class Favourites_manager():
""" Class that manages this specific menu context."""
def __init__(self):
"""Private Properties."""
self.f = xbmc.translatePath(config.ADDON_SPECIAL_PATH) + \
config.PLUGIN_NAME + config.PLUGIN_MY_FAVOURITES_FILE
self.dictionary = {}
if not os.stat(self.f)[stat.ST_SIZE] == 0: # if not no favourites
self.dictionary = copy.deepcopy(eval(open(self.f, 'r').read()))
def add(self, favourite):
"""Add favourite to dictionary."""
self.dictionary.update(favourite)
f = open(self.f, 'w')
f.write(str(self.dictionary))
f.close()
def delete(self, key):
"""Delete favourite from dictionary."""
del self.dictionary[key]
f = open(self.f, 'w')
f.write(str(self.dictionary))
f.close()
def delete_all(self):
"""Delete all favourites from dictionary."""
f = open(self.f, 'w')
f.flush()
f.close()
if __name__ == "__main__":
context_menu_command = str(sys.argv[1])
exec context_menu_command # See config.CONTEXT_MENU_FAVOURITES_COMMANDS
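# Added usage note (an assumption, not part of the original file): Kodi passes
# a command string built from config.CONTEXT_MENU_FAVOURITES_COMMANDS as
# sys.argv[1], e.g. something like "Favourites_manager().delete_all()", which
# is then executed by the exec statement above.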
| xbmcmegapack/plugin.video.megapack.dev | resources/lib/favourites_manager.py | Python | gpl-3.0 | 2,043 |
import sys
from gossip import Node
if __name__ == '__main__':
    server_port = sys.argv[1]
    peers = sys.argv[2:]
    n = Node(server_port, peers)
    while True:
        pass
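# Invocation sketch (an assumption; the peer address format depends on the
# gossip.Node implementation, which is not shown here):
#
#   python subscriber.py 9001 127.0.0.1:9002 127.0.0.1:9003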
| abdelwas/GossipPubSub | subscriber.py | Python | gpl-3.0 | 183 |
|
"""Askbot template context processor that makes some parameters
from the django settings, all parameters from the askbot livesettings
and the application available for the templates
"""
from django.conf import settings
import askbot
from askbot import api
from askbot.conf import settings as askbot_settings
from askbot.skins.loaders import get_skin
from askbot.utils import url_utils
def application_settings(request):
"""The context processor function"""
my_settings = askbot_settings.as_dict()
my_settings['LANGUAGE_CODE'] = settings.LANGUAGE_CODE
my_settings['ASKBOT_URL'] = settings.ASKBOT_URL
my_settings['DEBUG'] = settings.DEBUG
my_settings['ASKBOT_VERSION'] = askbot.get_version()
my_settings['LOGIN_URL'] = url_utils.get_login_url()
my_settings['LOGOUT_URL'] = url_utils.get_logout_url()
my_settings['LOGOUT_REDIRECT_URL'] = url_utils.get_logout_redirect_url()
return {
'settings': my_settings,
'skin': get_skin(request),
'moderation_items': api.get_info_on_moderation_items(request.user)
}
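# Added note (not part of the original file): the processor only takes effect
# once it is registered with Django; a sketch, with the exact setting name
# depending on the Django version in use:
#
#   TEMPLATE_CONTEXT_PROCESSORS += ('askbot.context.application_settings',)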
| aavrug/askbot-devel | askbot/context.py | Python | gpl-3.0 | 1,070 |
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016-2017, Yanis Guenane <yanis+ansible@guenane.org>
# Copyright: (c) 2017, Markus Teufelberger <mteufelberger+ansible@mgit.at>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: openssl_certificate
version_added: "2.4"
short_description: Generate and/or check OpenSSL certificates
description:
    - This module allows one to (re)generate OpenSSL certificates.
    - It implements a notion of provider (ie. C(selfsigned), C(ownca), C(acme), C(assertonly))
      for your certificate.
    - The C(assertonly) provider is intended for use cases where one is only interested in
      checking properties of a supplied certificate.
    - The C(ownca) provider is intended for generating OpenSSL certificates signed with your own
      CA (Certificate Authority) certificate (a self-signed certificate).
    - Many properties that can be specified in this module are for validation of an
      existing or newly generated certificate. The proper place to specify them, if you
      want to receive a certificate with these properties, is a CSR (Certificate Signing Request).
    - "Please note that the module regenerates an existing certificate if it doesn't match the module's
      options, or if it seems to be corrupt. If you are concerned that this could overwrite
      your existing certificate, consider using the I(backup) option."
    - It uses the pyOpenSSL or cryptography python library to interact with OpenSSL.
    - If both the cryptography and PyOpenSSL libraries are available (and meet the minimum version requirements),
      cryptography will be preferred as a backend over PyOpenSSL (unless the backend is forced with C(select_crypto_backend)).
requirements:
    - PyOpenSSL >= 0.15 or cryptography >= 1.6 (if using C(selfsigned) or C(assertonly) provider)
    - acme-tiny (if using the C(acme) provider)
author:
    - Yanis Guenane (@Spredzy)
    - Markus Teufelberger (@MarkusTeufelberger)
options:
    state:
        description:
            - Whether the certificate should exist or not, taking action if the state is different from what is stated.
        type: str
        default: present
        choices: [ absent, present ]
    path:
        description:
            - Remote absolute path where the generated certificate file should be created or is already located.
        type: path
        required: true
    provider:
        description:
            - Name of the provider to use to generate/retrieve the OpenSSL certificate.
            - The C(assertonly) provider will not generate files and will fail if the certificate file is missing.
        type: str
        required: true
        choices: [ acme, assertonly, ownca, selfsigned ]
    force:
        description:
            - Generate the certificate, even if it already exists.
        type: bool
        default: no
    csr_path:
        description:
            - Path to the Certificate Signing Request (CSR) used to generate this certificate.
            - This is not required in C(assertonly) mode.
        type: path
    privatekey_path:
        description:
            - Path to the private key to use when signing the certificate.
        type: path
    privatekey_passphrase:
        description:
            - The passphrase for the I(privatekey_path).
            - This is required if the private key is password protected.
        type: str
    selfsigned_version:
        description:
            - Version of the C(selfsigned) certificate.
            - Nowadays it should almost always be C(3).
            - This is only used by the C(selfsigned) provider.
        type: int
        default: 3
        version_added: "2.5"
    selfsigned_digest:
        description:
            - Digest algorithm to be used when self-signing the certificate.
            - This is only used by the C(selfsigned) provider.
        type: str
        default: sha256
    selfsigned_not_before:
        description:
            - The point in time the certificate is valid from.
            - Time can be specified either as relative time or as absolute timestamp.
            - Time will always be interpreted as UTC.
            - Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
              + C([w | d | h | m | s]) (e.g. C(+32w1d2h)).
            - Note that if using relative time this module is NOT idempotent.
            - If this value is not specified, the certificate will start being valid from now.
            - This is only used by the C(selfsigned) provider.
        type: str
        default: +0s
        aliases: [ selfsigned_notBefore ]
    selfsigned_not_after:
        description:
            - The point in time at which the certificate stops being valid.
            - Time can be specified either as relative time or as absolute timestamp.
            - Time will always be interpreted as UTC.
            - Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
              + C([w | d | h | m | s]) (e.g. C(+32w1d2h)).
            - Note that if using relative time this module is NOT idempotent.
            - If this value is not specified, the certificate will stop being valid 10 years from now.
            - This is only used by the C(selfsigned) provider.
        type: str
        default: +3650d
        aliases: [ selfsigned_notAfter ]
    ownca_path:
        description:
            - Remote absolute path of the CA (Certificate Authority) certificate.
            - This is only used by the C(ownca) provider.
        type: path
        version_added: "2.7"
    ownca_privatekey_path:
        description:
            - Path to the CA (Certificate Authority) private key to use when signing the certificate.
            - This is only used by the C(ownca) provider.
        type: path
        version_added: "2.7"
    ownca_privatekey_passphrase:
        description:
            - The passphrase for the I(ownca_privatekey_path).
            - This is only used by the C(ownca) provider.
        type: str
        version_added: "2.7"
    ownca_digest:
        description:
            - The digest algorithm to be used for the C(ownca) certificate.
            - This is only used by the C(ownca) provider.
        type: str
        default: sha256
        version_added: "2.7"
    ownca_version:
        description:
            - The version of the C(ownca) certificate.
            - Nowadays it should almost always be C(3).
            - This is only used by the C(ownca) provider.
        type: int
        default: 3
        version_added: "2.7"
    ownca_not_before:
        description:
            - The point in time the certificate is valid from.
            - Time can be specified either as relative time or as absolute timestamp.
            - Time will always be interpreted as UTC.
            - Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
              + C([w | d | h | m | s]) (e.g. C(+32w1d2h)).
            - Note that if using relative time this module is NOT idempotent.
            - If this value is not specified, the certificate will start being valid from now.
            - This is only used by the C(ownca) provider.
        type: str
        default: +0s
        version_added: "2.7"
    ownca_not_after:
        description:
            - The point in time at which the certificate stops being valid.
            - Time can be specified either as relative time or as absolute timestamp.
            - Time will always be interpreted as UTC.
            - Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
              + C([w | d | h | m | s]) (e.g. C(+32w1d2h)).
            - Note that if using relative time this module is NOT idempotent.
            - If this value is not specified, the certificate will stop being valid 10 years from now.
            - This is only used by the C(ownca) provider.
        type: str
        default: +3650d
        version_added: "2.7"
    acme_accountkey_path:
        description:
            - The path to the accountkey for the C(acme) provider.
            - This is only used by the C(acme) provider.
        type: path
    acme_challenge_path:
        description:
            - The path to the ACME challenge directory that is served on U(http://<HOST>:80/.well-known/acme-challenge/).
            - This is only used by the C(acme) provider.
        type: path
    acme_chain:
        description:
            - Include the intermediate certificate in the generated certificate.
            - This is only used by the C(acme) provider.
            - Note that this is only available for older versions of C(acme-tiny).
              New versions include the chain automatically, and setting I(acme_chain) to C(yes) results in an error.
        type: bool
        default: no
        version_added: "2.5"
    signature_algorithms:
        description:
            - A list of algorithms that you would accept the certificate to be signed with
              (e.g. ['sha256WithRSAEncryption', 'sha512WithRSAEncryption']).
            - This is only used by the C(assertonly) provider.
        type: list
    issuer:
        description:
            - The key/value pairs that must be present in the issuer name field of the certificate.
            - If you need to specify more than one value with the same key, use a list as value.
            - This is only used by the C(assertonly) provider.
        type: dict
    issuer_strict:
        description:
            - If set to C(yes), the I(issuer) field must contain only these values.
            - This is only used by the C(assertonly) provider.
        type: bool
        default: no
        version_added: "2.5"
    subject:
        description:
            - The key/value pairs that must be present in the subject name field of the certificate.
            - If you need to specify more than one value with the same key, use a list as value.
            - This is only used by the C(assertonly) provider.
        type: dict
    subject_strict:
        description:
            - If set to C(yes), the I(subject) field must contain only these values.
            - This is only used by the C(assertonly) provider.
        type: bool
        default: no
        version_added: "2.5"
    has_expired:
        description:
            - Checks if the certificate is expired/not expired at the time the module is executed.
            - This is only used by the C(assertonly) provider.
        type: bool
        default: no
    version:
        description:
            - The version of the certificate.
            - Nowadays it should almost always be 3.
            - This is only used by the C(assertonly) provider.
        type: int
    valid_at:
        description:
            - The certificate must be valid at this point in time.
            - The timestamp is formatted as an ASN.1 TIME.
            - This is only used by the C(assertonly) provider.
        type: str
    invalid_at:
        description:
            - The certificate must be invalid at this point in time.
            - The timestamp is formatted as an ASN.1 TIME.
            - This is only used by the C(assertonly) provider.
        type: str
    not_before:
        description:
            - The certificate must start to become valid at this point in time.
            - The timestamp is formatted as an ASN.1 TIME.
            - This is only used by the C(assertonly) provider.
        type: str
        aliases: [ notBefore ]
    not_after:
        description:
            - The certificate must expire at this point in time.
            - The timestamp is formatted as an ASN.1 TIME.
            - This is only used by the C(assertonly) provider.
        type: str
        aliases: [ notAfter ]
    valid_in:
        description:
            - The certificate must still be valid at this relative time offset from now.
            - Valid format is C([+-]timespec | number_of_seconds) where timespec can be an integer
              + C([w | d | h | m | s]) (e.g. C(+32w1d2h)).
            - Note that if using this parameter, this module is NOT idempotent.
            - This is only used by the C(assertonly) provider.
        type: str
    key_usage:
        description:
            - The I(key_usage) extension field must contain all these values.
            - This is only used by the C(assertonly) provider.
        type: list
        aliases: [ keyUsage ]
    key_usage_strict:
        description:
            - If set to C(yes), the I(key_usage) extension field must contain only these values.
            - This is only used by the C(assertonly) provider.
        type: bool
        default: no
        aliases: [ keyUsage_strict ]
    extended_key_usage:
        description:
            - The I(extended_key_usage) extension field must contain all these values.
            - This is only used by the C(assertonly) provider.
        type: list
        aliases: [ extendedKeyUsage ]
    extended_key_usage_strict:
        description:
            - If set to C(yes), the I(extended_key_usage) extension field must contain only these values.
            - This is only used by the C(assertonly) provider.
        type: bool
        default: no
        aliases: [ extendedKeyUsage_strict ]
    subject_alt_name:
        description:
            - The I(subject_alt_name) extension field must contain these values.
            - This is only used by the C(assertonly) provider.
        type: list
        aliases: [ subjectAltName ]
    subject_alt_name_strict:
        description:
            - If set to C(yes), the I(subject_alt_name) extension field must contain only these values.
            - This is only used by the C(assertonly) provider.
        type: bool
        default: no
        aliases: [ subjectAltName_strict ]
    select_crypto_backend:
        description:
            - Determines which crypto backend to use.
            - The default choice is C(auto), which tries to use C(cryptography) if available, and falls back to C(pyopenssl).
            - If set to C(pyopenssl), will try to use the L(pyOpenSSL,https://pypi.org/project/pyOpenSSL/) library.
            - If set to C(cryptography), will try to use the L(cryptography,https://cryptography.io/) library.
        type: str
        default: auto
        choices: [ auto, cryptography, pyopenssl ]
        version_added: "2.8"
    backup:
        description:
            - Create a backup file including a timestamp so you can get the original
              certificate back if you overwrote it with a new one by accident.
            - This is not used by the C(assertonly) provider.
        type: bool
        default: no
        version_added: "2.8"
extends_documentation_fragment: files
notes:
    - All ASN.1 TIME values should be specified following the YYYYMMDDHHMMSSZ pattern.
    - Dates specified should be UTC. Minutes and seconds are mandatory.
    - For security reasons, when you use the C(ownca) provider, you should NOT run M(openssl_certificate) on
      a target machine, but on a dedicated CA machine. It is recommended not to store the CA private key
      on the target machine. Once signed, the certificate can be moved to the target machine.
seealso:
    - module: openssl_csr
    - module: openssl_dhparam
    - module: openssl_pkcs12
    - module: openssl_privatekey
    - module: openssl_publickey
'''
EXAMPLES = r'''
- name: Generate a Self Signed OpenSSL certificate
  openssl_certificate:
    path: /etc/ssl/crt/ansible.com.crt
    privatekey_path: /etc/ssl/private/ansible.com.pem
    csr_path: /etc/ssl/csr/ansible.com.csr
    provider: selfsigned

- name: Generate an OpenSSL certificate signed with your own CA certificate
  openssl_certificate:
    path: /etc/ssl/crt/ansible.com.crt
    csr_path: /etc/ssl/csr/ansible.com.csr
    ownca_path: /etc/ssl/crt/ansible_CA.crt
    ownca_privatekey_path: /etc/ssl/private/ansible_CA.pem
    provider: ownca

- name: Generate a Let's Encrypt Certificate
  openssl_certificate:
    path: /etc/ssl/crt/ansible.com.crt
    csr_path: /etc/ssl/csr/ansible.com.csr
    provider: acme
    acme_accountkey_path: /etc/ssl/private/ansible.com.pem
    acme_challenge_path: /etc/ssl/challenges/ansible.com/

- name: Force (re-)generate a new Let's Encrypt Certificate
  openssl_certificate:
    path: /etc/ssl/crt/ansible.com.crt
    csr_path: /etc/ssl/csr/ansible.com.csr
    provider: acme
    acme_accountkey_path: /etc/ssl/private/ansible.com.pem
    acme_challenge_path: /etc/ssl/challenges/ansible.com/
    force: yes

# Examples for some checks one could use the assertonly provider for:

# How to use the assertonly provider to implement and trigger your own custom certificate generation workflow:
- name: Check if a certificate is currently still valid, ignoring failures
  openssl_certificate:
    path: /etc/ssl/crt/example.com.crt
    provider: assertonly
    has_expired: no
  ignore_errors: yes
  register: validity_check

- name: Run custom task(s) to get a new, valid certificate in case the initial check failed
  command: superspecialSSL recreate /etc/ssl/crt/example.com.crt
  when: validity_check.failed

- name: Check the new certificate again for validity with the same parameters, this time failing the play if it is still invalid
  openssl_certificate:
    path: /etc/ssl/crt/example.com.crt
    provider: assertonly
    has_expired: no
  when: validity_check.failed

# Some other checks that assertonly could be used for:
- name: Verify that an existing certificate was issued by the Let's Encrypt CA and is currently still valid
  openssl_certificate:
    path: /etc/ssl/crt/example.com.crt
    provider: assertonly
    issuer:
      O: Let's Encrypt
    has_expired: no

- name: Ensure that a certificate uses a modern signature algorithm (no SHA1, MD5 or DSA)
  openssl_certificate:
    path: /etc/ssl/crt/example.com.crt
    provider: assertonly
    signature_algorithms:
      - sha224WithRSAEncryption
      - sha256WithRSAEncryption
      - sha384WithRSAEncryption
      - sha512WithRSAEncryption
      - sha224WithECDSAEncryption
      - sha256WithECDSAEncryption
      - sha384WithECDSAEncryption
      - sha512WithECDSAEncryption

- name: Ensure that the existing certificate belongs to the specified private key
  openssl_certificate:
    path: /etc/ssl/crt/example.com.crt
    privatekey_path: /etc/ssl/private/example.com.pem
    provider: assertonly

- name: Ensure that the existing certificate is still valid at the winter solstice 2017
  openssl_certificate:
    path: /etc/ssl/crt/example.com.crt
    provider: assertonly
    valid_at: 20171221162800Z

- name: Ensure that the existing certificate is still valid 2 weeks (1209600 seconds) from now
  openssl_certificate:
    path: /etc/ssl/crt/example.com.crt
    provider: assertonly
    valid_in: 1209600

- name: Ensure that the existing certificate is only used for digital signatures and encrypting other keys
  openssl_certificate:
    path: /etc/ssl/crt/example.com.crt
    provider: assertonly
    key_usage:
      - digitalSignature
      - keyEncipherment
    key_usage_strict: true

- name: Ensure that the existing certificate can be used for client authentication
  openssl_certificate:
    path: /etc/ssl/crt/example.com.crt
    provider: assertonly
    extended_key_usage:
      - clientAuth

- name: Ensure that the existing certificate can only be used for client authentication and time stamping
  openssl_certificate:
    path: /etc/ssl/crt/example.com.crt
    provider: assertonly
    extended_key_usage:
      - clientAuth
      - 1.3.6.1.5.5.7.3.8
    extended_key_usage_strict: true

- name: Ensure that the existing certificate has a certain domain in its subjectAltName
  openssl_certificate:
    path: /etc/ssl/crt/example.com.crt
    provider: assertonly
    subject_alt_name:
      - www.example.com
      - test.example.com
'''
RETURN = r'''
filename:
    description: Path to the generated Certificate
    returned: changed or success
    type: str
    sample: /etc/ssl/crt/www.ansible.com.crt
backup_file:
    description: Name of backup file created.
    returned: changed and if I(backup) is C(yes)
    type: str
    sample: /path/to/www.ansible.com.crt.2019-03-09@11:22~
'''
from random import randint
import abc
import datetime
import os
import traceback
from distutils.version import LooseVersion
from ansible.module_utils import crypto as crypto_utils
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils._text import to_native, to_bytes, to_text
from ansible.module_utils.compat import ipaddress as compat_ipaddress
MINIMAL_CRYPTOGRAPHY_VERSION = '1.6'
MINIMAL_PYOPENSSL_VERSION = '0.15'
PYOPENSSL_IMP_ERR = None
try:
    import OpenSSL
    from OpenSSL import crypto
    PYOPENSSL_VERSION = LooseVersion(OpenSSL.__version__)
except ImportError:
    PYOPENSSL_IMP_ERR = traceback.format_exc()
    PYOPENSSL_FOUND = False
else:
    PYOPENSSL_FOUND = True

CRYPTOGRAPHY_IMP_ERR = None
try:
    import cryptography
    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.serialization import Encoding
    from cryptography.x509 import NameAttribute, Name
    CRYPTOGRAPHY_VERSION = LooseVersion(cryptography.__version__)
except ImportError:
    CRYPTOGRAPHY_IMP_ERR = traceback.format_exc()
    CRYPTOGRAPHY_FOUND = False
else:
    CRYPTOGRAPHY_FOUND = True
class CertificateError(crypto_utils.OpenSSLObjectError):
    pass
class Certificate(crypto_utils.OpenSSLObject):

    def __init__(self, module, backend):
        super(Certificate, self).__init__(
            module.params['path'],
            module.params['state'],
            module.params['force'],
            module.check_mode
        )
        self.provider = module.params['provider']
        self.privatekey_path = module.params['privatekey_path']
        self.privatekey_passphrase = module.params['privatekey_passphrase']
        self.csr_path = module.params['csr_path']
        self.cert = None
        self.privatekey = None
        self.csr = None
        self.backend = backend
        self.module = module
        self.backup = module.params['backup']
        self.backup_file = None

    def get_relative_time_option(self, input_string, input_name):
        """Return an ASN1 formatted string if a relative timespec
        or an ASN1 formatted string is provided."""
        result = input_string
        if result.startswith("+") or result.startswith("-"):
            result_datetime = crypto_utils.convert_relative_to_datetime(
                result)
            if self.backend == 'pyopenssl':
                return result_datetime.strftime("%Y%m%d%H%M%SZ")
            elif self.backend == 'cryptography':
                return result_datetime
        if result is None:
            raise CertificateError(
                'The timespec "%s" for %s is not valid' %
                (input_string, input_name))  # fixed: arguments must be a tuple for the two %s placeholders
        if self.backend == 'cryptography':
            for date_fmt in ['%Y%m%d%H%M%SZ', '%Y%m%d%H%MZ', '%Y%m%d%H%M%S%z', '%Y%m%d%H%M%z']:
                try:
                    result = datetime.datetime.strptime(input_string, date_fmt)
                    break
                except ValueError:
                    pass
            if not isinstance(result, datetime.datetime):
                raise CertificateError(
                    'The time spec "%s" for %s is invalid' %
                    (input_string, input_name)
                )
        return result
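    # Added note (not original code): for a relative input such as "+32w1d2h",
    # the pyopenssl branch above returns an ASN.1 string like "20191021120000Z"
    # (now plus 32 weeks, 1 day and 2 hours), while the cryptography branch
    # returns the equivalent datetime.datetime; absolute ASN.1 timestamps such
    # as "20171221162800Z" are parsed via strptime.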
    def _validate_privatekey(self):
        if self.backend == 'pyopenssl':
            ctx = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_2_METHOD)
            ctx.use_privatekey(self.privatekey)
            ctx.use_certificate(self.cert)
            try:
                ctx.check_privatekey()
                return True
            except OpenSSL.SSL.Error:
                return False
        elif self.backend == 'cryptography':
            return self.cert.public_key().public_numbers() == self.privatekey.public_key().public_numbers()

    def _validate_csr(self):
        if self.backend == 'pyopenssl':
            # Verify that CSR is signed by certificate's private key
            try:
                self.csr.verify(self.cert.get_pubkey())
            except OpenSSL.crypto.Error:
                return False
            # Check subject
            if self.csr.get_subject() != self.cert.get_subject():
                return False
            # Check extensions
            csr_extensions = self.csr.get_extensions()
            cert_extension_count = self.cert.get_extension_count()
            if len(csr_extensions) != cert_extension_count:
                return False
            for extension_number in range(0, cert_extension_count):
                cert_extension = self.cert.get_extension(extension_number)
                csr_extension = filter(lambda extension: extension.get_short_name() == cert_extension.get_short_name(), csr_extensions)
                if cert_extension.get_data() != list(csr_extension)[0].get_data():
                    return False
            return True
        elif self.backend == 'cryptography':
            # Verify that CSR is signed by certificate's private key
            if not self.csr.is_signature_valid:
                return False
            if self.csr.public_key().public_numbers() != self.cert.public_key().public_numbers():
                return False
            # Check subject
            if self.csr.subject != self.cert.subject:
                return False
            # Check extensions
            cert_exts = self.cert.extensions
            csr_exts = self.csr.extensions
            if len(cert_exts) != len(csr_exts):
                return False
            for cert_ext in cert_exts:
                try:
                    csr_ext = csr_exts.get_extension_for_oid(cert_ext.oid)
                    if cert_ext != csr_ext:
                        return False
                except cryptography.x509.ExtensionNotFound as dummy:
                    return False
            return True

    def remove(self, module):
        if self.backup:
            self.backup_file = module.backup_local(self.path)
        super(Certificate, self).remove(module)

    def check(self, module, perms_required=True):
        """Ensure the resource is in its desired state."""
        state_and_perms = super(Certificate, self).check(module, perms_required)
        if not state_and_perms:
            return False
        try:
            self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
        except Exception as dummy:
            return False
        if self.privatekey_path:
            try:
                self.privatekey = crypto_utils.load_privatekey(
                    self.privatekey_path,
                    self.privatekey_passphrase,
                    backend=self.backend
                )
            except crypto_utils.OpenSSLBadPassphraseError as exc:
                raise CertificateError(exc)
            if not self._validate_privatekey():
                return False
        if self.csr_path:
            self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
            if not self._validate_csr():
                return False
        return True
class CertificateAbsent(Certificate):
    def __init__(self, module):
        super(CertificateAbsent, self).__init__(module, 'cryptography')  # backend doesn't matter

    def generate(self, module):
        pass

    def dump(self, check_mode=False):
        # Use only for absent
        result = {
            'changed': self.changed,
            'filename': self.path,
            'privatekey': self.privatekey_path,
            'csr': self.csr_path
        }
        if self.backup_file:
            result['backup_file'] = self.backup_file
        return result
class SelfSignedCertificateCryptography(Certificate):
    """Generate the self-signed certificate, using the cryptography backend"""
    def __init__(self, module):
        super(SelfSignedCertificateCryptography, self).__init__(module, 'cryptography')
        self.notBefore = self.get_relative_time_option(module.params['selfsigned_not_before'], 'selfsigned_not_before')
        self.notAfter = self.get_relative_time_option(module.params['selfsigned_not_after'], 'selfsigned_not_after')
        self.digest = crypto_utils.select_message_digest(module.params['selfsigned_digest'])
        self.version = module.params['selfsigned_version']
        self.serial_number = x509.random_serial_number()
        self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
        self._module = module
        try:
            self.privatekey = crypto_utils.load_privatekey(
                self.privatekey_path, self.privatekey_passphrase, backend=self.backend
            )
        except crypto_utils.OpenSSLBadPassphraseError as exc:
            module.fail_json(msg=to_native(exc))
        if self.digest is None:
            raise CertificateError(
                'The digest %s is not supported with the cryptography backend' % module.params['selfsigned_digest']
            )

    def generate(self, module):
        if not os.path.exists(self.privatekey_path):
            raise CertificateError(
                'The private key %s does not exist' % self.privatekey_path
            )
        if not os.path.exists(self.csr_path):
            raise CertificateError(
                'The certificate signing request file %s does not exist' % self.csr_path
            )
        if not self.check(module, perms_required=False) or self.force:
            try:
                cert_builder = x509.CertificateBuilder()
                cert_builder = cert_builder.subject_name(self.csr.subject)
                cert_builder = cert_builder.issuer_name(self.csr.subject)
                cert_builder = cert_builder.serial_number(self.serial_number)
                cert_builder = cert_builder.not_valid_before(self.notBefore)
                cert_builder = cert_builder.not_valid_after(self.notAfter)
                cert_builder = cert_builder.public_key(self.privatekey.public_key())
                for extension in self.csr.extensions:
                    cert_builder = cert_builder.add_extension(extension.value, critical=extension.critical)
            except ValueError as e:
                raise CertificateError(str(e))
            certificate = cert_builder.sign(
                private_key=self.privatekey, algorithm=self.digest,
                backend=default_backend()
            )
            self.cert = certificate
            if self.backup:
                self.backup_file = module.backup_local(self.path)
            crypto_utils.write_file(module, certificate.public_bytes(Encoding.PEM))
            self.changed = True
        else:
            self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
        file_args = module.load_file_common_arguments(module.params)
        if module.set_fs_attributes_if_different(file_args, False):
            self.changed = True

    def dump(self, check_mode=False):
        result = {
            'changed': self.changed,
            'filename': self.path,
            'privatekey': self.privatekey_path,
            'csr': self.csr_path
        }
        if self.backup_file:
            result['backup_file'] = self.backup_file
        if check_mode:
            result.update({
                'notBefore': self.notBefore.strftime("%Y%m%d%H%M%SZ"),
                'notAfter': self.notAfter.strftime("%Y%m%d%H%M%SZ"),
                'serial_number': self.serial_number,
            })
        else:
            result.update({
                'notBefore': self.cert.not_valid_before.strftime("%Y%m%d%H%M%SZ"),
                'notAfter': self.cert.not_valid_after.strftime("%Y%m%d%H%M%SZ"),
                'serial_number': self.cert.serial_number,
            })
        return result
class SelfSignedCertificate(Certificate):
    """Generate the self-signed certificate."""
    def __init__(self, module):
        super(SelfSignedCertificate, self).__init__(module, 'pyopenssl')
        self.notBefore = self.get_relative_time_option(module.params['selfsigned_not_before'], 'selfsigned_not_before')
        self.notAfter = self.get_relative_time_option(module.params['selfsigned_not_after'], 'selfsigned_not_after')
        self.digest = module.params['selfsigned_digest']
        self.version = module.params['selfsigned_version']
        self.serial_number = randint(1000, 99999)
        self.csr = crypto_utils.load_certificate_request(self.csr_path)
        try:
            self.privatekey = crypto_utils.load_privatekey(
                self.privatekey_path, self.privatekey_passphrase
            )
        except crypto_utils.OpenSSLBadPassphraseError as exc:
            module.fail_json(msg=str(exc))

    def generate(self, module):
        if not os.path.exists(self.privatekey_path):
            raise CertificateError(
                'The private key %s does not exist' % self.privatekey_path
            )
        if not os.path.exists(self.csr_path):
            raise CertificateError(
                'The certificate signing request file %s does not exist' % self.csr_path
            )
        if not self.check(module, perms_required=False) or self.force:
            cert = crypto.X509()
            cert.set_serial_number(self.serial_number)
            cert.set_notBefore(to_bytes(self.notBefore))
            cert.set_notAfter(to_bytes(self.notAfter))
            cert.set_subject(self.csr.get_subject())
            cert.set_issuer(self.csr.get_subject())
            cert.set_version(self.version - 1)
            cert.set_pubkey(self.csr.get_pubkey())
            cert.add_extensions(self.csr.get_extensions())
            cert.sign(self.privatekey, self.digest)
            self.cert = cert
            if self.backup:
                self.backup_file = module.backup_local(self.path)
            crypto_utils.write_file(module, crypto.dump_certificate(crypto.FILETYPE_PEM, self.cert))
            self.changed = True
        file_args = module.load_file_common_arguments(module.params)
        if module.set_fs_attributes_if_different(file_args, False):
            self.changed = True

    def dump(self, check_mode=False):
        result = {
            'changed': self.changed,
            'filename': self.path,
            'privatekey': self.privatekey_path,
            'csr': self.csr_path
        }
        if self.backup_file:
            result['backup_file'] = self.backup_file
        if check_mode:
            result.update({
                'notBefore': self.notBefore,
                'notAfter': self.notAfter,
                'serial_number': self.serial_number,
            })
        else:
            result.update({
                'notBefore': self.cert.get_notBefore(),
                'notAfter': self.cert.get_notAfter(),
                'serial_number': self.cert.get_serial_number(),
            })
        return result
class OwnCACertificateCryptography(Certificate):
    """Generate the own CA certificate, using the cryptography backend"""
    def __init__(self, module):
        super(OwnCACertificateCryptography, self).__init__(module, 'cryptography')
        self.notBefore = self.get_relative_time_option(module.params['ownca_not_before'], 'ownca_not_before')
        self.notAfter = self.get_relative_time_option(module.params['ownca_not_after'], 'ownca_not_after')
        self.digest = crypto_utils.select_message_digest(module.params['ownca_digest'])
        self.version = module.params['ownca_version']
        self.serial_number = x509.random_serial_number()
        self.ca_cert_path = module.params['ownca_path']
        self.ca_privatekey_path = module.params['ownca_privatekey_path']
        self.ca_privatekey_passphrase = module.params['ownca_privatekey_passphrase']
        self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
        self.ca_cert = crypto_utils.load_certificate(self.ca_cert_path, backend=self.backend)
        try:
            self.ca_private_key = crypto_utils.load_privatekey(
                self.ca_privatekey_path, self.ca_privatekey_passphrase, backend=self.backend
            )
        except crypto_utils.OpenSSLBadPassphraseError as exc:
            module.fail_json(msg=str(exc))

    def generate(self, module):
        if not os.path.exists(self.ca_cert_path):
            raise CertificateError(
                'The CA certificate %s does not exist' % self.ca_cert_path
            )
        if not os.path.exists(self.ca_privatekey_path):
            raise CertificateError(
                'The CA private key %s does not exist' % self.ca_privatekey_path
            )
        if not os.path.exists(self.csr_path):
            raise CertificateError(
                'The certificate signing request file %s does not exist' % self.csr_path
            )
        if not self.check(module, perms_required=False) or self.force:
            cert_builder = x509.CertificateBuilder()
            cert_builder = cert_builder.subject_name(self.csr.subject)
            cert_builder = cert_builder.issuer_name(self.ca_cert.subject)
            cert_builder = cert_builder.serial_number(self.serial_number)
            cert_builder = cert_builder.not_valid_before(self.notBefore)
            cert_builder = cert_builder.not_valid_after(self.notAfter)
            cert_builder = cert_builder.public_key(self.csr.public_key())
            for extension in self.csr.extensions:
                cert_builder = cert_builder.add_extension(extension.value, critical=extension.critical)
            certificate = cert_builder.sign(
                private_key=self.ca_private_key, algorithm=self.digest,
                backend=default_backend()
            )
            self.cert = certificate
            if self.backup:
                self.backup_file = module.backup_local(self.path)
            crypto_utils.write_file(module, certificate.public_bytes(Encoding.PEM))
            self.changed = True
        else:
            self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
        file_args = module.load_file_common_arguments(module.params)
        if module.set_fs_attributes_if_different(file_args, False):
            self.changed = True

    def dump(self, check_mode=False):
        result = {
            'changed': self.changed,
            'filename': self.path,
            'privatekey': self.privatekey_path,
            'csr': self.csr_path,
            'ca_cert': self.ca_cert_path,
            'ca_privatekey': self.ca_privatekey_path
        }
        if self.backup_file:
            result['backup_file'] = self.backup_file
        if check_mode:
            result.update({
                'notBefore': self.notBefore.strftime("%Y%m%d%H%M%SZ"),
                'notAfter': self.notAfter.strftime("%Y%m%d%H%M%SZ"),
                'serial_number': self.serial_number,
            })
        else:
            result.update({
                'notBefore': self.cert.not_valid_before.strftime("%Y%m%d%H%M%SZ"),
                'notAfter': self.cert.not_valid_after.strftime("%Y%m%d%H%M%SZ"),
                'serial_number': self.cert.serial_number,
            })
        return result
class OwnCACertificate(Certificate):
    """Generate the own CA certificate."""
    def __init__(self, module):
        super(OwnCACertificate, self).__init__(module, 'pyopenssl')
        self.notBefore = self.get_relative_time_option(module.params['ownca_not_before'], 'ownca_not_before')
        self.notAfter = self.get_relative_time_option(module.params['ownca_not_after'], 'ownca_not_after')
        self.digest = module.params['ownca_digest']
        self.version = module.params['ownca_version']
        self.serial_number = randint(1000, 99999)
        self.ca_cert_path = module.params['ownca_path']
        self.ca_privatekey_path = module.params['ownca_privatekey_path']
        self.ca_privatekey_passphrase = module.params['ownca_privatekey_passphrase']
        self.csr = crypto_utils.load_certificate_request(self.csr_path)
        self.ca_cert = crypto_utils.load_certificate(self.ca_cert_path)
        try:
            self.ca_privatekey = crypto_utils.load_privatekey(
                self.ca_privatekey_path, self.ca_privatekey_passphrase
            )
        except crypto_utils.OpenSSLBadPassphraseError as exc:
            module.fail_json(msg=str(exc))

    def generate(self, module):
        if not os.path.exists(self.ca_cert_path):
            raise CertificateError(
                'The CA certificate %s does not exist' % self.ca_cert_path
            )
        if not os.path.exists(self.ca_privatekey_path):
            raise CertificateError(
                'The CA private key %s does not exist' % self.ca_privatekey_path
            )
        if not os.path.exists(self.csr_path):
            raise CertificateError(
                'The certificate signing request file %s does not exist' % self.csr_path
            )
        if not self.check(module, perms_required=False) or self.force:
            cert = crypto.X509()
            cert.set_serial_number(self.serial_number)
            cert.set_notBefore(to_bytes(self.notBefore))
            cert.set_notAfter(to_bytes(self.notAfter))
            cert.set_subject(self.csr.get_subject())
            cert.set_issuer(self.ca_cert.get_subject())
            cert.set_version(self.version - 1)
            cert.set_pubkey(self.csr.get_pubkey())
            cert.add_extensions(self.csr.get_extensions())
            cert.sign(self.ca_privatekey, self.digest)
            self.cert = cert
            if self.backup:
                self.backup_file = module.backup_local(self.path)
            crypto_utils.write_file(module, crypto.dump_certificate(crypto.FILETYPE_PEM, self.cert))
            self.changed = True
        file_args = module.load_file_common_arguments(module.params)
        if module.set_fs_attributes_if_different(file_args, False):
            self.changed = True

    def dump(self, check_mode=False):
        result = {
            'changed': self.changed,
            'filename': self.path,
            'privatekey': self.privatekey_path,
            'csr': self.csr_path,
            'ca_cert': self.ca_cert_path,
            'ca_privatekey': self.ca_privatekey_path
        }
        if self.backup_file:
            result['backup_file'] = self.backup_file
        if check_mode:
            result.update({
                'notBefore': self.notBefore,
                'notAfter': self.notAfter,
                'serial_number': self.serial_number,
            })
        else:
            result.update({
                'notBefore': self.cert.get_notBefore(),
                'notAfter': self.cert.get_notAfter(),
                'serial_number': self.cert.get_serial_number(),
            })
        return result
def compare_sets(subset, superset, equality=False):
    if equality:
        return set(subset) == set(superset)
    else:
        return all(x in superset for x in subset)

def compare_dicts(subset, superset, equality=False):
    if equality:
        return subset == superset
    else:
        return all(superset.get(x) == v for x, v in subset.items())
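# Added illustration (not original code): compare_sets(['a'], ['a', 'b'])
# is True (a subset check), while compare_sets(['a'], ['a', 'b'], equality=True)
# is False because the sets differ. The strict variants of the assertonly
# checks below pass equality=True.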
NO_EXTENSION = 'no extension'
class AssertOnlyCertificateBase(Certificate):
    def __init__(self, module, backend):
        super(AssertOnlyCertificateBase, self).__init__(module, backend)
        self.signature_algorithms = module.params['signature_algorithms']
        if module.params['subject']:
            self.subject = crypto_utils.parse_name_field(module.params['subject'])
        else:
            self.subject = []
        self.subject_strict = module.params['subject_strict']
        if module.params['issuer']:
            self.issuer = crypto_utils.parse_name_field(module.params['issuer'])
        else:
            self.issuer = []
        self.issuer_strict = module.params['issuer_strict']
        self.has_expired = module.params['has_expired']
        self.version = module.params['version']
        self.key_usage = module.params['key_usage']
        self.key_usage_strict = module.params['key_usage_strict']
        self.extended_key_usage = module.params['extended_key_usage']
        self.extended_key_usage_strict = module.params['extended_key_usage_strict']
        self.subject_alt_name = module.params['subject_alt_name']
        self.subject_alt_name_strict = module.params['subject_alt_name_strict']
        self.not_before = module.params['not_before']
        self.not_after = module.params['not_after']
        self.valid_at = module.params['valid_at']
        self.invalid_at = module.params['invalid_at']
        self.valid_in = module.params['valid_in']
        if self.valid_in and not self.valid_in.startswith("+") and not self.valid_in.startswith("-"):
            try:
                int(self.valid_in)
            except ValueError:
                module.fail_json(msg='The supplied value for "valid_in" (%s) is not an integer or a valid timespec' % self.valid_in)
            self.valid_in = "+" + self.valid_in + "s"
        # Load objects
        self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
        if self.privatekey_path is not None:
            try:
                self.privatekey = crypto_utils.load_privatekey(
                    self.privatekey_path,
                    self.privatekey_passphrase,
                    backend=self.backend
                )
            except crypto_utils.OpenSSLBadPassphraseError as exc:
                raise CertificateError(exc)
        if self.csr_path is not None:
            self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
    @abc.abstractmethod
    def _validate_privatekey(self):
        pass

    @abc.abstractmethod
    def _validate_csr_signature(self):
        pass

    @abc.abstractmethod
    def _validate_csr_subject(self):
        pass

    @abc.abstractmethod
    def _validate_csr_extensions(self):
        pass

    @abc.abstractmethod
    def _validate_signature_algorithms(self):
        pass

    @abc.abstractmethod
    def _validate_subject(self):
        pass

    @abc.abstractmethod
    def _validate_issuer(self):
        pass

    @abc.abstractmethod
    def _validate_has_expired(self):
        pass

    @abc.abstractmethod
    def _validate_version(self):
        pass

    @abc.abstractmethod
    def _validate_key_usage(self):
        pass

    @abc.abstractmethod
    def _validate_extended_key_usage(self):
        pass

    @abc.abstractmethod
    def _validate_subject_alt_name(self):
        pass

    @abc.abstractmethod
    def _validate_not_before(self):
        pass

    @abc.abstractmethod
    def _validate_not_after(self):
        pass

    @abc.abstractmethod
    def _validate_valid_at(self):
        pass

    @abc.abstractmethod
    def _validate_invalid_at(self):
        pass

    @abc.abstractmethod
    def _validate_valid_in(self):
        pass
    def assertonly(self, module):
        messages = []
        if self.privatekey_path is not None:
            if not self._validate_privatekey():
                messages.append(
                    'Certificate %s and private key %s do not match' %
                    (self.path, self.privatekey_path)
                )
        if self.csr_path is not None:
            if not self._validate_csr_signature():
                messages.append(
                    'Certificate %s and CSR %s do not match: private key mismatch' %
                    (self.path, self.csr_path)
                )
            if not self._validate_csr_subject():
                messages.append(
                    'Certificate %s and CSR %s do not match: subject mismatch' %
                    (self.path, self.csr_path)
                )
            if not self._validate_csr_extensions():
                messages.append(
                    'Certificate %s and CSR %s do not match: extensions mismatch' %
                    (self.path, self.csr_path)
                )
        if self.signature_algorithms is not None:
            wrong_alg = self._validate_signature_algorithms()
            if wrong_alg:
                messages.append(
                    'Invalid signature algorithm (got %s, expected one of %s)' %
                    (wrong_alg, self.signature_algorithms)
                )
        if self.subject is not None:
            failure = self._validate_subject()
            if failure:
                dummy, cert_subject = failure
                messages.append(
                    'Invalid subject component (got %s, expected all of %s to be present)' %
                    (cert_subject, self.subject)
                )
        if self.issuer is not None:
            failure = self._validate_issuer()
            if failure:
                dummy, cert_issuer = failure
                messages.append(
                    'Invalid issuer component (got %s, expected all of %s to be present)' % (cert_issuer, self.issuer)
                )
        if self.has_expired is not None:
            cert_expired = self._validate_has_expired()
            if cert_expired != self.has_expired:
                messages.append(
                    'Certificate expiration check failed (certificate expiration is %s, expected %s)' %
                    (cert_expired, self.has_expired)
                )
        if self.version is not None:
            cert_version = self._validate_version()
            if cert_version != self.version:
                messages.append(
                    'Invalid certificate version number (got %s, expected %s)' %
                    (cert_version, self.version)
                )
        if self.key_usage is not None:
            failure = self._validate_key_usage()
            if failure == NO_EXTENSION:
                messages.append('Found no keyUsage extension')
            elif failure:
                dummy, cert_key_usage = failure
                messages.append(
                    'Invalid keyUsage components (got %s, expected all of %s to be present)' %
                    (cert_key_usage, self.key_usage)
                )
        if self.extended_key_usage is not None:
            failure = self._validate_extended_key_usage()
            if failure == NO_EXTENSION:
                messages.append('Found no extendedKeyUsage extension')
            elif failure:
                dummy, ext_cert_key_usage = failure
                messages.append(
                    'Invalid extendedKeyUsage component (got %s, expected all of %s to be present)' % (ext_cert_key_usage, self.extended_key_usage)
                )
        if self.subject_alt_name is not None:
            failure = self._validate_subject_alt_name()
            if failure == NO_EXTENSION:
                messages.append('Found no subjectAltName extension')
            elif failure:
                dummy, cert_san = failure
                messages.append(
                    'Invalid subjectAltName component (got %s, expected all of %s to be present)' %
                    (cert_san, self.subject_alt_name)
                )
        if self.not_before is not None:
            cert_not_valid_before = self._validate_not_before()
            if cert_not_valid_before != self.get_relative_time_option(self.not_before, 'not_before'):
                messages.append(
                    'Invalid not_before component (got %s, expected %s to be present)' %
                    (cert_not_valid_before, self.not_before)
                )
        if self.not_after is not None:
            cert_not_valid_after = self._validate_not_after()
            if cert_not_valid_after != self.get_relative_time_option(self.not_after, 'not_after'):
                messages.append(
                    'Invalid not_after component (got %s, expected %s to be present)' %
                    (cert_not_valid_after, self.not_after)
                )
        if self.valid_at is not None:
            not_before, valid_at, not_after = self._validate_valid_at()
            if not (not_before <= valid_at <= not_after):
                messages.append(
                    'Certificate is not valid for the specified date (%s) - not_before: %s - not_after: %s' %
                    (self.valid_at, not_before, not_after)
                )
        if self.invalid_at is not None:
            not_before, invalid_at, not_after = self._validate_invalid_at()
            if (invalid_at <= not_before) or (invalid_at >= not_after):
                messages.append(
                    'Certificate is not invalid for the specified date (%s) - not_before: %s - not_after: %s' %
                    (self.invalid_at, not_before, not_after)
                )
        if self.valid_in is not None:
            not_before, valid_in, not_after = self._validate_valid_in()
            if not not_before <= valid_in <= not_after:
                messages.append(
                    'Certificate is not valid in %s from now (that would be %s) - not_before: %s - not_after: %s' %
                    (self.valid_in, valid_in, not_before, not_after)
                )
        return messages
def generate(self, module):
"""Don't generate anything - only assert"""
messages = self.assertonly(module)
if messages:
module.fail_json(msg=' | '.join(messages))
def check(self, module, perms_required=False):
"""Ensure the resource is in its desired state."""
messages = self.assertonly(module)
return len(messages) == 0
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path,
}
return result
class AssertOnlyCertificateCryptography(AssertOnlyCertificateBase):
"""Validate the supplied cert, using the cryptography backend"""
def __init__(self, module):
super(AssertOnlyCertificateCryptography, self).__init__(module, 'cryptography')
def _validate_privatekey(self):
return self.cert.public_key().public_numbers() == self.privatekey.public_key().public_numbers()
def _validate_csr_signature(self):
if not self.csr.is_signature_valid:
return False
return self.csr.public_key().public_numbers() == self.cert.public_key().public_numbers()
def _validate_csr_subject(self):
return self.csr.subject == self.cert.subject
def _validate_csr_extensions(self):
cert_exts = self.cert.extensions
csr_exts = self.csr.extensions
if len(cert_exts) != len(csr_exts):
return False
for cert_ext in cert_exts:
try:
csr_ext = csr_exts.get_extension_for_oid(cert_ext.oid)
if cert_ext != csr_ext:
return False
except cryptography.x509.ExtensionNotFound as dummy:
return False
return True
def _validate_signature_algorithms(self):
if self.cert.signature_algorithm_oid._name not in self.signature_algorithms:
return self.cert.signature_algorithm_oid._name
def _validate_subject(self):
expected_subject = Name([NameAttribute(oid=crypto_utils.cryptography_name_to_oid(sub[0]), value=to_text(sub[1]))
for sub in self.subject])
cert_subject = self.cert.subject
if not compare_sets(expected_subject, cert_subject, self.subject_strict):
return expected_subject, cert_subject
def _validate_issuer(self):
expected_issuer = Name([NameAttribute(oid=crypto_utils.cryptography_name_to_oid(iss[0]), value=to_text(iss[1]))
for iss in self.issuer])
cert_issuer = self.cert.issuer
if not compare_sets(expected_issuer, cert_issuer, self.issuer_strict):
return self.issuer, cert_issuer
def _validate_has_expired(self):
cert_not_after = self.cert.not_valid_after
cert_expired = cert_not_after < datetime.datetime.utcnow()
return cert_expired
def _validate_version(self):
if self.cert.version == x509.Version.v1:
return 1
if self.cert.version == x509.Version.v3:
return 3
return "unknown"
def _validate_key_usage(self):
try:
current_key_usage = self.cert.extensions.get_extension_for_class(x509.KeyUsage).value
test_key_usage = dict(
digital_signature=current_key_usage.digital_signature,
content_commitment=current_key_usage.content_commitment,
key_encipherment=current_key_usage.key_encipherment,
data_encipherment=current_key_usage.data_encipherment,
key_agreement=current_key_usage.key_agreement,
key_cert_sign=current_key_usage.key_cert_sign,
crl_sign=current_key_usage.crl_sign,
encipher_only=False,
decipher_only=False
)
if test_key_usage['key_agreement']:
test_key_usage.update(dict(
encipher_only=current_key_usage.encipher_only,
decipher_only=current_key_usage.decipher_only
))
key_usages = crypto_utils.cryptography_parse_key_usage_params(self.key_usage)
if not compare_dicts(key_usages, test_key_usage, self.key_usage_strict):
                return self.key_usage, [name for name, value in test_key_usage.items() if value is True]
except cryptography.x509.ExtensionNotFound:
# This is only bad if the user specified a non-empty list
if self.key_usage:
return NO_EXTENSION
def _validate_extended_key_usage(self):
try:
current_ext_keyusage = self.cert.extensions.get_extension_for_class(x509.ExtendedKeyUsage).value
usages = [crypto_utils.cryptography_name_to_oid(usage) for usage in self.extended_key_usage]
expected_ext_keyusage = x509.ExtendedKeyUsage(usages)
if not compare_sets(expected_ext_keyusage, current_ext_keyusage, self.extended_key_usage_strict):
return [eku.value for eku in expected_ext_keyusage], [eku.value for eku in current_ext_keyusage]
except cryptography.x509.ExtensionNotFound:
# This is only bad if the user specified a non-empty list
if self.extended_key_usage:
return NO_EXTENSION
def _validate_subject_alt_name(self):
try:
current_san = self.cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
expected_san = [crypto_utils.cryptography_get_name(san) for san in self.subject_alt_name]
if not compare_sets(expected_san, current_san, self.subject_alt_name_strict):
return self.subject_alt_name, current_san
except cryptography.x509.ExtensionNotFound:
# This is only bad if the user specified a non-empty list
if self.subject_alt_name:
return NO_EXTENSION
def _validate_not_before(self):
return self.cert.not_valid_before
def _validate_not_after(self):
return self.cert.not_valid_after
def _validate_valid_at(self):
rt = self.get_relative_time_option(self.valid_at, 'valid_at')
return self.cert.not_valid_before, rt, self.cert.not_valid_after
def _validate_invalid_at(self):
        rt = self.get_relative_time_option(self.invalid_at, 'invalid_at')
return self.cert.not_valid_before, rt, self.cert.not_valid_after
def _validate_valid_in(self):
valid_in_date = self.get_relative_time_option(self.valid_in, "valid_in")
return self.cert.not_valid_before, valid_in_date, self.cert.not_valid_after
class AssertOnlyCertificate(AssertOnlyCertificateBase):
"""validate the supplied certificate."""
def __init__(self, module):
super(AssertOnlyCertificate, self).__init__(module, 'pyopenssl')
# Ensure inputs are properly sanitized before comparison.
for param in ['signature_algorithms', 'key_usage', 'extended_key_usage',
'subject_alt_name', 'subject', 'issuer', 'not_before',
'not_after', 'valid_at', 'invalid_at']:
attr = getattr(self, param)
if isinstance(attr, list) and attr:
if isinstance(attr[0], str):
setattr(self, param, [to_bytes(item) for item in attr])
elif isinstance(attr[0], tuple):
setattr(self, param, [(to_bytes(item[0]), to_bytes(item[1])) for item in attr])
            elif isinstance(attr, tuple):
                # A tuple here is assumed to hold (key, value) pairs; iterate it
                # directly (tuples have no .items() method).
                setattr(self, param, dict((to_bytes(k), to_bytes(v)) for (k, v) in attr))
elif isinstance(attr, dict):
setattr(self, param, dict((to_bytes(k), to_bytes(v)) for (k, v) in attr.items()))
elif isinstance(attr, str):
setattr(self, param, to_bytes(attr))
def _validate_privatekey(self):
ctx = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_2_METHOD)
ctx.use_privatekey(self.privatekey)
ctx.use_certificate(self.cert)
try:
ctx.check_privatekey()
return True
except OpenSSL.SSL.Error:
return False
    def _validate_csr_signature(self):
        try:
            self.csr.verify(self.cert.get_pubkey())
        except OpenSSL.crypto.Error:
            return False
        # Report success explicitly; the caller treats a falsy return value
        # as a signature mismatch.
        return True
    def _validate_csr_subject(self):
        return self.csr.get_subject() == self.cert.get_subject()
def _validate_csr_extensions(self):
csr_extensions = self.csr.get_extensions()
cert_extension_count = self.cert.get_extension_count()
if len(csr_extensions) != cert_extension_count:
return False
        for extension_number in range(0, cert_extension_count):
            cert_extension = self.cert.get_extension(extension_number)
            matching = [ext for ext in csr_extensions
                        if ext.get_short_name() == cert_extension.get_short_name()]
            # Guard against a certificate extension with no CSR counterpart,
            # which would otherwise raise an IndexError.
            if not matching or cert_extension.get_data() != matching[0].get_data():
                return False
return True
def _validate_signature_algorithms(self):
if self.cert.get_signature_algorithm() not in self.signature_algorithms:
return self.cert.get_signature_algorithm()
def _validate_subject(self):
expected_subject = [(OpenSSL._util.lib.OBJ_txt2nid(sub[0]), sub[1]) for sub in self.subject]
cert_subject = self.cert.get_subject().get_components()
current_subject = [(OpenSSL._util.lib.OBJ_txt2nid(sub[0]), sub[1]) for sub in cert_subject]
if not compare_sets(expected_subject, current_subject, self.subject_strict):
return expected_subject, current_subject
def _validate_issuer(self):
expected_issuer = [(OpenSSL._util.lib.OBJ_txt2nid(iss[0]), iss[1]) for iss in self.issuer]
cert_issuer = self.cert.get_issuer().get_components()
current_issuer = [(OpenSSL._util.lib.OBJ_txt2nid(iss[0]), iss[1]) for iss in cert_issuer]
if not compare_sets(expected_issuer, current_issuer, self.issuer_strict):
return self.issuer, cert_issuer
def _validate_has_expired(self):
# The following 3 lines are the same as the current PyOpenSSL code for cert.has_expired().
# Older version of PyOpenSSL have a buggy implementation,
# to avoid issues with those we added the code from a more recent release here.
time_string = to_native(self.cert.get_notAfter())
not_after = datetime.datetime.strptime(time_string, "%Y%m%d%H%M%SZ")
cert_expired = not_after < datetime.datetime.utcnow()
return cert_expired
def _validate_version(self):
# Version numbers in certs are off by one:
# v1: 0, v2: 1, v3: 2 ...
return self.cert.get_version() + 1
def _validate_key_usage(self):
found = False
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'keyUsage':
found = True
key_usage = [OpenSSL._util.lib.OBJ_txt2nid(key_usage) for key_usage in self.key_usage]
current_ku = [OpenSSL._util.lib.OBJ_txt2nid(usage.strip()) for usage in
to_bytes(extension, errors='surrogate_or_strict').split(b',')]
if not compare_sets(key_usage, current_ku, self.key_usage_strict):
return self.key_usage, str(extension).split(', ')
if not found:
# This is only bad if the user specified a non-empty list
if self.key_usage:
return NO_EXTENSION
def _validate_extended_key_usage(self):
found = False
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'extendedKeyUsage':
found = True
extKeyUsage = [OpenSSL._util.lib.OBJ_txt2nid(keyUsage) for keyUsage in self.extended_key_usage]
current_xku = [OpenSSL._util.lib.OBJ_txt2nid(usage.strip()) for usage in
to_bytes(extension, errors='surrogate_or_strict').split(b',')]
if not compare_sets(extKeyUsage, current_xku, self.extended_key_usage_strict):
return self.extended_key_usage, str(extension).split(', ')
if not found:
# This is only bad if the user specified a non-empty list
if self.extended_key_usage:
return NO_EXTENSION
def _normalize_san(self, san):
# Apparently OpenSSL returns 'IP address' not 'IP' as specifier when converting the subjectAltName to string
# although it won't accept this specifier when generating the CSR. (https://github.com/openssl/openssl/issues/4004)
if san.startswith('IP Address:'):
san = 'IP:' + san[len('IP Address:'):]
if san.startswith('IP:'):
ip = compat_ipaddress.ip_address(san[3:])
san = 'IP:{0}'.format(ip.compressed)
return san
def _validate_subject_alt_name(self):
found = False
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'subjectAltName':
found = True
l_altnames = [self._normalize_san(altname.strip()) for altname in
to_text(extension, errors='surrogate_or_strict').split(', ')]
sans = [self._normalize_san(to_text(san, errors='surrogate_or_strict')) for san in self.subject_alt_name]
if not compare_sets(sans, l_altnames, self.subject_alt_name_strict):
return self.subject_alt_name, l_altnames
if not found:
# This is only bad if the user specified a non-empty list
if self.subject_alt_name:
return NO_EXTENSION
def _validate_not_before(self):
return self.cert.get_notBefore()
def _validate_not_after(self):
return self.cert.get_notAfter()
def _validate_valid_at(self):
return self.cert.get_notBefore(), self.valid_at, self.cert.get_notAfter()
    def _validate_invalid_at(self):
        return self.cert.get_notBefore(), self.invalid_at, self.cert.get_notAfter()
def _validate_valid_in(self):
valid_in_asn1 = self.get_relative_time_option(self.valid_in, "valid_in")
valid_in_date = to_bytes(valid_in_asn1, errors='surrogate_or_strict')
return self.cert.get_notBefore(), valid_in_date, self.cert.get_notAfter()
class AcmeCertificate(Certificate):
"""Retrieve a certificate using the ACME protocol."""
# Since there's no real use of the backend,
# other than the 'self.check' function, we just pass the backend to the constructor
def __init__(self, module, backend):
super(AcmeCertificate, self).__init__(module, backend)
self.accountkey_path = module.params['acme_accountkey_path']
self.challenge_path = module.params['acme_challenge_path']
self.use_chain = module.params['acme_chain']
def generate(self, module):
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key %s does not exist' % self.privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not os.path.exists(self.accountkey_path):
raise CertificateError(
'The account key %s does not exist' % self.accountkey_path
)
if not os.path.exists(self.challenge_path):
raise CertificateError(
'The challenge path %s does not exist' % self.challenge_path
)
if not self.check(module, perms_required=False) or self.force:
acme_tiny_path = self.module.get_bin_path('acme-tiny', required=True)
command = [acme_tiny_path]
if self.use_chain:
command.append('--chain')
command.extend(['--account-key', self.accountkey_path])
command.extend(['--csr', self.csr_path])
command.extend(['--acme-dir', self.challenge_path])
try:
crt = module.run_command(command, check_rc=True)[1]
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, to_bytes(crt))
self.changed = True
except OSError as exc:
raise CertificateError(exc)
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'accountkey': self.accountkey_path,
'csr': self.csr_path,
}
if self.backup_file:
result['backup_file'] = self.backup_file
return result
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(type='str', default='present', choices=['present', 'absent']),
path=dict(type='path', required=True),
provider=dict(type='str', choices=['acme', 'assertonly', 'ownca', 'selfsigned']),
force=dict(type='bool', default=False,),
csr_path=dict(type='path'),
backup=dict(type='bool', default=False),
select_crypto_backend=dict(type='str', default='auto', choices=['auto', 'cryptography', 'pyopenssl']),
# General properties of a certificate
privatekey_path=dict(type='path'),
privatekey_passphrase=dict(type='str', no_log=True),
# provider: assertonly
signature_algorithms=dict(type='list', elements='str'),
subject=dict(type='dict'),
subject_strict=dict(type='bool', default=False),
issuer=dict(type='dict'),
issuer_strict=dict(type='bool', default=False),
has_expired=dict(type='bool', default=False),
version=dict(type='int'),
key_usage=dict(type='list', elements='str', aliases=['keyUsage']),
key_usage_strict=dict(type='bool', default=False, aliases=['keyUsage_strict']),
extended_key_usage=dict(type='list', elements='str', aliases=['extendedKeyUsage']),
extended_key_usage_strict=dict(type='bool', default=False, aliases=['extendedKeyUsage_strict']),
subject_alt_name=dict(type='list', elements='str', aliases=['subjectAltName']),
subject_alt_name_strict=dict(type='bool', default=False, aliases=['subjectAltName_strict']),
not_before=dict(type='str', aliases=['notBefore']),
not_after=dict(type='str', aliases=['notAfter']),
valid_at=dict(type='str'),
invalid_at=dict(type='str'),
valid_in=dict(type='str'),
# provider: selfsigned
selfsigned_version=dict(type='int', default=3),
selfsigned_digest=dict(type='str', default='sha256'),
selfsigned_not_before=dict(type='str', default='+0s', aliases=['selfsigned_notBefore']),
selfsigned_not_after=dict(type='str', default='+3650d', aliases=['selfsigned_notAfter']),
# provider: ownca
ownca_path=dict(type='path'),
ownca_privatekey_path=dict(type='path'),
ownca_privatekey_passphrase=dict(type='str', no_log=True),
ownca_digest=dict(type='str', default='sha256'),
ownca_version=dict(type='int', default=3),
ownca_not_before=dict(type='str', default='+0s'),
ownca_not_after=dict(type='str', default='+3650d'),
# provider: acme
acme_accountkey_path=dict(type='path'),
acme_challenge_path=dict(type='path'),
acme_chain=dict(type='bool', default=False),
),
supports_check_mode=True,
add_file_common_args=True,
)
try:
if module.params['state'] == 'absent':
certificate = CertificateAbsent(module)
else:
if module.params['provider'] != 'assertonly' and module.params['csr_path'] is None:
module.fail_json(msg='csr_path is required when provider is not assertonly')
base_dir = os.path.dirname(module.params['path']) or '.'
if not os.path.isdir(base_dir):
module.fail_json(
name=base_dir,
                msg='The directory %s does not exist or is not a directory' % base_dir
)
provider = module.params['provider']
backend = module.params['select_crypto_backend']
if backend == 'auto':
# Detect what backend we can use
can_use_cryptography = CRYPTOGRAPHY_FOUND and CRYPTOGRAPHY_VERSION >= LooseVersion(MINIMAL_CRYPTOGRAPHY_VERSION)
can_use_pyopenssl = PYOPENSSL_FOUND and PYOPENSSL_VERSION >= LooseVersion(MINIMAL_PYOPENSSL_VERSION)
# If cryptography is available we'll use it
if can_use_cryptography:
backend = 'cryptography'
elif can_use_pyopenssl:
backend = 'pyopenssl'
if module.params['selfsigned_version'] == 2 or module.params['ownca_version'] == 2:
module.warn('crypto backend forced to pyopenssl. The cryptography library does not support v2 certificates')
backend = 'pyopenssl'
# Fail if no backend has been found
if backend == 'auto':
module.fail_json(msg=("Can't detect any of the required Python libraries "
"cryptography (>= {0}) or PyOpenSSL (>= {1})").format(
MINIMAL_CRYPTOGRAPHY_VERSION,
MINIMAL_PYOPENSSL_VERSION))
if backend == 'pyopenssl':
if not PYOPENSSL_FOUND:
module.fail_json(msg=missing_required_lib('pyOpenSSL >= {0}'.format(MINIMAL_PYOPENSSL_VERSION)),
exception=PYOPENSSL_IMP_ERR)
if module.params['provider'] in ['selfsigned', 'ownca', 'assertonly']:
try:
getattr(crypto.X509Req, 'get_extensions')
except AttributeError:
module.fail_json(msg='You need to have PyOpenSSL>=0.15')
if provider == 'selfsigned':
certificate = SelfSignedCertificate(module)
elif provider == 'acme':
certificate = AcmeCertificate(module, 'pyopenssl')
elif provider == 'ownca':
certificate = OwnCACertificate(module)
else:
certificate = AssertOnlyCertificate(module)
elif backend == 'cryptography':
if not CRYPTOGRAPHY_FOUND:
module.fail_json(msg=missing_required_lib('cryptography >= {0}'.format(MINIMAL_CRYPTOGRAPHY_VERSION)),
exception=CRYPTOGRAPHY_IMP_ERR)
if module.params['selfsigned_version'] == 2 or module.params['ownca_version'] == 2:
module.fail_json(msg='The cryptography backend does not support v2 certificates, '
'use select_crypto_backend=pyopenssl for v2 certificates')
if provider == 'selfsigned':
certificate = SelfSignedCertificateCryptography(module)
elif provider == 'acme':
certificate = AcmeCertificate(module, 'cryptography')
elif provider == 'ownca':
certificate = OwnCACertificateCryptography(module)
else:
certificate = AssertOnlyCertificateCryptography(module)
if module.params['state'] == 'present':
if module.check_mode:
result = certificate.dump(check_mode=True)
result['changed'] = module.params['force'] or not certificate.check(module)
module.exit_json(**result)
certificate.generate(module)
else:
if module.check_mode:
result = certificate.dump(check_mode=True)
result['changed'] = os.path.exists(module.params['path'])
module.exit_json(**result)
certificate.remove(module)
result = certificate.dump()
module.exit_json(**result)
except crypto_utils.OpenSSLObjectError as exc:
module.fail_json(msg=to_native(exc))
if __name__ == "__main__":
main()
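# --- Editor's sketch (not part of the original module): the _validate_*
# helpers above delegate to compare_sets/compare_dicts, which are defined
# earlier in this file. This hypothetical stand-in only illustrates the
# assumed contract: every expected item must be present in the certificate's
# value, and in strict mode both collections must match exactly.
def _compare_sets_sketch(expected, current, strict=False):
    if strict:
        return set(expected) == set(current)
    return all(item in current for item in expected)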
|
h3biomed/ansible
|
lib/ansible/modules/crypto/openssl_certificate.py
|
Python
|
gpl-3.0
| 79,027
|
# -*- coding: utf-8 -*-
__license__ = 'GPL 3'
__copyright__ = '2009, John Schember <john@nachtimwald.com>'
__docformat__ = 'restructuredtext en'
'''
Transform OEB content into RB compatible markup.
'''
import re
from calibre import prepare_string_for_xml
from calibre.ebooks.rb import unique_name
TAGS = [
'b',
'big',
'blockquote',
'br',
'center',
'code',
'div',
'h1',
'h2',
'h3',
'h4',
'h5',
'h6',
'hr',
'i',
'li',
'ol',
'p',
'pre',
'small',
'sub',
'sup',
'ul',
]
LINK_TAGS = [
'a',
]
IMAGE_TAGS = [
'img',
]
STYLES = [
('font-weight', {'bold' : 'b', 'bolder' : 'b'}),
('font-style', {'italic' : 'i'}),
('text-align', {'center' : 'center'}),
]
class RBMLizer(object):
def __init__(self, log, name_map={}):
self.log = log
self.name_map = name_map
self.link_hrefs = {}
def extract_content(self, oeb_book, opts):
self.log.info('Converting XHTML to RB markup...')
self.oeb_book = oeb_book
self.opts = opts
return self.mlize_spine()
def mlize_spine(self):
self.link_hrefs = {}
output = [u'<HTML><HEAD><TITLE></TITLE></HEAD><BODY>']
output.append(self.get_cover_page())
output.append(u'ghji87yhjko0Caliblre-toc-placeholder-for-insertion-later8ujko0987yjk')
output.append(self.get_text())
output.append(u'</BODY></HTML>')
output = ''.join(output).replace(u'ghji87yhjko0Caliblre-toc-placeholder-for-insertion-later8ujko0987yjk', self.get_toc())
output = self.clean_text(output)
return output
def get_cover_page(self):
from calibre.ebooks.oeb.stylizer import Stylizer
from calibre.ebooks.oeb.base import XHTML
output = u''
if 'cover' in self.oeb_book.guide:
if self.name_map.get(self.oeb_book.guide['cover'].href, None):
output += '<IMG SRC="%s">' % self.name_map[self.oeb_book.guide['cover'].href]
if 'titlepage' in self.oeb_book.guide:
self.log.debug('Generating cover page...')
href = self.oeb_book.guide['titlepage'].href
item = self.oeb_book.manifest.hrefs[href]
if item.spine_position is None:
stylizer = Stylizer(item.data, item.href, self.oeb_book,
self.opts, self.opts.output_profile)
output += ''.join(self.dump_text(item.data.find(XHTML('body')), stylizer, item))
return output
def get_toc(self):
toc = [u'']
if self.opts.inline_toc:
self.log.debug('Generating table of contents...')
toc.append(u'<H1>%s</H1><UL>\n' % _('Table of Contents:'))
for item in self.oeb_book.toc:
if item.href in self.link_hrefs.keys():
toc.append('<LI><A HREF="#%s">%s</A></LI>\n' % (self.link_hrefs[item.href], item.title))
else:
                self.log.warn('Ignoring toc item: %s not found in document.' % item)
toc.append('</UL>')
return ''.join(toc)
def get_text(self):
from calibre.ebooks.oeb.stylizer import Stylizer
from calibre.ebooks.oeb.base import XHTML
output = [u'']
for item in self.oeb_book.spine:
self.log.debug('Converting %s to RocketBook HTML...' % item.href)
stylizer = Stylizer(item.data, item.href, self.oeb_book, self.opts, self.opts.output_profile)
output.append(self.add_page_anchor(item))
output += self.dump_text(item.data.find(XHTML('body')), stylizer, item)
return ''.join(output)
def add_page_anchor(self, page):
return self.get_anchor(page, '')
def get_anchor(self, page, aid):
aid = '%s#%s' % (page.href, aid)
if aid not in self.link_hrefs.keys():
self.link_hrefs[aid] = 'calibre_link-%s' % len(self.link_hrefs.keys())
aid = self.link_hrefs[aid]
return u'<A NAME="%s"></A>' % aid
def clean_text(self, text):
# Remove anchors that do not have links
anchors = set(re.findall(r'(?<=<A NAME=").+?(?="></A>)', text))
links = set(re.findall(r'(?<=<A HREF="#).+?(?=">)', text))
for unused in anchors.difference(links):
text = text.replace('<A NAME="%s"></A>' % unused, '')
return text
    def dump_text(self, elem, stylizer, page, tag_stack=None):
        from calibre.ebooks.oeb.base import XHTML_NS, barename, namespace
        # Avoid the shared mutable default argument pitfall.
        if tag_stack is None:
            tag_stack = []
        if not isinstance(elem.tag, basestring) \
                or namespace(elem.tag) != XHTML_NS:
            return [u'']
text = [u'']
style = stylizer.style(elem)
if style['display'] in ('none', 'oeb-page-head', 'oeb-page-foot') \
or style['visibility'] == 'hidden':
return [u'']
tag = barename(elem.tag)
tag_count = 0
# Process tags that need special processing and that do not have inner
# text. Usually these require an argument
if tag in IMAGE_TAGS:
if elem.attrib.get('src', None):
if page.abshref(elem.attrib['src']) not in self.name_map.keys():
self.name_map[page.abshref(elem.attrib['src'])] = unique_name('%s' % len(self.name_map.keys()), self.name_map.keys())
text.append('<IMG SRC="%s">' % self.name_map[page.abshref(elem.attrib['src'])])
rb_tag = tag.upper() if tag in TAGS else None
if rb_tag:
tag_count += 1
text.append('<%s>' % rb_tag)
tag_stack.append(rb_tag)
        # Anchor links
if tag in LINK_TAGS:
href = elem.get('href')
if href:
href = page.abshref(href)
if '://' not in href:
if '#' not in href:
href += '#'
if href not in self.link_hrefs.keys():
self.link_hrefs[href] = 'calibre_link-%s' % len(self.link_hrefs.keys())
href = self.link_hrefs[href]
text.append('<A HREF="#%s">' % href)
tag_count += 1
tag_stack.append('A')
# Anchor ids
id_name = elem.get('id')
if id_name:
text.append(self.get_anchor(page, id_name))
# Processes style information
for s in STYLES:
style_tag = s[1].get(style[s[0]], None)
if style_tag:
style_tag = style_tag.upper()
tag_count += 1
text.append('<%s>' % style_tag)
tag_stack.append(style_tag)
        # Process tags that contain text.
if hasattr(elem, 'text') and elem.text:
text.append(prepare_string_for_xml(elem.text))
for item in elem:
text += self.dump_text(item, stylizer, page, tag_stack)
close_tag_list = []
for i in range(0, tag_count):
close_tag_list.insert(0, tag_stack.pop())
text += self.close_tags(close_tag_list)
if hasattr(elem, 'tail') and elem.tail:
text.append(prepare_string_for_xml(elem.tail))
return text
def close_tags(self, tags):
text = [u'']
for i in range(0, len(tags)):
tag = tags.pop()
text.append('</%s>' % tag)
return text
|
Eksmo/calibre
|
src/calibre/ebooks/rb/rbml.py
|
Python
|
gpl-3.0
| 7,383
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-25 12:24
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('venue', '0005_auto_20170916_0701'),
]
operations = [
migrations.CreateModel(
name='EventCalander',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(default='Default Event', max_length=200)),
('calander_id', models.TextField()),
('active', models.BooleanField(default=True)),
],
),
]
|
tornadoalert/kmcoffice
|
venue/migrations/0006_eventcalander.py
|
Python
|
gpl-3.0
| 725
|
import logging
from django.utils.html import format_html
import django_tables2 as tables
from django_tables2.rows import BoundPinnedRow, BoundRow
logger = logging.getLogger(__name__)
# A cheat to force BoundPinnedRows to use the same rendering as BoundRows
# otherwise links don't work
# BoundPinnedRow._get_and_render_with = BoundRow._get_and_render_with
class MultiLinkColumn(tables.RelatedLinkColumn):
"""
Like RelatedLinkColumn but allows multiple choices of accessor to be
rendered in a hierarchy, e.g.
accessors = ['foo.bar', 'baz.bof']
text = '{instance.number}: {instance}'
In this case if 'foo.bar' resolves, it will be rendered. Otherwise
'baz.bof' will be tested to resolve, and so on. If nothing renders,
the column will be blank. The text string will resolve using instance.
"""
def __init__(self, accessors, **kwargs):
"""Here we force order by the accessors. By default MultiLinkColumns
have empty_values: () to force calculation every time.
"""
defaults = {
'order_by': accessors,
'empty_values': (),
}
defaults.update(**kwargs)
super().__init__(**defaults)
self.accessors = [tables.A(a) for a in accessors]
def compose_url(self, record, bound_column):
"""Resolve the first accessor which resolves. """
for a in self.accessors:
try:
return a.resolve(record).get_absolute_url()
except (ValueError, AttributeError):
continue
return ""
def text_value(self, record, value):
"""If self.text is set, it will be used as a format string for the
instance returned by the accessor with the keyword `instance`.
"""
for a in self.accessors:
try:
instance = a.resolve(record)
if instance is None:
raise ValueError
except ValueError:
continue
# Use self.text as a format string
if self.text:
return self.text.format(instance=instance, record=record,
value=value)
else:
return str(instance)
# Finally if no accessors were resolved, return value or a blank string
# return super().text_value(record, value)
return value or ""
class XeroLinkColumn(tables.Column):
"""Renders a badge link to the objects record in xero."""
def render(self, value, record=None):
if record.xero_id:
return format_html(
'<span class="badge progress-bar-info">'
'<a class="alert-link" role="button" target="_blank" '
'href="{href}">View in Xero</a></span>',
href=record.get_xero_url()
)
class BaseTable(tables.Table):
class Meta:
attrs = {"class": "table table-bordered table-striped table-hover "
"table-condensed"}
# @classmethod
# def set_header_color(cls, color):
# """
# Sets all column headers to have this background colour.
# """
# for column in cls.base_columns.values():
# try:
# column.attrs['th'].update(
# {'style': f'background-color:{color};'})
# except KeyError:
# column.attrs['th'] = {'style': f'background-color:{color};'}
def set_header_color(self, color):
"""
Sets all column headers to have this background colour.
"""
for column in self.columns.columns.values():
try:
column.column.attrs['th'].update(
{'style': f'background-color:{color};'})
except KeyError:
column.column.attrs['th'] = {
'style': f'background-color:{color};'}
class ModelTable(BaseTable):
class Meta(BaseTable.Meta):
exclude = ('id',)
class CurrencyColumn(tables.Column):
"""Render a table column as GBP."""
def render(self, value):
return f'£{value:,.2f}'
class NumberColumn(tables.Column):
"""Only render decimal places if necessary."""
def render(self, value):
if value is not None:
return f'{value:n}'
class ColorColumn(tables.Column):
"""Render the colour in a box."""
def __init__(self, *args, **kwargs):
"""This will ignore other attrs passed in."""
kwargs.setdefault('attrs', {'td': {'class': "small-width text-center"}})
super().__init__(*args, **kwargs)
def render(self, value):
if value:
return format_html(
'<div class="color-box" style="background:{};"></div>', value)
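# --- Editor's hedged usage sketch (assumes a configured Django project;
# "PriceTable" and its rows are hypothetical, not part of this module).
def _example_table():
    class PriceTable(BaseTable):
        name = tables.Column()
        price = CurrencyColumn()
        qty = NumberColumn()
        colour = ColorColumn()
    # django_tables2 accepts a list of dicts as table data.
    table = PriceTable([
        {'name': 'Widget', 'price': 12.5, 'qty': 3, 'colour': '#ff0000'},
    ])
    # Tint all column headers; see BaseTable.set_header_color above.
    table.set_header_color('#f5f5f5')
    return table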
|
sdolemelipone/django-crypsis
|
crypsis/tables.py
|
Python
|
gpl-3.0
| 4,778
|
from os import path
s1 = 'one seven three five one six two six six seven'
s2 = 'four zero two nine one eight five nine zero four'
s3 = 'one nine zero seven eight eight zero three two eight'
s4 = 'four nine one two one one eight five five one'
s5 = 'eight six three five four zero two one one two'
s6 = 'two three nine zero zero one six seven six four'
s7 = 'five two seven one six one three six seven zero'
s8 = 'nine seven four four four three five five eight seven'
s9 = 'six three eight five three nine eight five six five'
s10 = 'seven three two four zero one nine nine five zero'
digits = [s1, s2, s3, s4, s5, s6, s7, s8, s9, s10]
s31 = 'Excuse me'
s32 = 'Goodbye'
s33 = 'Hello'
s34 = 'How are you'
s35 = 'Nice to meet you'
s36 = 'See you'
s37 = 'I am sorry'
s38 = 'Thank you'
s39 = 'Have a good time'
s40 = 'You are welcome'
short = [s31, s32, s33, s34, s35, s36, s37, s38, s39, s40]
sentences = './splits/all.txt'
transcript_dir = '/run/media/john_tukey/download/datasets/ouluvs2/transcript_sentence/'
def get_sentence(user, sid):
with open(path.join(transcript_dir, user), 'r') as f:
contents = f.read().splitlines()
return contents[sid][:-1]
def main():
with open(sentences, 'r') as f:
contents = f.read().splitlines()
labels_dict = dict()
for line in contents:
user, sentence = line.split('_') # this looks like a neutral face. why ? <(^.^)>
key = line
sid = int(sentence[1:])
if sid <= 30:
value = digits[(sid-1)//3]
elif 30 < sid <= 60:
value = short[(sid-1)//3 - 10]
elif 60 < sid <= 70:
value = get_sentence(user, sid-61)
else:
raise Exception('Allowed sentence ids from 1 to 70')
labels_dict[key] = value
with open('labels.txt', 'w') as f:
for (k,v) in labels_dict.items():
f.write(k + ' ' + v + '\n')
if __name__ == '__main__':
main()
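# --- Editor's hedged note (not part of the original script): each utterance
# covers three sentence ids per bucket, hence the (sid - 1) // 3 indexing:
# ids 1-30 -> digits[0..9], ids 31-60 -> short[0..9], and ids 61-70 -> the
# per-speaker transcript lines 0..9.
def _sid_mapping_demo():
    assert (1 - 1) // 3 == 0 and (30 - 1) // 3 == 9
    assert (31 - 1) // 3 - 10 == 0 and (60 - 1) // 3 - 10 == 9
    assert 61 - 61 == 0 and 70 - 61 == 9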
|
georgesterpu/pyVSR
|
pyVSR/ouluvs2/scripts/create_labels.py
|
Python
|
gpl-3.0
| 1,947
|
# Created By: Eric Mc Sween
# Created On: 2007-12-12
# Copyright 2015 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
import re
from calendar import monthrange
from datetime import date, datetime, timedelta
from hscommon.trans import tr
from hscommon.util import iterdaterange
ONE_DAY = timedelta(1)
# --- Date Ranges
class DateRange:
"""A range between two dates.
    Much of the information supplied by moneyGuru is presented in a date range context: give me a
    profit report for this year, give me a running total chart for this month, and so on.
This class represents that range, supplies a couple of useful methods, and can be subclassed to
represent more specific ranges, such as :class:`YearRange`.
Some ranges are :attr:`navigable <can_navigate>`, which means that they mostly represent a
duration, which can be placed anywhere in time. We can thus "navigate" time with this range
using prev/next buttons. Example: :class:`MonthRange`.
Other ranges, such as :class:`YearToDateRange`, represent two very fixed points in time: 1st of
january to today. These are not navigable.
The two most important attributes of a date range: :attr:`start` and :attr:`end`.
A couple of operators that can be used with ranges:
* r1 == r2 -> only if start and end are the same.
* bool(r) == True -> if it's a range that makes sense (start <= end)
* r1 & r2 -> intersection range between the two.
* date in r -> if start <= date <= end
* iter(r) -> iterates over all dates between start and end (inclusive).
The thing is hashable while, at the same time, being mutable. Use your common sense: don't go
around using date ranges as dict keys and then mutate them. Also, mutation of a date range is
    a rather rare occurrence and is only needed in a couple of tight spots. Try avoiding them.
"""
def __init__(self, start, end):
#: ``datetime.date``. Start of the range.
self.start = start
#: ``datetime.date``. End of the range.
self.end = end
def __repr__(self):
start_date_str = self.start.strftime('%Y/%m/%d') if self.start.year > 1900 else 'MINDATE'
return '<%s %s - %s>' % (type(self).__name__, start_date_str, self.end.strftime('%Y/%m/%d'))
def __bool__(self):
return self.start <= self.end
def __and__(self, other):
maxstart = max(self.start, other.start)
minend = min(self.end, other.end)
return DateRange(maxstart, minend)
def __eq__(self, other):
if not isinstance(other, DateRange):
raise TypeError()
return type(self) == type(other) and self.start == other.start and self.end == other.end
def __ne__(self, other):
return not self == other
def __contains__(self, date):
return self.start <= date <= self.end
def __iter__(self):
yield from iterdaterange(self.start, self.end)
def __hash__(self):
return hash((self.start, self.end))
def adjusted(self, new_date):
"""Kinda like :meth:`around`, but it can possibly enlarge the range.
Returns ``None`` if ``new_date`` doesn't trigger any adjustments.
To be frank, that method is there only for :class:`AllTransactionsRange`. When we add a new
transaction, we call this method to possibly enlarge/reposition the range. If it isn't
        changed, we don't want to trigger all the UI updates related to a date range adjustment, so we
return ``None`` to mean "nope, nothing happened here" (which is most of the time).
If it's changed, we return the new range.
"""
return None
def around(self, date):
"""Returns a date range of the same type as ``self`` that contains ``new_date``.
        Some date ranges change when new transactions are being added or changed. This is where
it happens. Returns a new adjusted date range.
For a non-navigable range, returns ``self``.
"""
return self
def next(self):
"""Returns the next range if navigable.
For example, if we're a month range, return a range with start and end increased by a month.
"""
return self
def prev(self):
"""Returns the previous range if navigable.
For example, if we're a month range, return a range with start and end decreased by a month.
We make a bit of an exception for this method and implement it in all ranges, rather than
only navigable ones. This is because it's used in the profit report for the "Last" column
(we want to know what our results were for the last date range). Some ranges, although not
navigable, can return a meaningful result here, like :class:`YearToDateRange`, which can
return the same period last year. Others, like :class:`AllTransactionsRange`, have nothing
to return, so they return an empty range.
"""
return self
@property
def can_navigate(self):
"""Returns whether this range is navigable.
In other words, if it's possible to use prev/next to navigate in date ranges.
"""
return False
@property
def days(self):
"""The number of days in the date range."""
return (self.end - self.start).days + 1
@property
def future(self):
"""The future part of the date range.
That is, the part of the range that is later than today.
"""
today = date.today()
if self.start > today:
return self
else:
return DateRange(today + ONE_DAY, self.end)
@property
def past(self):
"""The past part of the date range.
That is, the part of the range that is earlier than today.
"""
today = date.today()
if self.end < today:
return self
else:
return DateRange(self.start, today)
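# --- Editor's hedged illustration (not part of moneyGuru's API surface) of
# the operators documented in the DateRange docstring above.
def _daterange_operators_demo():
    r1 = DateRange(date(2013, 1, 1), date(2013, 1, 31))
    r2 = DateRange(date(2013, 1, 15), date(2013, 2, 15))
    assert r1 & r2 == DateRange(date(2013, 1, 15), date(2013, 1, 31))
    assert date(2013, 1, 20) in r1
    assert not DateRange(date(2013, 2, 1), date(2013, 1, 1))  # start > end
    assert len(list(r1)) == r1.days == 31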
class NavigableDateRange(DateRange):
"""A navigable date range.
Properly implements navigation-related methods so that subclasses don't have to.
Subclasses :class:`DateRange`.
"""
def adjusted(self, new_date):
result = self.around(new_date)
if result == self:
result = None
return result
def around(self, date):
return type(self)(date)
def next(self):
return self.around(self.end + ONE_DAY)
def prev(self):
return self.around(self.start - ONE_DAY)
@property
def can_navigate(self): # if it's possible to use prev/next to navigate in date ranges
return True
class MonthRange(NavigableDateRange):
"""A navigable date range lasting one month.
``seed`` is a date for the range to wrap around.
A monthly range always starts at the first of the month and ends at the last day of that same
month.
Subclasses :class:`NavigableDateRange`.
"""
def __init__(self, seed):
if isinstance(seed, DateRange):
seed = seed.start
month = seed.month
year = seed.year
days_in_month = monthrange(year, month)[1]
start = date(year, month, 1)
end = date(year, month, days_in_month)
DateRange.__init__(self, start, end)
@property
def display(self):
"""String representation of the range (ex: "July 2013")."""
return self.start.strftime('%B %Y')
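# --- Editor's hedged sketch of prev()/next() navigation on a MonthRange.
def _month_navigation_demo():
    r = MonthRange(date(2013, 7, 15))
    assert (r.start, r.end) == (date(2013, 7, 1), date(2013, 7, 31))
    assert r.next() == MonthRange(date(2013, 8, 1))
    assert r.prev() == MonthRange(date(2013, 6, 1))
    assert r.can_navigate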
class QuarterRange(NavigableDateRange):
"""A navigable date range lasting one quarter.
``seed`` is a date for the range to wrap around.
A quarterly range always starts at the first day of the first month of the quarter and ends at
the last day of the last month of that same quarter.
Subclasses :class:`NavigableDateRange`.
"""
def __init__(self, seed):
if isinstance(seed, DateRange):
seed = seed.start
month = seed.month
year = seed.year
first_month = (month - 1) // 3 * 3 + 1
last_month = first_month + 2
days_in_last_month = monthrange(year, last_month)[1]
start = date(year, first_month, 1)
end = date(year, last_month, days_in_last_month)
DateRange.__init__(self, start, end)
@property
def display(self):
"""String representation of the range (ex: "Q2 2013")."""
return tr('Q{0} {1}').format(self.start.month // 3 + 1, self.start.year)
class YearRange(NavigableDateRange):
"""A navigable date range lasting one year.
``seed`` is a date for the range to wrap around.
We can specify ``year_start_month`` if we're weird and we want our year to start at a month
other than January.
A yearly range always starts at the first day of the first month of the year and ends at
the last day of the month 11 months later.
Subclasses :class:`NavigableDateRange`.
"""
def __init__(self, seed, year_start_month=1):
assert 1 <= year_start_month <= 12
if isinstance(seed, DateRange):
seed = seed.start
year = seed.year
if seed.month < year_start_month:
year -= 1
start = date(year, year_start_month, 1)
end = inc_year(start, 1) - ONE_DAY
DateRange.__init__(self, start, end)
def around(self, date):
return type(self)(date, year_start_month=self.start.month)
def next(self):
return YearRange(inc_year(self.start, 1), year_start_month=self.start.month)
def prev(self):
return YearRange(inc_year(self.start, -1), year_start_month=self.start.month)
@property
def display(self):
"""String representation of the range (ex: "Jan 2013 - Dec 2013")."""
return '{0} - {1}'.format(self.start.strftime('%b %Y'), self.end.strftime('%b %Y'))
class YearToDateRange(DateRange):
"""A date range starting at the beginning of the year and ending now.
We can specify ``year_start_month`` if we're weird and we want our year to start at a month
other than January.
A YTD range always starts at the first day of the first month of the year and ends today.
Subclasses :class:`DateRange`.
"""
def __init__(self, year_start_month=1):
start_year = date.today().year
if date.today().month < year_start_month:
start_year -= 1
start = date(start_year, year_start_month, 1)
end = date.today()
DateRange.__init__(self, start, end)
def prev(self):
start = inc_year(self.start, -1)
end = inc_year(self.end, -1)
return DateRange(start, end)
@property
def display(self):
"""String representation of the range (ex: "Jan 2013 - Now")."""
return tr('{0} - Now').format(self.start.strftime('%b %Y'))
def compute_ahead_months(ahead_months):
assert ahead_months < 12
if ahead_months == 0:
return date.today()
month_range = MonthRange(date.today())
for _ in range(ahead_months-1):
month_range = month_range.next()
return month_range.end
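# --- Editor's hedged check of the behaviour above: 0 returns today itself;
# n >= 1 returns the end of the month (n - 1) months after the current one.
def _compute_ahead_months_demo():
    assert compute_ahead_months(0) == date.today()
    assert compute_ahead_months(1) == MonthRange(date.today()).end
    assert compute_ahead_months(2) == MonthRange(date.today()).next().end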
class RunningYearRange(DateRange):
"""A weird date range, spanning one year, with a user-defined buffer around today.
The goal of this date range is to represent the "current situation", spanning one year. We want
to see a bit in the future (to forecast stuff) and a bit in the past, for introspection.
The ``ahead_months`` preference tells us where we place our year compared to today's date. This
preference is the number of months we want to see in the future. ``0`` means "stop the range
at the end of the current month", ``1`` means "stop the range at the end of the next month", and
so on.
Once we know our end point, then we know our start point, which is exactly one year earlier.
Subclasses :class:`DateRange`.
"""
def __init__(self, ahead_months):
end = compute_ahead_months(ahead_months)
end_plus_one = end + ONE_DAY
start = end_plus_one.replace(year=end_plus_one.year-1)
if start.day != 1:
start = inc_month(start, 1).replace(day=1)
DateRange.__init__(self, start, end)
def prev(self):
start = self.start.replace(year=self.start.year - 1)
end = self.start - ONE_DAY
return DateRange(start, end)
@property
def display(self):
"""String representation of the range (ex: "Running year (Jun - May)")."""
return tr('Running year ({0} - {1})').format(self.start.strftime('%b'), self.end.strftime('%b'))
class AllTransactionsRange(DateRange):
"""A range big enough to show all transactions (+ ``ahead_months``).
Date ranges don't know anything about transactions, so those limit dates have to be supplied
"manually". In the spirit of :class:`RunningYearRange`, we go ahead of the last transaction by
``ahead_months`` months.
"""
def __init__(self, first_date, last_date, ahead_months):
start = first_date
end = max(last_date, compute_ahead_months(ahead_months))
DateRange.__init__(self, start, end)
self.ahead_months = ahead_months
def adjusted(self, new_date):
first_date = min(self.start, new_date)
last_date = max(self.end, new_date)
result = AllTransactionsRange(
first_date=first_date, last_date=last_date, ahead_months=self.ahead_months
)
if result == self:
result = None
return result
def prev(self):
start = self.start - ONE_DAY
return DateRange(start, start) # whatever, as long as there's nothing in it
@property
def display(self):
"""String representation of the range. Always "All Transactions"."""
return tr("All Transactions")
class CustomDateRange(DateRange):
"""A date range with limits of the user's choosing.
    ``format_func`` is needed for :attr:`display`, which depends on the user locale.
"""
def __init__(self, start, end, format_func):
DateRange.__init__(self, start, end)
self._format_func = format_func
def prev(self):
end = self.start - ONE_DAY
start = end - (self.end - self.start)
return CustomDateRange(start, end, self._format_func)
@property
def display(self):
"""String representation of the range (ex: "01-01-2013 - 15-01-2013")."""
return '{0} - {1}'.format(self._format_func(self.start), self._format_func(self.end))
# --- Date Incrementing
def inc_day(date, count):
"""Increments ``date`` by ``count`` days.
``count`` can be negative.
"""
return date + timedelta(count)
def inc_week(date, count):
"""Increments ``date`` by ``count * 7`` days.
``count`` can be negative.
"""
return inc_day(date, count * 7)
def inc_month(date, count):
"""Increments ``date`` by ``count`` months.
That is, we'll end up with a date on the same day of a different month. If that's impossible
    (the 31st incremented into a 30-day month), the day will be the last of the month.
``count`` can be negative.
"""
y, m, d = date.year, date.month, date.day
m += count
y += (m - 1) // 12
m = ((m - 1) % 12) + 1
days_in_month = monthrange(y, m)[1]
d = min(d, days_in_month)
return date.replace(year=y, month=m, day=d)
def inc_year(date, count):
"""Increments ``date`` by ``count * 12`` months.
``count`` can be negative.
"""
return inc_month(date, count * 12)
def inc_weekday_in_month(date, count):
"""Increments ``date`` by ``count`` months, preserving weekday.
For example, if ``date`` is the 2nd friday of its month, then the result will be the 2nd friday
of ``count`` months later.
``count`` can be negative.
If the result doesn't exist, returns ``None``.
"""
weekday = date.weekday()
weekno = (date.day - 1) // 7
new_month = inc_month(date, count)
first_weekday = new_month.replace(day=1).weekday()
diff = weekday - first_weekday
if diff < 0:
diff += 7
try:
return new_month.replace(day=weekno * 7 + diff + 1)
except ValueError:
return None
def inc_last_weekday_in_month(date, count):
"""Increments ``date`` by ``count`` months, preserving weekday, returning the last.
For example, if ``date`` is a friday, then the result will be the last friday of ``count``
months later.
``count`` can be negative.
"""
weekday = date.weekday()
new_month = inc_month(date, count)
days_in_month = monthrange(new_month.year, new_month.month)[1]
last_weekday = new_month.replace(day=days_in_month).weekday()
diff = last_weekday - weekday
if diff < 0:
diff += 7
return new_month.replace(day=days_in_month - diff)
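# --- Editor's hedged examples of the incrementing rules documented above.
def _inc_month_demo():
    assert inc_month(date(2013, 1, 31), 1) == date(2013, 2, 28)   # day clamped
    assert inc_month(date(2013, 3, 31), -1) == date(2013, 2, 28)  # negative count
    assert inc_year(date(2012, 2, 29), 1) == date(2013, 2, 28)    # leap day
    # A 5th Tuesday has no counterpart the following month:
    assert inc_weekday_in_month(date(2013, 1, 29), 1) is None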
# --- Date Formatting
# For the functions below, the format used is a subset of the Unicode format type
# http://unicode.org/reports/tr35/tr35-6.html#Date_Format_Patterns
# Only the basics are supported: /-. yyyy yy MM M dd d
# anything else in the format should be cleaned out *before* using parse and format
# Why not just convert the Unicode format to strftime's format? Because the strftime formatting
# does not support padding-less month and day.
re_separators = re.compile(r'/|-|\.| ')
def clean_format(format):
"""Removes any format element that is not supported.
If the result is an invalid format, return a fallback format.
:param format: ``str``
:rtype: str
"""
format = DateFormat(format)
format.make_numerical()
return format.iso_format
def parse_date(string, format):
"""Parses ``string`` into a ``datetime.date`` using ``format`` (ISO).
.. seealso:: :class:`DateFormat`
"""
return DateFormat(format).parse_date(string)
def format_date(date, format):
"""Formats ``date`` using ``format`` (ISO).
.. seealso:: :class:`DateFormat`
"""
return format_year_month_day(date.year, date.month, date.day, format)
def format_year_month_day(year, month, day, format):
result = format.replace('yyyy', str(year))
result = result.replace('yy', str(year)[-2:])
result = result.replace('MM', '%02d' % month)
result = result.replace('M', '%d' % month)
result = result.replace('dd', '%02d' % day)
result = result.replace('d', '%d' % day)
return result
class DateFormat:
"""Bridge "system" date formats (``%d-%m-%Y``) and "ISO" date formats (``dd-MM-yyyy``).
We only support simple short and numerical date formats, but we can freely convert each of them
from/to iso/sys, which is rather convenient.
This class also supports date format inputs with a moderate amount of garbage in it. It looks
for a separator, a day, a month and a year element, checks their order and precision, and
ignores the rest. In case of an unreadable format, it defaults to ``dd/MM/yyyy``.
The default initialization takes an ISO format. If you want to create a date format from a sys
format, use :meth:`from_sysformat`.
"""
ISO2SYS = {'yyyy': '%Y', 'yy': '%y', 'MMM': '%b', 'MM': '%m', 'M': '%m', 'dd': '%d', 'd': '%d'}
SYS2ISO = {'%Y': 'yyyy', '%y': 'yy', '%m': 'MM', '%b': 'MMM', '%d': 'dd'}
def __init__(self, format):
if format is None:
format = ''
# Default values in case we can't parse
self.separator = '/'
self.elements = ['dd', 'MM', 'yyyy']
m_separators = re_separators.search(format)
if m_separators:
self.separator = m_separators.group()
elements = format.split(self.separator)
if all(elem in self.ISO2SYS for elem in elements):
self.elements = elements
@staticmethod
def from_sysformat(format):
"""Creates a new instance from a "sys" format (``%d-%m-%Y``)."""
if format is None:
format = ''
for key, value in DateFormat.SYS2ISO.items():
format = format.replace(key, value)
return DateFormat(format)
def copy(self):
"""Returns a copy of self."""
return DateFormat(self.iso_format)
def parse_date(self, string):
"""Parses ``string`` to a ``datetime.date``."""
# From ticket #381, in some cases the user may input a date field which is in an
# intermediate editing state such as '1 /12/2012'. They may either accept that or continue
# to edit to another valid date such as '12/12/2012'. Instead of trying to make the system
# communicate the end of editing back to the UI level, we simply remove the spurious space
# characters in the model.
if self.separator == ' ':
# In instances where the separator is a space, we can not remove all space characters.
# Instead, we replace double spaces with a single space.
            string = string.replace('  ', ' ')
else:
string = string.replace(' ', '')
return datetime.strptime(string, self.sys_format).date()
def make_numerical(self):
"""If the date format contains a non-numerical month, change it to a numerical one."""
if 'MMM' in self.elements:
self.elements[self.elements.index('MMM')] = 'MM'
@property
def iso_format(self):
"""Returns the format as ISO (``dd-MM-yyyy``)."""
return self.separator.join(self.elements)
@property
def sys_format(self):
"""Returns the format as sys (``%d-%m-%Y``)."""
repl_elems = [self.ISO2SYS[elem] for elem in self.elements]
return self.separator.join(repl_elems)
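# --- Editor's hedged round-trip example for DateFormat (not part of the
# original module).
def _dateformat_demo():
    fmt = DateFormat('dd/MM/yyyy')
    assert fmt.sys_format == '%d/%m/%Y'
    assert format_date(date(2013, 1, 5), fmt.iso_format) == '05/01/2013'
    # Spurious spaces from intermediate editing states are tolerated:
    assert parse_date('5 /1/2013', 'd/M/yyyy') == date(2013, 1, 5)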
|
fokusov/moneyguru
|
core/model/date.py
|
Python
|
gpl-3.0
| 21,665
|
import sys
import os
import time
import serial
#DTR0 - blue DATA
#RTS0 - purple STB RX
#DTR1 - (blue on 232 side) then green CLK
#CTS0 - black LD
#RTS1 - purple STB TX
delay=0.001
def getserials():
s0 = serial.Serial("/dev/ttyUSB0")
s1 = serial.Serial("/dev/ttyUSB1")
return (s0,s1)
def test():
period=0.001
i = 0
s=serial.Serial("/dev/ttyUSB1")
while True:
s.setDTR(True)
#s.setRTS(True)
time.sleep(period)
i = i + 1
if i % 10 == 0:
print s.getCTS()
s.setDTR(False)
#s.setRTS(False)
time.sleep(period)
def outbit(s0,s1,valn):
clk=True
if valn==0:
val=True
else:
val=False
print valn
s0.setDTR(val) # rx strobe
time.sleep(delay/10)
s1.setDTR(clk)
time.sleep(delay)
s1.setDTR(not clk)
time.sleep(delay)
return
def latch(s0, s1):
val=True
s0.setRTS(val)
s1.setRTS(val) # tx strobe
time.sleep(delay)
s0.setRTS(not val)
s1.setRTS(not val) # tx strobe
time.sleep(delay)
s0.setRTS(val)
s1.setRTS(val) # tx strobe
return
def enable_outputs(s0,s1):
d=[1,0,1,1]
for x in d:
outbit(s0,s1,x)
    latch(s0, s1)
return
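def send_word(s0, s1, bits):
    # Editor's hedged helper (not in the original script): shift out a bit
    # list via outbit() and latch it, mirroring the r+l+c and a+n+c
    # sequences in __main__ below.
    for x in bits:
        outbit(s0, s1, x)
    latch(s0, s1)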
if __name__=="__main__":
os.system("/usr/bin/chrt -r -p 99 %s"%os.getpid())
(s0,s1)=getserials()
# set up reference divider
# r=[1,0,0,0,0,0,0,0,0,0,0,1]
r=[1,1,1,1,1,1,1,1,1,1,1,0]
r=[0,0,1,1,1,1,0,0,0,0,1,0] # good
r=[1,1,1,1,1,1,1,0,0,0,1,0] # good 1 jan
r=[1,0,1,1,1,1,0,0,0,0,1,0] # good
r=[0,0,0,0,0,0,1,0,0,0,1,0] # good
r=[0,0,0,0,0,0,0,0,1,1,1,0] # good
# r=[0,0,0,0,0,0,0,0,0,1,0,0]
# r=[0,0,0,0,0,0,0,0,0,0,1,0]
# r=[1,1,1,1,1,1,1,1,1,1,1,1]
# r=[1,1,1,1,1,1,1,1,1,1,1,1]
# TODO figure out what L2 is - depends whether LPF is inverting or non
# inverting
l=[0,0]
c=[0,1]
s0.setRTS(True)
for x in r+l+c:
outbit(s0,s1,x)
print
    latch(s0, s1)
print
# enable_outputs(s0,s1)
#d=[1,0,1,1]
#for x in d:
# outbit(s0,s1,x)
#latch(s0)
# set up counter
# a = [1,1,0,0,0,1,1]
# 91.2 MHz minimum.
# 82.8 MHz minimum.
#a_min = [0,0,0,0,0,0,0]
a = [1,1,0,0,0,0,0]
n = [0,1,0,1,1,1,1,1,0,0,0]
n = [1,1,1,1,1,1,1,1,1,0,0]
n = [1,0,0,0,0,0,0,0,0,1,0]
# n = [1,1,1,1,1,1,1,1,1,1,1]
c = [1,0]
for x in a + n + c:
outbit(s0,s1,x)
print
    latch(s0, s1)
print
# phase comparator
# d = [0,0]
# c = [0,0]
# for x in d + c:
# outbit(s0,s1,x)
# latch(s0)
#while True:
# print s0.getCTS()
# time.sleep(1)
#test()
|
johngumb/danphone
|
wavetx2.py
|
Python
|
gpl-3.0
| 2,677
|
from django.core.management import BaseCommand
from django.db.models import Count
from zds.notification.models import Subscription
class Command(BaseCommand):
help = "Delete all but last duplicate subscriptions"
def handle(self, *args, **options):
self.stdout.write("Starting uniquifying subscriptions")
count = 0
# Find all duplicates
duplicates = (
Subscription.objects.values("user", "content_type", "object_id")
.annotate(Count("id"))
.filter(id__count__gt=1)
)
for sub in duplicates:
del sub["id__count"]
# Find PKs of duplicates, exclude the most recent one
pks = Subscription.objects.filter(**sub).order_by("-pubdate").values_list("id", flat=True)[1:]
count = count + len(pks)
# Delete each of them
for pk in pks:
Subscription.objects.filter(pk=pk).delete()
self.stdout.write(f"Deleted {count} duplicates")
|
ChantyTaguan/zds-site
|
zds/notification/management/commands/uniquify_subscriptions.py
|
Python
|
gpl-3.0
| 1,008
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##This file is part of pySequence
#############################################################################
#############################################################################
## ##
## pysequence ##
## ##
#############################################################################
#############################################################################
## Copyright (C) 2014 Cédrick FAURY - Jean-Claude FRICOU
##
## pySequence: an aid for building
## teaching sequences and progressions
## and for validating projects
# pySequence is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
# pySequence is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pySequence; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
module lien
***********
"""
import os, sys, subprocess, webbrowser
import wx
import re
from util_path import toFileEncoding, toSystemEncoding, SYSTEM_ENCODING
from widgets import messageErreur, scaleImage, Grammaire, img2str, str2img
import images
from drag_file import *
from util_path import *
from file2bmp import *
# from dpi_aware import *
SSCALE = 1.0
if sys.platform == 'darwin':
def openFolder(path):
subprocess.check_call(['open', '--', path])
elif sys.platform == 'linux2':
def openFolder(path):
subprocess.check_call(['xdg-open', '--', path])
elif sys.platform == 'win32':
def openFolder(path):
# subprocess.Popen(["explorer", path], shell=True)
subprocess.call(['explorer', path.encode(sys.getfilesystemencoding())], shell=True)
####################################################################################
#
# Link object pointing to a file, a folder, or a website
#
####################################################################################
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
class Lien():
def __init__(self, path = "", typ = ""):
        self.path = path        # Must always be encoded in FILE_ENCODING!!
        self.type = typ         # Link type ('d' = folder, 'f' = file, 'u' = URL)
        self.ok = False         # Link state (False = broken link)
        self.abs = False        # Absolute link (default = relative link)
######################################################################################
def __repr__(self):
return self.type + " : " + toSystemEncoding(self.path)
######################################################################################
def reset(self):
self.path = ""
self.type = ""
self.ok = False
self.abs = False
######################################################################################
def setPath(self, path):
self.path = path
######################################################################################
    def __ne__(self, l):
if self.type != l.type:
return True
elif self.path != l.path:
return True
return False
######################################################################################
def __eq__(self, lien):
return self.path == lien.path
######################################################################################
def DialogCreer(self, pathref):
dlg = URLDialog(None, self, pathref)
dlg.ShowModal()
dlg.Destroy()
######################################################################################
def Afficher(self, pathref, fenSeq = None):
""" Lance l'affichage du contenu du lien
<pathref> = chemin de l'application pour déterminer le chemin absolu
"""
t = self.getTexte()
print("Afficher", self.type, self.path)
path = self.GetAbsPath(pathref)
# print " ", path
# print " ", path.decode("unicode-escape")
# print " ", path.encode(sys.getfilesystemencoding())
if self.type == "f":
if os.path.exists(path):
try:
os.startfile(path)
except:
messageErreur(None, "Ouverture impossible",
"Impossible d'ouvrir le fichier\n\n%s\n" %toSystemEncoding(path))
else:
messageErreur(None, "Chemin non trouvé",
"Le fichiern'a pas été trouvé\n\n%s" %toSystemEncoding(path))
elif self.type == 'd':
if os.path.isdir(path):
openFolder(path)
# try:
# # subprocess.Popen(["explorer", path])
#
# except:
# messageErreur(None, u"Ouverture impossible",
# u"Impossible d'acc�der au dossier\n\n%s\n" %toSystemEncoding(path))
else:
messageErreur(None, "Chemin non trouvé",
"Le dossiern'a pas été trouvé\n\n%s" %toSystemEncoding(path))
elif self.type == 'u':
try:
webbrowser.open(self.path)
except:
messageErreur(None, "Ouverture impossible",
"Impossible d'ouvrir l'url\n\n%s\n" %toSystemEncoding(self.path))
elif self.type == 's':
if os.path.isfile(path):
# self.Show(False)
child = fenSeq.commandeNouveau()
child.ouvrir(path)
######################################################################################
def isOk(self):
self.EvalTypeLien()
return self.ok
######################################################################################
def EvalTypeLien(self, pathref = ""):
""" Evaluation du de self.lien.path
par rapport à pathref
et attribue un type
"""
# print("EvalTypeLien\n ", self.path, "\n ", pathref)
abspath = self.GetAbsPath(pathref)
if os.path.exists(abspath):
if os.path.isfile(abspath):
self.type = 'f'
elif os.path.isdir(abspath):
self.type = 'd'
# if not self.abs:
# self.path = relpath
# else:
# self.path = abspath
self.ok = True
elif re.match(regex, self.path):
self.type = 'u'
self.ok = True
else:
self.type = ''
self.ok = False
return
######################################################################################
def EvalLien(self, path, pathref):
""" Teste la validité du chemin <path> (SYSTEM_ENCODING)
par rapport au dossier de référence <pathref> (FILE_ENCODING)
et change self.path (FILE_ENCODING)
"""
# print("EvalLien", path, pathref, os.path.exists(pathref))
# print " >", chardet.detect(bytes(path))
# print " >", chardet.detect(bytes(pathref))
if path == "" or path.split() == []:
self.reset()
return
self.EvalTypeLien(pathref)
######################################################################################
def GetAbsPath(self, pathdoc, path = None):
""" Renvoie le chemin absolu du lien
grace au chemin du document <pathdoc>
"""
# print("GetAbsPath", path, pathref)
        if path is None:
path = self.path
# if path == ".":
# return pathdoc
cwd = os.getcwd()
if pathdoc != "":
try:
os.chdir(pathdoc)
except:
pass
# print os.path.exists(path)
# print os.path.exists(os.path.abspath(path))
# print os.path.exists(os.path.abspath(path).decode(util_path.FILE_ENCODING))
        # Ugly hack!!
# if os.path.exists(os.path.abspath(path)) and os.path.exists(os.path.abspath(path)):#.decode(util_path.FILE_ENCODING)):
# path = path.decode(util_path.FILE_ENCODING)
path = os.path.abspath(path)#.decode(util_path.FILE_ENCODING)
# print(" abs >", path)
if os.path.exists(path):
path = path
else:
# print(path, "n'existe pas !")
try:
path = os.path.join(pathdoc, path)
except UnicodeDecodeError:
pathdoc = toFileEncoding(pathdoc)
path = os.path.join(pathdoc, path)
os.chdir(cwd)
return path
######################################################################################
def GetRelPath(self, pathdoc, path = None):
""" Renvoie le chemin relatif du lien
grace au chemin du document <pathdoc>
"""
        if path is None:
path = self.path
if self.type != 'f' and self.type != 'd':
return path
# path = self.GetEncode(path)
if os.path.exists(path):
path = path
else:
try:
path = os.path.join(pathdoc, path)
except UnicodeDecodeError:
pathdoc = toFileEncoding(pathdoc)
path = os.path.join(pathdoc, path)
return path
###############################################################################################
def getTexte(self):
if self.type == 'd':
t = "dossier(s)$m"
elif self.type == 'f':
t = "fichier(s)$m"
elif self.type == 'u':
t = "URL(s)$f"
else:
t = ""
return Grammaire(t)
###############################################################################################
def getNomFichier(self):
return os.path.splitext(os.path.basename(self.path))[0]
######################################################################################
def getBranche(self, branche):
# branche.set("Lien", toSystemEncoding(os.path.normpath(self.path)))
branche.set("Lien", toSystemEncoding(self.path))
branche.set("TypeLien", self.type)
branche.set("Abs", str(self.abs))
######################################################################################
def setBranche(self, branche, pathdoc):
self.path = toFileEncoding(branche.get("Lien", ""))
if self.path == ".":
self.path = ""
# print("setBranche Lien", self.path)
# self.path = os.path.normpath(self.path)
self.type = branche.get("TypeLien", "")
self.abs = eval(branche.get("Abs", "False"))
if self.type == "" and self.path != "":
self.EvalTypeLien(pathdoc)
# print(" ", self.path)
return True
####################################################################################
#
# Objet lien vers une image
#
####################################################################################
class LienImage(Lien):
def __init__(self, path = ""):
Lien.__init__(self, path, "f")
self.image = None
######################################################################################
def getBranche(self, branche):
Lien.getBranche(self, branche)
# print(self.lien.path)
# bmp = file2bmp(self.path)
# if bmp is not None and bmp is not wx.NullBitmap:
self.setBitmap()
if self.image is not None and self.image is not wx.NullBitmap:
branche.text = img2str(self.image.ConvertToImage())
# elif self.image is not None and self.image is not wx.NullBitmap:
# branche.text = img2str(self.image.ConvertToImage())
######################################################################################
def setBranche(self, branche, pathdoc):
Lien.setBranche(self, branche, pathdoc)
self.setBitmap(str2img(branche.text))
######################################################################################
def setBitmap(self, bmp = None):
if bmp is not None and isinstance(bmp, wx.Bitmap):
self.image = bmp
elif self.ok:
bmp = file2bmp(self.path)
if bmp is not None and bmp is not wx.NullBitmap:
self.image = bmp
######################################################################################
def getBitmap(self, defaut = None):
""" Renvoie l'image au format wx.Bitmap
et met à jour l'image si le lien est Ok
priorité à l'image désignée par le lien
"""
# print("getBitmap")
# print(" ", self.type, self.ok)
self.setBitmap()
# print(" ", self.type, self.ok)
if self.image is not None and self.image is not wx.NullBitmap:
# print(" --", self.image.IsOk())
return self.image
elif isinstance(defaut, wx.Bitmap):
return defaut
else:
return wx.NullBitmap
######################################################################################
def getImageFile(self):
""" Renvoie le noms du fichier image obtenu
et un booléen indiquant s'il s'agit d'un fichier temporaire
ATTENTION : les fichiers temporaires doivent être effacés
"""
if self.ok:
nf = file2imgfile(self.path)
elif self.image is not None and self.image is not wx.NullBitmap:
nf = wximg2file(self.image)
else:
return None, None
return nf
######################################################################################
def setPath(self, path):
self.path = path
self.setBitmap()
##########################################################################################################
#
# URL selection dialog
#
##########################################################################################################
class URLDialog(wx.Dialog):
def __init__(self, parent, lien, pathref):
wx.Dialog.__init__(self, parent, -1, "Sélection de lien")
self.SetExtraStyle(wx.DIALOG_EX_CONTEXTHELP)
#        self.Create(parent, -1, "Sélection de lien")
sizer = wx.BoxSizer(wx.VERTICAL)
label = wx.StaticText(self, -1, "Sélectionner un fichier, un dossier ou une URL")
label.SetHelpText("Sélectionner un fichier, un dossier ou une URL")
sizer.Add(label, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
box = wx.BoxSizer(wx.HORIZONTAL)
label = wx.StaticText(self, -1, "Lien :")
# label.SetHelpText("This is the help text for the label")
box.Add(label, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
url = URLSelectorCombo(self, lien, pathref)
# text.SetHelpText("Here's some help text for field #1")
box.Add(url, 1, wx.ALIGN_CENTRE|wx.ALL, 5)
self.url = url
sizer.Add(box, 0, wx.GROW|wx.ALL, 5)
line = wx.StaticLine(self, -1, size=(20,-1), style=wx.LI_HORIZONTAL)
sizer.Add(line, 0, wx.GROW|wx.RIGHT|wx.TOP, 5)
btnsizer = wx.StdDialogButtonSizer()
if wx.Platform != "__WXMSW__":
btn = wx.ContextHelpButton(self)
btnsizer.AddButton(btn)
btn = wx.Button(self, wx.ID_OK)
btn.SetHelpText("Valider")
btn.SetDefault()
btnsizer.AddButton(btn)
btn = wx.Button(self, wx.ID_CANCEL)
btn.SetHelpText("Annuler")
btnsizer.AddButton(btn)
btnsizer.Realize()
sizer.Add(btnsizer, 0,wx.ALL, 5)
self.SetSizer(sizer)
sizer.Fit(self)
######################################################################################
def GetURL(self):
return self.url.GetPath()
######################################################################################
def OnPathModified(self, lien):
return
####################################################################################
#
# Custom event used to detect a path change
#
####################################################################################
myEVT_PATH_MODIFIED = wx.NewEventType()
EVT_PATH_MODIFIED = wx.PyEventBinder(myEVT_PATH_MODIFIED, 1)
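# Custom wx event pattern: the selector widgets build a PathEvent and push it
# through GetEventHandler().ProcessEvent(); parents subscribe via EVT_PATH_MODIFIED.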
#----------------------------------------------------------------------
class PathEvent(wx.PyCommandEvent):
def __init__(self, evtType, idd):
wx.PyCommandEvent.__init__(self, evtType, idd)
self.lien = None
######################################################################################
def SetPath(self, lien):
self.lien = lien
######################################################################################
def GetPath(self):
return self.lien
####################################################################################
#
# Widget for selecting a link
#
####################################################################################
class URLSelectorBase(wx.Panel):
def __init__(self, parent, lien, pathref,
dossier = True, btn_ouvrir = False,
ext = ""):
"""
lien : type Lien
pathref : chemin du dossier de référence (pour chemins relatifs)
dossier : bool pour spécifier que le lien est un dossier
ext : Extension de fichier par défaut
"""
# print("init URLSelectorBase", lien.path)
wx.Panel.__init__(self, parent, -1)
self.SetMaxSize((-1,22*SSCALE))
        self.ext = ext      # Default file extension
self.lien = lien
# self.texte = None
sizer = wx.BoxSizer(wx.VERTICAL)
lsizer = self.CreateSelector(dossier, btn_ouvrir)
sizer.Add(lsizer, 1, flag = wx.EXPAND)
self.SetSizerAndFit(sizer)
self.SetPathSeq(pathref)
###############################################################################################
def CreateSelector(self, dossier = True, btn_ouvrir = False):
        # Temporary switch to English (wxPython bug)
        # locale2EN()
        # loc = wx.GetApp().locale.GetSystemLanguage()
        # wx.GetApp().locale = wx.Locale(wx.LANGUAGE_ENGLISH)
sizer = wx.BoxSizer(wx.HORIZONTAL)
bsize = (16*SSCALE, 16*SSCALE)
# print(" ", self.lien.path)
self.texte = wx.TextCtrl(self, -1, toSystemEncoding(self.lien.path), size = (-1, bsize[1]))
if dossier:
# bt1 =wx.BitmapButton(self, 100, wx.ArtProvider.GetBitmap(wx.ART_FOLDER, wx.ART_OTHER, bsize))
bt1 =wx.BitmapButton(self, 100, scaleImage(images.Icone_folder.GetBitmap(), *bsize))
bt1.SetToolTip("Sélectionner un dossier")
self.Bind(wx.EVT_BUTTON, self.OnClick, bt1)
self.bt1 = bt1
sizer.Add(bt1)
# bt2 =wx.BitmapButton(self, 101, images.wx.ArtProvider.GetBitmap(wx.ART_NORMAL_FILE, wx.ART_OTHER, bsize))
bt2 =wx.BitmapButton(self, 101, scaleImage(images.Icone_fichier.GetBitmap(), *bsize))
bt2.SetToolTip("Sélectionner un fichier")
self.Bind(wx.EVT_BUTTON, self.OnClick, bt2)
self.Bind(wx.EVT_TEXT, self.EvtText, self.texte)
self.bt2 = bt2
self.cb = wx.CheckBox(self, label='/', pos=(20, 20))
self.cb.SetToolTip("Cocher pour utiliser un chemin absolu")
self.cb.Bind(wx.EVT_CHECKBOX, self.OnCbAbs, self.cb)
sizer.Add(bt2)
sizer.Add(self.cb, flag = wx.EXPAND)
sizer.Add(self.texte, 1, flag = wx.EXPAND)
if btn_ouvrir:
self.btnlien = wx.BitmapButton(self, -1, scaleImage(images.Icone_open.GetBitmap(), *bsize))
self.btnlien.Show(self.lien.path != "")
self.Bind(wx.EVT_BUTTON, self.OnClickLien, self.btnlien)
sizer.Add(self.btnlien)
# Pour drag&drop direct de fichiers !! (exp�rimental)
file_drop_target = MyFileDropTarget(self)
self.SetDropTarget(file_drop_target)
# locale2def()
# wx.GetApp().locale = wx.Locale(loc)
return sizer
#########################################################################################################
def sendEvent(self):
# print("sendEvent", modif, draw, verif)
evt = PathEvent(myEVT_PATH_MODIFIED, self.GetId())
evt.SetPath(self.lien)
self.GetEventHandler().ProcessEvent(evt)
###############################################################################################
def SetToolTipTexte(self):
t = self.lien.getTexte()
if self.lien.path == "":
self.texte.SetToolTip("Saisir un nom de fichier/dossier ou un URL\nou faire glisser un fichier")
elif self.lien.ok:
self.texte.SetToolTip(self.lien.path)
if hasattr(self, 'btnlien'):
self.btnlien.SetToolTip("Ouvrir le %s" %t.le_())
else:
self.texte.SetToolTip("Chemin non valide :\n"+self.lien.path)
#############################################################################
def OnClickLien(self, event):
self.lien.Afficher(self.pathref)
###############################################################################################
# Overridden from ComboCtrl, called when the combo button is clicked
def OnClick(self, event):
if event.GetId() == 100:
dlg = wx.DirDialog(self, "Sélectionner un dossier",
style=wx.DD_DEFAULT_STYLE,
defaultPath = toSystemEncoding(self.pathref)
#| wx.DD_DIR_MUST_EXIST
#| wx.DD_CHANGE_DIR
)
if dlg.ShowModal() == wx.ID_OK:
self.SetPath(dlg.GetPath(), 'd', marquerModifier = True)
dlg.Destroy()
else:
dlg = wx.FileDialog(self, "Sélectionner un fichier",
wildcard = self.ext,
defaultDir = toSystemEncoding(self.pathref),
# defaultPath = globdef.DOSSIER_EXEMPLES,
style = wx.DD_DEFAULT_STYLE
#| wx.DD_DIR_MUST_EXIST
#| wx.DD_CHANGE_DIR
)
if dlg.ShowModal() == wx.ID_OK:
self.SetPath(dlg.GetPath(), 'f', marquerModifier = True)
dlg.Destroy()
self.MiseAJour()
self.SetFocus()
###############################################################################################
def OnCbAbs(self, event):
box = event.GetEventObject()
self.lien.abs = box.GetValue()
self.lien.EvalLien(self.lien.path, self.pathref)
self.SetPath(marquerModifier = True)
event.Skip()
##########################################################################################
def EvtText(self, event):
# self.lien.EvalLien(event.GetString(), self.pathref)
# if not self.lien.ok:
# self.lien.EvalTypeLien(self.pathref)
self.SetPath(event.GetString(), marquerModifier = True)
###############################################################################################
def dropFiles(self, file_list):
for path in file_list:
self.SetPath(path, 'f', marquerModifier = True)
return
###############################################################################################
def Enable(self, etat):
self.texte.Enable(etat)
self.bt2.Enable(etat)
if hasattr(self, "bt1"):
self.bt1.Enable(etat)
##########################################################################################
def SetPathSeq(self, pathref):
self.pathref = pathref
self.lien.EvalTypeLien(self.pathref)
self.cb.SetValue(self.lien.abs)
self.SetToolTipTexte()
##########################################################################################
def SetPath(self, lien = None, typ = None, marquerModifier = False):
""" lien doit être de type 'String' encodé en SYSTEM_ENCODING
"""
# print("SetPath", self.lien)
# print " ", lien, typ
if lien is not None:
self.lien.setPath(lien)
self.lien.EvalLien(lien, self.pathref)
# print(" ", self.lien.path)
try:
self.texte.ChangeValue(self.lien.path)
        except:     # This should never happen... and yet it does!
self.lien.path = self.lien.path.decode(FILE_ENCODING)
# self.lien.path = self.lien.path.encode(SYSTEM_ENCODING)
self.texte.ChangeValue(toSystemEncoding(self.lien.path)) # On le met en SYSTEM_ENCODING
# print(" ", self.lien.ok)
self.MiseAJour()
if marquerModifier:
self.sendEvent()
##########################################################################################
def GetPath(self):
return self.lien
###############################################################################################
def MiseAJour(self):
# self.btnlien.Show(self.lien.path != "")
self.marquerValid()
self.cb.SetValue(self.lien.abs)
##########################################################################################
def marquerValid(self):
if self.lien.ok:
self.texte.SetBackgroundColour(
wx.SystemSettings.GetColour(wx.SYS_COLOUR_WINDOW))
else:
self.texte.SetBackgroundColour("pink")
self.texte.SetFocus()
if hasattr(self, 'btnlien'):
self.btnlien.Enable(self.lien.ok)
self.SetToolTipTexte()
self.Refresh()
####################################################################################
#
# Widget for selecting a link
#
####################################################################################
class URLSelectorCombo(URLSelectorBase):
def __init__(self, parent, lien, pathref, dossier = True, ext = ""):
"""
lien : type Lien
pathref : chemin du dossier de référence (pour chemins relatifs)
dossier : bool pour spécifier que le lien est un dossier
ext :
"""
# print "init URLSelectorCombo", pathref
URLSelectorBase.__init__(self, parent, lien, pathref,
dossier, btn_ouvrir = True,
ext = ext)
####################################################################################
#
# Widget for selecting a link
#
####################################################################################
class URLSelector(URLSelectorBase):
def __init__(self, parent, lien, pathref, dossier = True, ext = ""):
"""
lien : type Lien
pathref : chemin du dossier de référence (pour chemins relatifs)
dossier : bool pour spécifier que le lien est un dossier
ext :
"""
# print "init URLSelectorCombo", pathref
URLSelectorBase.__init__(self, parent, lien, pathref,
dossier = False , btn_ouvrir = False,
ext = ext)
|
cedrick-f/pySequence
|
src/lien.py
|
Python
|
gpl-3.0
| 36,961
|
from ..base import BaseShortener
from ..exceptions import ShorteningErrorException
class Shortener(BaseShortener):
"""
TinyURL.com shortener implementation
Example:
>>> import pyshorteners
>>> s = pyshorteners.Shortener()
>>> s.tinyurl.short('http://www.google.com')
'http://tinyurl.com/TEST'
>>> s.tinyurl.expand('http://tinyurl.com/test')
'http://www.google.com'
"""
api_url = "http://tinyurl.com/api-create.php"
def short(self, url):
"""Short implementation for TinyURL.com
Args:
url: the URL you want to shorten
Returns:
A string containing the shortened URL
Raises:
ShorteningErrorException: If the API returns an error as response
"""
url = self.clean_url(url)
response = self._get(self.api_url, params=dict(url=url))
if response.ok:
return response.text.strip()
raise ShorteningErrorException(response.content)
|
ellisonleao/pyshorteners
|
pyshorteners/shorteners/tinyurl.py
|
Python
|
gpl-3.0
| 1,019
|
###################################################################################################
# Author: Jodi Jones <venom@gen-x.co.nz>
# URL: https://github.com/VeNoMouS/Sick-Beard
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
###################################################################################################
import os
import re
import sys
import urllib
import generic
import datetime
import sickbeard
import exceptions
from lib import requests
from xml.sax.saxutils import escape
from sickbeard import db
from sickbeard import logger
from sickbeard import tvcache
from sickbeard.exceptions import ex
from sickbeard.common import Quality
from sickbeard.common import Overview
from sickbeard import show_name_helpers
class BitHDTVProvider(generic.TorrentProvider):
###################################################################################################
def __init__(self):
generic.TorrentProvider.__init__(self, "BitHDTV")
self.cache = BitHDTVCache(self)
self.name = "BitHDTV"
self.session = None
self.supportsBacklog = True
self.url = 'https://www.bit-hdtv.com/'
logger.log("[" + self.name + "] initializing...")
###################################################################################################
def isEnabled(self):
return sickbeard.BITHDTV
###################################################################################################
def imageName(self):
return 'bithdtv.png'
###################################################################################################
def getQuality(self, item):
quality = Quality.nameQuality(item[0])
return quality
###################################################################################################
def _get_title_and_url(self, item):
return item
###################################################################################################
def _get_airbydate_season_range(self, season):
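        # Convert an air-by-date "season" string "YYYY-MM" into the first and
        # last calendar dates of that month (handling the December rollover).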
        if season is None:
            return ()
year, month = map(int, season.split('-'))
min_date = datetime.date(year, month, 1)
if month == 12:
max_date = datetime.date(year, month, 31)
else:
max_date = datetime.date(year, month+1, 1) - datetime.timedelta(days=1)
return (min_date, max_date)
###################################################################################################
def _get_season_search_strings(self, show, season=None, scene=False):
search_string = []
if not show:
return []
myDB = db.DBConnection()
if show.air_by_date:
(min_date, max_date) = self._get_airbydate_season_range(season)
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND airdate >= ? AND airdate <= ?", [show.tvdbid, min_date.toordinal(), max_date.toordinal()])
else:
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND scene_season = ?", [show.tvdbid, season])
for sqlEp in sqlResults:
if show.getOverview(int(sqlEp["status"])) in (Overview.WANTED, Overview.QUAL):
if show.air_by_date:
for show_name in set(show_name_helpers.allPossibleShowNames(show)):
ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ str(datetime.date.fromordinal(sqlEp["airdate"])).replace('-', '.')
search_string.append(ep_string)
else:
for show_name in set(show_name_helpers.allPossibleShowNames(show)):
ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ sickbeard.config.naming_ep_type[2] % {'seasonnumber': season, 'episodenumber': int(sqlEp["episode"])}
search_string.append(ep_string)
return search_string
###################################################################################################
def _get_episode_search_strings(self, ep_obj):
search_string = []
if not ep_obj:
return []
if ep_obj.show.air_by_date:
for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ str(ep_obj.airdate).replace('-', '.')
search_string.append(ep_string)
else:
for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season, 'episodenumber': ep_obj.scene_episode}
search_string.append(ep_string)
return search_string
###################################################################################################
def _doSearch(self, search_params, show=None):
self.search_results = []
logger.log("[" + self.name + "] Performing Search: {0}".format(search_params))
search_params = search_params.replace(" ","+")
logger.log("[" + self.name + "] Searching TV Section")
self.parseResults(self.url + "torrents.php?search=" + urllib.quote(search_params) + "&cat=10")
logger.log("[" + self.name + "] Searching TV Pack Section")
self.parseResults(self.url + "torrents.php?search=" + urllib.quote(search_params) + "&cat=12")
if len(self.search_results):
logger.log("[" + self.name + "] parseResults() Some results found.")
else:
logger.log("[" + self.name + "] parseResults() No results found.")
return self.search_results
###################################################################################################
def parseResults(self, searchUrl):
data = self.getURL(searchUrl)
if data:
logger.log("[" + self.name + "] parseResults() URL: " + searchUrl, logger.DEBUG)
for torrent in re.compile("<td class=detail align=left><a title=\"(?P<title>.*?)\" href.*?<font class=small></font><a href=\"/(?P<url>.*?)\.torrent\"></a>",re.MULTILINE|re.DOTALL).finditer(data):
item = (torrent.group('title').replace('.',' ').decode('utf-8', 'ignore'), self.url + torrent.group('url') + ".torrent")
self.search_results.append(item)
logger.log("[" + self.name + "] parseResults() Title: " + torrent.group('title').decode('utf-8', 'ignore'), logger.DEBUG)
else:
logger.log("[" + self.name + "] parseResults() Error no data returned!!")
return self.search_results
###################################################################################################
def getURL(self, url, headers=None):
response = None
if not self.session:
if not self._doLogin():
return response
if not headers:
headers = []
try:
response = self.session.get(url, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
logger.log("[" + self.name + "] getURL() Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
return None
if response.status_code not in [200,302,303]:
logger.log("[" + self.name + "] getURL() requested URL - " + url +" returned status code is " + str(response.status_code), logger.ERROR)
return None
return response.content
###################################################################################################
def _doLogin(self):
login_params = {
'username': sickbeard.BITHDTV_USERNAME,
'password': sickbeard.BITHDTV_PASSWORD,
'login': 'submit'
}
self.session = requests.Session()
logger.log("[" + self.name + "] Attempting to Login")
try:
response = self.session.post(self.url + "takelogin.php", data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
raise Exception("[" + self.name + "] _doLogin() Error: " + ex(e))
return False
if re.search("Password not correct|<title>Login</title>",response.text) \
or response.status_code in [401,403]:
raise Exception("[" + self.name + "] Login Failed, Invalid username or password for " + self.name + ". Check your settings.")
return False
return True
###################################################################################################
class BitHDTVCache(tvcache.TVCache):
###################################################################################################
def __init__(self, provider):
tvcache.TVCache.__init__(self, provider)
self.minTime = 15
###################################################################################################
def _getRSSData(self):
provider.search_results = []
provider.parseResults(provider.url + "torrents.php?cat=10")
provider.parseResults(provider.url + "torrents.php?cat=12")
xml = "<rss xmlns:atom=\"http://www.w3.org/2005/Atom\" version=\"2.0\">" + \
"<channel>" + \
"<title>" + provider.name + "</title>" + \
"<link>" + provider.url + "</link>" + \
"<description>torrent search</description>" + \
"<language>en-us</language>" + \
"<atom:link href=\"" + provider.url + "\" rel=\"self\" type=\"application/rss+xml\"/>"
for title, url in provider.search_results:
xml += "<item>" + "<title>" + escape(title) + "</title>" + "<link>" + urllib.quote(url,'/,:?') + "</link>" + "</item>"
xml += "</channel> </rss>"
return xml
###################################################################################################
provider = BitHDTVProvider()
|
AlexBoogaard/Sick-Beard-Torrent-Edition
|
sickbeard/providers/bithdtv.py
|
Python
|
gpl-3.0
| 11,100
|
class Solution(object):
def twoSum(self, nums, target):
lookup = {}
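        # One-pass hash map: remember each value's index and look up the
        # complement as we go -- O(n) time, O(n) extra space.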
for i, num in enumerate(nums):
if target - num in lookup:
return [lookup[target - num], i]
lookup[num] = i
return []
if __name__ == '__main__':
print Solution().twoSum((0, 2, 7, 11, 15), 9)
|
ravyg/algorithms
|
python/1_twoSum.py
|
Python
|
gpl-3.0
| 331
|
# coding: utf-8
import os
from UserDict import DictMixin
from fnmatch import fnmatch
from datetime import datetime
from datetime import date
import pytz
from pyramid.threadlocal import get_current_registry
from pyramid.traversal import resource_path
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import DateTime, Date
from sqlalchemy import ForeignKey
from sqlalchemy import Integer, Float
from sqlalchemy import LargeBinary
from sqlalchemy import String
from sqlalchemy import Unicode
from sqlalchemy import UnicodeText
from sqlalchemy import UniqueConstraint
from sqlalchemy import Table, select
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy.orm import backref
from sqlalchemy.orm import deferred
from sqlalchemy.orm import object_mapper
from sqlalchemy.orm import relation, relationship
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.sql import and_
from sqlalchemy.sql import select
from sqlalchemy.util import classproperty
from transaction import commit
from zope.deprecation.deprecation import deprecated
from zope.interface import implements
import kotti
from kotti import Base
from kotti import DBSession
from kotti import get_settings
from kotti import metadata
from kotti.interfaces import INode
from kotti.interfaces import IContent
from kotti.interfaces import IDocument
from kotti.interfaces import IFile
from kotti.interfaces import IImage
from kotti.interfaces import IDefaultWorkflow
from kotti.migrate import stamp_heads
from kotti.security import PersistentACLMixin
from kotti.security import has_permission
from kotti.security import view_permitted, SITE_ACL
from kotti.security import Principals, get_principals
from kotti.sqla import ACLType
from kotti.sqla import JsonType
from kotti.sqla import MutationList
from kotti.sqla import NestedMutationDict
from kotti.util import ViewLink
#from kotti.util import _
from kotti.util import camel_case_to_name
from kotti.util import get_paste_items
from kotti.resources import Document
from mba import _
TZ_HK = pytz.timezone('Asia/Hong_Kong')
friend = Table(
'friends', Base.metadata,
Column('user_a_id', Integer, ForeignKey('mba_users.id'), primary_key=True),
Column('user_b_id', Integer, ForeignKey('mba_users.id'), primary_key=True),
Column('status', Integer, default=0) # 0: No friend yet, 1: friend already
)
# Meetup Invitation
class MeetupInvitation(Base):
id = Column('id', Integer, nullable=False, primary_key=True, autoincrement=True)
    inviter_id = Column('inviter_id',Integer, ForeignKey('mba_users.id'))   # inviter
inviter = relationship("MbaUser", foreign_keys="[MeetupInvitation.inviter_id]")
    invitee_id = Column('invitee_id', Integer, ForeignKey('mba_users.id') )     # invitee
invitee = relationship("MbaUser", foreign_keys="[MeetupInvitation.invitee_id]")
meetup_id = Column(Integer, ForeignKey('acts.id'))
meetup = relationship('Act')
status = Column(Integer, default=0) # 0 : unread, 1: ignore 2:accept, 3: reject 4: deleted
class UserInterest(Base):
interest_id = Column(Integer, ForeignKey('interests.id'), primary_key=True)
user_id = Column(Integer, ForeignKey('mba_users.id'), primary_key=True)
# interest = relationship('Interest', backref='interest_items')
# name = association_proxy('interest', 'name')
user = relationship("MbaUser",
backref=backref("user_interests",
cascade="all, delete-orphan")
)
interest = relationship("Interest")
interest_name = association_proxy("interest", "name")
@classmethod
def _interest_find_or_create(cls, name):
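        # Find-or-create inside no_autoflush so the lookup query does not
        # flush the half-constructed parent row to the database prematurely.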
with DBSession.no_autoflush:
interest = DBSession.query(Interest).filter_by(name=name).first()
if interest is None:
interest = Interest(name=name)
return cls(interest=interest)
class UserSkill(Base):
interest_id = Column(Integer, ForeignKey('interests.id'), primary_key=True)
user_id = Column(Integer, ForeignKey('mba_users.id'), primary_key=True)
user = relationship("MbaUser",
backref=backref("user_skills",
cascade="all, delete-orphan")
)
skill = relationship("Interest")
skill_name = association_proxy("skill", "name")
@classmethod
def _interest_find_or_create(cls, name):
with DBSession.no_autoflush:
interest = DBSession.query(Interest).filter_by(name=name).first()
if interest is None:
interest = Interest(name=name)
return cls(skill=interest)
class Interest(Base):
__table_args__ = (
UniqueConstraint('name'),
)
id = Column(Integer, primary_key=True, autoincrement=True)
name = Column(String(250), nullable=False)
description = Column(UnicodeText())
def __init__(self, name, **kw):
self.name = name
Base.__init__(self,**kw)
# def __repr__(self):
# return (self.name)
@property
def users(self):
return [rel.user for rel in self.interest_items]
#TODO for deleting
class PositionCollect(Base):
position_id = Column(Integer, ForeignKey('positions.id', ondelete='cascade'), primary_key=True)
user_id = Column(Integer, ForeignKey('mba_users.id', ondelete='cascade'), primary_key=True)
    create_date = Column(DateTime(), default=datetime.now)      # pass the callable so each row gets its own timestamp
position = relationship('Position', backref='position_items')
@classmethod
def _create(cls, p):
if p is None:
raise Exception('position can not be None')
return cls(position=p)
class Visit(Base):
user_id1 = Column('user_id1', Integer, ForeignKey('mba_users.id'), primary_key=True)
user_id2 = Column('user_id2', Integer, ForeignKey('mba_users.id'), primary_key=True)
    visit_date = Column(DateTime(), default=datetime.now)       # callable default: evaluated per row, not at import time
# 1 <--> 1
user = relationship("MbaUser", foreign_keys="[Visit.user_id2]")
class City(Base):
__tablename__ = 'city'
__table_args__ = (
UniqueConstraint('name'),
)
id = Column(Integer, primary_key=True)
name = Column(Unicode(50), nullable=False)
acts = relationship("Act", backref='city', order_by='desc(Act.creation_date)')
usercity = relationship("MbaUser", backref='city', order_by='desc(MbaUser.creation_date)')
@classmethod
def _find_or_create(cls, name):
with DBSession.no_autoflush:
obj = DBSession.query(City).filter_by(name=name).first()
if obj is None:
obj = City(name=name)
# print 'cannt find city create one'
#return cls(city=obj)
return obj
class UserBetween(Base):
city_id = Column(Integer, ForeignKey('city.id'), primary_key=True)
user_id = Column(Integer, ForeignKey('mba_users.id'), primary_key=True)
user = relationship("MbaUser",
backref=backref("user_between",
cascade="all, delete-orphan")
)
city = relationship("City")
city_name = association_proxy("city", "name")
@classmethod
def _city_find_or_create(cls, name):
city = City._find_or_create(name=name)
return cls(city=city)
class Message(Base):
id = Column(Integer, primary_key=True, autoincrement=True)
sender_id = Column(Integer, ForeignKey('mba_users.id'))
sender = relationship("MbaUser", foreign_keys="[Message.sender_id]")
reciever_id = Column(Integer, ForeignKey('mba_users.id'))
reciever = relationship("MbaUser", foreign_keys="[Message.reciever_id]")
# message type,
# 0: system message
# 1: admin message
# 2: friend private message
# 10: somebody ask to be friend
# 11: friends invite me some person
# 12: friends invite me some meetup
type = Column(Integer)
content = Column(String(500))
status = Column(Integer,default=0) # 0: unread, 1:read, 2:deleted
from mba.utils import assign_default_avatar
#This is a base class for all users
class MbaUser(Base):
__mapper_args__ = dict(
order_by='mba_users.id',
# polymorphic_on='type',
# polymorphic_identity='mba_users',
#with_polymorphic='*',
)
id = Column(Integer, primary_key=True)
name = Column(Unicode(100), unique=True)
password = Column(Unicode(100))
real_name = Column(Unicode(50))
_avatar = Column(String(100))
@property
def avatar(self):
if not self._avatar:
assign_default_avatar(self)
return self._avatar
@avatar.setter
def avatar(self, value):
self._avatar = value
    [INACTIVE, ACTIVE, TO_FULLFIL_DATA, BANNED] = [0, 1, 2, 9999]   # inactive, active, profile to complete, banned
status = Column(Integer, default=INACTIVE)
confirm_token = Column(Unicode(100))
phone = Column(String(20))
    phone_privacy_level = Column(Integer, default=5)    # 1: visible to all members, 5: visible after exchanging cards, 9: fully private
title = Column(Unicode(100), nullable=True)
    title_privacy_level = Column(Integer, default=5)    # 1: visible to all members, 5: visible after exchanging cards, 9: fully private
email = Column(Unicode(100), unique=True)
    email_privacy_level = Column(Integer, default=5)    # 1: visible to all members, 5: visible after exchanging cards, 9: fully private
groups = Column(JsonType(), nullable=True)
creation_date = Column(DateTime(), nullable=True)
last_login_date = Column(DateTime())
[MALE, FEMALE] = range(2)
sex = Column(Integer())
# type = Column(String(50), nullable=True) # change type string to integer by sunset 2015.1.27
[USER_TYPE_MBA, USER_TYPE_EMBA, USER_TYPE_MANAGER, USER_TYPE_EXPERT] = range(4)
type = Column(Integer, default=USER_TYPE_MBA)
# _interests = relationship("UserInterest", backref='user')
interests = association_proxy(
'user_interests',
'interest_name',
creator=UserInterest._interest_find_or_create,
)
special_skills = association_proxy(
'user_skills',
'skill_name',
creator=UserSkill._interest_find_or_create,
)
between = association_proxy(
'user_between',
'city_name',
creator=UserBetween._city_find_or_create,
)
company = Column(String(255), default=u"")
    company_privacy_level = Column(Integer, default=1)  # 1: visible to all members, 5: visible after exchanging cards, 9: fully private
industry = Column(String(255), default=u"")
# special_skill = Column(String(255), default=u"")
interest = Column(String(255), default=u"") # job interest
# between = Column(String(255), default=u"")
introduction = Column(String(255), default=u"")
_positions = relationship("PositionCollect", backref='user')
positions = association_proxy("_positions","position", creator=PositionCollect._create)
#http://stackoverflow.com/questions/17252816/create-many-to-many-on-one-table
#http://docs.sqlalchemy.org/en/rel_0_8/orm/relationships.html#adjacency-list-relationships
#visit = relationship("Visit", foreign_keys="[Visit.user_id2]", backref='users', order_by='desc(Visit.visit_date)')
# 1 <--> 1
visit = relationship("Visit", primaryjoin="and_(MbaUser.id==Visit.user_id1)"
, order_by='desc(Visit.visit_date)')
# 1 <--> n
visitors = association_proxy("visit", "user")
#
friendship = relationship("MbaUser", secondary=friend,
primaryjoin=id==friend.c.user_a_id,
secondaryjoin=id==friend.c.user_b_id)
invited_meetups = relationship("MeetupInvitation",
foreign_keys="[MeetupInvitation.invitee_id]" )
messages = relationship('Message',foreign_keys="[Message.reciever_id]")
# newmessages = Message.query.filter(status=10).count()
newmessages = relationship('Message',
# foreign_keys="[Message.reciever_id]",
primaryjoin="and_(MbaUser.id==Message.reciever_id, Message.status==0)")
available_invitation_codes = relationship('InvitationCode',
primaryjoin="and_(MbaUser.id==InvitationCode.sender_id,"\
"InvitationCode.status==0)")
city_id = Column(Integer, ForeignKey('city.id') ) # backref is defined in class City
city_name = association_proxy('city'
, 'name'
, creator=City._find_or_create)
is_lunar_canlender = Column(Boolean)
lunar_birthday = Column(String(255), default="")
wechat = Column(String(255),default="")
def __init__(self, name, password=None, confirm_token=None,
title=u"", email=None, groups=(), city_name='',
real_name='', birth_date=None, school=u"", school_year=0,
company=u"", industry=u"", special_skill=u"", interest=u"",
between=u"", introduction=u"", **kwargs):
self.name = name
if password is not None:
password = get_principals().hash_password(password)
self.password = password
self.confirm_token = confirm_token
self.title = title
self.email = email
self.groups = groups
self.creation_date = datetime.now(tz=None)
self.last_login_date = None
if city_name:
self.city_name = city_name
else:
# default city_name
            self.city_name = u'深圳'    # Shenzhen
self.real_name = real_name
self.birth_date = birth_date
self.school = school
self.school_year = school_year
self.company = company
self.industry = industry
self.special_skill = special_skill
self.between = between
self.introduction = introduction
super(MbaUser, self).__init__(**kwargs)
@property
def position_items(self):
return [(rel, rel.position) for rel in self._positions]
def __repr__(self): # pragma: no cover
return '<MbaUser %r>' % self.name
@property
def sex_info(self):
        if 0 == self.sex:
            return u"男"    # male
        return u"女"        # female
def add_visit(self, u):
v = None
new_v = False
try:
v = DBSession.query(Visit).filter(Visit.user_id1==self.id, Visit.user_id2==u.id).one()
except:
new_v = True
if not v:
v = Visit(user_id1=self.id, user_id2=u.id)
v.visit_date = datetime.now(tz=None)
if new_v:
DBSession.add(v)
# @classproperty
# def __mapper_args__(cls):
# return dict(
# order_by='mba_users.name',
# polymorphic_identity=camel_case_to_name(cls.__name__)
# )
# id = Column('id', Integer, ForeignKey('mba_users.id'), primary_key=True)
school = Column(String(100))
school_year = Column(Integer())
# real_name = Column(String(20)), real_name is put in superclass ,for global site, real name is needed
birth_date = Column(Date())
identify_type = Column(Integer())
identify = Column(String(30))
home_number = Column(String(20))
# location = Column(String(20)) # location is duplicated with city_name in MbaUser
salary = Column(Integer())
work_years = Column(Integer())
company_phone = Column(String(30))
keyword = Column(String(100))
job_status = Column(String(100))
[AUTH_STATUS_UNAUTH, AUTH_STATUS_AUTHED, AUTH_STATUS_FAIL, AUTH_STATUS_REQ_FOR_AUTH ] = range(4)
auth_info = Column(Integer,default=AUTH_STATUS_UNAUTH) # 0, unauthed, 1 authed, 2 authfail, ( 3 request for auth?)
auth_meetup = Column(Integer,default=AUTH_STATUS_UNAUTH)
auth_friend = Column(Integer,default=AUTH_STATUS_UNAUTH) #
auth_expert = Column(Integer,default=AUTH_STATUS_UNAUTH) #
auth_expert_req = relationship('ExpertAuthReq', uselist=False)
auth_expert_reason = association_proxy('auth_expert_req', 'reason')
@property
def auth_honesty(self):
return [self.auth_info, self.auth_meetup, self.auth_friend].count(self.AUTH_STATUS_AUTHED) >= 2
resume = relationship('Resume', backref='user', uselist=False)
#resumes = relationship('Resume', backref='user')
def __repr__(self): # pragma: no cover
return '<Student %r>' % self.name
@property
def work_info(self):
arrs = [u"小于一年", u"一到三年", u"三到五年", u"五年以上"]
if self.work_years >= 0 and self.work_years < len(arrs):
return arrs[self.work_years]
return arrs[0]
@property
def birth_old(self):
return abs(date.today().year - self.birth_date.year)+1
Student = MbaUser
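# friend_union makes the self-referential friendship symmetric: confirmed rows
# (status == 1) are selected in both (a, b) and (b, a) orientations, so
# MbaUser.all_friends below matches a friendship regardless of which side
# initiated the request.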
friend_union = select([
friend.c.user_a_id,
friend.c.user_b_id
]).where(friend.c.status==1).union(
select([
friend.c.user_b_id,
friend.c.user_a_id,
]).where(friend.c.status==1)
).alias()
MbaUser.all_friends = relationship('MbaUser',
secondary=friend_union,
primaryjoin=MbaUser.id==friend_union.c.user_a_id,
secondaryjoin=MbaUser.id==friend_union.c.user_b_id,
viewonly=True
)
my_requests = select([
friend.c.user_a_id,
friend.c.user_b_id
]).where(friend.c.status==0).alias()
MbaUser.my_requests = relationship('MbaUser',
secondary=my_requests,
primaryjoin=MbaUser.id==my_requests.c.user_a_id,
secondaryjoin=MbaUser.id==my_requests.c.user_b_id,
viewonly=True)
others_requests = select([
friend.c.user_a_id,
friend.c.user_b_id,
]).where(friend.c.status==0).alias()
MbaUser.others_requests = relationship('MbaUser',
secondary=others_requests,
primaryjoin=MbaUser.id==others_requests.c.user_b_id,
secondaryjoin=MbaUser.id==others_requests.c.user_a_id,
viewonly=True)
class Participate(Base):
__tablename__ = 'participate'
user_id = Column(Integer, ForeignKey('mba_users.id'), primary_key=True)
act_id = Column(Integer, ForeignKey('acts.id'), primary_key=True)
creation_date = Column(DateTime(), nullable=False, default=datetime.now)
    # users can rate the activity after attending it
rating = Column(Integer())
user = relationship("MbaUser", backref=backref("partin",
cascade="all, delete-orphan") )
meetup = relationship("Act")
class TeacherTag(Base):
__tablename__ = 'teacher_tags'
id = Column(Integer, primary_key=True)
title = Column(Unicode(100), unique=True, nullable=False)
def __repr__(self):
return "<TeacherTag ('%s')>" % self.title
@property
def items(self):
return [rel.item for rel in self.content_tags]
class TeacherTagToActs(Base):
__tablename__ = 'teacher_tag_to_acts'
#
tag_id = Column(Integer, ForeignKey('teacher_tags.id'), primary_key=True)
content_id = Column(Integer, ForeignKey('acts.id'), primary_key=True)
teacher_tag = relation(TeacherTag, backref=backref('teacher_tags', cascade='all'))
position = Column(Integer, nullable=False)
title = association_proxy('teacher_tag', 'title')
@classmethod
def _tag_find_or_create(cls, title):
with DBSession.no_autoflush:
tag = DBSession.query(TeacherTag).filter_by(title=title).first()
if tag is None:
tag = TeacherTag(title=title)
return cls(teacher_tag=tag)
# class ActStatus:
# PUBLIC, DRAFT, PRIVATE, CANCEL, DELETED = 0, 1, 2, 3, 4
# # public : seen by anyone
# # priveate: seen by admins
# # draft: seen by self.
# # cancel: meetup is canceled (an admin cancelled the activity for some reason)
# # deleted: meetup is deleted (cannot be deleted once people have enrolled)
# # whether the meetup is featured on the meetups page or the site home page (site-wide featuring TBD)
# class HeadLine:
#     NOT_TOP, MEETUPS_TOP, SITE_TOP = 0, 1, 2
# Activity categories
class MeetupType(Base):
id = Column(Integer, primary_key=True)
title = Column(String(100), nullable=True)
acts = relationship("Act", backref='meetup_types')
from kotti.views.edit.content import Image
#Image.acts = relationship("Act", backref('images'))
# capacity limit, fee, location, guest speakers
# Act means activity
class Act(Document):
id = Column('id', Integer, ForeignKey('documents.id'), primary_key=True)
__acl__ = SITE_ACL
[STATUS_PUBLIC, STATUS_DRAFT, STATUS_PRIVATE, STATUS_CANCEL, STATUS_DELETED] = range(5)
status = Column(Integer(), nullable=False, default=STATUS_PUBLIC)
[PUTONBANNER_NO, PUTONBANNER_MEETUP, PUTONBANNER_HOME] = range(3)
headline = Column(Integer, nullable=False, default=PUTONBANNER_NO)
meetup_type = Column(Integer, ForeignKey('meetup_types.id'))
meetup_type_title = association_proxy('meetup_types', 'title' )
    # poster image ID
# poster_id = Column(Integer, ForeignKey('images.id'))
# poster = relationship('Image')
# @property
# def poster_img(self):
# # return "/images/%s/image/" % (self.poster.name)
# return self.poster_img_url
poster_img = Column(String(200)) # change 50 to 200 , 2014.10.29 by sunset
# TODO Ignore the city ?
city_id = Column(Integer, ForeignKey('city.id'))
city_name = association_proxy('city'
, 'name'
, creator=City._find_or_create)
# Meetup start time
meetup_start_time = Column(DateTime(timezone=TZ_HK))
# Meetup finish time
meetup_finish_time = Column(DateTime(timezone=TZ_HK))
enroll_finish_time = Column(DateTime(timezone=TZ_HK))
enroll_start_time = Column(DateTime(timezone=TZ_HK))
location = Column(UnicodeText())
    # map coordinates (latitude / longitude)
latitude = Column(Float())
longitude = Column(Float())
zoomlevel = Column(Integer())
_teacher_tags = relation(
TeacherTagToActs,
backref=backref('item'),
order_by=[TeacherTagToActs.position],
collection_class=ordering_list("position"),
cascade='all, delete-orphan',
)
teachers = association_proxy(
'_teacher_tags',
'title',
creator=TeacherTagToActs._tag_find_or_create,
)
limit_num = Column(Integer(), default=500)
pay_count = Column(Integer(), default=0)
#TODO for teacher selected
type_info = Document.type_info.copy(
name=u'Act',
title=_(u'Act'),
add_view=u'add_act',
addable_to=[u'Act'],
)
_parts = relationship('Participate', backref='act')
@property
def parts(self):
return [rel.user for rel in self._parts]
_comments = relationship('Comment', backref='act')
reviews = relationship('Review', backref='act')
# @property
# def comments(self):
# return [i. for rel in self._comments]
class Review(Document):
id = Column('id', Integer, ForeignKey('documents.id'), primary_key=True)
review_to_meetup_id = Column('review_to_meetup_id', Integer)
type_info = Document.type_info.copy(
name=u'Review',
title=_(u'Review'),
add_view=u'add_review',
addable_to=[u'Review'],
)
    comments = relationship('Comment', backref='review')
class Infomation(Document):
    '''This class stores the information recommended by admins.'''
id = Column('id', Integer, ForeignKey('documents.id'), primary_key=True)
[STATUS_PUBLIC, STATUS_DRAFT, STATUS_PRIVATE, STATUS_CANCEL, STATUS_DELETED] = range(5)
status = Column(Integer(), nullable=False, default=STATUS_PUBLIC)
type_info = Document.type_info.copy(
name=u'Infomation',
title=_(u'推荐信息'),
add_view=u'add_info',
addable_to=[u'Infomation'],
)
comments = relationship('Comment', backref='infomation')
class Comment(Base):
__tablename__ = 'comments'
id = Column(Integer, primary_key=True)
TYPE_MEETUP = 0
TYPE_MEETUP_REVIEW = 1
TYPE_INFOMATION = 2
    # Comment type: 0 = meetup comment, 1 = meetup review comment, 2 = recommended-info comment
type = Column(Integer, default=TYPE_MEETUP)
    # ID of the meetup / review document this comment belongs to
document_id = Column(Integer, ForeignKey('documents.id'))
user_id = Column(Integer, ForeignKey('mba_users.id'))
content = Column(String(500), nullable=True)
user = relationship("MbaUser", backref='comment')
post_date = Column(DateTime(), nullable=False, default=datetime.now)
# Tables about resume
# Education n -- 1 Resume
class Education(Base):
id = Column(Integer, primary_key=True)
resume_id = Column(Integer, ForeignKey('resumes.id'))
school_name = Column(String(100), nullable=False)
location = Column(String(100))
start_date = Column(Date())
finish_date = Column(Date())
major = Column(String(30))
degree = Column(Integer())
abroad = Column(Boolean)
summary = Column(UnicodeText())
# Job n -- 1 Resume
class Job(Base):
id = Column(Integer, primary_key=True)
resume_id = Column(Integer, ForeignKey('resumes.id'))
location = Column(String(200))
industy = Column(String(100))
industy_type = Column(Integer())
industy_scale = Column(Integer())
duty = Column(String(200))
start_date = Column(Date())
finish_date = Column(Date())
description = Column(UnicodeText())
is_current = Column(Boolean, default=False)
class Train(Base):
id = Column(Integer, primary_key=True)
resume_id = Column(Integer, ForeignKey('resumes.id'))
start_date = Column(DateTime())
finish_date = Column(DateTime())
location = Column(String(200))
course = Column(String(100))
certificate = Column(String(50))
summary = Column(UnicodeText())
class ProjectInfo(Base):
id = Column(Integer, primary_key=True)
resume_id = Column(Integer, ForeignKey('resumes.id'))
start_date = Column(DateTime())
finish_date = Column(DateTime())
name = Column(String(200))
tool = Column(String(200))
hardware = Column(String(200))
software = Column(String(200))
description = Column(UnicodeText())
duty = Column(UnicodeText)
class Language(Base):
id = Column(Integer, primary_key=True)
resume_id = Column(Integer, ForeignKey('resumes.id'))
lang_type = Column(String(20))
grasp = Column(String(20))
read_cap = Column(String(20))
write_cap = Column(String(20))
# resume many to many skill
class ResumeSkill(Base):
resume_id = Column(Integer, ForeignKey('resumes.id'), primary_key=True)
skill_id = Column(Integer, ForeignKey('skills.id'), primary_key=True)
skill = relationship('Skill', backref='resume_items')
name = association_proxy('skill', 'name')
@classmethod
def _skill_find_or_create(cls, name):
with DBSession.no_autoflush:
skill = DBSession.query(Skill).filter_by(name=name).first()
if skill is None:
skill = Skill(name=name)
return cls(skill=skill)
class Skill(Base):
__table_args__ = (
UniqueConstraint('name'),
)
id = Column(Integer, primary_key=True)
name = Column(String(250))
@property
def resumes(self):
return [rel.resume for rel in self.resume_items]
class Resume(Base):
#id = Column(Integer, primary_key=True)
#user_id = Column(Integer, ForeignKey('mba_users.id'))
id = Column(Integer, ForeignKey('mba_users.id'), primary_key=True)
title = Column(String(250))
create_date = Column(DateTime(), default=datetime.utcnow)
modify_date = Column(DateTime(), default=datetime.utcnow, onupdate=datetime.utcnow)
_skills = relationship('ResumeSkill', backref='resume')
skills = association_proxy(
'_skills',
'name',
creator=ResumeSkill._skill_find_or_create,
)
    # Comma-separated job ids, e.g. "5,6,3,1"
job_order = Column(String(100), nullable=True)
jobs = relationship('Job', cascade="save-update, merge, delete")
projects = relationship('ProjectInfo', cascade="save-update, merge, delete")
educations = relationship('Education', cascade="save-update, merge, delete")
trains = relationship('Train', cascade="save-update, merge, delete")
langs = relationship('Language', cascade="save-update, merge, delete")
publicity = Column(Boolean, default=True)
    def order_jobs(self):
        jobs = self.jobs
        # Guard against an unset job_order (the column is nullable)
        if not self.job_order:
            return jobs
        ids = dict([(obj.id, obj) for obj in jobs])
        rlts = []
        for s in self.job_order.split(','):
            id = int(s)
            if id in ids:
                rlts.append(ids[id])
        return rlts + list(set(jobs).difference(set(rlts)))
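    # Worked sketch of order_jobs (hypothetical ids): with jobs whose ids are
    # [1, 3, 5, 6] and job_order == "5,6,3", the listed jobs come back in the
    # order 5, 6, 3, followed by the remaining job (id 1) in arbitrary set order.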
def get_act_root(request=None):
return DBSession.query(Document).filter_by(name="meetup").one()
def get_review_root(request=None):
return DBSession.query(Document).filter_by(name="review").one()
def get_image_root(request=None):
return DBSession.query(Document).filter_by(name="images").one()
def get_info_root(request=None):
return DBSession.query(Document).filter_by(name="infomation").one()
class CompanyInfo(Base):
id = Column('id', Integer, primary_key=True)
name = Column(String(100))
scope = Column(String(200))
industry = Column(String(200))
type_info = Column(String(200))
location = Column(String(300))
description = Column(UnicodeText())
# Resumes users submitted for a position
class PositionResume(Base):
position_id = Column(Integer, ForeignKey('positions.id'), primary_key=True)
resume_id = Column(Integer, ForeignKey('resumes.id'), primary_key=True)
    create_date = Column(DateTime(), default=datetime.utcnow)  # pass the callable, not a fixed timestamp
    # Feedback status
status = Column(Integer())
resume = relationship('Resume', backref='postition_items')
user = association_proxy('resume', 'user')
# Job position table; see views/active.py
class Position(Document):
id = Column('id', Integer, ForeignKey('documents.id'), primary_key=True)
company_id = Column(Integer, ForeignKey('company_infos.id'))
city_name = Column(String(100))
degree = Column(String(100))
experience = Column(String(100))
salary = Column(Integer(), default=0)
    # Use callables so the default is evaluated per row, not once at import time
    public_date = Column(Date(), default=lambda: datetime.now(tz=None).date())
    end_date = Column(Date(), default=lambda: datetime.now(tz=None).date())
location = Column(UnicodeText())
    # Headhunter / company
hunting_type = Column(Integer(), default=0)
[STATUS_PUBLIC, STATUS_DRAFT] = range(2)
status = Column(Integer(), nullable=False, default=STATUS_DRAFT)
resumes = relationship('PositionResume', backref='position')
users = association_proxy('resumes', 'user')
company = relationship('CompanyInfo', backref='postitions')
company_name = association_proxy('company', 'name')
industry = association_proxy('company', 'industry')
    create_date = Column(DateTime(), default=datetime.now)
type_info = Document.type_info.copy(
name=u'Position',
title=_(u'Position'),
add_view=u'add_position',
addable_to=[u'Position'],
)
row2dict = lambda r: {c.name: getattr(r, c.name) for c in r.__table__.columns}
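# Usage sketch (assumes a mapped instance, e.g. one Banner row):
#     banner = DBSession.query(Banner).first()
#     row2dict(banner)   # -> {'id': 1, 'title': u'...', ...} (column names to values)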
class Banner(Base):
id = Column(Integer, primary_key=True)
    banner_position = Column(Integer, default=0)  # 0: home banner, 1: meetup, 2: job; currently home banner is the only one used
[TYPE_HOME, TYPE_MEETUP, TYPE_JOB ] = [0, 1 , 2]
type = Column(Integer, default=TYPE_HOME) # 0: home Banner, 1:Meetup Banner, 2: Job Banner
title = Column(String(100))
img_url = Column(String(100))
link_url = Column(String(200))
    htmlcontent = Column(String(500), default='')  # empty-string default for a String column (was 0)
    last_edit_date = Column(Date(), default=lambda: datetime.now(tz=None).date())
[VALID, INVALID ] = [1, 0]
    status = Column(Integer, default=VALID)  # 1: active, 0: inactive
class ValidationSms(Base):
    '''Table of the validation SMS messages sent during registration and password reset'''
__tablename__ = "register_sms"
[TYPE_REGISTER, TYPE_RESET_PASSWORD] = [0,1]
id = Column(Integer, primary_key=True)
phonenum = Column(String(20))
    # type = Column(Integer, default=TYPE_REGISTER)  # TYPE_REGISTER on sign-up, TYPE_RESET_PASSWORD on password reset
    validate_code = Column(String(20))  # verification code sent on registration
    send_datetime = Column(DateTime(), default=datetime.now)
ip = Column(String(50))
class InvitationCode(Base):
    '''Registration invitation code table'''
    [AVAILABLE, USED, EXPIRED] = [0, 1, -1]  # 0: unused, 1: used, -1: unavailable
id = Column(Integer, primary_key=True)
code = Column(String(10))
sender_id = Column('sender_id', Integer, ForeignKey('mba_users.id'))
sender = relationship("MbaUser", foreign_keys="[InvitationCode.sender_id]")
receiver_id = Column('receiver_id', Integer, ForeignKey('mba_users.id'))
receiver = relationship("MbaUser", foreign_keys="[InvitationCode.receiver_id]",
backref=backref("invitation_code",
cascade="all, delete-orphan"))
expiration = Column(DateTime() )
status = Column(Integer, default=AVAILABLE)
class GlobalSiteSetting(Base):
[TRUE, FALSE] = [1, 0]
id = Column(Integer, primary_key=True)
need_invitationcode = Column(Integer, default=True)
class Univs(Base):
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
pinyin = Column(String(250), nullable=False)
pprev = Column(String(250), nullable=False)
class ExpertAuthReq(Base):
    '''Expert certification application'''
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('mba_users.id', ondelete='cascade') )
reason = Column(String(400))
    [VALID, INVALID] = range(2)  # the application status itself lives on MbaUser
status = Column(Integer, default=VALID)
|
toway/towaymeetups
|
mba/resources.py
|
Python
|
gpl-3.0
| 34,486
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def findTilt(self, root):
"""
:type root: TreeNode
:rtype: int
"""
def dfs(root):
if not root:
return 0
left = dfs(root.left)
right = dfs(root.right)
diff[0] += abs(left - right)
return left + root.val + right
diff = [0]
dfs(root)
return diff[0]
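if __name__ == '__main__':
    # Quick self-check (a sketch; uses a local TreeNode stub matching the
    # commented definition above). Leaves contribute tilt 0 and the root
    # contributes |2 - 3| = 1, so the total tilt is 1.
    class TreeNode(object):
        def __init__(self, x):
            self.val = x
            self.left = None
            self.right = None
    root = TreeNode(1)
    root.left, root.right = TreeNode(2), TreeNode(3)
    assert Solution().findTilt(root) == 1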
|
zqfan/leetcode
|
algorithms/563. Binary Tree Tilt/solution.py
|
Python
|
gpl-3.0
| 585
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file './QtGUI.ui'
#
# Created: Sat Oct 11 18:25:23 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
from gui.LrBase.main import *
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(770, 604)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.horizontalLayout = QtGui.QHBoxLayout(self.centralwidget)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.tabWidget = QtGui.QTabWidget(self.centralwidget)
self.tabWidget.setStyleSheet(_fromUtf8(""))
self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
self.Demo = QtGui.QWidget()
self.Demo.setObjectName(_fromUtf8("Demo"))
self.gridLayout = QtGui.QGridLayout(self.Demo)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.label = QtGui.QLabel(self.Demo)
self.label.setObjectName(_fromUtf8("label"))
self.verticalLayout.addWidget(self.label)
self.textBrowser = QtGui.QTextBrowser(self.Demo)
self.textBrowser.setObjectName(_fromUtf8("textBrowser"))
self.verticalLayout.addWidget(self.textBrowser)
self.gridLayout.addLayout(self.verticalLayout, 0, 0, 1, 2)
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.label_2 = QtGui.QLabel(self.Demo)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.verticalLayout_2.addWidget(self.label_2)
self.textBrowser_2 = QtGui.QTextBrowser(self.Demo)
self.textBrowser_2.setObjectName(_fromUtf8("textBrowser_2"))
self.verticalLayout_2.addWidget(self.textBrowser_2)
self.gridLayout.addLayout(self.verticalLayout_2, 1, 0, 1, 2)
self.comboBox = QtGui.QComboBox(self.Demo)
self.comboBox.setObjectName(_fromUtf8("comboBox"))
self.comboBox.addItem(_fromUtf8(""))
self.comboBox.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBox, 2, 0, 1, 1)
self.pushButton = QtGui.QPushButton(self.Demo)
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.gridLayout.addWidget(self.pushButton, 2, 1, 1, 1)
self.tabWidget.addTab(self.Demo, _fromUtf8(""))
self.tab = QtGui.QWidget()
self.tab.setObjectName(_fromUtf8("tab"))
self.tabWidget.addTab(self.tab, _fromUtf8(""))
self.tab_2 = QtGui.QWidget()
self.tab_2.setObjectName(_fromUtf8("tab_2"))
self.tabWidget.addTab(self.tab_2, _fromUtf8(""))
self.tab_4 = QtGui.QWidget()
self.tab_4.setObjectName(_fromUtf8("tab_4"))
self.gridLayout_2 = QtGui.QGridLayout(self.tab_4)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.textBrowser_3 = QtGui.QTextBrowser(self.tab_4)
self.textBrowser_3.setObjectName(_fromUtf8("textBrowser_3"))
self.gridLayout_2.addWidget(self.textBrowser_3, 0, 0, 1, 1)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.label_3 = QtGui.QLabel(self.tab_4)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.horizontalLayout_2.addWidget(self.label_3)
self.pushButton_2 = QtGui.QPushButton(self.tab_4)
self.pushButton_2.setObjectName(_fromUtf8("pushButton_2"))
self.horizontalLayout_2.addWidget(self.pushButton_2)
self.gridLayout_2.addLayout(self.horizontalLayout_2, 0, 1, 1, 1)
self.tabWidget.addTab(self.tab_4, _fromUtf8(""))
self.tab_5 = QtGui.QWidget()
self.tab_5.setObjectName(_fromUtf8("tab_5"))
self.tabWidget.addTab(self.tab_5, _fromUtf8(""))
self.tab_3 = QtGui.QWidget()
self.tab_3.setObjectName(_fromUtf8("tab_3"))
self.tabWidget.addTab(self.tab_3, _fromUtf8(""))
self.horizontalLayout.addWidget(self.tabWidget)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 770, 25))
self.menubar.setObjectName(_fromUtf8("menubar"))
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
self.tabWidget.setCurrentIndex(0)
QtCore.QObject.connect(self.comboBox, QtCore.SIGNAL(_fromUtf8("currentIndexChanged(int)")), self.textBrowser.reload)
QtCore.QObject.connect(self.comboBox, QtCore.SIGNAL(_fromUtf8("currentIndexChanged(int)")), self.textBrowser_2.reload)
QtCore.QObject.connect(self.pushButton, QtCore.SIGNAL(_fromUtf8("clicked()")), self.statusbar.show)
QtCore.QObject.connect(self.pushButton_2, QtCore.SIGNAL(_fromUtf8("clicked()")), compileLr.compileMEAM_REAX)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "Lammps Unofficial GUI", None))
self.label.setText(_translate("MainWindow", "Script Preview", None))
self.label_2.setText(_translate("MainWindow", "Notes", None))
self.comboBox.setItemText(0, _translate("MainWindow", "Lattice Constant", None))
self.comboBox.setItemText(1, _translate("MainWindow", "Elastic", None))
self.pushButton.setText(_translate("MainWindow", "Run", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.Demo), _translate("MainWindow", "Demo", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("MainWindow", "Homework", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "Project", None))
self.label_3.setText(_translate("MainWindow", "Minimum", None))
self.pushButton_2.setText(_translate("MainWindow", "Compile", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_4), _translate("MainWindow", "Compile", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_5), _translate("MainWindow", "Preferences", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_3), _translate("MainWindow", "About", None))
if __name__ == "__main__":
#mainLr.main()
app = QApplication(sys.argv)
window = QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(window)
window.show()
sys.exit(app.exec_())
|
Rareson/LammpsRelated
|
gui.py
|
Python
|
gpl-3.0
| 7,329
|
#!/usr/bin/env python
# License: GPLv3 Copyright: 2020, Kovid Goyal <kovid at kovidgoyal.net>
from typing import TYPE_CHECKING, Optional
from .base import (
MATCH_TAB_OPTION, ArgsType, Boss, PayloadGetType, PayloadType, RCOptions,
RemoteCommand, ResponseType, Window
)
if TYPE_CHECKING:
from kitty.cli_stub import FocusTabRCOptions as CLIOptions
class FocusTab(RemoteCommand):
'''
match: The tab to focus
'''
short_desc = 'Focus the specified tab'
desc = 'The active window in the specified tab will be focused.'
options_spec = MATCH_TAB_OPTION + '''
--no-response
type=bool-set
default=false
Don't wait for a response indicating the success of the action. Note that
using this option means that you will not be notified of failures.
'''
argspec = ''
def message_to_kitty(self, global_opts: RCOptions, opts: 'CLIOptions', args: ArgsType) -> PayloadType:
return {'match': opts.match}
def response_from_kitty(self, boss: Boss, window: Optional[Window], payload_get: PayloadGetType) -> ResponseType:
for tab in self.tabs_for_match_payload(boss, window, payload_get):
if tab:
boss.set_active_tab(tab)
break
return None
focus_tab = FocusTab()
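# Usage sketch (assumes remote control is enabled, e.g. `allow_remote_control yes`
# in kitty.conf; the match expression is a hypothetical example):
#     kitty @ focus-tab --match title:logs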
|
kovidgoyal/kitty
|
kitty/rc/focus_tab.py
|
Python
|
gpl-3.0
| 1,267
|
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "commanding_velocity"
PROJECT_SPACE_DIR = "/home/computing/catkin_ws/install"
PROJECT_VERSION = "0.0.0"
|
lukeexton96/Robotics
|
catkin_ws/build/commanding_velocity/catkin_generated/pkg.installspace.context.pc.py
|
Python
|
gpl-3.0
| 386
|
#!/usr/bin/python
import pypm, sys, time
DEV_NAME = 'nanoPAD2 MIDI 1'
OUT_NAME = 'fs'
#OUT_NAME = 'MIDI IN'
#OUT_NAME = 'Synth input port (20116:0)'
FIRST_NOTE = 24 + 4 # E, not C
SECOND_NOTE = FIRST_NOTE + 5
PADS1 = range(51, 35, -2)
PADS2 = range(50, 34, -2)
shorten_bools = lambda bool_list: ''.join(('0' if b else '.' for b in bool_list))
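# Quick illustration of shorten_bools: pressed pads render as '0', idle pads as '.'
# e.g. shorten_bools([True, False, True]) -> '0.0'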
def find_device(name_):
for i in range(pypm.CountDevices()):
interf,name,inp,outp,opened = pypm.GetDeviceInfo(i)
if name_ == name: return i + 1
dev_num = find_device(DEV_NAME)
if dev_num is None:
    print DEV_NAME, 'not found, aborting!'
    sys.exit(1)
print DEV_NAME, 'found at number', dev_num
out_num = find_device(OUT_NAME)
if out_num is None:
    print OUT_NAME, 'not found, aborting!'
    sys.exit(1)
out_num -= 1  # find_device reports index + 1
print OUT_NAME, 'found at number', out_num
midi_in = pypm.Input(dev_num)
#midi_out = pypm.Output(pypm.GetDefaultOutputDeviceID(), 0)
midi_out = pypm.Output(out_num, 0)
def noteon(chan, note, vel):
midi_out.Write([[[0x90 + chan, note, vel], pypm.Time()]])
def noteoff(chan, note):
midi_out.Write([[[0x80 + chan, note, 0], pypm.Time()]])
def press(chan, base_note, vel):
noteon(chan, base_note, vel)
noteon(chan, base_note - 7, vel / 3)
noteon(chan, base_note + 7, vel / 4)
noteon(chan, base_note + 12, vel / 5)
def release(chan, base_note):
noteoff(chan, base_note)
noteon(chan, base_note, 24)
pressed = False
pads1_pressed = [False] * 7
pads2_pressed = [False] * 7
note = 0
while True:
while not midi_in.Poll():
time.sleep(0.0001)
continue
midi_data = midi_in.Read(1) # read only 1 message at a time
t = midi_data[0][1]
a, b, c, d = midi_data[0][0][0], midi_data[0][0][1], midi_data[0][0][2], midi_data[0][0][3]
if a == 176:
# touchpad
if b == 16:
pressed = (c == 127)
if pressed:
pass
#midi_out.Write([[[0x90+0, FIRST_NOTE + 0, c], pypm.Time()]])
#print 'on'
else:
midi_out.Write([[[0x80+0, FIRST_NOTE + 0, c], pypm.Time()]])
midi_out.Write([[[0x80+1, SECOND_NOTE + 0, c], pypm.Time()]])
#midi_out.Write([[[0x90+0, FIRST_NOTE + 0, c], pypm.Time()]])
pass
#midi_out.Write([[[0x80+0, FIRST_NOTE + 0, c], pypm.Time()]])
#print 'off'
else:
continue
elif a == 144:
# pad pressed
if b == PADS1[-1]:
# noteon for the first pad row
press(0, FIRST_NOTE, c)
continue
if b == PADS2[-1]:
# noteon for the second pad row
press(1, SECOND_NOTE, c)
continue
if b in PADS1: pads1_pressed[PADS1.index(b)] = True
if b in PADS2: pads2_pressed[PADS2.index(b)] = True
elif a == 128:
if b == PADS1[-1]:
# noteoff for the first pad row
if not pressed:
release(0, FIRST_NOTE)
continue
if b == PADS2[-1]:
# noteoff for the second pad row
if not pressed:
release(1, SECOND_NOTE)
continue
if b in PADS1: pads1_pressed[PADS1.index(b)] = False
if b in PADS2: pads2_pressed[PADS2.index(b)] = False
#else:
# continue
note1 = max([i if p else 0 for i, p in zip(range(1, 8+2-1), pads1_pressed)])
note2 = max([i if p else 0 for i, p in zip(range(1, 8+2-1), pads2_pressed)])
midi_out.Write([[[0xe0+0, 0, 0x40 + note1 * 0x4, c], pypm.Time()]])
midi_out.Write([[[0xe0+1, 0, 0x40 + note2 * 0x4, c], pypm.Time()]])
#print [i if p else 0 for i, p in zip(range(1, 8+2), pads1_pressed)]
print t, a, b, c, d, '\t', 'X' if pressed else '_',
print note1, shorten_bools(pads1_pressed),
print note2, shorten_bools(pads2_pressed)
del midi_in
|
t184256/soundcontrol
|
nanopad2/test.py
|
Python
|
gpl-3.0
| 3,864
|
# -*- coding: utf-8 -*-
# Web application that manages the data required to fill in the Sworn Declaration of Non-Domiciliary Urban Solid Waste Generation form for submission to the Intendencia de Montevideo
# Copyright (C) 2016 LKSur S.A.
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# If you modify this program, we would appreciate you sending the changes to the address given in the README, so that we can consider incorporating them into our distribution.
from gluon import current
# The logger
import logging
class Utils(object):
"""
Esta clase provee funciones para el manejo de la sesión, validaciones
y para exportar los datos en formato PDF.
Args:
db: La base de datos.
"""
def __init__(self, db):
self.db = db
def resetSession(self):
""" Resetea los datos de la session """
session = current.session
session.empresa = None
session.predio = None
session.datosfuncId = None
session.genera = None
    def cargarResiduos(self, decid):
        # NOTE: this definition is shadowed by the cargarResiduos defined
        # further down in this class; Python keeps only the later one.
        s = current.session
        s.genera = self.cargarResiduos(decid)
def actualizarIndice(self):
db = self.db
session = current.session
if session.info:
db.DF_declaracion[session.declaracionId] = dict(indiceform=-1)
session.indiceForm = -1
session.incompleto = False
elif session.indiceForm < 7:
session.indiceForm +=1
db.DF_declaracion[session.declaracionId] = dict(indiceform=int(session.indiceForm))
def verificarRUT(self,rut):
valido = False
rut_tam = len(rut)
        # If the first 2 digits are below 10, prepend a zero so the positions line up with the validations below
if rut_tam in range(10,12) and int(rut[8:10]) != 0:
rut = "0" + rut
rut_tam +=1
if rut_tam == 11 or rut_tam == 12:
val1 = int(rut[:2]) in range(1, 22)
val2 = int(rut[2:8]) != 0
val3 = int(rut[8:10]) == 0
            # if not val1:
            #     logger.debug("The first 2 digits (%d) must be in the range 1 to 21" % int(rut[:2]))
            # if not val2:
            #     logger.debug("Digits 3 to 8 must differ from 000000")
            # if not val3:
            #     logger.debug("Digits 9 and 10 must be 0")
            # logger.debug("Val1: %s, %d; Val2: %s, %s; Val3: %s, %d" % (val1, int(rut[:2]), val2, rut[2:8], val3, int(rut[8:10])))
if val1 and val2 and val3:
modulo = 11
numero_base = [4, 3, 2, 9, 8, 7, 6, 5, 4, 3, 2]
digito_verificador = int(rut[-1])
indice = 0
suma = 0
for numero in numero_base:
suma = suma + int(rut[indice]) * numero
indice = indice + 1
resto = suma % modulo
                if resto == 0:
                    valido = (digito_verificador == 0)
                    # if not valido: logger.debug("remainder 0, entered digit %d" % digito_verificador)
                elif resto == 1:
                    valido = (len(rut) == 11)
                    # if not valido: logger.debug("remainder 1, entered digit %d" % digito_verificador)
                else:
                    valido = (digito_verificador == (modulo - resto))
                    # if not valido: logger.debug("correct digit: %d, entered digit: %d; sum: %d; remainder: %d" % (modulo - resto, digito_verificador, suma, resto))
return valido
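    # Worked sketch: for the synthetic RUT "212345670015" (constructed only to
    # satisfy the rules above; not a real registry entry) the weighted sum is
    # 2*4 + 1*3 + 2*2 + 3*9 + 4*8 + 5*7 + 6*6 + 7*5 + 0*4 + 0*3 + 1*2 = 182,
    # 182 % 11 == 6, so the check digit must be 11 - 6 = 5 and
    # verificarRUT("212345670015") returns True.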
def verificarDocumento(self,doc):
try:
modulo = 10
digito_verificador = int(doc[-1] )
if len(doc)==7:
numero_base = [9, 8, 7, 6, 3, 4]
else:
numero_base = [2, 9, 8, 7, 6, 3, 4]
indice = 0
suma = 0
for numero in numero_base:
suma = suma + int(doc[indice])*numero
indice = indice + 1
resto = suma % modulo
if resto == 0:
valido = (digito_verificador == 0)
else:
valido = (digito_verificador == (modulo-(suma % modulo)))
return valido
except Exception as e:
return False
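    # Worked sketch: for the synthetic 7-digit document "1234561" the weighted
    # sum is 1*9 + 2*8 + 3*7 + 4*6 + 5*3 + 6*4 = 109, 109 % 10 == 9, so the
    # check digit must be 10 - 9 = 1 and verificarDocumento("1234561")
    # returns True (synthetic number, not a real document).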
def autorizado(self):
session = current.session
from gluon.http import redirect
from gluon.html import URL
if (session.declaracionId is None):
session.flash = "Debe establecer una declaración"
redirect(URL('declaracion','index', extension=False))
# redirect(URL(request.application, request.controller,'index'))
return False
return True
def puedoExportarPDF(self):
from gluon.http import redirect
from gluon.html import URL
pasoscompletos = True
session = current.session
falta = None
from collections import OrderedDict
verificar = OrderedDict([
('empresa','Empresa'),
('predio','Ubicación'),
('datosfuncId','Datos de funcionamiento'),
('residuosCargados','Residuos generados'),
('info','Información Adicional')
])
for i, (clave, valor) in enumerate(verificar.items()):
if session[clave] is None:
pasoscompletos = False
tit = valor
if falta is None:
falta = tit
else:
falta += ', '+ tit
if pasoscompletos:
return True
else:
session.flash = "Para poder exportar la declaración debe elegir una y completar todos sus datos. Falta completar: "+falta
redirect(URL('declaracion','index', extension=False))
def residuosCargados(self):
db = self.db
session = current.session
listo = False
for tipo in session.genera:
if (session.genera[tipo] is not None):
listo = True
break
if listo:
session.residuosCargados = True
else:
session.residuosCargados = None
def obtenerCamposAuditoria(self,omitidos=[]):
campos=['password','registration_id','registration_key','reset_password_key','is_active', 'created_on', 'created_by', 'modified_on', 'modified_by']
campos+=['centro','id','declaracion','empresa','predio']
campos+=omitidos
return campos
def traducirlista(self,lista,tabla):
if lista == '' or lista.strip() == '':
return lista
else:
db = self.db
aux = lista.split(',')
traduccion = ""
if len(aux)>0:
e=db[tabla][aux[0]].nombre
traduccion = traduccion + e
if len(aux)>1:
for row in aux[1:]:
e=db[tabla][int(row.strip())].nombre
traduccion = traduccion + "," + e
return traduccion
def obtenerHorario(self,data):
import json
semana = {0: 'Domingo', 1: 'Lunes', 2: 'Martes', 3: 'Miércoles', 4: 'Jueves', 5: 'Viernes', 6: 'Sábado'}
        # Encode
        data_string = json.dumps(data)
        # Decode
        decoded = json.loads(data_string)
horario = {}
resultado = ''
for i in range(0,7):
horario[i] = []
for h in decoded:
dia = h['day']
hora = h['hour']
horario[dia].append(hora)
cont = 0
for dia in horario:
horas = horario[dia]
if horas:
cont = cont + 1
canth = len(horas)+1
rango = horas[-1] - horas[0]
if canth == (rango + 2):
                    # Print the first day
                    if cont == 1:
                        resultado += '%s: %s a %s Hs.\n' % (semana[dia], horas[0], horas[-1]+1)
                    # Print the remaining days
                    else:
                        resultado += ' ; %s: %s a %s Hs.\n' % (semana[dia], horas[0], horas[-1]+1)
else:
rangos = {}
indice = 0
anterior = horas[0]
rangos[indice] = []
rangos[indice].append(anterior)
for h in horas[1:]:
if h != anterior + 1:
indice = indice + 1
rangos[indice] = []
rangos[indice].append(h)
anterior = h
stringrangos = ''
for r in rangos:
if len(rangos[r]) == 1:
stringrangos += '%s Hs.' %(rangos[r][0])
else:
stringrangos += '%s a %s Hs.' %(rangos[r][0],rangos[r][-1]+1)
                        if r + 1 != len(rangos):  # not the last range
stringrangos += ' ,'
else:
stringrangos += ''
                    # Print the first day
                    if cont == 1:
                        resultado += '%s: %s\n' % (semana[dia], stringrangos)
                    # Print the remaining days
                    else:
                        resultado += ' ; %s: %s \n' % (semana[dia], stringrangos)
return resultado
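    # Worked sketch: [{'day': 1, 'hour': 9}, {'day': 1, 'hour': 10}] renders as
    # 'Lunes: 9 a 11 Hs.' -- consecutive hours collapse into a single range,
    # with the end hour printed exclusive (horas[-1] + 1).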
def obtenerHtml(self,tabla,id,omitidos=[],traducir=[],omitircondicion=[],consulta=None):
from gluon.html import *
db=self.db
q=db(db["%s"%(tabla)].id==id)
q=q.select().render(0)
campos_auditoria = self.obtenerCamposAuditoria(omitidos)
fi=db["%s"%(tabla)].fields
fa = [x for x in fi if x not in campos_auditoria]
html=FORM()
        for row in fa:
            label = db["%s" % (tabla)][row].label
            valor = q[row]
            # Translate the schedule from JSON
            if row == 'horario':
                valor = self.obtenerHorario(valor)
            elif row == 'nropuerta' and q['bis']:
                valor = str(valor) + ' BIS'
            # Translate sex from 0 or 1 to 'Masculino' or 'Femenino'
            elif label == 'Sexo':
                if valor == 0:
                    valor = 'Masculino'
                else:
                    valor = 'Femenino'
            elif row == 'contenedor':
                if valor == 'True':
                    valor = 'Sí'
                else:
                    valor = 'No'
            # Rename the "Entre"/"Y" labels that refer to the street corners
            elif row == 'esquina1' or row == 'esquina2':
                label = db["%s" % (tabla)][row].comment
            # Replace a list of integer ids with their names from the referenced table
            elif row in traducir:
                valor = self.traducirlista(q(row), traducir[row])
            # Skip rows that do not apply, e.g. if no waste of type 'Seco' is generated
            elif row in omitircondicion:
                if q[row] == omitircondicion[row]:
                    html = "No corresponde"
                    break
            # Map True/False to 'Sí'/'No'. The int check avoids bogus casts to True or False.
            elif isinstance(valor, int):
                if valor == True:
                    valor = 'Sí'
                else:
                    valor = 'No'
            # Normalize null-ish values
            if valor in [None, '', ' ', 'None']:
                valor = 'No corresponde'
            # One DIV per field, with its label and value.
            if not ("otro" in label.lower() and valor == 'No corresponde'):
                html.append(DIV(P(B("%s: " % label)), P(valor), BR() * 2))
return html
    def inicializarSesion(self):
        # Bind db and session locally (they were previously undefined in this method)
        db = self.db
        session = current.session
        consulta = db(db.DF_declaracion)
        # If no declaration exists yet, create the first one and store it in the session
        if consulta.isempty():
            session.declaracionId = db.DF_declaracion.insert(info='')
        # Otherwise store the most recently modified one in the session
        else:
            session.declaracionId = db(db.DF_declaracion).select(db.DF_declaracion.id, orderby=~db.DF_declaracion.modified_on).first().id
        # Now check whether any data has been loaded for the declaration
        # Query for the company
        consulta = db(db.DF_empresa.declaracion == session.declaracionId)
        # If no company is associated with the declaration
        if consulta.isempty():
            session.empresa = None
        # If one exists, store it in the session
        else:
            # single record case
            session.empresa = consulta.select().first().id
def cargarResiduos(self,decId):
db = self.db
genera = dict()
tipos = db().select(db.DF_tipo_residuo.id,orderby=db.DF_tipo_residuo.id)
consulta = db(db.DF_genera.declaracion==decId)
for row in tipos:
tipoid = int(row.id)
genera[tipoid] = None
if not consulta.isempty():
residuo= consulta(db.DF_genera.tipo_residuo==tipoid).select(db.DF_genera.id,orderby=db.DF_genera.id).first()
if residuo:
genera[tipoid]=int(residuo.id)
return genera
def establecerSesion(self,decId):
db = self.db
session = current.session
session.declaracionId = decId
info = empresa = predio = datosfuncId= residuos = None
session.indiceForm = indiceVerif =0
session.incompleto = True
consultaDeclaracion = db(db.DF_declaracion.id == decId)
if not consultaDeclaracion.isempty():
indiceForm = db.DF_declaracion[decId].indiceform
if indiceForm is not None:
session.indiceForm = indiceForm
session.incompleto = (indiceForm !=-1)
        # Query for the company
        consultaEmpresa = db(db.DF_empresa.declaracion == decId)
        # If a company is associated with the declaration
        if not consultaEmpresa.isempty():
            # single record case
            # if len(consulta.select())==1:
            empresa = consultaEmpresa.select().first().id
            indiceVerif += 1
        # Query for the property (predio)
        consulta = db(db.DF_predio.declaracion == decId)
        # If a property is associated with the declaration
        if not consulta.isempty():
            # single record case
            predio = consulta.select().first().id
            indiceVerif += 1
        # Query for the operating data
        consultaDatosfunc = db(db.DF_reside.empresa == empresa)(db.DF_reside.predio == predio)
        # If operating data is associated with the declaration
        if not consultaDatosfunc.isempty():
            registro = consultaDatosfunc.select().first()
            datosfuncId = registro.id
            indiceVerif += 1
        residuos = self.cargarResiduos(decId)
        if indiceForm != -1 and indiceVerif > indiceForm:
            db.DF_declaracion[decId] = dict(indiceform=indiceVerif)
            session.indiceForm = indiceVerif
            session.incompleto = True
        if session.incompleto is not None and not session.incompleto:
            info = True
        # Store the values in the session
        [session.empresa, session.predio, session.datosfuncId, session.genera, session.info] = [empresa, predio, datosfuncId, residuos, info]
self.residuosCargados()
|
LKSur/gestion-residuos
|
modules/app/utils.py
|
Python
|
gpl-3.0
| 16,591
|
# Licensed under GPL version 3 - see LICENSE.rst
import numpy as np
import astropy.units as u
from .utils import norm_vector, e2h
__all__ = ['polarization_vectors', 'Q_reflection', 'paralleltransport_matrix',
'parallel_transport']
def polarization_vectors(dir_array, angles):
'''Converts polarization angles to vectors in the direction of polarization.
Follows convention: Vector perpendicular to photon direction and closest to +y axis is
angle 0 for polarization direction, unless photon direction is parallel to the y axis,
in which case the vector closest to the +x axis is angle 0.
Parameters
----------
dir_array : nx4 np.array
each row is the homogeneous coordinates for a photon's direction vector
angles : np.array
1D array with the polarization angles
'''
n = len(angles)
polarization = np.zeros((n, 4))
x = np.array([1., 0., 0.])
y = np.array([0., 1., 0.])
# NOTE: The commented code works and is more readable, but the current code is faster.
# for i in range(0, n):
# r = h2e(dir_array[i])
# r /= np.linalg.norm(r)
# if not (np.isclose(r[0], 0.) and np.isclose(r[2], 0.)):
# # polarization relative to positive y at 0
# v_1 = y - (r * np.dot(r, y))
# v_1 /= np.linalg.norm(v_1)
# else:
# # polarization relative to positive x at 0
# v_1 = x - (r * np.dot(r, x))
# v_1 /= np.linalg.norm(v_1)
#
# # right hand coordinate system is v_1, v_2, r (photon direction)
# v_2 = np.cross(r, v_1)
# polarization[i, 0:3] = v_1 * np.cos(angles[i]) + v_2 * np.sin(angles[i])
# polarization[i, 3] = 0
r = dir_array.copy()[:,0:3]
r /= np.linalg.norm(r, axis=1)[:, np.newaxis]
pol_convention_x = np.isclose(r[:,0], 0.) & np.isclose(r[:,2], 0.)
if hasattr(angles, "unit") and (angles.unit is not None):
angles = angles.to(u.rad)
# polarization relative to positive y or x at 0
v_1 = ~pol_convention_x[:, np.newaxis] * (y - r * np.dot(r, y)[:, np.newaxis])
v_1 += pol_convention_x[:, np.newaxis] * (x - r * np.dot(r, x)[:, np.newaxis])
v_1 /= np.linalg.norm(v_1, axis=1)[:, np.newaxis]
# right hand coordinate system is v_1, v_2, r (photon direction)
v_2 = np.cross(r, v_1)
polarization[:, 0:3] = v_1 * np.cos(angles)[:, np.newaxis] + v_2 * np.sin(angles)[:, np.newaxis]
return polarization
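# Usage sketch: a photon travelling along +z with polarization angle 0 picks
# the +y axis under the convention documented above:
#     import numpy as np
#     polarization_vectors(np.array([[0., 0., 1., 0.]]), np.array([0.]))
#     # -> array([[0., 1., 0., 0.]])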
def Q_reflection(delta_dir):
'''Reflection of a polarization vector on a non-polarizing surface.
This can also be used for other elements that change the direction of the
photon without adding any more polarization and where both sides
propagate in the same medium.
See `Yun (2011) <http://hdl.handle.net/10150/202979>`_, eqn 4.3.13 for details.
Parameters
----------
delta_dir : np.array of shape (n, 4)
Array of photon direction coordinates in homogeneous coordinates:
``delta_dir = photon['dir_old'] - photons['dir_new']``.
Note that this vector is **not** normalized.
Returns
-------
q : np.array of shape (n, 4, 4)
Array of parallel transport ray tracing matrices.
'''
    # delta_dir.shape is a tuple, so comparing it to 2 was always True; check ndim instead
    if delta_dir.ndim != 2:
        raise ValueError('delta_dir must have dimension (n, 4).')
m = delta_dir[..., None, :] * delta_dir[..., :, None]
return np.eye(4) - 2 / (np.linalg.norm(delta_dir, axis=1)**2)[:, None, None] * m
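# Worked sketch: a photon along +z reflected straight back has
# delta_dir = dir_old - dir_new = [0, 0, 2, 0]; Q_reflection then returns
# diag(1, 1, -1, 1) for that ray, i.e. it flips the z component of the
# polarization vector:
#     import numpy as np
#     Q_reflection(np.array([[0., 0., 2., 0.]]))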
def paralleltransport_matrix(dir1, dir2, jones=np.eye(2), replace_nans=True):
'''Calculate parallel transport ray tracing matrix.
Parallel transport for a vector implies that the component s
(perpendicular, from German *senkrecht*) to the planes spanned by
``dir1`` and ``dir2`` stays the same. If ``dir1`` is parallel to ``dir2``
this plane is not well defined and the resulting matrix elements will
be set to ``np.nan``, unless ``replace_nans`` is set.
    Note that the ray matrix returned works on a euclidean 3d vector, not a
    homogeneous vector. (Polarization is a vector, thus the fourth element of the
    homogeneous vector is always 0 and returning (4,4) matrices is just a waste
    of space.)
Parameters
----------
dir1, dir2 : np.array of shape (n, 3)
Direction before and after the interaction.
jones : np.array of shape (2,2)
Jones matrix in the local s,p system of the optical element.
replace_nans : bool
If ``True`` return an identity matrix for those rays with
``dir1=dir2``. In those cases, the local coordinate system is not well
defined and thus no Jones matrix can be applied. In MARXS ``dir1=dir2``
often happens if some photons in a list miss the optical element in
question - these photons just pass through and their polarization vector
should be unchanged.
Returns
-------
p_mat : np.array of shape(n, 3, 3)
'''
dir1 = norm_vector(dir1)
dir2 = norm_vector(dir2)
jones_3 = np.eye(3)
jones_3[:2, :2] = jones
pmat = np.zeros((dir1.shape[0], 3, 3))
s = np.cross(dir1, dir2)
s_norm = np.linalg.norm(s, axis=1)
    # Find dir values that remain unchanged.
    # For these the cross product will be 0
    # and a numerical error would be raised in s / norm(s).
    # The expected output value for these depends on "replace_nans".
ind = np.isclose(s_norm, 0)
if (~ind).sum() > 0:
s = s[~ind, :] / s_norm[~ind][:, None]
p_in = np.cross(dir1[~ind, :], s)
p_out = np.cross(dir2[~ind, :], s)
Oininv = np.array([s, p_in, dir1[~ind, :]]).swapaxes(1, 0)
Oout = np.array([s, p_out, dir2[~ind, :]]).swapaxes(1, 2).T
temp = np.einsum('...ij,kjl->kil', jones_3, Oininv)
pmat[~ind, :, :] = np.einsum('ijk,ikl->ijl', Oout, temp)
factor = 1 if replace_nans else np.nan
pmat[ind, :, :] = factor * np.eye(3)[None, :, :]
return pmat
def parallel_transport(dir_old, dir_new, pol_old, **kwargs):
'''Parallel transport of the polarization vector with no polarization happening.
Parameters
----------
dir_old, dir_new : np.array of shape (n, 4)
Old and new photon direction in homogeneous coordinates.
pol_old : np.array of shape (n, 4)
Old polarization vector in homogeneous coordinates.
kwargs : dict
All other arguments are passed on to `~marxs.math.polarization.paralleltransport_matrix`.
Returns
-------
pol : np.array of shape (m, 4)
Parallel transported vectors.
'''
pmat = paralleltransport_matrix(dir_old[:, :3], dir_new[:, :3])
out = np.einsum('ijk,ik->ij', pmat, pol_old[:, :3])
return e2h(out, 0)
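# Usage sketch: bending a ray from +z to +x parallel-transports a +y
# polarization (the s direction here) unchanged:
#     import numpy as np
#     dir_old = np.array([[0., 0., 1., 0.]])
#     dir_new = np.array([[1., 0., 0., 0.]])
#     pol = np.array([[0., 1., 0., 0.]])
#     parallel_transport(dir_old, dir_new, pol)   # -> [[0., 1., 0., 0.]]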
|
Chandra-MARX/marxs
|
marxs/math/polarization.py
|
Python
|
gpl-3.0
| 6,636
|
# -*- coding: utf-8 -*-
# TextGridTools -- Read, write, and manipulate Praat TextGrid files
# Copyright (C) 2013-2014 Hendrik Buschmeier, Marcin Włodarczak
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, print_function
import unittest
from ..core import *
class TestTime(unittest.TestCase):
def setUp(self):
self.t1 = Time(1.0)
self.t2 = Time(1.1)
self.t3 = Time(1.01)
self.t4 = Time(1.001)
self.t5 = Time(1.00001)
def test_equals(self):
self.assertTrue(self.t1 == self.t1)
self.assertFalse(self.t1 == self.t2)
self.assertFalse(self.t1 == self.t3)
self.assertFalse(self.t1 == self.t4)
self.assertTrue(self.t1 == self.t5)
def test_not_equals(self):
self.assertFalse(self.t1 != self.t1)
self.assertTrue(self.t1 != self.t2)
self.assertTrue(self.t1 != self.t3)
self.assertTrue(self.t1 != self.t4)
self.assertFalse(self.t1 != self.t5)
def test_less(self):
self.assertFalse(self.t1 < self.t1)
self.assertTrue(self.t1 < self.t2)
self.assertTrue(self.t1 < self.t3)
self.assertTrue(self.t1 < self.t4)
self.assertFalse(self.t1 < self.t5)
def test_greater(self):
self.assertFalse(self.t1 > self.t1)
self.assertFalse(self.t1 > self.t2)
self.assertFalse(self.t1 > self.t3)
self.assertFalse(self.t1 > self.t4)
self.assertFalse(self.t1 > self.t5)
self.assertTrue(self.t2 > self.t1)
def test_greater_equal(self):
self.assertTrue(self.t1 >= self.t1)
self.assertFalse(self.t1 >= self.t2)
self.assertFalse(self.t1 >= self.t3)
self.assertFalse(self.t1 >= self.t4)
self.assertTrue(self.t1 >= self.t5)
self.assertTrue(self.t2 >= self.t1)
def test_less_equal(self):
self.assertTrue(self.t1 <= self.t1)
self.assertTrue(self.t1 <= self.t2)
self.assertTrue(self.t1 <= self.t3)
self.assertTrue(self.t1 <= self.t4)
self.assertTrue(self.t1 <= self.t5)
self.assertFalse(self.t2 <= self.t1)
class TestTier(unittest.TestCase):
def test_adding(self):
t = Tier()
# Add to empty tier
ao1 = Annotation(0.0, 0.5, 'ao1')
t.add_annotation(ao1)
self.assertTrue(len(t) == 1)
self.assertTrue(t.start_time == 0)
self.assertTrue(t.end_time == 0.5)
# Append to tier leaving empty space
ao2 = Annotation(0.6, 0.75, 'ao2')
t.add_annotation(ao2)
self.assertTrue(len(t) == 2)
self.assertTrue(t.start_time == 0)
self.assertTrue(t.end_time == 0.75)
ao3 = Annotation(0.81, 0.9, 'ao3')
t.add_annotation(ao3)
self.assertTrue(len(t) == 3)
self.assertTrue(t.start_time == 0)
self.assertTrue(t.end_time == 0.9)
# Insert between existing annotations
# - leaving gaps on both sides
ao4 = Annotation(0.75, 0.77, 'ao4')
t.add_annotation(ao4)
self.assertTrue(len(t) == 4)
self.assertTrue(t.start_time == 0)
self.assertTrue(t.end_time == 0.9)
        # - meeting the preceding annotation
ao5 = Annotation(0.77, 0.79, 'ao5')
t.add_annotation(ao5)
self.assertTrue(len(t) == 5)
self.assertTrue(t.start_time == 0)
self.assertTrue(t.end_time == 0.9)
        # - meeting the preceding and succeeding annotations
ao6 = Annotation(0.8, 0.81, 'ao6')
t.add_annotation(ao6)
self.assertTrue(len(t) == 6)
self.assertTrue(t.start_time == 0)
self.assertTrue(t.end_time == 0.9)
# Insert at a place that is already occupied
# - within ao3
with self.assertRaises(ValueError):
ao7 = Annotation(0.85, 0.87, 'ao7')
t.add_annotation(ao7)
# - same boundaries as ao3
with self.assertRaises(ValueError):
ao8 = Annotation(0.81, 0.9, 'ao8')
t.add_annotation(ao8)
# - start time earlier than start time of ao3
with self.assertRaises(ValueError):
ao9 = Annotation(0.8, 0.89, 'ao9')
t.add_annotation(ao9)
# - end time later than end time of ao3
with self.assertRaises(ValueError):
ao10 = Annotation(0.82, 0.91, 'ao10')
t.add_annotation(ao10)
# - start time earlier than start time of ao3 and
# end time later than end time of ao3
with self.assertRaises(ValueError):
ao11 = Annotation(0.8, 0.91, 'ao11')
t.add_annotation(ao11)
# - Check that no annotation was added
self.assertTrue(len(t) == 6)
self.assertTrue(t.start_time == 0)
self.assertTrue(t.end_time == 0.9)
def test_start_end_times(self):
t = Tier(1, 2)
# Check whether specified start/end times are used
self.assertTrue(t.start_time == 1)
self.assertTrue(t.end_time == 2)
# Check whether adding an annotation within specified
# start and end times leaves them unchanged
t.add_annotation(Annotation(1.1, 1.9, 'text'))
self.assertTrue(t.start_time == 1)
self.assertTrue(t.end_time == 2)
# Expand end time by adding an annotation that ends later
t.add_annotation(Annotation(2, 3, 'text'))
self.assertTrue(t.start_time == 1)
self.assertTrue(t.end_time == 3)
        # Expand start time by adding an annotation that starts earlier
t.add_annotation(Annotation(0, 1, 'text'))
self.assertTrue(t.start_time == 0)
self.assertTrue(t.end_time == 3)
def test_queries(self):
t = Tier()
ao1 = Annotation(0, 1, 'ao1')
ao2 = Annotation(1, 2, 'ao2')
ao3 = Annotation(5, 6, 'ao3')
t.add_annotations([ao1, ao2, ao3])
# Query with start time
# - query for existing objects
ao1_retr = t.get_annotation_by_start_time(ao1.start_time)
self.assertTrue(ao1_retr == ao1)
ao2_retr = t.get_annotation_by_start_time(ao2.start_time)
self.assertTrue(ao2_retr == ao2)
ao3_retr = t.get_annotation_by_start_time(ao3.start_time)
self.assertTrue(ao3_retr == ao3)
# - query for non-existing object
aox_retr = t.get_annotation_by_start_time(0.5)
self.assertTrue(aox_retr is None)
# Query with end time
# - query for existing objects
ao1_retr = t.get_annotation_by_end_time(ao1.end_time)
self.assertTrue(ao1_retr == ao1)
ao2_retr = t.get_annotation_by_end_time(ao2.end_time)
self.assertTrue(ao2_retr == ao2)
ao3_retr = t.get_annotation_by_end_time(ao3.end_time)
self.assertTrue(ao3_retr == ao3)
# - query for non-existing object
aox_retr = t.get_annotation_by_end_time(0.5)
self.assertTrue(aox_retr is None)
# Query with time
# - query for existing objects
# - time falls within object
ao1_retr = t.get_annotations_by_time(ao1.start_time + (ao1.end_time - ao1.start_time) * 0.5)
self.assertTrue(ao1_retr[0] == ao1)
# - time equals end time of object
ao2_retr = t.get_annotations_by_time(ao2.end_time)
self.assertTrue(ao2_retr[0] == ao2)
# - time equals start time of object
ao3_retr = t.get_annotations_by_time(ao3.start_time)
self.assertTrue(ao3_retr[0] == ao3)
# - time equals start time of one object and end_time of another
ao12_retr = t.get_annotations_by_time(ao1.end_time)
self.assertTrue(len(ao12_retr) == 2)
self.assertTrue(ao12_retr[0] == ao1)
self.assertTrue(ao12_retr[1] == ao2)
# - query for non-existing object
aox_retr = t.get_annotations_by_time(3)
self.assertTrue(aox_retr == [])
# Query with text/regex
# - one match
ao1_retr = t.get_annotations_with_matching_text('ao1')
self.assertTrue(len(ao1_retr) == 1)
self.assertTrue(ao1_retr[0] == ao1)
        # - multiple matches
ao31 = Annotation(7, 8, 'ao3')
ao32 = Annotation(9, 10, 'ao3')
ao33 = Annotation(11, 12, 'ao3')
t.add_annotations([ao31, ao32, ao33])
ao3x_retr = t.get_annotations_with_matching_text('ao3')
self.assertTrue(len(ao3x_retr) == 4)
self.assertTrue(ao3x_retr[0] == ao3)
self.assertTrue(ao3x_retr[1] == ao31)
self.assertTrue(ao3x_retr[2] == ao32)
self.assertTrue(ao3x_retr[3] == ao33)
# - multiple matches, select first n
ao3xn_retr = t.get_annotations_with_matching_text('ao3', 2)
self.assertTrue(len(ao3xn_retr) == 2)
self.assertTrue(ao3xn_retr[0] == ao3)
self.assertTrue(ao3xn_retr[1] == ao31)
# - multiple matches, select last n
ao3xn_retr = t.get_annotations_with_matching_text('ao3', -2)
self.assertTrue(len(ao3xn_retr) == 2)
self.assertTrue(ao3xn_retr[0] == ao32)
self.assertTrue(ao3xn_retr[1] == ao33)
def test_get_nearest_annotation(self):
t = Tier()
ao1 = Annotation(0, 1, 'ao1')
ao2 = Annotation(1, 2, 'ao2')
ao3 = Annotation(3, 4, 'ao3')
ao4 = Annotation(5, 6, 'ao4')
t.add_annotations([ao1, ao2, ao3, ao4])
# - coincides with start time of the first interval
r = t.get_nearest_annotation(0.0, boundary='start', direction='left')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.0, boundary='end', direction='left')
self.assertTrue(r == set([]))
r = t.get_nearest_annotation(0.0, boundary='both', direction='left')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.0, boundary='start', direction='right')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.0, boundary='end', direction='right')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.0, boundary='both', direction='right')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.0, boundary='start', direction='both')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.0, boundary='end', direction='both')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.0, boundary='both', direction='both')
self.assertTrue(r == set([ao1]))
# - lies between start and end time of the first interval
r = t.get_nearest_annotation(0.4, boundary='start', direction='left')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.4, boundary='end', direction='left')
self.assertTrue(r == set([]))
r = t.get_nearest_annotation(0.4, boundary='both', direction='left')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.4, boundary='start', direction='right')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(0.4, boundary='end', direction='right')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.4, boundary='both', direction='right')
self.assertTrue(r == set([ao1, ao2]))
r = t.get_nearest_annotation(0.4, boundary='start', direction='both')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.4, boundary='end', direction='both')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.4, boundary='both', direction='both')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.5, boundary='start', direction='left')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.5, boundary='end', direction='left')
self.assertTrue(r == set([]))
r = t.get_nearest_annotation(0.5, boundary='both', direction='left')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.5, boundary='start', direction='right')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(0.5, boundary='end', direction='right')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.5, boundary='both', direction='right')
self.assertTrue(r == set([ao1, ao2]))
r = t.get_nearest_annotation(0.5, boundary='start', direction='both')
self.assertTrue(r == set([ao1, ao2]))
r = t.get_nearest_annotation(0.5, boundary='end', direction='both')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.5, boundary='both', direction='both')
self.assertTrue(r == set([ao1, ao2]))
r = t.get_nearest_annotation(0.6, boundary='start', direction='left')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.6, boundary='end', direction='left')
self.assertTrue(r == set([]))
r = t.get_nearest_annotation(0.6, boundary='both', direction='left')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.6, boundary='start', direction='right')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(0.6, boundary='end', direction='right')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.6, boundary='both', direction='right')
self.assertTrue(r == set([ao1, ao2]))
r = t.get_nearest_annotation(0.6, boundary='start', direction='both')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(0.6, boundary='end', direction='both')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(0.6, boundary='both', direction='both')
self.assertTrue(r == set([ao1, ao2]))
# - coincides with end time of one interval and coincides with start
# time of another interval
r = t.get_nearest_annotation(1.0, boundary='start', direction='left')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(1.0, boundary='end', direction='left')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(1.0, boundary='both', direction='left')
self.assertTrue(r == set([ao1, ao2]))
r = t.get_nearest_annotation(1.0, boundary='start', direction='right')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(1.0, boundary='end', direction='right')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(1.0, boundary='both', direction='right')
self.assertTrue(r == set([ao1, ao2]))
r = t.get_nearest_annotation(1.0, boundary='start', direction='both')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(1.0, boundary='end', direction='both')
self.assertTrue(r == set([ao1]))
r = t.get_nearest_annotation(1.0, boundary='both', direction='both')
self.assertTrue(r == set([ao1, ao2]))
## - lies between two intervals
r = t.get_nearest_annotation(2.4, boundary='start', direction='left')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(2.4, boundary='end', direction='left')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(2.4, boundary='both', direction='left')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(2.4, boundary='start', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(2.4, boundary='end', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(2.4, boundary='both', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(2.4, boundary='start', direction='both')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(2.4, boundary='end', direction='both')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(2.4, boundary='both', direction='both')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(2.5, boundary='start', direction='left')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(2.5, boundary='end', direction='left')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(2.5, boundary='both', direction='left')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(2.5, boundary='start', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(2.5, boundary='end', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(2.5, boundary='both', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(2.5, boundary='start', direction='both')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(2.5, boundary='end', direction='both')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(2.5, boundary='both', direction='both')
self.assertTrue(r == set([ao3, ao2]))
r = t.get_nearest_annotation(2.6, boundary='start', direction='left')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(2.6, boundary='end', direction='left')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(2.6, boundary='both', direction='left')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(2.6, boundary='start', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(2.6, boundary='end', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(2.6, boundary='both', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(2.6, boundary='start', direction='both')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(2.6, boundary='end', direction='both')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(2.6, boundary='both', direction='both')
self.assertTrue(r == set([ao3]))
## - coincides with start time of an isolated interval
r = t.get_nearest_annotation(3.0, boundary='start', direction='left')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.0, boundary='end', direction='left')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(3.0, boundary='both', direction='left')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.0, boundary='start', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.0, boundary='end', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.0, boundary='both', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.0, boundary='start', direction='both')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.0, boundary='end', direction='both')
self.assertTrue(r == set([ao2, ao3]))
r = t.get_nearest_annotation(3.0, boundary='both', direction='both')
self.assertTrue(r == set([ao3]))
        ## - lies within an isolated interval
r = t.get_nearest_annotation(3.6, boundary='start', direction='left')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.6, boundary='end', direction='left')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(3.6, boundary='both', direction='left')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.6, boundary='start', direction='right')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(3.6, boundary='end', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.6, boundary='both', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.6, boundary='start', direction='both')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.6, boundary='end', direction='both')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.6, boundary='both', direction='both')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.5, boundary='start', direction='left')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.5, boundary='end', direction='left')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(3.5, boundary='both', direction='left')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.5, boundary='start', direction='right')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(3.5, boundary='end', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.5, boundary='both', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.5, boundary='start', direction='both')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.5, boundary='end', direction='both')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.5, boundary='both', direction='both')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.6, boundary='start', direction='left')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.6, boundary='end', direction='left')
self.assertTrue(r == set([ao2]))
r = t.get_nearest_annotation(3.6, boundary='both', direction='left')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.6, boundary='start', direction='right')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(3.6, boundary='end', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.6, boundary='both', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.6, boundary='start', direction='both')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.6, boundary='end', direction='both')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(3.6, boundary='both', direction='both')
self.assertTrue(r == set([ao3]))
## - coincides with end time of an isolated interval
r = t.get_nearest_annotation(4.0, boundary='start', direction='left')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(4.0, boundary='end', direction='left')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(4.0, boundary='both', direction='left')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(4.0, boundary='start', direction='right')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(4.0, boundary='end', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(4.0, boundary='both', direction='right')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(4.0, boundary='start', direction='both')
self.assertTrue(r == set([ao3, ao4]))
r = t.get_nearest_annotation(4.0, boundary='end', direction='both')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(4.0, boundary='both', direction='both')
self.assertTrue(r == set([ao3]))
#5.0, 5.4, 5.5, 5.6, 6.0
## - coincides with start time of the last interval
r = t.get_nearest_annotation(5.0, boundary='start', direction='left')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.0, boundary='end', direction='left')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(5.0, boundary='both', direction='left')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.0, boundary='start', direction='right')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.0, boundary='end', direction='right')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.0, boundary='both', direction='right')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.0, boundary='start', direction='both')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.0, boundary='end', direction='both')
self.assertTrue(r == set([ao3, ao4]))
r = t.get_nearest_annotation(5.0, boundary='both', direction='both')
self.assertTrue(r == set([ao4]))
## - lies withing an the last interval
r = t.get_nearest_annotation(5.4, boundary='start', direction='left')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.4, boundary='end', direction='left')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(5.4, boundary='both', direction='left')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.4, boundary='start', direction='right')
self.assertTrue(r == set([]))
r = t.get_nearest_annotation(5.4, boundary='end', direction='right')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.4, boundary='both', direction='right')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.4, boundary='start', direction='both')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.4, boundary='end', direction='both')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.4, boundary='both', direction='both')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.5, boundary='start', direction='left')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.5, boundary='end', direction='left')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(5.5, boundary='both', direction='left')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.5, boundary='start', direction='right')
self.assertTrue(r == set([]))
r = t.get_nearest_annotation(5.5, boundary='end', direction='right')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.5, boundary='both', direction='right')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.5, boundary='start', direction='both')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.5, boundary='end', direction='both')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.5, boundary='both', direction='both')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.6, boundary='start', direction='left')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.6, boundary='end', direction='left')
self.assertTrue(r == set([ao3]))
r = t.get_nearest_annotation(5.6, boundary='both', direction='left')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.6, boundary='start', direction='right')
self.assertTrue(r == set([]))
r = t.get_nearest_annotation(5.6, boundary='end', direction='right')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.6, boundary='both', direction='right')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.6, boundary='start', direction='both')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.6, boundary='end', direction='both')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(5.6, boundary='both', direction='both')
self.assertTrue(r == set([ao4]))
## - coincides with end time of the last interval
r = t.get_nearest_annotation(6.0, boundary='start', direction='left')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(6.0, boundary='end', direction='left')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(6.0, boundary='both', direction='left')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(6.0, boundary='start', direction='right')
self.assertTrue(r == set([]))
r = t.get_nearest_annotation(6.0, boundary='end', direction='right')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(6.0, boundary='both', direction='right')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(6.0, boundary='start', direction='both')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(6.0, boundary='end', direction='both')
self.assertTrue(r == set([ao4]))
r = t.get_nearest_annotation(6.0, boundary='both', direction='both')
self.assertTrue(r == set([ao4]))
def test_get_copy_with_gaps_filled(self):
i1 = Interval(0,2, 'i1')
i2 = Interval(2,3, 'i2')
i3 = Interval(4,5, 'i3')
i4 = Interval(7,8, 'i4')
i5 = Interval(8.5,9.5, 'i5')
# - insert empty start interval
t1 = IntervalTier(0, 3, 't1')
t1.add_annotations([i2])
t1_c = t1.get_copy_with_gaps_filled()
self.assertTrue(len(t1) == 1)
self.assertTrue(len(t1_c) == 2)
# - insert emtpy end interval
t2 = IntervalTier(0, 3, 't2')
t2.add_annotations([i1])
t2_c = t2.get_copy_with_gaps_filled()
self.assertTrue(len(t2) == 1)
self.assertTrue(len(t2_c) == 2)
# - insert all over the place
t3 = IntervalTier(0, 10, 't3')
t3.add_annotations([i2, i3, i4, i5])
t3_c = t3.get_copy_with_gaps_filled()
self.assertTrue(len(t3) == 4)
self.assertTrue(len(t3_c) == 9)
# - insert into emtpy tier
t4 = IntervalTier(0, 5, 't4')
t4_c = t4.get_copy_with_gaps_filled()
self.assertTrue(len(t4) == 0)
self.assertTrue(len(t4_c) == 1)
# - do nothing
t5 = IntervalTier(0, 3, 't5')
t5.add_annotations([i1, i2])
t5_c = t5.get_copy_with_gaps_filled()
self.assertTrue(len(t5) == 2)
self.assertTrue(len(t5_c) == 2)
class TestInterval(unittest.TestCase):
def test_change_time(self):
ict = Interval(0, 1)
# Changing start and end times has an effect
ict.start_time = 0.5
self.assertTrue(ict.start_time == 0.5)
ict.end_time = 1.5
self.assertTrue(ict.end_time == 1.5)
# Correct order of start and end times is checked
with self.assertRaises(ValueError):
Interval(1,0)
with self.assertRaises(ValueError):
ict.start_time = 2.0
with self.assertRaises(ValueError):
ict.end_time = 0
def test_change_text(self):
ict = Interval(0, 1, 'text')
self.assertTrue(ict.text == 'text')
ict.text = 'text changed'
self.assertTrue(ict.text == 'text changed')
def test_duration(self):
self.id1 = Interval(0, 1)
self.assertTrue(self.id1.duration() == 1.0)
self.id2 = Interval(1, 1)
self.assertTrue(self.id2.duration() == 0)
def test_equality(self):
ie1 = Interval(0, 1, 'text')
ie2 = Interval(0, 1, 'text')
self.assertTrue(ie1 == ie2)
ie3 = Interval(1, 1, 'text')
self.assertFalse(ie1 == ie3)
ie4 = Interval(0, 2, 'text')
self.assertFalse(ie1 == ie4)
ie5 = Interval(0, 1, 'text changed')
self.assertFalse(ie1 == ie5)
def test_repr(self):
ir = Interval(0, 1, 'text')
s = repr(ir)
ir_recreated = eval(s)
self.assertTrue(ir == ir_recreated)
class TestPoint(unittest.TestCase):
def test_change_time(self):
pct = Point(0)
# Changing start and end times has an effect
pct.time = 0.5
self.assertTrue(pct.time == 0.5)
self.assertTrue(pct.start_time == 0.5)
self.assertTrue(pct.end_time == 0.5)
pct.start_time = 1
self.assertTrue(pct.time == 1)
self.assertTrue(pct.start_time == 1)
self.assertTrue(pct.end_time == 1)
pct.end_time = 1.5
self.assertTrue(pct.time == 1.5)
self.assertTrue(pct.start_time == 1.5)
self.assertTrue(pct.end_time == 1.5)
def test_change_text(self):
pct = Point(0, 'text')
self.assertTrue(pct.text == 'text')
pct.text = 'text changed'
self.assertTrue(pct.text == 'text changed')
def test_equality(self):
pe1 = Point(0, 'text')
pe2 = Point(0, 'text')
self.assertTrue(pe1 == pe2)
pe3 = Point(1, 'text')
self.assertFalse(pe1 == pe3)
pe4 = Point(0, 'text changed')
self.assertFalse(pe1 == pe4)
def test_repr(self):
pr = Point(0, 'text')
s = repr(pr)
pr_recreated = eval(s)
self.assertTrue(pr == pr_recreated)
|
hbuschme/TextGridTools
|
tgt/tests/test_core.py
|
Python
|
gpl-3.0
| 33,889
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-09 18:13
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bot', '0013_auto_20171109_1759'),
]
operations = [
migrations.AlterField(
model_name='alertausuario',
name='chat_id',
field=models.IntegerField(blank=True),
),
migrations.AlterField(
model_name='alertausuario',
name='ultima_actualizacion',
field=models.DateTimeField(default=datetime.datetime(2017, 11, 9, 18, 13, 50, 254179)),
),
]
|
foxcarlos/decimemijobot
|
bot/migrations/0014_auto_20171109_1813.py
|
Python
|
gpl-3.0
| 694
|
#!/usr/bin/env python
# rsak - Router Swiss Army Knife
# Copyright (C) 2011 Pablo Castellano <pablo@anche.no>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class RouterModelBase:
def login(self, user, passw):
raise NotImplementedError
def logout(self):
raise NotImplementedError
def guess(self):
raise NotImplementedError
def getClientsList(self):
raise NotImplementedError
def forwardPort(self):
raise NotImplementedError
def protocolsSupported(self):
raise NotImplementedError
|
PabloCastellano/rsak
|
src/routermodelbase.py
|
Python
|
gpl-3.0
| 1,149
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Getting Things GNOME! - a personal organizer for the GNOME desktop
# Copyright (c) 2008-2013 - Lionel Dricot & Bertrand Rousseau
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
"""
Backends are a way to permanently store a project on a medium
(like on the hard disk or on the internet)
and to read projects from this medium
"""
import sys
import uuid
import os.path
from GTG.tools.logger import Log
from GTG.tools.borg import Borg
from GTG.backends.genericbackend import GenericBackend
from GTG.core import firstrun_tasks
from GTG.core.dirs import PROJECTS_XMLFILE
from GTG.tools import cleanxml
class BackendFactory(Borg):
'''
This class holds the information about the backend types.
Since it's about types, all information is static. The instantiated
backends are handled in the Datastore.
It is a Borg for what matters its only state (_backend_modules),
since it makes no sense of keeping multiple instances of this.
'''
BACKEND_PREFIX = "backend_"
def __init__(self):
"""
Creates a dictionary of the currently available backend modules
"""
Borg.__init__(self)
if hasattr(self, "backend_modules"):
# This object has already been constructed
return
self.backend_modules = {}
backend_files = self._find_backend_files()
# Create module names
module_names = [f.replace(".py", "") for f in backend_files]
Log.debug("Backends found: " + str(module_names))
# Load backend modules
for module_name in module_names:
extended_module_name = "GTG.backends." + module_name
try:
__import__(extended_module_name)
except ImportError as exception:
# Something is wrong with this backend, skipping
Log.warning("Backend %s could not be loaded: %s" %
(module_name, str(exception)))
continue
except Exception as exception:
# Other exception log as errors
Log.error("Malformated backend %s: %s" %
(module_name, str(exception)))
continue
self.backend_modules[module_name] = \
sys.modules[extended_module_name]
def _find_backend_files(self):
# Look for backends in the GTG/backends dir
this_dir = os.path.dirname(__file__)
for filename in os.listdir(this_dir):
is_python = filename.endswith(".py")
has_prefix = filename.startswith(self.BACKEND_PREFIX)
if is_python and has_prefix:
yield filename
def get_backend(self, backend_name):
'''
Returns the backend module for the backend matching
backend_name. Else, returns none
'''
if backend_name in self.backend_modules:
return self.backend_modules[backend_name]
else:
Log.debug("Trying to load backend %s, but failed!" % backend_name)
return None
def get_all_backends(self):
'''
Returns a dictionary containing all the backends types
'''
return self.backend_modules
def get_new_backend_dict(self, backend_name, additional_parameters={}):
'''
Constructs a new backend initialization dictionary. In more
exact terms, creates a dictionary, containing all the necessary
entries to initialize a backend.
'''
if backend_name not in self.backend_modules:
return None
dic = {}
module = self.get_backend(backend_name)
# Different pids are necessary to discern between backends of the same
# type
parameters = module.Backend.get_static_parameters()
# we all the parameters and their default values in dic
for param_name, param_dic in parameters.items():
dic[param_name] = param_dic[GenericBackend.PARAM_DEFAULT_VALUE]
dic["pid"] = str(uuid.uuid4())
dic["module"] = module.Backend.get_name()
for param_name, param_value in additional_parameters.items():
dic[param_name] = param_value
dic["backend"] = module.Backend(dic)
return dic
def restore_backend_from_xml(self, dic):
'''
Function restoring a backend from its xml description.
dic should be a dictionary containing at least the key
- "module", with the module name
- "xmlobject", with its xml description.
Every other key is passed as-is to the backend, as parameter.
Returns the backend instance, or None is something goes wrong
'''
if "module" not in dic or "xmlobject" not in dic:
Log.debug("Malformed backend configuration found! %s" %
dic)
module = self.get_backend(dic["module"])
if module is None:
Log.debug("could not load module for backend %s" %
dic["module"])
return None
# we pop the xml object, as it will be redundant when the parameters
# are set directly in the dict
xp = dic.pop("xmlobject")
# Building the dictionary
parameters_specs = module.Backend.get_static_parameters()
dic["pid"] = str(xp.getAttribute("pid"))
for param_name, param_dic in parameters_specs.items():
if xp.hasAttribute(param_name):
# we need to convert the parameter to the right format.
# we fetch the format from the static_parameters
param_type = param_dic[GenericBackend.PARAM_TYPE]
param_value = GenericBackend.cast_param_type_from_string(
xp.getAttribute(param_name), param_type)
dic[param_name] = param_value
# We put the backend itself in the dict
dic["backend"] = module.Backend(dic)
return dic["backend"]
def get_saved_backends_list(self):
backends_dic = self._read_backend_configuration_file()
# Retrocompatibility: default backend has changed name
for dic in backends_dic:
if dic["module"] == "localfile":
dic["module"] = "backend_localfile"
dic["pid"] = str(uuid.uuid4())
dic["need_conversion"] = \
dic["xmlobject"].getAttribute("filename")
# Now that the backend list is build, we will construct them
for dic in backends_dic:
self.restore_backend_from_xml(dic)
# If no backend available, we create a new using localfile. Xmlobject
# will be filled in by the backend
if len(backends_dic) == 0:
dic = BackendFactory().get_new_backend_dict(
"backend_localfile")
dic["backend"].this_is_the_first_run(firstrun_tasks.populate())
backends_dic.append(dic)
return backends_dic
def _read_backend_configuration_file(self):
'''
Reads the file describing the current backend configuration
(project.xml) and returns a list of dictionaries, each containing:
- the xml object defining the backend characteristics under
"xmlobject"
- the name of the backend under "module"
'''
# Read configuration file, if it does not exist, create one
doc, configxml = cleanxml.openxmlfile(PROJECTS_XMLFILE, "config")
xmlproject = doc.getElementsByTagName("backend")
# collect configured backends
return [{"xmlobject": xp,
"module": xp.getAttribute("module")} for xp in xmlproject]
|
jakubbrindza/gtg
|
GTG/backends/__init__.py
|
Python
|
gpl-3.0
| 8,418
|
# coding=utf-8
"""
This file is used to make a crawl
"""
import __init__
import os
import re
import urllib
from utility import prgbar
def get_html(url):
"""Get the html """
page = urllib.urlopen(url)
html = page.read()
return html
def get_pdf(html):
""" xxx"""
reg = r'href="(.+?\.pdf)">pdf'
pdfre = re.compile(reg)
pdflist = re.findall(pdfre, html)
dir_name = 'COLT2015'
maxrows = len(pdflist)
pbar = prgbar.ProgressBar(total=maxrows)
if os.path.exists(dir_name) is False:
os.mkdir(dir_name)
for idx, pdfurl in enumerate(pdflist):
filename = dir_name + '/' + pdfurl
pbar.log('http://jmlr.org/proceedings/papers/v40/' + pdfurl)
if os.path.exists(filename) is True:
pbar.log('Exist')
else:
urllib.urlretrieve(
'http://jmlr.org/proceedings/papers/v40/' + pdfurl, filename)
pbar.update(index=(idx + 1))
pbar.finish()
if __name__ == '__main__':
HTML = get_html("http://jmlr.org/proceedings/papers/v40/")
print(get_pdf(HTML))
|
JustJokerX/PaperCrawler
|
COLT/COLT2015.py
|
Python
|
gpl-3.0
| 1,083
|
# pyGeoNet_readGeotiff
#import sys
#import os
from osgeo import gdal
#from string import *
import numpy as np
from time import clock
import pygeonet_defaults as defaults
import pygeonet_prepare as Parameters
from math import modf, floor
#from scipy.stats.mstats import mquantiles
def read_dem_from_geotiff(demFileName,demFilePath):
# Open the GeoTIFF format DEM
fullFilePath = demFilePath + demFileName
#fullFilePath = "G:\\HarishLaptop_Backup\\TI102782W0E\\PythonScripts\\pyGeoNet1.0\\data\\skunk.tif"
print fullFilePath
ary = []
ds = gdal.Open(fullFilePath, gdal.GA_ReadOnly)
geotransform = ds.GetGeoTransform()
'''
print 'Driver: ', ds.GetDriver().ShortName,'/', \
ds.GetDriver().LongName
print 'Size is ',ds.RasterXSize,'x',ds.RasterYSize, \
'x',ds.RasterCount
print 'Projection is ',ds.GetProjection()
if not geotransform is None:
print 'Origin = (',geotransform[0], ',',geotransform[3],')'
print 'Pixel Size = (',geotransform[1], ',',geotransform[5],')'
'''
ary = ds.GetRasterBand(1).ReadAsArray()
#Parameters.geospatialReferenceArray
#Parameters.geoReferencingMatrix
#Parameters.geoBoundingBox
Parameters.demPixelScale = geotransform[1]
Parameters.xLowerLeftCoord = geotransform[0]
Parameters.yLowerLeftCoord = geotransform[3]
return ary
def quantile(x, q, qtype = 7, issorted = False):
"""
Args:
x - input data
q - quantile
qtype - algorithm
issorted- True if x already sorted.
Compute quantiles from input array x given q.For median,
specify q=0.5.
References:
http://reference.wolfram.com/mathematica/ref/Quantile.html
http://wiki.r-project.org/rwiki/doku.php?id=rdoc:stats:quantile
Author:
Ernesto P.Adorio Ph.D.
UP Extension Program in Pampanga, Clark Field.
"""
if not issorted:
y = sorted(x)
else:
y = x
if not (1 <= qtype <= 9):
return None # error!
# Parameters for the Hyndman and Fan algorithm
abcd = [(0, 0, 1, 0), # inverse empirical distrib.function., R type 1
(0.5, 0, 1, 0), # similar to type 1, averaged, R type 2
(0.5, 0, 0, 0), # nearest order statistic,(SAS) R type 3
(0, 0, 0, 1), # California linear interpolation, R type 4
(0.5, 0, 0, 1), # hydrologists method, R type 5
(0, 1, 0, 1), # mean-based estimate(Weibull method), (SPSS,Minitab), type 6
(1, -1, 0, 1), # mode-based method,(S, S-Plus), R type 7
(1.0/3, 1.0/3, 0, 1), # median-unbiased , R type 8
(3/8.0, 0.25, 0, 1) # normal-unbiased, R type 9.
]
a, b, c, d = abcd[qtype-1]
n = len(x)
g, j = modf( a + (n+b) * q -1)
if j < 0:
return y[0]
elif j >= n:
return y[n-1] # oct. 8, 2010 y[n]???!! uncaught off by 1 error!!!
j = int(floor(j))
if g == 0:
return y[j]
else:
return y[j] + (y[j+1]- y[j])* (c + d * g)
def main():
#demFileName = "skunk.tif"
#demFilePath = "G:\\HarishLaptop_Backup\\TI102782W0E\\PythonScripts\\pyGeoNet1.0\\data\\"
print "Reading input file path :",Parameters.demDataFilePath
print "Reading input file :",Parameters.demFileName
rawDemArray = read_dem_from_geotiff(Parameters.demFileName,Parameters.demDataFilePath)
nanDemArray=rawDemArray
nanDemArray[nanDemArray < defaults.demNanFlag]= np.NAN
Parameters.minDemValue= np.min(nanDemArray[:])
Parameters.maxDemValue= np.max(nanDemArray[:])
# Area of analysis
Parameters.xDemSize=np.size(rawDemArray,0)
Parameters.yDemSize=np.size(rawDemArray,1)
# Calculate pixel length scale and assume square
Parameters.maxLowerLeftCoord = np.max([Parameters.xDemSize, Parameters.yDemSize])
print 'DTM size: ',Parameters.xDemSize, 'x' ,Parameters.yDemSize
#-----------------------------------------------------------------------------
# Compute slope magnitude for raw and filtered DEMs
print 'Computing slope of raw DTM'
slopeMagnitudeDemArray = np.gradient(nanDemArray,Parameters.demPixelScale)
print slopeMagnitudeDemArray
# Computation of the threshold lambda used in Perona-Malik nonlinear
# filtering. The value of lambda (=edgeThresholdValue) is given by the 90th
# quantile of the absolute value of the gradient.
print'Computing lambda = q-q-based nonlinear filtering threshold'
mult = Parameters.xDemSize * Parameters.yDemSize
print np.size(slopeMagnitudeDemArray,0)
edgeThresholdValue = quantile(np.reshape(slopeMagnitudeDemArray,mult),defaults.demSmoothingQuantile)
print edgeThresholdValue
if __name__ == '__main__':
t0 = clock()
main()
t1 = clock()
print "time taken to complete the script is::",t1-t0," seconds"
print "script complete"
|
harish2rb/pyGeoNet
|
test/test_pygeonet_processing.py
|
Python
|
gpl-3.0
| 4,709
|
# -*- coding: utf-8 -*-
# Copyright 2012 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
from splinter.driver.webdriver.firefox import WebDriver as FirefoxWebDriver
from splinter.driver.webdriver.remote import WebDriver as RemoteWebDriver
from splinter.driver.webdriver.chrome import WebDriver as ChromeWebDriver
from splinter.driver.webdriver.phantomjs import WebDriver as PhantomJSWebDriver
from splinter.exceptions import DriverNotFoundError
_DRIVERS = {
'firefox': FirefoxWebDriver,
'remote': RemoteWebDriver,
'chrome': ChromeWebDriver,
'phantomjs': PhantomJSWebDriver,
}
try:
from splinter.driver.zopetestbrowser import ZopeTestBrowser
_DRIVERS['zope.testbrowser'] = ZopeTestBrowser
except ImportError:
pass
try:
import django # noqa
from splinter.driver.djangoclient import DjangoClient
_DRIVERS['django'] = DjangoClient
except ImportError:
pass
def Browser(driver_name='firefox', *args, **kwargs):
"""
Returns a driver instance for the given name.
When working with ``firefox``, it's possible to provide a profile name
and a list of extensions.
If you don't provide any driver_name, then ``firefox`` will be used.
If there is no driver registered with the provided ``driver_name``, this
function will raise a :class:`splinter.exceptions.DriverNotFoundError`
exception.
"""
try:
driver = _DRIVERS[driver_name]
except KeyError:
raise DriverNotFoundError("No driver for %s" % driver_name)
return driver(*args, **kwargs)
|
ritashugisha/OmniTube
|
splinter/browser.py
|
Python
|
gpl-3.0
| 1,638
|
from setuptools import setup#, find_packages, Extension
import distutils.command.build as _build
import setuptools.command.install as _install
import sys
import os
import os.path as op
import distutils.spawn as ds
import distutils.dir_util as dd
import posixpath
def run_cmake(arg=""):
"""
Forcing to run cmake
"""
if ds.find_executable('cmake') is None:
print "CMake is required to build zql"
print "Please install cmake version >= 2.8 and re-run setup"
sys.exit(-1)
print "Configuring zql build with CMake.... "
cmake_args = arg
try:
build_dir = op.join(op.split(__file__)[0], 'build')
dd.mkpath(build_dir)
os.chdir("build")
ds.spawn(['cmake', '..'] + cmake_args.split())
ds.spawn(['make', 'clean'])
ds.spawn(['make'])
os.chdir("..")
except ds.DistutilsExecError:
print "Error while running cmake"
print "run 'setup.py build --help' for build options"
print "You may also try editing the settings in CMakeLists.txt file and re-running setup"
sys.exit(-1)
class build(_build.build):
def run(self):
run_cmake()
# Now populate the extension module attribute.
#self.distribution.ext_modules = get_ext_modules()
_build.build.run(self)
class install(_install.install):
def run(self):
if not posixpath.exists("src/zq.so"):
run_cmake()
ds.spawn(['make', 'install'])
#self.distribution.ext_modules = get_ext_modules()
self.do_egg_install()
with open('README.txt') as file:
clips6_long_desc = file.read()
setup(
name = "zq",
version = '0.6',
description = 'ZQL - Zabbix Query Language',
install_requires = ["cython", "msgpack-python", "simplejson", "hy", "pyfiglet",
"gevent", "json", "termcolor", "humanfriendly", "ipaddr", "pyfscache",
"Cheetah", "dateparser", "pygithub",
],
requires = [],
include_package_data = True,
url = 'https://github.com/vulogov/zq/',
author='Vladimir Ulogov',
author_email = 'vladimir.ulogov@me.com',
maintainer_email = 'vladimir.ulogov@me.com',
license = "GNU GPL Versin 3",
long_description = clips6_long_desc,
keywords = "zql, monitoring, zabbix",
platforms = ['GNU/Linux','Unix','Mac OS-X'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Monitoring',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
'Environment :: Console',
'Environment :: Console :: Curses'
],
# ext_modules is not present here. This will be generated through CMake via the
# build or install commands
cmdclass={'install':install,'build': build},
zip_safe=False,
packages = ['zq'],
package_data = {
'zq': ['zq.so', '*.pyx', '*.pyi']
}
)
|
vulogov/zq
|
setup.py
|
Python
|
gpl-3.0
| 3,597
|
#
# core.py
#
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
#
# Deluge is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# deluge is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with deluge. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
#
#
import os
import time
import hashlib
import logging
from subprocess import Popen, PIPE
from deluge.plugins.pluginbase import CorePluginBase
import deluge.component as component
from deluge.configmanager import ConfigManager
from deluge.core.rpcserver import export
from deluge.event import DelugeEvent
log = logging.getLogger(__name__)
DEFAULT_CONFIG = {
"commands": []
}
EXECUTE_ID = 0
EXECUTE_EVENT = 1
EXECUTE_COMMAND = 2
EVENT_MAP = {
"complete": "TorrentFinishedEvent",
"added": "TorrentAddedEvent"
}
class ExecuteCommandAddedEvent(DelugeEvent):
"""
Emitted when a new command is added.
"""
def __init__(self, command_id, event, command):
self._args = [command_id, event, command]
class ExecuteCommandRemovedEvent(DelugeEvent):
"""
Emitted when a command is removed.
"""
def __init__(self, command_id):
self._args = [command_id]
class Core(CorePluginBase):
def enable(self):
self.config = ConfigManager("execute.conf", DEFAULT_CONFIG)
event_manager = component.get("EventManager")
self.registered_events = {}
# Go through the commands list and register event handlers
for command in self.config["commands"]:
event = command[EXECUTE_EVENT]
if event in self.registered_events:
continue
def create_event_handler(event):
def event_handler(torrent_id):
self.execute_commands(torrent_id, event)
return event_handler
event_handler = create_event_handler(event)
event_manager.register_event_handler(EVENT_MAP[event], event_handler)
self.registered_events[event] = event_handler
log.debug("Execute core plugin enabled!")
def execute_commands(self, torrent_id, event):
torrent = component.get("TorrentManager").torrents[torrent_id]
info = torrent.get_status(["name", "save_path", "move_on_completed", "move_on_completed_path"])
# Grab the torrent name and save path
torrent_name = info["name"]
if event == "complete":
save_path = info["move_on_completed_path"] if info ["move_on_completed"] else info["save_path"]
else:
save_path = info["save_path"]
log.debug("[execute] Running commands for %s", event)
# Go through and execute all the commands
for command in self.config["commands"]:
if command[EXECUTE_EVENT] == event:
command = os.path.expandvars(command[EXECUTE_COMMAND])
command = os.path.expanduser(command)
log.debug("[execute] running %s", command)
p = Popen([command, torrent_id, torrent_name, save_path], stdin=PIPE, stdout=PIPE, stderr=PIPE)
if p.wait() != 0:
log.warn("Execute command failed with exit code %d", p.returncode)
def disable(self):
self.config.save()
event_manager = component.get("EventManager")
for event, handler in self.registered_events.iteritems():
event_manager.deregister_event_handler(event, handler)
log.debug("Execute core plugin disabled!")
### Exported RPC methods ###
@export
def add_command(self, event, command):
command_id = hashlib.sha1(str(time.time())).hexdigest()
self.config["commands"].append((command_id, event, command))
self.config.save()
component.get("EventManager").emit(ExecuteCommandAddedEvent(command_id, event, command))
@export
def get_commands(self):
return self.config["commands"]
@export
def remove_command(self, command_id):
for command in self.config["commands"]:
if command[EXECUTE_ID] == command_id:
self.config["commands"].remove(command)
component.get("EventManager").emit(ExecuteCommandRemovedEvent(command_id))
break
self.config.save()
@export
def save_command(self, command_id, event, cmd):
for i, command in enumerate(self.config["commands"]):
if command[EXECUTE_ID] == command_id:
self.config["commands"][i] = (command_id, event, cmd)
break
self.config.save()
|
s0undt3ch/Deluge
|
deluge/plugins/Execute/deluge/plugins/execute/core.py
|
Python
|
gpl-3.0
| 5,677
|
'''
Created on Oct 29, 2015
@author: yangke
'''
from model.TaintVar import TaintVar
from TraceTrackTest import TraceTrackTest
class Test_objdump_addr:
def test(self):
passed_message="BINUTILS-2.23 'addr[1]' TEST PASSED!"
not_pass_message="ERRORS FOUND IN BINUTILS-2.23 'addr[1]' TEST!"
answer_path='answers/binutils/binutils-2.23/objdump/'
name='binutils-2.23_objdump_addr'
logfile_path="gdb_logs/binutils-2.23/binutils-2.23_objdump_gdb.txt"
c_proj_path="gdb_logs/binutils-2.23/binutils-2.23"
taintVars=[TaintVar("addr",['*'])]
test=TraceTrackTest(answer_path,name,logfile_path,taintVars,passed_message,not_pass_message)
test.set_c_proj_path(c_proj_path)
passed=test.test()
return passed
if __name__ == '__main__':
test=Test_objdump_addr()
test.test()
|
yangke/cluehunter
|
test/Test_objdump_addr.py
|
Python
|
gpl-3.0
| 852
|
# coding=utf-8
import traceback
from flask_babel import lazy_gettext
from mycodo.config import SQL_DATABASE_MYCODO
from mycodo.databases.models import Conversion
from mycodo.databases.models import DeviceMeasurements
from mycodo.databases.utils import session_scope
from mycodo.inputs.base_input import AbstractInput
from mycodo.inputs.sensorutils import convert_from_x_to_y_unit
from mycodo.utils.database import db_retrieve_table_daemon
from mycodo.utils.system_pi import get_measurement
from mycodo.utils.system_pi import return_measurement_info
MYCODO_DB_PATH = 'sqlite:///' + SQL_DATABASE_MYCODO
def constraints_pass_positive_value(mod_input, value):
"""
Check if the user input is acceptable
:param mod_input: SQL object with user-saved Input options
:param value: float or int
:return: tuple: (bool, list of strings)
"""
errors = []
all_passed = True
# Ensure value is positive
if value <= 0:
all_passed = False
errors.append("Must be a positive value")
return all_passed, errors, mod_input
def execute_at_modification(
messages,
mod_input,
request_form,
custom_options_dict_presave,
custom_options_channels_dict_presave,
custom_options_dict_postsave,
custom_options_channels_dict_postsave):
try:
if (custom_options_dict_postsave['adc_channel_ph'] ==
custom_options_dict_postsave['adc_channel_ec']):
messages["error"].append("Cannot set pH and EC to be measured from the same channel.")
else:
with session_scope(MYCODO_DB_PATH) as new_session:
measurements = new_session.query(DeviceMeasurements).filter(
DeviceMeasurements.device_id == mod_input.unique_id).all()
for each_measure in measurements:
if each_measure.channel == int(custom_options_dict_postsave['adc_channel_ph']):
if each_measure.measurement != 'ion_concentration':
messages["page_refresh"] = True
each_measure.conversion_id = ''
each_measure.measurement = 'ion_concentration'
each_measure.unit = 'pH'
elif each_measure.channel == int(custom_options_dict_postsave['adc_channel_ec']):
if each_measure.measurement != 'electrical_conductivity':
messages["page_refresh"] = True
each_measure.conversion_id = ''
each_measure.measurement = 'electrical_conductivity'
each_measure.unit = 'uS_cm'
else:
if each_measure.measurement != 'electrical_potential':
messages["page_refresh"] = True
each_measure.conversion_id = ''
each_measure.measurement = 'electrical_potential'
each_measure.unit = 'V'
new_session.commit()
except Exception:
messages["error"].append("execute_at_modification() Error: {}".format(traceback.print_exc()))
return (messages,
mod_input,
custom_options_dict_postsave,
custom_options_channels_dict_postsave)
# Measurements
measurements_dict = {
0: {
'measurement': 'ion_concentration',
'unit': 'pH'
},
1: {
'measurement': 'electrical_conductivity',
'unit': 'uS_cm'
},
2: {
'measurement': 'electrical_potential',
'unit': 'V'
},
3: {
'measurement': 'electrical_potential',
'unit': 'V'
},
4: {
'measurement': 'electrical_potential',
'unit': 'V'
},
5: {
'measurement': 'electrical_potential',
'unit': 'V'
},
6: {
'measurement': 'electrical_potential',
'unit': 'V'
},
7: {
'measurement': 'electrical_potential',
'unit': 'V'
}
}
# Input information
INPUT_INFORMATION = {
'input_name_unique': 'ADS1256_ANALOG_PH_EC',
'input_manufacturer': 'Texas Instruments',
'input_name': 'ADS1256: Generic Analog pH/EC',
'input_library': 'wiringpi, kizniche/PiPyADC-py3',
'measurements_name': 'Ion Concentration/Electrical Conductivity',
'measurements_dict': measurements_dict,
'execute_at_modification': execute_at_modification,
'message': 'This input relies on an ADS1256 analog-to-digital converter (ADC) to measure pH and/or electrical conductivity (EC) from analog sensors. You can enable or disable either measurement if you want to only connect a pH sensor or an EC sensor by selecting which measurements you want to under Measurements Enabled. Select which channel each sensor is connected to on the ADC. There are default calibration values initially set for the Input. There are also functions to allow you to easily calibrate your sensors with calibration solutions. If you use the Calibrate Slot actions, these values will be calculated and will replace the currently-set values. You can use the Clear Calibration action to delete the database values and return to using the default values. If you delete the Input or create a new Input to use your ADC/sensors with, you will need to recalibrate in order to store new calibration data.',
'options_enabled': [
'measurements_select',
'adc_gain',
'adc_sample_speed',
'period',
'pre_output'
],
'options_disabled': ['interface'],
'dependencies_module': [
('pip-pypi', 'wiringpi', 'wiringpi'),
('pip-pypi', 'pipyadc_py3', 'git+https://github.com/kizniche/PiPyADC-py3.git') # PiPyADC ported to Python3
],
'interfaces': ['UART'],
# TODO: Next major revision, move settings such as these to custom_options
'adc_gain': [
(1, '1 (±5 V)'),
(2, '2 (±2.5 V)'),
(4, '4 (±1.25 V)'),
(8, '8 (±0.5 V)'),
(16, '16 (±0.25 V)'),
(32, '32 (±0.125 V)'),
(64, '64 (±0.0625 V)')
],
'adc_sample_speed': [
('30000', '30,000'),
('15000', '15,000'),
('7500', '7,500'),
('3750', '3,750'),
('2000', '2,000'),
('1000', '1,000'),
('500', '500'),
('100', '100'),
('60', '60'),
('50', '50'),
('30', '30'),
('25', '25'),
('15', '15'),
('10', '10'),
('5', '5'),
('2d5', '2.5')
],
'custom_options': [
{
'id': 'adc_channel_ph',
'type': 'select',
'default_value': '0',
'options_select': [
('-1', 'Not Connected'),
('0', 'Channel 0'),
('1', 'Channel 1'),
('2', 'Channel 2'),
('3', 'Channel 3'),
('4', 'Channel 4'),
('5', 'Channel 5'),
('6', 'Channel 6'),
('7', 'Channel 7'),
],
'name': 'ADC Channel: pH',
'phrase': 'The ADC channel the pH sensor is connected'
},
{
'id': 'adc_channel_ec',
'type': 'select',
'default_value': '1',
'options_select': [
('-1', 'Not Connected'),
('0', 'Channel 0'),
('1', 'Channel 1'),
('2', 'Channel 2'),
('3', 'Channel 3'),
('4', 'Channel 4'),
('5', 'Channel 5'),
('6', 'Channel 6'),
('7', 'Channel 7'),
],
'name': 'ADC Channel: EC',
'phrase': 'The ADC channel the EC sensor is connected'
},
{
'type': 'message',
'default_value': 'Temperature Compensation',
},
{
'id': 'temperature_comp_meas',
'type': 'select_measurement',
'default_value': '',
'options_select': [
'Input',
'Function',
'Math'
],
'name': "{}: {}".format(lazy_gettext('Temperature Compensation'), lazy_gettext('Measurement')),
'phrase': lazy_gettext('Select a measurement for temperature compensation')
},
{
'id': 'max_age',
'type': 'integer',
'default_value': 120,
'required': True,
'constraints_pass': constraints_pass_positive_value,
'name': "{}: {}".format(lazy_gettext('Temperature Compensation'), lazy_gettext('Max Age')),
'phrase': lazy_gettext('The maximum age (seconds) of the measurement to use')
},
{
'type': 'message',
'default_value': 'pH Calibration Data',
},
{
'id': 'ph_cal_v1',
'type': 'float',
'default_value': 1.500,
'name': 'Cal data: V1 (internal)',
'phrase': 'Calibration data: Voltage'
},
{
'id': 'ph_cal_ph1',
'type': 'float',
'default_value': 7.0,
'name': 'Cal data: pH1 (internal)',
'phrase': 'Calibration data: pH'
},
{
'id': 'ph_cal_t1',
'type': 'float',
'default_value': 25.0,
'name': 'Cal data: T1 (internal)',
'phrase': 'Calibration data: Temperature'
},
{
'type': 'new_line'
},
{
'id': 'ph_cal_v2',
'type': 'float',
'default_value': 2.032,
'name': 'Cal data: V2 (internal)',
'phrase': 'Calibration data: Voltage'
},
{
'id': 'ph_cal_ph2',
'type': 'float',
'default_value': 4.0,
'name': 'Cal data: pH2 (internal)',
'phrase': 'Calibration data: pH'
},
{
'id': 'ph_cal_t2',
'type': 'float',
'default_value': 25.0,
'name': 'Cal data: T2 (internal)',
'phrase': 'Calibration data: Temperature'
},
{
'type': 'message',
'default_value': 'EC Calibration Data'
},
{
'id': 'ec_cal_v1',
'type': 'float',
'default_value': 0.232,
'name': 'EC cal data: V1 (internal)',
'phrase': 'EC calibration data: Voltage'
},
{
'id': 'ec_cal_ec1',
'type': 'float',
'default_value': 1413.0,
'name': 'EC cal data: EC1 (internal)',
'phrase': 'EC calibration data: EC'
},
{
'id': 'ec_cal_t1',
'type': 'float',
'default_value': 25.0,
'name': 'EC cal data: T1 (internal)',
'phrase': 'EC calibration data: EC'
},
{
'type': 'new_line'
},
{
'id': 'ec_cal_v2',
'type': 'float',
'default_value': 2.112,
'name': 'EC cal data: V2 (internal)',
'phrase': 'EC calibration data: Voltage'
},
{
'id': 'ec_cal_ec2',
'type': 'float',
'default_value': 12880.0,
'name': 'EC cal data: EC2 (internal)',
'phrase': 'EC calibration data: EC'
},
{
'id': 'ec_cal_t2',
'type': 'float',
'default_value': 25.0,
'name': 'EC cal data: T2 (internal)',
'phrase': 'EC calibration data: EC'
},
{
'type': 'new_line'
},
{
'id': 'adc_calibration',
'type': 'select',
'default_value': '',
'options_select': [
('', 'No Calibration'),
('SELFOCAL', 'Self Offset'),
('SELFGCAL', 'Self Gain'),
('SELFCAL', 'Self Offset + Self Gain'),
('SYSOCAL', 'System Offset'),
('SYSGCAL', 'System Gain')
],
'name': lazy_gettext('Calibration'),
'phrase': lazy_gettext('Set the calibration method to perform during Input activation')
},
],
'custom_actions': [
{
'type': 'message',
'default_value': """pH Calibration Actions: Place your probe in a solution of known pH.
Set the known pH value in the `Calibration buffer pH` field, and press `Calibrate pH, slot 1`.
Repeat with a second buffer, and press `Calibrate pH, slot 2`.
You don't need to change the values under `Custom Options`."""
},
{
'id': 'calibration_ph',
'type': 'float',
'default_value': 7.0,
'name': 'Calibration buffer pH',
'phrase': 'This is the nominal pH of the calibration buffer, usually labelled on the bottle.'
},
{
'id': 'calibrate_ph_slot_1',
'type': 'button',
'wait_for_return': True,
'name': 'Calibrate pH, slot 1'
},
{
'id': 'calibrate_ph_slot_2',
'type': 'button',
'wait_for_return': True,
'name': 'Calibrate pH, slot 2'
},
{
'id': 'clear_ph_calibrate_slots',
'type': 'button',
'wait_for_return': True,
'name': 'Clear pH Calibration Slots'
},
{
'type': 'message',
'default_value': """EC Calibration Actions: Place your probe in a solution of known EC.
Set the known EC value in the `Calibration standard EC` field, and press `Calibrate EC, slot 1`.
Repeat with a second standard, and press `Calibrate EC, slot 2`.
You don't need to change the values under `Custom Options`."""
},
{
'id': 'calibration_ec',
'type': 'float',
'default_value': 1413.0,
'name': 'Calibration standard EC',
'phrase': 'This is the nominal EC of the calibration standard, usually labelled on the bottle.'
},
{
'id': 'calibrate_ec_slot_1',
'type': 'button',
'wait_for_return': True,
'name': 'Calibrate EC, slot 1'
},
{
'id': 'calibrate_ec_slot_2',
'type': 'button',
'wait_for_return': True,
'name': 'Calibrate EC, slot 2'
},
{
'id': 'clear_ec_calibrate_slots',
'type': 'button',
'wait_for_return': True,
'name': 'Clear EC Calibration Slots'
},
]
}
class InputModule(AbstractInput):
"""Read ADC
Choose a gain of 1 for reading measurements from 0 to 4.09V.
Or pick a different gain to change the range of measurements that are read:
- 1 = ±5 V
- 2 = ±2.5 V
- 4 = ±1.25 V
- 8 = ±0.5 V
- 16 = ±0.25 V
- 32 = ±0.125 V
- 64 = ±0.0625 V
See table 3 in the ADS1256 datasheet for more info on gain.
"""
def __init__(self, input_dev, testing=False):
super(InputModule, self).__init__(input_dev, testing=testing, name=__name__)
self.sensor = None
self.CH_SEQUENCE = None
self.adc_gain = None
self.adc_sample_speed = None
self.adc_calibration = None
self.dict_gains = {
1: 0.125,
2: 0.0625,
4: 0.03125,
8: 0.015625,
16: 0.0078125,
32: 0.00390625,
64: 0.00195312,
}
self.adc_channel_ph = None
self.adc_channel_ec = None
self.temperature_comp_meas_device_id = None
self.temperature_comp_meas_measurement_id = None
self.max_age = None
self.ph_cal_v1 = None
self.ph_cal_ph1 = None
self.ph_cal_t1 = None
self.ph_cal_v2 = None
self.ph_cal_ph2 = None
self.ph_cal_t2 = None
self.ec_cal_v1 = None
self.ec_cal_ec1 = None
self.ec_cal_t1 = None
self.ec_cal_v2 = None
self.ec_cal_ec2 = None
self.ec_cal_t2 = None
self.slope = None
self.intercept = None
if not testing:
self.setup_custom_options(
INPUT_INFORMATION['custom_options'], input_dev)
self.initialize_input()
def initialize_input(self):
#import adafruit_ads1x15.ads1115 as ADS
#from adafruit_ads1x15.analog_in import AnalogIn
#from adafruit_extended_bus import ExtendedI2C
import glob
from pipyadc_py3 import ADS1256
from pipyadc_py3.ADS1256_definitions import POS_AIN0
from pipyadc_py3.ADS1256_definitions import POS_AIN1
from pipyadc_py3.ADS1256_definitions import POS_AIN2
from pipyadc_py3.ADS1256_definitions import POS_AIN3
from pipyadc_py3.ADS1256_definitions import POS_AIN4
from pipyadc_py3.ADS1256_definitions import POS_AIN5
from pipyadc_py3.ADS1256_definitions import POS_AIN6
from pipyadc_py3.ADS1256_definitions import POS_AIN7
from pipyadc_py3.ADS1256_definitions import NEG_AINCOM
# Input pin for the potentiometer on the Waveshare Precision ADC board
POTI = POS_AIN0 | NEG_AINCOM
# Light dependant resistor
LDR = POS_AIN1 | NEG_AINCOM
# The other external input screw terminals of the Waveshare board
EXT2, EXT3, EXT4 = POS_AIN2 | NEG_AINCOM, POS_AIN3 | NEG_AINCOM, POS_AIN4 | NEG_AINCOM
EXT5, EXT6, EXT7 = POS_AIN5 | NEG_AINCOM, POS_AIN6 | NEG_AINCOM, POS_AIN7 | NEG_AINCOM
channels = {
0: POTI,
1: LDR,
2: EXT2,
3: EXT3,
4: EXT4,
5: EXT5,
6: EXT6,
7: EXT7,
}
#self.analog_in = AnalogIn
#self.ads = ADS
# Generate the channel sequence for enabled channels
self.CH_SEQUENCE = []
for channel in self.channels_measurement:
if self.is_enabled(channel):
self.CH_SEQUENCE.append(channels[channel])
self.CH_SEQUENCE = tuple(self.CH_SEQUENCE)
if self.input_dev.adc_gain == 0:
self.adc_gain = 1
else:
self.adc_gain = self.input_dev.adc_gain
self.adc_sample_speed = self.input_dev.adc_sample_speed
if glob.glob('/dev/spi*'):
self.sensor = ADS1256()
# Perform selected calibration
if self.adc_calibration == 'SELFOCAL':
self.sensor.cal_self_offset()
elif self.adc_calibration == 'SELFGCAL':
self.sensor.cal_self_gain()
elif self.adc_calibration == 'SELFCAL':
self.sensor.cal_self()
elif self.adc_calibration == 'SYSOCAL':
self.sensor.cal_system_offset()
elif self.adc_calibration == 'SYSGCAL':
self.sensor.cal_system_gain()
else:
raise Exception(
"SPI device /dev/spi* not found. Ensure SPI is enabled and the device is recognized/setup by linux.")
# self.adc = ADS.ADS1115(
# ExtendedI2C(self.input_dev.i2c_bus),
# address=int(str(self.input_dev.i2c_location), 16))
def calibrate_ph(self, cal_slot, args_dict):
"""Calibration helper method."""
if 'calibration_ph' not in args_dict:
self.logger.error("Cannot conduct calibration without a buffer pH value")
return
if (not isinstance(args_dict['calibration_ph'], float) and
not isinstance(args_dict['calibration_ph'], int)):
self.logger.error("buffer value does not represent a number: '{}', type: {}".format(
args_dict['calibration_ph'], type(args_dict['calibration_ph'])))
return
v = self.get_volt_data(self.get_voltages(), int(self.adc_channel_ph)) # pH
temp = self.get_temp_data()
if temp is not None:
# Use measured temperature
t = temp
else:
# Assume room temperature of 25C
t = 25
self.logger.debug("Assigning voltage {} and temperature {} to pH {}".format(
v, t, args_dict['calibration_ph']))
if cal_slot == 1:
# set values currently being used
self.ph_cal_v1 = v
self.ph_cal_ph1 = args_dict['calibration_ph']
self.ph_cal_t1 = t
# save values for next startup
self.set_custom_option("ph_cal_v1", v)
self.set_custom_option("ph_cal_ph1", args_dict['calibration_ph'])
self.set_custom_option("ph_cal_t1", t)
elif cal_slot == 2:
# set values currently being used
self.ph_cal_v2 = v
self.ph_cal_ph2 = args_dict['calibration_ph']
self.ph_cal_t2 = t
# save values for next startup
self.set_custom_option("ph_cal_v2", v)
self.set_custom_option("ph_cal_ph2", args_dict['calibration_ph'])
self.set_custom_option("ph_cal_t2", t)
def calibrate_ph_slot_1(self, args_dict):
"""calibrate."""
self.calibrate_ph(1, args_dict)
def calibrate_ph_slot_2(self, args_dict):
"""calibrate."""
self.calibrate_ph(2, args_dict)
def clear_ph_calibrate_slots(self, args_dict):
self.delete_custom_option("ph_cal_v1")
self.delete_custom_option("ph_cal_ph1")
self.delete_custom_option("ph_cal_t1")
self.delete_custom_option("ph_cal_v2")
self.delete_custom_option("ph_cal_ph2")
self.delete_custom_option("ph_cal_t2")
self.setup_custom_options(
INPUT_INFORMATION['custom_options'], self.input_dev)
def calibrate_ec(self, cal_slot, args_dict):
"""Calibration helper method."""
if 'calibration_ec' not in args_dict:
self.logger.error("Cannot conduct calibration without a standard EC value")
return
if (not isinstance(args_dict['calibration_ec'], float) and
not isinstance(args_dict['calibration_ec'], int)):
self.logger.error("standard value does not represent a number: '{}', type: {}".format(
args_dict['calibration_ec'], type(args_dict['calibration_ec'])))
return
v = self.get_volt_data(self.get_voltages(), int(self.adc_channel_ec)) # EC
temp = self.get_temp_data()
if temp is not None:
# Use measured temperature
t = temp
else:
# Assume room temperature of 25C
t = 25
self.logger.debug("Assigning voltage {} and temperature {} to EC {}".format(
v, t, args_dict['calibration_ec']))
# For future sessions
if cal_slot == 1:
# set values currently being used
self.ec_cal_v1 = v
self.ec_cal_ec1 = args_dict['calibration_ec']
self.ec_cal_t1 = t
# save values for next startup
self.set_custom_option("ec_cal_v1", v)
self.set_custom_option("ec_cal_ec1", args_dict['calibration_ec'])
self.set_custom_option("ec_cal_t1", t)
elif cal_slot == 2:
self.ec_cal_v2 = v
self.ec_cal_ec2 = args_dict['calibration_ec']
self.ec_cal_t2 = t
self.set_custom_option("ec_cal_v2", v)
self.set_custom_option("ec_cal_ec2", args_dict['calibration_ec'])
self.set_custom_option("ec_cal_t2", t)
def calibrate_ec_slot_1(self, args_dict):
"""calibrate."""
self.calibrate_ec(1, args_dict)
def calibrate_ec_slot_2(self, args_dict):
"""calibrate."""
self.calibrate_ec(2, args_dict)
def clear_ec_calibrate_slots(self, args_dict):
self.delete_custom_option("ec_cal_v1")
self.delete_custom_option("ec_cal_ec1")
self.delete_custom_option("ec_cal_t1")
self.delete_custom_option("ec_cal_v2")
self.delete_custom_option("ec_cal_ec2")
self.delete_custom_option("ec_cal_t2")
self.setup_custom_options(
INPUT_INFORMATION['custom_options'], self.input_dev)
@staticmethod
def nernst_correction(volt, temp):
"""Apply temperature correction for pH. This provides the voltage as if it were measured at 25C.
Based on the Nernst equation: E = E0 - ln(10) * RT/nF * pH; this gives E = E0 - 0.198 * T * pH.
The correction is a simple ratio of absolute temperature."""
volt_25C = volt * 298/(temp+273)
return volt_25C
@staticmethod
def viscosity_correction(volt, temp):
"""Apply temperature correction for EC. This provides the voltage as if it were measured at 25C.
Based on the Nernst-Einstein and Stokes-Einstein relations, related to viscosity: EC/EC25 = vis25/vis.
The correction is a linear approximation to the full curve, valid for 10-30C."""
volt_25C = volt / (1 + 0.020 * (temp - 25))
return volt_25C
def get_voltages(self):
voltages_list = []
for _ in range(2):
raw_channels = self.sensor.read_sequence(self.CH_SEQUENCE)
voltages_list = [i * self.sensor.v_per_digit for i in raw_channels]
if 0 not in voltages_list:
break
return voltages_list
def get_temp_data(self):
"""Get the temperature."""
if self.temperature_comp_meas_measurement_id:
self.logger.debug("Temperature corrections will be applied")
last_measurement = self.get_last_measurement(
self.temperature_comp_meas_device_id,
self.temperature_comp_meas_measurement_id,
max_age=self.max_age
)
if last_measurement and len(last_measurement) > 1:
device_measurement = get_measurement(
self.temperature_comp_meas_measurement_id)
conversion = db_retrieve_table_daemon(
Conversion, unique_id=device_measurement.conversion_id)
_, unit, _ = return_measurement_info(
device_measurement, conversion)
if unit != "C":
out_value = convert_from_x_to_y_unit(
unit, "C", last_measurement[1])
else:
out_value = last_measurement[1]
self.logger.debug("Latest temperature: {temp} C".format(
temp=out_value))
else:
self.logger.error(
"Temperature measurement not found within the "
"past {} seconds".format(self.max_age))
out_value = None
else:
self.logger.debug("No temperature corrections applied")
out_value = None
return out_value
def get_volt_data(self, voltages, channel):
"""Measure voltage at ADC channel."""
if not voltages or 0 in voltages:
self.logger.error("ADC returned measurement of 0 (indicating something is wrong).")
return
volt_data = voltages[channel]
# chan = self.analog_in(self.adc, channel)
# self.adc.gain = self.adc_gain
# self.logger.debug("Channel {}: Gain {}, {} raw, {} volts".format(
# channel, self.adc_gain, chan.value, chan.voltage))
# volt_data = chan.voltage
# raw_channel2 = self.sensor.read_oneshot(self.chan)
# volt_data2 = raw_channel2 * self.sensor.v_per_digit
return volt_data
def convert_volt_to_ph(self, volt, temp):
"""Convert voltage to pH."""
# Calculate slope and intercept from calibration points.
self.slope = ((self.ph_cal_ph1 - self.ph_cal_ph2) /
(self.nernst_correction(self.ph_cal_v1, self.ph_cal_t1) -
self.nernst_correction(self.ph_cal_v2, self.ph_cal_t2)))
self.intercept = (self.ph_cal_ph1 -
self.slope *
self.nernst_correction(self.ph_cal_v1, self.ph_cal_t1))
if temp is not None:
# Perform temperature corrections
ph = self.slope * self.nernst_correction(volt, temp) + self.intercept
else:
# Don't perform temperature corrections
ph = self.slope * volt + self.intercept
return ph
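# Two-point calibration sketch (hypothetical values): with pH 7.0 at 1.50 V and
# pH 4.0 at 2.03 V (both at 25 C), slope = (7.0 - 4.0)/(1.50 - 2.03) = -5.66 pH/V
# and intercept = 7.0 - (-5.66 * 1.50) = 15.49, so a reading of 1.80 V converts
# to -5.66 * 1.80 + 15.49 = 5.3 pH. The EC conversion below works the same way.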
def convert_volt_to_ec(self, volt, temp):
"""Convert voltage to EC."""
# Calculate slope and intercept from calibration points.
self.slope = ((self.ec_cal_ec1 - self.ec_cal_ec2) /
(self.viscosity_correction(self.ec_cal_v1, self.ec_cal_t1) -
self.viscosity_correction(self.ec_cal_v2, self.ec_cal_t2)))
self.intercept = (self.ec_cal_ec1 -
self.slope *
self.viscosity_correction(self.ec_cal_v1, self.ec_cal_t1))
if temp is not None:
# Perform temperature corrections
ec = self.slope * self.viscosity_correction(volt, temp) + self.intercept
else:
# Don't perform temperature corrections
ec = self.slope * volt + self.intercept
return ec
def generate_dict(self):
return_dict = {}
with session_scope(MYCODO_DB_PATH) as new_session:
measurements = new_session.query(DeviceMeasurements).filter(
DeviceMeasurements.device_id == self.unique_id).all()
for each_measure in measurements:
return_dict[each_measure.channel] = {
'measurement': each_measure.measurement,
'unit': each_measure.unit
}
return return_dict
def get_measurement(self):
"""Gets the measurement."""
if not self.sensor:
self.logger.error("Error 101: Device not set up. See https://kizniche.github.io/Mycodo/Error-Codes#error-101 for more info.")
return
self.return_dict = self.generate_dict()
voltages = self.get_voltages()
for each_channel in range(8):
if (each_channel == int(self.adc_channel_ph) and
self.is_enabled(int(self.adc_channel_ph))): # pH
self.value_set(
int(self.adc_channel_ph),
self.convert_volt_to_ph(
self.get_volt_data(voltages, int(self.adc_channel_ph)),
self.get_temp_data()))
elif (each_channel == int(self.adc_channel_ec) and
self.is_enabled(int(self.adc_channel_ec))): # EC
self.value_set(
int(self.adc_channel_ec),
self.convert_volt_to_ec(
self.get_volt_data(voltages, int(self.adc_channel_ec)),
self.get_temp_data()))
elif self.is_enabled(each_channel):
self.value_set(
each_channel, self.get_volt_data(voltages, each_channel))
return self.return_dict
|
kizniche/Mycodo
|
mycodo/inputs/ads1256_analog_ph_ec.py
|
Python
|
gpl-3.0
| 31,269
|
# Codecademy course answer
# Set eggs equal to 100 using exponentiation on line 3!
eggs = 10 ** 2
print eggs
|
nurhandipa/python
|
codecademy/exponentiation.py
|
Python
|
gpl-3.0
| 111
|
import sys
from core import loop
from util import jsonmanager, debug
def make_console_menu(name):
menu_data_file_path = '_Resources/Data/MenuData/'
path = menu_data_file_path + name + '.json'
data = jsonmanager.get_data(path)
title = data['Title']
item_data = data['Items']
args = []
for item_datum in item_data:
args.append((item_datum['Text'], item_datum['Action']))
return ConsoleMenu(title, args)
class ConsoleMenuItem:
def __init__(self, text, action):
self.text = text
self.action = action
def invoke(self):
try:
getattr(sys.modules[__name__], self.action)()
except AttributeError as error:
debug.log('Something went wrong :(')
debug.log(error.args)
raise error
class ConsoleMenu:
def __init__(self, title, args):
self.title = title
self.menu_items = []
for argument in args:
self.add_menu_item(argument[0], argument[1])
def add_menu_item(self, text, action):
self.menu_items.append(ConsoleMenuItem(text, action))
def get_menu_item(self, index):
return self.menu_items[index]
def display_menu_item(self, index):
menu_item = self.get_menu_item(index)
print('[' + str(index) + '] - ' + menu_item.text)
def run(self):
for index in range(0, len(self.menu_items)):
self.display_menu_item(index)
result = input('Choose an option: ')
self.get_menu_item(int(result)).invoke()
def run_loop(game_loop):
game_loop.set_scene('pallet-town')
game_loop.run()
def run_editor():
run_loop(loop.EditorLoop())
def run_game():
run_loop(loop.DefaultGameLoop())
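# A menu file under _Resources/Data/MenuData/ is assumed to look like this
# (hypothetical example):
# {
#     "Title": "Main Menu",
#     "Items": [
#         {"Text": "Play", "Action": "run_game"},
#         {"Text": "Editor", "Action": "run_editor"}
#     ]
# }
# where each Action names a callable defined in this module, resolved via
# getattr(sys.modules[__name__], action) in ConsoleMenuItem.invoke().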
|
monodokimes/pythonmon
|
core/menu.py
|
Python
|
gpl-3.0
| 1,737
|
#!/usr/bin/python3
import sys
from lxml import html
import urllib3
import re
http = urllib3.PoolManager()
baseUrl = 'http://scoreboard.uscyberpatriot.org/'
scoresPage = html.fromstring(http.request('GET', baseUrl + 'team.php?team=' + sys.argv[1]).data)
# XPath for chart script: /html/body/div[2]/div/script[1]
chart = scoresPage.xpath('/html/body/div[2]/div/script[1]')[0]
scoreTimes = re.compile(r'\[\'([0-9]{2}/[0123456789 :]+)\'((, (-?[0-9]{1,3}|null))+)\],?', re.MULTILINE)
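# The chart script embeds rows like ['10/28 09:00', 55, null, 72], (assumed
# format): the first capture is the timestamp and the second is the run of
# comma-separated scores, where 'null' marks a missing score at that time.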
reSearch = scoreTimes.findall(chart.text)
for res in reSearch:
# Tuple result
# Capture 0 is time
# Capture 1 is screwyformat scores
print(res[0], end='')
for score in filter(None, res[1].split(',')):
print('\t' + score, end='')
print()
|
glen3b/CyberPatriotScoreboardParser
|
scoreovertime.py
|
Python
|
gpl-3.0
| 756
|
from buildpal_client import compile as buildpal_compile
import os
import subprocess
import asyncio
import sys
import struct
import threading
import pytest
from buildpal.common import MessageProtocol
class ProtocolTester(MessageProtocol):
@classmethod
def check_exit_code(cls, code):
if hasattr(cls, 'expected_exit_code'):
assert code == cls.expected_exit_code
def __init__(self, loop):
self.initial = True
self.loop = loop
super().__init__()
def process_msg(self, msg):
if self.initial:
assert len(msg) > 5
self.compiler_name = msg[0].decode()
assert self.compiler_name == 'msvc'
self.executable = msg[1].decode()
assert os.path.exists(self.executable)
assert os.path.isfile(self.executable)
assert os.path.basename(self.executable) == 'cl.exe'
self.sysinclude_dirs = msg[2].decode().rstrip(';').split(';')
for path in self.sysinclude_dirs:
assert os.path.exists(path)
assert os.path.isdir(path)
self.cwd = msg[3].decode()
assert os.path.exists(self.cwd)
assert os.path.isdir(self.cwd)
self.command = [x.decode() for x in msg[4:]]
self.send_request()
self.initial = False
else:
self.process_response(msg)
def send_request(self):
raise NotImplementedError()
def process_response(self, msg):
raise NotImplementedError()
def connection_lost(self, exc):
self.loop.stop()
class RunLocallyTester(ProtocolTester):
expected_exit_code = 0
def send_request(self):
self.send_msg([b'RUN_LOCALLY'])
class ExecuteAndExitTester(ProtocolTester):
@classmethod
def check_exit_code(cls, code):
assert code != 0
def send_request(self):
self.send_msg([b'EXECUTE_AND_EXIT', b'/nologo'])
class ExecuteGetOutputTester(ProtocolTester):
expected_exit_code = 6132
def send_request(self):
self.send_msg([b'EXECUTE_GET_OUTPUT', b'/nologo'])
def process_response(self, msg):
retcode, stdout, stderr = msg
retcode = int(retcode.memory())
assert retcode != 0
assert not stdout.memory()
assert b'missing source filename' in stderr.tobytes()
self.send_msg([b'EXIT', struct.pack('!I', self.expected_exit_code & 0xFFFFFFFF), b'',
b''])
class ExitTester(ProtocolTester):
expected_exit_code = 666
def send_request(self):
self.send_msg([b'EXIT', struct.pack('!I', self.expected_exit_code & 0xFFFFFFFF), b'',
b''])
class LocateFiles(ProtocolTester):
expected_exit_code = 3124
files = [b'cl.exe', b'c1xx.dll']
def send_request(self):
self.send_msg([b'LOCATE_FILES'] + self.files)
def process_response(self, msg):
assert len(msg) == len(self.files)
for file, full in zip(self.files, msg):
assert os.path.basename(full.tobytes()) == file
assert os.path.isfile(full.tobytes())
self.send_msg([b'EXIT', struct.pack('!I', self.expected_exit_code & 0xFFFFFFFF), b'',
b''])
@pytest.fixture(scope='function')
def buildpal_compile_args(tmpdir, vcenv_and_cl):
port = 'test_protocol_{}'.format(os.getpid())
file = os.path.join(str(tmpdir), 'aaa.cpp')
with open(file, 'wt'):
pass
args = ['compile', '/c', file]
env, cl = vcenv_and_cl
return ("msvc", cl, env, subprocess.list2cmdline(args), port)
@pytest.mark.parametrize("protocol_tester", [RunLocallyTester,
ExecuteGetOutputTester, ExecuteAndExitTester, ExitTester, LocateFiles])
def test_protocol(buildpal_compile_args, protocol_tester):
loop = asyncio.ProactorEventLoop()
[server] = loop.run_until_complete(loop.start_serving_pipe(
lambda : protocol_tester(loop), "\\\\.\\pipe\\BuildPal_{}".format(buildpal_compile_args[-1])))
class ExitCode:
pass
def run_thread():
ExitCode.exit_code = buildpal_compile(*buildpal_compile_args)
thread = threading.Thread(target=run_thread)
thread.start()
loop.run_forever()
thread.join()
@asyncio.coroutine
def close_server():
server.close()
loop.run_until_complete(close_server())
assert ExitCode.exit_code is not None
protocol_tester.check_exit_code(ExitCode.exit_code)
|
pkesist/buildpal
|
Python/test/test_client.py
|
Python
|
gpl-3.0
| 4,561
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-12-09 00:06
from __future__ import unicode_literals
from django.conf import settings
import django.core.files.storage
from django.db import migrations, models
import django.db.migrations.operations.special
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
replaces = [
("crashmanager", "0001_initial"),
("crashmanager", "0002_bugzillatemplate_security"),
("crashmanager", "0003_bucket_frequent"),
("crashmanager", "0004_add_tool"),
("crashmanager", "0005_add_user"),
("crashmanager", "0006_user_defaultproviderid"),
("crashmanager", "0007_bugzillatemplate_comment"),
("crashmanager", "0008_crashentry_crashaddressnumeric"),
("crashmanager", "0009_copy_crashaddress"),
("crashmanager", "0010_bugzillatemplate_security_group"),
("crashmanager", "0011_bucket_permanent"),
("crashmanager", "0012_crashentry_cachedcrashinfo"),
("crashmanager", "0013_init_cachedcrashinfo"),
("crashmanager", "0014_bugzillatemplate_testcase_filename"),
("crashmanager", "0015_crashentry_triagedonce"),
("crashmanager", "0016_auto_20160308_1500"),
("crashmanager", "0017_user_restricted"),
("crashmanager", "0018_auto_20170620_1503"),
("crashmanager", "0019_bucket_optimizedsignature"),
("crashmanager", "0020_add_app_permissions"),
]
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name="Bucket",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("signature", models.TextField()),
("shortDescription", models.CharField(blank=True, max_length=1023)),
],
),
migrations.CreateModel(
name="Bug",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("externalId", models.CharField(blank=True, max_length=255)),
("closed", models.DateTimeField(blank=True, null=True)),
],
),
migrations.CreateModel(
name="BugProvider",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("classname", models.CharField(max_length=255)),
("hostname", models.CharField(max_length=255)),
("urlTemplate", models.CharField(max_length=1023)),
],
),
migrations.CreateModel(
name="BugzillaTemplate",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.TextField()),
("product", models.TextField()),
("component", models.TextField()),
("summary", models.TextField(blank=True)),
("version", models.TextField()),
("description", models.TextField(blank=True)),
("whiteboard", models.TextField(blank=True)),
("keywords", models.TextField(blank=True)),
("op_sys", models.TextField(blank=True)),
("platform", models.TextField(blank=True)),
("priority", models.TextField(blank=True)),
("severity", models.TextField(blank=True)),
("alias", models.TextField(blank=True)),
("cc", models.TextField(blank=True)),
("assigned_to", models.TextField(blank=True)),
("qa_contact", models.TextField(blank=True)),
("target_milestone", models.TextField(blank=True)),
("attrs", models.TextField(blank=True)),
("security", models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name="Client",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=255)),
],
),
migrations.CreateModel(
name="CrashEntry",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("created", models.DateTimeField(default=django.utils.timezone.now)),
("rawStdout", models.TextField(blank=True)),
("rawStderr", models.TextField(blank=True)),
("rawCrashData", models.TextField(blank=True)),
("metadata", models.TextField(blank=True)),
("env", models.TextField(blank=True)),
("args", models.TextField(blank=True)),
("crashAddress", models.CharField(blank=True, max_length=255)),
("shortSignature", models.CharField(blank=True, max_length=255)),
(
"bucket",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="crashmanager.Bucket",
),
),
(
"client",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="crashmanager.Client",
),
),
],
),
migrations.CreateModel(
name="OS",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=63)),
("version", models.CharField(blank=True, max_length=127, null=True)),
],
),
migrations.CreateModel(
name="Platform",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=63)),
],
),
migrations.CreateModel(
name="Product",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=63)),
("version", models.CharField(blank=True, max_length=127, null=True)),
],
),
migrations.CreateModel(
name="TestCase",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"test",
models.FileField(
storage=django.core.files.storage.FileSystemStorage(
location=None
),
upload_to=b"tests",
),
),
("size", models.IntegerField(default=0)),
("quality", models.IntegerField(default=0)),
("isBinary", models.BooleanField(default=False)),
],
),
migrations.AddField(
model_name="crashentry",
name="os",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="crashmanager.OS"
),
),
migrations.AddField(
model_name="crashentry",
name="platform",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="crashmanager.Platform"
),
),
migrations.AddField(
model_name="crashentry",
name="product",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="crashmanager.Product"
),
),
migrations.AddField(
model_name="crashentry",
name="testcase",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="crashmanager.TestCase",
),
),
migrations.AddField(
model_name="bug",
name="externalType",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="crashmanager.BugProvider",
),
),
migrations.AddField(
model_name="bucket",
name="bug",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="crashmanager.Bug",
),
),
migrations.AddField(
model_name="bucket",
name="frequent",
field=models.BooleanField(default=False),
),
migrations.CreateModel(
name="Tool",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=63)),
],
),
migrations.CreateModel(
name="User",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("defaultTemplateId", models.IntegerField(default=0)),
("defaultToolsFilter", models.ManyToManyField(to="crashmanager.Tool")),
(
"user",
models.OneToOneField(
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
),
),
("defaultProviderId", models.IntegerField(default=1)),
],
),
migrations.AddField(
model_name="crashentry",
name="tool",
field=models.ForeignKey(
default=1,
on_delete=django.db.models.deletion.CASCADE,
to="crashmanager.Tool",
),
preserve_default=False,
),
migrations.AddField(
model_name="bugzillatemplate",
name="comment",
field=models.TextField(blank=True, default=""),
preserve_default=False,
),
migrations.AddField(
model_name="crashentry",
name="crashAddressNumeric",
field=models.BigIntegerField(blank=True, null=True),
),
migrations.AddField(
model_name="bugzillatemplate",
name="security_group",
field=models.TextField(blank=True, default=""),
preserve_default=False,
),
migrations.AddField(
model_name="bucket",
name="permanent",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="crashentry",
name="cachedCrashInfo",
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name="bugzillatemplate",
name="testcase_filename",
field=models.TextField(blank=True, default=""),
preserve_default=False,
),
migrations.AddField(
model_name="crashentry",
name="triagedOnce",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="user",
name="restricted",
field=models.BooleanField(default=False),
),
migrations.CreateModel(
name="BucketWatch",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("lastCrash", models.IntegerField(default=0)),
(
"bucket",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="crashmanager.Bucket",
),
),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="crashmanager.User",
),
),
],
),
migrations.AddField(
model_name="user",
name="bucketsWatching",
field=models.ManyToManyField(
through="crashmanager.BucketWatch", to="crashmanager.Bucket"
),
),
migrations.AddField(
model_name="bucket",
name="optimizedSignature",
field=models.TextField(blank=True, null=True),
),
migrations.AlterModelOptions(
name="user",
options={
"permissions": (
("view_crashmanager", "Can see CrashManager app"),
("view_covmanager", "Can see CovManager app"),
("view_ec2spotmanager", "Can see EC2SpotManager app"),
)
},
),
]
|
MozillaSecurity/FuzzManager
|
server/crashmanager/migrations/0001_squashed_0020_add_app_permissions.py
|
Python
|
mpl-2.0
| 15,946
|
""" Setup file.
"""
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.rst')) as f:
README = f.read()
requires = ['cornice', 'metlog-py', 'mozsvc', 'PasteScript', 'waitress', 'PyBrowserID', 'Requests', 'webtest']
setup(name='fxap',
version='0.1',
description='fxap',
long_description=README,
license='MPLv2.0',
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
"Programming Language :: Python",
"Framework :: Pylons",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application"
],
keywords="web services",
author='',
author_email='',
url='',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
entry_points = """\
[paste.app_factory]
main = fxap:main
""",
paster_plugins=['pyramid'],
)
|
ncalexan/server-fxap
|
setup.py
|
Python
|
mpl-2.0
| 1,001
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/
from __future__ import division, absolute_import, print_function, unicode_literals
import OpenSSL
import uuid
import random
import os
import json
import subprocess
import sys
sha256 = 'sha256'
if sys.version_info[:1] == (2,):
input = raw_input
sha256 = b'sha256'
def create_ca(size=2048, valid=315360000, CN=None):
"""
Creates a CA key and cert
size - The RSA key size to be used
valid - The time in seconds the key should be valid for
CN - The CN to be used for the cert. None will create a UUID
"""
if CN is None:
CN = 'CA-'+str(uuid.uuid4())
key = OpenSSL.crypto.PKey()
key.generate_key(OpenSSL.crypto.TYPE_RSA, size)
ca = OpenSSL.crypto.X509()
ca.set_version(2)
#ca.set_serial_number(1)
ca.get_subject().CN = CN
ca.gmtime_adj_notBefore(0)
ca.gmtime_adj_notAfter(valid)
ca.set_issuer(ca.get_subject())
ca.set_pubkey(key)
ca.add_extensions([
OpenSSL.crypto.X509Extension(b"basicConstraints", False, b"CA:TRUE"),
OpenSSL.crypto.X509Extension(b"keyUsage", False, b"keyCertSign, cRLSign"),
OpenSSL.crypto.X509Extension(b"subjectKeyIdentifier", False, b"hash", subject=ca)
])
ca.add_extensions([
OpenSSL.crypto.X509Extension(b"authorityKeyIdentifier", False, b"keyid:always",issuer=ca)
])
ca.sign(key, sha256)
return ca, key
def create_cert(is_server, cacert, cakey, size=2048, valid=315360000, CN=None):
"""
Creates a client/server key and cert
is_server - Must be True for a server, False for a client
cacert - The OpenSSL.crypto.X509 object of the CA
cakey - The OpenSSL.crypto.PKey object of the CA
Optional:
size - The RSA key size to be used
valid - The time in seconds the key should be valid for
CN - The CN to be used for the cert. None will create a UUID
"""
if CN is None:
if is_server:
CN='server-'+str(uuid.uuid4())
else:
CN = 'client-'+str(uuid.uuid4())
key = OpenSSL.crypto.PKey()
key.generate_key(OpenSSL.crypto.TYPE_RSA, size)
cert = OpenSSL.crypto.X509()
cert.set_version(2)
cert.set_serial_number(random.randint(1, 99999999))
cert.get_subject().CN = CN
cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(valid)
cert.set_issuer(cacert.get_subject())
cert.set_pubkey(key)
if is_server:
cert.add_extensions([
OpenSSL.crypto.X509Extension(b"basicConstraints", False, b"CA:FALSE"),
OpenSSL.crypto.X509Extension(b"keyUsage", False, b"digitalSignature,keyEncipherment"),
OpenSSL.crypto.X509Extension(b"extendedKeyUsage", False, b"serverAuth"),
OpenSSL.crypto.X509Extension(b"subjectKeyIdentifier", False, b"hash", subject=cert),
OpenSSL.crypto.X509Extension(b"authorityKeyIdentifier", False, b"keyid:always",issuer=cacert),
OpenSSL.crypto.X509Extension(b"nsCertType", False, b"server")
])
else:
cert.add_extensions([
OpenSSL.crypto.X509Extension(b"basicConstraints", False, b"CA:FALSE"),
OpenSSL.crypto.X509Extension(b"keyUsage", False, b"digitalSignature"),
OpenSSL.crypto.X509Extension(b"extendedKeyUsage", False, b"clientAuth"),
OpenSSL.crypto.X509Extension(b"subjectKeyIdentifier", False, b"hash", subject=cert),
OpenSSL.crypto.X509Extension(b"authorityKeyIdentifier", False, b"keyid:always",issuer=cacert),
OpenSSL.crypto.X509Extension(b"nsCertType", False, b"client")
])
cert.sign(cakey, sha256)
return cert, key
def gen_dhparams(size=2048):
"""
Generate Diffie Hellman parameters by calling openssl. Returns a string.
I don't like doing it like this but pyopenssl doesn't seem to
have a way to do this natively.
size - The size of the prime to generate.
"""
cmd = ['openssl', 'dhparam', '-out', 'dh.tmp', str(size)]
try:
subprocess.check_call(cmd)
except subprocess.CalledProcessError:
# Sometimes we get a non-zero exit code, no idea why...
print('Calling of openssl failed... Trying again')
subprocess.check_call(cmd)
with open('dh.tmp') as dh:
params = dh.read()
os.remove('dh.tmp')
return params
def gen_tlsauth_key():
"""Generate an openvpn secret key by calling openvpn. Returns a string."""
cmd = ['openvpn', '--genkey', '--secret', 'ta.tmp']
ret = subprocess.check_call(cmd)
with open('ta.tmp') as key:
key = key.read()
os.remove('ta.tmp')
return key
def _create_server_conf(name, confdict, port, cacert, serverkey, servercert, tls_auth=False, dh_params=None, path='.'):
if dh_params is None:
dh_params = gen_dhparams()
serverfile = open(os.path.join(path, name+'_server.ovpn'), 'w')
for key, value in confdict['both'].items():
if value is False:
continue
elif value is True:
serverfile.write(key + '\n')
elif isinstance(value, list):
for v in value:
serverfile.write(key + ' ' + v + '\n')
else:
serverfile.write(key + ' ' + value + '\n')
for key, value in confdict['server'].items():
if value is False:
continue
elif value is True:
serverfile.write(key + '\n')
elif isinstance(value, list):
for v in value:
serverfile.write(key + ' ' + v + '\n')
else:
serverfile.write(key + ' ' + value + '\n')
serverfile.write('port ' + port + '\n')
if 'meta' in confdict:
if confdict['meta'].get('embedkeys', True):
serverfile.write('<ca>\n'+cacert+'</ca>\n')
serverfile.write('<key>\n'+serverkey+'</key>\n')
serverfile.write('<cert>\n'+servercert+'</cert>\n')
serverfile.write('<dh>\n'+dh_params+'</dh>\n')
if tls_auth is not False:
serverfile.write('key-direction 0\n')
serverfile.write('<tls-auth>\n'+tls_auth+'</tls-auth>\n')
def _create_client_conf(name, confdict, host, port, cacert, clientkey, clientcert, tls_auth=False, path='.'):
clientfile = open(os.path.join(path, name+'_client.ovpn'), 'w')
clientfile.write('client\n')
clientfile.write('remote ' + host + ' ' + port + '\n')
for key, value in confdict['both'].items():
if value is False:
continue
elif value is True:
clientfile.write(key + '\n')
elif isinstance(value, list):
for v in value:
clientfile.write(key + ' ' + v + '\n')
else:
clientfile.write(key + ' ' + value + '\n')
for key, value in confdict['client'].items():
if value is False:
continue
elif value is True:
clientfile.write(key + '\n')
elif isinstance(value, list):
for v in value:
clientfile.write(key + ' ' + v + '\n')
else:
clientfile.write(key + ' ' + value + '\n')
if 'meta' in confdict:
if confdict['meta'].get('embedkeys', True):
clientfile.write('<ca>\n'+cacert+'</ca>\n')
clientfile.write('<key>\n'+clientkey+'</key>\n')
clientfile.write('<cert>\n'+clientcert+'</cert>\n')
if tls_auth is not False:
clientfile.write('key-direction 1\n')
clientfile.write('<tls-auth>\n'+tls_auth+'</tls-auth>\n')
def create_confs(name, confdict, path='.', host=None, port=None):
"""
Creates the client and server configs.
name - The name of the run which is prepended to the config file names
confdict - A dictionary representing the config parameters.
"""
if host is None:
host = str(input("Enter Hostname/IP: ")).rstrip()
if port is None:
port = str(input("Enter port number: ")).rstrip()
tls_auth = False
keysize = None
dhsize = None
if 'meta' in confdict:
if confdict['meta'].get('tls-auth', False):
tls_auth = gen_tlsauth_key()
keysize = confdict['meta'].get('keysize', 2048)
dhsize = confdict['meta'].get('dhsize', 2048)
# Create CA
cacert, cakey = create_ca(size=keysize)
text_cacert = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cacert).decode('ascii')
text_cakey = OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, cakey).decode('ascii')
# Create a server
servercert, serverkey = create_cert(True, cacert, cakey, size=keysize)
serverkey = OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, serverkey).decode('ascii')
servercert = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, servercert).decode('ascii')
_create_server_conf(name, confdict, port, text_cacert, serverkey, servercert, tls_auth=tls_auth, dh_params=gen_dhparams(dhsize), path=path)
# Create a client
clientcert, clientkey = create_cert(False, cacert, cakey, size=keysize)
clientkey = OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, clientkey).decode('ascii')
clientcert = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, clientcert).decode('ascii')
_create_client_conf(name, confdict, host, port, text_cacert, clientkey, clientcert, tls_auth=tls_auth, path=path)
if 'meta' in confdict:
if confdict['meta'].get('savecerts', False):
try:
with open(name+'_client.cer', 'w') as fileout:
fileout.write(clientcert)
except Exception as e:
print('Unable to write', name+'_client.cer')
print(e)
try:
with open(name+'_client.key', 'w') as fileout:
fileout.write(clientkey)
except Exception as e:
print('Unable to write', name+'_client.key')
print(e)
try:
with open(name+'_server.cer', 'w') as fileout:
fileout.write(servercert)
except Exception as e:
print('Unable to write', name+'_server.cer')
print(e)
try:
with open(name+'_server.key', 'w') as fileout:
fileout.write(serverkey)
except Exception as e:
print('Unable to write', name+'_server.key')
print(e)
try:
with open(name+'_ca.cer', 'w') as fileout:
fileout.write(text_cacert)
except Exception as e:
print('Unable to write', name+'_ca.cer')
print(e)
try:
with open(name+'_ca.key', 'w') as fileout:
fileout.write(text_cakey)
except Exception as e:
print('Unable to write', name+'_ca.key')
print(e)
def _parse_args():
"""Parse command line args"""
import argparse
parser = argparse.ArgumentParser(description='Create OpenVPN client/server configs.')
parser.add_argument('-i', '--interactive', action='store_true', help='Interactively configure templates')
parser.add_argument('-t', '--template', help='The config file/directory to use', default=os.path.join(os.path.dirname(__file__), 'templates'))
parser.add_argument('-s', '--server', help='The hostname or ip of the server to use', default=None)
parser.add_argument('-p', '--port', help='The port number to use', default=None)
parser.add_argument('-n', '--name', help='The name to use when saving configs', default=None)
return parser.parse_args()
def _ask_template(templates):
"""Prompts user for the template to use"""
i = 1
print('Which template would you like to use?')
for template in templates:
print(i, ') ', template['meta']['name'], ': ', template['meta']['description'],sep='')
i += 1
ret = int(input('Enter selection: '))
while ret <= 0 or ret > i-1:
ret = int(input('Enter selection: '))
return templates[ret-1]
def _ask_interactive():
conf_changes = {'meta': {}, 'client': {}, 'server': {}}
ret = input('Would you like to allow more than one client to connect with the same config at the same time? [Y/n]: ').lower()
if ret == 'n':
conf_changes['server']['duplicate-cn'] = False
else:
conf_changes['server']['duplicate-cn'] = True
return conf_changes
def main():
args = _parse_args()
# Read in configs
confs = []
if os.path.isdir(args.template):
dir_list = os.listdir(args.template)
for filename in dir_list:
filename = os.path.join(args.template, filename)
if os.path.isfile(filename):
with open(filename, 'r') as fh:
try:
data = json.loads(fh.read())
except Exception as e:
print('WARNING:', filename, 'is not valid json.', e, file=sys.stderr)
continue
if 'meta' in data:
if 'name' not in data['meta']:
data['meta']['name'] = filename
if 'description' not in data['meta']:
data['meta']['description'] = ''
confs.append(data)
elif os.path.isfile(args.template):
with open(args.template, 'r') as fh:
try:
confs.append(json.loads(fh.read()))
except Exception as e:
print('WARNING:', args.template, 'is not valid json.', e, file=sys.stderr)
else:
print('ERROR:', args.template, 'is not valid file or dir.', file=sys.stderr)
if len(confs) == 0:
print('ERROR: No valid templates to use', file=sys.stderr)
exit(-1)
elif len(confs) == 1:
template = confs[0]
else:
template = _ask_template(confs)
name = args.name
if name is None:
name = input('Enter a name for the configs: ')
if args.interactive:
updates = _ask_interactive()
for key in updates:
template[key].update(updates[key])
create_confs(name, template, host=args.server, port=args.port)
if __name__ == "__main__":
main()
|
Drewsif/OpenVPN-Config-Generator
|
OpenVPNConfig.py
|
Python
|
mpl-2.0
| 14,441
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
import pytest
import math as m
import numpy as np
from sisl import Spin
pytestmark = [pytest.mark.physics, pytest.mark.spin]
def test_spin1():
for val in ['unpolarized', '', Spin.UNPOLARIZED,
'polarized', 'p', Spin.POLARIZED,
'non-collinear', 'nc', Spin.NONCOLINEAR,
'spin-orbit', 'so', Spin.SPINORBIT]:
s = Spin(val)
str(s)
s1 = s.copy()
assert s == s1
def test_spin2():
s1 = Spin()
s2 = Spin('p')
s3 = Spin('nc')
s4 = Spin('so')
assert s1.kind == Spin.UNPOLARIZED
assert s2.kind == Spin.POLARIZED
assert s3.kind == Spin.NONCOLINEAR
assert s4.kind == Spin.SPINORBIT
assert s1 == s1.copy()
assert s2 == s2.copy()
assert s3 == s3.copy()
assert s4 == s4.copy()
assert s1 < s2
assert s2 < s3
assert s3 < s4
assert s1 <= s2
assert s2 <= s3
assert s3 <= s4
assert s2 > s1
assert s3 > s2
assert s4 > s3
assert s2 >= s1
assert s3 >= s2
assert s4 >= s3
assert s1.is_unpolarized
assert not s1.is_polarized
assert not s1.is_noncolinear
assert not s1.is_spinorbit
assert not s2.is_unpolarized
assert s2.is_polarized
assert not s2.is_noncolinear
assert not s2.is_spinorbit
assert not s3.is_unpolarized
assert not s3.is_polarized
assert s3.is_noncolinear
assert not s3.is_spinorbit
assert not s4.is_unpolarized
assert not s4.is_polarized
assert not s4.is_noncolinear
assert s4.is_spinorbit
def test_spin3():
with pytest.raises(ValueError):
s = Spin('satoehus')
def test_spin4():
s1 = Spin(Spin.UNPOLARIZED)
S1 = Spin(Spin.UNPOLARIZED, np.complex64)
s2 = Spin(Spin.POLARIZED)
S2 = Spin(Spin.POLARIZED, np.complex64)
s3 = Spin(Spin.NONCOLINEAR)
S3 = Spin(Spin.NONCOLINEAR, np.complex64)
s4 = Spin(Spin.SPINORBIT)
S4 = Spin(Spin.SPINORBIT, np.complex64)
assert s1 == S1
assert s2 == S2
assert s3 == S3
assert s4 == S4
# real comparison
assert s1 < S2
assert s1 < S3
assert s1 < S4
assert s2 > S1
assert s2 < S3
assert s2 < S4
assert s3 > S1
assert s3 > S2
assert s3 < S4
assert s4 > S1
assert s4 > S2
assert s4 > S3
# complex complex
assert S1 < S2
assert S1 < S3
assert S1 < S4
assert S2 > S1
assert S2 < S3
assert S2 < S4
assert S3 > S1
assert S3 > S2
assert S3 < S4
assert S4 > S1
assert S4 > S2
assert S4 > S3
# real comparison
assert S1 < s2
assert S1 < s3
assert S1 < s4
assert S2 > s1
assert S2 < s3
assert S2 < s4
assert S3 > s1
assert S3 > s2
assert S3 < s4
assert S4 > s1
assert S4 > s2
assert S4 > s3
# complex complex
assert S1 < s2
assert S1 < s3
assert S1 < s4
assert S2 > s1
assert S2 < s3
assert S2 < s4
assert S3 > s1
assert S3 > s2
assert S3 < s4
assert S4 > s1
assert S4 > s2
assert S4 > s3
def test_pauli():
# just grab the default spin
S = Spin()
# Create a fictitious wave-function
sq2 = 2 ** .5
W = np.array([
[1/sq2, 1/sq2], # M_x = 1
[1/sq2, -1/sq2], # M_x = -1
[0.5 + 0.5j, 0.5 + 0.5j], # M_x = 1
[0.5 - 0.5j, -0.5 + 0.5j], # M_x = -1
[1/sq2, 1j/sq2], # M_y = 1
[1/sq2, -1j/sq2], # M_y = -1
[0.5 - 0.5j, 0.5 + 0.5j], # M_y = 1
[0.5 + 0.5j, 0.5 - 0.5j], # M_y = -1
[1, 0], # M_z = 1
[0, 1], # M_z = -1
])
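# Each assertion below evaluates the expectation value <psi|sigma|psi> per row:
# sigma.dot(W.T).T applies the Pauli matrix to every spinor, the element-wise
# product with conj(W) and the sum over the two components give the
# magnetization along that axis, matching the annotations above.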
x = np.array([1, -1, 1, -1, 0, 0, 0, 0, 0, 0])
assert np.allclose(x, (np.conj(W)*S.X.dot(W.T).T).sum(1).real)
y = np.array([0, 0, 0, 0, 1, -1, 1, -1, 0, 0])
assert np.allclose(y, (np.conj(W)*np.dot(S.Y, W.T).T).sum(1).real)
z = np.array([0, 0, 0, 0, 0, 0, 0, 0, 1, -1])
assert np.allclose(z, (np.conj(W)*np.dot(S.Z, W.T).T).sum(1).real)
def test_pickle():
import pickle as p
S = Spin('nc')
n = p.dumps(S)
s = p.loads(n)
assert S == s
|
zerothi/sisl
|
sisl/physics/tests/test_spin.py
|
Python
|
mpl-2.0
| 4,252
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Rudimentary system stats collection using ``psutil``.
"""
import time
from switchy import event_callback, utils
def sys_stats(df):
"""Reindex on the call index to allign with call metrics data
and interpolate.
"""
df.index = df.call_index
ci = df.pop('call_index')
# interpolate all system stats since the arrays will be sparse
# compared to the associated call metrics data.
return df.reindex(range(int(ci.iloc[-1]) + 1)).interpolate()
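# For example (hypothetical data): if rows were captured at call_index 0, 4
# and 9, the frame is stretched to indices 0..9 and the gaps are filled by
# linear interpolation, so it lines up row-for-row with the call metrics frame.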
class SysStats(object):
"""A switchy app for capturing system performance stats during load test
using the `psutil`_ module.
An instance of this app should be loaded if rate limited data gathering is
to be shared across multiple slaves (threads).
.. _psutil:
https://pythonhosted.org/psutil/
"""
operators = {
'sys_stats': sys_stats,
}
def __init__(self, psutil, rpyc=None):
self._psutil = psutil
self.rpyc = rpyc
self._conn = None
# required to define the columns for the data frame storer
self.fields = [
'call_index',
'total_cpu_percent',
'percent_cpu_sys',
'percent_cpu_usr',
'percent_cpu_idle',
'percent_cpu_iow',
'phymem_percent_usage',
'load_avg',
]
# this call should ensure we have the correct type
self._times_tup_type = psutil.cpu_times().__class__
self.log = utils.get_logger(type(self).__name__)
# initial cpu usage
self._last_cpu_times = self.psutil.cpu_times()
@property
def psutil(self):
try:
return self._psutil
except (ReferenceError, EOFError): # rpyc and its weakrefs being flaky
if self.rpyc:
self.log.warn("resetting rypc connection...")
self._conn = conn = self.rpyc.classic_connect()
self._psutil = conn.modules.psutil
return self._psutil
raise
def prepost(self, collect_rate=2, storer=None):
self.storer = storer
self.count = 0
self._collect_period = 1. / collect_rate
self._last_collect_time = 0
@property
def collect_rate(self):
return 1. / self._collect_period
@collect_rate.setter
def collect_rate(self, rate):
self._collect_period = 1. / rate
@event_callback("CHANNEL_CREATE")
def on_create(self, sess):
now = time.time()
if sess.is_outbound():
# rate limiting
if (now - self._last_collect_time) >= self._collect_period:
# XXX important to keep this here for performance and
# avoiding thread racing
self._last_collect_time = now
psutil = self.psutil
self.log.debug("writing psutil row at time '{}'".format(now))
curr_times = self.psutil.cpu_times()
delta = self._times_tup_type(*tuple(
now - last for now, last in
zip(curr_times, self._last_cpu_times)
))
self._last_cpu_times = curr_times
tottime = sum(delta)
self.storer.append_row((
sess.call.vars['call_index'],
psutil.cpu_percent(interval=None),
delta.system / tottime * 100.,
delta.user / tottime * 100.,
delta.idle / tottime * 100.,
delta.iowait / tottime * 100.,
psutil.phymem_usage().percent,
psutil.os.getloadavg()[0],
))
|
sangoma/switchy
|
switchy/apps/measure/sys.py
|
Python
|
mpl-2.0
| 3,923
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Multicompany Usability',
'version': '8.0.1.2.0',
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'depends': [
'account',
],
'data': [
'views/res_company_property_view.xml',
'views/res_company_view.xml',
'views/res_partner_view.xml',
'views/product_view.xml',
'security/security.xml',
'security/ir.model.access.csv',
],
'test': [
],
'demo': [],
'installable': True,
'auto_install': False,
}
|
ClearCorp/account-financial-tools
|
account_multicompany_usability/__openerp__.py
|
Python
|
agpl-3.0
| 1,507
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import wizard
import account_inv
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ksrajkumar/openerp-6.1
|
openerp/addons/itara_multi_payment/__init__.py
|
Python
|
agpl-3.0
| 1,080
|
# encoding: utf-8
"""Tests of Branding API """
from __future__ import absolute_import, unicode_literals
import mock
from django.conf import settings
from django.test import TestCase
from django.test.utils import override_settings
from django.urls import reverse
from branding.api import _footer_business_links, get_footer, get_home_url, get_logo_url
from edxmako.shortcuts import marketing_link
from openedx.core.djangoapps.site_configuration.tests.test_util import with_site_configuration
test_config_disabled_contact_us = { # pylint: disable=invalid-name
"CONTACT_US_ENABLE": False,
}
test_config_custom_url_contact_us = { # pylint: disable=invalid-name
"CONTACT_US_ENABLE": True,
"CONTACT_US_CUSTOM_LINK": "https://open.edx.org/",
}
class TestHeader(TestCase):
"""Test API end-point for retrieving the header. """
def test_cdn_urls_for_logo(self):
# Ordinarily, we'd use `override_settings()` to override STATIC_URL,
# which is what the staticfiles storage backend is using to construct the URL.
# Unfortunately, other parts of the system are caching this value on module
# load, which can cause other tests to fail. To ensure that this change
# doesn't affect other tests, we patch the `url()` method directly instead.
cdn_url = "http://cdn.example.com/static/image.png"
with mock.patch('branding.api.staticfiles_storage.url', return_value=cdn_url):
logo_url = get_logo_url()
self.assertEqual(logo_url, cdn_url)
def test_home_url_with_mktg_disabled(self):
expected_url = get_home_url()
self.assertEqual(reverse('dashboard'), expected_url)
@mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True})
@mock.patch.dict('django.conf.settings.MKTG_URLS', {
"ROOT": "https://edx.org",
})
def test_home_url_with_mktg_enabled(self):
expected_url = get_home_url()
self.assertEqual(marketing_link('ROOT'), expected_url)
class TestFooter(TestCase):
"""Test retrieving the footer. """
maxDiff = None
@mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True})
@mock.patch.dict('django.conf.settings.MKTG_URLS', {
"ROOT": "https://edx.org",
"ENTERPRISE": "/enterprise"
})
@override_settings(ENTERPRISE_MARKETING_FOOTER_QUERY_PARAMS={}, PLATFORM_NAME='\xe9dX')
def test_footer_business_links_no_marketing_query_params(self):
"""
Enterprise marketing page values returned should be a concatenation of ROOT and
ENTERPRISE marketing url values when ENTERPRISE_MARKETING_FOOTER_QUERY_PARAMS
is not set.
"""
business_links = _footer_business_links()
assert business_links[0]['url'] == 'https://edx.org/enterprise'
@mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True})
@mock.patch.dict('django.conf.settings.MKTG_URLS', {
"ROOT": "https://edx.org",
"ABOUT": "/about-us",
"NEWS": "/news-announcements",
"CONTACT": "/contact",
"CAREERS": '/careers',
"FAQ": "/student-faq",
"BLOG": "/edx-blog",
"DONATE": "/donate",
"JOBS": "/jobs",
"SITE_MAP": "/sitemap",
"TRADEMARKS": "/trademarks",
"TOS_AND_HONOR": "/edx-terms-service",
"PRIVACY": "/edx-privacy-policy",
"ACCESSIBILITY": "/accessibility",
"AFFILIATES": '/affiliate-program',
"MEDIA_KIT": "/media-kit",
"ENTERPRISE": "https://business.edx.org"
})
@override_settings(PLATFORM_NAME='\xe9dX')
def test_get_footer(self):
actual_footer = get_footer(is_secure=True)
business_url = 'https://business.edx.org/?utm_campaign=edX.org+Referral&utm_source=edX.org&utm_medium=Footer'
expected_footer = {
'copyright': '\xa9 \xe9dX. All rights reserved except where noted. '
' EdX, Open edX and their respective logos are '
'trademarks or registered trademarks of edX Inc.',
'navigation_links': [
{'url': 'https://edx.org/about-us', 'name': 'about', 'title': 'About'},
{'url': 'https://business.edx.org', 'name': 'enterprise', 'title': '\xe9dX for Business'},
{'url': 'https://edx.org/edx-blog', 'name': 'blog', 'title': 'Blog'},
{'url': 'https://edx.org/news-announcements', 'name': 'news', 'title': 'News'},
{'url': 'https://support.example.com', 'name': 'help-center', 'title': 'Help Center'},
{'url': '/support/contact_us', 'name': 'contact', 'title': 'Contact'},
{'url': 'https://edx.org/careers', 'name': 'careers', 'title': 'Careers'},
{'url': 'https://edx.org/donate', 'name': 'donate', 'title': 'Donate'}
],
'business_links': [
{'url': 'https://edx.org/about-us', 'name': 'about', 'title': 'About'},
{'url': business_url, 'name': 'enterprise', 'title': '\xe9dX for Business'},
{'url': 'https://edx.org/affiliate-program', 'name': 'affiliates', 'title': 'Affiliates'},
{'url': 'http://open.edx.org', 'name': 'openedx', 'title': 'Open edX'},
{'url': 'https://edx.org/careers', 'name': 'careers', 'title': 'Careers'},
{'url': 'https://edx.org/news-announcements', 'name': 'news', 'title': 'News'},
],
'more_info_links': [
{'url': 'https://edx.org/edx-terms-service',
'name': 'terms_of_service_and_honor_code',
'title': 'Terms of Service & Honor Code'},
{'url': 'https://edx.org/edx-privacy-policy', 'name': 'privacy_policy', 'title': 'Privacy Policy'},
{'url': 'https://edx.org/accessibility',
'name': 'accessibility_policy',
'title': 'Accessibility Policy'},
{'url': 'https://edx.org/trademarks', 'name': 'trademarks', 'title': 'Trademark Policy'},
{'url': 'https://edx.org/sitemap', 'name': 'sitemap', 'title': 'Sitemap'},
],
'connect_links': [
{'url': 'https://edx.org/edx-blog', 'name': 'blog', 'title': 'Blog'},
# pylint: disable=line-too-long
{'url': '{base_url}/support/contact_us'.format(base_url=settings.LMS_ROOT_URL), 'name': 'contact', 'title': 'Contact Us'},
{'url': 'https://support.example.com', 'name': 'help-center', 'title': 'Help Center'},
{'url': 'https://edx.org/media-kit', 'name': 'media_kit', 'title': 'Media Kit'},
{'url': 'https://edx.org/donate', 'name': 'donate', 'title': 'Donate'}
],
'legal_links': [
{'url': 'https://edx.org/edx-terms-service',
'name': 'terms_of_service_and_honor_code',
'title': 'Terms of Service & Honor Code'},
{'url': 'https://edx.org/edx-privacy-policy', 'name': 'privacy_policy', 'title': 'Privacy Policy'},
{'url': 'https://edx.org/accessibility',
'name': 'accessibility_policy',
'title': 'Accessibility Policy'},
{'url': 'https://edx.org/sitemap', 'name': 'sitemap', 'title': 'Sitemap'},
{'name': 'media_kit',
'title': u'Media Kit',
'url': u'https://edx.org/media-kit'}
],
'social_links': [
{'url': '#', 'action': 'Like \xe9dX on Facebook', 'name': 'facebook',
'icon-class': 'fa-facebook-square', 'title': 'Facebook'},
{'url': '#', 'action': 'Follow \xe9dX on Twitter', 'name': 'twitter',
'icon-class': 'fa-twitter-square', 'title': 'Twitter'},
{'url': '#', 'action': 'Subscribe to the \xe9dX YouTube channel',
'name': 'youtube', 'icon-class': 'fa-youtube-square', 'title': 'Youtube'},
{'url': '#', 'action': 'Follow \xe9dX on LinkedIn', 'name': 'linkedin',
'icon-class': 'fa-linkedin-square', 'title': 'LinkedIn'},
{'url': '#', 'action': 'Follow \xe9dX on Google+', 'name': 'google_plus',
'icon-class': 'fa-google-plus-square', 'title': 'Google+'},
{'url': '#', 'action': 'Subscribe to the \xe9dX subreddit',
'name': 'reddit', 'icon-class': 'fa-reddit-square', 'title': 'Reddit'}
],
'mobile_links': [],
'logo_image': 'https://edx.org/static/images/logo.png',
'openedx_link': {
'url': 'http://open.edx.org',
'image': 'https://files.edx.org/openedx-logos/edx-openedx-logo-tag.png',
'title': 'Powered by Open edX'
},
'edx_org_link': {
'url': 'https://www.edx.org/?utm_medium=affiliate_partner&utm_source=opensource-partner&utm_content=open-edx-partner-footer-link&utm_campaign=open-edx-footer',
'text': 'Take free online courses at edX.org',
},
}
self.assertEqual(actual_footer, expected_footer)
@with_site_configuration(configuration=test_config_disabled_contact_us)
def test_get_footer_disabled_contact_form(self):
"""
Test retrieving the footer with disabled contact form.
"""
actual_footer = get_footer(is_secure=True)
self.assertEqual(any(l['name'] == 'contact' for l in actual_footer['connect_links']), False)
self.assertEqual(any(l['name'] == 'contact' for l in actual_footer['navigation_links']), False)
@with_site_configuration(configuration=test_config_custom_url_contact_us)
def test_get_footer_custom_contact_url(self):
"""
Test retrieving the footer with custom contact form url.
"""
actual_footer = get_footer(is_secure=True)
contact_us_link = [l for l in actual_footer['connect_links'] if l['name'] == 'contact'][0]
self.assertEqual(
contact_us_link['url'],
test_config_custom_url_contact_us['CONTACT_US_CUSTOM_LINK']
)
navigation_link_contact_us = [l for l in actual_footer['navigation_links'] if l['name'] == 'contact'][0]
self.assertEqual(
navigation_link_contact_us['url'],
test_config_custom_url_contact_us['CONTACT_US_CUSTOM_LINK']
)
|
jolyonb/edx-platform
|
lms/djangoapps/branding/tests/test_api.py
|
Python
|
agpl-3.0
| 10,470
|
#
# Copyright (C) 2014 Jonathan Finlay <jfinlay@riseup.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
The patient visit module
========================
Implements the classes:
* Visit: Main visit module
* ConsultingRoom: Consultings room module
"""
from openerp.osv import osv, fields
class Visit(osv.osv):
"""
The visit module
"""
_name = 'visit'
_description = 'The visit module'
_states = [
('draft', 'Draft'),
('confirmed', 'Confirmed'),
('canceled', 'Canceled'),
('assisted', 'Assisted'),
]
def _default_room(self, cr, uid, id, context=None):
consulroom_obj = self.pool.get('consulting.room')
room = consulroom_obj.search(cr, uid, [('default', '=', '1')])
if room:
return room[0]
return 1
def check_duration(self, cr, uid, id, context=None):
"""
Check the consistency of the visit duration
:param cr:
:param uid:
:param id:
:param context:
:return:
"""
return {}
def onchange_consulting_room(self, cr, uid, id, consulting_room, context=None):
"""
:param cr:
:param uid:
:param id:
:param starts:
:param consulting_room:
:param context:
:return:
"""
if consulting_room:
consulroom_obj = self.pool.get('consulting.room')
duration = consulroom_obj.browse(cr, uid, consulting_room, context=context)[0].duration
else:
duration = 0.0
vals = {
'value': {
'duration': duration,
}
}
return vals
_columns = {
'name': fields.char('Identifier'),
'starts': fields.datetime('Start date'),
'duration': fields.float('Duration',
help='Duration in minutes'),
'patient_id': fields.many2one('patient', 'Patient'),
'consultingroom_id': fields.many2one('consulting.room',
'Consulting room'),
'state': fields.selection(_states, 'State')
}
_defaults = {
'consultingroom_id': _default_room,
}
class ConsultingRoom(osv.osv):
""" Consulting rooms """
_name = 'consulting.room'
_description = 'Consulting rooms configuration module'
_columns = {
'name': fields.char('Name'),
'duration': fields.float('Standard duration',
help='Visit standard duration time in minutes'),
'price': fields.float('Price',
help='Standard consultation fee'),
'address': fields.text('Address'),
'default': fields.boolean('Default', help='Set as default consulting room'),
}
|
jonathanf/infosalud
|
visit/visit.py
|
Python
|
agpl-3.0
| 3,450
|
import sys
for m in range(0, 2):
n = raw_input()
for i in range(0, int(n)):
print "\x11" + str(m) + ": " + raw_input() + "\x11"
#sys.stdout.flush()
|
pniedzielski/fb-hackathon-2013-11-21
|
src/pythonCode.py
|
Python
|
agpl-3.0
| 155
|
# -*- coding: utf-8 -*-
import StringIO
import csv
from xml.etree.ElementTree import Element, SubElement, Comment, tostring
from xml.dom import minidom
import configdb
def prettify(elem):
"""Return a pretty-printed XML string for the Element.
"""
rough_string = tostring(elem, 'utf-8')
reparsed = minidom.parseString(rough_string)
return reparsed.toprettyxml(indent=" ")
## SYNTAX
# script.py cities.csv 2015-01-01 2015-04-01 csv|xml
# cities.csv obtained from "Gestió agrupada impost 1.5%"
class MunicipalTaxesInvoicingReport:
def __init__(self, cursor, start_date, end_date, tax, aggregated):
self.cursor = cursor
self.start_date = start_date
self.end_date = end_date
self.tax = tax
self.aggregated = aggregated
def by_city(self, ids, file_type):
sql = '''
SELECT
municipi.name AS name,
municipi.ine AS ine,
EXTRACT(YEAR FROM invoice.date_invoice) AS invoice_year,
EXTRACT(QUARTER FROM invoice.date_invoice) AS invoice_quarter,
COALESCE(SUM(invoice_line.price_subtotal::float*(
CASE
WHEN factura_line.tipus IN ('subtotal_xml') AND invoice.type='in_invoice' THEN 1
WHEN factura_line.tipus IN ('subtotal_xml') AND invoice.type='in_refund' THEN -1
ELSE 0
END
)),0.0) AS provider_amount,
COALESCE(SUM(invoice_line.price_subtotal::float*(
CASE
WHEN factura_line.tipus IN ('energia','reactiva','potencia') AND invoice.type='out_invoice' THEN 1
WHEN factura_line.tipus IN ('energia','reactiva','potencia') AND invoice.type='out_refund' THEN -1
ELSE 0
END
)),0.0) AS client_amount
FROM giscedata_facturacio_factura_linia AS factura_line
LEFT JOIN account_invoice_line AS invoice_line ON invoice_line.id = factura_line.invoice_line_id
LEFT JOIN giscedata_facturacio_factura AS factura ON factura.id = factura_line.factura_id
LEFT JOIN account_invoice AS invoice ON invoice.id = factura.invoice_id
LEFT JOIN giscedata_polissa AS polissa ON polissa.id = factura.polissa_id
LEFT JOIN giscedata_cups_ps AS cups ON cups.id = polissa.cups
LEFT JOIN res_municipi as municipi on municipi.id = cups.id_municipi
WHERE municipi.ID IN ({0})
AND ((invoice.date_invoice >= '{1}') AND (invoice.date_invoice < '{2}'))
AND (((invoice.type LIKE 'out_%%')
AND ((invoice.state = 'open') OR (invoice.state = 'paid')))
OR (invoice.type LIKE 'in_%%'))
GROUP BY 1,2,3,4
ORDER BY 1,2,3,4
'''.format(','.join(map(str, ids)), self.start_date, self.end_date)
self.cursor.execute(sql, {'start_date': self.start_date,
'end_date': self.end_date,
'ids': ids})
return self.build_report(self.cursor.fetchall(), file_type)
def build_report(self, records, file_type):
invoicing_by_name = {}
invoicing_by_date = {}
ines = {}
for record in records:
name = record[0]
ine = record[1]
year = record[2]
quarter = record[3]
invoicing_by_name.setdefault(name, {'total_provider_amount': 0, 'total_client_amount': 0, 'quarters': []})
invoicing_by_name[name]['total_provider_amount'] += record[4]
invoicing_by_name[name]['total_client_amount'] += record[5]
invoicing_by_name[name]['quarters'].append({
'year': record[2],
'quarter': record[3],
'provider_amount': record[4],
'client_amount': record[5]
})
invoicing_by_date.setdefault(year, {})
invoicing_by_date[year].setdefault(quarter, {'total_provider_amount': 0, 'total_client_amount': 0})
invoicing_by_date[year][quarter]['total_provider_amount'] += record[4]
invoicing_by_date[year][quarter]['total_client_amount'] += record[5]
ines.setdefault(name, ine)
if file_type=='csv':
## CSV
csv_doc=StringIO.StringIO()
writer_report = csv.writer(csv_doc)
for name,v in sorted(invoicing_by_name.items()):
writer_report.writerow([name])
writer_report.writerow(['Año', 'Trimestre', 'Pagos a distribuidora', 'Facturas a clientes'])
for quarter in v['quarters']:
writer_report.writerow([
quarter['year'],
quarter['quarter'],
round(quarter['provider_amount'], 2),
round(quarter['client_amount'], 2)
])
writer_report.writerow([])
writer_report.writerow(['', '', '', '', 'Ingresos brutos', 'Tasa', 'Total'])
diff = v['total_client_amount'] - v['total_provider_amount']
writer_report.writerow(['Total',
'',
round(v['total_provider_amount'], 2),
round(v['total_client_amount'], 2),
round(diff, 2),
self.tax,
round(diff*(self.tax/100.0), 2)
])
writer_report.writerow([])
writer_report.writerow([])
writer_report.writerow(['Año', 'Trimestre', 'Pagos a distribuidora', 'Facturas a clientes', 'Ingresos',
'Tasa', 'Total'])
for year, v in sorted(invoicing_by_date.items()):
for quarter, v in sorted(invoicing_by_date[year].items()):
diff = v['total_client_amount'] - v['total_provider_amount']
writer_report.writerow([
year,
quarter,
round(v['total_provider_amount'], 2),
round(v['total_client_amount'], 2),
round(diff, 2),
self.tax,
round(diff*(self.tax/100.0), 2)
])
doc = csv_doc.getvalue()
if file_type == 'xml':
## XML
_empresa = Element("EMPRESA")
_datos = SubElement(_empresa, 'DATOS')
_nombre = SubElement(_datos, 'NOMBRE')
_nombre.text = "Som Energia SCCL"
_nif = SubElement(_datos, 'NIF')
_nif.text = "F55091367"
_municipios = SubElement(_empresa, 'MUNICIPIOS')
for name,v in sorted(invoicing_by_name.items()):
for quarter in v['quarters']:
_municipio = SubElement(_municipios, 'MUNICIPIO')
_ine = SubElement(_municipio, 'INEMUNICIPIO')
_ine.text = ines[name]
_ejercicio = SubElement(_municipio, 'EJERCICIO')
_ejercicio.text = str(int(quarter['year']))
_periodo = SubElement(_municipio, 'PERIODO')
_periodo.text = str(int(quarter['quarter']))
_fechaalta = SubElement(_municipio, 'FECHAALTA')
_fechabaja = SubElement(_municipio, 'FECHABAJA')
_tiposumin = SubElement(_municipio, 'TIPOSUMIN')
_tiposumin.text = '2'
_descsum = SubElement(_municipio, 'DESCSUM')
_descsum.text = 'Electricidad'
_descperi = SubElement(_municipio, 'DESCPERI')
_facturacion = SubElement(_municipio, 'FACTURACION')
_facturacion.text = '%0.2f' % quarter['client_amount']
_derechosacceso = SubElement(_municipio, 'DERECHOSACCESO')
_derechosacceso.text = '%0.2f' % quarter['provider_amount']
_compensacion = SubElement(_municipio, 'COMPENSACION')
_compensacion.text = '0.00'
_baseimponible = SubElement(_municipio, 'BASEIMPONIBLE')
diff = (quarter['client_amount'] - quarter['provider_amount'])
_baseimponible.text = '%0.2f' % diff
_cuotabasica = SubElement(_municipio, 'CUOTABASICA')
_cuotabasica.text = '%0.2f' % (self.tax/100)
_totalingresar = SubElement(_municipio, 'TOTALINGRESAR')
_totalingresar.text = '%0.2f' % (diff*(self.tax/100.0))
doc = prettify(_empresa)
return doc
import psycopg2
import psycopg2.extras
import csv
import sys
municipis_file = sys.argv[1]
start_date = sys.argv[2]
end_date = sys.argv[3]
type_file = sys.argv[4]
municipis_id = []
with open(municipis_file, 'r') as csvfile:
reader = csv.reader(csvfile, delimiter=';')
for row in reader:
municipis_id.append(int(row[0]))
try:
dbconn=psycopg2.connect(**configdb.psycopg)
except Exception, ex:
print "Unable to connect to database " + configdb['DB_NAME']
raise ex
m = MunicipalTaxesInvoicingReport(dbconn.cursor(), start_date,end_date,1.5,False)
print m.by_city(municipis_id, type_file)
|
Som-Energia/invoice-janitor
|
Taxes/municipaltax.py
|
Python
|
agpl-3.0
| 9,574
|
import ckanext.deadoralive.config as config
import ckanext.deadoralive.tests.helpers as custom_helpers
class TestConfig(custom_helpers.FunctionalTestBaseClass):
def test_that_it_reads_settings_from_config_file(self):
"""Test that non-default config settings in the config file work."""
# These non-default settings are in the test.ini config file.
assert config.recheck_resources_after == 48
assert config.resend_pending_resources_after == 12
# TODO: Test falling back on defaults when there's nothing in the config
# file.
|
ckan/ckanext-deadoralive
|
ckanext/deadoralive/tests/test_config.py
|
Python
|
agpl-3.0
| 572
|
import datetime
import decimal
from django.test import TestCase
from django.core.cache import cache
from httmock import HTTMock
from django_dynamic_fixture import G, N
from postnl_checkout.contrib.django_postnl_checkout.models import Order
from .base import PostNLTestMixin
class OrderTests(PostNLTestMixin, TestCase):
""" Tests for Order model. """
maxDiff = None
def setUp(self):
super(OrderTests, self).setUp()
self.order_datum = datetime.datetime(
year=2011, month=7, day=21,
hour=20, minute=11, second=0
)
self.verzend_datum = datetime.datetime(
year=2011, month=7, day=22,
hour=20, minute=11, second=0
)
def test_save(self):
""" Test saving an Order model. """
instance = N(Order)
instance.clean()
instance.save()
def test_prepare_order(self):
""" Test prepare_order class method. """
# Setup mock response
def response(url, request):
self.assertXMLEqual(
request.body, self.read_file('prepare_order_request.xml')
)
return self.read_file('prepare_order_response.xml')
kwargs = {
'AangebodenBetaalMethoden': {
'PrepareOrderBetaalMethode': {
'Code': 'IDEAL',
'Prijs': '5.00'
}
},
'AangebodenCommunicatieOpties': {
'PrepareOrderCommunicatieOptie': {
'Code': 'NEWS'
}
},
# FIXME: the following is not submitted by SUDS
# Most probably because it is not properly defined in the WSDL
# Contact PostNL about this.
# 'AangebodenOpties': {
# 'PrepareOrderOptie': {
# 'Code': 'WRAP',
# 'Prijs': '2.50'
# }
# },
# 'AfleverOpties': {
# 'AfleverOptie': {
# 'Code': 'PG',
# 'Kosten': '0.00',
# 'Toegestaan': True
# }
# },
'Consument': {
'ExtRef': 'test@e-id.nl'
},
'Contact': {
'Url': 'http://www.kadowereld.nl/url/contact'
},
'Order': {
'ExtRef': '1105_900',
'OrderDatum': self.order_datum,
'Subtotaal': '125.00',
'VerzendDatum': self.verzend_datum,
'VerzendKosten': '12.50'
},
'Retour': {
'BeschrijvingUrl': 'http://www.kadowereld.nl/url/beschrijving',
'PolicyUrl': 'http://www.kadowereld.nl/url/policy',
'RetourTermijn': 28,
'StartProcesUrl': 'http://www.kadowereld.nl/url/startproces'
},
'Service': {
'Url': 'http://www.kadowereld.nl/url/service'
}
}
# Execute API call
with HTTMock(response):
instance = Order.prepare_order(**kwargs)
# Assert model field values
self.assertTrue(instance.pk)
self.assertEquals(
instance.order_token, '0cfb4be2-47cf-4eac-865c-d66657953d5c'
)
self.assertEquals(
instance.order_ext_ref, '1105_900'
)
self.assertEquals(
instance.order_date, self.order_datum
)
# Assert JSON values
self.assertEquals(instance.prepare_order_request, kwargs)
self.assertEquals(instance.prepare_order_response, {
'Checkout': {
'OrderToken': '0cfb4be2-47cf-4eac-865c-d66657953d5c',
'Url': (
'http://tpppm-test.e-id.nl/Orders/OrderCheckout'
'?token=0cfb4be2-47cf-4eac-865c-d66657953d5c'
)
},
'Webshop': {
'IntRef': 'a0713e4083a049a996c302f48bb3f535'
}
})
def test_read_order(self):
""" Test read_order method. """
# Setup mock response
def response(url, request):
self.assertXMLEqual(
request.body, self.read_file('read_order_request.xml')
)
return self.read_file('read_order_response.xml')
instance = G(
Order,
order_token='0cfb4be2-47cf-4eac-865c-d66657953d5c'
)
# Read order data
with HTTMock(response):
new_instance = instance.read_order()
response_data = new_instance.read_order_response
self.assertTrue(response_data)
self.assertEquals(response_data, {
'Voorkeuren': {
'Bezorging': {
'Tijdvak': {
'Start': u'10:30',
'Eind': u'08:30'
},
'Datum': datetime.datetime(2012, 4, 26, 0, 0)
}
},
'Consument': {
'GeboorteDatum': datetime.datetime(1977, 6, 15, 0, 0),
'ExtRef': u'jjansen',
'TelefoonNummer': u'06-12345678',
'Email': u'j.jansen@e-id.nl'
},
'Facturatie': {
'Adres': {
'Huisnummer': u'1',
'Initialen': u'J',
'Geslacht': u'Meneer',
'Deurcode': None,
'Gebruik': u'P',
'Gebouw': None,
'Verdieping': None,
'Achternaam': u'Jansen',
'Afdeling': None,
'Regio': None,
'Land': u'NL',
'Wijk': None,
'Postcode': u'4131LV',
'Straat': 'Lage Biezenweg',
'Bedrijf': None,
'Plaats': u'Vianen',
'Tussenvoegsel': None,
'Voornaam': u'Jan',
'HuisnummerExt': None
}
},
'Webshop': {
'IntRef': u'a0713e4083a049a996c302f48bb3f535'
},
'CommunicatieOpties': {
'ReadOrderResponseCommunicatieOptie': [
{
'Text': u'Do not deliver to neighbours',
'Code': u'REMARK'
}
]
},
'Bezorging': {
'ServicePunt': {
'Huisnummer': None,
'Initialen': None,
'Geslacht': None,
'Deurcode': None,
'Gebruik': None,
'Gebouw': None,
'Verdieping': None,
'Achternaam': None,
'Afdeling': None,
'Regio': None,
'Land': None,
'Wijk': None,
'Postcode': None,
'Straat': None,
'Bedrijf': None,
'Plaats': None,
'Tussenvoegsel': None,
'Voornaam': None,
'HuisnummerExt': None
},
'Geadresseerde': {
'Huisnummer': u'1',
'Initialen': u'J',
'Geslacht': u'Meneer',
'Deurcode': None,
'Gebruik': u'Z',
'Gebouw': None,
'Verdieping': None,
'Achternaam': u'Janssen',
'Afdeling': None,
'Regio': None,
'Land': u'NL',
'Wijk': None,
'Postcode': u'4131LV',
'Straat': u'Lage Biezenweg ',
'Bedrijf': u'E-ID',
'Plaats': u'Vianen',
'Tussenvoegsel': None,
'Voornaam': u'Jan',
'HuisnummerExt': None
}
},
'Opties': {
'ReadOrderResponseOpties': [
{
'Text': u'Congratulat ions with your new foobar!',
'Code': u'CARD',
'Prijs': decimal.Decimal('2.00')
}
]
},
'Order': {
'ExtRef': u'15200_001'
},
'BetaalMethode': {
'Optie': u'0021',
'Code': u'IDEAL',
'Prijs': decimal.Decimal('0.00')
}
})
def test_confirm_order(self):
""" Test confirm_order """
def response(url, request):
self.assertXMLEqual(
request.body, self.read_file('confirm_order_request.xml')
)
return self.read_file('confirm_order_response.xml')
kwargs = {
'Order': {
'PaymentTotal': decimal.Decimal('183.25')
}
}
instance = G(
Order,
order_token='0cfb4be2-47cf-4eac-865c-d66657953d5c',
order_ext_ref='1105_900'
)
# Execute API call
with HTTMock(response):
instance.confirm_order(**kwargs)
def test_update_order(self):
""" Test update_order """
def response_success(url, request):
self.assertXMLEqual(
request.body, self.read_file('update_order_request.xml')
)
return self.read_file('update_order_response_success.xml')
def response_fail(url, request):
self.assertXMLEqual(
request.body, self.read_file('update_order_request.xml')
)
return self.read_file('update_order_response_fail.xml')
kwargs = {
'Order': {
'ExtRef': 'FDK004',
'Zending': {
'UpdateOrderOrderZending': {
'Busstuk': {
'UpdateOrderOrderZendingBusstuk': {
'Verzonden': '23-08-2011 12:00:00'
}
},
'ExtRef': '642be996-6ab3-4a4c-b7d6-2417a4cee0df',
'Pakket': {
'UpdateOrderOrderZendingPakket': {
'Barcode': '3s123456789',
'Postcode': '4131LV'
}
}
}
}
}
}
instance = G(
Order,
order_token='0cfb4be2-47cf-4eac-865c-d66657953d5c',
order_ext_ref='1105_900'
)
# Make call fail
with HTTMock(response_fail):
self.assertRaises(
Exception, lambda: instance.update_order(**kwargs)
)
# Make call pass
with HTTMock(response_success):
response = instance.update_order(**kwargs)
self.assertTrue(response)
# Make sure the requested stuff is saved
self.assertEquals(
instance.update_order_request, {
'Checkout': {
'OrderToken': '0cfb4be2-47cf-4eac-865c-d66657953d5c'
},
'Order': {
'ExtRef': 'FDK004',
'Zending': {
'UpdateOrderOrderZending': {
'Busstuk': {
'UpdateOrderOrderZendingBusstuk': {
'Verzonden': '23-08-2011 12:00:00'
}
},
'ExtRef': '642be996-6ab3-4a4c-b7d6-2417a4cee0df',
'Pakket': {
'UpdateOrderOrderZendingPakket': {
'Barcode': '3s123456789',
'Postcode': '4131LV'
}
}
}
}
}
}
)
def test_ping_status(self):
""" Test ping_status """
instance = G(Order)
self.response_called = 0
def ok_response(url, request):
# Assert
self.assertXMLEqual(
request.body,
self.read_file('ping_status_request.xml')
)
self.response_called += 1
return self.read_file('ping_status_response_ok.xml')
def nok_response(url, request):
return self.read_file('ping_status_response_nok.xml')
with HTTMock(ok_response):
self.assertEquals(instance.ping_status(), True)
self.assertEquals(self.response_called, 1)
# Repeated call should not cause the response to be called
with HTTMock(ok_response):
self.assertEquals(instance.ping_status(), True)
self.assertEquals(self.response_called, 1)
# Clear cache
cache.clear()
with HTTMock(nok_response):
self.assertEquals(instance.ping_status(), False)
|
dokterbob/python-postnl-checkout
|
tests/test_django.py
|
Python
|
agpl-3.0
| 13,320
|
from django.conf.urls import url
from ..views import (PowerCycleListView, PowerCycleCreateView, PowerCycleDetailView,
PowerCycleUpdateView, PowerCycleDeleteView)
from django.contrib.auth.decorators import login_required
urlpatterns = [
url(r'^create/$', # NOQA
login_required(PowerCycleCreateView.as_view()),
name="power_cycle_create"),
url(r'^(?P<pk>.+)/update/$',
login_required(PowerCycleUpdateView.as_view()),
name="power_cycle_update"),
url(r'^(?P<pk>.+)/delete/$',
login_required(PowerCycleDeleteView.as_view()),
name="power_cycle_delete"),
url(r'^(?P<pk>.+)/$',
PowerCycleDetailView.as_view(),
name="power_cycle_detail"),
url(r'^$',
PowerCycleListView.as_view(),
name="power_cycle_list"),
]
|
Hattivat/hypergolic-django
|
hypergolic/catalog/urls/power_cycle_urls.py
|
Python
|
agpl-3.0
| 826
|
# Standard Library Imports
# 3rd Party Imports
# Local Imports
from PokeAlarm.Utils import get_gmaps_link, get_applemaps_link, \
get_waze_link, get_dist_as_str, get_team_emoji, get_ex_eligible_emoji
from . import BaseEvent
from PokeAlarm import Unknown
class GymEvent(BaseEvent):
""" Event representing the change occurred in a Gym. """
def __init__(self, data):
""" Creates a new Gym Event based on the given dict. """
super(GymEvent, self).__init__('gym')
check_for_none = BaseEvent.check_for_none
# Identification
self.gym_id = data.get('gym_id', data.get('id'))
# Location
self.lat = float(data['latitude'])
self.lng = float(data['longitude'])
self.distance = Unknown.SMALL # Completed by Manager
self.direction = Unknown.TINY # Completed by Manager
# Team Info
self.old_team_id = Unknown.TINY
self.new_team_id = int(data.get('team_id', data.get('team')))
# Gym Details
self.gym_name = check_for_none(
str, data.get('name'), Unknown.REGULAR).strip()
self.gym_description = check_for_none(
str, data.get('description'), Unknown.REGULAR).strip()
self.gym_image = check_for_none(
str, data.get('url'), Unknown.REGULAR)
self.ex_eligible = check_for_none(
int, data.get('is_ex_raid_eligible'), Unknown.REGULAR)
# Gym Guards
self.slots_available = check_for_none(
int, data.get('slots_available'), Unknown.TINY)
self.guard_count = (
(6 - self.slots_available)
if Unknown.is_not(self.slots_available)
else Unknown.TINY)
self.name = self.gym_id
self.geofence = Unknown.REGULAR
self.custom_dts = {}
def generate_dts(self, locale, timezone, units):
""" Return a dict with all the DTS for this event. """
dts = self.custom_dts.copy()
dts.update({
# Identification
'gym_id': self.gym_id,
# Location
'lat': self.lat,
'lng': self.lng,
'lat_5': "{:.5f}".format(self.lat),
'lng_5': "{:.5f}".format(self.lng),
'distance': (
get_dist_as_str(self.distance, units)
if Unknown.is_not(self.distance) else Unknown.SMALL),
'direction': self.direction,
'gmaps': get_gmaps_link(self.lat, self.lng),
'applemaps': get_applemaps_link(self.lat, self.lng),
'waze': get_waze_link(self.lat, self.lng),
'geofence': self.geofence,
# Team Info
'old_team': locale.get_team_name(self.old_team_id),
'old_team_id': self.old_team_id,
'old_team_emoji': get_team_emoji(self.old_team_id),
'old_team_color': locale.get_team_color(self.old_team_id),
'old_team_leader': locale.get_leader_name(self.old_team_id),
'new_team': locale.get_team_name(self.new_team_id),
'new_team_id': self.new_team_id,
'new_team_emoji': get_team_emoji(self.new_team_id),
'new_team_color': locale.get_team_color(self.new_team_id),
'new_team_leader': locale.get_leader_name(self.new_team_id),
# Details
'gym_name': self.gym_name,
'gym_description': self.gym_description,
'gym_image': self.gym_image,
'ex_eligible':
self.ex_eligible > 0 if Unknown.is_not(self.ex_eligible)
else Unknown.REGULAR,
'ex_eligible_emoji': get_ex_eligible_emoji(self.ex_eligible),
# Guards
'slots_available': self.slots_available,
'guard_count': self.guard_count,
})
return dts
|
kvangent/PokeAlarm
|
PokeAlarm/Events/GymEvent.py
|
Python
|
agpl-3.0
| 3,808
|
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino
#
# The licence is in the file __manifest__.py
#
##############################################################################
from datetime import datetime
import werkzeug
from dateutil.relativedelta import relativedelta
from odoo import http, _, fields
from odoo.addons.http_routing.models.ir_http import slug
from odoo.addons.website.models.ir_http import sitemap_qs2dom
from odoo.http import request
from odoo.addons.cms_form.controllers.main import FormControllerMixin
from odoo.addons.cms_form_compassion.controllers.payment_controller import (
PaymentFormController,
)
class EventsController(PaymentFormController, FormControllerMixin):
def sitemap_events(env, rule, qs):
today = fields.Date.to_string(datetime.today())
events = env["crm.event.compassion"]
dom = sitemap_qs2dom(qs, '/events', events._rec_name)
dom += request.website.website_domain()
dom += [("website_published", "=", True), ("end_date", ">=", today)]
for reg in events.search(dom):
loc = '/event/%s' % slug(reg)
if not qs or qs.lower() in loc:
yield {'loc': loc}
def sitemap_participants(env, rule, qs):
registrations = env["event.registration"]
dom = sitemap_qs2dom(qs, '/event', registrations._rec_name)
dom += request.website.website_domain()
dom += [("website_published", "=", True)]
for reg in registrations.search(dom):
loc = '/event/%s/%s' % (slug(reg.compassion_event_id), slug(reg))
if not qs or qs.lower() in loc:
yield {'loc': loc}
@http.route("/events/", auth="public", website=True, sitemap=False)
def list(self, **kwargs):
today = fields.Date.to_string(datetime.today())
# Events that are set to finish after today
started_events = request.env["crm.event.compassion"].search([
("website_published", "=", True), ("end_date", ">=", today),
])
if len(started_events) == 1:
return request.redirect("/event/" + str(started_events.id))
return request.render(
"website_event_compassion.list", {"events": started_events}
)
###################################################
# Methods for the event page and event registration
###################################################
@http.route(
'/event/<model("crm.event.compassion"):event>/', auth="public", website=True,
sitemap=sitemap_events
)
def event_page(self, event, **kwargs):
if not event.is_published and request.env.user.share:
return request.redirect("/events")
if not event.can_access_from_current_website():
raise werkzeug.exceptions.NotFound()
values = self.get_event_page_values(event, **kwargs)
registration_form = values["form"]
if registration_form.form_success:
# The user submitted a registration, redirect to confirmation
result = werkzeug.utils.redirect(
registration_form.form_next_url(), code=303
)
else:
# Check if registration was already present
errors = registration_form.form_render_values.get("errors")
if errors and errors.get("_integrity"):
request.env.cr.rollback()
# Replace error message with more friendly text.
request.website.get_status_message()
request.website.add_status_message(
_("You are already registered to this trip."),
type_="danger",
title=_("Error"),
)
# Display the Event page
result = request.render(values.pop("website_template"), values)
if event.event_type_id.sudo().travel_features:
# Travel events are full not called by AJAX popup form
return result
return result
@http.route(
'/event/<model("crm.event.compassion"):event>/faq', auth="public", website=True,
sitemap=False
)
def event_faq(self, event, **kwargs):
if not event.is_published:
return request.redirect("/events")
return request.render("website_event_compassion.event_faq", {"event": event})
@http.route(
'/event/<model("event.event"):event>/registration/'
'<int:registration_id>/success',
auth="public",
website=True, sitemap=False
)
def registration_success(self, event, registration_id, **kwargs):
limit_date = datetime.now() - relativedelta(days=1)
registration = request.env["event.registration"].sudo().browse(registration_id)
if not registration.exists() or registration.create_date < limit_date:
return request.redirect("/events")
values = {"event": event, "attendees": registration}
return request.render(
"website_event_compassion.event_registration_successful", values
)
@http.route(
'/event/<model("crm.event.compassion"):event>/confirmation/',
auth="public",
website=True, sitemap=False
)
def confirmation_page(self, event, **kwargs):
if not event.is_published:
return request.redirect("/events")
values = {
"confirmation_title": kwargs.get("title"),
"confirmation_message": kwargs.get("message"),
"event": event,
}
return request.render(
"website_event_compassion.event_confirmation_page", values
)
def get_event_page_values(self, event, **kwargs):
"""
Processes the registration form and gets the values used by the website to
render the event page.
:param event: crm.event.compassion record to render
:param kwargs: request arguments
:return: dict: values for the event website template
(must contain event, start_date, end_date, form,
main_object and website_template values)
"""
values = kwargs.copy()
# This allows the translation to still work on the page
values.pop("edit_translations", False)
values.update(
{
"event": event,
"start_date": event.get_date("start_date", "date_full"),
"end_date": event.get_date("end_date", "date_full"),
"additional_title": _("- Registration"),
}
)
# Travel display only registration form, others do have a page.
template = "website_event_compassion."
if event.event_type_id.sudo().travel_features:
values["form_model_key"] = "cms.form.group.visit.registration"
template += "event_full_page_form"
else:
template += "event_page"
registration_form = self.get_form("event.registration", **values)
registration_form.form_process()
values.update(
{
"form": registration_form,
"main_object": event,
"website_template": template,
"event_step": 1,
}
)
return values
###################################################
# Methods for the participant page and the donation
###################################################
@http.route(
[
"/event/<model('crm.event.compassion'):event>/<reg_string>-<int:reg_id>",
"/event/<model('crm.event.compassion'):event>/<int:reg_id>",
],
auth="public", website=True, sitemap=sitemap_participants
)
def participant_details(self, event, reg_id, **kwargs):
"""
:param event: the event record
:param reg_id: the registration record
:return:the rendered page
"""
if not event.is_published:
return request.redirect("/events")
reg_obj = request.env["event.registration"].sudo()
registration = reg_obj.browse(reg_id).exists().filtered("website_published")
if not registration:
return werkzeug.utils.redirect("/event/" + str(event.id), 301)
kwargs["form_model_key"] = "cms.form.event.donation"
values = self.get_participant_page_values(event, registration, **kwargs)
donation_form = values["form"]
if donation_form.form_success:
# The user submitted a donation, redirect to confirmation
result = werkzeug.utils.redirect(donation_form.form_next_url(), code=303)
else:
result = request.render(values["website_template"], values)
return result
def get_participant_page_values(self, event, registration, **kwargs):
"""
Gets the values used by the website to render the participant page.
:param event: crm.event.compassion record to render
:param registration: event.registration record to render
:param kwargs: request arguments
:return: dict: values for the event website template
(must contain event, start_date, end_date, form,
main_object and website_template values)
"""
values = kwargs.copy()
# This allows the translation to still work on the page
values.pop("edit_translations", False)
values.update({
"event": event, "registration": registration,
})
donation_form = self.get_form(False, **values)
donation_form.form_process()
values.update(
{
"form": donation_form,
"main_object": registration,
"website_template": "website_event_compassion.participant_page",
}
)
return values
########################################
# Methods for after donation redirection
########################################
@http.route("/event/payment/validate/<int:invoice_id>",
type="http", auth="public", website=True,
sitemap=False)
def donation_payment_validate(self, invoice_id=None, **kwargs):
""" Method that should be called by the server when receiving an update
for a transaction.
"""
try:
invoice = request.env["account.invoice"].browse(int(invoice_id)).sudo()
invoice.exists().ensure_one()
transaction = invoice.get_portal_last_transaction()
except ValueError:
transaction = request.env["payment.transaction"]
invoice_lines = invoice.invoice_line_ids
event = invoice_lines.mapped("event_id")
if transaction.state != "done":
return request.render(
self.get_donation_failure_template(event), {"error_intro": ""}
)
ambassador = invoice_lines.mapped("user_id")
registration = event.registration_ids.filtered(
lambda r: r.partner_id == ambassador
)
values = {"registration": registration, "event": event, "error_intro": ""}
success_template = self.get_donation_success_template(event)
return request.render(success_template, values)
@http.route(
"/event/payment/gpv_payment_validate/<int:invoice_id>", type="http",
auth="public", website=True, sitemap=False
)
def down_payment_validate(self, invoice_id=None, **post):
""" Method that should be called by the server when receiving an update
for a transaction.
"""
failure_template = "website_event_compassion.donation_failure"
error_intro = _(
"Thank you for your efforts in the Compassion trip registration " "process."
)
try:
invoice = request.env["account.invoice"].browse(int(invoice_id))
invoice.exists().ensure_one()
tx = invoice.get_portal_last_transaction()
except ValueError:
tx = request.env["payment.transaction"]
if tx.state != "done":
return request.render(failure_template, {"error_intro": error_intro})
invoice_lines = invoice.invoice_line_ids
event = invoice_lines.mapped("event_id")
registration = tx.registration_id
post.update(
{
"attendees": registration,
"event": event,
"confirmation_title": _("We are glad to confirm your registration!"),
"confirmation_message": _(
"Thank you for your efforts in the Compassion trip "
"registration process."
)
+ "<br/><br/>"
+ _(
"Your payment was successful and your are now a confirmed "
"participant of the trip. You will receive all the "
"documentation for the preparation of your trip by e-mail in "
"the coming weeks."
),
"error_intro": error_intro,
}
)
template = "website_event_compassion.event_confirmation_page"
if invoice == registration.group_visit_invoice_id:
post["confirmation_message"] = _(
"Congratulations! Everything is ready for this beautiful "
"trip to happen. You will receive all the practical "
"information about the trip preparation a few weeks before "
"the departure. Until then, don't hesitate to contact us if "
"you have any question."
)
return super().compassion_payment_validate(
tx, template, failure_template, **post
)
def get_donation_success_template(self, event):
"""
Gets the website templates for donation confirmation
:param event: crm.event.compassion record
:return: xml_id of website template
"""
return "website_event_compassion.donation_successful"
|
CompassionCH/compassion-switzerland
|
website_event_compassion/controllers/events_controller.py
|
Python
|
agpl-3.0
| 14,186
|
"""
Script to process pytest warnings output by pytest-json-report plugin and output it as a html
"""
import argparse
import io
import itertools
import json
import os
import re
from collections import Counter
from write_to_html import (
HtmlOutlineWriter,
) # noqa pylint: disable=import-error,useless-suppression
columns = [
"message",
"category",
"filename",
"lineno",
"high_location",
"label",
"num",
"deprecated",
]
columns_index_dict = {key: index for index, key in enumerate(columns)}
def seperate_warnings_by_location(warnings_data):
"""
Warnings originate from multiple locations, this function takes in list of warning objects
and separates them based on their filename location
"""
# first create regex for each n file location
warnings_locations = {
r".*/python\d\.\d/site-packages/.*\.py": "python", # noqa pylint: disable=W1401
r".*/edx-platform/lms/.*\.py": "lms", # noqa pylint: disable=W1401
r".*/edx-platform/openedx/.*\.py": "openedx", # noqa pylint: disable=W1401
r".*/edx-platform/cms/.*\.py": "cms", # noqa pylint: disable=W1401
r".*/edx-platform/common/.*\.py": "common", # noqa pylint: disable=W1401
}
# separate into locations flow:
# - iterate through each wanring_object, see if its filename matches any regex in warning locations.
# - If so, change high_location index on warnings_object to location name
for warnings_object in warnings_data:
warning_origin_located = False
for key in warnings_locations:
if (
re.search(key, warnings_object[columns_index_dict["filename"]])
is not None
):
warnings_object[
columns_index_dict["high_location"]
] = warnings_locations[key]
warning_origin_located = True
break
if not warning_origin_located:
warnings_object[columns_index_dict["high_location"]] = "other"
return warnings_data
def convert_warning_dict_to_list(warning_dict):
"""
converts our data dict into our defined list based on columns defined at top of this file
"""
output = []
for column in columns:
if column in warning_dict:
output.append(warning_dict[column])
else:
output.append(None)
output[columns_index_dict["num"]] = 1
return output
def read_warning_data(dir_path):
"""
During test runs in jenkins, multiple warning json files are output. This function finds all files
and aggregates the warnings in to one large list
"""
# pdb.set_trace()
dir_path = os.path.expanduser(dir_path)
# find all files that exist in given directory
files_in_dir = [
f for f in os.listdir(dir_path) if os.path.isfile(os.path.join(dir_path, f))
]
warnings_files = []
# TODO(jinder): currently this is hard-coded in, maybe create a constants file with info
# THINK(jinder): but creating file for one constant seems overkill
warnings_file_name_regex = (
r"pytest_warnings_?\d*\.json" # noqa pylint: disable=W1401
)
# iterate through files_in_dir and see if they match our know file name pattern
for temp_file in files_in_dir:
if re.search(warnings_file_name_regex, temp_file) is not None:
warnings_files.append(temp_file)
# go through each warning file and aggregate warnings into warnings_data
warnings_data = []
for temp_file in warnings_files:
with io.open(os.path.expanduser(dir_path + "/" + temp_file), "r") as read_file:
json_input = json.load(read_file)
if "warnings" in json_input:
data = [
convert_warning_dict_to_list(warning_dict)
for warning_dict in json_input["warnings"]
]
warnings_data.extend(data)
else:
print(temp_file)
return warnings_data
def compress_similar_warnings(warnings_data):
"""
find all warnings that are exactly the same, count them, and return set with count added to each warning
"""
tupled_data = [tuple(data) for data in warnings_data]
test_counter = Counter(tupled_data)
output = [list(value) for value in test_counter.keys()]
for data_object in output:
data_object[columns_index_dict["num"]] = test_counter[tuple(data_object)]
return output
def process_warnings_json(dir_path):
"""
Master function to process through all warnings and output a dict
dict structure:
{
location: [{warning text: {file_name: warning object}}]
}
flow:
- Aggregate data from all warning files
- Separate warnings by deprecated vs non deprecated(has word deprecate in it)
- Further categorize warnings
- Return output
Possible Error/enhancement: there might be better ways to separate deprecates vs
non-deprecated warnings
"""
warnings_data = read_warning_data(dir_path)
for warnings_object in warnings_data:
warnings_object[columns_index_dict["deprecated"]] = bool(
"deprecated" in warnings_object[columns_index_dict["message"]]
)
warnings_data = seperate_warnings_by_location(warnings_data)
compressed_warnings_data = compress_similar_warnings(warnings_data)
return compressed_warnings_data
def group_and_sort_by_sumof(data, group, sort_by):
"""
Group and sort data.
Return
List of tuples. Each tuple has:
- Group key
- Iterable of warnings that belongs to that group
- Count of warnings that belong to that group
"""
sorted_data = sorted(data, key=lambda x: x[columns.index(group)])
groups_by = itertools.groupby(sorted_data, lambda x: x[columns_index_dict[group]])
temp_list_to_sort = []
for key, generator in groups_by:
value = list(generator)
temp_list_to_sort.append((key, value, sum([item[columns_index_dict[sort_by]] for item in value])))
# sort by count
return sorted(temp_list_to_sort, key=lambda x: -x[2])
def write_html_report(warnings_data, html_path):
"""
converts from list of lists data to our html
"""
html_path = os.path.expanduser(html_path)
if "/" in html_path:
location_of_last_dir = html_path.rfind("/")
dir_path = html_path[:location_of_last_dir]
os.makedirs(dir_path, exist_ok=True)
with io.open(html_path, "w") as fout:
html_writer = HtmlOutlineWriter(fout)
category_sorted_by_count = group_and_sort_by_sumof(
warnings_data, "category", "num"
)
for category, group_in_category, category_count in category_sorted_by_count:
# xss-lint: disable=python-wrap-html
html = u'<span class="count">{category}, count: {count}</span> '.format(
category=category, count=category_count
)
html_writer.start_section(html, klass=u"category")
locations_sorted_by_count = group_and_sort_by_sumof(
group_in_category, "high_location", "num"
)
for (
location,
group_in_location,
location_count,
) in locations_sorted_by_count:
# xss-lint: disable=python-wrap-html
html = u'<span class="count">{location}, count: {count}</span> '.format(
location=location, count=location_count
)
html_writer.start_section(html, klass=u"location")
message_group_sorted_by_count = group_and_sort_by_sumof(
group_in_location, "message", "num"
)
for (
message,
message_group,
message_count,
) in message_group_sorted_by_count:
# xss-lint: disable=python-wrap-html
html = u'<span class="count">{warning_text}, count: {count}</span> '.format(
warning_text=message, count=message_count
)
html_writer.start_section(html, klass=u"warning_text")
# warnings_object[location][warning_text] is a list
for warning in message_group:
# xss-lint: disable=python-wrap-html
html = u'<span class="count">{warning_file_path}</span> '.format(
warning_file_path=warning[columns_index_dict["filename"]]
)
html_writer.start_section(html, klass=u"warning")
# xss-lint: disable=python-wrap-html
html = u'<p class="lineno">lineno: {lineno}</p> '.format(
lineno=warning[columns_index_dict["lineno"]]
)
html_writer.write(html)
# xss-lint: disable=python-wrap-html
html = u'<p class="num">num_occur: {num}</p> '.format(
num=warning[columns_index_dict["num"]]
)
html_writer.write(html)
html_writer.end_section()
html_writer.end_section()
html_writer.end_section()
html_writer.end_section()
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Process and categorize pytest warnings and output html report."
)
parser.add_argument("--dir-path", default="test_root/log")
parser.add_argument("--html-path", default="test_html.html")
args = parser.parse_args()
data_output = process_warnings_json(args.dir_path)
write_html_report(data_output, args.html_path)
|
msegado/edx-platform
|
openedx/core/process_warnings.py
|
Python
|
agpl-3.0
| 9,900
|
# -*- coding: utf-8 -*-
# Copyright (C) 2014-present Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def make_diff(first: dict, second: dict, not_found_value=None,
excluded_keys: tuple = ()) -> dict:
"""
Compute a diff between two dicts.
"""
diff = {}
# Check all keys in first dict
for key in first:
if key not in second:
diff[key] = (first[key], not_found_value)
elif first[key] != second[key]:
diff[key] = (first[key], second[key])
# Check all keys in second dict to find missing
for key in second:
if key not in first:
diff[key] = (not_found_value, second[key])
# Remove A -> A changes that usually happens with None -> None
for key, value in list(diff.items()):
frst, scnd = value
if frst == scnd:
del diff[key]
# Removed excluded keys
for key in excluded_keys:
if key in diff:
del diff[key]
return diff
|
taigaio/taiga-back
|
taiga/base/utils/diff.py
|
Python
|
agpl-3.0
| 1,614
|
# -*- coding: utf-8 -*-
"""
Copyright 2015 Telefonica Investigacion y Desarrollo, S.A.U
This file is part of Orion Context Broker.
Orion Context Broker is free software: you can redistribute it and/or
modify it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
Orion Context Broker is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with Orion Context Broker. If not, see http://www.gnu.org/licenses/.
For those usages not covered by this license please contact with
iot_support at tid dot es
"""
__author__ = 'Iván Arias León (ivan dot ariasleon at telefonica dot com)'
import behave
from behave import step
from iotqatools.helpers_utils import *
from iotqatools.cb_v2_utils import CB
from iotqatools.mongo_utils import Mongo
from iotqatools.remote_log_utils import Remote_Log
from iotqatools.fabric_utils import FabricSupport
from tools.properties_config import Properties # methods in properties class
from tools.NGSI_v2 import NGSI
# constants
properties_class = Properties()
CONTEXT_BROKER_ENV = u'context_broker_env'
MONGO_ENV = u'mongo_env'
# HTTP status code
status_codes = {'OK': 200,
'Created': 201,
'No Content': 204,
'Moved Permanently': 301,
'Redirect': 307,
'Bad Request': 400,
'unauthorized': 401,
'Not Found': 404,
'Method Not Allowed': 405,
'Not Acceptable': 406,
'Conflict': 409,
'Content Length Required': 411,
'Request Entity Too Large': 413,
'Unsupported Media Type': 415,
'Unprocessable Entity': 422,
'Internal Server Error': 500}
behave.use_step_matcher("re")
__logger__ = logging.getLogger("steps")
# --------------- general_operations ----------------------
@step(u'send a API entry point request')
def send_a_base_request(context):
"""
send a API entry point request
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("Sending a API entry point request: /v2 ...")
props = properties_class.read_properties()[CONTEXT_BROKER_ENV]
context.cb = CB(protocol=props["CB_PROTOCOL"], host=props["CB_HOST"], port=props["CB_PORT"])
context.resp = context.cb.get_base_request()
__logger__.info("...Sent a API entry point request: /v2 correctly")
@step(u'send a version request')
def send_a_version_request(context):
"""
send a version request
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("Sending a version request...")
context.props_cb_env = properties_class.read_properties()[CONTEXT_BROKER_ENV]
context.cb = CB(protocol=context.props_cb_env["CB_PROTOCOL"], host=context.props_cb_env["CB_HOST"], port=context.props_cb_env["CB_PORT"])
context.resp = context.cb.get_version_request()
__logger__.info("..Sent a version request correctly")
send_a_version_request = step(u'send a version request')(send_a_version_request)
@step(u'send a statistics request')
def send_a_statistics_request(context):
"""
send a statistics request
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("Sending a statistics request...")
context.props_cb_env = properties_class.read_properties()[CONTEXT_BROKER_ENV]
context.cb = CB(protocol=context.props_cb_env["CB_PROTOCOL"], host=context.props_cb_env["CB_HOST"], port=context.props_cb_env["CB_PORT"])
context.resp = context.cb.get_statistics_request()
__logger__.info("..Sent a statistics request correctly")
@step(u'send a cache statistics request')
def send_a_cache_statistics_request(context):
"""
send a cache statistics request
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("Sending a statistics request...")
context.props_cb_env = properties_class.read_properties()[CONTEXT_BROKER_ENV]
context.cb = CB(protocol=context.props_cb_env["CB_PROTOCOL"], host=context.props_cb_env["CB_HOST"], port=context.props_cb_env["CB_PORT"])
context.resp = context.cb.get_cache_statistics_request()
__logger__.info("..Sent a statistics request correctly")
@step(u'delete database in mongo')
def delete_database_in_mongo(context):
"""
Delete database used in mongo
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
fiware_service_header = u'Fiware-Service'
orion_prefix = u'orion'
database_name = orion_prefix
props_mongo = properties_class.read_properties()[MONGO_ENV] # mongo properties dict
mongo = Mongo(host=props_mongo["MONGO_HOST"], port=props_mongo["MONGO_PORT"], user=props_mongo["MONGO_USER"],
password=props_mongo["MONGO_PASS"])
headers = context.cb.get_headers()
if fiware_service_header in headers:
if headers[fiware_service_header] != EMPTY:
if headers[fiware_service_header].find(".") < 0:
database_name = "%s-%s" % (database_name, headers[fiware_service_header].lower())
else:
postfix = headers[fiware_service_header].lower()[0:headers[fiware_service_header].find(".")]
database_name = "%s-%s" % (database_name, postfix)
__logger__.debug("Deleting database \"%s\" in mongo..." % database_name)
mongo.connect(database_name)
mongo.drop_database()
mongo.disconnect()
__logger__.info("...Database \"%s\" is deleted" % database_name)
@step(u'check in log, label "([^"]*)" and message "([^"]*)"')
def check_in_log_label_and_text(context, label, text):
"""
Verify in log file if a label with a message exists
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
:param label: label to find
:param text: text to find (begin since the end)
"""
__logger__.debug("Looking for in log the \"%s\" label and the \"%s\" text..." % (label, text))
props_cb_env = properties_class.read_properties()[CONTEXT_BROKER_ENV]
remote_log = Remote_Log(file="%s/contextBroker.log" % props_cb_env["CB_LOG_FILE"], fabric=context.my_fab)
line = remote_log.find_line(label, text)
assert line is not None, " ERROR - the \"%s\" label and the \"%s\" text do not exist in the log" % (label, text)
__logger__.info("log line: \n%s" % line)
ngsi = NGSI()
ngsi.verify_log(context, line)
__logger__.info("...confirmed traces in log")
@step(u'delay for "([^"]*)" seconds')
def delay_for_seconds(context, seconds):
"""
delay for N seconds
:param seconds: seconds to delay
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.info("delay for \"%s\" seconds" % seconds)
time.sleep(int(seconds))
@step(u'retrieve the log level')
def retrieve_the_log_level(context):
"""
retrieve the log level in Context Broker
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.info("retrieving the log level in Context Broker")
context.props_cb_env = properties_class.read_properties()[CONTEXT_BROKER_ENV]
context.cb = CB(protocol=context.props_cb_env["CB_PROTOCOL"], host=context.props_cb_env["CB_HOST"], port=context.props_cb_env["CB_PORT"])
context.resp = context.cb.retrieve_the_log_level()
__logger__.info("..retrieved the log level in Context Broker")
@step(u'change the log level')
def change_the_log_level(context):
"""
change the log level
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.info("changing the log level in Context Broker")
query_param = {}
if context.table is not None:
for row in context.table:
query_param[row["parameter"]] = row["value"]
__logger__.info("query param: %s = %s" % (row["parameter"], row["value"]))
context.props_cb_env = properties_class.read_properties()[CONTEXT_BROKER_ENV]
context.cb = CB(protocol=context.props_cb_env["CB_PROTOCOL"], host=context.props_cb_env["CB_HOST"], port=context.props_cb_env["CB_PORT"])
context.resp = context.cb.change_the_log_level(query_param)
__logger__.info("..changed the log level in Context Broker")
# ------------------------------------- validations ----------------------------------------------
@step(u'verify that receive a.? "([^"]*)" http code')
def verify_that_receive_an_http_code(context, http_code):
"""
verify that receive an http code
:param http_code: http code expected
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("context: %s" % repr(context.resp.text))
__logger__.debug("Verifying that return an http codes...")
assert context.resp.status_code == status_codes[http_code], \
" ERROR - http code is wrong\n" \
" expected: %s \n" \
" received: %s" % (str(status_codes[http_code]), str(context.resp.status_code))
__logger__.info('...Verified that http code returned is "%s"' % http_code)
@step(u'verify "([^"]*)" url with "([^"]*)" value in response')
def verify_entry_point(context, url, value):
"""
verify API entry point response.
Ex:
{
"entities_url":"/v2/entities",
"types_url":"/v2/types",
"subscriptions_url":"/v2/subscriptions",
"registrations_url":"/v2/registrations"
}
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
:param url: url key to verify
:param value: value expected
"""
__logger__.debug("Verifying url in API entry point response...")
resp_dict = convert_str_to_dict(context.resp.text, "JSON")
assert resp_dict[url] == value, " ERROR - in \"%s\" url with \"%s\" value " % (url, value)
__logger__.info("...Verified url in API entry point response")
@step(u'verify statistics "([^"]*)" field does exists')
def verify_stat_fields(context, field_to_test):
"""
verify statistics and cache statistics fields in response.
Ex: /statistics
{
"uptime_in_secs":2,
"measuring_interval_in_secs":2
}
/cache/statistics
{
"ids":"",
"refresh":1,
"inserts":0,
"removes":0,
"updates":0,
"items":0
}
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
:param field_to_test: field to verify if it does exists
"""
__logger__.debug("Verifying statistics field: %s does exists..." % field_to_test)
resp_dict = convert_str_to_dict(context.resp.text, "JSON")
assert field_to_test in resp_dict.keys(), "ERROR - \"%s\" field does no exist in statistics response" % field_to_test
__logger__.info("...Verified that statistics field %s is correct" % field_to_test)
@step(u'verify version "([^"]*)" field does exists')
def verify_version_fields(context, field):
"""
verify version fields in response.
Ex:
{
"orion" : {
"version" : "0.23.0_20150722131636",
"uptime" : "0 d, 0 h, 4 m, 46 s",
"git_hash" : "3c0767f91997a25925229b836dc48bba0f4801ba",
"compile_time" : "Wed Jul 22 13:18:54 CEST 2015",
"compiled_by" : "develenv",
"compiled_in" : "ci-fiware-01"
}
}
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
:param field: field to verify if it does exists
"""
__logger__.debug("Verifying version field: %s does exists..." % field)
resp_dict = convert_str_to_dict(context.resp.text, "JSON")
assert "orion" in resp_dict, "ERROR - orion field does no exist in version response"
assert field in resp_dict["orion"], "ERROR - %s field does no exist in version response" % field
__logger__.info("...Verified that version field %s is correct" % field)
@step(u'verify if version is the expected')
def verify_if_version_is_the_expected(context):
"""
verify if version is the expected
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
resp_dict = convert_str_to_dict(str(context.resp.text), "JSON")
assert resp_dict["orion"]["version"].find(context.props_cb_env["CB_VERSION"]) >= 0, \
" ERROR in context broker version value, \n" \
" expected: %s \n" \
" installed: %s" % (context.props_cb_env["CB_VERSION"], resp_dict["orion"]["version"])
__logger__.info("-- version %s is correct in version request" % context.props_cb_env["CB_VERSION"])
@step(u'verify that receive several "([^"]*)" http code')
def verify_that_receive_several_http_codes(context, http_code):
"""
verify that receive several http codes in multi entities
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
:param http_code: http code in all entities
"""
__logger__.debug("Verifying that return an http code in several entities...")
entities_context = context.cb.get_entity_context()
for i in range(int(entities_context["entities_number"])):
assert context.resp_list[i].status_code == status_codes[http_code], \
" ERROR - http code is wrong in position: %s \n" \
"expected: %s \n" \
" received: %s" % (str(i), str(status_codes[http_code]), str(context.resp_list[i].status_code))
__logger__.debug(" -- status code \"%s\" is the expected in position: %s" % (http_code, str(i)))
__logger__.info("...Verified that http code returned in all entities are %s" % http_code)
@step(u'verify an error response')
def verify_error_message(context):
"""
verify error response
:param context: parameters to evaluate. It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("Verifying error message ...")
ngsi = NGSI()
ngsi.verify_error_response(context, context.resp)
__logger__.info("...Verified that error message is the expected")
@step(u'verify several error responses')
def verify_error_message(context):
"""
verify error response
:param context: parameters to evaluate. It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("Verifying error message in several entities...")
entities_context = context.cb.get_entity_context()
ngsi = NGSI()
for i in range(int(entities_context["entities_number"])):
ngsi.verify_error_response(context, context.resp_list[i])
__logger__.info("...Verified that error message is the expected in all entities ")
@step(u'verify headers in response')
def verify_headers_in_response(context):
"""
verify headers in response
Ex:
| parameter | value |
| fiware-total-count | 5 |
| location | /v2/subscriptions/.* |
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("Verifying headers in response...")
ngsi = NGSI()
ngsi.verify_headers_response(context)
__logger__.info("...Verified headers in response")
@step(u'verify if the log level "([^"]*)" is the expected')
def verify_if_the_log_level_is_the_expected(context, level):
"""
verify if the log level is the expected
:param level: log level expected
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
"""
__logger__.debug("Verifying if the log level \"%s\" is the expected in response..." % level)
ngsi = NGSI()
ngsi.verify_log_level(context, level)
__logger__.info("...Verified log level in response")
@step(u'verify admin error "([^"]*)"')
def verify_admin_error(context, error):
"""
verify admin error message
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
:param error: error message expected
"""
__logger__.debug("Verifying the admin error message: %s..." % error)
ngsi = NGSI()
ngsi.verify_admin_error(context, error)
__logger__.info("...Verified that the admin error message is the expected")
|
jmcanterafonseca/fiware-orion
|
test/acceptance/behave/components/common_steps/general_steps.py
|
Python
|
agpl-3.0
| 18,232
|
# pylint: disable=unused-import
"""
Python APIs exposed by the bulk_email app to other in-process apps.
"""
# Public Bulk Email Functions
from __future__ import absolute_import
from bulk_email.models_api import (
is_bulk_email_enabled_for_course,
is_bulk_email_feature_enabled,
is_user_opted_out_for_course
)
def get_emails_enabled(user, course_id):
"""
Get whether or not emails are enabled in the context of a course.
Arguments:
user: the user object for which we want to check whether emails are enabled
course_id (string): the course id of the course
Returns:
(bool): True if emails are enabled for the course associated with course_id for the user;
False otherwise
"""
if is_bulk_email_feature_enabled(course_id=course_id):
return not is_user_opted_out_for_course(user=user, course_id=course_id)
return None
|
ESOedX/edx-platform
|
lms/djangoapps/bulk_email/api.py
|
Python
|
agpl-3.0
| 899
|
from flask import Flask
__version__ = '0.1.1'
app = Flask(__name__)
app.config.from_object('frijoles.default_settings')
app.config.from_envvar('FRIJOLES_SETTINGS', silent=True)
import frijoles.views
|
Antojitos/frijoles
|
src/frijoles/__init__.py
|
Python
|
agpl-3.0
| 202
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# Normalize soundfiles in a folder, and write them to a new folder
# called normalized/
# Import Python modules
import contextlib
import os
import shutil
import sys
import wave
# Import user modules
def normalize():
""" Normalizes a set of sound files to norm-To dB
return -->> 1
"""
# Get the names of the files in sortFolder.
files = os.listdir(folderToSort)
# Make a directory for the renamed sorted files:
dirname = folderToSort + 'normalized/'
try:
os.makedirs(dirname)
except OSError:
if os.path.exists(dirname):
pass
else:
raise
for singleFile in files:
#Only work with .wav files
if singleFile[-4:] == '.wav':
inputFile = folderToSort + singleFile
outfile = dirname + singleFile
command = 'sox --norm={0} {1} {2}'.format(normalizeTo, inputFile,
outfile)
os.system(command)
return 1
def inputCheck(argValues):
""" Check whether the input data is valid. If not print usage
information.
argValues ---> a list of the scripts command-line parameters.
"""
return 1
# Check that the input parameters are valid. Get the name of the folder
# that contains the sound files and the sort type from the command-line
# arguments.
argValues = sys.argv
inputCheck(argValues)
folderToSort = argValues[1]
try:
normalizeTo = argValues[2]
except IndexError:
normalizeTo = -3
print 'Normalizing to -3dB'
# Exectue the script.
normalize()
|
elerno/cascaBell
|
normalizeFiles.py
|
Python
|
agpl-3.0
| 1,436
|
''' -- imports from python libraries -- '''
# from datetime import datetime
import datetime
import json
''' -- imports from installed packages -- '''
from django.http import HttpResponseRedirect # , HttpResponse uncomment when to use
from django.http import HttpResponse
from django.http import Http404
from django.shortcuts import render_to_response # , render uncomment when to use
from django.template import RequestContext
from django.template import TemplateDoesNotExist
from django.template.loader import render_to_string
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
try:
from bson import ObjectId
except ImportError: # old pymongo
from pymongo.objectid import ObjectId
''' -- imports from application folders/files -- '''
from gnowsys_ndf.settings import GAPPS, MEDIA_ROOT, GSTUDIO_TASK_TYPES
from gnowsys_ndf.ndf.models import NodeJSONEncoder
from gnowsys_ndf.ndf.models import Node, AttributeType, RelationType
from gnowsys_ndf.ndf.models import node_collection, triple_collection
from gnowsys_ndf.ndf.views.file import save_file
from gnowsys_ndf.ndf.templatetags.ndf_tags import edit_drawer_widget
from gnowsys_ndf.ndf.views.methods import get_node_common_fields, parse_template_data, get_execution_time, delete_node
from gnowsys_ndf.ndf.views.notify import set_notif_val
from gnowsys_ndf.ndf.views.methods import get_property_order_with_value
from gnowsys_ndf.ndf.views.methods import create_gattribute, create_grelation, create_task
GST_COURSE = node_collection.one({'_type': "GSystemType", 'name': GAPPS[7]})
app = GST_COURSE
# @login_required
@get_execution_time
def course(request, group_id, course_id=None):
"""
* Renders a list of all 'courses' available within the database.
"""
ins_objectid = ObjectId()
if ins_objectid.is_valid(group_id) is False:
group_ins = node_collection.find_one({'_type': "Group", "name": group_id})
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if auth:
group_id = str(auth._id)
else:
pass
if course_id is None:
course_ins = node_collection.find_one({'_type': "GSystemType", "name": "Course"})
if course_ins:
course_id = str(course_ins._id)
if request.method == "POST":
# Course search view
title = GST_COURSE.name
search_field = request.POST['search_field']
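        # Match Courses in this group whose name or tags contain the search
        # string (case-insensitive), restricted to PUBLIC nodes or PRIVATE
        # nodes created by the requesting user.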
course_coll = node_collection.find({'member_of': {'$all': [ObjectId(GST_COURSE._id)]},
'$or': [
{'$and': [
{'name': {'$regex': search_field, '$options': 'i'}},
{'$or': [
{'access_policy': u"PUBLIC"},
{'$and': [{'access_policy': u"PRIVATE"}, {'created_by': request.user.id}]}
]
}
]
},
{'$and': [
{'tags': {'$regex': search_field, '$options': 'i'}},
{'$or': [
{'access_policy': u"PUBLIC"},
{'$and': [{'access_policy': u"PRIVATE"}, {'created_by': request.user.id}]}
]
}
]
}
],
'group_set': {'$all': [ObjectId(group_id)]}
}).sort('last_update', -1)
# course_nodes_count = course_coll.count()
return render_to_response("ndf/course.html",
{'title': title,
'appId': app._id,
'searching': True, 'query': search_field,
'course_coll': course_coll, 'groupid': group_id, 'group_id':group_id
},
context_instance=RequestContext(request)
)
else:
# Course list view
title = GST_COURSE.name
course_coll = node_collection.find({'member_of': {'$all': [ObjectId(course_id)]},
'group_set': {'$all': [ObjectId(group_id)]},
'$or': [
{'access_policy': u"PUBLIC"},
{'$and': [
{'access_policy': u"PRIVATE"},
{'created_by': request.user.id}
]
}
]
})
template = "ndf/course.html"
variable = RequestContext(request, {'title': title, 'course_nodes_count': course_coll.count(), 'course_coll': course_coll, 'groupid':group_id, 'appId':app._id, 'group_id':group_id})
return render_to_response(template, variable)
@login_required
@get_execution_time
def create_edit(request, group_id, node_id=None):
"""Creates/Modifies details about the given quiz-item.
"""
ins_objectid = ObjectId()
if ins_objectid.is_valid(group_id) is False:
group_ins = node_collection.find_one({'_type': "Group","name": group_id})
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if auth:
group_id = str(auth._id)
else:
pass
context_variables = {'title': GST_COURSE.name,
'group_id': group_id,
'groupid': group_id
}
if node_id:
course_node = node_collection.one({'_type': u'GSystem', '_id': ObjectId(node_id)})
else:
course_node = node_collection.collection.GSystem()
available_nodes = node_collection.find({'_type': u'GSystem', 'member_of': ObjectId(GST_COURSE._id),'group_set': ObjectId(group_id) })
nodes_list = []
for each in available_nodes:
nodes_list.append(str((each.name).strip().lower()))
if request.method == "POST":
# get_node_common_fields(request, course_node, group_id, GST_COURSE)
course_node.save(is_changed=get_node_common_fields(request, course_node, group_id, GST_COURSE))
return HttpResponseRedirect(reverse('course', kwargs={'group_id': group_id}))
else:
if node_id:
context_variables['node'] = course_node
context_variables['groupid'] = group_id
context_variables['group_id'] = group_id
context_variables['appId'] = app._id
context_variables['nodes_list'] = json.dumps(nodes_list)
return render_to_response("ndf/course_create_edit.html",
context_variables,
context_instance=RequestContext(request)
)
@login_required
@get_execution_time
def course_detail(request, group_id, _id):
ins_objectid = ObjectId()
if ins_objectid.is_valid(group_id) is False:
group_ins = node_collection.find_one({'_type': "Group", "name": group_id})
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if auth:
group_id = str(auth._id)
else:
pass
course_structure_exists = False
title = GST_COURSE.name
course_node = node_collection.one({"_id": ObjectId(_id)})
if course_node.collection_set:
course_structure_exists = True
return render_to_response("ndf/course_detail.html",
{'node': course_node,
'groupid': group_id,
'group_id': group_id,
'appId': app._id,
'title':title,
'course_structure_exists': course_structure_exists
},
context_instance=RequestContext(request)
)
@login_required
@get_execution_time
def course_create_edit(request, group_id, app_id, app_set_id=None, app_set_instance_id=None, app_name=None):
"""
Creates/Modifies document of given sub-types of Course(s).
"""
auth = None
if ObjectId.is_valid(group_id) is False:
group_ins = node_collection.one({'_type': "Group", "name": group_id})
auth = node_collection.one({
'_type': 'Author', 'name': unicode(request.user.username)
})
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({
'_type': 'Author', 'name': unicode(request.user.username)
})
if auth:
group_id = str(auth._id)
else:
pass
app = None
if app_id is None:
app = node_collection.one({'_type': "GSystemType", 'name': app_name})
if app:
app_id = str(app._id)
else:
app = node_collection.one({'_id': ObjectId(app_id)})
app_name = app.name
# app_set = ""
app_collection_set = []
title = ""
course_gst = None
course_gs = None
mis_admin = None
property_order_list = []
template = ""
template_prefix = "mis"
if request.user:
if auth is None:
auth = node_collection.one({
'_type': 'Author', 'name': unicode(request.user.username)
})
agency_type = auth.agency_type
agency_type_node = node_collection.one({
'_type': "GSystemType", 'name': agency_type
}, {
'collection_set': 1
})
if agency_type_node:
for eachset in agency_type_node.collection_set:
app_collection_set.append(
node_collection.one({
"_id": eachset
}, {
'_id': 1, 'name': 1, 'type_of': 1
})
)
if app_set_id:
course_gst = node_collection.one({
'_type': "GSystemType", '_id': ObjectId(app_set_id)
}, {
'name': 1, 'type_of': 1
})
template = "ndf/" + course_gst.name.strip().lower().replace(' ', '_') \
+ "_create_edit.html"
title = course_gst.name
if app_set_instance_id:
course_gs = node_collection.one({
'_type': "GSystem", '_id': ObjectId(app_set_instance_id)
})
else:
course_gs = node_collection.collection.GSystem()
course_gs.member_of.append(course_gst._id)
property_order_list = get_property_order_with_value(course_gs)
if request.method == "POST":
# [A] Save course-node's base-field(s)
start_time = ""
if "start_time" in request.POST:
start_time = request.POST.get("start_time", "")
start_time = datetime.datetime.strptime(start_time, "%m/%Y")
end_time = ""
if "end_time" in request.POST:
end_time = request.POST.get("end_time", "")
end_time = datetime.datetime.strptime(end_time, "%m/%Y")
nussd_course_type = ""
if "nussd_course_type" in request.POST:
nussd_course_type = request.POST.get("nussd_course_type", "")
nussd_course_type = unicode(nussd_course_type)
unset_ac_options = []
if "unset-ac-options" in request.POST:
unset_ac_options = request.POST.getlist("unset-ac-options")
else:
# Just to execute loop at least once for Course Sub-Types
# other than 'Announced Course'
unset_ac_options = ["dummy"]
if course_gst.name == u"Announced Course":
announce_to_colg_list = request.POST.get(
"announce_to_colg_list", ""
)
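                    # announce_to_colg_list arrives as a comma-separated string of
                    # ObjectId hex strings, e.g. "52f3b...,52f3c..." (hypothetical ids)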
                    announce_to_colg_list = announce_to_colg_list.split(",")
                    colg_ids = []
                    # Parsing ObjectId -- from string format to ObjectId
                    # (validate before converting so a malformed id is skipped
                    # instead of raising InvalidId)
                    for each in announce_to_colg_list:
                        if each and ObjectId.is_valid(each):
                            colg_ids.append(ObjectId(each))
# Fetching college(s)
colg_list_cur = node_collection.find({
'_id': {'$in': colg_ids}
}, {
'name': 1, 'attribute_set.enrollment_code': 1
})
if "_id" in course_gs:
# It means we are in editing mode of given Announced Course GSystem
unset_ac_options = [course_gs._id]
ac_nc_code_list = []
# Prepare a list
# 0th index (ac_node): Announced Course node,
# 1st index (nc_id): NUSSD Course node's ObjectId,
# 2nd index (nc_course_code): NUSSD Course's code
for cid in unset_ac_options:
ac_node = None
nc_id = None
nc_course_code = ""
# Here course_gst is Announced Course GSytemType's node
ac_node = node_collection.one({
'_id': ObjectId(cid), 'member_of': course_gst._id
})
# If ac_node found, means
# (1) we are dealing with creating Announced Course
# else,
# (2) we are in editing phase of Announced Course
course_node = None
if not ac_node:
# In this case, cid is of NUSSD Course GSystem
# So fetch that to extract course_code
# Set to nc_id
ac_node = None
course_node = node_collection.one({
'_id': ObjectId(cid)
})
else:
# In this case, fetch NUSSD Course from
# Announced Course GSystem's announced_for relationship
                            course_node_ids = None
                            for rel in ac_node.relation_set:
if "announced_for" in rel:
course_node_ids = rel["announced_for"]
break
# Fetch NUSSD Course GSystem
if course_node_ids:
course_node = node_collection.find_one({
"_id": {"$in": course_node_ids}
})
# If course_code doesn't exists then
# set NUSSD Course GSystem's name as course_code
if course_node:
nc_id = course_node._id
for attr in course_node.attribute_set:
if "course_code" in attr:
nc_course_code = attr["course_code"]
break
if not nc_course_code:
nc_course_code = course_node.name.replace(" ", "-")
# Append to ac_nc_code_list
ac_nc_code_list.append([ac_node, nc_id, nc_course_code])
# For each selected college
# Create Announced Course GSystem
for college_node in colg_list_cur:
# Fetch Enrollment code from "enrollment_code" (Attribute)
college_enrollment_code = ""
if college_node:
for attr in college_node.attribute_set:
if attr and "enrollment_code" in attr:
college_enrollment_code = attr["enrollment_code"]
break
ann_course_id_list = []
# For each selected course to Announce
for ac_nc_code in ac_nc_code_list:
course_gs = ac_nc_code[0]
nc_id = ac_nc_code[1]
nc_course_code = ac_nc_code[2]
if not course_gs:
# Create new Announced Course GSystem
course_gs = node_collection.collection.GSystem()
course_gs.member_of.append(course_gst._id)
# Prepare name for Announced Course GSystem
c_name = unicode(
nc_course_code + "_" + college_enrollment_code + "_"
+ start_time.strftime("%b_%Y") + "-"
+ end_time.strftime("%b_%Y")
)
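                            # e.g. c_name = u"CS101_ENR01_Jun_2015-Dec_2015"
                            # (hypothetical course code and enrollment code)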
request.POST["name"] = c_name
is_changed = get_node_common_fields(
request, course_gs, group_id, course_gst
)
if is_changed:
# Remove this when publish button is setup on interface
course_gs.status = u"PUBLISHED"
course_gs.save(is_changed=is_changed)
# [B] Store AT and/or RT field(s) of given course-node
for tab_details in property_order_list:
for field_set in tab_details[1]:
# Fetch only Attribute field(s) / Relation field(s)
if '_id' in field_set:
field_instance = node_collection.one({
'_id': field_set['_id']
})
field_instance_type = type(field_instance)
if (field_instance_type in
[AttributeType, RelationType]):
field_data_type = field_set['data_type']
# Fetch field's value depending upon AT/RT
# and Parse fetched-value depending upon
# that field's data-type
if field_instance_type == AttributeType:
if "File" in field_instance["validators"]:
                                                    # Special case: AttributeTypes that require a file instance as their value, in which case the file document's ObjectId is used
if field_instance["name"] in request.FILES:
field_value = request.FILES[field_instance["name"]]
else:
field_value = ""
                                                    # The 0th index is used below because save_file() returns a tuple (ObjectId, bool)
if field_value != '' and field_value != u'':
file_name = course_gs.name + " -- " + field_instance["altnames"]
content_org = ""
tags = ""
field_value = save_file(field_value, file_name, request.user.id, group_id, content_org, tags, oid=True)[0]
else:
# Other AttributeTypes
field_value = request.POST.get(field_instance["name"], "")
if field_instance["name"] in ["start_time", "end_time"]:
# Course Duration
field_value = parse_template_data(field_data_type, field_value, date_format_string="%m/%Y")
else:
field_value = parse_template_data(field_data_type, field_value, date_format_string="%d/%m/%Y %H:%M")
course_gs_triple_instance = create_gattribute(course_gs._id, node_collection.collection.AttributeType(field_instance), field_value)
else:
# i.e if field_instance_type == RelationType
if field_instance["name"] == "announced_for":
field_value = ObjectId(nc_id)
# Pass ObjectId of selected Course
elif field_instance["name"] == "acourse_for_college":
field_value = college_node._id
# Pass ObjectId of selected College
course_gs_triple_instance = create_grelation(course_gs._id, node_collection.collection.RelationType(field_instance), field_value)
ann_course_id_list.append(course_gs._id)
else:
is_changed = get_node_common_fields(request, course_gs, group_id, course_gst)
if is_changed:
# Remove this when publish button is setup on interface
course_gs.status = u"PUBLISHED"
course_gs.save(is_changed=is_changed)
# [B] Store AT and/or RT field(s) of given course-node
for tab_details in property_order_list:
for field_set in tab_details[1]:
# Fetch only Attribute field(s) / Relation field(s)
if '_id' in field_set:
field_instance = node_collection.one({'_id': field_set['_id']})
field_instance_type = type(field_instance)
if field_instance_type in [AttributeType, RelationType]:
field_data_type = field_set['data_type']
# Fetch field's value depending upon AT/RT
# and Parse fetched-value depending upon
# that field's data-type
if field_instance_type == AttributeType:
if "File" in field_instance["validators"]:
                                            # Special case: AttributeTypes that require a file instance as their value, in which case the file document's ObjectId is used
if field_instance["name"] in request.FILES:
field_value = request.FILES[field_instance["name"]]
else:
field_value = ""
                                            # The 0th index is used below because save_file() returns a tuple (ObjectId, bool)
if field_value != '' and field_value != u'':
file_name = course_gs.name + " -- " + field_instance["altnames"]
content_org = ""
tags = ""
field_value = save_file(field_value, file_name, request.user.id, group_id, content_org, tags, oid=True)[0]
else:
# Other AttributeTypes
field_value = request.POST.get(field_instance["name"], "")
# if field_instance["name"] in ["start_time","end_time"]:
# field_value = parse_template_data(field_data_type, field_value, date_format_string="%m/%Y")
# elif field_instance["name"] in ["start_enroll", "end_enroll"]: #Student Enrollment DUration
# field_value = parse_template_data(field_data_type, field_value, date_format_string="%d/%m/%Y")
if field_instance["name"] in ["mast_tr_qualifications", "voln_tr_qualifications"]:
                                                # Needs special kind of parsing
field_value = []
tr_qualifications = request.POST.get(field_instance["name"], '')
if tr_qualifications:
qualifications_dict = {}
tr_qualifications = [qual.strip() for qual in tr_qualifications.split(",")]
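                                                    # Example (hypothetical input): "true, B.Ed., false, M.A."
                                                    # parses to [{'mandatory': True, 'text': u'B.Ed.'},
                                                    #            {'mandatory': False, 'text': u'M.A.'}]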
for i, qual in enumerate(tr_qualifications):
if (i % 2) == 0:
if qual == "true":
qualifications_dict["mandatory"] = True
elif qual == "false":
qualifications_dict["mandatory"] = False
else:
qualifications_dict["text"] = unicode(qual)
field_value.append(qualifications_dict)
qualifications_dict = {}
elif field_instance["name"] in ["max_marks", "min_marks"]:
# Needed because both these fields' values are dependent upon evaluation_type field's value
evaluation_type = request.POST.get("evaluation_type", "")
if evaluation_type == u"Continuous":
field_value = None
field_value = parse_template_data(field_data_type, field_value, date_format_string="%d/%m/%Y %H:%M")
else:
field_value = parse_template_data(field_data_type, field_value, date_format_string="%d/%m/%Y %H:%M")
course_gs_triple_instance = create_gattribute(
course_gs._id,
node_collection.collection.AttributeType(field_instance),
field_value
)
else:
#i.e if field_instance_type == RelationType
if field_instance["name"] == "announced_for":
field_value = ObjectId(cid)
#Pass ObjectId of selected Course
elif field_instance["name"] == "acourse_for_college":
field_value = college_node._id
#Pass ObjectId of selected College
course_gs_triple_instance = create_grelation(
course_gs._id,
node_collection.collection.RelationType(field_instance),
field_value
)
return HttpResponseRedirect(
reverse(
app_name.lower() + ":" + template_prefix + '_app_detail',
kwargs={
'group_id': group_id, "app_id": app_id,
"app_set_id": app_set_id
}
)
)
univ = node_collection.one({
'_type': "GSystemType", 'name': "University"
}, {
'_id': 1
})
university_cur = None
if not mis_admin:
mis_admin = node_collection.one(
{'_type': "Group", 'name': "MIS_admin"},
{'_id': 1, 'name': 1, 'group_admin': 1}
)
if univ and mis_admin:
university_cur = node_collection.find(
{'member_of': univ._id, 'group_set': mis_admin._id},
{'name': 1}
).sort('name', 1)
default_template = "ndf/course_create_edit.html"
context_variables = {
'groupid': group_id, 'group_id': group_id,
'app_id': app_id, 'app_name': app_name,
'app_collection_set': app_collection_set,
'app_set_id': app_set_id,
'title': title,
'university_cur': university_cur,
'property_order_list': property_order_list
}
if app_set_instance_id:
course_gs.get_neighbourhood(course_gs.member_of)
context_variables['node'] = course_gs
if "Announced Course" in course_gs.member_of_names_list:
for attr in course_gs.attribute_set:
if attr:
for eachk, eachv in attr.items():
context_variables[eachk] = eachv
for rel in course_gs.relation_set:
if rel:
for eachk, eachv in rel.items():
if eachv:
get_node_name = node_collection.one({'_id': eachv[0]})
context_variables[eachk] = get_node_name.name
try:
return render_to_response(
[template, default_template],
context_variables, context_instance=RequestContext(request)
)
except TemplateDoesNotExist as tde:
error_message = "\n CourseCreateEditViewError: This html template (" \
+ str(tde) + ") does not exists !!!\n"
raise Http404(error_message)
except Exception as e:
error_message = "\n CourseCreateEditViewError: " + str(e) + " !!!\n"
raise Exception(error_message)
@login_required
@get_execution_time
def mis_course_detail(request, group_id, app_id=None, app_set_id=None, app_set_instance_id=None, app_name=None):
"""
Detail view of NUSSD Course/ Announced Course
"""
# print "\n Found course_detail n gone inn this...\n\n"
auth = None
if ObjectId.is_valid(group_id) is False:
group_ins = node_collection.one({'_type': "Group", "name": group_id})
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if auth:
group_id = str(auth._id)
else:
pass
app = None
if app_id is None:
app = node_collection.one({'_type': "GSystemType", 'name': app_name})
if app:
app_id = str(app._id)
else:
app = node_collection.one({'_id': ObjectId(app_id)})
app_name = app.name
# app_name = "mis"
app_set = ""
app_collection_set = []
title = ""
course_gst = None
course_gs = None
nodes = None
node = None
property_order_list = []
property_order_list_ac = []
    is_link_needed = True  # Required to show the Link button on the interface that links a Student's/Voluntary Teacher's node with its corresponding Author node
template_prefix = "mis"
context_variables = {}
    # Course structure collection dict
course_collection_dict = {}
course_collection_list = []
course_structure_exists = False
if request.user:
if auth is None:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
if auth:
agency_type = auth.agency_type
agency_type_node = node_collection.one({'_type': "GSystemType", 'name': agency_type}, {'collection_set': 1})
if agency_type_node:
for eachset in agency_type_node.collection_set:
app_collection_set.append(node_collection.one({"_id": eachset}, {'_id': 1, 'name': 1, 'type_of': 1}))
if app_set_id:
course_gst = node_collection.one({'_type': "GSystemType", '_id': ObjectId(app_set_id)}, {'name': 1, 'type_of': 1})
title = course_gst.name
template = "ndf/course_list.html"
if request.method == "POST":
search = request.POST.get("search", "")
classtype = request.POST.get("class", "")
# nodes = list(node_collection.find({'name':{'$regex':search, '$options': 'i'},'member_of': {'$all': [course_gst._id]}}))
nodes = node_collection.find({'member_of': course_gst._id, 'name': {'$regex': search, '$options': 'i'}})
else:
nodes = node_collection.find({'member_of': course_gst._id, 'group_set': ObjectId(group_id)})
if app_set_instance_id:
template = "ndf/course_details.html"
node = node_collection.one({'_type': "GSystem", '_id': ObjectId(app_set_instance_id)})
property_order_list = get_property_order_with_value(node)
node.get_neighbourhood(node.member_of)
if title == u"Announced Course":
property_order_list_ac = node.attribute_set
# Course structure as list of dicts
if node.collection_set:
course_structure_exists = True
context_variables = { 'groupid': group_id, 'group_id': group_id,
'app_id': app_id, 'app_name': app_name, 'app_collection_set': app_collection_set,
'app_set_id': app_set_id,
'course_gst_name': course_gst.name,
'title': title,
'course_structure_exists': course_structure_exists,
'nodes': nodes, 'node': node,
'property_order_list': property_order_list,
'property_order_list_ac': property_order_list_ac,
'is_link_needed': is_link_needed
}
try:
# print "\n template-list: ", [template, default_template]
# template = "ndf/fgh.html"
# default_template = "ndf/dsfjhk.html"
# return render_to_response([template, default_template],
return render_to_response(template,
context_variables,
context_instance = RequestContext(request)
)
except TemplateDoesNotExist as tde:
error_message = "\n CourseDetailListViewError: This html template (" + str(tde) + ") does not exists !!!\n"
raise Http404(error_message)
except Exception as e:
error_message = "\n CourseDetailListViewError: " + str(e) + " !!!\n"
raise Exception(error_message)
@login_required
@get_execution_time
def create_course_struct(request, group_id, node_id):
"""
This view is to create the structure of the Course.
A Course holds CourseSection, which further holds CourseSubSection
in their respective collection_set.
A tree depiction to this is as follows:
Course Name:
1. CourseSection1
1.1. CourseSubSection1
1.2. CourseSubSection2
2. CourseSection2
2.1. CourseSubSection3
"""
ins_objectid = ObjectId()
if ins_objectid.is_valid(group_id) is False:
group_ins = node_collection.find_one({'_type': "Group","name": group_id})
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if auth:
group_id = str(auth._id)
else:
pass
app_id = None
app_set_id = None
property_order_list_cs = []
property_order_list_css = []
course_structure_exists = False
title = "Course Authoring"
course_node = node_collection.one({"_id": ObjectId(node_id)})
cs_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseSection"})
cs_gs = node_collection.collection.GSystem()
cs_gs.member_of.append(cs_gst._id)
property_order_list_cs = get_property_order_with_value(cs_gs)
css_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseSubSection"})
css_gs = node_collection.collection.GSystem()
css_gs.member_of.append(css_gst._id)
property_order_list_css = get_property_order_with_value(css_gs)
course_collection_list = course_node.collection_set
if course_collection_list:
course_structure_exists = True
# for attr in course_node.attribute_set:
# if attr.has_key("evaluation_type"):
# eval_type = attr["evaluation_type"]
    # If evaluation_type flag is True, it is Final. If False, it is Continuous
# if(eval_type==u"Final"):
# eval_type_flag = True
# else:
# eval_type_flag = False
if request.method == "GET":
app_id = request.GET.get("app_id", "")
app_set_id = request.GET.get("app_set_id", "")
return render_to_response("ndf/create_course_structure.html",
{'cnode': course_node,
'groupid': group_id,
'group_id': group_id,
'title': title,
'app_id': app_id, 'app_set_id': app_set_id,
'property_order_list': property_order_list_cs,
'property_order_list_css': property_order_list_css
},
context_instance=RequestContext(request)
)
@login_required
def save_course_section(request, group_id):
'''
Accepts:
* NUSSD Course/Course node _id
* CourseSection name
Actions:
* Creates CourseSection GSystem with name received.
* Appends this new CourseSection node id into
NUSSD Course/Course collection_set
Returns:
* success (i.e True/False)
* ObjectId of CourseSection node
'''
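    # Example round trip (hypothetical ids and route), assuming an AJAX POST:
    #   POST {cs_name: "Week 1", course_node_id: "52f3b..."}
    #   -> '{"success": true, "cs_new_id": "52f3c..."}'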
response_dict = {"success": False}
if request.is_ajax() and request.method == "POST":
cs_node_name = request.POST.get("cs_name", '')
course_node_id = request.POST.get("course_node_id", '')
cs_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseSection"})
cs_new = node_collection.collection.GSystem()
cs_new.member_of.append(cs_gst._id)
cs_new.name = cs_node_name
cs_new.modified_by = int(request.user.id)
cs_new.created_by = int(request.user.id)
cs_new.contributors.append(int(request.user.id))
course_node = node_collection.one({"_id": ObjectId(course_node_id)})
cs_new.prior_node.append(ObjectId(course_node._id))
cs_new.save()
node_collection.collection.update({'_id': course_node._id}, {'$push': {'collection_set': cs_new._id }}, upsert=False, multi=False)
response_dict["success"] = True
response_dict["cs_new_id"] = str(cs_new._id)
return HttpResponse(json.dumps(response_dict))
@login_required
def save_course_sub_section(request, group_id):
'''
Accepts:
* CourseSection node _id
* CourseSubSection name
Actions:
* Creates CourseSubSection GSystem with name received.
* Appends this new CourseSubSection node id into
CourseSection collection_set
Returns:
* success (i.e True/False)
* ObjectId of CourseSubSection node
'''
response_dict = {"success": False}
if request.is_ajax() and request.method == "POST":
css_node_name = request.POST.get("css_name", '')
cs_node_id = request.POST.get("cs_node_id", '')
css_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseSubSection"})
css_new = node_collection.collection.GSystem()
css_new.member_of.append(css_gst._id)
# set name
css_new.name = css_node_name
css_new.modified_by = int(request.user.id)
css_new.created_by = int(request.user.id)
css_new.contributors.append(int(request.user.id))
cs_node = node_collection.one({"_id": ObjectId(cs_node_id)})
css_new.prior_node.append(cs_node._id)
css_new.save()
node_collection.collection.update({'_id': cs_node._id}, {'$push': {'collection_set': css_new._id }}, upsert=False, multi=False)
response_dict["success"] = True
response_dict["css_new_id"] = str(css_new._id)
return HttpResponse(json.dumps(response_dict))
@login_required
def change_node_name(request, group_id):
'''
Accepts:
* CourseSection/ CourseSubSection node _id
* New name for CourseSection node
Actions:
* Updates received node's name
'''
response_dict = {"success": False}
if request.is_ajax() and request.method == "POST":
node_id = request.POST.get("node_id", '')
new_name = request.POST.get("new_name", '')
node = node_collection.one({"_id": ObjectId(node_id)})
node.name = new_name.strip()
node.save()
response_dict["success"] = True
return HttpResponse(json.dumps(response_dict))
@login_required
def change_order(request, group_id):
'''
Accepts:
* 2 node ids.
Basically, either of CourseSection or CourseSubSection
* Parent node id
Either a NUSSD Course/Course or CourseSection
Actions:
* Swaps the 2 node ids in the collection set of received
parent node
'''
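    # For example (hypothetical ids), with parent collection_set [a, b, c],
    # node_id_up=c and node_id_down=b, the collection_set becomes [a, c, b].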
response_dict = {"success": False}
collection_set_list = []
if request.is_ajax() and request.method == "POST":
node_id_up = request.POST.get("node_id_up", '')
node_id_down = request.POST.get("node_id_down", '')
parent_node_id = request.POST.get("parent_node", '')
parent_node = node_collection.one({"_id": ObjectId(parent_node_id)})
collection_set_list = parent_node.collection_set
a, b = collection_set_list.index(ObjectId(node_id_up)), collection_set_list.index(ObjectId(node_id_down))
collection_set_list[b], collection_set_list[a] = collection_set_list[a], collection_set_list[b]
node_collection.collection.update({'_id': parent_node._id}, {'$set': {'collection_set': collection_set_list }}, upsert=False, multi=False)
parent_node.reload()
response_dict["success"] = True
return HttpResponse(json.dumps(response_dict))
@login_required
def course_sub_section_prop(request, group_id):
'''
Accepts:
* CourseSubSection node _id
* Properties dict
Actions:
* Creates GAttributes with the values of received dict
for the respective CourseSubSection node
Returns:
* success (i.e True/False)
* If request.method is POST, all GAttributes in a dict structure,
'''
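    # A typical POST payload might look like (hypothetical values):
    #   prop_dict = '{"course_structure_minutes": 90,
    #                 "course_structure_assignment": "Essay",
    #                 "min_marks": 10, "max_marks": 25}'
    #   assessment_chk = 'true'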
response_dict = {"success": False}
if request.is_ajax():
if request.method == "POST":
assessment_flag = False
css_node_id = request.POST.get("css_node_id", '')
prop_dict = request.POST.get("prop_dict", '')
assessment_chk = json.loads(request.POST.get("assessment_chk", ''))
prop_dict = json.loads(prop_dict)
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
at_cs_hours = node_collection.one({'_type': 'AttributeType', 'name': 'course_structure_minutes'})
at_cs_assessment = node_collection.one({'_type': 'AttributeType', 'name': 'course_structure_assessment'})
at_cs_assignment = node_collection.one({'_type': 'AttributeType', 'name': 'course_structure_assignment'})
at_cs_min_marks = node_collection.one({'_type': 'AttributeType', 'name': 'min_marks'})
at_cs_max_marks = node_collection.one({'_type': 'AttributeType', 'name': 'max_marks'})
if assessment_chk is True:
create_gattribute(css_node._id, at_cs_assessment, True)
assessment_flag = True
for propk, propv in prop_dict.items():
# add attributes to css gs
if(propk == "course_structure_minutes"):
create_gattribute(css_node._id, at_cs_hours, int(propv))
elif(propk == "course_structure_assignment"):
create_gattribute(css_node._id, at_cs_assignment, propv)
if assessment_flag:
if(propk == "min_marks"):
create_gattribute(css_node._id, at_cs_min_marks, int(propv))
if(propk == "max_marks"):
create_gattribute(css_node._id, at_cs_max_marks, int(propv))
css_node.reload()
response_dict["success"] = True
else:
css_node_id = request.GET.get("css_node_id", '')
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
if css_node.attribute_set:
for each in css_node.attribute_set:
for k, v in each.items():
response_dict[k] = v
response_dict["success"] = True
else:
response_dict["success"] = False
return HttpResponse(json.dumps(response_dict))
@login_required
def add_units(request, group_id):
'''
Accepts:
* CourseSubSection node _id
* NUSSD Course/Course node _id
Actions:
* Redirects to course_units.html
'''
variable = None
unit_node = None
css_node_id = request.GET.get('css_node_id', '')
unit_node_id = request.GET.get('unit_node_id', '')
course_node_id = request.GET.get('course_node', '')
app_id = request.GET.get('app_id', '')
app_set_id = request.GET.get('app_set_id', '')
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
course_node = node_collection.one({"_id": ObjectId(course_node_id)})
title = "Course Units"
try:
unit_node = node_collection.one({"_id": ObjectId(unit_node_id)})
except:
unit_node = None
variable = RequestContext(request, {
'group_id': group_id, 'groupid': group_id,
'css_node': css_node,
'title': title,
'app_set_id': app_set_id,
'app_id': app_id,
'unit_node': unit_node,
'course_node': course_node,
})
template = "ndf/course_units.html"
return render_to_response(template, variable)
@login_required
def get_resources(request, group_id):
'''
Accepts:
* Name of GSystemType (Page, File, etc.)
* CourseSubSection node _id
* widget_for
Actions:
* Fetches all GSystems of selected GSystemType as resources
Returns:
* Returns Drawer with resources
'''
response_dict = {'success': False, 'message': ""}
try:
if request.is_ajax() and request.method == "POST":
css_node_id = request.POST.get('css_node_id', "")
unit_node_id = request.POST.get('unit_node_id', "")
widget_for = request.POST.get('widget_for', "")
resource_type = request.POST.get('resource_type', "")
resource_type = resource_type.strip()
list_resources = []
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
try:
unit_node = node_collection.one({"_id": ObjectId(unit_node_id)})
except:
unit_node = None
if resource_type:
if resource_type == "Pandora":
resource_type = "Pandora_video"
resource_gst = node_collection.one({'_type': "GSystemType", 'name': resource_type})
res = node_collection.find(
{
'member_of': resource_gst._id,
'group_set': ObjectId(group_id),
'status': u"PUBLISHED"
}
)
for each in res:
list_resources.append(each)
drawer_template_context = edit_drawer_widget("CourseUnits", group_id, unit_node, None, checked="collection_set", left_drawer_content=list_resources)
drawer_template_context["widget_for"] = widget_for
drawer_widget = render_to_string(
'ndf/drawer_widget.html',
drawer_template_context,
context_instance=RequestContext(request)
)
return HttpResponse(drawer_widget)
else:
error_message = "Resource Drawer: Either not an ajax call or not a POST request!!!"
response_dict["message"] = error_message
return HttpResponse(json.dumps(response_dict))
except Exception as e:
error_message = "Resource Drawer: " + str(e) + "!!!"
response_dict["message"] = error_message
return HttpResponse(json.dumps(response_dict))
@login_required
def save_resources(request, group_id):
'''
Accepts:
* List of resources (i.e GSystem of Page, File, etc.)
* CourseSubSection node _id
Actions:
* Sets the received resources in respective node's collection_set
'''
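    # list_of_res arrives JSON-encoded, e.g. '["52f3b...", "52f3c..."]'
    # (hypothetical resource ids).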
response_dict = {"success": False,"create_new_unit": True}
if request.is_ajax() and request.method == "POST":
list_of_res = json.loads(request.POST.get('list_of_res', ""))
css_node_id = request.POST.get('css_node', "")
unit_name = request.POST.get('unit_name', "")
unit_name = unit_name.strip()
unit_node_id = request.POST.get('unit_node_id', "")
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
list_of_res_ids = [ObjectId(each_res) for each_res in list_of_res]
try:
cu_new = node_collection.one({'_id': ObjectId(unit_node_id)})
except:
cu_new = None
if not cu_new:
cu_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseUnit"})
cu_new = node_collection.collection.GSystem()
cu_new.member_of.append(cu_gst._id)
# set name
cu_new.name = unit_name.strip()
cu_new.modified_by = int(request.user.id)
cu_new.created_by = int(request.user.id)
cu_new.contributors.append(int(request.user.id))
cu_new.prior_node.append(css_node._id)
cu_new.save()
response_dict["create_new_unit"] = True
node_collection.collection.update({'_id': cu_new._id}, {'$set': {'name': unit_name }}, upsert=False, multi=False)
if cu_new._id not in css_node.collection_set:
node_collection.collection.update({'_id': css_node._id}, {'$push': {'collection_set': cu_new._id }}, upsert=False, multi=False)
        node_collection.collection.update({'_id': cu_new._id}, {'$set': {'collection_set': list_of_res_ids}}, upsert=False, multi=False)
cu_new.reload()
response_dict["success"] = True
response_dict["cu_new_id"] = str(cu_new._id)
return HttpResponse(json.dumps(response_dict))
@login_required
def create_edit_unit(request, group_id):
'''
Accepts:
* ObjectId of unit node if exists
* ObjectId of CourseSubSection node
Actions:
* Creates/Updates Unit node
Returns:
* success (i.e True/False)
'''
response_dict = {"success": False}
if request.is_ajax() and request.method == "POST":
css_node_id = request.POST.get("css_node_id", '')
unit_node_id = request.POST.get("unit_node_id", '')
unit_name = request.POST.get("unit_name", '')
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
try:
cu_node = node_collection.one({'_id': ObjectId(unit_node_id)})
except:
cu_node = None
if cu_node is None:
cu_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseUnit"})
cu_node = node_collection.collection.GSystem()
cu_node.member_of.append(cu_gst._id)
# set name
cu_node.name = unit_name.strip()
cu_node.modified_by = int(request.user.id)
cu_node.created_by = int(request.user.id)
cu_node.contributors.append(int(request.user.id))
cu_node.prior_node.append(css_node._id)
cu_node.save()
response_dict["unit_node_id"] = str(cu_node._id)
node_collection.collection.update({'_id': cu_node._id}, {'$set': {'name': unit_name}}, upsert=False, multi=False)
if cu_node._id not in css_node.collection_set:
node_collection.collection.update({'_id': css_node._id}, {'$push': {'collection_set': cu_node._id}}, upsert=False, multi=False)
return HttpResponse(json.dumps(response_dict))
@login_required
def delete_from_course_structure(request, group_id):
'''
Accepts:
* ObjectId of node that is to be deleted.
It can be CourseSection/CourseSubSection/CourseUnit
Actions:
* Deletes the received node
Returns:
* success (i.e True/False)
'''
response_dict = {"success": False}
del_stat = False
if request.is_ajax() and request.method == "POST":
oid = request.POST.get("oid", '')
del_stat = delete_item(oid)
if del_stat:
response_dict["success"] = True
return HttpResponse(json.dumps(response_dict))
def delete_item(item):
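    # Recursively delete node_item and everything in its collection_set;
    # the descent stops at CourseUnit nodes, which are deleted as leaves.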
node_item = node_collection.one({'_id': ObjectId(item)})
if u"CourseUnit" not in node_item.member_of_names_list and node_item.collection_set:
for each in node_item.collection_set:
            delete_item(each)
del_status, del_status_msg = delete_node(
node_id=node_item._id,
deletion_type=0
)
return del_status
@login_required
def publish_course(request, group_id):
if request.is_ajax() and request.method == "POST":
try:
node_id = request.POST.get("node_id", "")
node = node_collection.one({'_id': ObjectId(node_id)})
node.status = unicode("PUBLISHED")
node.modified_by = int(request.user.id)
node.save()
except:
return HttpResponse("Fail")
return HttpResponse("Success")
|
sunnychaudhari/gstudio
|
gnowsys-ndf/gnowsys_ndf/ndf/views/course.py
|
Python
|
agpl-3.0
| 54,618
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('search', '0008_auto_20151117_1526'),
]
operations = [
migrations.AlterField(
model_name='docket',
name='slug',
field=models.SlugField(help_text=b'URL that the document should map to (the slug)', max_length=75, null=True, db_index=False),
),
migrations.AlterField(
model_name='opinioncluster',
name='slug',
field=models.SlugField(help_text=b'URL that the document should map to (the slug)', max_length=75, null=True, db_index=False),
),
]
|
brianwc/courtlistener
|
cl/search/migrations/0009_auto_20151210_1124.py
|
Python
|
agpl-3.0
| 734
|
"""
Answers a previous Error message with another Error message.
"""
import random
from src import Msg
from src import SomeIPPacket
from src.attacks import AttackerHelper
def sendErrorOnError(a, msgOrig):
""" Attack Specific Function. """
sender = msgOrig.receiver
receiver = msgOrig.sender
timestamp = None
message = {}
message['service'] = msgOrig.message['service']
message['method'] = msgOrig.message['method']
message['client'] = msgOrig.message['client']
message['session'] = msgOrig.message['session']
message['proto'] = SomeIPPacket.VERSION
message['iface'] = SomeIPPacket.INTERFACE
message['type'] = SomeIPPacket.messageTypes['ERROR']
errors = ['E_NOT_OK', 'E_NOT_READY', 'E_NOT_REACHABLE', 'E_TIMEOUT', 'E_MALFORMED_MESSAGE']
message['ret'] = SomeIPPacket.errorCodes[random.choice(errors)]
msg = Msg.Msg(sender, receiver, message, timestamp)
return msg
def doAttack(curAttack, msgOrig, a, attacksSuc):
""" Generic Function called from Attacker module. """
RetVal = {}
if a.verbose:
print ('Send Error On Error Attack')
if msgOrig.message['type'] == SomeIPPacket.messageTypes['ERROR']:
msg = sendErrorOnError(a, msgOrig)
if a.verbose:
print ('MALICIOUS MSG: ', msg.message, ' FROM=', msg.sender, ' TO=', msg.receiver)
RetVal['msg'] = msg
RetVal['attackOngoing'] = False
RetVal['dropMsg'] = False
RetVal['counter'] = attacksSuc + 1
else:
RetVal['msg'] = None
RetVal['attackOngoing'] = True
RetVal['dropMsg'] = False
RetVal['counter'] = attacksSuc
return RetVal
|
Egomania/SOME-IP_Generator
|
src/attacks/sendErrorOnError.py
|
Python
|
agpl-3.0
| 1,693
|
# Copyright 2014-2015 Tecnativa S.L. - Jairo Llopis
# Copyright 2016 Tecnativa S.L. - Vicent Cubells
# Copyright 2017 Tecnativa S.L. - David Vidal
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
{
"name": "Partner Contact Department",
"summary": "Assign contacts to departments",
"version": "14.0.1.0.1",
"category": "Customer Relationship Management",
"author": "Tecnativa, Odoo Community Association (OCA)",
"license": "AGPL-3",
"website": "https://github.com/OCA/partner-contact",
"application": False,
"depends": ["contacts"],
"data": [
"security/ir.model.access.csv",
"views/res_partner_department_view.xml",
"views/res_partner_view.xml",
],
"installable": True,
}
|
OCA/partner-contact
|
partner_contact_department/__manifest__.py
|
Python
|
agpl-3.0
| 765
|
from .common import *
INTERNAL_IPS = ['127.0.0.1', ]
CORS_ORIGIN_WHITELIST = (
'localhost:8000',
)
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
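# django-q task cluster settings, using the Django ORM as the broker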
Q_CLUSTER = {
'name': 'DjangORM',
'workers': 2,
'timeout': 90,
'retry': 120,
'queue_limit': 50,
'bulk': 10,
'orm': 'default',
'catch_up': False # do not replay missed schedules past
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'reminders': {
'handlers': ['console'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
},
'messages': {
'handlers': ['console'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
},
},
}
|
spacedogXYZ/sms_checkin
|
sms_checkin/settings/development.py
|
Python
|
agpl-3.0
| 883
|
"""Tests covering utilities for integrating with the catalog service."""
# pylint: disable=missing-docstring
from __future__ import absolute_import
from collections import defaultdict
from datetime import timedelta
import mock
import six
from django.contrib.auth import get_user_model
from django.core.cache import cache
from django.test import TestCase, override_settings
from django.test.client import RequestFactory
from django.utils.timezone import now
from opaque_keys.edx.keys import CourseKey
from course_modes.helpers import CourseMode
from course_modes.tests.factories import CourseModeFactory
from entitlements.tests.factories import CourseEntitlementFactory
from openedx.core.constants import COURSE_UNPUBLISHED
from openedx.core.djangoapps.catalog.cache import (
COURSE_PROGRAMS_CACHE_KEY_TPL,
PATHWAY_CACHE_KEY_TPL,
PROGRAM_CACHE_KEY_TPL,
PROGRAMS_BY_TYPE_CACHE_KEY_TPL,
SITE_PATHWAY_IDS_CACHE_KEY_TPL,
SITE_PROGRAM_UUIDS_CACHE_KEY_TPL
)
from openedx.core.djangoapps.catalog.models import CatalogIntegration
from openedx.core.djangoapps.catalog.tests.factories import (
CourseFactory,
CourseRunFactory,
PathwayFactory,
ProgramFactory,
ProgramTypeFactory
)
from openedx.core.djangoapps.catalog.tests.mixins import CatalogIntegrationMixin
from openedx.core.djangoapps.catalog.utils import (
child_programs,
course_run_keys_for_program,
is_course_run_in_program,
get_course_run_details,
get_course_runs,
get_course_runs_for_course,
get_currency_data,
get_localized_price_text,
get_owners_for_course,
get_pathways,
get_program_types,
get_programs,
get_programs_by_type,
get_visible_sessions_for_entitlement,
normalize_program_type,
)
from openedx.core.djangoapps.content.course_overviews.tests.factories import CourseOverviewFactory
from openedx.core.djangoapps.site_configuration.tests.factories import SiteFactory
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase, skip_unless_lms
from student.tests.factories import CourseEnrollmentFactory, UserFactory
UTILS_MODULE = 'openedx.core.djangoapps.catalog.utils'
User = get_user_model() # pylint: disable=invalid-name
@skip_unless_lms
@mock.patch(UTILS_MODULE + '.logger.info')
@mock.patch(UTILS_MODULE + '.logger.warning')
class TestGetPrograms(CacheIsolationTestCase):
ENABLED_CACHES = ['default']
def setUp(self):
super(TestGetPrograms, self).setUp()
self.site = SiteFactory()
def test_get_many(self, mock_warning, mock_info):
programs = ProgramFactory.create_batch(3)
# Cache details for 2 of 3 programs.
partial_programs = {
PROGRAM_CACHE_KEY_TPL.format(uuid=program['uuid']): program for program in programs[:2]
}
cache.set_many(partial_programs, None)
# When called before UUIDs are cached, the function should return an
# empty list and log a warning.
self.assertEqual(get_programs(site=self.site), [])
mock_warning.assert_called_once_with(
u'Failed to get program UUIDs from the cache for site {}.'.format(self.site.domain)
)
mock_warning.reset_mock()
# Cache UUIDs for all 3 programs.
cache.set(
SITE_PROGRAM_UUIDS_CACHE_KEY_TPL.format(domain=self.site.domain),
[program['uuid'] for program in programs],
None
)
actual_programs = get_programs(site=self.site)
# The 2 cached programs should be returned while info and warning
# messages should be logged for the missing one.
self.assertEqual(
set(program['uuid'] for program in actual_programs),
set(program['uuid'] for program in partial_programs.values())
)
mock_info.assert_called_with('Failed to get details for 1 programs. Retrying.')
mock_warning.assert_called_with(
u'Failed to get details for program {uuid} from the cache.'.format(uuid=programs[2]['uuid'])
)
mock_warning.reset_mock()
# We can't use a set comparison here because these values are dictionaries
# and aren't hashable. We've already verified that all programs came out
# of the cache above, so all we need to do here is verify the accuracy of
# the data itself.
for program in actual_programs:
key = PROGRAM_CACHE_KEY_TPL.format(uuid=program['uuid'])
self.assertEqual(program, partial_programs[key])
# Cache details for all 3 programs.
all_programs = {
PROGRAM_CACHE_KEY_TPL.format(uuid=program['uuid']): program for program in programs
}
cache.set_many(all_programs, None)
actual_programs = get_programs(site=self.site)
# All 3 programs should be returned.
self.assertEqual(
set(program['uuid'] for program in actual_programs),
set(program['uuid'] for program in all_programs.values())
)
self.assertFalse(mock_warning.called)
for program in actual_programs:
key = PROGRAM_CACHE_KEY_TPL.format(uuid=program['uuid'])
self.assertEqual(program, all_programs[key])
@mock.patch(UTILS_MODULE + '.cache')
def test_get_many_with_missing(self, mock_cache, mock_warning, mock_info):
programs = ProgramFactory.create_batch(3)
all_programs = {
PROGRAM_CACHE_KEY_TPL.format(uuid=program['uuid']): program for program in programs
}
partial_programs = {
PROGRAM_CACHE_KEY_TPL.format(uuid=program['uuid']): program for program in programs[:2]
}
def fake_get_many(keys):
if len(keys) == 1:
return {PROGRAM_CACHE_KEY_TPL.format(uuid=programs[-1]['uuid']): programs[-1]}
else:
return partial_programs
mock_cache.get.return_value = [program['uuid'] for program in programs]
mock_cache.get_many.side_effect = fake_get_many
actual_programs = get_programs(site=self.site)
# All 3 cached programs should be returned. An info message should be
# logged about the one that was initially missing, but the code should
# be able to stitch together all the details.
self.assertEqual(
set(program['uuid'] for program in actual_programs),
set(program['uuid'] for program in all_programs.values())
)
self.assertFalse(mock_warning.called)
mock_info.assert_called_with('Failed to get details for 1 programs. Retrying.')
for program in actual_programs:
key = PROGRAM_CACHE_KEY_TPL.format(uuid=program['uuid'])
self.assertEqual(program, all_programs[key])
def test_get_one(self, mock_warning, _mock_info):
expected_program = ProgramFactory()
expected_uuid = expected_program['uuid']
self.assertEqual(get_programs(uuid=expected_uuid), None)
mock_warning.assert_called_once_with(
u'Failed to get details for program {uuid} from the cache.'.format(uuid=expected_uuid)
)
mock_warning.reset_mock()
cache.set(
PROGRAM_CACHE_KEY_TPL.format(uuid=expected_uuid),
expected_program,
None
)
actual_program = get_programs(uuid=expected_uuid)
self.assertEqual(actual_program, expected_program)
self.assertFalse(mock_warning.called)
def test_get_from_course(self, mock_warning, _mock_info):
expected_program = ProgramFactory()
expected_course = expected_program['courses'][0]['course_runs'][0]['key']
self.assertEqual(get_programs(course=expected_course), [])
cache.set(
COURSE_PROGRAMS_CACHE_KEY_TPL.format(course_run_id=expected_course),
[expected_program['uuid']],
None
)
cache.set(
PROGRAM_CACHE_KEY_TPL.format(uuid=expected_program['uuid']),
expected_program,
None
)
actual_program = get_programs(course=expected_course)
self.assertEqual(actual_program, [expected_program])
self.assertFalse(mock_warning.called)
def test_get_via_uuids(self, mock_warning, _mock_info):
first_program = ProgramFactory()
second_program = ProgramFactory()
cache.set(
PROGRAM_CACHE_KEY_TPL.format(uuid=first_program['uuid']),
first_program,
None
)
cache.set(
PROGRAM_CACHE_KEY_TPL.format(uuid=second_program['uuid']),
second_program,
None
)
results = get_programs(uuids=[first_program['uuid'], second_program['uuid']])
assert first_program in results
assert second_program in results
assert not mock_warning.called
@skip_unless_lms
@mock.patch(UTILS_MODULE + '.logger.info')
@mock.patch(UTILS_MODULE + '.logger.warning')
class TestGetPathways(CacheIsolationTestCase):
ENABLED_CACHES = ['default']
def setUp(self):
super(TestGetPathways, self).setUp()
self.site = SiteFactory()
def test_get_many(self, mock_warning, mock_info):
pathways = PathwayFactory.create_batch(3)
# Cache details for 2 of 3 programs.
partial_pathways = {
PATHWAY_CACHE_KEY_TPL.format(id=pathway['id']): pathway for pathway in pathways[:2]
}
cache.set_many(partial_pathways, None)
# When called before pathways are cached, the function should return an
# empty list and log a warning.
self.assertEqual(get_pathways(self.site), [])
mock_warning.assert_called_once_with('Failed to get credit pathway ids from the cache.')
mock_warning.reset_mock()
# Cache all 3 pathways
cache.set(
SITE_PATHWAY_IDS_CACHE_KEY_TPL.format(domain=self.site.domain),
[pathway['id'] for pathway in pathways],
None
)
actual_pathways = get_pathways(self.site)
# The 2 cached pathways should be returned while info and warning
# messages should be logged for the missing one.
self.assertEqual(
set(pathway['id'] for pathway in actual_pathways),
set(pathway['id'] for pathway in partial_pathways.values())
)
mock_info.assert_called_with('Failed to get details for 1 pathways. Retrying.')
mock_warning.assert_called_with(
u'Failed to get details for credit pathway {id} from the cache.'.format(id=pathways[2]['id'])
)
mock_warning.reset_mock()
# We can't use a set comparison here because these values are dictionaries
# and aren't hashable. We've already verified that all pathways came out
# of the cache above, so all we need to do here is verify the accuracy of
# the data itself.
for pathway in actual_pathways:
key = PATHWAY_CACHE_KEY_TPL.format(id=pathway['id'])
self.assertEqual(pathway, partial_pathways[key])
# Cache details for all 3 pathways.
all_pathways = {
PATHWAY_CACHE_KEY_TPL.format(id=pathway['id']): pathway for pathway in pathways
}
cache.set_many(all_pathways, None)
actual_pathways = get_pathways(self.site)
# All 3 pathways should be returned.
self.assertEqual(
set(pathway['id'] for pathway in actual_pathways),
set(pathway['id'] for pathway in all_pathways.values())
)
self.assertFalse(mock_warning.called)
for pathway in actual_pathways:
key = PATHWAY_CACHE_KEY_TPL.format(id=pathway['id'])
self.assertEqual(pathway, all_pathways[key])
@mock.patch(UTILS_MODULE + '.cache')
def test_get_many_with_missing(self, mock_cache, mock_warning, mock_info):
pathways = PathwayFactory.create_batch(3)
all_pathways = {
PATHWAY_CACHE_KEY_TPL.format(id=pathway['id']): pathway for pathway in pathways
}
partial_pathways = {
PATHWAY_CACHE_KEY_TPL.format(id=pathway['id']): pathway for pathway in pathways[:2]
}
def fake_get_many(keys):
if len(keys) == 1:
return {PATHWAY_CACHE_KEY_TPL.format(id=pathways[-1]['id']): pathways[-1]}
else:
return partial_pathways
mock_cache.get.return_value = [pathway['id'] for pathway in pathways]
mock_cache.get_many.side_effect = fake_get_many
actual_pathways = get_pathways(self.site)
# All 3 cached pathways should be returned. An info message should be
# logged about the one that was initially missing, but the code should
# be able to stitch together all the details.
self.assertEqual(
set(pathway['id'] for pathway in actual_pathways),
set(pathway['id'] for pathway in all_pathways.values())
)
self.assertFalse(mock_warning.called)
mock_info.assert_called_with('Failed to get details for 1 pathways. Retrying.')
for pathway in actual_pathways:
key = PATHWAY_CACHE_KEY_TPL.format(id=pathway['id'])
self.assertEqual(pathway, all_pathways[key])
def test_get_one(self, mock_warning, _mock_info):
expected_pathway = PathwayFactory()
expected_id = expected_pathway['id']
self.assertEqual(get_pathways(self.site, pathway_id=expected_id), None)
mock_warning.assert_called_once_with(
u'Failed to get details for credit pathway {id} from the cache.'.format(id=expected_id)
)
mock_warning.reset_mock()
cache.set(
PATHWAY_CACHE_KEY_TPL.format(id=expected_id),
expected_pathway,
None
)
actual_pathway = get_pathways(self.site, pathway_id=expected_id)
self.assertEqual(actual_pathway, expected_pathway)
self.assertFalse(mock_warning.called)
@mock.patch(UTILS_MODULE + '.get_edx_api_data')
class TestGetProgramTypes(CatalogIntegrationMixin, TestCase):
"""Tests covering retrieval of program types from the catalog service."""
@override_settings(COURSE_CATALOG_API_URL='https://api.example.com/v1/')
def test_get_program_types(self, mock_get_edx_api_data):
"""Verify get_program_types returns the expected list of program types."""
program_types = ProgramTypeFactory.create_batch(3)
mock_get_edx_api_data.return_value = program_types
# Catalog integration is disabled.
data = get_program_types()
self.assertEqual(data, [])
catalog_integration = self.create_catalog_integration()
UserFactory(username=catalog_integration.service_username)
data = get_program_types()
self.assertEqual(data, program_types)
program = program_types[0]
data = get_program_types(name=program['name'])
self.assertEqual(data, program)
@mock.patch(UTILS_MODULE + '.get_edx_api_data')
class TestGetCurrency(CatalogIntegrationMixin, TestCase):
"""Tests covering retrieval of currency data from the catalog service."""
@override_settings(COURSE_CATALOG_API_URL='https://api.example.com/v1/')
def test_get_currency_data(self, mock_get_edx_api_data):
"""Verify get_currency_data returns the currency data."""
currency_data = {
"code": "CAD",
"rate": 1.257237,
"symbol": "$"
}
mock_get_edx_api_data.return_value = currency_data
# Catalog integration is disabled.
data = get_currency_data()
self.assertEqual(data, [])
catalog_integration = self.create_catalog_integration()
UserFactory(username=catalog_integration.service_username)
data = get_currency_data()
self.assertEqual(data, currency_data)
@mock.patch(UTILS_MODULE + '.get_currency_data')
class TestGetLocalizedPriceText(TestCase):
"""
Tests covering converting prices to a localized currency
"""
def test_localized_string(self, mock_get_currency_data):
currency_data = {
"BEL": {"rate": 0.835621, "code": "EUR", "symbol": u"\u20ac"},
"GBR": {"rate": 0.737822, "code": "GBP", "symbol": u"\u00a3"},
"CAN": {"rate": 2, "code": "CAD", "symbol": "$"},
}
mock_get_currency_data.return_value = currency_data
request = RequestFactory().get('/dummy-url')
request.session = {
'country_code': 'CA'
}
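        # With the CAN rate of 2 above, a base price of 10 converts to '$20 CAD'.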
expected_result = '$20 CAD'
self.assertEqual(get_localized_price_text(10, request), expected_result)
@skip_unless_lms
@mock.patch(UTILS_MODULE + '.get_edx_api_data')
class TestGetCourseRuns(CatalogIntegrationMixin, TestCase):
"""
Tests covering retrieval of course runs from the catalog service.
"""
def setUp(self):
super(TestGetCourseRuns, self).setUp()
self.catalog_integration = self.create_catalog_integration(cache_ttl=1)
self.user = UserFactory(username=self.catalog_integration.service_username)
def assert_contract(self, call_args):
"""
Verify that API data retrieval utility is used correctly.
"""
args, kwargs = call_args
for arg in (self.catalog_integration, 'course_runs'):
self.assertIn(arg, args)
self.assertEqual(kwargs['api']._store['base_url'], self.catalog_integration.get_internal_api_url()) # pylint: disable=protected-access
querystring = {
'page_size': 20,
'exclude_utm': 1,
}
self.assertEqual(kwargs['querystring'], querystring)
return args, kwargs
def test_config_missing(self, mock_get_edx_api_data):
"""
Verify that no errors occur when catalog config is missing.
"""
CatalogIntegration.objects.all().delete()
data = get_course_runs()
self.assertFalse(mock_get_edx_api_data.called)
self.assertEqual(data, [])
@mock.patch(UTILS_MODULE + '.logger.error')
def test_service_user_missing(self, mock_log_error, mock_get_edx_api_data):
"""
Verify that no errors occur when the catalog service user is missing.
"""
catalog_integration = self.create_catalog_integration(service_username='nonexistent-user')
data = get_course_runs()
mock_log_error.any_call(
u'Catalog service user with username [%s] does not exist. Course runs will not be retrieved.',
catalog_integration.service_username,
)
self.assertFalse(mock_get_edx_api_data.called)
self.assertEqual(data, [])
def test_get_course_runs(self, mock_get_edx_api_data):
"""
Test retrieval of course runs.
"""
catalog_course_runs = CourseRunFactory.create_batch(10)
mock_get_edx_api_data.return_value = catalog_course_runs
data = get_course_runs()
self.assertTrue(mock_get_edx_api_data.called)
self.assert_contract(mock_get_edx_api_data.call_args)
self.assertEqual(data, catalog_course_runs)
def test_get_course_runs_by_course(self, mock_get_edx_api_data):
"""
        Test retrieval of course runs for a course.
"""
catalog_course_runs = CourseRunFactory.create_batch(10)
catalog_course = CourseFactory(course_runs=catalog_course_runs)
mock_get_edx_api_data.return_value = catalog_course
data = get_course_runs_for_course(course_uuid=str(catalog_course['uuid']))
self.assertTrue(mock_get_edx_api_data.called)
self.assertEqual(data, catalog_course_runs)
@skip_unless_lms
@mock.patch(UTILS_MODULE + '.get_edx_api_data')
class TestGetCourseOwners(CatalogIntegrationMixin, TestCase):
"""
    Tests covering retrieval of course owners from the catalog service.
"""
def setUp(self):
super(TestGetCourseOwners, self).setUp()
self.catalog_integration = self.create_catalog_integration(cache_ttl=1)
self.user = UserFactory(username=self.catalog_integration.service_username)
def test_get_course_owners_by_course(self, mock_get_edx_api_data):
"""
        Test retrieval of course owners for a course.
"""
catalog_course_runs = CourseRunFactory.create_batch(10)
catalog_course = CourseFactory(course_runs=catalog_course_runs)
mock_get_edx_api_data.return_value = catalog_course
data = get_owners_for_course(course_uuid=str(catalog_course['uuid']))
self.assertTrue(mock_get_edx_api_data.called)
self.assertEqual(data, catalog_course['owners'])
@skip_unless_lms
@mock.patch(UTILS_MODULE + '.get_edx_api_data')
class TestSessionEntitlement(CatalogIntegrationMixin, TestCase):
"""
    Tests covering data related to entitlements.
"""
def setUp(self):
super(TestSessionEntitlement, self).setUp()
self.catalog_integration = self.create_catalog_integration(cache_ttl=1)
self.user = UserFactory(username=self.catalog_integration.service_username)
self.tomorrow = now() + timedelta(days=1)
def test_get_visible_sessions_for_entitlement(self, mock_get_edx_api_data):
"""
Test retrieval of visible session entitlements.
"""
catalog_course_run = CourseRunFactory.create()
catalog_course = CourseFactory(course_runs=[catalog_course_run])
mock_get_edx_api_data.return_value = catalog_course
course_key = CourseKey.from_string(catalog_course_run.get('key'))
course_overview = CourseOverviewFactory.create(id=course_key, start=self.tomorrow)
CourseModeFactory.create(mode_slug=CourseMode.VERIFIED, min_price=100, course_id=course_overview.id)
course_enrollment = CourseEnrollmentFactory(
user=self.user, course_id=six.text_type(course_overview.id), mode=CourseMode.VERIFIED
)
entitlement = CourseEntitlementFactory(
user=self.user, enrollment_course_run=course_enrollment, mode=CourseMode.VERIFIED
)
session_entitlements = get_visible_sessions_for_entitlement(entitlement)
self.assertEqual(session_entitlements, [catalog_course_run])
def test_get_visible_sessions_for_entitlement_expired_mode(self, mock_get_edx_api_data):
"""
        Test retrieval of visible session entitlements when the course mode has expired.
"""
catalog_course_run = CourseRunFactory.create()
catalog_course = CourseFactory(course_runs=[catalog_course_run])
mock_get_edx_api_data.return_value = catalog_course
course_key = CourseKey.from_string(catalog_course_run.get('key'))
course_overview = CourseOverviewFactory.create(id=course_key, start=self.tomorrow)
CourseModeFactory.create(
mode_slug=CourseMode.VERIFIED,
min_price=100,
course_id=course_overview.id,
expiration_datetime=now() - timedelta(days=1)
)
course_enrollment = CourseEnrollmentFactory(
user=self.user, course_id=six.text_type(course_overview.id), mode=CourseMode.VERIFIED
)
entitlement = CourseEntitlementFactory(
user=self.user, enrollment_course_run=course_enrollment, mode=CourseMode.VERIFIED
)
session_entitlements = get_visible_sessions_for_entitlement(entitlement)
self.assertEqual(session_entitlements, [catalog_course_run])
def test_unpublished_sessions_for_entitlement_when_enrolled(self, mock_get_edx_api_data):
"""
Test unpublished course runs are part of visible session entitlements when the user
is enrolled.
"""
catalog_course_run = CourseRunFactory.create(status=COURSE_UNPUBLISHED)
catalog_course = CourseFactory(course_runs=[catalog_course_run])
mock_get_edx_api_data.return_value = catalog_course
course_key = CourseKey.from_string(catalog_course_run.get('key'))
course_overview = CourseOverviewFactory.create(id=course_key, start=self.tomorrow)
CourseModeFactory.create(
mode_slug=CourseMode.VERIFIED,
min_price=100,
course_id=course_overview.id,
expiration_datetime=now() - timedelta(days=1)
)
course_enrollment = CourseEnrollmentFactory(
user=self.user, course_id=six.text_type(course_overview.id), mode=CourseMode.VERIFIED
)
entitlement = CourseEntitlementFactory(
user=self.user, enrollment_course_run=course_enrollment, mode=CourseMode.VERIFIED
)
session_entitlements = get_visible_sessions_for_entitlement(entitlement)
self.assertEqual(session_entitlements, [catalog_course_run])
def test_unpublished_sessions_for_entitlement(self, mock_get_edx_api_data):
"""
Test unpublished course runs are not part of visible session entitlements when the user
is not enrolled.
"""
catalog_course_run = CourseRunFactory.create(status=COURSE_UNPUBLISHED)
catalog_course = CourseFactory(course_runs=[catalog_course_run])
mock_get_edx_api_data.return_value = catalog_course
course_key = CourseKey.from_string(catalog_course_run.get('key'))
course_overview = CourseOverviewFactory.create(id=course_key, start=self.tomorrow)
CourseModeFactory.create(mode_slug=CourseMode.VERIFIED, min_price=100, course_id=course_overview.id)
entitlement = CourseEntitlementFactory(
user=self.user, mode=CourseMode.VERIFIED
)
session_entitlements = get_visible_sessions_for_entitlement(entitlement)
self.assertEqual(session_entitlements, [])
@skip_unless_lms
@mock.patch(UTILS_MODULE + '.get_edx_api_data')
class TestGetCourseRunDetails(CatalogIntegrationMixin, TestCase):
"""
Tests covering retrieval of information about a specific course run from the catalog service.
"""
def setUp(self):
super(TestGetCourseRunDetails, self).setUp()
self.catalog_integration = self.create_catalog_integration(cache_ttl=1)
self.user = UserFactory(username=self.catalog_integration.service_username)
def test_get_course_run_details(self, mock_get_edx_api_data):
"""
Test retrieval of details about a specific course run
"""
course_run = CourseRunFactory()
course_run_details = {
'content_language': course_run['content_language'],
'weeks_to_complete': course_run['weeks_to_complete'],
'max_effort': course_run['max_effort']
}
mock_get_edx_api_data.return_value = course_run_details
data = get_course_run_details(course_run['key'], ['content_language', 'weeks_to_complete', 'max_effort'])
self.assertTrue(mock_get_edx_api_data.called)
self.assertEqual(data, course_run_details)
class TestProgramCourseRunCrawling(TestCase):
@classmethod
def setUpClass(cls):
super(TestProgramCourseRunCrawling, cls).setUpClass()
cls.grandchild_1 = {
'title': 'grandchild 1',
'curricula': [{'is_active': True, 'courses': [], 'programs': []}],
}
cls.grandchild_2 = {
'title': 'grandchild 2',
'curricula': [
{
'is_active': True,
'courses': [{
'course_runs': [
{'key': 'course-run-4'},
],
}],
'programs': [],
},
],
}
cls.grandchild_3 = {
'title': 'grandchild 3',
'curricula': [{'is_active': False}],
}
cls.child_1 = {
'title': 'child 1',
'curricula': [{'is_active': True, 'courses': [], 'programs': [cls.grandchild_1]}],
}
cls.child_2 = {
'title': 'child 2',
'curricula': [
{
'is_active': True,
'courses': [{
'course_runs': [
{'key': 'course-run-3'},
],
}],
'programs': [cls.grandchild_2, cls.grandchild_3],
},
],
}
cls.complex_program = {
'title': 'complex program',
'curricula': [
{
'is_active': True,
'courses': [{
'course_runs': [
{'key': 'course-run-2'},
],
}],
'programs': [cls.child_1, cls.child_2],
},
],
}
cls.simple_program = {
'title': 'simple program',
'curricula': [
{
'is_active': True,
'courses': [{
'course_runs': [
{'key': 'course-run-1'},
],
}],
'programs': [cls.grandchild_1]
},
],
}
cls.empty_program = {
'title': 'notice that I have a curriculum, but no programs inside it',
'curricula': [
{
'is_active': True,
'courses': [],
'programs': [],
},
],
}
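    # Fixture hierarchy built above, for reference:
    #   complex_program: course-run-2; children: child_1, child_2
    #     child_1 -> grandchild_1
    #     child_2: course-run-3; children: grandchild_2 (course-run-4), grandchild_3 (inactive)
    #   simple_program: course-run-1; child: grandchild_1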
def test_child_programs_no_curriculum(self):
program = {
'title': 'notice that I do not have a curriculum',
}
self.assertEqual([], child_programs(program))
def test_child_programs_no_children(self):
self.assertEqual([], child_programs(self.empty_program))
def test_child_programs_one_child(self):
self.assertEqual([self.grandchild_1], child_programs(self.simple_program))
def test_child_programs_many_children(self):
expected_children = [
self.child_1,
self.grandchild_1,
self.child_2,
self.grandchild_2,
self.grandchild_3,
]
self.assertEqual(expected_children, child_programs(self.complex_program))
def test_course_run_keys_for_program_no_courses(self):
self.assertEqual(set(), course_run_keys_for_program(self.empty_program))
def test_course_run_keys_for_program_one_course(self):
self.assertEqual({'course-run-1'}, course_run_keys_for_program(self.simple_program))
def test_course_run_keys_for_program_many_courses(self):
expected_course_runs = {
'course-run-2',
'course-run-3',
'course-run-4',
}
self.assertEqual(expected_course_runs, course_run_keys_for_program(self.complex_program))
def test_is_course_run_in_program(self):
self.assertTrue(is_course_run_in_program('course-run-4', self.complex_program))
self.assertFalse(is_course_run_in_program('course-run-5', self.complex_program))
self.assertFalse(is_course_run_in_program('course-run-4', self.simple_program))
@skip_unless_lms
class TestGetProgramsByType(CacheIsolationTestCase):
""" Test for the ``get_programs_by_type()`` function. """
ENABLED_CACHES = ['default']
@classmethod
def setUpClass(cls):
""" Sets up program data. """
super(TestGetProgramsByType, cls).setUpClass()
cls.site = SiteFactory()
cls.other_site = SiteFactory()
cls.masters_program_1 = ProgramFactory.create(type='Masters')
cls.masters_program_2 = ProgramFactory.create(type='Masters')
cls.masters_program_other_site = ProgramFactory.create(type='Masters')
cls.bachelors_program = ProgramFactory.create(type='Bachelors')
cls.no_type_program = ProgramFactory.create(type=None)
def setUp(self):
""" Loads program data into the cache before each test function. """
super(TestGetProgramsByType, self).setUp()
self.init_cache()
def init_cache(self):
""" This function plays the role of the ``cache_programs`` management command. """
all_programs = [
self.masters_program_1,
self.masters_program_2,
self.bachelors_program,
self.no_type_program,
self.masters_program_other_site
]
cached_programs = {
PROGRAM_CACHE_KEY_TPL.format(uuid=program['uuid']): program for program in all_programs
}
cache.set_many(cached_programs, None)
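        # Also build the site-scoped "programs by type" index that the management
        # command would cache: cache key -> list of program UUIDs.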
programs_by_type = defaultdict(list)
for program in all_programs:
program_type = normalize_program_type(program.get('type'))
site_id = self.site.id
if program == self.masters_program_other_site:
site_id = self.other_site.id
cache_key = PROGRAMS_BY_TYPE_CACHE_KEY_TPL.format(site_id=site_id, program_type=program_type)
programs_by_type[cache_key].append(program['uuid'])
cache.set_many(programs_by_type, None)
def test_get_masters_programs(self):
expected_programs = [self.masters_program_1, self.masters_program_2]
six.assertCountEqual(self, expected_programs, get_programs_by_type(self.site, 'masters'))
def test_get_bachelors_programs(self):
expected_programs = [self.bachelors_program]
self.assertEqual(expected_programs, get_programs_by_type(self.site, 'bachelors'))
def test_get_no_such_type_programs(self):
expected_programs = []
self.assertEqual(expected_programs, get_programs_by_type(self.site, 'doctorate'))
def test_get_masters_programs_other_site(self):
expected_programs = [self.masters_program_other_site]
self.assertEqual(expected_programs, get_programs_by_type(self.other_site, 'masters'))
def test_get_programs_null_type(self):
expected_programs = [self.no_type_program]
self.assertEqual(expected_programs, get_programs_by_type(self.site, None))
def test_get_programs_false_type(self):
expected_programs = []
self.assertEqual(expected_programs, get_programs_by_type(self.site, False))
def test_normalize_program_type(self):
self.assertEqual('none', normalize_program_type(None))
self.assertEqual('false', normalize_program_type(False))
self.assertEqual('true', normalize_program_type(True))
self.assertEqual('', normalize_program_type(''))
self.assertEqual('masters', normalize_program_type('Masters'))
self.assertEqual('masters', normalize_program_type('masters'))
|
ESOedX/edx-platform
|
openedx/core/djangoapps/catalog/tests/test_utils.py
|
Python
|
agpl-3.0
| 34,566
|
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("uk_results", "0022_postresult_confirmed_resultset")]
operations = [
migrations.AlterModelOptions(
name="candidateresult",
options={"ordering": ("-num_ballots_reported",)},
)
]
|
DemocracyClub/yournextrepresentative
|
ynr/apps/uk_results/migrations/0023_auto_20160505_1636.py
|
Python
|
agpl-3.0
| 322
|
# -*- coding: utf-8 -*-
"""
Code to manage fetching and storing the metadata of IdPs.
"""
#pylint: disable=no-member
from celery.task import task # pylint: disable=import-error,no-name-in-module
import datetime
import dateutil.parser
import logging
from lxml import etree
import requests
from onelogin.saml2.utils import OneLogin_Saml2_Utils
from third_party_auth.models import SAMLConfiguration, SAMLProviderConfig, SAMLProviderData
log = logging.getLogger(__name__)
SAML_XML_NS = 'urn:oasis:names:tc:SAML:2.0:metadata' # The SAML Metadata XML namespace
class MetadataParseError(Exception):
""" An error occurred while parsing the SAML metadata from an IdP """
pass
@task(name='third_party_auth.fetch_saml_metadata')
def fetch_saml_metadata():
"""
Fetch and store/update the metadata of all IdPs
This task should be run on a daily basis.
It's OK to run this whether or not SAML is enabled.
Return value:
tuple(num_changed, num_failed, num_total)
num_changed: Number of providers that are either new or whose metadata has changed
num_failed: Number of providers that could not be updated
num_total: Total number of providers whose metadata was fetched
"""
if not SAMLConfiguration.is_enabled():
return (0, 0, 0) # Nothing to do until SAML is enabled.
num_changed, num_failed = 0, 0
# First make a list of all the metadata XML URLs:
url_map = {}
for idp_slug in SAMLProviderConfig.key_values('idp_slug', flat=True):
config = SAMLProviderConfig.current(idp_slug)
if not config.enabled:
continue
url = config.metadata_source
if url not in url_map:
url_map[url] = []
if config.entity_id not in url_map[url]:
url_map[url].append(config.entity_id)
# Now fetch the metadata:
for url, entity_ids in url_map.items():
try:
log.info("Fetching %s", url)
if not url.lower().startswith('https'):
log.warning("This SAML metadata URL is not secure! It should use HTTPS. (%s)", url)
response = requests.get(url, verify=True) # May raise HTTPError or SSLError or ConnectionError
response.raise_for_status() # May raise an HTTPError
            parser = etree.XMLParser(remove_comments=True)
            xml = etree.fromstring(response.text, parser)  # may raise XMLSyntaxError, caught by the outer handler
# TODO: Can use OneLogin_Saml2_Utils to validate signed XML if anyone is using that
for entity_id in entity_ids:
log.info(u"Processing IdP with entityID %s", entity_id)
public_key, sso_url, expires_at = _parse_metadata_xml(xml, entity_id)
changed = _update_data(entity_id, public_key, sso_url, expires_at)
if changed:
log.info(u"→ Created new record for SAMLProviderData")
num_changed += 1
else:
log.info(u"→ Updated existing SAMLProviderData. Nothing has changed.")
except Exception as err: # pylint: disable=broad-except
log.exception(err.message)
num_failed += 1
return (num_changed, num_failed, len(url_map))
def _parse_metadata_xml(xml, entity_id):
"""
Given an XML document containing SAML 2.0 metadata, parse it and return a tuple of
(public_key, sso_url, expires_at) for the specified entityID.
Raises MetadataParseError if anything is wrong.
"""
if xml.tag == etree.QName(SAML_XML_NS, 'EntityDescriptor'):
entity_desc = xml
else:
if xml.tag != etree.QName(SAML_XML_NS, 'EntitiesDescriptor'):
raise MetadataParseError("Expected root element to be <EntitiesDescriptor>, not {}".format(xml.tag))
entity_desc = xml.find(
".//{}[@entityID='{}']".format(etree.QName(SAML_XML_NS, 'EntityDescriptor'), entity_id)
)
        if entity_desc is None:
raise MetadataParseError("Can't find EntityDescriptor for entityID {}".format(entity_id))
expires_at = None
if "validUntil" in xml.attrib:
expires_at = dateutil.parser.parse(xml.attrib["validUntil"])
if "cacheDuration" in xml.attrib:
cache_expires = OneLogin_Saml2_Utils.parse_duration(xml.attrib["cacheDuration"])
if expires_at is None or cache_expires < expires_at:
expires_at = cache_expires
sso_desc = entity_desc.find(etree.QName(SAML_XML_NS, "IDPSSODescriptor"))
    if sso_desc is None:
raise MetadataParseError("IDPSSODescriptor missing")
if 'urn:oasis:names:tc:SAML:2.0:protocol' not in sso_desc.get("protocolSupportEnumeration"):
raise MetadataParseError("This IdP does not support SAML 2.0")
# Now we just need to get the public_key and sso_url
public_key = sso_desc.findtext("./{}//{}".format(
etree.QName(SAML_XML_NS, "KeyDescriptor"), "{http://www.w3.org/2000/09/xmldsig#}X509Certificate"
))
if not public_key:
raise MetadataParseError("Public Key missing. Expected an <X509Certificate>")
public_key = public_key.replace(" ", "")
binding_elements = sso_desc.iterfind("./{}".format(etree.QName(SAML_XML_NS, "SingleSignOnService")))
sso_bindings = {element.get('Binding'): element.get('Location') for element in binding_elements}
try:
# The only binding supported by python-saml and python-social-auth is HTTP-Redirect:
sso_url = sso_bindings['urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect']
except KeyError:
raise MetadataParseError("Unable to find SSO URL with HTTP-Redirect binding.")
return public_key, sso_url, expires_at
def _update_data(entity_id, public_key, sso_url, expires_at):
"""
Update/Create the SAMLProviderData for the given entity ID.
Return value:
False if nothing has changed and existing data's "fetched at" timestamp is just updated.
True if a new record was created. (Either this is a new provider or something changed.)
"""
data_obj = SAMLProviderData.current(entity_id)
fetched_at = datetime.datetime.now()
if data_obj and (data_obj.public_key == public_key and data_obj.sso_url == sso_url):
data_obj.expires_at = expires_at
data_obj.fetched_at = fetched_at
data_obj.save()
return False
else:
SAMLProviderData.objects.create(
entity_id=entity_id,
fetched_at=fetched_at,
expires_at=expires_at,
sso_url=sso_url,
public_key=public_key,
)
return True
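# Sketch of _update_data's contract (hypothetical values):
#   _update_data('https://idp.example.com', 'MIIC...', 'https://idp.example.com/sso', None)
# returns True the first time (record created) and False on an identical
# subsequent call (only the fetched_at timestamp is refreshed).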
|
mushtaqak/edx-platform
|
common/djangoapps/third_party_auth/tasks.py
|
Python
|
agpl-3.0
| 6,642
|
from tests.api import auth_for
from tests.data import add_fixtures, clubs, users
def test_lva(db_session, client):
lva = clubs.lva(owner=users.john())
add_fixtures(db_session, lva)
res = client.get("/clubs/{id}".format(id=lva.id))
assert res.status_code == 200
assert res.json == {
"id": lva.id,
"name": "LV Aachen",
"timeCreated": "2015-12-24T12:34:56+00:00",
"website": "http://www.lv-aachen.de",
"isWritable": False,
"owner": {"id": lva.owner.id, "name": lva.owner.name},
}
def test_sfn(db_session, client):
sfn = clubs.sfn()
add_fixtures(db_session, sfn)
res = client.get("/clubs/{id}".format(id=sfn.id))
assert res.status_code == 200
assert res.json == {
u"id": sfn.id,
u"name": u"Sportflug Niederberg",
u"timeCreated": "2017-01-01T12:34:56+00:00",
u"website": None,
u"isWritable": False,
u"owner": None,
}
def test_writable(db_session, client):
lva = clubs.lva()
john = users.john(club=lva)
add_fixtures(db_session, lva, john)
res = client.get("/clubs/{id}".format(id=lva.id), headers=auth_for(john))
assert res.status_code == 200
assert res.json == {
"id": lva.id,
"name": "LV Aachen",
"timeCreated": "2015-12-24T12:34:56+00:00",
"website": "http://www.lv-aachen.de",
"isWritable": True,
"owner": None,
}
def test_missing(client):
res = client.get("/clubs/10000000")
assert res.status_code == 404
def test_invalid_id(client):
res = client.get("/clubs/abc")
assert res.status_code == 404
|
skylines-project/skylines
|
tests/api/views/clubs/read_test.py
|
Python
|
agpl-3.0
| 1,643
|
import os
import shutil
import boto
from boto.s3.key import Key
import subprocess
from io import StringIO
from django.core.management.base import BaseCommand
from django.core.management import call_command
from django.conf import settings
from django.utils import timezone
from django.core.mail import send_mail
from core import models
def copy_file(source, destination):
"""
:param source: The source of the folder for copying
:param destination: The destination folder for the file
:return:
"""
destination_folder = os.path.join(settings.BASE_DIR, os.path.dirname(destination))
if not os.path.exists(destination_folder):
os.mkdir(destination_folder)
print("Copying {0}".format(source))
shutil.copy(os.path.join(settings.BASE_DIR, source),
os.path.join(settings.BASE_DIR, destination))
def copy_files(src_path, dest_path):
"""
:param src_path: The source folder for copying
:param dest_path: The destination these files/folders should be copied to
:return: None
"""
if not os.path.exists(src_path):
os.makedirs(src_path)
files = os.listdir(src_path)
for file_name in files:
        if file_name != 'temp':
full_file_name = os.path.join(src_path, file_name)
print("Copying {0}".format(full_file_name))
if os.path.isfile(full_file_name):
shutil.copy(full_file_name, dest_path)
else:
dir_dest = os.path.join(dest_path, file_name)
if os.path.exists(dir_dest):
shutil.rmtree(os.path.join(dir_dest))
shutil.copytree(full_file_name, dir_dest)
def mycb(so_far, total):
print('{0} kb transferred out of {1}'.format(so_far / 1024, total / 1024))
def handle_s3(tmp_path, start_time):
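    """Upload the zipped backup (files/temp/<start_time>.zip) to the configured S3 bucket."""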
print("Sending to S3.")
file_name = '{0}.zip'.format(start_time)
file_path = os.path.join(settings.BASE_DIR, 'files', 'temp', file_name)
    END_POINT = settings.END_POINT
    S3_HOST = settings.S3_HOST
    UPLOADED_FILENAME = 'backups/{0}.zip'.format(start_time)
    # include folders in file path. If it doesn't exist, it will be created
    s3 = boto.s3.connect_to_region(
        END_POINT,
        aws_access_key_id=settings.S3_ACCESS_KEY,
        aws_secret_access_key=settings.S3_SECRET_KEY,
        host=S3_HOST,
    )
    bucket = s3.get_bucket(settings.S3_BUCKET_NAME)
    k = Key(bucket)
    k.key = UPLOADED_FILENAME
    # Use a context manager so the file handle is closed after upload.
    with open(file_path, 'rb') as f:
        k.set_contents_from_file(f, cb=mycb, num_cb=200)
def handle_directory(tmp_path, start_time):
print("Copying to backup dir")
file_name = '{0}.zip'.format(start_time)
copy_file('files/temp/{0}'.format(file_name), settings.BACKUP_DIR)
def delete_used_tmp(tmp_path, start_time):
print("Deleting temp directory.")
shutil.rmtree(tmp_path)
file_path = "{0}/{1}.zip".format(os.path.join(settings.BASE_DIR, 'files', 'temp'), start_time)
os.unlink(file_path)
def send_email(start_time, e, success=False):
admins = models.Account.objects.filter(is_superuser=True)
message = ''
if not success:
message = 'There was an error during the backup process.\n\n '
send_mail(
'Backup',
'{0}{1}.'.format(message, e),
'backup@janeway',
[user.email for user in admins],
fail_silently=False,
)
class Command(BaseCommand):
"""
    Pulls files together then sends them to an AWS bucket.
"""
help = "Deletes duplicate settings."
def handle(self, *args, **options):
"""Does a backup..
:param args: None
:param options: None
:return: None
"""
# Ensure temp dir exists:
if not os.path.exists(os.path.join(settings.BASE_DIR, 'files', 'temp')):
os.makedirs(os.path.join(settings.BASE_DIR, 'files', 'temp'))
start_time = str(timezone.now())
try:
tmp_path = os.path.join(settings.BASE_DIR, 'files', 'temp', start_time)
# dump database out to JSON and store in StringIO for saving
print('Dumping json db file')
json_out = StringIO()
call_command('dumpdata', '--indent=4', '--natural-foreign', '--exclude=contenttypes', stdout=json_out)
write_path = os.path.join(settings.BASE_DIR, 'files', 'temp', 'janeway.json')
with open(write_path, 'w', encoding="utf-8") as write:
json_out.seek(0)
shutil.copyfileobj(json_out, write)
os.mkdir(tmp_path)
copy_file('files/temp/janeway.json', 'files/temp/{0}/janeway.json'.format(start_time))
copy_files(os.path.join(settings.BASE_DIR, 'media'), os.path.join(tmp_path, 'media'))
copy_files(os.path.join(settings.BASE_DIR, 'files'), os.path.join(tmp_path, 'files'))
print("Creating archive.")
shutil.make_archive(os.path.join(settings.BASE_DIR, 'files', 'temp', start_time), 'zip', tmp_path)
if settings.BACKUP_TYPE == 's3':
handle_s3(tmp_path, start_time)
else:
handle_directory(tmp_path, start_time)
delete_used_tmp(tmp_path, start_time)
if settings.BACKUP_EMAIL:
                send_email(start_time, 'Backup was successfully completed.', success=True)
except Exception as e:
send_email(start_time, e)
|
BirkbeckCTP/janeway
|
src/utils/management/commands/backup.py
|
Python
|
agpl-3.0
| 5,466
|
#!/usr/bin/env python3
# -*- coding:UTF-8 -*-
#Copyright (c) 1986 Nick Wong.
#Copyright (c) 2016-2026 TP-NEW Corp.
# License: TP-NEW (www.tp-new.com)
__author__ = "Nick Wong"
"""
用asyncio提供的@asyncio.coroutine可以把一个generator标记为coroutine类型,然后在coroutine内部用yield from调用另一个coroutine实现异步操作
从Python 3.5开始引入了新的语法async和await,可以让coroutine的代码更简洁易读
#generator(生成器)
#coroutine(协程)
async和await是针对coroutine的新语法,要使用新的语法,只需要做两步简单的替换:
1.把@asyncio.coroutine替换为async;
2.把yield from替换为await。
"""
import asyncio
######### Old code #########
@asyncio.coroutine
def hello():
print('Hello World!')
r = yield from asyncio.sleep(2)
print('Hello again!')
######### New code #########
async def hello1(): # Note: 'async def' cannot be split across lines, otherwise it's a syntax error
print('Hello World! 1')
r = await asyncio.sleep(2)
print('Hello again! 1')
# Get the EventLoop:
loop = asyncio.get_event_loop()
# Run the coroutines
loop.run_until_complete(hello())
loop.run_until_complete(hello1())
loop.close()
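# Expected output: 'Hello World!', then after ~2s 'Hello again!'; the same pattern
# follows for hello1(), since each run_until_complete() call runs to completion.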
|
nick-huang-cc/GraffitiSpaceTT
|
UnderstandStudyPython/IO_coroutine_stu1.py
|
Python
|
agpl-3.0
| 1,188
|
import numpy as np
import pandas as pd
import xarray as xr
from tikon.central import Módulo, SimulMódulo, Modelo, Exper, Parcela
from tikon.central.res import Resultado
from tikon.datos import Obs
from tikon.utils import EJE_TIEMPO, EJE_PARC
f_inic = '2000-01-01'
crds = {'eje 1': ['a', 'b'], 'eje 2': ['x', 'y', 'z']}
class Res(Resultado):
def __init__(símismo, sim, coords, vars_interés):
coords = {**crds, **coords}
super().__init__(sim, coords, vars_interés)
nombre = 'res'
unids = None
class SimulMóduloValid(SimulMódulo):
resultados = [Res]
def incrementar(símismo, paso, f):
super().incrementar(paso, f)
símismo.poner_valor('res', 1, rel=True)
class MóduloValid(Módulo):
nombre = 'módulo'
cls_simul = SimulMóduloValid
class MiObs(Obs):
mód = 'módulo'
var = 'res'
obs = MiObs(
datos=xr.DataArray(
np.arange(10),
coords={EJE_TIEMPO: pd.date_range(f_inic, periods=10, freq='D')}, dims=[EJE_TIEMPO]
).expand_dims({EJE_PARC: ['parcela'], **crds})
)
exper = Exper('exper', Parcela('parcela'), obs=obs)
modelo = Modelo(MóduloValid)
|
julienmalard/Tikon
|
pruebas/test_central/rcrs/modelo_valid.py
|
Python
|
agpl-3.0
| 1,156
|
import logging
import tmlib.models as tm
class SubmissionManager(object):
'''Mixin class for submission and monitoring of computational tasks.'''
def __init__(self, experiment_id, program_name):
'''
Parameters
----------
experiment_id: int
ID of the processed experiment
program_name: str
name of the submitting program
'''
self.experiment_id = experiment_id
self.program_name = program_name
def register_submission(self, user_id=None):
'''Creates a database entry in the "submissions" table.
Parameters
----------
user_id: int, optional
ID of submitting user (if not the user who owns the experiment)
Returns
-------
Tuple[int, str]
ID of the submission and the name of the submitting user
Warning
-------
Ensure that the "submissions" table get updated once the jobs
were submitted, i.e. added to a running `GC3Pie` engine.
To this end, use the ::meth:`tmlib.workflow.api.update_submission`
method.
See also
--------
:class:`tmlib.models.submission.Submission`
'''
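        # Usage sketch (hypothetical experiment ID and program name):
        #   submission_id, user_name = SubmissionManager(1, 'workflow').register_submission()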
with tm.utils.MainSession() as session:
if user_id is None:
experiment = session.query(tm.ExperimentReference).\
get(self.experiment_id)
user_id = experiment.user_id
submission = tm.Submission(
experiment_id=self.experiment_id, program=self.program_name,
user_id=user_id
)
session.add(submission)
session.commit()
return (submission.id, submission.user.name)
|
TissueMAPS/TmLibrary
|
tmlib/submission.py
|
Python
|
agpl-3.0
| 1,761
|
from __future__ import absolute_import
from __future__ import division
import os, traceback, math, re, zlib, base64, time, sys, platform, glob, string, stat
import cPickle as pickle
if sys.version_info[0] < 3:
import ConfigParser
else:
import configparser as ConfigParser
from Cura.util import resources
from Cura.util import version
#########################################################
## Default settings when none are found.
#########################################################
#Single place to store the defaults, so we have a consistent set of default settings.
profileDefaultSettings = {
'nozzle_size': '0.4',
'layer_height': '0.2',
'wall_thickness': '0.8',
'solid_layer_thickness': '0.6',
'fill_density': '20',
'skirt_line_count': '1',
'skirt_gap': '3.0',
'print_speed': '50',
'print_temperature': '220',
'print_bed_temperature': '70',
'support': 'None',
'filament_diameter': '2.89',
'filament_density': '1.00',
'retraction_min_travel': '5.0',
'retraction_enable': 'False',
'retraction_speed': '40.0',
'retraction_amount': '4.5',
'retraction_extra': '0.0',
'retract_on_jumps_only': 'True',
'travel_speed': '150',
'max_z_speed': '3.0',
'bottom_layer_speed': '20',
'cool_min_layer_time': '5',
'fan_enabled': 'True',
'fan_layer': '1',
'fan_speed': '100',
'fan_speed_max': '100',
'model_scale': '1.0',
'flip_x': 'False',
'flip_y': 'False',
'flip_z': 'False',
'swap_xz': 'False',
'swap_yz': 'False',
'model_rotate_base': '0',
'model_multiply_x': '1',
'model_multiply_y': '1',
'extra_base_wall_thickness': '0.0',
'sequence': 'Loops > Perimeter > Infill',
'force_first_layer_sequence': 'True',
'infill_type': 'Line',
'solid_top': 'True',
'fill_overlap': '15',
'support_rate': '50',
'support_distance': '0.5',
'support_dual_extrusion': 'False',
'joris': 'False',
'enable_skin': 'False',
'enable_raft': 'False',
'cool_min_feedrate': '10',
'bridge_speed': '100',
'raft_margin': '5',
'raft_base_material_amount': '100',
'raft_interface_material_amount': '100',
'bottom_thicknes': '0.3',
'hop_on_move': 'False',
'plugin_config': '',
'object_center_x': '-1',
'object_center_y': '-1',
'add_start_end_gcode': 'True',
'gcode_extension': 'gcode',
'alternative_center': '',
'clear_z': '0.0',
'extruder': '0',
'bottom_surface_thickness_layers': '2',
'top_surface_thickness_layers': '3',
#'extruder': '0',
}
alterationDefault = {
#######################################################################################
'start.gcode': """;Sliced {filename} at: {day} {date} {time}
;Basic settings: Layer height: {layer_height} Walls: {wall_thickness} Fill: {fill_density}
;Print time: {print_time}
;Filament used: {filament_amount}m {filament_weight}g
;Filament cost: {filament_cost}
G21 ;metric values
G90 ;absolute positioning
M107 ;start with the fan off
G28 X0 Y0 ;move X/Y to min endstops
G28 Z0 ;move Z to min endstops
G92 X0 Y0 Z0 E0 ;reset software position to front/left/z=0.0 aaa
G1 Z15.0 F{max_z_speed} ;move the platform down 15mm
G92 E0 ;zero the extruded length
G1 F200 E3 ;extrude 3mm of feed stock
G92 E0 ;zero the extruded length again
G1 F{travel_speed}
""",
#######################################################################################
'end.gcode': """;End GCode
M104 S0 ;extruder heater off
M140 S0 ;heated bed heater off (if you have it)
G91 ;relative positioning
G1 E-1 F300 ;retract the filament a bit before lifting the nozzle, to release some of the pressure
G1 Z+0.5 E-5 X-20 Y-20 F{travel_speed} ;move Z up a bit and retract filament even more
G28 X0 Y0 ;move X/Y to min endstops, so the head is out of the way
M84 ;steppers off
G90 ;absolute positioning
""",
#######################################################################################
'support_start.gcode': '',
'support_end.gcode': '',
'cool_start.gcode': '',
'cool_end.gcode': '',
'replace.csv': '',
#######################################################################################
'nextobject.gcode': """;Move to next object on the platform. clear_z is the minimal z height we need to make sure we do not hit any objects.
G92 E0
G91 ;relative positioning
G1 E-1 F300 ;retract the filament a bit before lifting the nozzle, to release some of the pressure
G1 Z+0.5 E-5 F{travel_speed} ;move Z up a bit and retract filament even more
G90 ;absolute positioning
G1 Z{clear_z} F{max_z_speed}
G92 E0
G1 X{object_center_x} Y{object_center_y} F{travel_speed}
G1 F200 E6
G92 E0
""",
#######################################################################################
'switchExtruder.gcode': """;Switch between the current extruder and the next extruder, when printing with multiple extruders.
G92 E0
G1 E-15 F5000
G92 E0
T{extruder}
G1 E15 F5000
G92 E0
""",
}
preferencesDefaultSettings = {
'startMode': 'Simple',
'lastFile': os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'resources', 'example', 'UltimakerRobot_support.stl')),
'machine_width': '205',
'machine_depth': '205',
'machine_height': '200',
'machine_type': 'unknown',
'ultimaker_extruder_upgrade': 'False',
'has_heated_bed': 'False',
'extruder_amount': '1',
'extruder_offset_x1': '-22.0',
'extruder_offset_y1': '0.0',
'extruder_offset_x2': '0.0',
'extruder_offset_y2': '0.0',
'extruder_offset_x3': '0.0',
'extruder_offset_y3': '0.0',
'filament_density': '1300',
'steps_per_e': '0',
'serial_port': 'AUTO',
'serial_port_auto': '',
'serial_baud': 'AUTO',
'serial_baud_auto': '',
'slicer': 'Cura (Skeinforge based)',
'save_profile': 'False',
'filament_cost_kg': '0',
'filament_cost_meter': '0',
'sdpath': '',
'sdshortnames': 'True',
'extruder_head_size_min_x': '70.0',
'extruder_head_size_min_y': '18.0',
'extruder_head_size_max_x': '18.0',
'extruder_head_size_max_y': '35.0',
'extruder_head_size_height': '80.0',
'model_colour': '#8BC53F',
'model_colour2': '#CB3030',
'model_colour3': '#DDD93C',
'model_colour4': '#4550D3',
}
#########################################################
## Profile and preferences functions
#########################################################
## Profile functions
def getDefaultProfilePath():
if platform.system() == "Windows":
basePath = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))
#If we have a frozen python install, we need to step out of the library.zip
if hasattr(sys, 'frozen'):
basePath = os.path.normpath(os.path.join(basePath, ".."))
else:
basePath = os.path.expanduser('~/.cura/%s' % version.getVersion(False))
if not os.path.isdir(basePath):
os.makedirs(basePath)
return os.path.join(basePath, 'current_profile.ini')
def loadGlobalProfile(filename):
#Read a configuration file as global config
global globalProfileParser
globalProfileParser = ConfigParser.ConfigParser()
globalProfileParser.read(filename)
def resetGlobalProfile():
#Read a configuration file as global config
global globalProfileParser
globalProfileParser = ConfigParser.ConfigParser()
if getPreference('machine_type') == 'ultimaker':
putProfileSetting('nozzle_size', '0.4')
if getPreference('ultimaker_extruder_upgrade') == 'True':
putProfileSetting('retraction_enable', 'True')
else:
putProfileSetting('nozzle_size', '0.5')
def saveGlobalProfile(filename):
#Save the current profile to an ini file
globalProfileParser.write(open(filename, 'w'))
def loadGlobalProfileFromString(options):
global globalProfileParser
globalProfileParser = ConfigParser.ConfigParser()
globalProfileParser.add_section('profile')
globalProfileParser.add_section('alterations')
options = base64.b64decode(options)
options = zlib.decompress(options)
(profileOpts, alt) = options.split('\f', 1)
for option in profileOpts.split('\b'):
if len(option) > 0:
(key, value) = option.split('=', 1)
globalProfileParser.set('profile', key, value)
for option in alt.split('\b'):
if len(option) > 0:
(key, value) = option.split('=', 1)
globalProfileParser.set('alterations', key, value)
def getGlobalProfileString():
global globalProfileParser
if not globals().has_key('globalProfileParser'):
loadGlobalProfile(getDefaultProfilePath())
p = []
alt = []
tempDone = []
if globalProfileParser.has_section('profile'):
for key in globalProfileParser.options('profile'):
if key in tempOverride:
p.append(key + "=" + tempOverride[key])
tempDone.append(key)
else:
p.append(key + "=" + globalProfileParser.get('profile', key))
if globalProfileParser.has_section('alterations'):
for key in globalProfileParser.options('alterations'):
if key in tempOverride:
p.append(key + "=" + tempOverride[key])
tempDone.append(key)
else:
alt.append(key + "=" + globalProfileParser.get('alterations', key))
for key in tempOverride:
if key not in tempDone:
p.append(key + "=" + tempOverride[key])
ret = '\b'.join(p) + '\f' + '\b'.join(alt)
ret = base64.b64encode(zlib.compress(ret, 9))
return ret
def getProfileSetting(name):
if name in tempOverride:
return unicode(tempOverride[name], "utf-8")
#Check if we have a configuration file loaded, else load the default.
if not globals().has_key('globalProfileParser'):
loadGlobalProfile(getDefaultProfilePath())
if not globalProfileParser.has_option('profile', name):
if name in profileDefaultSettings:
default = profileDefaultSettings[name]
else:
print("Missing default setting for: '" + name + "'")
profileDefaultSettings[name] = ''
default = ''
if not globalProfileParser.has_section('profile'):
globalProfileParser.add_section('profile')
globalProfileParser.set('profile', name, str(default))
#print(name + " not found in profile, so using default: " + str(default))
return default
return globalProfileParser.get('profile', name)
def getProfileSettingFloat(name):
try:
setting = getProfileSetting(name).replace(',', '.')
return float(eval(setting, {}, {}))
except (ValueError, SyntaxError, TypeError):
return 0.0
def putProfileSetting(name, value):
#Check if we have a configuration file loaded, else load the default.
if not globals().has_key('globalProfileParser'):
loadGlobalProfile(getDefaultProfilePath())
if not globalProfileParser.has_section('profile'):
globalProfileParser.add_section('profile')
globalProfileParser.set('profile', name, str(value))
def isProfileSetting(name):
if name in profileDefaultSettings:
return True
return False
## Preferences functions
global globalPreferenceParser
globalPreferenceParser = None
def getPreferencePath():
if platform.system() == "Windows":
basePath = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))
#If we have a frozen python install, we need to step out of the library.zip
if hasattr(sys, 'frozen'):
basePath = os.path.normpath(os.path.join(basePath, ".."))
else:
basePath = os.path.expanduser('~/.cura/%s' % version.getVersion(False))
if not os.path.isdir(basePath):
os.makedirs(basePath)
return os.path.join(basePath, 'preferences.ini')
def getPreferenceFloat(name):
try:
setting = getPreference(name).replace(',', '.')
return float(eval(setting, {}, {}))
except (ValueError, SyntaxError, TypeError):
return 0.0
def getPreferenceColour(name):
colorString = getPreference(name)
return [float(int(colorString[1:3], 16)) / 255, float(int(colorString[3:5], 16)) / 255, float(int(colorString[5:7], 16)) / 255, 1.0]
def getPreference(name):
if name in tempOverride:
return unicode(tempOverride[name])
global globalPreferenceParser
    if globalPreferenceParser is None:
globalPreferenceParser = ConfigParser.ConfigParser()
globalPreferenceParser.read(getPreferencePath())
if not globalPreferenceParser.has_option('preference', name):
if name in preferencesDefaultSettings:
default = preferencesDefaultSettings[name]
else:
print("Missing default setting for: '" + name + "'")
preferencesDefaultSettings[name] = ''
default = ''
if not globalPreferenceParser.has_section('preference'):
globalPreferenceParser.add_section('preference')
globalPreferenceParser.set('preference', name, str(default))
#print(name + " not found in preferences, so using default: " + str(default))
return default
return unicode(globalPreferenceParser.get('preference', name), "utf-8")
def putPreference(name, value):
#Check if we have a configuration file loaded, else load the default.
global globalPreferenceParser
    if globalPreferenceParser is None:
globalPreferenceParser = ConfigParser.ConfigParser()
globalPreferenceParser.read(getPreferencePath())
if not globalPreferenceParser.has_section('preference'):
globalPreferenceParser.add_section('preference')
globalPreferenceParser.set('preference', name, unicode(value).encode("utf-8"))
globalPreferenceParser.write(open(getPreferencePath(), 'w'))
def isPreference(name):
if name in preferencesDefaultSettings:
return True
return False
## Temp overrides for multi-extruder slicing and the project planner.
tempOverride = {}
def setTempOverride(name, value):
tempOverride[name] = unicode(value).encode("utf-8")
def clearTempOverride(name):
del tempOverride[name]
def resetTempOverride():
tempOverride.clear()
#########################################################
## Utility functions to calculate common profile values
#########################################################
def calculateEdgeWidth():
wallThickness = getProfileSettingFloat('wall_thickness')
nozzleSize = getProfileSettingFloat('nozzle_size')
if wallThickness < nozzleSize:
return wallThickness
lineCount = int(wallThickness / nozzleSize + 0.0001)
lineWidth = wallThickness / lineCount
lineWidthAlt = wallThickness / (lineCount + 1)
if lineWidth > nozzleSize * 1.5:
return lineWidthAlt
return lineWidth
def calculateLineCount():
wallThickness = getProfileSettingFloat('wall_thickness')
nozzleSize = getProfileSettingFloat('nozzle_size')
if wallThickness < nozzleSize:
return 1
lineCount = int(wallThickness / nozzleSize + 0.0001)
lineWidth = wallThickness / lineCount
lineWidthAlt = wallThickness / (lineCount + 1)
if lineWidth > nozzleSize * 1.5:
return lineCount + 1
return lineCount
def calculateSolidLayerCount():
layerHeight = getProfileSettingFloat('layer_height')
solidThickness = getProfileSettingFloat('solid_layer_thickness')
return int(math.ceil(solidThickness / layerHeight - 0.0001))
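# Worked examples with the profile defaults above:
#   wall_thickness=0.8, nozzle_size=0.4 -> calculateLineCount() == 2, calculateEdgeWidth() == 0.4
#   solid_layer_thickness=0.6, layer_height=0.2 -> calculateSolidLayerCount() == 3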
#########################################################
## Alteration file functions
#########################################################
def replaceTagMatch(m):
pre = m.group(1)
tag = m.group(2)
if tag == 'time':
return pre + time.strftime('%H:%M:%S').encode('utf-8', 'replace')
if tag == 'date':
return pre + time.strftime('%d %b %Y').encode('utf-8', 'replace')
if tag == 'day':
return pre + time.strftime('%a').encode('utf-8', 'replace')
if tag == 'print_time':
return pre + '#P_TIME#'
if tag == 'filament_amount':
return pre + '#F_AMNT#'
if tag == 'filament_weight':
return pre + '#F_WGHT#'
if tag == 'filament_cost':
return pre + '#F_COST#'
if pre == 'F' and tag in ['print_speed', 'retraction_speed', 'travel_speed', 'max_z_speed', 'bottom_layer_speed', 'cool_min_feedrate']:
f = getProfileSettingFloat(tag) * 60
elif isProfileSetting(tag):
f = getProfileSettingFloat(tag)
elif isPreference(tag):
f = getProfileSettingFloat(tag)
else:
return '%s?%s?' % (pre, tag)
if (f % 1) == 0:
return pre + str(int(f))
return pre + str(f)
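# Example: in start.gcode, 'G1 F{travel_speed}' expands to 'G1 F9000' when
# travel_speed is 150, since F-prefixed speed tags are converted to mm/min.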
def replaceGCodeTags(filename, gcodeInt):
f = open(filename, 'r+')
data = f.read(2048)
data = data.replace('#P_TIME#', ('%5d:%02d' % (int(gcodeInt.totalMoveTimeMinute / 60), int(gcodeInt.totalMoveTimeMinute % 60)))[-8:])
data = data.replace('#F_AMNT#', ('%8.2f' % (gcodeInt.extrusionAmount / 1000))[-8:])
data = data.replace('#F_WGHT#', ('%8.2f' % (gcodeInt.calculateWeight() * 1000))[-8:])
cost = gcodeInt.calculateCost()
if cost == False:
cost = 'Unknown'
data = data.replace('#F_COST#', ('%8s' % (cost.split(' ')[0]))[-8:])
f.seek(0)
f.write(data)
f.close()
### Get alteration raw contents. (Used internally in Cura)
def getAlterationFile(filename):
#Check if we have a configuration file loaded, else load the default.
if not globals().has_key('globalProfileParser'):
loadGlobalProfile(getDefaultProfilePath())
if not globalProfileParser.has_option('alterations', filename):
if filename in alterationDefault:
default = alterationDefault[filename]
else:
print("Missing default alteration for: '" + filename + "'")
alterationDefault[filename] = ''
default = ''
if not globalProfileParser.has_section('alterations'):
globalProfileParser.add_section('alterations')
#print("Using default for: %s" % (filename))
globalProfileParser.set('alterations', filename, default)
return unicode(globalProfileParser.get('alterations', filename), "utf-8")
def setAlterationFile(filename, value):
#Check if we have a configuration file loaded, else load the default.
if not globals().has_key('globalProfileParser'):
loadGlobalProfile(getDefaultProfilePath())
if not globalProfileParser.has_section('alterations'):
globalProfileParser.add_section('alterations')
globalProfileParser.set('alterations', filename, value.encode("utf-8"))
saveGlobalProfile(getDefaultProfilePath())
### Get the alteration file for output. (Used by Skeinforge)
def getAlterationFileContents(filename):
prefix = ''
postfix = ''
alterationContents = getAlterationFile(filename)
if filename == 'start.gcode':
#For the start code, hack the temperature and the steps per E value into it. So the temperature is reached before the start code extrusion.
#We also set our steps per E here, if configured.
eSteps = getPreferenceFloat('steps_per_e')
if eSteps > 0:
prefix += 'M92 E%f\n' % (eSteps)
temp = getProfileSettingFloat('print_temperature')
bedTemp = 0
if getPreference('has_heated_bed') == 'True':
bedTemp = getProfileSettingFloat('print_bed_temperature')
if bedTemp > 0 and not '{print_bed_temperature}' in alterationContents:
prefix += 'M140 S%f\n' % (bedTemp)
if temp > 0 and not '{print_temperature}' in alterationContents:
prefix += 'M109 S%f\n' % (temp)
if bedTemp > 0 and not '{print_bed_temperature}' in alterationContents:
prefix += 'M190 S%f\n' % (bedTemp)
elif filename == 'end.gcode':
#Append the profile string to the end of the GCode, so we can load it from the GCode file later.
postfix = ';CURA_PROFILE_STRING:%s\n' % (getGlobalProfileString())
elif filename == 'replace.csv':
#Always remove the extruder on/off M codes. These are no longer needed in 5D printing.
prefix = 'M101\nM103\n'
elif filename == 'support_start.gcode' or filename == 'support_end.gcode':
#Add support start/end code
if getProfileSetting('support_dual_extrusion') == 'True' and int(getPreference('extruder_amount')) > 1:
if filename == 'support_start.gcode':
setTempOverride('extruder', '1')
else:
setTempOverride('extruder', '0')
alterationContents = getAlterationFileContents('switchExtruder.gcode')
clearTempOverride('extruder')
else:
alterationContents = ''
return unicode(prefix + re.sub("(.)\{([^\}]*)\}", replaceTagMatch, alterationContents).rstrip() + '\n' + postfix).strip().encode('utf-8')
###### PLUGIN #####
def getPluginConfig():
try:
return pickle.loads(getProfileSetting('plugin_config'))
except:
return []
def setPluginConfig(config):
putProfileSetting('plugin_config', pickle.dumps(config))
def getPluginBasePaths():
ret = []
if platform.system() != "Windows":
ret.append(os.path.expanduser('~/.cura/plugins/'))
if platform.system() == "Darwin" and hasattr(sys, 'frozen'):
ret.append(os.path.normpath(os.path.join(resources.resourceBasePath, "Cura/plugins")))
else:
ret.append(os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'plugins')))
return ret
def getPluginList():
ret = []
for basePath in getPluginBasePaths():
for filename in glob.glob(os.path.join(basePath, '*.py')):
filename = os.path.basename(filename)
if filename.startswith('_'):
continue
with open(os.path.join(basePath, filename), "r") as f:
item = {'filename': filename, 'name': None, 'info': None, 'type': None, 'params': []}
for line in f:
line = line.strip()
if not line.startswith('#'):
break
line = line[1:].split(':', 1)
if len(line) != 2:
continue
if line[0].upper() == 'NAME':
item['name'] = line[1].strip()
elif line[0].upper() == 'INFO':
item['info'] = line[1].strip()
elif line[0].upper() == 'TYPE':
item['type'] = line[1].strip()
elif line[0].upper() == 'DEPEND':
pass
elif line[0].upper() == 'PARAM':
m = re.match('([a-zA-Z]*)\(([a-zA-Z_]*)(?::([^\)]*))?\) +(.*)', line[1].strip())
if m is not None:
item['params'].append({'name': m.group(1), 'type': m.group(2), 'default': m.group(3), 'description': m.group(4)})
else:
print "Unknown item in effect meta data: %s %s" % (line[0], line[1])
			if item['name'] is not None and item['type'] == 'postprocess':
ret.append(item)
return ret
def runPostProcessingPlugins(gcodefilename):
pluginConfigList = getPluginConfig()
pluginList = getPluginList()
for pluginConfig in pluginConfigList:
plugin = None
for pluginTest in pluginList:
if pluginTest['filename'] == pluginConfig['filename']:
plugin = pluginTest
if plugin is None:
continue
pythonFile = None
for basePath in getPluginBasePaths():
testFilename = os.path.join(basePath, pluginConfig['filename'])
if os.path.isfile(testFilename):
pythonFile = testFilename
if pythonFile is None:
continue
locals = {'filename': gcodefilename}
for param in plugin['params']:
value = param['default']
if param['name'] in pluginConfig['params']:
value = pluginConfig['params'][param['name']]
if param['type'] == 'float':
try:
value = float(value)
except:
value = float(param['default'])
locals[param['name']] = value
try:
execfile(pythonFile, locals)
except:
locationInfo = traceback.extract_tb(sys.exc_info()[2])[-1]
return "%s: '%s' @ %s:%s:%d" % (str(sys.exc_info()[0].__name__), str(sys.exc_info()[1]), os.path.basename(locationInfo[0]), locationInfo[2], locationInfo[1])
return None
def getSDcardDrives():
drives = ['']
if platform.system() == "Windows":
from ctypes import windll
bitmask = windll.kernel32.GetLogicalDrives()
for letter in string.uppercase:
if bitmask & 1:
drives.append(letter + ':/')
bitmask >>= 1
if platform.system() == "Darwin":
drives = []
for volume in glob.glob('/Volumes/*'):
if stat.S_ISLNK(os.lstat(volume).st_mode):
continue
drives.append(volume)
return drives
|
tinkerinestudio/Tinkerine-Suite
|
TinkerineSuite/Cura/util/profile2.py
|
Python
|
agpl-3.0
| 23,092
|
##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2019 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django.conf import settings
from rest_framework import serializers
from base.models.enums.summary_status import SummaryStatus
from base.models.learning_unit_year import LearningUnitYear
from learning_unit.api.serializers.campus import LearningUnitCampusSerializer
from learning_unit.api.serializers.component import LearningUnitComponentSerializer
from learning_unit.api.serializers.utils import LearningUnitHyperlinkedIdentityField, \
LearningUnitHyperlinkedRelatedField
class LearningUnitTitleSerializer(serializers.ModelSerializer):
title = serializers.SerializerMethodField()
class Meta:
model = LearningUnitYear
fields = (
'title',
)
def get_title(self, learning_unit_year):
language = self.context['language']
return getattr(
learning_unit_year,
'full_title' + ('_' + language if language not in settings.LANGUAGE_CODE_FR else '')
)
class LearningUnitSerializer(LearningUnitTitleSerializer):
url = LearningUnitHyperlinkedIdentityField(read_only=True)
osis_url = serializers.HyperlinkedIdentityField(
view_name='learning_unit',
lookup_url_kwarg="learning_unit_year_id",
read_only=True
)
requirement_entity = serializers.CharField(
source='entity_requirement',
read_only=True
)
allocation_entity = serializers.CharField(
source='entity_allocation',
read_only=True
)
academic_year = serializers.IntegerField(source='academic_year.year')
type = serializers.CharField(source='learning_container_year.container_type')
type_text = serializers.CharField(source='get_container_type_display', read_only=True)
subtype_text = serializers.CharField(source='get_subtype_display', read_only=True)
has_proposal = serializers.SerializerMethodField()
class Meta(LearningUnitTitleSerializer.Meta):
model = LearningUnitYear
fields = LearningUnitTitleSerializer.Meta.fields + (
'url',
'osis_url',
'acronym',
'academic_year',
'credits',
'status',
'requirement_entity',
'allocation_entity',
'type',
'type_text',
'subtype',
'subtype_text',
'has_proposal',
)
def get_has_proposal(self, learning_unit_year):
return getattr(learning_unit_year, "has_proposal", None)
class LearningUnitDetailedSerializer(LearningUnitSerializer):
periodicity_text = serializers.CharField(source='get_periodicity_display', read_only=True)
quadrimester_text = serializers.CharField(source='get_quadrimester_display', read_only=True)
language = serializers.CharField(source='language.code', read_only=True)
team = serializers.BooleanField(source='learning_container_year.team', read_only=True)
campus = LearningUnitCampusSerializer(read_only=True)
components = LearningUnitComponentSerializer(many=True, source='learningcomponentyear_set', read_only=True)
parent = LearningUnitHyperlinkedRelatedField(read_only=True, lookup_field='acronym')
partims = LearningUnitHyperlinkedRelatedField(read_only=True, many=True, source='get_partims_related')
proposal = serializers.SerializerMethodField()
summary_status = serializers.SerializerMethodField()
remark = serializers.CharField(source='other_remark', read_only=True)
remark_en = serializers.CharField(source='other_remark_english', read_only=True)
class Meta(LearningUnitSerializer.Meta):
model = LearningUnitYear
fields = LearningUnitSerializer.Meta.fields + (
'quadrimester',
'quadrimester_text',
'periodicity',
'periodicity_text',
'campus',
'team',
'language',
'exchange_students',
'french_friendly',
'english_friendly',
'components',
'parent',
'partims',
'proposal',
'summary_status',
'professional_integration',
'remark',
'remark_en',
)
@staticmethod
def get_proposal(learning_unit_year):
if not hasattr(learning_unit_year, "proposallearningunit"):
return {}
return {
"folder": learning_unit_year.proposallearningunit.folder,
"type": learning_unit_year.proposallearningunit.get_type_display(),
"status": learning_unit_year.proposallearningunit.get_state_display(),
}
@staticmethod
def get_summary_status(learning_unit_year):
if getattr(learning_unit_year, "summary_status", False):
return SummaryStatus.MODIFIED.value
elif learning_unit_year.summary_locked:
return SummaryStatus.BLOCKED.value
return SummaryStatus.NOT_MODIFIED.value
class ExternalLearningUnitDetailedSerializer(LearningUnitDetailedSerializer):
local_url = serializers.CharField(source='externallearningunityear.url')
local_code = serializers.CharField(source='externallearningunityear.external_acronym')
class Meta(LearningUnitDetailedSerializer.Meta):
model = LearningUnitYear
fields = LearningUnitDetailedSerializer.Meta.fields + (
'local_code',
'local_url'
)
|
uclouvain/OSIS-Louvain
|
learning_unit/api/serializers/learning_unit.py
|
Python
|
agpl-3.0
| 6,633
|
from IPython import embed
import annotateit
from annotateit import model, db, es
from flask import g
def main():
app = annotateit.create_app()
with app.test_request_context():
g.user = model.User.fetch('admin')
embed(display_banner=False)
if __name__ == '__main__':
main()
|
openannotation/annotateit
|
console.py
|
Python
|
agpl-3.0
| 306
|
from django.conf import settings
from rest_framework import viewsets
from rest_framework.response import Response
import snotes20.models as models
class EditorViewSet(viewsets.ViewSet):
def list(self, request):
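        # One entry per configured editor, e.g. (hypothetical settings):
        #   {'short': 'ed', 'long': 'Example Editor', 'url': 'https://editor.example.com/u/'}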
return Response([
{
'short': short,
'long': long,
'url': settings.EDITORS[short]['userurl']
} for short, long in models.EDITOR_CHOICES
])
|
shownotes/snotes20-restapi
|
snotes20/views/EditorViewSet.py
|
Python
|
agpl-3.0
| 444
|
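Editor's note: a bare ViewSet like EditorViewSet is normally exposed through a DRF router, which maps list() to GET on the collection URL. A sketch of the wiring (the prefix and basename are illustrative choices; on DRF versions before 3.9 the keyword was base_name):
# Hedged sketch of router wiring for the ViewSet above; names are illustrative.
from rest_framework import routers
from snotes20.views.EditorViewSet import EditorViewSet

router = routers.DefaultRouter()
# basename is required because the ViewSet defines no queryset attribute
router.register(r'editors', EditorViewSet, basename='editor')
urlpatterns = router.urls  # GET /editors/ now dispatches to EditorViewSet.list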
from .AugmentedWeapon import AugmentedWeapon
from .Cloaked import Cloaked
from .Ethereal import Ethereal
from .Muted import Muted
from .TemporaryIllumination import TemporaryIllumination
from .Undead import Undead
from .HealthDrain import HealthDrain
__all__ = [
"AugmentedWeapon",
"Cloaked",
"Ethereal",
"Muted",
"TemporaryIllumination",
"Undead",
"HealthDrain"
]
|
etkirsch/legends-of-erukar
|
erukar/content/conditions/magical/__init__.py
|
Python
|
agpl-3.0
| 394
|
# coding=utf-8
__author__ = "Daniel Arroyo <daniel@astroprint.com>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
import logging
import threading
import time
import os
from octoprint.server import eventManager
from octoprint.events import Events
from octoprint.settings import settings
from astroprint.network import NetworkManager
class MacDevNetworkManager(NetworkManager):
def __init__(self):
self.logger = logging.getLogger(__name__)
self._online = False
self._storedWiFiNetworks = []
self._config = {
"autoConnect" : True,
"name": "astrobox-dev"
}
self._loadDevConfig()
self.name = self._config["name"]
if self._config['autoConnect']:
self._setActiveWifi(self.getWifiNetworks()[0])
super(MacDevNetworkManager, self).__init__()
def getActiveConnections(self):
		wireless = None
		wired = {
'name': 'Wired Test',
'ip': '127.0.0.1:5000',
'mac': 'wi:re:d2:34:56:78:90',
}
if self._storedWiFiNetworks:
for n in self._storedWiFiNetworks:
if n['active']:
wireless = {
'id': 'localhost',
'signal': 80,
'name': n['name'],
'ip': '127.0.0.1:5000',
'mac': 'wi:fi:12:34:56:78:90',
'secured': True
}
return {
'wired': wired,
'wireless': wireless,
'manual': None
}
def storedWifiNetworks(self):
return self._storedWiFiNetworks
def deleteStoredWifiNetwork(self, networkId):
for i in range(0, len(self._storedWiFiNetworks)):
n = self._storedWiFiNetworks[i]
if n['id'] == networkId:
if n['active']:
self._goOffline()
eventManager.fire(Events.INTERNET_CONNECTING_STATUS, {'status': 'disconnected'})
del self._storedWiFiNetworks[i]
self.logger.info("Network [%s] with id [%s] deleted." % (n['name'], n['id']))
return n['id']
def hasWifi(self):
return True
def getWifiNetworks(self):
return [
{"id": "80:1F:02:F9:16:1B", "name": "Secured Network", "secured": True, "signal": 80, "wep": False},
{"id": "90:1F:02:F9:16:1C", "name": "Open Network", "secured": False, "signal": 78, "wep": False},
{"id": "74:DA:38:88:51:90", "name": "WEP Network", "secured": True, "signal": 59, "wep": True},
{"id": "C0:7B:BC:1A:5C:81", "name": "Open Failed", "secured": False, "signal": 37, "wep": False}
]
def setWifiNetwork(self, bssid, password):
for n in self.getWifiNetworks():
if n['id'] == bssid:
if n['secured']:
if not password or len(password) < 3:
self.logger.info("Missing password for a secured network")
time.sleep(2)
return {
'err_code': 'invalid_psk',
'message': 'Invalid Password'
}
elif password != 'pwd':
self.logger.info("Password invalid. Needs to be 'pwd'")
def action():
eventManager.fire(Events.INTERNET_CONNECTING_STATUS, {'status': 'connecting'})
time.sleep(2)
eventManager.fire(Events.INTERNET_CONNECTING_STATUS, {'status': 'failed', 'reason': "no_secrets"})
timer = threading.Timer(3, action)
timer.daemon = True
timer.start()
return {"name": n['name']}
else:
if n["id"] == 'C0:7B:BC:1A:5C:81':
self.logger.info("Open network with NO connection")
def action():
eventManager.fire(Events.INTERNET_CONNECTING_STATUS, {'status': 'connecting'})
time.sleep(2)
eventManager.fire(Events.INTERNET_CONNECTING_STATUS, {'status': 'failed', 'reason': "no_connection"})
timer = threading.Timer(3, action)
timer.daemon = True
timer.start()
return {"name": n['name']}
time.sleep(1)
return self._setActiveWifi(n)
def isAstroprintReachable(self):
return self.isOnline()
def checkOnline(self):
return self.isOnline()
def isOnline(self):
return self._online
def startHotspot(self):
		#return True when successful
		return "Not supported on Mac"
	def stopHotspot(self):
		#return True when successful
		return "Not supported on Mac"
def getHostname(self):
return self.name
def setHostname(self, name):
self.name = name
self.logger.info('Host name is set to %s ' % name)
return True
@property
def activeIpAddress(self):
return '127.0.0.1'
@property
def networkDeviceInfo(self):
return [
{
'id': 'eth0',
'mac': 'wi:re:d2:34:56:78:90',
'type': 'wired',
'connected': True
},
{
'id': 'wlan0',
'mac': 'wi:fi:12:34:56:78:90',
'type': 'wifi',
'connected': False
}
]
def _goOnline(self):
self._online = True
eventManager.fire(Events.NETWORK_STATUS, 'online')
def _goOffline(self):
self._online = False
eventManager.fire(Events.NETWORK_STATUS, 'offline')
def _setActiveWifi(self, network):
self.logger.info("Selected WiFi: %s" % network['name'])
for n in self._storedWiFiNetworks:
n['active'] = False
self._storedWiFiNetworks.append({
'id': network['id'],
'name': network['name'],
'active': True
})
def action():
eventManager.fire(Events.INTERNET_CONNECTING_STATUS, {'status': 'connecting'})
time.sleep(1)
eventManager.fire(Events.INTERNET_CONNECTING_STATUS, {
'status': 'connected',
'info': {
'type': 'wifi',
'signal': network['signal'],
'name': network['name'],
'ip': '127.0.0.1:5000'
}
})
self._goOnline()
timer = threading.Timer(2, action)
timer.daemon = True
timer.start()
return {'name': network['name']}
def _loadDevConfig(self):
settings_file = "%s/mac-dev-network.yaml" % settings().getConfigFolder()
if os.path.isfile(settings_file):
import yaml
config = None
with open(settings_file, "r") as f:
config = yaml.safe_load(f)
if config:
def merge_dict(a,b):
for key in b:
if isinstance(b[key], dict):
merge_dict(a[key], b[key])
else:
a[key] = b[key]
merge_dict(self._config, config)
|
AstroPrint/AstroBox
|
src/astroprint/network/mac_dev.py
|
Python
|
agpl-3.0
| 5,835
|
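Editor's note: MacDevNetworkManager fakes asynchronous connection events by scheduling daemonized threading.Timer callbacks and returning immediately. Stripped of OctoPrint specifics, the pattern is sketched below; fire() is a stand-in for eventManager.fire and the delays are illustrative:
# Hedged sketch of the deferred-event pattern used above; fire() is a stand-in.
import threading
import time

def fire(event, payload):
    print('%s %r' % (event, payload))  # placeholder for eventManager.fire

def connect(network_name):
    def action():
        fire('INTERNET_CONNECTING_STATUS', {'status': 'connecting'})
        time.sleep(1)  # simulate association delay
        fire('INTERNET_CONNECTING_STATUS', {'status': 'connected', 'name': network_name})
    timer = threading.Timer(2, action)  # run action() two seconds from now
    timer.daemon = True                 # never block interpreter shutdown
    timer.start()
    return {'name': network_name}       # caller gets an immediate answer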
from django.conf.urls import patterns, include, url
# Enable the admin:
from django.contrib import admin
admin.autodiscover()
import politube.views
urlpatterns = patterns('',
url(r'^$', politube.views.home, name='home'),
url(r'^about/', politube.views.about, name='about'),
url(r'^plenary/', include('plenary.urls')),
url(r'^deputy/', include('deputy.urls')),
url(r'^videos_tools/', include('videos_tools.urls')),
    # Admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Admin:
url(r'^admin/', include(admin.site.urls)),
)
|
gdesmott/politube
|
politube/urls.py
|
Python
|
agpl-3.0
| 710
|
# Copyright 2019 Ecosoft Co., Ltd (http://ecosoft.co.th/)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html)
from odoo.exceptions import ValidationError
from odoo.addons.analytic_tag_dimension.tests.test_analytic_dimension import (
TestAnalyticDimensionBase,
)
class TestAnalyticDimensionCase(TestAnalyticDimensionBase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.account_obj = cls.env["account.account"]
cls.model_obj = cls.env["ir.model"]
cls.field_obj = cls.env["ir.model.fields"]
cls.invoice = cls.env["account.move"].create(
{"journal_id": cls.journal.id, "partner_id": cls.partner.id}
)
        # Mock data for testing model dimension, by_sequence with filtered
vals = {
"name": "A",
"line_ids": [ # use sequence as record identifier
(0, 0, {"value": "percent", "sequence": 1001}),
(0, 0, {"value": "balance", "sequence": 1002}),
],
}
cls.payterm_a = cls.env["account.payment.term"].create(vals)
def test_invoice_line_dimension_required(self):
"""If dimension is marked as required,
I expect error on save if the required dimension is not selected
"""
self.dimension_1.required = True
self.dimension_2.required = True
values = {
"name": "test",
"price_unit": 1,
"account_id": self.account.id,
"move_id": self.invoice.id,
"analytic_account_id": self.analytic_account.id,
"analytic_tag_ids": [(6, 0, [self.analytic_tag_1a.id])],
}
invoice_line_obj = self.env["account.move.line"]
# Error if missing required dimension
with self.assertRaises(ValidationError):
invoice_line_obj.create(values)
self.invoice.invoice_line_ids.unlink()
values["analytic_tag_ids"] = [
(6, 0, [self.analytic_tag_1a.id, self.analytic_tag_2a.id])
]
        # Valid if all required dimensions are filled
line = invoice_line_obj.create(values)
self.assertTrue(line.x_dimension_test_dim_1.id == self.analytic_tag_1a.id)
self.assertTrue(line.x_dimension_test_dim_2.id == self.analytic_tag_2a.id)
def test_invoice_line_dimension_by_sequence(self):
"""If dimension is by sequence, I expect,
- No duplicated sequence
- Selection allowed by sequence, i.e., Concept then Type
"""
invoice_line_obj = self.env["account.move.line"]
# Test no dimension with any sequence
values = {
"name": "test no sequence",
"price_unit": 1,
"account_id": self.account.id,
"move_id": self.invoice.id,
"analytic_account_id": self.analytic_account.id,
}
line = invoice_line_obj.create(values)
res = line._compute_analytic_tags_domain()
self.assertFalse(res["domain"]["analytic_tag_ids"])
        # Configure the by_sequence dimensions; duplicated sequences must be rejected
self.dimension_1.write({"required": False, "by_sequence": True, "sequence": 1})
with self.assertRaises(ValidationError):
self.dimension_2.write(
{"required": False, "by_sequence": True, "sequence": 1}
)
self.dimension_2.write({"required": False, "by_sequence": True, "sequence": 2})
# Now, user will see tags in sequence 1) Type 2) Concept
values = {
"name": "test sequence",
"price_unit": 1,
"account_id": self.account.id,
"move_id": self.invoice.id,
"analytic_account_id": self.analytic_account.id,
}
line = invoice_line_obj.create(values)
# First selection, dimension 1 tag shouldn't be in the domain
res = line._compute_analytic_tags_domain()
tag_ids = res["domain"]["analytic_tag_ids"][0][2]
self.assertNotIn(self.analytic_tag_2a.id, tag_ids)
# Select a dimension 1 tag
line.analytic_tag_ids += self.analytic_tag_1a
res = line._compute_analytic_tags_domain()
tag_ids = res["domain"]["analytic_tag_ids"][0][2]
# Test that all dimension 1 tags are not in list
type_tag_ids = [self.analytic_tag_1a.id, self.analytic_tag_1b.id]
for type_tag_id in type_tag_ids:
self.assertNotIn(type_tag_id, tag_ids)
def test_zz_invoice_line_dimension_ref_model_with_filter(self):
"""
        For dimension tags created by a ref model with by_sequence and filtered,
        we expect that:
        - If the user selects A, only payment term lines 1001 and 1002 can be selected
        Note:
            We use payment term and payment term line for testing purposes,
            although it does not make sense in real life
        """
        # It should be executed last to avoid side effects,
        # as not everything is undone in this removal
        # Clear all dimensions
self.tag_obj.search([]).unlink()
self.dimension_obj.search([]).unlink()
# Create new dimension, using reference model
pt = self.model_obj.search([("model", "=", "account.payment.term")])
pt_dimension = self.dimension_obj.create(
{
"name": "Payment Term",
"code": "payterm",
"by_sequence": True,
"sequence": 1,
}
)
pt_dimension.create_analytic_tags() # Test create without model
pt_dimension.ref_model_id = pt
pt_dimension.create_analytic_tags()
ptl = self.model_obj.search([("model", "=", "account.payment.term.line")])
# Payment term line will be filtered with payment_id
ptl_dimension = self.dimension_obj.create(
{
"name": "Payment Term Line",
"code": "payterm_line",
"ref_model_id": ptl.id,
"by_sequence": True,
"sequence": 2,
}
)
filter_field = self.field_obj.search(
[("model_id", "=", ptl.id), ("name", "=", "payment_id")]
)
ptl_dimension.filtered_field_ids += filter_field
ptl_dimension.create_analytic_tags()
values = {
"name": "test",
"price_unit": 1,
"account_id": self.account.id,
"move_id": self.invoice.id,
"analytic_account_id": self.analytic_account.id,
}
invoice_line_obj = self.env["account.move.line"]
line = invoice_line_obj.create(values)
tag = self.tag_obj.search([("name", "=", "A")])
line.analytic_tag_ids += tag
res = line._compute_analytic_tags_domain()
# Test whether this will list only 2 tags of payment term line 1001, 1002
tag_ids = res["domain"]["analytic_tag_ids"][0][2]
tags = self.tag_obj.search([("id", "in", tag_ids)])
sequences = [x.sequence for x in tags.mapped("resource_ref")]
self.assertEqual({1001, 1002}, set(sequences))
|
OCA/account-analytic
|
analytic_tag_dimension_enhanced/tests/test_analytic_dimension.py
|
Python
|
agpl-3.0
| 7,081
|
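Editor's note: the (0, 0, {...}) and (6, 0, [ids]) tuples in the test above are Odoo's x2many write commands: 0 creates a linked record from a values dict, 6 replaces the whole relation with exactly the given ids. A short reference sketch with illustrative values:
# Hedged sketch of Odoo x2many command tuples; ids and values are illustrative.
payterm_vals = {
    'name': 'A',
    'line_ids': [
        (0, 0, {'value': 'percent', 'sequence': 1001}),  # create a new line
        (0, 0, {'value': 'balance', 'sequence': 1002}),  # create a new line
    ],
}
line_vals = {
    'analytic_tag_ids': [(6, 0, [1, 2])],  # replace relation with ids 1 and 2
}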
# coding=utf-8
"""Ingest workflow management tool
FileNameSource Class
"""
__copyright__ = "Copyright (C) 2016 University of Maryland"
__license__ = "GNU AFFERO GENERAL PUBLIC LICENSE, Version 3"
import abc
import os
import sys
import psycopg2
class FileNameSource(object):
    # __metaclass__ is needed for @abc.abstractmethod to be enforced in Python 2
    __metaclass__ = abc.ABCMeta
    def __init__(self): pass
    def __iter__(self): return self
    @abc.abstractmethod
    def next(self): pass
def confirm_completion(self, path):
return True
class FileList(FileNameSource):
def __init__(self, args, cfg):
FileNameSource.__init__(self)
src = args['<source_directory>']
self.fp = sys.stdin if src == '-' else open(src, 'rU')
self.prefix = args['--prefix']
self.offset = len(self.prefix)
def next(self):
v = self.fp.next().strip()
if not v.startswith(self.prefix):
            print v, 'not in', self.prefix, '-- ignoring'
return
return decode_str(v[self.offset:])
class DirectoryWalk(FileNameSource):
def __init__(self, args, cfg):
FileNameSource.__init__(self)
src = args['<source_directory>']
if src == '-':
            print 'Incompatible mode -- cannot walk stdin'
raise ValueError
self.prefix = args['--prefix']
self.offset = len(self.prefix)
self.walker = os.walk(src, topdown=True, followlinks=True)
self.dirname = None
self.files = None
def next(self):
while not self.dirname or not self.files:
self.dirname, _, self.files = self.walker.next()
return os.path.join(self.dirname[self.offset:], self.files.pop())
class DB:
def __init__(self, args, cfg):
defaults = (('user', 'drastic'), ('database', 'drastic'), ('password', 'drastic'), ('host', 'localhost'))
credentials = dict(user=cfg.get('postgres', 'user'),
database=cfg.get('postgres', 'database'),
password=cfg.get('postgres', 'password'),
host=cfg.get('postgres', 'host'))
for k, v in defaults:
if not credentials[k]: credentials[k] = v
self.credentials = credentials
self.cnx = psycopg2.connect(**credentials)
self.cs1 = self.cnx.cursor()
table = args.get('--dataset', 'resource')
if not table: table = 'resource'
self.tablename = table
        ### Do JIT setup of other queries....
self.update_status = False
self.db_initialized = False
def summary(self):
cmd = '''SELECT status,count(*) from "{0}" group by status order by status '''.format(self.tablename)
try:
self.cs1.execute(cmd)
            for v in self.cs1: print '{0:<10s}\t{1:,}'.format(*v)
except Exception as e:
print e
def _setup_db(self, table):
cs = self.cnx.cursor()
# Create the status Enum
try:
cs.execute("CREATE TYPE resource_status AS ENUM ('READY','IN-PROGRESS','DONE','BROKEN','VERIFIED')")
except:
cs.connection.rollback()
#
cmds = [
'''CREATE TABLE IF NOT EXISTS "{0}" (
path TEXT PRIMARY KEY,
status resource_status DEFAULT 'READY',
started timestamp,
fs_sync boolean)''',
'''CREATE INDEX "IDX_{0}_01_status" ON "{0}" (status ) WHERE status <> 'DONE' ''',
'''CREATE INDEX "IDX_{0}_01_fs_sync" ON "{0}" (fs_sync) WHERE fs_sync is not True''']
for cmd in cmds:
try:
cs.execute(cmd.format(table))
cs.connection.commit()
except Exception as e:
cs.connection.rollback()
class DBPrepare(DB):
"""
Class to be used when preparing.
"""
def __init__(self, args, cfg):
DB.__init__(self, args, cfg)
self.prefix = (args['--prefix'])
self.offset = len(self.prefix)
self.cs = self.cnx.cursor('AB1', withhold=True)
self._setup_db(self.tablename)
cmd = '''PREPARE I1 ( text ) AS insert into "{0}" (path,status)
SELECT $1,'READY'::resource_status WHERE NOT EXISTS (SELECT TRUE FROM "{0}" where path = $1)'''
self.cs1.execute(cmd.format(self.tablename))
def prepare(self, path ):
self.cs1.execute("EXECUTE I1(%s); commit", [path])
return True
class DBQuery(FileNameSource, DB):
"""
Class to be used to get file names when injecting.
"""
def __init__(self, args, cfg):
DB.__init__(self,args,cfg)
FileNameSource.__init__(self)
self.prefix = (args['--prefix'])
self.offset = len(self.prefix)
self.fetch_cs = self.cnx.cursor()
cmd = '''PREPARE F1 (integer) AS SELECT path FROM "{0}" where status = 'READY' LIMIT $1 '''.format(self.tablename)
self.fetch_cs.execute(cmd)
self.fetch_cs.execute('EXECUTE F1 (1000)')
# And prepare the update status cmd
        ucmd = '''PREPARE M1 (TEXT,resource_status) AS UPDATE "{0}" SET status=$2 WHERE path = $1 and status <> $2 '''.format(
self.tablename)
self.cs1.execute(ucmd)
        # And retrieve the values for the status
self.cs1.execute('''SELECT unnest(enum_range(NULL::resource_status))''')
self.status_values = set( ( k[0] for k in self.cs1.fetchall() ))
return
    def confirm_completion(self, path, status='DONE'):
        if status not in self.status_values:
            if status == 'FAILED':
                status = 'BROKEN'
            else:
                raise ValueError("bad value for enum -- {} : should be {}".format(status, self.status_values))
####
try:
self.cs1.execute('EXECUTE M1(%s,%s)', [path,status])
updates = self.cs1.rowcount
self.cs1.connection.commit()
return True
except Exception as e:
print 'failed to update status for ', path,'\n',e
self.cs1.connection.rollback()
return False
def next(self):
"""
:return: next path from DB that is ready...
        This function will re-issue the SELECT when the current one is exhausted.
        This attempts to avoid holding too many locks on too many records.
"""
k = self.fetch_cs.fetchone()
#
if not k:
self.fetch_cs.execute('EXECUTE F1 (1000)')
k = self.fetch_cs.fetchone()
#
if k: return k[0].decode('utf-8')
raise StopIteration
def CreateFileNameSource(args, cfg):
"""
use the parameters to prepare an iterator that will deliver all the (suitably normalized) files to be injected
:param args: command line args
:param cfg: global, persistent parameters
:return: iterator
"""
src = args['<source_directory>']
prefix = args['--prefix']
if not prefix:
prefix = '/data'
else:
prefix = prefix.rstrip('/')
if not src.startswith(prefix):
print src, ' must be a subdirectory of the host data directory (--prefix=', prefix, ')'
print 'If you did not specify it, please do so'
sys.exit(1)
#########
## Set up a source that gets list of files from a file
if args['--read'] : return FileList(args, cfg)
if args['--walk']: return DirectoryWalk(args, cfg)
if args['--postgres'] : return DBQuery(args, cfg)
if args['--sqlite3'] :
raise NotImplementedError
def decode_str(s):
"""
:param s: string to be converted to unicode
:return: unicode version
"""
if isinstance(s, unicode): return s
try:
return s.decode('utf8')
except UnicodeDecodeError:
try:
return s.decode('iso8859-1')
except UnicodeDecodeError:
s_ignore = s.decode('utf8', 'ignore')
return s_ignore
|
UMD-DRASTIC/drastic
|
drastic/DrasticLoader/FileNameSource.py
|
Python
|
agpl-3.0
| 7,914
|
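Editor's note: DBQuery.next above implements a batched iterator: it drains the current cursor, re-issues the prepared SELECT once, and only then signals exhaustion. The same shape, independent of psycopg2 and in Python 3 style (fetch_batch is a placeholder for re-running the query):
# Hedged sketch of the batched-iterator shape behind DBQuery.next (Python 3).
class BatchedSource:
    def __init__(self, fetch_batch, size=1000):
        self._fetch = fetch_batch  # callable: size -> iterable of rows
        self._size = size
        self._buf = []

    def __iter__(self):
        return self

    def __next__(self):
        if not self._buf:
            self._buf = list(self._fetch(self._size))  # re-issue the query once
        if not self._buf:
            raise StopIteration  # nothing came back: the source is exhausted
        return self._buf.pop(0)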
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Region.region_code'
db.alter_column('region', 'region_code', self.gf('django.db.models.fields.CharField')(unique=True, max_length=55))
def backwards(self, orm):
# Changing field 'Region.region_code'
db.alter_column('region', 'region_code', self.gf('django.db.models.fields.CharField')(max_length=10, unique=True))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'datapoints.aggregationexpecteddata': {
'Meta': {'object_name': 'AggregationExpectedData', 'db_table': "'aggregation_expected_data'"},
'aggregation_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.AggregationType']"}),
'content_type': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'param_type': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '55', 'populate_from': "('aggregation_type', 'content_type')"})
},
u'datapoints.aggregationtype': {
'Meta': {'object_name': 'AggregationType', 'db_table': "'aggregation_type'"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'display_name_w_sub': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
u'datapoints.campaign': {
'Meta': {'ordering': "('-start_date',)", 'unique_together': "(('office', 'start_date'),)", 'object_name': 'Campaign', 'db_table': "'campaign'"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'office': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Office']"}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '50', 'populate_from': "'get_full_name'"}),
'start_date': ('django.db.models.fields.DateField', [], {})
},
u'datapoints.datapoint': {
'Meta': {'ordering': "['region', 'campaign']", 'unique_together': "(('indicator', 'region', 'campaign'),)", 'object_name': 'DataPoint', 'db_table': "'datapoint'"},
'campaign': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Campaign']"}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'indicator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Indicator']"}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Region']"}),
'source_datapoint': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['source_data.SourceDataPoint']"}),
'value': ('django.db.models.fields.FloatField', [], {})
},
u'datapoints.indicator': {
'Meta': {'ordering': "('name',)", 'object_name': 'Indicator', 'db_table': "'indicator'"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_reported': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '55'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '255', 'populate_from': "'name'", 'unique_with': '()'})
},
u'datapoints.office': {
'Meta': {'object_name': 'Office', 'db_table': "'office'"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '55'})
},
u'datapoints.region': {
'Meta': {'ordering': "('name',)", 'object_name': 'Region', 'db_table': "'region'"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_high_risk': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'latitude': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'longitude': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '55'}),
'office': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Office']"}),
'parent_region': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Region']", 'null': 'True'}),
'region_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '55'}),
'region_type': ('django.db.models.fields.CharField', [], {'max_length': '55'}),
'shape_file_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '55', 'populate_from': "'name'"}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Source']"}),
'source_region': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['source_data.SourceRegion']"})
},
u'datapoints.responsibility': {
'Meta': {'ordering': "('indicator',)", 'unique_together': "(('user', 'indicator', 'region'),)", 'object_name': 'Responsibility', 'db_table': "'responsibility'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'indicator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Indicator']"}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Region']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'datapoints.source': {
'Meta': {'object_name': 'Source', 'db_table': "'source'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_description': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'source_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '55'})
},
u'source_data.document': {
'Meta': {'unique_together': "(('docfile', 'doc_text'),)", 'object_name': 'Document'},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'doc_text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'docfile': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True'}),
'guid': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'source_data.processstatus': {
'Meta': {'object_name': 'ProcessStatus'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status_description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'status_text': ('django.db.models.fields.CharField', [], {'max_length': '25'})
},
'source_data.sourcedatapoint': {
'Meta': {'unique_together': "(('source', 'source_guid', 'indicator_string'),)", 'object_name': 'SourceDataPoint'},
'campaign_string': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'cell_value': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 27, 0, 0)'}),
'document': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['source_data.Document']"}),
'error_msg': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'guid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'indicator_string': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'region_string': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'row_number': ('django.db.models.fields.IntegerField', [], {}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Source']"}),
'source_guid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['source_data.ProcessStatus']"})
},
u'source_data.sourceregion': {
'Meta': {'unique_together': "(('region_string', 'document'),)", 'object_name': 'SourceRegion'},
'country': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'document': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['source_data.Document']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lat': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'lon': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'parent_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'parent_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'region_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'region_string': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'region_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'source_guid': ('django.db.models.fields.CharField', [], {'max_length': '255'})
}
}
complete_apps = ['datapoints']
|
SeedScientific/polio
|
datapoints/migrations/0039_auto__chg_field_region_region_code.py
|
Python
|
agpl-3.0
| 15,288
|
from itertools import repeat, chain
from operator import itemgetter
from django.db.models import Count, Sum, Case, When, IntegerField, Value, FloatField
from django.db.models.expressions import CombinedExpression
from django.http import JsonResponse
from django.shortcuts import render
from django.utils.translation import ugettext as _
from judge.models import Language, Submission
chart_colors = [0x3366CC, 0xDC3912, 0xFF9900, 0x109618, 0x990099, 0x3B3EAC, 0x0099C6, 0xDD4477, 0x66AA00, 0xB82E2E,
0x316395, 0x994499, 0x22AA99, 0xAAAA11, 0x6633CC, 0xE67300, 0x8B0707, 0x329262, 0x5574A6, 0x3B3EAC]
highlight_colors = []
def _highlight_colors():
for color in chart_colors:
r, g, b = color >> 16, (color >> 8) & 0xFF, color & 0xFF
highlight_colors.append('#%02X%02X%02X' % (min(int(r * 1.2), 255),
min(int(g * 1.2), 255),
min(int(b * 1.2), 255)))
_highlight_colors()
del _highlight_colors
chart_colors = map('#%06X'.__mod__, chart_colors)
ac_count = Count(Case(When(submission__result='AC', then=Value(1)), output_field=IntegerField()))
def repeat_chain(iterable):
return chain.from_iterable(repeat(iterable))
def language_data(request, language_count=Language.objects.annotate(count=Count('submission'))):
languages = language_count.filter(count__gte=1000).values('key', 'name', 'short_name', 'count').order_by('-count')
data = []
for language, color, highlight in zip(languages, chart_colors, highlight_colors):
data.append({
'value': language['count'], 'label': language['name'],
'color': color, 'highlight': highlight,
})
data.append({
'value': language_count.filter(count__lt=1000).aggregate(total=Sum('count'))['total'],
'label': 'Other', 'color': '#FDB45C', 'highlight': '#FFC870',
})
return JsonResponse(data, safe=False)
def ac_language_data(request):
return language_data(request, Language.objects.annotate(count=ac_count))
def status_data(request, statuses=None):
if not statuses:
statuses = (Submission.objects.values('result').annotate(count=Count('result'))
.values('result', 'count').order_by('-count'))
data = []
total_count = 0
for status, color, highlight in zip(statuses, chart_colors, highlight_colors):
res = status['result']
if not res:
continue
count = status['count']
total_count += count
data.append({
'value': count, 'label': str(Submission.USER_DISPLAY_CODES[res]),
'color': color, 'highlight': highlight
})
return JsonResponse(data, safe=False)
def ac_rate(request):
rate = CombinedExpression(ac_count / Count('submission'), '*', Value(100.0), output_field=FloatField())
data = Language.objects.annotate(total=Count('submission'), ac_rate=rate).filter(total__gt=0) \
.values('key', 'name', 'short_name', 'ac_rate').order_by('total')
return JsonResponse({
'labels': map(itemgetter('name'), data),
'datasets': [
{
'fillColor': 'rgba(151,187,205,0.5)',
'strokeColor': 'rgba(151,187,205,0.8)',
'highlightFill': 'rgba(151,187,205,0.75)',
'highlightStroke': 'rgba(151,187,205,1)',
'data': map(itemgetter('ac_rate'), data),
}
]
})
def language(request):
return render(request, 'stats/language.html', {
'title': _('Language statistics'), 'tab': 'language'
})
|
monouno/site
|
judge/views/stats.py
|
Python
|
agpl-3.0
| 3,624
|
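Editor's note: ac_count above uses Count(Case(When(...))), the pre-Django-2.0 idiom for counting only rows matching a condition (Count skips the NULLs the Case produces for non-matches). On Django 2.0+ the filter argument expresses the same thing directly; both forms sketched:
# Hedged sketch: conditional counting, legacy and Django >= 2.0 forms.
from django.db.models import Case, Count, IntegerField, Q, Value, When

legacy_ac_count = Count(
    Case(
        When(submission__result='AC', then=Value(1)),  # 1 for AC, NULL otherwise
        output_field=IntegerField(),
    )
)  # Count() ignores NULLs, so only AC submissions are counted

modern_ac_count = Count('submission', filter=Q(submission__result='AC'))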
# -*- coding:Utf-8 -*-
from django.conf import settings
from django.core.urlresolvers import is_valid_path
from django.http import HttpResponseRedirect
from django.utils.cache import patch_vary_headers
from django.utils import translation
from django.middleware.locale import LocaleMiddleware
from corsheaders.middleware import CorsMiddleware
__all__ = (
'VosaeLocaleMiddleware',
)
class VosaeLocaleMiddleware(LocaleMiddleware):
def process_response(self, request, response):
language = translation.get_language()
# Check if app has i18n_patterns urlconf
is_i18n_pattern = hasattr(request, 'resolver_match') and getattr(request.resolver_match, 'app_name', None) in ('account',)
# If path is '/', resolver_match is errored and not provided
if request.path == '/' and request.user.is_anonymous():
        # On home, anonymous users go through the i18n patterns
is_i18n_pattern = True
if (response.status_code == 404 and
is_i18n_pattern
and not translation.get_language_from_path(request.path_info)
and self.is_language_prefix_patterns_used()):
urlconf = getattr(request, 'urlconf', None)
language_path = '/%s%s' % (language, request.path_info)
path_valid = is_valid_path(language_path, urlconf)
if (not path_valid and settings.APPEND_SLASH
and not language_path.endswith('/')):
path_valid = is_valid_path("%s/" % language_path, urlconf)
if path_valid:
language_url = "%s://%s/%s%s" % (
request.is_secure() and 'https' or 'http',
request.get_host(), language, request.get_full_path())
return HttpResponseRedirect(language_url)
translation.deactivate()
patch_vary_headers(response, ('Accept-Language',))
if 'Content-Language' not in response:
response['Content-Language'] = language
return response
class VosaeCorsMiddleware(CorsMiddleware):
"""Middleware which adds headers for every API requests"""
def process_request(self, request):
if request.path.startswith('/api/'):
return super(VosaeCorsMiddleware, self).process_request(request)
return None
def process_response(self, request, response):
if request.path.startswith('/api/'):
return super(VosaeCorsMiddleware, self).process_response(request, response)
return response
|
Naeka/vosae-app
|
www/middleware.py
|
Python
|
agpl-3.0
| 2,513
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class ResUsers(models.Model):
_inherit = 'res.users'
has_group_warning_account = fields.Boolean(
'A warning can be set on a partner (Account)', compute='_compute_groups_id', inverse='_inverse_groups_id',
group_xml_id='account.group_warning_account')
has_group_cash_rounding = fields.Boolean(
'Allow the cash rounding management', compute='_compute_groups_id', inverse='_inverse_groups_id',
group_xml_id='account.group_cash_rounding')
group_account_user = fields.Selection(
selection=lambda self: self._get_group_selection('base.module_category_accounting_and_finance'),
string='Accounting & Finance', compute='_compute_groups_id', inverse='_inverse_groups_id',
category_xml_id='base.module_category_accounting_and_finance')
|
maxive/erp
|
addons/account/models/res_users.py
|
Python
|
agpl-3.0
| 933
|
#!/usr/bin/env python
# Copyright(C) 2012 thomasv@gitorious
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
import argparse
import ConfigParser
import logging
import socket
import sys
import time
import threading
import json
import os
import imp
if os.path.dirname(os.path.realpath(__file__)) == os.getcwd():
imp.load_module('electrumcreditbitserver', *imp.find_module('src'))
from electrumcreditbitserver import storage, networks, utils
from electrumcreditbitserver.processor import Dispatcher, print_log
from electrumcreditbitserver.server_processor import ServerProcessor
from electrumcreditbitserver.blockchain_processor import BlockchainProcessor
from electrumcreditbitserver.stratum_tcp import TcpServer
from electrumcreditbitserver.stratum_http import HttpServer
logging.basicConfig()
if sys.maxsize <= 2**32:
print "Warning: it looks like you are using a 32bit system. You may experience crashes caused by mmap"
if os.getuid() == 0:
print "Do not run this program as root!"
print "Run the install script to create a non-privileged user."
sys.exit()
def attempt_read_config(config, filename):
try:
with open(filename, 'r') as f:
config.readfp(f)
except IOError:
pass
def load_banner(config):
try:
with open(config.get('server', 'banner_file'), 'r') as f:
config.set('server', 'banner', f.read())
except IOError:
pass
def setup_network_params(config):
type = config.get('network', 'type')
params = networks.params.get(type)
utils.PUBKEY_ADDRESS = int(params.get('pubkey_address'))
utils.SCRIPT_ADDRESS = int(params.get('script_address'))
storage.GENESIS_HASH = params.get('genesis_hash')
if config.has_option('network', 'pubkey_address'):
utils.PUBKEY_ADDRESS = config.getint('network', 'pubkey_address')
if config.has_option('network', 'script_address'):
utils.SCRIPT_ADDRESS = config.getint('network', 'script_address')
if config.has_option('network', 'genesis_hash'):
storage.GENESIS_HASH = config.get('network', 'genesis_hash')
def create_config(filename=None):
config = ConfigParser.ConfigParser()
# set some defaults, which will be overwritten by the config file
config.add_section('server')
config.set('server', 'banner', 'Welcome to Creditbit Electrum!')
config.set('server', 'banner_file', '/etc/electrum-creditbit.banner')
config.set('server', 'host', 'localhost')
config.set('server', 'electrum_rpc_port', '8002')
config.set('server', 'report_host', '')
config.set('server', 'stratum_tcp_port', '50001')
config.set('server', 'stratum_http_port', '8081')
config.set('server', 'stratum_tcp_ssl_port', '50002')
config.set('server', 'stratum_http_ssl_port', '8082')
config.set('server', 'report_stratum_tcp_port', '50001')
config.set('server', 'report_stratum_http_port', '8081')
config.set('server', 'report_stratum_tcp_ssl_port', '50002')
config.set('server', 'report_stratum_http_ssl_port', '8082')
config.set('server', 'ssl_certfile', '')
config.set('server', 'ssl_keyfile', '')
config.set('server', 'irc', 'no')
config.set('server', 'irc_nick', '')
config.set('server', 'coin', 'creditbit')
config.set('server', 'logfile', '/var/log/electrum-creditbit.log')
config.set('server', 'donation_address', '')
config.set('server', 'max_subscriptions', '10000')
config.add_section('leveldb')
config.set('leveldb', 'path', '/dev/shm/electrum-creditbit_db')
config.set('leveldb', 'pruning_limit', '100')
config.set('leveldb', 'utxo_cache', str(64*1024*1024))
config.set('leveldb', 'hist_cache', str(128*1024*1024))
config.set('leveldb', 'addr_cache', str(16*1024*1024))
config.set('leveldb', 'profiler', 'no')
# set network parameters
config.add_section('network')
config.set('network', 'type', 'creditbit_main')
# try to find the config file in the default paths
if not filename:
for path in ('/etc/', ''):
filename = path + 'electrum-creditbit.conf'
if os.path.isfile(filename):
break
if not os.path.isfile(filename):
print 'could not find electrum configuration file "%s"' % filename
sys.exit(1)
attempt_read_config(config, filename)
load_banner(config)
return config
def run_rpc_command(params, electrum_rpc_port):
cmd = params[0]
import xmlrpclib
server = xmlrpclib.ServerProxy('http://localhost:%d' % electrum_rpc_port)
func = getattr(server, cmd)
r = func(*params[1:])
if cmd == 'sessions':
now = time.time()
print 'type address sub version time'
for item in r:
print '%4s %21s %3s %7s %.2f' % (item.get('name'),
item.get('address'),
item.get('subscriptions'),
item.get('version'),
(now - item.get('time')),
)
else:
print json.dumps(r, indent=4, sort_keys=True)
def cmd_banner_update():
load_banner(dispatcher.shared.config)
return True
def cmd_getinfo():
return {
'blocks': chain_proc.storage.height,
'peers': len(server_proc.peers),
'sessions': len(dispatcher.request_dispatcher.get_sessions()),
'watched': len(chain_proc.watched_addresses),
'cached': len(chain_proc.history_cache),
}
def cmd_sessions():
return map(lambda s: {"time": s.time,
"name": s.name,
"address": s.address,
"version": s.version,
"subscriptions": len(s.subscriptions)},
dispatcher.request_dispatcher.get_sessions())
def cmd_numsessions():
return len(dispatcher.request_dispatcher.get_sessions())
def cmd_peers():
return server_proc.peers.keys()
def cmd_numpeers():
return len(server_proc.peers)
def cmd_debug(s):
import traceback
    from guppy import hpy
hp = hpy()
if s:
try:
result = str(eval(s))
except:
err_lines = traceback.format_exc().splitlines()
result = '%s | %s' % (err_lines[-3], err_lines[-1])
return result
def get_port(config, name):
try:
return config.getint('server', name)
except:
return None
# global
shared = None
chain_proc = None
server_proc = None
dispatcher = None
transports = []
def start_server(config):
global shared, chain_proc, server_proc, dispatcher
logfile = config.get('server', 'logfile')
utils.init_logger(logfile)
host = config.get('server', 'host')
stratum_tcp_port = get_port(config, 'stratum_tcp_port')
stratum_http_port = get_port(config, 'stratum_http_port')
stratum_tcp_ssl_port = get_port(config, 'stratum_tcp_ssl_port')
stratum_http_ssl_port = get_port(config, 'stratum_http_ssl_port')
ssl_certfile = config.get('server', 'ssl_certfile')
ssl_keyfile = config.get('server', 'ssl_keyfile')
setup_network_params(config)
    if ssl_certfile == '' or ssl_keyfile == '':
stratum_tcp_ssl_port = None
stratum_http_ssl_port = None
print_log("Starting Electrum server on", host)
# Create hub
dispatcher = Dispatcher(config)
shared = dispatcher.shared
# handle termination signals
import signal
def handler(signum = None, frame = None):
print_log('Signal handler called with signal', signum)
shared.stop()
for sig in [signal.SIGTERM, signal.SIGHUP, signal.SIGQUIT]:
signal.signal(sig, handler)
# Create and register processors
chain_proc = BlockchainProcessor(config, shared)
dispatcher.register('blockchain', chain_proc)
server_proc = ServerProcessor(config, shared)
dispatcher.register('server', server_proc)
# Create various transports we need
if stratum_tcp_port:
tcp_server = TcpServer(dispatcher, host, stratum_tcp_port, False, None, None)
transports.append(tcp_server)
if stratum_tcp_ssl_port:
tcp_server = TcpServer(dispatcher, host, stratum_tcp_ssl_port, True, ssl_certfile, ssl_keyfile)
transports.append(tcp_server)
if stratum_http_port:
http_server = HttpServer(dispatcher, host, stratum_http_port, False, None, None)
transports.append(http_server)
if stratum_http_ssl_port:
http_server = HttpServer(dispatcher, host, stratum_http_ssl_port, True, ssl_certfile, ssl_keyfile)
transports.append(http_server)
for server in transports:
server.start()
def stop_server():
shared.stop()
server_proc.join()
chain_proc.join()
print_log("Electrum Server stopped")
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--conf', metavar='path', default=None, help='specify a configuration file')
parser.add_argument('command', nargs='*', default=[], help='send a command to the server')
args = parser.parse_args()
config = create_config(args.conf)
electrum_rpc_port = get_port(config, 'electrum_rpc_port')
if len(args.command) >= 1:
try:
run_rpc_command(args.command, electrum_rpc_port)
except socket.error:
print "server not running"
sys.exit(1)
sys.exit(0)
try:
run_rpc_command(['getpid'], electrum_rpc_port)
is_running = True
except socket.error:
is_running = False
if is_running:
print "server already running"
sys.exit(1)
start_server(config)
from SimpleXMLRPCServer import SimpleXMLRPCServer
server = SimpleXMLRPCServer(('localhost', electrum_rpc_port), allow_none=True, logRequests=False)
server.register_function(lambda: os.getpid(), 'getpid')
server.register_function(shared.stop, 'stop')
server.register_function(cmd_getinfo, 'getinfo')
server.register_function(cmd_sessions, 'sessions')
server.register_function(cmd_numsessions, 'numsessions')
server.register_function(cmd_peers, 'peers')
server.register_function(cmd_numpeers, 'numpeers')
server.register_function(cmd_debug, 'debug')
server.register_function(cmd_banner_update, 'banner_update')
server.socket.settimeout(1)
while not shared.stopped():
try:
server.handle_request()
except socket.timeout:
continue
except:
stop_server()
|
creditbit/electrum-creditbit-server
|
run_electrum_creditbit_server.py
|
Python
|
agpl-3.0
| 11,252
|
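Editor's note: the server above exposes its control commands over a localhost SimpleXMLRPCServer; run_rpc_command is the matching client. The client half reduces to a few lines, sketched here in the same Python 2 idiom as the file (port 8002 is the default electrum_rpc_port from create_config):
# Hedged sketch of the XML-RPC control client for the server above (Python 2).
import xmlrpclib

server = xmlrpclib.ServerProxy('http://localhost:8002')
print server.getinfo()      # dict of blocks/peers/sessions/watched/cached
print server.numsessions()  # backed by cmd_numsessions above
server.banner_update()      # re-reads the banner file via cmd_banner_update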
"""
Test cases to cover Accounts-related serializers of the User API application
"""
import logging
from django.test import TestCase
from django.test.client import RequestFactory
from testfixtures import LogCapture
from openedx.core.djangoapps.user_api.accounts.serializers import UserReadOnlySerializer
from common.djangoapps.student.models import UserProfile
from common.djangoapps.student.tests.factories import UserFactory
LOGGER_NAME = "openedx.core.djangoapps.user_api.accounts.serializers"
class UserReadOnlySerializerTest(TestCase): # lint-amnesty, pylint: disable=missing-class-docstring
def setUp(self):
super(UserReadOnlySerializerTest, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments
request_factory = RequestFactory()
self.request = request_factory.get('/api/user/v1/accounts/')
self.user = UserFactory.build(username='test_user', email='test_user@test.com')
self.user.save()
self.config = {
"default_visibility": "public",
"public_fields": [
'email', 'name', 'username'
],
}
def test_serializer_data(self):
"""
Test serializer return data properly.
"""
UserProfile.objects.create(user=self.user, name='test name')
data = UserReadOnlySerializer(self.user, configuration=self.config, context={'request': self.request}).data
assert data['username'] == self.user.username
assert data['name'] == 'test name'
assert data['email'] == self.user.email
def test_user_no_profile(self):
"""
Test serializer return data properly when user does not have profile.
"""
with LogCapture(LOGGER_NAME, level=logging.DEBUG) as logger:
data = UserReadOnlySerializer(self.user, configuration=self.config, context={'request': self.request}).data
logger.check(
(LOGGER_NAME, 'WARNING', 'user profile for the user [test_user] does not exist')
)
assert data['username'] == self.user.username
assert data['name'] is None
|
stvstnfrd/edx-platform
|
openedx/core/djangoapps/user_api/accounts/tests/test_serializers.py
|
Python
|
agpl-3.0
| 2,122
|
##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2022 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
import contextlib
import warnings
from _decimal import Decimal
from typing import Optional, List
from django.db import IntegrityError
from django.db.models import F, Case, When, IntegerField, QuerySet, Max, OuterRef, Subquery
from django.db.models import Q
from base.models.academic_year import AcademicYear
from base.models.education_group_year import EducationGroupYear
from base.models.enums.education_group_categories import Categories
from education_group.ddd.domain.exception import TrainingNotFoundException
from education_group.models.group import Group
from education_group.models.group_year import GroupYear
from osis_common.ddd import interface
from osis_common.ddd.interface import RootEntity
from program_management import formatter
from program_management.ddd import command
from program_management.ddd.business_types import *
from program_management.ddd.domain import exception
from program_management.ddd.domain import program_tree
from program_management.ddd.domain import program_tree_version
from program_management.ddd.domain.exception import ProgramTreeVersionNotFoundException
from program_management.ddd.domain.program_tree_version import ProgramTreeVersionIdentity, STANDARD, NOT_A_TRANSITION
from program_management.ddd.dtos import UniteEnseignementDTO, ContenuNoeudDTO, ProgrammeDeFormationDTO
from program_management.ddd.repositories import program_tree as program_tree_repository
from program_management.models.education_group_version import EducationGroupVersion
class ProgramTreeVersionRepository(interface.AbstractRepository):
@classmethod
def save(cls, entity: RootEntity) -> None:
raise NotImplementedError
@classmethod
def create(
cls,
program_tree_version: 'ProgramTreeVersion',
**_
) -> 'ProgramTreeVersionIdentity':
warnings.warn("DEPRECATED : use .save() function instead", DeprecationWarning, stacklevel=2)
offer_acronym = program_tree_version.entity_id.offer_acronym
year = program_tree_version.entity_id.year
try:
education_group_year_id = EducationGroupYear.objects.filter(
acronym=offer_acronym,
academic_year__year=year,
).values_list(
'pk', flat=True
)[0]
except IndexError:
raise TrainingNotFoundException(acronym=offer_acronym, year=year)
group_year_id = GroupYear.objects.filter(
partial_acronym=program_tree_version.program_tree_identity.code,
academic_year__year=program_tree_version.program_tree_identity.year,
).values_list(
'pk', flat=True
)[0]
try:
educ_group_version = EducationGroupVersion.objects.create(
version_name=program_tree_version.version_name,
title_fr=program_tree_version.title_fr,
title_en=program_tree_version.title_en,
offer_id=education_group_year_id,
transition_name=program_tree_version.entity_id.transition_name,
root_group_id=group_year_id,
)
_update_start_year_and_end_year(
educ_group_version,
program_tree_version.start_year,
program_tree_version.end_year_of_existence
)
        except IntegrityError:
raise exception.ProgramTreeAlreadyExistsException
return program_tree_version.entity_id
@classmethod
def update(cls, program_tree_version: 'ProgramTreeVersion', **_) -> 'ProgramTreeVersionIdentity':
warnings.warn("DEPRECATED : use .save() function instead", DeprecationWarning, stacklevel=2)
obj = EducationGroupVersion.objects.get(
offer__acronym=program_tree_version.entity_identity.offer_acronym,
offer__academic_year__year=program_tree_version.entity_identity.year,
version_name=program_tree_version.entity_identity.version_name,
transition_name=program_tree_version.entity_identity.transition_name,
)
obj.version_name = program_tree_version.version_name
obj.title_fr = program_tree_version.title_fr
obj.title_en = program_tree_version.title_en
obj.save()
_update_start_year_and_end_year(
obj,
program_tree_version.start_year,
program_tree_version.end_year_of_existence
)
return program_tree_version.entity_id
@classmethod
def get(cls, entity_id: 'ProgramTreeVersionIdentity') -> 'ProgramTreeVersion':
qs = _get_common_queryset().filter(
version_name=entity_id.version_name,
offer__acronym=entity_id.offer_acronym,
offer__academic_year__year=entity_id.year,
transition_name=entity_id.transition_name,
)
try:
return _instanciate_tree_version(qs.get())
except EducationGroupVersion.DoesNotExist:
raise exception.ProgramTreeVersionNotFoundException()
@classmethod
def get_last_in_past(cls, entity_id: 'ProgramTreeVersionIdentity') -> 'ProgramTreeVersion':
qs = EducationGroupVersion.objects.filter(
version_name=entity_id.version_name,
offer__acronym=entity_id.offer_acronym,
offer__academic_year__year__lt=entity_id.year,
transition_name=entity_id.transition_name
).order_by(
'offer__academic_year'
).values_list(
'offer__academic_year__year',
flat=True,
)
if qs:
last_past_year = qs.last()
last_identity = ProgramTreeVersionIdentity(
offer_acronym=entity_id.offer_acronym,
year=last_past_year,
version_name=entity_id.version_name,
transition_name=entity_id.transition_name,
)
return cls.get(entity_id=last_identity)
@classmethod
def search(
cls,
entity_ids: Optional[List['ProgramTreeVersionIdentity']] = None,
version_name: str = None,
offer_acronym: str = None,
transition_name: str = None,
code: str = None,
year: int = None,
**kwargs
) -> List['ProgramTreeVersion']:
qs = _get_common_queryset()
if "element_ids" in kwargs:
qs = qs.filter(root_group__element__in=kwargs['element_ids'])
if version_name is not None:
qs = qs.filter(version_name=version_name)
if offer_acronym is not None:
qs = qs.filter(offer__acronym=offer_acronym)
if transition_name is not None:
qs = qs.filter(transition_name=transition_name)
if year is not None:
qs = qs.filter(offer__academic_year__year=year)
if code is not None:
qs = qs.filter(root_group__partial_acronym=code)
results = []
for record_dict in qs:
results.append(_instanciate_tree_version(record_dict))
return results
@classmethod
def delete(
cls,
entity_id: 'ProgramTreeVersionIdentity',
delete_program_tree_service: interface.ApplicationService = None
) -> None:
program_tree_version = cls.get(entity_id)
EducationGroupVersion.objects.filter(
version_name=entity_id.version_name,
offer__acronym=entity_id.offer_acronym,
offer__academic_year__year=entity_id.year,
transition_name=entity_id.transition_name,
).delete()
root_node = program_tree_version.get_tree().root_node
cmd = command.DeleteProgramTreeCommand(code=root_node.code, year=root_node.year)
delete_program_tree_service(cmd)
@classmethod
def search_all_versions_from_root_node(cls, root_node_identity: 'NodeIdentity') -> List['ProgramTreeVersion']:
offer_ids = EducationGroupVersion.objects.filter(
root_group__partial_acronym=root_node_identity.code,
root_group__academic_year__year=root_node_identity.year
).values_list('offer_id', flat=True)
return _search_versions_from_offer_ids(list(offer_ids))
@classmethod
def search_all_versions_from_root_nodes(cls, node_identities: List['NodeIdentity']) -> List['ProgramTreeVersion']:
offer_ids = _search_by_node_entities(list(node_identities))
return _search_versions_from_offer_ids(offer_ids)
@classmethod
def search_versions_from_trees(cls, trees: List['ProgramTree']) -> List['ProgramTreeVersion']:
root_nodes_identities = [tree.root_node.entity_id for tree in trees]
tree_versions = cls.search_all_versions_from_root_nodes(root_nodes_identities)
result = []
for tree_version in tree_versions:
with contextlib.suppress(StopIteration):
tree_version.tree = next(tree for tree in trees if tree.entity_id == tree_version.program_tree_identity)
result.append(tree_version)
return result
@classmethod
def search_last_occurence(cls, from_year: int) -> List['ProgramTreeVersion']:
subquery_max_existing_year_for_offer = EducationGroupVersion.objects.filter(
offer__academic_year__year__gte=from_year,
offer__education_group=OuterRef("offer__education_group"),
version_name=OuterRef('version_name'),
transition_name=OuterRef('transition_name')
).values(
"offer__education_group"
).annotate(
max_year=Max("offer__academic_year__year")
).order_by(
"offer__education_group"
).values("max_year")
qs = _get_common_queryset().filter(
offer__academic_year__year=Subquery(subquery_max_existing_year_for_offer[:1])
)
results = []
for record_dict in qs:
results.append(_instanciate_tree_version(record_dict))
return results
@classmethod
def get_dto(cls, identity: ProgramTreeVersionIdentity) -> Optional['ProgrammeDeFormationDTO']:
pgm_tree_version = cls.get(identity)
return build_dto(pgm_tree_version, identity)
@classmethod
def get_dto_from_year_and_code(cls, code: str, year: int) -> Optional['ProgrammeDeFormationDTO']:
pgm_tree_version = cls.search(code=code, year=year)
if pgm_tree_version:
return build_dto(pgm_tree_version[0], pgm_tree_version[0].entity_identity)
raise ProgramTreeVersionNotFoundException
def _update_start_year_and_end_year(
educ_group_version: EducationGroupVersion,
start_year: int,
end_year_of_existence: int
):
# FIXME :: should add a field EducationgroupVersion.end_year
# FIXME :: and should remove GroupYear.end_year
# FIXME :: End_year is useful only for EducationGroupYear (training, minitraining) and programTreeVersions.
# FIXME :: End year is not useful for Groups. For business, Group doesn't have a 'end date'.
group = Group.objects.get(
groupyear__educationgroupversion__pk=educ_group_version.pk
)
end_year_id = None
if end_year_of_existence:
end_year_id = AcademicYear.objects.only('pk').get(year=end_year_of_existence).pk
group.end_year_id = end_year_id
group.start_year_id = AcademicYear.objects.only('pk').get(year=start_year).pk
group.save()
def _instanciate_tree_version(record_dict: dict) -> 'ProgramTreeVersion':
identity = program_tree_version.ProgramTreeVersionIdentity(
offer_acronym=record_dict['offer_acronym'],
year=record_dict['offer_year'],
version_name=record_dict['version_name'],
transition_name=record_dict['transition_name'],
)
return program_tree_version.ProgramTreeVersion(
entity_identity=identity,
entity_id=identity,
program_tree_identity=program_tree.ProgramTreeIdentity(record_dict['code'], record_dict['offer_year']),
program_tree_repository=program_tree_repository.ProgramTreeRepository(),
start_year=record_dict['start_year'],
title_fr=record_dict['version_title_fr'],
title_en=record_dict['version_title_en'],
end_year_of_existence=record_dict['end_year_of_existence'],
)
def _search_by_node_entities(entity_ids: List['NodeIdentity']) -> List[int]:
if bool(entity_ids):
qs = EducationGroupVersion.objects.all().values_list('offer_id', flat=True)
filter_search_from = _build_where_clause(entity_ids[0])
for identity in entity_ids[1:]:
filter_search_from |= _build_where_clause(identity)
qs = qs.filter(filter_search_from)
return list(qs)
return []
def _build_where_clause(node_identity: 'NodeIdentity') -> Q:
return Q(
Q(
root_group__partial_acronym=node_identity.code,
root_group__academic_year__year=node_identity.year
)
)
def _search_versions_from_offer_ids(offer_ids: List[int]) -> List['ProgramTreeVersion']:
qs = _get_common_queryset()
qs = qs.filter(
offer_id__in=offer_ids,
)
results = []
for record_dict in qs:
results.append(_instanciate_tree_version(record_dict))
return results
def _get_common_queryset() -> QuerySet:
return EducationGroupVersion.objects.all().order_by(
'version_name'
).annotate(
code=F('root_group__partial_acronym'),
offer_acronym=F('offer__acronym'),
offer_year=F('offer__academic_year__year'),
version_title_fr=F('title_fr'),
version_title_en=F('title_en'),
# FIXME :: should add a field EducationgroupVersion.end_year
# FIXME :: and should remove GroupYear.end_year
# FIXME :: End_year is useful only for EducationGroupYear (training, minitraining) and programTreeVersions.
# FIXME :: End year is not useful for Groups. For business, Group doesn't have a 'end date'.
end_year_of_existence=Case(
When(
Q(
offer__education_group_type__category__in={
Categories.TRAINING.name, Categories.MINI_TRAINING.name
}
) & Q(
version_name=STANDARD
) & Q(
transition_name=NOT_A_TRANSITION
),
then=F('offer__education_group__end_year__year')
),
default=F('root_group__group__end_year__year'),
output_field=IntegerField(),
),
start_year=Case(
When(
Q(
offer__education_group_type__category__in={
Categories.TRAINING.name, Categories.MINI_TRAINING.name
}
) & Q(
version_name=STANDARD
) & Q(
transition_name=NOT_A_TRANSITION
),
then=F('offer__education_group__start_year__year')
),
default=F('root_group__group__start_year__year'),
output_field=IntegerField(),
),
).values(
'code',
'offer_acronym',
'offer_year',
'version_name',
'version_title_fr',
'version_title_en',
'transition_name',
'end_year_of_existence',
'start_year',
)
def build_dto(pgm_tree_version: 'ProgramTreeVersion', identity: ProgramTreeVersionIdentity) \
-> 'ProgrammeDeFormationDTO':
tree = pgm_tree_version.get_tree()
contenu = _build_contenu(tree.root_node, )
return ProgrammeDeFormationDTO(
racine=contenu,
annee=identity.year,
sigle=identity.offer_acronym,
version=identity.version_name,
intitule_formation="{}{}".format(
tree.root_node.offer_title_fr,
"{}".format("[ {} ]".format(pgm_tree_version.title_fr) if pgm_tree_version.title_fr else '')
),
code=tree.entity_id.code,
transition_name=identity.transition_name
)
def _build_contenu(node: 'Node', lien_parent: 'Link' = None) -> 'ContenuNoeudDTO':
contenu_ordonne = []
for lien in node.children:
if lien.child.is_learning_unit():
contenu_ordonne.append(
UniteEnseignementDTO(
bloc=lien.block,
code=lien.child.code,
intitule_complet=lien.child.title,
quadrimestre=lien.child.quadrimester,
quadrimestre_texte=lien.child.quadrimester.value if lien.child.quadrimester else "",
credits_absolus=lien.child.credits,
volume_annuel_pm=lien.child.volume_total_lecturing,
volume_annuel_pp=lien.child.volume_total_practical,
obligatoire=lien.is_mandatory if lien else False,
session_derogation='',
credits_relatifs=lien.relative_credits,
)
)
else:
groupement_contenu = _build_contenu(lien.child, lien_parent=lien)
contenu_ordonne.append(groupement_contenu)
return ContenuNoeudDTO(
code=node.code,
intitule=node.title,
remarque=node.remark_fr,
obligatoire=lien_parent.is_mandatory if lien_parent else False,
credits=_get_credits(lien_parent),
intitule_complet=get_verbose_title_group(node),
contenu_ordonne=contenu_ordonne,
)
def get_verbose_title_group(node: 'NodeGroupYear') -> str:
if node.is_finality():
return format_complete_title_label(node, node.offer_partial_title_fr)
if node.is_option():
return format_complete_title_label(node, node.offer_title_fr)
else:
return node.group_title_fr
def format_complete_title_label(node, title_fr) -> str:
version_complete_label = formatter.format_version_complete_name(node, "fr-be")
return "{}{}".format(title_fr, version_complete_label)
def _get_credits(link: 'Link') -> Optional[Decimal]:
if link:
return link.relative_credits or link.child.credits or 0
return None
|
uclouvain/osis
|
program_management/ddd/repositories/program_tree_version.py
|
Python
|
agpl-3.0
| 19,400
|
# -*- coding: utf-8 -*-
# Copyright 2019 OpenSynergy Indonesia
# Copyright 2022 PT. Simetri Sinergi Indonesia
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "New Assignment Career Transition "
"Integration With Timesheet Computation",
"version": "8.0.1.0.0",
"category": "Human Resource",
"website": "https://simetri-sinergi.id",
"author": "OpenSynergy Indonesia, PT. Simetri Sinergi Indonesia",
"license": "AGPL-3",
"installable": True,
"auto_install": True,
"depends": [
"hr_assignment_transition",
"hr_career_transition_timesheet_computation",
],
"data": [],
}
|
open-synergy/opnsynid-hr
|
hr_assignment_transition_timesheet_computation/__openerp__.py
|
Python
|
agpl-3.0
| 655
|
# Django specific
from django.db.models import Q
# Tastypie specific
from tastypie import fields
from tastypie.constants import ALL
from tastypie.resources import ModelResource
from tastypie.serializers import Serializer
# Data specific
from iati.models import Activity
from api.v3.resources.helper_resources import TitleResource, DescriptionResource, FinanceTypeResource
from api.cache import NoTransformCache
from api.v3.resources.advanced_resources import OnlyCountryResource, OnlyRegionResource
from api.v3.resources.activity_view_resources import ActivityViewTiedStatusResource, ActivityViewAidTypeResource, ActivityViewOrganisationResource, ActivityViewActivityStatusResource, ActivityViewSectorResource, ActivityViewCollaborationTypeResource, ActivityViewFlowTypeResource, ActivityViewCurrencyResource
#cache specific
from django.http import HttpResponse
from cache.validator import Validator
class ActivityListResource(ModelResource):
reporting_organisation = fields.ForeignKey(ActivityViewOrganisationResource, 'reporting_organisation', full=True, null=True)
participating_organisations = fields.ToManyField(ActivityViewOrganisationResource, 'participating_organisation', full=True, null=True)
activity_status = fields.ForeignKey(ActivityViewActivityStatusResource, 'activity_status', full=True, null=True)
countries = fields.ToManyField(OnlyCountryResource, 'recipient_country', full=True, null=True)
regions = fields.ToManyField(OnlyRegionResource, 'recipient_region', full=True, null=True)
sectors = fields.ToManyField(ActivityViewSectorResource, 'sector', full=True, null=True)
titles = fields.ToManyField(TitleResource, 'title_set', full=True, null=True)
descriptions = fields.ToManyField(DescriptionResource, 'description_set', full=True, null=True)
collaboration_type = fields.ForeignKey(ActivityViewCollaborationTypeResource, attribute='collaboration_type', full=True, null=True)
default_flow_type = fields.ForeignKey(ActivityViewFlowTypeResource, attribute='default_flow_type', full=True, null=True)
default_finance_type = fields.ForeignKey(FinanceTypeResource, attribute='default_finance_type', full=True, null=True)
default_aid_type = fields.ForeignKey(ActivityViewAidTypeResource, attribute='default_aid_type', full=True, null=True)
default_tied_status = fields.ForeignKey(ActivityViewTiedStatusResource, attribute='default_tied_status', full=True, null=True)
default_currency = fields.ForeignKey(ActivityViewCurrencyResource, attribute='default_currency', full=True, null=True)
class Meta:
queryset = Activity.objects.all()
resource_name = 'activity-list'
max_limit = 100
serializer = Serializer(formats=['xml', 'json'])
excludes = ['date_created']
ordering = ['start_actual', 'start_planned', 'end_actual', 'end_planned', 'sectors', 'total_budget']
filtering = {
'iati_identifier': 'exact',
'start_planned': ALL,
'start_actual': ALL,
'end_planned' : ALL,
'end_actual' : ALL,
'total_budget': ALL,
'sectors' : ('exact', 'in'),
'regions': ('exact', 'in'),
'countries': ('exact', 'in'),
'reporting_organisation': ('exact', 'in')
}
cache = NoTransformCache()
def apply_filters(self, request, applicable_filters):
base_object_list = super(ActivityListResource, self).apply_filters(request, applicable_filters)
query = request.GET.get('query', None)
filters = {}
if query:
qset = (
Q(id__in=query, **filters) |
Q(activity_recipient_country__country__name__in=query, **filters) |
Q(title__title__icontains=query, **filters) #|
# Q(description__description__icontains=query, **filters)
)
return base_object_list.filter(qset).distinct()
return base_object_list.filter(**filters).distinct()
def get_list(self, request, **kwargs):
# check if call is cached using validator.is_cached
# check if call contains flush, if it does the call comes from the cache updater and shouldn't return cached results
validator = Validator()
cururl = request.META['PATH_INFO'] + "?" + request.META['QUERY_STRING']
if not 'flush' in cururl and validator.is_cached(cururl):
return HttpResponse(validator.get_cached_call(cururl), mimetype='application/json')
else:
return super(ActivityListResource, self).get_list(request, **kwargs)
|
schlos/OIPA-V2.1
|
OIPA/api/v3/resources/activity_list_resources.py
|
Python
|
agpl-3.0
| 4,609
|
# Nessus results viewing tools
#
# Developed by Felix Ingram, f.ingram@gmail.com, @lllamaboy
# http://www.github.com/nccgroup/lapith
#
# Released under AGPL. See LICENSE for more information
import wx
import os
from model.Nessus import NessusFile, NessusTreeItem, MergedNessusReport, NessusReport, NessusItem
import difflib
from drop_target import MyFileDropTarget
from view import (
ViewerView,
SaveDialog,
ID_Load_Files,
ID_Merge_Files,
ID_Generate_CSV,
ID_Generate_VulnXML,
ID_Generate_RST,
ID_About,
)
from wx.lib.wordwrap import wordwrap
import csv
from xml.sax.saxutils import escape
from datetime import datetime
from jinja2 import Template
SEVERITY = {0:"Other", 1:"Low", 2:"Med", 3:"High", 4:"Critical"}
OUTPUT_TEMPLATE=Template("""\
{{item.name}}
{{hosts_count}} hosts with this issue
{% for host in hosts %}
{{host}}{% endfor %}
---------------------------------------------
{% for host in identical_hosts %}
{{host}}{% endfor %}
{{ initial_output }}
""")
RST_TEMPLATE=Template("""\
{%- for vuln in vulns %}{% if not vuln.name.startswith("PORT:") %}{{ vuln.name }}
{% for a in vuln.name %}={% endfor %}
.. affectedhosts::{% for host in merged_scans.hosts_with_pid(vuln.pid) %}{% for item in host.items_for_pid(vuln.pid) %}
{{ host.address }}, {{ item.info_dict.port }}/{{ item.info_dict.protocol }}
{%- endfor %}{%- endfor %}
:severity:`{{ vuln.item.info_dict["severity_text"] }}`
:cvss:`{{ vuln.item.info_dict["cvss_base_score"] }}`
:cvss:`{{ vuln.item.info_dict["cvss_vector"] }}`
Description
-----------
{{ "\n".join(vuln.issue.initial_output.splitlines()[7:])|replace("Plugin Output:", "Plugin Output::\n") }}
{% endif %}
Recommendation
--------------
References
----------
{% if vuln.item.info_dict["cve"] %}
CVE:
{% for cve in vuln.item.info_dict["cve"] %}
{{ cve }}: `http://web.nvd.nist.gov/view/vuln/detail?vulnId={{ cve }}`
{%- endfor %}
{%- endif %}
{% if vuln.item.info_dict["bid"] %}
BID:
{% for bid in vuln.item.info_dict["bid"] %}
{{ bid }}: `http://www.securityfocus.com/bid/{{ bid }}`
{%- endfor %}
{%- endif %}
{% if vuln.item.info_dict["xref"] %}
Other References:
{% for xref in vuln.item.info_dict["xref"] %}
{{ xref }}
{%- endfor %}
{%- endif %}
{% if vuln.item.info_dict["see_also"] %}
See also:
{% for xref in vuln.item.info_dict["see_also"] %}
{{ xref }}
{%- endfor %}
{%- endif %}
{% endfor %}
""")
VULNXML_TEMPLATE=Template("""<?xml version="1.0"?>
<Results Date="{{ timestamp|e }}" Tool="Lapith">
<Hosts>{% for host in hosts %}
<Host dnsname="{{ host.dns_name|e }}" ipv6="" ipv4="{{ host.address|e }}">
<Vulns>
{% for vuln in host.items %}<Vuln TestPhase="" id="{{ vuln.pid|e }}">
<Data Type="afh:TCP Ports" encoding="">{{ vuln.info_dict.port }}/{{ vuln.info_dict.protocol }}</Data>
</Vuln>
{% endfor %}</Vulns>
</Host>
{% endfor %}</Hosts>
<Vulns>
{% for vuln in vulns %}
<Vuln group="" id="{{ vuln.pid|e }}">
<Title>{{ vuln.name|e }}</Title>
<Description encoding="">
{{ "\n".join(vuln.issue.initial_output.splitlines()[7:])|replace("Plugin Output:", "Plugin Output::\n") | e}}
------------------------
{{ vuln.diffs|e }}
</Description>
<Recommendation encoding=""></Recommendation>
<References/>
<Category/>
<Patches/>
<CVSS>
<OverallScore>{% if vuln.item.info_dict["cvss_base_score"] %}{{ vuln.item.info_dict["cvss_base_score"]|e }}{% else %}{{ vuln.severity|e }}{% endif %}</OverallScore>
<Vector>{{ vuln.item.info_dict["cvss_vector"]|replace("CVSS2#", "")|e }}</Vector>
</CVSS>
<Severity>{{ vuln.severity|e }}</Severity>
</Vuln>
{% endfor %}
</Vulns>
<Groups/>
</Results>
""")
ID_Save_Results = wx.NewId()
class ViewerController:
def __init__(self):
# def initView(self):
self.view = ViewerView()
## Instance vars
self.files = []
self.tests = []
self.tree_hooks = {}
self._search_text = ""
## Flags
self._in_search = False
## Dialog paths
self._save_path = os.getcwd()
self._open_path = os.getcwd()
self.create_tree()
drop_target = MyFileDropTarget(self.view.tree,
{
"nessus": self.drop_action,
},
self.view.display.write
)
self.view.tree.SetDropTarget(drop_target)
self.bind_events()
self.view.Layout()
self.view.Show()
#self.view.search.SetFocus()
def drop_action(self, file_):
self.files.append(NessusFile(file_))
self.create_scan_trees()
def on_do_search(self, event):
text = self.view.search.GetValue()
self.search(text)
def search(self, text):
self._in_search = True
self._search_text = text
for host in self.files:
pass
#hook = self.hooks[host.name][FILES]
#if self.view.tree.IsExpanded(hook): ## Only need to do it for expanded
#files = host.get_full_files(search=text)
#self.view.tree.DeleteChildren(hook)
#for f in files:
#item = self.view.tree.AppendItem(hook, f.name, 0)
#self.view.tree.SetPyData(item, f)
#self.view.tree.SortChildren(hook)
self.view.search.SetFocus()
self._in_search = False
def add_output_page(self, title, text, font="Courier New"):
display = self.view.CreateTextCtrl(font=font)
display.SetValue(text)
self.delete_page_with_title(title)
self.view.notebook.AddPage(display, title)
return self.view.notebook.GetPageIndex(display)
def load_files(self, event):
wildcard = "Nessus files (*.nessus)|*.nessus|" \
"All files (*.*)|*.*"
dlg = wx.FileDialog(
self.view, message="Choose a file",
defaultDir=os.getcwd(),
defaultFile="",
wildcard=wildcard,
style=wx.OPEN | wx.MULTIPLE | wx.CHANGE_DIR
)
if dlg.ShowModal() == wx.ID_OK:
# This returns a Python list of files that were selected.
paths = dlg.GetPaths()
if paths:
for path in paths:
self.files.append(NessusFile(path))
self._open_path = paths[0].rsplit(os.sep, 1)[0]
dlg.Destroy()
self.create_scan_trees()
def delete_page_with_title(self, title):
notebook = self.view.notebook
page_count = notebook.GetPageCount()
for i in xrange(page_count):
if notebook.GetPageText(i) == title:
notebook.DeletePage(i)
def create_tree(self):
self.view.tree.DeleteAllItems()
self.view.tree.AddRoot("Scans")
self.create_scan_trees()
self.view.tree.Expand(self.view.tree.GetRootItem())
def create_scan_trees(self):
scans = self.view.tree.GetRootItem()
self.view.tree.DeleteChildren(scans)
for file_ in self.files:
self.create_scan_tree(file_, scans)
self.view.tree.Expand(scans)
def sorted_tree_items(self, report, items):
list_ = list(set([NessusTreeItem(report, i) for i in items]))
list_.sort()
return list_
def create_scan_tree(self, file_, hosts):
reports = file_.get_all_reports()
scans_hook = self.view.tree.GetRootItem()
file_hook = self.view.tree.AppendItem(scans_hook, file_.short_name, 0)
for report in reports:
scan = self.view.tree.AppendItem(file_hook, report.reportname, 0)
self.view.tree.SetPyData(scan, report)
info = self.view.tree.AppendItem(scan, "Info", 0)
self.view.tree.SetPyData(info, report.info)
if report.policy:
policy = self.view.tree.AppendItem(scan, "Policy", 0)
self.view.tree.SetPyData(policy, report.policy)
hosts = self.view.tree.AppendItem(scan, "Hosts", 0)
self.view.tree.SetPyData(hosts, "\n".join(str(h) for h in report.hosts))
items_hook = self.view.tree.AppendItem(scan, "Findings", 0)
self.view.tree.SetPyData(items_hook, self.sorted_tree_items(report, report.criticals+report.highs+report.meds+report.lows+report.others))
critical_hook = self.view.tree.AppendItem(items_hook, "Criticals", 0)
self.view.tree.SetPyData(critical_hook, self.sorted_tree_items(report, report.criticals))
high_hook = self.view.tree.AppendItem(items_hook, "Highs", 0)
self.view.tree.SetPyData(high_hook, self.sorted_tree_items(report, report.highs))
med_hook = self.view.tree.AppendItem(items_hook, "Meds", 0)
self.view.tree.SetPyData(med_hook, self.sorted_tree_items(report, report.meds))
low_hook = self.view.tree.AppendItem(items_hook, "Lows", 0)
self.view.tree.SetPyData(low_hook, self.sorted_tree_items(report, report.lows))
other_hook = self.view.tree.AppendItem(items_hook, "Others", 0)
self.view.tree.SetPyData(other_hook, self.sorted_tree_items(report, report.others))
for crit in self.sorted_tree_items(report, report.criticals):
item = self.view.tree.AppendItem(critical_hook, str(crit), 0)
self.view.tree.SetPyData(item, crit)
for high in self.sorted_tree_items(report, report.highs):
item = self.view.tree.AppendItem(high_hook, str(high), 0)
self.view.tree.SetPyData(item, high)
for med in self.sorted_tree_items(report, report.meds):
item = self.view.tree.AppendItem(med_hook, str(med), 0)
self.view.tree.SetPyData(item, med)
for low in self.sorted_tree_items(report, report.lows):
item = self.view.tree.AppendItem(low_hook, str(low), 0)
self.view.tree.SetPyData(item, low)
for other in [NessusTreeItem(report, o) for o in report.others]:
item = self.view.tree.AppendItem(other_hook, str(other), 0)
self.view.tree.SetPyData(item, other)
def get_item_output(self, item):
hosts = item.report.hosts_with_pid(item.pid)
initial_output = hosts[0].plugin_output(item.pid)
diffs = []
for host in hosts[1:]:
diff = difflib.unified_diff(initial_output.splitlines(), host.plugin_output(item.pid).splitlines())
diffs.append((host, "\n".join(list(diff))))
initial_output = item.name.strip() + "\n\n" + initial_output
diff_output = ""
identical_hosts = [hosts[0]]
for (host, diff) in diffs:
if diff:
diff_output += "=" * 70 + "\n\n%s\n%s\n\n" % (host, diff)
else:
identical_hosts.append(host)
output = OUTPUT_TEMPLATE.render(
item=item,
hosts_count=len(hosts),
hosts=hosts,
identical_hosts=identical_hosts,
initial_output=initial_output
)
return output, diff_output, dict(item=item, hosts=hosts, identical_hosts=identical_hosts, initial_output=initial_output)
# output = item.name+"\n"
# output += "%s hosts with this issue\n" % len(hosts)
# output += "\n".join(str(i).split()[0] for i in hosts)
# output += "\n"+"-"*20+"\n"
# output += "\n".join(str(i) for i in identical_hosts) + "\n\n" + initial_output
# return output, diff_output
def show_nessus_item(self, item):
output, diff_output, _ = self.get_item_output(item)
diff_title = "Diffs"
self.delete_page_with_title(diff_title)
display = self.view.display
if diff_output:
self.add_output_page(diff_title, diff_output, font="Courier New")
display.SetValue(output)
def generate_rst(self, event):
saveas = SaveDialog(self.view, defaultDir=self._save_path, message="Save RST as...").get_choice()
if saveas:
merged_scans = MergedNessusReport(self.files)
if not saveas.endswith(".rst"):
saveas = saveas+".rst"
sorted_tree_items = self.sorted_tree_items(merged_scans, merged_scans.criticals+merged_scans.highs+merged_scans.meds+merged_scans.lows+merged_scans.others)
with open(saveas, "wb") as f:
for item in sorted_tree_items:
issue, diffs, meta = self.get_item_output(item)
item.issue = meta
item.diffs = diffs
item.severity = SEVERITY[item.item.severity]
f.write(RST_TEMPLATE.render(
timestamp=datetime.now(),
hosts=merged_scans.hosts,
vulns=sorted_tree_items,
merged_scans=merged_scans,
)
)
def generate_vulnxml(self, event):
saveas = SaveDialog(self.view, defaultDir=self._save_path, message="Save VulnXML as...").get_choice()
if saveas:
merged_scans = MergedNessusReport(self.files)
if not saveas.endswith(".xml"):
saveas = saveas+".xml"
sorted_tree_items = self.sorted_tree_items(merged_scans, merged_scans.criticals+merged_scans.highs+merged_scans.meds+merged_scans.lows+merged_scans.others)
with open(saveas, "wb") as f:
for item in sorted_tree_items:
issue, diffs, meta = self.get_item_output(item)
item.issue = meta
item.diffs = diffs
item.severity = SEVERITY[item.item.severity]
f.write(VULNXML_TEMPLATE.render(
timestamp=datetime.now(),
hosts=merged_scans.hosts,
vulns=sorted_tree_items,
merged_scans=merged_scans,
)
)
def generate_csv(self, event):
saveas = SaveDialog(self.view, defaultDir=self._save_path, message="Save csv as...").get_choice()
if saveas:
merged_scans = MergedNessusReport(self.files)
if not saveas.endswith(".csv"):
saveas = saveas+".csv"
sorted_tree_items = self.sorted_tree_items(merged_scans, merged_scans.criticals+merged_scans.highs+merged_scans.meds+merged_scans.lows+merged_scans.others)
with open(saveas, "wb") as f:
csv_writer = csv.writer(f)
csv_writer.writerow(["PID","Severity","Hosts","Output","Diffs"])
for item in sorted_tree_items:
csv_writer.writerow([
item.pid,
SEVERITY[item.item.severity],
"\n".join(x.address for x in merged_scans.hosts_with_pid(item.pid)),
self.get_item_output(item)[0],
self.get_item_output(item)[1],
]
)
def combine_files(self, event):
scans_hook = self.view.tree.GetRootItem()
merged_scans = MergedNessusReport(self.files)
if merged_scans.get_all_reports():
merge_hook = self.view.tree.AppendItem(scans_hook, "Merged Files", 0)
items_hook = self.view.tree.AppendItem(merge_hook, "Findings", 0)
self.view.tree.SetPyData(items_hook, self.sorted_tree_items(merged_scans, merged_scans.criticals+merged_scans.highs+merged_scans.meds+merged_scans.lows+merged_scans.others))
critical_hook = self.view.tree.AppendItem(items_hook, "Critical", 0)
self.view.tree.SetPyData(critical_hook, self.sorted_tree_items(merged_scans, merged_scans.criticals))
high_hook = self.view.tree.AppendItem(items_hook, "Highs", 0)
self.view.tree.SetPyData(high_hook, self.sorted_tree_items(merged_scans, merged_scans.highs))
med_hook = self.view.tree.AppendItem(items_hook, "Meds", 0)
self.view.tree.SetPyData(med_hook, self.sorted_tree_items(merged_scans, merged_scans.meds))
low_hook = self.view.tree.AppendItem(items_hook, "Lows", 0)
self.view.tree.SetPyData(low_hook, self.sorted_tree_items(merged_scans, merged_scans.lows))
other_hook = self.view.tree.AppendItem(items_hook, "Others", 0)
self.view.tree.SetPyData(other_hook, self.sorted_tree_items(merged_scans, merged_scans.others))
for crit in self.sorted_tree_items(merged_scans, merged_scans.criticals):
item = self.view.tree.AppendItem(critical_hook, str(crit), 0)
self.view.tree.SetPyData(item, crit)
for high in self.sorted_tree_items(merged_scans, merged_scans.highs):
item = self.view.tree.AppendItem(high_hook, str(high), 0)
self.view.tree.SetPyData(item, high)
for med in self.sorted_tree_items(merged_scans, merged_scans.meds):
item = self.view.tree.AppendItem(med_hook, str(med), 0)
self.view.tree.SetPyData(item, med)
for low in self.sorted_tree_items(merged_scans, merged_scans.lows):
item = self.view.tree.AppendItem(low_hook, str(low), 0)
self.view.tree.SetPyData(item, low)
for other in merged_scans.others:
item = self.view.tree.AppendItem(other_hook, str(other), 0)
self.view.tree.SetPyData(item, other)
self.view.tree.Expand(scans_hook)
def bind_events(self):
# Toolbar events
self.view.Bind(wx.EVT_TOOL, self.load_files, id=ID_Load_Files)
self.view.Bind(wx.EVT_TOOL, self.combine_files, id=ID_Merge_Files)
self.view.Bind(wx.EVT_TOOL, self.generate_csv, id=ID_Generate_CSV)
self.view.Bind(wx.EVT_TOOL, self.generate_vulnxml, id=ID_Generate_VulnXML)
self.view.Bind(wx.EVT_TOOL, self.generate_rst, id=ID_Generate_RST)
# Tree clicking and selections
self.view.tree.Bind(wx.EVT_TREE_SEL_CHANGED, self.on_sel_changed, self.view.tree)
self.view.tree.Bind(wx.EVT_TREE_ITEM_MENU, self.on_right_click, self.view.tree)
# Tab close event - will prevent closing the output tab
self.view.Bind(wx.aui.EVT_AUINOTEBOOK_PAGE_CLOSE, self.on_page_close)
# Menu stuff
self.view.Bind(wx.EVT_MENU, self.load_files, id=wx.ID_OPEN)
self.view.Bind(wx.EVT_MENU, self.extract_results, id=ID_Save_Results)
self.view.Bind(wx.EVT_MENU, self.on_exit, id=wx.ID_EXIT)
self.view.Bind(wx.EVT_MENU, self.on_about, id=ID_About)
## Search
#self.view.search.Bind(wx.EVT_TEXT_ENTER, self.on_do_search)
#self.view.search.Bind(wx.EVT_TEXT, self.on_do_search)
def extract_results(self, event):
item = self.view.tree.GetSelection()
data = self.view.tree.GetItemData(item).GetData()
saveas = SaveDialog(self.view, defaultDir=self._save_path, message="Save results as...").get_choice()
if saveas:
with open(saveas, "w") as f:
output = ""
if isinstance(data, list):
for item in data:
output, diff_output, _ = self.get_item_output(item)
f.write("="*20+"\n")
f.write(output)
f.write(diff_output)
elif isinstance(data, NessusReport):
pass
elif isinstance(data, MergedNessusReport):
pass
def on_right_click(self, event):
item = event.GetItem()
self.view.tree.SelectItem(item)
data = self.view.tree.GetItemData(item).GetData()
if isinstance(data, NessusReport) or isinstance(data, MergedNessusReport) or isinstance(data, list):
menu = wx.Menu()
menu.Append(ID_Save_Results, "Save all results")
self.view.PopupMenu(menu)
menu.Destroy()
def on_page_close(self, event):
## We don't want the user to be able to close any tabs
## TODO Find a way to diable the cross on the GUI
event.Veto()
def on_sel_changed(self, event):
item = event.GetItem()
tree = self.view.tree
data = tree.GetItemData(item).GetData()
if isinstance(data, NessusReport):
self.view.display.Clear()
self.view.display.SetValue(data.reportname)
self.view.notebook.SetSelection(0)
self.view.tree.SetFocus()
elif isinstance(data, NessusItem):
self.view.display.Clear()
self.view.display.SetValue(data.output.replace('\\n', "\n"))
self.view.notebook.SetSelection(0)
self.view.tree.SetFocus()
elif isinstance(data, NessusTreeItem):
self.show_nessus_item(data)
self.view.tree.SetFocus()
elif isinstance(data, str):
self.view.display.Clear()
self.view.display.SetValue(data.replace('\\n', "\n"))
self.view.notebook.SetSelection(0)
self.view.tree.SetFocus()
def on_exit(self, event):
self.view.Close()
def on_about(self, event):
## Just display a dialog box
info = wx.AboutDialogInfo()
info.Name = "Nessus Results - The right way around"
info.Version = "1.0.2\n"
info.Copyright = "(C) 2012 Felix Ingram\n"
info.Description = wordwrap(
"Sometimes you need Nessus results on a per-issue basis, "
"sometimes you need to combine a load of reports into one.",
350, wx.ClientDC(self.view))
info.Developers = [ "Felix Ingram",]
## Then we call wx.AboutBox giving it that info object
wx.AboutBox(info)
|
nccgroup/lapith
|
controller/viewer_controller.py
|
Python
|
agpl-3.0
| 22,502
|
# Generated by Django 2.2.12 on 2020-04-29 05:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('edxval', '0001_squashed_0016_add_transcript_credentials_model'),
]
operations = [
migrations.AddField(
model_name='video',
name='error_description',
field=models.TextField(blank=True, null=True, verbose_name='Error Description'),
),
]
|
edx/edx-val
|
edxval/migrations/0002_add_error_description_field.py
|
Python
|
agpl-3.0
| 464
|
# -*- coding: utf-8 -*-
# import the main window object (mw) from ankiqt
from aqt import mw
# import the "show info" tool from utils.py
from aqt.utils import showInfo
# import all of the Qt GUI library
from aqt.qt import *
from anki.exporting import *
from anki.hooks import addHook
class LatexNoteExporter(Exporter):
key = _("Notes in Latex")
ext = ".tex"
def __init__(self, col):
Exporter.__init__(self, col)
self.includeID = True
self.includeTags = True
def replaceLineBreaks(self,text):
"Replace html-line breaks by plain-text line breaks"
#remove plain-text line breaks (most probobly there aren't any to begin with)
text = text.replace("\n","")
#convert some html
htmldict = {r"<br>":"\n",
r"<br />":"\n",
r"<div>":"\n",
r"</div>":"",
r" ":r" "}
for k, v in htmldict.items():
text = text.replace(k, v)
return text
def stripNewLines(self,text):
"Remove newlines at beginning and end of text, and replace double blank lines by single blank lines"
text = re.sub("\n\s*\n+","\n\n",text).strip()
#the following is superfluous as its done automatically by strip()
#while len(text) > 0 and text[1] == "\n":
# text = text[1:].strip()
#while len(text) > 0 and text[-1] == "\n":
# text = text[:-1].strip()
return text
def htmlToLatex(self, text):
"Remove [latex], [/latex] and html"
#convert some html
htmldict = {r"&":r"&",
r"<":r"<",
r">":r">"}
for k, v in htmldict.items():
text = text.replace(k, v)
#remove all remaining html
text = re.sub("<[^<]+?>", "", text)
#remove latex marks and any surrounding line breaks
text = re.sub("\n*\[latex\]","",text)
text = re.sub("\[/latex\]\n*","",text)
return text
def doExport(self, file):
cardIds = self.cardIds()
data = []
model = mw.col.getCard(cardIds[0]).model()
for id, flds, tags in self.col.db.execute("""
select guid, flds, tags from notes
where id in
(select nid from cards
where cards.id in %s)""" % ids2str(cardIds)):
latexnote = []
TAB = " "
latexnote.append(r"\begin{note}")
# fields
for f in splitFields(flds):
newf = self.replaceLineBreaks(f)
if newf.find("[latex]") != -1:
#treat as latex field
newf = self.htmlToLatex(newf)
if newf.find("\n") == -1:
#field consists of a single line
latexnote.append(TAB + r"\xfield{" + newf + "}")
else:
newf = self.stripNewLines(newf)
newf = TAB + TAB + newf.replace("\n","\n" + TAB + TAB)
latexnote.append(TAB + r"\begin{field}" + "\n" + newf + "\n" + TAB + r"\end{field}")
else:
#treat as plain-text field
if newf.find("\n") == -1:
#field consists of a single line
latexnote.append(TAB + r"\xplain{" + newf + "}")
else:
newf = self.stripNewLines(newf)
newf = TAB + TAB + newf.replace("\n","\n" + TAB + TAB)
latexnote.append(TAB + r"\begin{plain}" + "\n" + newf + "\n" + TAB + r"\end{plain}")
#remove empty fields at the end of the note:
while latexnote[-1] == TAB + r"\xplain{}":
latexnote.pop()
# tags
if self.includeTags:
cleantag = tags.strip()
if cleantag != "":
latexnote.append(TAB + r"\tags{" + tags.strip() + r"}")
latexnote.append(r"\end{note}" + "\n")
data.append("\n".join(latexnote))
self.count = len(data)
#preamble =r"""# -- I've decided that this should be placed in model["latexPre"] by the user
#\newenvironment{note}{}{\begin{center}\rule{\textwidth}{2pt}\end{center}}
#\newenvironment{field}{}{\begin{center}\rule{\textwidth}{0.4pt}\end{center}}
#\newcommand*{\tags}[1]{\paragraph{tags: }#1}"""
out = "% -*- coding: utf-8 -*-\n" + model["latexPre"] + "\n" + "\n".join(data) + "\n" + model["latexPost"]
file.write(out.encode("utf-8"))
def addLatexExporterToList(exps):
exps.append((LatexNoteExporter.key + " (*" + LatexNoteExporter.ext + r")", LatexNoteExporter))
addHook("exportersList", addLatexExporterToList);
|
TentativeConvert/LaTeX-Note-Importer-for-Anki
|
Anki 2.1/latexbiport/latexexport.py
|
Python
|
agpl-3.0
| 4,809
|
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
from models import Place
class SimpleTest(TestCase):
def test_simple_place_creation(self):
"""
Creates test place
"""
places = Place.objects.filter(name="Test Place")
[place.delete() for place in places]
place = Place()
place.name = "Test Place"
place.capacity = 20
place.save()
place = Place.objects.filter(name="Test Place")
print place
self.assertNotEqual(place, None)
|
TheCodingMonkeys/checkin-at-fmi
|
checkinatfmi_project/university/tests.py
|
Python
|
agpl-3.0
| 694
|
""" Users API URI specification """
from django.conf import settings
from django.conf.urls import url
from django.db import transaction
from edx_solutions_api_integration.users import views as users_views
from rest_framework.urlpatterns import format_suffix_patterns
COURSE_ID_PATTERN = settings.COURSE_ID_PATTERN
urlpatterns = [
url(r'^metrics/cities/$', users_views.UsersMetricsCitiesList.as_view(), name='apimgr-users-metrics-cities-list'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/courses/grades$',
users_views.UsersCoursesGradesList.as_view(), name='users-courses-grades-list'),
url(
r'^(?P<user_id>[a-zA-Z0-9]+)/courses/{}/grades$'.format(COURSE_ID_PATTERN),
transaction.non_atomic_requests(users_views.UsersCoursesGradesDetail.as_view()),
name='users-courses-grades-detail'
),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/courses/{}/metrics/social/$'.format(COURSE_ID_PATTERN),
users_views.UsersSocialMetrics.as_view(), name='users-social-metrics'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/courses/{}$'.format(COURSE_ID_PATTERN),
users_views.UsersCoursesDetail.as_view(), name='users-courses-detail'),
url(
r'^(?P<user_id>[a-zA-Z0-9]+)/courses/*$',
transaction.non_atomic_requests(users_views.UsersCoursesList.as_view()),
name='users-courses-list'
),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/groups/*$', users_views.UsersGroupsList.as_view(), name='users-groups-list'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/groups/(?P<group_id>[0-9]+)$',
users_views.UsersGroupsDetail.as_view(), name='users-groups-detail'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/preferences$',
users_views.UsersPreferences.as_view(), name='users-preferences-list'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/preferences/(?P<preference_id>[a-zA-Z0-9_]+)$',
users_views.UsersPreferencesDetail.as_view(), name='users-preferences-detail'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/organizations/$',
users_views.UsersOrganizationsList.as_view(), name='users-organizations-list'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/roles/(?P<role>[a-z_]+)/courses/{}$'.format(COURSE_ID_PATTERN),
users_views.UsersRolesCoursesDetail.as_view(), name='users-roles-courses-detail'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/roles/*$', users_views.UsersRolesList.as_view(), name='users-roles-list'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/workgroups/$',
users_views.UsersWorkgroupsList.as_view(), name='users-workgroups-list'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/notifications/(?P<msg_id>[0-9]+)/$',
users_views.UsersNotificationsDetail.as_view(), name='users-notifications-detail'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)$', users_views.UsersDetail.as_view(), name='apimgr-users-detail'),
url(r'^$', users_views.UsersList.as_view(), name='apimgr-users-list'),
url(r'mass-details/$', users_views.MassUsersDetailsList.as_view(), name='apimgr-mass-users-detail'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/courses/progress',
users_views.UsersCourseProgressList.as_view(), name='users-courses-progress'),
url(r'^integration-test-users/$', users_views.UsersListWithEnrollment.as_view(), name='integration-test-users'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/attributes/',
users_views.ClientSpecificAttributesView.as_view(), name='users-attributes'),
url(r'validate-token/$', users_views.TokenBasedUserDetails.as_view(),
name='validate-bearer-token'),
url(r'anonymous_id/$', users_views.UsersAnonymousId.as_view(),
name='user-anonymous-id'),
]
urlpatterns = format_suffix_patterns(urlpatterns)
|
edx-solutions/api-integration
|
edx_solutions_api_integration/users/urls.py
|
Python
|
agpl-3.0
| 3,599
|
# -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
from openerp import models, fields, api
from datetime import *
class clv_medicament_template_history(models.Model):
_name = 'clv_medicament.template.history'
medicament_template_id = fields.Many2one('clv_medicament.template', 'Medicament Template', required=True)
user_id = fields.Many2one ('res.users', 'User', required=True)
date = fields.Datetime("Date", required=True)
state = fields.Selection([('draft','Draft'),
('revised','Revised'),
('waiting','Waiting'),
('done','Done'),
('canceled','Canceled'),
], string='Status', default='draft', readonly=True, required=True, help="")
notes = fields.Text(string='Notes')
_order = "date desc"
_defaults = {
'user_id': lambda obj,cr,uid,context: uid,
'date': lambda *a: datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
}
class clv_medicament_template(models.Model):
_inherit = 'clv_medicament.template'
history_ids = fields.One2many('clv_medicament.template.history', 'medicament_template_id', 'Medicament Template History', readonly=True)
active_history = fields.Boolean('Active History',
help="If unchecked, it will allow you to disable the history without removing it.",
default=True)
@api.one
def insert_clv_medicament_template_history(self, medicament_template_id, state, notes):
if self.active_history:
values = {
'medicament_template_id': medicament_template_id,
'state': state,
'notes': notes,
}
self.pool.get('clv_medicament.template.history').create(self._cr, self._uid, values)
@api.multi
def write(self, values):
if (not 'state' in values) and (not 'date' in values):
notes = values.keys()
self.insert_clv_medicament_template_history(self.id, self.state, notes)
return super(clv_medicament_template, self).write(values)
@api.one
def button_draft(self):
self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.state = 'draft'
self.insert_clv_medicament_template_history(self.id, 'draft', '')
@api.one
def button_revised(self):
self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.state = 'revised'
self.insert_clv_medicament_template_history(self.id, 'revised', '')
@api.one
def button_waiting(self):
self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.state = 'waiting'
self.insert_clv_medicament_template_history(self.id, 'waiting', '')
@api.one
def button_done(self):
self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.state = 'done'
self.insert_clv_medicament_template_history(self.id, 'done', '')
@api.one
def button_cancel(self):
self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.state = 'canceled'
self.insert_clv_medicament_template_history(self.id, 'canceled', '')
@api.one
def set_to_draft(self, *args):
self.state = 'draft'
self.create_workflow()
return True
|
CLVsol/odoo_addons
|
clv_medicament_template/history/clv_medicament_template_history.py
|
Python
|
agpl-3.0
| 4,727
|
from django import forms
from django.utils.html import escape
from django.forms.utils import ErrorList
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from booktype.utils.misc import booktype_slugify
from booki.editor.models import BookiGroup
from booktype.utils import misc
from booktype.apps.core.forms import BaseBooktypeForm
from widgets import RemovableImageWidget
class SpanErrorList(ErrorList):
def __unicode__(self):
return unicode(self.as_spans())
def as_spans(self):
return "<span style='color: red'>%s</span>" % (
",".join([e for e in self])
)
class BaseGroupForm(BaseBooktypeForm, forms.ModelForm):
name = forms.CharField()
description = forms.CharField(
label=_('Description (250 characters)'),
required=False,
max_length=250,
widget=forms.Textarea(attrs={'rows': '10', 'cols': '40'})
)
group_image = forms.FileField(
label=_('Group image'),
required=False,
widget=RemovableImageWidget(attrs={
'label_class': 'checkbox-inline',
'input_class': 'group-image-removable'
}
)
)
class Meta:
model = BookiGroup
fields = [
'name', 'description'
]
def __init__(self, *args, **kwargs):
kwargs.update({'error_class': SpanErrorList})
super(BaseGroupForm, self).__init__(*args, **kwargs)
def clean_name(self):
new_url_name = booktype_slugify(self.cleaned_data['name'])
group_data_url_name = BookiGroup.objects.filter(url_name=new_url_name).exclude(pk=self.instance.pk)
if len(group_data_url_name) > 0:
raise ValidationError(_('Group name is already in use'))
return self.cleaned_data.get('name', '')
def clean_description(self):
return escape(self.cleaned_data.get('description', ''))
def set_group_image(self, group_id, group_image):
try:
filename = misc.set_group_image(group_id, group_image, 240, 240)
if len(filename) == 0:
raise ValidationError(_('Only JPEG file is allowed for group image.'))
else:
misc.set_group_image( "{}_small".format(group_id), group_image, 18, 18)
except Exception as err:
# TODO: we should do something here
print err
class GroupCreateForm(BaseGroupForm):
pass
class GroupUpdateForm(BaseGroupForm):
def clean_group_image(self):
group_image = self.files.get('group_image', None)
group_id = str(self.instance.pk)
if group_image:
self.set_group_image(group_id, group_image)
return group_image
|
MiczFlor/Booktype
|
lib/booktype/apps/portal/forms.py
|
Python
|
agpl-3.0
| 2,755
|