| code (string, 2-1.05M chars) | repo_name (string, 5-104 chars) | path (string, 4-251 chars) | language (1 class: Python) | license (15 values) | size (int32, 2-1.05M) |
|---|---|---|---|---|---|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'Jamie Duncan'
SITENAME = u'Open Tech Podcast'
SITEURL = 'https://opentechpodcast.org'
PATH = 'content'
TIMEZONE = 'America/New_York'
DEFAULT_LANG = u'en'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/{slug}.atom.xml'
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
FEED_RSS = 'feeds/rss.xml'
# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
('Python.org', 'http://python.org/'),
('Jinja2', 'http://jinja.pocoo.org/'),)
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = 10
THEME = "pelican-theme"
PLUGIN_PATHS = ['pelican-plugins']
PLUGINS = ['asciidoc_reader',
'sitemap',
'gravatar',
'filetime_from_git',
'gallery',
'thumbnailer',
'disqus_static',]
SITEMAP = {
'format': 'xml',
'priorities': {
'articles': 0.5,
'indexes': 0.5,
'pages': 0.5
},
'changefreqs': {
'articles': 'monthly',
'indexes': 'daily',
'pages': 'monthly'
}
}
STATIC_PATHS = [
'images',
'extra/robots.txt',
'extra/favicon.ico',
'extra/CNAME'
]
EXTRA_PATH_METADATA = {
'extra/robots.txt': {'path': 'robots.txt'},
'extra/favicon.ico': {'path': 'favicon.ico'},
'extra/CNAME': {'path': 'CNAME'}
}
# nice-blog theme settings
SIDEBAR_DISPLAY = ['about','links','categories','tags']
SIDEBAR_ABOUT = "Jamie Duncan and Dave Sirrine. A couple of career Open Source Geeks talking about technology with their friends, and an occasional beer."
THEME_COLOR = 'red'
DISPLAY_CATEGORIES_ON_MENU = False
DISPLAY_PAGES_ON_MENU = True
# thumbnailer settings
IMAGE_PATH = 'images'
THUMBNAIL_DIR = 'images'
THUMBNAIL_KEEP_NAME = True
THUMBNAIL_KEEP_TREE = True
# disqus_static settings
DISQUS_SITENAME = 'opentechpodcast'
DISQUS_PUBLIC_KEY = 'Zuy9Hu0Lj35N0FNLnke5ye9No0cJhsBvZKNGJ701eIJQf4adgeKYnGeROOHm1OgG'
DISQUS_SECRET_KEY = 's8BQwXKdKek2WAAHeIGAo9yzQrAMFDGjmBr3OmYq5IewowmeASmPllkUlkYRipVs'
ASCIIDOC_BACKEND = 'html5'
GALLERY_PATH = 'images/gallery'
RESIZE = [
('images/gallery', False, 200,200),
]
GOOGLE_ANALYTICS = 'UA-90496768-1'
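
# A minimal sanity-check sketch, not part of Pelican's config contract:
# read_settings() merges this file over Pelican's defaults. Guarded so it only
# runs when the module is executed directly, and it assumes Pelican is
# installed and the relative PATH ('content') resolves from the site root.
if __name__ == '__main__':
    from pelican.settings import read_settings
    settings = read_settings('pelicanconf.py')
    print(settings['SITEURL'])        # https://opentechpodcast.org
    print(settings['FEED_ALL_ATOM'])  # feeds/all.atom.xml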
| jduncan-rva/opentechpodcast | pelicanconf.py | Python | gpl-3.0 | 2,398 |
#!/usr/bin/env python
# *********************************************************************
# * Copyright (C) 2012 Luca Baldini (luca.baldini@pi.infn.it) *
# * *
# * For the license terms see the file LICENSE, distributed *
# * along with this software. *
# *********************************************************************
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from e3pipe.root.__ROOT__ import *
from e3pipe.root.E3Latex import E3Latex
class E3Logo:
""" EEE logo to be put on a ROOT canvas.
    We haven't made a huge effort, yet, to make the logo scalable or
    anything like that. It is made of three pieces and we rely on the
    TLatex alignment for the relative positioning.
"""
def __init__(self, x = 0.75, y = 0.99, subtitle = 'La Scienza nelle Scuole',
color = ROOT.kBlue, shadowColor = ROOT.kGray):
"""
"""
self.BigPrint = E3Latex(x, y, 'EEE', NDC = True, TextAlign = 33,
TextSize = BIG_TEXT_SIZE, TextColor = color)
self.Shadow = E3Latex(x, y, 'EEE', NDC = True, TextAlign = 33,
TextSize = BIG_TEXT_SIZE, TextColor = shadowColor)
self.Shadow.shift(0.0075, -0.0075)
text = '#splitline{ Extreme Energy Events}{ %s}' % subtitle
self.SmallPrint = E3Latex(x, y, text, NDC = True, TextAlign = 13,
TextSize = SMALLEST_TEXT_SIZE,
TextColor = color)
def Draw(self, opts = ''):
"""
"""
self.Shadow.Draw(opts)
self.BigPrint.Draw(opts)
self.SmallPrint.Draw(opts)
def test():
logo = E3Logo()
logo.Draw()
if __name__ == '__main__':
test()
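
# Hedged usage sketch (assumes PyROOT and e3pipe are importable): the logo is
# drawn on whatever TCanvas is current, so create one first, e.g.
#
#   canvas = ROOT.TCanvas('cLogo', 'EEE logo', 600, 400)
#   logo = E3Logo(subtitle='La Scienza nelle Scuole', color=ROOT.kRed)
#   logo.Draw()
#   canvas.Update()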
| centrofermi/e3pipe | root/E3Logo.py | Python | gpl-3.0 | 2,531 |
# Author: Jason Lu
class Publisher:
def __init__(self):
self.observers = []
def add(self, observer):
if observer not in self.observers:
self.observers.append(observer)
else:
print('Failed to add: {}'.format(observer))
def remove(self, observer):
try:
self.observers.remove(observer)
except ValueError as e:
print('Failed to remove {}'.format(observer))
def notify(self):
        for observer in self.observers:
            observer.notify(self)
class DefaultFormatter(Publisher):
def __init__(self, name):
Publisher.__init__(self)
self.name = name
self._data = 0
def __str__(self):
return "{}: '{}' has data = {}".format(type(self).__name__, self.name, self._data)
@property
def data(self):
return self._data
@data.setter
def data(self, new_value):
try:
self._data = int(new_value)
except ValueError as e:
print('Error: {}'.format(e))
else:
self.notify()
class HexFormatter:
def notify(self, publisher):
print("{}: '{}' has now hex data = {}".format(type(self).__name__, publisher.name, hex(publisher.data)))
class BinaryFormatter:
def notify(self, publisher):
print("{}: '{}' has now bin data = {}".format(type(self).__name__, publisher.name, bin(publisher.data)))
def main():
df = DefaultFormatter('test1')
print(df)
print()
hf = HexFormatter()
df.add(hf)
df.data = 3
print(df)
print()
bf = BinaryFormatter()
df.add(bf)
df.data = 21
print(df)
print()
df.remove(hf)
df.data = 40
print(df)
print()
df.remove(hf)
df.add(bf)
df.data = 'hello'
print(df)
print()
df.data = 15.8
print(df)
if __name__ == '__main__':
main()
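
# Hedged sketch: subscribers are duck-typed, so any object exposing a
# notify(publisher) method can be attached. An octal formatter (hypothetical,
# not part of the original) would look like:
#
#   class OctalFormatter:
#       def notify(self, publisher):
#           print("{}: '{}' has now oct data = {}".format(
#               type(self).__name__, publisher.name, oct(publisher.data)))
#
#   df = DefaultFormatter('test2')
#   df.add(OctalFormatter())
#   df.data = 8   # -> OctalFormatter: 'test2' has now oct data = 0o10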
| jinzekid/codehub | python/设计模式/行为型模式/观察者模式/observer.py | Python | gpl-3.0 | 1,865 |
# Import Parent
import test
from testresult import TestResult
import pytz
#ValidityTest
class Test(test.Test):
"""docstring for ClassName"""
def __init__(self):
self.name = "Subject name"
self.weight = 0
self.required = True
# Test
def performTest(self, scanResult):
        if scanResult.hostnameMatch:
return TestResult(self, True, "Domain name found in certificate.")
return TestResult(self, False, "Domain name not found in certificate.")
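
# Hedged sketch of exercising the test in isolation; _StubScan is hypothetical
# and stands in for the real scan result, which only needs a hostnameMatch
# attribute here.
if __name__ == '__main__':
    class _StubScan(object):
        hostnameMatch = True
    print(Test().performTest(_StubScan()))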
| darckbit/ssl-health | src/tests/servCommonName.py | Python | gpl-3.0 | 470 |
#! /usr/bin/python
import sys
import os
if len(sys.argv) == 1:
obj = "."
else:
obj = sys.argv[1]
files_to_img = [
obj+"/stage1.o",
obj+"/stage2.o",
obj+"/kernel64.o"
]
buf = []
for fn in files_to_img:
with open(fn, "rb") as f:
# append file
d = f.read()
buf.append(d)
# 512 byte padding
if len(d) % 512 != 0:
padding_size = 512 - len(d) % 512
buf.append("\0" * padding_size)
with open("floppy.bin", "wb") as f:
    f.write(b''.join(buf))
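
# Hedged sanity check (not in the original script): each component is padded
# to a 512-byte sector boundary, so the finished image must be a whole number
# of sectors.
size = os.path.getsize("floppy.bin")
assert size % 512 == 0, "floppy.bin is not sector-aligned"
print("floppy.bin: %d bytes (%d sectors)" % (size, size // 512))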
| KoczurekK/pocOS | scripts/merge.py | Python | gpl-3.0 | 467 |
# Copyright (C) British Crown (Met Office) & Contributors.
# This file is part of Rose, a framework for meteorological suites.
#
# Rose is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rose is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Rose. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
"""Compress archive sources in gzip."""
import os
class RoseArchGzip:
"""Compress archive sources in gzip."""
SCHEMES = ["gz", "gzip"]
def __init__(self, app_runner, *args, **kwargs):
self.app_runner = app_runner
def compress_sources(self, target, work_dir):
"""Gzip each source in target.
Use work_dir to dump results.
"""
for source in target.sources.values():
if source.path.endswith("." + target.compress_scheme):
continue # assume already done
name_gz = source.name + "." + target.compress_scheme
work_path_gz = os.path.join(work_dir, name_gz)
self.app_runner.fs_util.makedirs(
self.app_runner.fs_util.dirname(work_path_gz)
)
# N.B. Python's gzip is slow
command = "gzip -c '%s' >'%s'" % (source.path, work_path_gz)
self.app_runner.popen.run_simple(command, shell=True)
source.path = work_path_gz
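
# For reference: the class shells out to gzip(1) because, as the N.B. above
# notes, Python's gzip module is slower. A minimal in-process equivalent
# (a sketch, not part of Rose) looks like this.
import gzip
import shutil

def gzip_in_process(src_path, dest_path):
    """Compress src_path into dest_path with the stdlib gzip module."""
    with open(src_path, "rb") as src, gzip.open(dest_path, "wb") as dest:
        shutil.copyfileobj(src, dest)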
| metomi/rose | metomi/rose/apps/rose_arch_compressions/rose_arch_gzip.py | Python | gpl-3.0 | 1,827 |
"""
DWF Python Example
Author: Digilent, Inc.
Revision: 2018-07-19
Requires:
Python 2.7, 3
"""
from ctypes import *
from dwfconstants import *
import math
import time
import matplotlib.pyplot as plt
import sys
import numpy
import wave
import datetime
import os
buffersize = 4096  # samples / buffer
samplerate = 8000  # samples / second
signalgenhz = 80
if sys.platform.startswith("win"):
dwf = cdll.dwf
elif sys.platform.startswith("darwin"):
dwf = cdll.LoadLibrary("/Library/Frameworks/dwf.framework/dwf")
else:
dwf = cdll.LoadLibrary("libdwf.so")
#declare ctype variables
hdwf = c_int()
sts = c_byte()
rgdSamples = (c_double*buffersize)()
version = create_string_buffer(16)
dwf.FDwfGetVersion(version)
print("DWF Version: "+str(version.value))
#open device
print("Opening first device")
dwf.FDwfDeviceOpen(c_int(-1), byref(hdwf))
if hdwf.value == hdwfNone.value:
szerr = create_string_buffer(512)
dwf.FDwfGetLastErrorMsg(szerr)
print(szerr.value)
print("failed to open device")
quit()
cBufMax = c_int()
dwf.FDwfAnalogInBufferSizeInfo(hdwf, 0, byref(cBufMax))
print("Device buffer size: "+str(cBufMax.value)+" samples")
#set up acquisition
dwf.FDwfAnalogInFrequencySet(hdwf, c_double(samplerate))
dwf.FDwfAnalogInBufferSizeSet(hdwf, c_int(buffersize))
dwf.FDwfAnalogInChannelEnableSet(hdwf, c_int(0), c_bool(True))
dwf.FDwfAnalogInChannelRangeSet(hdwf, c_int(0), c_double(5))
# set up signal generation
channel = c_int(0) # use W1
dwf.FDwfAnalogOutNodeEnableSet(hdwf, channel, AnalogOutNodeCarrier, c_bool(True))
dwf.FDwfAnalogOutNodeFunctionSet(hdwf, channel, AnalogOutNodeCarrier, funcTriangle) # ! this looks like a square wave
dwf.FDwfAnalogOutNodeFrequencySet(hdwf, channel, AnalogOutNodeCarrier, c_double(signalgenhz))
dwf.FDwfAnalogOutNodeAmplitudeSet(hdwf, channel, AnalogOutNodeCarrier, c_double(1.41)) # ! this doesn't really do anything
dwf.FDwfAnalogOutNodeOffsetSet(hdwf, channel, AnalogOutNodeCarrier, c_double(1.41))
print("Generating sine wave @"+str(signalgenhz)+"Hz...")
dwf.FDwfAnalogOutConfigure(hdwf, channel, c_bool(True))
#wait at least 2 seconds for the offset to stabilize
time.sleep(2)
#get the proper file name
starttime = datetime.datetime.now()
startfilename = starttime.strftime("AD2_%Y%m%d_%H%M%S.wav")
#open WAV file
print("Opening WAV file '" + startfilename + "'")
waveWrite = wave.open(startfilename, "wb")
waveWrite.setnchannels(2) # 2 channels for the testing (1 channel would be enough if FDwfAnalogInStatusData returned only 1 channel's data)
waveWrite.setsampwidth(4) # 32 bit / sample
waveWrite.setframerate(samplerate)
waveWrite.setcomptype("NONE", "No compression")
#start acquisition
print("Starting oscilloscope")
dwf.FDwfAnalogInConfigure(hdwf, c_bool(False), c_bool(True))
print("Recording data @"+str(samplerate)+"Hz, press Ctrl+C to stop...");
bufferCounter = 0;
try:
while True:
while True:
dwf.FDwfAnalogInStatus(hdwf, c_int(1), byref(sts))
            if sts.value == DwfStateDone.value:
break
time.sleep(0.1)
dwf.FDwfAnalogInStatusData(hdwf, 0, rgdSamples, buffersize) # get channel 1 data CH1 - ! it looks like 2 channels get read here and only the second is the data of CH1
#dwf.FDwfAnalogInStatusData(hdwf, 1, rgdSamples, buffersize) # get channel 2 data CH2
        waveWrite.writeframes(rgdSamples)
        bufferCounter += 1
        if ((bufferCounter % 1) == 0):  # % 1 reports after every buffer; raise it to report less often
            print(str(waveWrite.tell() * 4) + " bytes were written")
except KeyboardInterrupt:
pass
print("Acquisition done")
print("Closing WAV file")
waveWrite.close()
dwf.FDwfDeviceCloseAll()
#rename the file so that we know both the start and end times from the filename
endtime = datetime.datetime.now()
endfilename = starttime.strftime("AD2_%Y%m%d_%H%M%S") + endtime.strftime("-%H%M%S") + ".wav"
print("Renaming file from '" + startfilename + "' to '" + endfilename + "'")
os.rename(startfilename, endfilename)
#plot window
#dc = sum(rgdSamples)/len(rgdSamples)
#print("DC: "+str(dc)+"V")
#plt.plot(numpy.fromiter(rgdSamples, dtype = numpy.float))
#plt.show()
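
# Hedged read-back sketch (not in the Digilent original): the frames written
# above are raw 64-bit doubles declared to the wave module as 2 channels x
# 4 bytes, so one WAV "frame" is exactly one float64 sample.
#
#   with wave.open(endfilename, "rb") as w:
#       raw = w.readframes(w.getnframes())
#   samples = numpy.frombuffer(raw, dtype=numpy.float64)
#   plt.plot(samples[:buffersize]); plt.show()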
| andrasfuchs/BioBalanceDetector | Measurements/WaveForms/Experiments/SleepLogging/python/AnalogIn_AcquisitionSaveToWAV_float.py | Python | gpl-3.0 | 4,592 |
import pandas as pd
import numpy as np
import os
import json
from sklearn.preprocessing import StandardScaler
from itertools import combinations_with_replacement
'''train_df_play['percent_damage'] = \
train_df_play[['crack_length_1','crack_length_2','crack_length_3','crack_length_4']].max(axis=1)/threshold * 100
print(train_df_play.head(2),train_df_play.index,train_df_play.info())
train_df_play = train_df_play[['StepIndex', 'percent_damage','delta_K_current_1','crack_length_1','delta_K_current_2',
'crack_length_2','delta_K_current_3','crack_length_3','delta_K_current_4',
'crack_length_4','Load_1','Load_2']]
print("after changing: {}".format(train_df_play.columns.values))
train_df_play.to_csv('/home/ihsan/Documents/thesis_models/with_stepindex.csv')
#-----------------------SEE BELOW FOR REINDEXING EXAMPLE -------------------------------
train_df_play_dropped_stepindex = train_df_play[['percent_damage','delta_K_current_1','crack_length_1','delta_K_current_2',
'crack_length_2','delta_K_current_3','crack_length_3','delta_K_current_4',
'crack_length_4','Load_1','Load_2']]'''
def parse_scattergro(save_arrays = True, analysis_mode = False, feature_identifier = 'FVX', use_data_in_model_folder = False):
'''Cuts apart the lumped arrays. If analysis_mode is True, it'll return the intermediate arrays and indices,
for corpus_characterizer to do its thing. '''
#raw_path = "/home/ihsan/Documents/thesis_generator/results/devin/to_process/" #needs the absolute path, no tildes!
#processed_path = "/home/ihsan/Documents/thesis_generator/results/devin"
#usb drive
#raw_path = '/media/ihsan/LID_FLASH_1/Thesis/thesis_generator/results/run_2/'
#--------PASTED PART
#------------------------------------------ END OF PASTED PART----------------------------------------
raw_path = "/home/ihsan/Documents/thesis_generator/results/to_process/"
processed_path = "/home/ihsan/Documents/thesis_models/unsplit"
#processed_path = '/media/ihsan/LID_FLASH_1/Thesis/thesis_generator/results/run_2/processed/'
items = os.listdir(raw_path)
items.sort()
print(type(items))
    # filter in one pass; deleting from a list while iterating over it skips entries
    items = [f for f in items if '.csv' in str(f)]
print(items)
seq_length_dict = {}
seq_length_dict_filename = processed_path + "/sequence_lengths.json"
seq_group_params = {}
seq_group_params_filename = "./analysis/seq_group_params.json"
seq_individual_params = {}
seq_individual_params_filename = "./analysis/seq_individual_params.json"
seq_entire_params = {}
seq_entire_params_filename = "./analysis/seq_entire_params.json"
#suffix = "3a"
#csv_path = "~/Documents/thesis_generator/results/devin/crack_growth_sequence" + suffix + ".csv"
sequence_lengths = {} #save sequence lengths as a dict. or maybe a json?
j = 0 # counter.
threshold = 0.5 # threshold crack length. rivet pitch is 0.875" so a bit over half of that.
for file in items:
print("filename: {}".format(str(file)))
csv_path = raw_path + str(file)
if ("_0.") in str(file): # only the first file in the series has a header.
cg_seq_df = pd.read_csv(csv_path)
header_names = cg_seq_df.columns.values
print("header names: {}".format(header_names))
else:
cg_seq_df = pd.read_csv(csv_path, names=header_names)
print(cg_seq_df.columns.values)
cg_seq_df['percent_damage'] = \
cg_seq_df[['crack_length_1', 'crack_length_2', 'crack_length_3', 'crack_length_4']].max(
axis=1) / threshold * 100
train_list = ['StepIndex', 'percent_damage', 'delta_K_current_1', 'ctip_posn_curr_1', 'delta_K_current_2',
'ctip_posn_curr_2',
'delta_K_current_3', 'ctip_posn_curr_3', 'delta_K_current_4', 'ctip_posn_curr_4', 'Load_1',
'Load_2'] # and seq_id,somehow
label_list = ['StepIndex', 'delta_a_current_1', 'delta_a_current_2', 'delta_a_current_3', 'delta_a_current_4']
train_df = cg_seq_df[train_list]
print("cg_seq_df shape: {}".format(cg_seq_df.columns.values))
print(train_df.index, train_df.head(1))
label_train_df = cg_seq_df[label_list]
# to accommodate different feature sets, read the column names on the fly.
seq_group_params['train_colnames'] = train_df.columns.tolist()
seq_group_params['label_colnames'] = label_train_df.columns.tolist()
#------------ANALYSIS PART-----------------------------------------------------------------------------
        if analysis_mode:
# calculates the characteristic parameters of blocks of sequences (same IC and same load cond)
group_train_scaler_params = {}
group_label_scaler_params = {}
group_train_scaler = StandardScaler()
group_label_scaler = StandardScaler()
group_train_scaler.fit(train_df.values)
group_label_scaler.fit(label_train_df.values)
# print(group_train_scaler.mean_, group_train_scaler.scale_, group_train_scaler.var_, group_train_scaler.std_)
group_train_scaler_params['mean'] = np.ndarray.tolist(group_train_scaler.mean_)
group_train_scaler_params['scale'] = np.ndarray.tolist(group_train_scaler.scale_)
#group_train_scaler_params['std'] = np.ndarray.tolist(group_train_scaler.std_)
group_train_scaler_params['var'] = np.ndarray.tolist(group_train_scaler.var_)
group_label_scaler_params['mean'] = np.ndarray.tolist(group_label_scaler.mean_)
group_label_scaler_params['scale'] = np.ndarray.tolist(group_label_scaler.scale_)
#group_label_scaler_params['std'] = np.ndarray.tolist(group_label_scaler.std_)
group_label_scaler_params['var'] = np.ndarray.tolist(group_label_scaler.var_)
# nested dict.
seq_group_params[str(file)] = {}
seq_group_params[str(file)]["data"] = group_train_scaler_params
seq_group_params[str(file)]["label"] = group_label_scaler_params
# ------------END OF ANALYSIS PART---------------------------------------------------------------------
indices = train_df[train_df['StepIndex'] == 1].index.tolist()
indices.append(train_df.shape[0] - 1) #the 0th position was missing if run using the original method.
indices_offset_min1 = [i - 1 for i in indices]
print("file {}'s indices_offset_min1 {}".format(str(file), indices_offset_min1))
indices_offset_min1.pop(0)
print("indices: {}, indices_offset_min1: {}".format(indices, indices_offset_min1))
ranges = [(t, s) for t, s in zip(indices, indices_offset_min1)]
# print("before changing :{}".format(ranges))
'''for tuple in ranges:
print(tuple)
tuple[1:][0] = (tuple[1:][0]) + 1'''
# ranges[1:][0] = ranges[1:][0] + 1
print("\nafter changing :{} ".format(ranges))
# print("lengths: {} ".format([indices[4]-indices[3],indices[3]-indices[2],indices[2]-indices[1],indices[1]-indices[0]]))
print("lengths: {} ".format([t - s for (s, t) in ranges]))
i = 0
for indices_as_tuples in ranges:
i = i + 1
print("indices as tuples: {}".format(indices_as_tuples))
train_df_as_np_array = train_df[indices_as_tuples[0]:indices_as_tuples[1]].values
label_train_df_as_np_array = label_train_df[indices_as_tuples[0]:indices_as_tuples[1]].values
print("df_as_np_array shape: {}".format(train_df_as_np_array.shape))
print("file: {}".format(file))
#TODO makes this a regex.. second underscore
identifier = str(str(file)[-8:-6]) # eg 1a 2a etc. #you can use a regex.
print("identifier: {}".format(identifier))
#TODO feature version identifier.
# j is sequence id. #i is the sequence number within the csv.
np_train_path = processed_path + "/sequence_" + identifier + "_" + str(j) + "_" + str(i) + ".npy"
np_label_train_path = processed_path + "/sequence_" + identifier + "_" + str(j) + "_" + str(i) + "_label_.npy"
seq_length_dict["sequence_" + identifier + "_" + str(j) + "_" + str(i)] = indices_as_tuples[1] - \
indices_as_tuples[0]
# seq_length_dict = json.load(open(seq_length_dict))
print("np_train_path: {}".format(np_train_path))
print("np_label_train_path :{}".format(np_label_train_path))
# ------------ANALYSIS PART-----------------------------------------------------------------------------
            if analysis_mode:  # calculates statistics
# calculates the characteristic parameters of blocks of sequences (same IC and same load cond)
individual_sequence_scaler_params = {}
individual_label_scaler_params = {}
individual_sequence_scaler = StandardScaler()
individual_label_scaler = StandardScaler()
individual_sequence_scaler.fit(train_df_as_np_array)
individual_label_scaler.fit(label_train_df_as_np_array)
# print(individual_sequence_scaler.mean_, individual_sequence_scaler.scale_, individual_sequence_scaler.var_, individual_sequence_scaler.std_)
individual_sequence_scaler_params['mean'] = np.ndarray.tolist(individual_sequence_scaler.mean_)
individual_sequence_scaler_params['scale'] = np.ndarray.tolist(individual_sequence_scaler.scale_)
# individual_sequence_scaler_params['std'] = np.ndarray.tolist(individual_sequence_scaler.std_)
individual_sequence_scaler_params['var'] = np.ndarray.tolist(individual_sequence_scaler.var_)
individual_label_scaler_params['mean'] = np.ndarray.tolist(individual_label_scaler.mean_)
individual_label_scaler_params['scale'] = np.ndarray.tolist(individual_label_scaler.scale_)
# deprecated individual_label_scaler_params['std'] = np.ndarray.tolist(individual_label_scaler.std_)
individual_label_scaler_params['var'] = np.ndarray.tolist(individual_label_scaler.var_)
# nested dict.
seq_individual_params["sequence_" + identifier + "_" + str(j) + "_" + str(i) + ".npy"] = individual_sequence_scaler_params
seq_individual_params["sequence_" + identifier + "_" + str(j) + "_" + str(i) + "_label_.npy"] = individual_label_scaler_params
# ------------END OF ANALYSIS PART----------------------------------------------------------------------
            if save_arrays:
np.save(np_train_path, train_df_as_np_array)
np.save(np_label_train_path, label_train_df_as_np_array)
j = j + 1
print(seq_length_dict) #these are of individual sequence lengths.
#---------------ANALYSIS OF UNSPLIT---------------------------------------------------------------------
    if analysis_mode:
# processed_path = '/media/ihsan/LID_FLASH_1/Thesis/thesis_generator/results/run_2/processed/'
items_processed = os.listdir(processed_path)
items_processed.sort()
print(type(items_processed))
        # keep only .npy files; filter in one pass, since deleting while iterating skips entries
        items_processed = [f for f in items_processed if '.npy' in str(f)]
print(items_processed)
#run standardscaler on all the sequences. Would be unproductive to do it earlier.
entire_data_scaler = StandardScaler()
entire_label_scaler = StandardScaler()
entire_data_scaler_params = {}
entire_label_scaler_params = {}
for file_p in items_processed: #TODO these are all tuples..
if("label") in str(file_p):
partial_label = np.load(processed_path + '/' + str(file_p))
entire_label_scaler.partial_fit(partial_label)
if("label") not in str(file_p):
partial_data = np.load(processed_path + '/' + str(file_p))
entire_data_scaler.partial_fit(partial_data)
entire_data_scaler_params['mean'] = np.ndarray.tolist(entire_data_scaler.mean_)
entire_data_scaler_params['scale'] = np.ndarray.tolist(entire_data_scaler.scale_)
# entire_data_scaler_params['std'] = np.ndarray.tolist(entire_data_scaler.std_)
entire_data_scaler_params['var'] = np.ndarray.tolist(entire_data_scaler.var_)
entire_label_scaler_params['mean'] = np.ndarray.tolist(entire_label_scaler.mean_)
entire_label_scaler_params['scale'] = np.ndarray.tolist(entire_label_scaler.scale_)
# entire_label_scaler_params['std'] = np.ndarray.tolist(entire_label_scaler.std_)
entire_label_scaler_params['var'] = np.ndarray.tolist(entire_label_scaler.var_)
seq_entire_params['data'] = entire_data_scaler_params
seq_entire_params['label'] = entire_label_scaler_params
#TODO calculate covariances of everything.
#possible_combinations = combinations_with_replacement(#column numbers ,r=2)
#crack position vs crack growth rate
#load vs. crack growth rate
#TODO find the kink in crack growth rate.
#probably the correlation between the load and the crack growth rate, on each crack..
#use pearson_r
# ---------------END OF ANALYSIS---------------------------------------------------------------------
#TODO use DictWriter to get csvs.
    # write with text-mode handles and close them; json.dump to a 'wb' handle
    # fails under Python 3
    for params, fname in [(seq_length_dict, seq_length_dict_filename),
                          (seq_group_params, seq_group_params_filename),
                          (seq_individual_params, seq_individual_params_filename),
                          (seq_entire_params, seq_entire_params_filename)]:
        with open(fname, 'w') as f:
            json.dump(params, f)
if __name__ == "__main__":
parse_scattergro(save_arrays=False,analysis_mode=True)
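
# Hedged sketch of consuming the parser's output (filenames follow the
# "sequence_<identifier>_<j>_<i>.npy" scheme built above; "1a", 0, 1 are
# example values and not guaranteed to exist):
#
#   import numpy as np
#   seq = np.load("sequence_1a_0_1.npy")
#   labels = np.load("sequence_1a_0_1_label_.npy")
#   print(seq.shape, labels.shape)   # (timesteps, features), (timesteps, targets)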
| samnashi/howdoflawsgetlonger | scattergro_parser_each.py | Python | gpl-3.0 | 14,324 |
#!/usr/bin/env python
"""
Create Phase Tensor Map from the ModEM's output Resistivity model
"""
import os
import os.path as op
import matplotlib.colorbar as mcb
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.colors import Normalize
from matplotlib.patches import Ellipse
import mtpy.analysis.pt as mtpt
import mtpy.imaging.mtcolors as mtcl
import mtpy.imaging.mtplottools as mtplottools
import mtpy.modeling.ws3dinv as ws
import mtpy.utils.exceptions as mtex
from mtpy.utils.calculator import nearest_index
from mtpy.utils.gis_tools import epsg_project
from mtpy.utils import basemap_tools
from mtpy.modeling.modem import Data, Model
import logging, traceback
try:
from pyevtk.hl import gridToVTK, pointsToVTK
except ImportError:
print ('If you want to write a vtk file for 3d viewing, you need to pip install PyEVTK:'
' https://bitbucket.org/pauloh/pyevtk')
    print ('Note: if you are using Windows you should build evtk first with '
           'either MinGW or cygwin using the command: \n'
' python setup.py build -compiler=mingw32 or \n'
' python setup.py build -compiler=cygwin')
# ==============================================================================
# plot phase tensors
# ==============================================================================
class PlotPTMaps(mtplottools.MTEllipse):
"""
Plot phase tensor maps including residual pt if response file is input.
:Plot only data for one period: ::
>>> import mtpy.modeling.ws3dinv as ws
>>> dfn = r"/home/MT/ws3dinv/Inv1/WSDataFile.dat"
>>> ptm = ws.PlotPTMaps(data_fn=dfn, plot_period_list=[0])
:Plot data and model response: ::
>>> import mtpy.modeling.ws3dinv as ws
>>> dfn = r"/home/MT/ws3dinv/Inv1/WSDataFile.dat"
>>> rfn = r"/home/MT/ws3dinv/Inv1/Test_resp.00"
>>> mfn = r"/home/MT/ws3dinv/Inv1/Test_model.00"
>>> ptm = ws.PlotPTMaps(data_fn=dfn, resp_fn=rfn, model_fn=mfn,
>>> ... plot_period_list=[0])
>>> # adjust colorbar
>>> ptm.cb_res_pad = 1.25
>>> ptm.redraw_plot()
========================== ================================================
Attributes Description
========================== ================================================
cb_pt_pad percentage from top of axes to place pt
color bar. *default* is 1.2
cb_res_pad percentage from bottom of axes to place
resistivity color bar. *default* is 0.5
cb_residual_tick_step tick step for residual pt. *default* is 3
cb_tick_step tick step for phase tensor color bar,
*default* is 45
data_obj data object (read in from ModEM data file)
data_fn full path to data file
dscale scaling parameter depending on map_scale
ellipse_cmap color map for pt ellipses. *default* is
mt_bl2gr2rd
ellipse_colorby [ 'skew' | 'skew_seg' | 'phimin' | 'phimax'|
'phidet' | 'ellipticity' ] parameter to color
ellipses by. *default* is 'phimin'
ellipse_range (min, max, step) min and max of colormap, need
to input step if plotting skew_seg
ellipse_size relative size of ellipses in map_scale
ew_limits limits of plot in e-w direction in map_scale
units. *default* is None, scales to station
area
fig_aspect aspect of figure. *default* is 1
fig_dpi resolution in dots-per-inch. *default* is 300
fig_list list of matplotlib.figure instances for each
figure plotted.
fig_size [width, height] in inches of figure window
*default* is [6, 6]
font_size font size of ticklabels, axes labels are
font_size+2. *default* is 7
grid_east relative location of grid nodes in e-w direction
in map_scale units
grid_north relative location of grid nodes in n-s direction
in map_scale units
grid_z relative location of grid nodes in z direction
in map_scale units
model_fn full path to initial file
map_scale [ 'km' | 'm' ] distance units of map.
*default* is km
mesh_east np.meshgrid(grid_east, grid_north, indexing='ij')
mesh_north np.meshgrid(grid_east, grid_north, indexing='ij')
model_fn full path to model file
nodes_east relative distance between nodes in e-w direction
in map_scale units
nodes_north relative distance between nodes in n-s direction
in map_scale units
nodes_z relative distance between nodes in z direction
in map_scale units
ns_limits (min, max) limits of plot in n-s direction
*default* is None, viewing area is station area
pad_east padding from extreme stations in east direction
pad_north padding from extreme stations in north direction
period_list list of periods from data
plot_grid [ 'y' | 'n' ] 'y' to plot grid lines
*default* is 'n'
plot_period_list list of period index values to plot
*default* is None
plot_yn ['y' | 'n' ] 'y' to plot on instantiation
*default* is 'y'
res_cmap colormap for resistivity values.
*default* is 'jet_r'
res_limits (min, max) resistivity limits in log scale
*default* is (0, 4)
res_model np.ndarray(n_north, n_east, n_vertical) of
model resistivity values in linear scale
residual_cmap color map for pt residuals.
*default* is 'mt_wh2or'
resp np.ndarray(n_stations, n_periods, 2, 2)
impedance tensors for model response
resp_fn full path to response file
save_path directory to save figures to
save_plots [ 'y' | 'n' ] 'y' to save plots to save_path
station_east location of stations in east direction in
map_scale units
station_fn full path to station locations file
station_names station names
station_north location of station in north direction in
map_scale units
subplot_bottom distance between axes and bottom of figure window
subplot_left distance between axes and left of figure window
subplot_right distance between axes and right of figure window
subplot_top distance between axes and top of figure window
title title of plot. *default* is depth of slice
xminorticks location of xminorticks
yminorticks location of yminorticks
========================== ================================================
"""
def __init__(self, data_fn=None, resp_fn=None, model_fn=None, **kwargs):
# MTEllipse.__init__(self, **kwargs)
super(PlotPTMaps, self).__init__(**kwargs)
self.model_fn = model_fn
self.data_fn = data_fn
self.resp_fn = resp_fn
self.save_path = kwargs.pop('save_path', None)
        if self.model_fn is not None and self.save_path is None:
            self.save_path = os.path.dirname(self.model_fn)
if self.save_path is not None:
if not os.path.exists(self.save_path):
os.mkdir(self.save_path)
self.save_plots = kwargs.pop('save_plots', 'y')
self.plot_period_list = kwargs.pop('plot_period_list', None)
self.period_dict = None
self.d_index = kwargs.pop('d_index',None)
self.map_scale = kwargs.pop('map_scale', 'km')
# make map scale
if self.map_scale == 'km':
self.dscale = 1000.
elif self.map_scale == 'm':
self.dscale = 1.
self.ew_limits = kwargs.pop('ew_limits', None)
self.ns_limits = kwargs.pop('ns_limits', None)
self.pad_east = kwargs.pop('pad_east', 2000)
self.pad_north = kwargs.pop('pad_north', 2000)
self.plot_grid = kwargs.pop('plot_grid', 'n')
self.fig_num = kwargs.pop('fig_num', 1)
self.fig_size = kwargs.pop('fig_size', [6, 6])
self.fig_dpi = kwargs.pop('dpi', 300)
self.fig_aspect = kwargs.pop('fig_aspect', 1)
self.title = kwargs.pop('title', 'on')
self.fig_list = []
self.xminorticks = kwargs.pop('xminorticks', 1000)
self.yminorticks = kwargs.pop('yminorticks', 1000)
self.residual_cmap = kwargs.pop('residual_cmap', 'mt_wh2or')
self.font_size = kwargs.pop('font_size', 7)
self.cb_pt_pad = kwargs.pop('cb_pt_pad', 1.2)
self.cb_res_pad = kwargs.pop('cb_res_pad', .5)
self.res_limits = kwargs.pop('res_limits', (0, 4))
self.res_cmap = kwargs.pop('res_cmap', 'jet_r')
# --> set the ellipse properties -------------------
self._ellipse_dict = kwargs.pop('ellipse_dict',
{'size': 2,
'ellipse_range':[0,0],
'ellipse_colorby':'phimin',
'ellipse_cmap':'mt_bl2gr2rd',
'normalise':False})
self._read_ellipse_dict(self._ellipse_dict)
self.ellipse_size = kwargs.pop(
'ellipse_size', self._ellipse_dict['size'])
self.normalise_ellipses = kwargs.pop('normalise_ellipses',False)
self.cb_tick_step = kwargs.pop('cb_tick_step', None)
# update default colorbar tick step based on ellipse_range
if self.cb_tick_step is None:
self.cb_tick_step = int((self.ellipse_range[1] - self.ellipse_range[0])/2.)
self.cb_residual_tick_step = kwargs.pop('cb_residual_tick_step', 3)
self.subplot_right = .99
self.subplot_left = .085
self.subplot_top = .92
self.subplot_bottom = .1
self.subplot_hspace = .2
self.subplot_wspace = .05
self.data_obj = None
self.resp_obj = None
self.model_obj = None
self.period_list = None
self.pt_data_arr = None
self.pt_resp_arr = None
self.pt_resid_arr = None
        self.residual_pt_type = kwargs.pop('residual_pt_type', 'heise')
# FZ: do not call plot in the constructor! it's not pythonic
self.plot_yn = kwargs.pop('plot_yn', 'n')
if self.plot_yn == 'y':
self.plot()
else:
self._read_files()
def _read_files(self):
"""
get information from files
"""
# --> read in data file
self.data_obj = Data()
self.data_obj.read_data_file(self.data_fn)
# --> read response file
if self.resp_fn is not None:
self.resp_obj = Data()
self.resp_obj.read_data_file(self.resp_fn)
        # --> read model file
if self.model_fn is not None:
self.model_obj = Model()
self.model_obj.read_model_file(self.model_fn)
self._get_plot_period_list()
self._get_pt()
def _get_plot_period_list(self):
"""
get periods to plot from input or data file
"""
# --> get period list to plot
if self.plot_period_list is None:
self.plot_period_list = self.data_obj.period_list
else:
if isinstance(self.plot_period_list, list):
# check if entries are index values or actual periods
                if isinstance(self.plot_period_list[0], int):
                    self.plot_period_list = [self.data_obj.period_list[ii]
                                             for ii in self.plot_period_list]
else:
pass
            elif isinstance(self.plot_period_list, int):
                self.plot_period_list = [self.data_obj.period_list[self.plot_period_list]]
elif isinstance(self.plot_period_list, float):
self.plot_period_list = [self.plot_period_list]
self.period_dict = dict([(key, value) for value, key in
enumerate(self.data_obj.period_list)])
def _get_pt(self):
"""
put pt parameters into something useful for plotting
"""
ns = len(list(self.data_obj.mt_dict.keys()))
nf = len(self.data_obj.period_list)
        data_pt_arr = np.zeros((nf, ns), dtype=[('phimin', float),
                                                ('phimax', float),
                                                ('skew', float),
                                                ('azimuth', float),
                                                ('east', float),
                                                ('north', float),
                                                ('lon', float),
                                                ('lat', float),
                                                ('station', 'S10')])
if self.resp_fn is not None:
            model_pt_arr = np.zeros((nf, ns), dtype=[('phimin', float),
                                                     ('phimax', float),
                                                     ('skew', float),
                                                     ('azimuth', float),
                                                     ('east', float),
                                                     ('north', float),
                                                     ('lon', float),
                                                     ('lat', float),
                                                     ('station', 'S10')])
            res_pt_arr = np.zeros((nf, ns), dtype=[('phimin', float),
                                                   ('phimax', float),
                                                   ('skew', float),
                                                   ('azimuth', float),
                                                   ('east', float),
                                                   ('north', float),
                                                   ('lon', float),
                                                   ('lat', float),
                                                   ('geometric_mean', float),
                                                   ('station', 'S10')])
for ii, key in enumerate(self.data_obj.mt_dict.keys()):
east = self.data_obj.mt_dict[key].grid_east / self.dscale
north = self.data_obj.mt_dict[key].grid_north / self.dscale
lon = self.data_obj.mt_dict[key].lon
lat = self.data_obj.mt_dict[key].lat
dpt = self.data_obj.mt_dict[key].pt
data_pt_arr[:, ii]['east'] = east
data_pt_arr[:, ii]['north'] = north
data_pt_arr[:, ii]['lon'] = lon
data_pt_arr[:, ii]['lat'] = lat
data_pt_arr[:, ii]['phimin'] = dpt.phimin
data_pt_arr[:, ii]['phimax'] = dpt.phimax
data_pt_arr[:, ii]['azimuth'] = dpt.azimuth
data_pt_arr[:, ii]['skew'] = dpt.beta
data_pt_arr[:, ii]['station'] = self.data_obj.mt_dict[key].station
if self.resp_fn is not None:
mpt = self.resp_obj.mt_dict[key].pt
try:
rpt = mtpt.ResidualPhaseTensor(pt_object1=dpt,
pt_object2=mpt,
residualtype=self.residual_pt_type)
rpt = rpt.residual_pt
res_pt_arr[:, ii]['east'] = east
res_pt_arr[:, ii]['north'] = north
res_pt_arr[:, ii]['lon'] = lon
res_pt_arr[:, ii]['lat'] = lat
res_pt_arr[:, ii]['phimin'] = rpt.phimin
res_pt_arr[:, ii]['phimax'] = rpt.phimax
res_pt_arr[:, ii]['azimuth'] = rpt.azimuth
res_pt_arr[:, ii]['skew'] = rpt.beta
res_pt_arr[:, ii]['station'] = self.data_obj.mt_dict[key].station
res_pt_arr[:, ii]['geometric_mean'] = np.sqrt(np.abs(rpt.phimin) *
np.abs(rpt.phimax))
except mtex.MTpyError_PT:
print(key, dpt.pt.shape, mpt.pt.shape)
model_pt_arr[:, ii]['east'] = east
model_pt_arr[:, ii]['north'] = north
model_pt_arr[:, ii]['lon'] = lon
model_pt_arr[:, ii]['lat'] = lat
model_pt_arr[:, ii]['phimin'] = mpt.phimin
model_pt_arr[:, ii]['phimax'] = mpt.phimax
model_pt_arr[:, ii]['azimuth'] = mpt.azimuth
model_pt_arr[:, ii]['skew'] = mpt.beta
model_pt_arr[
:, ii]['station'] = self.data_obj.mt_dict[key].station
# make these attributes
self.pt_data_arr = data_pt_arr
if self.resp_fn is not None:
self.pt_resp_arr = model_pt_arr
self.pt_resid_arr = res_pt_arr
def plot_on_axes(self, ax, m, periodIdx, ptarray='data', ellipse_size_factor=10000,
cvals=None, map_scale='m', centre_shift=[0, 0], plot_tipper='n',
tipper_size_factor=1e5, **kwargs):
'''
Plots phase tensors for a given period index.
:param ax: plot axis
:param m: basemap instance
:param periodIdx: period index
:param ptarray: name of data-array to access for retrieving attributes;
can be either 'data', 'resp' or 'resid'
:param ellipse_size_factor: factor to control ellipse size
:param cvals: list of colour values for colouring each ellipse; must be of
the same length as the number of tuples for each period
:param map_scale: map length scale
:param kwargs: list of relevant matplotlib arguments (e.g. zorder, alpha, etc.)
:param plot_tipper: string ('n', 'yr', 'yi', or 'yri') to plot
no tipper, real only, imaginary only, or both
:param tipper_size_factor: scaling factor for tipper vectors
'''
assert (periodIdx >= 0 and periodIdx < len(self.data_obj.period_list)), \
'Error: Index for plot-period out of bounds.'
k = periodIdx
pt_array = getattr(self, 'pt_' + ptarray + '_arr')
for i in range(len(pt_array[k])):
lon = pt_array[k]['lon'][i]
lat = pt_array[k]['lat'][i]
if self.normalise_ellipses:
phimax = pt_array[k]['phimax'][i] / pt_array[k]['phimax'][i]
phimin = pt_array[k]['phimin'][i] / pt_array[k]['phimax'][i]
else:
phimax = pt_array[k]['phimax'][i] / pt_array[k]['phimax'].max()
phimin = pt_array[k]['phimin'][i] / pt_array[k]['phimax'].max()
az = pt_array[k]['azimuth'][i]
if ptarray == 'resid':
phimin = np.abs(phimin)
nskew = pt_array[k]['skew'][i]
# print az
if (phimax > 0 and phimin > 0):
c = None
if (cvals is not None): c = cvals[i]
if (c is not None): kwargs['facecolor'] = c
if m is None:
x = pt_array[k]['east'][i]
y = pt_array[k]['north'][i]
if map_scale == 'km':
x /= 1e3
y /= 1e3
else:
x, y = m(lon, lat)
# matplotlib angles are defined as degrees anticlockwise from positive x direction.
# therefore we need to adjust az accordingly
e = Ellipse([x, y],
phimax * ellipse_size_factor,
phimin * ellipse_size_factor,
90. - az, **kwargs)
ax.add_artist(e)
# end if
# end for
if 'y' in plot_tipper:
            # if neither r nor i is provided, assume that we want to plot both
if plot_tipper == 'y':
plot_tipper = 'yri'
self._plot_induction_vectors(ax, m, periodIdx,
ptarray=ptarray, size_factor=tipper_size_factor,
map_scale=map_scale, centre_shift=centre_shift,
plot_tipper=plot_tipper, **kwargs)
# end func
def plot(self, period = None, periodIdx = 0, save2file=None, **kwargs):
""" Plot phase tensor maps for data and or response, each figure is of a
different period. If response is input a third column is added which is
the residual phase tensor showing where the model is not fitting the data
well. The data is plotted in km.
Args:
period: the period index to plot, default=0
Returns:
"""
print(("The input parameter period is", period))
# --> read in data first
if self.data_obj is None:
self._read_files()
# set plot properties
plt.rcParams['font.size'] = self.font_size
plt.rcParams['figure.subplot.left'] = self.subplot_left
plt.rcParams['figure.subplot.right'] = self.subplot_right
plt.rcParams['figure.subplot.bottom'] = self.subplot_bottom
plt.rcParams['figure.subplot.top'] = self.subplot_top
font_dict = {'size': self.font_size + 2, 'weight': 'bold'}
# make a grid of subplots
gs = gridspec.GridSpec(1, 3, hspace=self.subplot_hspace,
wspace=self.subplot_wspace)
# set some parameters for the colorbar
ckmin = float(self.ellipse_range[0])
ckmax = float(self.ellipse_range[1])
try:
ckstep = float(self.ellipse_range[2])
except IndexError:
if self.ellipse_cmap == 'mt_seg_bl2wh2rd':
raise ValueError('Need to input range as (min, max, step)')
else:
ckstep = 3
bounds = np.arange(ckmin, ckmax + ckstep, ckstep)
# set plot limits to be the station area
if self.ew_limits is None:
east_min = self.data_obj.data_array['rel_east'].min() - \
self.pad_east
east_max = self.data_obj.data_array['rel_east'].max() + \
self.pad_east
self.ew_limits = (east_min / self.dscale, east_max / self.dscale)
if self.ns_limits is None:
north_min = self.data_obj.data_array['rel_north'].min() - \
self.pad_north
north_max = self.data_obj.data_array['rel_north'].max() + \
self.pad_north
self.ns_limits = (north_min / self.dscale, north_max / self.dscale)
# -------------plot phase tensors------------------------------------
        # default to periodIdx when no period index is given (None would crash below)
        if period is None:
            period = periodIdx
        if period > len(self.plot_period_list) - 1:
            print("Error: the period exceeds the max value:",
                  len(self.plot_period_list) - 1)
# FZ: changed below to plot a given period index
# for ff, per in enumerate(self.plot_period_list):
# first, reset fig list
self.fig_list = []
for ff, per in enumerate(self.plot_period_list[period:period + 1]):
data_ii = self.period_dict[per]
print('Plotting Period: {0:.5g}'.format(per))
fig = plt.figure('{0:.5g}'.format(per), figsize=self.fig_size,
dpi=self.fig_dpi)
fig.clf()
if self.resp_fn is not None:
axd = fig.add_subplot(gs[0, 0], aspect='equal')
axm = fig.add_subplot(gs[0, 1], aspect='equal')
axr = fig.add_subplot(gs[0, 2], aspect='equal')
ax_list = [axd, axm, axr]
else:
axd = fig.add_subplot(gs[0, :], aspect='equal')
ax_list = [axd]
# plot model below the phase tensors
if self.model_fn is not None:
gridzcentre = np.mean(
[self.model_obj.grid_z[1:], self.model_obj.grid_z[:-1]], axis=0)
                # estimate the depth slice only when no explicit d_index was given
                if self.d_index is None:
                    approx_depth, d_index = ws.estimate_skin_depth(self.model_obj.res_model.copy(),
                                                                   gridzcentre / self.dscale,
                                                                   per,
                                                                   dscale=self.dscale)
                else:
                    d_index = self.d_index
                    approx_depth = self.model_obj.grid_z[d_index]
# need to add an extra row and column to east and north to make sure
# all is plotted see pcolor for details.
plot_east = np.append(self.model_obj.grid_east,
self.model_obj.grid_east[-1] * 1.25) / \
self.dscale
plot_north = np.append(self.model_obj.grid_north,
self.model_obj.grid_north[-1] * 1.25) / \
self.dscale
# make a mesh grid for plotting
# the 'ij' makes sure the resulting grid is in east, north
try:
self.mesh_east, self.mesh_north = np.meshgrid(plot_east,
plot_north,
indexing='ij')
except TypeError:
self.mesh_east, self.mesh_north = [arr.T for arr in np.meshgrid(plot_east,
plot_north)]
for ax in ax_list:
plot_res = np.log10(
self.model_obj.res_model[
:, :, d_index].T)
ax.pcolormesh(self.mesh_east,
self.mesh_north,
plot_res,
cmap=self.res_cmap,
vmin=self.res_limits[0],
vmax=self.res_limits[1])
# --> plot data phase tensors
print(kwargs)
for pt in self.pt_data_arr[data_ii]:
eheight = pt['phimin'] / \
self.pt_data_arr[data_ii]['phimax'].max() * \
self.ellipse_size
ewidth = pt['phimax'] / \
self.pt_data_arr[data_ii]['phimax'].max() * \
self.ellipse_size
ellipse = Ellipse((pt['east'],
pt['north']),
width=ewidth,
height=eheight,
angle=90 - pt['azimuth'],
**kwargs)
# get ellipse color
if self.ellipse_cmap.find('seg') > 0:
ellipse.set_facecolor(mtcl.get_plot_color(pt[self.ellipse_colorby],
self.ellipse_colorby,
self.ellipse_cmap,
ckmin,
ckmax,
bounds=bounds))
else:
ellipse.set_facecolor(mtcl.get_plot_color(pt[self.ellipse_colorby],
self.ellipse_colorby,
self.ellipse_cmap,
ckmin,
ckmax))
axd.add_artist(ellipse)
# -----------plot response phase tensors---------------
if self.resp_fn is not None:
rcmin = np.floor(self.pt_resid_arr['geometric_mean'].min())
rcmax = np.floor(self.pt_resid_arr['geometric_mean'].max())
for mpt, rpt in zip(self.pt_resp_arr[data_ii],
self.pt_resid_arr[data_ii]):
eheight = mpt['phimin'] / \
self.pt_resp_arr[data_ii]['phimax'].max() * \
self.ellipse_size
ewidth = mpt['phimax'] / \
self.pt_resp_arr[data_ii]['phimax'].max() * \
self.ellipse_size
ellipsem = Ellipse((mpt['east'],
mpt['north']),
width=ewidth,
height=eheight,
angle=90 - mpt['azimuth'],
**kwargs)
# get ellipse color
if self.ellipse_cmap.find('seg') > 0:
ellipsem.set_facecolor(mtcl.get_plot_color(mpt[self.ellipse_colorby],
self.ellipse_colorby,
self.ellipse_cmap,
ckmin,
ckmax,
bounds=bounds))
else:
ellipsem.set_facecolor(mtcl.get_plot_color(mpt[self.ellipse_colorby],
self.ellipse_colorby,
self.ellipse_cmap,
ckmin,
ckmax))
axm.add_artist(ellipsem)
# -----------plot residual phase tensors---------------
eheight = rpt['phimin'] / \
self.pt_resid_arr[data_ii]['phimax'].max() * \
self.ellipse_size
ewidth = rpt['phimax'] / \
self.pt_resid_arr[data_ii]['phimax'].max() * \
self.ellipse_size
ellipser = Ellipse((rpt['east'],
rpt['north']),
width=ewidth,
height=eheight,
angle=rpt['azimuth'],
**kwargs)
# get ellipse color
rpt_color = np.sqrt(abs(rpt['phimin'] * rpt['phimax']))
if self.ellipse_cmap.find('seg') > 0:
ellipser.set_facecolor(mtcl.get_plot_color(rpt_color,
'geometric_mean',
self.residual_cmap,
ckmin,
ckmax,
bounds=bounds))
else:
ellipser.set_facecolor(mtcl.get_plot_color(rpt_color,
'geometric_mean',
self.residual_cmap,
ckmin,
ckmax))
axr.add_artist(ellipser)
# --> set axes properties
# data
axd.set_xlim(self.ew_limits)
axd.set_ylim(self.ns_limits)
axd.set_xlabel('Easting ({0})'.format(self.map_scale),
fontdict=font_dict)
axd.set_ylabel('Northing ({0})'.format(self.map_scale),
fontdict=font_dict)
# make a colorbar for phase tensors
# bb = axd.axes.get_position().bounds
bb = axd.get_position().bounds
y1 = .25 * (2 + (self.ns_limits[1] - self.ns_limits[0]) /
(self.ew_limits[1] - self.ew_limits[0]))
cb_location = (3.35 * bb[2] / 5 + bb[0],
y1 * self.cb_pt_pad, .295 * bb[2], .02)
cbaxd = fig.add_axes(cb_location)
if self.ellipse_cmap in list(mtcl.cmapdict.keys()):
ecmap = mtcl.cmapdict[self.ellipse_cmap]
else:
ecmap = self.ellipse_cmap
cbd = mcb.ColorbarBase(cbaxd,
cmap=ecmap,
norm=Normalize(vmin=ckmin,
vmax=ckmax),
orientation='horizontal')
cbd.ax.xaxis.set_label_position('top')
cbd.ax.xaxis.set_label_coords(.5, 1.75)
cbd.set_label(mtplottools.ckdict[self.ellipse_colorby])
cbd.set_ticks(np.arange(ckmin, ckmax + self.cb_tick_step,
self.cb_tick_step))
axd.text(self.ew_limits[0] * .95,
self.ns_limits[1] * .95,
'Data',
horizontalalignment='left',
verticalalignment='top',
bbox={'facecolor': 'white'},
fontdict={'size': self.font_size + 1})
# Model and residual
if self.resp_fn is not None:
for aa, ax in enumerate([axm, axr]):
ax.set_xlim(self.ew_limits)
ax.set_ylim(self.ns_limits)
ax.set_xlabel('Easting ({0})'.format(self.map_scale),
fontdict=font_dict)
plt.setp(ax.yaxis.get_ticklabels(), visible=False)
                    # make a colorbar on top of axis
bb = ax.axes.get_position().bounds
y1 = .25 * (2 + (self.ns_limits[1] - self.ns_limits[0]) /
(self.ew_limits[1] - self.ew_limits[0]))
cb_location = (3.35 * bb[2] / 5 + bb[0],
y1 * self.cb_pt_pad, .295 * bb[2], .02)
cbax = fig.add_axes(cb_location)
if aa == 0:
cb = mcb.ColorbarBase(cbax,
cmap=mtcl.cmapdict[
self.ellipse_cmap],
norm=Normalize(vmin=ckmin,
vmax=ckmax),
orientation='horizontal')
cb.ax.xaxis.set_label_position('top')
cb.ax.xaxis.set_label_coords(.5, 1.75)
cb.set_label(mtplottools.ckdict[self.ellipse_colorby])
cb.set_ticks(np.arange(ckmin, ckmax + self.cb_tick_step,
self.cb_tick_step))
ax.text(self.ew_limits[0] * .95,
self.ns_limits[1] * .95,
'Model',
horizontalalignment='left',
verticalalignment='top',
bbox={'facecolor': 'white'},
fontdict={'size': self.font_size + 1})
else:
cb = mcb.ColorbarBase(cbax,
cmap=mtcl.cmapdict[
self.residual_cmap],
norm=Normalize(vmin=rcmin,
vmax=rcmax),
orientation='horizontal')
cb.ax.xaxis.set_label_position('top')
cb.ax.xaxis.set_label_coords(.5, 1.75)
cb.set_label(r"$\sqrt{\Phi_{min} \Phi_{max}}$")
cb_ticks = [rcmin, (rcmax - rcmin) / 2, rcmax]
cb.set_ticks(cb_ticks)
ax.text(self.ew_limits[0] * .95,
self.ns_limits[1] * .95,
'Residual',
horizontalalignment='left',
verticalalignment='top',
bbox={'facecolor': 'white'},
fontdict={'size': self.font_size + 1})
if self.model_fn is not None:
for ax in ax_list:
ax.tick_params(direction='out')
bb = ax.axes.get_position().bounds
y1 = .25 * (2 - (self.ns_limits[1] - self.ns_limits[0]) /
(self.ew_limits[1] - self.ew_limits[0]))
cb_position = (3.0 * bb[2] / 5 + bb[0],
y1 * self.cb_res_pad, .35 * bb[2], .02)
cbax = fig.add_axes(cb_position)
cb = mcb.ColorbarBase(cbax,
cmap=self.res_cmap,
norm=Normalize(vmin=self.res_limits[0],
vmax=self.res_limits[1]),
orientation='horizontal')
cb.ax.xaxis.set_label_position('top')
cb.ax.xaxis.set_label_coords(.5, 1.5)
                    cb.set_label(r'Resistivity ($\Omega \cdot$m)')
cb_ticks = np.arange(np.floor(self.res_limits[0]),
np.ceil(self.res_limits[1] + 1), 1)
cb.set_ticks(cb_ticks)
cb.set_ticklabels([mtplottools.labeldict[ctk]
for ctk in cb_ticks])
if save2file is not None:
fig.savefig(save2file, dpi=self.fig_dpi, bbox_inches='tight')
plt.show()
self.fig_list.append(fig)
return fig
def redraw_plot(self):
"""
redraw plot if parameters were changed
use this function if you updated some attributes and want to re-plot.
:Example: ::
>>> # change the color and marker of the xy components
>>> import mtpy.modeling.occam2d as occam2d
>>> ocd = occam2d.Occam2DData(r"/home/occam2d/Data.dat")
>>> p1 = ocd.plotAllResponses()
>>> #change line width
>>> p1.lw = 2
>>> p1.redraw_plot()
"""
for fig in self.fig_list:
plt.close(fig)
self.plot()
def _plot_induction_vectors(self, ax, m, periodIdx, ptarray='data', size_factor=10000,
map_scale='m', centre_shift=[0, 0], plot_tipper='yri', **kwargs):
if ptarray == 'data':
data_array = self.data_obj.data_array
elif ptarray == 'resp':
data_array = self.resp_obj.data_array
rx = data_array['tip'].real[:,periodIdx,0,0]
ry = data_array['tip'].real[:,periodIdx,0,1]
ix = data_array['tip'].imag[:,periodIdx,0,0]
iy = data_array['tip'].imag[:,periodIdx,0,1]
lon,lat = self.data_obj.station_locations.lon, self.data_obj.station_locations.lat
x,y = m(lon,lat)
kwargs_tip = {'length_includes_head':True,
'head_width':size_factor*0.07,
'head_length': size_factor*0.1}
kwargs_tip.update(kwargs)
for sidx in range(len(self.data_obj.data_array)):
if 'r' in plot_tipper:
ax.arrow(x[sidx],y[sidx],size_factor*rx[sidx],size_factor*ry[sidx],color='k',**kwargs_tip)
if 'i' in plot_tipper:
ax.arrow(x[sidx],y[sidx],size_factor*ix[sidx],size_factor*iy[sidx],color='b',**kwargs_tip)
def _get_pt_data_list(self, attribute, xykeys=['east', 'north']):
headerlist = ['period', 'station'] + xykeys + \
['azimuth', 'phimin', 'phimax', 'skew']
data = getattr(self, attribute).T.copy()
indices = np.argsort(data['station'][:, 0])
data = data[indices].T
dtype = []
for val in headerlist:
if val == 'station':
dtype.append((val, 'S10'))
else:
                dtype.append((val, float))
        data_to_write = np.zeros(np.prod(data.shape), dtype=dtype)
data_to_write['period'] = np.vstack(
[self.plot_period_list] * data.shape[1]).T.flatten()
for val in headerlist[1:]:
if val in ['east', 'north']:
data[val] *= self.dscale
data_to_write[val] = data[val].flatten()
return data_to_write, headerlist
def get_period_attributes(self, periodIdx, key, ptarray='data'):
'''
Returns, for a given period, a list of attribute values for key
(e.g. skew, phimax, etc.).
:param periodIdx: index of period; print out _plot_period for periods available
:param key: attribute key
:param ptarray: name of data-array to access for retrieving attributes;
can be either 'data', 'resp' or 'resid'
:return: numpy array of attribute values
'''
# load data if necessary
if self.data_obj is None:
self._read_files()
assert (periodIdx >= 0 and periodIdx < len(self.plot_period_list)), \
'Error: Index for plot-period out of bounds.'
pk = periodIdx
try:
print("getting", key)
if key == 'phimean':
vals = np.mean([getattr(self, 'pt_' + ptarray + '_arr')[pk]['phimin'],
getattr(self, 'pt_' + ptarray + '_arr')[pk]['phimax']],axis=0)
else:
vals = getattr(self, 'pt_' + ptarray + '_arr')[pk][key]
return vals
        except Exception:
print('Attribute %s not found' % ('pt_' + ptarray + '_arr'))
logging.error(traceback.format_exc())
exit(-1)
return None
# end func
def write_pt_data_to_text(self, savepath='.'):
if self.pt_data_arr is None:
self._read_files()
for att in ['pt_data_arr', 'pt_resp_arr', 'pt_resid_arr']:
if hasattr(self, att):
data_to_write, headerlist = self._get_pt_data_list(att)
header = ' '.join(headerlist)
filename = op.join(savepath, att[:-4] + '.txt')
if att == 'pt_resid_arr':
data_to_write['azimuth'] = 90. - data_to_write['azimuth']
np.savetxt(filename, data_to_write, header=header,
fmt=['%.4e', '%s', '%.2f', '%.2f', '%.2f', '%.2f', '%.2f', '%.3f'])
def write_pt_data_to_gmt(self, period=None, epsg=None, savepath='.', center_utm=None,
colorby='phimin', attribute='data', clim=None):
"""
write data to plot phase tensor ellipses in gmt.
saves a gmt script and text file containing ellipse data
provide:
period to plot (seconds)
epsg for the projection the model was projected to
(google "epsg your_projection_name" and you will find it)
        center_utm - utm coordinates for centre position of model, if not
provided, script will try and extract it from data file
colorby - what to colour the ellipses by, 'phimin', 'phimax', or 'skew'
attribute - attribute to plot 'data', 'resp', or 'resid' for data,
response or residuals
"""
att = 'pt_{}_arr'.format(attribute)
# if centre utm not provided, get station locations from the data
# object
project = False
xykeys = ['lon', 'lat']
if epsg is not None:
if center_utm is not None:
project = True
else:
if hasattr(self.data_obj, 'center_position'):
if np.all(np.array(self.data_obj.center_position) > 0):
project = True
center_utm = self.data_obj.project_xy(self.data_obj.center_position[0],
self.data_obj.center_position[
1],
epsg_from=4326, epsg_to=epsg)
if project:
xykeys = ['east', 'north']
# get text data list
data, headerlist = self._get_pt_data_list(att, xykeys=xykeys)
# extract relevant columns in correct order
periodlist = data['period']
columns = xykeys + [colorby, 'azimuth', 'phimax', 'phimin']
gmtdata = np.vstack([data[i] for i in columns]).T
        # make a filename based on period; default to the first period so the
        # suffix formatting below doesn't fail when period is None
        if period is None:
            period = periodlist[0]
        if period >= 1.:
            suffix = '%1i' % round(period)
else:
            nzeros = int(abs(np.floor(np.log10(period))))
fmt = '%0' + str(nzeros + 1) + 'i'
suffix = fmt % (period * 10 ** nzeros)
filename = 'ellipse_' + attribute + '.' + suffix
if period is not None:
# extract relevant period
unique_periods = np.unique(periodlist)
closest_period = unique_periods[np.abs(unique_periods - period) ==
np.amin(np.abs(unique_periods - period))]
# indices to select all occurrances of relevant period (to nearest
# 10^-8 s)
pind = np.where(np.abs(closest_period - periodlist) < 1e-8)[0]
else:
# take the first period
pind = 0
# select relevant periods
periodlist, gmtdata = periodlist[pind], gmtdata[pind]
if project:
gmtdata[:, 0] += center_utm[0]
gmtdata[:, 1] += center_utm[1]
# now that x y coordinates are in utm, project to lon/lat
self.data_obj.epsg = epsg
gmtdata[
:, 0], gmtdata[
:, 1] = self.data_obj.project_xy(
gmtdata[
:, 0], gmtdata[
:, 1])
if self.normalise_ellipses:
norm = gmtdata[:,4]
else:
# normalise by maximum value of phimax
norm = np.amax(gmtdata[:, 4])
gmtdata[:, 5] /= norm
gmtdata[:, 4] /= norm
if attribute != 'resid':
gmtdata[:, 3] = 90. - gmtdata[:, 3]
# write to text file in correct format
fmt = ['%+11.6f', '%+10.6f'] + ['%+9.4f'] * 2 + ['%8.4f'] * 2
np.savetxt(op.join(savepath, filename), gmtdata, fmt)
# write gmt script
xmin, xmax = gmtdata[:, 0].min(), gmtdata[:, 0].max()
ymin, ymax = gmtdata[:, 1].min(), gmtdata[:, 1].max()
pad = min(ymax - ymin, xmax - xmin) / 10.
tr = -int(np.log10(20. * (xmax - xmin)))
tickspacing = int(np.round(20. * (xmax - xmin), tr))
        scalebarlat = int(round((ymax + ymin) / 2.))
if clim is None:
cr = int(np.ceil(-np.log10(np.amax(gmtdata[:, 2]))))
            clim = np.round([gmtdata[:, 2].min(),
                             gmtdata[:, 2].max()], cr).astype(int)
gmtlines = [line + '\n' for line in ['w={}'.format(xmin - pad),
'e={}'.format(xmax + pad),
's={}'.format(ymin - pad),
'n={}'.format(ymax + pad),
r"wesn=$w/$s/$e/$n'r'",
'',
'# define output file and remove it if it exists',
'PS={}.ps'.format(
filename.replace('.', '')),
'rm $PS',
'',
'# set gmt parameters',
'gmtset FORMAT_GEO_MAP ddd:mm:ss',
'gmtset FONT_ANNOT_PRIMARY 9p,Helvetica,black',
'gmtset MAP_FRAME_TYPE fancy',
'',
'# make colour palette',
'makecpt -Cpolar -T{}/{} -Z > {}.cpt'.format(
clim[0], clim[1], colorby),
'',
'# draw coastline',
'pscoast -R$wesn -JM18c -W0.5p -Ba1f1/a1f1WSen -Gwhite -Slightgrey -Lfx14c/1c/{}/{}+u -Df -P -K >> $PS'.format(
scalebarlat, tickspacing),
'',
'# draw ellipses',
'psxy {} -R -J -P -Se -C{}.cpt -W0.01p -O >> $PS'.format(filename,
colorby),
'',
'# save to png',
'ps2raster -Tg -A -E400 $PS']]
        with open(op.join(savepath, 'gmtscript_{}.gmt'.format(attribute)), 'w') as scriptfile:
scriptfile.writelines(gmtlines)
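    # Usage sketch (hedged; the epsg and period values below are only examples):
    #
    #   ptmap.write_pt_data_to_gmt(period=10., epsg=28353, savepath='gmt_out',
    #                              colorby='phimin', attribute='data')
    #   # -> gmt_out/ellipse_data.10 and gmt_out/gmtscript_data.gmt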
def save_all_figures(self, save_path=None, fig_dpi=None, file_format='pdf',
orientation='landscape', close_fig='y'):
"""
save_figure will save all figures in fig_list to save_fn.
Arguments:
-----------
**save_path** : string
full path to save figure to, can be input as
* directory path -> the directory path to save to
in which the file will be saved as
save_fn/station_name_PhaseTensor.file_format
* full path -> file will be save to the given
path. If you use this option then the format
will be assumed to be provided by the path
**file_format** : [ pdf | eps | jpg | png | svg ]
file type of saved figure pdf,svg,eps...
**orientation** : [ landscape | portrait ]
orientation in which the file will be saved
*default* is portrait
**fig_dpi** : int
The resolution in dots-per-inch the file will be
saved. If None then the dpi will be that at
which the figure was made. I don't think that
it can be larger than dpi of the figure.
**close_plot** : [ y | n ]
* 'y' will close the plot after saving.
* 'n' will leave plot open
:Example: ::
>>> # to save plot as jpg
>>> import mtpy.modeling.occam2d as occam2d
>>> dfn = r"/home/occam2d/Inv1/data.dat"
>>> ocd = occam2d.Occam2DData(dfn)
>>> ps1 = ocd.plotPseudoSection()
>>> ps1.save_plot(r'/home/MT/figures', file_format='jpg')
"""
        if fig_dpi is None:
            fig_dpi = self.fig_dpi
        if not os.path.isdir(save_path):
            try:
                os.mkdir(save_path)
            except OSError:
                raise IOError('Need to input a correct directory path')
for fig in self.fig_list:
per = fig.canvas.get_window_title()
save_fn = os.path.join(save_path, 'PT_DepthSlice_{0}s.{1}'.format(
per, file_format))
fig.savefig(save_fn, dpi=fig_dpi, format=file_format,
orientation=orientation, bbox_inches='tight')
            if close_fig == 'y':
                plt.close(fig)
self.fig_fn = save_fn
print('Saved figure to: ' + self.fig_fn)
|
MTgeophysics/mtpy
|
mtpy/modeling/modem/phase_tensor_maps.py
|
Python
|
gpl-3.0
| 54,665
|
a = int(input())
# True when a lies in (-15, 12], (14, 17), or [19, +inf)
print(-15 < a <= 12 or 14 < a < 17 or a >= 19)
|
fess932/python
|
Stepic Программирование на Python/Часть первая/интервалы.py
|
Python
|
gpl-3.0
| 53
|
#!/usr/bin/env python
# COPYRIGHT: Robosub Club of the Palouse under the GPL v3
"""Creates and maintains Robosub vision processes.
Creates one process for every vision processor defined in
robosub/src/settings.json.
"""
#import json # TODO remove this once the hack main function is removed
#from vision_settings import vision_settings
import sys
import os
import cv2
import cv2.cv as cv
import argparse
from multiprocessing import Process, Pipe
from stream_processor import StreamProcessor
from time import sleep
sys.path.append(os.path.abspath('../..'))
from robosub_settings import settings
from util.communication.grapevine import Communicator
# TODO figure out how to initialize the camera driver settings through guvcview.
class VisionController(object):
"""Create and maintain Robosub video logic processes."""
def __init__(self):
"""Create and maintain all video logic processes defined in settings.
Args:
"""
#sys.stdout, sys.stdin, sys.stderr = [open('/dev/null', 'w')] * 3
#self._vision_processors = []
self._vision_processors = {}
self._init_vision_processors()
print 'size of vision pool is {size}'.format(
size=len(self._vision_processors))
# Monitor vision processes and reinitialize any that fail.
self._maintain_vision_processors()
def _init_vision_processors(self):
"""Initialize process for each settings['vision_processors']."""
for vp_name in settings['sensor/vision/control']['vision_processors']:
self._init_vision_process(vp_name)
def _init_vision_process(self, process_name):
"""Initialize a process using settings given in vp_settings dict.
Args:
process_name: Name of process. Must match a process key entry in settings.
"""
parent_conn, child_conn = Pipe()
        proc = Process(target=StreamProcessor,
                       name=process_name,
                       args=(process_name, child_conn))
#We want all managed processes to die if VisionController dies.
proc.daemon = True
proc.start()
parent_conn.send(1)
self._vision_processors[process_name] = \
{'process': proc, 'pipe': parent_conn}
def _maintain_vision_processors(self):
"""Keep all processes in self._vision_processors alive.
Every 'interval' seconds, check that all processes in
self._vision_processors are responsive. Restart processes that are
unresponsive or stopped.
"""
#FIXME this is only checking that the processes are still running. Make
#sure they are still responsive too. Pipe message passing?
while True:
sleep(settings['sensor/vision/control']['maintenance_interval'])
for process_name in self._vision_processors.keys():
self._maintain_proc(process_name)
# TODO if a process has died or frozen, the process needs to be destroyed
# and restarted after the camera has been given sufficient time to shut
# down (~4sec?). This wait should be done in a separate thread so the rest
# of the vision processing can continue.
def _maintain_proc(self, process_name):
proc = self._vision_processors[process_name]['process']
parent_conn = self._vision_processors[process_name]['pipe']
if not proc.is_alive() or not parent_conn.poll():
# Process died unexpectedly or froze, restart it.
self._init_vision_process(process_name)
else:
# Send the token to the child process. If the child doesn't freeze
# we should get it back before the next maintenance check.
parent_conn.send(parent_conn.recv())
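    # Child-side sketch of the keepalive protocol above (assumed; the real
    # loop lives in StreamProcessor): the child echoes back every token it
    # receives, so parent_conn.poll() only comes up empty if the child hangs.
    #
    #   while True:
    #       token = child_conn.recv()
    #       process_one_frame()
    #       child_conn.send(token)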
def _main(args):
"""Start the vision daemon."""
#settings = json.loads(open(args.settings_path, 'r').read())
if args.video:
settings['sensor/vision/control']['vision_processors'] = ['sensor/vision/cam_fake']
settings['sensor/vision/cam_fake']['recorded_video'] = args.video
if args.plugins:
settings['sensor/vision/cam_fake']['plugins'] = args.plugins
v = VisionController()
def _command_line():
"""Parse command line arguments."""
parser = argparse.ArgumentParser(description='Mock module.')
parser.add_argument('-v', '--video', type=str,
help='Video file or stream to open.')
# TODO figure out how to list plugins?
parser.add_argument('-p', '--plugins', type=str, nargs='+',
help='Plugins to load.')
return parser.parse_args()
#TODO delete this
if __name__ == '__main__':
_main(_command_line())
|
pi19404/robosub-1
|
src/sensor/vision/vision_controller.py
|
Python
|
gpl-3.0
| 4,677
|
# -*- coding: utf-8 -*-
"""
Created on Thu Feb 20 19:15:28 2014
@author: Pete
"""
from __future__ import division, print_function
import numpy as np
import xlwt
# Constants
R = 0.5
A = 1.0
D = 1.0
tsrs = np.arange(0.1, 3.15, 0.1)
tsr_wake = 1.9
speeds = np.arange(0.4, 1.45, 0.2)
z_H = np.arange(0, 0.75, 0.125)
y_R = np.hstack([-3.,-2.75,-2.5,-2.25,-2.,-1.8, np.arange(-1.6,0.1,0.1)])
y_R = np.hstack([y_R, -np.flipud(y_R[0:-1])])
y_R = np.round(y_R, decimals=4)
# Add regular experiment sections and top level types
sections = []
types = []
for u in speeds:
sections.append("Perf-" + str(u))
for u in speeds:
sections.append("Wake-" + str(u))
# Add tare drag and tare torque to sections
sections.append("Tare drag")
sections.append("Tare torque")
# Compute highest and lowest RPMs for tare torque
rpm_low = np.min(tsrs)*np.min(speeds)/R*60/(2*np.pi)
rpm_high = np.max(tsrs)*np.max(speeds)/R*60/(2*np.pi)
rpms_tt = np.linspace(rpm_low, rpm_high)
times_tt = 30 # 30 second runs for tare torque
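# The rotor speed follows from the tip speed ratio: omega = tsr*U/R [rad/s],
# so rpm = tsr*U/R * 60/(2*pi). For example, tsr=0.1 with U=0.4 m/s and
# R=0.5 m gives 0.08 rad/s ~= 0.76 rpm; tsr=3.1 with U=1.4 m/s gives
# 8.68 rad/s ~= 82.9 rpm, which brackets the tare torque sweep below.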
# Create Excel sheet
wb = xlwt.Workbook()
sheet_tl = wb.add_sheet("Top level")
sheet_tl.write(0, 0, "Type")
sheet_tl.write(0, 1, "U")
sheet_tl.write(0, 2, "TSR")
for n in range(len(sections)):
if "Perf" in sections[n]:
sheet_tl.write(n+1, 0, "Perf curve")
sheet_tl.write(n+1, 1, float(sections[n].split("-")[-1]))
sheet_tl.write(n+1, 2, str(tsrs[0]) + "--" + str(tsrs[-1]))
elif "Wake" in sections[n]:
sheet_tl.write(n+1, 0, "Wake map")
sheet_tl.write(n+1, 1, float(sections[n].split("-")[-1]))
sheet_tl.write(n+1, 2, tsr_wake)
else:
pass
print(rpms_tt)
for section in sections:
sheet = wb.add_sheet(section)
|
petebachant/TurbineDAQ
|
scripts/test_plan_builder/test_plan_builder.py
|
Python
|
gpl-3.0
| 1,699
|
################################################################################
################################################################################
class JumpEvent(object):
##############################################################################
def __init__(self, State, InitialVelocity, MaxNumberOfJumps = 2):
self.State = State
self.InitialVelocity = InitialVelocity
self.MaxNumberOfJumps = MaxNumberOfJumps
self.NumberOfJumps = 0
##############################################################################
def ProcessEvent(self, Event):
if self.NumberOfJumps < self.MaxNumberOfJumps:
self.State.yVelocity = self.InitialVelocity
self.NumberOfJumps += 1
##############################################################################
def ResetJumpCount(self):
self.NumberOfJumps = 0
################################################################################
################################################################################
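# Usage sketch (the State object only needs a yVelocity attribute; the values
# here are illustrative, not taken from the game):
#
#   class _State(object):
#       yVelocity = 0.0
#
#   Jump = JumpEvent(_State(), InitialVelocity=-12.0)
#   Jump.ProcessEvent(None)   # first jump
#   Jump.ProcessEvent(None)   # double jump
#   Jump.ProcessEvent(None)   # ignored: MaxNumberOfJumps reached
#   Jump.ResetJumpCount()     # call on landing to re-enable jumping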
|
dloman/FiestaMonsterz
|
Events/JumpEvent.py
|
Python
|
gpl-3.0
| 1,020
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import caffe
import numpy as np
import scipy.ndimage
from utils import *
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
transformations = {
"identity": { "f": lambda i,v: i, "steps": [0], },
"shift_x": { "f": img_shift_x, "steps": rangesym(1, 15, 2) },
"shift_y": { "f": img_shift_y, "steps": rangesym(1, 25, 2) },
"blur": { "f": img_blur, "steps": range(1, 5, 1) },
"rotate": { "f": img_rotate, "steps": rangesym(1, 93, 2) },
"sindisp_x": { "f": img_sindisp_x, "steps": rangesym(1, 6, 1) },
"sindisp_y": { "f": img_sindisp_y, "steps": rangesym(1, 6, 1) },
}
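# Each entry maps a transform name to a callable f(img, step) and the list of
# step values to sweep; the img_* helpers and rangesym come from the local
# utils module star-imported above.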
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--proto', type=str, required=True)
parser.add_argument('--model', type=str, required=True)
parser.add_argument('--image', type=str, nargs='+')
parser.add_argument('--layer', type=str, required=True)
parser.add_argument('--fwd-npz', type=str, required=True)
parser.add_argument('--pca-npz', type=str, required=True)
parser.add_argument('--out-npz', type=str, required=True)
parser.add_argument('--seed', type=int, default=0)
args = parser.parse_args()
print "Load data"
imgs_orig = [ scipy.ndimage.imread(i, flatten=True).astype(np.uint8)
for i in args.image ]
fwd_npz = np.load(args.fwd_npz)
blobs = fwd_npz[args.layer]
labels = fwd_npz['labels']
infos = np.array([ dict(src="dataset", l=int(l), tr="identity", v=0)
for l in labels ])
pca_npz = np.load(args.pca_npz)
pca = pca_npz['pca'].flat.next()
net = caffe.Net(args.proto, args.model, caffe.TEST)
caffe.set_mode_cpu()
print "Compute transformations"
imgs_tr = []
for i, img in enumerate(imgs_orig):
print " input image:", args.image[i]
for k, t in transformations.iteritems():
print " transform:", k
for s in t['steps']:
imgs_tr.append(t['f'](img, s))
fname = "".join(args.image[i].split('/')[-1].split('.')[:-1])
infos = np.append(infos, dict(src=fname, l=-1, tr=k, v=s))
print "Transform forward output"
imgs_tr_np = np.array(imgs_tr).reshape(-1, 1, 28, 28)
res = net.forward_all(data=imgs_tr_np, blobs=[args.layer])
iblobs = res[args.layer].reshape(len(imgs_tr), -1)
blobs = np.concatenate((blobs, iblobs))
blobs_pca = pca.transform(blobs)
print "Computing t-SNE"
tsne = TSNE(n_components=2, random_state=args.seed, verbose=True)
pts2 = tsne.fit_transform(blobs_pca)
print "Dump into npz"
np.savez_compressed(args.out_npz, imgs_tr_np=imgs_tr_np, tsne=tsne,
pts=pts2, infos=infos)
|
axel-angel/master-project
|
src/trace_transfo.py
|
Python
|
gpl-3.0
| 2,776
|
#!/usr/bin/python
#
# marker.py is a piece of uCon 2009 Capture The Flag code
# Copyright (C) 2002 Marcos Alvares <marcos.alvares@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
files_path = '/ctf_ucon2/'
import sys
import os
import pwd
import time
def usage():
    '''Print the correct way of using this program'''
    print 'Usage:', sys.argv[0], '<user_id>'
    print 'Where <user_id> is your id and a valid one.'
    print

def check_id(id):
    '''Check if the string passed to the program is a valid id registered at the id_file'''
    if id == '':
        # <user_id> is '' (NULL)
        return False
    else:
        # <user_id> is a legit string; open the id file in read-only mode
        try:
            id_file = open(files_path + 'id_file', 'r')
        except IOError:
            print 'The id file doesn\'t exist!'
            print
            raise SystemExit(1)
        # Check if <user_id> is registered in the id_file
        for entry in id_file:
            if entry.strip() == id.strip():
                # <user_id> is registered in the id_file
                id_file.close()
                return True
        # <user_id> is not registered in the id_file
        id_file.close()
        return False
def detect_tag_file():
    '''Detect which tag file will be used'''
    # The challenge defeated is identified by the effective user name
    challenge_name = pwd.getpwuid(os.geteuid())[0]
    return files_path + 'tag_' + challenge_name

def check_tag(id):
    '''Check if the passed id is not already marked'''
    # Open the tag_file in read-only mode
    try:
        tag_file = open(detect_tag_file(), 'r')
    except IOError:
        print 'The tag file (' + detect_tag_file() + ')', 'doesn\'t exist!'
        print
        raise SystemExit(1)
    # See if the tag file was already marked
    for entry in tag_file:
        if entry.strip() == id.strip():
            # <user_id> is already tagged
            tag_file.close()
            return True
    # <user_id> is not marked, so we must mark it now
    tag_file.close()
    return False

def mark_tag(id):
    '''Mark the passed id in the tag file'''
    # Open the tag_file in append mode
    try:
        tag_file = open(detect_tag_file(), 'a')
    except IOError:
        print 'Some strange problem occurred when writing to the tag file (' + detect_tag_file() + ').'
        print 'Try again.'
        print
        raise SystemExit(1)
    # Write tag to the file
    tag_file.write(id + '\n')
# This program marks a valid user id in a tag file if not already marked
if __name__ == '__main__':
    # Check number of arguments
    if len(sys.argv) != 2:
        # Wrong number of arguments passed
        usage()
        raise SystemExit(1)
    else:
        # Sleep for 0.5 seconds to prevent file access problems
        time.sleep(0.5)
        # Two arguments passed (program name and one parameter string);
        # check if the parameter string is a valid <user_id>
        if check_id(sys.argv[1]):
            # <user_id> is ready to be marked
            if check_tag(sys.argv[1]):
                print 'Tag already marked!'
                print 'The tag for', sys.argv[1], 'is already marked at', detect_tag_file()
                print 'Skipping...'
                print
            else:
                mark_tag(sys.argv[1])
                print 'Marking tag now!'
                print 'The tag for', sys.argv[1], 'was marked at', detect_tag_file()
                print 'Done!'
                print
        else:
            # The id passed is an invalid one
            usage()
            raise SystemExit(1)
    raise SystemExit(0)
|
mabj/ctf_ucon2
|
bot/marker.py
|
Python
|
gpl-3.0
| 3,740
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Cristhofer Travieso <cristhofert97@gmail.com>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from gi.repository import Gtk
from sugar3.activity import activity
class Activity(activity.SimpleActivity):
def __init__(self, handle):
        activity.SimpleActivity.__init__(self, handle, True)
self.show_all()
|
cristhofert/presentation
|
activity.py
|
Python
|
gpl-3.0
| 1,028
|
import numpy as np
from pele.utils.hessian import sort_eigs
__all__ = ["normalmode_frequencies", "normalmodes", "logproduct_freq2"]
class NormalModeError(Exception):
"""raised if the normal mode results are not as expected
this typically means that a minimum is actually a transition state,
or a transition state is actually a higher order saddle
"""
def normalmode_frequencies(hessian, metric=None, eps=1e-4):
'''calculate (squared) normal mode frequencies
Parameters
----------
hessian: 2d array
hessian matrix
metric: 2d array
mass weighted metric tensor
Returns
-------
sorted array of normal mode frequencies
'''
A = hessian
if metric is not None:
A = np.dot(np.linalg.pinv(metric), hessian)
frq = np.linalg.eigvals(A)
    if np.max(np.abs(np.imag(frq))) > eps:
        print frq
        raise ValueError("imaginary eigenvalue in frequency calculation"
                         ", check hessian + metric tensor\nthe largest imaginary part is %g" % np.max(np.abs(np.imag(frq))))
return np.sort(np.real(frq))
def normalmodes(hessian, metric=None, eps=1e-4, symmetric=False):
'''calculate (squared) normal mode frequencies and normal mode vectors
Parameters
----------
hessian: array
        hessian matrix
metric: array
mass weighted metric tensor
symmetric: bool
If true, the Hessian times the metric tensor is assumed to be symmetric. This is
not usually the case, even if the metric tensor is symmetric. It is
true if the metric tensor is the identity.
Returns
-------
freq, evecs tuple array of squared frequencies and normal modes
'''
if metric is None:
A = hessian
symmetric = True
else:
A = np.dot(np.linalg.pinv(metric), hessian)
if symmetric:
freq, evecs = np.linalg.eigh(A)
else:
freq, evecs = np.linalg.eig(A)
    if np.max(np.abs(np.imag(freq))) > eps:
        print freq
        raise ValueError("imaginary eigenvalue in frequency calculation"
                         ", check hessian + metric tensor\nthe largest imaginary part is %g" % np.max(np.abs(np.imag(freq))))
freq = np.real(freq)
freq, evecs = sort_eigs(freq, evecs)
return freq, evecs
def logproduct_freq2(freqs, nzero, nnegative=0, eps=1e-4):
''' calculate the log product of positive (squared) frequencies
calculates
log(product_i f_i^2)
Parameters
----------
freqs:
array of (squared) normalmode frequencies
nzero: int
expected number of zero eigenvalues
nnegative: int, optional
expected number of negative frequencies, 0 for minimum, 1 for transition states
eps: float, optional
        cutoff used to decide whether an eigenvalue is zero
Returns
-------
tuple of number of considered frequencies and log product of frequencies
'''
negative_eigs = []
zero_eigs = []
lnf = 0.
n = 0
for f in freqs:
if np.abs(f) < eps:
zero_eigs.append(f)
continue
if f < 0.:
negative_eigs.append(f)
continue
lnf += np.log(f)
        n += 1
izero = len(zero_eigs)
inegative = len(negative_eigs)
if nzero != izero:
raise ValueError("the number of zero eigenvalues (%d) differs from the expected value (%d)" % (izero, nzero))
if nnegative != inegative:
if nnegative > 0 and inegative > nnegative:
raise NormalModeError("the number of negative eigenvalues (%d) is greater than expected "
"(%d). Is this a higher order saddle point?" % (inegative, nnegative))
else:
raise NormalModeError("the number of negative eigenvalues (%d) differs from the expected "
"number (%d) (not a minimum / transition state?)" % (inegative, nnegative))
return n, lnf
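# Usage sketch (the hessian can come from any pele system; nzero=6 below
# assumes a non-linear 3D cluster with 3 translational + 3 rotational zero
# modes, so adjust to your system):
#
#   freqs = normalmode_frequencies(hessian)
#   n, lnf = logproduct_freq2(freqs, nzero=6)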
|
js850/pele
|
pele/thermodynamics/_normalmodes.py
|
Python
|
gpl-3.0
| 4,019
|
from __future__ import absolute_import
import re
from ner_v1.detectors.base_detector import BaseDetector
from language_utilities.constant import ENGLISH_LANG
class PhoneDetector(BaseDetector):
"""Detects phone numbers in given text and tags them.
Detects all phone numbers in given text and replaces them by entity_name
Attributes:
text: string to extract entities from
entity_name: string by which the detected phone numbers would be replaced with on calling detect_entity()
tagged_text: string with phone numbers replaced with tag defined by entity name
processed_text: string with phone numbers detected removed
phone: list of phone numbers detected
original_phone_text: list to store substrings of the text detected as phone numbers
tag: entity_name prepended and appended with '__'
For Example:
text = "Hi, can you send a text message to +919222222222"
phone_detector = PhoneDetector("phone_number")
phone_numbers, original_phone_numbers = phone_detector.detect_entity(text)
phone_detector.tagged_text
Output:
' Hi, can you send a text message to __phone_number__ '
phone_numbers, original_phone_numbers
Output:
(['+919222222222'], ['+919222222222'])
Note:
        text and tagged_text will have an extra space prepended and appended after calling detect_entity(text)
"""
def __init__(self, entity_name, source_language_script=ENGLISH_LANG, translation_enabled=False):
"""Initializes a PhoneDetector object
Args:
entity_name: A string by which the detected phone numbers would be replaced with on calling detect_entity()
source_language_script: ISO 639 code for language of entities to be detected by the instance of this class
translation_enabled: True if messages needs to be translated in case detector does not support a
particular language, else False
"""
# assigning values to superclass attributes
self._supported_languages = [ENGLISH_LANG]
super(PhoneDetector, self).__init__(source_language_script, translation_enabled)
self.text = ''
self.entity_name = entity_name
self.tagged_text = ''
self.processed_text = ''
self.phone = []
self.original_phone_text = []
self.tag = '__' + self.entity_name + '__'
@property
def supported_languages(self):
return self._supported_languages
def _detect_phone(self):
"""Detects phone numbers in the self.text
Returns:
A tuple of two lists with first list containing the detected phone numbers and second list containing their
corresponding substrings in the given text.
For example:
(['+919222222222'], ['+919222222222'])
"""
phone_list = []
original_list = []
phone_list, original_list = self._detect_phone_format(phone_list, original_list)
self._update_processed_text(original_list)
return phone_list, original_list
def detect_entity(self, text, **kwargs):
"""Detects phone numbers in the text string
Args:
text: string to extract entities from
**kwargs: it can be used to send specific arguments in future.
Returns:
A tuple of two lists with first list containing the detected phone numbers and second list containing their
corresponding substrings in the given text.
For example:
(['+919222222222'], ['+919222222222'])
Additionally this function assigns these lists to self.phone and self.original_phone_text attributes
respectively.
"""
self.text = ' ' + text + ' '
self.processed_text = self.text
self.tagged_text = self.text
phone_data = self._detect_phone()
self.phone = phone_data[0]
self.original_phone_text = phone_data[1]
return phone_data
def _detect_phone_format(self, phone_list=None, original_list=None):
"""
Detects phone numbers from self.text conforming to formats defined by regex pattern.
Args:
phone_list: Optional, list to store detected phone numbers
original_list: Optional, list to store corresponding substrings of given text which were detected as
phone numbers
Returns:
            A tuple of two lists with first list containing the detected phone numbers and second list containing
            their corresponding substrings in the given text.
            For example:
                (['+919222222222'], ['+919222222222'])
"""
if phone_list is None:
phone_list = []
if original_list is None:
original_list = []
patterns = self._detect_mobile_number_pattern(self.processed_text.lower())
for pattern in patterns:
original = pattern
phone = pattern
phone_list.append(phone)
original_list.append(original)
return phone_list, original_list
def _detect_mobile_number_pattern(self, text):
"""
Detects phone numbers from text that match the defined regex pattern
Args:
text: text string to extract entities from
Returns:
A list of substrings of text that match the defined regex pattern
For example:
(['+919222222222', '919999999999'])
"""
return re.findall(r'\s((?:(?:\+|0{0,2})91(?:\s*[\-]\s*)?|[0]?)?[6789]\d{9})\b', text)
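    # Pattern breakdown (for reference):
    #   (?:\+|0{0,2})91(?:\s*[\-]\s*)?  optional country code '+91'/'91'/'0091',
    #                                   optionally followed by a spaced hyphen
    #   [0]?                            or a single leading zero instead
    #   [6789]\d{9}                     10-digit Indian mobile starting 6-9
    #   the leading \s and trailing \b  keep the match word-delimited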
def _update_processed_text(self, original_phone_strings):
"""
Replaces detected phone numbers with tag generated from entity_name used to initialize the object with
A final string with all phone numbers replaced will be stored in object's tagged_text attribute
A string with all phone numbers removed will be stored in object's processed_text attribute
Args:
original_phone_strings: list of substrings of original text to be replaced with tag created
from entity_name
"""
for detected_text in original_phone_strings:
self.tagged_text = self.tagged_text.replace(detected_text, self.tag)
self.processed_text = self.processed_text.replace(detected_text, '')
|
hellohaptik/chatbot_ner
|
ner_v1/detectors/pattern/phone_number/phone_detection.py
|
Python
|
gpl-3.0
| 6,529
|
"""
determined if a word is all lowcase, all uppcase or just first letter being Capital.
https://leetcode.com/problems/detect-capital/
NOTE:the below code is borrowed from: https://leetcode.com/problems/detect-capital/discuss/1555379/Python-Simple-Implementation
This question is easy, but the code below showcased the Python built-in support for string munipulation.
Date: 11/05/21
"""
def capital(word: str) -> bool:
return word.upper() == word or word.lower() == word or word.capitalize() == word
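# Each built-in covers one allowed pattern:
#   word.upper()      == word  ->  'USA'  (all capitals)
#   word.lower()      == word  ->  'usa'  (all lowercase)
#   word.capitalize() == word  ->  'Usa'  (first letter only)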
if __name__ == '__main__':
word = 'USA'
print(capital(word))
|
entrepidea/projects
|
python/tutorials/algo/leetcode/easy/detect_capital.py
|
Python
|
gpl-3.0
| 577
|
#!/usr/bin/python3
# note that module name has changed from Tkinter in Python 2 to tkinter in Python 3
from tkinter import *
from tkinter.ttk import *
"""
Using Grid Layout
most of this is taken from:: http://zetcode.com/gui/tkinter/layout/
and http://www.dabeaz.com/special/Tkinter.pdf
http://infohost.nmt.edu/tcc/help/pubs/tkinter/web/listbox-scrolling.html
(the real hero)
"""
class MainFrame(Frame):
def __init__(self, parent):
Frame.__init__(self, parent)
parent.title("Sumulator")
self.pack()
self.initUI()
def initUI(self):
Style().configure("Sum", padding=(0, 5, 0, 5), font='serif 10')
self.yScroll = Scrollbar(self, orient=VERTICAL)
self.yScroll.grid(row=0, column=1, sticky=N+S)
self.xScroll = Scrollbar(self, orient=HORIZONTAL)
self.xScroll.grid(row=1, column=0, sticky=E+W)
self.listbox = Listbox(self, selectmode=MULTIPLE,
xscrollcommand=self.xScroll.set,
yscrollcommand=self.yScroll.set)
for i in range(1000):
self.listbox.insert(END, str(i))
self.listbox.bind("<<ListboxSelect>>", self.onClick)
self.listbox.grid(row=0, column=0, sticky=N+S+E+W)
self.xScroll['command'] = self.listbox.xview
self.yScroll['command'] = self.listbox.yview
self.sumLabel = Label(self, text="0", width=10)
self.sumLabel.grid(row=0, column=2, sticky=N+S+E+W)
    def onClick(self, event):
        # instead of self.listbox, can use event.widget here
        selection = self.listbox.curselection()
        # curselection() returns the selected indices; since item i holds the
        # string str(i), summing the indices gives the total of the selection
        self.sumLabel.config(text=sum(selection))
root = Tk()
app = MainFrame(root)
root.mainloop()
|
cbshiles/community_site
|
domains/subs/slack/res/sumulator.py
|
Python
|
gpl-3.0
| 1,731
|
import sys
from pylint.lint import Run
def lint(file):
"""
Lint file and return non-zero code if code is rated at less than 8/10
Parameters
----------
file: str
Path name of the file to lint
"""
results = Run([file, "--max-line-length=100"], exit=False)
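    # Note (version-dependent, hedged): on pylint >= 2.12, `linter.stats` is a
    # LinterStats object rather than a dict, so the score would be read as
    # `results.linter.stats.global_note` instead of the lookup below.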
if results.linter.stats["global_note"] < 8:
print("Code must be rated at 8/10 or more")
sys.exit(1)
else:
sys.exit(0)
if __name__ == "__main__":
lint("1password2pass.py")
|
eftov/1password2pass
|
test/lint.py
|
Python
|
gpl-3.0
| 503
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from oioioi.base.utils.deps import check_django_app_dependencies
from oioioi.contests.models import Submission, Contest
check_django_app_dependencies(__name__, ['oioioi.disqualification'],
strict=True)
class SubmissionsSimilarityGroup(models.Model):
contest = models.ForeignKey(Contest, verbose_name=_("contest"))
comment = models.TextField(blank=True, verbose_name=_("admin comment"))
class Meta(object):
verbose_name = _("submissions similarity")
verbose_name_plural = _("submissions similarities")
class SubmissionsSimilarityEntry(models.Model):
submission = models.ForeignKey(Submission,
verbose_name=_("submission"), related_name='similarities')
group = models.ForeignKey(SubmissionsSimilarityGroup,
verbose_name=_("group"), related_name='submissions')
guilty = models.BooleanField(default=True, verbose_name=_("guilty"))
class Meta(object):
verbose_name = _("submissions similarity entry")
verbose_name_plural = _("submissions similarity entries")
unique_together = (('submission', 'group'),)
|
papedaniel/oioioi
|
oioioi/similarsubmits/models.py
|
Python
|
gpl-3.0
| 1,206
|
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Rotterdam utilities tests
$Id: util.py 30238 2005-05-04 13:24:21Z hdima $
"""
import os
import zope.app.rotterdam.tests
dir = os.path.dirname(zope.app.rotterdam.tests.__file__)
input_dir = os.path.join(dir, 'input')
output_dir = os.path.join(dir, 'output')
def read_input(filename):
filename = os.path.join(input_dir, filename)
return open(filename, 'r').read().decode("utf-8")
def read_output(filename):
filename = os.path.join(output_dir, filename)
return open(filename, 'r').read().decode("utf-8")
|
Donkyhotay/MoonPy
|
zope/app/rotterdam/tests/util.py
|
Python
|
gpl-3.0
| 1,167
|
#!/usr/bin/python
'''
The following commands were ran prior to execution of this script:
$ touch goodpass.txt
** For storing our successful password
$ vi zero.txt
** type in some passphrases from a hint you have...hopefully
Example:
zero kool
zerokool
zerocool
zero cool
...
$ sudo john --rules=nt --wordlist=zero.txt --stdout > zeropass.txt
$ sudo john --rules=L33t --wordlist=zeropass.txt --stdout > zeropass1.txt
'''
import subprocess as s
import os
p = open(os.getcwd() + '/goodpass.txt', 'w')
# Used to escape symbols such as $,#,|, etc in bash that will cause echo to behave weird
def clean(word):
password = word.strip()
password = "'" + password + "'"
return password
with open(os.getcwd() + '/zeropass1.txt', 'r') as f:
    for password in f:
        password = clean(password)
        test = s.call('echo ' + password + ' | gpg --passphrase-fd 0 -q --batch --allow-multiple-messages --no-tty --output decrypt.txt -d ' + os.getcwd() + '/flag.txt.gpg', shell=True)
        if test == 2:
            print "Bad Password: " + password
        else:
            print "Password: " + password
            p.write(password)
            break
p.close()
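# Pipe-based alternative sketch (untested here) that avoids shell quoting
# entirely; the gpg flags are the same as in the command above:
#
#   proc = s.Popen(['gpg', '--passphrase-fd', '0', '-q', '--batch',
#                   '--allow-multiple-messages', '--no-tty',
#                   '--output', 'decrypt.txt', '-d',
#                   os.getcwd() + '/flag.txt.gpg'], stdin=s.PIPE)
#   proc.communicate(password.strip("'") + '\n')
#   success = (proc.returncode == 0)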
|
mcardacci/tools_of_the_dark_arts
|
gpg_brute_forcer_python/gpg_Brute_Force.py
|
Python
|
gpl-3.0
| 1,259
|
import os.path
import zipfile
import bs4
import tempfile
import os
import glob
import re
import nltk
import string
import unicodedata
from subprocess import call
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(dname)
def get_soup(f):
    # parse the HTML file named by f
    h = open(f, "r+")
    soup = bs4.BeautifulSoup(h, "lxml")
    h.close()
    return soup
for md in glob.glob('*.md'):
    html = md[:-2] + "html"
    call(["pandoc", "-o", html, md])
    soup = get_soup(html)
    # strip all attributes from table-related tags
    for i in soup.findAll(["tr", "th", "td"]):
        i.attrs.clear()
    hj = soup.body.select(" > *")
    # if the body is exactly one paragraph followed by one table, turn the
    # paragraph into the table's <caption>
    if len(hj) == 2 and hj[0].name == "p" and hj[1].name == "table":
        c = hj[0]
        t = hj[1]
        t.insert(0, c)
        c.name = "caption"
    h = unicode(soup)
    with open(html, "wb") as outfile:
        outfile.write(h.encode('utf8'))
|
kanner/lfs-crawler
|
fix/mdtohtml.py
|
Python
|
gpl-3.0
| 743
|
from __future__ import absolute_import
import os
import unittest
from .. import PhyloTree, NCBITaxa
from ..ncbi_taxonomy import ncbiquery
DATABASE_PATH = "test_tmp/testdb.sqlite"
class Test_ncbiquery(unittest.TestCase):
def test_00_update_database(self):
if not os.path.exists(DATABASE_PATH):
ncbiquery.update_db(DATABASE_PATH)
def test_01tree_annotation(self):
t = PhyloTree( "((9598, 9606), 10090);", sp_naming_function=lambda name: name)
t.annotate_ncbi_taxa(dbfile=DATABASE_PATH)
self.assertEqual(t.sci_name, 'Euarchontoglires')
homi = (t&'9606').up
self.assertEqual(homi.sci_name, 'Homininae')
self.assertEqual(homi.taxid, 207598)
self.assertEqual(homi.rank, 'subfamily')
self.assertEqual(homi.named_lineage, [u'root', u'cellular organisms', u'Eukaryota', u'Opisthokonta', u'Metazoa', u'Eumetazoa', u'Bilateria', u'Deuterostomia', u'Chordata', u'Craniata', u'Vertebrata', u'Gnathostomata', u'Teleostomi', u'Euteleostomi', u'Sarcopterygii', u'Dipnotetrapodomorpha', u'Tetrapoda', u'Amniota', u'Mammalia', u'Theria', u'Eutheria', u'Boreoeutheria', u'Euarchontoglires', u'Primates', u'Haplorrhini', u'Simiiformes', u'Catarrhini', u'Hominoidea', u'Hominidae', u'Homininae'])
self.assertEqual(homi.lineage, [1, 131567, 2759, 33154, 33208, 6072, 33213, 33511, 7711, 89593, 7742, 7776, 117570, 117571, 8287, 1338369, 32523, 32524, 40674, 32525, 9347, 1437010, 314146, 9443, 376913, 314293, 9526, 314295, 9604, 207598] )
human = t&'9606'
self.assertEqual(human.sci_name, 'Homo sapiens')
self.assertEqual(human.taxid, 9606)
self.assertEqual(human.rank, 'species')
self.assertEqual(human.named_lineage, [u'root', u'cellular organisms', u'Eukaryota', u'Opisthokonta', u'Metazoa', u'Eumetazoa', u'Bilateria', u'Deuterostomia', u'Chordata', u'Craniata', u'Vertebrata', u'Gnathostomata', u'Teleostomi', u'Euteleostomi', u'Sarcopterygii', u'Dipnotetrapodomorpha', u'Tetrapoda', u'Amniota', u'Mammalia', u'Theria', u'Eutheria', u'Boreoeutheria', u'Euarchontoglires', u'Primates', u'Haplorrhini', u'Simiiformes', u'Catarrhini', u'Hominoidea', u'Hominidae', u'Homininae', u'Homo', u'Homo sapiens'])
self.assertEqual(human.lineage, [1, 131567, 2759, 33154, 33208, 6072, 33213, 33511, 7711, 89593, 7742, 7776, 117570, 117571, 8287, 1338369, 32523, 32524, 40674, 32525, 9347, 1437010, 314146, 9443, 376913, 314293, 9526, 314295, 9604, 207598, 9605, 9606])
def test_ncbi_compare(self):
t = PhyloTree( "((9606, (9598, 9606)), 10090);", sp_naming_function=lambda x: x.name )
t.annotate_ncbi_taxa(dbfile=DATABASE_PATH)
#t.ncbi_compare()
def test_ncbiquery(self):
ncbi = NCBITaxa(dbfile=DATABASE_PATH)
id2name = ncbi.get_taxid_translator(['9606', '7507'])
self.assertEqual(id2name[7507], 'Mantis religiosa')
self.assertEqual(id2name[9606], 'Homo sapiens')
name2id = ncbi.get_name_translator(['Mantis religiosa', 'homo sapiens'])
self.assertEqual(name2id['Mantis religiosa'], [7507])
self.assertEqual(name2id['homo sapiens'], [9606])
name2id = ncbi.get_name_translator(['Bacteria'])
#self.assertEqual(set(name2id['Bacteria']), set([2, 629395]))
# Recent versions of NCBI seem to have removed the name Bacteria from 629395
self.assertEqual(set(name2id['Bacteria']), set([2]))
out = ncbi.get_descendant_taxa("9605", intermediate_nodes=True)
#Out[9]: [1425170, 741158, 63221, 9606]
self.assertEqual(set(out), set([1425170, 741158, 63221, 9606]))
out = ncbi.get_descendant_taxa("9605", intermediate_nodes=False)
#Out[10]: [1425170, 741158, 63221]
self.assertEqual(set(out), set([1425170, 741158, 63221]))
out = ncbi.get_descendant_taxa("9605", intermediate_nodes=False, rank_limit="species")
#Out[11]: [9606, 1425170]
self.assertEqual(set(out), set([9606, 1425170]))
def test_get_topology(self):
ncbi = NCBITaxa(dbfile=DATABASE_PATH)
t1 = ncbi.get_topology([9606, 7507, 9604])
t2 = ncbi.get_topology([9606, 7507, 678])
self.assertEqual(sorted(t1.get_leaf_names()), ["7507", "9606"])
self.assertEqual(sorted(t2.get_leaf_names()), ["678", "7507", "9606"])
# Test taxid synonyms
self.assertEqual(ncbi.get_topology(["42099"]).write(format=5), "1223560:1;")
for target in [9604, 9443, "9443"]:
t1 = ncbi.get_descendant_taxa(target, return_tree=True)
t2 = ncbi.get_topology([target])
t3 = ncbi.get_topology(ncbi.get_descendant_taxa(target))
t4 = ncbi.get_topology(list(map(str, ncbi.get_descendant_taxa(target))))
self.assertEqual(set(t1.get_leaf_names()), set(t2.get_leaf_names()))
self.assertEqual(set(t2.get_leaf_names()), set(t3.get_leaf_names()))
self.assertEqual(set(t3.get_leaf_names()), set(t4.get_leaf_names()))
diffs1 = t1.compare(t2, unrooted=True)
diffs2 = t2.compare(t3, unrooted=True)
diffs3 = t3.compare(t4, unrooted=True)
self.assertEqual(diffs1["rf"], 0.0)
self.assertEqual(diffs2["rf"], 0.0)
self.assertEqual(diffs3["rf"], 0.0)
def test_merged_id(self):
ncbi = NCBITaxa(dbfile=DATABASE_PATH)
t1 = ncbi.get_lineage(245018)
self.assertEqual(t1, [1, 131567, 2, 1783272, 1239, 186801, 186802, 186803, 207244, 649756])
t2 = ncbi.get_lineage("245018")
self.assertEqual(t2, [1, 131567, 2, 1783272, 1239, 186801, 186802, 186803, 207244, 649756])
if __name__ == '__main__':
unittest.main()
|
Unode/ete
|
ete3/test/test_ncbiquery.py
|
Python
|
gpl-3.0
| 5,434
|
from . import GeneralBranch, Branch
from anaconda_updates.settings import GlobalSettings
class Rhel7_5Branch(GeneralBranch):
def __init__(self):
super().__init__(branch_type=Branch.rhel7_5,
cmd_args=["-rh7.5", "--rhel7.5"],
help="working on RHEL 7.5",
version="21.48.22.134",
img_name="rhel7.5_updates.img",
blivet_args=[], pykickstart_args=[])
GlobalSettings.use_blivet = False
GlobalSettings.use_pykickstart = False
|
jkonecny12/anaconda_updates
|
anaconda_updates/releases/rhel7_5.py
|
Python
|
gpl-3.0
| 576
|
# -*- coding: utf-8 -*-
import pycurl
import re
import time
from module.plugins.internal.Account import Account
class FilefactoryCom(Account):
__name__ = "FilefactoryCom"
__type__ = "account"
__version__ = "0.16"
__description__ = """Filefactory.com account plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg", "zoidberg@mujmail.cz"),
("stickell", "l.stickell@yahoo.it")]
VALID_UNTIL_PATTERN = r'Premium valid until: <strong>(?P<D>\d{1,2})\w{1,2} (?P<M>\w{3}), (?P<Y>\d{4})</strong>'
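    # Example snippet this pattern is meant to match (assumed page format):
    #   Premium valid until: <strong>31st Dec, 2015</strong>
    # giving D='31', M='Dec', Y='2015' -> "31 Dec 2015" for strptime below.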
def loadAccountInfo(self, user, req):
html = req.load("http://www.filefactory.com/account/")
m = re.search(self.VALID_UNTIL_PATTERN, html)
if m:
premium = True
            validuntil = re.sub(self.VALID_UNTIL_PATTERN, r'\g<D> \g<M> \g<Y>', m.group(0))
validuntil = time.mktime(time.strptime(validuntil, "%d %b %Y"))
else:
premium = False
validuntil = -1
return {"premium": premium, "trafficleft": -1, "validuntil": validuntil}
def login(self, user, data, req):
req.http.c.setopt(pycurl.REFERER, "http://www.filefactory.com/member/login.php")
html = req.load("https://www.filefactory.com/member/signin.php",
post={"loginEmail" : user,
"loginPassword": data['password'],
"Submit" : "Sign In"})
if req.lastEffectiveURL != "http://www.filefactory.com/account/":
self.wrongPassword()
|
Zerknechterer/pyload
|
module/plugins/accounts/FilefactoryCom.py
|
Python
|
gpl-3.0
| 1,573
|
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import androidpn
import boxcar
import email_notify
import nma
import pushbullet
import pushover
import slack
import tweet
# online
twitter_notifier = tweet.TwitterNotifier()
boxcar_notifier = boxcar.BoxcarNotifier()
pushbullet_notifier = pushbullet.PushbulletNotifier()
pushover_notifier = pushover.PushoverNotifier()
androidpn_notifier = androidpn.AndroidPNNotifier()
nma_notifier = nma.NMA_Notifier()
slack_notifier = slack.SlackNotifier()
email_notifier = email_notify.EmailNotifier()
notifiers = [
twitter_notifier,
boxcar_notifier,
pushbullet_notifier,
pushover_notifier,
androidpn_notifier,
nma_notifier,
slack_notifier,
email_notifier
]
def notify_download(title):
for n in notifiers:
n.notify_download(title)
def notify_snatch(title):
for n in notifiers:
n.notify_snatch(title)
|
CHBMB/LazyLibrarian
|
lazylibrarian/notifiers/__init__.py
|
Python
|
gpl-3.0
| 1,598
|
# -*- coding: utf-8 -*-
# Space Syntax Toolkit
# Set of tools for essential space syntax network analysis and results exploration
# -------------------
# begin : 2014-04-01
# copyright : (C) 2015 by Jorge Gil, UCL
# author : Jorge Gil
# email : jorge.gil@ucl.ac.uk
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# Import the PyQt and QGIS libraries
from qgis.PyQt.QtCore import (QObject, QSettings, QFileInfo)
from qgis.PyQt.QtWidgets import QDialog
# import toolkit settings dialog
from .ui_Settings import Ui_SettingsDialog
class SettingsManager(QObject):
def __init__(self, iface):
QObject.__init__(self)
self.iface = iface
self.dlg = SettingsDialog()
def showDialog(self):
self.dlg.show()
def getLastDir(self):
settings = QSettings()
return settings.value("/esst/lastUsedDir", "")
def setLastDir(self, path):
settings = QSettings()
save_path = QFileInfo(path).filePath()
settings.setValue("/esst/lastUsedDir", save_path)
class SettingsDialog(QDialog, Ui_SettingsDialog):
def __init__(self):
QDialog.__init__(self)
# Set up the user interface from Designer.
self.setupUi(self)
# set up internal GUI signals
self.closeButtonBox.rejected.connect(self.close)
|
SpaceGroupUCL/qgisSpaceSyntaxToolkit
|
esstoolkit/SettingsManager.py
|
Python
|
gpl-3.0
| 1,562
|
#!/usr/bin/env python
# This file is part of Gummworld2.
#
# Gummworld2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Gummworld2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with Gummworld2. If not, see <http://www.gnu.org/licenses/>.
# Compatible: Python 2.7, Python 3.2
"""engine.py - A sample engine for Gummworld2.
This module provides an Engine class that can be subclassed for an application
framework that's easy to use.
The run loop keeps time via the game clock. update() and event handlers are
called every time an update cycle is ready. draw() is called every time a frame
cycle is ready.
The subclass should override update() and draw() for its own purposes. If the
subclass wants to get events for a particular type, all it needs to do is
override the event handler for that type.
If you want to write your own framework instead of using this one, then in
general you will still want to initialize yours in the same order as this class,
though not everything created in the constructor is required. See
Engine.__init__(), Engine.run(), and examples/00_minimum.py for helpful clues.
"""
import pygame
from pygame import KEYDOWN, KEYUP, MOUSEMOTION, MOUSEBUTTONUP, MOUSEBUTTONDOWN
from pygame import JOYAXISMOTION, JOYBALLMOTION, JOYHATMOTION, JOYBUTTONUP, JOYBUTTONDOWN
from pygame import VIDEORESIZE, VIDEOEXPOSE, USEREVENT, QUIT, ACTIVEEVENT
# if __name__ == '__main__':
# import paths
from gummworld2 import State, Context, Screen, View, BasicMap, Camera, GameClock
from gummworld2 import context, model, pygame_utils, state
__version__ = '$Id: engine.py 407 2013-08-12 15:11:30Z stabbingfinger@gmail.com $'
__author__ = 'Gummbum, (c) 2011-2014'
__all__ = ['NO_WORLD', 'SIMPLE_WORLD', 'Engine', 'run']
NO_WORLD = 0
SIMPLE_WORLD = 1
class Engine(Context):
NO_WORLD = NO_WORLD
SIMPLE_WORLD = SIMPLE_WORLD
def __init__(self, screen_surface=None, resolution=None, display_flags=0, caption=None, camera_target=None,
camera_view=None, camera_view_rect=None, map=None, tile_size=None, map_size=None, update_speed=30,
frame_speed=30, world_type=NO_WORLD, set_state=True):
"""Construct an instance of Engine.
This constructor does the following:
The pygame display is initialized with an optional caption, and the
resulting screen.Screen object is placed in State.screen.
An empty map.BasicMap object is created and placed in State.map.
An empty model.World* object is created and placed in State.world.
State.world_type is set to one of the engine.*_WORLD values
corresponding to the world object in State.world.
A camera.Camera object is created and placed in State.camera. The
camera target is either taken from the camera_target argument, or an
appropriate target for world type is created. The target is *NOT*
added to the world, as the target does not need to be an object
subject to game rules. If target happens to be an avatar-type object
then add it manually to world with the rest of the world entities.
A game_clock.GameClock object is created and placed in State.clock.
Joystick objects are created for connected controllers.
The following arguments are used to initialize a Screen object:
The screen_surface argument specifies the pygame top level surface
to use for creating the State.screen object. The presence of this
argument overrides initialization of the pygame display, and
resolution and display_flags arguments are ignored. Use this if
the pygame display has already been initialized in the calling
program.
The resolution argument specifies the width and height of the
display.
The display_flags argument specifies the pygame display flags to
pass to the display initializer.
The caption argument is a string to use as the window caption.
The following arguments are used to initialize a Camera object:
The camera_target argument is the target that the camera will track.
If camera_target is None, Engine will create a default target
appropriate for the world type.
The camera_view argument is a screen.View object to use as the
camera's view.
The camera_view_rect argument specifies the pygame Rect from which
to create a screen.View object for the camera's view.
State.screen.surface is used as the source surface. This argument is
ignored if camera_view is not None.
The following arguments are used to initialize a BasicMap object:
The tile_size and map_size arguments specify the width and height of
a map tile, and width and height of a map in tiles, respectively.
The following arguments are used to initialize a model.World* object:
The world_type argument specifies which of the world classes to
create. It must be one of engine.NO_WORLD, or engine.SIMPLE_WORLD.
The following arguments are used to initialize a Clock object:
update_speed specifies the maximum updates that can occur per
second.
frame_speed specifies the maximum frames that can occur per second.
The clock sacrifices frames per second in order to achieve the desired
updates per second. If frame_speed is 0 the frame rate is uncapped.
Engine.update() and Engine.draw() are registered as callbacks in the
clock.
"""
if __debug__:
print('Engine: -- new engine --')
Context.__init__(self)
# If you don't use this engine, then in general you will still want
# to initialize your State objects in the same order you see here.
self.world_type = world_type
self.screen = None
self.caption = caption
self.map = None
self.world = None
self.camera = None
self.camera_target = camera_target
self.clock = None
# Screen.
if screen_surface:
if __debug__:
print('Engine: Screen(surface=screen_surface)')
self.screen = Screen(surface=screen_surface)
elif resolution:
if __debug__:
print('Engine: Screen(resolution, display_flags)')
self.screen = Screen(resolution, display_flags)
elif State.screen:
if __debug__:
print('Engine: using State.screen')
self.screen = State.screen
else:
if __debug__:
print('Engine: falling back on pygame.display.get_surface()')
self.screen = Screen(surface=pygame.display.get_surface())
# BasicMap.
if map:
if __debug__:
print('Engine: using pre-made map')
self.map = map
elif tile_size and map_size:
if __debug__:
print('Engine: BasicMap(map_size, tile_size)')
self.map = BasicMap(map_size[0], map_size[1], tile_size[0], tile_size[1])
else:
if __debug__:
print('Engine: SKIPPING map creation:' +
' no map, tile_size, or map_size')
# If you want to use the camera target as a world entity, you have to
# use the right object type. Type checking and exception handling are
# not done. This is to allow flexible initialization of the Engine
# context.
if __debug__ and self.camera_target:
print('Engine: using pre-made camera target')
if not self.map:
if __debug__:
print('Engine: SKIPPING world creation: no map')
pass
elif world_type == NO_WORLD:
if __debug__:
print('Engine: NoWorld(self.map.rect)')
self.world = model.NoWorld(self.map.rect)
if camera_target is None:
if __debug__:
print('Engine: making camera target Object()')
self.camera_target = model.Object()
elif world_type == SIMPLE_WORLD:
if __debug__:
print('Engine: World(self.map.rect)')
self.world = model.World(self.map.rect)
if camera_target is None:
if __debug__:
print('Engine: making camera target Object()')
self.camera_target = model.Object()
# Create the camera.
if any((self.camera_target, camera_view, camera_view_rect)):
if camera_view:
if __debug__:
print('Engine: using pre-made camera view')
else:
if camera_view_rect:
if __debug__:
print('Engine: making camera view from rect')
camera_view = View((self.screen or State.screen).surface, camera_view_rect)
else:
if __debug__:
print('Engine: making camera view from screen')
camera_view = self.screen
if __debug__:
print('Engine: making camera')
self.camera = Camera(self.camera_target, camera_view)
else:
if __debug__:
print('Engine: SKIPPING camera creation:' +
' no camera target, view, or view rect')
# Create the clock, specifying callbacks for update() and draw().
if __debug__:
print('Engine: creating GameClock')
self.clock = GameClock(
update_speed, frame_speed,
update_callback=self._update, frame_callback=self._draw)
# Init joysticks.
if not pygame.joystick.get_init():
if __debug__:
print('Engine: initializing joysticks')
self._joysticks = pygame_utils.init_joystick()
self._get_pygame_events = pygame.event.get
# Initialize State.
if set_state:
if __debug__:
print('Engine: copying my objects to State')
self.set_state()
def enter(self):
"""Called when the context is entered.
If you override this, make sure you call the super.
"""
self.set_state()
def resume(self):
"""Called when the context is resumed.
If you override this, make sure you call the super.
"""
self.set_state()
def set_state(self):
if self.world_type is not None:
State.world_type = self.world_type
if self.screen is not None:
State.screen = self.screen
if self.caption is not None:
pygame.display.set_caption(self.caption)
if self.map is not None:
State.map = self.map
if self.world is not None:
State.world = self.world
if self.camera is not None:
State.camera = self.camera
if self.camera_target is not None:
State.camera_target = self.camera_target
if self.clock is not None:
State.clock = self.clock
def _update(self, dt):
"""The clock's update_callback, which in turn calls
Engine._get_events and Engine.update.
"""
self._get_events()
self.update(dt)
def _draw(self, interp):
"""The clock's draw_callback, which in turn calls
Camera.interpolate and Engine.draw.
"""
if State.camera:
State.camera.interpolate()
self.draw(interp)
def update(self, dt):
"""Override this method. Called by run() when the clock signals an
update cycle is ready.
Suggestion:
move_camera()
State.camera.update()
... custom update the rest of the game ...
"""
pass
def draw(self, interp):
"""Override this method. Called by run() when the clock signals a
frame cycle is ready.
Suggestion:
State.screen.clear()
... custom draw the screen ...
State.screen.flip()
"""
pass
@property
def joysticks(self):
"""List of initialized joysticks.
"""
return list(self._joysticks)
def _get_events(self):
"""Get events and call the handler. Called automatically by run() each
time the clock indicates an update cycle is ready.
"""
for e in self._get_pygame_events():
typ = e.type
if typ == KEYDOWN:
self.on_key_down(e.unicode, e.key, e.mod)
elif typ == KEYUP:
self.on_key_up(e.key, e.mod)
elif typ == MOUSEMOTION:
self.on_mouse_motion(e.pos, e.rel, e.buttons)
elif typ == MOUSEBUTTONUP:
self.on_mouse_button_up(e.pos, e.button)
elif typ == MOUSEBUTTONDOWN:
self.on_mouse_button_down(e.pos, e.button)
elif typ == JOYAXISMOTION:
self.on_joy_axis_motion(e.joy, e.axis, e.value)
elif typ == JOYBALLMOTION:
self.on_joy_ball_motion(e.joy, e.ball, e.rel)
elif typ == JOYHATMOTION:
self.on_joy_hat_motion(e.joy, e.hat, e.value)
elif typ == JOYBUTTONUP:
self.on_joy_button_up(e.joy, e.button)
elif typ == JOYBUTTONDOWN:
self.on_joy_button_down(e.joy, e.button)
elif typ == VIDEORESIZE:
self.on_video_resize(e.size, e.w, e.h)
elif typ == VIDEOEXPOSE:
self.on_video_expose()
elif typ == USEREVENT:
self.on_user_event(e)
elif typ == QUIT:
self.on_quit()
elif typ == ACTIVEEVENT:
self.on_active_event(e.gain, e.state)
# Override an event handler to get specific events.
def on_active_event(self, gain, state): pass
def on_joy_axis_motion(self, joy, axis, value): pass
def on_joy_ball_motion(self, joy, ball, rel): pass
def on_joy_button_down(self, joy, button): pass
def on_joy_button_up(self, joy, button): pass
def on_joy_hat_motion(self, joy, hat, value): pass
def on_key_down(self, unicode, key, mod): pass
def on_key_up(self, key, mod): pass
def on_mouse_button_down(self, pos, button): pass
def on_mouse_button_up(self, pos, button): pass
def on_mouse_motion(self, pos, rel, buttons): pass
def on_quit(self): pass
def on_user_event(self, e): pass
def on_video_expose(self): pass
def on_video_resize(self, size, w, h): pass
def run(app):
"""Push app onto the context stack and start the run loop.
To exit the run loop gracefully, call context.pop().
"""
context.push(app)
while context.top():
State.clock.tick()
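# Minimal subclass sketch (the resolution/map values are only examples; the
# update/draw bodies follow the suggestions in the docstrings above, and the
# commented-out demo below shows a fuller version):
#
#   class MyGame(Engine):
#       def update(self, dt):
#           State.camera.update()
#       def draw(self, interp):
#           State.screen.clear()
#           State.screen.flip()
#
#   run(MyGame(resolution=(640, 480), tile_size=(64, 64), map_size=(10, 10)))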
# if __name__ == '__main__':
# # Multiple "apps", (aka engines, aka levels) and other settings
# from pygame import *
# from gamelib import Vec2d, View, toolkit
#
# class App(Engine):
#
# def __init__(self, **kwargs):
# super(App, self).__init__(**kwargs)
# toolkit.make_tiles2()
# self.speed = 3
# self.movex = 0
# self.movey = 0
#
# def update(self):
# if self.movex or self.movey:
# State.camera.position += self.movex,self.movey
# State.camera.update()
#
# def draw(self):
# State.camera.interpolate()
# State.screen.surface.fill(Color('black'))
# toolkit.draw_tiles()
# if State.camera.view is not State.screen:
# pygame.draw.rect(State.screen.surface, (99,99,99),
# State.camera.view.parent_rect, 1)
# pygame.display.flip()
#
# def on_key_down(self, unicode, key, mod):
# if key == K_DOWN:
# self.movey += self.speed
# elif key == K_UP:
# self.movey += -self.speed
# elif key == K_RIGHT:
# self.movex += self.speed
# elif key == K_LEFT:
# self.movex += -self.speed
# elif key == K_SPACE:
# State.running = False
# elif key == K_ESCAPE:
# quit()
#
# def on_key_up(self, key, mod):
# if key == K_DOWN:
# self.movey -= self.speed
# elif key == K_UP:
# self.movey -= -self.speed
# elif key == K_RIGHT:
# self.movex -= self.speed
# elif key == K_LEFT:
# self.movex -= -self.speed
#
# def make_app(num, **kwargs):
# name = 'app' + str(num)
#         if name in State.states:
# State.restore(name)
# pygame.display.set_caption(State.caption + ' (restored)')
# else:
# State.app = App(**kwargs)
# if num % 2:
# toolkit.make_tiles()
# else:
# toolkit.make_tiles2()
# State.camera.position = State.camera.screen_center
# State.caption = kwargs['caption']
# State.save(name)
#
# def make_app1():
# screen = pygame.display.set_mode(resolution)
# make_app(1, screen_surface=screen, tile_size=tile_size, map_size=map_size, caption='1:Screen')
#
# def make_app2():
# tile_size = Vec2d(32, 32)
# view = View(State.screen.surface, Rect(16, 16, *(tile_size * 6)))
# make_app(2, tile_size=tile_size, map_size=map_size, camera_view=view, caption='2:View+Tilesize')
#
# def make_app3():
# make_app(3, tile_size=tile_size, map_size=map_size, camera_view_rect=Rect(16, 16, *(tile_size * 3)),
# caption='3:Viewrect')
#
# tile_size = Vec2d(64, 64)
# map_size = Vec2d(10, 10)
# resolution = tile_size * 4
#
# State.default_attrs.extend(('app', 'caption'))
# app_num = 0
#
# while 1:
# app_num += 1
# if app_num > 3:
# app_num = 1
# if app_num == 1:
# make_app1()
# elif app_num == 2:
# make_app2()
# elif app_num == 3:
# make_app3()
# State.app.run()
|
gentooza/Freedom-Fighters-of-Might-Magic
|
src/gamelib/gummworld2/engine.py
|
Python
|
gpl-3.0
| 19,363
|
#!/usr/bin/env python
'''
njRat Config Decoder
'''
__description__ = 'njRat Config Extractor'
__author__ = 'Kevin Breen http://techanarchy.net http://malwareconfig.com'
__version__ = '0.2'
__date__ = '2015/06/13'
#Standard Imports Go Here
import os
import sys
import base64
import string
from optparse import OptionParser
#Non Standard Imports
try:
import pype32
except ImportError:
print "[+] Couldn't Import pype32 'https://github.com/crackinglandia/pype32'"
# Main Decode Function Goes Here
'''
data is the raw contents of the file.
Must return a python dict of config values.
'''
def run(data):
try:
pe = pype32.PE(data=data)
string_list = get_strings(pe, '#US')
#print string_list
#parse the string list
config_dict = parse_config(string_list)
return config_dict
except Exception as e:
print e
return None
#Helper Functions Go Here
# Get a list of strings from a section
def get_strings(pe, dir_type):
counter = 0
string_list = []
m = pe.ntHeaders.optionalHeader.dataDirectory[14].info
for s in m.netMetaDataStreams[dir_type].info:
for offset, value in s.iteritems():
string_list.append(value)
#print counter, value
counter += 1
return string_list
#Turn the strings into a python config_dict
def parse_config(string_list):
config_dict = {}
if string_list[5] == '0.3.5':
config_dict["Campaign ID"] = base64.b64decode(string_list[4])
config_dict["version"] = string_list[5]
config_dict["Install Name"] = string_list[1]
config_dict["Install Dir"] = string_list[2]
config_dict["Registry Value"] = string_list[3]
config_dict["Domain"] = string_list[7]
config_dict["Port"] = string_list[8]
config_dict["Network Separator"] = string_list[9]
config_dict["Install Flag"] = string_list[6]
elif string_list[6] == '0.3.6':
config_dict["Campaign ID"] = base64.b64decode(string_list[5])
config_dict["version"] = string_list[6]
config_dict["Install Name"] = string_list[2]
config_dict["Install Dir"] = string_list[3]
config_dict["Registry Value"] = string_list[4]
config_dict["Domain"] = string_list[8]
config_dict["Port"] = string_list[9]
config_dict["Network Separator"] = string_list[10]
config_dict["Install Flag"] = string_list[11]
elif string_list[3] == '0.4.1a':
config_dict["Campaign ID"] = base64.b64decode(string_list[2])
config_dict["version"] = string_list[3]
config_dict["Install Name"] = string_list[5]
config_dict["Install Dir"] = string_list[6]
config_dict["Registry Value"] = string_list[7]
config_dict["Domain"] = string_list[8]
config_dict["Port"] = string_list[9]
config_dict["Network Separator"] = string_list[10]
config_dict["Install Flag"] = string_list[11]
elif string_list[2] == '0.5.0E':
config_dict["Campaign ID"] = base64.b64decode(string_list[1])
config_dict["version"] = string_list[2]
config_dict["Install Name"] = string_list[4]
config_dict["Install Dir"] = string_list[5]
config_dict["Registry Value"] = string_list[6]
config_dict["Domain"] = string_list[7]
config_dict["Port"] = string_list[8]
config_dict["Network Separator"] = string_list[10]
config_dict["Install Flag"] = string_list[9]
elif string_list[2] == '0.6.4':
config_dict["Campaign ID"] = base64.b64decode(string_list[1])
config_dict["version"] = string_list[2]
config_dict["Install Name"] = string_list[3]
config_dict["Install Dir"] = string_list[4]
config_dict["Registry Value"] = string_list[5]
config_dict["Domain"] = string_list[6]
config_dict["Port"] = string_list[7]
config_dict["Network Separator"] = string_list[8]
config_dict["Install Flag"] = string_list[9]
elif string_list[2] == '0.7.1':
config_dict["Campaign ID"] = base64.b64decode(string_list[1])
config_dict["version"] = string_list[2]
config_dict["Mutex"] = string_list[3]
config_dict["Install Name"] = string_list[4]
config_dict["Install Dir"] = string_list[5]
config_dict["Registry Value"] = string_list[6]
config_dict["Domain"] = string_list[7]
config_dict["Port"] = string_list[8]
config_dict["Network Separator"] = string_list[10]
config_dict["Install Flag"] = string_list[9]
config_dict["Author"] = string_list[12]
elif string_list[2] == '0.7d':
config_dict["Campaign ID"] = base64.b64decode(string_list[1])
config_dict["version"] = string_list[2]
config_dict["Install Name"] = string_list[3]
config_dict["Install Dir"] = string_list[4]
config_dict["Registry Value"] = string_list[5]
config_dict["Domain"] = string_list[6]
config_dict["Port"] = string_list[7]
config_dict["Network Separator"] = string_list[8]
config_dict["Install Flag"] = string_list[9]
else:
return None
return config_dict
#Recursive Function Goes Here
def run_recursive(folder, output):
counter1 = 0
counter2 = 0
print "[+] Writing Configs to File {0}".format(output)
with open(output, 'a+') as out:
#This line will need changing per Decoder
out.write("Filename,Campaign ID, Version, Install Name, Install Dir, Registry Value, Domain, Network Seperator, Install Flag\n")
for server in os.listdir(folder):
file_data = open(os.path.join(folder,server), 'rb').read()
config_dict = run(file_data)
if config_dict != None:
#This line will need changing per Decoder
out.write('{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},\n'.format(server, config_dict["Campaign ID"],config_dict["version"],config_dict["Install Name"],config_dict["Install Dir"],config_dict["Registry Value"],config_dict["Domain"],config_dict["Port"],config_dict["Network Separator"],config_dict["Install Flag"]))
counter1 += 1
counter2 += 1
print "[+] Decoded {0} out of {1} Files".format(counter1, counter2)
return "Complete"
# Main
if __name__ == "__main__":
parser = OptionParser(usage='usage: %prog inFile outConfig\n' + __description__, version='%prog ' + __version__)
parser.add_option("-r", "--recursive", action='store_true', default=False, help="Recursive Mode")
(options, args) = parser.parse_args()
    # If we don't have args quit with help page
if len(args) > 0:
pass
else:
parser.print_help()
sys.exit()
# if we want a recursive extract run this function
if options.recursive == True:
if len(args) == 2:
run_recursive(args[0], args[1])
sys.exit()
else:
print "[+] You need to specify Both Dir to read AND Output File"
parser.print_help()
sys.exit()
    # If not recursive try to open file
    try:
        print "[+] Reading file"
        file_data = open(args[0], 'rb').read()
    except:
        print "[+] Couldn't Open File {0}".format(args[0])
        sys.exit()
#Run the config extraction
print "[+] Searching for Config"
config = run(file_data)
#If we have a config figure out where to dump it out.
if config == None:
print "[+] Config not found"
sys.exit()
    # If you gave me two args I'm going to assume the 2nd arg is where you want to save the file
if len(args) == 2:
print "[+] Writing Config to file {0}".format(args[1])
with open(args[1], 'a') as outFile:
for key, value in sorted(config.iteritems()):
clean_value = filter(lambda x: x in string.printable, value)
outFile.write("Key: {0}\t Value: {1}\n".format(key,clean_value))
    # if no second arg then assume you want it printed to screen
else:
print "[+] Printing Config to screen"
for key, value in sorted(config.iteritems()):
clean_value = filter(lambda x: x in string.printable, value)
print " [-] Key: {0}\t Value: {1}".format(key,clean_value)
print "[+] End of Config"
|
hoangcuongflp/RATDecoders
|
njRat.py
|
Python
|
gpl-3.0
| 8,357
|
"""
This module provides classes that describe quantum spin operators as well as
spin interactions.
"""
__all__ = [
"SpinOperator",
"SpinInteraction",
]
from itertools import product
import matplotlib.pyplot as plt
import numpy as np
from scipy.sparse import csr_matrix, identity, kron
from HamiltonianPy.quantumoperator.constant import ANNIHILATION, CREATION, \
NUMERIC_TYPES_GENERAL, SPIN_MATRICES, SPIN_OTYPES, SPIN_DOWN, SPIN_UP
from HamiltonianPy.quantumoperator.particlesystem import AoC, ParticleTerm
from HamiltonianPy.quantumoperator.quantumstate import SiteID
class SpinOperator:
"""
A unified description of quantum spin operator.
Attributes
----------
otype : str
The type of this spin operator.
Supported value: "x" | "y" | "z" | "p" | "m".
site_id : SiteID
The ID of the lattice site on which the spin operator is defined.
coordinate : tuple
The coordinates of the lattice site in tuple form.
site : 1D np.ndarray
The coordinates of the lattice site in np.ndarray form.
Examples
--------
>>> from HamiltonianPy.quantumoperator import SpinOperator
>>> SX = SpinOperator("x", site=[0, 0])
>>> SY = SpinOperator("y", site=[1, 1])
>>> SX
SpinOperator(otype="x", site=(0, 0))
>>> SY.matrix()
array([[ 0.+0.j , -0.-0.5j],
[ 0.+0.5j, 0.+0.j ]])
>>> SY < SX
False
>>> SX.dagger() is SX
True
>>> print(2 * SX * SY)
The coefficient of this term: 2
The component operators:
SpinOperator(otype="x", site=(0, 0))
SpinOperator(otype="y", site=(1, 1))
"""
def __init__(self, otype, site):
"""
Customize the newly created instance.
Parameters
----------
otype : {"x", "y", "z", "p" or "m"}
The type of this spin operator.
site : list, tuple or 1D np.ndarray
The coordinates of the lattice site on which the spin operator is
defined. The `site` parameter should be 1D array with length 1,
2 or 3.
"""
assert otype in SPIN_OTYPES, "Invalid operator type"
site_id = SiteID(site=site)
self._otype = otype
self._site_id = site_id
# The tuple form of this instance
# It is a tuple: (otype, site) and site itself is a tuple with length
# 1, 2 or 3.
self._tuple_form = (otype, site_id._tuple_form)
@property
def otype(self):
"""
The `otype` attribute.
"""
return self._otype
@property
def site_id(self):
"""
The `site_id` attribute.
"""
return self._site_id
@property
def coordinate(self):
"""
The `coordinate` attribute.
"""
return self._site_id.coordinate
@property
def site(self):
"""
The `site` attribute.
"""
return self._site_id.site
def getIndex(self, indices_table):
"""
Return the index of this operator.
Parameters
----------
indices_table : IndexTable
A table that associate instances of SpinOperator with integer
indices.
Returns
-------
index : int
The index of this instance in the given table.
See also
--------
getSiteIndex
"""
return indices_table(self)
def getSiteIndex(self, indices_table):
"""
Return the index of the lattice site on which this operator is defined.
Notes:
This method is different from the `getIndex` method.
This method return the index of the site on which this operator
is defined and the `getIndex` method return the index of the
operator itself.
Parameters
----------
indices_table : IndexTable
A table that associate instances of SiteID with integer indices.
Returns
-------
index : int
The index of the `site_id` attribute of this instance.
"""
return indices_table(self._site_id)
def __repr__(self):
"""
Official string representation of the instance.
"""
info = 'SpinOperator(otype="{0}", site={1!r})'
return info.format(self._otype, self.coordinate)
__str__ = __repr__
def tolatex(self, **kwargs):
"""
Return the LaTex form of this instance.
Parameters
----------
kwargs :
All keyword arguments are passed to the `tolatex` method of the
`site_id` attribute.
See also: `SiteID.tolatex`.
Returns
-------
latex : str
The LaTex form of this instance.
"""
subscript = self._site_id.tolatex(**kwargs)
return r"$S_{{{0}}}^{{{1}}}$".format(subscript, self._otype)
def show(self, **kwargs):
"""
Show the instance in handwriting form.
Parameters
----------
kwargs :
All keyword arguments are passed to the `tolatex` method of the
`site_id` attribute.
See also: `SiteID.tolatex`.
"""
fig, ax = plt.subplots()
ax.text(
0.5, 0.5, self.tolatex(**kwargs), fontsize="xx-large",
ha="center", va="center", transform=ax.transAxes
)
ax.set_axis_off()
plt.show()
def __hash__(self):
"""
Calculate the hash code of the instance.
"""
return hash(self._tuple_form)
def __lt__(self, other):
"""
Implement the `<` operator between self and other.
"""
if isinstance(other, self.__class__):
return self._tuple_form < other._tuple_form
else:
return NotImplemented
def __eq__(self, other):
"""
Implement the `==` operator between self and other.
"""
if isinstance(other, self.__class__):
return self._tuple_form == other._tuple_form
else:
return NotImplemented
def __gt__(self, other):
"""
Implement the `>` operator between self and other.
"""
if isinstance(other, self.__class__):
return self._tuple_form > other._tuple_form
else:
return NotImplemented
def __le__(self, other):
"""
Implement the `<=` operator between self and other.
"""
if isinstance(other, self.__class__):
return self._tuple_form <= other._tuple_form
else:
return NotImplemented
def __ne__(self, other):
"""
Implement the `!=` operator between self and other.
"""
if isinstance(other, self.__class__):
return self._tuple_form != other._tuple_form
else:
return NotImplemented
def __ge__(self, other):
"""
Implement the `>=` operator between self and other.
"""
if isinstance(other, self.__class__):
return self._tuple_form >= other._tuple_form
else:
return NotImplemented
def __mul__(self, other):
"""
Implement the binary arithmetic operation: `*`.
`self` is the left operand and `other` is the right operand;
        Return an instance of SpinInteraction.
"""
if isinstance(other, self.__class__):
return SpinInteraction((self, other), coeff=1.0)
elif isinstance(other, NUMERIC_TYPES_GENERAL):
return SpinInteraction((self,), coeff=other)
else:
return NotImplemented
def __rmul__(self, other):
"""
Implement the binary arithmetic operation: `*`.
`self` parameter is the right operand and `other` is the left operand;
Return an instance of SpinInteraction.
"""
if isinstance(other, NUMERIC_TYPES_GENERAL):
return SpinInteraction((self,), coeff=other)
else:
return NotImplemented
def matrix(self):
"""
Return the matrix representation of the spin operator.
The matrix representation is calculated in the single spin Hilbert
space, i.e. 2 dimension.
See also
--------
matrix_function
matrix_repr
"""
return np.array(SPIN_MATRICES[self._otype], copy=True)
def dagger(self):
"""
Return the Hermitian conjugate of this operator.
"""
if self._otype == "p":
operator = self.derive(otype="m")
elif self._otype == "m":
operator = self.derive(otype="p")
else:
operator = self
return operator
def conjugate_of(self, other):
"""
Return whether `self` is Hermitian conjugate of `other`.
"""
if isinstance(other, self.__class__):
return self.dagger() == other
else:
raise TypeError(
"The `other` parameter is not instance of this class!"
)
def same_site(self, other):
"""
        Return whether `self` and `other` are defined on the same lattice site.
"""
if isinstance(other, self.__class__):
return self._site_id == other._site_id
else:
raise TypeError(
"The `other` parameter is not instance of this class!"
)
def derive(self, *, otype=None, site=None):
"""
Derive a new instance from `self` and the given parameters.
        This method creates a new instance with the same attributes as `self`
except for these given to this method.
All the parameters should be specified as keyword arguments.
Returns
-------
res : A new instance of SpinOperator.
"""
if otype is None:
otype = self.otype
if site is None:
site = self.coordinate
return self.__class__(otype=otype, site=site)
def Schwinger(self):
"""
Return the Schwinger Fermion representation of this spin operator.
"""
coordinate = self.coordinate
C_UP = AoC(otype=CREATION, site=coordinate, spin=SPIN_UP)
C_DOWN = AoC(otype=CREATION, site=coordinate, spin=SPIN_DOWN)
A_UP = AoC(otype=ANNIHILATION, site=coordinate, spin=SPIN_UP)
A_DOWN = AoC(otype=ANNIHILATION, site=coordinate, spin=SPIN_DOWN)
terms = []
SMatrix = self.matrix()
for row_index, row_aoc in enumerate((C_UP, C_DOWN)):
for col_index, col_aoc in enumerate((A_UP, A_DOWN)):
coeff = SMatrix[row_index, col_index]
if coeff != 0.0:
terms.append(ParticleTerm([row_aoc, col_aoc], coeff=coeff))
return terms
@staticmethod
def matrix_function(operator, total_spin):
"""
Calculate the matrix representation of the spin operator.
        For a specific spin operator, its matrix representation in the
        Hilbert space is defined as follows:
I_{n-1} * ... * I_{i+1} * S_i * I_{i-1} * ... * I_0
where I is (2, 2) identity matrix, `*` represents tensor product,
`n` is the total number of spins and `i` is the index of the lattice
site.
Parameters
----------
operator : tuple or list
Length 2 tuple or list: (index, otype) or [index, otype].
`index` is the index of the lattice site on which the spin
operator is defined;
`otype` is the type of the spin operator which should be only one
of "x" | "y" | "z" | "p" | "m".
total_spin : int
The total number of spins.
Returns
-------
res : csr_matrix
The matrix representation of this spin operator.
"""
index, otype = operator
I = identity(1 << index, dtype=np.float64, format="csr")
res = kron(SPIN_MATRICES[otype], I, format="csr")
I = identity(1 << (total_spin-index-1), dtype=np.float64, format="csr")
return kron(I, res, format="csr")
def matrix_repr(self, site_indices_table):
"""
Return the matrix representation of this spin operator.
For a specific spin operator, its matrix representation in the
        Hilbert space is defined as follows:
I_{n-1} * ... * I_{i+1} * S_i * I_{i-1} * ... * I_0
where I is (2, 2) identity matrix, `*` represents tensor product,
`n` is the total number of spins and `i` is the index of the lattice
site.
Parameters
----------
site_indices_table : IndexTable
A table that associate instances of SiteID with integer indices.
Returns
-------
res : csr_matrix
The matrix representation of this spin operator.
"""
total_spin = len(site_indices_table)
operator = (site_indices_table(self._site_id), self._otype)
return self.matrix_function(operator, total_spin)
class SpinInteraction:
"""
A unified description of spin interaction term.
Attributes
----------
coeff : float, int or complex
The coefficient of this term.
components : tuple
The component spin operators of this term.
Examples
--------
>>> from HamiltonianPy.quantumoperator import SpinOperator, SpinInteraction
>>> S0X = SpinOperator("x", site=[0, 0])
>>> S1X = SpinOperator("x", site=[0, 1])
>>> term = SpinInteraction((S0X, S1X), coeff=1.5)
>>> print(term)
The coefficient of this term: 1.5
The component operators:
SpinOperator(otype="x", site=(0, 0))
SpinOperator(otype="x", site=(0, 1))
>>> print(2 * term)
The coefficient of this term: 3.0
The component operators:
SpinOperator(otype="x", site=(0, 0))
SpinOperator(otype="x", site=(0, 1))
"""
def __init__(self, operators, coeff=1.0):
"""
Customize the newly created instance.
Parameters
----------
operators : tuple or list
A collection of `SpinOperator` objects that composing this term.
coeff : int, float, complex, optional
The coefficient of this term.
Default: 1.0.
"""
assert isinstance(coeff, NUMERIC_TYPES_GENERAL), "Invalid coefficient"
# Sorting the spin operators in ascending order according to their
# SiteID. The relative position of two operators with the same SiteID
# will not change and the exchange of two spin operators on different
        # lattice sites never changes the interaction term.
self._operators = tuple(
sorted(operators, key=lambda item: item.site_id)
)
self._coeff = coeff
@property
def coeff(self):
"""
The coefficient of this term.
"""
return self._coeff
@coeff.setter
def coeff(self, value):
assert isinstance(value, NUMERIC_TYPES_GENERAL), "Invalid coefficient"
self._coeff = value
@property
def components(self):
"""
The component spin operators of this term.
"""
return self._operators
def __str__(self):
"""
Return a string that describes the content of the instance.
"""
return "\n".join(
[
"The coefficient of this term: {0}".format(self._coeff),
"The component operators:",
*[" {0}".format(operator) for operator in self._operators],
]
)
def tolatex(self, indices_table=None, **kwargs):
"""
Return the LaTex form of this instance.
Parameters
----------
indices_table : IndexTable or None, optional
A table that associate instances of SiteID with integer indices.
The `indices_table` is passed to the `tolatex` method of
`SiteID` as the `site_index` argument.
            If not given or None, the `site` is shown as it is.
Default: None.
kwargs :
All other keyword arguments are passed to the `tolatex` method of
`SiteID`.
See also: `SiteID.tolatex`.
Returns
-------
latex : str
The LaTex form of this instance.
"""
latex_operators = [
operator.tolatex(
site_index=indices_table, **kwargs
).replace("$", "") for operator in self._operators
]
return "".join(["$", str(self._coeff), *latex_operators, "$"])
def show(self, indices_table=None, **kwargs):
"""
Show the instance in handwriting form.
Parameters
----------
indices_table : IndexTable or None, optional
A table that associate instances of SiteID with integer indices.
The `indices_table` is passed to the `tolatex` method of
`SiteID` as the `site_index` argument.
            If not given or None, the `site` is shown as it is.
Default: None.
kwargs :
All other keyword arguments are passed to the `tolatex` method of
`SiteID`.
See also: `SiteID.tolatex`.
"""
fig, ax = plt.subplots()
ax.text(
0.5, 0.5, self.tolatex(indices_table, **kwargs),
fontsize="xx-large", ha="center", va="center",
transform=ax.transAxes
)
ax.set_axis_off()
plt.show()
def __mul__(self, other):
"""
Implement the binary arithmetic operation: `*`.
        `self` is the left operand and `other` is the right operand;
Return a new instance of this class.
"""
if isinstance(other, self.__class__):
operators = self._operators + other._operators
coeff = self._coeff * other._coeff
elif isinstance(other, SpinOperator):
operators = self._operators + (other, )
coeff = self._coeff
elif isinstance(other, NUMERIC_TYPES_GENERAL):
operators = self._operators
coeff = self._coeff * other
else:
return NotImplemented
return self.__class__(operators, coeff=coeff)
def __rmul__(self, other):
"""
Implement the binary arithmetic operation: `*`.
`self` is the right operand and `other` is the left operand;
This method return a new instance of this class.
"""
if isinstance(other, SpinOperator):
operators = (other, ) + self._operators
coeff = self._coeff
elif isinstance(other, NUMERIC_TYPES_GENERAL):
operators = self._operators
coeff = other * self._coeff
else:
return NotImplemented
return self.__class__(operators, coeff=coeff)
def dagger(self):
"""
Return the Hermitian conjugate of this term.
"""
operators = [operator.dagger() for operator in self._operators[::-1]]
return self.__class__(operators, coeff=self._coeff.conjugate())
def Schwinger(self):
"""
Return the Schwinger Fermion representation of this term.
"""
fermion_reprs = [operator.Schwinger() for operator in self._operators]
terms = []
for term in product(*fermion_reprs):
res_term = self._coeff
for sub_term in term:
res_term = res_term * sub_term
terms.append(res_term)
return terms
@staticmethod
def matrix_function(operators, total_spin, coeff=1.0):
"""
Return the matrix representation of the spin interaction term.
Parameters
----------
operators : sequence
A sequence of 2-tuple: [(index_0, otype_0), ..., (index_n, otype_n)]
`index_i` is the index of the lattice site on which the spin
operator is defined;
`otype_i` is the type of the spin operator which should be only
one of "x" | "y" | "z" | "p" | "m".
total_spin: int
The total number of spins.
coeff : int, float or complex, optional
The coefficient of the term.
Default: 1.0.
Returns
-------
res : csr_matrix
The matrix representation of this term.
"""
assert isinstance(total_spin, int) and total_spin > 0
assert isinstance(coeff, NUMERIC_TYPES_GENERAL), "Invalid coefficient"
operators = sorted(operators, key=lambda item: item[0], reverse=True)
if len(operators) == 2 and operators[0][0] > operators[1][0]:
(i, alpha), (j, beta) = operators
Si = coeff * SPIN_MATRICES[alpha]
Sj = SPIN_MATRICES[beta]
dim0 = 1 << j
dim1 = 1 << (i - j - 1)
dim2 = 1 << (total_spin - i - 1)
if dim1 == 1:
res = kron(Si, Sj, format="csr")
else:
I = identity(dim1, dtype=np.float64, format="csr")
res = kron(Si, kron(I, Sj, format="csr"), format="csr")
if dim0 != 1:
res = kron(res, identity(dim0, np.float64, "csr"), format="csr")
if dim2 != 1:
res = kron(identity(dim2, np.float64, "csr"), res, format="csr")
else:
res = coeff * identity(
1 << total_spin, dtype=np.float64, format="csr"
)
for index, otype in operators:
I = identity(1 << index, dtype=np.float64, format="csr")
tmp = kron(SPIN_MATRICES[otype], I, format="csr")
I = identity(
1 << (total_spin-index-1), dtype=np.float64, format="csr"
)
tmp = kron(I, tmp, format="csr")
res = res.dot(tmp)
return res
def matrix_repr(self, site_indices_table, coeff=None):
"""
Return the matrix representation of this spin interaction term.
Parameters
----------
site_indices_table : IndexTable
A table that associate instances of SiteID with integer indices.
coeff : int, float or complex, optional
A new coefficient for this spin interaction term.
If not given or None, use the original coefficient.
Default: None.
Returns
-------
res : csr_matrix
The matrix representation of this spin interaction term.
"""
if coeff is not None:
self.coeff = coeff
total_spin = len(site_indices_table)
operators = [
(operator.getSiteIndex(site_indices_table), operator.otype)
for operator in self._operators
]
return self.matrix_function(operators, total_spin, self.coeff)
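if __name__ == "__main__":
    # Minimal self-check sketch (assumed demo code, not part of the
    # original module): build the two-spin term 0.5 * S_0^x S_1^x and
    # obtain its sparse matrix representation via `matrix_function`,
    # which only needs (site_index, otype) pairs, so no IndexTable is
    # required for this quick check.
    S0X = SpinOperator("x", site=[0, 0])
    S1X = SpinOperator("x", site=[0, 1])
    term = 0.5 * (S0X * S1X)
    print(term)
    M = SpinInteraction.matrix_function(
        [(0, "x"), (1, "x")], total_spin=2, coeff=0.5
    )
    # For two spin-1/2 sites the result is a 4x4 csr_matrix.
    print(M.toarray())
    # The Schwinger-fermion form of a single operator is a list of
    # ParticleTerm objects:
    for fermion_term in S0X.Schwinger():
        print(fermion_term)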
|
wangshiphys/HamiltonianPy
|
HamiltonianPy/quantumoperator/spinsystem.py
|
Python
|
gpl-3.0
| 23,008
|
#!/usr/bin/python
# This script gives server-side access to one Soledad user database by using
# the configuration stored in /etc/soledad/soledad-server.conf.
#
# Use it like this:
#
# python -i server-side-db.py <uuid>
import sys
from ConfigParser import ConfigParser
from leap.soledad.common.couch import CouchDatabase
if len(sys.argv) != 2:
print 'Usage: %s <uuid>' % sys.argv[0]
exit(1)
uuid = sys.argv[1]
# get couch url
cp = ConfigParser()
cp.read('/etc/soledad/soledad-server.conf')
url = cp.get('soledad-server', 'couch_url')
# access user db
dbname = 'user-%s' % uuid
db = CouchDatabase(url, dbname)
# get replica info
replica_uid = db._replica_uid
gen, docs = db.get_all_docs()
print "dbname: %s" % dbname
print "replica_uid: %s" % replica_uid
print "generation: %d" % gen
# get relevant docs
schemes = map(lambda d: d.content['_enc_scheme'], docs)
pubenc = filter(lambda d: d.content['_enc_scheme'] == 'pubkey', docs)
print "total number of docs: %d" % len(docs)
print "pubkey encrypted docs: %d" % len(pubenc)
|
leapcode/soledad
|
scripts/db_access/server_side_db.py
|
Python
|
gpl-3.0
| 1,052
|
import select
import errno
class BasePoller:
def __init__(self, options):
self.options = options
self.initialize()
def initialize(self):
pass
def register_readable(self, fd):
raise NotImplementedError
def register_writable(self, fd):
raise NotImplementedError
def unregister(self, fd):
raise NotImplementedError
def poll(self, timeout):
raise NotImplementedError
def before_daemonize(self):
pass
def after_daemonize(self):
pass
class SelectPoller(BasePoller):
def initialize(self):
self._select = select
self._init_fdsets()
def register_readable(self, fd):
self.readables.add(fd)
def register_writable(self, fd):
self.writables.add(fd)
def unregister(self, fd):
if fd in self.readables:
self.readables.remove(fd)
if fd in self.writables:
self.writables.remove(fd)
def unregister_all(self):
self._init_fdsets()
def poll(self, timeout):
try:
r, w, x = self._select.select(
self.readables,
self.writables,
[], timeout
)
except select.error as err:
if err.args[0] == errno.EINTR:
self.options.logger.blather('EINTR encountered in poll')
return [], []
if err.args[0] == errno.EBADF:
self.options.logger.blather('EBADF encountered in poll')
self.unregister_all()
return [], []
raise
return r, w
def _init_fdsets(self):
self.readables = set()
self.writables = set()
class PollPoller(BasePoller):
def initialize(self):
self._poller = select.poll()
self.READ = select.POLLIN | select.POLLPRI | select.POLLHUP
self.WRITE = select.POLLOUT
def register_readable(self, fd):
self._poller.register(fd, self.READ)
def register_writable(self, fd):
self._poller.register(fd, self.WRITE)
def unregister(self, fd):
self._poller.unregister(fd)
def poll(self, timeout):
fds = self._poll_fds(timeout)
readables, writables = [], []
for fd, eventmask in fds:
if self._ignore_invalid(fd, eventmask):
continue
if eventmask & self.READ:
readables.append(fd)
if eventmask & self.WRITE:
writables.append(fd)
return readables, writables
def _poll_fds(self, timeout):
try:
return self._poller.poll(timeout * 1000)
except select.error as err:
if err.args[0] == errno.EINTR:
self.options.logger.blather('EINTR encountered in poll')
return []
raise
def _ignore_invalid(self, fd, eventmask):
if eventmask & select.POLLNVAL:
            # POLLNVAL means the `fd` value is invalid, not open.
            # When a process quits, its `fd`s are closed, so there
            # is no more reason to keep this `fd` registered.
            # If the process restarts, its `fd`s are registered again.
self.unregister(fd)
return True
return False
class KQueuePoller(BasePoller):
'''
Wrapper for select.kqueue()/kevent()
'''
max_events = 1000
def initialize(self):
self._kqueue = select.kqueue()
self.readables = set()
self.writables = set()
def register_readable(self, fd):
self.readables.add(fd)
kevent = select.kevent(fd, filter=select.KQ_FILTER_READ,
flags=select.KQ_EV_ADD)
self._kqueue_control(fd, kevent)
def register_writable(self, fd):
self.writables.add(fd)
kevent = select.kevent(fd, filter=select.KQ_FILTER_WRITE,
flags=select.KQ_EV_ADD)
self._kqueue_control(fd, kevent)
def unregister(self, fd):
kevent = select.kevent(
fd,
filter=(select.KQ_FILTER_READ | select.KQ_FILTER_WRITE),
flags=select.KQ_EV_DELETE
)
self._forget_fd(fd)
self._kqueue_control(fd, kevent)
def _kqueue_control(self, fd, kevent):
try:
self._kqueue.control([kevent], 0)
except OSError as error:
if error.errno == errno.EBADF:
self.options.logger.blather('EBADF encountered in kqueue. '
'Invalid file descriptor %s' % fd)
else:
raise
def _forget_fd(self, fd):
for collection in (self.readables, self.writables):
try:
collection.remove(fd)
except KeyError:
pass
def poll(self, timeout):
readables, writables = [], []
try:
kevents = self._kqueue.control(None, self.max_events, timeout)
except OSError as error:
if error.errno == errno.EINTR:
self.options.logger.blather('EINTR encountered in poll')
return readables, writables
raise
for kevent in kevents:
if kevent.filter == select.KQ_FILTER_READ:
readables.append(kevent.ident)
if kevent.filter == select.KQ_FILTER_WRITE:
writables.append(kevent.ident)
return readables, writables
def before_daemonize(self):
self._kqueue.close()
self._kqueue = None
def after_daemonize(self):
self._kqueue = select.kqueue()
for fd in self.readables:
self.register_readable(fd)
for fd in self.writables:
self.register_writable(fd)
def implements_poll():
return hasattr(select, 'poll')
def implements_kqueue():
return hasattr(select, 'kqueue')
if implements_kqueue():
Poller = KQueuePoller
elif implements_poll():
Poller = PollPoller
else:
Poller = SelectPoller
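if __name__ == '__main__':
    # Minimal usage sketch (assumed demo code, not part of the original
    # module): wait until stdin becomes readable using whichever backend
    # the module selected above. BasePoller only touches
    # `options.logger.blather`, so a tiny stub object is enough here.
    import sys

    class _StubLogger:
        def blather(self, msg):
            sys.stdout.write(msg + '\n')

    class _StubOptions:
        logger = _StubLogger()

    poller = Poller(_StubOptions())
    poller.register_readable(sys.stdin.fileno())
    readables, writables = poller.poll(5.0)  # block for at most 5 seconds
    sys.stdout.write('readable fds: %r\n' % (readables,))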
|
soarpenguin/python-scripts
|
poller.py
|
Python
|
gpl-3.0
| 6,022
|
#!/usr/bin/python -u
'''
Targets:
* Create framework for path finding:
* maze creation (manual) from learn_pygame/pygame_3rd.py
* start/end points
* creep class
* some way to change pathfinding algorithm
 * Implement straight-to-the-target pathfinding
* Implement A* algorithm
* Some kind of decision visualization
* Interface to show/change current algorithm
'''
import pygame
import time
import sys
SCREEN_SIZE=(800,640)
FPS=50
class Creep(object): #no animation yet
    def __init__(self, sprite, cell_size, initial_pos, finish=None):
self.sprite = pygame.image.load(sprite).convert_alpha()
self.start=initial_pos
self.finish=finish
self.cell_size = cell_size
self.pos = initial_pos
self.speed = 25
self.delay = self.speed
self.path=[]
def move(self,area):
if self.delay:
self.delay -= 1
return None
else:
self.delay = self.speed
print "+",
if area.get_cell(self.pos) == "Finish":
self.pos = area.get_start()
self.path = None
if self.path:
old_pos=self.pos
new_pos=self.path.pop()
if self.validate(new_pos):
self.pos=new_pos
return old_pos
else:
self.path=self.pathfind(area)
else:
print "*",
self.path=self.pathfind( area)
def validate(self, pos):
return True
def get_pixel_position(self):
print "position", (self.pos[0]*self.cell_size[0],self.pos[1]*self.cell_size[1])
return (self.pos[0]*self.cell_size[0],self.pos[1]*self.cell_size[1])
def update(self,surface):
'''
make a blit, return update rect
'''
surface.blit(self.sprite,self.get_pixel_position())
return pygame.Rect(self.get_pixel_position(),self.cell_size)
def pathfind(self,area):
'''
Implements a* pathfinding algorithm. http://en.wikipedia.org/wiki/A*_search_algorithm
'''
        #not really, just a stub to test the rest; see the pathfind_astar sketch below
return [(a,a) for a in xrange(20)]
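    def pathfind_astar(self, area):
        '''
        Hedged sketch of the A* algorithm the stub above stands in for
        (assumed implementation, not the original author's code).
        Neighbours come from area.get_edges(), the heuristic is the
        Chebyshev distance (admissible on an 8-connected grid with unit
        step cost), and the path is returned goal-first so that move()
        can pop() the next cell from the end of the list. Swap it in by
        returning self.pathfind_astar(area) from pathfind().
        '''
        import heapq
        start, goal = self.pos, area.get_finish()
        def heuristic(cell):
            return max(abs(cell[0] - goal[0]), abs(cell[1] - goal[1]))
        open_heap = [(heuristic(start), 0, start)] # entries: (f, g, cell)
        came_from = {}
        g_score = {start: 0}
        while open_heap:
            f, g, current = heapq.heappop(open_heap)
            if current == goal:
                path = []
                while current in came_from: # walk back to start
                    path.append(current)
                    current = came_from[current]
                return path
            for neighbour in area.get_edges(*current):
                tentative = g + 1 # uniform cost, diagonals included
                if tentative < g_score.get(neighbour, float('inf')):
                    came_from[neighbour] = current
                    g_score[neighbour] = tentative
                    heapq.heappush(open_heap,
                                   (tentative + heuristic(neighbour),
                                    tentative, neighbour))
        return [] # no path found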
class AreaMap:
def __init__(self, size, empty_image, filled_image, message=None):
self.size=size
self.empty = pygame.image.load(empty_image).convert()
fill = pygame.image.load(filled_image).convert_alpha()
self.fill = self.empty.copy()
self.fill.blit(fill, (0, 0)) #create 'fill' tile over empty (fill can contain transparency)
self.start = self.fill.copy()
self.start.fill((30, 30, 30, 127))
self.finish = self.fill.copy()
self.finish.fill((230, 230, 230, 127))
if self.empty.get_size() != self.fill.get_size():
raise Exception("Fill/empty images not the same size")
self.cell_size = self.empty.get_size()
self.size = size
self.area = [[ False for y in range(self.size[1])] for x in range(self.size[0])] #init 2D array with falses
self.queue = set([(x, y) for x in range(self.size[0]) for y in range (self.size[1])])
self.update_rects = []
self.message=message
def shift(self, (x,y), (shift_x, shift_y)):
return x+shift_x, y+shift_y
def get_edges(self, x, y):
'''
return accessible edges in graph for specified vertex
finish & stop are accessible
return value: list of edges, each edge - pair of coordinates
'''
accessible=[]
for shift in ( (-1,-1), (-1,0), (-1,1), (0,-1), (0,1), (1,-1), (1,0), (1,1) ):
            if self.get_cell(self.shift((x,y), shift)) in (False, "Start", "Finish"): #empty, start and finish cells are accessible
                accessible.append(self.shift((x,y), shift))
return accessible
def add_start(self, x, y):
'''
add start point to area
'''
self.area[x][y] = "Start"
self.mob=Creep("mob.png",self.cell_size,(x,y))
self.queue.add((x, y))
def add_finish(self, x, y):
'''
add finish point to area
'''
self.area[x][y] = "Finish"
self.queue.add((x, y))
    def get_start(self):
        for x in range(self.size[0]):
            for y in range(self.size[1]):
                if self.area[x][y]=="Start":
                    return (x,y)
    def get_finish(self):
        for x in range(self.size[0]):
            for y in range(self.size[1]):
                if self.area[x][y]=="Finish":
                    return (x,y)
def clear(self):
for x in range(self.size[0]):
for y in range (self.size[1]):
self.set_cell(False,(x,y))
def pos(self, pos):
'''convert position to block number, take (x,y), return x,y'''
return pos[0]/self.cell_size[0], pos[1]/self.cell_size[1]
def get_cell_by_pos(self, pos):
return self.get_cell(self.pos(pos))
def get_cell(self, index):
if index[0] < 0 or index [1] < 0 or index [0] >= self.size[0] or index[1] >= self.size[1]:
return "Unaccessible"
return self.area[index[0]][index[1]]
def set_cell(self,value,(x,y)):
if self.area[x][y] in ("Start", "Finish"): #do not allow replace of finish or start
return
if self.area[x][y] != value:
self.area[x][y] = value
self.queue.add((x, y))
def set_cell_by_pos(self, value, pos):
self.set_cell(value,self.pos(pos))
def set_row_by_pos(self, value, pos):
for x in range(self.size[0]):
self.set_cell(value, (x, self.pos(pos)[1]))
def set_col_by_pos(self, value, pos):
for y in range(self.size[1]):
self.set_cell(value, (self.pos(pos)[0], y))
def set_with_kmod(self, value, pos, key_mod):
'''
select to set single cell, row or column, based on key_mod
'''
if key_mod & pygame.KMOD_CTRL:
self.set_col_by_pos(value, pos)
elif key_mod & pygame.KMOD_SHIFT:
self.set_row_by_pos(value, pos)
else:
self.set_cell_by_pos(value, pos)
def set_cells_by_strike(self,value,pos,rel, key_mod):
'''operates on every cell under single mouse move'''
offset=max(map(abs,rel))
if offset<min(self.cell_size):
self.set_with_kmod(value, pos, key_mod) #trivial - one cell changed
return
div_round_to_infinity = lambda a, b: a//b if a*b<0 else (a+(-a%b))//b # http://stackoverflow.com/questions/7181757/how-to-implement-division-with-round-towards-infinity-in-python
point_calc = lambda pos,rel, step, steps, size, index: pos[index] - rel[index] + div_round_to_infinity(rel[index]*step, steps)
steps = div_round_to_infinity(offset, min(self.cell_size))
for step in range(0, steps):
x = point_calc(pos, rel, step, steps, self.cell_size, 0)
y = point_calc(pos, rel, step, steps, self.cell_size, 1)
self.set_with_kmod(value, (x, y), key_mod)
def update_cell(self, disp, cell):
reg_x = cell[0]*self.cell_size[0]
reg_y = cell[1]*self.cell_size[1]
print "updating:", cell, reg_x, reg_y
if self.area[cell[0]][cell[1]]== True:
pattern = self.fill
elif self.area[cell[0]][cell[1]] == False:
pattern = self.empty
elif self.area[cell[0]][cell[1]] == "Finish":
pattern = self.finish
elif self.area[cell[0]][cell[1]] == "Start":
pattern = self.start
else:
print "wat?"
disp.blit(pattern,(reg_x, reg_y))
reg=pygame.rect.Rect((reg_x, reg_y), self.cell_size)
self.update_rects.append(reg)
# print "update_rects", self.update_rects
def update(self, disp):
mob_old_pos = self.mob.move(self)
if mob_old_pos:
self.queue.add(mob_old_pos)
print "old", mob_old_pos, self.queue
if not self.queue:
return
for item in self.queue:
self.update_cell( disp, item)
self.update_rects.append(self.mob.update(disp))
if self.message:
if self.message.get_rect().collidelist(self.update_rects) != -1:
disp.blit(self.message, (0, 0))
self.update_rects.append(self.message.get_rect())
pygame.display.update(self.update_rects)
self.queue=set()
self.update_rects=[]
if __name__ == '__main__':
pygame.init()
disp = pygame.display.set_mode(SCREEN_SIZE, pygame.DOUBLEBUF)
clock = pygame.time.Clock()
default_font = pygame.font.get_default_font()
font = pygame.font.SysFont(default_font, 30)
msg = font.render("Click anywere to draw. Shift - line, ctrl - column, mid. button - clear screen", True, (230, 30, 30, 255))
area = AreaMap((25,20), "p1_empty.png", "p1_barrier.png", msg)
area.add_start(0,5)
area.add_finish(24,15)
area.update(disp)
while True:
pygame.event.pump()
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
sys.exit(0)
elif event.type == pygame.MOUSEBUTTONDOWN:
if event.button == 1:
state = not area.get_cell_by_pos(event.pos)
area.set_cells_by_strike(state, event.pos,(0,0),pygame.key.get_mods())
prev = event.pos
if event.button == 2:
area.clear()
elif event.type == pygame.MOUSEMOTION:
if event.buttons[0]:
rel=map(int.__sub__,event.pos,prev) #workaround for fast mouse movements
area.set_cells_by_strike(state,event.pos,rel,pygame.key.get_mods())
prev=event.pos
area.update(disp)
clock.tick(FPS)
|
amarao/fun_came
|
learn/pathfinding/pathplay.py
|
Python
|
gpl-3.0
| 9,902
|
import numpy as num
import scipy.linalg
import numpy.matlib
class gait(object):
"""
    Biped gait generator class
"""
def __init__(self, gait_param = None):
self.Tcycle = 1.4
self.DSrate = 0.2
self.SSrate = 0.8
self.SWrate = 0.4
self.STrate = 0.6
self.samp_per_sec = 100
self.dt = 1.0/self.samp_per_sec
self.HipWidth = 0.114
self.stepHeight = 0.03
self.gait_param = gait_param
# self.zmp_design = gait_param
# self.zmp_design[1, 0] = self.zmp_design[1, 0]/2
# self.zmp_design[1, -1] = self.zmp_design[1, -1]/2
# Output argument
self.left_ankle = None
self.right_ankle = None
self.vel_left_ankle = None # Left Ankle velocity
self.vel_right_ankle = None # Right Ankle velocity
self.com = None
self.vel_com = None
self.zmp_ref = None
self.zmp_out = None
self.time = None
def cal_stance_foot(self, init_stance, first_leg):
"""
        Function calculating the stance foot step plan
:param init_stance:
:param first_leg:
:return: Ankle position and velocity
"""
s_x = self.gait_param[0, :]
s_y = self.gait_param[1, :]
row, col = self.gait_param.shape
num_step = col
# Which leg is stance first?
        if first_leg == 'Left':
left_stance_first = False
else:
left_stance_first = True
stance_foot = num.zeros((num_step+1, 2))
stance_foot[0, 0] = init_stance[0]
stance_foot[0, 1] = init_stance[1]
for index in range(1, num_step+1):
# Equation 4.50 in text book: Introduction to humanoid robotics
stance_foot[index, 0] = stance_foot[index-1, 0] + s_x[index-1]
if left_stance_first:
stance_foot[index, 1] = stance_foot[index-1, 1] - (-1)**index*s_y[index-1]
else:
stance_foot[index, 1] = stance_foot[index - 1, 1] + (-1)**index * s_y[index - 1]
t_ds = self.Tcycle/2*self.DSrate
t_sw = self.Tcycle*self.SWrate
t_st = self.Tcycle*self.STrate
ds_period = num.arange(0.0, t_ds+self.dt, self.dt, dtype=float)
sw_period = num.arange(0.0, t_sw + self.dt, self.dt, dtype=float)
st_period = num.arange(0.0, t_st + self.dt, self.dt, dtype=float)
# Calculate number of stance phase of each foot
left_foot_stance = None
right_foot_stance = None
if left_stance_first:
left_flag = False
else:
left_flag = True
for index in range(num_step+1):
if left_flag:
if left_foot_stance is None:
left_foot_stance = num.append(stance_foot[index, :], 0)
else:
left_foot_stance = num.vstack((left_foot_stance, num.append(stance_foot[index, :], 0)))
else:
if right_foot_stance is None:
right_foot_stance = num.append(stance_foot[index, :], 0)
else:
right_foot_stance = num.vstack((right_foot_stance, num.append(stance_foot[index, :], 0)))
left_flag = not left_flag
# When left leg is stance first
if left_stance_first:
            # Todo : add code in the case left leg is stance first. Maybe reverse the code in else case
print('You need to add code in this case!!')
else:
""" In this case: right leg is the first stance leg
Calculate the position of foot in timeseries
"""
rfoot_time = [0]
rfoot_pos = right_foot_stance[0,:]
row, col = right_foot_stance.shape
for index in range(1, row):
# Stance phase of right leg
pre_time = rfoot_time[-1] # previous time
rfoot_time = num.append(rfoot_time, pre_time+t_st)
rfoot_pos = num.vstack((rfoot_pos, right_foot_stance[index-1, :]))
                # Swing phase of right leg
pre_time = rfoot_time[-1]
rfoot_time = num.append(rfoot_time, pre_time + t_sw)
rfoot_pos = num.vstack((rfoot_pos, right_foot_stance[index, :]))
# Left leg is swing
lfoot_time = [0]
lfoot_pos = None
row, col = left_foot_stance.shape
for index in range(1, row):
pre_time = lfoot_time[-1]
lfoot_time = num.append(lfoot_time, pre_time + t_sw)
# Swing phase
if lfoot_pos is None:
lfoot_pos = left_foot_stance[index - 1, :]
else:
lfoot_pos = num.vstack((lfoot_pos, left_foot_stance[index - 1, :]))
# Stance phase
pre_time = lfoot_time[-1]
lfoot_time = num.append(lfoot_time, pre_time + t_st)
lfoot_pos = num.vstack((lfoot_pos, left_foot_stance[index, :]))
# Adding the final state
lfoot_pos = num.vstack((lfoot_pos, left_foot_stance[-1, :]))
            # Create Ankle data for Right leg first because this is the first stance leg
rankle_time = None
rankle_pos = None
rankle_vel = None
pre_time = 0
for index in range(len(rfoot_pos)-1):
if rfoot_pos[index, 0] == rfoot_pos[index+1, 0]:
# Right leg is in the stance phase
if rankle_pos is None:
rankle_time = pre_time + st_period
rankle_pos = num.matlib.repmat(rfoot_pos[index, :], len(st_period), 1)
rankle_vel = num.matlib.repmat([0, 0, 0], len(st_period), 1)
else:
rankle_time = num.append(rankle_time, pre_time + st_period)
rankle_pos = num.vstack((rankle_pos, num.matlib.repmat(rfoot_pos[index, :], len(st_period), 1)))
rankle_vel = num.vstack((rankle_vel, num.matlib.repmat([0, 0, 0], len(st_period), 1)))
pre_time = rankle_time[-1]
else:
# Right leg is in the swing phase
rankle_time = num.append(rankle_time, pre_time + sw_period)
x_pos, x_vel = self.interpolation_ankle_x(rfoot_pos[index, 0], rfoot_pos[index+1, 0], rfoot_time[index], sw_period)
z_pos, z_vel = self.interpolation_ankle_z(self.stepHeight, sw_period)
y_pos = num.matlib.repmat(rfoot_pos[index, 1], len(sw_period), 1)
y_vel = num.matlib.repmat(0, len(sw_period), 1)
rankle_pos = num.vstack((rankle_pos, num.hstack((x_pos.reshape(y_pos.shape), y_pos, z_pos.reshape(y_pos.shape)))))
rankle_vel = num.vstack((rankle_vel, num.hstack((x_vel.reshape(y_pos.shape), y_vel, z_vel.reshape(y_pos.shape)))))
pre_time = rankle_time[-1]
# Create Ankle data for left leg
lankle_time = None
lankle_pos = None
lankle_vel = None
pre_time = 0
for index in range(len(lfoot_pos)-1):
if lfoot_pos[index, 0] == lfoot_pos[index + 1, 0]:
# Left foot is in stance phase
if lankle_pos is None: # for store data at first step
lankle_time = pre_time + st_period
lankle_pos = num.matlib.repmat(lfoot_pos[index, :], len(st_period), 1)
lankle_vel = num.matlib.repmat([0, 0, 0], len(st_period), 1)
else:
lankle_time = num.append(lankle_time, pre_time + st_period)
lankle_pos = num.vstack((lankle_pos, num.matlib.repmat(lfoot_pos[index, :], len(st_period), 1)))
lankle_vel = num.vstack((lankle_vel, num.matlib.repmat([0, 0, 0], len(st_period), 1)))
pre_time = lankle_time[-1]
else:
# Left leg is in the swing phase
if lankle_pos is None: # for store data at first step
lankle_time = pre_time + sw_period
x_pos, x_vel = self.interpolation_ankle_x(lfoot_pos[index, 0], lfoot_pos[index + 1, 0],
lfoot_time[index], sw_period)
z_pos, z_vel = self.interpolation_ankle_z(self.stepHeight, sw_period)
y_pos = num.matlib.repmat(lfoot_pos[index, 1], len(sw_period), 1)
y_vel = num.matlib.repmat(0, len(sw_period), 1)
lankle_pos = num.hstack((x_pos.reshape(y_pos.shape), y_pos, z_pos.reshape(y_pos.shape)))
lankle_vel = num.hstack((x_vel.reshape(y_pos.shape), y_vel, z_vel.reshape(y_pos.shape)))
else:
lankle_time = num.append(lankle_time, pre_time + sw_period)
x_pos, x_vel = self.interpolation_ankle_x(lfoot_pos[index, 0], lfoot_pos[index + 1, 0],
lfoot_time[index], sw_period)
z_pos, z_vel = self.interpolation_ankle_z(self.stepHeight, sw_period)
y_pos = num.matlib.repmat(lfoot_pos[index, 1], len(sw_period), 1)
y_vel = num.matlib.repmat(0, len(sw_period), 1)
lankle_pos = num.vstack(
(lankle_pos, num.hstack((x_pos.reshape(y_pos.shape), y_pos, z_pos.reshape(y_pos.shape)))))
lankle_vel = num.vstack(
(lankle_vel, num.hstack((x_vel.reshape(y_pos.shape), y_vel, z_vel.reshape(y_pos.shape)))))
pre_time = lankle_time[-1]
""" Vi tri ban dau hai chan o vi tri double support
Vi then trong khoang thoi gian 0 ->T_ds, chan trai dung yen
Bo sung them khoang thoi gian do vao chan trai
Xem them hinh anh human walking giat (search google)
"""
lankle_pos_init = num.matlib.repmat(left_foot_stance[0, :], len(ds_period), 1)
lankle_vel_init = num.matlib.repmat([0, 0, 0], len(ds_period), 1)
lankle_time_init = ds_period
lankle_pos = num.vstack((lankle_pos_init, lankle_pos))
lankle_vel = num.vstack((lankle_vel_init, lankle_vel))
lankle_time = num.append(ds_period, lankle_time + ds_period[-1])
return rankle_pos, rankle_vel, rankle_time, lankle_pos, lankle_vel, lankle_time
def interpolation_ankle_x(self, x_start, x_end, t_start, t_sw):
"""
        Function interpolating the ankle position in the x direction
:param x_start:
:param x_end:
:param t_start:
:param t_sw:
:return: Ankle position and velocity in x direction
"""
t1 = 0.0
t3 = t_sw[-1]
t2 = (t1 + t3)/2
f1 = 0
f3 = x_end - x_start
f2 = (f1 + f3)/2
A = num.array([[t1**4, t1**3, t1**2, t1, 1],
[t2**4, t2**3, t2**2, t2, 1],
[t3**4, t3**3, t3**2, t3, 1],
[4*t1**3, 3*t1**2, 2*t1, 1, 0],
[4*t3**3, 3*t3**2, 2*t3, 1, 0]])
Y = num.array([[f1],
[f2],
[f3],
[0],
[0]])
X = num.dot(num.linalg.inv(A), Y)
X = X.ravel()
times = t_sw
x_pos = x_start + self.poly4th(X, times)
x_vel = self.poly4th_diff(X, times)
return x_pos, x_vel
def interpolation_ankle_z(self, step_height, t_sw):
"""
        Function interpolating the ankle position in the z direction
:param step_height: Foot step height
:param t_sw:
:return: Ankle position and velocity in z direction
"""
t1 = 0.0
t3 = t_sw[-1]
t2 = (t1 + t3)/2
f1 = 0
f2 = step_height
f3 = 0
A = num.array([[t1 ** 4, t1 ** 3, t1 ** 2, t1, 1],
[t2 ** 4, t2 ** 3, t2 ** 2, t2, 1],
[t3 ** 4, t3 ** 3, t3 ** 2, t3, 1],
[4 * t1 ** 3, 3 * t1 ** 2, 2 * t1, 1, 0],
[4 * t3 ** 3, 3 * t3 ** 2, 2 * t3, 1, 0]])
Y = num.array([[f1],
[f2],
[f3],
[0],
[0]])
X = num.dot(num.linalg.inv(A), Y)
X = X.ravel()
times = t_sw
z_pos = self.poly4th(X, times)
z_vel = self.poly4th_diff(X, times)
return z_pos, z_vel
def cal_zmp_ref(self, zmp_init, first_leg):
"""
        Function calculating the ZMP reference
:param zmp_init:
:param first_leg:
:return: zmp_ref, time
"""
zmp_table = self.gait_param
zmp_table[1, 0] = zmp_table[1, 0]/2
zmp_table[1, -1] = zmp_table[1, -1] / 2
zmp_raw_x = zmp_table[0, :]
zmp_raw_y = zmp_table[1, :]
row,col = zmp_table.shape
# Which leg is stance first?
        if first_leg == 'Left':
left_stance_first = False
else:
left_stance_first = True
zmp_raw = num.zeros((col+1, 2))
zmp_raw[0, 0] = zmp_init[0]
zmp_raw[0, 1] = zmp_init[1]
for index in range(1, col + 1):
# Equation 4.50 in text book: Introduction to humanoid robotics
zmp_raw[index, 0] = zmp_raw[index - 1, 0] + zmp_raw_x[index - 1]
if left_stance_first:
zmp_raw[index, 1] = zmp_raw[index - 1, 1] - (-1) ** index * zmp_raw_y[index - 1]
else:
zmp_raw[index, 1] = zmp_raw[index - 1, 1] + (-1) ** index * zmp_raw_y[index - 1]
# Init state before calculate
t_ss = self.Tcycle/2*self.SSrate # Single support duration
t_ds = self.Tcycle/2*self.DSrate # Double support duration
ds_period = num.arange(0.0, t_ds + self.dt, self.dt, dtype=float)
ds_num_sample = len(ds_period)
ss_period = num.arange(0.0, t_ss + self.dt, self.dt, dtype=float)
ss_num_sample = len(ss_period)
time = None
zmp_x = None
zmp_y = None
pre_time = 0
for index in range(1, col+1):
# Double support phase
zmpx = num.linspace(zmp_raw[index - 1, 0], zmp_raw[index, 0], ds_num_sample)
zmpy = num.linspace(zmp_raw[index - 1, 1], zmp_raw[index, 1], ds_num_sample)
# Store data
if time is None:
# Store in the fist time
time = pre_time + ds_period
zmp_x = zmpx
zmp_y = zmpy
else:
time = num.append(time, pre_time + ds_period)
zmp_x = num.append(zmp_x, zmpx)
zmp_y = num.append(zmp_y, zmpy)
# Single support phase
zmpx = num.matlib.repmat(zmp_raw[index, 0], ss_num_sample, 1)
zmpy = num.matlib.repmat(zmp_raw[index, 1], ss_num_sample, 1)
pre_time = time[-1]
# Store data
time = num.append(time, pre_time + ss_period)
zmp_x = num.append(zmp_x, zmpx)
zmp_y = num.append(zmp_y, zmpy)
pre_time = time[-1]
zmp_x = zmp_x.reshape((len(zmp_x), 1))
zmp_y = zmp_y.reshape((len(zmp_y), 1))
zmp_ref = num.hstack((zmp_x, zmp_y))
return zmp_ref, time
def poly4th(self, a, x):
"""
        Function evaluating a 4th-order polynomial at x
:param a:
:param x:
:return:
"""
value = a[0]*x**4 + a[1]*x**3 + a[2]*x**2 + a[3]*x + a[4]
return value
def poly4th_diff(self, a, x):
"""
        Function evaluating the derivative of a 4th-order polynomial at x
:param a:
:param x:
:return:
"""
value = 4*a[0]*x**3 + 3*a[1]*x**2 + 2*a[2]*x + a[3]
return value
def gait_generation(self, z_com, preview_time):
"""
        Function generating the walking gait
:param z_com:
:param preview_time:
:return: zmp_ref, zmp_time, rankle_pos, rankle_vel, lankle_pos, lankle_vel, com_pos, com_vel, zmp_out
"""
rankle_pos, rankle_vel, rankle_time, lankle_pos, lankle_vel, lankle_time = \
self.cal_stance_foot([0.0, self.HipWidth / 2.0], 'Left')
zmp_ref, zmp_time = self.cal_zmp_ref([0.0, 0.0], 'Left')
        # The ZMP, left-foot and right-foot trajectories have different
        # durations, so the three trajectories must be synchronized in time.
time_end = num.array([rankle_time[-1], lankle_time[-1], zmp_time[-1]])
longest_time = time_end.max()
# Right ankle
tmp_time = num.arange(rankle_time[-1], longest_time, self.dt)
tmp_pos = num.matlib.repmat(rankle_pos[-1, :], len(tmp_time), 1)
tmp_vel = num.matlib.repmat(rankle_vel[-1, :], len(tmp_time), 1)
rankle_pos = num.vstack((rankle_pos, tmp_pos))
rankle_vel = num.vstack((rankle_vel, tmp_vel))
rankle_time = num.append(rankle_time, tmp_time)
# Left ankle
tmp_time = num.arange(lankle_time[-1], longest_time, self.dt)
tmp_pos = num.matlib.repmat(lankle_pos[-1, :], len(tmp_time), 1)
tmp_vel = num.matlib.repmat(lankle_vel[-1, :], len(tmp_time), 1)
lankle_pos = num.vstack((lankle_pos, tmp_pos))
lankle_vel = num.vstack((lankle_vel, tmp_vel))
lankle_time = num.append(lankle_time, tmp_time)
# zmp
tmp_time = num.arange(zmp_time[-1], longest_time, self.dt)
tmp_zmp = num.matlib.repmat(zmp_ref[-1, :], len(tmp_time), 1)
zmp_ref = num.vstack((zmp_ref, tmp_zmp))
zmp_time = num.append(zmp_time, tmp_time)
        # Add a preparation period of Tcycle/2 before the first step
time_prepare = num.arange(0.0, self.Tcycle / 2 - self.dt, self.dt)
len_time_prepare = len(time_prepare)
# Left ankle
tmp_pos = num.matlib.repmat(lankle_pos[0, :], len_time_prepare, 1)
tmp_vel = num.matlib.repmat(lankle_vel[0, :], len_time_prepare, 1)
lankle_pos = num.vstack((tmp_pos, lankle_pos))
lankle_vel = num.vstack((tmp_vel, lankle_vel))
lankle_time = num.append(time_prepare, self.Tcycle / 2 + lankle_time)
# Right ankle
tmp_pos = num.matlib.repmat(rankle_pos[0, :], len_time_prepare, 1)
tmp_vel = num.matlib.repmat(rankle_vel[0, :], len_time_prepare, 1)
rankle_pos = num.vstack((tmp_pos, rankle_pos))
rankle_vel = num.vstack((tmp_vel, rankle_vel))
rankle_time = num.append(time_prepare, self.Tcycle / 2 + rankle_time)
# zmp ref
tmp_zmp = num.matlib.repmat(zmp_ref[0, :], len_time_prepare, 1)
zmp_ref = num.vstack((tmp_zmp, zmp_ref))
zmp_time = num.append(time_prepare, self.Tcycle / 2 + zmp_time)
####################################################################
# OK!
# now, remove the duplicate data in time series
####################################################################
# Left ankle: find and remove the duplicate time element
duplicate_idx = None
for index in range(len(lankle_time) - 1):
if lankle_time[index] == lankle_time[index + 1]:
if duplicate_idx is None:
duplicate_idx = index
else:
duplicate_idx = num.append(duplicate_idx, index)
lankle_pos = num.delete(lankle_pos, duplicate_idx, axis=0)
lankle_vel = num.delete(lankle_vel, duplicate_idx, axis=0)
lankle_time = num.delete(lankle_time, duplicate_idx)
# Right ankle: find and remove the duplicate time element
duplicate_idx = None
for index in range(len(rankle_time) - 1):
if rankle_time[index] == rankle_time[index + 1]:
if duplicate_idx is None:
duplicate_idx = index
else:
duplicate_idx = num.append(duplicate_idx, index)
rankle_pos = num.delete(rankle_pos, duplicate_idx, axis=0)
rankle_vel = num.delete(rankle_vel, duplicate_idx, axis=0)
rankle_time = num.delete(rankle_time, duplicate_idx)
# zmp reference: find and remove the duplicate time element
duplicate_idx = None
for index in range(len(zmp_time) - 1):
if zmp_time[index] == zmp_time[index + 1]:
if duplicate_idx is None:
duplicate_idx = index
else:
duplicate_idx = num.append(duplicate_idx, index)
zmp_ref = num.delete(zmp_ref, duplicate_idx, axis=0)
zmp_time = num.delete(zmp_time, duplicate_idx)
# Using cart-table model to generate CoM and ZPM
com_pos, com_vel, zmp_out = self.cart_table(zmp_ref, z_com, zmp_time[-1], preview_time)
return zmp_ref, zmp_time, rankle_pos, rankle_vel, lankle_pos, lankle_vel, com_pos, com_vel, zmp_out
def cart_table(self, zmp_ref, z_com, cal_time, preview_time):
"""
        Function calculating the CoM trajectory using the cart-table model
:param zmp_ref:
:param z_com:
:param cal_time:
:param preview_time:
:return:
"""
dt = self.dt
g = -9.810
pre_time = num.arange(0.0, preview_time + self.dt, self.dt)
pre_len = len(pre_time)
insert_x = num.matlib.repmat(zmp_ref[-1, 0], pre_len, 1)
insert_y = num.matlib.repmat(zmp_ref[-1, 1], pre_len, 1)
foot_x = num.append(zmp_ref[:, 0], insert_x)
foot_y = num.append(zmp_ref[:, 1], insert_y)
mat_a = num.array([[1, dt, dt**2],
[0, 1, dt],
[0, 0, 1]])
mat_b = num.array([[dt**3/6.0],
[dt**2/2.0],
[dt]])
mat_c = num.array([1, 0, z_com/g])
mat_d = num.array([0])
# Error system
zero = num.zeros((3, 1))
phi_r0 = num.append(1.0, num.dot(-mat_c, mat_a))
phi_r1 = num.hstack((zero, mat_a))
mat_phi = num.vstack((phi_r0, phi_r1))
mat_g = num.vstack((num.dot(-mat_c, mat_b), mat_b))
mat_gr = num.vstack(([1, zero]))
Q = num.zeros((4, 4))
Q[0, 0] = 10.0**8
H = 1.0
        # Solve the discrete-time LQR problem:
        #   x[k+1] = A x[k] + B u[k]
        #   cost = sum x[k].T*Q*x[k] + u[k].T*H*u[k]
        # First, solve the discrete algebraic Riccati equation for P
        P = scipy.linalg.solve_discrete_are(mat_phi, mat_g, Q, H)
# K=-(H+mat_g'*P*mat_g)^(-1)*mat_g'*P*mat_phi;
K = -num.dot(num.dot(num.dot(1.0/(H + num.dot(num.dot(mat_g.T, P), mat_g)), mat_g.T), P), mat_phi)
# xi = (eye(4,4)-mat_g*(H+mat_g'*P*mat_g)^(-1)*mat_g'*P)*mat_phi;
xi = num.dot((num.eye(4) - num.dot(num.dot(mat_g, 1.0/(H + num.dot(num.dot(mat_g.T, P), mat_g))*mat_g.T), P)), mat_phi)
# Now, solving the preview control problem
x = num.array([[0], [0], [0]])
y = num.array([[0], [0], [0]])
xp = x
yp = y
ux = 0
uy = 0
time = num.arange(0.0, cal_time, self.dt, dtype=float)
len_time = len(time)
com_pos = None
com_vel = None
zmp_out = None
# Loop start
for index in range(len_time):
            p_x = num.dot(mat_c, x)  # output ZMP in x direction
            p_y = num.dot(mat_c, y)  # output ZMP in y direction
            e_x = foot_x[index] - p_x  # error between target and output ZMP (x)
            e_y = foot_y[index] - p_y  # error between target and output ZMP (y)
X = num.vstack((e_x, x - xp))
Y = num.vstack((e_y, y - yp))
xp = x
yp = y
# x direction
du_x = num.dot(K, X)
t_x = num.arange(time[index], time[index] + preview_time, self.dt, dtype=float)
for idx in range(1, len(t_x)):
if foot_x[index + idx] - foot_x[index + idx - 1] != 0.0:
# gain_idx = -(H+G'*P*G)^(-1)*G'*(xi')^(j-1)*P*GR;
gain_idx = -num.dot(num.dot(num.dot((1.0/(H + num.dot(num.dot(mat_g.T, P), mat_g)))*mat_g.T, num.linalg.matrix_power(xi.T, idx - 1)), P), mat_gr)
# gain_idx = gain_idx.ravel()
du_x = du_x + gain_idx*(foot_x[index + idx] - foot_x[index + idx - 1])
ux = ux + du_x
# y direction
du_y = num.dot(K, Y)
t_y = num.arange(time[index], time[index] + preview_time, self.dt, dtype=float)
for idx in range(1, len(t_y)):
if foot_y[index + idx] - foot_y[index + idx - 1] != 0.0:
# = -(H+G'*P*G)^(-1)*G'*(xi')^(j-1)*P*GR;
gain_idx = -num.dot(num.dot(num.dot((1.0 / (H + num.dot(num.dot(mat_g.T, P), mat_g))) * mat_g.T,
num.linalg.matrix_power(xi.T, idx - 1)), P), mat_gr)
du_y = du_y + gain_idx * (foot_y[index + idx] - foot_y[index + idx - 1])
uy = uy + du_y
x = num.dot(mat_a, x) + num.dot(mat_b, ux)
y = num.dot(mat_a, y) + num.dot(mat_b, uy)
if com_pos is None:
com_pos = num.array([x[0], y[0], z_com])
com_vel = num.array([x[1], y[1], 0.0])
zmp_out = num.append(p_x, p_y)
else:
com_pos = num.vstack((com_pos, [x[0], y[0], z_com]))
com_vel = num.vstack((com_vel, [x[1], y[1], 0]))
zmp_out = num.vstack((zmp_out, num.append(p_x, p_y)))
return com_pos, com_vel, zmp_out
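# --- Editor's sketch (not part of the original file) ------------------------
# A minimal, self-contained illustration of the LQR step inside cart_table():
# build the augmented error system of the cart-table model and solve the
# discrete algebraic Riccati equation for the preview-control feedback gain.
# The numbers (dt, z_com) are illustrative assumptions, not values from above.
import numpy as num
import scipy.linalg

dt, z_com, g = 0.01, 0.6, 9.81
A = num.array([[1, dt, dt**2 / 2.0], [0, 1, dt], [0, 0, 1]])
B = num.array([[dt**3 / 6.0], [dt**2 / 2.0], [dt]])
C = num.array([[1.0, 0.0, -z_com / g]])

# augmented state: [ZMP tracking error; state increment]
Phi = num.vstack((num.hstack(([[1.0]], -num.dot(C, A))),
                  num.hstack((num.zeros((3, 1)), A))))
G = num.vstack((-num.dot(C, B), B))
Q = num.diag([1e8, 0.0, 0.0, 0.0])
H = num.array([[1.0]])

P = scipy.linalg.solve_discrete_are(Phi, G, Q, H)
K = -num.linalg.solve(H + num.dot(G.T, num.dot(P, G)),
                      num.dot(G.T, num.dot(P, Phi)))  # 1x4 feedback gain
print(K)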
|
nvtienanh/PyUXASim
|
gait.py
|
Python
|
gpl-3.0
| 25,979
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name' : 'Sales & Purchases Receipts',
'version' : '1.0',
'summary': 'Manage your debts and credits thanks to simple sale/purchase receipts',
'description': """
This module allows you to record sales and purchase receipts. Receipts are useful when payment is made directly: instead of encoding an invoice and a payment, the receipt alone is enough.
""",
'category': 'Accounting',
'sequence': 20,
'depends' : ['account'],
'demo' : [],
'data' : [
'security/ir.model.access.csv',
'views/account_voucher_views.xml',
'security/account_voucher_security.xml',
'data/account_voucher_data.xml',
],
'auto_install': False,
'installable': True,
}
|
t3dev/odoo
|
addons/account_voucher/__manifest__.py
|
Python
|
gpl-3.0
| 842
|
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015-2018 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import unittest.mock
from testtools.matchers import Equals
import snapcraft
from tests import unit
class TestBasePlugin(unit.TestCase):
def setUp(self):
super().setUp()
self.project_options = snapcraft.ProjectOptions()
def test_parallel_build_count_returns_1_when_disabled(self):
options = unit.MockOptions(disable_parallel=True)
plugin = snapcraft.BasePlugin('test_plugin', options,
self.project_options)
self.assertThat(plugin.parallel_build_count, Equals(1))
def test_parallel_build_count_returns_build_count_from_project(self):
options = unit.MockOptions(disable_parallel=False)
plugin = snapcraft.BasePlugin('test_plugin', options,
self.project_options)
        # the patch must be active (used as a context manager) for the
        # attribute to actually be replaced during the assertion
        with unittest.mock.patch.object(
                self.project_options, 'parallel_build_count', 2):
            self.assertThat(plugin.parallel_build_count, Equals(2))
def test_part_name_with_forward_slash_is_one_directory(self):
plugin = snapcraft.BasePlugin('test/part', options=None)
os.makedirs(plugin.sourcedir)
self.assertIn('test\N{BIG SOLIDUS}part', os.listdir('parts'))
@unittest.mock.patch('snapcraft.internal.common.run')
def test_run_without_specifying_cwd(self, mock_run):
plugin = snapcraft.BasePlugin('test/part', options=None)
plugin.run(['ls'])
mock_run.assert_called_once_with(['ls'], cwd=plugin.builddir)
@unittest.mock.patch('snapcraft.internal.common.run')
def test_run_specifying_a_cwd(self, mock_run):
plugin = snapcraft.BasePlugin('test/part', options=None)
plugin.run(['ls'], cwd=plugin.sourcedir)
mock_run.assert_called_once_with(['ls'], cwd=plugin.sourcedir)
@unittest.mock.patch('snapcraft.internal.common.run_output')
def test_run_output_without_specifying_cwd(self, mock_run):
plugin = snapcraft.BasePlugin('test/part', options=None)
plugin.run_output(['ls'])
mock_run.assert_called_once_with(['ls'], cwd=plugin.builddir)
@unittest.mock.patch('snapcraft.internal.common.run_output')
def test_run_output_specifying_a_cwd(self, mock_run):
plugin = snapcraft.BasePlugin('test/part', options=None)
plugin.run_output(['ls'], cwd=plugin.sourcedir)
mock_run.assert_called_once_with(['ls'], cwd=plugin.sourcedir)
|
elopio/snapcraft
|
tests/unit/plugins/test_base.py
|
Python
|
gpl-3.0
| 3,087
|
import typing
import string
import discord
from discord.ext import commands, vbu
class UserCommands(vbu.Cog):
EIGHT_BALL_ANSWERS = (
"It is certain.",
"It is decidedly so.",
"Without a doubt.",
"Yes – definitely.",
"You may rely on it.",
"As I see it, yes.",
"Most likely.",
"Outlook good.",
"Yes.",
"Signs point to yes.",
"Reply hazy, try again.",
"Ask again later.",
"Better not tell you now.",
"Cannot predict now.",
"Concentrate and ask again.",
"Don't count on it.",
"My reply is no.",
"My sources say no.",
"Outlook not so good.",
"Very doubtful. ",
)
@commands.context_command(name="Get ship percentage")
async def _context_menu_ship(self, ctx: vbu.SlashContext, user: discord.Member):
command = self.ship
await command.can_run(ctx)
await ctx.invoke(command, user)
@commands.command()
@commands.bot_has_permissions(send_messages=True)
    async def ship(self, ctx: vbu.Context, user: discord.Member, user2: typing.Optional[discord.Member] = None):
"""
Gives you a ship percentage between two users.
"""
# Fix attrs
if user2 is None:
user, user2 = ctx.author, user
# Add response for yourself
if user == user2:
return await ctx.send("-.-")
# Get percentage
async with vbu.Database() as db:
rows = await db("SELECT * FROM ship_percentages WHERE user_id_1=ANY($1::BIGINT[]) AND user_id_2=ANY($1::BIGINT[])", [user.id, user2.id])
if rows and rows[0]['percentage']:
percentage = rows[0]['percentage'] / 100
else:
percentage = ((user.id + user2.id + 4500) % 10001) / 100
return await ctx.send(f"{user.mention} \N{REVOLVING HEARTS} **{percentage:.2f}%** \N{REVOLVING HEARTS} {user2.mention}", allowed_mentions=discord.AllowedMentions(users=False))
@commands.command(add_slash_command=False)
@vbu.checks.is_bot_support()
@commands.bot_has_permissions(add_reactions=True)
    async def addship(self, ctx: vbu.Context, user1: discord.Member, user2: typing.Optional[discord.Member] = None, percentage: float = 0):
"""
Add a custom ship percentage.
"""
user2 = user2 or ctx.author
percentage = max([min([percentage * 100, 10_000]), -10_000])
async with vbu.Database() as db:
await db(
"""INSERT INTO ship_percentages (user_id_1, user_id_2, percentage) VALUES ($1, $2, $3)
ON CONFLICT (user_id_1, user_id_2) DO UPDATE SET percentage=excluded.percentage""",
*sorted([user1.id, user2.id]), percentage,
)
await ctx.okay()
@commands.command(aliases=['8ball'])
@commands.bot_has_permissions(send_messages=True)
async def eightball(self, ctx: vbu.Context, *, message: str):
"""
Gives you an 8ball answer.
"""
index = sum([ord(i) for i in message.lower().strip(string.punctuation + string.whitespace)]) % len(self.EIGHT_BALL_ANSWERS)
return await ctx.send(self.EIGHT_BALL_ANSWERS[index])
def setup(bot: vbu.Bot):
x = UserCommands(bot)
bot.add_cog(x)
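# --- Editor's sketch (not part of the original cog) --------------------------
# The fallback ship percentage above is a pure function of the two user IDs,
# so the same pair always gets the same value, in either order. Standalone:
def ship_percentage(id1: int, id2: int) -> float:
    """Deterministic pseudo-percentage in [0, 100] for a pair of user IDs."""
    return ((id1 + id2 + 4500) % 10001) / 100

assert ship_percentage(1, 2) == ship_percentage(2, 1)  # symmetric in the pair
print(f"{ship_percentage(123456789, 987654321):.2f}%")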
|
4Kaylum/SkyBot
|
cogs/user_commands.py
|
Python
|
gpl-3.0
| 3,285
|
# -*- coding: utf-8 -*-
"""
DEMO_002 OPTION DICTIONARY
Which options can one set?
This demo explains all fields of the options dictionary, i.e. which options
you can set for the fitting process as a user.
"""
from numpy import array, exp
from psignifit.psignifit import psignifit
# to have some data we use the data from demo_001
data = array([[0.0010, 45.0000, 90.0000],
[0.0015, 50.0000, 90.0000],
[0.0020, 44.0000, 90.0000],
[0.0025, 44.0000, 90.0000],
[0.0030, 52.0000, 90.0000],
[0.0035, 53.0000, 90.0000],
[0.0040, 62.0000, 90.0000],
[0.0045, 64.0000, 90.0000],
[0.0050, 76.0000, 90.0000],
[0.0060, 79.0000, 90.0000],
[0.0070, 88.0000, 90.0000],
[0.0080, 90.0000, 90.0000],
[0.0100, 90.0000, 90.0000]])
# initializing options dictionary
options = dict()
# now, or at any later time, you can run a fit with this command.
res = psignifit(data, options)
""" list of options fields """
''' Here we list all fields of the options dictionary, which can be accessed by
options['fieldname'] = default value
Afterwards follows some explanation and the allowed values. '''
""" options['sigmoidName'] = 'norm' """
''' This sets the type of sigmoid you fit to your data. '''
# The default value 'norm' fits a cumulative Gaussian to your data.
options['sigmoidName'] = 'norm'
#another standard alternative is the logistic function
options['sigmoidName'] = 'logistic'
# For data on a log scale you may want to fit a log-normal distribution or a
# Weibull, which you invoke with:
options['sigmoidName'] = 'logn'
# or
options['sigmoidName'] = 'weibull'
# We also included the gumbel and reversed gumbel functions for asymmetric
# psychometric functions. The gumbel has a longer lower tail the reversed
# gumbel a longer upper tail.
options['sigmoidName'] = 'gumbel'
# or
options['sigmoidName'] = 'rgumbel'
# for a heavy tailed distribution use
options['sigmoidName'] = 'tdist'
""" options['sigmoidHandle'] """
''' Here you may provide a handle to your own sigmoid which takes two
parameters as input and hands back a function value. This should be
vectorized or even a formula.
However, this is usually obtained from options['sigmoidName']
This is needed if you want to use your own sigmoid, which is not built in '''
""" options['expType'] = 'YesNo' """
''' This sets which parameters you want to be free and which you fix and to
which values, for standard experiment types. '''
# 'YesNo', default sets all parameters free, which is suitable for a standard
# yes/no paradigm.
options['expType'] = 'YesNo'
# '2AFC', fixes the lower asymptote to .5 and fits the rest, for 2
# alternative forced choice experiments.
options['expType'] = '2AFC'
# 'nAFC', fixes the lower asymptote to 1/n and fits the rest. For this type
# of experiment you MUST also provide options['expN'] the number of
# alternatives.
# As an example with 3 alternatives:
options['expType'] = 'nAFC'
options['expN'] = 3
""" options['estimateType'] = 'mean' """
''' How you want to estimate your fit from the posterior '''
# 'MAP' The MAP estimator is the maximum a posteriori computed from
# the posterior.
options['estimateType'] = 'MAP'
# 'mean' The posterior mean, i.e. the expected value of the posterior.
# In a Bayesian sense this is the more suitable estimate.
options['estimateType'] = 'mean'
""" options['stepN'] = [40,40,20,20,20]
options['mbStepN'] = [25,20,10,10,20] """
''' This sets the number of grid points on each dimension in the final
fitting (stepN) and in the moving of borders (mbStepN).
The order is
[threshold,width,upper asymptote,lower asymptote,variance scaling]
You may change this if you need more accurate estimates on the sparsely
sampled parameters or if you want to play with them to save time '''
# for example to get an even more exact estimate on the
# lapse rate/upper asymptote plug in
options['stepN']=[40,40,50,20,20]
# now the lapse rate is sampled at 50 places giving you a much more exact
# and smooth curve for comparisons.
""" options['confP'] = .95 """
''' The confidence level for the computed confidence intervals.
This may be set to any number strictly between 0 and 1. '''
# for example to get 99% confidence intervals try
options['confP'] = .99
# You may specify a vector as well. If you do the conf_intervals in the
# result will be a 5x2xN array containing the values for the different
# confidence levels in the 3rd dimension.
options['confP'] = [.95,.9,.68,.5]
# will return 4 confidence intervals for each parameter for example.
""" options['threshPC'] = .5 """
''' Which percent correct corresponds to the threshold?
Given in percent correct on the unscaled sigmoid (ranging from 0 to 1). '''
# For example to define the threshold as 90% correct try:
options['threshPC'] = .9
""" options['CImethod'] ='stripes' """
''' This sets how the confidence intervals are computed in getConfRegion
possible variants are:
'project' -> project the confidence region on each axis
'stripes' -> find a threshold with (1-alpha) above it
This will disregard intervals of low posterior probability and then move
in from the sides to adjust the exact CI size.
This can handle borders and asymmetric distributions slightly better, but
will introduce slight jumps of the confidence interval when confp is
adjusted depending on when the gridpoints get too small posterior
probability.
'percentiles' -> find alpha/2 and 1-alpha/2 percentiles
(alpha = 1-confP)
cuts at the estimated percentiles-> always tries to place alpha/2
posterior probability above and below the credible interval.
This has no jumping but will exclude border values even when they have
the highest posterior. Additionally it will not choose the area of
highest posterior density if the distribution is skewed. '''
""" options['priors'] = getStandardPriors() """
''' This field contains a list of functions, one per parameter, which define
the priors for each parameter.
If you want to set your priors manually, here is the place for it.
For details on how to change these refer to
https://github.com/wichmann-lab/psignifit/wiki/Priors '''
#TODO change to the Python repo
""" options['betaPrior'] = 20 """
''' this sets the strength of the Prior in favor of a binomial observer.
Larger values correspond to a stronger prior. We choose this value after
a rather large number of simulations. Refer to the paper to learn more
about this '''
""" options['nblocks'] = inf """
""" options['poolMaxGap'] = inf """
""" options['poolMaxLength'] = 50 """
""" options['poolxTol'] = 0 """
''' these options set how your data is pooled into blocks. Your data is only
pooled if your data matrix has more than nblocks lines. Then we pool
together a maximum of poolMaxLength trials, which are separated by a
maximum of poolMaxGap trials of other stimulus levels. If you want you may
specify a tolerance in stimulus level to pool trials, but by default we
only pool trials with exactly the same stimulus level. '''
""" options['instantPlot'] = 0 """
''' A boolean to control whether you immediately get 2 standard plots of your
fit. Turn to 1 to see the effect. '''
options['instantPlot'] = 1
""" options['borders'] """
''' In this field you may provide your own bounds for the parameters.
This should be a 5x2 matrix of start and end of the range for the 5
parameters. (threshold,width,upper asymptote,lower asymptote,variance
scale) '''
# For example this would set the borders to
# threshold between 1 and 2
# width between .1 and 5
# a fixed lapse rate of .05
# a fixed lower asymptote of .5
# a variance scale between exp(-20) and .2
options['borders'] = [[1, 2], [.1, 5], [.05, .05], [.5, .5], [exp(-20), .2]]
''' NOTE: By this you artificially exclude all values outside this range. Only
exclude parameter values which are truly impossible! '''
""" options['setBordersType'] = 0 """
''' The method to set the outer borders of the grid; you find its use in
setBorders.
It defaults to reasonable estimates for the threshold and width parameters: '''
options['setBordersType'] = 0
# To set the borders for arbitrary parameterizations change to
options['setBordersType'] = 1
#But also see demo_003 on how to implement other parameterizations as all
# build in functions are parameterized by threshold and width
""" options['maxBorderValue'] = exp(-10) """
''' Parts of the grid which produce marginal values below this are considered
0 and are excluded from the calculation in moveBorders.m
it should be a very small value and at least smaller than 1/(max(stepN)) '''
# This for example would exclude fewer values and more conservative
# movement of the borders:
options['maxBorderValue'] = exp(-20)
""" options.moveBorders = 1 """
''' Toggles the movement of borders by moveBorders
Usually this is good to concentrate on the right area in the parameter
space. '''
options['moveBorders'] = 1
# If you set
options['moveBorders'] = 0
# your posterior will always use the initial setting for the borders.
# This is useful if you set the borders by hand and do not want
# psignifit to move them afterwards.
""" options['dynamicGrid'] = 0 """
''' Toggles the usage of a dynamic/adaptive grid.
there was hope for a more exact estimate by this, but although the curves
look smoother the confidence intervals were not more exact. Thus this is
deactivated by default. '''
options['dynamicGrid'] = 1
options['dynamicGrid'] = 0
# How many Likelihood evaluations are done per dimension to set the
# adaptive grid. Should be a relatively large number.
options['GridSetEval'] = 10000
# Only used with dynamic grid,--> by default not at all
""" options['UniformWeight'] = 0.5 """
''' How many times the average is added to each position while setting the
adaptive grid. You may increase this number to get a more equally sampled
grid or decrease it to get an even stronger focus of the sampling on the
peak.
When you increase this value very much, try to set options['dynamicGrid'] = 0,
which produces an equal-stepsize grid right away. '''
# As an example: Will produce a more focused grid which leaves the borders
# very weakly sampled.
options['UniformWeight'] = 0.01000
# Only used with dynamic grid,-> by default not at all
""" options['widthalpha'] = .05 """
''' This changes how the width of a psychometric function is defined
width = psi^(-1)(1-alpha) - psi^(-1)(alpha)
where psi^(-1) is the inverse of the sigmoid function.
widthalpha must be strictly between 0 and .5 '''
# Thus, for example, this would use the interval from .1 to .9 as the
# width:
options['widthalpha'] = .1
""" options.logspace: = 0 """
''' this is triggered when you fit lognormal or Weibull functions, which are
fitted in logspace. This is an internal variable which is used to pass
this to all functions. It is of no interest for a user. '''
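''' --- Editor's sketch (not part of the original demo) ---
Putting several of the options above together into a single minimal fit.
The data array is the one defined at the top of this demo; the specific
option values are illustrative choices, not recommendations. '''
opts = dict()
opts['sigmoidName'] = 'norm'   # cumulative Gaussian
opts['expType'] = '2AFC'       # lower asymptote fixed at .5
opts['estimateType'] = 'mean'  # posterior mean as the point estimate
opts['confP'] = .95            # 95% confidence intervals
res_example = psignifit(data, opts)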
|
Visdoom/psignifit-4.0
|
demo_002.py
|
Python
|
gpl-3.0
| 11,305
|
"""
Filters
-------
`FilterQuery` objects save the filter details and are stored pickled in the `Filter` class as
`Filter.query` in :doc:`models`.
"""
from __future__ import with_statement
# python imports
import re
from os import path
# library imports
from sqlalchemy import and_, not_
# local imports
from webscavator.utils.utils import session, ROOT_DIR
from webscavator.model.models import *
def regexp(expr, item):
"""
Enables regular expressions for filter queries.
"""
r = re.compile(expr)
return r.match(item) is not None
class FilterQuery(object):
"""
Stores the tables, attributes, operations and values of a query, e.g.
::
params = (Entry, title, Is, 'Test')
which equates to `WHERE Entry.title = 'Test'` in SQL.
`FilterQuery` objects get stored inside a `Filter` object in :doc:`models`. The
object stored inside the `Filter` object gets pickled thereby preserving the
filter information.
"""
classes = {'Browser': Browser,
'URL Parts': URL,
'Entry': Entry,
'Web Files': Group,
'Search Terms': SearchTerms
}
def __init__(self):
self.params = []
def __repr__(self):
return "[filter query object]"
def add_element(self, cls, attr, func, val, val_list):
"""
Add a new filter line to the `FilterQuery` object `params` list
attribute.
"""
self.params.append((cls, attr, func, val, val_list))
def query(self):
"""
Called by `Case.filter_queries()` in :doc:`models` to construct the filter
clause. Returns an ANDed list of filters e.g. `Entry.title = "test" AND
Entry.url <> "http://example.org"`
"""
conn = session.bind.connect()
conn.connection.create_function("regexp", 2, regexp)
ands = []
for cls, attr, func, val, val_list in self.params:
table = self.classes[cls]
opts = table.filter_options
column = getattr(table, attr)
if val_list is not None:
clause = self._operate(column, func, val_list)
else:
clause = self._operate(column, func, val)
ands.append(column != None)
ands.append(clause)
return and_(*ands) if ands else None
def _operate(self, col, op, val):
"""
Returns the correct filter clause given the column to filter on, the value
and the operator.
.. Note::
**ToDo**: 'Contains fuzzy' and 'Periodical every' are not yet functional and return
`None`.
"""
if op == "Is":
return col == val
elif op == "Is not":
return col != val
elif op == "Matches regular expression":
return col.op('REGEXP')(val)
elif op == "Contains fuzzy":
return None
elif op == "Contains":
return col.like('%' + val + '%')
elif op == "Greater than":
return col > val
elif op == "Less than":
return col < val
elif op == "Periodical every":
return None
elif op == "Is in list":
return col.in_(self._getList(val))
elif op == "Is not in list":
return not_(col.in_(self._getList(val)))
def _getList(self, name):
"""
Given a file called `name`, return a list of the lines in the file.
Used when the filter is value IN [list]
"""
with open(path.join(ROOT_DIR, '..','case lists',name),'r') as file:
return [unicode(line.strip(), 'utf-8') for line in file.readlines()]
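# --- Editor's sketch (not part of the original module) ----------------------
# Minimal usage of FilterQuery, following its class docstring: each
# add_element() call becomes one ANDed clause in query(). The values are
# illustrative; query() needs the module's live `session` bound to SQLite.
def _example_filter_query():
    fq = FilterQuery()
    fq.add_element('Entry', 'title', 'Is', u'Test', None)
    fq.add_element('Entry', 'url', 'Is not', u'http://example.org', None)
    # -> WHERE Entry.title = 'Test' AND Entry.url <> 'http://example.org'
    return fq.query()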
|
lowmanio/Webscavator
|
webscavator/webscavator/model/filters.py
|
Python
|
gpl-3.0
| 4,022
|
import pytest
import os
def source_root():
path_list = os.path.dirname(os.path.abspath(__file__)).split("/")
while len(path_list) > 0:
git_path = os.path.join(os.sep, "/".join(path_list), ".git")
if os.path.isdir(git_path):
return os.path.join(os.sep, *path_list)
path_list.pop()
raise RuntimeError('Cannot find the source folder')
def has_equinor_test_data():
return os.path.isdir(os.path.join(source_root(), "test-data", "Equinor"))
def pytest_runtest_setup(item):
if item.get_closest_marker("equinor_test") and not has_equinor_test_data():
pytest.skip("Test requires Equinor data")
@pytest.fixture(autouse=True)
def env_save():
environment_pre = [(key, val) for key, val in os.environ.items() if key != "PYTEST_CURRENT_TEST"]
yield
environment_post = [(key, val) for key, val in os.environ.items() if key != "PYTEST_CURRENT_TEST"]
if set(environment_pre) != set(environment_post):
raise EnvironmentError("Your environment has changed after that test, please reset")
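# --- Editor's sketch (not part of the original conftest) ---------------------
# The comparison env_save performs, in isolation: snapshot os.environ as a set
# of (key, value) pairs before and after a test, and flag any difference.
def _env_snapshot():
    return {(k, v) for k, v in os.environ.items() if k != "PYTEST_CURRENT_TEST"}

def _environment_leaked(snapshot_before):
    """True if os.environ no longer matches the pre-test snapshot."""
    return _env_snapshot() != snapshot_before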
|
Statoil/libres
|
python/tests/conftest.py
|
Python
|
gpl-3.0
| 1,113
|
# vim: ts=4:sw=4:expandtab
# -*- coding: UTF-8 -*-
# BleachBit
# Copyright (C) 2008-2017 Andrew Ziem
# https://www.bleachbit.org
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Actions that perform cleaning
"""
from __future__ import absolute_import, print_function
from bleachbit import Command, FileUtilities, General, Special
from bleachbit import _, FSE, expanduser, expandvars
import glob
import logging
import os
import re
import types
if 'posix' == os.name:
re_flags = 0
from bleachbit import Unix
else:
re_flags = re.IGNORECASE
#
# Plugin framework
# http://martyalchin.com/2008/jan/10/simple-plugin-framework/
#
class PluginMount(type):
"""A simple plugin framework"""
def __init__(cls, name, bases, attrs):
if not hasattr(cls, 'plugins'):
cls.plugins = []
else:
cls.plugins.append(cls)
class ActionProvider:
"""Abstract base class for performing individual cleaning actions"""
__metaclass__ = PluginMount
def __init__(self, action_node):
"""Create ActionProvider from CleanerML <action>"""
pass
def get_deep_scan(self):
"""Return a dictionary used to construct a deep scan"""
raise StopIteration
def get_commands(self):
"""Yield each command (which can be previewed or executed)"""
pass
#
# base class
#
class FileActionProvider(ActionProvider):
"""Base class for providers which work on individual files"""
action_key = '_file'
def __init__(self, action_element):
"""Initialize file search"""
self.regex = action_element.getAttribute('regex')
assert(isinstance(self.regex, (str, unicode, types.NoneType)))
self.nregex = action_element.getAttribute('nregex')
assert(isinstance(self.nregex, (str, unicode, types.NoneType)))
self.wholeregex = action_element.getAttribute('wholeregex')
assert(isinstance(self.wholeregex, (str, unicode, types.NoneType)))
self.nwholeregex = action_element.getAttribute('nwholeregex')
assert(isinstance(self.nwholeregex, (str, unicode, types.NoneType)))
self.search = action_element.getAttribute('search')
self.object_type = action_element.getAttribute('type')
self.path = expanduser(expandvars(action_element.getAttribute('path')))
if 'nt' == os.name and self.path:
# convert forward slash to backslash for compatibility with getsize()
# and for display. Do not convert an empty path, or it will become
# the current directory (.).
self.path = os.path.normpath(self.path)
self.ds = {}
if 'deep' == self.search:
self.ds['regex'] = self.regex
self.ds['nregex'] = self.nregex
self.ds['cache'] = General.boolstr_to_bool(
action_element.getAttribute('cache'))
self.ds['command'] = action_element.getAttribute('command')
self.ds['path'] = self.path
if not any([self.object_type, self.regex, self.nregex,
self.wholeregex, self.nwholeregex]):
# If the filter is not needed, bypass it for speed.
self.get_paths = self._get_paths
    def get_deep_scan(self):
        if 0 == len(self.ds):
            # end the generator (raising StopIteration inside a generator
            # is an error under PEP 479)
            return
        yield self.ds
def path_filter(self, path):
"""Process the filters: regex, nregex, type
If a filter is defined and it fails to match, this function
returns False. Otherwise, this function returns True."""
if self.regex:
if not self.regex_c.search(os.path.basename(path)):
return False
if self.nregex:
if self.nregex_c.search(os.path.basename(path)):
return False
if self.wholeregex:
if not self.wholeregex_c.search(path):
return False
if self.nwholeregex:
if self.nwholeregex_c.search(path):
return False
if self.object_type:
if 'f' == self.object_type and not os.path.isfile(path):
return False
elif 'd' == self.object_type and not os.path.isdir(path):
return False
return True
def get_paths(self):
import itertools
for f in itertools.ifilter(self.path_filter, self._get_paths()):
yield f
def _get_paths(self):
"""Return a filtered list of files"""
def get_file(path):
if os.path.lexists(path):
yield path
def get_walk_all(top):
for expanded in glob.iglob(top):
for path in FileUtilities.children_in_directory(
expanded, True):
yield path
def get_walk_files(top):
for expanded in glob.iglob(top):
for path in FileUtilities.children_in_directory(expanded, False):
yield path
        if 'deep' == self.search:
            return  # deep scans yield no paths here; end the generator
elif 'file' == self.search:
func = get_file
elif 'glob' == self.search:
func = glob.iglob
elif 'walk.all' == self.search:
func = get_walk_all
elif 'walk.files' == self.search:
func = get_walk_files
else:
raise RuntimeError("invalid search='%s'" % self.search)
if self.regex:
self.regex_c = re.compile(self.regex, re_flags)
if self.nregex:
self.nregex_c = re.compile(self.nregex, re_flags)
if self.wholeregex:
self.wholeregex_c = re.compile(self.wholeregex, re_flags)
if self.nwholeregex:
self.nwholeregex_c = re.compile(self.nwholeregex, re_flags)
for path in func(self.path):
yield path
def get_commands(self):
raise NotImplementedError('not implemented')
#
# Action providers
#
class AptAutoclean(ActionProvider):
"""Action to run 'apt-get autoclean'"""
action_key = 'apt.autoclean'
def __init__(self, action_element):
pass
def get_commands(self):
# Checking executable allows auto-hide to work for non-APT systems
if FileUtilities.exe_exists('apt-get'):
yield Command.Function(None,
Unix.apt_autoclean,
'apt-get autoclean')
class AptAutoremove(ActionProvider):
"""Action to run 'apt-get autoremove'"""
action_key = 'apt.autoremove'
def __init__(self, action_element):
pass
def get_commands(self):
# Checking executable allows auto-hide to work for non-APT systems
if FileUtilities.exe_exists('apt-get'):
yield Command.Function(None,
Unix.apt_autoremove,
'apt-get autoremove')
class ChromeAutofill(FileActionProvider):
"""Action to clean 'autofill' table in Google Chrome/Chromium"""
action_key = 'chrome.autofill'
def get_commands(self):
for path in self.get_paths():
yield Command.Function(
path,
Special.delete_chrome_autofill,
_('Clean file'))
class ChromeDatabases(FileActionProvider):
"""Action to clean Databases.db in Google Chrome/Chromium"""
action_key = 'chrome.databases_db'
def get_commands(self):
for path in self.get_paths():
yield Command.Function(
path,
Special.delete_chrome_databases_db,
_('Clean file'))
class ChromeFavicons(FileActionProvider):
"""Action to clean 'Favicons' file in Google Chrome/Chromium"""
action_key = 'chrome.favicons'
def get_commands(self):
for path in self.get_paths():
yield Command.Function(
path,
Special.delete_chrome_favicons,
_('Clean file'))
class ChromeHistory(FileActionProvider):
"""Action to clean 'History' file in Google Chrome/Chromium"""
action_key = 'chrome.history'
def get_commands(self):
for path in self.get_paths():
yield Command.Function(
path,
Special.delete_chrome_history,
_('Clean file'))
class ChromeKeywords(FileActionProvider):
"""Action to clean 'keywords' table in Google Chrome/Chromium"""
action_key = 'chrome.keywords'
def get_commands(self):
for path in self.get_paths():
yield Command.Function(
path,
Special.delete_chrome_keywords,
_('Clean file'))
class Delete(FileActionProvider):
"""Action to delete files"""
action_key = 'delete'
def get_commands(self):
for path in self.get_paths():
yield Command.Delete(path)
class Ini(FileActionProvider):
"""Action to clean .ini configuration files"""
action_key = 'ini'
def __init__(self, action_element):
FileActionProvider.__init__(self, action_element)
self.section = action_element.getAttribute('section')
self.parameter = action_element.getAttribute('parameter')
if self.parameter == "":
self.parameter = None
def get_commands(self):
for path in self.get_paths():
yield Command.Ini(path, self.section, self.parameter)
class Journald(ActionProvider):
"""Action to run 'journalctl --vacuum-time=1'"""
action_key = 'journald.clean'
def __init__(self, action_element):
pass
def get_commands(self):
if FileUtilities.exe_exists('journalctl'):
yield Command.Function(None, Unix.journald_clean, 'journalctl --vacuum-time=1')
class Json(FileActionProvider):
"""Action to clean JSON configuration files"""
action_key = 'json'
def __init__(self, action_element):
FileActionProvider.__init__(self, action_element)
self.address = action_element.getAttribute('address')
def get_commands(self):
for path in self.get_paths():
yield Command.Json(path, self.address)
class MozillaUrlHistory(FileActionProvider):
"""Action to clean Mozilla (Firefox) URL history in places.sqlite"""
action_key = 'mozilla_url_history'
def get_commands(self):
for path in self.get_paths():
yield Special.delete_mozilla_url_history(path)
class OfficeRegistryModifications(FileActionProvider):
"""Action to delete LibreOffice history"""
action_key = 'office_registrymodifications'
def get_commands(self):
for path in self.get_paths():
yield Command.Function(
path,
Special.delete_office_registrymodifications,
_('Clean'))
class Process(ActionProvider):
"""Action to run a process"""
action_key = 'process'
def __init__(self, action_element):
self.cmd = expandvars(action_element.getAttribute('cmd'))
# by default, wait
self.wait = True
wait = action_element.getAttribute('wait')
if wait and wait.lower()[0] in ('f', 'n'):
# false or no
self.wait = False
def get_commands(self):
def run_process():
try:
if self.wait:
args = self.cmd.split(' ')
(rc, stdout, stderr) = General.run_external(args)
else:
rc = 0 # unknown because we don't wait
from subprocess import Popen
Popen(self.cmd)
except Exception as e:
raise RuntimeError(
'Exception in external command\nCommand: %s\nError: %s' % (self.cmd, str(e)))
else:
if not 0 == rc:
logging.getLogger(__name__).warning('Command: %s\nReturn code: %d\nStdout: %s\nStderr: %s\n',
self.cmd, rc, stdout, stderr)
return 0
yield Command.Function(path=None, func=run_process, label=_("Run external command: %s") % self.cmd)
class Shred(FileActionProvider):
"""Action to shred files (override preference)"""
action_key = 'shred'
def get_commands(self):
for path in self.get_paths():
yield Command.Shred(path)
class SqliteVacuum(FileActionProvider):
"""Action to vacuum SQLite databases"""
action_key = 'sqlite.vacuum'
def get_commands(self):
for path in self.get_paths():
yield Command.Function(
path,
FileUtilities.vacuum_sqlite3,
# TRANSLATORS: Vacuum is a verb. The term is jargon
# from the SQLite database. Microsoft Access uses
# the term 'Compact Database' (which you may translate
# instead). Another synonym is 'defragment.'
_('Vacuum'))
class Truncate(FileActionProvider):
"""Action to truncate files"""
action_key = 'truncate'
def get_commands(self):
for path in self.get_paths():
yield Command.Truncate(path)
class WinShellChangeNotify(ActionProvider):
"""Action to clean the Windows Registry"""
action_key = 'win.shell.change.notify'
def get_commands(self):
from bleachbit import Windows
yield Command.Function(
None,
Windows.shell_change_notify,
None)
class Winreg(ActionProvider):
"""Action to clean the Windows Registry"""
action_key = 'winreg'
def __init__(self, action_element):
self.keyname = action_element.getAttribute('path')
self.name = action_element.getAttribute('name')
def get_commands(self):
yield Command.Winreg(self.keyname, self.name)
class YumCleanAll(ActionProvider):
"""Action to run 'yum clean all'"""
action_key = 'yum.clean_all'
def __init__(self, action_element):
pass
def get_commands(self):
        # Checking the executable allows auto-hide to work for non-yum systems
        if not FileUtilities.exe_exists('yum'):
            return  # end the generator instead of raising StopIteration
yield Command.Function(
None,
Unix.yum_clean,
'yum clean all')
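# --- Editor's sketch (not part of the original module) ----------------------
# How the PluginMount metaclass above works: the first class created with it
# becomes the mount point (it receives the shared `plugins` list); every
# subclass then appends itself at class-creation time. A toy example in the
# Python 2 style of this file:
class _ToyMount:
    __metaclass__ = PluginMount

class _ToyProvider(_ToyMount):
    action_key = 'toy'

assert _ToyMount.plugins == [_ToyProvider]
# All providers above subclass ActionProvider, so ActionProvider.plugins
# collects every action class in the same way.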
|
brahmastra2016/bleachbit
|
bleachbit/Action.py
|
Python
|
gpl-3.0
| 14,848
|
from httpserver import app as application
|
Samuel-Phillips/pv-viability-map
|
index.py
|
Python
|
gpl-3.0
| 42
|
import _thread
from FaustBot.Communication.Observable import Observable
class LeaveObservable(Observable):
def input(self, raw_data, connection):
data = {}
leave_or_part = "PART" if raw_data.find('PART') != -1 else "QUIT"
data['raw'] = raw_data
data['nick'] = raw_data.split('!')[0][1:]
data['channel'] = raw_data.split(leave_or_part + ' ')[1].split(' :')[0]
data['raw_nick'] = raw_data.split(' ' + leave_or_part)[0][1:]
self.notify_observers(data, connection)
def notify_observers(self, data, connection):
for observer in self._observers:
_thread.start_new_thread(observer.__class__.update_on_leave, (observer, data, connection))
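# --- Editor's sketch (not part of the original module) ----------------------
# What input() above extracts from a raw IRC PART line (the example line is
# illustrative):
raw = ':alice!~alice@host.example PART #channel :bye'
nick = raw.split('!')[0][1:]                    # 'alice'
channel = raw.split('PART ')[1].split(' :')[0]  # '#channel'
raw_nick = raw.split(' PART')[0][1:]            # 'alice!~alice@host.example'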
|
SophieBartmann/Faust-Bot
|
FaustBot/Communication/LeaveObservable.py
|
Python
|
gpl-3.0
| 719
|
"""
WSGI config for credenciamento project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "credenciamento.settings")
application = get_wsgi_application()
|
dparaujo/projeto
|
credenciamento/wsgi.py
|
Python
|
gpl-3.0
| 406
|
'''
gui.py
-------------
Various classes for providing a graphical user interface.
'''
import sys, webbrowser
from .qt.widgets import (QMainWindow, QApplication, QPushButton, QWidget, QAction,
    QTabWidget, QVBoxLayout, QGridLayout, QLabel, QGraphicsView, QKeySequence)
# QPainter, QBrush and Qt are required by GraphicsView below
from .qt.QtGui import QIcon, QPainter, QBrush
from .qt.QtCore import pyqtSlot, Qt
from . import config
from . import __version__
from . import __author__
'''
Main Window
Definition of Main Panel
'''
class MainWindow(QMainWindow):
def __init__(self):
super().__init__()
self.setWindowTitle("GridEdge AutoTesting %s" % __version__)
self.setGeometry(config.InMainWindowWidth, config.InMainWindowHeight,
config.FinMainWindowWidth, config.FinMainWindowHeight)
self.aboutwid = AboutWidget()
self.weblinks = WebLinksWidget()
self.table_widget = MainTableWidget(self)
self.setCentralWidget(self.table_widget)
#self.show()
#### define actions ####
# actions for "File" menu
self.fileQuitAction = self.createAction("&Quit", self.fileQuit,
QKeySequence("Ctrl+q"), None,
"Close the application.")
self.fileActions = [None, self.fileQuitAction]
# actions for "Help" menu
self.helpAction = self.createAction("&Help", self.weblinks.help,
None, None,
"Show help")
self.devAction = self.createAction("&Development and Bugs", self.weblinks.dev,
None, None,
"Development and Bugs")
self.aboutAction = self.createAction("&About", self.aboutwid.show,
None, None,
"About GridEdge AutoTesting")
self.helpActions = [None, self.helpAction, None, self.aboutAction,
None, self.devAction]
#### Create menu bar ####
fileMenu = self.menuBar().addMenu("&File")
self.addActions(fileMenu, self.fileActions)
#processMenu = self.menuBar().addMenu("&Process")
#self.addActions(processMenu, self.processActions)
#self.enableProcessActions(False)
helpMenu = self.menuBar().addMenu("&Help")
self.addActions(helpMenu, self.helpActions)
#### Create status bar ####
self.statusBar().showMessage("Ready", 5000)
def createAction(self, text, slot=None, shortcut=None, icon=None,
tip=None, checkable=False, signal="triggered()"):
""" Convenience function that creates an action with the specified attributes. """
action = QAction(text, self)
if icon is not None:
action.setIcon(QIcon(":/{0}.png".format(icon)))
if shortcut is not None:
action.setShortcut(shortcut)
if tip is not None:
action.setToolTip(tip)
action.setStatusTip(tip)
if slot is not None:
action.triggered.connect(slot)
if checkable:
action.setCheckable(True)
return action
def addActions(self, target, actions):
"""
Convenience function that adds the actions to the target.
If an action is None a separator will be added.
"""
for action in actions:
if action is None:
target.addSeparator()
else:
target.addAction(action)
def fileQuit(self):
"""Special quit-function as the normal window closing might leave something on the background """
QApplication.closeAllWindows()
'''
Main Table Class
Definition of the Table for Tabs
'''
class MainTableWidget(QWidget):
def __init__(self, parent):
super(QWidget, self).__init__(parent)
self.layout = QVBoxLayout()
# Initialize tab screen
self.tabs = QTabWidget()
self.tab1 = QWidget()
self.tab2 = QWidget()
self.tab3 = QWidget()
self.tabs.resize(300,400)
# Add tabs
self.tabs.addTab(self.tab1,"Main")
self.tabs.addTab(self.tab2,"Plots")
self.tabs.addTab(self.tab3,"Camera")
self.tab1.layout = QVBoxLayout(self)
self.pushButton1 = QPushButton("Button")
self.tab1.layout.addWidget(self.pushButton1)
self.tab1.setLayout(self.tab1.layout)
# Add tabs to widget
self.layout.addWidget(self.tabs)
self.setLayout(self.layout)
'''
GraphicsView
Definition of the View for Camera
'''
class GraphicsView(QGraphicsView):
""" Custom GraphicsView to display the scene. """
def __init__(self, parent=None):
super(GraphicsView, self).__init__(parent)
self.setRenderHints(QPainter.Antialiasing)
def resizeEvent(self, event):
self.fitInView(self.sceneRect(), Qt.KeepAspectRatio)
def drawBackground(self, painter, rect):
painter.fillRect(rect, QBrush(Qt.lightGray))
self.scene().drawBackground(painter, rect)
'''
About Widget
Definition of About Panel
'''
class AboutWidget(QWidget):
""" PyQt widget for About Box Panel """
def __init__(self):
super(AboutWidget, self).__init__()
self.initUI()
def initUI(self):
self.setGeometry(100, 200, 400, 200)
self.setWindowTitle('About GridEdge AutoTesting')
self.gridLayout = QGridLayout()
self.setLayout(self.gridLayout)
self.verticalLayout = QVBoxLayout()
self.gridLayout.addLayout(self.verticalLayout, 0, 0, 1, 1)
self.labelTitle = QLabel("<qt><b><big><a href = http://gridedgesolar.com>GridEdgeSolar %s</a></b></big></qt>" % __version__, self);
self.labelBy = QLabel("by: %s" % __author__, self)
self.labelContact = QLabel("<qt>Contacts: <a href = mailto:feranick@hotmail.com> feranick@hotmail.com</a></qt>", self)
self.labelDetails = QLabel("If GridEdgeSolar is a Solar PV project at MIT ", self)
self.labelPaper = QLabel("<qt> GridEdgeSolar", self)
for label in [self.labelTitle, self.labelBy, self.labelContact, self.labelDetails, self.labelPaper]:
label.setWordWrap(True)
label.setOpenExternalLinks(True);
self.verticalLayout.addWidget(label)
'''
WebLinks Widget
Definition of Web links
'''
class WebLinksWidget():
def __init__(self):
super(WebLinksWidget, self).__init__()
def help(self):
webbrowser.open("https://sites.google.com/site/gridedgesolar/")
def dev(self):
webbrowser.open("https://github.mit.edu/GridEdgeSolar/Autotesting")
|
feranick/GES_AT
|
Other/GES-AT_Tabs/gridedgeat/gui.py
|
Python
|
gpl-3.0
| 6,718
|
#!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
# ---------------------------------------------------------------------------##
#
# Copyright (C) 1998-2003 Markus Franz Xaver Johannes Oberhumer
# Copyright (C) 2003 Mt. Hood Playing Card Co.
# Copyright (C) 2005-2009 Skomoroh
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------------##
# imports
# PySol imports
from pysollib.mygettext import _
from pysollib.gamedb import registerGame, GameInfo, GI
from pysollib.util import ANY_SUIT, KING
from pysollib.mfxutil import kwdefault
from pysollib.stack import \
DealRowTalonStack, \
InitialDealTalonStack, \
OpenStack, \
RedealTalonStack, \
ReserveStack, \
SS_FoundationStack, \
StackWrapper, \
Yukon_AC_RowStack, \
Yukon_SS_RowStack, \
WasteStack, \
WasteTalonStack
from pysollib.game import Game
from pysollib.layout import Layout
from pysollib.hint import Yukon_Hint
from pysollib.pysoltk import MfxCanvasText
from pysollib.games.spider import Spider_SS_Foundation
# ************************************************************************
# * Yukon
# ************************************************************************
class Yukon(Game):
Layout_Method = staticmethod(Layout.yukonLayout)
Talon_Class = InitialDealTalonStack
Foundation_Class = SS_FoundationStack
RowStack_Class = StackWrapper(Yukon_AC_RowStack, base_rank=KING)
Hint_Class = Yukon_Hint
def createGame(self, **layout):
# create layout
l, s = Layout(self), self.s
kwdefault(layout, rows=7, texts=0, playcards=25)
self.Layout_Method(l, **layout)
self.setSize(l.size[0], l.size[1])
# create stacks
s.talon = self.Talon_Class(l.s.talon.x, l.s.talon.y, self)
for r in l.s.foundations:
s.foundations.append(
self.Foundation_Class(
r.x, r.y, self, suit=r.suit, max_move=0))
for r in l.s.rows:
s.rows.append(self.RowStack_Class(r.x, r.y, self))
# default
l.defaultAll()
return l
def startGame(self):
for i in range(1, len(self.s.rows)):
self.s.talon.dealRow(rows=self.s.rows[i:], flip=0, frames=0)
for i in range(4):
self.s.talon.dealRow(rows=self.s.rows[1:], flip=1, frames=0)
self._startAndDealRow()
def getHighlightPilesStacks(self):
return ()
shallHighlightMatch = Game._shallHighlightMatch_AC
# ************************************************************************
# * Russian Solitaire (like Yukon, but build down by suit)
# ************************************************************************
class RussianSolitaire(Yukon):
RowStack_Class = StackWrapper(Yukon_SS_RowStack, base_rank=KING)
shallHighlightMatch = Game._shallHighlightMatch_SS
# ************************************************************************
# * Moosehide (build down in any suit but the same)
# ************************************************************************
class Moosehide_RowStack(Yukon_AC_RowStack):
def _isSequence(self, c1, c2):
return (c1.suit != c2.suit and c1.rank == c2.rank+1)
def getHelp(self):
return _('Tableau. Build down in any suit but the same, can move '
'any face-up cards regardless of sequence.')
class Moosehide(Yukon):
RowStack_Class = StackWrapper(Moosehide_RowStack, base_rank=KING)
def shallHighlightMatch(self, stack1, card1, stack2, card2):
return (card1.suit != card2.suit and
abs(card1.rank-card2.rank) == 1)
# ************************************************************************
# * Odessa (just like Russian Solitaire, only a different initial
# * card layout)
# ************************************************************************
class Odessa(RussianSolitaire):
def startGame(self):
for i in range(3):
self.s.talon.dealRow(flip=0, frames=0)
self._dealNumRows(2)
for i in range(2):
self.s.talon.dealRow(rows=self.s.rows[1:6], frames=0)
self._startAndDealRow()
# ************************************************************************
# * Grandfather
# ************************************************************************
class Grandfather_Talon(RedealTalonStack):
def dealCards(self, sound=False):
self.redealCards(sound=sound, shuffle=True)
class Grandfather(RussianSolitaire):
Talon_Class = StackWrapper(Grandfather_Talon, max_rounds=3)
def createGame(self):
layout = Yukon.createGame(self)
layout.createRoundText(self.s.talon, 'nn')
def startGame(self):
frames = 0
sound = False
for i, j in ((1, 7), (1, 6), (2, 6), (2, 5), (3, 5), (3, 4)):
if len(self.s.talon.cards) <= j-i:
frames = -1
sound = True
self.startDealSample()
self.s.talon.dealRowAvail(rows=self.s.rows[i:j],
flip=0, frames=frames)
if not sound:
self.startDealSample()
self.s.talon.dealRowAvail()
for i in range(4):
self.s.talon.dealRowAvail(rows=self.s.rows[1:])
redealCards = startGame
# ************************************************************************
# * Alaska (like Russian Solitaire, but build up or down in suit)
# ************************************************************************
class Alaska_RowStack(Yukon_SS_RowStack):
def _isSequence(self, c1, c2):
return (c1.suit == c2.suit and
((c1.rank + self.cap.dir) % self.cap.mod == c2.rank or
(c2.rank + self.cap.dir) % self.cap.mod == c1.rank))
def getHelp(self):
return _('Tableau. Build up or down by suit, can move any face-up '
'cards regardless of sequence.')
class Alaska(RussianSolitaire):
RowStack_Class = StackWrapper(Alaska_RowStack, base_rank=KING)
# ************************************************************************
# * Roslin (like Yukon, but build up or down by alternate color)
# ************************************************************************
class Roslin_RowStack(Yukon_AC_RowStack):
def _isSequence(self, c1, c2):
return (c1.color != c2.color and
((c1.rank + self.cap.dir) % self.cap.mod == c2.rank or
(c2.rank + self.cap.dir) % self.cap.mod == c1.rank))
def getHelp(self):
return _('Tableau. Build up or down by alternate color, can move '
'any face-up cards regardless of sequence.')
class Roslin(Yukon):
RowStack_Class = StackWrapper(Roslin_RowStack, base_rank=KING)
# ************************************************************************
# * Chinese Discipline
# * Chinese Solitaire
# ************************************************************************
class ChineseDiscipline(Yukon):
Layout_Method = staticmethod(Layout.klondikeLayout)
Talon_Class = DealRowTalonStack
def createGame(self):
return Yukon.createGame(self, waste=0, texts=1)
def startGame(self):
for i in (3, 3, 3, 4, 5, 6):
self.s.talon.dealRow(rows=self.s.rows[:i], flip=1, frames=0)
self.s.talon.dealRow(rows=self.s.rows[i:], flip=0, frames=0)
self._startAndDealRow()
class ChineseSolitaire(ChineseDiscipline):
RowStack_Class = Yukon_AC_RowStack # anything on an empty space
# ************************************************************************
# * Queenie
# ************************************************************************
class Queenie(Yukon):
Layout_Method = staticmethod(Layout.klondikeLayout)
Talon_Class = DealRowTalonStack
def createGame(self):
return Yukon.createGame(self, waste=0, texts=1)
def startGame(self, flip=1, reverse=1):
for i in range(1, len(self.s.rows)):
self.s.talon.dealRow(
rows=self.s.rows[i:], flip=flip, frames=0, reverse=reverse)
self.startDealSample()
self.s.talon.dealRow(reverse=reverse)
# ************************************************************************
# * Rushdike (like Queenie, but built down by suit)
# ************************************************************************
class Rushdike(RussianSolitaire):
Layout_Method = staticmethod(Layout.klondikeLayout)
Talon_Class = DealRowTalonStack
def createGame(self):
return RussianSolitaire.createGame(self, waste=0, texts=1)
def startGame(self, flip=0, reverse=1):
for i in range(1, len(self.s.rows)):
self.s.talon.dealRow(
rows=self.s.rows[i:], flip=flip, frames=0, reverse=reverse)
self.startDealSample()
self.s.talon.dealRow(reverse=reverse)
# ************************************************************************
# * Russian Point (Rushdike in a different layout)
# ************************************************************************
class RussianPoint(Rushdike):
def startGame(self):
r = self.s.rows
for i in (1, 1, 2, 2, 3, 3):
self.s.talon.dealRow(rows=r[i:len(r)-i], flip=0, frames=0)
self._startAndDealRow()
# ************************************************************************
# * Abacus
# ************************************************************************
class Abacus_Foundation(SS_FoundationStack):
def __init__(self, x, y, game, suit, **cap):
kwdefault(cap, base_rank=suit, mod=13, dir=suit+1, max_move=0)
SS_FoundationStack.__init__(self, x, y, game, suit, **cap)
class Abacus_RowStack(Yukon_SS_RowStack):
def _isSequence(self, c1, c2):
dir, mod = -(c1.suit + 1), 13
return c1.suit == c2.suit and (c1.rank + dir) % mod == c2.rank
class Abacus(Rushdike):
Foundation_Class = Abacus_Foundation
RowStack_Class = Abacus_RowStack
def createGame(self):
layout = Rushdike.createGame(self)
help = (_('''\
Club: A 2 3 4 5 6 7 8 9 T J Q K
Spade: 2 4 6 8 T Q A 3 5 7 9 J K
Heart: 3 6 9 Q 2 5 8 J A 4 7 T K
Diamond: 4 8 Q 3 7 J 2 6 T A 5 9 K'''))
self.texts.help = MfxCanvasText(self.canvas,
layout.XM, self.height - layout.YM,
text=help,
anchor="sw",
font=self.app.getFont("canvas_fixed"))
def _shuffleHook(self, cards):
# move Twos to top of the Talon (i.e. first cards to be dealt)
return self._shuffleHookMoveToTop(
cards, lambda c: (c.id in (0, 14, 28, 42), c.suit))
def startGame(self, flip=1, reverse=1):
self.s.talon.dealRow(rows=self.s.foundations, frames=0)
for i in range(1, len(self.s.rows)):
self.s.talon.dealRow(
rows=self.s.rows[i:], flip=flip, frames=0, reverse=reverse)
self.startDealSample()
self.s.talon.dealRow(reverse=reverse)
def shallHighlightMatch(self, stack1, card1, stack2, card2):
dir, mod = -(card1.suit + 1), 13
return (card1.suit == card2.suit and
((card1.rank + dir) % mod == card2.rank or
(card2.rank + dir) % mod == card1.rank))
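# --- Editor's sketch (not part of the original module) ----------------------
# The Abacus foundation order comes from dir = suit + 1 (mod 13). For Spades
# (suit index 1, so dir = 2) this reproduces the help line shown above:
_ABACUS_RANKS = 'A23456789TJQK'
assert ''.join(_ABACUS_RANKS[(1 + 2 * i) % 13] for i in range(13)) == \
    '2468TQA3579JK'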
# ************************************************************************
# * Double Yukon
# * Double Russian Solitaire
# ************************************************************************
class DoubleYukon(Yukon):
def createGame(self):
Yukon.createGame(self, rows=10)
def startGame(self):
for i in range(1, len(self.s.rows)-1):
self.s.talon.dealRow(rows=self.s.rows[i:], flip=0, frames=0)
# self.s.talon.dealRow(rows=self.s.rows, flip=0, frames=0)
for i in range(5):
self.s.talon.dealRow(flip=1, frames=0)
self._startAndDealRow()
class DoubleRussianSolitaire(DoubleYukon):
RowStack_Class = StackWrapper(Yukon_SS_RowStack, base_rank=KING)
shallHighlightMatch = Game._shallHighlightMatch_SS
# ************************************************************************
# * Triple Yukon
# * Triple Russian Solitaire
# ************************************************************************
class TripleYukon(Yukon):
def createGame(self):
Yukon.createGame(self, rows=13, playcards=34)
def startGame(self):
for i in range(1, len(self.s.rows)):
self.s.talon.dealRow(rows=self.s.rows[i:], flip=0, frames=0)
for i in range(5):
self.s.talon.dealRow(rows=self.s.rows, flip=1, frames=0)
self._startAndDealRow()
class TripleRussianSolitaire(TripleYukon):
RowStack_Class = StackWrapper(Yukon_SS_RowStack, base_rank=KING)
shallHighlightMatch = Game._shallHighlightMatch_SS
# ************************************************************************
# * Ten Across
# ************************************************************************
class TenAcross(Yukon):
Foundation_Class = Spider_SS_Foundation
RowStack_Class = StackWrapper(Yukon_SS_RowStack, base_rank=KING)
Layout_Method = staticmethod(Layout.freeCellLayout)
#
# game layout
#
def createGame(self, **layout):
# create layout
lay, s = Layout(self), self.s
kwdefault(layout, rows=10, reserves=2, texts=0)
self.Layout_Method(lay, **layout)
self.setSize(lay.size[0], lay.size[1])
# create stacks
s.talon = InitialDealTalonStack(lay.s.talon.x, lay.s.talon.y, self)
for r in lay.s.foundations:
self.s.foundations.append(
self.Foundation_Class(r.x, r.y, self, suit=r.suit))
for r in lay.s.rows:
s.rows.append(self.RowStack_Class(r.x, r.y, self))
for r in lay.s.reserves:
self.s.reserves.append(ReserveStack(r.x, r.y, self))
# default
lay.defaultAll()
#
# game overrides
#
def startGame(self):
n = 1
for i in range(4):
self.s.talon.dealRow(rows=self.s.rows[:n], frames=0)
self.s.talon.dealRow(rows=self.s.rows[n:-n], frames=0, flip=0)
self.s.talon.dealRow(rows=self.s.rows[-n:], frames=0)
n += 1
self.startDealSample()
self.s.talon.dealRow()
self.s.talon.dealRow(rows=self.s.reserves)
shallHighlightMatch = Game._shallHighlightMatch_SS
# ************************************************************************
# * Panopticon
# ************************************************************************
class Panopticon(TenAcross):
Foundation_Class = SS_FoundationStack
def createGame(self):
TenAcross.createGame(self, rows=8, reserves=4)
def startGame(self):
self.s.talon.dealRow(frames=0, flip=0)
n = 1
for i in range(3):
self.s.talon.dealRow(rows=self.s.rows[:n], frames=0)
self.s.talon.dealRow(rows=self.s.rows[n:-n], frames=0, flip=0)
self.s.talon.dealRow(rows=self.s.rows[-n:], frames=0)
n += 1
self.s.talon.dealRow(frames=0)
self.startDealSample()
self.s.talon.dealRow()
self.s.talon.dealRow(rows=self.s.reserves)
# ************************************************************************
# * Australian Patience
# * Raw Prawn
# * Bim Bom
# ************************************************************************
class AustralianPatience(RussianSolitaire):
RowStack_Class = StackWrapper(Yukon_SS_RowStack, base_rank=KING)
def createGame(self, rows=7):
l, s = Layout(self), self.s
Layout.klondikeLayout(l, rows=rows, waste=1)
self.setSize(l.size[0], l.size[1])
s.talon = WasteTalonStack(l.s.talon.x, l.s.talon.y, self, max_rounds=1)
s.waste = WasteStack(l.s.waste.x, l.s.waste.y, self)
for r in l.s.foundations:
s.foundations.append(
SS_FoundationStack(r.x, r.y, self, suit=r.suit))
for r in l.s.rows:
s.rows.append(self.RowStack_Class(r.x, r.y, self))
l.defaultAll()
def startGame(self):
self._startDealNumRowsAndDealRowAndCards(3)
class RawPrawn(AustralianPatience):
RowStack_Class = Yukon_SS_RowStack
class BimBom(AustralianPatience):
RowStack_Class = Yukon_SS_RowStack
def createGame(self):
AustralianPatience.createGame(self, rows=8)
def startGame(self):
self._startDealNumRowsAndDealRowAndCards(4)
# ************************************************************************
# * Geoffrey
# ************************************************************************
class Geoffrey(Yukon):
Layout_Method = staticmethod(Layout.klondikeLayout)
RowStack_Class = StackWrapper(Yukon_SS_RowStack, base_rank=KING)
def createGame(self):
Yukon.createGame(self, rows=8, waste=0)
def startGame(self):
for i in (4, 4, 4, 4, 8):
self.s.talon.dealRow(rows=self.s.rows[:i], flip=1, frames=0)
self.s.talon.dealRow(rows=self.s.rows[i:], flip=0, frames=0)
self.startDealSample()
self.s.talon.dealRow()
self.s.talon.dealRow(rows=self.s.rows[:4])
shallHighlightMatch = Game._shallHighlightMatch_SS
# ************************************************************************
# * Queensland
# ************************************************************************
class Queensland(Yukon):
Layout_Method = staticmethod(Layout.klondikeLayout)
RowStack_Class = Yukon_SS_RowStack
def createGame(self):
Yukon.createGame(self, waste=0)
def startGame(self):
for i in range(1, len(self.s.rows)):
self.s.talon.dealRow(rows=self.s.rows[i:], flip=0, frames=0)
self._startDealNumRows(3)
self.s.talon.dealRow()
self.s.talon.dealRowAvail()
shallHighlightMatch = Game._shallHighlightMatch_SS
# ************************************************************************
# * Russian Spider
# * Double Russian Spider
# ************************************************************************
class RussianSpider_RowStack(Yukon_SS_RowStack): # Spider_SS_RowStack
def canDropCards(self, stacks):
if len(self.cards) < 13:
return (None, 0)
cards = self.cards[-13:]
for s in stacks:
if s is not self and s.acceptsCards(self, cards):
return (s, 13)
return (None, 0)
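# Editorial note (not part of the original game logic): canDropCards()
# only fires once the pile holds at least 13 cards and some foundation
# accepts the top 13 as a unit. For example, a 15-card pile whose last
# 13 cards form a complete same-suit run can return (foundation, 13);
# anything shorter returns (None, 0).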
class RussianSpider(RussianSolitaire):
RowStack_Class = StackWrapper(RussianSpider_RowStack, base_rank=KING)
Foundation_Class = Spider_SS_Foundation
def createGame(self, rows=7):
# create layout
l, s = Layout(self), self.s
l.yukonLayout(rows=rows, texts=0, playcards=25)
self.setSize(l.size[0], l.size[1])
# create stacks
s.talon = self.Talon_Class(l.s.talon.x, l.s.talon.y, self)
for r in l.s.foundations:
s.foundations.append(
self.Foundation_Class(
r.x, r.y, self, suit=ANY_SUIT, max_move=0))
for r in l.s.rows:
s.rows.append(self.RowStack_Class(r.x, r.y, self))
# default
l.defaultAll()
class DoubleRussianSpider(RussianSpider, DoubleRussianSolitaire):
def createGame(self):
RussianSpider.createGame(self, rows=10)
def startGame(self):
DoubleRussianSolitaire.startGame(self)
# ************************************************************************
# * Brisbane
# ************************************************************************
class Brisbane_RowStack(Yukon_AC_RowStack):
def _isSequence(self, c1, c2):
return (c1.rank + self.cap.dir) % self.cap.mod == c2.rank
def getHelp(self):
return _('Tableau. Build down regardless of suit, can move any '
'face-up cards regardless of sequence.')
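# Editorial note: the modulo in _isSequence() makes the rank order
# circular. With cap.mod == 13 an ace and a king count as adjacent,
# so building "down regardless of suit" wraps around the ends of the
# rank scale instead of stopping at the ace.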
class Brisbane(Yukon):
RowStack_Class = Brisbane_RowStack
def startGame(self):
for i in range(1, len(self.s.rows)):
self.s.talon.dealRow(rows=self.s.rows[i:], flip=0, frames=0)
self._startDealNumRows(3)
self.s.talon.dealRow()
self.s.talon.dealRowAvail()
def getHighlightPilesStacks(self):
return ()
shallHighlightMatch = Game._shallHighlightMatch_RK
# ************************************************************************
# * Hawaiian
# ************************************************************************
class Hawaiian(Game):
Hint_Class = Yukon_Hint
def createGame(self, rows=10, playcards=20):
l, s = Layout(self), self.s
self.setSize(l.XM+max(rows, 8)*l.XS,
l.YM+2*l.YS+playcards*l.YOFFSET)
x, y = l.XM, l.YM
stack = OpenStack(x, y, self, max_move=1, max_accept=0)
s.reserves.append(stack)
l.createText(stack, 'ne')
x, y = self.width-8*l.XS, l.YM
for i in range(8):
s.foundations.append(SS_FoundationStack(x, y, self, suit=i//2))
x += l.XS
x, y = self.width-rows*l.XS, l.YM+l.YS
for i in range(rows):
s.rows.append(Yukon_AC_RowStack(x, y, self))
x += l.XS
x, y = l.XM, self.height-l.YS
s.talon = InitialDealTalonStack(x, y, self)
l.defaultStackGroups()
def startGame(self):
for i in range(104-5*10):
self.s.talon.dealRow(rows=self.s.reserves, frames=0)
self._startDealNumRowsAndDealSingleRow(4)
def getHighlightPilesStacks(self):
return ()
shallHighlightMatch = Game._shallHighlightMatch_AC
# ************************************************************************
# * Wave
# ************************************************************************
class WaveTalon(DealRowTalonStack):
def dealCards(self, sound=False):
if sound and self.game.app.opt.animations:
self.game.startDealSample()
n = self.dealRowAvail(flip=0, sound=False)
n += self.dealRowAvail(sound=False)
if sound:
self.game.stopSamples()
return n
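# Editorial note: each WaveTalon deal adds up to two cards to every
# available row, one face down (flip=0) and then one face up, wrapped
# in a single deal sample so that, when animations are enabled, both
# rows play as one action.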
class Wave(Game):
Hint_Class = Yukon_Hint
def createGame(self, rows=8):
l, s = Layout(self), self.s
l.klondikeLayout(rows=rows, waste=0, playcards=25)
self.setSize(l.size[0], l.size[1])
s.talon = WaveTalon(l.s.talon.x, l.s.talon.y, self)
for r in l.s.foundations:
s.foundations.append(SS_FoundationStack(r.x, r.y, self,
suit=r.suit))
for r in l.s.rows:
s.rows.append(Yukon_AC_RowStack(r.x, r.y, self))
l.defaultAll()
def startGame(self):
self.s.talon.dealRow(frames=0)
self.s.talon.dealRow(frames=0, flip=0)
self._startAndDealRow()
shallHighlightMatch = Game._shallHighlightMatch_AC
# register the game
registerGame(GameInfo(19, Yukon, "Yukon",
GI.GT_YUKON, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(20, RussianSolitaire, "Russian Solitaire",
GI.GT_YUKON, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(27, Odessa, "Odessa",
GI.GT_YUKON, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(278, Grandfather, "Grandfather",
GI.GT_YUKON, 1, 2, GI.SL_BALANCED))
registerGame(GameInfo(186, Alaska, "Alaska",
GI.GT_YUKON, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(187, ChineseDiscipline, "Chinese Discipline",
GI.GT_YUKON | GI.GT_XORIGINAL, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(188, ChineseSolitaire, "Chinese Solitaire",
GI.GT_YUKON | GI.GT_XORIGINAL, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(189, Queenie, "Queenie",
GI.GT_YUKON | GI.GT_XORIGINAL, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(190, Rushdike, "Rushdike",
GI.GT_YUKON | GI.GT_XORIGINAL, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(191, RussianPoint, "Russian Point",
GI.GT_YUKON | GI.GT_XORIGINAL, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(192, Abacus, "Abacus",
GI.GT_YUKON | GI.GT_XORIGINAL, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(271, DoubleYukon, "Double Yukon",
GI.GT_YUKON, 2, 0, GI.SL_BALANCED))
registerGame(GameInfo(272, TripleYukon, "Triple Yukon",
GI.GT_YUKON, 3, 0, GI.SL_BALANCED))
registerGame(GameInfo(284, TenAcross, "Ten Across",
GI.GT_YUKON, 1, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(285, Panopticon, "Panopticon",
GI.GT_YUKON | GI.GT_ORIGINAL, 1, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(339, Moosehide, "Moosehide",
GI.GT_YUKON, 1, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(387, Roslin, "Roslin",
GI.GT_YUKON, 1, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(447, AustralianPatience, "Australian Patience",
GI.GT_YUKON, 1, 0, GI.SL_BALANCED,
altnames=('Outback Patience',)))
registerGame(GameInfo(450, RawPrawn, "Raw Prawn",
GI.GT_YUKON, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(456, BimBom, "Bim Bom",
GI.GT_YUKON | GI.GT_ORIGINAL, 2, 0, GI.SL_BALANCED))
registerGame(GameInfo(466, DoubleRussianSolitaire, "Double Russian Solitaire",
GI.GT_YUKON, 2, 0, GI.SL_BALANCED))
registerGame(GameInfo(488, TripleRussianSolitaire, "Triple Russian Solitaire",
GI.GT_YUKON, 3, 0, GI.SL_BALANCED))
registerGame(GameInfo(492, Geoffrey, "Geoffrey",
GI.GT_YUKON, 1, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(525, Queensland, "Queensland",
GI.GT_YUKON, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(530, RussianSpider, "Russian Spider",
GI.GT_SPIDER, 1, 0, GI.SL_BALANCED,
altnames=('Ukrainian Solitaire',)))
registerGame(GameInfo(531, DoubleRussianSpider, "Double Russian Spider",
GI.GT_SPIDER | GI.GT_ORIGINAL, 2, 0, GI.SL_BALANCED))
registerGame(GameInfo(603, Brisbane, "Brisbane",
GI.GT_SPIDER, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(707, Hawaiian, "Hawaiian",
GI.GT_2DECK_TYPE | GI.GT_ORIGINAL, 2, 0, GI.SL_BALANCED))
registerGame(GameInfo(732, Wave, "Wave",
GI.GT_2DECK_TYPE | GI.GT_ORIGINAL, 2, 0, GI.SL_BALANCED))
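# Editorial note: each GameInfo above is (numeric game id, game class,
# display name, game type flags, number of decks, number of redeals,
# skill level). The numeric ids are assumed to stay stable across
# releases so saved games and statistics keep resolving correctly.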
|
jimsize/PySolFC
|
pysollib/games/yukon.py
|
Python
|
gpl-3.0
| 27,170
|
# -*- coding: utf-8 -*-
# crunchyfrog - a database schema browser and query tool
# Copyright (C) 2008 Andi Albrecht <albrecht.andi@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Preferences"""
from inspect import isclass
import gtk
import gobject
try:
import gconf
HAVE_GCONF = True
except ImportError:
HAVE_GCONF = False
import os
import sys
from gettext import gettext as _
import logging
log = logging.getLogger("PREFS")
import cf
from cf.ui import dialogs
from cf.plugins.core import GenericPlugin
class PreferencesDialog(object):
def __init__(self, win, mode="editor"):
self.win = win
self.app = win.app
self.builder = gtk.Builder()
self.builder.set_translation_domain('crunchyfrog')
fname = self.app.get_glade_file('preferences.glade')
self.builder.add_from_file(fname)
self.builder.connect_signals(self)
self.dlg = self.builder.get_object('preferences_dialog')
self._setup_widget()
self._setup_connections()
self.refresh()
if mode == "editor":
curr_page = 1
elif mode == "plugins":
curr_page = 2
else:
curr_page = 0
self.builder.get_object("notebook1").set_current_page(curr_page)
def _setup_widget(self):
self._setup_plugins()
self._setup_editor()
self._setup_shortcuts()
# Fix secondary button
btn = self.builder.get_object("btn_help")
box = self.builder.get_object("dialog-action_area1")
box.set_child_secondary(btn, True)
def _setup_editor(self):
model = gtk.ListStore(int, str, gtk.gdk.Pixbuf)
iconview = self.builder.get_object("editor_iconview")
iconview.set_model(model)
iconview.set_text_column(1)
iconview.set_pixbuf_column(2)
def get_pb(stock):
return self.app.load_icon(stock, 36, gtk.ICON_LOOKUP_FORCE_SVG)
model.append([0, _(u"General"), get_pb("gtk-execute")])
model.append([1, _(u"View"), get_pb("gtk-justify-left")])
model.append([2, _(u"Editor"), get_pb("gtk-edit")])
model.append([3, _(u"Fonts & Colors"), get_pb("gtk-font")])
model.append([4, _(u'Keyboard\nShortcuts'),
get_pb('preferences-desktop-keyboard-shortcuts')])
iconview.connect("selection-changed", self.on_editor_selection_changed)
schemes = self.builder.get_object("editor_schemes")
model = gtk.ListStore(str, str)
model.set_sort_column_id(1, gtk.SORT_ASCENDING)
schemes.set_model(model)
col = gtk.TreeViewColumn("", gtk.CellRendererText(), markup=1)
schemes.append_column(col)
import gtksourceview2
sm = gtksourceview2.style_scheme_manager_get_default()
selected = None
for id in sm.get_scheme_ids():
scheme = sm.get_scheme(id)
lbl = "<b>%s</b>" % scheme.get_name()
if scheme.get_description():
lbl += "\n"+scheme.get_description()
iter = model.append(None)
model.set(iter, 0, id, 1, lbl)
if id == self.app.config.get("editor.scheme"):
selected = iter
sel = schemes.get_selection()
sel.select_iter(selected)
sel.connect("changed", self.on_editor_option_changed)
sel.set_data("config_option", "editor.scheme")
def _setup_plugins(self):
"""Set up the plugins view"""
self.plugin_model = gtk.TreeStore(gobject.TYPE_PYOBJECT, # 0 Plugin class
bool, # 1 active
str, # 2 label
gtk.gdk.Pixbuf, # 3 icon
bool, # 4 active visible
)
self.plugin_model.set_sort_column_id(2, gtk.SORT_ASCENDING)
self.plugin_list = self.builder.get_object("plugin_list")
self.plugin_list.set_model(self.plugin_model)
# label
col = gtk.TreeViewColumn()
renderer = gtk.CellRendererToggle()
renderer.connect("toggled", self.on_plugin_active_toggled)
col.pack_start(renderer, expand=False)
col.add_attribute(renderer, "active", 1)
col.add_attribute(renderer, "visible", 4)
renderer = gtk.CellRendererPixbuf()
col.pack_start(renderer, expand=False)
col.add_attribute(renderer, 'pixbuf', 3)
renderer = gtk.CellRendererText()
col.pack_start(renderer, expand=True)
col.add_attribute(renderer, 'markup', 2)
self.plugin_list.append_column(col)
sel = self.plugin_list.get_selection()
sel.connect("changed", self.on_plugin_selection_changed)
def _setup_shortcuts(self):
self.shortcuts_model = gtk.TreeStore(str, # 0 label
int, # 1 keyval
gtk.gdk.ModifierType, # 2 mods
bool, # 3 visible
str, # 4 tooltip
gobject.TYPE_PYOBJECT,# 5 action
)
self.list_shortcuts = self.builder.get_object('list_shortcuts')
col = gtk.TreeViewColumn(_('Action'), gtk.CellRendererText(), text=0)
self.list_shortcuts.append_column(col)
renderer = gtk.CellRendererAccel()
renderer.connect('accel-edited', self.on_accel_edited)
col = gtk.TreeViewColumn(_(u'Shortcut'), renderer,
accel_key=1, accel_mods=2, visible=3,
editable=3)
self.list_shortcuts.append_column(col)
self.shortcuts_model.set_sort_column_id(0, gtk.SORT_ASCENDING)
self.list_shortcuts.set_model(self.shortcuts_model)
def _setup_connections(self):
self.app.plugins.connect("plugin-added", self.on_plugin_added)
self.app.plugins.connect("plugin-removed", self.on_plugin_removed)
def run(self):
"""Run the dialog."""
return self.dlg.run()
def destroy(self):
"""Destroy the dialog."""
self.dlg.destroy()
def on_accel_edited(self, renderer, path, accel_key, accel_mods,
hardware_keycode):
model = self.shortcuts_model
iter_ = model.get_iter(path)
action = model.get_value(iter_, 5)
if not action:
return
if accel_key == gtk.keysyms.Delete and not accel_mods:
accel_key = accel_mods = 0
model.set_value(iter_, 1, accel_key)
model.set_value(iter_, 2, accel_mods)
gtk.accel_map_change_entry(action.get_accel_path(),
accel_key, accel_mods, True)
def on_editor_reuse_conn_toggled(self, toggle):
self.app.config.set("editor.reuse_connection", toggle.get_active())
def on_editor_hide_results_toggled(self, toggle):
self.app.config.set('editor.hide_results_pane', toggle.get_active())
def on_editor_option_changed(self, widget, *args):
option = widget.get_data("config_option")
conf = self.app.config
if isinstance(widget, gtk.CheckButton):
conf.set(option, widget.get_active())
elif isinstance(widget, gtk.SpinButton):
conf.set(option, widget.get_value_as_int())
elif isinstance(widget, gtk.FontButton):
conf.set(option, widget.get_font_name())
elif isinstance(widget, gtk.TreeSelection):
model, iter = widget.get_selected()
conf.set(option, model.get_value(iter, 0))
if option == "editor.wrap_text":
self.builder.get_object("editor_wrap_split").set_sensitive(widget.get_active())
if option == "editor.right_margin":
self.builder.get_object("editor_right_margin_position_box").set_sensitive(widget.get_active())
if option == "editor.default_font":
self.builder.get_object("editor_font_box").set_sensitive(not widget.get_active())
if option == "plugins.repo_enabled":
gobject.idle_add(self.refresh_plugins)
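    # Editorial note: widgets opt in to this generic handler by storing
    # their config key via set_data("config_option", ...); the
    # isinstance() dispatch above then reads the matching widget state
    # (toggle, spin value, font name or tree selection) for that key.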
def on_editor_selection_changed(self, iconview):
model = iconview.get_model()
for path in iconview.get_selected_items():
iter = model.get_iter(path)
nb = self.builder.get_object("editor_notebook")
nb.set_current_page(model.get_value(iter, 0))
def on_help(self, *args):
self.app.show_help()
def on_plugin_active_toggled(self, renderer, path):
iter = self.plugin_model.get_iter(path)
plugin = self.plugin_model.get_value(iter, 0)
if issubclass(plugin, GenericPlugin):
self.plugin_model.set_value(iter, 1, not renderer.get_active())
gobject.idle_add(self.app.plugins.set_active, plugin, not renderer.get_active())
def on_plugin_added(self, manager, plugin):
iter = self.plugin_model.get_iter_first()
while iter:
if self.plugin_model.get_value(iter, 0) == plugin.plugin_type:
break
iter = self.plugin_model.iter_next(iter)
if iter:
            lbl = '<b>%s</b>' % (plugin.name or _(u"Unknown"))
if plugin.description:
lbl += "\n"+plugin.description
if plugin.icon:
ico = self.app.load_icon(plugin.icon,
gtk.ICON_SIZE_LARGE_TOOLBAR,
gtk.ICON_LOOKUP_FORCE_SVG)
else:
ico = None
citer = self.plugin_model.append(iter)
self.plugin_model.set(citer,
0, plugin,
1, self.app.plugins.is_active(plugin),
2, lbl,
3, ico,
4, True)
def on_plugin_install(self, *args):
dlg = gtk.FileChooserDialog(_(u"Install plugin"), None,
gtk.FILE_CHOOSER_ACTION_OPEN,
(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
gtk.STOCK_OPEN, gtk.RESPONSE_OK))
filter = gtk.FileFilter()
filter.set_name(_(u"CrunchyFrog plugins (*.zip, *.py)"))
filter.add_pattern("*.zip")
filter.add_pattern("*.py")
dlg.add_filter(filter)
dlg.set_filter(filter)
if dlg.run() == gtk.RESPONSE_OK:
uri = dlg.get_uri()
else:
uri = None
dlg.destroy()
gobject.idle_add(self.app.plugins.install_plugin, uri)
def on_plugin_removed(self, manager, plugin):
iter = self.plugin_model.get_iter_first()
while iter:
if self.plugin_model.get_value(iter, 0) == plugin.plugin_type:
citer = self.plugin_model.iter_children(iter)
while citer:
if self.plugin_model.get_value(citer, 0) == plugin:
self.plugin_model.remove(citer)
return
citer = self.plugin_model.iter_next(citer)
iter = self.plugin_model.iter_next(iter)
def on_plugin_prefs_show(self, *args):
sel = self.plugin_list.get_selection()
model, iter = sel.get_selected()
if not iter: return
obj = model.get_value(iter, 0)
if not isclass(obj) or not issubclass(obj, GenericPlugin):
return
if not obj.has_custom_options:
return
obj.run_custom_options_dialog(self.app)
def on_plugin_show_about(self, *args):
sel = self.plugin_list.get_selection()
model, iter = sel.get_selected()
if not iter: return
obj = model.get_value(iter, 0)
if not isclass(obj) or not issubclass(obj, GenericPlugin):
return
dlg = gtk.AboutDialog()
if obj.name: dlg.set_name(obj.name)
if obj.description: dlg.set_comments(obj.description)
if obj.icon: dlg.set_logo_icon_name(obj.icon)
if obj.author: dlg.set_authors([obj.author])
if obj.license: dlg.set_license(obj.license)
if obj.homepage: dlg.set_website(obj.homepage)
if obj.version: dlg.set_version(obj.version)
dlg.run()
dlg.destroy()
def on_plugin_selection_changed(self, selection, *args):
model, iter = selection.get_selected()
if not iter:
return
obj = model.get_value(iter, 0)
if isclass(obj) and issubclass(obj, GenericPlugin):
self.builder.get_object("plugin_about").set_sensitive(True)
self.builder.get_object("plugin_prefs").set_sensitive(obj.has_custom_options)
else:
self.builder.get_object("plugin_about").set_sensitive(False)
self.builder.get_object("plugin_prefs").set_sensitive(False)
def on_plugin_sync_repo(self, *args):
self.sync_repo_file()
def on_plugin_folder_show(self, *args):
        gtk.show_uri(self.dlg.get_screen(),
                     cf.USER_PLUGIN_URI,
                     gtk.gdk.x11_get_server_time(self.dlg.window))
def refresh(self):
self.refresh_editor()
self.refresh_plugins()
self.refresh_shortcuts()
def refresh_editor(self):
config = self.app.config
gw = self.builder.get_object
gw('editor_reuse_connection').set_data('config_option',
'editor.reuse_connection')
gw("editor_reuse_connection").set_active(
config.get("editor.reuse_connection"))
gw('editor_hide_results').set_data('config_option',
'editor.hide_results_pane')
gw('editor_hide_results').set_active(
config.get('editor.hide_results_pane'))
gw("editor_replace_variables").set_data("config_option", "editor.replace_variables")
gw("editor_replace_variables").set_active(config.get("editor.replace_variables"))
gw("sqlparse_enabled").set_data("config_option", "sqlparse.enabled")
gw("sqlparse_enabled").set_active(config.get("sqlparse.enabled"))
gw('autocompl_tab').set_data('config_option', 'editor.tabcompletion')
gw('autocompl_tab').set_active(config.get('editor.tabcompletion'))
gw("editor_wrap_text").set_data("config_option", "editor.wrap_text")
gw("editor_wrap_text").set_active(config.get("editor.wrap_text"))
gw("editor_wrap_split").set_data("config_option", "editor.wrap_split")
gw("editor_wrap_split").set_active(config.get("editor.wrap_split"))
gw("editor_wrap_split").set_sensitive(gw("editor_wrap_text").get_active())
gw("editor_display_line_numbers").set_data("config_option", "editor.display_line_numbers")
gw("editor_display_line_numbers").set_active(config.get("editor.display_line_numbers"))
gw("editor_highlight_current_line").set_data("config_option", "editor.highlight_current_line")
gw("editor_highlight_current_line").set_active(config.get("editor.highlight_current_line"))
gw("editor_right_margin").set_data("config_option", "editor.right_margin")
gw("editor_right_margin").set_active(config.get("editor.right_margin"))
gw("editor_right_margin_position").set_data("config_option", "editor.right_margin_position")
gw("editor_right_margin_position").set_value(config.get("editor.right_margin_position"))
gw("editor_right_margin_position_box").set_sensitive(config.get("editor.right_margin"))
gw("editor_bracket_matching").set_data("config_option", "editor.bracket_matching")
gw("editor_bracket_matching").set_active(config.get("editor.bracket_matching"))
gw("editor_tabs_width").set_data("config_option", "editor.tabs_width")
gw("editor_tabs_width").set_value(config.get("editor.tabs_width"))
gw("editor_insert_spaces").set_data("config_option", "editor.insert_spaces")
gw("editor_insert_spaces").set_active(config.get("editor.insert_spaces"))
gw("editor_auto_indent").set_data("config_option", "editor.auto_indent")
gw("editor_auto_indent").set_active(config.get("editor.auto_indent"))
gw("editor_default_font").set_data("config_option", "editor.default_font")
gw("editor_default_font").set_active(config.get("editor.default_font"))
if HAVE_GCONF:
client = gconf.client_get_default()
default_font = client.get_string("/desktop/gnome/interface/monospace_font_name")
else:
default_font = 'Monospace 10'
gw("editor_default_font").set_label(gw("editor_default_font").get_label() % default_font)
gw("editor_font_box").set_sensitive(not config.get("editor.default_font"))
gw("editor_font").set_data("config_option", "editor.font")
gw("editor_font").set_font_name(config.get("editor.font"))
def refresh_plugins(self):
# Repo
self.builder.get_object("plugin_enable_repo").set_data("config_option", "plugins.repo_enabled")
self.builder.get_object("plugin_enable_repo").set_active(self.app.config.get("plugins.repo_enabled"))
# Plugins
model = self.plugin_model
model.clear()
for key, value in self.app.plugins.plugin_types.items():
iter = model.append(None)
model.set(iter,
0, key,
1, False,
2, '<b>%s</b>' % value[0],
3, None,
4, False)
for plugin in self.app.plugins.get_plugins(key):
                lbl = '<b>%s</b>' % (plugin.name or _(u"Unknown"))
if plugin.description:
lbl += "\n"+plugin.description
if plugin.INIT_ERROR:
lbl += '\n<span color="red">'
lbl += _(u'ERROR')+': '+plugin.INIT_ERROR+'</span>'
if plugin.icon:
ico = self.app.load_icon(plugin.icon,
gtk.ICON_SIZE_LARGE_TOOLBAR,
gtk.ICON_LOOKUP_FORCE_SVG)
else:
ico = None
citer = model.append(iter)
model.set(citer,
0, plugin,
1, self.app.plugins.is_active(plugin),
2, lbl,
3, ico,
4, not bool(plugin.INIT_ERROR))
def _plugin_iter_for_ep(self, ep_name):
model = self.plugin_list.get_model()
iter = model.get_iter_first()
while iter:
if model.get_value(iter, 0) == ep_name:
return model, iter
iter = model.iter_next(iter)
return model, None
def refresh_shortcuts(self):
model = self.shortcuts_model
model.clear()
for group in self.win.ui.get_action_groups():
if group.get_name() == 'clipboard':
continue
iter_ = model.append(None)
lbl = group.get_data('cf::label') or group.get_name()
model.set(iter_, 0, lbl, 3, False)
for action in group.list_actions():
# Don't display menu items with submenus
if action.get_name().endswith('menu-action') \
or action.get_name().startswith('activate-editor'):
continue
citer = model.append(iter_)
accel_path = action.get_accel_path()
if accel_path is None:
keyval = mods = None
else:
shortcut = gtk.accel_map_lookup_entry(accel_path)
if shortcut is not None:
keyval, mods = shortcut
else:
keyval = mods = None
model.set(citer,
0, action.props.label.replace('_', ''),
3, True,
4, action.props.tooltip,
5, action)
if keyval is not None:
model.set(citer, 1, keyval)
if mods is not None:
model.set(citer, 2, mods)
|
angvp/angelvelasquez-crunchyfrog
|
cf/ui/prefs.py
|
Python
|
gpl-3.0
| 21,018
|
# coding: utf-8
from gettext import gettext
def _(text, args=None):
text = gettext(text)
if args is not None:
text %= args
return text
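# Usage sketch (illustrative, not part of the original module):
#
#   _("Hello") returns the gettext translation of "Hello";
#   _("Hello %s", "world") interpolates via the % operator after
#   translation, and a tuple can fill several placeholders, e.g.
#   _("%s of %s", ("1", "3")) yields "1 of 3" for the identity
#   translation.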
|
onlfait/uTopiaController
|
src/controller/i18n.py
|
Python
|
gpl-3.0
| 156
|
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import strip_jsonp, unified_strdate
class ElPaisIE(InfoExtractor):
_VALID_URL = r'https?://(?:[^.]+\.)?elpais\.com/.*/(?P<id>[^/#?]+)\.html(?:$|[?#])'
IE_DESC = 'El País'
_TESTS = [{
'url': 'http://blogs.elpais.com/la-voz-de-inaki/2014/02/tiempo-nuevo-recetas-viejas.html',
'md5': '98406f301f19562170ec071b83433d55',
'info_dict': {
'id': 'tiempo-nuevo-recetas-viejas',
'ext': 'mp4',
'title': 'Tiempo nuevo, recetas viejas',
'description': 'De lunes a viernes, a partir de las ocho de la mañana, Iñaki Gabilondo nos cuenta su visión de la actualidad nacional e internacional.',
'upload_date': '20140206',
}
}, {
'url': 'http://elcomidista.elpais.com/elcomidista/2016/02/24/articulo/1456340311_668921.html#?id_externo_nwl=newsletter_diaria20160303t',
'md5': '3bd5b09509f3519d7d9e763179b013de',
'info_dict': {
'id': '1456340311_668921',
'ext': 'mp4',
'title': 'Cómo hacer el mejor café con cafetera italiana',
'description': 'Que sí, que las cápsulas son cómodas. Pero si le pides algo más a la vida, quizá deberías aprender a usar bien la cafetera italiana. No tienes más que ver este vídeo y seguir sus siete normas básicas.',
'upload_date': '20160303',
}
}, {
'url': 'http://elpais.com/elpais/2017/01/26/ciencia/1485456786_417876.html',
'md5': '9c79923a118a067e1a45789e1e0b0f9c',
'info_dict': {
'id': '1485456786_417876',
'ext': 'mp4',
'title': 'Hallado un barco de la antigua Roma que naufragó en Baleares hace 1.800 años',
'description': 'La nave portaba cientos de ánforas y se hundió cerca de la isla de Cabrera por razones desconocidas',
'upload_date': '20170127',
},
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
prefix = self._html_search_regex(
r'var\s+url_cache\s*=\s*"([^"]+)";', webpage, 'URL prefix')
id_multimedia = self._search_regex(
r"id_multimedia\s*=\s*'([^']+)'", webpage, 'ID multimedia', default=None)
if id_multimedia:
url_info = self._download_json(
'http://elpais.com/vdpep/1/?pepid=' + id_multimedia, video_id, transform_source=strip_jsonp)
video_suffix = url_info['mp4']
else:
video_suffix = self._search_regex(
r"(?:URLMediaFile|urlVideo_\d+)\s*=\s*url_cache\s*\+\s*'([^']+)'", webpage, 'video URL')
video_url = prefix + video_suffix
thumbnail_suffix = self._search_regex(
r"(?:URLMediaStill|urlFotogramaFijo_\d+)\s*=\s*url_cache\s*\+\s*'([^']+)'",
webpage, 'thumbnail URL', fatal=False)
thumbnail = (
None if thumbnail_suffix is None
else prefix + thumbnail_suffix)
title = self._html_search_regex(
(r"tituloVideo\s*=\s*'([^']+)'", webpage, 'title',
r'<h2 class="entry-header entry-title.*?>(.*?)</h2>'),
webpage, 'title')
upload_date = unified_strdate(self._search_regex(
r'<p class="date-header date-int updated"\s+title="([^"]+)">',
webpage, 'upload date', default=None) or self._html_search_meta(
'datePublished', webpage, 'timestamp'))
return {
'id': video_id,
'url': video_url,
'title': title,
'description': self._og_search_description(webpage),
'thumbnail': thumbnail,
'upload_date': upload_date,
}
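# Editorial summary: extraction here is regex driven. The page embeds
# a url_cache prefix plus per-media suffixes; when an id_multimedia is
# present the mp4 suffix is fetched from the JSONP endpoint instead,
# and prefix + suffix assembles the final video and thumbnail URLs.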
|
israeltobias/DownMedia
|
youtube-dl/youtube_dl/extractor/elpais.py
|
Python
|
gpl-3.0
| 3,823
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Raspa Preço documentation build configuration file, created by
# sphinx-quickstart on Mon Nov 6 10:52:29 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
path = os.path.abspath('.')
sys.path.insert(0, path)
print(path)
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Raspa Preço'
copyright = '2017, Ivan Brasilico'
author = 'Ivan Brasilico'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.1'
# The full version, including alpha/beta/rc tags.
release = '0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'pt-BR'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'venv', '.tox']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
'donate.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'RaspaPrecodoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'RaspaPreco.tex', 'Raspa Preço Documentation',
'Ivan Brasilico', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'raspapreco', 'Raspa Preço Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'RaspaPreco', 'Raspa Preço Documentation',
author, 'RaspaPreco', 'One line description of project.',
'Miscellaneous'),
]
|
IvanBrasilico/raspa-preco
|
conf.py
|
Python
|
gpl-3.0
| 5,357
|
# coding=utf-8
"""
Compute the largest prime factor of 600851475143.
"""
__author__ = 'davide'
from euler._utils import genera_primi
import math
def ex3(n):
top = math.ceil(math.sqrt(n))
for m in genera_primi():
while not n % m:
yield m
n //= m
top = math.ceil(math.sqrt(n))
if m > top:
if n != 1:
yield n
return
if __name__ == "__main__":
n = 600851475143
p = 1
for d in ex3(n):
print(d, end=" ")
p *= d
assert p == n
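# Worked example (editorial): for the Project Euler sample n = 13195,
# ex3() yields the prime factors 5, 7, 13 and 29 in increasing order,
# and their product reassembles 13195, the same invariant the assert
# above checks for 600851475143.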
|
DavideCanton/Python3
|
euler/ex3.py
|
Python
|
gpl-3.0
| 564
|
import argparse
import collections
import os
import re
import subprocess
import sys
import xml.etree.cElementTree as ElementTree
VERSION = "2.7.2"
NAMESPACE = "pyqtc.python.%s" % VERSION
FILTER_NAME = "Python %s" % VERSION
FILTER_ID = "python"
EXTENSIONS = {".html", ".css", ".js", ".txt", ".xml", ".jpg"}
class Symbol(object):
def __init__(self, full_name, type, filename):
self.full_name = full_name
self.type = type
self.filename = filename
self.name = full_name.split(".")[-1]
def LoadSphinxIndex(filename):
ret = []
with open(filename) as handle:
for line in handle:
if not line or line.startswith("#"):
continue
parts = line.strip().split(" ")
ret.append(Symbol(full_name=parts[0], type=parts[1],
filename="%s#%s" % (parts[2], parts[0])))
return ret
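# Editorial note: LoadSphinxIndex expects a plain-text inventory where
# each non-comment line is "<full_name> <type> <file>", e.g. a line
#
#   os.path.join function library/os.path.html
#
# becomes Symbol(full_name="os.path.join", type="function",
# filename="library/os.path.html#os.path.join"). Recent Sphinx
# versions zlib-compress objects.inv, so such a file would need to be
# decompressed before this parser can read it.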
class Element(object):
def __init__(self, builder, name, args=None):
if args is None:
args = {}
self.builder = builder
self.name = name
self.args = args
def __enter__(self):
self.builder.start(self.name, self.args)
def __exit__(self, _exc_type, _exc_value, _traceback):
self.builder.end(self.name)
def Data(builder, element_name, data=None, args=None):
if args is None:
args = {}
builder.start(element_name, args)
if data is not None:
builder.data(data)
builder.end(element_name)
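# Illustrative sketch (editorial, never called by the tool): the
# Element context manager pairs builder.start()/builder.end() calls,
# so nested XML can be emitted without manually balancing tags.
def _example_build_xml():
    builder = ElementTree.TreeBuilder()
    with Element(builder, "QtHelpProject", {"version": "1.0"}):
        Data(builder, "namespace", "example.namespace")
    # Serializes (shown wrapped) to:
    #   <QtHelpProject version="1.0">
    #   <namespace>example.namespace</namespace></QtHelpProject>
    return ElementTree.tostring(builder.close())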
def WriteQhp(symbols, files, qhp_filename):
builder = ElementTree.TreeBuilder()
with Element(builder, "QtHelpProject", {"version": "1.0"}):
Data(builder, "namespace", NAMESPACE)
Data(builder, "virtualFolder", "doc")
with Element(builder, "customFilter", {"name": FILTER_NAME}):
Data(builder, "filterAttribute", FILTER_ID)
Data(builder, "filterAttribute", VERSION)
with Element(builder, "filterSection"):
Data(builder, "filterAttribute", FILTER_ID)
Data(builder, "filterAttribute", VERSION)
with Element(builder, "toc"):
pass
with Element(builder, "keywords"):
for sym in symbols:
Data(builder, "keyword", args={
"name": sym.full_name,
"id": sym.full_name,
"ref": sym.filename
})
with Element(builder, "files"):
for filename in files:
Data(builder, "file", filename)
with open(qhp_filename, "w") as handle:
handle.write(ElementTree.tostring(builder.close()))
def WriteQhcp(qhp_filenames, qch_filenames, qhcp_filename):
builder = ElementTree.TreeBuilder()
with Element(builder, "QHelpCollectionProject", {"version": "1.0"}):
with Element(builder, "docFiles"):
with Element(builder, "generate"):
for i, filename in enumerate(qhp_filenames):
with Element(builder, "file"):
Data(builder, "input", filename)
Data(builder, "output", qch_filenames[i])
with Element(builder, "register"):
for filename in qch_filenames:
Data(builder, "file", filename)
with open(qhcp_filename, "w") as handle:
handle.write(ElementTree.tostring(builder.close()))
def GetFileList(path):
ret = []
for root, _dirnames, filenames in os.walk(path):
for filename in filenames:
if os.path.splitext(filename)[1] in EXTENSIONS:
ret.append(os.path.relpath(os.path.join(root, filename), path))
return ret
def AdjustSphinxConf(filename):
contents = open(filename).read()
contents += '\nhtml_theme="sphinx-theme"' \
'\nhtml_theme_path=["%s"]\n' % \
os.path.join(os.path.dirname(__file__))
contents = re.sub(r'html_use_opensearch .*', '', contents)
open(filename, 'w').write(contents)
def main(args):
parser = argparse.ArgumentParser(
description="Builds a Qt Help file from Sphinx documentation")
parser.add_argument("--sphinx-dir", required=True, help="directory containing objects.inv")
parser.add_argument("--qhp", required=True, help=".qhp output filename")
parser.add_argument("--qhcp", required=True, help=".qhcp output filename")
args = parser.parse_args(args)
qhp = [args.qhp]
qch = [os.path.splitext(x)[0] + ".qch" for x in qhp]
qhc = os.path.splitext(args.qhcp)[0] + ".qhc"
# Edit the conf.py to use our minimal theme
conf_py = os.path.join(args.sphinx_dir, "conf.py")
    AdjustSphinxConf(conf_py)
# Build the docs
subprocess.check_call(["make", "html"], cwd=args.sphinx_dir)
sphinx_output = os.path.join(args.sphinx_dir, "build/html")
# Read symbols from the objects.inv
symbols = LoadSphinxIndex(os.path.join(sphinx_output, "objects.inv"))
# Get the list of files to include
files = GetFileList(sphinx_output)
# Create the output files
for filename in qhp:
WriteQhp(symbols, files, filename)
WriteQhcp(qhp, qch, args.qhcp)
print "Now run:"
print " qcollectiongenerator %s -o %s" % (args.qhcp, qhc)
print " assistant-qt4 -collectionFile %s" % qhc
if __name__ == "__main__":
main(sys.argv[1:])
|
yarolig/pyqtc
|
tools/sphinx2qhcp.py
|
Python
|
gpl-3.0
| 4,974
|
# -*- coding: utf-8 -*-
# Copyright (C) 2016-2018 Mathew Topper
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Created on Thu Apr 23 12:51:14 2015
.. moduleauthor:: Mathew Topper <mathew.topper@dataonlygreater.com>
"""
import os
import sys
import json
import shutil
import logging
import tarfile
import tempfile
import traceback
from collections import namedtuple
import sip
import pandas as pd
import matplotlib.pyplot as plt
from win32event import CreateMutex
from PyQt4 import QtGui, QtCore
from dtocean_core.menu import ProjectMenu, ModuleMenu, ThemeMenu, DataMenu
from dtocean_core.pipeline import set_output_scope
from dtocean_core.utils.database import (database_from_files,
database_to_files,
filter_map,
get_database,
get_table_map)
from . import get_log_dir
from .help import HelpWidget
from .menu import DBSelector
from .simulation import SimulationDock
from .extensions import GUIStrategyManager, GUIToolManager
from .pipeline import (PipeLine,
SectionControl,
HubControl,
InputBranchControl,
OutputBranchControl,
InputVarControl,
OutputVarControl)
from .widgets.central import (ContextArea,
DetailsWidget,
FileManagerWidget,
PlotManagerWidget,
LevelComparison,
SimulationComparison)
from .widgets.dialogs import (DataCheck,
MainWindow,
ProjProperties,
Shuttle,
ProgressBar,
About)
from .widgets.display import (MPLWidget,
get_current_filetypes,
save_current_figure)
from .widgets.docks import LogDock
# Set up logging
module_logger = logging.getLogger(__name__)
class ThreadReadRaw(QtCore.QThread):
"""QThread for reading raw data"""
taskFinished = QtCore.pyqtSignal()
error_detected = QtCore.pyqtSignal(object, object, object)
def __init__(self, shell, variable, value):
super(ThreadReadRaw, self).__init__()
self._shell = shell
self._variable = variable
self._value = value
return
def run(self):
try:
self._variable.set_raw_interface(self._shell.core,
self._value)
self._variable.read(self._shell.core,
self._shell.project)
self.taskFinished.emit()
except:
etype, evalue, etraceback = sys.exc_info()
self.error_detected.emit(etype, evalue, etraceback)
self.taskFinished.emit()
return
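# Editorial note: the worker threads in this module share a contract.
# run() wraps the real work in try/except, relays any exception to the
# GUI thread as an (etype, evalue, etraceback) triple through
# error_detected, and (ThreadTool aside) always emits taskFinished so
# callers can clean up regardless of outcome.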
class ThreadReadTest(QtCore.QThread):
"""QThread for reading test data"""
taskFinished = QtCore.pyqtSignal()
error_detected = QtCore.pyqtSignal(object, object, object)
    def __init__(self, shell, control, path, overwrite):
super(ThreadReadTest, self).__init__()
self.shell = shell
self.control = control
self.path = path
self.overwrite = overwrite
return
def run(self):
try:
self.control._read_test_data(self.shell,
self.path,
self.overwrite)
self.taskFinished.emit()
except:
etype, evalue, etraceback = sys.exc_info()
self.error_detected.emit(etype, evalue, etraceback)
self.taskFinished.emit()
return
class ThreadOpen(QtCore.QThread):
"""QThread for opening save files"""
taskFinished = QtCore.pyqtSignal()
error_detected = QtCore.pyqtSignal(object, object, object)
def __init__(self, core, file_path):
super(ThreadOpen, self).__init__()
self._core = core
self._file_path = file_path
self._project = None
self._current_scope = None
self._strategy = None
self._project_path = None
return
def run(self):
try:
load_path = str(self._file_path)
dto_dir_path = None
prj_file_path = None
sco_file_path = None
stg_file_path = None
# Check the extension
if os.path.splitext(load_path)[1] == ".dto":
dto_dir_path = tempfile.mkdtemp()
tar = tarfile.open(load_path)
tar.extractall(dto_dir_path)
prj_file_path = os.path.join(dto_dir_path, "project.prj")
sco_file_path = os.path.join(dto_dir_path, "scope.json")
stg_file_path = os.path.join(dto_dir_path, "strategy.pkl")
if not os.path.isfile(stg_file_path): stg_file_path = None
elif os.path.splitext(load_path)[1] == ".prj":
prj_file_path = load_path
else:
errStr = ("The file path must be a file with either .dto or "
".prj extension")
raise ValueError(errStr)
# Load up the project
load_project = self._core.load_project(prj_file_path)
self._project = load_project
# Load up the scope if one was found
if sco_file_path is not None:
with open(sco_file_path, 'rb') as json_file:
self._current_scope = json.load(json_file)
else:
self._current_scope = "global"
# Load up the strategy if one was found
if stg_file_path is not None:
strategy_manager = GUIStrategyManager()
self._strategy = strategy_manager.load_strategy(stg_file_path)
else:
self._strategy = None
# Record the path after a successful load
self._project_path = load_path
# Delete temp directory
if dto_dir_path is not None: shutil.rmtree(dto_dir_path)
self.taskFinished.emit()
except:
etype, evalue, etraceback = sys.exc_info()
self.error_detected.emit(etype, evalue, etraceback)
self.taskFinished.emit()
return
class ThreadSave(QtCore.QThread):
"""QThread for saving files"""
taskFinished = QtCore.pyqtSignal()
error_detected = QtCore.pyqtSignal(object, object, object)
def __init__(self, core,
project,
save_path,
current_scope,
strategy):
super(ThreadSave, self).__init__()
self._core = core
self._project = project
self._save_path = save_path
self._current_scope = current_scope
self._strategy = strategy
return
def run(self):
try:
if self._save_path is None:
errStr = ("A file path must be provided in order to save a "
"project")
raise ValueError(errStr)
# Check the extension
if os.path.splitext(self._save_path)[1] not in [".dto", ".prj"]:
errStr = ("The file path must be a file with either .dto or "
".prj extension")
raise ValueError(errStr)
dto_dir_path = tempfile.mkdtemp()
# Dump the project
prj_file_path = os.path.join(dto_dir_path, "project.prj")
self._core.dump_project(self._project, prj_file_path)
# If saving a project file only
if os.path.splitext(self._save_path)[1] == ".prj":
shutil.move(prj_file_path, self._save_path)
shutil.rmtree(dto_dir_path)
self.taskFinished.emit()
return
# Dump the output scope
sco_file_path = os.path.join(dto_dir_path, "scope.json")
with open(sco_file_path, 'wb') as json_file:
json.dump(self._current_scope, json_file)
# Set the standard archive contents
arch_files = [prj_file_path, sco_file_path]
arch_paths = ["project.prj", "scope.json"]
# Dump the strategy (if there is one)
if self._strategy is not None:
strategy_manager = GUIStrategyManager()
stg_file_path = os.path.join(dto_dir_path, "strategy.pkl")
strategy_manager.dump_strategy(self._strategy, stg_file_path)
arch_files.append(stg_file_path)
arch_paths.append("strategy.pkl")
# Now tar the files together
dto_file_name = os.path.split(self._save_path)[1]
tar_file_name = "{}.tar".format(dto_file_name)
archive = tarfile.open(tar_file_name, "w")
for arch_file, arch_path in zip(arch_files, arch_paths):
archive.add(arch_file, arcname=arch_path)
archive.close()
shutil.move(tar_file_name, self._save_path)
shutil.rmtree(dto_dir_path)
self.taskFinished.emit()
except:
etype, evalue, etraceback = sys.exc_info()
self.error_detected.emit(etype, evalue, etraceback)
self.taskFinished.emit()
return
class ThreadDataFlow(QtCore.QThread):
"""QThread for initiating the dataflow"""
taskFinished = QtCore.pyqtSignal()
error_detected = QtCore.pyqtSignal(object, object, object)
def __init__(self, pipeline, shell):
super(ThreadDataFlow, self).__init__()
self.pipeline = pipeline
self.shell = shell
self.project_menu = ProjectMenu()
return
def run(self):
try:
# Activate modules and themes
self.shell.activate_module_queue()
self.shell.activate_theme_queue()
# Check if filters can be initiated
if ("Database Filtering Interface" in
self.shell.project_menu.get_active(self.shell.core,
self.shell.project)):
self.project_menu.initiate_filter(self.shell.core,
self.shell.project)
self.project_menu.initiate_dataflow(self.shell.core,
self.shell.project)
# Execute the project boundaries interface
if ("Project Boundaries Interface" in
self.shell.project_menu.get_active(self.shell.core,
self.shell.project)):
self.shell.project_menu._execute(
self.shell.core,
self.shell.project,
"Project Boundaries Interface")
self.pipeline._read_auto(self.shell)
self.taskFinished.emit()
except:
etype, evalue, etraceback = sys.exc_info()
self.error_detected.emit(etype, evalue, etraceback)
self.taskFinished.emit()
return
class ThreadCurrent(QtCore.QThread):
"""QThread for executing the current module"""
taskFinished = QtCore.pyqtSignal()
error_detected = QtCore.pyqtSignal(object, object, object)
def __init__(self, core, project):
super(ThreadCurrent, self).__init__()
self._core = core
self._project = project
self._module_menu = ModuleMenu()
return
def run(self):
try:
# Block signals
self._core.blockSignals(True)
self._project.blockSignals(True)
self._module_menu.execute_current(self._core,
self._project)
# Reinstate signals and emit
self._core.blockSignals(False)
self._project.blockSignals(False)
self.taskFinished.emit()
except:
etype, evalue, etraceback = sys.exc_info()
self.error_detected.emit(etype, evalue, etraceback)
# Reinstate signals and emit
self._core.blockSignals(False)
self._project.blockSignals(False)
self.taskFinished.emit()
return
class ThreadThemes(QtCore.QThread):
"""QThread for executing all themes"""
taskFinished = QtCore.pyqtSignal()
error_detected = QtCore.pyqtSignal(object, object, object)
def __init__(self, core, project):
super(ThreadThemes, self).__init__()
self._core = core
self._project = project
self._theme_menu = ThemeMenu()
return
def run(self):
try:
# Block signals
self._core.blockSignals(True)
self._project.blockSignals(True)
self._theme_menu.execute_all(self._core,
self._project)
# Reinstate signals and emit
self._core.blockSignals(False)
self._project.blockSignals(False)
self.taskFinished.emit()
except:
etype, evalue, etraceback = sys.exc_info()
self.error_detected.emit(etype, evalue, etraceback)
# Reinstate signals and emit
self._core.blockSignals(False)
self._project.blockSignals(False)
self.taskFinished.emit()
return
class ThreadStrategy(QtCore.QThread):
"""QThread for executing a strategy"""
taskFinished = QtCore.pyqtSignal()
error_detected = QtCore.pyqtSignal(object, object, object)
def __init__(self, core, project, strategy):
super(ThreadStrategy, self).__init__()
self._core = core
self._project = project
self._strategy = strategy
return
def run(self):
try:
# Block signals
self._core.blockSignals(True)
self._project.blockSignals(True)
self._strategy.execute(self._core,
self._project)
# Reinstate signals and emit
self._core.blockSignals(False)
self._project.blockSignals(False)
self.taskFinished.emit()
except:
etype, evalue, etraceback = sys.exc_info()
self.error_detected.emit(etype, evalue, etraceback)
# Reinstate signals and emit
self._core.blockSignals(False)
self._project.blockSignals(False)
self.taskFinished.emit()
return
class ThreadTool(QtCore.QThread):
"""QThread for executing dtocean-wec"""
error_detected = QtCore.pyqtSignal(object, object, object)
def __init__(self, core, project, tool):
super(ThreadTool, self).__init__()
self._tool = tool
self._core = core
self._project = project
self._tool_manager = GUIToolManager()
return
def run(self):
try:
self._tool_manager.execute_tool(self._core,
self._project,
self._tool)
except:
etype, evalue, etraceback = sys.exc_info()
self.error_detected.emit(etype, evalue, etraceback)
return
class ThreadDump(QtCore.QThread):
"""QThread for executing database dump"""
taskFinished = QtCore.pyqtSignal()
error_detected = QtCore.pyqtSignal(object, object, object)
def __init__(self, credentials, root_path, selected):
super(ThreadDump, self).__init__()
self._credentials = credentials
self._root_path = root_path
self._selected = selected
return
def run(self):
try:
db = get_database(self._credentials, timeout=60)
table_list = get_table_map()
# Filter the table if required
selected = str(self._selected).lower()
if selected != "all":
filtered_dict = filter_map(table_list, selected)
table_list = [filtered_dict]
# make a directory if required
root_path = str(self._root_path)
if not os.path.exists(root_path): os.makedirs(root_path)
database_to_files(root_path,
table_list,
db,
print_function=module_logger.info)
self.taskFinished.emit()
except:
etype, evalue, etraceback = sys.exc_info()
self.error_detected.emit(etype, evalue, etraceback)
self.taskFinished.emit()
return
class ThreadLoad(QtCore.QThread):
"""QThread for executing database dump"""
taskFinished = QtCore.pyqtSignal()
error_detected = QtCore.pyqtSignal(object, object, object)
def __init__(self, credentials, root_path, selected):
super(ThreadLoad, self).__init__()
self._credentials = credentials
self._root_path = root_path
self._selected = selected
return
def run(self):
try:
db = get_database(self._credentials, timeout=60)
table_list = get_table_map()
# Filter the table if required
selected = str(self._selected).lower()
if selected != "all":
filtered_dict = filter_map(table_list, selected)
table_list = [filtered_dict]
database_from_files(str(self._root_path),
table_list,
db,
print_function=module_logger.info)
self.taskFinished.emit()
except:
etype, evalue, etraceback = sys.exc_info()
self.error_detected.emit(etype, evalue, etraceback)
self.taskFinished.emit()
return
class ThreadScope(QtCore.QThread):
"""QThread for setting the output scope"""
taskFinished = QtCore.pyqtSignal()
error_detected = QtCore.pyqtSignal(object, object, object)
def __init__(self, core, project, scope):
super(ThreadScope, self).__init__()
self._core = core
self._project = project
self._scope = scope
return
def run(self):
try:
# Block signals
self._core.blockSignals(True)
self._project.blockSignals(True)
# Switch the output scope on all simulations
for sim_idx in xrange(len(self._project)):
set_output_scope(self._core,
self._project,
self._scope,
sim_index=sim_idx)
# Reinstate signals and emit
self._core.blockSignals(False)
self._project.blockSignals(False)
self.taskFinished.emit()
except:
etype, evalue, etraceback = sys.exc_info()
self.error_detected.emit(etype, evalue, etraceback)
# Reinstate signals and emit
self._core.blockSignals(False)
self._project.blockSignals(False)
self.taskFinished.emit()
return
class Shell(QtCore.QObject):
# Signals
project_activated = QtCore.pyqtSignal()
project_title_change = QtCore.pyqtSignal(str)
project_saved = QtCore.pyqtSignal()
project_closed = QtCore.pyqtSignal()
strategy_loaded = QtCore.pyqtSignal(object)
modules_queued = QtCore.pyqtSignal()
themes_queued = QtCore.pyqtSignal()
update_pipeline = QtCore.pyqtSignal(object)
update_scope = QtCore.pyqtSignal(str)
reset_widgets = QtCore.pyqtSignal()
update_run_action = QtCore.pyqtSignal()
database_updated = QtCore.pyqtSignal(str)
pipeline_active = QtCore.pyqtSignal()
bathymetry_active = QtCore.pyqtSignal()
filter_active = QtCore.pyqtSignal()
dataflow_active = QtCore.pyqtSignal()
module_executed = QtCore.pyqtSignal()
themes_executed = QtCore.pyqtSignal()
strategy_executed = QtCore.pyqtSignal()
strategy_completed = QtCore.pyqtSignal()
database_convert_active = QtCore.pyqtSignal()
database_convert_complete = QtCore.pyqtSignal()
def __init__(self, core):
super(Shell, self).__init__()
self.core = None
self.project_menu = None
self.module_menu = None
self.theme_menu = None
self.data_menu = None
self.project = None
self.project_path = None
self.project_unsaved = True
self.strategy = None
self.queued_interfaces = {"modules": None,
"themes": None}
self._active_thread = None
self._current_scope = None
self.core = self._init_core(core)
self.project_menu = self._init_project_menu()
self.module_menu = self._init_module_menu()
self.theme_menu = self._init_theme_menu()
self.data_menu = self._init_data_menu()
# Clean up after thread execution
self.database_convert_complete.connect(self._clear_active_thread)
self.dataflow_active.connect(self._clear_active_thread)
self.module_executed.connect(self._finalize_core)
self.themes_executed.connect(self._finalize_core)
self.strategy_executed.connect(self._finalize_project)
return
def _init_core(self, core):
# Relay status updated signal
core.status_updated.connect(self._emit_update_pipeline)
core.status_updated.connect(
lambda: self.reset_widgets.emit())
# Relay pipeline reset signal
core.pipeline_reset.connect(
lambda: self.update_run_action.emit())
return core
def _init_project_menu(self):
return ProjectMenu()
def _init_module_menu(self):
return ModuleMenu()
def _init_theme_menu(self):
return ThemeMenu()
def _init_data_menu(self):
return DataMenu()
def set_project_title(self, title):
self.project.title = title
self.project_title_change.emit(title)
return
def get_available_modules(self):
available_modules = self.module_menu.get_available(self.core,
self.project)
return available_modules
def get_active_modules(self):
if self.queued_interfaces["modules"] is not None:
active_modules = self.queued_interfaces["modules"]
else:
active_modules = self.module_menu.get_active(self.core,
self.project)
return active_modules
def get_current_module(self):
module_name = self.module_menu.get_current(self.core,
self.project)
return module_name
def get_scheduled_modules(self):
module_names = self.module_menu.get_scheduled(self.core,
self.project)
return module_names
def get_completed_modules(self):
module_names = self.module_menu.get_completed(self.core,
self.project)
return module_names
def get_available_themes(self):
available_themes = self.theme_menu.get_available(self.core,
self.project)
return available_themes
def get_active_themes(self):
if self.queued_interfaces["themes"] is not None:
active_themes = self.queued_interfaces["themes"]
else:
active_themes = self.theme_menu.get_active(self.core,
self.project)
return active_themes
def get_scheduled_themes(self):
module_names = self.theme_menu.get_scheduled(self.core,
self.project)
return module_names
@QtCore.pyqtSlot()
def new_project(self, title="Untitled project"):
self.project = self.project_menu.new_project(self.core, title)
self.project_path = None
# Update the active project
self.project_activated.emit()
# Relay active simulation change
self.project.active_index_changed.connect(self._emit_update_pipeline)
self.project.active_index_changed.connect(
lambda: self.reset_widgets.emit())
self.project.active_index_changed.connect(
lambda: self.update_run_action.emit())
self._current_scope = "global"
# Update the scope widget
self.update_scope.emit(self._current_scope)
return
@QtCore.pyqtSlot(str)
def open_project(self, file_path):
self._active_thread = ThreadOpen(self.core,
file_path)
self._active_thread.taskFinished.connect(self._finalize_open_project)
self._active_thread.start()
return
# @QtCore.pyqtSlot(str)
# def save_project(self, file_path=None):
# """An example of profiling"""
# import cProfile
# cProfile.runctx("self.save_project_(file_path)",
# globals(),
# locals(),
# "profile.stat")
@QtCore.pyqtSlot(str)
def save_project(self, file_path=None):
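        """Save the project on a worker thread; only one worker thread
        is held, so any running thread is joined first."""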
if self._active_thread is not None: self._active_thread.wait()
if file_path is None:
save_path = self.project_path
else:
save_path = str(file_path)
self._active_thread = ThreadSave(self.core,
self.project,
save_path,
self._current_scope,
self.strategy)
self._active_thread.taskFinished.connect(self._finalize_save_project)
self._active_thread.start()
return
@QtCore.pyqtSlot()
def close_project(self):
if self._active_thread is not None: self._active_thread.wait()
self.project = None
self.project_path = None
self.strategy = None
self.project_closed.emit()
self.project_title_change.emit("")
self.database_updated.emit("None")
self.update_pipeline.disconnect()
return
@QtCore.pyqtSlot(str, str)
def set_simulation_title(self, old_title, new_title):
if self._active_thread is not None: self._active_thread.wait()
if old_title == new_title: return
msg = "Changing title of simulation {} to {}".format(old_title,
new_title)
module_logger.debug(msg)
current_sim_titles = self.project.get_simulation_titles()
if new_title in current_sim_titles:
logMsg = ("Simulation title '{}' is already in list of current "
"titles").format(new_title)
module_logger.error(logMsg)
# Reset the list in the simulation dock
self.project.sims_updated.emit()
            # Simulation dock needs to be informed which item is active
            # after the reset
active_sim_title = self.project.get_simulation_title()
self.project.active_title_changed.emit(active_sim_title)
else:
self.project.set_simulation_title(new_title, title=old_title)
return
@QtCore.pyqtSlot(str)
def set_active_simulation(self, title):
if self._active_thread is not None: self._active_thread.wait()
msg = "Setting simulation '{}' as active".format(title)
module_logger.debug(msg)
self.project.set_active_index(title=title)
return
@QtCore.pyqtSlot(str, dict)
def select_database(self, identifier, credentials):
if identifier is None: identifier = "Unnamed"
self.data_menu.select_database(self.project,
credentials=credentials)
self.database_updated.emit(identifier)
return
@QtCore.pyqtSlot()
def deselect_database(self):
self.data_menu.deselect_database(self.project)
self.database_updated.emit("None")
return
@QtCore.pyqtSlot(str, str, dict)
def dump_database(self, root_path, selected, credentials):
if self._active_thread is not None: self._active_thread.wait()
self._active_thread = ThreadDump(credentials, root_path, selected)
self._active_thread.start()
self.database_convert_active.emit()
self._active_thread.taskFinished.connect(
lambda: self.database_convert_complete.emit())
return
@QtCore.pyqtSlot(str, str, dict)
def load_database(self, root_path, selected, credentials):
if self._active_thread is not None: self._active_thread.wait()
self._active_thread = ThreadLoad(credentials, root_path, selected)
self._active_thread.start()
self.database_convert_active.emit()
self._active_thread.taskFinished.connect(
lambda: self.database_convert_complete.emit())
return
@QtCore.pyqtSlot()
def initiate_pipeline(self):
self.project_menu.initiate_pipeline(self.core, self.project)
sites_available = self.core.has_data(self.project,
"hidden.available_sites")
systems_available = self.core.has_data(self.project,
"hidden.available_systems")
if sites_available or systems_available:
self.project_menu.initiate_options(self.core, self.project)
if sites_available: self.filter_active.emit()
self.pipeline_active.emit()
return
@QtCore.pyqtSlot()
def initiate_bathymetry(self):
self.project_menu.initiate_bathymetry(self.core, self.project)
self.bathymetry_active.emit()
return
@QtCore.pyqtSlot(list)
def queue_module_list(self, module_list):
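        """Queue modules for activation, preserving the order in which
        they appear in the available modules list."""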
all_mods = self.module_menu.get_available(self.core, self.project)
ordered_mods = [x for x in all_mods if x in module_list]
self.queued_interfaces["modules"] = ordered_mods
self.modules_queued.emit()
return
@QtCore.pyqtSlot(list)
def queue_theme_list(self, theme_list):
all_themes = self.theme_menu.get_available(self.core, self.project)
ordered_themes = [x for x in all_themes if x in theme_list]
self.queued_interfaces["themes"] = ordered_themes
self.themes_queued.emit()
return
def activate_module_queue(self):
if self.queued_interfaces["modules"] is None: return
active_mods = self.module_menu.get_active(self.core, self.project)
for module_name in self.queued_interfaces["modules"]:
if module_name not in active_mods:
self.module_menu.activate(self.core,
self.project,
module_name)
self.queued_interfaces["modules"] = None
return
def activate_theme_queue(self):
if self.queued_interfaces["themes"] is None: return
active_themes = self.theme_menu.get_active(self.core, self.project)
for theme_name in self.queued_interfaces["themes"]:
if theme_name not in active_themes:
self.theme_menu.activate(self.core,
self.project,
theme_name)
self.queued_interfaces["themes"] = None
return
@QtCore.pyqtSlot(object)
def select_strategy(self, strategy):
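        """Store the selected strategy and mark the variables it
        controls as unavailable in the active simulation (or clear
        them if the strategy is None)."""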
if self._active_thread is not None: self._active_thread.wait()
if strategy is None:
logMsg = "Null strategy detected"
else:
logMsg = "Strategy {} detected".format(strategy.get_name())
module_logger.debug(logMsg)
self.strategy = strategy
simulation = self.project.get_simulation()
if strategy is None:
simulation.set_unavailable_variables(None)
else:
self.strategy.strategy_run = True
force_unavailable = self.strategy.get_variables()
simulation.set_unavailable_variables(force_unavailable)
self.core.set_interface_status(self.project)
self.update_run_action.emit()
return
@QtCore.pyqtSlot(object)
def initiate_dataflow(self, pipeline):
if self._active_thread is not None: self._active_thread.wait()
self._active_thread = ThreadDataFlow(pipeline,
self)
self._active_thread.taskFinished.connect(
lambda: self.dataflow_active.emit())
self._active_thread.start()
return
@QtCore.pyqtSlot(str, bool)
def export_data(self, file_path, mask_outputs=False):
self.data_menu.export_data(self.core,
self.project,
str(file_path),
bool(mask_outputs))
return
@QtCore.pyqtSlot(str, bool)
def import_data(self, file_path, skip_satisfied=False):
if self._active_thread is not None: self._active_thread.wait()
self.data_menu.import_data(self.core,
self.project,
str(file_path),
bool(skip_satisfied))
return
@QtCore.pyqtSlot(object, str, str)
def read_file(self, variable, interface_name, file_path):
if self._active_thread is not None: self._active_thread.wait()
variable.read_file(self.core,
self.project,
str(file_path),
str(interface_name))
return
@QtCore.pyqtSlot(object, str, str)
def write_file(self, variable, interface_name, file_path):
variable.write_file(self.core,
self.project,
str(file_path),
str(interface_name))
return
def read_raw(self, variable, value):
if self._active_thread is not None: self._active_thread.wait()
self._active_thread = ThreadReadRaw(self,
variable,
value)
self._active_thread.taskFinished.connect(self._clear_active_thread)
self._active_thread.start()
return
def read_test_data(self, control, path, overwrite):
if self._active_thread is not None: self._active_thread.wait()
self._active_thread = ThreadReadTest(self,
control,
path,
overwrite)
self._active_thread.taskFinished.connect(self._clear_active_thread)
self._active_thread.start()
return
@QtCore.pyqtSlot()
def execute_current(self):
if self._active_thread is not None: self._active_thread.wait()
self._active_thread = ThreadCurrent(self.core,
self.project)
self._active_thread.taskFinished.connect(
lambda: self.module_executed.emit())
self._active_thread.start()
return
@QtCore.pyqtSlot()
def execute_themes(self):
if self._active_thread is not None: self._active_thread.wait()
self._active_thread = ThreadThemes(self.core,
self.project)
self._active_thread.taskFinished.connect(
lambda: self.themes_executed.emit())
self._active_thread.start()
return
@QtCore.pyqtSlot()
def execute_strategy(self):
if self.strategy is None: return
if self._active_thread is not None: self._active_thread.wait()
self._active_thread = ThreadStrategy(self.core,
self.project,
self.strategy)
self._active_thread.taskFinished.connect(
lambda: self.strategy_executed.emit())
self._active_thread.start()
return
@QtCore.pyqtSlot(str)
def set_output_scope(self, scope):
if self._active_thread is not None: self._active_thread.wait()
self._active_thread = ThreadScope(self.core,
self.project,
scope)
self._active_thread.taskFinished.connect(
lambda: self._finalize_scope(scope))
self._active_thread.start()
return
@QtCore.pyqtSlot()
def _finalize_open_project(self):
self.project = self._active_thread._project
self.project_path = self._active_thread._project_path
self.strategy = self._active_thread._strategy
self._current_scope = self._active_thread._current_scope
self.project_title_change.emit(self.project.title)
# Relay active simulation change
self.project.active_index_changed.connect(self._emit_update_pipeline)
self.project.active_index_changed.connect(
lambda: self.reset_widgets.emit())
self.project.active_index_changed.connect(
lambda: self.update_run_action.emit())
# Relay strategy change
if self.strategy is not None:
self.strategy_loaded.emit(self.strategy)
# Update the scope widget
self.update_scope.emit(self._current_scope)
# Release the active thread
self._clear_active_thread()
return
@QtCore.pyqtSlot()
def _finalize_save_project(self):
self.project_path = self._active_thread._save_path
self.project_saved.emit()
# Release the active thread
self._clear_active_thread()
return
@QtCore.pyqtSlot()
def _finalize_project(self):
# Emit signals on project
self.project.sims_updated.emit()
self.project.active_index_changed.emit()
active_sim_title = self.project.get_simulation_title()
if active_sim_title is not None:
self.project.active_title_changed.emit(active_sim_title)
        # Ascertain whether the strategy can be released
self.strategy.strategy_run = self.strategy.allow_rerun
# If the strategy is no longer active release the hidden variables
if not self.strategy.strategy_run:
            for sim in self.project._simulations:
                sim.set_unavailable_variables()
self.strategy_completed.emit()
# Emit signals on core
self._finalize_core()
return
@QtCore.pyqtSlot(str)
def _finalize_scope(self, scope):
# Record the scope
self._current_scope = scope
# Emit signals on core
self._finalize_core()
return
@QtCore.pyqtSlot()
def _finalize_core(self):
# Update the interface status
self.core.set_interface_status(self.project)
# Release the active thread
self._clear_active_thread()
return
@QtCore.pyqtSlot()
def _clear_active_thread(self):
if self._active_thread is None: return
self._active_thread.wait()
self._active_thread = None
return
@QtCore.pyqtSlot(object)
def _emit_update_pipeline(self):
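        # Bundle the core and project into a lightweight named tuple so
        # that slots receive both through a single object-type signal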
Husk = namedtuple('Husk', ['core', 'project'])
husk = Husk(self.core, self.project)
self.update_pipeline.emit(husk)
return
class DTOceanWindow(MainWindow):
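    """Main application window: builds the context areas, docks,
    dialogs and menus, and wires them to the signals exposed by the
    given Shell."""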
def __init__(self, shell, debug=False):
super(DTOceanWindow, self).__init__()
        # Create a named Windows mutex, allowing other processes to
        # detect this instance
self._mutexname = "mutex_{AEF365BF-44B8-41E8-9906-4D1BADEE42E0}"
self._mutex = CreateMutex(None, False, self._mutexname)
# Context Area
self._data_context = None
self._plot_context = None
self._comp_context = None
# Details widgets
self._data_details = None
self._plot_details = None
# Dialogs
self._project_properties = None
self._data_check = None
self._module_shuttle = None
self._assessment_shuttle = None
self._db_selector = None
self._strategy_manager = None
self._help = None
self._progress = None
self._about = None
# Docks
self._pipeline_dock = None
self._simulation_dock = None
self._system_dock = None
# Widget re-use
self._last_tree_controller = None
self._last_data_controller = None
self._last_data_controller_status = None
self._last_plot_id = None
self._last_plot_name = "auto"
self._force_plot = False
# Last used stack index
self._last_stack_index = None
# Threads
self._thread_tool = None
# Tools
self._tool_manager = None
# Redirect excepthook
if not debug: sys.excepthook = self._display_error
# Init Shell
self._shell = self._init_shell(shell)
# Init context areas
self._init_context()
# Init dialogs
self._init_shuttles()
self._init_dialogs()
# Initiate docks
self._init_pipeline_dock()
self._init_simulation_dock()
self._init_system_dock(debug)
# Initiate menus
self._init_file_menu()
self._init_sim_menu()
self._init_data_menu()
self._init_view_menu(debug)
self._init_tools_menu()
self._init_help_menu()
return
def _init_shell(self, shell):
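        """Connect the shell's signals to the window's UI switching
        slots, including the project saved and unsaved title updates."""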
shell.project_activated.connect(self._active_project_ui_switch)
shell.project_closed.connect(self._closed_project_ui_switch)
shell.reset_widgets.connect(
lambda: self._set_context_widget(self._last_tree_controller, True))
shell.pipeline_active.connect(self._active_pipeline_ui_switch)
shell.bathymetry_active.connect(self._active_bathymetry_ui_switch)
shell.filter_active.connect(self._active_filter_ui_switch)
shell.dataflow_active.connect(self._active_dataflow_ui_switch)
shell.update_run_action.connect(self._run_action_ui_switch)
shell.module_executed.connect(self._run_action_ui_switch)
shell.themes_executed.connect(self._run_action_ui_switch)
shell.strategy_executed.connect(self._run_action_ui_switch)
shell.strategy_executed.connect(
lambda: self.stackedWidget.setCurrentIndex(self._last_stack_index))
shell.update_scope.connect(self._current_scope_ui_switch)
# Collect all saved and unsaved signals
shell.project_title_change.connect(self._set_project_unsaved)
shell.project_activated.connect(self._set_project_unsaved)
shell.reset_widgets.connect(self._set_project_unsaved)
shell.update_run_action.connect(self._set_project_unsaved)
shell.project_saved.connect(self._set_project_saved)
return shell
def _init_context(self):
# Blank context
blank_widget = QtGui.QWidget(self)
self.stackedWidget.addWidget(blank_widget)
# Data context
self._data_context = ContextArea(self)
self.stackedWidget.addWidget(self._data_context)
# Plot context
self._plot_context = ContextArea(self)
self.stackedWidget.addWidget(self._plot_context)
# Comparison context
self._comp_context = ContextArea(self)
self._comp_context._top_left.setMaximumWidth(16777215)
self._comp_context._top_right.setMinimumWidth(320)
self.stackedWidget.addWidget(self._comp_context)
# Collect the input widget parent
self._shell.core.set_input_parent(self._data_context._bottom)
return
def _init_shuttles(self):
# Set up the module shuttle widget
self._module_shuttle = Shuttle(self, "Add Modules...")
self._module_shuttle.list_updated.connect(
self._shell.queue_module_list)
# Set up the assessment shuttle widget
self._assessment_shuttle = Shuttle(self, "Add Assessment...")
self._assessment_shuttle.list_updated.connect(
self._shell.queue_theme_list)
return
def _init_dialogs(self):
# Set up project properties dialog
self._project_properties = ProjProperties(self)
self._project_properties.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.connect(
self._set_project_title)
# Set up the database selection dialog
self._db_selector = DBSelector(self, self._shell.data_menu)
self._db_selector.database_selected.connect(
self._shell.select_database)
self._db_selector.database_deselected.connect(
self._shell.deselect_database)
self._db_selector.database_dump.connect(self._dump_database)
        self._db_selector.database_load.connect(self._load_database)
self._shell.database_updated.connect(
self._db_selector._update_current)
self._shell.database_convert_active.connect(
self._db_selector._convert_disabled)
self._shell.database_convert_complete.connect(
self._db_selector._convert_enabled)
self._shell.database_convert_active.connect(
lambda: self.actionInitiate_Pipeline.setDisabled(True))
self._shell.database_convert_complete.connect(
lambda: self.actionInitiate_Pipeline.setEnabled(True))
# Set up the strategy manager
self._strategy_manager = GUIStrategyManager(self)
self._strategy_manager.strategy_selected.connect(
self._shell.select_strategy)
self._shell.strategy_loaded.connect(
self._strategy_manager._load_strategy)
self._shell.strategy_completed.connect(
self._strategy_manager._complete_strategy)
        # Set up the data check dialog
self._data_check = DataCheck(self)
self._data_check.setModal(True)
# Set up progress bar
self._progress = ProgressBar(self)
self._progress.setModal(True)
self._progress.force_quit.connect(self.close)
# Set up the help dialog
self._help = HelpWidget(self)
# Set up the about dialog (actionAbout)
self._about = About(self)
self._about.setModal(True)
return
def _init_pipeline_dock(self):
# Give the bottom left corner to left dock
self.setCorner(QtCore.Qt.Corner(0x00002), QtCore.Qt.DockWidgetArea(1))
# Pipeline dock
self._pipeline_dock = PipeLine(self)
self._pipeline_dock._showclose_filter._show.connect(
lambda: self.actionShow_Pipeline.setEnabled(False))
self._pipeline_dock._showclose_filter._close.connect(
lambda: self.actionShow_Pipeline.setEnabled(True))
self.addDockWidget(QtCore.Qt.DockWidgetArea(1), self._pipeline_dock)
# Set widgets on tree click
self._pipeline_dock.treeView.clicked.connect(
self._set_details_widget)
self._pipeline_dock.treeView.clicked.connect(
self._set_context_widget)
# Change the output scope on button click
self._pipeline_dock.globalRadioButton.clicked.connect(
lambda: self._waitcursor_scope("global"))
self._pipeline_dock.localRadioButton.clicked.connect(
lambda: self._waitcursor_scope("local"))
self._pipeline_dock.scopeFrame.setDisabled(True)
# Variable filtering
self._pipeline_dock.filterFrame.setDisabled(True)
# Refresh on module and theme activation or execution
self._shell.modules_queued.connect(
lambda: self._pipeline_dock._refresh(self._shell))
self._shell.themes_queued.connect(
lambda: self._pipeline_dock._refresh(self._shell))
self._shell.module_executed.connect(
lambda: self._pipeline_dock._refresh(self._shell))
self._shell.themes_executed.connect(
lambda: self._pipeline_dock._refresh(self._shell))
self._shell.strategy_executed.connect(
lambda: self._pipeline_dock._refresh(self._shell))
# Repeat any filtering on widget update
self._shell.reset_widgets.connect(self._pipeline_dock._repeat_filter)
# Add context menu(s)
self._pipeline_dock.treeView.customContextMenuRequested.connect(
lambda x: self._pipeline_dock._make_menus(self._shell, x))
# Handle errors
self._pipeline_dock.error_detected.connect(self._display_error)
return
def _init_simulation_dock(self):
# Simulation dock
self._simulation_dock = SimulationDock(self)
self._simulation_dock._showclose_filter._show.connect(
lambda: self.actionShow_Simulations.setEnabled(False))
self._simulation_dock._showclose_filter._close.connect(
lambda: self.actionShow_Simulations.setEnabled(True))
self.addDockWidget(QtCore.Qt.DockWidgetArea(1), self._simulation_dock)
self._simulation_dock.name_changed.connect(
self._shell.set_simulation_title)
self._simulation_dock.active_changed.connect(
self._shell.set_active_simulation)
# Add context menu(s)
self._simulation_dock.listWidget.customContextMenuRequested.connect(
lambda x: self._simulation_dock._make_menus(self._shell, x))
# Set disabled until dataflow activated.
self._simulation_dock.setDisabled(True)
# Tab docks
self.setTabPosition(QtCore.Qt.DockWidgetArea(1),
QtGui.QTabWidget.TabPosition(0))
self.tabifyDockWidget(self._simulation_dock, self._pipeline_dock)
# Collect unsaved signals
self._simulation_dock.name_changed.connect(self._set_project_unsaved)
self._simulation_dock.active_changed.connect(self._set_project_unsaved)
return
def _init_system_dock(self, disable_log=False):
if disable_log: return
# System dock
self._system_dock = LogDock(self)
self._system_dock._showclose_filter._show.connect(
lambda: self.actionSystem_Log.setEnabled(False))
self._system_dock._showclose_filter._close.connect(
lambda: self.actionSystem_Log.setEnabled(True))
self.addDockWidget(QtCore.Qt.DockWidgetArea(8), self._system_dock)
return
def _init_file_menu(self):
self.actionNew.triggered.connect(self._new_project)
self.actionOpen.triggered.connect(self._open_project)
self.actionSave.triggered.connect(self._save_project)
self.actionSave_As.triggered.connect(self._saveas_project)
self.actionProperties.triggered.connect(
self._set_project_properties)
self.actionClose.triggered.connect(self._close_project)
self.actionExit.triggered.connect(self.close)
return
def _init_sim_menu(self):
# Set up the simulation menu
self.actionAdd_Modules.triggered.connect(self._set_module_shuttle)
self.actionAdd_Assessment.triggered.connect(
self._set_assessment_shuttle)
self.actionAdd_Strategy.triggered.connect(self._set_strategy)
self.actionRun_Current.triggered.connect(self._execute_current)
self.actionRun_Themes.triggered.connect(self._execute_themes)
self.actionRun_Strategy.triggered.connect(self._execute_strategy)
return
def _init_data_menu(self):
# Database selection dialog
self.actionSelect_Database.triggered.connect(
self._set_database_properties)
# Set up data preparation stages
self.actionInitiate_Pipeline.triggered.connect(self._initiate_pipeline)
self.actionInitiate_Bathymetry.triggered.connect(
self._initiate_bathymetry)
self.actionInitiate_Dataflow.triggered.connect(self._initiate_dataflow)
# Data export / import functions
self.actionExport.triggered.connect(self._export_data)
self.actionExport_mask.triggered.connect(self._export_data_mask)
self.actionImport.triggered.connect(self._import_data)
self.actionImport_skip.triggered.connect(self._import_data_skip)
return
def _init_view_menu(self, disable_log=False):
# Dock show buttons
self.actionShow_Pipeline.triggered.connect(self._pipeline_dock.show)
self.actionShow_Pipeline.triggered.connect(
lambda: self.actionShow_Pipeline.setDisabled(True))
self.actionShow_Simulations.triggered.connect(
self._simulation_dock.show)
self.actionShow_Simulations.triggered.connect(
lambda: self.actionShow_Simulations.setDisabled(True))
if not disable_log:
self.actionSystem_Log.triggered.connect(self._system_dock.show)
self.actionSystem_Log.triggered.connect(
lambda: self.actionSystem_Log.setDisabled(True))
# Context Actions
self.actionData.triggered.connect(
lambda: self.stackedWidget.setCurrentIndex(1))
self.actionPlots.triggered.connect(
lambda: self.stackedWidget.setCurrentIndex(2))
self.actionComparison.triggered.connect(
lambda: self.stackedWidget.setCurrentIndex(3))
self.actionData.triggered.connect(
lambda: self._set_context_widget(self._last_tree_controller))
self.actionPlots.triggered.connect(
lambda: self._set_context_widget(self._last_tree_controller))
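        # Group the context actions so that only one of Data, Plots or
        # Comparison can be checked at a time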
self.contextGroup = QtGui.QActionGroup(self)
self.contextGroup.addAction(self.actionData)
self.contextGroup.addAction(self.actionPlots)
self.contextGroup.addAction(self.actionComparison)
return
def _init_tools_menu(self):
"""Dynamically generate tool menu entries and signal/slots"""
self._tool_manager = GUIToolManager()
all_tools = self._tool_manager.get_available()
for tool_name in all_tools:
new_action = self._add_dynamic_action(tool_name, "menuTools")
new_action.triggered.connect(
lambda x, name=tool_name: self._open_tool(name))
self._dynamic_actions[tool_name] = new_action
return
def _init_help_menu(self):
self.actionHelp_Index.triggered.connect(self._help.show)
self.actionAbout.triggered.connect(self._about.show)
# Open the logs folder
log_dir = get_log_dir()
log_dir_path = log_dir.get_path()
open_log_dir = lambda: os.startfile(log_dir_path)
self.actionView_Logs.triggered.connect(open_log_dir)
return
@QtCore.pyqtSlot(str)
def _set_window_title(self, title):
if not title:
title_str = "DTOcean"
else:
title_str = "DTOcean: {}".format(title)
self.setWindowTitle(title_str)
return
@QtCore.pyqtSlot()
def _set_project_properties(self):
self._project_properties.lineEdit.setText(self._shell.project.title)
self._project_properties.show()
return
@QtCore.pyqtSlot()
def _set_project_title(self):
new_title = self._project_properties.lineEdit.text()
self._shell.set_project_title(new_title)
return
@QtCore.pyqtSlot()
def _set_project_saved(self):
if self._shell.project is None: return
if self._shell.project_path is None:
window_title = self._shell.project.title
else:
window_title = "{} ({})".format(self._shell.project.title,
self._shell.project_path)
self._set_window_title(window_title)
self._shell.project_unsaved = False
return
@QtCore.pyqtSlot()
def _set_project_unsaved(self):
if self._shell.project is None: return
if self._shell.project_path is None:
window_title = "{}*".format(self._shell.project.title)
else:
window_title = "{} ({})*".format(self._shell.project.title,
self._shell.project_path)
self._set_window_title(window_title)
self._shell.project_unsaved = True
return
@QtCore.pyqtSlot()
def _set_database_properties(self):
self._db_selector.show()
return
@QtCore.pyqtSlot()
def _active_project_ui_switch(self):
# Disable Actions
self.actionNew.setDisabled(True)
self.actionOpen.setDisabled(True)
self.actionSave.setDisabled(True)
self.actionSave_As.setDisabled(True)
self.actionComparison.setDisabled(True)
# Enable Actions
self.actionProperties.setEnabled(True)
self.actionClose.setEnabled(True)
self.actionData.setEnabled(True)
self.actionPlots.setEnabled(True)
self.actionInitiate_Pipeline.setEnabled(True)
self.actionSelect_Database.setEnabled(True)
self.actionExport.setEnabled(True)
self.actionExport_mask.setEnabled(True)
self.actionImport.setEnabled(True)
self.actionImport_skip.setEnabled(True)
# Activate the pipeline
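        # The branch map describes the pipeline tree: section headers
        # plus hub controls which expand into input branches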
start_branch_map = [{"hub": SectionControl,
"name": "Configuration"},
{"hub": HubControl,
"name": "Scenario",
"args": ["project",
InputBranchControl,
True,
["System Type Selection",
"Database Filtering Interface",
"Project Boundaries Interface"]]}
]
self._pipeline_dock._set_branch_map(start_branch_map)
self._pipeline_dock._refresh(self._shell)
self._pipeline_dock._set_title("Define scenario selections...")
self._pipeline_dock.scopeFrame.setEnabled(True)
self._pipeline_dock.filterFrame.setEnabled(True)
# Link the project to the simulation dock and initialise the list
self._simulation_dock.setDisabled(True)
self._shell.project.sims_updated.connect(
lambda: self._simulation_dock._update_simulations(
self._shell.project))
self._simulation_dock._update_simulations(self._shell.project)
# Set up details widget on the data context area
self._data_details = DetailsWidget(self)
self._data_context._top_left_box.addWidget(self._data_details)
# Set up file manager widget on the data context area
self._data_file_manager = FileManagerWidget(self)
self._data_context._top_right_box.addWidget(self._data_file_manager)
self._data_file_manager.setDisabled(True)
# Set up details widget on the plot context area
self._plot_details = DetailsWidget(self)
self._plot_context._top_left_box.addWidget(self._plot_details)
# Set up plot manager widget on the plot context area
self._plot_manager = PlotManagerWidget(self)
self._plot_context._top_right_box.addWidget(self._plot_manager)
self._plot_manager.setDisabled(True)
# Set up the level comparison in the comparison context area
self._level_comparison = LevelComparison(self)
self._comp_context._top_left_box.addWidget(self._level_comparison)
# Set up the simulation comparison in the comparison context area
self._sim_comparison = SimulationComparison(self)
self._comp_context._top_right_box.addWidget(self._sim_comparison)
# Set up level comparison signals
self._level_comparison.varBox.currentIndexChanged.connect(
self._sim_comparison_ui_switch)
self._level_comparison.plot_levels.connect(self._set_level_plot)
self._level_comparison.tab_levels.connect(self._set_level_table)
self._level_comparison.save_plot.connect(self._save_comparison_plot)
self._level_comparison.save_data.connect(self._save_comparison_data)
# Set up simulation comparison signals
self._sim_comparison.plot_levels.connect(self._set_sim_plot)
self._sim_comparison.tab_levels.connect(self._set_sim_table)
self._sim_comparison.save_plot.connect(self._save_comparison_plot)
self._sim_comparison.save_data.connect(self._save_comparison_data)
# Update the central widget
self.stackedWidget.setCurrentIndex(1)
self.actionData.setChecked(True)
# Connect actions
self._shell.update_pipeline.connect(self._tool_menu_ui_switch)
self._shell.update_pipeline.connect(self._set_project_unsaved)
# Trigger the pipeline
self._pipeline_dock._set_top_item()
        # Trigger tools menu update directly (not expected to run
        # concurrently)
self._tool_menu_ui_switch(self._shell)
# Update the active sim title
active_sim_title = self._shell.project.get_simulation_title()
self._shell.project.active_title_changed.emit(active_sim_title)
return
@QtCore.pyqtSlot()
def _closed_project_ui_switch(self):
# Disable Actions
self.actionSave.setDisabled(True)
self.actionSave_As.setDisabled(True)
self.actionProperties.setDisabled(True)
self.actionClose.setDisabled(True)
self.actionData.setDisabled(True)
self.actionPlots.setDisabled(True)
self.actionComparison.setDisabled(True)
self.actionInitiate_Pipeline.setDisabled(True)
self.actionSelect_Database.setDisabled(True)
self.actionInitiate_Dataflow.setDisabled(True)
self.actionInitiate_Bathymetry.setDisabled(True)
self.actionAdd_Modules.setDisabled(True)
self.actionAdd_Assessment.setDisabled(True)
self.actionAdd_Strategy.setDisabled(True)
self.actionRun_Current.setDisabled(True)
self.actionRun_Themes.setDisabled(True)
self.actionRun_Strategy.setDisabled(True)
self.actionExport.setDisabled(True)
self.actionExport_mask.setDisabled(True)
self.actionImport.setDisabled(True)
self.actionImport_skip.setDisabled(True)
# Enable actions
self.actionNew.setEnabled(True)
self.actionOpen.setEnabled(True)
# Close the strategy manager
self._strategy_manager.close()
# Clear the pipeline
self._pipeline_dock._clear()
self._pipeline_dock._clear_filter()
self._pipeline_dock._set_title("Waiting...")
self._pipeline_dock.scopeFrame.setDisabled(True)
self._pipeline_dock.filterFrame.setDisabled(True)
# Disable the simulation widget
self._simulation_dock.setDisabled(True)
self._simulation_dock._update_simulations(None)
# Remove details widget from data context
self._data_context._top_left_box.removeWidget(self._data_details)
self._data_details.setParent(None)
self._data_details.deleteLater()
self._data_details = None
# Remove file manager widget from data context
self._data_context._top_right_box.removeWidget(self._data_file_manager)
self._data_file_manager.setParent(None)
self._data_file_manager.deleteLater()
self._data_file_manager = None
# Remove details widget from plot context
self._plot_context._top_left_box.removeWidget(self._plot_details)
self._plot_details.setParent(None)
self._plot_details.deleteLater()
self._plot_details = None
# Remove plot manager widget from plot context
self._plot_context._top_right_box.removeWidget(self._plot_manager)
self._plot_manager.setParent(None)
self._plot_manager.deleteLater()
self._plot_manager = None
# Remove level comparison widget from comparison context
self._comp_context._top_left_box.removeWidget(self._level_comparison)
self._level_comparison.setParent(None)
self._level_comparison.deleteLater()
self._level_comparison = None
# Remove simulation comparison widget from comparison context
self._plot_context._top_right_box.removeWidget(self._sim_comparison)
self._sim_comparison.setParent(None)
self._sim_comparison.deleteLater()
self._sim_comparison = None
# Remove main widget from comparison context
if self._comp_context._bottom_contents is not None:
self._clear_bottom_contents(self._comp_context)
# Update the central widget
self.stackedWidget.setCurrentIndex(0)
self._last_tree_controller = None
self._last_data_controller = None
self._last_data_controller_status = None
self._last_plot_id = None
self._last_plot_name = "auto"
        # Trigger the tool menu switcher directly (not expected to run
        # concurrently)
self._tool_menu_ui_switch(self._shell)
# Reset the window title
self._set_window_title("")
return
@QtCore.pyqtSlot()
def _active_filter_ui_switch(self):
# Enable Actions
self.actionInitiate_Bathymetry.setEnabled(True)
return
@QtCore.pyqtSlot()
def _active_pipeline_ui_switch(self):
# Close dialog
self._db_selector.close()
# Disable Actions
self.actionInitiate_Pipeline.setDisabled(True)
self.actionSelect_Database.setDisabled(True)
        # Enable Actions
self.actionAdd_Modules.setEnabled(True)
self.actionAdd_Assessment.setEnabled(True)
self.actionInitiate_Dataflow.setEnabled(True)
# Update the pipeline
fresh_branch_map = [{"hub": SectionControl,
"name": "Configuration"},
{"hub": HubControl,
"name": "Scenario",
"args": ["project",
InputBranchControl,
True,
["System Type Selection",
"Database Filtering Interface",
"Project Boundaries Interface"]]},
{"hub": HubControl,
"name": "Modules",
"args": ["modules",
InputBranchControl,
False]},
{"hub": HubControl,
"name": "Assessment",
"args": ["themes",
InputBranchControl,
False]}
]
self._pipeline_dock._set_branch_map(fresh_branch_map)
self._pipeline_dock._refresh(self._shell)
return
@QtCore.pyqtSlot()
def _active_bathymetry_ui_switch(self):
# Disable Actions
self.actionInitiate_Bathymetry.setDisabled(True)
# Update the pipeline
self._pipeline_dock._refresh(self._shell)
return
@QtCore.pyqtSlot()
def _active_dataflow_ui_switch(self):
self._pipeline_dock._refresh(self._shell)
# Close dialogs
self._module_shuttle.close()
self._assessment_shuttle.close()
# Enable the simulation widget
self._simulation_dock.setEnabled(True)
# Setup and enable comparison context
self._level_comparison._set_interfaces(self._shell)
self._sim_comparison._set_interfaces(self._shell, include_str=True)
if self._shell.strategy is not None:
self._level_comparison.strategyBox.setChecked(False)
self._level_comparison.strategyBox.setEnabled(True)
self._sim_comparison.strategyBox.setChecked(False)
self._sim_comparison.strategyBox.setEnabled(True)
self.actionComparison.setEnabled(True)
# Enable Actions
self.actionSave.setEnabled(True)
self.actionSave_As.setEnabled(True)
self.actionAdd_Strategy.setEnabled(True)
self._run_action_ui_switch()
# Disable Actions
self.actionAdd_Modules.setDisabled(True)
self.actionAdd_Assessment.setDisabled(True)
self.actionInitiate_Dataflow.setDisabled(True)
self.actionInitiate_Bathymetry.setDisabled(True)
return
@QtCore.pyqtSlot(str)
def _current_scope_ui_switch(self, scope):
sane_scope = str(scope)
if sane_scope == "global":
self._pipeline_dock.globalRadioButton.setChecked(True)
elif sane_scope == "local":
self._pipeline_dock.localRadioButton.setChecked(True)
else:
errStr = ("Valid scopes are 'local' or 'global'. Passed scope "
"was {}").format(sane_scope)
            raise ValueError(errStr)
        return
@QtCore.pyqtSlot()
def _run_action_ui_switch(self):
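        """Enable or disable the run actions according to the strategy
        state and the scheduled modules and themes, then update the
        pipeline title to match."""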
modules_scheduled = self._shell.get_scheduled_modules()
modules_completed = self._shell.get_completed_modules()
themes_scheduled = self._shell.get_scheduled_themes()
# Set the run action buttons
        if (self._shell.strategy is None or
            not self._shell.strategy.strategy_run):
self.actionRun_Strategy.setDisabled(True)
if modules_scheduled:
self.actionRun_Current.setEnabled(True)
else:
self.actionRun_Current.setDisabled(True)
if themes_scheduled:
self.actionRun_Themes.setEnabled(True)
else:
self.actionRun_Themes.setDisabled(True)
else:
self.actionRun_Current.setDisabled(True)
self.actionRun_Themes.setDisabled(True)
if modules_scheduled:
self.actionRun_Strategy.setEnabled(True)
else:
self.actionRun_Strategy.setDisabled(True)
# Set the pipeline title
if not modules_completed and modules_scheduled:
pipeline_msg = "Define simulation inputs..."
elif modules_completed and modules_scheduled:
pipeline_msg = "Simulation in progress..."
elif modules_completed and not modules_scheduled:
pipeline_msg = "Simulation complete..."
elif (not modules_completed and
not modules_scheduled and
themes_scheduled):
pipeline_msg = "Assessment only mode..."
elif (not modules_completed and
not modules_scheduled and
not themes_scheduled):
pipeline_msg = "No modules or assessments selected..."
else:
errStr = "Whoa, take 'er easy there, Pilgrim"
raise SystemError(errStr)
self._pipeline_dock._set_title(pipeline_msg)
return
@QtCore.pyqtSlot(int)
def _sim_comparison_ui_switch(self, box_number):
if box_number == -1:
self._sim_comparison.setDisabled(True)
else:
self._sim_comparison.setEnabled(True)
return
@QtCore.pyqtSlot(object)
def _tool_menu_ui_switch(self, shell):
for tool_name, action in self._dynamic_actions.iteritems():
tool = self._tool_manager.get_tool(tool_name)
if self._tool_manager.can_execute_tool(shell.core,
shell.project,
tool):
action.setEnabled(True)
else:
action.setDisabled(True)
return
@QtCore.pyqtSlot()
def _set_module_shuttle(self):
self._module_shuttle._add_items_from_lists(
self._shell.get_available_modules(),
self._shell.get_active_modules())
self._module_shuttle.show()
return
@QtCore.pyqtSlot()
def _set_assessment_shuttle(self):
self._assessment_shuttle._add_items_from_lists(
self._shell.get_available_themes(),
self._shell.get_active_themes())
self._assessment_shuttle.show()
return
@QtCore.pyqtSlot()
def _set_strategy(self):
self._strategy_manager.show(self._shell)
return
@QtCore.pyqtSlot(object, int)
def _set_details_widget(self, proxy_index):
controller = self._pipeline_dock._find_controller(proxy_index)
if isinstance(controller, (InputVarControl, OutputVarControl)):
# Collect the meta data from the variable
meta = controller._variable.get_metadata(self._shell.core)
title = meta.title
description = meta.description
else:
title = None
description = None
self._data_details._set_details(title, description)
self._plot_details._set_details(title, description)
return
@QtCore.pyqtSlot(object, bool)
def _set_context_widget(self, proxy_index_or_controller,
reset=False):
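        """Set the context area widgets for a pipeline item, given
        either a proxy model index or a controller object."""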
controller = None
# reset all the stored controllers and update given controller
if reset:
self._last_tree_controller = None
self._last_data_controller = None
self._last_data_controller_status = None
self._force_plot = True
if proxy_index_or_controller is not None:
model_index = \
proxy_index_or_controller._get_index_from_address()
proxy_index_or_controller = \
proxy_index_or_controller._proxy.mapFromSource(model_index)
# Return a controller class
if proxy_index_or_controller is not None:
# If this is a proxy index then get the controller
if isinstance(proxy_index_or_controller, QtCore.QModelIndex):
proxy_index = proxy_index_or_controller
controller = self._pipeline_dock._find_controller(proxy_index)
else:
controller = proxy_index_or_controller
# If given a hidden variable then reset to the pipeline root
if controller is not None and controller._is_hidden():
controller = self._pipeline_dock._controls[0]
current_context_action = self.contextGroup.checkedAction()
if current_context_action is None:
pass
elif str(current_context_action.text()) == "Data":
self._set_data_widget(controller)
self._set_file_manager_widget(controller)
elif str(current_context_action.text()) == "Plots":
self._set_plot_widget(controller, force_plot=self._force_plot)
self._set_plot_manager_widget(controller)
self._force_plot = False
self._last_tree_controller = controller
return
def _set_file_manager_widget(self, controller):
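        """Configure the file manager widget for the given controller,
        mapping file extensions to the available load and save
        interfaces."""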
# Avoid being in a race where the data file manager is None
if self._data_file_manager is None: return
current_context_action = self.contextGroup.checkedAction()
if (current_context_action is None or
str(current_context_action.text()) == "Plots"):
return
variable = None
load_ext_dict = {}
if isinstance(controller, InputVarControl):
variable = controller._variable
interface_dict = controller._variable.get_file_input_interfaces(
self._shell.core,
include_auto=True)
if interface_dict:
for interface_name, ext_list in interface_dict.iteritems():
repeated_exts = set(ext_list).intersection(
load_ext_dict.keys())
if repeated_exts:
extsStr = ", ".join(repeated_exts)
errStr = ("Repeated interface extensions '{}'"
"found").format(extsStr)
raise RuntimeError(errStr)
interface_exts = {ext: interface_name for ext in ext_list}
load_ext_dict.update(interface_exts)
save_ext_dict = {}
if isinstance(controller, (InputVarControl, OutputVarControl)):
variable = controller._variable
interface_dict = controller._variable.get_file_output_interfaces(
self._shell.core,
self._shell.project,
include_auto=True)
if interface_dict:
for interface_name, ext_list in interface_dict.iteritems():
repeated_exts = set(ext_list).intersection(
save_ext_dict.keys())
if repeated_exts:
extsStr = ", ".join(repeated_exts)
errStr = ("Repeated interface extensions '{}'"
"found").format(extsStr)
raise RuntimeError(errStr)
interface_exts = {ext: interface_name for ext in ext_list}
save_ext_dict.update(interface_exts)
if not load_ext_dict: load_ext_dict = None
if not save_ext_dict: save_ext_dict = None
if self._data_file_manager._load_connected:
self._data_file_manager.load_file.disconnect()
self._data_file_manager._load_connected = False
if self._data_file_manager._save_connected:
self._data_file_manager.save_file.disconnect()
self._data_file_manager._save_connected = False
self._data_file_manager._set_files(variable,
load_ext_dict,
save_ext_dict)
if self._data_file_manager._file_mode is None: return
if isinstance(controller, InputVarControl):
self._data_file_manager.load_file.connect(self._shell.read_file)
self._data_file_manager._load_connected = True
if isinstance(controller, (InputVarControl, OutputVarControl)):
self._data_file_manager.save_file.connect(self._shell.write_file)
self._data_file_manager._save_connected = True
return
def _set_plot_manager_widget(self, controller):
# Avoid race condition
if self._plot_manager is None: return
current_context_action = self.contextGroup.checkedAction()
if (current_context_action is None or
str(current_context_action.text()) == "Data"):
return
plot_list = []
plot_auto = False
if isinstance(controller, (InputVarControl, OutputVarControl)):
plot_list = controller._variable.get_available_plots(
self._shell.core,
self._shell.project)
all_interfaces = controller._variable._get_receivers(
self._shell.core,
self._shell.project,
"PlotInterface",
"AutoPlot")
if set(all_interfaces) - set(plot_list):
plot_auto = True
if self._plot_manager._plot_connected:
self._plot_manager.plot.disconnect()
self._plot_manager.save.disconnect()
self._plot_manager._plot_connected = False
if not plot_list: plot_list = None
self._plot_manager._set_plots(controller,
plot_list,
plot_auto)
if plot_list is None and not plot_auto: return
if isinstance(controller, (InputVarControl, OutputVarControl)):
self._plot_manager.plot.connect(self._set_plot_widget)
self._plot_manager.save.connect(self._save_plot)
self._plot_manager._plot_connected = True
return
def _set_data_widget(self, controller):
if controller is None: return
if (self._last_data_controller is not None and
controller._id == self._last_data_controller._id and
type(controller) == type(self._last_data_controller)):
if (controller._status is not None and
controller._status != self._last_data_controller_status and
"unavailable" in controller._status):
self._data_context._bottom_contents.setDisabled(True)
self._last_data_controller_status = controller._status
return
if self._data_context._bottom_contents is not None:
self._clear_bottom_contents(self._data_context)
self._last_data_controller = controller
widget = controller._get_data_widget(self._shell)
if widget is None: return
# Add the widget to the context
self._data_context._bottom_box.addWidget(widget)
self._data_context._bottom_contents = widget
# Connect the widgets read and nullify events
widget._get_read_event().connect(
lambda: self._read_raw(controller._variable, widget._get_result()))
widget._get_nullify_event().connect(
lambda: self._read_raw(controller._variable, None))
if "unavailable" in controller._status:
if "_disable" in dir(widget):
widget._disable()
else:
widget.setDisabled(True)
return
@QtCore.pyqtSlot(object, str)
def _set_plot_widget(self, controller,
plot_name=None,
force_plot=False):
if controller is None: return
if (controller._id == self._last_plot_id and
plot_name == self._last_plot_name and
not force_plot): return
if (controller._id == self._last_plot_id and
plot_name is None):
plot_name = self._last_plot_name
if plot_name == "auto": plot_name = None
if self._plot_context._bottom_contents is not None:
self._clear_bottom_contents(self._plot_context)
self._last_plot_id = controller._id
self._last_plot_name = plot_name
widget = controller._get_plot_widget(self._shell, plot_name)
if widget is None: return
# Add the widget to the context
self._plot_context._bottom_box.addWidget(widget)
self._plot_context._bottom_contents = widget
# Draw the widget
widget.draw_idle()
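        # Sanity check: more than two open matplotlib figures suggests
        # a figure leak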
if len(plt.get_fignums()) > 2:
num_strs = ["{}".format(x) for x in plt.get_fignums()]
num_str = ", ".join(num_strs)
err_msg = ("Too many matplotlib figures detected. "
"Numbers: {}").format(num_str)
raise RuntimeError(err_msg)
if "unavailable" in controller._status: widget.setDisabled(True)
return
@QtCore.pyqtSlot(object, str, object, object)
def _save_plot(self, controller, file_path, size, plot_name="auto"):
if controller is None: return
if plot_name == "auto": plot_name = None
controller._save_plot(self._shell, file_path, size, plot_name)
if len(plt.get_fignums()) > 2:
num_strs = ["{}".format(x) for x in plt.get_fignums()]
num_str = ", ".join(num_strs)
err_msg = ("Too many matplotlib figures detected. "
"Numbers: {}").format(num_str)
raise RuntimeError(err_msg)
return
@QtCore.pyqtSlot(str, bool)
def _set_level_plot(self, var_id, ignore_strategy):
# Sanitise var_id
var_id = str(var_id)
# Collect the current scope
if self._pipeline_dock.globalRadioButton.isChecked():
scope = "global"
elif self._pipeline_dock.localRadioButton.isChecked():
scope = "local"
else:
errStr = "Feck!"
raise SystemError(errStr)
if self._comp_context._bottom_contents is not None:
self._clear_bottom_contents(self._comp_context)
# Switch off save button
self._level_comparison.buttonBox.button(
QtGui.QDialogButtonBox.Save).setDisabled(True)
# Collect the sim titles from the sim dock
sim_titles = self._simulation_dock._get_list_values()
# Get the plot figure
widget = self._strategy_manager.get_level_values_plot(
self._shell,
var_id,
scope,
ignore_strategy,
sim_titles)
# Add the widget to the context
self._comp_context._bottom_box.addWidget(widget)
self._comp_context._bottom_contents = widget
# Draw the widget
widget.draw_idle()
if len(plt.get_fignums()) > 2:
num_strs = ["{}".format(x) for x in plt.get_fignums()]
num_str = ", ".join(num_strs)
err_msg = ("Too many matplotlib figures detected. "
"Numbers: {}").format(num_str)
raise RuntimeError(err_msg)
        # Switch the save buttons: disable simulation save, enable
        # level save
self._sim_comparison.buttonBox.button(
QtGui.QDialogButtonBox.Save).setDisabled(True)
self._level_comparison.buttonBox.button(
QtGui.QDialogButtonBox.Save).setEnabled(True)
return
@QtCore.pyqtSlot(str, bool)
def _set_level_table(self, var_id, ignore_strategy):
# Sanitise var_id
var_id = str(var_id)
# Collect the current scope
if self._pipeline_dock.globalRadioButton.isChecked():
scope = "global"
elif self._pipeline_dock.localRadioButton.isChecked():
scope = "local"
else:
errStr = "Feck!"
raise SystemError(errStr)
if self._comp_context._bottom_contents is not None:
self._clear_bottom_contents(self._comp_context)
# Switch off save button
self._level_comparison.buttonBox.button(
QtGui.QDialogButtonBox.Save).setDisabled(True)
# Get the table widget
widget = self._strategy_manager.get_level_values_df(self._shell,
var_id,
scope,
ignore_strategy)
# Add the widget to the context
self._comp_context._bottom_box.addWidget(widget)
self._comp_context._bottom_contents = widget
        # Switch the save buttons: disable simulation save, enable
        # level save
self._sim_comparison.buttonBox.button(
QtGui.QDialogButtonBox.Save).setDisabled(True)
self._level_comparison.buttonBox.button(
QtGui.QDialogButtonBox.Save).setEnabled(True)
return
@QtCore.pyqtSlot(str, str, bool)
def _set_sim_plot(self, var_one_id, module, ignore_strategy):
# Sanitise strings
var_one_id = str(var_one_id)
module = str(module)
# Get the first variable id from the level comparison widget
var_two_name = str(self._level_comparison.varBox.currentText())
var_two_id = self._level_comparison._get_var_id(var_two_name)
# Collect the current scope
if self._pipeline_dock.globalRadioButton.isChecked():
scope = "global"
elif self._pipeline_dock.localRadioButton.isChecked():
scope = "local"
else:
errStr = "Feck!"
raise SystemError(errStr)
if self._comp_context._bottom_contents is not None:
self._clear_bottom_contents(self._comp_context)
# Switch off save button
self._sim_comparison.buttonBox.button(
QtGui.QDialogButtonBox.Save).setDisabled(True)
# Get the plot figure
widget = self._strategy_manager.get_comparison_values_plot(
self._shell,
var_one_id,
var_two_id,
module,
scope,
ignore_strategy)
# Add the widget to the context
self._comp_context._bottom_box.addWidget(widget)
self._comp_context._bottom_contents = widget
# Draw the widget
widget.draw_idle()
if len(plt.get_fignums()) > 2:
num_strs = ["{}".format(x) for x in plt.get_fignums()]
num_str = ", ".join(num_strs)
err_msg = ("Too many matplotlib figures detected. "
"Numbers: {}").format(num_str)
raise RuntimeError(err_msg)
# Switch save buttons
self._level_comparison.buttonBox.button(
QtGui.QDialogButtonBox.Save).setDisabled(True)
self._sim_comparison.buttonBox.button(
QtGui.QDialogButtonBox.Save).setEnabled(True)
return
@QtCore.pyqtSlot(str, str, bool)
def _set_sim_table(self, var_one_id, module, ignore_strategy):
# Sanitise strings
var_one_id = str(var_one_id)
module = str(module)
# Get the first variable id from the level comparison widget
var_two_name = str(self._level_comparison.varBox.currentText())
var_two_id = self._level_comparison._get_var_id(var_two_name)
# Collect the current scope
if self._pipeline_dock.globalRadioButton.isChecked():
scope = "global"
elif self._pipeline_dock.localRadioButton.isChecked():
scope = "local"
else:
errStr = "Feck!"
raise SystemError(errStr)
if self._comp_context._bottom_contents is not None:
self._clear_bottom_contents(self._comp_context)
# Switch off save button
self._sim_comparison.buttonBox.button(
QtGui.QDialogButtonBox.Save).setDisabled(True)
# Get the table widget
widget = self._strategy_manager.get_comparison_values_df(
self._shell,
var_one_id,
var_two_id,
module,
scope,
ignore_strategy)
# Add the widget to the context
self._comp_context._bottom_box.addWidget(widget)
self._comp_context._bottom_contents = widget
        # Switch the save buttons: enable simulation save, disable
        # level save
self._sim_comparison.buttonBox.button(
QtGui.QDialogButtonBox.Save).setEnabled(True)
self._level_comparison.buttonBox.button(
QtGui.QDialogButtonBox.Save).setDisabled(True)
return
@QtCore.pyqtSlot()
def _save_comparison_plot(self):
extlist = ["{} (*.{})".format(v, k) for k, v in
get_current_filetypes().iteritems()]
extStr = ";;".join(extlist)
fdialog_msg = "Save plot"
save_path = QtGui.QFileDialog.getSaveFileName(None,
fdialog_msg,
'.',
extStr)
if save_path:
save_current_figure(str(save_path))
return
@QtCore.pyqtSlot()
def _save_comparison_data(self):
extlist = ["comma-separated values (*.csv)"]
extStr = ";;".join(extlist)
fdialog_msg = "Save data"
save_path = QtGui.QFileDialog.getSaveFileName(None,
fdialog_msg,
'.',
extStr)
if save_path:
df = self._strategy_manager._last_df
df.to_csv(str(save_path), index=False)
return
@QtCore.pyqtSlot(object)
def _read_raw(self, variable, value):
self._shell.read_raw(variable, value)
self._shell._active_thread.error_detected.connect(self._display_error)
return
@QtCore.pyqtSlot()
def _new_project(self):
self._shell.new_project()
return
@QtCore.pyqtSlot()
def _open_project(self):
msg = "Open Project"
valid_exts = "DTOcean Files (*.dto *.prj)"
file_path = QtGui.QFileDialog.getOpenFileName(None,
msg,
'.',
valid_exts)
if not file_path: return
if self._shell.project is not None:
self._shell.close_project()
self._waitcursor_open(file_path)
return
@QtCore.pyqtSlot()
def _open_project_finalize(self):
self._active_project_ui_switch()
self._active_pipeline_ui_switch()
# Recreate the existing branch map
new_branch_map = [{"hub": SectionControl,
"name": "Configuration"},
{"hub": HubControl,
"name": "Scenario",
"args": ["project",
InputBranchControl,
True,
["System Type Selection",
"Database Filtering Interface",
"Project Boundaries Interface"]]},
{"hub": HubControl,
"name": "Modules",
"args": ["modules",
InputBranchControl,
True]},
{"hub": HubControl,
"name": "Assessment",
"args": ["themes",
InputBranchControl,
True]},
{"hub": SectionControl,
"name": "Results"},
{"hub": HubControl,
"name": "Assessment",
"args": ["themes",
OutputBranchControl,
True]},
{"hub": HubControl,
"name": "Modules",
"args": ["modules",
OutputBranchControl,
True]}
]
self._pipeline_dock._set_branch_map(new_branch_map)
self._active_dataflow_ui_switch()
        # Update the active simulation title
active_sim_title = self._shell.project.get_simulation_title()
self._shell.project.active_title_changed.emit(active_sim_title)
self._shell.core.status_updated.emit()
self._set_project_saved()
return
@QtCore.pyqtSlot()
def _save_project(self):
result = True
if self._shell.project_path is None:
result = self._saveas_project()
else:
self._waitcursor_save()
return result
@QtCore.pyqtSlot()
def _saveas_project(self):
msg = "Save Project"
valid_exts = ("DTOcean Application File (*.dto);;"
"DTOcean Project File (*.prj)")
file_path = QtGui.QFileDialog.getSaveFileName(None,
msg,
'.',
valid_exts)
result = False
if file_path:
self._waitcursor_save(file_path)
result = True
return result
@QtCore.pyqtSlot()
def _close_project(self):
reply = self._project_close_warning()
if (reply == QtGui.QMessageBox.Save or
reply == QtGui.QMessageBox.Discard): self._shell.close_project()
return
@QtCore.pyqtSlot(str, str, dict)
def _dump_database(self, root_path, selected, credentials):
self._shell.dump_database(root_path, selected, credentials)
self._shell._active_thread.error_detected.connect(self._display_error)
return
@QtCore.pyqtSlot(str, str, dict)
def _load_database(self, root_path, selected, credentials):
self._shell.load_database(root_path, selected, credentials)
self._shell._active_thread.error_detected.connect(self._display_error)
return
@QtCore.pyqtSlot()
def _export_data(self):
msg = "Export Data"
valid_exts = "Datastate Files (*.dts)"
file_path = QtGui.QFileDialog.getSaveFileName(None,
msg,
'.',
valid_exts)
if file_path:
self._shell.export_data(file_path)
return
@QtCore.pyqtSlot()
def _export_data_mask(self):
msg = "Export Data (Mask Outputs)"
valid_exts = "Datastate Files (*.dts)"
file_path = QtGui.QFileDialog.getSaveFileName(None,
msg,
'.',
valid_exts)
if file_path:
self._shell.export_data(file_path, True)
return
@QtCore.pyqtSlot()
def _import_data(self):
msg = "Import Data"
valid_exts = "Datastate Files (*.dts)"
file_path = QtGui.QFileDialog.getOpenFileName(None,
msg,
'.',
valid_exts)
if file_path:
self._shell.import_data(file_path)
return
@QtCore.pyqtSlot()
def _import_data_skip(self):
msg = "Import Data (Skip Satisfied)"
valid_exts = "Datastate Files (*.dts)"
file_path = QtGui.QFileDialog.getOpenFileName(None,
msg,
'.',
valid_exts)
if file_path:
self._shell.import_data(file_path, True)
return
@QtCore.pyqtSlot()
def _initiate_pipeline(self):
# Find the "System Type Selection" branch
branch_control = self._pipeline_dock._find_controller(
controller_title="System Type Selection",
controller_class=InputBranchControl)
# Check for required values
required_address = branch_control._get_required_address(self._shell)
# Remap OK button
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.disconnect()
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.connect(
self._shell.initiate_pipeline)
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.connect(
self._data_check.accept)
self._data_check.show(required_address)
return
@QtCore.pyqtSlot()
def _initiate_bathymetry(self):
if self._shell.project_menu.is_executable(self._shell.core,
self._shell.project,
"Site Boundary Selection"):
required_address = None
else:
raw_required = {"Section": ["Scenario"],
"Branch": ["Database Filtering Interface"],
"Item": ["Selected Site"]}
required_address = pd.DataFrame(raw_required)
# Remap OK button
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.disconnect()
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.connect(
self._shell.initiate_bathymetry)
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.connect(
self._data_check.accept)
self._data_check.show(required_address)
return
@QtCore.pyqtSlot()
def _initiate_dataflow(self):
required_address = None
# Check if filters can be initiated
if ("Database Filtering Interface" in
self._shell.project_menu.get_active(self._shell.core,
self._shell.project)):
# Find the "Database Filtering Interface" branch
branch_control = self._pipeline_dock._find_controller(
controller_title="Database Filtering Interface",
controller_class=InputBranchControl)
# Check for required values
required_address = branch_control._get_required_address(
self._shell)
# Remap OK button
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.disconnect()
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.connect(
self._progress_dataflow)
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.connect(
self._data_check.accept)
self._data_check.show(required_address)
return
@QtCore.pyqtSlot()
def _execute_current(self):
# Close the strategy manager
self._strategy_manager.close()
# Get the current module name
current_mod = self._shell.get_current_module()
# Find the module branch
branch_control = self._pipeline_dock._find_controller(
controller_title=current_mod,
controller_class=InputBranchControl)
# Check for required values
required_address = branch_control._get_required_address(self._shell)
# Find any required values for any themes:
all_themes = self._shell.get_active_themes()
for theme_name in all_themes:
branch_control = self._pipeline_dock._find_controller(
controller_title=theme_name,
controller_class=InputBranchControl)
# Check for required values
theme_address = branch_control._get_required_address(self._shell)
# Loop if None
if theme_address is None: continue
# Otherwise merge
if required_address is None:
required_address = theme_address
else:
required_address = pd.concat([required_address, theme_address],
ignore_index=True)
# Remap OK button
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.disconnect()
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.connect(
self._progress_current)
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.connect(
self._data_check.accept)
self._data_check.show(required_address)
return
@QtCore.pyqtSlot()
def _execute_themes(self):
# Close the strategy manager
self._strategy_manager.close()
# Check for required values
required_address = None
# Find any required values for any themes:
all_themes = self._shell.get_active_themes()
for theme_name in all_themes:
branch_control = self._pipeline_dock._find_controller(
controller_title=theme_name,
controller_class=InputBranchControl)
# Check for required values
theme_address = branch_control._get_required_address(self._shell)
# Loop if None
if theme_address is None: continue
# Otherwise merge
if required_address is None:
required_address = theme_address
else:
required_address = pd.concat([required_address, theme_address],
ignore_index=True)
# Remap OK button
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.disconnect()
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.connect(
self._progress_themes)
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.connect(
self._data_check.accept)
self._data_check.show(required_address)
return
@QtCore.pyqtSlot()
def _execute_strategy(self):
# Close the strategy manager
self._strategy_manager.close()
# Get the current module name
scheduled_mods = self._shell.get_scheduled_modules()
required_address = None
for scheduled_mod in scheduled_mods:
# Find the module branch
branch_control = self._pipeline_dock._find_controller(
controller_title=scheduled_mod,
controller_class=InputBranchControl)
# Check for required values
mod_address = branch_control._get_required_address(self._shell)
# Loop if None
if mod_address is None: continue
# Otherwise merge
if required_address is None:
required_address = mod_address
else:
required_address = pd.concat([required_address, mod_address],
ignore_index=True)
# Find any required values for any themes:
all_themes = self._shell.get_active_themes()
for theme_name in all_themes:
branch_control = self._pipeline_dock._find_controller(
controller_title=theme_name,
controller_class=InputBranchControl)
# Check for required values
theme_address = branch_control._get_required_address(self._shell)
# Loop if None
if theme_address is None: continue
# Otherwise merge
if required_address is None:
required_address = theme_address
else:
required_address = pd.concat([required_address, theme_address],
ignore_index=True)
# Remap OK button
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.disconnect()
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.connect(
self._progress_strategy)
self._data_check.buttonBox.button(
QtGui.QDialogButtonBox.Ok).clicked.connect(
self._data_check.accept)
self._data_check.show(required_address)
return
@QtCore.pyqtSlot(str)
def _waitcursor_open(self, file_path):
self.setEnabled(False)
QtGui.QApplication.setOverrideCursor(
QtGui.QCursor(QtCore.Qt.WaitCursor))
self._shell.open_project(file_path)
self._shell._active_thread.error_detected.connect(self._display_error)
self._shell._active_thread.finished.connect(
self._open_project_finalize)
self._shell._active_thread.finished.connect(self._reset_cursor)
return
@QtCore.pyqtSlot(str)
def _waitcursor_save(self, file_path=None):
self.setEnabled(False)
QtGui.QApplication.setOverrideCursor(
QtGui.QCursor(QtCore.Qt.WaitCursor))
self._shell.save_project(file_path)
self._shell._active_thread.error_detected.connect(self._display_error)
self._shell._active_thread.finished.connect(self._reset_cursor)
return
@QtCore.pyqtSlot()
def _progress_dataflow(self):
# Recreate the existing branch map
new_branch_map = [{"hub": SectionControl,
"name": "Configuration"},
{"hub": HubControl,
"name": "Scenario",
"args": ["project",
InputBranchControl,
True,
["System Type Selection",
"Database Filtering Interface",
"Project Boundaries Interface"]]},
{"hub": HubControl,
"name": "Modules",
"args": ["modules",
InputBranchControl,
True]},
{"hub": HubControl,
"name": "Assessment",
"args": ["themes",
InputBranchControl,
True]},
{"hub": SectionControl,
"name": "Results"},
{"hub": HubControl,
"name": "Assessment",
"args": ["themes",
OutputBranchControl,
True]},
{"hub": HubControl,
"name": "Modules",
"args": ["modules",
OutputBranchControl,
True]}
]
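        # The branch map mirrors the pipeline dock layout: a Configuration
        # section whose hubs expose input branches, then a Results section
        # exposing the same Assessment and Modules hubs as output branches.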
self._pipeline_dock._set_branch_map(new_branch_map)
self._progress.allow_close = False
self._progress.set_pulsing()
self._shell.initiate_dataflow(self._pipeline_dock)
self._shell._active_thread.error_detected.connect(self._display_error)
self._shell._active_thread.finished.connect(self._close_progress)
self._progress.show()
return
@QtCore.pyqtSlot()
def _progress_current(self):
self._progress.allow_close = False
self._progress.set_pulsing()
self._shell.execute_current()
self._shell._active_thread.error_detected.connect(self._display_error)
self._shell._active_thread.finished.connect(self._close_progress)
self._progress.show()
return
@QtCore.pyqtSlot()
def _progress_themes(self):
self._progress.allow_close = False
self._progress.set_pulsing()
self._shell.execute_themes()
self._shell._active_thread.error_detected.connect(self._display_error)
self._shell._active_thread.finished.connect(self._close_progress)
self._progress.show()
return
@QtCore.pyqtSlot()
def _progress_strategy(self):
self._last_stack_index = self.stackedWidget.currentIndex()
self.stackedWidget.setCurrentIndex(0)
self._progress.allow_close = False
self._progress.set_pulsing()
self._shell.execute_strategy()
self._shell._active_thread.error_detected.connect(self._display_error)
self._shell._active_thread.finished.connect(self._close_progress)
self._progress.show()
return
@QtCore.pyqtSlot(str)
def _waitcursor_scope(self, scope):
self.setEnabled(False)
QtGui.QApplication.setOverrideCursor(
QtGui.QCursor(QtCore.Qt.WaitCursor))
self._shell.set_output_scope(scope)
self._shell._active_thread.error_detected.connect(self._display_error)
self._shell._active_thread.finished.connect(self._reset_cursor)
return
@QtCore.pyqtSlot(str)
def _open_tool(self, tool_name):
if self._thread_tool is not None: return
# Pick up the tool
tool = self._tool_manager.get_tool(tool_name)
self._thread_tool = ThreadTool(self._shell.core,
self._shell.project,
tool)
self._thread_tool.start()
self._thread_tool.error_detected.connect(self._display_error)
self._thread_tool.finished.connect(lambda: self._close_tool(tool))
return
@QtCore.pyqtSlot()
def _reset_cursor(self):
QtGui.QApplication.restoreOverrideCursor()
self.setEnabled(True)
return
@QtCore.pyqtSlot(object)
def _close_tool(self, tool):
if tool.has_widget():
widget = tool.get_widget()
if widget is not None: widget.show()
self._thread_tool = None
return
@QtCore.pyqtSlot()
def _close_progress(self):
self._progress.allow_close = True
self._progress.close()
return
@QtCore.pyqtSlot(object, object, object)
def _display_error(self, etype, evalue, etraceback):
type_str = str(etype)
type_strs = type_str.split(".")
sane_type_str = type_strs[-1].replace("'>", "")
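        # e.g. "<type 'exceptions.ValueError'>" -> "ValueError"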
if sane_type_str[0].lower() in "aeiou":
article = "An"
else:
article = "A"
        errMsg = "{} {} occurred: {}".format(article, sane_type_str, evalue)
module_logger.critical(errMsg)
module_logger.critical(''.join(traceback.format_tb(etraceback)))
QtGui.QMessageBox.critical(self, "ERROR", errMsg)
return
def _project_close_warning(self):
if (self._shell.project is None or
not self.actionSave.isEnabled() or
not self._shell.project_unsaved): return QtGui.QMessageBox.Discard
qstr = "Do you want to save your changes?"
reply = QtGui.QMessageBox.warning(self,
'Project modified',
qstr,
QtGui.QMessageBox.Save,
QtGui.QMessageBox.Discard,
QtGui.QMessageBox.Cancel)
if reply == QtGui.QMessageBox.Save:
if not self._save_project(): reply = QtGui.QMessageBox.Cancel
return reply
def closeEvent(self, event):
# Check for active thread
if (self._shell._active_thread is not None or
self._thread_tool is not None):
qstr = ("Quitting now may cause DATA CORRUPTION or\n"
"LOSS OF RESULTS! Are you sure?")
reply = QtGui.QMessageBox.critical(
self,
'Active thread detected',
qstr,
QtGui.QMessageBox.Yes,
QtGui.QMessageBox.No | QtGui.QMessageBox.Default)
if reply == QtGui.QMessageBox.Yes:
sys.excepthook = sys.__excepthook__
event.accept()
elif reply == QtGui.QMessageBox.No:
event.ignore()
return
else:
err_msg = "Sooner or later, everyone comes to Babylon 5"
raise ValueError(err_msg)
# Check for open project
reply = self._project_close_warning()
if reply == QtGui.QMessageBox.Cancel:
event.ignore()
else:
event.accept()
return
@staticmethod
def _clear_bottom_contents(context):
context._bottom_box.removeWidget(context._bottom_contents)
context._bottom_contents.setParent(None)
if isinstance(context._bottom_contents, MPLWidget):
fignum = context._bottom_contents.figure.number
log_msg = "Closing figure {}".format(fignum)
module_logger.debug(log_msg)
sip.delete(context._bottom_contents)
plt.close(fignum)
else:
sip.delete(context._bottom_contents)
context._bottom_contents = None
return
|
DTOcean/dtocean-app
|
dtocean_app/main.py
|
Python
|
gpl-3.0
| 130,886
|
#!/usr/bin/python2
'''
This example shows that private values (values stored in attributes
whose names are prefixed by one or two underscores) are not really private.
- _values can be changed as is.
- __values are just hidden under mangled names. The reason for this is of course inheritance.
NOTE: whether the object is new style (deriving from 'object') or old style makes no
difference on this point.
'''
from __future__ import print_function
class Book(object):
def __init__(self, price, name):
self.__price = price
self._name = name
def printMe(self):
print('price is', self.__price)
print('name is', self._name)
def setPrice(self, price):
self.__price = price
def getPrice(self):
return self.__price
def setName(self, name):
self._name = name
def getName(self):
return self._name
b = Book(50, 'Lord of the Rings')
b.printMe()
# let's try to access the __price attribute directly...
# Notice that we get an exception not because the attribute is 'private'
# but rather because such an attribute really DOES NOT exist...
try:
print('price is', b.__price)
except AttributeError as e:
print(e)
print(
    'You see, you cannot access the attribute directly because THERE IS no such attribute')
# We CAN change the name since attributes that have just one _ in front of them appear AS IS
# in the object
print(
'if you see silmarilion below it means we changed the attribute even though it has _ in front of it')
b._name = 'silmarilion'
b.printMe()
# Now lets see what the object really has...
print(dir(b))
# OK. I got it, the object really has the '_Book__price' attribute. Let's see if we can change
# that... YES WE CAN...
print(
'if you see price=70 it means we changed the attribute even though it has __ in front of it')
b._Book__price = 70
b.printMe()
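# Extra sketch (not part of the original example): the docstring above says the
# reason for name mangling is inheritance. A subclass writing self.__price gets
# its OWN mangled attribute (_Novel__price), so it cannot clobber _Book__price.
class Novel(Book):
    def discount(self):
        self.__price = 10  # stored as _Novel__price; Book's copy is untouched
n = Novel(60, 'Dune')
n.discount()
print('price is still', n.getPrice(), '- the discount landed on _Novel__price')
print(sorted(attr for attr in dir(n) if 'price' in attr))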
|
nonZero/demos-python
|
src/examples/short/object_oriented/oo_private_2.py
|
Python
|
gpl-3.0
| 1,874
|
import logging
from qtpy.QtWidgets import QUndoCommand
from larray_editor.utils import logger
class ArrayValueChange(object):
"""
Class representing the change of one value of an array.
Parameters
----------
key: list/tuple of str
Key associated with the value
old_value: scalar
Previous value
new_value: scalar
New value
"""
def __init__(self, key, old_value, new_value):
self.key = key
self.old_value = old_value
self.new_value = new_value
# XXX: we need to handle the case of several changes at once because the method paste()
# of ArrayEditorWidget can be used on objects not handling MultiIndex axes (LArray, Numpy).
class EditArrayCommand(QUndoCommand):
"""
Class representing the change of one or several value(s) of an array.
Parameters
----------
editor: MappingEditor
Instance of MappingEditor
target : object
target array to edit. Can be given under any form.
changes: (list of) instance(s) of ArrayValueChange
List of changes
"""
def __init__(self, editor, target, changes):
QUndoCommand.__init__(self)
self.editor = editor
self.target = target
assert isinstance(changes, list)
self.changes = changes
text_command = self.get_description(target, changes)
self.setText(text_command)
if logger.isEnabledFor(logging.DEBUG):
logger.debug("Edit command pushed: {}".format(text_command))
def undo(self):
for change in self.changes:
self.apply_change(change.key, change.old_value)
self.editor.arraywidget.model_data.reset()
def redo(self):
for change in self.changes:
self.apply_change(change.key, change.new_value)
self.editor.arraywidget.model_data.reset()
def get_description(self, target, changes):
raise NotImplementedError()
def apply_change(self, key, new_value):
raise NotImplementedError()
class EditSessionArrayCommand(EditArrayCommand):
"""
Class representing the change of one or several value(s) of an array.
Parameters
----------
editor: MappingEditor
Instance of MappingEditor
target : str
name of array to edit
changes: (list of) instance(s) of ArrayValueChange
List of changes
"""
def get_description(self, target, changes):
if len(changes) == 1:
return "Editing Cell {} of {}".format(changes[0].key, target)
else:
return "Pasting {} Cells in {}".format(len(changes), target)
def apply_change(self, key, new_value):
self.editor.kernel.shell.run_cell("{}[{}] = {}".format(self.target, key, new_value))
class EditCurrentArrayCommand(EditArrayCommand):
"""
Class representing the change of one or several value(s) of the current array.
Parameters
----------
editor : ArrayEditor
Instance of ArrayEditor
target : Array
array to edit
changes : (list of) instance(s) of ArrayValueChange
List of changes
"""
def get_description(self, target, changes):
if len(changes) == 1:
return "Editing Cell {}".format(changes[0].key)
else:
return "Pasting {} Cells".format(len(changes))
def apply_change(self, key, new_value):
self.target[key] = new_value
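# Usage sketch (illustrative, not part of this module): these commands are meant
# to be pushed onto a QUndoStack, which calls redo() on push and then undo()/redo()
# on user actions. `editor` and `arr` stand for an ArrayEditor instance and its
# current array.
#   from qtpy.QtWidgets import QUndoStack
#   stack = QUndoStack()
#   changes = [ArrayValueChange(key=(0, 0), old_value=1.0, new_value=2.0)]
#   stack.push(EditCurrentArrayCommand(editor, arr, changes))  # applies the edit
#   stack.undo()  # writes old_value back and resets the model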
|
larray-project/larray-editor
|
larray_editor/commands.py
|
Python
|
gpl-3.0
| 3,416
|
import sqlite3
from ..models.dbhandler import *
from ..models.Schedule import *
from ..models.Scheduler import *
from ..models.Repetition import *
from ..models.MetronomeSetup import *
from ..config.config import Config
from ..test.helper import Helper
import os
import datetime
class DatabaseTestBase(object):
dbfile = 'test.db'
def teardown(self):
if os.path.exists(self.dbfile):
os.unlink(self.dbfile)
class TestDatabase(Helper, DatabaseTestBase):
def test_phrase(self):
dbh = DatabaseHandler(self.dbfile)
dbh.init_database()
phrase = self._create_phrase()
phrase_id = dbh.insert_phrase(phrase)
        assert phrase_id is not None
new_phrase = dbh.get_phrase_by_id(phrase_id)
assert new_phrase == phrase
new_phrase.set_name("New phrase")
dbh.update_phrase(new_phrase)
updated_phrase = dbh.get_phrase_by_id(phrase_id)
assert updated_phrase == new_phrase
dbh.remove_phrase(new_phrase)
deleted_phrase = dbh.get_phrase_by_id(phrase_id)
        assert deleted_phrase is None
phrase1 = self._create_phrase()
phrase2 = self._create_phrase()
dbh.insert_phrase(phrase1)
dbh.insert_phrase(phrase2)
current_phrases = dbh.get_phrases(orderby="filename")
assert len(current_phrases) == 2
def test_schedule(self):
dbh = DatabaseHandler(self.dbfile)
dbh.init_database()
phrase = self._create_phrase()
phrase_id = dbh.insert_phrase(phrase)
schedule = Schedule()
schedule.set_phrase_id(phrase_id)
schedule_id = dbh.insert_schedule(schedule)
        assert schedule_id is not None
new_schedule = dbh.get_schedule_by_id(schedule_id)
assert schedule == new_schedule
schedule1 = Schedule()
schedule1.set_phrase_id(phrase_id)
schedule2 = Schedule()
schedule2.set_phrase_id(phrase_id)
dbh.insert_schedule(schedule1)
dbh.insert_schedule(schedule2)
schedules = dbh.get_schedules()
assert len(schedules) == 3
def test_repetition(self):
dbh = DatabaseHandler(self.dbfile)
dbh.init_database()
phrase = self._create_phrase()
phrase_id = dbh.insert_phrase(phrase)
rep = Repetition()
rep.set_phrase_id(phrase_id)
rep.set_date(datetime.date.today())
rep.set_pitch(1)
rep.set_speed(100)
rep.set_comment("Test")
rep.set_grade(5)
rep_id = dbh.insert_repetition(rep)
        assert rep_id is not None
new_rep = dbh.get_repetition_by_id(rep_id)
assert new_rep == rep
reps = dbh.get_repetitions()
assert len(reps) == 1
assert dbh.remove_repetition(new_rep)
reps = dbh.get_repetitions()
assert len(reps) == 0
def test_scheduler(self):
dbh = DatabaseHandler(self.dbfile)
dbh.init_database()
phrase = self._create_phrase()
phrase_id = dbh.insert_phrase(phrase)
scheduler = Scheduler()
schedule = scheduler.get_new_schedule(phrase, 5) # using new grade
print(schedule)
assert schedule is not None
        # no previous repetitions => next repetition falls within one to three days
print((schedule.get_next_repetition()))
print((datetime.date.today() + datetime.timedelta(days=2)))
assert datetime.date.today() + datetime.timedelta(days=1) <= schedule.get_next_repetition() <= datetime.date.today() + datetime.timedelta(days=3)
# two previous repetitions
r1 = Repetition()
r1.set_date(datetime.date.today() - datetime.timedelta(days=5))
r2 = Repetition()
r2.set_date(datetime.date.today() - datetime.timedelta(days=3))
schedule = scheduler.get_new_schedule(phrase, 5, repetition_list=[r1, r2])
assert datetime.date.today() + datetime.timedelta(days=3) <= schedule.get_next_repetition() <= datetime.date.today() + datetime.timedelta(days=6)
def test_metronome_setup(self):
dbh = DatabaseHandler(self.dbfile)
dbh.init_database()
phrase = self._create_phrase()
phrase_id = dbh.insert_phrase(phrase)
ms = MetronomeSetup()
ms.phrase_id = 1
ms.speed = 100
ms.meter = 4
ms.duration = 300
ms.increment = 2
last_id = dbh.insert_metronome_setup(ms)
        assert last_id is not None
new_ms = dbh.get_metronome_setup_by_id(last_id)
assert new_ms == ms
new_ms.speed = 130
new_ms.meter = 5
new_ms.duration = 320
new_ms.increment = 3
result = dbh.update_metronome_setup(new_ms)
        assert result is True
updated_ms = dbh.get_metronome_setup_by_id(new_ms.id)
assert updated_ms == new_ms
class TestPrioritizedScheduleDatabaseHandler(Helper, DatabaseTestBase):
def test_priority(self):
psh = PrioritizedScheduleDatabaseHandler(self.dbfile)
psh.init_database()
phrase_count = 5
for p in range(phrase_count):
phrase = self._create_phrase()
phrase_id = psh.insert_phrase(phrase)
schedule = self._create_schedule()
schedule.set_next_repetition(datetime.date.today() - datetime.timedelta(days=p))
schedule.set_phrase_id(phrase_id)
schedule_id = psh.insert_schedule(schedule)
repetition_count = 3
for i in range(repetition_count):
rep = Repetition()
rep.set_phrase_id(phrase_id)
rep.set_date(schedule.get_next_repetition() - datetime.timedelta(days=i * repetition_count))
rep_id = psh.insert_repetition(rep)
schedules = psh.get_active_schedules()
for schedule in schedules:
print(("%.2f\t%s" % (schedule.get_priority(), schedule.get_next_repetition())))
assert schedules[0].get_priority() > schedules[1].get_priority() > schedules[2].get_priority() > schedules[3].get_priority() > schedules[4].get_priority()
|
ideabulbs/muspractice
|
muspractice/test/test_database.py
|
Python
|
gpl-3.0
| 6,082
|
"""Tests for the validators module"""
import pytest
import redbaron
import reddel_server
def test_OptionalRegionValidator_call():
"""Test that it's valid to not specify a region"""
testsrc = redbaron.RedBaron("1+1")
validator = reddel_server.OptionalRegionValidator()
validator(testsrc)
def test_OptionalRegionValidator_transform_region():
"""Test that the region is extracted when specified"""
testsrc = redbaron.RedBaron("a=1+1\nb=5")
start = reddel_server.Position(1, 1)
end = reddel_server.Position(1, 4)
validator = reddel_server.OptionalRegionValidator()
result = validator.transform(testsrc, start, end)
expected = (testsrc[0], reddel_server.Position(1, 1), reddel_server.Position(1, 5))
assert expected == result, "Expected that the region was extracted and the bounding box was updated."
def test_OptionalRegionValidator_transform_no_region():
"""Test that there is no tranformation without any region"""
testsrc = redbaron.RedBaron("a=1+1\nb=5")
validator = reddel_server.OptionalRegionValidator()
result = validator.transform(testsrc)
expected = (testsrc, None, None)
assert expected == result
def test_MandatoryRegionValidator_no_region():
"""Test that the validator raises without a region"""
testsrc = redbaron.RedBaron("1+1")
validator = reddel_server.MandatoryRegionValidator()
with pytest.raises(reddel_server.ValidationException):
validator(testsrc, start=(1, 1), end=None)
def test_MandatoryRegionValidator_region():
"""Test that there has to be a region"""
testsrc = redbaron.RedBaron("1+1")
validator = reddel_server.MandatoryRegionValidator()
validator(testsrc, start=reddel_server.Position(1, 1), end=reddel_server.Position(1, 3))
def test_SingleNodeValidator_no_region_invalid():
"""Test that the validator raises when there is more than one node and no region"""
testsrc = redbaron.RedBaron("1+1\n2+2")
validator = reddel_server.SingleNodeValidator()
with pytest.raises(reddel_server.ValidationException):
validator(testsrc)
def test_SingleNodeValidator_no_region_valid():
"""Test that the validator does not raise when there is one node and no region"""
testsrc = redbaron.RedBaron("1+1")
validator = reddel_server.SingleNodeValidator()
validator(testsrc)
def test_SingleNodeValidator_no_region_single_node_valid():
"""Test that the validator does not raise when there is one node and no region"""
testsrc = redbaron.RedBaron("for i in range(10):\n\ta=1\n\tb=2")[0]
validator = reddel_server.SingleNodeValidator()
validator(testsrc)
def test_SingleNodeValidator_region_invalid():
"""Test that the validator raises when there is more than one node in the region"""
testsrc = redbaron.RedBaron("1+1\n2+2")
validator = reddel_server.SingleNodeValidator()
with pytest.raises(reddel_server.ValidationException):
validator(testsrc, start=reddel_server.Position(1, 1), end=reddel_server.Position(2, 3))
def test_SingleNodeValidator_region_valid():
"""Test that the validator does not raise when there is one node in the region"""
testsrc = redbaron.RedBaron("1+1\n2+2")
validator = reddel_server.SingleNodeValidator()
validator(testsrc, start=reddel_server.Position(2, 1), end=reddel_server.Position(2, 3))
def test_SingleNodeValidator_transform_no_region_no_list():
"""Test that there is no transformation if there is no list"""
testsrc = redbaron.RedBaron("1+1")[0]
validator = reddel_server.SingleNodeValidator()
assert (testsrc, None, None) == validator.transform(testsrc, start=None, end=None)
def test_SingleNodeValidator_transform_region_no_list():
"""Test that there is no transformation if there is a region"""
testsrc = redbaron.RedBaron("1+1")
validator = reddel_server.SingleNodeValidator()
expected = (testsrc, (1, 1), (1, 3))
assert expected == validator.transform(testsrc, start=reddel_server.Position(1, 1),
end=reddel_server.Position(1, 3))
def test_SingleNodeValidator_transform_no_region_list():
"""Test the transformation when there is no region"""
testsrc = redbaron.RedBaron("1+1")
validator = reddel_server.SingleNodeValidator()
expected = "1+1"
assert expected == validator.transform(testsrc)[0].dumps()
def test_TypeValidator_valid_no_region_no_list():
"""Test a valid source that is not a list without a region"""
testsrc = redbaron.RedBaron("def foo(): pass")[0]
validator = reddel_server.TypeValidator(['def'])
validator(testsrc)
def test_TypeValidator_valid_no_region_list():
"""Test a valid source that is a list without a region"""
testsrc = redbaron.RedBaron("def foo(): pass\ndef bar(): pass")
validator = reddel_server.TypeValidator(['def'])
validator(testsrc)
def test_TypeValidator_valid_region_list():
"""Test a valid source that is a list with a region"""
testsrc = redbaron.RedBaron("a=1\ndef foo(): pass\ndef bar(): pass")
validator = reddel_server.TypeValidator(['def'])
validator(testsrc, start=reddel_server.Position(2, 1), end=reddel_server.Position(3, 1))
def test_TypeValidator_valid_region_no_list():
"""Test a valid source where the region specifies a single node"""
testsrc = redbaron.RedBaron("a=1\ndef foo(): pass\nb=2")
validator = reddel_server.TypeValidator(['def'])
validator(testsrc, start=reddel_server.Position(2, 1), end=reddel_server.Position(2, 1))
def test_TypeValidator_invalid_no_region_no_list():
"""Test that the validator raises for invalid sources without a region and list"""
testsrc = redbaron.RedBaron("1+1")[0]
validator = reddel_server.TypeValidator(['def'])
with pytest.raises(reddel_server.ValidationException):
validator(testsrc)
def test_TypeValidator_invalid_no_region_list():
"""Test that the validator raises for invalid sources without a region but a list"""
testsrc = redbaron.RedBaron("def foo(): pass\na=1")
validator = reddel_server.TypeValidator(['def'])
with pytest.raises(reddel_server.ValidationException):
validator(testsrc)
def test_TypeValidator_invalid_region_list():
"""Test that the validator raises for invalid sources with a region and list"""
testsrc = redbaron.RedBaron("def foo():\n\ta=1\n\tdef bar(): pass")
validator = reddel_server.TypeValidator(['def'])
with pytest.raises(reddel_server.ValidationException):
validator(testsrc, start=reddel_server.Position(2, 3), end=reddel_server.Position(3, 3))
def test_TypeValidator_invalid_region_no_list():
"""Test that the validator raises for invalid sources with a region and no list"""
testsrc = redbaron.RedBaron("def foo():\n\ta=1")
validator = reddel_server.TypeValidator(['def'])
with pytest.raises(reddel_server.ValidationException):
validator(testsrc, start=reddel_server.Position(2, 3), end=reddel_server.Position(2, 4))
|
storax/reddel-server
|
tests/test_validators.py
|
Python
|
gpl-3.0
| 7,005
|
import unittest
from reckerbot import Message
class TestMessage(unittest.TestCase):
def test_is_direct_message(self):
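        # Slack encodes the channel type in the ID's first character:
        # 'D...' is a direct-message channel, 'C...' a public channel.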
actual = Message(data={'channel': 'D0120K8DHQX'}).is_direct_message()
self.assertEqual(actual, True)
actual = Message(data={'channel': 'C012MGSMG5S'}).is_direct_message()
self.assertEqual(actual, False)
if __name__ == '__main__':
unittest.main()
|
arecker/reckerbot
|
test/test_message.py
|
Python
|
gpl-3.0
| 409
|
# noinspection PyPackageRequirements
from collections import namedtuple
from logbook import Logger
import uuid
import time
import config
import base64
import secrets
import hashlib
import json
from jose import jwt
from jose.exceptions import ExpiredSignatureError, JWTError, JWTClaimsError
import os
import datetime
from service.const import EsiSsoMode, EsiEndpoints
from service.settings import EsiSettings, NetworkSettings
from datetime import timedelta
from requests_cache import CachedSession
from requests import Session
from urllib.parse import urlencode
pyfalog = Logger(__name__)
scopes = [
'esi-skills.read_skills.v1',
'esi-fittings.read_fittings.v1',
'esi-fittings.write_fittings.v1'
]
ApiBase = namedtuple('ApiBase', ['sso', 'esi'])
supported_servers = {
"Tranquility": ApiBase("login.eveonline.com", "esi.evetech.net"),
"Singularity": ApiBase("sisilogin.testeveonline.com", "esi.evetech.net"),
"Serenity": ApiBase("login.evepc.163.com", "esi.evepc.163.com")
}
class GenericSsoError(Exception):
""" Exception used for generic SSO errors that aren't directly related to an API call
"""
pass
class APIException(Exception):
""" Exception for API related errors """
def __init__(self, url, code, json_response):
self.url = url
self.status_code = code
self.response = json_response
super(APIException, self).__init__(str(self))
def __str__(self):
if 'error_description' in self.response:
return 'HTTP Error %s: %s' % (self.status_code,
self.response['error_description'])
elif 'message' in self.response:
return 'HTTP Error %s: %s' % (self.status_code,
self.response['message'])
return 'HTTP Error %s' % self.status_code
class EsiAccess:
def __init__(self):
self.settings = EsiSettings.getInstance()
self.server_base: ApiBase = supported_servers[self.settings.get("server")]
# session request stuff
self._session = Session()
self._basicHeaders = {
'Accept': 'application/json',
'User-Agent': (
'pyfa v{}'.format(config.version)
)
}
self._session.headers.update(self._basicHeaders)
self._session.proxies = NetworkSettings.getInstance().getProxySettingsInRequestsFormat()
# Set up cached session. This is only used for SSO meta data for now, but can be expanded to actually handle
# various ESI caching (using ETag, for example) in the future
cached_session = CachedSession(
os.path.join(config.savePath, config.ESI_CACHE),
backend="sqlite",
cache_control=True, # Use Cache-Control headers for expiration, if available
expire_after=timedelta(days=1), # Otherwise expire responses after one day
stale_if_error=True, # In case of request errors, use stale cache data if possible
)
cached_session.headers.update(self._basicHeaders)
cached_session.proxies = NetworkSettings.getInstance().getProxySettingsInRequestsFormat()
meta_call = cached_session.get("https://%s/.well-known/oauth-authorization-server" % self.server_base.sso)
meta_call.raise_for_status()
self.server_meta = meta_call.json()
jwks_call = cached_session.get(self.server_meta["jwks_uri"])
jwks_call.raise_for_status()
self.jwks = jwks_call.json()
@property
def sso_url(self):
return 'https://%s/v2' % self.server_base.sso
@property
def esi_url(self):
return 'https://%s' % self.server_base.esi
@property
def oauth_authorize(self):
return self.server_meta["authorization_endpoint"]
@property
def oauth_token(self):
return self.server_meta["token_endpoint"]
@property
def client_id(self):
return self.settings.get('clientID') or config.API_CLIENT_ID
@staticmethod
def update_token(char, tokenResponse):
""" helper function to update token data from SSO response """
char.accessToken = tokenResponse['access_token']
char.accessTokenExpires = datetime.datetime.fromtimestamp(time.time() + tokenResponse['expires_in'])
if 'refresh_token' in tokenResponse:
char.refreshToken = config.cipher.encrypt(tokenResponse['refresh_token'].encode())
def get_login_uri(self, redirect=None):
self.state = str(uuid.uuid4())
# Generate the PKCE code challenge
self.code_verifier = base64.urlsafe_b64encode(secrets.token_bytes(32))
m = hashlib.sha256()
m.update(self.code_verifier)
d = m.digest()
code_challenge = base64.urlsafe_b64encode(d).decode().replace("=", "")
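        # Per RFC 7636 (S256): code_challenge = BASE64URL(SHA256(code_verifier))
        # with '=' padding stripped; the raw verifier is sent later in auth() so
        # the SSO can verify the same client started the flow.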
state_arg = {
'mode': self.settings.get('loginMode'),
'redirect': redirect,
'state': self.state
}
args = {
'response_type': 'code',
'redirect_uri': config.SSO_CALLBACK,
'client_id': self.client_id,
'scope': ' '.join(scopes),
'code_challenge': code_challenge,
'code_challenge_method': 'S256',
'state': base64.b64encode(bytes(json.dumps(state_arg), 'utf-8'))
}
return '%s?%s' % (
self.oauth_authorize,
urlencode(args)
)
def get_oauth_header(self, token):
""" Return the Bearer Authorization header required in oauth calls
:return: a dict with the authorization header
"""
return {'Authorization': 'Bearer %s' % token}
def auth(self, code):
values = {
'grant_type': 'authorization_code',
'code': code,
'client_id': self.client_id,
"code_verifier": self.code_verifier
}
res = self.token_call(values)
json_res = res.json()
decoded_jwt = self.validate_eve_jwt(json_res['access_token'])
return json_res, decoded_jwt
def refresh(self, ssoChar):
# todo: properly handle invalid refresh token
values = {
"grant_type": "refresh_token",
"refresh_token": config.cipher.decrypt(ssoChar.refreshToken).decode(),
"client_id": self.client_id,
}
res = self.token_call(values)
json_res = res.json()
self.update_token(ssoChar, json_res)
return json_res
def token_call(self, values):
headers = {
"Content-Type": "application/x-www-form-urlencoded",
"Host": self.server_base.sso,
}
res = self._session.post(
self.server_meta["token_endpoint"],
data=values,
headers=headers,
)
if res.status_code != 200:
raise APIException(
self.server_meta["token_endpoint"],
res.status_code,
res.json()
)
return res
def validate_eve_jwt(self, jwt_token):
"""Validate a JWT token retrieved from the EVE SSO.
Args:
jwt_token: A JWT token originating from the EVE SSO
Returns
dict: The contents of the validated JWT token if there are no
validation errors
"""
try:
jwk_sets = self.jwks["keys"]
except KeyError as e:
raise GenericSsoError("Something went wrong when retrieving the JWK set. The returned "
"payload did not have the expected key {}. \nPayload returned "
"from the SSO looks like: {}".format(e, self.jwks))
jwk_set = next((item for item in jwk_sets if item["alg"] == "RS256"))
try:
return jwt.decode(
jwt_token,
jwk_set,
algorithms=jwk_set["alg"],
issuer=[self.server_base.sso, "https://%s" % self.server_base.sso]
)
except ExpiredSignatureError as e:
raise GenericSsoError("The JWT token has expired: {}".format(str(e)))
except JWTError as e:
raise GenericSsoError("The JWT signature was invalid: {}".format(str(e)))
except JWTClaimsError as e:
raise GenericSsoError("The issuer claim was not from login.eveonline.com or "
"https://login.eveonline.com: {}".format(str(e)))
def _before_request(self, ssoChar):
self._session.headers.clear()
self._session.headers.update(self._basicHeaders)
if ssoChar is None:
return
if ssoChar.is_token_expired():
pyfalog.info("Refreshing token for {}".format(ssoChar.characterName))
self.refresh(ssoChar)
if ssoChar.accessToken is not None:
self._session.headers.update(self.get_oauth_header(ssoChar.accessToken))
def _after_request(self, resp):
if "warning" in resp.headers:
pyfalog.warn("{} - {}".format(resp.headers["warning"], resp.url))
if resp.status_code >= 400:
raise APIException(
resp.url,
resp.status_code,
resp.json()
)
return resp
def get(self, ssoChar, endpoint, **kwargs):
self._before_request(ssoChar)
endpoint = endpoint.format(**kwargs)
return self._after_request(self._session.get("{}{}".format(self.esi_url, endpoint)))
def post(self, ssoChar, endpoint, json, **kwargs):
self._before_request(ssoChar)
endpoint = endpoint.format(**kwargs)
return self._after_request(self._session.post("{}{}".format(self.esi_url, endpoint), data=json))
def delete(self, ssoChar, endpoint, **kwargs):
self._before_request(ssoChar)
endpoint = endpoint.format(**kwargs)
return self._after_request(self._session.delete("{}{}".format(self.esi_url, endpoint)))
# todo: move these off to another class which extends this one. This class should only handle the low level
    # authentication and communication.
def getDynamicItem(self, typeID, itemID):
return self.get(None, EsiEndpoints.DYNAMIC_ITEM.value, type_id=typeID, item_id=itemID)
def getSkills(self, char):
return self.get(char, EsiEndpoints.CHAR_SKILLS.value, character_id=char.characterID)
def getSecStatus(self, char):
return self.get(char, EsiEndpoints.CHAR.value, character_id=char.characterID)
def getFittings(self, char):
return self.get(char, EsiEndpoints.CHAR_FITTINGS.value, character_id=char.characterID)
def postFitting(self, char, json_str):
# @todo: new fitting ID can be recovered from resp.data,
return self.post(char, EsiEndpoints.CHAR_FITTINGS.value, json_str, character_id=char.characterID)
def delFitting(self, char, fittingID):
return self.delete(char, EsiEndpoints.CHAR_DEL_FIT.value, character_id=char.characterID, fitting_id=fittingID)
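# Usage sketch (illustrative only, not pyfa code): the expected OAuth2/PKCE round
# trip with this class. `code` comes from the SSO redirect callback and `sso_char`
# is a character object exposing the attributes update_token() writes to.
#   esi = EsiAccess()
#   login_url = esi.get_login_uri()    # open in a browser to authorize
#   tokens, claims = esi.auth(code)    # exchange the returned code for tokens
#   skills = esi.getSkills(sso_char).json()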
|
pyfa-org/Pyfa
|
service/esiAccess.py
|
Python
|
gpl-3.0
| 11,002
|
# -*- coding: utf-8 -*-
import re
from module.plugins.Hook import Hook
from module.utils import remove_chars
class MultiHook(Hook):
__name__ = "MultiHook"
__type__ = "hook"
__version__ = "0.27"
__config__ = [("mode" , "all;listed;unlisted", "Use for plugins (if supported)" , "all"),
("pluginlist" , "str" , "Plugin list (comma separated)" , "" ),
("revertfailed", "bool" , "Revert to standard download if download fails", False),
("interval" , "int" , "Reload interval in hours (0 to disable)" , 12 )]
__description__ = """Hook plugin for multi hoster/crypter"""
__license__ = "GPLv3"
__authors__ = [("pyLoad Team", "admin@pyload.org"),
("Walter Purcaro", "vuolter@gmail.com")]
MIN_INTERVAL = 12 * 60 * 60 #: reload plugins every 12h
PLUGIN_REPLACEMENTS = [("1fichier.com" , "onefichier.com"),
("2shared.com" , "twoshared.com" ),
("4shared.com" , "fourshared.com"),
("cloudnator.com" , "shragle.com" ),
("easy-share.com" , "crocko.com" ),
("fileparadox.com", "fileparadox.in"),
("freakshare.net" , "freakshare.com"),
("hellshare.com" , "hellshare.cz" ),
("ifile.it" , "filecloud.io" ),
("nowdownload.ch" , "nowdownload.sx"),
("nowvideo.co" , "nowvideo.sx" ),
("putlocker.com" , "firedrive.com" ),
("share-rapid.cz" , "multishare.cz" ),
("sharerapid.cz" , "multishare.cz" ),
("ul.to" , "uploaded.to" ),
("uploaded.net" , "uploaded.to" )]
def setup(self):
self.type = self.core.pluginManager.findPlugin(self.__name__)[1] or "hoster"
self.plugins = []
self.supported = []
self.new_supported = []
def coreReady(self):
self.account = self.core.accountManager.getAccountPlugin(self.__name__)
def getURL(self, *args, **kwargs): #@TODO: Remove in 0.4.10
""" see HTTPRequest for argument list """
h = pyreq.getHTTPRequest(timeout=120)
try:
rep = h.load(*args, **kwargs)
finally:
h.close()
return rep
def getConfig(self, option, default=''):
"""getConfig with default value - sublass may not implements all config options"""
try:
return self.getConf(option)
except KeyError:
return default
def pluginCached(self):
if not self.plugins:
try:
pluginset = self.pluginSet(self.getHosters() if self.type == "hoster" else self.getCrypters())
except Exception, e:
self.logError(e)
return []
try:
configmode = self.getConfig("mode", 'all')
if configmode in ("listed", "unlisted"):
pluginlist = self.getConfig("pluginlist", '').replace('|', ',').replace(';', ',').split(',')
configset = self.pluginSet(pluginlist)
if configmode == "listed":
pluginset &= configset
else:
pluginset -= configset
except Exception, e:
self.logError(e)
self.plugins = list(pluginset)
return self.plugins
def pluginSet(self, plugins):
plugins = set((str(x).strip().lower() for x in plugins))
for rep in self.PLUGIN_REPLACEMENTS:
if rep[0] in plugins:
plugins.remove(rep[0])
plugins.add(rep[1])
plugins.discard('')
return plugins
def getHosters(self):
"""Load list of supported hoster
:return: List of domain names
"""
raise NotImplementedError
def getCrypters(self):
"""Load list of supported crypters
:return: List of domain names
"""
raise NotImplementedError
def periodical(self):
"""reload plugin list periodically"""
self.interval = max(self.getConfig("interval", 0), self.MIN_INTERVAL)
self.logInfo(_("Reloading supported %s list") % self.type)
old_supported = self.supported
self.supported = []
self.new_supported = []
self.plugins = []
self.overridePlugins()
old_supported = [plugin for plugin in old_supported if plugin not in self.supported]
if old_supported:
self.logDebug("Unload: %s" % ", ".join(old_supported))
for plugin in old_supported:
self.unloadPlugin(plugin)
def overridePlugins(self):
excludedList = []
if self.type == "hoster":
pluginMap = dict((name.lower(), name) for name in self.core.pluginManager.hosterPlugins.iterkeys())
accountList = [account.type.lower() for account in self.core.api.getAccounts(False) if account.valid and account.premium]
else:
pluginMap = {}
accountList = [name[::-1].replace("Folder"[::-1], "", 1).lower()[::-1] for name in self.core.pluginManager.crypterPlugins.iterkeys()]
for plugin in self.pluginCached():
name = remove_chars(plugin, "-.")
if name in accountList:
excludedList.append(plugin)
else:
if name in pluginMap:
self.supported.append(pluginMap[name])
else:
self.new_supported.append(plugin)
if not self.supported and not self.new_supported:
self.logError(_("No %s loaded") % self.type)
return
module = self.core.pluginManager.getPlugin(self.__name__)
klass = getattr(module, self.__name__)
        # inject this plugin's module in place of each supported plugin
self.logDebug("Overwritten %ss: %s" % (self.type, ", ".join(sorted(self.supported))))
for plugin in self.supported:
hdict = self.core.pluginManager.plugins[self.type][plugin]
hdict['new_module'] = module
hdict['new_name'] = self.__name__
if excludedList:
self.logInfo(_("%ss not overwritten: %s") % (self.type.capitalize(), ", ".join(sorted(excludedList))))
if self.new_supported:
plugins = sorted(self.new_supported)
self.logDebug("New %ss: %s" % (self.type, ", ".join(plugins)))
# create new regexp
regexp = r'.*(%s).*' % "|".join([x.replace(".", "\.") for x in plugins])
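            # e.g. plugins ["uploaded.to", "zippyshare.com"] yield the pattern
            # r'.*(uploaded\.to|zippyshare\.com).*'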
if hasattr(klass, "__pattern__") and isinstance(klass.__pattern__, basestring) and '://' in klass.__pattern__:
regexp = r'%s|%s' % (klass.__pattern__, regexp)
self.logDebug("Regexp: %s" % regexp)
hdict = self.core.pluginManager.plugins[self.type][self.__name__]
hdict['pattern'] = regexp
hdict['re'] = re.compile(regexp)
def unloadPlugin(self, plugin):
hdict = self.core.pluginManager.plugins[self.type][plugin]
if "module" in hdict:
del hdict['module']
if "new_module" in hdict:
del hdict['new_module']
del hdict['new_name']
def unload(self):
"""Remove override for all plugins. Scheduler job is removed by hookmanager"""
for plugin in self.supported:
self.unloadPlugin(plugin)
# reset pattern
klass = getattr(self.core.pluginManager.getPlugin(self.__name__), self.__name__)
hdict = self.core.pluginManager.plugins[self.type][self.__name__]
hdict['pattern'] = getattr(klass, "__pattern__", r'^unmatchable$')
hdict['re'] = re.compile(hdict['pattern'])
def downloadFailed(self, pyfile):
"""remove plugin override if download fails but not if file is offline/temp.offline"""
if pyfile.hasStatus("failed") and self.getConfig("revertfailed", True):
hdict = self.core.pluginManager.plugins[self.type][pyfile.pluginname]
if "new_name" in hdict and hdict['new_name'] == self.__name__:
self.logDebug("Unload MultiHook", pyfile.pluginname, hdict)
self.unloadPlugin(pyfile.pluginname)
pyfile.setStatus("queued")
|
mariusbaumann/pyload
|
module/plugins/internal/MultiHook.py
|
Python
|
gpl-3.0
| 8,647
|
import re
import base64
import os
# Web app
flask_host = os.getenv('FLASK_HOST', 'localhost.localdomain')
flask_port = int(os.getenv('FLASK_PORT', 5000))
flask_use_ssl = bool(int(os.getenv('FLASK_USE_SSL', 1)))
flask_server_name = os.getenv('FLASK_SERVER_NAME', None)
if not flask_server_name:
if ((flask_use_ssl and flask_port == 443) or
(not flask_use_ssl and flask_port == 80)):
flask_port_spec = ''
else:
flask_port_spec = ':{}'.format(flask_port)
flask_server_name = '{}{}'.format(
flask_host,
flask_port_spec
)
try:
flask_secret_key = base64.b64decode(os.getenv('FLASK_SECRET_KEY', None))
except TypeError:
raise Exception('FLASK_SECRET_KEY must be base64 encoded.')
flask_debug_mode = bool(int(os.getenv('FLASK_DEBUG_MODE', 1)))
# OIDC
oidc_domain = os.getenv('OIDC_DOMAIN', None)
oidc_client_id = os.getenv('OIDC_CLIENT_ID', None)
oidc_client_secret = os.getenv('OIDC_CLIENT_SECRET', None)
# Mail
email_enabled = bool(int(os.getenv('EMAIL_ENABLED', 1)))
email_account = os.getenv('EMAIL_ACCOUNT', 'automation@mozilla.com')
email_password = os.getenv('EMAIL_PASSWORD', None)
email_from = os.getenv('EMAIL_FROM', 'Mozilla A-Team <auto-tools@mozilla.com>')
email_smtp_server = os.getenv('EMAIL_SMTP_SERVER', 'smtp.mozilla.org')
email_smtp_port = int(os.getenv('EMAIL_SMTP_PORT', 25))
email_ssl = bool(int(os.getenv('EMAIL_SSL', 0)))
# Database
database_url = os.getenv('DATABASE_URL',
'postgresql://root@localhost/pulseguardian')
# `postgres://` is deprecated in favor of `postgresql://`, but Heroku still issues `postgres://` URLs
database_url = re.sub('^postgres:', 'postgresql:', database_url)
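# e.g. 'postgres://user:pw@host/db' becomes 'postgresql://user:pw@host/db'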
pool_recycle_interval = int(os.getenv('POOL_RECYCLE_INTERVAL', 60))
# RabbitMQ
# Management API URL.
rabbit_management_url = os.getenv('RABBIT_MANAGEMENT_URL',
'http://localhost:15672/api/')
rabbit_vhost = os.getenv('RABBIT_VHOST', '/')
# RabbitMQ user with administrator privilege.
rabbit_user = os.getenv('RABBIT_USER', 'guest')
# Password of the RabbitMQ user.
rabbit_password = os.getenv('RABBIT_PASSWORD', 'guest')
# reserved users
reserved_users_regex = os.getenv('RESERVED_USERS_REGEX', None)
reserved_users_message = os.getenv('RESERVED_USERS_MESSAGE', None)
# PulseGuardian
warn_queue_size = int(os.getenv('WARN_QUEUE_SIZE', 2000))
del_queue_size = int(os.getenv('DEL_QUEUE_SIZE', 8000))
polling_interval = int(os.getenv('POLLING_INTERVAL', 5))
polling_max_interval = int(os.getenv('POLLING_MAX_INTERVAL', 300))
fake_account = os.getenv('FAKE_ACCOUNT', None)
# Only used if at least one log path is specified above.
max_log_size = int(os.getenv('MAX_LOG_SIZE', 20480))
backup_count = int(os.getenv('BACKUP_COUNT', 5))
|
mozilla/pulseguardian
|
pulseguardian/config.py
|
Python
|
mpl-2.0
| 2,761
|
# encoding: utf-8
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import unittest
from bugzilla_etl import bz_etl, extract_bugzilla
from bugzilla_etl.alias_analysis import AliasAnalyzer
from bugzilla_etl.bz_etl import etl, etl_comments
from bugzilla_etl.extract_bugzilla import get_current_time
from mo_dots import Data
from mo_logs import startup, Log, constants
from mo_threads import ThreadedQueue
from pyLibrary import convert
from pyLibrary.sql.mysql import all_db, MySQL
from pyLibrary.testing import elasticsearch
from pyLibrary.testing.elasticsearch import FakeES
from test_etl import compare_both, MIN_TIMESTAMP, refresh_metadata
from util.database import make_test_instance
class TestExamples(unittest.TestCase):
"""
USE THIS TO TEST A SPECIFIC SET OF BUGS FROM A LARGE BUGZILLA DATABASE
I USE THIS TO IDENTIFY CANDIDATES TO ADD TO THE TEST SUITE
"""
def setUp(self):
self.settings = startup.read_settings(filename="tests/resources/config/test_examples.json")
constants.set(self.settings.constants)
Log.start(self.settings.debug)
self.alias_analyzer = AliasAnalyzer(self.settings.alias)
def tearDown(self):
#CLOSE THE CACHED MySQL CONNECTIONS
bz_etl.close_db_connections()
if all_db:
Log.error("not all db connections are closed")
Log.stop()
def test_specific_bugs(self):
"""
USE A MYSQL DATABASE TO FILL AN ES INSTANCE (USE Fake_ES() INSTANCES TO KEEP
THIS TEST LOCAL) WITH VERSIONS OF BUGS FROM settings.param.bugs. COMPARE
THOSE VERSIONS TO A REFERENCE ES (ALSO CHECKED INTO REPOSITORY)
"""
reference = FakeES(self.settings.reference)
candidate = elasticsearch.make_test_instance(self.settings.bugs)
candidate_comments = elasticsearch.make_test_instance(self.settings.comments)
make_test_instance(self.settings.bugzilla)
with MySQL(self.settings.bugzilla) as db:
# SETUP RUN PARAMETERS
param = Data()
param.end_time = convert.datetime2milli(get_current_time(db))
param.start_time = MIN_TIMESTAMP
param.start_time_str = extract_bugzilla.milli2string(db, MIN_TIMESTAMP)
param.alias_file = self.settings.param.alias_file
param.bug_list = self.settings.param.bugs
param.allow_private_bugs = self.settings.param.allow_private_bugs
with ThreadedQueue("etl queue", candidate, batch_size=1000) as output:
etl(db, output, param, self.alias_analyzer, please_stop=None)
with ThreadedQueue("etl queue", candidate_comments, batch_size=1000) as output:
etl_comments(db, output, param, please_stop=None)
# COMPARE ALL BUGS
refresh_metadata(candidate)
compare_both(candidate, reference, self.settings, self.settings.param.bugs)
|
klahnakoski/Bugzilla-ETL
|
tests/test_examples.py
|
Python
|
mpl-2.0
| 3,218
|
import os
config = {
# mozconfig file to use, it depends on branch and platform names
"platform": "macosx64",
"stage_product": "firefox",
"update_platform": "Darwin_x86_64-gcc3",
"mozconfig": "%(branch)s/browser/config/mozconfigs/macosx-universal/l10n-mozconfig",
"bootstrap_env": {
"SHELL": '/bin/bash',
"MOZ_OBJDIR": "obj-l10n",
"EN_US_BINARY_URL": "%(en_us_binary_url)s",
"MOZ_UPDATE_CHANNEL": "%(update_channel)s",
"MOZ_PKG_PLATFORM": "mac",
# "IS_NIGHTLY": "yes",
"DIST": "%(abs_objdir)s",
"LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
"L10NBASEDIR": "../../l10n",
"MOZ_MAKE_COMPLETE_MAR": "1",
"LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
'TOOLTOOL_CACHE': '/builds/tooltool_cache',
'TOOLTOOL_HOME': '/builds',
},
"ssh_key_dir": "~/.ssh",
"log_name": "single_locale",
"objdir": "obj-l10n",
"js_src_dir": "js/src",
"vcs_share_base": "/builds/hg-shared",
"upload_env_extra": {
"MOZ_PKG_PLATFORM": "mac",
},
# tooltool
'tooltool_url': 'https://api.pub.build.mozilla.org/tooltool/',
'tooltool_script': ["/builds/tooltool.py"],
'tooltool_bootstrap': "setup.sh",
'tooltool_manifest_src': 'browser/config/tooltool-manifests/macosx64/releng.manifest',
# balrog credential file:
'balrog_credentials_file': 'oauth.txt',
# l10n
"ignore_locales": ["en-US", "ja"],
"l10n_dir": "l10n",
"locales_file": "%(branch)s/browser/locales/all-locales",
"locales_dir": "browser/locales",
"hg_l10n_tag": "default",
"merge_locales": True,
# MAR
"previous_mar_dir": "dist/previous",
"current_mar_dir": "dist/current",
"update_mar_dir": "dist/update", # sure?
"previous_mar_filename": "previous.mar",
"current_work_mar_dir": "current.work",
"package_base_dir": "dist/l10n-stage",
"application_ini": "Contents/Resources/application.ini",
"buildid_section": 'App',
"buildid_option": "BuildID",
"unpack_script": "tools/update-packaging/unwrap_full_update.pl",
"incremental_update_script": "tools/update-packaging/make_incremental_update.sh",
"balrog_release_pusher_script": "scripts/updates/balrog-release-pusher.py",
"update_packaging_dir": "tools/update-packaging",
"local_mar_tool_dir": "dist/host/bin",
"mar": "mar",
"mbsdiff": "mbsdiff",
"current_mar_filename": "firefox-%(version)s.%(locale)s.mac.complete.mar",
"complete_mar": "firefox-%(version)s.en-US.mac.complete.mar",
"localized_mar": "firefox-%(version)s.%(locale)s.mac.complete.mar",
"partial_mar": "firefox-%(version)s.%(locale)s.mac.partial.%(from_buildid)s-%(to_buildid)s.mar",
'installer_file': "firefox-%(version)s.en-US.mac.dmg",
'exes': {
'hgtool.py': os.path.join(
os.getcwd(), 'build', 'tools', 'buildfarm', 'utils', 'hgtool.py'
),
},
}
|
cstipkovic/spidermonkey-research
|
testing/mozharness/configs/single_locale/macosx64.py
|
Python
|
mpl-2.0
| 2,930
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2018: Alignak team, see AUTHORS.txt file for contributors
#
# This file is part of Alignak.
#
# Alignak is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Alignak is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Alignak. If not, see <http://www.gnu.org/licenses/>.
#
#
# This file incorporates work covered by the following copyright and
# permission notice:
#
# Copyright (C) 2009-2014:
# aviau, alexandre.viau@savoirfairelinux.com
# Grégory Starck, g.starck@gmail.com
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
"""
This module provide SchedulerLink and SchedulerLinks classes used to manage schedulers
"""
import logging
from alignak.objects.satellitelink import SatelliteLink, SatelliteLinks
from alignak.property import BoolProp, IntegerProp, StringProp, FULL_STATUS
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
class SchedulerLink(SatelliteLink):
"""
Class to manage the scheduler information
"""
# Ok we lie a little here because we are a mere link in fact
my_type = 'scheduler'
my_name_property = "%s_name" % my_type
properties = SatelliteLink.properties.copy()
properties.update({
'type':
StringProp(default=u'scheduler', fill_brok=[FULL_STATUS], to_send=True),
'scheduler_name':
StringProp(default='', fill_brok=[FULL_STATUS]),
'port':
IntegerProp(default=7768, fill_brok=[FULL_STATUS], to_send=True),
'weight':
IntegerProp(default=1, fill_brok=[FULL_STATUS]),
'skip_initial_broks':
BoolProp(default=False, fill_brok=[FULL_STATUS], to_send=True),
'accept_passive_unknown_check_results':
BoolProp(default=False, fill_brok=[FULL_STATUS], to_send=True),
})
running_properties = SatelliteLink.running_properties.copy()
running_properties.update({
# 'conf':
# StringProp(default=None),
# 'cfg':
# DictProp(default={}),
'need_conf':
StringProp(default=True),
'external_commands':
StringProp(default=[]),
})
def get_override_configuration(self):
"""
        Some parameters can be given as 'overridden parameters' like use_timezone
so they will be mixed (in the scheduler) with the standard conf sent by the arbiter
:return: dictionary of properties
:rtype: dict
"""
res = {}
properties = self.__class__.properties
for prop, entry in list(properties.items()):
if entry.override:
res[prop] = getattr(self, prop)
return res
class SchedulerLinks(SatelliteLinks):
"""Please Add a Docstring to describe the class here"""
name_property = "scheduler_name"
inner_class = SchedulerLink
|
Alignak-monitoring/alignak
|
alignak/objects/schedulerlink.py
|
Python
|
agpl-3.0
| 3,944
|
from django.contrib import admin
from django.utils.translation import gettext_lazy as _
from .models import Partner
@admin.register(Partner)
class PartnerAdmin(admin.ModelAdmin):
list_display = ('id', 'name', 'email', 'phones', 'address', 'is_company')
list_display_links = ('id', 'name')
search_fields = ('id', 'name', 'email')
ordering = ('name',)
list_filter = ('is_company',)
readonly_fields = ('created_at', 'last_modified', 'created_by')
fieldsets = ( # Edition form
(None, {'fields': (('name', 'is_company'), ('email', 'website'), ('phone', 'mobile'), ('address',), ('comment',))}),
(_('More...'), {'fields': (('created_at', 'last_modified'), 'created_by'), 'classes': ('collapse',)}),
)
def get_fieldsets(self, request, obj=None):
fieldsets = super().get_fieldsets(request, obj)
if obj is None:
fieldsets = ( # Creation form
(None, {'fields': (('name', 'is_company'), ('email', 'website'), ('phone', 'mobile'), ('address',), ('comment',))}),
)
return fieldsets
def save_model(self, request, obj, form, change):
if change is False:
obj.created_by = request.user
super().save_model(request, obj, form, change)
|
mrsarm/django-coleman
|
partner/admin.py
|
Python
|
agpl-3.0
| 1,275
|
"""
"""
import os
import numpy, time, cPickle, gzip, sys
import theano
import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams
#from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
from mlp import HiddenLayer
from rbm import RBM
from logistic_sgd import LogisticRegression
class DBN(object):
"""Deep Belief Network
A deep belief network is obtained by stacking several RBMs on top of each
other. The hidden layer of the RBM at layer `i` becomes the input of the
RBM at layer `i+1`. The first layer RBM gets as input the input of the
network, and the hidden layer of the last RBM represents the output. When
used for classification, the DBN is treated as a MLP, by adding a logistic
regression layer on top.
"""
def __init__(self, numpy_rng, theano_rng = None, n_ins = 784,
hidden_layers_sizes = [500,500], n_outs = 10):
"""This class is made to support a variable number of layers.
:type numpy_rng: numpy.random.RandomState
:param numpy_rng: numpy random number generator used to draw initial
weights
:type theano_rng: theano.tensor.shared_randomstreams.RandomStreams
:param theano_rng: Theano random generator; if None is given one is
generated based on a seed drawn from `rng`
:type n_ins: int
:param n_ins: dimension of the input to the DBN
        :type hidden_layers_sizes: list of ints
        :param hidden_layers_sizes: intermediate layers size, must contain
                               at least one value
:type n_outs: int
:param n_outs: dimension of the output of the network
"""
self.sigmoid_layers = []
self.rbm_layers = []
self.params = []
self.n_layers = len(hidden_layers_sizes)
assert self.n_layers > 0
if not theano_rng:
theano_rng = RandomStreams(numpy_rng.randint(2**30))
# allocate symbolic variables for the data
self.x = T.matrix('x') # the data is presented as rasterized images
self.y = T.ivector('y') # the labels are presented as 1D vector of
# [int] labels
# The DBN is an MLP, for which all weights of intermediate layers are shared with a
# different RBM. We will first construct the DBN as a deep multilayer perceptron, and
# when constructing each sigmoidal layer we also construct an RBM that shares weights
        # with that layer. During pretraining we will train these RBMs (which will
        # lead to changing the weights of the MLP as well). During finetuning we
        # will finish training the DBN by doing stochastic gradient descent on the MLP.
for i in xrange( self.n_layers ):
# construct the sigmoidal layer
# the size of the input is either the number of hidden units of the layer below or
# the input size if we are on the first layer
if i == 0 :
input_size = n_ins
else:
input_size = hidden_layers_sizes[i-1]
# the input to this layer is either the activation of the hidden layer below or the
# input of the DBN if you are on the first layer
if i == 0 :
layer_input = self.x
else:
layer_input = self.sigmoid_layers[-1].output
sigmoid_layer = HiddenLayer(rng = numpy_rng,
input = layer_input,
n_in = input_size,
n_out = hidden_layers_sizes[i],
activation = T.nnet.sigmoid)
# add the layer to our list of layers
self.sigmoid_layers.append(sigmoid_layer)
            # It's arguably a philosophical question... but we are going to only declare
            # that the parameters of the sigmoid_layers are parameters of the DBN. The
            # visible biases in the RBM are parameters of those RBMs, but not of the DBN.
self.params.extend(sigmoid_layer.params)
            # Construct an RBM that shares weights with this layer
rbm_layer = RBM(numpy_rng = numpy_rng, theano_rng = theano_rng,
input = layer_input,
n_visible = input_size,
n_hidden = hidden_layers_sizes[i],
W = sigmoid_layer.W,
hbias = sigmoid_layer.b)
self.rbm_layers.append(rbm_layer)
# We now need to add a logistic layer on top of the MLP
self.logLayer = LogisticRegression(\
input = self.sigmoid_layers[-1].output,\
n_in = hidden_layers_sizes[-1], n_out = n_outs)
self.params.extend(self.logLayer.params)
# compute the cost for second phase of training, defined as the
# negative log likelihood of the logistic regression (output) layer
self.finetune_cost = self.logLayer.negative_log_likelihood(self.y)
# compute the gradients with respect to the model parameters
# symbolic variable that points to the number of errors made on the
# minibatch given by self.x and self.y
self.errors = self.logLayer.errors(self.y)
def pretraining_functions(self, train_set_x, batch_size,k):
''' Generates a list of functions, for performing one step of gradient descent at a
given layer. The function will require as input the minibatch index, and to train an
RBM you just need to iterate, calling the corresponding function on all minibatch
indexes.
:type train_set_x: theano.tensor.TensorType
:param train_set_x: Shared var. that contains all datapoints used for training the RBM
:type batch_size: int
:param batch_size: size of a [mini]batch
:param k: number of Gibbs steps to do in CD-k / PCD-k
'''
# index to a [mini]batch
index = T.lscalar('index') # index to a minibatch
learning_rate = T.scalar('lr') # learning rate to use
# number of batches
n_batches = train_set_x.value.shape[0] / batch_size
        # beginning of a batch, given `index`
batch_begin = index * batch_size
# ending of a batch given `index`
batch_end = batch_begin+batch_size
pretrain_fns = []
for rbm in self.rbm_layers:
# get the cost and the updates list
            # using CD-k here (persistent=None) for training each RBM.
# TODO: change cost function to reconstruction error
cost,updates = rbm.get_cost_updates(learning_rate, persistent=None, k =k)
# compile the theano function
fn = theano.function(inputs = [index,
theano.Param(learning_rate, default = 0.1)],
outputs = cost,
updates = updates,
givens = {self.x :train_set_x[batch_begin:batch_end]})
# append `fn` to the list of functions
pretrain_fns.append(fn)
return pretrain_fns
#def build_finetune_functions(self, datasets, batch_size, learning_rate):
# '''Generates a function `train` that implements one step of finetuning, a function
# `validate` that computes the error on a batch from the validation set, and a function
# `test` that computes the error on a batch from the testing set
#
# :type datasets: list of pairs of theano.tensor.TensorType
# :param datasets: It is a list that contain all the datasets; the has to contain three
# pairs, `train`, `valid`, `test` in this order, where each pair is formed of two Theano
# variables, one for the datapoints, the other for the labels
# :type batch_size: int
# :param batch_size: size of a minibatch
# :type learning_rate: float
# :param learning_rate: learning rate used during finetune stage
# '''
#
# (train_set_x, train_set_y) = datasets[0]
# (valid_set_x, valid_set_y) = datasets[1]
# (test_set_x , test_set_y ) = datasets[2]
#
# # compute number of minibatches for training, validation and testing
# n_valid_batches = valid_set_x.value.shape[0] / batch_size
# n_test_batches = test_set_x.value.shape[0] / batch_size
#
# index = T.lscalar('index') # index to a [mini]batch
#
# # compute the gradients with respect to the model parameters
# gparams = T.grad(self.finetune_cost, self.params)
#
# # compute list of fine-tuning updates
# updates = {}
# for param, gparam in zip(self.params, gparams):
# updates[param] = param - gparam*learning_rate
#
# train_fn = theano.function(inputs = [index],
# outputs = self.finetune_cost,
# updates = updates,
# givens = {
# self.x : train_set_x[index*batch_size:(index+1)*batch_size],
# self.y : train_set_y[index*batch_size:(index+1)*batch_size]})
#
# test_score_i = theano.function([index], self.errors,
# givens = {
# self.x: test_set_x[index*batch_size:(index+1)*batch_size],
# self.y: test_set_y[index*batch_size:(index+1)*batch_size]})
#
# valid_score_i = theano.function([index], self.errors,
# givens = {
# self.x: valid_set_x[index*batch_size:(index+1)*batch_size],
# self.y: valid_set_y[index*batch_size:(index+1)*batch_size]})
#
# # Create a function that scans the entire validation set
# def valid_score():
# return [valid_score_i(i) for i in xrange(n_valid_batches)]
#
# # Create a function that scans the entire test set
# def test_score():
# return [test_score_i(i) for i in xrange(n_test_batches)]
#
# return train_fn, valid_score, test_score
    # added MKT
    def export_model(self):
        return self.params

    def load_model(self, inpt_params):
        self.params = inpt_params
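

# --- Editor's sketch (not part of the original file): driving the pretraining
# functions above. Assumes ``load_data`` from logistic_sgd provides shared
# dataset variables, as in the Theano deep-learning tutorials this code
# derives from; epoch counts and the learning rate are illustrative only.
if __name__ == '__main__':
    from logistic_sgd import load_data

    datasets = load_data('mnist.pkl.gz')
    train_set_x, train_set_y = datasets[0]
    batch_size = 20
    n_train_batches = train_set_x.value.shape[0] / batch_size

    numpy_rng = numpy.random.RandomState(123)
    dbn = DBN(numpy_rng = numpy_rng, n_ins = 28 * 28,
              hidden_layers_sizes = [500, 500], n_outs = 10)

    # greedy layer-wise pretraining: for each layer, call its compiled
    # function on every minibatch index
    pretrain_fns = dbn.pretraining_functions(train_set_x, batch_size, k = 1)
    for layer, fn in enumerate(pretrain_fns):
        for epoch in xrange(2):
            costs = [fn(i, lr = 0.1) for i in xrange(n_train_batches)]
            print 'layer %i, epoch %i, mean cost %f' % \
                (layer, epoch, numpy.mean(costs))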
|
utunga/hashmapd
|
hashmapd/boneyard/DBN.py
|
Python
|
agpl-3.0
| 10,383
|
#!/usr/bin/env python3
# encoding: UTF-8
# This file is part of Addison Arches.
#
# Addison Arches is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Addison Arches is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Addison Arches. If not, see <http://www.gnu.org/licenses/>.
import itertools
import unittest
from turberfield.dialogue.model import Model
from turberfield.dialogue.model import SceneScript
from bluemonday78.presenter import Presenter
import bluemonday78.story
from bluemonday78.types import Character
from bluemonday78.types import Narrator
class DialogueLoader:
def setUp(self):
self.folders = [
SceneScript.Folder(
pkg="bluemonday78",
description="A Spike for Folder patterns.",
metadata={"location": "inner"},
paths=[
"dialogue/outer/inner/story/scene_01.rst",
],
interludes=itertools.repeat(None)
)
]
self.ensemble = [
Character(name="A Test Actor").set_state(10),
bluemonday78.story.build_narrator()
]
self.presenter = Presenter(None, self.ensemble)
class PresenterTests(DialogueLoader, unittest.TestCase):
def test_frame(self):
dialogue = self.presenter.dialogue(self.folders, self.ensemble)
presenter = Presenter(dialogue)
shot_index = 0
self.assertEqual(10, self.ensemble[0].state)
while presenter.pending:
with self.subTest(shot_index=shot_index):
frame = presenter.frame()
self.assertTrue(frame)
shot_index += 1
self.assertEqual(3, shot_index)
self.assertEqual(20, self.ensemble[0].state)
def test_prologue(self):
dialogue = self.presenter.dialogue(self.folders, self.ensemble)
presenter = Presenter(dialogue)
frame = presenter.frame()
self.assertEqual(3, len(frame[Model.Still]))
self.assertEqual(0, len(frame[Model.Audio]))
self.assertTrue(
all(isinstance(i, Presenter.Animation) for i in frame[Model.Still])
)
self.assertTrue(
all(isinstance(i.element, Model.Still) for i in frame[Model.Still])
)
self.assertEqual(20, max(i.duration for i in frame[Model.Still]))
self.assertEqual(20000, max(i.element.duration for i in frame[Model.Still]))
self.assertEqual(1, len(frame[Model.Line]))
self.assertTrue(
all(isinstance(i, Presenter.Animation) for i in frame[Model.Line])
)
self.assertTrue(
all(isinstance(i.element, Model.Line) for i in frame[Model.Line])
)
def test_option_0(self):
dialogue = self.presenter.dialogue(self.folders, self.ensemble)
presenter = Presenter(dialogue)
while presenter.pending != 1:
frame = presenter.frame()
self.assertEqual(0, len(frame[Model.Still]))
self.assertEqual(1, len(frame[Model.Line]))
self.assertEqual(0, len(frame[Model.Audio]))
self.assertTrue(
all(isinstance(i, Presenter.Animation) for i in frame[Model.Line])
)
self.assertTrue(
all(isinstance(i.element, Model.Line) for i in frame[Model.Line])
)
self.assertEqual("On.", frame[Model.Line][0].element.text)
def test_epilogue(self):
dialogue = self.presenter.dialogue(self.folders, self.ensemble)
presenter = Presenter(dialogue)
while presenter.pending:
frame = presenter.frame()
self.assertEqual(0, len(frame[Model.Still]))
self.assertEqual(1, len(frame[Model.Line]))
self.assertEqual(1, len(frame[Model.Audio]))
self.assertTrue(
all(isinstance(i, Presenter.Animation) for i in frame[Model.Line])
)
self.assertTrue(
all(isinstance(i.element, Model.Line) for i in frame[Model.Line])
)
self.assertEqual(
"Goodbye from Actor .",
frame[Model.Line][0].element.text
)
class ValidationTests(unittest.TestCase):
def test_allowed(self):
for text in (
"http://foo.com/blah_blah",
"http://foo.com/blah_blah/",
"http://www.example.com/wpstyle/?p=364",
"http://142.42.1.1/",
"http://142.42.1.1:8080/",
):
with self.subTest(text=text):
self.assertTrue(Presenter.validation["url"].match(text))
def test_blocked(self):
for text in (
"http://.www.foo.bar/",
"h://test",
):
with self.subTest(text=text):
self.assertFalse(Presenter.validation["url"].match(text))
|
tundish/blue_monday_78
|
bluemonday78/test/test_presenter.py
|
Python
|
agpl-3.0
| 5,224
|
#from django.conf import settings
#from django.conf.urls.static import static
from django.conf.urls.defaults import patterns, include, url
from django.views.generic import TemplateView
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
# Project page
    url(r'^$', TemplateView.as_view(template_name='index.html'), name='home'),
    # Our apps
    url(r'^popit/', include('popit.urls')),
)
# serve media_root files (only works when settings.DEBUG is True)
# https://docs.djangoproject.com/en/1.3/howto/static-files/#django.conf.urls.static.static
# urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
mysociety/polipop
|
polipop/urls.py
|
Python
|
agpl-3.0
| 794
|
from odoo import api, fields, models
class SlideChannelPartner(models.Model):
_inherit = 'slide.channel.partner'
@api.model
def create(self, vals):
res = super(SlideChannelPartner, self).create(vals)
res._create_slide_channel_survey()
return res
def _create_slide_channel_survey(self):
for record in self:
slide_channel = record.channel_id
slide_slides = slide_channel.slide_ids
for slide in slide_slides.filtered(
lambda s: s.slide_type == 'certification' and s.by_tutor):
survey_inputs = self.env['survey.user_input'].search([
('survey_id', '=', slide.survey_id.id),
('partner_id', '=', record.event_id.main_responsible_id.id),
])
if not survey_inputs:
                    main_responsible = (record.event_id.main_responsible_id
                                        or record.event_id.second_responsible_id)
                    self.env['survey.user_input'].create({
'survey_id': slide.survey_id.id,
'event_id': record.event_id.id,
'student_id': record.partner_id.id,
'partner_id': main_responsible.partner_id.id if main_responsible else None
})
|
avanzosc/odoo-addons
|
slide_channel_survey/models/slide_channel_partner.py
|
Python
|
agpl-3.0
| 1,378
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging

from datetime import datetime

from openerp import models, fields, api, tools, _
from openerp.tools.safe_eval import safe_eval
_logger = logging.getLogger('credit.control.agree')
class CreditControlAgree(models.TransientModel):
_name = 'credit.control.agree'
_description = 'Add agreement in credit control lines'
_rec_name = 'partner_id'
@api.model
def _get_default_currency(self):
currency_id = False
if self._context.get('default_partner_id', False):
partner = self.env['res.partner'].browse(self._context['default_partner_id'])
currency_id = partner.property_product_pricelist.currency_id
return currency_id
@api.model
def _get_company(self):
company_obj = self.env['res.company']
return company_obj._company_default_get('credit.control.policy')
@api.model
def _get_policies(self):
return self.env['credit.control.policy'].search([])
@api.model
def _get_line_ids(self):
if self._context.get('default_partner_id', False):
partner = self.env['res.partner'].browse(self._context['default_partner_id'])
line_obj = self.env['credit.control.line']
            lines = line_obj.search([
                ('partner_id', '=', self._context['default_partner_id']),
                ('last_date', '=', partner.last_credit_control_date)])
_logger.info("lines %s:%s" % (lines, partner.last_credit_control_date))
return lines
@api.model
def _compute_credit_due(self):
_logger.info("Get %s" % self._context.get('default_partner_id', False))
if self._context.get('default_partner_id', False):
partner = self.env['res.partner'].browse(self._context['default_partner_id'])
_logger.info("Get due %s" % partner)
return partner.credit_limit - (partner.credit - partner.debit)
    partner_id = fields.Many2one(
        'res.partner', 'Partner', required=True,
        default=lambda self: self._context.get('default_partner_id', False))
    last_credit_control_date = fields.Datetime(
        string="Last date control",
        related="partner_id.last_credit_control_date", readonly=True)
    currency_id = fields.Many2one(
        'res.currency', 'Currency', required=True,
        default=lambda self: self._context.get('default_currency_id') or self._get_default_currency())
company_id = fields.Many2one('res.company',
string='Company',
default=_get_company,
required=True)
credit_control_line_ids = fields.Many2many('credit.control.line',
string='Credit Control Lines',
default=_get_line_ids)
policy_id = fields.Many2one(
'credit.control.policy',
string='Policies',
required=True
)
user_id = fields.Many2one('res.users',
default=lambda self: self.env.user,
string='User')
total_invoiced = fields.Float(string='Total Invoiced',
compute='_compute_total', store=False)
total_due = fields.Float(string='Total Due',
compute='_compute_total', store=False)
credit_limit_due = fields.Float(string='Due Credit limit',
default=_compute_credit_due)
amount_due = fields.Float(string='Due Amount Tax incl.',
required=True)
@api.model
def _get_total(self):
amount_field = 'credit_control_line_ids.amount_due'
return sum(self.mapped(amount_field))
@api.model
def _get_total_due(self):
balance_field = 'credit_control_line_ids.balance_due'
return sum(self.mapped(balance_field))
@api.one
@api.depends('credit_control_line_ids',
'credit_control_line_ids.amount_due',
'credit_control_line_ids.balance_due')
def _compute_total(self):
self.total_invoiced = self._get_total()
self.total_due = self._get_total_due()
    @api.model
    def _create_account_move(self, dt, ref, journal_id, company_id):
        start_at_datetime = datetime.strptime(dt, tools.DEFAULT_SERVER_DATETIME_FORMAT)
        date_tz_user = fields.Datetime.context_timestamp(self, start_at_datetime)
        date_tz_user = date_tz_user.strftime(tools.DEFAULT_SERVER_DATE_FORMAT)
        period = self.env['account.period'].with_context(company_id=company_id).find(dt=date_tz_user)
        return self.env['account.move'].create(
            {'ref': ref, 'journal_id': journal_id, 'period_id': period[0].id})
    @api.model
    def _create_account_move_line(self, date, ref, partner_id, vals):
        credit_control_agree_journal_setting = safe_eval(self.env['ir.config_parameter'].
                                                         get_param('credit_control_agree_journal_setting',
                                                                   default="False"))
        property_obj = self.env['ir.property']
        partner = self.env['res.partner'].browse(partner_id)
        account_def = property_obj.get('property_account_receivable', 'res.partner')
        order_account = partner and \
            partner.property_account_receivable and \
            partner.property_account_receivable.id or \
            account_def and account_def.id
        move = self._create_account_move(date, ref, credit_control_agree_journal_setting, self.company_id.id)
        amount_total = 0.0
        for inx, vl in enumerate(vals):
            amount_total += vl[2]['credit'] - vl[2]['debit']
            vals[inx][2]['partner_id'] = partner_id
            vals[inx][2]['journal_id'] = credit_control_agree_journal_setting
            vals[inx][2]['period_id'] = move.period_id.id
            vals[inx][2]['move_id'] = move.id
            vals[inx][2]['company_id'] = self.company_id.id
vals.append((0, False, {
'date': date,
'ref': date,
'name': _("Agree temporary permit"),
'account_id': order_account,
'credit': ((amount_total < 0) and -amount_total) or 0.0,
'debit': ((amount_total > 0) and amount_total) or 0.0,
'partner_id': partner_id,
'journal_id': credit_control_agree_journal_setting,
'period_id': move.period_id.id,
            'move_id': move.id,
'company_id': self.company_id.id,
}))
move.write({'line_id': vals})
        return move.id
#@api.one
#def action_cancel(self):
# return {'type': 'ir.actions.act_window_close'}
@api.one
def action_next(self):
credit_line_obj = self.env['credit.control.line']
controlling_date = self._context['default_date']
partner_id = self._context['default_partner_id']
ref = self._context['active_id']
        if not self.policy_id.account_ids:
            raise api.Warning(
                _('You can only use a policy set on '
                  'account.\n'
                  'Please choose one of the following '
                  'policies:\n')
            )
vals = []
amount_total = self._context['default_amount_due']
        for account in self.policy_id.account_ids:
vals.append((0, False, {
'date': controlling_date,
'ref': ref,
'name': _("Agree temporary permit"),
'account_id': account.id,
'credit': ((amount_total < 0) and -amount_total) or 0.0,
'debit': ((amount_total > 0) and amount_total) or 0.0,
}))
        move_id = self._create_account_move_line(controlling_date, ref, partner_id, vals)
amount_due = sum([x.amount_due for x in self.credit_control_line_ids])
create = credit_line_obj.create_or_update_from_mv_lines
generated_lines = create(move_id,
self.new_policy_level_id,
controlling_date,
check_tolerance=False)
generated_lines.write({'amount_due': -amount_due, 'state': 'temporary_permit'})
#self._set_so_policy(self.move_line_ids, self.new_policy_id)
return {'type': 'ir.actions.act_window_close'}
|
rosenvladimirov/addons
|
partner_credit_control/wizard/credit_control_agree.py
|
Python
|
agpl-3.0
| 9,204
|
# -*- coding: utf-8 -*-
{'active': False,
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'category': 'Accounting & Finance',
'data': [
'account_voucher_view.xml',
],
'demo': [],
'depends': ['report_extended_voucher'],
'description': '''
Account Voucher Pop Up Print
============================
''',
'installable': True,
'name': 'Account Voucher Pop Up Print',
'test': [],
'version': '8.0.1.2.0'}
|
dvitme/odoo-addons
|
account_voucher_popup_print/__openerp__.py
|
Python
|
agpl-3.0
| 460
|
# -*- coding: utf-8 -*-
"""
PL Acc Lib
Used by
AccountLine
Created: 12 April 2019
Last updated: 12 April 2019
"""
# ----------------------------------------------------------- Get Cuentab ---------------------
def get_cuentab(self, product_type):
"""
Get Cuentab
Used by Accounting
"""
print()
print('Pl Acc Lib - Get Cuentab')
#print(self)
#print(product_type)
#print(self.product)
print(self.product.name)
print(self.product.pl_account)
# Search
    configurator = self.env['openhealth.configurator.emr'].search([])
#print(configurator.name)
if product_type in ['product']:
#cuentab = configurator.cuentab_products
cuentab = self.product.pl_account
elif product_type in ['service']:
cuentab = configurator.cuentab_services
elif product_type in ['consu']:
cuentab = configurator.cuentab_consu
    else:
        print('Pl Acc Lib - Get Cuentab - This should not happen !')
        cuentab = False

    return cuentab
|
gibil5/openhealth
|
models/electronic_new/lib/pl_acc_lib.py
|
Python
|
agpl-3.0
| 1,099
|
#!/usr/bin/env python3
# Copyright (C) 2017 Qrama
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# pylint: disable=c0111,c0301,c0325,c0103,r0913,r0902,e0401,C0302, R0914
import asyncio
import sys
import traceback
import logging
import json
from juju.model import Model
from juju.client import client
sys.path.append('/opt')
from sojobo_api import settings
from sojobo_api.api import w_datastore as datastore, w_juju as juju
async def remove_machine(username, password, controller_name, model_key, machine):
try:
auth_data = datastore.get_model_connection_info(username, controller_name, model_key)
model_connection = Model()
logger.info('Setting up Model connection for %s:%s', controller_name, auth_data['model']['name'])
await model_connection.connect(auth_data['controller']['endpoints'][0], auth_data['model']['uuid'], auth_data['user']['juju_username'], password, auth_data['controller']['ca_cert'])
logger.info('Model connection was successful')
for mach, entity in model_connection.state.machines.items():
if mach == machine:
logger.info('Destroying machine %s', machine)
facade = client.ClientFacade.from_connection(entity.connection)
await facade.DestroyMachines(True, [entity.id])
logger.info('Machine %s destroyed', machine)
await model_connection.disconnect()
except Exception as e:
exc_type, exc_value, exc_traceback = sys.exc_info()
lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
for l in lines:
logger.error(l)
finally:
if 'model_connection' in locals():
await juju.disconnect(model_connection)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
ws_logger = logging.getLogger('websockets.protocol')
logger = logging.getLogger('remove_machine')
hdlr = logging.FileHandler('{}/log/remove_machine.log'.format(settings.SOJOBO_API_DIR))
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
ws_logger.addHandler(hdlr)
ws_logger.setLevel(logging.DEBUG)
logger.addHandler(hdlr)
logger.setLevel(logging.INFO)
loop = asyncio.get_event_loop()
loop.set_debug(True)
loop.run_until_complete(remove_machine(sys.argv[1], sys.argv[2], sys.argv[3],
sys.argv[4], sys.argv[5]))
loop.close()
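
# Usage sketch (the positional arguments map 1:1 onto remove_machine's
# parameters; the values shown are placeholders):
#   python3 remove_machine.py <username> <password> <controller_name> \
#       <model_key> <machine-id>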
|
tengu-team/layer-sojobo
|
files/sojobo_api/scripts/remove_machine.py
|
Python
|
agpl-3.0
| 3,080
|
# -*- coding: utf-8 -*-
"""
This is the default template for our main set of AWS servers.
Common traits:
* Use memcached, and cache-backed sessions
* Use a MySQL 5.1 database
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=wildcard-import, unused-wildcard-import
# Pylint gets confused by path.py instances, which report themselves as class
# objects. As a result, pylint applies the wrong regex in validating names,
# and throws spurious errors. Therefore, we disable invalid-name checking.
# pylint: disable=invalid-name
import datetime
import json
import dateutil
from .common import *
from openedx.core.lib.derived import derive_settings
from openedx.core.lib.logsettings import get_logger_config
import os
from path import Path as path
from xmodule.modulestore.modulestore_settings import convert_module_store_setting_if_needed
# SERVICE_VARIANT specifies name of the variant used, which decides what JSON
# configuration files are read during startup.
SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None)
# CONFIG_ROOT specifies the directory where the JSON configuration
# files are expected to be found. If not specified, use the project
# directory.
CONFIG_ROOT = path(os.environ.get('CONFIG_ROOT', ENV_ROOT))
# CONFIG_PREFIX specifies the prefix of the JSON configuration files,
# based on the service variant. If no variant is use, don't use a
# prefix.
CONFIG_PREFIX = SERVICE_VARIANT + "." if SERVICE_VARIANT else ""
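# For example (hypothetical values): with SERVICE_VARIANT="lms" this module
# reads "lms.env.json" and "lms.auth.json" from CONFIG_ROOT; with no variant
# it reads plain "env.json" and "auth.json".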
################################ ALWAYS THE SAME ##############################
DEBUG = False
DEFAULT_TEMPLATE_ENGINE['OPTIONS']['debug'] = False
EMAIL_BACKEND = 'django_ses.SESBackend'
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
# IMPORTANT: With this enabled, the server must always be behind a proxy that
# strips the header HTTP_X_FORWARDED_PROTO from client requests. Otherwise,
# a user can fool our server into thinking it was an https connection.
# See
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-proxy-ssl-header
# for other warnings.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
###################################### CELERY ################################
# Don't use a connection pool, since connections are dropped by ELB.
BROKER_POOL_LIMIT = 0
BROKER_CONNECTION_TIMEOUT = 1
# For the Result Store, use the django cache named 'celery'
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
# When the broker is behind an ELB, use a heartbeat to refresh the
# connection and to detect if it has been dropped.
BROKER_HEARTBEAT = 60.0
BROKER_HEARTBEAT_CHECKRATE = 2
# Each worker should only fetch one message at a time
CELERYD_PREFETCH_MULTIPLIER = 1
# Rename the exchange and queues for each variant
QUEUE_VARIANT = CONFIG_PREFIX.lower()
CELERY_DEFAULT_EXCHANGE = 'edx.{0}core'.format(QUEUE_VARIANT)
HIGH_PRIORITY_QUEUE = 'edx.{0}core.high'.format(QUEUE_VARIANT)
DEFAULT_PRIORITY_QUEUE = 'edx.{0}core.default'.format(QUEUE_VARIANT)
LOW_PRIORITY_QUEUE = 'edx.{0}core.low'.format(QUEUE_VARIANT)
HIGH_MEM_QUEUE = 'edx.{0}core.high_mem'.format(QUEUE_VARIANT)
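# For example, with CONFIG_PREFIX "lms." the names above come out as
# "edx.lms.core.default", "edx.lms.core.high", "edx.lms.core.low" and
# "edx.lms.core.high_mem".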
CELERY_DEFAULT_QUEUE = DEFAULT_PRIORITY_QUEUE
CELERY_DEFAULT_ROUTING_KEY = DEFAULT_PRIORITY_QUEUE
CELERY_QUEUES = {
HIGH_PRIORITY_QUEUE: {},
LOW_PRIORITY_QUEUE: {},
DEFAULT_PRIORITY_QUEUE: {},
HIGH_MEM_QUEUE: {},
}
CELERY_ROUTES = "{}celery.Router".format(QUEUE_VARIANT)
CELERYBEAT_SCHEDULE = {} # For scheduling tasks, entries can be added to this dict
########################## NON-SECURE ENV CONFIG ##############################
# Things like server locations, ports, etc.
with open(CONFIG_ROOT / CONFIG_PREFIX + "env.json") as env_file:
ENV_TOKENS = json.load(env_file)
# STATIC_ROOT specifies the directory where static files are
# collected
STATIC_ROOT_BASE = ENV_TOKENS.get('STATIC_ROOT_BASE', None)
if STATIC_ROOT_BASE:
STATIC_ROOT = path(STATIC_ROOT_BASE)
WEBPACK_LOADER['DEFAULT']['STATS_FILE'] = STATIC_ROOT / "webpack-stats.json"
# STATIC_URL_BASE specifies the base url to use for static files
STATIC_URL_BASE = ENV_TOKENS.get('STATIC_URL_BASE', None)
if STATIC_URL_BASE:
# collectstatic will fail if STATIC_URL is a unicode string
STATIC_URL = STATIC_URL_BASE.encode('ascii')
if not STATIC_URL.endswith("/"):
STATIC_URL += "/"
# DEFAULT_COURSE_ABOUT_IMAGE_URL specifies the default image to show for courses that don't provide one
DEFAULT_COURSE_ABOUT_IMAGE_URL = ENV_TOKENS.get('DEFAULT_COURSE_ABOUT_IMAGE_URL', DEFAULT_COURSE_ABOUT_IMAGE_URL)
# COURSE_MODE_DEFAULTS specifies the course mode to use for courses that do not set one
COURSE_MODE_DEFAULTS = ENV_TOKENS.get('COURSE_MODE_DEFAULTS', COURSE_MODE_DEFAULTS)
# MEDIA_ROOT specifies the directory where user-uploaded files are stored.
MEDIA_ROOT = ENV_TOKENS.get('MEDIA_ROOT', MEDIA_ROOT)
MEDIA_URL = ENV_TOKENS.get('MEDIA_URL', MEDIA_URL)
PLATFORM_NAME = ENV_TOKENS.get('PLATFORM_NAME', PLATFORM_NAME)
PLATFORM_DESCRIPTION = ENV_TOKENS.get('PLATFORM_DESCRIPTION', PLATFORM_DESCRIPTION)
PLATFORM_TWITTER_ACCOUNT = ENV_TOKENS.get('PLATFORM_TWITTER_ACCOUNT', PLATFORM_TWITTER_ACCOUNT)
PLATFORM_FACEBOOK_ACCOUNT = ENV_TOKENS.get('PLATFORM_FACEBOOK_ACCOUNT', PLATFORM_FACEBOOK_ACCOUNT)
SOCIAL_SHARING_SETTINGS = ENV_TOKENS.get('SOCIAL_SHARING_SETTINGS', SOCIAL_SHARING_SETTINGS)
# Social media links for the page footer
SOCIAL_MEDIA_FOOTER_URLS = ENV_TOKENS.get('SOCIAL_MEDIA_FOOTER_URLS', SOCIAL_MEDIA_FOOTER_URLS)
# For displaying on the receipt. At Stanford PLATFORM_NAME != MERCHANT_NAME, but PLATFORM_NAME is a fine default
CC_MERCHANT_NAME = ENV_TOKENS.get('CC_MERCHANT_NAME', PLATFORM_NAME)
EMAIL_BACKEND = ENV_TOKENS.get('EMAIL_BACKEND', EMAIL_BACKEND)
EMAIL_FILE_PATH = ENV_TOKENS.get('EMAIL_FILE_PATH', None)
EMAIL_HOST = ENV_TOKENS.get('EMAIL_HOST', 'localhost') # django default is localhost
EMAIL_PORT = ENV_TOKENS.get('EMAIL_PORT', 25) # django default is 25
EMAIL_USE_TLS = ENV_TOKENS.get('EMAIL_USE_TLS', False) # django default is False
SITE_NAME = ENV_TOKENS['SITE_NAME']
HTTPS = ENV_TOKENS.get('HTTPS', HTTPS)
SESSION_ENGINE = ENV_TOKENS.get('SESSION_ENGINE', SESSION_ENGINE)
SESSION_COOKIE_DOMAIN = ENV_TOKENS.get('SESSION_COOKIE_DOMAIN')
SESSION_COOKIE_HTTPONLY = ENV_TOKENS.get('SESSION_COOKIE_HTTPONLY', True)
SESSION_COOKIE_SECURE = ENV_TOKENS.get('SESSION_COOKIE_SECURE', SESSION_COOKIE_SECURE)
SESSION_SAVE_EVERY_REQUEST = ENV_TOKENS.get('SESSION_SAVE_EVERY_REQUEST', SESSION_SAVE_EVERY_REQUEST)
AWS_SES_REGION_NAME = ENV_TOKENS.get('AWS_SES_REGION_NAME', 'us-east-1')
AWS_SES_REGION_ENDPOINT = ENV_TOKENS.get('AWS_SES_REGION_ENDPOINT', 'email.us-east-1.amazonaws.com')
REGISTRATION_EXTRA_FIELDS = ENV_TOKENS.get('REGISTRATION_EXTRA_FIELDS', REGISTRATION_EXTRA_FIELDS)
REGISTRATION_EXTENSION_FORM = ENV_TOKENS.get('REGISTRATION_EXTENSION_FORM', REGISTRATION_EXTENSION_FORM)
REGISTRATION_EMAIL_PATTERNS_ALLOWED = ENV_TOKENS.get('REGISTRATION_EMAIL_PATTERNS_ALLOWED')
REGISTRATION_FIELD_ORDER = ENV_TOKENS.get('REGISTRATION_FIELD_ORDER', REGISTRATION_FIELD_ORDER)
# Set the names of cookies shared with the marketing site
# These have the same cookie domain as the session, which in production
# usually includes subdomains.
EDXMKTG_LOGGED_IN_COOKIE_NAME = ENV_TOKENS.get('EDXMKTG_LOGGED_IN_COOKIE_NAME', EDXMKTG_LOGGED_IN_COOKIE_NAME)
EDXMKTG_USER_INFO_COOKIE_NAME = ENV_TOKENS.get('EDXMKTG_USER_INFO_COOKIE_NAME', EDXMKTG_USER_INFO_COOKIE_NAME)
LMS_ROOT_URL = ENV_TOKENS.get('LMS_ROOT_URL')
LMS_INTERNAL_ROOT_URL = ENV_TOKENS.get('LMS_INTERNAL_ROOT_URL', LMS_ROOT_URL)
ENV_FEATURES = ENV_TOKENS.get('FEATURES', {})
for feature, value in ENV_FEATURES.items():
FEATURES[feature] = value
CMS_BASE = ENV_TOKENS.get('CMS_BASE', 'studio.edx.org')
ALLOWED_HOSTS = [
# TODO: bbeggs remove this before prod, temp fix to get load testing running
"*",
ENV_TOKENS.get('LMS_BASE'),
FEATURES['PREVIEW_LMS_BASE'],
]
# allow for environments to specify what cookie name our login subsystem should use
# this is to fix a bug regarding simultaneous logins between edx.org and edge.edx.org which can
# happen with some browsers (e.g. Firefox)
if ENV_TOKENS.get('SESSION_COOKIE_NAME', None):
# NOTE, there's a bug in Django (http://bugs.python.org/issue18012) which necessitates this being a str()
SESSION_COOKIE_NAME = str(ENV_TOKENS.get('SESSION_COOKIE_NAME'))
CACHES = ENV_TOKENS['CACHES']
# Cache used for location mapping -- called many times with the same key/value
# in a given request.
if 'loc_cache' not in CACHES:
CACHES['loc_cache'] = {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_location_mem_cache',
}
# Email overrides
DEFAULT_FROM_EMAIL = ENV_TOKENS.get('DEFAULT_FROM_EMAIL', DEFAULT_FROM_EMAIL)
DEFAULT_FEEDBACK_EMAIL = ENV_TOKENS.get('DEFAULT_FEEDBACK_EMAIL', DEFAULT_FEEDBACK_EMAIL)
ADMINS = ENV_TOKENS.get('ADMINS', ADMINS)
SERVER_EMAIL = ENV_TOKENS.get('SERVER_EMAIL', SERVER_EMAIL)
TECH_SUPPORT_EMAIL = ENV_TOKENS.get('TECH_SUPPORT_EMAIL', TECH_SUPPORT_EMAIL)
CONTACT_EMAIL = ENV_TOKENS.get('CONTACT_EMAIL', CONTACT_EMAIL)
BUGS_EMAIL = ENV_TOKENS.get('BUGS_EMAIL', BUGS_EMAIL)
PAYMENT_SUPPORT_EMAIL = ENV_TOKENS.get('PAYMENT_SUPPORT_EMAIL', PAYMENT_SUPPORT_EMAIL)
FINANCE_EMAIL = ENV_TOKENS.get('FINANCE_EMAIL', FINANCE_EMAIL)
UNIVERSITY_EMAIL = ENV_TOKENS.get('UNIVERSITY_EMAIL', UNIVERSITY_EMAIL)
PRESS_EMAIL = ENV_TOKENS.get('PRESS_EMAIL', PRESS_EMAIL)
CONTACT_MAILING_ADDRESS = ENV_TOKENS.get('CONTACT_MAILING_ADDRESS', CONTACT_MAILING_ADDRESS)
# Account activation email sender address
ACTIVATION_EMAIL_FROM_ADDRESS = ENV_TOKENS.get('ACTIVATION_EMAIL_FROM_ADDRESS', ACTIVATION_EMAIL_FROM_ADDRESS)
# Currency
PAID_COURSE_REGISTRATION_CURRENCY = ENV_TOKENS.get('PAID_COURSE_REGISTRATION_CURRENCY',
PAID_COURSE_REGISTRATION_CURRENCY)
# Payment Report Settings
PAYMENT_REPORT_GENERATOR_GROUP = ENV_TOKENS.get('PAYMENT_REPORT_GENERATOR_GROUP', PAYMENT_REPORT_GENERATOR_GROUP)
# Bulk Email overrides
BULK_EMAIL_DEFAULT_FROM_EMAIL = ENV_TOKENS.get('BULK_EMAIL_DEFAULT_FROM_EMAIL', BULK_EMAIL_DEFAULT_FROM_EMAIL)
BULK_EMAIL_EMAILS_PER_TASK = ENV_TOKENS.get('BULK_EMAIL_EMAILS_PER_TASK', BULK_EMAIL_EMAILS_PER_TASK)
BULK_EMAIL_DEFAULT_RETRY_DELAY = ENV_TOKENS.get('BULK_EMAIL_DEFAULT_RETRY_DELAY', BULK_EMAIL_DEFAULT_RETRY_DELAY)
BULK_EMAIL_MAX_RETRIES = ENV_TOKENS.get('BULK_EMAIL_MAX_RETRIES', BULK_EMAIL_MAX_RETRIES)
BULK_EMAIL_INFINITE_RETRY_CAP = ENV_TOKENS.get('BULK_EMAIL_INFINITE_RETRY_CAP', BULK_EMAIL_INFINITE_RETRY_CAP)
BULK_EMAIL_LOG_SENT_EMAILS = ENV_TOKENS.get('BULK_EMAIL_LOG_SENT_EMAILS', BULK_EMAIL_LOG_SENT_EMAILS)
BULK_EMAIL_RETRY_DELAY_BETWEEN_SENDS = ENV_TOKENS.get(
'BULK_EMAIL_RETRY_DELAY_BETWEEN_SENDS',
BULK_EMAIL_RETRY_DELAY_BETWEEN_SENDS
)
# We want Bulk Email running on the high-priority queue, so we define the
# routing key that points to it. At the moment, the name is the same.
# We have to reset the value here, since we have changed the value of the queue name.
BULK_EMAIL_ROUTING_KEY = ENV_TOKENS.get('BULK_EMAIL_ROUTING_KEY', HIGH_PRIORITY_QUEUE)
# We can run smaller jobs on the low priority queue. See note above for why
# we have to reset the value here.
BULK_EMAIL_ROUTING_KEY_SMALL_JOBS = ENV_TOKENS.get('BULK_EMAIL_ROUTING_KEY_SMALL_JOBS', LOW_PRIORITY_QUEUE)
# Queue to use for expiring old entitlements
ENTITLEMENTS_EXPIRATION_ROUTING_KEY = ENV_TOKENS.get('ENTITLEMENTS_EXPIRATION_ROUTING_KEY', LOW_PRIORITY_QUEUE)
# Message expiry time in seconds
CELERY_EVENT_QUEUE_TTL = ENV_TOKENS.get('CELERY_EVENT_QUEUE_TTL', None)
# Allow CELERY_QUEUES to be overwritten by ENV_TOKENS.
ENV_CELERY_QUEUES = ENV_TOKENS.get('CELERY_QUEUES', None)
if ENV_CELERY_QUEUES:
CELERY_QUEUES = {queue: {} for queue in ENV_CELERY_QUEUES}
# Then add alternate environment queues
ALTERNATE_QUEUE_ENVS = ENV_TOKENS.get('ALTERNATE_WORKER_QUEUES', '').split()
ALTERNATE_QUEUES = [
DEFAULT_PRIORITY_QUEUE.replace(QUEUE_VARIANT, alternate + '.')
for alternate in ALTERNATE_QUEUE_ENVS
]
CELERY_QUEUES.update(
{
alternate: {}
for alternate in ALTERNATE_QUEUES
if alternate not in CELERY_QUEUES.keys()
}
)
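# For example (hypothetical): ALTERNATE_WORKER_QUEUES = "cms" turns
# "edx.lms.core.default" into an extra "edx.cms.core.default" entry in
# CELERY_QUEUES.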
# following setting is for backward compatibility
if ENV_TOKENS.get('COMPREHENSIVE_THEME_DIR', None):
COMPREHENSIVE_THEME_DIR = ENV_TOKENS.get('COMPREHENSIVE_THEME_DIR')
COMPREHENSIVE_THEME_DIRS = ENV_TOKENS.get('COMPREHENSIVE_THEME_DIRS', COMPREHENSIVE_THEME_DIRS) or []
# COMPREHENSIVE_THEME_LOCALE_PATHS contain the paths to themes locale directories e.g.
# "COMPREHENSIVE_THEME_LOCALE_PATHS" : [
# "/edx/src/edx-themes/conf/locale"
# ],
COMPREHENSIVE_THEME_LOCALE_PATHS = ENV_TOKENS.get('COMPREHENSIVE_THEME_LOCALE_PATHS', [])
DEFAULT_SITE_THEME = ENV_TOKENS.get('DEFAULT_SITE_THEME', DEFAULT_SITE_THEME)
ENABLE_COMPREHENSIVE_THEMING = ENV_TOKENS.get('ENABLE_COMPREHENSIVE_THEMING', ENABLE_COMPREHENSIVE_THEMING)
# Marketing link overrides
MKTG_URL_LINK_MAP.update(ENV_TOKENS.get('MKTG_URL_LINK_MAP', {}))
# Intentional defaults.
SUPPORT_SITE_LINK = ENV_TOKENS.get('SUPPORT_SITE_LINK', SUPPORT_SITE_LINK)
ID_VERIFICATION_SUPPORT_LINK = ENV_TOKENS.get('ID_VERIFICATION_SUPPORT_LINK', SUPPORT_SITE_LINK)
PASSWORD_RESET_SUPPORT_LINK = ENV_TOKENS.get('PASSWORD_RESET_SUPPORT_LINK', SUPPORT_SITE_LINK)
ACTIVATION_EMAIL_SUPPORT_LINK = ENV_TOKENS.get(
'ACTIVATION_EMAIL_SUPPORT_LINK', SUPPORT_SITE_LINK
)
# Mobile store URL overrides
MOBILE_STORE_URLS = ENV_TOKENS.get('MOBILE_STORE_URLS', MOBILE_STORE_URLS)
# Timezone overrides
TIME_ZONE = ENV_TOKENS.get('TIME_ZONE', TIME_ZONE)
# Translation overrides
LANGUAGES = ENV_TOKENS.get('LANGUAGES', LANGUAGES)
CERTIFICATE_TEMPLATE_LANGUAGES = ENV_TOKENS.get('CERTIFICATE_TEMPLATE_LANGUAGES', CERTIFICATE_TEMPLATE_LANGUAGES)
LANGUAGE_DICT = dict(LANGUAGES)
LANGUAGE_CODE = ENV_TOKENS.get('LANGUAGE_CODE', LANGUAGE_CODE)
LANGUAGE_COOKIE = ENV_TOKENS.get('LANGUAGE_COOKIE', LANGUAGE_COOKIE)
ALL_LANGUAGES = ENV_TOKENS.get('ALL_LANGUAGES', ALL_LANGUAGES)
USE_I18N = ENV_TOKENS.get('USE_I18N', USE_I18N)
# Additional installed apps
for app in ENV_TOKENS.get('ADDL_INSTALLED_APPS', []):
INSTALLED_APPS.append(app)
WIKI_ENABLED = ENV_TOKENS.get('WIKI_ENABLED', WIKI_ENABLED)
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL', 'INFO')
LOG_DIR = ENV_TOKENS['LOG_DIR']
DATA_DIR = path(ENV_TOKENS.get('DATA_DIR', DATA_DIR))
LOGGING = get_logger_config(LOG_DIR,
logging_env=ENV_TOKENS['LOGGING_ENV'],
local_loglevel=local_loglevel,
service_variant=SERVICE_VARIANT)
COURSE_LISTINGS = ENV_TOKENS.get('COURSE_LISTINGS', {})
COMMENTS_SERVICE_URL = ENV_TOKENS.get("COMMENTS_SERVICE_URL", '')
COMMENTS_SERVICE_KEY = ENV_TOKENS.get("COMMENTS_SERVICE_KEY", '')
CERT_NAME_SHORT = ENV_TOKENS.get('CERT_NAME_SHORT', CERT_NAME_SHORT)
CERT_NAME_LONG = ENV_TOKENS.get('CERT_NAME_LONG', CERT_NAME_LONG)
CERT_QUEUE = ENV_TOKENS.get("CERT_QUEUE", 'test-pull')
ZENDESK_URL = ENV_TOKENS.get('ZENDESK_URL', ZENDESK_URL)
ZENDESK_CUSTOM_FIELDS = ENV_TOKENS.get('ZENDESK_CUSTOM_FIELDS', ZENDESK_CUSTOM_FIELDS)
FEEDBACK_SUBMISSION_EMAIL = ENV_TOKENS.get("FEEDBACK_SUBMISSION_EMAIL")
MKTG_URLS = ENV_TOKENS.get('MKTG_URLS', MKTG_URLS)
# Badgr API
BADGR_API_TOKEN = ENV_TOKENS.get('BADGR_API_TOKEN', BADGR_API_TOKEN)
BADGR_BASE_URL = ENV_TOKENS.get('BADGR_BASE_URL', BADGR_BASE_URL)
BADGR_ISSUER_SLUG = ENV_TOKENS.get('BADGR_ISSUER_SLUG', BADGR_ISSUER_SLUG)
BADGR_TIMEOUT = ENV_TOKENS.get('BADGR_TIMEOUT', BADGR_TIMEOUT)
# git repo loading environment
GIT_REPO_DIR = ENV_TOKENS.get('GIT_REPO_DIR', '/edx/var/edxapp/course_repos')
GIT_IMPORT_STATIC = ENV_TOKENS.get('GIT_IMPORT_STATIC', True)
GIT_IMPORT_PYTHON_LIB = ENV_TOKENS.get('GIT_IMPORT_PYTHON_LIB', True)
PYTHON_LIB_FILENAME = ENV_TOKENS.get('PYTHON_LIB_FILENAME', 'python_lib.zip')
for name, value in ENV_TOKENS.get("CODE_JAIL", {}).items():
oldvalue = CODE_JAIL.get(name)
if isinstance(oldvalue, dict):
for subname, subvalue in value.items():
oldvalue[subname] = subvalue
else:
CODE_JAIL[name] = value
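# e.g. (hypothetical): CODE_JAIL = {"limits": {"REALTIME": 5}} in env.json
# updates only the "REALTIME" entry inside the default "limits" dict instead
# of replacing the whole dict.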
COURSES_WITH_UNSAFE_CODE = ENV_TOKENS.get("COURSES_WITH_UNSAFE_CODE", [])
ASSET_IGNORE_REGEX = ENV_TOKENS.get('ASSET_IGNORE_REGEX', ASSET_IGNORE_REGEX)
# Event Tracking
if "TRACKING_IGNORE_URL_PATTERNS" in ENV_TOKENS:
TRACKING_IGNORE_URL_PATTERNS = ENV_TOKENS.get("TRACKING_IGNORE_URL_PATTERNS")
# SSL external authentication settings
SSL_AUTH_EMAIL_DOMAIN = ENV_TOKENS.get("SSL_AUTH_EMAIL_DOMAIN", "MIT.EDU")
SSL_AUTH_DN_FORMAT_STRING = ENV_TOKENS.get(
"SSL_AUTH_DN_FORMAT_STRING",
"/C=US/ST=Massachusetts/O=Massachusetts Institute of Technology/OU=Client CA v1/CN={0}/emailAddress={1}"
)
# Django CAS external authentication settings
CAS_EXTRA_LOGIN_PARAMS = ENV_TOKENS.get("CAS_EXTRA_LOGIN_PARAMS", None)
if FEATURES.get('AUTH_USE_CAS'):
CAS_SERVER_URL = ENV_TOKENS.get("CAS_SERVER_URL", None)
AUTHENTICATION_BACKENDS = [
'django.contrib.auth.backends.ModelBackend',
'django_cas.backends.CASBackend',
]
INSTALLED_APPS.append('django_cas')
MIDDLEWARE_CLASSES.append('django_cas.middleware.CASMiddleware')
CAS_ATTRIBUTE_CALLBACK = ENV_TOKENS.get('CAS_ATTRIBUTE_CALLBACK', None)
if CAS_ATTRIBUTE_CALLBACK:
import importlib
CAS_USER_DETAILS_RESOLVER = getattr(
importlib.import_module(CAS_ATTRIBUTE_CALLBACK['module']),
CAS_ATTRIBUTE_CALLBACK['function']
)
# Video Caching. Pairing country codes with CDN URLs.
# Example: {'CN': 'http://api.xuetangx.com/edx/video?s3_url='}
VIDEO_CDN_URL = ENV_TOKENS.get('VIDEO_CDN_URL', {})
# Branded footer
FOOTER_OPENEDX_URL = ENV_TOKENS.get('FOOTER_OPENEDX_URL', FOOTER_OPENEDX_URL)
FOOTER_OPENEDX_LOGO_IMAGE = ENV_TOKENS.get('FOOTER_OPENEDX_LOGO_IMAGE', FOOTER_OPENEDX_LOGO_IMAGE)
FOOTER_ORGANIZATION_IMAGE = ENV_TOKENS.get('FOOTER_ORGANIZATION_IMAGE', FOOTER_ORGANIZATION_IMAGE)
FOOTER_CACHE_TIMEOUT = ENV_TOKENS.get('FOOTER_CACHE_TIMEOUT', FOOTER_CACHE_TIMEOUT)
FOOTER_BROWSER_CACHE_MAX_AGE = ENV_TOKENS.get('FOOTER_BROWSER_CACHE_MAX_AGE', FOOTER_BROWSER_CACHE_MAX_AGE)
# Credit notifications settings
NOTIFICATION_EMAIL_CSS = ENV_TOKENS.get('NOTIFICATION_EMAIL_CSS', NOTIFICATION_EMAIL_CSS)
NOTIFICATION_EMAIL_EDX_LOGO = ENV_TOKENS.get('NOTIFICATION_EMAIL_EDX_LOGO', NOTIFICATION_EMAIL_EDX_LOGO)
# Determines whether the CSRF token can be transported on
# unencrypted channels. It is set to False here for backward compatibility,
# but it is highly recommended that this is True for environments accessed
# by end users.
CSRF_COOKIE_SECURE = ENV_TOKENS.get('CSRF_COOKIE_SECURE', False)
############# CORS headers for cross-domain requests #################
if FEATURES.get('ENABLE_CORS_HEADERS') or FEATURES.get('ENABLE_CROSS_DOMAIN_CSRF_COOKIE'):
CORS_ALLOW_CREDENTIALS = True
CORS_ORIGIN_WHITELIST = ENV_TOKENS.get('CORS_ORIGIN_WHITELIST', ())
CORS_ORIGIN_ALLOW_ALL = ENV_TOKENS.get('CORS_ORIGIN_ALLOW_ALL', False)
CORS_ALLOW_INSECURE = ENV_TOKENS.get('CORS_ALLOW_INSECURE', False)
# If setting a cross-domain cookie, it's really important to choose
# a name for the cookie that is DIFFERENT than the cookies used
# by each subdomain. For example, suppose the applications
# at these subdomains are configured to use the following cookie names:
#
# 1) foo.example.com --> "csrftoken"
# 2) baz.example.com --> "csrftoken"
# 3) bar.example.com --> "csrftoken"
#
# For the cross-domain version of the CSRF cookie, you need to choose
# a name DIFFERENT than "csrftoken"; otherwise, the new token configured
# for ".example.com" could conflict with the other cookies,
# non-deterministically causing 403 responses.
#
# Because of the way Django stores cookies, the cookie name MUST
# be a `str`, not unicode. Otherwise `TypeError`s will be raised
# when Django tries to call the unicode `translate()` method with the wrong
# number of parameters.
CROSS_DOMAIN_CSRF_COOKIE_NAME = str(ENV_TOKENS.get('CROSS_DOMAIN_CSRF_COOKIE_NAME'))
# When setting the domain for the "cross-domain" version of the CSRF
# cookie, you should choose something like: ".example.com"
# (note the leading dot), where both the referer and the host
# are subdomains of "example.com".
#
# Browser security rules require that
# the cookie domain matches the domain of the server; otherwise
# the cookie won't get set. And once the cookie gets set, the client
# needs to be on a domain that matches the cookie domain, otherwise
# the client won't be able to read the cookie.
CROSS_DOMAIN_CSRF_COOKIE_DOMAIN = ENV_TOKENS.get('CROSS_DOMAIN_CSRF_COOKIE_DOMAIN')
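# Putting the two settings together, a hypothetical env.json could carry:
#   "CROSS_DOMAIN_CSRF_COOKIE_NAME": "prod-edx-csrftoken",
#   "CROSS_DOMAIN_CSRF_COOKIE_DOMAIN": ".example.com"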
# Field overrides. To use the IDDE feature, add
# 'courseware.student_field_overrides.IndividualStudentOverrideProvider'.
FIELD_OVERRIDE_PROVIDERS = tuple(ENV_TOKENS.get('FIELD_OVERRIDE_PROVIDERS', []))
############################## SECURE AUTH ITEMS ###############
# Secret things: passwords, access keys, etc.
with open(CONFIG_ROOT / CONFIG_PREFIX + "auth.json") as auth_file:
AUTH_TOKENS = json.load(auth_file)
############### XBlock filesystem field config ##########
if 'DJFS' in AUTH_TOKENS and AUTH_TOKENS['DJFS'] is not None:
DJFS = AUTH_TOKENS['DJFS']
############### Module Store Items ##########
HOSTNAME_MODULESTORE_DEFAULT_MAPPINGS = ENV_TOKENS.get('HOSTNAME_MODULESTORE_DEFAULT_MAPPINGS', {})
# PREVIEW DOMAIN must be present in HOSTNAME_MODULESTORE_DEFAULT_MAPPINGS for the preview to show draft changes
if 'PREVIEW_LMS_BASE' in FEATURES and FEATURES['PREVIEW_LMS_BASE'] != '':
PREVIEW_DOMAIN = FEATURES['PREVIEW_LMS_BASE'].split(':')[0]
# update dictionary with preview domain regex
HOSTNAME_MODULESTORE_DEFAULT_MAPPINGS.update({
PREVIEW_DOMAIN: 'draft-preferred'
})
MODULESTORE_FIELD_OVERRIDE_PROVIDERS = ENV_TOKENS.get(
'MODULESTORE_FIELD_OVERRIDE_PROVIDERS',
MODULESTORE_FIELD_OVERRIDE_PROVIDERS
)
XBLOCK_FIELD_DATA_WRAPPERS = ENV_TOKENS.get(
'XBLOCK_FIELD_DATA_WRAPPERS',
XBLOCK_FIELD_DATA_WRAPPERS
)
############### Mixed Related(Secure/Not-Secure) Items ##########
LMS_SEGMENT_KEY = AUTH_TOKENS.get('SEGMENT_KEY')
CC_PROCESSOR_NAME = AUTH_TOKENS.get('CC_PROCESSOR_NAME', CC_PROCESSOR_NAME)
CC_PROCESSOR = AUTH_TOKENS.get('CC_PROCESSOR', CC_PROCESSOR)
SECRET_KEY = AUTH_TOKENS['SECRET_KEY']
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
if AWS_ACCESS_KEY_ID == "":
AWS_ACCESS_KEY_ID = None
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
if AWS_SECRET_ACCESS_KEY == "":
AWS_SECRET_ACCESS_KEY = None
AWS_STORAGE_BUCKET_NAME = AUTH_TOKENS.get('AWS_STORAGE_BUCKET_NAME', 'edxuploads')
# Disabling querystring auth instructs Boto to exclude the querystring parameters (e.g. signature, access key) it
# normally appends to every returned URL.
AWS_QUERYSTRING_AUTH = AUTH_TOKENS.get('AWS_QUERYSTRING_AUTH', True)
AWS_S3_CUSTOM_DOMAIN = AUTH_TOKENS.get('AWS_S3_CUSTOM_DOMAIN', 'edxuploads.s3.amazonaws.com')
if AUTH_TOKENS.get('DEFAULT_FILE_STORAGE'):
DEFAULT_FILE_STORAGE = AUTH_TOKENS.get('DEFAULT_FILE_STORAGE')
elif AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
else:
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
# Specific setting for the File Upload Service to store media in a bucket.
FILE_UPLOAD_STORAGE_BUCKET_NAME = ENV_TOKENS.get('FILE_UPLOAD_STORAGE_BUCKET_NAME', FILE_UPLOAD_STORAGE_BUCKET_NAME)
FILE_UPLOAD_STORAGE_PREFIX = ENV_TOKENS.get('FILE_UPLOAD_STORAGE_PREFIX', FILE_UPLOAD_STORAGE_PREFIX)
# If there is a database called 'read_replica', you can use the use_read_replica_if_available
# function in util/query.py, which is useful for very large database reads
DATABASES = AUTH_TOKENS['DATABASES']
# The normal database user does not have enough permissions to run migrations.
# Migrations are run with separate credentials, given as DB_MIGRATION_*
# environment variables
for name, database in DATABASES.items():
if name != 'read_replica':
database.update({
'ENGINE': os.environ.get('DB_MIGRATION_ENGINE', database['ENGINE']),
'USER': os.environ.get('DB_MIGRATION_USER', database['USER']),
'PASSWORD': os.environ.get('DB_MIGRATION_PASS', database['PASSWORD']),
'NAME': os.environ.get('DB_MIGRATION_NAME', database['NAME']),
'HOST': os.environ.get('DB_MIGRATION_HOST', database['HOST']),
'PORT': os.environ.get('DB_MIGRATION_PORT', database['PORT']),
})
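# e.g. (hypothetical): exporting DB_MIGRATION_USER=migrate and
# DB_MIGRATION_PASS=... before running migrations makes Django connect with
# the privileged account, while web processes keep the regular credentials.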
XQUEUE_INTERFACE = AUTH_TOKENS['XQUEUE_INTERFACE']
# Get the MODULESTORE from auth.json, but if it doesn't exist,
# use the one from common.py
MODULESTORE = convert_module_store_setting_if_needed(AUTH_TOKENS.get('MODULESTORE', MODULESTORE))
CONTENTSTORE = AUTH_TOKENS.get('CONTENTSTORE', CONTENTSTORE)
DOC_STORE_CONFIG = AUTH_TOKENS.get('DOC_STORE_CONFIG', DOC_STORE_CONFIG)
MONGODB_LOG = AUTH_TOKENS.get('MONGODB_LOG', {})
EMAIL_HOST_USER = AUTH_TOKENS.get('EMAIL_HOST_USER', '') # django default is ''
EMAIL_HOST_PASSWORD = AUTH_TOKENS.get('EMAIL_HOST_PASSWORD', '') # django default is ''
# Datadog for events!
DATADOG = AUTH_TOKENS.get("DATADOG", {})
DATADOG.update(ENV_TOKENS.get("DATADOG", {}))
# TODO: deprecated (compatibility with previous settings)
if 'DATADOG_API' in AUTH_TOKENS:
DATADOG['api_key'] = AUTH_TOKENS['DATADOG_API']
# Analytics API
ANALYTICS_API_KEY = AUTH_TOKENS.get("ANALYTICS_API_KEY", ANALYTICS_API_KEY)
ANALYTICS_API_URL = ENV_TOKENS.get("ANALYTICS_API_URL", ANALYTICS_API_URL)
# Mailchimp New User List
MAILCHIMP_NEW_USER_LIST_ID = ENV_TOKENS.get("MAILCHIMP_NEW_USER_LIST_ID")
# Zendesk
ZENDESK_USER = AUTH_TOKENS.get("ZENDESK_USER")
ZENDESK_API_KEY = AUTH_TOKENS.get("ZENDESK_API_KEY")
# API Key for inbound requests from Notifier service
EDX_API_KEY = AUTH_TOKENS.get("EDX_API_KEY")
# Celery Broker
CELERY_BROKER_TRANSPORT = ENV_TOKENS.get("CELERY_BROKER_TRANSPORT", "")
CELERY_BROKER_HOSTNAME = ENV_TOKENS.get("CELERY_BROKER_HOSTNAME", "")
CELERY_BROKER_VHOST = ENV_TOKENS.get("CELERY_BROKER_VHOST", "")
CELERY_BROKER_USER = AUTH_TOKENS.get("CELERY_BROKER_USER", "")
CELERY_BROKER_PASSWORD = AUTH_TOKENS.get("CELERY_BROKER_PASSWORD", "")
BROKER_URL = "{0}://{1}:{2}@{3}/{4}".format(CELERY_BROKER_TRANSPORT,
CELERY_BROKER_USER,
CELERY_BROKER_PASSWORD,
CELERY_BROKER_HOSTNAME,
CELERY_BROKER_VHOST)
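# e.g. transport "amqp", user "celery", host "rabbit.internal", vhost "edx"
# (all hypothetical) yield "amqp://celery:<password>@rabbit.internal/edx".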
BROKER_USE_SSL = ENV_TOKENS.get('CELERY_BROKER_USE_SSL', False)
# Block Structures
BLOCK_STRUCTURES_SETTINGS = ENV_TOKENS.get('BLOCK_STRUCTURES_SETTINGS', BLOCK_STRUCTURES_SETTINGS)
# upload limits
STUDENT_FILEUPLOAD_MAX_SIZE = ENV_TOKENS.get("STUDENT_FILEUPLOAD_MAX_SIZE", STUDENT_FILEUPLOAD_MAX_SIZE)
# Event tracking
TRACKING_BACKENDS.update(AUTH_TOKENS.get("TRACKING_BACKENDS", {}))
EVENT_TRACKING_BACKENDS['tracking_logs']['OPTIONS']['backends'].update(AUTH_TOKENS.get("EVENT_TRACKING_BACKENDS", {}))
EVENT_TRACKING_BACKENDS['segmentio']['OPTIONS']['processors'][0]['OPTIONS']['whitelist'].extend(
AUTH_TOKENS.get("EVENT_TRACKING_SEGMENTIO_EMIT_WHITELIST", []))
TRACKING_SEGMENTIO_WEBHOOK_SECRET = AUTH_TOKENS.get(
"TRACKING_SEGMENTIO_WEBHOOK_SECRET",
TRACKING_SEGMENTIO_WEBHOOK_SECRET
)
TRACKING_SEGMENTIO_ALLOWED_TYPES = ENV_TOKENS.get("TRACKING_SEGMENTIO_ALLOWED_TYPES", TRACKING_SEGMENTIO_ALLOWED_TYPES)
TRACKING_SEGMENTIO_DISALLOWED_SUBSTRING_NAMES = ENV_TOKENS.get(
"TRACKING_SEGMENTIO_DISALLOWED_SUBSTRING_NAMES",
TRACKING_SEGMENTIO_DISALLOWED_SUBSTRING_NAMES
)
TRACKING_SEGMENTIO_SOURCE_MAP = ENV_TOKENS.get("TRACKING_SEGMENTIO_SOURCE_MAP", TRACKING_SEGMENTIO_SOURCE_MAP)
# Heartbeat
HEARTBEAT_CHECKS = ENV_TOKENS.get('HEARTBEAT_CHECKS', HEARTBEAT_CHECKS)
HEARTBEAT_EXTENDED_CHECKS = ENV_TOKENS.get('HEARTBEAT_EXTENDED_CHECKS', HEARTBEAT_EXTENDED_CHECKS)
HEARTBEAT_CELERY_TIMEOUT = ENV_TOKENS.get('HEARTBEAT_CELERY_TIMEOUT', HEARTBEAT_CELERY_TIMEOUT)
# Student identity verification settings
VERIFY_STUDENT = AUTH_TOKENS.get("VERIFY_STUDENT", VERIFY_STUDENT)
DISABLE_ACCOUNT_ACTIVATION_REQUIREMENT_SWITCH = ENV_TOKENS.get(
"DISABLE_ACCOUNT_ACTIVATION_REQUIREMENT_SWITCH",
DISABLE_ACCOUNT_ACTIVATION_REQUIREMENT_SWITCH
)
# Grades download
GRADES_DOWNLOAD_ROUTING_KEY = ENV_TOKENS.get('GRADES_DOWNLOAD_ROUTING_KEY', HIGH_MEM_QUEUE)
GRADES_DOWNLOAD = ENV_TOKENS.get("GRADES_DOWNLOAD", GRADES_DOWNLOAD)
# Rate limit for regrading tasks that a grading policy change can kick off
POLICY_CHANGE_TASK_RATE_LIMIT = ENV_TOKENS.get('POLICY_CHANGE_TASK_RATE_LIMIT', POLICY_CHANGE_TASK_RATE_LIMIT)
# financial reports
FINANCIAL_REPORTS = ENV_TOKENS.get("FINANCIAL_REPORTS", FINANCIAL_REPORTS)
##### ORA2 ######
# Prefix for uploads of example-based assessment AI classifiers
# This can be used to separate uploads for different environments
# within the same S3 bucket.
ORA2_FILE_PREFIX = ENV_TOKENS.get("ORA2_FILE_PREFIX", ORA2_FILE_PREFIX)
##### ACCOUNT LOCKOUT DEFAULT PARAMETERS #####
MAX_FAILED_LOGIN_ATTEMPTS_ALLOWED = ENV_TOKENS.get("MAX_FAILED_LOGIN_ATTEMPTS_ALLOWED", 5)
MAX_FAILED_LOGIN_ATTEMPTS_LOCKOUT_PERIOD_SECS = ENV_TOKENS.get("MAX_FAILED_LOGIN_ATTEMPTS_LOCKOUT_PERIOD_SECS", 15 * 60)
#### PASSWORD POLICY SETTINGS #####
PASSWORD_MIN_LENGTH = ENV_TOKENS.get("PASSWORD_MIN_LENGTH")
PASSWORD_MAX_LENGTH = ENV_TOKENS.get("PASSWORD_MAX_LENGTH")
PASSWORD_COMPLEXITY = ENV_TOKENS.get("PASSWORD_COMPLEXITY", {})
PASSWORD_DICTIONARY_EDIT_DISTANCE_THRESHOLD = ENV_TOKENS.get("PASSWORD_DICTIONARY_EDIT_DISTANCE_THRESHOLD")
PASSWORD_DICTIONARY = ENV_TOKENS.get("PASSWORD_DICTIONARY", [])
### INACTIVITY SETTINGS ####
SESSION_INACTIVITY_TIMEOUT_IN_SECONDS = AUTH_TOKENS.get("SESSION_INACTIVITY_TIMEOUT_IN_SECONDS")
##### LMS DEADLINE DISPLAY TIME_ZONE #######
TIME_ZONE_DISPLAYED_FOR_DEADLINES = ENV_TOKENS.get("TIME_ZONE_DISPLAYED_FOR_DEADLINES",
TIME_ZONE_DISPLAYED_FOR_DEADLINES)
##### X-Frame-Options response header settings #####
X_FRAME_OPTIONS = ENV_TOKENS.get('X_FRAME_OPTIONS', X_FRAME_OPTIONS)
##### Third-party auth options ################################################
if FEATURES.get('ENABLE_THIRD_PARTY_AUTH'):
tmp_backends = ENV_TOKENS.get('THIRD_PARTY_AUTH_BACKENDS', [
'social_core.backends.google.GoogleOAuth2',
'social_core.backends.linkedin.LinkedinOAuth2',
'social_core.backends.facebook.FacebookOAuth2',
'social_core.backends.azuread.AzureADOAuth2',
'third_party_auth.saml.SAMLAuthBackend',
'third_party_auth.lti.LTIAuthBackend',
])
AUTHENTICATION_BACKENDS = list(tmp_backends) + list(AUTHENTICATION_BACKENDS)
del tmp_backends
# The reduced session expiry time during the third party login pipeline. (Value in seconds)
SOCIAL_AUTH_PIPELINE_TIMEOUT = ENV_TOKENS.get('SOCIAL_AUTH_PIPELINE_TIMEOUT', 600)
# Most provider configuration is done via ConfigurationModels but for a few sensitive values
# we allow configuration via AUTH_TOKENS instead (optionally).
# The SAML private/public key values do not need the delimiter lines (such as
# "-----BEGIN PRIVATE KEY-----", "-----END PRIVATE KEY-----" etc.) but they may be included
# if you want (though it's easier to format the key values as JSON without the delimiters).
SOCIAL_AUTH_SAML_SP_PRIVATE_KEY = AUTH_TOKENS.get('SOCIAL_AUTH_SAML_SP_PRIVATE_KEY', '')
SOCIAL_AUTH_SAML_SP_PUBLIC_CERT = AUTH_TOKENS.get('SOCIAL_AUTH_SAML_SP_PUBLIC_CERT', '')
SOCIAL_AUTH_SAML_SP_PRIVATE_KEY_DICT = AUTH_TOKENS.get('SOCIAL_AUTH_SAML_SP_PRIVATE_KEY_DICT', {})
SOCIAL_AUTH_SAML_SP_PUBLIC_CERT_DICT = AUTH_TOKENS.get('SOCIAL_AUTH_SAML_SP_PUBLIC_CERT_DICT', {})
SOCIAL_AUTH_OAUTH_SECRETS = AUTH_TOKENS.get('SOCIAL_AUTH_OAUTH_SECRETS', {})
SOCIAL_AUTH_LTI_CONSUMER_SECRETS = AUTH_TOKENS.get('SOCIAL_AUTH_LTI_CONSUMER_SECRETS', {})
# third_party_auth config moved to ConfigurationModels. This is for data migration only:
THIRD_PARTY_AUTH_OLD_CONFIG = AUTH_TOKENS.get('THIRD_PARTY_AUTH', None)
if ENV_TOKENS.get('THIRD_PARTY_AUTH_SAML_FETCH_PERIOD_HOURS', 24) is not None:
CELERYBEAT_SCHEDULE['refresh-saml-metadata'] = {
'task': 'third_party_auth.fetch_saml_metadata',
'schedule': datetime.timedelta(hours=ENV_TOKENS.get('THIRD_PARTY_AUTH_SAML_FETCH_PERIOD_HOURS', 24)),
}
# The following can be used to integrate a custom login form with third_party_auth.
# It should be a dict where the key is a word passed via ?auth_entry=, and the value is a
# dict with an arbitrary 'secret_key' and a 'url'.
THIRD_PARTY_AUTH_CUSTOM_AUTH_FORMS = AUTH_TOKENS.get('THIRD_PARTY_AUTH_CUSTOM_AUTH_FORMS', {})
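    # A hypothetical sketch of the expected shape (the entry name, secret and
    # URL below are illustrative, not shipped defaults):
    #
    #   THIRD_PARTY_AUTH_CUSTOM_AUTH_FORMS = {
    #       'my-entry': {'secret_key': 'some-shared-secret', 'url': '/auth/my-form/'},
    #   }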
##### OAUTH2 Provider ##############
if FEATURES.get('ENABLE_OAUTH2_PROVIDER'):
OAUTH_OIDC_ISSUER = ENV_TOKENS['OAUTH_OIDC_ISSUER']
OAUTH_ENFORCE_SECURE = ENV_TOKENS.get('OAUTH_ENFORCE_SECURE', True)
OAUTH_ENFORCE_CLIENT_SECURE = ENV_TOKENS.get('OAUTH_ENFORCE_CLIENT_SECURE', True)
# Defaults for the following are defined in lms.envs.common
OAUTH_EXPIRE_DELTA = datetime.timedelta(
days=ENV_TOKENS.get('OAUTH_EXPIRE_CONFIDENTIAL_CLIENT_DAYS', OAUTH_EXPIRE_CONFIDENTIAL_CLIENT_DAYS)
)
OAUTH_EXPIRE_DELTA_PUBLIC = datetime.timedelta(
days=ENV_TOKENS.get('OAUTH_EXPIRE_PUBLIC_CLIENT_DAYS', OAUTH_EXPIRE_PUBLIC_CLIENT_DAYS)
)
OAUTH_ID_TOKEN_EXPIRATION = ENV_TOKENS.get('OAUTH_ID_TOKEN_EXPIRATION', OAUTH_ID_TOKEN_EXPIRATION)
OAUTH_DELETE_EXPIRED = ENV_TOKENS.get('OAUTH_DELETE_EXPIRED', OAUTH_DELETE_EXPIRED)
##### ADVANCED_SECURITY_CONFIG #####
ADVANCED_SECURITY_CONFIG = ENV_TOKENS.get('ADVANCED_SECURITY_CONFIG', {})
##### GOOGLE ANALYTICS IDS #####
GOOGLE_ANALYTICS_ACCOUNT = AUTH_TOKENS.get('GOOGLE_ANALYTICS_ACCOUNT')
GOOGLE_ANALYTICS_TRACKING_ID = AUTH_TOKENS.get('GOOGLE_ANALYTICS_TRACKING_ID')
GOOGLE_ANALYTICS_LINKEDIN = AUTH_TOKENS.get('GOOGLE_ANALYTICS_LINKEDIN')
GOOGLE_SITE_VERIFICATION_ID = ENV_TOKENS.get('GOOGLE_SITE_VERIFICATION_ID')
##### BRANCH.IO KEY #####
BRANCH_IO_KEY = AUTH_TOKENS.get('BRANCH_IO_KEY')
##### OPTIMIZELY PROJECT ID #####
OPTIMIZELY_PROJECT_ID = AUTH_TOKENS.get('OPTIMIZELY_PROJECT_ID', OPTIMIZELY_PROJECT_ID)
#### Course Registration Code length ####
REGISTRATION_CODE_LENGTH = ENV_TOKENS.get('REGISTRATION_CODE_LENGTH', 8)
# REGISTRATION CODES DISPLAY INFORMATION
INVOICE_CORP_ADDRESS = ENV_TOKENS.get('INVOICE_CORP_ADDRESS', INVOICE_CORP_ADDRESS)
INVOICE_PAYMENT_INSTRUCTIONS = ENV_TOKENS.get('INVOICE_PAYMENT_INSTRUCTIONS', INVOICE_PAYMENT_INSTRUCTIONS)
# Which access.py permission names to check;
# we default this to the legacy permission 'see_exists'.
COURSE_CATALOG_VISIBILITY_PERMISSION = ENV_TOKENS.get(
'COURSE_CATALOG_VISIBILITY_PERMISSION',
COURSE_CATALOG_VISIBILITY_PERMISSION
)
COURSE_ABOUT_VISIBILITY_PERMISSION = ENV_TOKENS.get(
'COURSE_ABOUT_VISIBILITY_PERMISSION',
COURSE_ABOUT_VISIBILITY_PERMISSION
)
DEFAULT_COURSE_VISIBILITY_IN_CATALOG = ENV_TOKENS.get(
'DEFAULT_COURSE_VISIBILITY_IN_CATALOG',
DEFAULT_COURSE_VISIBILITY_IN_CATALOG
)
DEFAULT_MOBILE_AVAILABLE = ENV_TOKENS.get(
'DEFAULT_MOBILE_AVAILABLE',
DEFAULT_MOBILE_AVAILABLE
)
# Enrollment API Cache Timeout
ENROLLMENT_COURSE_DETAILS_CACHE_TIMEOUT = ENV_TOKENS.get('ENROLLMENT_COURSE_DETAILS_CACHE_TIMEOUT', 60)
# PDF RECEIPT/INVOICE OVERRIDES
PDF_RECEIPT_TAX_ID = ENV_TOKENS.get('PDF_RECEIPT_TAX_ID', PDF_RECEIPT_TAX_ID)
PDF_RECEIPT_FOOTER_TEXT = ENV_TOKENS.get('PDF_RECEIPT_FOOTER_TEXT', PDF_RECEIPT_FOOTER_TEXT)
PDF_RECEIPT_DISCLAIMER_TEXT = ENV_TOKENS.get('PDF_RECEIPT_DISCLAIMER_TEXT', PDF_RECEIPT_DISCLAIMER_TEXT)
PDF_RECEIPT_BILLING_ADDRESS = ENV_TOKENS.get('PDF_RECEIPT_BILLING_ADDRESS', PDF_RECEIPT_BILLING_ADDRESS)
PDF_RECEIPT_TERMS_AND_CONDITIONS = ENV_TOKENS.get('PDF_RECEIPT_TERMS_AND_CONDITIONS', PDF_RECEIPT_TERMS_AND_CONDITIONS)
PDF_RECEIPT_TAX_ID_LABEL = ENV_TOKENS.get('PDF_RECEIPT_TAX_ID_LABEL', PDF_RECEIPT_TAX_ID_LABEL)
PDF_RECEIPT_LOGO_PATH = ENV_TOKENS.get('PDF_RECEIPT_LOGO_PATH', PDF_RECEIPT_LOGO_PATH)
PDF_RECEIPT_COBRAND_LOGO_PATH = ENV_TOKENS.get('PDF_RECEIPT_COBRAND_LOGO_PATH', PDF_RECEIPT_COBRAND_LOGO_PATH)
PDF_RECEIPT_LOGO_HEIGHT_MM = ENV_TOKENS.get('PDF_RECEIPT_LOGO_HEIGHT_MM', PDF_RECEIPT_LOGO_HEIGHT_MM)
PDF_RECEIPT_COBRAND_LOGO_HEIGHT_MM = ENV_TOKENS.get(
'PDF_RECEIPT_COBRAND_LOGO_HEIGHT_MM', PDF_RECEIPT_COBRAND_LOGO_HEIGHT_MM
)
if FEATURES.get('ENABLE_COURSEWARE_SEARCH') or \
FEATURES.get('ENABLE_DASHBOARD_SEARCH') or \
FEATURES.get('ENABLE_COURSE_DISCOVERY') or \
FEATURES.get('ENABLE_TEAMS'):
# Use ElasticSearch as the search engine herein
SEARCH_ENGINE = "search.elastic.ElasticSearchEngine"
ELASTIC_SEARCH_CONFIG = ENV_TOKENS.get('ELASTIC_SEARCH_CONFIG', [{}])
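    # A hypothetical example of the expected shape (one dict per cluster node;
    # the host/port values are placeholders, not defaults):
    #
    #   ELASTIC_SEARCH_CONFIG = [{'host': 'es.example.com', 'port': 9200, 'use_ssl': False}]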
# Facebook app
FACEBOOK_API_VERSION = AUTH_TOKENS.get("FACEBOOK_API_VERSION")
FACEBOOK_APP_SECRET = AUTH_TOKENS.get("FACEBOOK_APP_SECRET")
FACEBOOK_APP_ID = AUTH_TOKENS.get("FACEBOOK_APP_ID")
XBLOCK_SETTINGS = ENV_TOKENS.get('XBLOCK_SETTINGS', {})
XBLOCK_SETTINGS.setdefault("VideoDescriptor", {})["licensing_enabled"] = FEATURES.get("LICENSING", False)
XBLOCK_SETTINGS.setdefault("VideoModule", {})['YOUTUBE_API_KEY'] = AUTH_TOKENS.get('YOUTUBE_API_KEY', YOUTUBE_API_KEY)
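# After the two lines above, XBLOCK_SETTINGS contains (sketch):
#   {'VideoDescriptor': {'licensing_enabled': <bool>},
#    'VideoModule': {'YOUTUBE_API_KEY': '<key>'},
#    ...plus anything supplied via ENV_TOKENS['XBLOCK_SETTINGS']...}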
##### VIDEO IMAGE STORAGE #####
VIDEO_IMAGE_SETTINGS = ENV_TOKENS.get('VIDEO_IMAGE_SETTINGS', VIDEO_IMAGE_SETTINGS)
##### VIDEO TRANSCRIPTS STORAGE #####
VIDEO_TRANSCRIPTS_SETTINGS = ENV_TOKENS.get('VIDEO_TRANSCRIPTS_SETTINGS', VIDEO_TRANSCRIPTS_SETTINGS)
##### ECOMMERCE API CONFIGURATION SETTINGS #####
ECOMMERCE_PUBLIC_URL_ROOT = ENV_TOKENS.get('ECOMMERCE_PUBLIC_URL_ROOT', ECOMMERCE_PUBLIC_URL_ROOT)
ECOMMERCE_API_URL = ENV_TOKENS.get('ECOMMERCE_API_URL', ECOMMERCE_API_URL)
ECOMMERCE_API_TIMEOUT = ENV_TOKENS.get('ECOMMERCE_API_TIMEOUT', ECOMMERCE_API_TIMEOUT)
COURSE_CATALOG_API_URL = ENV_TOKENS.get('COURSE_CATALOG_API_URL', COURSE_CATALOG_API_URL)
ECOMMERCE_SERVICE_WORKER_USERNAME = ENV_TOKENS.get(
'ECOMMERCE_SERVICE_WORKER_USERNAME',
ECOMMERCE_SERVICE_WORKER_USERNAME
)
##### Custom Courses for EdX #####
if FEATURES.get('CUSTOM_COURSES_EDX'):
INSTALLED_APPS += ['lms.djangoapps.ccx', 'openedx.core.djangoapps.ccxcon.apps.CCXConnectorConfig']
MODULESTORE_FIELD_OVERRIDE_PROVIDERS += (
'lms.djangoapps.ccx.overrides.CustomCoursesForEdxOverrideProvider',
)
CCX_MAX_STUDENTS_ALLOWED = ENV_TOKENS.get('CCX_MAX_STUDENTS_ALLOWED', CCX_MAX_STUDENTS_ALLOWED)
##### Individual Due Date Extensions #####
if FEATURES.get('INDIVIDUAL_DUE_DATES'):
FIELD_OVERRIDE_PROVIDERS += (
'courseware.student_field_overrides.IndividualStudentOverrideProvider',
)
##### Self-Paced Course Due Dates #####
XBLOCK_FIELD_DATA_WRAPPERS += (
'lms.djangoapps.courseware.field_overrides:OverrideModulestoreFieldData.wrap',
)
MODULESTORE_FIELD_OVERRIDE_PROVIDERS += (
'courseware.self_paced_overrides.SelfPacedDateOverrideProvider',
)
# PROFILE IMAGE CONFIG
PROFILE_IMAGE_BACKEND = ENV_TOKENS.get('PROFILE_IMAGE_BACKEND', PROFILE_IMAGE_BACKEND)
PROFILE_IMAGE_SECRET_KEY = AUTH_TOKENS.get('PROFILE_IMAGE_SECRET_KEY', PROFILE_IMAGE_SECRET_KEY)
PROFILE_IMAGE_MAX_BYTES = ENV_TOKENS.get('PROFILE_IMAGE_MAX_BYTES', PROFILE_IMAGE_MAX_BYTES)
PROFILE_IMAGE_MIN_BYTES = ENV_TOKENS.get('PROFILE_IMAGE_MIN_BYTES', PROFILE_IMAGE_MIN_BYTES)
PROFILE_IMAGE_DEFAULT_FILENAME = 'images/profiles/default'
PROFILE_IMAGE_SIZES_MAP = ENV_TOKENS.get(
'PROFILE_IMAGE_SIZES_MAP',
PROFILE_IMAGE_SIZES_MAP
)
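# A hypothetical example of the map's shape (size name -> pixel edge length;
# the names and values here are illustrative only):
#   PROFILE_IMAGE_SIZES_MAP = {'full': 500, 'large': 120, 'medium': 50, 'small': 30}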
# EdxNotes config
EDXNOTES_PUBLIC_API = ENV_TOKENS.get('EDXNOTES_PUBLIC_API', EDXNOTES_PUBLIC_API)
EDXNOTES_INTERNAL_API = ENV_TOKENS.get('EDXNOTES_INTERNAL_API', EDXNOTES_INTERNAL_API)
EDXNOTES_CONNECT_TIMEOUT = ENV_TOKENS.get('EDXNOTES_CONNECT_TIMEOUT', EDXNOTES_CONNECT_TIMEOUT)
EDXNOTES_READ_TIMEOUT = ENV_TOKENS.get('EDXNOTES_READ_TIMEOUT', EDXNOTES_READ_TIMEOUT)
##### Credit Provider Integration #####
CREDIT_PROVIDER_SECRET_KEYS = AUTH_TOKENS.get("CREDIT_PROVIDER_SECRET_KEYS", {})
##################### LTI Provider #####################
if FEATURES.get('ENABLE_LTI_PROVIDER'):
INSTALLED_APPS.append('lti_provider.apps.LtiProviderConfig')
AUTHENTICATION_BACKENDS.append('lti_provider.users.LtiBackend')
LTI_USER_EMAIL_DOMAIN = ENV_TOKENS.get('LTI_USER_EMAIL_DOMAIN', 'lti.example.com')
# For more info on this, see the notes in common.py
LTI_AGGREGATE_SCORE_PASSBACK_DELAY = ENV_TOKENS.get(
'LTI_AGGREGATE_SCORE_PASSBACK_DELAY', LTI_AGGREGATE_SCORE_PASSBACK_DELAY
)
##################### Credit Provider help link ####################
CREDIT_HELP_LINK_URL = ENV_TOKENS.get('CREDIT_HELP_LINK_URL', CREDIT_HELP_LINK_URL)
#### JWT configuration ####
JWT_AUTH.update(ENV_TOKENS.get('JWT_AUTH', {}))
JWT_AUTH.update(AUTH_TOKENS.get('JWT_AUTH', {}))
################# PROCTORING CONFIGURATION ##################
PROCTORING_BACKEND_PROVIDER = AUTH_TOKENS.get("PROCTORING_BACKEND_PROVIDER", PROCTORING_BACKEND_PROVIDER)
PROCTORING_SETTINGS = ENV_TOKENS.get("PROCTORING_SETTINGS", PROCTORING_SETTINGS)
################# MICROSITE ####################
MICROSITE_CONFIGURATION = ENV_TOKENS.get('MICROSITE_CONFIGURATION', {})
MICROSITE_ROOT_DIR = path(ENV_TOKENS.get('MICROSITE_ROOT_DIR', ''))
# This setting specifies which backend to use when pulling microsite-specific configuration
MICROSITE_BACKEND = ENV_TOKENS.get("MICROSITE_BACKEND", MICROSITE_BACKEND)
# This setting specifies which backend to use when loading microsite-specific templates
MICROSITE_TEMPLATE_BACKEND = ENV_TOKENS.get("MICROSITE_TEMPLATE_BACKEND", MICROSITE_TEMPLATE_BACKEND)
# TTL for microsite database template cache
MICROSITE_DATABASE_TEMPLATE_CACHE_TTL = ENV_TOKENS.get(
"MICROSITE_DATABASE_TEMPLATE_CACHE_TTL", MICROSITE_DATABASE_TEMPLATE_CACHE_TTL
)
# Offset for pk of courseware.StudentModuleHistoryExtended
STUDENTMODULEHISTORYEXTENDED_OFFSET = ENV_TOKENS.get(
'STUDENTMODULEHISTORYEXTENDED_OFFSET', STUDENTMODULEHISTORYEXTENDED_OFFSET
)
# Cutoff date for granting audit certificates
if ENV_TOKENS.get('AUDIT_CERT_CUTOFF_DATE', None):
AUDIT_CERT_CUTOFF_DATE = dateutil.parser.parse(ENV_TOKENS.get('AUDIT_CERT_CUTOFF_DATE'))
################################ Settings for Credentials Service ################################
CREDENTIALS_GENERATION_ROUTING_KEY = ENV_TOKENS.get('CREDENTIALS_GENERATION_ROUTING_KEY', HIGH_PRIORITY_QUEUE)
# The extended StudentModule history table
if FEATURES.get('ENABLE_CSMH_EXTENDED'):
INSTALLED_APPS.append('coursewarehistoryextended')
API_ACCESS_MANAGER_EMAIL = ENV_TOKENS.get('API_ACCESS_MANAGER_EMAIL')
API_ACCESS_FROM_EMAIL = ENV_TOKENS.get('API_ACCESS_FROM_EMAIL')
# Mobile App Version Upgrade config
APP_UPGRADE_CACHE_TIMEOUT = ENV_TOKENS.get('APP_UPGRADE_CACHE_TIMEOUT', APP_UPGRADE_CACHE_TIMEOUT)
AFFILIATE_COOKIE_NAME = ENV_TOKENS.get('AFFILIATE_COOKIE_NAME', AFFILIATE_COOKIE_NAME)
############## Settings for LMS Context Sensitive Help ##############
HELP_TOKENS_BOOKS = ENV_TOKENS.get('HELP_TOKENS_BOOKS', HELP_TOKENS_BOOKS)
############## OPEN EDX ENTERPRISE SERVICE CONFIGURATION ######################
# The Open edX Enterprise service is currently hosted via the LMS container/process.
# However, for all intents and purposes this service is treated as a standalone IDA.
# These configuration settings are specific to the Enterprise service and you should
# not find references to them within the edx-platform project.
# Publicly-accessible enrollment URL, for use on the client side.
ENTERPRISE_PUBLIC_ENROLLMENT_API_URL = ENV_TOKENS.get(
'ENTERPRISE_PUBLIC_ENROLLMENT_API_URL',
(LMS_ROOT_URL or '') + LMS_ENROLLMENT_API_PATH
)
# Enrollment URL used on the server-side.
ENTERPRISE_ENROLLMENT_API_URL = ENV_TOKENS.get(
'ENTERPRISE_ENROLLMENT_API_URL',
(LMS_INTERNAL_ROOT_URL or '') + LMS_ENROLLMENT_API_PATH
)
# Enterprise logo image size limit, in kilobytes
ENTERPRISE_CUSTOMER_LOGO_IMAGE_SIZE = ENV_TOKENS.get(
'ENTERPRISE_CUSTOMER_LOGO_IMAGE_SIZE',
ENTERPRISE_CUSTOMER_LOGO_IMAGE_SIZE
)
# Course enrollment modes to be hidden in the Enterprise enrollment page
# if the "Hide audit track" flag is enabled for an EnterpriseCustomer
ENTERPRISE_COURSE_ENROLLMENT_AUDIT_MODES = ENV_TOKENS.get(
'ENTERPRISE_COURSE_ENROLLMENT_AUDIT_MODES',
ENTERPRISE_COURSE_ENROLLMENT_AUDIT_MODES
)
# A support URL used on Enterprise landing pages when a warning
# message is displayed.
ENTERPRISE_SUPPORT_URL = ENV_TOKENS.get(
'ENTERPRISE_SUPPORT_URL',
ENTERPRISE_SUPPORT_URL
)
# A shared secret to be used for encrypting passwords passed from the enterprise API
# to the enterprise reporting script.
ENTERPRISE_REPORTING_SECRET = AUTH_TOKENS.get(
'ENTERPRISE_REPORTING_SECRET',
ENTERPRISE_REPORTING_SECRET
)
# A default dictionary used to filter the enterprise customer catalog content.
ENTERPRISE_CUSTOMER_CATALOG_DEFAULT_CONTENT_FILTER = ENV_TOKENS.get(
'ENTERPRISE_CUSTOMER_CATALOG_DEFAULT_CONTENT_FILTER',
ENTERPRISE_CUSTOMER_CATALOG_DEFAULT_CONTENT_FILTER
)
############## ENTERPRISE SERVICE API CLIENT CONFIGURATION ######################
# The LMS communicates with the Enterprise service via the EdxRestApiClient class
# The below environmental settings are utilized by the LMS when interacting with
# the service, and override the default parameters which are defined in common.py
DEFAULT_ENTERPRISE_API_URL = None
if LMS_INTERNAL_ROOT_URL is not None:
DEFAULT_ENTERPRISE_API_URL = LMS_INTERNAL_ROOT_URL + '/enterprise/api/v1/'
ENTERPRISE_API_URL = ENV_TOKENS.get('ENTERPRISE_API_URL', DEFAULT_ENTERPRISE_API_URL)
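# For example, with LMS_INTERNAL_ROOT_URL = 'https://lms.internal.example.com'
# (an illustrative value), the default above resolves to
# 'https://lms.internal.example.com/enterprise/api/v1/'.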
DEFAULT_ENTERPRISE_CONSENT_API_URL = None
if LMS_INTERNAL_ROOT_URL is not None:
DEFAULT_ENTERPRISE_CONSENT_API_URL = LMS_INTERNAL_ROOT_URL + '/consent/api/v1/'
ENTERPRISE_CONSENT_API_URL = ENV_TOKENS.get('ENTERPRISE_CONSENT_API_URL', DEFAULT_ENTERPRISE_CONSENT_API_URL)
ENTERPRISE_SERVICE_WORKER_USERNAME = ENV_TOKENS.get(
'ENTERPRISE_SERVICE_WORKER_USERNAME',
ENTERPRISE_SERVICE_WORKER_USERNAME
)
ENTERPRISE_API_CACHE_TIMEOUT = ENV_TOKENS.get(
'ENTERPRISE_API_CACHE_TIMEOUT',
ENTERPRISE_API_CACHE_TIMEOUT
)
############## ENTERPRISE SERVICE LMS CONFIGURATION ##################################
# The LMS has some features embedded that are related to the Enterprise service, but
# which are not provided by the Enterprise service. These settings override the
# base values for the parameters as defined in common.py
ENTERPRISE_PLATFORM_WELCOME_TEMPLATE = ENV_TOKENS.get(
'ENTERPRISE_PLATFORM_WELCOME_TEMPLATE',
ENTERPRISE_PLATFORM_WELCOME_TEMPLATE
)
ENTERPRISE_SPECIFIC_BRANDED_WELCOME_TEMPLATE = ENV_TOKENS.get(
'ENTERPRISE_SPECIFIC_BRANDED_WELCOME_TEMPLATE',
ENTERPRISE_SPECIFIC_BRANDED_WELCOME_TEMPLATE
)
ENTERPRISE_TAGLINE = ENV_TOKENS.get(
'ENTERPRISE_TAGLINE',
ENTERPRISE_TAGLINE
)
ENTERPRISE_EXCLUDED_REGISTRATION_FIELDS = set(
ENV_TOKENS.get(
'ENTERPRISE_EXCLUDED_REGISTRATION_FIELDS',
ENTERPRISE_EXCLUDED_REGISTRATION_FIELDS
)
)
BASE_COOKIE_DOMAIN = ENV_TOKENS.get(
'BASE_COOKIE_DOMAIN',
BASE_COOKIE_DOMAIN
)
############## CATALOG/DISCOVERY SERVICE API CLIENT CONFIGURATION ######################
# The LMS communicates with the Catalog service via the EdxRestApiClient class
# The below environmental settings are utilized by the LMS when interacting with
# the service, and override the default parameters which are defined in common.py
COURSES_API_CACHE_TIMEOUT = ENV_TOKENS.get('COURSES_API_CACHE_TIMEOUT', COURSES_API_CACHE_TIMEOUT)
# Add an ICP license for serving content in China if your organization is registered to do so
ICP_LICENSE = ENV_TOKENS.get('ICP_LICENSE', None)
############## Settings for CourseGraph ############################
COURSEGRAPH_JOB_QUEUE = ENV_TOKENS.get('COURSEGRAPH_JOB_QUEUE', LOW_PRIORITY_QUEUE)
########################## Parental controls config #######################
# The age at which a learner no longer requires parental consent, or None
# if parental consent is never required.
PARENTAL_CONSENT_AGE_LIMIT = ENV_TOKENS.get(
'PARENTAL_CONSENT_AGE_LIMIT',
PARENTAL_CONSENT_AGE_LIMIT
)
# Do NOT calculate this dynamically at startup with git because it's *slow*.
EDX_PLATFORM_REVISION = ENV_TOKENS.get('EDX_PLATFORM_REVISION', EDX_PLATFORM_REVISION)
########################## Extra middleware classes #######################
# Allow extra middleware classes to be added to the app through configuration.
MIDDLEWARE_CLASSES.extend(ENV_TOKENS.get('EXTRA_MIDDLEWARE_CLASSES', []))
########################## Settings for Completion API #####################
# Once a user has watched this percentage of a video, mark it as complete:
# (0.0 = 0%, 1.0 = 100%)
COMPLETION_VIDEO_COMPLETE_PERCENTAGE = ENV_TOKENS.get(
'COMPLETION_VIDEO_COMPLETE_PERCENTAGE',
COMPLETION_VIDEO_COMPLETE_PERCENTAGE,
)
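# For example, with COMPLETION_VIDEO_COMPLETE_PERCENTAGE = 0.95, a ten-minute
# video is marked complete after roughly nine and a half minutes of viewing.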
# The time a block needs to be viewed to be considered complete, in milliseconds.
COMPLETION_BY_VIEWING_DELAY_MS = ENV_TOKENS.get('COMPLETION_BY_VIEWING_DELAY_MS', COMPLETION_BY_VIEWING_DELAY_MS)
############### Settings for django-fernet-fields ##################
FERNET_KEYS = AUTH_TOKENS.get('FERNET_KEYS', FERNET_KEYS)
################# Settings for the maintenance banner #################
MAINTENANCE_BANNER_TEXT = ENV_TOKENS.get('MAINTENANCE_BANNER_TEXT', None)
############### Settings for Retirement #####################
RETIRED_USERNAME_PREFIX = ENV_TOKENS.get('RETIRED_USERNAME_PREFIX', RETIRED_USERNAME_PREFIX)
RETIRED_EMAIL_PREFIX = ENV_TOKENS.get('RETIRED_EMAIL_PREFIX', RETIRED_EMAIL_PREFIX)
RETIRED_EMAIL_DOMAIN = ENV_TOKENS.get('RETIRED_EMAIL_DOMAIN', RETIRED_EMAIL_DOMAIN)
RETIREMENT_SERVICE_WORKER_USERNAME = ENV_TOKENS.get(
'RETIREMENT_SERVICE_WORKER_USERNAME',
RETIREMENT_SERVICE_WORKER_USERNAME
)
RETIREMENT_STATES = ENV_TOKENS.get('RETIREMENT_STATES', RETIREMENT_STATES)
############## Settings for Course Enrollment Modes ######################
COURSE_ENROLLMENT_MODES = ENV_TOKENS.get('COURSE_ENROLLMENT_MODES', COURSE_ENROLLMENT_MODES)
############################### Plugin Settings ###############################
from openedx.core.djangoapps.plugins import plugin_settings, constants as plugin_constants
plugin_settings.add_plugins(__name__, plugin_constants.ProjectType.LMS, plugin_constants.SettingsType.AWS)
########################## Derive Any Derived Settings #######################
derive_settings(__name__)
|
ahmedaljazzar/edx-platform
|
lms/envs/aws.py
|
Python
|
agpl-3.0
| 50,591
|
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
import mock
from odoo.tests import SavepointCase
class TestSaleExceptionMultiRecord(SavepointCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.env = cls.env(context=dict(cls.env.context, tracking_disable=True))
def test_sale_order_exception(self):
exception_no_sol = self.env.ref("sale_exception.excep_no_sol")
exception_no_free = self.env.ref("sale_exception.excep_no_free")
exception_no_dumping = self.env.ref("sale_exception.excep_no_dumping")
exceptions = exception_no_sol + exception_no_free + exception_no_dumping
exceptions.write({"active": True})
partner = self.env.ref("base.res_partner_1")
p = self.env.ref("product.product_product_7")
so1 = self.env["sale.order"].create(
{
"partner_id": partner.id,
"partner_invoice_id": partner.id,
"partner_shipping_id": partner.id,
"order_line": [
(
0,
0,
{
"name": p.name,
"product_id": p.id,
"product_uom_qty": 2,
"product_uom": p.uom_id.id,
"price_unit": p.list_price,
},
)
],
"pricelist_id": self.env.ref("product.list0").id,
}
)
so2 = self.env["sale.order"].create(
{
"partner_id": partner.id,
"partner_invoice_id": partner.id,
"partner_shipping_id": partner.id,
"pricelist_id": self.env.ref("product.list0").id,
}
)
so3 = self.env["sale.order"].create(
{
"partner_id": partner.id,
"partner_invoice_id": partner.id,
"partner_shipping_id": partner.id,
"order_line": [
(
0,
0,
{
"name": p.name,
"product_id": p.id,
"product_uom_qty": 2,
"product_uom": p.uom_id.id,
"price_unit": p.list_price / 2,
},
)
],
"pricelist_id": self.env.ref("product.list0").id,
}
)
orders = so1 + so2 + so3
for order in orders:
# ensure init state
self.assertTrue(order.state == "draft")
self.assertTrue(len(order.exception_ids) == 0)
self.env["sale.order"].test_all_draft_orders()
# basic tests
self.assertTrue(so1.state == "draft")
self.assertTrue(len(so1.exception_ids) == 0)
self.assertTrue(so2.state == "draft")
self.assertTrue(exception_no_sol in so2.exception_ids)
self.assertTrue(exception_no_free in so2.exception_ids)
self.assertTrue(so3.state == "draft")
self.assertTrue(exception_no_dumping in so3.exception_ids)
self.assertEqual(
so3.order_line[0].exceptions_summary,
(
"<ul>"
"<li>No dumping: <i>A product is sold cheaper than his cost.</i></li>"
"</ul>"
),
)
        # test return value of detect_exceptions()
all_detected = orders.detect_exceptions()
self.assertTrue(exception_no_sol.id in all_detected)
self.assertTrue(exception_no_dumping.id in all_detected)
self.assertTrue(exception_no_free.id in all_detected)
one_two_detected = (so1 + so2).detect_exceptions()
self.assertTrue(exception_no_sol.id in one_two_detected)
self.assertFalse(exception_no_dumping.id in one_two_detected)
self.assertTrue(exception_no_free.id in one_two_detected)
# test subset of rules
domain = [("model", "=", "sale.order"), ("id", "!=", exception_no_sol.id)]
with mock.patch.object(type(orders), "_rule_domain", return_value=domain):
# even if the rule is excluded from the search
# it should still be present on the sale order
orders.detect_exceptions()
all_detected = orders.mapped("exception_ids").ids
self.assertTrue(exception_no_sol.id in all_detected)
self.assertTrue(exception_no_dumping.id in all_detected)
self.assertTrue(exception_no_free.id in all_detected)
|
OCA/sale-workflow
|
sale_exception/tests/test_multi_records.py
|
Python
|
agpl-3.0
| 4,701
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2013 Noviat nv/sa (www.noviat.com). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import xlwt
from openerp.addons.report_xls.report_xls import report_xls
from openerp.addons.report_xls.utils import rowcol_to_cell
from openerp.addons.account_financial_report_webkit.report.partner_balance \
import PartnerBalanceWebkit
from openerp.tools.translate import _
# import logging
# _logger = logging.getLogger(__name__)
def display_line(all_comparison_lines):
    return any(line.get('balance') for line in all_comparison_lines)
class partners_balance_xls(report_xls):
column_sizes = [12, 40, 25, 17, 17, 17, 17, 17]
def print_title(self, ws, _p, row_position, xlwtlib, _xs):
cell_style = xlwtlib.easyxf(_xs['xls_title'])
report_name = ' - '.join([_p.report_name.upper(),
_p.company.partner_id.name,
_p.company.currency_id.name])
c_specs = [
('report_name', 1, 0, 'text', report_name),
]
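        # Each c_specs entry follows the report_xls row-template convention
        # (a sketch; see report_xls for the authoritative definition):
        #   (name, colspan, column size, cell type, data[, formula, style])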
row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
row_position = self.xls_write_row(
ws, row_position, row_data, row_style=cell_style)
return row_position
def print_empty_row(self, ws, row_position):
c_sizes = self.column_sizes
c_specs = [('empty%s' % i, 1, c_sizes[i], 'text', None)
for i in range(0, len(c_sizes))]
row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
row_position = self.xls_write_row(
ws, row_position, row_data, set_column_size=True)
return row_position
def print_header_titles(self, ws, _p, data, row_position, xlwtlib, _xs):
cell_format = _xs['bold'] + _xs['fill_blue'] + _xs['borders_all']
cell_style = xlwtlib.easyxf(cell_format)
cell_style_center = xlwtlib.easyxf(cell_format + _xs['center'])
c_specs = [
('fy', 1, 0, 'text', _('Fiscal Year'), None, cell_style_center),
('af', 1, 0, 'text', _('Accounts Filter'),
None, cell_style_center),
('df', 1, 0, 'text', _p.filter_form(data) == 'filter_date' and _(
'Dates Filter') or _('Periods Filter'), None,
cell_style_center),
('pf', 1, 0, 'text', _('Partners Filter'),
None, cell_style_center),
('tm', 1, 0, 'text', _('Target Moves'), None, cell_style_center),
('ib', 1, 0, 'text', _('Initial Balance'),
None, cell_style_center),
('coa', 1, 0, 'text', _('Chart of Account'),
None, cell_style_center),
]
row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
row_position = self.xls_write_row(
ws, row_position, row_data, row_style=cell_style)
return row_position
def print_header_data(self, ws, _p, data, row_position, xlwtlib, _xs,
initial_balance_text):
cell_format = _xs['borders_all'] + _xs['wrap'] + _xs['top']
cell_style = xlwtlib.easyxf(cell_format)
cell_style_center = xlwtlib.easyxf(cell_format + _xs['center'])
c_specs = [
('fy', 1, 0, 'text', _p.fiscalyear.name if _p.fiscalyear else '-',
None, cell_style_center),
('af', 1, 0, 'text', _p.accounts(data) and ', '.join(
[account.code for account in _p.accounts(data)]) or _('All'),
None, cell_style_center),
]
df = _('From') + ': '
if _p.filter_form(data) == 'filter_date':
df += _p.start_date if _p.start_date else u''
else:
df += _p.start_period.name if _p.start_period else u''
df += ' ' + _('\nTo') + ': '
if _p.filter_form(data) == 'filter_date':
df += _p.stop_date if _p.stop_date else u''
else:
df += _p.stop_period.name if _p.stop_period else u''
c_specs += [
('df', 1, 0, 'text', df, None, cell_style_center),
('tm', 1, 0, 'text', _p.display_partner_account(
data), None, cell_style_center),
('pf', 1, 0, 'text', _p.display_target_move(
data), None, cell_style_center),
('ib', 1, 0, 'text', initial_balance_text[
_p.initial_balance_mode], None, cell_style_center),
('coa', 1, 0, 'text', _p.chart_account.name,
None, cell_style_center),
]
row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
row_position = self.xls_write_row(
ws, row_position, row_data, row_style=cell_style)
return row_position
def print_comparison_header(self, _xs, xlwtlib, row_position, _p, ws,
initial_balance_text):
cell_format_ct = _xs['bold'] + _xs['fill_blue'] + _xs['borders_all']
cell_style_ct = xlwtlib.easyxf(cell_format_ct)
c_specs = [('ct', 7, 0, 'text', _('Comparisons'))]
row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
row_position = self.xls_write_row(
ws, row_position, row_data, row_style=cell_style_ct)
cell_format = _xs['borders_all'] + _xs['wrap'] + _xs['top']
cell_style_center = xlwtlib.easyxf(cell_format)
for index, params in enumerate(_p.comp_params):
c_specs = [
('c', 2, 0, 'text', _('Comparison') + str(index + 1) +
' (C' + str(index + 1) + ')')]
if params['comparison_filter'] == 'filter_date':
c_specs += [('f', 2, 0, 'text', _('Dates Filter') + ': ' +
_p.formatLang(params['start'], date=True) + ' - '
+ _p.formatLang(params['stop'], date=True))]
elif params['comparison_filter'] == 'filter_period':
c_specs += [('f', 2, 0, 'text', _('Periods Filter') +
': ' + params['start'].name + ' - ' +
params['stop'].name)]
else:
c_specs += [('f', 2, 0, 'text', _('Fiscal Year') +
': ' + params['fiscalyear'].name)]
c_specs += [('ib', 2, 0, 'text', _('Initial Balance') +
': ' +
initial_balance_text[params['initial_balance_mode']])]
row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
row_position = self.xls_write_row(
ws, row_position, row_data, row_style=cell_style_center)
return row_position
def print_account_header(self, ws, _p, _xs, xlwtlib, row_position):
cell_format = _xs['bold'] + _xs['fill'] + \
_xs['borders_all'] + _xs['wrap'] + _xs['top']
cell_style = xlwtlib.easyxf(cell_format)
cell_style_right = xlwtlib.easyxf(cell_format + _xs['right'])
cell_style_center = xlwtlib.easyxf(cell_format + _xs['center'])
if len(_p.comp_params) == 2:
account_span = 3
else:
account_span = _p.initial_balance_mode and 2 or 3
c_specs = [
('account', account_span, 0, 'text', _('Account / Partner Name')),
('code', 1, 0, 'text', _('Code / Ref')),
]
if _p.comparison_mode == 'no_comparison':
if _p.initial_balance_mode:
c_specs += [('init_bal', 1, 0, 'text',
_('Initial Balance'), None, cell_style_right)]
c_specs += [
('debit', 1, 0, 'text', _('Debit'), None, cell_style_right),
('credit', 1, 0, 'text', _('Credit'), None, cell_style_right),
]
if _p.comparison_mode == 'no_comparison' or not _p.fiscalyear:
c_specs += [('balance', 1, 0, 'text',
_('Balance'), None, cell_style_right)]
else:
c_specs += [('balance_fy', 1, 0, 'text', _('Balance %s') %
_p.fiscalyear.name, None, cell_style_right)]
if _p.comparison_mode in ('single', 'multiple'):
for index in range(_p.nb_comparison):
if _p.comp_params[index][
'comparison_filter'] == 'filter_year' \
and _p.comp_params[index].get('fiscalyear', False):
c_specs += [('balance_%s' % index, 1, 0, 'text',
_('Balance %s') %
_p.comp_params[index]['fiscalyear'].name,
None, cell_style_right)]
else:
c_specs += [('balance_%s' % index, 1, 0, 'text',
_('Balance C%s') % (index + 1), None,
cell_style_right)]
if _p.comparison_mode == 'single':
c_specs += [
('diff', 1, 0, 'text', _('Difference'),
None, cell_style_right),
('diff_percent', 1, 0, 'text',
_('% Difference'), None, cell_style_center),
]
row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
row_position = self.xls_write_row(
ws, row_position, row_data, row_style=cell_style)
return row_position
def print_row_code_account(self, ws, current_account, row_position, _xs,
xlwtlib):
cell_format = _xs['xls_title'] + _xs['bold'] + \
_xs['fill'] + _xs['borders_all']
cell_style = xlwtlib.easyxf(cell_format)
c_specs = [
('acc_title', 7, 0, 'text', ' - '.join([current_account.code,
current_account.name])), ]
row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
row_position = self.xls_write_row(
ws, row_position, row_data, cell_style)
return row_position
def print_account_totals(self, _xs, xlwtlib, ws, row_start_account,
row_position, current_account, _p):
cell_format = _xs['bold'] + _xs['fill'] + \
_xs['borders_all'] + _xs['wrap'] + _xs['top']
cell_style = xlwtlib.easyxf(cell_format)
cell_style_decimal = xlwtlib.easyxf(
cell_format + _xs['right'],
num_format_str=report_xls.decimal_format)
c_specs = [
('acc_title', 2, 0, 'text', current_account.name),
('code', 1, 0, 'text', current_account.code),
]
for column in range(3, 7):
            # in case of a single comparison, column 6 will contain
            # percentages
if (_p.comparison_mode == 'single' and column == 6):
total_diff = rowcol_to_cell(row_position, column - 1)
total_balance = rowcol_to_cell(row_position, column - 2)
account_formula = 'Round(' + total_diff + \
'/' + total_balance + '*100;0)'
else:
account_start = rowcol_to_cell(row_start_account, column)
account_end = rowcol_to_cell(row_position - 1, column)
account_formula = 'Round(SUM(' + \
account_start + ':' + account_end + ');2)'
c_specs += [('total%s' % column, 1, 0, 'text', None,
account_formula, None, cell_style_decimal)]
row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
row_position = self.xls_write_row(
ws, row_position, row_data, cell_style)
return row_position + 1
def generate_xls_report(self, _p, _xs, data, objects, wb):
# Initialisations
ws = wb.add_sheet(_p.report_name[:31])
ws.panes_frozen = True
ws.remove_splits = True
ws.portrait = 0 # Landscape
ws.fit_width_to_pages = 1
row_pos = 0
ws.header_str = self.xls_headers['standard']
ws.footer_str = self.xls_footers['standard']
# Print Title
row_pos = self.print_title(ws, _p, row_pos, xlwt, _xs)
# Print empty row to define column sizes
row_pos = self.print_empty_row(ws, row_pos)
# Print Header Table titles (Fiscal Year - Accounts Filter - Periods
# Filter...)
row_pos = self.print_header_titles(ws, _p, data, row_pos, xlwt, _xs)
initial_balance_text = {
'initial_balance': _('Computed'),
'opening_balance': _('Opening Entries'),
False: _('No')} # cf. account_report_partner_balance.mako
# Print Header Table data
row_pos = self.print_header_data(
ws, _p, data, row_pos, xlwt, _xs, initial_balance_text)
# Print comparison header table
if _p.comparison_mode in ('single', 'multiple'):
row_pos += 1
row_pos = self.print_comparison_header(
_xs, xlwt, row_pos, _p, ws, initial_balance_text)
# Freeze the line
ws.set_horz_split_pos(row_pos)
# cell styles for account data
regular_cell_format = _xs['borders_all']
regular_cell_style = xlwt.easyxf(regular_cell_format)
regular_cell_style_decimal = xlwt.easyxf(
regular_cell_format + _xs['right'],
num_format_str=report_xls.decimal_format)
row_pos += 1
for current_account in objects:
partners_order = current_account.partners_order
# do not display accounts without partners
if not partners_order:
continue
comparisons = current_account.comparisons
# in multiple columns mode, we do not want to print accounts
# without any rows
if _p.comparison_mode in ('single', 'multiple'):
all_comparison_lines = [comp['partners_amounts'][partner_id[1]]
for partner_id in partners_order
for comp in comparisons]
if not display_line(all_comparison_lines):
continue
current_partner_amounts = current_account.partners_amounts
if _p.comparison_mode in ('single', 'multiple'):
comparison_total = {}
for i, comp in enumerate(comparisons):
comparison_total[i] = {'balance': 0.0}
# print row: Code - Account name
row_pos = self.print_row_code_account(
ws, current_account, row_pos, _xs, xlwt)
row_account_start = row_pos
# Print row: Titles "Account/Partner Name-Code/ref-Initial
# Balance-Debit-Credit-Balance" or "Account/Partner
# Name-Code/ref-Balance Year-Balance Year2-Balance C2-Balance C3"
row_pos = self.print_account_header(ws, _p, _xs, xlwt, row_pos)
for (partner_code_name, partner_id, partner_ref, partner_name) \
in partners_order:
partner = current_partner_amounts.get(partner_id, {})
# in single mode, we have to display all the partners even if
# their balance is 0.0 because the initial balance should match
# with the previous year closings
# in multiple columns mode, we do not want to print partners
# which have a balance at 0.0 in each comparison column
if _p.comparison_mode in ('single', 'multiple'):
all_comparison_lines = [comp['partners_amounts']
[partner_id]
for comp in comparisons
if comp['partners_amounts'].
get(partner_id)]
if not display_line(all_comparison_lines):
continue
# display data row
if len(_p.comp_params) == 2:
account_span = 3
else:
account_span = _p.initial_balance_mode and 2 or 3
c_specs = [('acc_title', account_span, 0, 'text',
partner_name if partner_name else
_('Unallocated'))]
c_specs += [('partner_ref', 1, 0, 'text',
partner_ref if partner_ref else '')]
if _p.comparison_mode == 'no_comparison':
bal_formula = ''
if _p.initial_balance_mode:
init_bal_cell = rowcol_to_cell(row_pos, 3)
bal_formula = init_bal_cell + '+'
debit_col = 4
c_specs += [
('init_bal', 1, 0, 'number', partner.get(
'init_balance', 0.0), None,
regular_cell_style_decimal),
]
else:
debit_col = 3
c_specs += [
('debit', 1, 0, 'number', partner.get('debit', 0.0),
None, regular_cell_style_decimal),
('credit', 1, 0, 'number', partner.get('credit', 0.0),
None, regular_cell_style_decimal),
]
debit_cell = rowcol_to_cell(row_pos, debit_col)
credit_cell = rowcol_to_cell(row_pos, debit_col + 1)
bal_formula += debit_cell + '-' + credit_cell
c_specs += [('bal', 1, 0, 'number', None,
bal_formula, regular_cell_style_decimal), ]
else:
c_specs += [('bal', 1, 0, 'number', partner.get('balance',
0.0),
None, regular_cell_style_decimal), ]
if _p.comparison_mode in ('single', 'multiple'):
for i, comp in enumerate(comparisons):
comp_partners = comp['partners_amounts']
balance = diff = percent_diff = 0
if comp_partners.get(partner_id):
balance = comp_partners[partner_id]['balance']
diff = comp_partners[partner_id]['diff']
percent_diff = comp_partners[
partner_id]['percent_diff']
comparison_total[i]['balance'] += balance
c_specs += [('balance_%s' % i, 1, 0, 'number',
balance, None,
regular_cell_style_decimal), ]
# no diff in multiple comparisons because it shows too much
# data
if _p.comparison_mode == 'single':
c_specs += [('balance_diff', 1, 0, 'number',
diff, None, regular_cell_style_decimal), ]
if percent_diff is False:
c_specs += [('balance', 1, 0, 'number',
diff, None, regular_cell_style_decimal), ]
else:
c_specs += [('perc_diff', 1, 0, 'number',
int(round(percent_diff))), ]
row_data = self.xls_row_template(
c_specs, [x[0] for x in c_specs])
row_pos = self.xls_write_row(
ws, row_pos, row_data, regular_cell_style)
row_pos = self.print_account_totals(
_xs, xlwt, ws, row_account_start, row_pos, current_account, _p)
partners_balance_xls('report.account.account_report_partner_balance_xls',
'account.account',
parser=PartnerBalanceWebkit)
|
rschnapka/account-financial-reporting
|
account_financial_report_webkit_xls/report/partners_balance_xls.py
|
Python
|
agpl-3.0
| 20,868
|
# MySQL-specific implementations for south
# Original author: Andrew Godwin
# Patches by: F. Gabriel Gosselin <gabrielNOSPAM@evidens.ca>
from django.db import connection
from django.conf import settings
from south.db import generic
from south.db.generic import DryRunError, INVALID
from south.logger import get_logger
def delete_column_constraints(func):
"""
Decorates column operation functions for MySQL.
Deletes the constraints from the database and clears local cache.
"""
def _column_rm(self, table_name, column_name, *args, **opts):
# Delete foreign key constraints
try:
self.delete_foreign_key(table_name, column_name)
except ValueError:
            pass  # No foreign key on this column; fine, since it is checked first
# Delete constraints referring to this column
try:
reverse = self._lookup_reverse_constraint(table_name, column_name)
for cname, rtable, rcolumn in reverse:
self.delete_foreign_key(rtable, rcolumn)
except DryRunError:
pass
return func(self, table_name, column_name, *args, **opts)
return _column_rm
def copy_column_constraints(func):
"""
Decorates column operation functions for MySQL.
Determines existing constraints and copies them to a new column
"""
def _column_cp(self, table_name, column_old, column_new, *args, **opts):
# Copy foreign key constraint
try:
constraint = self._find_foreign_constraints(table_name, column_old)[0]
(ftable, fcolumn) = self._lookup_constraint_references(table_name, constraint)
if ftable and fcolumn:
fk_sql = self.foreign_key_sql(
table_name, column_new, ftable, fcolumn)
get_logger().debug("Foreign key SQL: " + fk_sql)
self.add_deferred_sql(fk_sql)
except IndexError:
pass # No constraint exists so ignore
except DryRunError:
pass
# Copy constraints referring to this column
try:
reverse = self._lookup_reverse_constraint(table_name, column_old)
for cname, rtable, rcolumn in reverse:
fk_sql = self.foreign_key_sql(
rtable, rcolumn, table_name, column_new)
self.add_deferred_sql(fk_sql)
except DryRunError:
pass
return func(self, table_name, column_old, column_new, *args, **opts)
return _column_cp
def invalidate_table_constraints(func):
"""
    For MySQL we grab all table constraints simultaneously, so clearing the
    whole cache is effective.
    It also solves the issue of invalidating constraints on referred tables.
"""
def _cache_clear(self, table, *args, **opts):
db_name = self._get_setting('NAME')
if db_name in self._constraint_cache:
del self._constraint_cache[db_name]
if db_name in self._reverse_cache:
del self._reverse_cache[db_name]
if db_name in self._constraint_references:
del self._constraint_references[db_name]
return func(self, table, *args, **opts)
return _cache_clear
class DatabaseOperations(generic.DatabaseOperations):
"""
MySQL implementation of database operations.
    MySQL has no DDL transaction support. This can confuse people when they ask
how to roll back - hence the dry runs, etc., found in the migration code.
"""
backend_name = "mysql"
alter_string_set_type = ''
alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s'
delete_primary_key_sql = "ALTER TABLE %(table)s DROP PRIMARY KEY"
delete_foreign_key_sql = "ALTER TABLE %(table)s DROP FOREIGN KEY %(constraint)s"
allows_combined_alters = False
has_ddl_transactions = False
has_check_constraints = False
delete_unique_sql = "ALTER TABLE %s DROP INDEX %s"
rename_table_sql = "RENAME TABLE %s TO %s;"
geom_types = ['geometry', 'point', 'linestring', 'polygon']
text_types = ['text', 'blob',]
def __init__(self, db_alias):
self._constraint_references = {}
self._reverse_cache = {}
super(DatabaseOperations, self).__init__(db_alias)
def _is_valid_cache(self, db_name, table_name):
cache = self._constraint_cache
        # We cache the whole db, so if any tables are cached, table_name is valid
return db_name in cache and cache[db_name].get(table_name, None) is not INVALID
def _fill_constraint_cache(self, db_name, table_name):
        # For MySQL, grab all constraints for this database at once; it's just
        # as cheap as fetching them for a single column.
self._constraint_cache[db_name] = {}
self._constraint_cache[db_name][table_name] = {}
self._reverse_cache[db_name] = {}
self._constraint_references[db_name] = {}
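        # Cache layout built below (sketch):
        #   _constraint_cache[db][table][column] = {(kind, constraint), ...}
        #   _constraint_references[db][(table, constraint)] = (ref_table, ref_column)
        #   _reverse_cache[db][ref_table][ref_column] = {(constraint, table, column), ...}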
name_query = """
SELECT kc.`constraint_name`, kc.`column_name`, kc.`table_name`,
kc.`referenced_table_name`, kc.`referenced_column_name`
FROM information_schema.key_column_usage AS kc
WHERE
kc.table_schema = %s
"""
rows = self.execute(name_query, [db_name])
if not rows:
return
cnames = {}
for constraint, column, table, ref_table, ref_column in rows:
key = (table, constraint)
cnames.setdefault(key, set())
cnames[key].add((column, ref_table, ref_column))
type_query = """
SELECT c.constraint_name, c.table_name, c.constraint_type
FROM information_schema.table_constraints AS c
WHERE
c.table_schema = %s
"""
rows = self.execute(type_query, [db_name])
for constraint, table, kind in rows:
key = (table, constraint)
self._constraint_cache[db_name].setdefault(table, {})
try:
cols = cnames[key]
except KeyError:
cols = set()
for column_set in cols:
(column, ref_table, ref_column) = column_set
self._constraint_cache[db_name][table].setdefault(column, set())
if kind == 'FOREIGN KEY':
self._constraint_cache[db_name][table][column].add((kind,
constraint))
# Create constraint lookup, see constraint_references
self._constraint_references[db_name][(table,
constraint)] = (ref_table, ref_column)
# Create reverse table lookup, reverse_lookup
self._reverse_cache[db_name].setdefault(ref_table, {})
self._reverse_cache[db_name][ref_table].setdefault(ref_column,
set())
self._reverse_cache[db_name][ref_table][ref_column].add(
(constraint, table, column))
else:
self._constraint_cache[db_name][table][column].add((kind,
constraint))
def connection_init(self):
"""
Run before any SQL to let database-specific config be sent as a command,
e.g. which storage engine (MySQL) or transaction serialisability level.
"""
cursor = self._get_connection().cursor()
if self._has_setting('STORAGE_ENGINE') and self._get_setting('STORAGE_ENGINE'):
cursor.execute("SET storage_engine=%s;" % self._get_setting('STORAGE_ENGINE'))
# Turn off foreign key checks, and turn them back on at the end
cursor.execute("SET FOREIGN_KEY_CHECKS=0;")
self.deferred_sql.append("SET FOREIGN_KEY_CHECKS=1;")
@copy_column_constraints
@delete_column_constraints
@invalidate_table_constraints
def rename_column(self, table_name, old, new):
if old == new or self.dry_run:
return []
rows = [x for x in self.execute('DESCRIBE %s' % (self.quote_name(table_name),)) if x[0] == old]
if not rows:
raise ValueError("No column '%s' in '%s'." % (old, table_name))
params = (
self.quote_name(table_name),
self.quote_name(old),
self.quote_name(new),
rows[0][1],
rows[0][2] == "YES" and "NULL" or "NOT NULL",
rows[0][4] and "DEFAULT " or "",
rows[0][4] and "%s" or "",
rows[0][5] or "",
)
sql = 'ALTER TABLE %s CHANGE COLUMN %s %s %s %s %s %s %s;' % params
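        # e.g. (hypothetical table/column names): ALTER TABLE `book` CHANGE
        # COLUMN `titel` `title` varchar(100) NOT NULL DEFAULT %s ;
        # The default value, when present, is passed as a query parameter below.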
if rows[0][4]:
self.execute(sql, (rows[0][4],))
else:
self.execute(sql)
@delete_column_constraints
def delete_column(self, table_name, name):
super(DatabaseOperations, self).delete_column(table_name, name)
@invalidate_table_constraints
def rename_table(self, old_table_name, table_name):
super(DatabaseOperations, self).rename_table(old_table_name,
table_name)
@invalidate_table_constraints
def delete_table(self, table_name):
super(DatabaseOperations, self).delete_table(table_name)
def _lookup_constraint_references(self, table_name, cname):
"""
Provided an existing table and constraint, returns tuple of (foreign
table, column)
"""
db_name = self._get_setting('NAME')
try:
return self._constraint_references[db_name][(table_name, cname)]
except KeyError:
return None
def _lookup_reverse_constraint(self, table_name, column_name=None):
"""Look for the column referenced by a foreign constraint"""
db_name = self._get_setting('NAME')
if self.dry_run:
raise DryRunError("Cannot get constraints for columns.")
if not self._is_valid_cache(db_name, table_name):
# Piggy-back on lookup_constraint, ensures cache exists
self.lookup_constraint(db_name, table_name)
try:
table = self._reverse_cache[db_name][table_name]
            if column_name is None:
return [(y, tuple(y)) for x, y in table.items()]
else:
return tuple(table[column_name])
except KeyError, e:
return []
def _field_sanity(self, field):
"""
This particular override stops us sending DEFAULTs for BLOB/TEXT columns.
"""
        # MySQL does not support defaults for geometry columns either
type = self._db_type_for_alter_column(field).lower()
is_geom = True in [ type.find(t) > -1 for t in self.geom_types ]
is_text = True in [ type.find(t) > -1 for t in self.text_types ]
if is_geom or is_text:
field._suppress_default = True
return field
def _alter_set_defaults(self, field, name, params, sqls):
"""
MySQL does not support defaults on text or blob columns.
"""
type = params['type']
        # MySQL does not support defaults for geometry columns either
is_geom = True in [ type.find(t) > -1 for t in self.geom_types ]
is_text = True in [ type.find(t) > -1 for t in self.text_types ]
if not is_geom and not is_text:
super(DatabaseOperations, self)._alter_set_defaults(field, name, params, sqls)
|
MechanisM/musicdb
|
contrib/south/db/mysql.py
|
Python
|
agpl-3.0
| 11,404
|
# See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class HotelServices(models.Model):
_name = "hotel.services"
_description = "Hotel Services and its charges"
product_id = fields.Many2one(
"product.product",
"Service_id",
required=True,
ondelete="cascade",
delegate=True,
)
categ_id = fields.Many2one(
"hotel.service.type", string="Service Category", required=True
)
product_manager = fields.Many2one("res.users", string="Product Manager")
|
OCA/vertical-hotel
|
hotel/models/hotel_services.py
|
Python
|
agpl-3.0
| 562
|
from collections import OrderedDict
from django.contrib.auth.models import User, Group
from django.apps import apps
from django.contrib.admin import AdminSite
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from tk.chunks import admin as ca
from tk.chunks import models as co
from tk.material import admin as ma
from tk.material import models as mo
class TKAdmin(AdminSite):
site_header = _("TK admin")
site_title = _("TK admin")
index_template = 'admin/tk_index.html'
grouping = [{
'name': _("Material"),
'models': [
'material.Activity',
'material.Video',
'material.Reading',
'material.Link',
]}, {
'name': _("Material classification"),
'models': [
'material.Subject',
'material.GroupFeature',
'material.Location',
]}, {
'name': _("Others"),
'models': [
'chunks.Chunk',
'auth.User',
'auth.Group',
]}
]
def get_app_list(self, request):
# Build the original app list so that we take into account user perms
app_list = super().get_app_list(request)
for g in self.grouping:
models = [ self._get_model(m, app_list) for m in g['models'] ]
models = [ m for m in models if m is not None ]
if models:
yield {'name': g['name'], 'models': models}
def _get_model(self, model, app_list):
app_name, model_name = model.split('.')
for a in app_list:
if a['app_label'] == app_name:
for m in a['models']:
if m['object_name'] == model_name:
return m
def index(self, request, extra_context=None):
if extra_context is None:
extra_context = {}
# Add notifications about pending approval requests
Approval = apps.get_model('material', 'Approval')
extra_context['approvals_new'] = Approval.objects.filter(
timestamp__gte=request.user.last_login, approved=False)
extra_context['approvals_unapproved'] = Approval.objects.filter(
approved=False)
return super().index(request, extra_context)
def app_index(self, request, app_label, extra_context=None):
# Disallow app indices: redirect to main index
index_path = reverse('admin:index', current_app=self.name)
return HttpResponseRedirect(index_path)
tkadmin = TKAdmin()
tkadmin.register(mo.Subject, ma.LocalizedAdmin)
tkadmin.register(mo.GroupFeature, ma.LocalizedAdmin)
tkadmin.register(mo.Location, ma.LocalizedAdmin)
tkadmin.register(mo.Approval, ma.ApprovalAdmin)
tkadmin.register(mo.Activity, ma.ActivityAdmin)
tkadmin.register(mo.Reading, ma.ReadingAdmin)
tkadmin.register(mo.Video, ma.VideoAdmin)
tkadmin.register(mo.Link, ma.LinkAdmin)
tkadmin.register(co.Chunk, ca.ChunkAdmin)
tkadmin.register(User)
tkadmin.register(Group)
|
GISAElkartea/tresna-kutxa
|
tk/admin.py
|
Python
|
agpl-3.0
| 3,075
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Didotech Inc. (<http://www.didotech.com>)
# All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import time
_logger = logging.getLogger(__name__)
from report import report_sxw
class Parser(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(Parser, self).__init__(cr, uid, name, context)
self.localcontext.update({
'time': time,
'create_product_tree': self.create_product_tree,
})
self.context = context
self.sale_tree = False
def create_product_tree(self, product_line):
sales = []
for line in product_line:
if line.selected:
for number in range(0, line.qty):
sales.append(line)
self.sale_tree = sales
return sales
|
iw3hxn/LibrERP
|
garazd_product_label/report/product_label_parser.py
|
Python
|
agpl-3.0
| 1,674
|
import os
from django.core import management
from django.test import TestCase
class CreateDevDataSmokeTest(TestCase):
def test_createdevdata_works(self):
"""The createdevdata management command should run successfully,
without raising any exceptions."""
# Write stdout to /dev/null so as not to clutter the output
# from the tests.
with open(os.devnull, 'w') as devnull:
management.call_command('createdevdata', stdout=devnull)
|
freedomofpress/securethenews
|
home/management/commands/tests/test_createdevdata.py
|
Python
|
agpl-3.0
| 488
|
from .RegularVerb import RegularVerb
from .ToBe import ToBe
__all__ = [
"RegularVerb",
"ToBe"
]
|
etkirsch/legends-of-erukar
|
erukar/ext/nlg/verbs/__init__.py
|
Python
|
agpl-3.0
| 106
|
import re
from django.utils.translation import ugettext_lazy as _
from django.forms import ModelForm
from django import forms
from django.forms.widgets import ClearableFileInput
from etruekko.truekko.models import UserProfile
from etruekko.truekko.models import Group
from etruekko.truekko.models import User
from etruekko.truekko.models import Membership
from etruekko.truekko.models import Transfer
from etruekko.truekko.models import Item
from etruekko.truekko.models import Tag
from etruekko.truekko.models import ItemTagged
from etruekko.truekko.models import WallMessage
from etruekko.truekko.models import Commitment
from etruekko.truekko.models import PostalAddress
from etruekko.globaltags.tags import tooltip
class CustomImageWidget(ClearableFileInput):
template_with_initial = u'%(clear_template)s<br />%(input_text)s: %(input)s'
class UserProfileForm(ModelForm):
required_css_class = 'required'
email = forms.EmailField()
class Meta:
model = UserProfile
fields = ('photo', 'name', 'email', 'location', 'web', 'description', 'receive_notification')
widgets = {'photo': CustomImageWidget()}
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
self.fields['email'].initial = self.instance.user.email
def save(self):
instance = super(UserProfileForm, self).save(commit=False)
instance.user.email = self.cleaned_data['email']
instance.user.save()
instance.save()
class GroupForm(ModelForm):
required_css_class = 'required'
class Meta:
model = Group
fields = ('photo', 'name', 'location', 'web', 'description')
widgets = {'photo': CustomImageWidget()}
class RegisterForm(forms.Form):
required_css_class = 'required'
username = forms.CharField(label=_("Username"))
name = forms.CharField(label=_("Name"))
location = forms.CharField(label=_("Location"))
email = forms.EmailField(label=_("Email"))
password = forms.CharField(label=_("Password"), widget=forms.PasswordInput)
password2 = forms.CharField(label=_("Password confirm"), widget=forms.PasswordInput)
accept_terms = forms.BooleanField(label=_("I accept"),
help_text=_('You should accept '
'the <a href="/terms">terms of use</a> '
'and the <a href="/privacy">privacy policy</a> '
'to join to etruekko'),
error_messages={'required': _("You should accept the terms of use and the privacy policy")},
required=True)
def clean_username(self):
username = self.cleaned_data.get("username")
        if not re.match(r"^\w+$", username):
raise forms.ValidationError(_("Username isn't valid, use only letters, numbers or _"))
if User.objects.filter(username=username).count():
raise forms.ValidationError(_("Username exists"))
return username
def clean_email(self):
email = self.cleaned_data.get("email")
if User.objects.filter(email=email).count():
raise forms.ValidationError(_("User exists with the same email"))
return email
def clean(self):
cleaned_data = self.cleaned_data
p1 = cleaned_data.get("password")
p2 = cleaned_data.get("password2")
if p1 != p2:
raise forms.ValidationError(_("Password and password confirm didn't match"))
return cleaned_data
def save(self, group=None):
data = self.cleaned_data
u = User(username=data["username"], email=data["email"], is_active=True)
u.set_password(data["password"])
u.save()
p = u.get_profile()
p.name = data["name"]
p.location = data["location"]
p.save()
if group:
m = Membership(user=u, group=group, role="REQ")
m.save()
class TransferDirectForm(forms.Form):
required_css_class = 'required'
concept = forms.CharField(label=_("Concept"), max_length=500)
credits = forms.IntegerField(label=_("Credits"))
def __init__(self, user_from, user_to, *args, **kwargs):
self.user_from = user_from
self.user_to = user_to
super(TransferDirectForm, self).__init__(*args, **kwargs)
def clean_credits(self):
credits = self.cleaned_data.get('credits')
if credits < 0:
            raise forms.ValidationError(_("Can't make this transfer, negative credits aren't allowed"))
if credits > self.user_from.get_profile().credits:
raise forms.ValidationError(_("Can't make this transfer, insufficient credits"))
return credits
def save(self):
data = self.cleaned_data
t = Transfer(user_from=self.user_from,
user_to=self.user_to,
credits=data['credits'],
concept=data['concept'])
t.save()
class ItemAddForm(ModelForm):
required_css_class = 'required'
class Meta:
model = Item
fields = ('name', 'type', 'offer_or_demand', 'description', 'price', 'price_type')
def quantity(self):
return self.instance.quantity
class WallMessageForm(ModelForm):
required_css_class = 'required'
class Meta:
model = WallMessage
fields = ('msg', 'private')
class ContactForm(forms.Form):
required_css_class = 'required'
sender = forms.EmailField(label=_("Email"))
subject = forms.CharField(label=_("Subject"), max_length=100)
phone = forms.CharField(label=_("Phone"), required=False, max_length=100)
message = forms.CharField(label=_("Message"), widget=forms.Textarea)
cc_myself = forms.BooleanField(label=_("Send a copy to myself"), required=False)
def send(self):
subject = self.cleaned_data['subject']
message = self.cleaned_data['message']
sender = self.cleaned_data['sender']
phone = self.cleaned_data['phone']
cc_myself = self.cleaned_data['cc_myself']
msg = _("Phone: %(number)s\n\n%(msg)s") % dict(number=phone, msg=message)
recipients = ['info@etruekko.com']
if cc_myself:
recipients.append(sender)
from django.core.mail import send_mail
send_mail(subject, msg, sender, recipients)
class CommitmentForm(ModelForm):
required_css_class = 'required'
#def __init__(self, users, *args, **kwargs):
# super(CommitmentForm, self).__init__(*args, **kwargs)
# self.fields['user_from'].queryset = User.objects.filter(pk__in=users)
# self.fields['user_to'].queryset = User.objects.filter(pk__in=users)
class Meta:
model = Commitment
#fields = ('user_from', 'user_to', 'comment')
fields = ('comment', )
class PostalForm(ModelForm):
required_css_class = 'required'
class Meta:
model = PostalAddress
exclude = ('user',)
|
wadobo/etruekko
|
etruekko/truekko/forms.py
|
Python
|
agpl-3.0
| 7,037
|
# -*- coding:utf-8 -*-
#
#
# Copyright (C) 2013 Michael Telahun Makonnen <mmakonnen@gmail.com>.
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from openerp.osv import orm
from openerp.tools.translate import _
from openerp import models, fields, api
class hr_infraction_category(models.Model):
_name = 'hr.infraction.category'
_description = 'Infraction Type'
name = fields.Char(
'Name',
required=True
)
code = fields.Char(
'Code',
required=True
)
class hr_infraction(models.Model):
_name = 'hr.infraction'
_description = 'Infraction'
_inherit = ['mail.thread', 'ir.needaction_mixin']
name = fields.Char(
'Subject',
size=256,
required=True,
readonly=True,
states={'draft': [('readonly', False)]}
)
date = fields.Date(
'Date',
required=True,
readonly=True,
        default=fields.Date.today,
states={'draft': [('readonly', False)]}
)
employee_id = fields.Many2one(
'hr.employee',
'Employee',
required=True,
readonly=True,
states={'draft': [('readonly', False)]}
)
category_id = fields.Many2one(
'hr.infraction.category',
'Category',
required=True,
readonly=True,
states={'draft': [('readonly', False)]},
    )
action_ids = fields.One2many(
'hr.infraction.action',
'infraction_id',
'Actions',
readonly=True
)
memo = fields.Text(
'Description',
readonly=True,
states={'draft': [('readonly', False)]}
)
state = fields.Selection(
[
('draft', 'Draft'),
('confirm', 'Confirmed'),
('action', 'Actioned'),
('noaction', 'No Action'),
],
'State',
readonly=True,
        default='draft'
    )
_track = {
'state': {
'hr_infraction.mt_alert_infraction_confirmed': (
lambda self, cr, u, obj, ctx=None: obj['state'] == 'confirm'),
'hr_infraction.mt_alert_infraction_action': (
lambda self, cr, u, obj, ctx=None: obj['state'] == 'action'),
'hr_infraction.mt_alert_infraction_noaction': (
lambda self, cr, u, obj, ctx=None: obj['state'] == 'noaction'),
},
}
@api.model
    def _needaction_domain_get(self):
        # Only HR managers need to act on confirmed infractions.
        if self.env.user.has_group('base.group_hr_manager'):
            return [('state', '=', 'confirm')]
        return False
@api.multi
def unlink(self):
        for infraction in self:
if infraction.state not in ['draft']:
raise orm.except_orm(
_('Error'),
_('Infractions that have progressed beyond "Draft" state '
'may not be removed.')
)
return super(hr_infraction, self).unlink()
@api.onchange('category_id')
    def onchange_category(self):
        self.name = self.category_id.name if self.category_id else False
ACTION_TYPE_SELECTION = [
('warning_verbal', 'Verbal Warning'),
('warning_letter', 'Written Warning'),
('transfer', 'Transfer'),
('suspension', 'Suspension'),
('dismissal', 'Dismissal'),
]
class hr_infraction_action(models.Model):
_name = 'hr.infraction.action'
_description = 'Action Based on Infraction'
infraction_id = fields.Many2one(
'hr.infraction',
'Infraction',
ondelete='cascade',
required=True,
readonly=True
)
type = fields.Selection(
ACTION_TYPE_SELECTION,
'Type',
required=True
)
memo = fields.Text(
'Notes'
)
employee_id = fields.Many2one(
        'hr.employee',
        'Employee',
        related='infraction_id.employee_id',
readonly=True
)
warning_id = fields.Many2one(
'hr.infraction.warning',
'Warning',
readonly=True
)
transfer_id = fields.Many2one(
'hr.department.transfer',
'Transfer',
readonly=True
)
_rec_name = 'type'
@api.multi
def unlink(self):
        for action in self:
if action.infraction_id.state not in ['draft']:
raise orm.except_orm(
_('Error'),
_('Actions belonging to Infractions not in "Draft" state '
'may not be removed.')
)
        return super(hr_infraction_action, self).unlink()
class hr_warning(models.Model):
_name = 'hr.infraction.warning'
_description = 'Employee Warning'
name = fields.Char(
'Subject',
size=256
)
    date = fields.Date(
        'Date Issued',
        default=fields.Date.today
    )
type = fields.Selection(
[
('verbal', 'Verbal'),
('written', 'Written'),
],
'Type',
required=True,
        default='written'
)
action_id = fields.Many2one(
'hr.infraction.action',
'Action',
ondelete='cascade',
readonly=True
)
infraction_id = fields.Many2one(
        'hr.infraction',
        related='action_id.infraction_id',
string='Infraction',
readonly=True
)
employee_id = fields.Many2one(
        'hr.employee',
        related='infraction_id.employee_id',
string='Employee',
readonly=True
)
@api.multi
def unlink(self):
        for warning in self:
if (warning.action_id
and warning.action_id.infraction_id.state != 'draft'):
raise orm.except_orm(
_('Error'),
_('Warnings attached to Infractions not in "Draft" state '
'may not be removed.')
)
        return super(hr_warning, self).unlink()
class hr_employee(models.Model):
_name = 'hr.employee'
_inherit = 'hr.employee'
infraction_ids = fields.One2many(
'hr.infraction',
'employee_id',
'Infractions',
readonly=True
)
infraction_action_ids = fields.One2many(
'hr.infraction.action',
'employee_id',
'Disciplinary Actions',
readonly=True
)
|
iDTLabssl/hr
|
hr_unported/hr_infraction/hr_infraction.py
|
Python
|
agpl-3.0
| 7,920
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
from frappe.utils import cstr, flt, getdate, comma_and, nowdate, cint, now, nowtime , get_datetime
from erpnext.accounts.accounts_custom_methods import delte_doctype_data, prepare_serial_no_list, check_for_reassigned, update_status_to_completed, stock_entry_for_out, add_to_serial_no, get_idx_for_serialNo, open_next_branch
from tools.custom_data_methods import get_user_branch, get_branch_cost_center, get_branch_warehouse, update_serial_no, find_next_process
import datetime
from tools.custom_data_methods import generate_barcode
from tools.custom_data_methods import gererate_QRcode
import pdb
class ProcessAllotment(Document):
def validate(self):
# self.assign_task()
# self.update_process_status()
self.make_IssueSTE()
# self.update_task()
self.prepare_for_time_log()
# self.make_auto_ste()
# self.auto_ste_for_trials()
self.procees_allotment_qrcode()
self.procees_allotment_barcode()
self.check_extra_payment()
def check_extra_payment(self):
val = frappe.db.get_value('Costing Item', {'parent':self.item, 'branch': get_user_branch()}, 'max_extra_chg')
if val:
for d in self.get('employee_details'):
if d.tailor_extra_charge=='Yes':
if flt(d.tailor_extra_amt) > flt(val):
frappe.throw(_("Extra Amount can not be greater than {0}").format(val))
def make_IssueSTE(self):
if self.get('issue_raw_material'):
self.create_se(self.get('issue_raw_material'))
return "Done"
def procees_allotment_barcode(self):
if cint(frappe.db.get_value('Global Defaults',None,'barcode'))==1:
if not self.barcode:
#self.barcode=self.name
self.bar= generate_barcode(self.name, self.doctype)
self.barcode = '<img src="/files/Barcode/%s/%s.svg">'%(self.doctype,self.name.replace("/","-"))
def procees_allotment_qrcode(self):
if cint(frappe.db.get_value('Global Defaults',None,'qrcode'))==1:
if not self.qrcode:
self.bar= gererate_QRcode(self.name,self.doctype)
self.qrcode = '<img src="/files/QRCode/%s/%s.png">'%(self.doctype,self.name.replace("/","-"))
def show_trials_details(self):
trials_data = frappe.db.sql("select * from `tabProcess Log` where (ifnull(status,'') = 'Open' or ifnull(status,'')='Closed') and process_name='%s' and process_data = '%s' and trials is not null order by trials"%(self.process, self.name), as_dict=1)
self.set('trials_transaction', [])
for data in trials_data:
td = self.append('trials_transaction', {})
td.trial_no = data.trials
td.status = data.status
td.work_order = data.pr_work_order
def prepare_for_time_log(self):
if self.get('employee_details'):
for data in self.get('employee_details'):
self.validate_trials(data)
self.start_process_for_serialNo(data)
if cint(data.idx) == cint(len(self.get('employee_details'))):
status = 'Closed' if data.employee_status == 'Completed' else 'Open'
frappe.db.sql("update `tabTask` set status ='%s' where name='%s'"%( status, data.tailor_task))
def make_time_log(self, data, task):
tl = frappe.new_doc('Time Log')
tl.from_time = data.tailor_from_time
tl.hours = flt(data.work_completed_time)/60
tl.to_time = datetime.datetime.strptime(tl.from_time, '%Y-%m-%d %H:%M:%S') + datetime.timedelta(hours = flt(tl.hours))
tl.activity_type = self.process
tl.task = task
tl.project = self.sales_invoice_no
tl.save(ignore_permissions=True)
t = frappe.get_doc('Time Log', tl.name)
t.submit()
return tl.name
def start_process_for_serialNo(self, data):
if data.employee_status == 'Assigned':
idx = get_idx_for_serialNo(data, self.pdd, self.process)
details = open_next_branch(self.pdd, idx)
add_to_serial_no(details, self.process_work_order, data.tailor_serial_no, data.qc_required, data.employee_name)
else:
self.update_sn_status(data)
if data.employee_status == 'Completed' and not data.ste_no:
details = find_next_process(self.pdd, self.process, data.tailor_process_trials)
if cint(data.qc_required)==1:
if data.tailor_process_trials and cint(frappe.db.get_value('Trial Dates',{'parent':self.trial_dates, 'trial_no':data.tailor_process_trials,'process':self.process}, 'quality_check')) != 1:
data.ste_no = self.make_ste(details, data)
else:
data.ste_no = self.make_qc(details, data)
else:
data.ste_no = self.make_ste(details, data)
def make_qc(self, details, data):
sn_list = self.get_not_added_sn(data.tailor_serial_no, 'serial_no_data', 'Quality Inspection')
if sn_list:
qi = frappe.new_doc('Quality Inspection')
qi.inspection_type = 'In Process'
qi.report_date = nowdate()
qi.item_code = self.item
qi.inspected_by = frappe.session.user
qi.sample_size = data.assigned_work_qty
qi.customer_name = self.customer_name
qi.sales_invoice_no =self.sales_invoice_no
qi.serial_no_data = sn_list
qi.process = self.process
qi.work_order = self.process_work_order
qi.pdd = self.pdd
qi.trial_no = data.tailor_process_trials
qi.tdd = self.trial_dates
self.qa_specification_details(qi)
qi.save(ignore_permissions=True)
return qi.name
def qa_specification_details(self, obj):
qi_data = frappe.db.sql("""select * from `tabItem Quality Inspection Parameter`
where parent='%s' and qi_process='%s'"""%(self.item, self.process), as_dict=1)
if qi_data:
for data in qi_data:
qa = obj.append('qa_specification_details')
qa.process = data.process
qa.specification = data.specification
return "Done"
def make_ste(self, details, data):
s= {'work_order': self.process_work_order, 'status': 'Release', 'item': self.item, 'trial_no': self.process_trials}
sn_list = self.get_not_added_sn(data.tailor_serial_no, 'serial_no', 'Stock Entry Detail')
if sn_list:
branch, type_of_log = self.get_branch(details, data)
dte_no = stock_entry_for_out(s, branch, sn_list, data.assigned_work_qty, type_of_log)
return dte_no
def get_branch(self, pdlog, args):
type_of_log = 'No'
if pdlog:
branch = pdlog.branch
elif not args.tailor_process_trials:
branch = frappe.db.get_value('Production Dashboard Details', self.pdd, 'end_branch')
if branch:
self.Change_Completed_Status(args, branch) #newly added
type_of_log = 'Delivery'
if args.tailor_process_trials and self.trial_dates:
branch = frappe.db.get_value('Trial Dates', {'parent': self.trial_dates, 'trial_no': args.tailor_process_trials}, 'trial_branch')
type_of_log = 'Trial'
return branch, type_of_log
def Change_Completed_Status(self, args, branch):
if args.tailor_serial_no:
serial_no = cstr(args.tailor_serial_no).split('\n')
for sn in serial_no:
if sn:
frappe.db.sql("update `tabSerial No` set completed = 'Yes' where name = '%s'"%(sn))
def get_not_added_sn(self, sn_list, fieldname, table):
new_sn_list = ''
data = frappe.db.sql(""" select %s from `tab%s` where
work_order = '%s' and docstatus=0"""%(fieldname, table, self.process_work_order), as_list=1)
if data:
for sn in data:
sn = cstr(sn[0]).split('\n')
for s in sn:
if s:
serial_no = self.check_available(s, sn_list)
if new_sn_list:
new_sn_list = new_sn_list + '\n' + serial_no
else:
new_sn_list = serial_no
else:
new_sn_list = sn_list
duplicate_list = new_sn_list.split('\n')
unique_list = set(duplicate_list)
new_sn_list = '\n'.join(unique_list)
return new_sn_list
def check_available(self, serial_no, sn_list):
sn_data = ''
sn_list = cstr(sn_list).split('\n')
for sn in sn_list:
if sn and sn != serial_no:
if sn_data:
sn_data = sn_data + '\n' + sn
else:
sn_data = sn
return sn_data
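    # Note on get_not_added_sn(): serial numbers travel through this module as
    # newline-separated strings, so deduplication is simply split -> set ->
    # join (order is not preserved). A standalone sketch of the same idea:
    #
    #   '\n'.join(set('SN1\nSN2\nSN1'.split('\n')))  # -> 'SN1\nSN2' (any order)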
def update_sn_status(self, args):
if args.tailor_serial_no:
serial_no_list = cstr(args.tailor_serial_no).split('\n')
for serial_no in serial_no_list:
if args.employee_status == 'Completed' or args.employee_status == 'Reassigned' and not args.ste_no:
update_status_to_completed(serial_no, self.name, self.emp_status, args)
def validate_trials(self, args):
if self.process_trials and cint(args.assigned_work_qty) > 1:
frappe.throw(_("Only one serial no is allocated for trial no"))
if args.employee_status == 'Completed' and args.tailor_process_trials:
details = frappe.db.sql("""select name, production_status from `tabTrial Dates` where
parent='%s' and trial_no='%s' and process='%s' """%(self.trial_dates, args.tailor_process_trials,self.process), as_list=1)
if details:
if details[0][1] != 'Closed' and cint(self.qc) != 1:
frappe.db.sql(""" update `tabTrial Dates` set production_status='Closed'
where name='%s' """%(details[0][0]))
if self.pdd:
frappe.db.sql(""" update `tabProcess Log` set completed_status = 'Yes'
where trials=%s and parent = '%s' """%(cint(self.process_trials), self.pdd))
# def make_auto_ste(self):
# if self.process_status == 'Closed':
# self.validate_trials_closed()
# cond = "1=1"
# current_name, next_name = self.get_details(cond)
# target_branch = frappe.db.get_value('Process Log', next_name, 'branch')
# args = {'qty': self.finished_good_qty, 'serial_data': self.serials_data, 'work_order': self.process_work_order, 'item': self.item}
# if get_user_branch() == target_branch:
# self.update_status(current_name, next_name)
# frappe.db.sql("""update `tabProcess Log` set status = 'Open' where name='%s' and trials is null"""%(next_name))
# else:
# parent = self.prepare_stock_entry_for_process(target_branch, args)
# if parent:
# self.update_status(current_name, next_name)
# frappe.msgprint("Created Stock Entry %s"%(parent))
# def validate_trials_closed(self):
# count = frappe.db.sql("select ifnull(count(*),0) from `tabProcess Log` where process_data = '%s' and status = 'Open' and trials is not null"%(self.name), debug=1)
# if count:
# if cint(count[0][0])!=0 and self.process_status == 'Closed':
# frappe.throw(_("You must have to closed all trials"))
# def update_status(self, current_name, next_name):
# frappe.db.sql("""update `tabProcess Log` set status = 'Closed' where name='%s'"""%(current_name))
# def prepare_stock_entry_for_process(self, target_branch, args):
# if self.branch != target_branch and not frappe.db.get_value('Stock Entry Detail', {'work_order': self.process_work_order, 'target_branch':target_branch, 'docstatus':0, 's_warehouse': get_branch_warehouse(self.branch)}, 'name'):
# parent = frappe.db.get_value('Stock Entry Detail', {'target_branch':target_branch, 'docstatus':0, 's_warehouse': get_branch_warehouse(self.branch)}, 'parent')
# if parent:
# st = frappe.get_doc('Stock Entry', parent)
# self.stock_entry_of_child(st, args, target_branch)
# st.save(ignore_permissions= True)
# else:
# parent = self.make_stock_entry(target_branch, args)
# frappe.msgprint(parent)
# return parent
# def auto_ste_for_trials(self):
# for d in self.get('employee_details'):
# cond = "1=1"
# self.update_serial_no_status(d)
# status = frappe.db.get_value('Process Log', {'process_data': self.name, 'trials': d.tailor_process_trials}, 'status')
# if d.employee_status == 'Completed' and not d.ste_no and status!='Closed':
# if d.tailor_process_trials:
# cond = "trials ='%s'"%(d.tailor_process_trials)
# current_name, next_name = self.get_details(cond)
# target_branch = self.get_target_branch(d, next_name)
# args = {'qty': d.assigned_work_qty, 'serial_data': d.tailor_serial_no, 'work_order': self.process_work_order, 'item': self.item}
# d.ste_no = self.prepare_stock_entry_for_process(target_branch, args)
# self.update_status(current_name, next_name)
# if d.tailor_process_trials:
# # trial_name = frappe.db.get_value('Trials',{'sales_invoice': self.sales_invoice_no, 'work_order': self.process_work_order, 'trial_no': d.tailor_process_trials}, 'name')
# parent = frappe.db.sql(""" select name from `tabTrials` where sales_invoice='%s' and work_order='%s'"""%(self.sales_invoice_no, self.process_work_order), as_list=1)
# if parent:
# frappe.db.sql("""update `tabTrial Dates` set production_status = 'Closed' where
# parent = '%s' and trial_no = '%s'"""%(parent[0][0], d.tailor_process_trials))
# def get_target_branch(self, args, next_name):
# if args.tailor_process_trials:
# trial_name = frappe.db.get_value('Trials',{'sales_invoice': self.sales_invoice_no, 'work_order': self.process_work_order}, 'name')
# trials = frappe.db.get_value('Trial Dates', {'parent': trial_name, 'process': self.process, 'trial_no': args.tailor_process_trials}, '*')
# return trials.trial_branch
# else:
# return frappe.db.get_value('Process Log', next_name, 'branch')
# def update_serial_no_status(self, args):
# if args.tailor_serial_no:
# serial_no = cstr(args.tailor_serial_no).split('\n')
# for sn in serial_no:
# msg = self.process + ' ' + self.emp_status
# parent = frappe.db.get_value('Process Log', {'process_data': self.name}, 'parent')
# update_serial_no(parent, sn, msg)
def find_start_time(self):
self.start_date = now()
return "Done"
def find_to_time(self, date_type=None):
import math
if not date_type:
self.end_date = self.date_formatting(now())
if self.start_date and self.end_date:
self.start_date = self.date_formatting(self.start_date)
self.end_date = self.date_formatting(self.end_date)
after = datetime.datetime.strptime(self.end_date, '%Y-%m-%d %H:%M:%S')
before = datetime.datetime.strptime(self.start_date, '%Y-%m-%d %H:%M:%S')
self.completed_time = cstr(math.floor(((after - before).total_seconds()) / 60))
else:
frappe.msgprint("Start Date is not mentioned")
return {
"completed_time":self.completed_time,
"end_date":self.end_date
}
def date_formatting(self,date):
date = get_datetime(date)
date = datetime.datetime.strftime(date, '%Y-%m-%d %H:%M:%S')
return date
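    # find_to_time() reports elapsed whole minutes between start_date and
    # end_date; with illustrative values: a 150-second gap gives
    # math.floor(150 / 60) == 2 minutes.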
def calculate_wage(self):
if self.process_tailor:
amount = frappe.db.sql(""" SELECT
type_of_payment,
CASE
WHEN type_of_payment='Amount'
THEN cost
WHEN type_of_payment='Percent'
THEN total_percentage
END as amount
FROM
`tabEmployeeSkill`
WHERE
process='{0}'
AND item_code='{1}' and parent='{2}' """.format(self.process,self.item,self.process_tailor),as_dict=1)
trial_cost = 0.0
tailor_cost = 0.0
serial_list = self.serial_no_data.split('\n')
serial_list = [serial for serial in serial_list if serial]
if self.process_trials:
trial_cost = self.calculate_trial_cost()
for serial_no in serial_list:
check_dict = self.get_dic_List(serial_no)
if frappe.db.get_value('Serial No Detail', check_dict, 'extra_style_cost_given') == 'Yes':
break
else:
if self.process_trials == 1 or not self.process_trials:
tailor_cost = self.calculate_process_wise_tailor_cost()
if amount:
if amount[0].get('type_of_payment') == 'Percent' and self.payment=='Yes':
self.wages_for_single_piece = ( (( flt(self.total_invoice_amount) - flt(self.total_expense) ) * flt(amount[0].get('amount')/100)) + trial_cost + tailor_cost )
self.wages = flt(self.wages_for_single_piece) * flt(len(serial_list))
if amount[0].get('type_of_payment') == 'Amount' and self.payment =='Yes':
self.wages_for_single_piece = flt(amount[0].get('amount')) + trial_cost + tailor_cost
self.wages = flt(self.wages_for_single_piece) * flt(len(serial_list))
else:
self.wages_for_single_piece = trial_cost + tailor_cost
self.wages = flt(self.wages_for_single_piece) * flt(len(serial_list))
# def make_stock_entry(self, t_branch, args):
# ste = frappe.new_doc('Stock Entry')
# ste.purpose_type = 'Material Out'
# ste.purpose ='Material Issue'
# self.stock_entry_of_child(ste, args, t_branch)
# ste.branch = get_user_branch()
# ste.save(ignore_permissions=True)
# return ste.name
# def stock_entry_of_child(self, obj, args, target_branch):
# ste = obj.append('mtn_details', {})
# ste.s_warehouse = get_branch_warehouse(self.branch)
# ste.target_branch = target_branch
# ste.t_warehouse = get_branch_warehouse(target_branch)
# ste.qty = args.get('qty')
# ste.serial_no = args.get('serial_data')
# ste.incoming_rate = 1.0
# ste.conversion_factor = 1.0
# ste.work_order = args.get('work_order')
# ste.item_code = args.get('item')
# ste.item_name = frappe.db.get_value('Item', ste.item_code, 'item_name')
# ste.stock_uom = frappe.db.get_value('Item', ste.item_code, 'stock_uom')
# company = frappe.db.get_value('GLobal Default', None, 'company')
# ste.expense_account = frappe.db.get_value('Company', company, 'default_expense_account')
# return "Done"
# def get_details(self , cond):
# name = frappe.db.sql("""SELECT ifnull(foo.name, '') AS current_name, (SELECT ifnull(name, '') FROM `tabProcess Log`
# WHERE name > foo.name AND parent = foo.parent order by process_data, trials limit 1) AS next_name
# FROM ( SELECT name, parent FROM `tabProcess Log` WHERE branch = '%s'
# and status != 'Closed' and process_data = '%s' and %s ORDER BY idx limit 1) AS foo """%(self.branch, self.name, cond), as_dict=1, debug=1)
# if name:
# return name[0].current_name, name[0].next_name
# else:
# '',''
def calculate_trial_cost(self):
trial_cost = 0.0
branch_dict = frappe.db.sql(""" SELECT
branch_dict
FROM
`tabProcess Item` as si
WHERE
parent = '%s'
and process_name = '%s' """%(self.item,self.process),as_list=1)
        if branch_dict and branch_dict[0][0] and self.process_trials:
self.process_trials = cint(self.process_trials)
branch_dict[0][0] = eval(branch_dict[0][0])
trial_cost = flt(branch_dict[0][0].get("{0}".format( cint(self.process_trials) - 1 ) ).get('cost'))
return trial_cost
def calculate_process_wise_tailor_cost(self):
tailor_cost = 0.0
self.extra_style_cost_given = 'No'
process_wise_tailor_cost = frappe.db.sql(""" SELECT
process_wise_tailor_cost
FROM
`tabWO Style`
WHERE
parent = '{0}'
AND process_wise_tailor_cost LIKE "%{1}%" """.format(self.work_order,self.process),as_list=1)
if process_wise_tailor_cost:
self.extra_style_cost_given = 'Yes'
for row in process_wise_tailor_cost:
tailor_cost += flt(eval(row[0]).get(self.process))
return tailor_cost
def update_task(self):
if self.emp_status=='Assigned' and not self.get("__islocal") and self.process_tailor:
self.task = self.create_task()
# self.update_work_order()
if self.get('employee_details'):
for d in self.get('employee_details'):
if not d.tailor_task:
d.tailor_task = self.task
def update_work_order(self):
if self.process_trials:
fabric = ''
data = frappe.db.sql(""" select a.work_order as work_order, ifnull(a.actual_fabric, '') as actual_fabric, b.pdd as pdd from `tabTrial Dates` a, `tabTrials` b where a.parent= b.name
and b.work_order ='%s' and process = '%s' and trial_no = '%s'"""%(self.process_work_order, self.process, self.process_trials), as_dict=1)
if data:
for d in data:
if cint(d.actual_fabric) == 1:
fabric = frappe.db.get_value('Production Dashboard Details', d.pdd, 'fabric_code')
else:
fabric = frappe.db.get_value('Production Dashboard Details', d.pdd, 'dummy_fabric_code')
if fabric:
frappe.db.sql(""" update `tabWork Order` set fabric__code = '%s' and trial_no = '%s'
where name = '%s'"""%(fabric, self.process_trials, d.work_order))
def create_task(self):
self.validate_dates()
tsk = frappe.new_doc('Task')
tsk.subject = '%s for %s'%(self.process, frappe.db.get_value('Item',self.item,'item_name'))
tsk.project = self.sales_invoice_no
tsk.exp_start_date = datetime.datetime.strptime(self.start_date, '%Y-%m-%d %H:%M:%S').date()
tsk.exp_end_date = datetime.datetime.strptime(self.end_date, '%Y-%m-%d %H:%M:%S').date()
tsk.status = 'Open'
tsk.process_name = self.process
tsk.item_code = self.item
tsk.process_allotment_number = self.name
tsk.sales_order_number = self.sales_invoice_no
tsk.save(ignore_permissions=True)
return tsk.name
# def assigned_to_user(self, data):
# todo = frappe.new_doc('ToDo')
# todo.description = data.task_details or 'Do process %s for item %s'%(data.process, frappe.db.get_value('Item',self.item,'item_name'))
# todo.reference_type = 'Task'
# todo.reference_name = data.task
# todo.owner = data.user
# todo.save(ignore_permissions=True)
# return todo.name
# def validate_process(self, index):
# for data in self.get('wo_process'):
# if cint(data.idx)<index:
# if data.status == 'Pending' and cint(data.skip)!=1:
# frappe.throw(_("Previous Process is Pending, please check row {0} ").format(cint(data.idx)))
# def on_submit(self):
# self.check_status()
# self.change_status('Completed')
# # self.make_stock_entry_for_finished_goods()
# def check_status(self):
# for d in self.get('wo_process'):
# if d.status =='Pending' and cint(d.skip)!=1:
# frappe.throw(_("Process is Pending, please check row {0} ").format(cint(d.idx)))
# def on_cancel(self):
# self.change_status('Pending')
# self.set_to_null()
# self.delete_dependecy()
# def change_status(self,status):
# frappe.db.sql(""" update `tabProduction Dashboard Details`
# set process_status='%s'
# where sales_invoice_no='%s' and article_code='%s'
# and process_allotment='%s'"""%(status, self.sales_invoice_no, self.item, self.name))
# def set_to_null(self):
# frappe.db.sql(""" update `tabProduction Dashboard Details`
# set process_allotment= (select name from tabCustomer where 1=2)
# where sales_invoice_no='%s' and article_code='%s'
# and process_allotment='%s'"""%( self.sales_invoice_no, self.item, self.name))
# def delete_dependecy(self):
# for d in self.get('wo_process'):
# if d.task and d.user:
# frappe.db.sql("delete from `tabToDo` where reference_type='%s' and owner='%s'"%(d.task, d.user))
# production_dict = self.get_dict(d.task, d.user)
# delte_doctype_data(production_dict)
# def get_dict(self, task, user):
# return {'Task':{'name':task}}
# def on_status_trigger_method(self, args):
# self.set_completion_date(args)
# self.update_process_status(args)
# def set_completion_date(self, args):
# for d in self.get('wo_process'):
# if cint(d.idx) == cint(args.idx) and d.status == 'Completed':
# d.completion_date = cstr(nowdate())
# else:
# d.completion_date = ''
# return True
# def make_stock_entry(self):
# if self.get('issue_raw_material'):
# create_se(self.get('issue_raw_material'))
# def make_stock_entry_for_finished_goods(self):
# ste = frappe.new_doc('Stock Entry')
# ste.purpose = 'Manufacture/Repack'
# ste.branch = get_user_branch()
# ste.save(ignore_permissions=True)
# self.make_child_entry(ste.name)
# ste = frappe.get_doc('Stock Entry',ste.name)
# ste.submit()
# self.make_gs_entry()
# return ste.name
# def make_child_entry(self, name):
# ste = frappe.new_doc('Stock Entry Detail')
# ste.t_warehouse = 'Finished Goods - I'
# ste.item_code = self.item
# ste.serial_no = self.serials_data
# ste.qty = self.finished_good_qty
# ste.parent = name
# ste.conversion_factor = 1
# ste.has_trials = 'No'
# ste.parenttype = 'Stock Entry'
# ste.uom = frappe.db.get_value('Item', ste.item_code, 'stock_uom')
# ste.stock_uom = frappe.db.get_value('Item', ste.item_code, 'stock_uom')
# ste.incoming_rate = 1.00
# ste.parentfield = 'mtn_details'
# ste.expense_account = 'Stock Adjustment - I'
# ste.cost_center = 'Main - I'
# ste.transfer_qty = self.finished_good_qty
# ste.save(ignore_permissions = True)
# return "Done"
# def make_gs_entry(self):
# if self.serials_data:
# parent = frappe.db.get_value('Production Dashboard Details',{'sales_invoice_no':self.sales_invoice_no,'article_code':self.item,'process_allotment':self.name},'name')
# sn = cstr(self.serials_data).splitlines()
# for s in sn:
# if not frappe.db.get_value('Production Status Detail',{'item_code':self.item, 'serial_no':s[0]},'name'):
# if parent:
# pd = frappe.new_doc('Production Status Detail')
# pd.item_code = self.item
# pd.serial_no = s
# pd.status = 'Ready'
# pd.parent = parent
# pd.save(ignore_permissions = True)
# if parent:
# frappe.db.sql("update `tabProduction Dashboard Details` set status='Completed', trial_no=0 where name='%s'"%(parent))
# return "Done"
# def update_process_status(self, args=None):
# self.update_parent_status()
# self.update_child_status()
# def update_parent_status(self):
# if self.process_status_changes=='Yes':
# cond = "a.parent=b.name and a.process_data='%s' and a.process_name='%s' and b.sales_invoice_no='%s'"%(self.name, self.process, self.sales_invoice_no)
# frappe.db.sql("update `tabProcess Log` a, `tabProduction Dashboard Details` b set a.status='%s' where %s"%(self.process_status,cond))
# if self.process_status=='Closed':
# self.open_next_status(cond)
# self.process_status_changes='No'
# def update_child_status(self):
# for s in self.get('trials_transaction'):
# if s.trial_change_status=='Yes':
# cond = "a.parent=b.name and a.process_data='%s' and a.process_name='%s' and a.trials='%s' and b.sales_invoice_no='%s'"%(self.name, self.process, s.trial_no, self.sales_invoice_no)
# frappe.db.sql("update `tabProcess Log` a, `tabProduction Dashboard Details` b set a.status='%s' where %s"%(s.status, cond))
# if s.status=='Closed':
# self.open_next_status(cond)
# s.trial_change_status='No'
# def open_next_status(self, cond):
# name = frappe.db.sql("""select a.* from `tabProcess Log` a, `tabProduction Dashboard Details` b where %s """%(cond), as_dict=1)
# if name:
# for s in name:
# frappe.db.sql("update `tabProcess Log` set status='Open' where idx=%s and parent='%s'"%(cint(s.idx)+1, s.parent))
def assign_task_to_employee(self):
self.validate_WorkOrder_ReleaseStatus()
self.validate_Status()
self.validate_for_completed_process()
emp = self.append('employee_details',{})
emp.employee = self.process_tailor
emp.employee_name = frappe.db.get_value('Employee', self.process_tailor, 'employee_name')
emp.employee_status = self.emp_status
emp.tailor_payment = self.payment
emp.tailor_wages = self.wages
emp.tailor_process_trials = self.process_trials
emp.employee_work_order = self.work_order
emp.tailor_extra_wages = self.extra_charge
emp.tailor_extra_amt = self.extra_charge_amount
emp.tailor_from_time = self.start_date
emp.work_estimated_time = self.estimated_time
emp.work_completed_time = self.completed_time
emp.assigned_work_qty = self.work_qty
emp.deduct_late_work = self.deduct_late_work
emp.latework = self.latework
emp.tailor_serial_no = self.serial_no_data
emp.cost = self.cost
emp.wages_per_single_piece = flt(self.wages_for_single_piece)
emp.tailor_wages = flt(self.wages)
emp.qc_required = cint(self.qc)
emp.extra_style_cost_given = self.extra_style_cost_given
if self.emp_status == 'Assigned':
self.task = self.create_task()
elif self.emp_status == 'Completed':
self.task = self.get_task()
if self.task:
emp.time_log_name = self.make_time_log(emp, self.task)
emp.tailor_task = self.task
if self.emp_status == 'Completed':
self.add_to_completed_list()
self.save()
return "Done"
def get_task(self):
data = frappe.db.sql(''' select tailor_task from `tabEmployee Details` where parent = "%s"
and employee = "%s" and tailor_serial_no = "%s" and (employee_status = "Assigned" or employee_status = "Completed")'''%(self.name, self.process_tailor, self.serial_no_data), as_list=1)
if data:
return data[0][0]
else:
val = self.create_task()
return val
def add_to_completed_list(self):
self.serial_no_list = cstr(self.serial_no_list)
self.serial_no_list += self.serial_no_data + '\n'
def validate_for_completed_process(self):
if not self.process_trials and self.emp_status == 'Assigned':
sn_data = self.serial_no_data.split('\n')
sn_data = [serial for serial in sn_data if serial]
completed_sn_data = cstr(self.serial_no_list).split('\n')
if sn_data:
for serial_no in completed_sn_data:
if serial_no in sn_data:
frappe.throw("Serial No {0} is already completed.Please Assign Status as 'Reassigned' not 'Assigned' ".format(serial_no))
def validate_Status(self):
sn_data = cstr(self.serial_no_data).split('\n')
if sn_data:
for s in sn_data:
if s:
self.validate_processStatus(s) # to avoid duplicate process status
if self.emp_status == 'Reassigned' or self.emp_status == 'Completed':
                        self.check_PreviousStatus(s)  # To check sequence of status
if self.emp_status == 'Assigned':
self.check_PrevStatus(s) # Check prev is completed
self.Next_process_assign(s) # If next process open then current has no
self.check_previous_process_assign(s)
def check_PrevStatus(self, serial_no):
if frappe.db.get_value('Serial No Detail', {'parent': serial_no}, 'name'):
if self.process_trials:
pdd, trial_no = self.get_PA_details('trial')
if frappe.db.get_value('Serial No Detail', {'process_data': pdd, 'trial_no': trial_no, 'parent': serial_no}, 'status') != 'Completed' and cint(self.process_trials) != 1:
frappe.throw(_("Previous trial is incompleted"))
elif frappe.db.get_value('Serial No Detail', {'process_data': pdd, 'parent': serial_no}, 'status') != 'Completed':
frappe.throw(_("Previous process is incompleted"))
else:
pdd, trial_no = self.get_PA_details('nontrial')
if pdd:
if frappe.db.get_value('Serial No Detail', {'process_data': pdd, 'parent': serial_no}, 'status') != 'Completed':
frappe.throw(_("Previous process is incompleted"))
def get_PA_details(self, type_of_trial):
msg = None
if type_of_trial == 'trial' and cint(self.process_trials) > 1:
return self.name, cint(self.process_trials) - 1
elif cint(frappe.db.get_value('Process Log', {'process_data': self.name, 'parent': self.pdd}, 'idx'))> 1:
data = frappe.db.sql("""select process_data from `tabProcess Log` where parent='%s' and
process_data < '%s' limit 1"""%(self.pdd, self.name), as_list=1)
if data:
msg = data[0][0]
return msg, 0
else:
return msg, 0
def Next_process_assign(self, serial_no):
data = frappe.db.sql("""select process_data from `tabProcess Log` where parent='%s' and
process_data > '%s' limit 1"""%(self.pdd, self.name), as_list=1)
if data:
if frappe.db.get_value('Serial No Detail', {'parent': serial_no, 'process_data': data[0][0]}, 'name'):
frappe.throw(_("Not allow to make changes in current process"))
def check_previous_process_assign(self, serial_no):
data = frappe.db.sql("""select process_data from `tabProcess Log` where parent='%s' and
process_data < '%s' limit 1"""%(self.pdd, self.name), as_list=1)
if data:
if not frappe.db.get_value('Serial No Detail', {'parent': serial_no, 'process_data': data[0][0]}, 'name'):
frappe.throw(_("Previous Process are uncomplete"))
def validate_processStatus(self, serial_no):
check_dict = self.get_dic_List(serial_no)
check_dict.setdefault('status', self.emp_status)
if frappe.db.get_value('Serial No Detail', check_dict, 'name'):
frappe.throw(_("Status {0} already defined For Serial No {1}").format(self.emp_status,serial_no))
    def check_PreviousStatus(self, serial_no):
val = ['Assigned']
if self.emp_status=='Completed':
val.append('Reassigned')
if self.emp_status == 'Reassigned':
val.append('Completed')
val.append('Assigned')
val.append('Reassigned')
check_dict = self.get_dic_List(serial_no)
if frappe.db.get_value('Serial No Detail', check_dict, 'status') not in val:
frappe.throw(_("Sequence is not correct or previous process is not Completed").format(self.emp_status))
def get_dic_List(self, serial_no):
check_dict = {'parent': serial_no, 'process_data': self.name}
if self.process_trials:
check_dict = {'parent': serial_no, 'process_data': self.name, 'trial_no': self.process_trials}
return check_dict
def validate_WorkOrder_ReleaseStatus(self):
if not frappe.db.get_value('Work Order', self.process_work_order, 'status') == 'Release':
            frappe.throw(_("Work order {0} must be in 'Release' status").format(self.process_work_order))
def cal_extra_chg(self):
process_data = frappe.db.get_value('Process Item',{'parent':self.item, 'process_name':self.process, 'trials':1}, 'branch_dict')
if process_data:
process_data = eval(process_data)
for s in process_data:
if cint(self.process_trials) == cint(process_data[s]['trial']):
self.extra_charge_amount = process_data[s]['cost']
return True
def calculate_estimates_time(self):
if self.work_qty and self.start_date:
self.estimated_time = cint(self.work_qty) * cint(frappe.db.get_value('EmployeeSkill',{'parent':self.process_tailor, 'process':self.process, 'item_code': self.item},'time'))
self.start_date = self.date_formatting(self.start_date)
self.end_date = datetime.datetime.strptime(self.start_date, '%Y-%m-%d %H:%M:%S') + datetime.timedelta(minutes = cint(self.estimated_time))
return "Done"
def calculate_wages(self):
self.wages = 0.0
if self.payment == 'Yes':
self.wages = cint(self.work_qty) * cint(frappe.db.get_value('EmployeeSkill',{'parent':self.process_tailor, 'process':self.process, 'item_code': self.item},'cost'))
def calc_late_work_amt(self):
self.cost = flt(self.latework) * flt(frappe.db.get_value('Item',self.item,"late_work_cost"))
return "Done"
def validate_dates(self):
if not self.start_date and not self.end_date:
frappe.throw(_('Start and End Date is necessary to create task'))
def get_trial_serial_no(self):
get_trials = frappe.db.get_value('Trials', {'work_order':self.process_work_order}, '*')
self.serial_no_data = get_trials.trials_serial_no_status
self.work_qty = 1
return "Done"
def create_se(self, raw_material):
se = frappe.new_doc('Stock Entry')
se.naming_series = 'STE-'
se.purpose = 'Material Issue'
se.posting_date = nowdate()
se.posting_time = nowtime().split('.')[0]
se.company = frappe.db.get_value("Global Defaults", None, 'default_company')
se.fiscal_year = frappe.db.get_value("Global Defaults", None, 'current_fiscal_year')
item_list = self.make_ChildSTE_Issue(se, raw_material)
if item_list:
se.submit()
self.update_child_STE(se.name)
return "Done"
def update_child_STE(self, name):
data = self.get('issue_raw_material')
for d in data:
if not d.issue_stock_entry:
d.issue_stock_entry = name
def make_ChildSTE_Issue(self, obj, raw_material):
item_list = []
for item in raw_material:
if cint(item.selected) == 1 and item.status!='Issued':
sed = obj.append('mtn_details')
sed.s_warehouse = get_branch_warehouse(get_user_branch())
company = frappe.db.get_value('Global Defaults', None, 'default_company')
sed.expense_account = frappe.db.get_value('Company', company, 'default_expense_account') or 'Stock Adjustment - '+frappe.db.get_value('Company', company, 'abbr')
sed.cost_center = get_branch_cost_center(get_user_branch()) or 'Main - '+frappe.db.get_value('Company', company, 'abbr')
sed.item_code = item.raw_material_item_code
sed.item_name = frappe.db.get_value("Item", item.raw_material_item_code, 'item_name')
sed.description = frappe.db.get_value("Item", item.raw_material_item_code, 'description')
sed.stock_uom = item.uom
sed.uom = item.uom
sed.conversion_factor = 1
sed.incoming_rate = 1.0
sed.qty = flt(item.qty)
sed.transfer_qty = flt(item.qty) * 1
sed.serial_no = item.serial_no
item.status = 'Issued'
item_list.append(item.name)
return item_list
def update_IssueItem_status(self, IssuedItem_list):
if IssuedItem_list:
for name in IssuedItem_list:
frappe.db.sql(""" update `tabIssue Raw Material` set status= 'Completed'
where name = '%s'"""%(name))
def get_employee_details(doctype, txt, searchfield, start, page_len, filters):
return frappe.db.sql(""" select name, employee_name from `tabEmployee` where name in (select distinct parent from `tabEmployeeSkill` where
process = "%(process)s" and item_code = "%(item_code)s") and (name like "%%%(name)s%%" or employee_name like "%%%(name)s%%")
order by name limit %(start)s, %(page_len)s
"""%{'process': filters.get('process'), 'item_code': filters.get('item_code'), 'name': txt, 'start': start, 'page_len': page_len})
def get_raw_serial_no(doctype, txt, searchfield, start, page_len, filters):
if filters.get('item_code'):
return frappe.db.sql(""" select name, item_name from `tabSerial No` where item_code = "%(item_code)s" and (name like "%%%(name)s%%" or item_name like "%%%(name)s%%")
order by name limit %(start)s, %(page_len)s
"""%{'item_code': filters.get('item_code'), 'name': txt, 'start': start, 'page_len': page_len})
else:
return [['']]
|
rohitwaghchaure/New_Theme_Erp
|
erpnext/manufacturing/doctype/process_allotment/process_allotment.py
|
Python
|
agpl-3.0
| 37,413
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from default import Test, with_context
from pybossa.view.account import get_update_feed
from factories import AppFactory, TaskFactory, TaskRunFactory, UserFactory, BlogpostFactory
class TestActivityFeed(Test):
def test_user_creation(self):
"""Test ACTIVITY FEED works for User creation."""
user = UserFactory.create()
update_feed = get_update_feed()
err_msg = "It should be the same user"
assert update_feed[0]['id'] == user.id, err_msg
assert update_feed[0]['fullname'] == user.fullname, err_msg
assert update_feed[0]['name'] == user.name, err_msg
assert update_feed[0].get('info') is not None, err_msg
err_msg = "The update action should be User"
assert update_feed[0]['action_updated'] == 'User', err_msg
def test_project_creation(self):
"""Test ACTIVITY FEED works for project creation."""
app = AppFactory.create()
update_feed = get_update_feed()
err_msg = "It should be the same project"
assert update_feed[0]['id'] == app.id, err_msg
assert update_feed[0]['name'] == app.name, err_msg
assert update_feed[0]['short_name'] == app.short_name, err_msg
assert update_feed[0].get('info') is None, err_msg
err_msg = "The update action should be Project"
assert update_feed[0]['action_updated'] == 'Project', err_msg
def test_blogpost_creation(self):
"""Test ACTIVITY FEED works for blog post creation."""
blogpost = BlogpostFactory.create()
update_feed = get_update_feed()
err_msg = "It should be the blog post"
assert update_feed[0]['id'] == blogpost.app_id, err_msg
assert update_feed[0]['name'] == blogpost.app.name, err_msg
assert update_feed[0]['short_name'] == blogpost.app.short_name, err_msg
assert update_feed[0].get('info') is not None, err_msg
err_msg = "The update action should be Project"
assert update_feed[0]['action_updated'] == 'Blog', err_msg
def test_task_creation(self):
"""Test ACTIVITY FEED works for task creation."""
task = TaskFactory.create()
update_feed = get_update_feed()
err_msg = "It should be the task"
assert update_feed[0]['id'] == task.app_id, err_msg
assert update_feed[0]['name'] == task.app.name, err_msg
assert update_feed[0]['short_name'] == task.app.short_name, err_msg
assert update_feed[0].get('info') is not None, err_msg
err_msg = "The update action should be Project"
assert update_feed[0]['action_updated'] == 'Task', err_msg
def test_taskrun_creation(self):
"""Test ACTIVITY FEED works for task_run creation."""
task_run = TaskRunFactory.create()
update_feed = get_update_feed()
err_msg = "It should be the same task_run"
assert update_feed[0]['id'] == task_run.user.id, err_msg
assert update_feed[0]['name'] == task_run.user.name, err_msg
assert update_feed[0]['fullname'] == task_run.user.fullname, err_msg
assert update_feed[0]['app_name'] == task_run.app.name, err_msg
assert update_feed[0]['app_short_name'] == task_run.app.short_name, err_msg
assert update_feed[0].get('info') is not None, err_msg
err_msg = "The update action should be Project"
assert update_feed[0]['action_updated'] == 'UserContribution', err_msg
def test_taskrun_creation_state_completed(self):
"""Test ACTIVITY FEED works for task_run creation state completed."""
task = TaskFactory.create(n_answers=1)
task_run = TaskRunFactory.create(task=task)
update_feed = get_update_feed()
err_msg = "It should be the same task_run"
assert update_feed[0]['id'] == task_run.app.id, err_msg
assert update_feed[0]['name'] == task_run.app.name, err_msg
assert update_feed[0]['short_name'] == task_run.app.short_name, err_msg
assert update_feed[0].get('info') is not None, err_msg
err_msg = "The update action should be Project"
assert update_feed[0]['action_updated'] == 'TaskCompleted', err_msg
def test_max_limit(self):
"""Test ACTIVITY FEED limit works."""
for i in range(0,105):
app = AppFactory.create()
update_feed = get_update_feed()
err_msg = "There should be at max 100 updates."
assert len(update_feed) == 100, err_msg
|
stefanhahmann/pybossa
|
test/test_activity_update.py
|
Python
|
agpl-3.0
| 5,209
|
# coding=utf-8
"""
DCRM - Darwin Cydia Repository Manager
Copyright (C) 2017 WU Zheng <i.82@me.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Notice: You have used class-based views, that's awesome.
If not necessary, you can try function-based views.
You may add lines above as license.
"""
from __future__ import unicode_literals
from django.http import HttpResponseNotFound
from django.views.decorators.clickjacking import xframe_options_exempt
from django.views.decorators.vary import vary_on_headers
from django.views.generic import DetailView
from photologue.models import Gallery
from WEIPDCRM.models.version import Version
class PackageView(DetailView):
model = Version
context_object_name = 'package_info'
pk_url_kwarg = 'package_id'
template_name = 'package/package.html'
@xframe_options_exempt
@vary_on_headers('X-MACHINE')
def get(self, request, *args, **kwargs):
action_name = self.kwargs.get('action_name')
if action_name == "contact":
self.template_name = 'package/contact.html'
elif action_name == "history":
self.template_name = 'package/history.html'
elif action_name is None:
self.template_name = 'package/package.html'
else:
return HttpResponseNotFound()
return super(PackageView, self).get(request, *args, **kwargs)
def get_queryset(self):
package_id = self.kwargs.get('package_id')
queryset = super(PackageView, self).get_queryset().filter(id=package_id, enabled=True)
return queryset
def get_context_data(self, **kwargs):
context = super(PackageView, self).get_context_data(**kwargs)
package_id = self.kwargs.get('package_id')
action_name = self.kwargs.get('action_name')
p_version = Version.objects.get(id=package_id)
context['gallery'] = ''
        try:
            if p_version.gallery is not None:
                context['gallery'] = p_version.gallery
        except Gallery.DoesNotExist:
            pass
if action_name == "history":
version_list = Version.objects.filter(c_package=p_version.c_package, enabled=True).order_by("-created_at")
context["version_list"] = version_list
return context
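# A plausible URLconf wiring for this view (an assumption -- the project's
# actual urls.py is not shown here): `package_id` is required and
# `action_name`, when present, is limited to the values get() accepts.
#
#   url(r'^package/(?P<package_id>\d+)/(?:(?P<action_name>contact|history)/)?$',
#       PackageView.as_view(), name='package')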
|
82Flex/DCRM
|
WEIPDCRM/styles/DefaultStyle/views/package.py
|
Python
|
agpl-3.0
| 2,971
|
import sys
from juju.lib.upstart import UpstartService
from juju.providers.common.cloudinit import get_default_origin, BRANCH
class ManagedMachineAgent(object):
agent_module = "juju.agents.machine"
def __init__(
self, juju_unit_namespace, zookeeper_hosts=None,
machine_id="0", log_file=None, juju_directory="/var/lib/juju",
public_key=None, juju_origin="ppa", juju_series=None):
"""
:param juju_series: The release series to use (maverick, natty, etc).
:param machine_id: machine id for the local machine.
:param zookeeper_hosts: Zookeeper hosts to connect.
:param log_file: A file to use for the agent logs.
:param juju_directory: The directory to use for all state and logs.
:param juju_unit_namespace: The machine agent will create units with
            a known prefix to allow for multiple users and multiple
environments to create containers. The namespace should be
unique per user and per environment.
:param public_key: An SSH public key (string) that will be
used in the container for access.
"""
self._juju_origin = juju_origin
if self._juju_origin is None:
origin, source = get_default_origin()
if origin == BRANCH:
origin = source
self._juju_origin = origin
env = {"JUJU_MACHINE_ID": machine_id,
"JUJU_ZOOKEEPER": zookeeper_hosts,
"JUJU_HOME": juju_directory,
"JUJU_ORIGIN": self._juju_origin,
"JUJU_UNIT_NS": juju_unit_namespace,
"JUJU_SERIES": juju_series,
"PYTHONPATH": ":".join(sys.path)}
if public_key:
env["JUJU_PUBLIC_KEY"] = public_key
self._service = UpstartService(
"juju-%s-machine-agent" % juju_unit_namespace, use_sudo=True)
self._service.set_description(
"Juju machine agent for %s" % juju_unit_namespace)
self._service.set_environ(env)
self._service_args = [
"/usr/bin/python", "-m", self.agent_module,
"--nodaemon", "--logfile", log_file,
"--session-file",
"/var/run/juju/%s-machine-agent.zksession" % juju_unit_namespace]
@property
def juju_origin(self):
return self._juju_origin
def start(self):
"""Start the machine agent."""
self._service.set_command(" ".join(self._service_args))
return self._service.start()
def stop(self):
"""Stop the machine agent."""
return self._service.destroy()
def is_running(self):
"""Boolean value, true if the machine agent is running."""
return self._service.is_running()
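# A minimal usage sketch (hypothetical values; zookeeper_hosts, log_file and
# the series would come from provider configuration in real code). This is
# Twisted-based code, so start()/stop() return deferreds.
#
#   agent = ManagedMachineAgent(
#       "myenv", zookeeper_hosts="localhost:2181",
#       log_file="/var/log/juju/machine-agent.log", juju_series="precise")
#   d = agent.start()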
|
anbangr/trusted-juju
|
juju/providers/local/agent.py
|
Python
|
agpl-3.0
| 2,786
|
###############################################################################
# Copyright 2011,2012 GISA Elkartea. #
# #
# This file is part of django-ws. #
# #
# django-ws is free software: you can redistribute it and/or modify it #
# under the terms of the GNU Affero General Public License as published #
# by the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# django-ws is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public #
# License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with django-ws. If not, see <http://www.gnu.org/licenses/>. #
###############################################################################
from ws.tasks import BPMTask
from ws import forms
from time import sleep
class dummy(BPMTask):
def run(self, workflow_task):
return ''
class endless(BPMTask):
def run(self, workflow_task):
while True:
sleep(1000)
class AddForm(forms.BPMTaskForm):
a = forms.IntegerField(label="First number",
initial=2,
help_text="Must be a integer number",
max_value=999,
min_value=0)
b = forms.IntegerField(label="Second number",
initial=2,
help_text="Must be a integer number",
max_value=999,
min_value=0)
class add(BPMTask):
form = AddForm
def run(self, workflow_task, a, b):
return a + b
class WaitForm(forms.BPMTaskForm):
secs = forms.IntegerField(label="How many seconds to wait",
initial=2,
help_text="Must be a integer number",
max_value=999,
min_value=0)
class wait(BPMTask):
form = WaitForm
def run(self, workflow_task, secs):
sleep(secs)
return secs
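# Pattern note: when a BPMTask declares a `form`, the cleaned form fields are
# evidently passed to run() as keyword arguments -- compare AddForm's a/b with
# add.run(self, workflow_task, a, b), and WaitForm's secs with
# wait.run(self, workflow_task, secs).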
|
unaizalakain/django-ws
|
ws/tasks/dummy.py
|
Python
|
agpl-3.0
| 2,723
|
# -*- coding: utf-8 -*-
#
# OpenCraft -- tools to aid developing and hosting free software projects
# Copyright (C) 2015-2019 OpenCraft <contact@opencraft.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
model utils - Tests, mostly for state machine
"""
# Imports #####################################################################
import json
from unittest import TestCase
from unittest.mock import Mock, patch
import ddt
from django.db import models
import consul
from instance.models.utils import (
get_base_playbook_name,
ResourceState,
ResourceStateDescriptor,
ModelResourceStateDescriptor,
WrongStateException,
ConsulAgent,
)
# Tests #######################################################################
from instance.tests.utils import skip_unless_consul_running
class ResourceStateTests(TestCase):
"""
Basic tests for the ResourceState class
"""
def test_state_declarations(self):
"""
Basic properties of a state can be declared easily and read from an instance or class.
"""
class Alpha(ResourceState):
"""
The first letter of the greek alphabet
"""
state_id = 'alpha'
alpha = Alpha(resource=Mock(), state_manager=Mock())
self.assertEqual(Alpha.state_id, 'alpha')
self.assertEqual(alpha.state_id, 'alpha')
self.assertEqual(Alpha.name, 'Alpha')
self.assertEqual(alpha.name, 'Alpha')
self.assertEqual(alpha.description, "The first letter of the greek alphabet")
self.assertEqual(Alpha.description, "The first letter of the greek alphabet")
class Beta(ResourceState):
""" A state called Beta """
state_id = 'beta'
name = 'Beta!'
description = "The second letter of the greek alphabet"
beta = Beta(resource=Mock(), state_manager=Mock())
self.assertEqual(Beta.state_id, 'beta')
self.assertEqual(beta.state_id, 'beta')
self.assertEqual(Beta.name, 'Beta!')
self.assertEqual(beta.name, 'Beta!')
self.assertEqual(Beta.description, "The second letter of the greek alphabet")
self.assertEqual(beta.description, "The second letter of the greek alphabet")
# One last check of another docstring format (make sure it has no trailing space):
class Gamma(ResourceState):
""" The third letter of the greek alphabet """
state_id = 'Γ'
gamma = Gamma(resource=Mock(), state_manager=Mock())
self.assertEqual(Gamma.state_id, 'Γ')
self.assertEqual(gamma.state_id, 'Γ')
self.assertEqual(Gamma.description, "The third letter of the greek alphabet")
self.assertEqual(gamma.description, "The third letter of the greek alphabet")
def test_state_enum(self):
"""
Test the ResourceState.Enum helper class
"""
class StateSet(ResourceState.Enum):
""" Enum class """
class StateA(ResourceState):
""" StateA """
state_id = 'a'
class StateB(ResourceState):
""" StateB """
state_id = 'b'
class Other:
""" Other object - not a state """
self.assertIsInstance(StateSet.states, tuple)
self.assertCountEqual(StateSet.states, [StateSet.StateA, StateSet.StateB])
# And with inheritance:
class MoreStates(StateSet):
""" Inherited enum class """
class StateC(ResourceState):
""" StateC """
state_id = 'c'
self.assertIsInstance(MoreStates.states, tuple)
self.assertCountEqual(MoreStates.states, [StateSet.StateA, StateSet.StateB, MoreStates.StateC])
class BaseState(ResourceState):
"""
The base class for the three test states, State1, State2, and State3
"""
class State1(BaseState):
""" The first test state """
state_id = 'state1'
name = "State 1"
class State2(BaseState):
""" The second test state """
state_id = 'state2'
name = "State 2"
class State3(BaseState):
""" The third test state """
state_id = 'state3'
name = "State 3"
class SimpleResource:
"""
A simple resource class for test purposes, which has one three-state FSM, 'state'.
"""
state = ResourceStateDescriptor(
state_classes=(State1, State2, State3),
default_state=State1,
)
# Define some transitions:
done_one = state.transition(from_states=State1, to_state=State2)
done_two = state.transition(from_states=State2, to_state=State3)
reset_to_one = state.transition(from_states=(State2, State3), to_state=State1)
reset_to_one_alt = state.transition(from_states=BaseState, to_state=State1)
return_value = True # Change this to change the expected return value of most of these methods.
@state.only_for(State1)
def method_one(self):
""" A method that only can be called in state 1 """
return self.return_value
@state.only_for(State1)
def method_one_with_args(self, a, b, c):
""" A method that only can be called in state 1 """
return (a * 1) + (b * 2) + (c * 3)
@state.only_for(State2)
def method_two(self):
""" A method that only can be called in state 2 """
return self.return_value
@state.only_for(State1, State3)
def method_odd(self):
""" A method that only can be called in states 1 or 3 """
return self.return_value
@property
@state.only_for(State1)
def prop_one(self):
""" A property whose value is only available in state 1 """
return self.return_value
@property
@state.only_for(State2, State3)
def prop_two(self):
""" A property whose value is only available in state 2 or 3 """
return self.return_value
@state.only_for(State1, State2)
def increment_state(self):
""" Increment the state """
if isinstance(self.state, State1):
self.done_one()
else:
self.done_two()
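# Illustrative sketch (not one of the tests): the intended flow of the
# SimpleResource state machine defined above. The behaviour shown here is an
# assumption read off the transition declarations, and is asserted properly
# in the test cases below.
#
#     res = SimpleResource()
#     res.state == State1      # default_state
#     res.done_one()           # State1 -> State2
#     res.done_two()           # State2 -> State3
#     res.reset_to_one()       # allowed from State2 or State3
#     res.done_two()           # raises WrongStateException while in State1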
class SimpleResourceTestCase(TestCase):
"""
ResourceStateDescriptor tests that use the SimpleResource class
"""
make_resource = SimpleResource
def test_comparison_to_state_class(self):
"""
Test the overloaded comparison operators
"""
res1 = self.make_resource()
res2 = self.make_resource()
self.assertEqual(res1.state, State1)
self.assertEqual(res2.state, State1)
self.assertNotEqual(res1.state, BaseState)
self.assertNotEqual(res1.state, State2)
self.assertNotEqual(res2.state, State2)
self.assertTrue(res1.state == State1)
self.assertFalse(res1.state != State1)
def test_comparison_to_state_instance(self):
"""
Test the syntactic sugar that allows comparing ResourceState instances.
"""
res1 = self.make_resource()
res2 = self.make_resource()
self.assertEqual(res1.state, State1)
self.assertEqual(res2.state, State1)
# States are also equal if their instances are equal:
self.assertEqual(res1.state, res1.state)
self.assertEqual(hash(res1.state), hash(res1.state))
# States are also equal if they are the same type but different resources:
self.assertEqual(res1.state, res2.state)
self.assertEqual(hash(res1.state), hash(res2.state))
res2.increment_state()
self.assertNotEqual(res1.state, res2.state)
self.assertNotEqual(hash(res1.state), hash(res2.state))
def test_comparison_to_related_states(self):
"""
Test that states do not compare as equal to parent/child states.
(Use proper isinstance() / issubclass() syntax if you want to check that.)
"""
res = self.make_resource()
self.assertEqual(res.state, State1)
base_state = BaseState(Mock(), Mock())
class ChildOverrideState(State1):
""" A child of State1 with the same state_id """
child_state = ChildOverrideState(Mock(), Mock())
# It's OK for two states that exist to have the same state_id, as long as they are not
# both used by the same ResourceStateDescriptor.
self.assertEqual(res.state.state_id, child_state.state_id)
# The syntactic sugar for comparison should not consider parent or child states equal.
# (Even if their state_id is the same.)
# The semantics of this are debatable, but this way is hopefully more clear and consistent.
self.assertNotEqual(res.state, base_state)
self.assertNotEqual(hash(res.state), hash(base_state))
self.assertNotEqual(res.state, child_state)
self.assertNotEqual(hash(res.state), hash(child_state))
def test_default_state(self):
"""
Test that when a resource is initialized it uses the correct default state.
"""
res = self.make_resource()
self.assertEqual(res.state, State1)
self.assertEqual(res.state.name, "State 1")
def test_one_of(self):
"""
Test the one_of() helper method
"""
res = self.make_resource()
self.assertEqual(res.state, State1)
self.assertTrue(res.state.one_of(State1))
self.assertTrue(res.state.one_of(State2, State1, State3))
self.assertFalse(res.state.one_of(State2, State3))
def test_unique_state_ids(self):
"""
It is forbidden to declare a ResourceStateDescriptor which has multiple states with the
same state_id.
"""
with self.assertRaisesRegex(AssertionError, "A resource's states must each have a unique state_id"):
ResourceStateDescriptor(state_classes=(State1, State1, State3), default_state=State1)
class ChildState(State2):
""" A child of state2 with the same state_id """
with self.assertRaisesRegex(AssertionError, "A resource's states must each have a unique state_id"):
ResourceStateDescriptor(state_classes=(State1, State2, ChildState), default_state=State1)
def test_missing_state_ids(self):
"""
It is forbidden to declare ResourceStateDescriptor using states that have no state_id
"""
self.assertEqual(BaseState.state_id, None)
with self.assertRaisesRegex(AssertionError, "A resource's states must each declare a state_id string"):
ResourceStateDescriptor(state_classes=(State1, BaseState), default_state=State1)
def test_cannot_assign_state(self):
"""
Ensure that a resource's state cannot be changed by assigning to the state attribute.
(Instead, a transition should be used.)
"""
res = self.make_resource()
expected_message = "You cannot assign to a state machine attribute to change the state."
with self.assertRaisesRegex(AttributeError, expected_message):
res.state = State2
def test_mutator(self):
"""
Test an example method that changes the state.
"""
res = self.make_resource()
self.assertEqual(res.state, State1)
res.increment_state()
self.assertEqual(res.state, State2)
res.increment_state()
self.assertEqual(res.state, State3)
def test_disallowed_transition(self):
"""
Test that disallowed transitions will raise an exception.
"""
res = self.make_resource()
self.assertEqual(res.state, State1)
expected_message = "This transition cannot be used to move from State 1 to State 3"
with self.assertRaisesRegex(WrongStateException, expected_message):
res.done_two()
expected_message = "This transition cannot be used to move from State 1 to State 1"
with self.assertRaisesRegex(WrongStateException, expected_message):
res.reset_to_one()
def test_multiple_from_states(self):
"""
Test that transitions can be defined with multiple from_states.
"""
res = self.make_resource()
res.increment_state()
res.increment_state()
self.assertEqual(res.state, State3)
res.reset_to_one()
self.assertEqual(res.state, State1)
def test_inherited_from_states(self):
"""
Test that transitions can be defined with from_states specifying a base class or mixin.
"""
res = self.make_resource()
res.increment_state()
self.assertEqual(res.state, State2)
res.reset_to_one_alt()
self.assertEqual(res.state, State1)
def test_method_only_for(self):
"""
Test that the @state.only_for() decorator works when used to decorate methods.
"""
res = self.make_resource()
self.assertEqual(res.state, State1)
# In State 1, we can call method_one():
res.return_value = 'A'
self.assertEqual(res.method_one(), 'A')
self.assertEqual(res.method_one.is_available(), True)
# In State 1, we can call method_one_with_args():
self.assertEqual(res.method_one_with_args(4, 5, c=6), 32)
self.assertEqual(res.method_one_with_args.is_available(), True)
# But not method_two()
expected_message = "The method 'method_two' cannot be called in this state \\(State 1 / State1\\)."
with self.assertRaisesRegex(WrongStateException, expected_message):
res.method_two()
self.assertEqual(res.method_two.is_available(), False)
# In State 1, we can call method_odd():
res.return_value = 'B'
self.assertEqual(res.method_odd(), 'B')
self.assertEqual(res.method_odd.is_available(), True)
# Go to State 2:
res.increment_state()
self.assertEqual(res.state, State2)
expected_message = "The method 'method_one' cannot be called in this state \\(State 2 / State2\\)."
with self.assertRaisesRegex(WrongStateException, expected_message):
res.method_one()
self.assertEqual(res.method_one.is_available(), False)
res.return_value = 'C'
self.assertEqual(res.method_two(), 'C')
self.assertEqual(res.method_two.is_available(), True)
def test_property_only_for(self):
"""
Test that the @state.only_for() decorator works with the @property decorator.
"""
res = self.make_resource()
self.assertEqual(res.state, State1)
# In State 1, we can access .prop_one:
res.return_value = 'A'
self.assertEqual(res.prop_one, 'A')
res.return_value = 'B'
self.assertEqual(res.prop_one, 'B')
# But not .prop_two:
expected_message = "The method 'prop_two' cannot be called in this state \\(State 1 / State1\\)."
with self.assertRaisesRegex(WrongStateException, expected_message):
dummy = res.prop_two
class DjangoResource:
"""
Same as SimpleResource but django-backed
"""
state = ModelResourceStateDescriptor(
state_classes=(State1, State2, State3),
default_state=State1,
model_field_name='backing_field',
)
backing_field = models.CharField(max_length=100, choices=state.model_field_choices)
# Define some transitions:
done_one = state.transition(from_states=State1, to_state=State2)
done_two = state.transition(from_states=State2, to_state=State3)
reset_to_one = state.transition(from_states=(State2, State3), to_state=State1)
reset_to_one_alt = state.transition(from_states=BaseState, to_state=State1)
return_value = True # Change this to change the expected return value of most of these methods.
@state.only_for(State1)
def method_one(self):
""" A method that only can be called in state 1 """
return self.return_value
@state.only_for(State1)
def method_one_with_args(self, a, b, c):
""" A method that only can be called in state 1 """
return (a * 1) + (b * 2) + (c * 3)
@state.only_for(State2)
def method_two(self):
""" A method that only can be called in state 2 """
return self.return_value
@state.only_for(State1, State3)
def method_odd(self):
""" A method that only can be called in states 1 or 3 """
return self.return_value
@property
@state.only_for(State1)
def prop_one(self):
""" A property whose value is only available in state 1 """
return self.return_value
@property
@state.only_for(State2, State3)
def prop_two(self):
""" A property whose value is only available in state 2 or 3 """
return self.return_value
@state.only_for(State1, State2)
def increment_state(self):
""" Increment the state """
if isinstance(self.state, State1):
self.done_one()
else:
self.done_two()
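# Persistence sketch (an assumption, inferred from the mocked save() calls
# asserted in DjangoResourceTest below): each transition on a
# ModelResourceStateDescriptor writes the new state_id to the backing model
# field and persists only that column:
#
#     res = DjangoResource()
#     res.done_one()  # expected to call res.save(update_fields=['backing_field'])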
class DjangoResourceTest(SimpleResourceTestCase):
"""
Run the same tests as in SimpleResourceTestCase, but using DjangoResource.
"""
make_resource = DjangoResource
def setUp(self):
self.make_resource.save = Mock()
def test_model_field_choices(self):
"""
Test that ModelResourceStateDescriptor produces a sensible set of field choices.
"""
model_field_choices = self.make_resource.state.model_field_choices
expected_model_field_choices = [
('state1', 'State1'),
('state2', 'State2'),
('state3', 'State3'),
]
self.assertEqual(model_field_choices, expected_model_field_choices)
def test_mutator(self):
"""
Test an example method that changes the state.
"""
res = self.make_resource()
self.assertEqual(res.state, State1)
self.assertEqual(self.make_resource.save.call_count, 1)
self.make_resource.save.assert_called_with(update_fields=['backing_field'])
res.increment_state()
self.assertEqual(res.state, State2)
self.assertEqual(self.make_resource.save.call_count, 2)
self.make_resource.save.assert_called_with(update_fields=['backing_field'])
res.increment_state()
self.assertEqual(res.state, State3)
self.assertEqual(self.make_resource.save.call_count, 3)
self.make_resource.save.assert_called_with(update_fields=['backing_field'])
@skip_unless_consul_running()
class ConsulAgentTest(TestCase):
"""
    A Test Case for the ConsulAgent class, which acts as a helper between this
    code base and the consul client.
"""
def setUp(self):
self.prefix = 'this/dummy/prefix/'
self.client = consul.Consul()
self.agent = ConsulAgent()
self.prefixed_agent = ConsulAgent(prefix=self.prefix)
if self.client.kv.get('', recurse=True)[1]:
self.skipTest('Consul contains unknown values!')
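    # Round-trip sketch (illustrative; the exact behaviour is asserted in the
    # tests below): put() JSON-encodes lists/dicts and stringifies scalars,
    # while get() casts the stored bytes back via _cast_value():
    #
    #     agent.put('n', 23)          # stored as b'23'
    #     agent.get('n') == 23        # cast back to int
    #     agent.put('d', {'a': 1})    # stored as JSON
    #     agent.get('d') == {'a': 1}  # decoded back to a dict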
def test_init(self):
"""
Tests ConsulAgent's init method and the data it's expected to receive and set.
"""
agent = ConsulAgent()
self.assertEqual(agent.prefix, '')
self.assertIsInstance(agent._client, consul.Consul)
# With custom parameters
prefix = 'custom_prefix'
agent = ConsulAgent(prefix=prefix)
self.assertEqual(agent.prefix, prefix)
self.assertIsInstance(agent._client, consul.Consul)
def test_get_no_prefix(self):
"""
Tests getting bare keys of different data types from Consul's Key-Value store.
"""
agent = ConsulAgent()
# Test string values
key = 'string_key'
stored_value = 'String Value'
self.client.kv.put(key, stored_value)
fetched_value = agent.get(key)
self.assertIsInstance(fetched_value, str)
self.assertEqual(fetched_value, stored_value)
# Test integer values
key = 'int_key'
stored_value = 23
self.client.kv.put(key, str(stored_value))
fetched_value = agent.get(key)
self.assertIsInstance(fetched_value, int)
self.assertEqual(fetched_value, stored_value)
# Test float values
key = 'float_key'
stored_value = 23.23
self.client.kv.put(key, str(stored_value))
fetched_value = agent.get(key)
self.assertIsInstance(fetched_value, float)
self.assertEqual(fetched_value, stored_value)
# Test list values
key = 'list_key'
stored_value = [{'nice': 'good'}, {'awesome': 'things'}]
self.client.kv.put(key, json.dumps(stored_value))
fetched_value = agent.get(key)
self.assertIsInstance(fetched_value, list)
self.assertEqual(fetched_value, stored_value)
# Test dict values
key = 'dict_key'
stored_value = {'nice': 'good', 'awesome': 'things'}
self.client.kv.put(key, json.dumps(stored_value))
fetched_value = agent.get(key)
self.assertIsInstance(fetched_value, dict)
self.assertEqual(fetched_value, stored_value)
# Test other (boolean) objects
key = 'random_key'
stored_value = True
self.client.kv.put(key, str(stored_value))
fetched_value = agent.get(key)
self.assertIsInstance(fetched_value, str)
self.assertEqual(fetched_value, str(stored_value))
def test_get_with_prefix(self):
"""
        Tests getting prefixed keys of different data types from Consul's Key-Value store.
"""
prefix = 'some-dummy/prefix/'
agent = ConsulAgent(prefix=prefix)
# Test string values
key = 'string_key'
stored_value = 'String Value'
self.client.kv.put(prefix + key, stored_value)
fetched_value = agent.get(key)
self.assertIsInstance(fetched_value, str)
self.assertEqual(fetched_value, stored_value)
# Test integer values
key = 'int_key'
stored_value = 23
self.client.kv.put(prefix + key, str(stored_value))
fetched_value = agent.get(key)
self.assertIsInstance(fetched_value, int)
self.assertEqual(fetched_value, stored_value)
# Test float values
key = 'float_key'
stored_value = 23.23
self.client.kv.put(prefix + key, str(stored_value))
fetched_value = agent.get(key)
self.assertIsInstance(fetched_value, float)
self.assertEqual(fetched_value, stored_value)
# Test list values
key = 'list_key'
stored_value = [{'nice': 'good'}, {'awesome': 'things'}]
self.client.kv.put(prefix + key, json.dumps(stored_value))
fetched_value = agent.get(key)
self.assertIsInstance(fetched_value, list)
self.assertEqual(fetched_value, stored_value)
# Test dict values
key = 'dict_key'
stored_value = {'nice': 'good', 'awesome': 'things'}
self.client.kv.put(prefix + key, json.dumps(stored_value))
fetched_value = agent.get(key)
self.assertIsInstance(fetched_value, dict)
self.assertEqual(fetched_value, stored_value)
# Test other (boolean) objects
key = 'random_key'
stored_value = True
self.client.kv.put(prefix + key, str(stored_value))
fetched_value = agent.get(key)
self.assertIsInstance(fetched_value, str)
self.assertEqual(fetched_value, str(stored_value))
def test_put_no_prefix(self):
"""
        Tests the put functionality on Consul with different data types and no prefix on the keys.
"""
agent = ConsulAgent()
# Put string values
key = 'key'
value = 'value'
agent.put(key, value)
_, data = self.client.kv.get(key)
fetched_value = data['Value'].decode()
self.assertEqual(fetched_value, value)
# Put int values
key = 'key'
value = 1
agent.put(key, value)
_, data = self.client.kv.get(key)
fetched_value = data['Value'].decode()
self.assertEqual(fetched_value, str(value))
# Put float values
key = 'key'
value = 1.1
agent.put(key, value)
_, data = self.client.kv.get(key)
fetched_value = data['Value'].decode()
self.assertEqual(fetched_value, str(value))
# Put list values
key = 'key'
value = [1, 2, 3, 5]
agent.put(key, value)
_, data = self.client.kv.get(key)
fetched_value = data['Value'].decode()
self.assertEqual(fetched_value, json.dumps(value))
# Put dict values
key = 'key'
value = {'key': 'value', 'another_key': 12}
agent.put(key, value)
_, data = self.client.kv.get(key)
fetched_value = data['Value'].decode()
self.assertEqual(fetched_value, json.dumps(value))
# Put other values
key = 'key'
value = False
agent.put(key, value)
_, data = self.client.kv.get(key)
fetched_value = data['Value'].decode()
self.assertEqual(fetched_value, json.dumps(value))
def test_put_with_prefix(self):
"""
        Tests the put functionality on Consul with different data types after prefixing the keys.
"""
prefix = 'some/testing-prefix'
agent = ConsulAgent(prefix=prefix)
# Put string values
key = 'key'
value = 'value'
agent.put(key, value)
_, data = self.client.kv.get(prefix + key)
fetched_value = data['Value'].decode()
self.assertEqual(fetched_value, value)
# Put int values
key = 'key'
value = 1
agent.put(key, value)
_, data = self.client.kv.get(prefix + key)
fetched_value = data['Value'].decode()
self.assertEqual(fetched_value, str(value))
# Put float values
key = 'key'
value = 1.1
agent.put(key, value)
_, data = self.client.kv.get(prefix + key)
fetched_value = data['Value'].decode()
self.assertEqual(fetched_value, str(value))
# Put list values
key = 'key'
value = [1, 2, 3, 5]
agent.put(key, value)
_, data = self.client.kv.get(prefix + key)
fetched_value = data['Value'].decode()
self.assertEqual(fetched_value, json.dumps(value))
# Put dict values
key = 'key'
value = {'key': 'value', 'another_key': 12}
agent.put(key, value)
_, data = self.client.kv.get(prefix + key)
fetched_value = data['Value'].decode()
self.assertEqual(fetched_value, json.dumps(value))
# Put other values
key = 'key'
value = False
agent.put(key, value)
_, data = self.client.kv.get(prefix + key)
fetched_value = data['Value'].decode()
self.assertEqual(fetched_value, json.dumps(value))
def test_delete_no_prefix(self):
"""
        Tests that a given key is deleted from the Key-Value store.
"""
agent = ConsulAgent()
self.client.kv.put('key', 'value')
self.client.kv.put('another_key', 'another value')
self.client.kv.put('dummy_key', '1')
_, values = self.client.kv.get('', recurse=True)
self.assertEqual(len(values), 3)
agent.delete('key')
_, values = self.client.kv.get('', recurse=True)
self.assertEqual(len(values), 2)
def test_delete_with_prefix(self):
"""
        Deleting through a prefixed agent removes the given prefixed key, leaving unprefixed keys untouched.
"""
prefix = 'nice-prefix'
agent = ConsulAgent(prefix=prefix)
self.client.kv.put(prefix + 'key', 'value')
self.client.kv.put(prefix + 'another_key', 'another value')
self.client.kv.put('dummy_key', '1')
_, values = self.client.kv.get('', recurse=True)
self.assertEqual(len(values), 3)
agent.delete('key')
_, values = self.client.kv.get('', recurse=True)
self.assertEqual(len(values), 2)
agent.delete('dummy_key')
_, values = self.client.kv.get('', recurse=True)
self.assertEqual(len(values), 2)
def test_purge_with_prefix(self):
"""
        Purging with a prefix should remove only the value stored at the prefix itself.
        All other keys must not be touched.
"""
prefix = 'nice-prefix'
agent = ConsulAgent(prefix=prefix)
self.client.kv.put(prefix, 'only prefix value')
self.client.kv.put(prefix + 'key', 'value')
self.client.kv.put(prefix + 'another_key', 'another value')
self.client.kv.put('dummy_key', '1')
_, values = self.client.kv.get('', recurse=True)
self.assertEqual(len(values), 4)
agent.purge()
_, values = self.client.kv.get('', recurse=True)
self.assertEqual(len(values), 3)
def test_cast_value(self):
"""
        Test the values cast by our Consul agent. Currently supports integers,
        floats, lists, dictionaries and strings.
"""
self.assertEqual(self.agent._cast_value(b'string'), 'string')
self.assertEqual(self.agent._cast_value(bytes('ãáé string', 'utf-8')), 'ãáé string')
self.assertEqual(self.agent._cast_value(b'1'), 1)
self.assertEqual(self.agent._cast_value(b'1.3'), 1.3)
list_value = [{'test': 'value'}, {'another': 'test'}]
fetched_value = json.dumps(list_value).encode()
self.assertEqual(self.agent._cast_value(fetched_value), list_value)
dict_value = {'test': 'value', 'another': 'test'}
fetched_value = json.dumps(dict_value).encode()
self.assertEqual(self.agent._cast_value(fetched_value), dict_value)
self.assertIsNone(self.agent._cast_value(None))
def test_is_json_serializable(self):
"""
        Tests that lists, dicts and booleans are identified as JSON-serializable, while strings and numbers are not.
"""
self.assertTrue(self.agent._is_json_serializable([1, 2, 3, 4, 5]))
self.assertTrue(self.agent._is_json_serializable({'key': 'value'}))
self.assertTrue(self.agent._is_json_serializable(False))
self.assertFalse(self.agent._is_json_serializable('nope'))
self.assertFalse(self.agent._is_json_serializable(1))
self.assertFalse(self.agent._is_json_serializable(1.1))
@patch.object(consul.Consul.KV, 'get')
@patch.object(consul.Consul.KV, 'put')
def test_create_or_update_dict(self, mock_kv_put, mock_kv_get):
"""
Tests create or update dict.
"""
test_dict = {'key1': 'value1', 'key2': 'value2', 'version': 1}
mock_kv_get.side_effect = [(1, {'Value': json.dumps(test_dict).encode('utf-8')}), (1, None)]
self.agent.create_or_update_dict({'key1': 'value1', 'key2': 'value2'})
# Assert the same object is saved
self.assertFalse(len(mock_kv_put.mock_calls))
mock_kv_get.side_effect = [(1, {'Value': json.dumps(test_dict).encode('utf-8')}), (1, None)]
self.agent.create_or_update_dict({'key1': 'value1-update'})
test_dict['key1'] = 'value1-update'
test_dict['version'] = 2
# Assert only one key changed and the version was updated
self.assertEqual(len(mock_kv_put.mock_calls), 1)
_, args, _ = mock_kv_put.mock_calls[0]
self.assertEqual(args[0], self.agent.prefix)
self.assertDictEqual(test_dict, json.loads(args[1].decode('utf-8')))
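    # Versioning sketch (an assumption, inferred from the mocks above): the
    # stored dict carries a 'version' counter that create_or_update_dict()
    # increments on every real change and leaves untouched when the incoming
    # payload matches what is already stored.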
@patch.object(consul.Consul.KV, 'get')
@patch.object(consul.Consul.KV, 'put')
def test_delete_dict_key(self, mock_kv_put, mock_kv_get):
"""
Test deleting a single key from a stored dictionary.
"""
test_dict = {'key1': 'value1', 'key2': 'value2', 'version': 1}
mock_kv_get.side_effect = [(1, {'Value': json.dumps(test_dict).encode('utf-8')}), (1, None)]
self.agent.delete_dict_key('key1')
self.assertEqual(len(mock_kv_put.mock_calls), 1)
_, args, _ = mock_kv_put.mock_calls[0]
self.assertEqual(args[0], self.agent.prefix)
self.assertDictEqual({'key2': 'value2', 'version': 2}, json.loads(args[1].decode('utf-8')))
def tearDown(self):
self.client.kv.delete('', recurse=True)
@ddt.ddt
class PlaybookNameSelectorTestCase(TestCase):
"""
    Checks that get_base_playbook_name returns the correct playbook names.
"""
@ddt.data(
# Old playbook name
('open-release/ginkgo.1', 'playbooks/edx_sandbox.yml'),
('opencraft-release/ginkgo.2', 'playbooks/edx_sandbox.yml'),
('open-release/hawthorn.1', 'playbooks/edx_sandbox.yml'),
('opencraft-release/hawthorn.2', 'playbooks/edx_sandbox.yml'),
# New playbook name and defaults
('', 'playbooks/openedx_native.yml'),
('master', 'playbooks/openedx_native.yml'),
('open-release/ironwood.master', 'playbooks/openedx_native.yml'),
('opencraft-release/ironwood.master', 'playbooks/openedx_native.yml'),
)
@ddt.unpack
def test_get_base_playbook_name(self, openedx_release, playbook_name):
"""
        Test that get_base_playbook_name maps each Open edX release to the expected base playbook.
"""
self.assertEqual(get_base_playbook_name(openedx_release), playbook_name)
|
open-craft/opencraft
|
instance/tests/models/test_utils.py
|
Python
|
agpl-3.0
| 33,828
|
import datetime
import re
import sys
from contextlib import contextmanager
from unittest import SkipTest, skipIf
from xml.dom.minidom import parseString
import pytz
from django.contrib.auth.models import User
from django.core import serializers
from django.core.exceptions import ImproperlyConfigured
from django.db import connection, connections
from django.db.models import F, Max, Min
from django.http import HttpRequest
from django.template import (
Context, RequestContext, Template, TemplateSyntaxError, context_processors,
)
from django.test import (
SimpleTestCase, TestCase, TransactionTestCase, override_settings,
skipIfDBFeature, skipUnlessDBFeature,
)
from django.test.utils import requires_tz_support
from django.urls import reverse
from django.utils import timezone
from django.utils.timezone import timedelta
from .forms import (
EventForm, EventLocalizedForm, EventLocalizedModelForm, EventModelForm,
EventSplitForm,
)
from .models import (
AllDayEvent, Event, MaybeEvent, Session, SessionEvent, Timestamp,
)
# These tests use the EAT (Eastern Africa Time) and ICT (Indochina Time)
# which don't have Daylight Saving Time, so we can represent them easily
# with FixedOffset, and use them directly as tzinfo in the constructors.
# settings.TIME_ZONE is forced to EAT. Most tests use a variant of
# datetime.datetime(2011, 9, 1, 13, 20, 30), which translates to
# 10:20:30 in UTC and 17:20:30 in ICT.
UTC = timezone.utc
EAT = timezone.get_fixed_timezone(180) # Africa/Nairobi
ICT = timezone.get_fixed_timezone(420) # Asia/Bangkok
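# Sanity sketch (plain stdlib behaviour, not asserted by these tests):
#
#     datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT).astimezone(UTC)
#     # -> datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
#     datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT).astimezone(ICT)
#     # -> datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)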
@override_settings(TIME_ZONE='Africa/Nairobi', USE_TZ=False)
class LegacyDatabaseTests(TestCase):
def test_naive_datetime(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
def test_naive_datetime_with_microsecond(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
@skipUnlessDBFeature('supports_timezones')
def test_aware_datetime_in_local_timezone(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
# interpret the naive datetime in local time to get the correct value
self.assertEqual(event.dt.replace(tzinfo=EAT), dt)
@skipUnlessDBFeature('supports_timezones')
def test_aware_datetime_in_local_timezone_with_microsecond(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
# interpret the naive datetime in local time to get the correct value
self.assertEqual(event.dt.replace(tzinfo=EAT), dt)
@skipUnlessDBFeature('supports_timezones')
def test_aware_datetime_in_utc(self):
dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
# interpret the naive datetime in local time to get the correct value
self.assertEqual(event.dt.replace(tzinfo=EAT), dt)
@skipUnlessDBFeature('supports_timezones')
def test_aware_datetime_in_other_timezone(self):
dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
# interpret the naive datetime in local time to get the correct value
self.assertEqual(event.dt.replace(tzinfo=EAT), dt)
@skipIfDBFeature('supports_timezones')
def test_aware_datetime_unsupported(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
msg = 'backend does not support timezone-aware datetimes when USE_TZ is False.'
with self.assertRaisesMessage(ValueError, msg):
Event.objects.create(dt=dt)
def test_auto_now_and_auto_now_add(self):
now = datetime.datetime.now()
past = now - datetime.timedelta(seconds=2)
future = now + datetime.timedelta(seconds=2)
Timestamp.objects.create()
ts = Timestamp.objects.get()
self.assertLess(past, ts.created)
self.assertLess(past, ts.updated)
        self.assertGreater(future, ts.created)
self.assertGreater(future, ts.updated)
def test_query_filter(self):
dt1 = datetime.datetime(2011, 9, 1, 12, 20, 30)
dt2 = datetime.datetime(2011, 9, 1, 14, 20, 30)
Event.objects.create(dt=dt1)
Event.objects.create(dt=dt2)
self.assertEqual(Event.objects.filter(dt__gte=dt1).count(), 2)
self.assertEqual(Event.objects.filter(dt__gt=dt1).count(), 1)
self.assertEqual(Event.objects.filter(dt__gte=dt2).count(), 1)
self.assertEqual(Event.objects.filter(dt__gt=dt2).count(), 0)
def test_query_datetime_lookups(self):
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0))
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0))
self.assertEqual(Event.objects.filter(dt__year=2011).count(), 2)
self.assertEqual(Event.objects.filter(dt__month=1).count(), 2)
self.assertEqual(Event.objects.filter(dt__day=1).count(), 2)
self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 2)
self.assertEqual(Event.objects.filter(dt__hour=1).count(), 1)
self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2)
self.assertEqual(Event.objects.filter(dt__second=0).count(), 2)
def test_query_aggregation(self):
# Only min and max make sense for datetimes.
Event.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20))
Event.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30))
Event.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40))
result = Event.objects.all().aggregate(Min('dt'), Max('dt'))
self.assertEqual(result, {
'dt__min': datetime.datetime(2011, 9, 1, 3, 20, 40),
'dt__max': datetime.datetime(2011, 9, 1, 23, 20, 20),
})
def test_query_annotation(self):
# Only min and max make sense for datetimes.
morning = Session.objects.create(name='morning')
afternoon = Session.objects.create(name='afternoon')
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20), session=afternoon)
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30), session=afternoon)
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40), session=morning)
morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40)
afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).order_by('dt'),
[morning_min_dt, afternoon_min_dt],
transform=lambda d: d.dt,
)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__lt=afternoon_min_dt),
[morning_min_dt],
transform=lambda d: d.dt,
)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__gte=afternoon_min_dt),
[afternoon_min_dt],
transform=lambda d: d.dt,
)
def test_query_datetimes(self):
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0))
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0))
self.assertSequenceEqual(Event.objects.datetimes('dt', 'year'), [datetime.datetime(2011, 1, 1, 0, 0, 0)])
self.assertSequenceEqual(Event.objects.datetimes('dt', 'month'), [datetime.datetime(2011, 1, 1, 0, 0, 0)])
self.assertSequenceEqual(Event.objects.datetimes('dt', 'day'), [datetime.datetime(2011, 1, 1, 0, 0, 0)])
self.assertSequenceEqual(
Event.objects.datetimes('dt', 'hour'),
[datetime.datetime(2011, 1, 1, 1, 0, 0),
datetime.datetime(2011, 1, 1, 4, 0, 0)]
)
self.assertSequenceEqual(
Event.objects.datetimes('dt', 'minute'),
[datetime.datetime(2011, 1, 1, 1, 30, 0),
datetime.datetime(2011, 1, 1, 4, 30, 0)]
)
self.assertSequenceEqual(
Event.objects.datetimes('dt', 'second'),
[datetime.datetime(2011, 1, 1, 1, 30, 0),
datetime.datetime(2011, 1, 1, 4, 30, 0)]
)
def test_raw_sql(self):
# Regression test for #17755
dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
event = Event.objects.create(dt=dt)
self.assertEqual(list(Event.objects.raw('SELECT * FROM timezones_event WHERE dt = %s', [dt])), [event])
def test_cursor_execute_accepts_naive_datetime(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
with connection.cursor() as cursor:
cursor.execute('INSERT INTO timezones_event (dt) VALUES (%s)', [dt])
event = Event.objects.get()
self.assertEqual(event.dt, dt)
def test_cursor_execute_returns_naive_datetime(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
Event.objects.create(dt=dt)
with connection.cursor() as cursor:
cursor.execute('SELECT dt FROM timezones_event WHERE dt = %s', [dt])
self.assertEqual(cursor.fetchall()[0][0], dt)
def test_filter_date_field_with_aware_datetime(self):
# Regression test for #17742
day = datetime.date(2011, 9, 1)
AllDayEvent.objects.create(day=day)
# This is 2011-09-02T01:30:00+03:00 in EAT
dt = datetime.datetime(2011, 9, 1, 22, 30, 0, tzinfo=UTC)
self.assertTrue(AllDayEvent.objects.filter(day__gte=dt).exists())
@override_settings(TIME_ZONE='Africa/Nairobi', USE_TZ=True)
class NewDatabaseTests(TestCase):
naive_warning = 'DateTimeField Event.dt received a naive datetime'
@requires_tz_support
def test_naive_datetime(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
with self.assertWarnsMessage(RuntimeWarning, self.naive_warning):
Event.objects.create(dt=dt)
event = Event.objects.get()
# naive datetimes are interpreted in local time
self.assertEqual(event.dt, dt.replace(tzinfo=EAT))
@requires_tz_support
def test_datetime_from_date(self):
dt = datetime.date(2011, 9, 1)
with self.assertWarnsMessage(RuntimeWarning, self.naive_warning):
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, datetime.datetime(2011, 9, 1, tzinfo=EAT))
@requires_tz_support
def test_naive_datetime_with_microsecond(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
with self.assertWarnsMessage(RuntimeWarning, self.naive_warning):
Event.objects.create(dt=dt)
event = Event.objects.get()
# naive datetimes are interpreted in local time
self.assertEqual(event.dt, dt.replace(tzinfo=EAT))
def test_aware_datetime_in_local_timezone(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
def test_aware_datetime_in_local_timezone_with_microsecond(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
def test_aware_datetime_in_utc(self):
dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
def test_aware_datetime_in_other_timezone(self):
dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
def test_auto_now_and_auto_now_add(self):
now = timezone.now()
past = now - datetime.timedelta(seconds=2)
future = now + datetime.timedelta(seconds=2)
Timestamp.objects.create()
ts = Timestamp.objects.get()
self.assertLess(past, ts.created)
self.assertLess(past, ts.updated)
        self.assertGreater(future, ts.created)
self.assertGreater(future, ts.updated)
def test_query_filter(self):
dt1 = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=EAT)
dt2 = datetime.datetime(2011, 9, 1, 14, 20, 30, tzinfo=EAT)
Event.objects.create(dt=dt1)
Event.objects.create(dt=dt2)
self.assertEqual(Event.objects.filter(dt__gte=dt1).count(), 2)
self.assertEqual(Event.objects.filter(dt__gt=dt1).count(), 1)
self.assertEqual(Event.objects.filter(dt__gte=dt2).count(), 1)
self.assertEqual(Event.objects.filter(dt__gt=dt2).count(), 0)
def test_query_filter_with_pytz_timezones(self):
tz = pytz.timezone('Europe/Paris')
dt = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=tz)
Event.objects.create(dt=dt)
next = dt + datetime.timedelta(seconds=3)
prev = dt - datetime.timedelta(seconds=3)
self.assertEqual(Event.objects.filter(dt__exact=dt).count(), 1)
self.assertEqual(Event.objects.filter(dt__exact=next).count(), 0)
self.assertEqual(Event.objects.filter(dt__in=(prev, next)).count(), 0)
self.assertEqual(Event.objects.filter(dt__in=(prev, dt, next)).count(), 1)
self.assertEqual(Event.objects.filter(dt__range=(prev, next)).count(), 1)
@requires_tz_support
def test_query_filter_with_naive_datetime(self):
dt = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=EAT)
Event.objects.create(dt=dt)
dt = dt.replace(tzinfo=None)
# naive datetimes are interpreted in local time
with self.assertWarnsMessage(RuntimeWarning, self.naive_warning):
self.assertEqual(Event.objects.filter(dt__exact=dt).count(), 1)
with self.assertWarnsMessage(RuntimeWarning, self.naive_warning):
self.assertEqual(Event.objects.filter(dt__lte=dt).count(), 1)
with self.assertWarnsMessage(RuntimeWarning, self.naive_warning):
self.assertEqual(Event.objects.filter(dt__gt=dt).count(), 0)
@skipUnlessDBFeature('has_zoneinfo_database')
def test_query_datetime_lookups(self):
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
self.assertEqual(Event.objects.filter(dt__year=2011).count(), 2)
self.assertEqual(Event.objects.filter(dt__month=1).count(), 2)
self.assertEqual(Event.objects.filter(dt__day=1).count(), 2)
self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 2)
self.assertEqual(Event.objects.filter(dt__hour=1).count(), 1)
self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2)
self.assertEqual(Event.objects.filter(dt__second=0).count(), 2)
@skipUnlessDBFeature('has_zoneinfo_database')
def test_query_datetime_lookups_in_other_timezone(self):
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
with timezone.override(UTC):
# These two dates fall in the same day in EAT, but in different days,
# years and months in UTC.
self.assertEqual(Event.objects.filter(dt__year=2011).count(), 1)
self.assertEqual(Event.objects.filter(dt__month=1).count(), 1)
self.assertEqual(Event.objects.filter(dt__day=1).count(), 1)
self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 1)
self.assertEqual(Event.objects.filter(dt__hour=22).count(), 1)
self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2)
self.assertEqual(Event.objects.filter(dt__second=0).count(), 2)
def test_query_aggregation(self):
# Only min and max make sense for datetimes.
Event.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT))
Event.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT))
Event.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT))
result = Event.objects.all().aggregate(Min('dt'), Max('dt'))
self.assertEqual(result, {
'dt__min': datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT),
'dt__max': datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT),
})
def test_query_annotation(self):
# Only min and max make sense for datetimes.
morning = Session.objects.create(name='morning')
afternoon = Session.objects.create(name='afternoon')
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT), session=afternoon)
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), session=afternoon)
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT), session=morning)
morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT)
afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).order_by('dt'),
[morning_min_dt, afternoon_min_dt],
transform=lambda d: d.dt,
)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__lt=afternoon_min_dt),
[morning_min_dt],
transform=lambda d: d.dt,
)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__gte=afternoon_min_dt),
[afternoon_min_dt],
transform=lambda d: d.dt,
)
@skipUnlessDBFeature('has_zoneinfo_database')
def test_query_datetimes(self):
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
self.assertSequenceEqual(
Event.objects.datetimes('dt', 'year'),
[datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)]
)
self.assertSequenceEqual(
Event.objects.datetimes('dt', 'month'),
[datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)]
)
self.assertSequenceEqual(
Event.objects.datetimes('dt', 'day'),
[datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)]
)
self.assertSequenceEqual(
Event.objects.datetimes('dt', 'hour'),
[datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=EAT),
datetime.datetime(2011, 1, 1, 4, 0, 0, tzinfo=EAT)]
)
self.assertSequenceEqual(
Event.objects.datetimes('dt', 'minute'),
[datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT),
datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)]
)
self.assertSequenceEqual(
Event.objects.datetimes('dt', 'second'),
[datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT),
datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)]
)
@skipUnlessDBFeature('has_zoneinfo_database')
def test_query_datetimes_in_other_timezone(self):
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
with timezone.override(UTC):
self.assertSequenceEqual(
Event.objects.datetimes('dt', 'year'),
[datetime.datetime(2010, 1, 1, 0, 0, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)]
)
self.assertSequenceEqual(
Event.objects.datetimes('dt', 'month'),
[datetime.datetime(2010, 12, 1, 0, 0, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)]
)
self.assertSequenceEqual(
Event.objects.datetimes('dt', 'day'),
[datetime.datetime(2010, 12, 31, 0, 0, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)]
)
self.assertSequenceEqual(
Event.objects.datetimes('dt', 'hour'),
[datetime.datetime(2010, 12, 31, 22, 0, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=UTC)]
)
self.assertSequenceEqual(
Event.objects.datetimes('dt', 'minute'),
[datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC)]
)
self.assertSequenceEqual(
Event.objects.datetimes('dt', 'second'),
[datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC)]
)
def test_raw_sql(self):
# Regression test for #17755
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
event = Event.objects.create(dt=dt)
self.assertSequenceEqual(list(Event.objects.raw('SELECT * FROM timezones_event WHERE dt = %s', [dt])), [event])
@skipUnlessDBFeature('supports_timezones')
def test_cursor_execute_accepts_aware_datetime(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
with connection.cursor() as cursor:
cursor.execute('INSERT INTO timezones_event (dt) VALUES (%s)', [dt])
event = Event.objects.get()
self.assertEqual(event.dt, dt)
@skipIfDBFeature('supports_timezones')
def test_cursor_execute_accepts_naive_datetime(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
utc_naive_dt = timezone.make_naive(dt, timezone.utc)
with connection.cursor() as cursor:
cursor.execute('INSERT INTO timezones_event (dt) VALUES (%s)', [utc_naive_dt])
event = Event.objects.get()
self.assertEqual(event.dt, dt)
@skipUnlessDBFeature('supports_timezones')
def test_cursor_execute_returns_aware_datetime(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
Event.objects.create(dt=dt)
with connection.cursor() as cursor:
cursor.execute('SELECT dt FROM timezones_event WHERE dt = %s', [dt])
self.assertEqual(cursor.fetchall()[0][0], dt)
@skipIfDBFeature('supports_timezones')
def test_cursor_execute_returns_naive_datetime(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
utc_naive_dt = timezone.make_naive(dt, timezone.utc)
Event.objects.create(dt=dt)
with connection.cursor() as cursor:
cursor.execute('SELECT dt FROM timezones_event WHERE dt = %s', [utc_naive_dt])
self.assertEqual(cursor.fetchall()[0][0], utc_naive_dt)
@requires_tz_support
def test_filter_date_field_with_aware_datetime(self):
# Regression test for #17742
day = datetime.date(2011, 9, 1)
AllDayEvent.objects.create(day=day)
# This is 2011-09-02T01:30:00+03:00 in EAT
dt = datetime.datetime(2011, 9, 1, 22, 30, 0, tzinfo=UTC)
self.assertFalse(AllDayEvent.objects.filter(day__gte=dt).exists())
def test_null_datetime(self):
# Regression test for #17294
e = MaybeEvent.objects.create()
self.assertIsNone(e.dt)
def test_update_with_timedelta(self):
initial_dt = timezone.now().replace(microsecond=0)
event = Event.objects.create(dt=initial_dt)
Event.objects.update(dt=F('dt') + timedelta(hours=2))
event.refresh_from_db()
self.assertEqual(event.dt, initial_dt + timedelta(hours=2))
@override_settings(TIME_ZONE='Africa/Nairobi', USE_TZ=True)
class ForcedTimeZoneDatabaseTests(TransactionTestCase):
"""
Test the TIME_ZONE database configuration parameter.
Since this involves reading and writing to the same database through two
connections, this is a TransactionTestCase.
"""
available_apps = ['timezones']
@classmethod
def setUpClass(cls):
# @skipIfDBFeature and @skipUnlessDBFeature cannot be chained. The
# outermost takes precedence. Handle skipping manually instead.
if connection.features.supports_timezones:
raise SkipTest("Database has feature(s) supports_timezones")
if not connection.features.test_db_allows_multiple_connections:
raise SkipTest("Database doesn't support feature(s): test_db_allows_multiple_connections")
super().setUpClass()
@contextmanager
def override_database_connection_timezone(self, timezone):
try:
orig_timezone = connection.settings_dict['TIME_ZONE']
connection.settings_dict['TIME_ZONE'] = timezone
# Clear cached properties, after first accessing them to ensure they exist.
connection.timezone
del connection.timezone
connection.timezone_name
del connection.timezone_name
yield
finally:
connection.settings_dict['TIME_ZONE'] = orig_timezone
# Clear cached properties, after first accessing them to ensure they exist.
connection.timezone
del connection.timezone
connection.timezone_name
del connection.timezone_name
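    # Note (an assumption about Django's cached_property behaviour): deleting
    # a cached_property that was never computed raises AttributeError, which
    # is why the context manager above reads connection.timezone and
    # connection.timezone_name before del'ing them.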
def test_read_datetime(self):
fake_dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=UTC)
Event.objects.create(dt=fake_dt)
with self.override_database_connection_timezone('Asia/Bangkok'):
event = Event.objects.get()
dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
self.assertEqual(event.dt, dt)
def test_write_datetime(self):
dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
with self.override_database_connection_timezone('Asia/Bangkok'):
Event.objects.create(dt=dt)
event = Event.objects.get()
fake_dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=UTC)
self.assertEqual(event.dt, fake_dt)
@skipUnlessDBFeature('supports_timezones')
@override_settings(TIME_ZONE='Africa/Nairobi', USE_TZ=True)
class UnsupportedTimeZoneDatabaseTests(TestCase):
def test_time_zone_parameter_not_supported_if_database_supports_timezone(self):
connections.databases['tz'] = connections.databases['default'].copy()
connections.databases['tz']['TIME_ZONE'] = 'Asia/Bangkok'
tz_conn = connections['tz']
try:
msg = (
"Connection 'tz' cannot set TIME_ZONE because its engine "
"handles time zones conversions natively."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
tz_conn.cursor()
finally:
connections['tz'].close() # in case the test fails
del connections['tz']
del connections.databases['tz']
@override_settings(TIME_ZONE='Africa/Nairobi')
class SerializationTests(SimpleTestCase):
# Backend-specific notes:
# - JSON supports only milliseconds, microseconds will be truncated.
# - PyYAML dumps the UTC offset correctly for timezone-aware datetimes,
# but when it loads this representation, it subtracts the offset and
# returns a naive datetime object in UTC (https://pyyaml.org/ticket/202).
# Tests are adapted to take these quirks into account.
def assert_python_contains_datetime(self, objects, dt):
self.assertEqual(objects[0]['fields']['dt'], dt)
def assert_json_contains_datetime(self, json, dt):
self.assertIn('"fields": {"dt": "%s"}' % dt, json)
def assert_xml_contains_datetime(self, xml, dt):
field = parseString(xml).getElementsByTagName('field')[0]
self.assertXMLEqual(field.childNodes[0].wholeText, dt)
def assert_yaml_contains_datetime(self, yaml, dt):
# Depending on the yaml dumper, '!timestamp' might be absent
self.assertRegex(yaml, r"\n fields: {dt: !(!timestamp)? '%s'}" % re.escape(dt))
def test_naive_datetime(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
data = serializers.serialize('python', [Event(dt=dt)])
self.assert_python_contains_datetime(data, dt)
obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)])
self.assert_json_contains_datetime(data, "2011-09-01T13:20:30")
obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('xml', [Event(dt=dt)])
self.assert_xml_contains_datetime(data, "2011-09-01T13:20:30")
obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt)
if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
data = serializers.serialize('yaml', [Event(dt=dt)])
self.assert_yaml_contains_datetime(data, "2011-09-01 13:20:30")
obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt, dt)
def test_naive_datetime_with_microsecond(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
data = serializers.serialize('python', [Event(dt=dt)])
self.assert_python_contains_datetime(data, dt)
obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)])
self.assert_json_contains_datetime(data, "2011-09-01T13:20:30.405")
obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt.replace(microsecond=405000))
data = serializers.serialize('xml', [Event(dt=dt)])
self.assert_xml_contains_datetime(data, "2011-09-01T13:20:30.405060")
obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt)
if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
data = serializers.serialize('yaml', [Event(dt=dt)])
self.assert_yaml_contains_datetime(data, "2011-09-01 13:20:30.405060")
obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt, dt)
def test_aware_datetime_with_microsecond(self):
dt = datetime.datetime(2011, 9, 1, 17, 20, 30, 405060, tzinfo=ICT)
data = serializers.serialize('python', [Event(dt=dt)])
self.assert_python_contains_datetime(data, dt)
obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)])
self.assert_json_contains_datetime(data, "2011-09-01T17:20:30.405+07:00")
obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt.replace(microsecond=405000))
data = serializers.serialize('xml', [Event(dt=dt)])
self.assert_xml_contains_datetime(data, "2011-09-01T17:20:30.405060+07:00")
obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt)
if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
data = serializers.serialize('yaml', [Event(dt=dt)])
self.assert_yaml_contains_datetime(data, "2011-09-01 17:20:30.405060+07:00")
obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
def test_aware_datetime_in_utc(self):
dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
data = serializers.serialize('python', [Event(dt=dt)])
self.assert_python_contains_datetime(data, dt)
obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)])
self.assert_json_contains_datetime(data, "2011-09-01T10:20:30Z")
obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('xml', [Event(dt=dt)])
self.assert_xml_contains_datetime(data, "2011-09-01T10:20:30+00:00")
obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt)
if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
data = serializers.serialize('yaml', [Event(dt=dt)])
self.assert_yaml_contains_datetime(data, "2011-09-01 10:20:30+00:00")
obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
def test_aware_datetime_in_local_timezone(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
data = serializers.serialize('python', [Event(dt=dt)])
self.assert_python_contains_datetime(data, dt)
obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)])
self.assert_json_contains_datetime(data, "2011-09-01T13:20:30+03:00")
obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('xml', [Event(dt=dt)])
self.assert_xml_contains_datetime(data, "2011-09-01T13:20:30+03:00")
obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt)
if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
data = serializers.serialize('yaml', [Event(dt=dt)])
self.assert_yaml_contains_datetime(data, "2011-09-01 13:20:30+03:00")
obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
def test_aware_datetime_in_other_timezone(self):
dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)
data = serializers.serialize('python', [Event(dt=dt)])
self.assert_python_contains_datetime(data, dt)
obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)])
self.assert_json_contains_datetime(data, "2011-09-01T17:20:30+07:00")
obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('xml', [Event(dt=dt)])
self.assert_xml_contains_datetime(data, "2011-09-01T17:20:30+07:00")
obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt)
if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
data = serializers.serialize('yaml', [Event(dt=dt)])
self.assert_yaml_contains_datetime(data, "2011-09-01 17:20:30+07:00")
obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
@override_settings(DATETIME_FORMAT='c', TIME_ZONE='Africa/Nairobi', USE_L10N=False, USE_TZ=True)
class TemplateTests(SimpleTestCase):
@requires_tz_support
def test_localtime_templatetag_and_filters(self):
"""
Test the {% localtime %} templatetag and related filters.
"""
datetimes = {
'utc': datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC),
'eat': datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT),
'ict': datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT),
'naive': datetime.datetime(2011, 9, 1, 13, 20, 30),
}
templates = {
'notag': Template("{% load tz %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:ICT }}"),
'noarg': Template(
"{% load tz %}{% localtime %}{{ dt }}|{{ dt|localtime }}|"
"{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}"
),
'on': Template(
"{% load tz %}{% localtime on %}{{ dt }}|{{ dt|localtime }}|"
"{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}"
),
'off': Template(
"{% load tz %}{% localtime off %}{{ dt }}|{{ dt|localtime }}|"
"{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}"
),
}
# Transform a list of keys in 'datetimes' to the expected template
# output. This makes the definition of 'results' more readable.
def t(*result):
return '|'.join(datetimes[key].isoformat() for key in result)
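        # For example, with the datetimes above:
        #     t('eat', 'utc') == '2011-09-01T13:20:30+03:00|2011-09-01T10:20:30+00:00'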
# Results for USE_TZ = True
results = {
'utc': {
'notag': t('eat', 'eat', 'utc', 'ict'),
'noarg': t('eat', 'eat', 'utc', 'ict'),
'on': t('eat', 'eat', 'utc', 'ict'),
'off': t('utc', 'eat', 'utc', 'ict'),
},
'eat': {
'notag': t('eat', 'eat', 'utc', 'ict'),
'noarg': t('eat', 'eat', 'utc', 'ict'),
'on': t('eat', 'eat', 'utc', 'ict'),
'off': t('eat', 'eat', 'utc', 'ict'),
},
'ict': {
'notag': t('eat', 'eat', 'utc', 'ict'),
'noarg': t('eat', 'eat', 'utc', 'ict'),
'on': t('eat', 'eat', 'utc', 'ict'),
'off': t('ict', 'eat', 'utc', 'ict'),
},
'naive': {
'notag': t('naive', 'eat', 'utc', 'ict'),
'noarg': t('naive', 'eat', 'utc', 'ict'),
'on': t('naive', 'eat', 'utc', 'ict'),
'off': t('naive', 'eat', 'utc', 'ict'),
}
}
for k1, dt in datetimes.items():
for k2, tpl in templates.items():
ctx = Context({'dt': dt, 'ICT': ICT})
actual = tpl.render(ctx)
expected = results[k1][k2]
self.assertEqual(actual, expected, '%s / %s: %r != %r' % (k1, k2, actual, expected))
# Changes for USE_TZ = False
results['utc']['notag'] = t('utc', 'eat', 'utc', 'ict')
results['ict']['notag'] = t('ict', 'eat', 'utc', 'ict')
with self.settings(USE_TZ=False):
for k1, dt in datetimes.items():
for k2, tpl in templates.items():
ctx = Context({'dt': dt, 'ICT': ICT})
actual = tpl.render(ctx)
expected = results[k1][k2]
self.assertEqual(actual, expected, '%s / %s: %r != %r' % (k1, k2, actual, expected))
def test_localtime_filters_with_pytz(self):
"""
Test the |localtime, |utc, and |timezone filters with pytz.
"""
# Use a pytz timezone as local time
tpl = Template("{% load tz %}{{ dt|localtime }}|{{ dt|utc }}")
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 12, 20, 30)})
with self.settings(TIME_ZONE='Europe/Paris'):
self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00|2011-09-01T10:20:30+00:00")
# Use a pytz timezone as argument
tpl = Template("{% load tz %}{{ dt|timezone:tz }}")
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30),
'tz': pytz.timezone('Europe/Paris')})
self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00")
# Use a pytz timezone name as argument
tpl = Template("{% load tz %}{{ dt|timezone:'Europe/Paris' }}")
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30),
'tz': pytz.timezone('Europe/Paris')})
self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00")
def test_localtime_templatetag_invalid_argument(self):
with self.assertRaises(TemplateSyntaxError):
Template("{% load tz %}{% localtime foo %}{% endlocaltime %}").render()
def test_localtime_filters_do_not_raise_exceptions(self):
"""
Test the |localtime, |utc, and |timezone filters on bad inputs.
"""
tpl = Template("{% load tz %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:tz }}")
with self.settings(USE_TZ=True):
# bad datetime value
ctx = Context({'dt': None, 'tz': ICT})
self.assertEqual(tpl.render(ctx), "None|||")
ctx = Context({'dt': 'not a date', 'tz': ICT})
self.assertEqual(tpl.render(ctx), "not a date|||")
# bad timezone value
tpl = Template("{% load tz %}{{ dt|timezone:tz }}")
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30), 'tz': None})
self.assertEqual(tpl.render(ctx), "")
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30), 'tz': 'not a tz'})
self.assertEqual(tpl.render(ctx), "")
@requires_tz_support
def test_timezone_templatetag(self):
"""
Test the {% timezone %} templatetag.
"""
tpl = Template(
"{% load tz %}"
"{{ dt }}|"
"{% timezone tz1 %}"
"{{ dt }}|"
"{% timezone tz2 %}"
"{{ dt }}"
"{% endtimezone %}"
"{% endtimezone %}"
)
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC),
'tz1': ICT, 'tz2': None})
self.assertEqual(
tpl.render(ctx),
"2011-09-01T13:20:30+03:00|2011-09-01T17:20:30+07:00|2011-09-01T13:20:30+03:00"
)
def test_timezone_templatetag_with_pytz(self):
"""
Test the {% timezone %} templatetag with pytz.
"""
tpl = Template("{% load tz %}{% timezone tz %}{{ dt }}{% endtimezone %}")
# Use a pytz timezone as argument
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT),
'tz': pytz.timezone('Europe/Paris')})
self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00")
# Use a pytz timezone name as argument
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT),
'tz': 'Europe/Paris'})
self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00")
def test_timezone_templatetag_invalid_argument(self):
with self.assertRaises(TemplateSyntaxError):
Template("{% load tz %}{% timezone %}{% endtimezone %}").render()
with self.assertRaises(pytz.UnknownTimeZoneError):
Template("{% load tz %}{% timezone tz %}{% endtimezone %}").render(Context({'tz': 'foobar'}))
@skipIf(sys.platform.startswith('win'), "Windows uses non-standard time zone names")
def test_get_current_timezone_templatetag(self):
"""
Test the {% get_current_timezone %} templatetag.
"""
tpl = Template("{% load tz %}{% get_current_timezone as time_zone %}{{ time_zone }}")
self.assertEqual(tpl.render(Context()), "Africa/Nairobi")
with timezone.override(UTC):
self.assertEqual(tpl.render(Context()), "UTC")
tpl = Template(
"{% load tz %}{% timezone tz %}{% get_current_timezone as time_zone %}"
"{% endtimezone %}{{ time_zone }}"
)
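        # ICT is a fixed-offset zone (defined earlier in this module); lacking
        # a name, it stringifies as its UTC offset.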
self.assertEqual(tpl.render(Context({'tz': ICT})), "+0700")
with timezone.override(UTC):
self.assertEqual(tpl.render(Context({'tz': ICT})), "+0700")
def test_get_current_timezone_templatetag_with_pytz(self):
"""
Test the {% get_current_timezone %} templatetag with pytz.
"""
tpl = Template("{% load tz %}{% get_current_timezone as time_zone %}{{ time_zone }}")
with timezone.override(pytz.timezone('Europe/Paris')):
self.assertEqual(tpl.render(Context()), "Europe/Paris")
tpl = Template(
"{% load tz %}{% timezone 'Europe/Paris' %}"
"{% get_current_timezone as time_zone %}{% endtimezone %}"
"{{ time_zone }}"
)
self.assertEqual(tpl.render(Context()), "Europe/Paris")
def test_get_current_timezone_templatetag_invalid_argument(self):
msg = "'get_current_timezone' requires 'as variable' (got ['get_current_timezone'])"
with self.assertRaisesMessage(TemplateSyntaxError, msg):
Template("{% load tz %}{% get_current_timezone %}").render()
@skipIf(sys.platform.startswith('win'), "Windows uses non-standard time zone names")
def test_tz_template_context_processor(self):
"""
Test the django.template.context_processors.tz template context processor.
"""
tpl = Template("{{ TIME_ZONE }}")
context = Context()
self.assertEqual(tpl.render(context), "")
request_context = RequestContext(HttpRequest(), processors=[context_processors.tz])
self.assertEqual(tpl.render(request_context), "Africa/Nairobi")
@requires_tz_support
def test_date_and_time_template_filters(self):
tpl = Template("{{ dt|date:'Y-m-d' }} at {{ dt|time:'H:i:s' }}")
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 20, 20, 20, tzinfo=UTC)})
self.assertEqual(tpl.render(ctx), "2011-09-01 at 23:20:20")
with timezone.override(ICT):
self.assertEqual(tpl.render(ctx), "2011-09-02 at 03:20:20")
def test_date_and_time_template_filters_honor_localtime(self):
tpl = Template(
"{% load tz %}{% localtime off %}{{ dt|date:'Y-m-d' }} at "
"{{ dt|time:'H:i:s' }}{% endlocaltime %}"
)
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 20, 20, 20, tzinfo=UTC)})
self.assertEqual(tpl.render(ctx), "2011-09-01 at 20:20:20")
with timezone.override(ICT):
self.assertEqual(tpl.render(ctx), "2011-09-01 at 20:20:20")
@requires_tz_support
def test_now_template_tag_uses_current_time_zone(self):
# Regression for #17343
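        # The 'O' format character renders the current time zone's UTC offset.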
tpl = Template("{% now \"O\" %}")
self.assertEqual(tpl.render(Context({})), "+0300")
with timezone.override(ICT):
self.assertEqual(tpl.render(Context({})), "+0700")
@override_settings(DATETIME_FORMAT='c', TIME_ZONE='Africa/Nairobi', USE_L10N=False, USE_TZ=False)
class LegacyFormsTests(TestCase):
def test_form(self):
form = EventForm({'dt': '2011-09-01 13:20:30'})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 13, 20, 30))
def test_form_with_non_existent_time(self):
form = EventForm({'dt': '2011-03-27 02:30:00'})
with timezone.override(pytz.timezone('Europe/Paris')):
# this is obviously a bug
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 3, 27, 2, 30, 0))
def test_form_with_ambiguous_time(self):
form = EventForm({'dt': '2011-10-30 02:30:00'})
with timezone.override(pytz.timezone('Europe/Paris')):
# this is obviously a bug
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 10, 30, 2, 30, 0))
def test_split_form(self):
form = EventSplitForm({'dt_0': '2011-09-01', 'dt_1': '13:20:30'})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 13, 20, 30))
def test_model_form(self):
EventModelForm({'dt': '2011-09-01 13:20:30'}).save()
e = Event.objects.get()
self.assertEqual(e.dt, datetime.datetime(2011, 9, 1, 13, 20, 30))
@override_settings(DATETIME_FORMAT='c', TIME_ZONE='Africa/Nairobi', USE_L10N=False, USE_TZ=True)
class NewFormsTests(TestCase):
@requires_tz_support
def test_form(self):
form = EventForm({'dt': '2011-09-01 13:20:30'})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
def test_form_with_other_timezone(self):
form = EventForm({'dt': '2011-09-01 17:20:30'})
with timezone.override(ICT):
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
def test_form_with_explicit_timezone(self):
form = EventForm({'dt': '2011-09-01 17:20:30+07:00'})
        # Datetime input formats don't allow providing a time zone.
self.assertFalse(form.is_valid())
def test_form_with_non_existent_time(self):
with timezone.override(pytz.timezone('Europe/Paris')):
form = EventForm({'dt': '2011-03-27 02:30:00'})
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors['dt'], [
"2011-03-27 02:30:00 couldn't be interpreted in time zone "
"Europe/Paris; it may be ambiguous or it may not exist."
]
)
def test_form_with_ambiguous_time(self):
with timezone.override(pytz.timezone('Europe/Paris')):
form = EventForm({'dt': '2011-10-30 02:30:00'})
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors['dt'], [
"2011-10-30 02:30:00 couldn't be interpreted in time zone "
"Europe/Paris; it may be ambiguous or it may not exist."
]
)
@requires_tz_support
def test_split_form(self):
form = EventSplitForm({'dt_0': '2011-09-01', 'dt_1': '13:20:30'})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
@requires_tz_support
def test_localized_form(self):
form = EventLocalizedForm(initial={'dt': datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)})
with timezone.override(ICT):
self.assertIn("2011-09-01 17:20:30", str(form))
@requires_tz_support
def test_model_form(self):
EventModelForm({'dt': '2011-09-01 13:20:30'}).save()
e = Event.objects.get()
self.assertEqual(e.dt, datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
@requires_tz_support
def test_localized_model_form(self):
form = EventLocalizedModelForm(instance=Event(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)))
with timezone.override(ICT):
self.assertIn("2011-09-01 17:20:30", str(form))
@override_settings(
DATETIME_FORMAT='c',
TIME_ZONE='Africa/Nairobi',
USE_L10N=False,
USE_TZ=True,
ROOT_URLCONF='timezones.urls',
)
class AdminTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create_user(
password='secret',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10, tzinfo=UTC),
is_superuser=True, username='super', first_name='Super', last_name='User',
email='super@example.com', is_staff=True, is_active=True,
date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10, tzinfo=UTC),
)
def setUp(self):
self.client.force_login(self.u1)
@requires_tz_support
def test_changelist(self):
e = Event.objects.create(dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
response = self.client.get(reverse('admin_tz:timezones_event_changelist'))
self.assertContains(response, e.dt.astimezone(EAT).isoformat())
def test_changelist_in_other_timezone(self):
e = Event.objects.create(dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
with timezone.override(ICT):
response = self.client.get(reverse('admin_tz:timezones_event_changelist'))
self.assertContains(response, e.dt.astimezone(ICT).isoformat())
@requires_tz_support
def test_change_editable(self):
e = Event.objects.create(dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
response = self.client.get(reverse('admin_tz:timezones_event_change', args=(e.pk,)))
self.assertContains(response, e.dt.astimezone(EAT).date().isoformat())
self.assertContains(response, e.dt.astimezone(EAT).time().isoformat())
def test_change_editable_in_other_timezone(self):
e = Event.objects.create(dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
with timezone.override(ICT):
response = self.client.get(reverse('admin_tz:timezones_event_change', args=(e.pk,)))
self.assertContains(response, e.dt.astimezone(ICT).date().isoformat())
self.assertContains(response, e.dt.astimezone(ICT).time().isoformat())
@requires_tz_support
def test_change_readonly(self):
Timestamp.objects.create()
# re-fetch the object for backends that lose microseconds (MySQL)
t = Timestamp.objects.get()
response = self.client.get(reverse('admin_tz:timezones_timestamp_change', args=(t.pk,)))
self.assertContains(response, t.created.astimezone(EAT).isoformat())
def test_change_readonly_in_other_timezone(self):
Timestamp.objects.create()
# re-fetch the object for backends that lose microseconds (MySQL)
t = Timestamp.objects.get()
with timezone.override(ICT):
response = self.client.get(reverse('admin_tz:timezones_timestamp_change', args=(t.pk,)))
self.assertContains(response, t.created.astimezone(ICT).isoformat())
|
nesdis/djongo
|
tests/django_tests/tests/v21/tests/timezones/tests.py
|
Python
|
agpl-3.0
| 54,334
|
# This file is part of rinohtype, the Python document preparation system.
#
# Copyright (c) Brecht Machiels.
#
# Use of this source code is subject to the terms of the GNU Affero General
# Public License v3. See the LICENSE file or http://www.gnu.org/licenses/.
from ..attribute import Attribute, Bool, OverrideDefault, Var
from ..dimension import CM
from ..structure import TableOfContentsSection
from ..stylesheets import sphinx_article
from ..template import (DocumentTemplate, BodyPageTemplate, TitlePageTemplate,
ContentsPartTemplate, DocumentPartTemplate,
TitlePartTemplate, AbstractLocation)
__all__ = ['Article']
class ArticleFrontMatter(DocumentPartTemplate):
toc_section = TableOfContentsSection()
def _flowables(self, document):
meta = document.metadata
abstract_loc = document.get_option('abstract_location')
if ('abstract' in meta
and abstract_loc == AbstractLocation.FRONT_MATTER):
yield meta['abstract']
if document.get_option('table_of_contents'):
yield self.toc_section
class Article(DocumentTemplate):
stylesheet = OverrideDefault(sphinx_article)
table_of_contents = Attribute(Bool, True,
'Show or hide the table of contents')
abstract_location = Attribute(AbstractLocation, 'front matter',
'Where to place the abstract')
parts = OverrideDefault(['title', 'front_matter', 'contents'])
# default document part templates
title = TitlePartTemplate()
front_matter = ArticleFrontMatter(page_number_format='continue')
contents = ContentsPartTemplate(page_number_format='continue')
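    # page_number_format='continue' is assumed to carry page numbering on
    # from the preceding part instead of restarting it for each part.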
# default page templates
page = BodyPageTemplate(page_size=Var('paper_size'))
title_page = TitlePageTemplate(base='page',
top_margin=8*CM)
front_matter_page = BodyPageTemplate(base='page')
contents_page = BodyPageTemplate(base='page')
|
brechtm/rinohtype
|
src/rinoh/templates/article.py
|
Python
|
agpl-3.0
| 2,023
|
import re
from unittest import skipIf
from unittest.mock import patch
import django
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.views import INTERNAL_RESET_SESSION_TOKEN
from django.contrib.sessions.backends.cache import SessionStore
from django.core import mail
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpRequest
from django.test import override_settings, tag
from django.urls import reverse
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode
from django_webtest import WebTest
from factory import Faker
from core.auth import auth_log
from core.forms import (
EmailStaffUpdateForm, EmailUpdateForm, SystemPasswordChangeForm,
SystemPasswordResetForm, SystemPasswordResetRequestForm,
UserAuthenticationForm, UsernameRemindRequestForm,
UsernameUpdateForm, UserRegistrationForm,
)
from core.views import (
PasswordResetConfirmView, PasswordResetView, UsernameRemindView,
)
from ..assertions import AdditionalAsserts
from ..factories import UserFactory
def _snake_str(string):
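    """Alternate character case (e.g. 'username' -> 'UsErNaMe') to probe case-insensitive checks."""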
return ''.join([c if i % 2 else c.upper() for i, c in enumerate(string)])
@tag('forms', 'forms-auth', 'auth')
class UserRegistrationFormTests(AdditionalAsserts, WebTest):
@classmethod
def setUpTestData(cls):
cls.expected_fields = [
'email',
'password1',
'password2',
'username',
'realm',
]
cls.honeypot_field = 'realm'
cls.user_one = UserFactory(invalid_email=True)
cls.user_two = UserFactory(is_active=False)
cls.test_transforms = [
lambda v: v,
lambda v: v.upper(),
lambda v: _snake_str(v),
]
cls.faker = Faker._get_faker()
def test_init(self):
form_empty = UserRegistrationForm()
# Verify that the expected fields are part of the form.
self.assertEqual(set(self.expected_fields), set(form_empty.fields))
# Verify that 'previous' values are empty.
self.assertEqual(form_empty.previous_uname, "")
self.assertEqual(form_empty.previous_email, "")
        # Verify that only the necessary fields are marked 'required'.
for field in self.expected_fields:
with self.subTest(field=field):
if field != self.honeypot_field:
self.assertTrue(form_empty.fields[field].required)
else:
self.assertFalse(form_empty.fields[field].required)
# Verify that fields are correctly marked for credential managers.
field_markups = {
'password1': "new-password",
'password2': "new-password",
'username': "username",
}
for field, markup in field_markups.items():
with self.subTest(field=field):
self.assertIn('autocomplete', form_empty.fields[field].widget.attrs)
self.assertEqual(form_empty.fields[field].widget.attrs['autocomplete'], markup)
# Verify that the form's save method is protected in templates.
self.assertTrue(hasattr(form_empty.save, 'alters_data'))
@patch('core.mixins.is_password_compromised')
def test_blank_data(self, mock_pwd_check):
# Empty form is expected to be invalid.
form = UserRegistrationForm(data={})
mock_pwd_check.side_effect = AssertionError("password check API was unexpectedly called")
self.assertFalse(form.is_valid())
for field in set(self.expected_fields) - set([self.honeypot_field]):
with self.subTest(field=field):
self.assertIn(field, form.errors)
@patch('core.mixins.is_password_compromised')
def test_nonunique_username(self, mock_pwd_check):
mock_pwd_check.return_value = (False, 0)
for transform in self.test_transforms:
transformed_uname = transform(self.user_two.username)
with self.subTest(value=transformed_uname):
pwd = self.faker.password()
form = UserRegistrationForm(data={
'username': transformed_uname,
'password1': pwd,
'password2': pwd,
'email': self.faker.email(),
})
self.assertFalse(form.is_valid())
self.assertIn('username', form.errors)
with override_settings(LANGUAGE_CODE='en'):
self.assertEqual(
form.errors['username'],
["A user with a similar username already exists."]
)
with override_settings(LANGUAGE_CODE='eo'):
self.assertEqual(
form.errors['username'],
["Uzanto kun simila salutnomo jam ekzistas."]
)
self.assertNotIn('password1', form.errors)
@patch('core.mixins.is_password_compromised')
def test_nonunique_email(self, mock_pwd_check):
mock_pwd_check.return_value = (False, 0)
for transform in self.test_transforms:
transformed_email = transform(self.user_one._clean_email)
with self.subTest(value=transformed_email):
pwd = self.faker.password()
form = UserRegistrationForm(data={
'username': self.faker.user_name(),
'password1': pwd,
'password2': pwd,
'email': transformed_email,
})
self.assertFalse(form.is_valid())
self.assertIn('email', form.errors)
with override_settings(LANGUAGE_CODE='en'):
self.assertEqual(
form.errors['email'],
["User address already in use."]
)
with override_settings(LANGUAGE_CODE='eo'):
self.assertEqual(
form.errors['email'],
["Adreso de uzanto jam utiligita ĉe la retejo."]
)
self.assertNotIn('password1', form.errors)
@patch('core.mixins.is_password_compromised')
def test_password_similar_to_username(self, mock_pwd_check):
mock_pwd_check.return_value = (False, 0)
for transform in self.test_transforms + [lambda v: v[::-1]]:
username = self.faker.user_name()
transformed_value = transform(username) + "!@"
with self.subTest(username=username, password=transformed_value):
form = UserRegistrationForm(data={
'username': username,
'password1': transformed_value,
'password2': transformed_value,
'email': self.faker.email(),
})
self.assertFalse(form.is_valid())
self.assertIn('password1', form.errors)
with override_settings(LANGUAGE_CODE='en'):
self.assertStartsWith(
form.errors['password1'][0],
"The password is too similar to the "
)
self.assertIn("username", form.errors['password1'][0])
with override_settings(LANGUAGE_CODE='eo'):
self.assertStartsWith(
form.errors['password1'][0],
"La pasvorto estas tro simila al la "
)
self.assertIn("salutnomo", form.errors['password1'][0])
mock_pwd_check.assert_not_called()
@patch('core.mixins.is_password_compromised')
def test_password_similar_to_email(self, mock_pwd_check):
mock_pwd_check.return_value = (False, 0)
for transform in self.test_transforms + [lambda v: v[::-1]]:
email = self.faker.email()
transformed_value = "**" + transform(email)
with self.subTest(email=email, password=transformed_value):
form = UserRegistrationForm(data={
'username': self.faker.user_name(),
'password1': transformed_value,
'password2': transformed_value,
'email': email,
})
self.assertFalse(form.is_valid())
self.assertIn('password1', form.errors)
with override_settings(LANGUAGE_CODE='en'):
self.assertStartsWith(
form.errors['password1'][0],
"The password is too similar to the "
)
self.assertIn("email address", form.errors['password1'][0])
with override_settings(LANGUAGE_CODE='eo'):
self.assertStartsWith(
form.errors['password1'][0],
"La pasvorto estas tro simila al la "
)
self.assertIn("retpoŝta adreso", form.errors['password1'][0])
mock_pwd_check.assert_not_called()
def test_weak_password(self):
weak_password_tests(
self,
'core.mixins.is_password_compromised',
UserRegistrationForm,
(),
{
'username': self.faker.user_name(),
'password1': "not very strong",
'password2': "not very strong",
'email': self.faker.email(),
},
'password1'
)
def test_strong_password(self):
registration_data = {
'username': self.faker.user_name(),
'password1': "very strong indeed",
'password2': "very strong indeed",
'email': self.faker.email(),
}
user = strong_password_tests(
self,
'core.mixins.is_password_compromised',
UserRegistrationForm,
(),
registration_data)
self.assertEqual(user.username, registration_data['username'])
self.assertEqual(user.email, registration_data['email'])
@patch('core.mixins.is_password_compromised')
def test_honeypot(self, mock_pwd_check):
mock_pwd_check.return_value = (False, 0)
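        # A honeypot value consisting only of whitespace is treated as empty
        # (humans leave the hidden field blank); any real content marks the
        # submission as automated and fails validation.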
for expected_result, injected_value in ((True, " \n \f "),
(False, self.faker.domain_word())):
pwd = self.faker.password()
form = UserRegistrationForm(data={
'username': self.faker.user_name(),
'password1': pwd,
'password2': pwd,
'email': self.faker.email(),
self.honeypot_field: injected_value,
})
if expected_result is True:
self.assertTrue(form.is_valid())
if expected_result is False:
with self.assertLogs('PasportaServo.auth', level='ERROR') as log:
self.assertFalse(form.is_valid())
self.assertIn(self.honeypot_field, form.errors)
self.assertEqual(form.errors[self.honeypot_field], [""])
self.assertEqual(len(log.records), 1)
self.assertEqual(
log.records[0].message,
"Registration failed, flies found in honeypot."
)
def test_proxy_user(self):
form = UserRegistrationForm(data={})
user = form.proxy_user
self.assertIsNotNone(user)
self.assertIs(user._meta, get_user_model()._meta)
# The proxy is expected to raise a Profile.DoesNotExist exception
# if the `profile` attribute is accessed.
self.assertRaises(ObjectDoesNotExist, lambda: user.profile)
# The proxy is expected to raise an AttributeError exception as
# long as the form was not cleaned.
with self.assertRaises(AttributeError) as cm:
user.username
self.assertEqual(str(cm.exception), "Form was not cleaned yet")
# The proxy is expected to return the value of the form data field
# and raise no exception, once the form was cleaned.
form.is_valid()
self.assertNotRaises(AttributeError, lambda: user.email)
def test_view_page(self):
page = self.app.get(reverse('register'))
self.assertEqual(page.status_code, 200)
self.assertEqual(len(page.forms), 1)
self.assertIsInstance(page.context['form'], UserRegistrationForm)
@patch('core.mixins.is_password_compromised')
def test_form_submit(self, mock_pwd_check):
page = self.app.get(reverse('register'))
page.form['username'] = uname = self.faker.user_name()
page.form['email'] = email = self.faker.email()
page.form['password1'] = page.form['password2'] = self.faker.password()
mock_pwd_check.return_value = (False, 0)
page = page.form.submit()
mock_pwd_check.assert_called_once()
self.assertEqual(page.status_code, 302)
self.assertRedirects(page, reverse('profile_create'), fetch_redirect_response=False)
while page.status_code == 302:
page = page.follow()
self.assertEqual(page.context['user'].username, uname)
self.assertEqual(page.context['user'].email, email)
@tag('forms', 'forms-auth', 'auth')
class UserAuthenticationFormTests(AdditionalAsserts, WebTest):
@classmethod
def setUpTestData(cls):
cls.user = UserFactory()
def setUp(self):
self.dummy_request = HttpRequest()
session = SessionStore()
session.create()
self.dummy_request.session = session
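        # The form writes state such as restore_request_id to the session,
        # so the dummy request needs a real session store.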
self.user.refresh_from_db()
def test_init(self):
form_empty = UserAuthenticationForm()
expected_fields = [
'username',
'password',
]
# Verify that the expected fields are part of the form.
self.assertEqual(set(expected_fields), set(form_empty.fields))
# Verify that fields are correctly marked for credential managers.
for field, markup in {'password': "current-password", 'username': "username"}.items():
with self.subTest(field=field):
self.assertIn('autocomplete', form_empty.fields[field].widget.attrs)
self.assertEqual(form_empty.fields[field].widget.attrs['autocomplete'], markup)
def test_blank_data(self):
# Empty form is expected to be invalid.
form = UserAuthenticationForm(data={})
self.assertFalse(form.is_valid())
# Form with empty password field is expected to be invalid.
form = UserAuthenticationForm(data={'username': self.user.username})
self.assertFalse(form.is_valid())
self.assertIn('password', form.errors)
# Form with empty username field is expected to be invalid.
form = UserAuthenticationForm(data={'password': "adm1n"})
self.assertFalse(form.is_valid())
self.assertIn('username', form.errors)
def test_inactive_user_login(self):
self.user.is_active = False
self.user.save()
# The error for an inactive user's login with incorrect credentials is
# expected to be along the lines of 'incorrect username or password'.
form = UserAuthenticationForm(
self.dummy_request, {'username': self.user.username, 'password': "incorrect"})
self.assertFalse(form.is_valid())
with override_settings(LANGUAGE_CODE='en'):
self.assertStartsWith(
form.non_field_errors()[0],
"Please enter the correct username and password")
with override_settings(LANGUAGE_CODE='eo'):
self.assertStartsWith(
form.non_field_errors()[0],
"Bonvole enigu ĝustajn salutnomon kaj pasvorton")
self.assertNotIn('restore_request_id', self.dummy_request.session)
# The error for an inactive user's login with correct credentials is
# expected to inform that the account is inactive. In addition, the
# restore_request_id is expected in the session and a warning emitted
# on the authentication log.
with self.assertLogs('PasportaServo.auth', level='WARNING') as log:
form = UserAuthenticationForm(
self.dummy_request, {'username': self.user.username, 'password': "adm1n"})
self.assertFalse(form.is_valid())
self.assertEqual(form.non_field_errors()[0], str(form.error_messages['inactive']))
self.assertIn('restore_request_id', self.dummy_request.session)
self.assertIs(type(self.dummy_request.session['restore_request_id']), tuple)
self.assertEqual(len(self.dummy_request.session['restore_request_id']), 2)
self.assertEqual(len(log.records), 1)
self.assertIn("the account is deactivated", log.output[0])
def test_active_user_login(self):
self.assertTrue(self.user.is_active)
# The error for an active user's login with incorrect credentials is
# expected to be along the lines of 'incorrect username or password'.
form = UserAuthenticationForm(
self.dummy_request, {'username': self.user.username, 'password': "incorrect"})
self.assertFalse(form.is_valid())
with override_settings(LANGUAGE_CODE='en'):
self.assertStartsWith(
form.non_field_errors()[0],
"Please enter the correct username and password")
with override_settings(LANGUAGE_CODE='eo'):
self.assertStartsWith(
form.non_field_errors()[0],
"Bonvole enigu ĝustajn salutnomon kaj pasvorton")
self.assertNotIn('restore_request_id', self.dummy_request.session)
# For an active user's login with correct credentials, no error is
# expected. In addition, the restore_request_id shall not be in the
# session.
form = UserAuthenticationForm(
self.dummy_request, {'username': self.user.username, 'password': "adm1n"})
self.assertTrue(form.is_valid())
self.assertNotIn('restore_request_id', self.dummy_request.session)
def test_case_sensitivity(self):
        # The error for a login with a username differing only in case is
        # expected to be along the lines of 'incorrect username or password',
        # and to note that the fields are case-sensitive.
for credentials in ((self.user.username.upper(), "incorrect"),
(self.user.username.upper(), "adm1n")):
form = UserAuthenticationForm(
self.dummy_request, {'username': credentials[0], 'password': credentials[1]})
self.assertFalse(form.is_valid())
with override_settings(LANGUAGE_CODE='en'):
self.assertStartsWith(
form.non_field_errors()[0],
"Please enter the correct username and password")
self.assertIn(
"Note that both fields are case-sensitive",
form.non_field_errors()[0])
with override_settings(LANGUAGE_CODE='eo'):
self.assertStartsWith(
form.non_field_errors()[0],
"Bonvole enigu ĝustajn salutnomon kaj pasvorton")
self.assertIn(
"Notu, ke ambaŭ kampoj estas uskleco-distingaj",
form.non_field_errors()[0])
def test_view_page(self):
page = self.app.get(reverse('login'))
self.assertEqual(page.status_code, 200)
self.assertEqual(len(page.forms), 1)
self.assertIsInstance(page.context['form'], UserAuthenticationForm)
def test_form_submit_invalid_credentials(self):
page = self.app.get(reverse('login'))
page.form['username'] = "SomeUser"
page.form['password'] = ".incorrect."
page = page.form.submit()
self.assertEqual(page.status_code, 200)
self.assertEqual(len(page.forms), 1)
self.assertGreaterEqual(len(page.context['form'].errors), 1)
def test_form_submit_valid_credentials(self):
page = self.app.get(reverse('login'))
page.form['username'] = self.user.username
page.form['password'] = "adm1n"
page = page.form.submit()
self.assertEqual(page.status_code, 302)
self.assertRedirects(page, '/')
@tag('forms', 'forms-auth', 'auth')
class UsernameUpdateFormTests(AdditionalAsserts, WebTest):
@classmethod
def setUpTestData(cls):
cls.user = UserFactory()
def setUp(self):
self.user.refresh_from_db()
def test_init(self):
form = UsernameUpdateForm(instance=self.user)
# Verify that the expected fields are part of the form.
self.assertEqual(['username'], list(form.fields))
# Verify that the form stores the username before a change.
self.assertTrue(hasattr(form, 'previous_uname'))
self.assertEqual(form.previous_uname, self.user.username)
# Verify the form's save method is protected in templates.
self.assertTrue(
hasattr(form.save, 'alters_data')
or hasattr(form.save, 'do_not_call_in_templates')
)
def test_blank_data(self):
# Empty form is expected to be invalid.
form = UsernameUpdateForm(data={})
self.assertFalse(form.is_valid())
with override_settings(LANGUAGE_CODE='en'):
self.assertEqual(form.errors, {'username': ["This field is required."]})
with override_settings(LANGUAGE_CODE='eo'):
self.assertEqual(form.errors, {'username': ["Ĉi tiu kampo estas deviga."]})
def test_invalid_username(self):
test_data = [
# Too-long username is expected to be rejected.
(
"a" * (self.user._meta.get_field('username').max_length + 1),
{
'en': (f"Ensure that this value has at most "
f"{self.user._meta.get_field('username').max_length} characters"),
'eo': (f"Certigu ke tiu ĉi valoro maksimume enhavu "
f"{self.user._meta.get_field('username').max_length} karaktrojn"),
}
),
# Username consisting of only whitespace is expected to be rejected.
(
" \t \r \f ",
{
'en': "This field is required",
'eo': "Ĉi tiu kampo estas deviga",
}
),
# Usernames containing invalid characters are expected to be rejected.
(
self.user.username + " id",
{
'en': "Enter a username conforming to these rules: ",
'eo': "Enigu salutnomon laŭantan la jenajn regulojn: ",
}
),
(
self.user.username + "=+~",
{
'en': "Enter a username conforming to these rules: ",
'eo': "Enigu salutnomon laŭantan la jenajn regulojn: ",
}
),
(
"A<B",
{
'en': "Enter a username conforming to these rules: ",
'eo': "Enigu salutnomon laŭantan la jenajn regulojn: ",
}
),
]
for new_username, expected_error in test_data:
with self.subTest(value=new_username):
form = UsernameUpdateForm(data={'username': new_username}, instance=self.user)
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors['username']), 1)
with override_settings(LANGUAGE_CODE='en'):
self.assertStartsWith(form.errors['username'][0], expected_error['en'])
with override_settings(LANGUAGE_CODE='eo'):
self.assertStartsWith(form.errors['username'][0], expected_error['eo'])
def test_same_username(self):
# Username without any change is expected to be accepted.
form = UsernameUpdateForm(data={'username': self.user.username}, instance=self.user)
self.assertTrue(form.is_valid())
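        # Another user holding a case-variant of this username must not
        # prevent keeping the username unchanged.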
UserFactory(username=_snake_str(self.user.username))
form = UsernameUpdateForm(data={'username': self.user.username}, instance=self.user)
self.assertTrue(form.is_valid())
def test_case_modified_username(self):
form = UsernameUpdateForm(
data={'username': self.user.username.capitalize()},
instance=self.user)
self.assertTrue(form.is_valid())
form = UsernameUpdateForm(
data={'username': self.user.username.upper()},
instance=self.user)
self.assertTrue(form.is_valid())
def test_case_modified_nonunique_username(self):
UserFactory(username=_snake_str(self.user.username))
UserFactory(username=self.user.username.upper())
form = UsernameUpdateForm(
data={'username': self.user.username.capitalize()},
instance=self.user)
self.assertFalse(form.is_valid())
with override_settings(LANGUAGE_CODE='en'):
self.assertEqual(
form.errors,
{'username': ["A user with a similar username already exists."]}
)
with override_settings(LANGUAGE_CODE='eo'):
self.assertEqual(
form.errors,
{'username': ["Uzanto kun simila salutnomo jam ekzistas."]}
)
def test_nonunique_username(self):
other_user = UserFactory()
for new_username in (other_user.username,
other_user.username.capitalize(),
_snake_str(other_user.username)):
with self.subTest(value=new_username):
form = UsernameUpdateForm(data={'username': new_username}, instance=self.user)
self.assertFalse(form.is_valid())
with override_settings(LANGUAGE_CODE='en'):
self.assertEqual(
form.errors,
{'username': ["A user with a similar username already exists."]}
)
with override_settings(LANGUAGE_CODE='eo'):
self.assertEqual(
form.errors,
{'username': ["Uzanto kun simila salutnomo jam ekzistas."]}
)
def test_valid_data(self):
new_username = self.user.username * 2
form = UsernameUpdateForm(data={'username': new_username}, instance=self.user)
self.assertTrue(form.is_valid())
user = form.save(commit=False)
self.assertEqual(user.pk, self.user.pk)
self.assertEqual(user.username, new_username)
def test_view_page(self):
page = self.app.get(reverse('username_change'), user=self.user)
self.assertEqual(page.status_code, 200)
self.assertEqual(len(page.forms), 1)
self.assertIsInstance(page.context['form'], UsernameUpdateForm)
def test_form_submit(self):
page = self.app.get(reverse('username_change'), user=self.user)
page.form['username'] = new_username = _snake_str(self.user.username)
page = page.form.submit()
self.user.refresh_from_db()
self.assertRedirects(
page,
reverse('profile_edit', kwargs={
'pk': self.user.profile.pk,
'slug': self.user.profile.autoslug})
)
self.assertEqual(self.user.username, new_username)
@tag('forms', 'forms-auth', 'auth')
class EmailUpdateFormTests(AdditionalAsserts, WebTest):
empty_is_invalid = True
@classmethod
def setUpTestData(cls):
cls.user = UserFactory()
cls.invalid_email_user = UserFactory(invalid_email=True)
def setUp(self):
self.user.refresh_from_db()
self.invalid_email_user.refresh_from_db()
def _init_form(self, data=None, instance=None):
return EmailUpdateForm(data=data, instance=instance)
def test_init(self):
form = self._init_form(instance=self.user)
# Verify that the expected fields are part of the form.
self.assertEqual(['email'], list(form.fields))
# Verify that the form stores the email address before a change.
self.assertTrue(hasattr(form, 'previous_email'))
self.assertEqual(form.previous_email, self.user.email)
self.assertEqual(form.initial['email'], form.previous_email)
form = self._init_form(instance=self.invalid_email_user)
# Verify that the form stores the cleaned up email address.
self.assertTrue(hasattr(form, 'previous_email'))
self.assertEqual(form.previous_email, self.invalid_email_user._clean_email)
self.assertEqual(form.initial['email'], form.previous_email)
# Verify that the form's save method is protected in templates.
self.assertTrue(
hasattr(form.save, 'alters_data')
or hasattr(form.save, 'do_not_call_in_templates')
)
def test_blank_data(self):
# Empty form is expected to follow the 'empty_is_invalid' setting.
form = self._init_form(data={}, instance=self.user)
if self.empty_is_invalid:
self.assertFalse(form.is_valid())
with override_settings(LANGUAGE_CODE='en'):
self.assertEqual(form.errors, {'email': ["Enter a valid email address."]})
with override_settings(LANGUAGE_CODE='eo'):
self.assertEqual(form.errors, {'email': ["Enigu retadreson en ĝusta formo."]})
else:
self.assertTrue(form.is_valid(), msg=repr(form.errors))
def test_invalid_email(self):
test_data = [
# Too-long email address is expected to be rejected.
(
"a" * self.user._meta.get_field('email').max_length + "@xyz.biz",
{
'en': (f"Ensure that this value has at most "
f"{self.user._meta.get_field('email').max_length} characters"),
'eo': (f"Certigu ke tiu ĉi valoro maksimume enhavu "
f"{self.user._meta.get_field('email').max_length} karaktrojn"),
}
),
# Email address containing invalid characters is expected to be rejected.
(
"abc[def]gh@localhost",
{
'en': "Enter a valid email address.",
'eo': "Enigu retadreson en ĝusta formo.",
}
),
(
"abc def gh@localhost",
{
'en': "Enter a valid email address.",
'eo': "Enigu retadreson en ĝusta formo.",
}
),
# Email address containing more than one 'at' sign is expected to be rejected.
(
"abc@def@gh@localhost",
{
'en': "Enter a valid email address.",
'eo': "Enigu retadreson en ĝusta formo.",
}
),
]
if self.empty_is_invalid:
test_data.append(
# Email address consisting of only whitespace is expected to be rejected.
(
" \t \r \f ",
{
'en': "Enter a valid email address.",
'eo': "Enigu retadreson en ĝusta formo.",
}
)
)
for new_email, expected_error in test_data:
with self.subTest(value=new_email):
form = self._init_form(data={'email': new_email}, instance=self.user)
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors['email']), 1)
with override_settings(LANGUAGE_CODE='en'):
self.assertStartsWith(form.errors['email'][0], expected_error['en'])
with override_settings(LANGUAGE_CODE='eo'):
self.assertStartsWith(form.errors['email'][0], expected_error['eo'])
def test_valid_strange_email(self):
test_data = [
"\"abc@def\"@example.com",
"user+mailbox@example.com",
"customer/department=shipping@example.com",
"$A12345@example.com",
"!def!xyz%abc@example.com",
]
for new_email in test_data:
with self.subTest(value=new_email):
form = self._init_form(data={'email': new_email}, instance=self.user)
self.assertTrue(form.is_valid(), msg=repr(form.errors))
def test_invalid_prefix_email(self):
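        # settings.INVALID_PREFIX presumably marks addresses known to be
        # undeliverable; submitting an address that carries the marker
        # itself must be rejected.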
for obj_tag, obj in (("normal email", self.user),
("invalid email", self.invalid_email_user)):
transformed_email = f"{settings.INVALID_PREFIX}{obj._clean_email}"
with self.subTest(tag=obj_tag, value=transformed_email):
form = self._init_form(data={'email': transformed_email}, instance=self.user)
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors['email']), 1)
with override_settings(LANGUAGE_CODE='en'):
self.assertStartsWith(
form.errors['email'][0],
f"Email address cannot start with {settings.INVALID_PREFIX}"
)
with override_settings(LANGUAGE_CODE='eo'):
self.assertStartsWith(
form.errors['email'][0],
f"Retpoŝta adreso ne povas komenciĝi per {settings.INVALID_PREFIX}"
)
def test_same_email(self):
# Email address without any change is expected to be accepted.
form = self._init_form(data={'email': self.user.email}, instance=self.user)
self.assertTrue(form.is_valid())
form.save(commit=False)
        # Since the address is unchanged, no email is expected to be sent.
self.assertEqual(len(mail.outbox), 0)
form = self._init_form(
data={'email': self.invalid_email_user._clean_email},
instance=self.invalid_email_user)
self.assertTrue(form.is_valid())
form.save(commit=False)
        # Since the address is unchanged, no email is expected to be sent.
self.assertEqual(len(mail.outbox), 0)
def test_case_modified_email(self):
test_transforms = [
lambda e: e.capitalize(),
lambda e: _snake_str(e),
lambda e: e.upper(),
]
test_data = [
(obj_tag, obj, tr)
for obj_tag, obj in (("normal email", self.user),
("invalid email", self.invalid_email_user))
for tr in test_transforms
]
for obj_tag, obj, transform in test_data:
transformed_email = transform(obj._clean_email)
with self.subTest(tag=obj_tag, value=transformed_email):
form = self._init_form(data={'email': transformed_email}, instance=obj)
self.assertTrue(form.is_valid(), msg=repr(form.errors))
def test_nonunique_email(self):
normal_email_user = UserFactory()
test_transforms = [
lambda e: e,
lambda e: _snake_str(e),
lambda e: e.upper(),
]
test_data = [
(obj_tag, obj, tr)
for obj_tag, obj in (("normal email", normal_email_user),
("invalid email", self.invalid_email_user))
for tr in test_transforms
]
for obj_tag, obj, transform in test_data:
transformed_email = transform(obj._clean_email)
with self.subTest(tag=obj_tag, value=transformed_email):
form = self._init_form(data={'email': transformed_email}, instance=self.user)
self.assertFalse(form.is_valid())
with override_settings(LANGUAGE_CODE='en'):
self.assertEqual(
form.errors,
{'email': ["User address already in use."]}
)
with override_settings(LANGUAGE_CODE='eo'):
self.assertEqual(
form.errors,
{'email': ["Adreso de uzanto jam utiligita ĉe la retejo."]}
)
# Attempting to use an email address similar to an existing invalid address
# is expected to result in error.
form = self._init_form(
data={'email': self.invalid_email_user.email},
instance=self.user)
self.assertFalse(form.is_valid())
expected_errors = {
'en': f"Email address cannot start with {settings.INVALID_PREFIX}",
'eo': f"Retpoŝta adreso ne povas komenciĝi per {settings.INVALID_PREFIX}",
}
unexpected_errors = {
'en': "User address already in use.",
'eo': "Adreso de uzanto jam utiligita ĉe la retejo.",
}
for lang in expected_errors:
with override_settings(LANGUAGE_CODE=lang):
self.assertTrue(
any(
e.startswith(expected_errors[lang])
for e in form.errors['email']
),
msg=repr(form.errors))
self.assertNotIn(unexpected_errors[lang], form.errors['email'])
transformed_email = f"{settings.INVALID_PREFIX}{self.invalid_email_user._clean_email}"
transformed_email = _snake_str(transformed_email.lower())
form = self._init_form(data={'email': transformed_email}, instance=self.user)
self.assertFalse(form.is_valid())
expected_errors = unexpected_errors
for lang in expected_errors:
with override_settings(LANGUAGE_CODE=lang):
self.assertEqual(form.errors['email'], [expected_errors[lang]])
def test_valid_data(self):
for obj_tag, obj in (("normal email", self.user),
("invalid email", self.invalid_email_user)):
with self.subTest(tag=obj_tag):
new_email = f"{obj.username}@{obj.username}.onion"
form = self._init_form(data={'email': new_email}, instance=obj)
self.assertTrue(form.is_valid())
user = form.save(commit=False)
self.assertEqual(user.pk, obj.pk)
self.assertEqual(user.email, new_email)
def test_view_page(self):
page = self.app.get(reverse('email_update'), user=self.user)
self.assertEqual(page.status_code, 200)
self.assertEqual(len(page.forms), 1)
self.assertIsInstance(page.context['form'], EmailUpdateForm)
@override_settings(EMAIL_SUBJECT_PREFIX_FULL="TEST ")
def form_submission_tests(self, *, lang, obj=None):
obj = self.user if obj is None else obj
old_email = obj._clean_email
new_email = '{}@ps.org'.format(_snake_str(obj.username))
unchanged_email = obj.email
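        # The change is expected to be two-phase: the stored address stays
        # as-is until the user confirms via the link mailed to the new
        # address, while the old address receives a notification.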
with override_settings(LANGUAGE_CODE=lang):
page = self.app.get(reverse('email_update'), user=obj)
page.form['email'] = new_email
page = page.form.submit()
obj.refresh_from_db()
self.assertRedirects(
page,
reverse('profile_edit', kwargs={
'pk': obj.profile.pk, 'slug': obj.profile.autoslug})
)
self.assertEqual(obj.email, unchanged_email)
self.assertEqual(len(mail.outbox), 2)
test_subject = {
'en': "TEST Change of email address",
'eo': "TEST Retpoŝtadreso ĉe retejo ŝanĝita",
}
test_contents = {
old_email: {
'en': ("you (or someone on your behalf) requested a change of your email address",
f"The new address is: {new_email}",),
'eo': ("vi (aŭ iu vianome) petis ŝanĝon de via retpoŝta adreso",
f"La nova adreso estas: {new_email}",),
},
new_email: {
'en': ("you requested to change your email address",
"Please go to the following page to confirm your new email address:",),
'eo': ("vi petis ŝanĝon de via retpoŝta adreso",
"Bonvole iru al la jena paĝo por konfirmi vian novan retadreson:",),
},
}
for i, recipient in enumerate([old_email, new_email]):
self.assertEqual(mail.outbox[i].subject, test_subject[lang])
self.assertEqual(mail.outbox[i].from_email, settings.DEFAULT_FROM_EMAIL)
self.assertEqual(mail.outbox[i].to, [recipient])
for content in test_contents[recipient][lang]:
self.assertIn(content, mail.outbox[i].body)
def test_form_submit(self):
mail.outbox = []
self.form_submission_tests(lang='en')
mail.outbox = []
self.form_submission_tests(lang='eo')
def test_form_submit_for_invalid_email(self):
mail.outbox = []
self.form_submission_tests(obj=self.invalid_email_user, lang='en')
mail.outbox = []
self.form_submission_tests(obj=self.invalid_email_user, lang='eo')
class EmailStaffUpdateFormTests(EmailUpdateFormTests):
@classmethod
def setUpTestData(cls):
cls.supervisor = UserFactory(is_superuser=True, profile=None)
super().setUpTestData()
def _init_form(self, data=None, instance=None):
return EmailStaffUpdateForm(data=data, instance=instance)
def test_view_page(self):
page = self.app.get(
reverse('staff_email_update', kwargs={
'pk': self.user.profile.pk, 'slug': self.user.profile.autoslug}),
user=self.supervisor)
self.assertEqual(page.status_code, 200)
self.assertEqual(len(page.forms), 1)
self.assertIsInstance(page.context['form'], EmailStaffUpdateForm)
def form_submission_tests(self, *, lang, obj=None):
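        # Unlike the self-service flow, the staff flow applies the change
        # immediately: the stored address is updated at once and no
        # confirmation emails are sent.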
obj = self.user if obj is None else obj
new_email = '{}@ps.org'.format(_snake_str(obj.username))
page = self.app.get(
reverse('staff_email_update', kwargs={
'pk': obj.profile.pk, 'slug': obj.profile.autoslug}),
user=self.supervisor)
page.form['email'] = new_email
page = page.form.submit()
obj.refresh_from_db()
self.assertRedirects(
page,
reverse('profile_edit', kwargs={
'pk': obj.profile.pk, 'slug': obj.profile.autoslug})
)
self.assertEqual(obj.email, new_email)
self.assertEqual(len(mail.outbox), 0)
@tag('forms', 'forms-auth', 'auth')
class SystemPasswordResetRequestFormTests(AdditionalAsserts, WebTest):
@classmethod
def setUpTestData(cls):
cls.active_user = UserFactory()
cls.inactive_user = UserFactory(is_active=False)
cls.active_invalid_email_user = UserFactory(invalid_email=True)
cls.inactive_invalid_email_user = UserFactory(invalid_email=True, is_active=False)
cls.view_page_url = reverse('password_reset')
cls.view_page_success_url = reverse('password_reset_done')
def _init_form(self, data=None):
return SystemPasswordResetRequestForm(data=data)
@property
def _related_view(self):
return PasswordResetView
def test_init(self):
form = self._init_form()
# Verify that the expected fields are part of the form.
self.assertEqual(['email'], list(form.fields))
# Verify that the form's save method is protected in templates.
self.assertTrue(
hasattr(form.save, 'alters_data')
or hasattr(form.save, 'do_not_call_in_templates')
)
def test_blank_data(self):
# Empty form is expected to be invalid.
form = self._init_form(data={})
self.assertFalse(form.is_valid())
with override_settings(LANGUAGE_CODE='en'):
self.assertEqual(form.errors, {'email': ["This field is required."]})
with override_settings(LANGUAGE_CODE='eo'):
self.assertEqual(form.errors, {'email': ["Ĉi tiu kampo estas deviga."]})
def test_get_users(self):
with self.settings(PASSWORD_HASHERS=[
'django.contrib.auth.hashers.MD5PasswordHasher']):
active_md5_user1 = UserFactory()
with self.settings(PASSWORD_HASHERS=[
'django.contrib.auth.hashers.UnsaltedMD5PasswordHasher']):
active_md5_user2 = UserFactory(invalid_email=True)
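        # Accounts hashed with outdated hashers must still be found: a
        # password reset is presumably how they migrate to a modern hash.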
form = self._init_form()
        # All types of users with usable passwords are expected to be returned.
for user, expected_empty in [(self.active_user, True),
(self.inactive_user, True),
(self.active_invalid_email_user, False),
(self.inactive_invalid_email_user, False),
(active_md5_user1, True),
(active_md5_user2, False)]:
with self.subTest(email=user.email, active=user.is_active):
got_users = list(form.get_users(user._clean_email))
self.assertEqual(got_users, [user])
self.assertEqual(got_users[0].email, user._clean_email)
got_users = list(
form.get_users(f'{settings.INVALID_PREFIX}{user._clean_email}')
)
self.assertEqual(got_users, [] if expected_empty else [user])
if not expected_empty:
self.assertEqual(got_users[0].email, user._clean_email)
        # Users with unusable passwords are not expected to be returned.
active_nonepwd_user = UserFactory()
active_nonepwd_user.set_unusable_password()
active_nonepwd_user.save()
inactive_nonepwd_user = UserFactory(is_active=False)
inactive_nonepwd_user.set_unusable_password()
inactive_nonepwd_user.save()
for user in [active_nonepwd_user, inactive_nonepwd_user]:
with self.subTest(email=user.email, pwd=None, active=user.is_active):
got_users = list(form.get_users(user._clean_email))
self.assertEqual(got_users, [])
def _get_admin_message(self, user):
return (
f"User '{user.username}' tried to reset the login password,"
" but the account is deactivated"
)
def _get_email_content(self, active, lang):
test_data = {}
test_data[True] = {
'en': (
"TEST Password reset",
[
"You're receiving this email because you requested "
"a password reset for your user account",
"Please go to the following page and choose a new password:",
],
[
"you deactivated your account previously",
],
),
'eo': (
"TEST Nova pasvorto",
[
"Vi ricevis ĉi tiun retpoŝton ĉar vi petis pasvortan "
"rekomencigon por via uzanta konto",
"Bonvolu iri al la sekvanta paĝo kaj elekti novan pasvorton:",
],
[
"vi malaktivigis vian konton en la pasinteco",
],
),
}
test_data[False] = {
'en': (
"TEST Password reset",
[
"You're receiving this email because you requested "
"a password reset for your user account",
"Unfortunately, you deactivated your account previously, "
"and first it needs to be re-activated",
],
[
"Please go to the following page and choose a new password:",
],
),
'eo': (
"TEST Nova pasvorto",
[
"Vi ricevis ĉi tiun retpoŝton ĉar vi petis pasvortan "
"rekomencigon por via uzanta konto",
"Bedaŭrinde vi malaktivigis vian konton en la pasinteco, "
"kaj unue necesas ĝin restarigi",
],
[
"Bonvolu iri al la sekvanta paĝo kaj elekti novan pasvorton:",
],
)
}
return test_data[active][lang]
@override_settings(EMAIL_SUBJECT_PREFIX_FULL="TEST ")
def test_active_user_request(self):
# Active users are expected to receive an email with password reset link.
for user_tag, user in [("normal email", self.active_user),
("invalid email", self.active_invalid_email_user)]:
for lang in ['en', 'eo']:
with override_settings(LANGUAGE_CODE=lang):
with self.subTest(tag=user_tag, lang=lang):
# No warnings are expected on the auth log.
with self.assertLogs('PasportaServo.auth', level='WARNING') as log:
form = self._init_form({'email': user._clean_email})
self.assertTrue(form.is_valid())
form.save(
subject_template_name=self._related_view.subject_template_name,
email_template_name=self._related_view.email_template_name,
html_email_template_name=self._related_view.html_email_template_name,
)
# Workaround for lack of assertNotLogs.
auth_log.warning("No warning emitted.")
self.assertEqual(len(log.records), 1)
self.assertEqual(log.records[0].message, "No warning emitted.")
# The email message is expected to describe the password reset procedure.
title, expected_content, not_expected_content = self._get_email_content(True, lang)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, title)
self.assertEqual(mail.outbox[0].from_email, settings.DEFAULT_FROM_EMAIL)
self.assertEqual(mail.outbox[0].to, [user._clean_email])
for content in expected_content:
self.assertIn(content, mail.outbox[0].body)
for content in not_expected_content:
self.assertNotIn(content, mail.outbox[0].body)
mail.outbox = []
@override_settings(EMAIL_SUBJECT_PREFIX_FULL="TEST ")
def test_inactive_user_request(self):
        # Inactive users are expected to receive an email with instructions
        # for activating their account rather than a password reset link.
for user_tag, user in [("normal email", self.inactive_user),
("invalid email", self.inactive_invalid_email_user)]:
for lang in ['en', 'eo']:
with override_settings(LANGUAGE_CODE=lang):
with self.subTest(tag=user_tag, lang=lang):
# A warning about a deactivated account is expected on the auth log.
with self.assertLogs('PasportaServo.auth', level='WARNING') as log:
form = self._init_form({'email': user._clean_email})
self.assertTrue(form.is_valid())
form.save(
subject_template_name=self._related_view.subject_template_name,
email_template_name=self._related_view.email_template_name,
html_email_template_name=self._related_view.html_email_template_name,
)
self.assertEqual(len(log.records), 1)
self.assertStartsWith(log.records[0].message, self._get_admin_message(user))
# The warning is expected to include a reference number.
code = re.search(r'\[([A-F0-9-]+)\]', log.records[0].message)
self.assertIsNotNone(code)
code = code.group(1)
# The email message is expected to describe the account reactivation procedure.
title, expected_content, not_expected_content = self._get_email_content(False, lang)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, title)
self.assertEqual(mail.outbox[0].from_email, settings.DEFAULT_FROM_EMAIL)
self.assertEqual(mail.outbox[0].to, [user._clean_email])
for content in expected_content:
self.assertIn(content, mail.outbox[0].body)
for content in not_expected_content:
self.assertNotIn(content, mail.outbox[0].body)
# The email message is expected to include the reference number.
self.assertIn(code, mail.outbox[0].body)
mail.outbox = []
def test_view_page(self):
page = self.app.get(self.view_page_url)
self.assertEqual(page.status_code, 200)
self.assertEqual(len(page.forms), 1)
self.assertIsInstance(page.context['form'], self._init_form().__class__)
def test_form_submit(self):
for user in [self.active_user,
self.active_invalid_email_user,
self.inactive_user,
self.inactive_invalid_email_user]:
with self.subTest(email=user.email, active=user.is_active):
page = self.app.get(self.view_page_url)
page.form['email'] = user.email
page = page.form.submit()
self.assertEqual(page.status_code, 302)
self.assertRedirects(page, self.view_page_success_url)
class UsernameRemindRequestFormTests(SystemPasswordResetRequestFormTests):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.view_page_url = reverse('username_remind')
cls.view_page_success_url = reverse('username_remind_done')
def _init_form(self, data=None):
return UsernameRemindRequestForm(data=data)
@property
def _related_view(self):
return UsernameRemindView
def _get_admin_message(self, user):
return (
f"User '{user.username}' requested a reminder of the username,"
" but the account is deactivated"
)
def _get_email_content(self, active, lang):
test_data = {}
test_data[True] = {
'en': (
"TEST Username reminder",
[
"Your username, in case you've forgotten:",
],
[
"you deactivated your account previously",
],
),
'eo': (
"TEST Memorigo pri salutnomo",
[
"Via salutnomo, kaze ke vi forgesis:",
],
[
"vi malaktivigis vian konton en la pasinteco",
],
),
}
test_data[False] = {
'en': (
"TEST Username reminder",
[
"Your username, in case you've forgotten:",
"Unfortunately, you deactivated your account previously, "
"and first it needs to be re-activated",
],
[],
),
'eo': (
"TEST Memorigo pri salutnomo",
[
"Via salutnomo, kaze ke vi forgesis:",
"Bedaŭrinde vi malaktivigis vian konton en la pasinteco, "
"kaj unue necesas ĝin restarigi",
],
[],
),
}
return test_data[active][lang]
@tag('forms', 'forms-auth', 'forms-pwd', 'auth')
class SystemPasswordResetFormTests(AdditionalAsserts, WebTest):
@classmethod
def setUpTestData(cls):
cls.user = UserFactory(invalid_email=True)
cls.user.profile.email = cls.user.email
cls.user.profile.save(update_fields=['email'])
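        # Mirroring the address onto the profile presumably lets the
        # password-similarity validator check profile data (names, email) too.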
cls.form_class = SystemPasswordResetForm
cls.expected_fields = [
'new_password1',
'new_password2',
]
def test_init(self):
form_empty = self.form_class(self.user)
# Verify that the expected fields are part of the form.
self.assertEqual(set(self.expected_fields), set(form_empty.fields))
# Verify that fields are correctly marked for credential managers.
field_markups = {'new_password1': "new-password", 'new_password2': "new-password"}
if 'old_password' in self.expected_fields:
field_markups['old_password'] = "current-password"
for field, markup in field_markups.items():
with self.subTest(field=field):
self.assertIn('autocomplete', form_empty.fields[field].widget.attrs)
self.assertEqual(form_empty.fields[field].widget.attrs['autocomplete'], markup)
# Verify that the form's save method is protected in templates.
self.assertTrue(hasattr(form_empty.save, 'alters_data'))
@patch('core.mixins.is_password_compromised')
def test_blank_data(self, mock_pwd_check):
# Empty form is expected to be invalid.
form = self.form_class(self.user, data={})
mock_pwd_check.side_effect = AssertionError("password check API was unexpectedly called")
self.assertFalse(form.is_valid())
for field in self.expected_fields:
with self.subTest(field=field):
self.assertIn(field, form.errors)
@patch('core.mixins.is_password_compromised')
def test_password_similar_to_account_details(self, mock_pwd_check):
mock_pwd_check.return_value = (False, 0)
test_data = [
('username', "salutnomo", _snake_str(self.user.username)),
('email address', "retpoŝta adreso", self.user._clean_email.upper()),
('first name', "persona nomo", _snake_str(self.user.profile.first_name)),
('last name', "familia nomo", _snake_str(self.user.profile.last_name)),
]
for case, label_eo, transformed_value in test_data:
with self.subTest(case=case, password=transformed_value):
data = {field_name: transformed_value for field_name in self.expected_fields}
if 'old_password' in self.expected_fields:
data['old_password'] = "adm1n"
form = self.form_class(self.user, data=data)
self.assertFalse(form.is_valid())
self.assertIn('new_password1', form.errors)
with override_settings(LANGUAGE_CODE='en'):
self.assertStartsWith(
form.errors['new_password1'][0],
"The password is too similar to the ")
self.assertIn(case, form.errors['new_password1'][0])
with override_settings(LANGUAGE_CODE='eo'):
self.assertStartsWith(
form.errors['new_password1'][0],
"La pasvorto estas tro simila al la ")
self.assertIn(label_eo, form.errors['new_password1'][0])
mock_pwd_check.assert_not_called()
def test_weak_password(self):
weak_password_tests(
self,
'core.mixins.is_password_compromised',
self.form_class,
(self.user, ),
{field_name: "adm1n" for field_name in self.expected_fields},
'new_password1'
)
def test_strong_password(self):
user = strong_password_tests(
self,
'core.mixins.is_password_compromised',
self.form_class,
(self.user, ),
{field_name: "adm1n" for field_name in self.expected_fields})
self.assertEqual(user.pk, self.user.pk)
@patch('django.contrib.auth.views.default_token_generator.check_token')
def test_view_page(self, mock_check_token, lang='en'):
mock_check_token.return_value = True
user_id = urlsafe_base64_encode(force_bytes(self.user.pk))
with override_settings(LANGUAGE_CODE=lang):
page = self.app.get(
reverse('password_reset_confirm', kwargs={
'uidb64': user_id if isinstance(user_id, str) else user_id.decode(),
'token': PasswordResetConfirmView.reset_url_token})
)
self.assertEqual(page.status_code, 200, msg=repr(page))
self.assertEqual(len(page.forms), 1)
self.assertIsInstance(page.context['form'], SystemPasswordResetForm)
@skipIf(django.VERSION < (3, 0, 0), 'Localisation of reset URL token is in Dj3.0 and later')
def test_view_page_localised(self):
self.test_view_page(lang='eo')
@patch('core.mixins.is_password_compromised')
@patch('django.contrib.auth.views.default_token_generator.check_token')
@override_settings(LANGUAGE_CODE='en')
def test_form_submit(self, mock_check_token, mock_pwd_check):
mock_check_token.return_value = True # Bypass Django's token verification.
user_id = urlsafe_base64_encode(force_bytes(self.user.pk))
page = self.app.get(
reverse('password_reset_confirm', kwargs={
'uidb64': user_id if isinstance(user_id, str) else user_id.decode(),
'token': PasswordResetConfirmView.reset_url_token}),
user=self.user)
page.form['new_password1'] = page.form['new_password2'] = (
Faker._get_faker().password()
)
session = self.app.session
session[INTERNAL_RESET_SESSION_TOKEN] = None
session.save()
mock_pwd_check.return_value = (False, 0) # Treat password as a strong one.
self.assertEqual(self.user.email, self.user.profile.email)
self.assertStartsWith(self.user.email, settings.INVALID_PREFIX)
page = page.form.submit()
mock_pwd_check.assert_called_once()
self.assertEqual(page.status_code, 302, msg=repr(page))
self.assertRedirects(page, reverse('password_reset_complete'))
# The marked invalid email address is expected to be marked valid
# after submission of the form.
self.user.refresh_from_db()
self.user.profile.refresh_from_db()
self.assertEqual(self.user.email, self.user.profile.email)
self.assertFalse(self.user.email.startswith(settings.INVALID_PREFIX))
class SystemPasswordChangeFormTests(SystemPasswordResetFormTests):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.form_class = SystemPasswordChangeForm
cls.expected_fields = [
'new_password1',
'new_password2',
'old_password',
]
def test_view_page(self, lang='en'):
with override_settings(LANGUAGE_CODE=lang):
page = self.app.get(reverse('password_change'), user=self.user)
self.assertEqual(page.status_code, 200, msg=repr(page))
self.assertEqual(len(page.forms), 1)
self.assertIsInstance(page.context['form'], SystemPasswordChangeForm)
@patch('core.mixins.is_password_compromised')
def test_form_submit(self, mock_pwd_check):
page = self.app.get(reverse('password_change'), user=self.user)
page.form['old_password'] = "adm1n"
page.form['new_password1'] = "Strong & Courageous"
page.form['new_password2'] = "Strong & Courageous"
mock_pwd_check.return_value = (False, 0) # Treat password as a strong one.
page = page.form.submit()
mock_pwd_check.assert_called_once()
self.assertEqual(page.status_code, 302, msg=repr(page))
self.assertRedirects(page, reverse('password_change_done'))
def weak_password_tests(test_inst, where_to_patch, form_class, form_args, form_data, inspect_field):
test_data = [
(1, True, ""),
(2, False, {
'en': ("The password selected by you is not very secure. "
"Such combination of characters is known to cyber-criminals."),
'eo': ("La pasvorto elektita de vi ne estas tre sekura. "
"Tia kombino de karaktroj estas konata al ciber-krimuloj."),
}),
(100, False, {
'en': ("The password selected by you is too insecure. "
"Such combination of characters is very well-known to cyber-criminals."),
'eo': ("La pasvorto elektita de vi estas tro nesekura. "
"Tia kombino de karaktroj estas bone konata al ciber-krimuloj."),
}),
]
for number_seen, expected_result, expected_error in test_data:
        # Mock the response of the Pwned Passwords API to indicate a compromised
        # password, seen a specific number of times.
with patch(where_to_patch) as mock_pwd_check:
mock_pwd_check.return_value = (True, number_seen)
form = form_class(*form_args, data=form_data)
with test_inst.assertLogs('PasportaServo.auth', level='WARNING') as log:
test_inst.assertIs(form.is_valid(), expected_result, msg=repr(form.errors))
mock_pwd_check.assert_called_once_with(form_data[inspect_field])
if expected_result is False:
test_inst.assertIn(inspect_field, form.errors)
with override_settings(LANGUAGE_CODE='en'):
test_inst.assertEqual(
form.errors[inspect_field],
["Choose a less easily guessable password."])
test_inst.assertEqual(form.non_field_errors(), [expected_error['en']])
with override_settings(LANGUAGE_CODE='eo'):
test_inst.assertEqual(
form.errors[inspect_field],
["Bonvole elektu pli malfacile diveneblan pasvorton."])
test_inst.assertEqual(form.non_field_errors(), [expected_error['eo']])
test_inst.assertEqual(
log.records[0].message,
f"Password with HIBP count {number_seen} selected in {form_class.__name__}."
)
def strong_password_tests(test_inst, where_to_patch, form_class, form_args, form_data):
    # Mock the response of the Pwned Passwords API to indicate a non-compromised password.
with patch(where_to_patch) as mock_pwd_check:
mock_pwd_check.return_value = (False, 0)
form = form_class(*form_args, data=form_data)
test_inst.assertTrue(form.is_valid(), msg=repr(form.errors))
mock_pwd_check.assert_called_once()
return form.save(commit=False)
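# ---------------------------------------------------------------------------
# For reference, a minimal sketch of the contract the mocks above rely on.
# These tests patch 'core.mixins.is_password_compromised' and expect it to
# return a tuple of (is_compromised: bool, times_seen: int).  The function
# below is a hypothetical stand-in wired to the public HIBP Pwned Passwords
# range API (a k-anonymity lookup: only the first five hex digits of the
# SHA-1 hash ever leave the machine); the real helper in core.mixins may
# well differ.
def _is_password_compromised_sketch(password):
    import hashlib
    from urllib.request import urlopen
    digest = hashlib.sha1(password.encode('utf-8')).hexdigest().upper()
    prefix, suffix = digest[:5], digest[5:]
    with urlopen('https://api.pwnedpasswords.com/range/' + prefix) as response:
        # Each response line has the form 'HASH_SUFFIX:COUNT'.
        for line in response.read().decode('utf-8').splitlines():
            candidate, _, count = line.partition(':')
            if candidate == suffix:
                return True, int(count)
    return False, 0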
|
tejoesperanto/pasportaservo
|
tests/forms/test_auth_forms.py
|
Python
|
agpl-3.0
| 67,559
|
#!/usr/bin/env python
# Copyright (C) 2006-2021 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
import essentia
dependencies = ['lowlevel']
def compute(pool, options):
    minimumLength = options['segmentation']['minimumSegmentsLength']
    # Per-frame spectral RMS values and their time scopes, as produced by
    # the 'lowlevel' extractor this module depends on.
    energy = pool.descriptors['lowlevel']['spectral_rms']['values']
    scopes = pool.descriptors['lowlevel']['spectral_rms']['scopes']
    # Start time, in seconds, of each RMS frame.
    energyScopes = [scope[0] for scope in scopes]
    # Time, in seconds, between consecutive RMS frames.
    energyHop = energyScopes[1] - energyScopes[0]
    # Slide a window of the minimum segment length over the RMS curve,
    # advancing by one second per step; both sizes are expressed in units
    # of RMS frames.
    hopSizeDuration = 1.0
    hopSize = hopSizeDuration / energyHop
    frameSizeDuration = minimumLength
    frameSize = frameSizeDuration / energyHop
    frames = essentia.FrameGenerator(audio=energy, frameSize=frameSize, hopSize=hopSize, startFromZero=True)
    framesEnergy = [sum(frame) for frame in frames]
    # Keep the window with the highest total energy and report its bounds
    # as the segment onsets.
    maxFrameIndex = framesEnergy.index(max(framesEnergy))
    onsetStart = energyScopes[int(maxFrameIndex * hopSize)]
    onsetEnd = onsetStart + frameSizeDuration
    onsets = [onsetStart, onsetEnd]
    return onsets
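# ---------------------------------------------------------------------------
# Illustration only: the window search above restated in pure Python, so the
# algorithm can be followed without an Essentia build.  The helper name and
# arguments are hypothetical; essentia.FrameGenerator additionally handles
# padding and streaming, which this sketch ignores.
def _max_energy_window_sketch(energy, scope_times, window_duration, hop_duration=1.0):
    """Return [start, end] in seconds of the window with the largest summed RMS."""
    scope_hop = scope_times[1] - scope_times[0]           # seconds between RMS frames
    hop = max(int(hop_duration / scope_hop), 1)           # step size, in RMS frames
    window = int(window_duration / scope_hop)             # window length, in RMS frames
    best_start, best_energy = 0, float('-inf')
    for start in range(0, max(len(energy) - window, 0) + 1, hop):
        window_energy = sum(energy[start:start + window])
        if window_energy > best_energy:
            best_start, best_energy = start, window_energy
    start_time = scope_times[best_start]
    return [start_time, start_time + window_duration]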
|
MTG/essentia
|
src/python/essentia/extractor/segmentation_max_energy.py
|
Python
|
agpl-3.0
| 1,852
|
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# @author: Quentin Gigon <gigon.quentin@gmail.com>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from . import sms_sponsorship_registration_form
|
CompassionCH/compassion-modules
|
sms_sponsorship/forms/__init__.py
|
Python
|
agpl-3.0
| 379
|
from hitchtest import HitchPackage, utils
from hitchtest.environment import checks
from subprocess import check_output, check_call
from os.path import join, exists
from os import makedirs, environ
import hitchnode
import struct
import sys
ISSUES_URL = "http://github.com/hitchtest/hitchnode/issues"
class NodePackage(HitchPackage):
VERSIONS = [
#"0.1.100", "0.1.101", "0.1.102", "0.1.103", "0.1.104",
#"0.1.14", "0.1.15", "0.1.16", "0.1.17", "0.1.18", "0.1.19",
#"0.1.20", "0.1.21", "0.1.22", "0.1.23", "0.1.24", "0.1.25", "0.1.26", "0.1.27", "0.1.28", "0.1.29",
#"0.1.30", "0.1.31", "0.1.32", "0.1.33",
#"0.1.90", "0.1.91", "0.1.92", "0.1.93", "0.1.94", "0.1.95", "0.1.96", "0.1.97", "0.1.98", "0.1.99",
#"0.10.0", "0.10.1", "0.10.10", "0.10.11", "0.10.12", "0.10.13", "0.10.14", "0.10.15", "0.10.16",
#"0.10.17", "0.10.18", "0.10.19",
#"0.10.2", "0.10.20", "0.10.21", "0.10.22", "0.10.23", "0.10.24", "0.10.25", "0.10.26", "0.10.27", "0.10.28", "0.10.29",
#"0.10.3", "0.10.30", "0.10.31", "0.10.32", "0.10.33", "0.10.34", "0.10.35", "0.10.36", "0.10.37", "0.10.38", "0.10.39",
#"0.10.4", "0.10.40", "0.10.41",
#"0.10.5", "0.10.6", "0.10.7", "0.10.8", "0.10.9", "0.11.0", "0.11.1",
#"0.11.10", "0.11.11", "0.11.12", "0.11.13", "0.11.14", "0.11.15",
#"0.11.16", "0.11.2", "0.11.3", "0.11.4", "0.11.5", "0.11.6", "0.11.7", "0.11.8", "0.11.9",
#"0.12.0", "0.12.1", "0.12.2", "0.12.3", "0.12.4", "0.12.5", "0.12.6", "0.12.7", "0.12.8", "0.12.9",
#"0.2.0", "0.2.1", "0.2.2", "0.2.3", "0.2.4", "0.2.5", "0.2.6",
#"0.3.0", "0.3.1", "0.3.2", "0.3.3", "0.3.4", "0.3.5", "0.3.6", "0.3.7", "0.3.8",
#"0.4.0", "0.4.1", "0.4.10", "0.4.11", "0.4.12", "0.4.2", "0.4.3", "0.4.4", "0.4.5", "0.4.6", "0.4.7", "0.4.8", "0.4.9",
#"0.5.0", "0.5.1", "0.5.10", "0.5.2", "0.5.3", "0.5.4", "0.5.5", "0.5.6", "0.5.7", "0.5.8", "0.5.9",
#"0.6.0", "0.6.1", "0.6.10", "0.6.11", "0.6.12", "0.6.13", "0.6.14", "0.6.15", "0.6.16", "0.6.17", "0.6.18", "0.6.19",
#"0.6.2", "0.6.20", "0.6.21", "0.6.3", "0.6.4", "0.6.5", "0.6.6", "0.6.7", "0.6.8", "0.6.9",
#"0.7.0", "0.7.1", "0.7.10", "0.7.11", "0.7.12", "0.7.2", "0.7.3", "0.7.4", "0.7.5", "0.7.6", "0.7.7", "0.7.8", "0.7.9",
#"0.8.0", "0.8.1", "0.8.2", "0.8.3", "0.8.4", "0.8.5",
"0.8.6", "0.8.7", "0.8.8", "0.8.9", "0.8.10", "0.8.11", "0.8.12", "0.8.13", "0.8.14", "0.8.15", "0.8.16", "0.8.17", "0.8.18", "0.8.19",
"0.8.20", "0.8.21", "0.8.22", "0.8.23", "0.8.24", "0.8.25", "0.8.26", "0.8.27", "0.8.28",
"0.9.0", "0.9.1", "0.9.10", "0.9.11", "0.9.12", "0.9.2", "0.9.3", "0.9.4", "0.9.5", "0.9.6", "0.9.7", "0.9.8", "0.9.9",
"4.0.0", "4.1.0", "4.1.1", "4.1.2", "4.2.0", "4.2.1", "4.2.2", "4.2.3", "4.2.4", "4.2.5", "4.2.6",
"5.0.0", "5.1.0", "5.1.1",
"5.2.0", "5.3.0", "5.4.0", "5.4.1", "5.5.0", "5.6.0", "5.7.0", "5.7.1", "5.8.0", "5.9.0",
"6.11.0",
"7.10.0",
"8.1.2",
"9.2.0",
]
name = "Node"
def __init__(self, version="5.6.0", directory=None, bin_directory=None):
super(NodePackage, self).__init__()
self.version = self.check_version(version, self.VERSIONS, ISSUES_URL)
if directory is None:
if sys.platform == "darwin":
self.download_url = "https://nodejs.org/dist/v{0}/node-v{0}-darwin-x64.tar.gz".format(self.version)
self.subdirectory = "node-v{0}-darwin-x64".format(self.version)
else:
systembits = struct.calcsize("P") * 8
if systembits == 32:
self.download_url = "https://nodejs.org/dist/v{0}/node-v{0}-linux-x86.tar.gz".format(self.version)
self.subdirectory = "node-v{0}-linux-x86".format(self.version)
else:
self.download_url = "https://nodejs.org/dist/v{0}/node-v{0}-linux-x64.tar.gz".format(self.version)
self.subdirectory = "node-v{0}-linux-x64".format(self.version)
self.directory = join(self.get_build_directory(), "node-{}".format(self.version), self.subdirectory)
else:
self.directory = directory
self.bin_directory = bin_directory
#checks.packages(hitchnode.UNIXPACKAGES)
def verify(self):
version_output = check_output([self.node, "--version"]).decode('utf8')
if self.version not in version_output:
raise RuntimeError("Node version needed is {}, output is: {}.".format(self.version, version_output))
def build(self):
download_to = join(self.get_downloads_directory(), "node-{}.tar.gz".format(self.version))
utils.download_file(download_to, self.download_url)
if not exists(self.directory):
makedirs(self.directory)
utils.extract_archive(download_to, self.directory)
self.bin_directory = join(self.directory, self.subdirectory, "bin")
self.verify()
@property
def node(self):
if self.bin_directory is None:
raise RuntimeError("bin_directory not set.")
return join(self.bin_directory, "node")
@property
def npm(self):
if self.bin_directory is None:
raise RuntimeError("bin_directory not set.")
return join(self.bin_directory, "npm")
@property
def environment_vars(self):
return {"PATH": "{}:{}".format(environ.get('PATH', ''), self.bin_directory), }
def call_npm(self, *args):
"""Run npm."""
        # *args arrives as a tuple; convert it before concatenating to the list.
        check_call([self.npm] + list(args), env=self.environment_vars)
def call_node(self, *args):
"""Run node."""
        check_call([self.node] + list(args), env=self.environment_vars)
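# ---------------------------------------------------------------------------
# Hypothetical usage sketch: how a test harness might provision and drive a
# sandboxed Node.  Running this downloads a Node tarball into the hitchtest
# build directory, so it is kept behind a main guard; the pinned version is
# illustrative.
if __name__ == "__main__":
    package = NodePackage(version="5.6.0")
    package.build()                  # download + unpack on first use, then verify()
    package.call_node("--version")   # runs the sandboxed interpreter
    package.call_npm("--version")    # npm from the same unpacked distribution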
|
hitchtest/hitchnode
|
hitchnode/node_package.py
|
Python
|
agpl-3.0
| 5,790
|
# pylint: disable=missing-docstring
"""
This must be run only after seed_permissions_roles.py!
Creates default roles for all users in the provided course. Just runs through
Enrollments.
"""
from django.core.management.base import BaseCommand
from openedx.core.djangoapps.django_comment_common.models import assign_default_role_on_enrollment
from student.models import CourseEnrollment
class Command(BaseCommand):
help = 'Add roles for all users in a course.'
def add_arguments(self, parser):
parser.add_argument('course_id',
help='the edx course_id')
def handle(self, *args, **options):
course_id = options['course_id']
print('Updated roles for ', end=' ')
for i, enrollment in enumerate(CourseEnrollment.objects.filter(course_id=course_id, is_active=1), start=1):
assign_default_role_on_enrollment(None, enrollment)
if i % 1000 == 0:
print('{0}...'.format(i), end=' ')
print()
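# ---------------------------------------------------------------------------
# Hypothetical invocation sketch (the course id is illustrative; as the
# docstring above notes, seed_permissions_roles.py must have run first):
#
#   ./manage.py lms assign_roles_for_course course-v1:edX+DemoX+Demo_Course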
|
cpennington/edx-platform
|
lms/djangoapps/discussion/management/commands/assign_roles_for_course.py
|
Python
|
agpl-3.0
| 1,007
|