| repo_name | path | language | license | size |
|---|---|---|---|---|
import copy
import uuid
# Returns a copy of the exchange with a fresh UUID; the in/out messages and
# properties are deep-copied so the copy can be mutated independently.
def copy_exchange(ex):
exchange = copy.copy(ex)
exchange.id = uuid.uuid4()
exchange.in_msg = copy.deepcopy(ex.in_msg)
exchange.out_msg = copy.deepcopy(ex.out_msg)
exchange.properties = copy.deepcopy(ex.properties)
return exchange
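# Usage sketch (hypothetical Exchange instance `ex`; illustration only):
#   clone = copy_exchange(ex)
#   assert clone.id != ex.id
#   assert clone.properties is not ex.properties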
| vaibhav-sinha/pypipeline | pypipeline/util/ExchangeUtil.py | Python | gpl-3.0 | 280 |
""" Functions for processing marker data. """
__version__ = '0.1'
__author__ = 'Kjartan Halvorsen'
import numpy as np
import math
import csv
import itertools
import unittest
import numpy.testing as npt
import matplotlib.pyplot as pyplot
import matplotlib.dates as mdates
from scipy.interpolate import interp1d
import scipy.optimize as optimize
from scipy.integrate import cumtrapz
from scipy.signal import detrend, bessel, filtfilt
import scipy.io as sio
from datetime import datetime, timedelta, date
from nvg.maths import quaternions as quat
from nvg.algorithms import orientation
#from nvg.utilities import time_series
from nvg.ximu import pointfinder
from nvg.ximu import kinematics
from nvg.io import qualisys_tsv as qtsv
from cyclicpython import cyclic_path
from cyclicpython.algorithms import kinematics as cpkinematics
#from cyclicpython.algorithms import fomatlab as fomatlab
from cyclicpython.algorithms import ekf as cpekf
from cyclicpython.algorithms import detect_peaks
from cyclicpython import cyclic_planar as cppl
def get_marker_data_cycles(md, frames2use, plotResults=True):
"""
Will find start and end of gait cycles using the trajectory of the left
ankle marker
Arguments
md -> marker data object
frames2use -> list of timestamps
Returns
list of tuples (startTime, endTime) of each cycle
list of tuples (startInd, endInd) of each cycle
"""
# Find initial contact from ankle marker data
ankled = md.marker('ANKLE').position(frames2use)
ics = detect_heel_strike(frames2use, ankled[2,:], plotResults=plotResults)
cycledtaInds = kinematics.fix_cycles(ics, k=0.5, plotResults=plotResults)
cycledtaTimes = [(frames2use[start_], frames2use[end_])
for (start_, end_) in cycledtaInds]
return (cycledtaTimes, cycledtaInds)
def detect_heel_strike(tvec, ankleposz, wn=0.2, posThr=[0.03, 0.08],
velThr = [-100, 0], accThr = [5, 100],
plotResults=False):
"""
Returns a list of heelstrikes detected from the z-component of the ankle
marker. From the ankle position, velocity and acceleration are computed.
The following heuristic is used to detect the heel strike:
minpos + posThr[0] < position < minpos + posThr[1]
velThr[0] < velocity < velThr[1]
accThr[0] < acc < accThr[1]
Arguments:
tvec -> Time vector (N,)
ankleposz -> ankle position in vertical direction (N,)
wn -> cutoff frequency of the low pass filter (Nyquist freq = 1)
plotResults -> If true do plot
Returns:
pks <- list of indices where peaks are found
"""
# Lowpass filter using a Bessel filter
[b,a] = bessel(4, wn)
ap = filtfilt(b,a,ankleposz)
dt = np.mean(np.diff(tvec))
av = np.diff(ap)/dt
aa = np.diff(av)/dt
apmin = np.min(ap)
okinds = np.where( np.logical_and( np.logical_and(
np.logical_and( ap[:-2] > (apmin + posThr[0]),
ap[:-2] < (apmin + posThr[1])),
np.logical_and( av[:-1] > velThr[0],
av[:-1] < velThr[1])),
np.logical_and( aa > accThr[0],
aa < accThr[1])))
aaa = np.empty(aa.shape)
aaa[:] = np.nan
aaa[okinds] = aa[okinds]
#pks = detect_peaks.detect_peaks(aa, mph=10, mpd=10)
pks = detect_peaks.detect_peaks(aaa, mph=5, mpd=40)
pks = np.intersect1d(pks, okinds)
if plotResults:
pyplot.figure()
pyplot.subplot(3,1,1)
pyplot.plot(tvec, ankleposz, alpha=0.3)
pyplot.plot(tvec, ap)
for ic_ in pks:
pyplot.plot([tvec[ic_], tvec[ic_]], [-0.3, 0], 'm', alpha=0.5)
pyplot.plot([tvec[0], tvec[-1]], [apmin+posThr[0], apmin+posThr[0]], 'y')
pyplot.plot([tvec[0], tvec[-1]], [apmin+posThr[1], apmin+posThr[1]], 'c')
pyplot.ylim((-0.3, -0.1))
pyplot.subplot(3,1,2)
pyplot.plot(tvec[:-1], av)
for ic_ in pks:
pyplot.plot([tvec[ic_], tvec[ic_]], [-1, 1], 'm', alpha=0.6)
pyplot.plot([tvec[0], tvec[-1]], [velThr[0], velThr[0]], 'y')
pyplot.plot([tvec[0], tvec[-1]], [velThr[1], velThr[1]], 'c')
pyplot.ylim((-1, 1))
pyplot.subplot(3,1,3)
pyplot.plot(tvec[:-2], aa)
for ic_ in pks:
pyplot.plot([tvec[ic_], tvec[ic_]], [-10, 10], 'm', alpha=0.6)
pyplot.plot([tvec[0], tvec[-1]], [accThr[0], accThr[0]], 'y')
pyplot.plot([tvec[0], tvec[-1]], [accThr[1], accThr[1]], 'c')
pyplot.ylim((-10, 10))
return pks
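# Hedged usage sketch (synthetic data, not from the original module): a toy
# 2 Hz, 0.1 m vertical oscillation sampled at 100 Hz stands in for the ankle
# trajectory; real marker data will likely need tuned posThr/velThr/accThr.
def _example_detect_heel_strike():
    tvec = np.arange(0.0, 10.0, 0.01)  # 10 s at 100 Hz
    ankleposz = -0.2 + 0.1 * np.sin(2 * np.pi * 2.0 * tvec)
    return detect_heel_strike(tvec, ankleposz, plotResults=False)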
def split_in_cycles(tvec, dta, cycledta, indices=False, minCycleLength=80):
""" Will split the data matrix dta (timeseries in columns) into cycles given
in cycledta.
Arguments:
tvec -> time vector (N,)
dta -> data matrix (N,m)
cycledta -> list of (start,stop) times corresponding to the times in tvec.
OR, indices (start, stop)
indices -> if True, then cycledta contains indices, not times.
Returns tuple:
timespl <- the time vector split in cycles, list
dtaspl <- the data matrix split in cycles, list
"""
timespl = []
dtaspl = []
tv = np.asarray(tvec).ravel() # Make sure it is a numpy 1d-array
for (cstart,cstop) in cycledta:
if indices:
indStart = [cstart]
indEnd = [cstop]
else:
(indStart,) = np.nonzero(tv < cstart)
(indEnd,) = np.nonzero(tv > cstop)
if len(indStart) == 0:
indStart = [0]
if len(indEnd) == 0:
indEnd = [len(tv)-1]
if indEnd[0] - indStart[-1] > minCycleLength:
timespl.append(tv[indStart[-1]:indEnd[0]])
if dta.ndim == 1:
dtaspl.append(dta[indStart[-1]:indEnd[0]])
else:
dtaspl.append(dta[indStart[-1]:indEnd[0],:])
return (timespl, dtaspl)
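# Intended use (sketch): split marker data into the gait cycles found above.
#   cycledtaTimes, cycledtaInds = get_marker_data_cycles(md, frames2use)
#   timespl, dtaspl = split_in_cycles(frames2use, dta, cycledtaTimes)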
def resample_timeseries(x, t, tnew, kind='linear'):
""" Resamples the timeseries x defined at times t by interpolation """
f = interp1d(t, x, kind=kind, axis=0)
return f(tnew)
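# Example: resample a series onto a uniform 100 Hz grid.
#   tnew = np.arange(t[0], t[-1], 0.01)
#   xnew = resample_timeseries(x, t, tnew, kind='cubic')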
def check_sync(syncfilename="/home/kjartan/Dropbox/projekt/nvg/data/solna09/S7/NVG_2012_S7_sync.tsv"):
"""
Loads the tsv file with markerdata from the synchronization experiment. Plots the z-coordinate of the marker
'clapper1'.
"""
title = "Checking sync of file %s" %syncfilename
md = qtsv.loadQualisysTSVFile(syncfilename)
timeToSync = md.syncTime - md.timeStamp
clapper = md.marker('clapper1')
clapposz = clapper.position(md.frameTimes).transpose()[:,2]
pyplot.figure()
pyplot.plot(md.frameTimes, clapposz)
pyplot.plot(timeToSync.total_seconds()*np.array([1, 1]), [-0.3, 1])
pyplot.title(title)
def test_three_point_angle():
p0 = np.array([0.0,0,0])
p1 = np.array([1.0,0,0])
p2 = np.array([1.0,1.0,0])
p3 = np.array([0.0,2.0,0])
npt.assert_almost_equal( _three_point_angle(p1,p0,p2,np.array([0,0,1.0])), np.pi/4)
npt.assert_almost_equal( _three_point_angle_projected(p1,p0,p2,np.array([0,0,1.0])), np.pi/4 )
npt.assert_almost_equal( _three_point_angle(p1,p0,p3,np.array([0,0,1.0])), np.pi/2)
npt.assert_almost_equal( _three_point_angle_projected(p1,p0,p3,np.array([0,0,1.0])), np.pi/2 )
def _four_point_angle(pp1, pp2, pd1, pd2, posdir):
"""
Computes the angle between the lines pp1-pp2 and pd1-pd2.
posdir is a vector in 3D giving the positive direction of rotation, using the right-hand rule. The angle is measured as a rotation from the line pp1-pp2 to the line pd1-pd2.
"""
v1 = pp1-pp2
v2 = pd1-pd2
return _two_vec_angle(v1,v2,posdir)
def _three_point_angle(p1, pcentral, p2, posdir):
"""
Will compute the angle between the three points, using pcentral as the center.
posdir is a vector in 3D giving the positive direction of rotation, using the right-hand rule. The angle is measured from 0 to 360 degrees as a rotation from (p1-pcentral) to (p2-pcentral).
"""
v1 = p1-pcentral
v2 = -(p2-pcentral)
return _two_vec_angle(v1,v2,posdir)
def _two_vec_angle(v1,v2,posdir):
if v1.ndim == 1:
v1.shape += (1,)
v2.shape += (1,)
theta = np.zeros((v1.shape[1],))
for i in range(v1.shape[1]):
v1_ = v1[:,i]
v2_ = v2[:,i]
theta[i] = np.arccos( np.inner(v1_, v2_) / np.linalg.norm(v1_) / np.linalg.norm(v2_) )
v3_ = np.cross(v1_,v2_)
if (np.inner(v3_, posdir) < 0):
#theta[i] = 2*np.pi - theta[i]
theta[i] = - theta[i]
return theta
def _four_point_angle_projected(pp1, pp2, pd1, pd2, posdir):
"""
Computes the angle between the lines pp1-pp2 and pd1-pd2.
posdir is a vector in 3D giving the positive direction of rotation, using the right-hand rule. The angle is measured as a rotation from the line pp1-pp2 to the line pd1-pd2.
"""
v1 = pp1-pp2
v2 = pd1-pd2
return _two_vec_angle_projected(v1,v2,posdir)
def _three_point_angle_projected(p1, pcentral, p2, posdir):
"""
Will compute the angle between the three points, using pcentral as the center.
posdir is a vector in 3D giving the positive direction of rotation, using the right-hand rule. The angle is measured from 0 to 360 degrees as a rotation from (p1-pcentral) to (p2-pcentral).
"""
v1 = p1-pcentral
v2 = p2-pcentral
return _two_vec_angle_projected(v1,v2,posdir)
def _two_vec_angle_projected(v1,v2,posdir):
Pr = np.identity(3) - np.outer(posdir,posdir)
if v1.ndim == 1:
v1.shape += (1,)
v2.shape += (1,)
theta = np.zeros((v1.shape[1],))
for i in range(v1.shape[1]):
v1_ = np.dot( Pr, v1[:,i] )
v2_ = np.dot( Pr, v2[:,i] )
theta[i] = np.arccos( np.inner(v1_, v2_) / np.linalg.norm(v1_) / np.linalg.norm(v2_) )
v3_ = np.cross(v1_,v2_)
if (np.inner(v3_, posdir) < 0):
theta[i] = 2*np.pi - theta[i]
return theta
| alfkjartan/nvgimu | nvg/ximu/markerdata.py | Python | gpl-3.0 | 10,131 |
import pytest
import numpy as np
from numpy.testing import assert_array_almost_equal, assert_almost_equal
from scipy import constants
from copy import deepcopy
import gromacs
import mdpow.config
import mdpow.fep
def test_molar_to_nm3():
assert_almost_equal(mdpow.fep.molar_to_nm3(1.5), 0.9033212684)
assert_almost_equal(mdpow.fep.molar_to_nm3(55.5), 33.42288693449999)
def test_bar_to_kJmolnm3():
assert_almost_equal(mdpow.fep.bar_to_kJmolnm3(1.0), 0.0602214179)
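# Where the reference values come from: 1 mol/L corresponds to N_A molecules
# per 1e24 nm^3 (since 1 L = 1e24 nm^3), so molar_to_nm3(c) = c * 0.602214179,
# e.g. 1.5 M -> 1.5 * 0.602214179 ≈ 0.9033212684 nm^-3. Likewise
# 1 bar = 1e5 J/m^3 = 1e-22 J/nm^3 = N_A * 1e-22 * 1e-3 ≈ 0.0602214179 kJ mol^-1 nm^-3.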
def test_kcal_to_kJ():
assert_almost_equal(mdpow.fep.kcal_to_kJ(10.0), 41.84)
def test_kJ_to_kcal():
assert_almost_equal(mdpow.fep.kJ_to_kcal(41.84), 10.0)
def test_kBT_to_kJ():
ref = constants.N_A*constants.k*1e-3
assert_almost_equal(mdpow.fep.kBT_to_kJ(1, 1), ref)
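# kBT_to_kJ(1, 1) is k_B*T expressed per mole in kJ: N_A * k_B * 1e-3
# = R/1000 ≈ 0.0083145 kJ/mol, which is exactly the `ref` computed above.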
class TestFEPschedule(object):
reference = {
'VDW':
{'couple_lambda0': 'vdw',
'couple_lambda1': 'none',
'description': 'decoupling vdw --> none',
'label': 'VDW',
'lambdas': np.array([ 0. , 0.05, 0.1 , 0.2 , 0.3 , 0.4 , 0.5 , 0.6 , 0.65,
0.7 , 0.75, 0.8 , 0.85, 0.9 , 0.95, 1. ]),
'name': 'vdw',
'sc_alpha': 0.5,
'sc_power': 1,
'sc_sigma': 0.3},
'Coulomb':
{'couple_lambda0': 'vdw-q',
'couple_lambda1': 'vdw',
'description': 'dis-charging vdw+q --> vdw',
'label': 'Coul',
'lambdas': np.array([ 0. , 0.25, 0.5 , 0.75, 1. ]),
'name': 'Coulomb',
'sc_alpha': 0,
'sc_power': 1,
'sc_sigma': 0.3}
}
@pytest.fixture
def cfg(self):
# load default bundled configuration
return mdpow.config.get_configuration()
def test_VDW(self, cfg):
return self._test_schedule(cfg, 'VDW')
def test_Coulomb(self, cfg):
return self._test_schedule(cfg, 'Coulomb')
@pytest.mark.parametrize('component', ['VDW', 'Coulomb'])
def test_copy(self, cfg, component):
section = 'FEP_schedule_{0}'.format(component)
schedule = deepcopy(mdpow.fep.FEPschedule.load(cfg, section))
reference = self.reference[component]
for k in schedule:
assert k in reference, "additional entry {0} in runinput.yml".format(k)
for k in reference:
assert k in schedule, "missing entry {0} in runinput.yml".format(k)
for k in schedule.keys():
if k == "lambdas":
assert_array_almost_equal(schedule[k], reference[k],
err_msg="FEP schedule {0} mismatch".format(k))
else:
assert schedule[k] == reference[k], \
"mismatch between loaded FEP schedule entry {0} and reference".format(k)
@pytest.mark.parametrize('component', ['VDW', 'Coulomb'])
def test_write(self, cfg, component, tmp_path):
filename = tmp_path / "cfg.yaml"
cfg.write(filename)
new_cfg = mdpow.config.get_configuration(filename)
assert new_cfg.conf == cfg.conf
@pytest.mark.parametrize("x,ref", [
("test", False),
([1, 1, 2, 3], True),
({1, 2, 3}, True),
(None, False),
])
def test_iterable(self, x, ref):
assert mdpow.config.iterable(x) == ref
def _test_schedule(self, cfg, component):
section = 'FEP_schedule_{0}'.format(component)
schedule = mdpow.fep.FEPschedule.load(cfg, section)
reference = self.reference[component]
for k in schedule:
assert k in reference, "additional entry {0} in runinput.yml".format(k)
for k in reference:
assert k in schedule, "missing entry {0} in runinput.yml".format(k)
for k in schedule.keys():
if k == "lambdas":
assert_array_almost_equal(schedule[k], reference[k],
err_msg="FEP schedule {0} mismatch".format(k))
else:
assert schedule[k] == reference[k], \
"mismatch between loaded FEP schedule entry {0} and reference".format(k)
def test_skip_empty_entries(self, cfg, section="FEP_schedule_Coulomb"):
# remove some entries
del cfg.conf[section]['name'] # string
del cfg.conf[section]['lambdas'] # array
with pytest.warns(mdpow.config.NoOptionWarning):
schedule = mdpow.fep.FEPschedule.load(cfg, section)
assert schedule['label'] == "Coul"
assert schedule['sc_power'] == 1
assert 'name' not in schedule
assert 'lambdas' not in schedule
| Becksteinlab/MDPOW | mdpow/tests/test_fep.py | Python | gpl-3.0 | 4,710 |
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Object Event Tests
$Id: test_objectevent.py 67630 2006-04-27 00:54:03Z jim $
"""
import unittest
import zope.component.event
from zope.testing import doctest
from zope.app.container.contained import Contained, ObjectRemovedEvent
from zope.app.container.interfaces import IContained, IObjectRemovedEvent
from zope.app.container.sample import SampleContainer
from zope.app.testing.placelesssetup import setUp, tearDown
from zope.app.testing import ztapi
class TestObjectEventNotifications(unittest.TestCase):
def setUp(self):
self.callbackTriggered = False
setUp()
def tearDown(self):
tearDown()
def testNotify(self):
events = []
def record(*args):
events.append(args)
ztapi.subscribe([IContained, IObjectRemovedEvent], None, record)
item = Contained()
event = ObjectRemovedEvent(item)
zope.component.event.objectEventNotify(event)
self.assertEqual([(item, event)], events)
def testNotifyNobody(self):
# Check that notify won't raise an exception in the absence
# of subscribers.
events = []
item = Contained()
evt = ObjectRemovedEvent(item)
zope.component.event.objectEventNotify(evt)
self.assertEqual([], events)
def testVeto(self):
zope.component.provideHandler(zope.component.event.objectEventNotify)
container = SampleContainer()
item = Contained()
# This will fire an event.
container['Fred'] = item
class Veto(Exception):
pass
def callback(item, event):
self.callbackTriggered = True
self.assertEqual(item, event.object)
raise Veto
ztapi.subscribe([IContained, IObjectRemovedEvent], None, callback)
# del container['Fred'] will fire an ObjectRemovedEvent event.
self.assertRaises(Veto, container.__delitem__, 'Fred')
def test_suite():
return unittest.TestSuite((
unittest.makeSuite(TestObjectEventNotifications),
))
if __name__=='__main__':
unittest.main(defaultTest='test_suite')
| Donkyhotay/MoonPy | zope/app/event/tests/test_objectevent.py | Python | gpl-3.0 | 2,785 |
# coding=utf-8
import unittest
"""881. Boats to Save People
https://leetcode.com/problems/boats-to-save-people/description/
The `i`-th person has weight `people[i]`, and each boat can carry a maximum
weight of `limit`.
Each boat carries at most 2 people at the same time, provided the sum of the
weight of those people is at most `limit`.
Return the minimum number of boats to carry every given person. (It is
guaranteed each person can be carried by a boat.)
**Example 1:**
**Input:** people = [1,2], limit = 3
**Output:** 1
**Explanation:** 1 boat (1, 2)
**Example 2:**
**Input:** people = [3,2,2,1], limit = 3
**Output:** 3
**Explanation** : 3 boats (1, 2), (2) and (3)
**Example 3:**
**Input:** people = [3,5,3,4], limit = 5
**Output:** 4
**Explanation** : 4 boats (3), (3), (4), (5)
**Note** :
* `1 <= people.length <= 50000`
* `1 <= people[i] <= limit <= 30000`
Similar Questions:
"""
class Solution(object):
    def numRescueBoats(self, people, limit):
        """Greedy two-pointer solution: after sorting, pair the heaviest
        remaining person with the lightest one whenever they fit together;
        otherwise the heaviest person takes a boat alone.
        :type people: List[int]
        :type limit: int
        :rtype: int
        """
        people.sort()
        i, j = 0, len(people) - 1
        boats = 0
        while i <= j:
            if people[i] + people[j] <= limit:
                i += 1  # the lightest person shares this boat
            j -= 1  # the heaviest person always departs
            boats += 1
        return boats


class Test(unittest.TestCase):
    def test(self):
        s = Solution()
        self.assertEqual(s.numRescueBoats([1, 2], 3), 1)
        self.assertEqual(s.numRescueBoats([3, 2, 2, 1], 3), 3)
        self.assertEqual(s.numRescueBoats([3, 5, 3, 4], 5), 4)
if __name__ == "__main__":
unittest.main()
| openqt/algorithms | leetcode/python/lc881-boats-to-save-people.py | Python | gpl-3.0 | 1,264 |
#-----------------------------------------------------------------------------
# Copyright (c) 2005-2016, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
"""
PyWin32 package 'win32com' extends its __path__ attribute with the win32comext
directory and thus PyInstaller is not able to find modules in it. For example
module 'win32com.shell' is in reality 'win32comext.shell'.
>>> win32com.__path__
['win32com', 'C:\\Python27\\Lib\\site-packages\\win32comext']
"""
import os
from PyInstaller.utils.hooks import logger, get_module_file_attribute
def pre_safe_import_module(api):
win32com_dir = os.path.dirname(get_module_file_attribute('win32com'))
comext_dir = os.path.join(os.path.dirname(win32com_dir), 'win32comext')
logger.debug('win32com: extending __path__ with dir %r' % comext_dir)
# Append the __path__ where PyInstaller will look for 'win32com' modules.
api.append_package_path(comext_dir)
| ijat/Hotspot-PUTRA-Auto-login | PyInstaller-3.2/PyInstaller/hooks/pre_safe_import_module/hook-win32com.py | Python | gpl-3.0 | 1,177 |
from docutils import statemachine
from docutils.parsers.rst import Directive #pylint: disable=unused-import
import re
ALG_DOCNAME_RE = re.compile(r'^([A-Z][a-zA-Z0-9]+)-v([0-9][0-9]*)$')
FIT_DOCNAME_RE = re.compile(r'^([A-Z][a-zA-Z0-9]+)$')
#----------------------------------------------------------------------------------------
def algorithm_name_and_version(docname):
"""
Returns the name and version of an algorithm based on the name of the
document supplied. The expected name of the document is "AlgorithmName-v?", which
is the name of the file with the extension removed
Arguments:
docname (str): The name of the document as supplied by docutils. Can contain slashes to indicate a path
Returns:
tuple: A tuple containing two elements (name, version). In the case of a fit function the version is None.
"""
# simple check to see if it is an algorithm or fit function
is_alg = ("algorithms" in docname)
is_fit = ("fitfunctions" in docname)
# docname includes path, using forward slashes, from root of documentation directory
docname = docname.split("/")[-1]
# name for an algorithm
if is_alg:
match = ALG_DOCNAME_RE.match(docname)
if not match or len(match.groups()) != 2:
raise RuntimeError("Document filename '%s.rst' does not match the expected format: AlgorithmName-vX.rst" % docname)
grps = match.groups()
return (str(grps[0]), int(grps[1]))
# name for a fit function
if is_fit:
match = FIT_DOCNAME_RE.match(docname)
if not match or len(match.groups()) != 1:
raise RuntimeError("Document filename '%s.rst' does not match the expected format: FitFunctionName.rst" % docname)
return (str(match.groups()[0]), None)
# fail now
raise RuntimeError("Faild to fine ame from document filename ")
#----------------------------------------------------------------------------------------
class BaseDirective(Directive):
"""
Contains shared functionality for Mantid custom directives.
"""
has_content = False
final_argument_whitespace = True
rst_lines = None
def add_rst(self, text):
"""
Appends given reST into a managed list. It is NOT inserted into the
document until commit_rst() is called
Args:
text (str): reST to track
"""
if self.rst_lines is None:
self.rst_lines = []
self.rst_lines.extend(statemachine.string2lines(text))
def commit_rst(self):
"""
Inserts the currently tracked rst lines into the state_machine
"""
self.state_machine.insert_input(self.rst_lines, self.source())
self.rst_lines = []
def source(self):
"""
Returns the full path to the source document
"""
return self.state.document.settings.env.docname
def make_header(self, name, pagetitle=False, level=2):
"""
Makes a ReStructuredText title from the algorithm's name.
Args:
name (str): The text of the header, typically the algorithm's name.
pagetitle (bool): If True, this sets the level to 1 (overriding any other value).
level (int): 1-4 the level of the heading to be used.
Returns:
str: ReST formatted header with algorithm_name as content.
"""
level_dict = {1:"=", 2:"-", 3:"#", 4:"^"}
if pagetitle:
level = 1
if level not in level_dict:
env = self.state.document.settings.env
env.app.warn('base.make_header - Did not understand level ' +str(level))
level = 2
line = "\n" + level_dict[level] * (len(name)) + "\n"
if level == 1:
return line + name + line
else:
return name + line
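# Example: make_header("Rebin", pagetitle=True) returns the level-1 header
#
#   =====
#   Rebin
#   =====
#
# while make_header("Rebin") returns the level-2 form "Rebin\n-----\n".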
#----------------------------------------------------------------------------------------
class AlgorithmBaseDirective(BaseDirective):
"""
Specialized base directive for an algorithm
"""
algm_name = None
algm_version = None
def run(self):
"""
The main entry point that docutils calls.
It calls self.execute to do the main work. If an
algorithm doesn't exist then the directive is
skipped and a debug message is emitted.
Derived classes should override execute() and insert
whatever rst they require with self.add_rst()
"""
nodes = []
skip_msg = self.skip()
if skip_msg != "":
self.add_rst("**ERROR: %s**" % skip_msg)
else:
nodes = self.execute()
if self.rst_lines is not None:
self.commit_rst()
return nodes
def skip(self):
"""
Override and return a string depending on whether the directive
should be skipped. If empty then the directive should be processed
otherwise the string should contain the error message
The default is to skip (and warn) if the algorithm is not known.
Returns:
str: The error message if the directive should be skipped, otherwise empty
"""
from mantid.api import AlgorithmFactory, FunctionFactory
name, version = self.algorithm_name(), self.algorithm_version()
msg = ""
if version is None: # it is a fit function
if name in FunctionFactory.getFunctionNames():
return ""
else:
msg = "No fit function '%s', skipping directive" % name
else:
if AlgorithmFactory.exists(name, version):
return ""
else:
msg = "No algorithm '%s' version '%d', skipping directive" % (name, version)
# warn the user
if len(msg) > 0:
env = self.state.document.settings.env
env.app.verbose(msg)
return msg
def algorithm_name(self):
"""
Returns the algorithm name as parsed from the document name
"""
if self.algm_name is None:
self._set_algorithm_name_and_version()
return self.algm_name
def algorithm_version(self):
"""
Returns the algorithm version as parsed from the document name
"""
if self.algm_version is None:
self._set_algorithm_name_and_version()
return self.algm_version
def create_mantid_algorithm_by_name(self, algorithm_name):
"""
Creates and initializes a Mantid algorithm using the latest version.
Args:
algorithm_name (str): The name of the algorithm to create.
Returns:
algorithm: An instance of a Mantid algorithm.
"""
from mantid.api import AlgorithmManager
alg = AlgorithmManager.createUnmanaged(algorithm_name)
alg.initialize()
return alg
def create_mantid_algorithm(self, algorithm_name, version):
"""
Creates and initializes a Mantid algorithm.
Args:
algorithm_name (str): The name of the algorithm to create.
version (int): Version of the algorithm to create
Returns:
algorithm: An instance of a Mantid algorithm.
"""
from mantid.api import AlgorithmManager
alg = AlgorithmManager.createUnmanaged(algorithm_name, version)
alg.initialize()
return alg
def create_mantid_ifunction(self, function_name):
"""
Creates and initializes a Mantid IFunction.
Args:
function_name (str): The name of the function to use.
Returns:
ifunction: An instance of a Mantid IFunction
"""
from mantid.api import FunctionFactory
return FunctionFactory.createFunction(function_name)
def _set_algorithm_name_and_version(self):
"""
Sets the algm_name and algm_version attributes by parsing the name of the
document. The expected name of the document is "AlgorithmName-v?", which
is the name of the file with the extension removed
"""
(self.algm_name, self.algm_version) = algorithm_name_and_version(self.source())
| ScreamingUdder/mantid | docs/sphinxext/mantiddoc/directives/base.py | Python | gpl-3.0 | 8,208 |
import smtplib
import secrets
import sys
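# Usage sketch: python send.py MAIL_FROM MAIL_TO SUBJECT BODY
# Sends the message through Gmail's SSL SMTP endpoint, prompting for the
# account password on stdin.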
try:
mail_from = str(sys.argv[1])
mail_to = str(sys.argv[2])
subject_line = str(sys.argv[3])
msg_body = str(sys.argv[4])
except IndexError:
print("Error: Enter the from, to, subject, and msg body of the email to complete")
sys.exit(1)
print(mail_from)
password = raw_input("Please type in your password to log into your email.\n")
# Join the headers with CRLF and leave a blank line before the body; without
# the separators everything would be sent as one malformed header line.
msg = "From: {}\r\nTo: {}\r\nSubject: {}\r\n\r\n{}".format(mail_from, mail_to,
subject_line, msg_body)
server = 'smtp.gmail.com'
server = smtplib.SMTP_SSL(server)
server.set_debuglevel(1)
server.login(mail_from, password)
server.sendmail(mail_from, mail_to, msg)
server.quit()
| Surouxx/special-enigma | send.py | Python | gpl-3.0 | 654 |
"""
This file contains the System Calibration sub-class as part of the System Class
This class handles changes to system settings for calibration
"""
__author__ = "Corwin Perren"
__copyright__ = "None"
__credits__ = [""]
__license__ = "GPL (GNU General Public License)"
__version__ = "0.1 Alpha"
__maintainer__ = "Corwin Perren"
__email__ = "caperren@caperren.com"
__status__ = "Development"
# This file is part of "Pick And Plate".
#
# "Pick And Plate" is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# "Pick And Plate" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Pick And Plate". If not, see <http://www.gnu.org/licenses/>.
#####################################
# Imports
#####################################
# Python native imports
from PyQt4 import QtCore, QtGui
import logging
# Custom imports
#####################################
# Global Variables
#####################################
#####################################
# SystemCalibration Class Definition
#####################################
class SystemCalibration(QtCore.QObject):
camera_focus_exposure_changed_signal = QtCore.pyqtSignal(int, int)
system_location_request_signal = QtCore.pyqtSignal()
full_system_home_request_signal = QtCore.pyqtSignal()
x_y_move_relative_request_signal = QtCore.pyqtSignal(float, float)
x_y_move_request_signal = QtCore.pyqtSignal(float, float)
z_move_relative_request_signal = QtCore.pyqtSignal(float)
z_move_request_signal = QtCore.pyqtSignal(float)
light_change_signal = QtCore.pyqtSignal(int)
motor_state_change_signal = QtCore.pyqtSignal(int)
setting_saved_messagebox_show_signal = QtCore.pyqtSignal()
def __init__(self, main_window, master):
QtCore.QObject.__init__(self)
# ########## Get the Pick And Plate instance of the logger ##########
self.logger = logging.getLogger("PickAndPlate")
# ########## Get the Pick And Plate settings instance ##########
self.settings = QtCore.QSettings()
# ########## Reference to highest level window and master widget ##########
self.main_window = main_window
self.master = master
# ########## References to gui objects ##########
self.toolbox = self.main_window.system_calibration_toolbox
self.crop_x_center_sb = self.main_window.crop_x_center_spin_box
self.crop_y_center_sb = self.main_window.crop_y_center_spin_box
self.camera_focus_sb = self.main_window.camera_focus_spinbox
self.camera_exposure_sb = self.main_window.camera_exposure_spinbox
self.crop_dimension_sb = self.main_window.crop_dimension_spin_box
self.apply_focus_exposure_button = self.main_window.system_calibration_exposure_focus_button
self.distance_cal_x_sb = self.main_window.distance_calibration_x_spin_box
self.distance_cal_y_sb = self.main_window.distance_calibration_y_spin_box
self.usable_area_offset_sb = self.main_window.usable_area_offset_spin_box
self.res_combo_box = self.main_window.alignment_resolution_combo_box
self.x_left_button = self.main_window.alignment_x_left_button
self.x_right_button = self.main_window.alignment_x_right_button
self.y_up_button = self.main_window.alignment_y_up_button
self.y_down_button = self.main_window.alignment_y_down_button
self.triple_zero_button = self.main_window.alignment_triple_zero_button
self.z_up_button = self.main_window.alignment_z_up_button
self.z_down_button = self.main_window.alignment_z_down_button
self.z_max_button = self.main_window.alignment_z_max_button
self.full_home_button = self.main_window.alignment_full_home_button
self.save_z_center_button = self.main_window.alignment_save_precision_z_button
self.save_dish_button = self.main_window.alignment_save_dish_button
self.save_a1_button = self.main_window.alignment_save_a1_button
self.save_clean_button = self.main_window.alignment_save_clean_button
self.save_waste_button = self.main_window.alignment_save_waste_button
self.save_dish_min_button = self.main_window.alignment_save_dish_min_button
self.save_plate_min_button = self.main_window.alignment_save_plate_min_button
self.lights_on_button = self.main_window.alignment_lights_on_button
self.lights_off_button = self.main_window.alignment_lights_off_button
self.motors_on_button = self.main_window.alignment_motors_on_button
self.motors_off_button = self.main_window.alignment_motors_off_button
self.cal_preview_button = self.main_window.system_calibration_image_preview_button
# ########## Local Class Variables ##########
self.request_complete = True
self.tinyg_z_location = None
self.tinyg_x_location = None
self.tinyg_y_location = None
self.tinyg_full_home_done = False
# ########## Set up gui elements ##########
self.toolbox.setCurrentIndex(0)
# ########## Load settings and set widgets to values ##########
self.load_and_show_settings()
# ########## Make signal/slot connections ##########
self.connect_signals_to_slots()
def connect_signals_to_slots(self):
# ########## Local interface and settings connections ##########
self.crop_x_center_sb.valueChanged.connect(self.save_non_pick_head_changed_values_to_settings_slot)
self.crop_y_center_sb.valueChanged.connect(self.save_non_pick_head_changed_values_to_settings_slot)
self.camera_focus_sb.valueChanged.connect(self.save_non_pick_head_changed_values_to_settings_slot)
self.camera_exposure_sb.valueChanged.connect(self.save_non_pick_head_changed_values_to_settings_slot)
self.crop_dimension_sb.valueChanged.connect(self.save_non_pick_head_changed_values_to_settings_slot)
self.distance_cal_x_sb.valueChanged.connect(self.save_non_pick_head_changed_values_to_settings_slot)
self.distance_cal_y_sb.valueChanged.connect(self.save_non_pick_head_changed_values_to_settings_slot)
self.usable_area_offset_sb.valueChanged.connect(self.save_non_pick_head_changed_values_to_settings_slot)
self.apply_focus_exposure_button.clicked.connect(self.on_focus_or_exposure_changed_slot)
self.camera_focus_exposure_changed_signal.connect(self.main_window.video.configure_v4l2_camera_settings_slot)
self.x_right_button.clicked.connect(self.on_x_positive_clicked_slot)
self.x_left_button.clicked.connect(self.on_x_negative_clicked_slot)
self.y_up_button.clicked.connect(self.on_y_positive_clicked_slot)
self.y_down_button.clicked.connect(self.on_y_negative_clicked_slot)
self.z_up_button.clicked.connect(self.on_z_positive_clicked_slot)
self.z_down_button.clicked.connect(self.on_z_negative_clicked_slot)
self.triple_zero_button.clicked.connect(self.on_x_y_z_zero_clicked_slot)
self.z_max_button.clicked.connect(self.on_z_max_clicked_slot)
self.save_z_center_button.clicked.connect(self.on_save_precision_z_center_clicked_slot)
self.save_dish_button.clicked.connect(self.on_save_dish_center_clicked_slot)
self.save_a1_button.clicked.connect(self.on_save_a1_center_clicked_slot)
self.save_waste_button.clicked.connect(self.on_save_waste_center_clicked_slot)
self.save_clean_button.clicked.connect(self.on_save_clean_center_clicked_slot)
self.save_dish_min_button.clicked.connect(self.on_save_dish_min_clicked_slot)
self.save_plate_min_button.clicked.connect(self.on_save_plate_min_clicked_slot)
self.full_home_button.clicked.connect(self.on_do_full_homing_clicked_slot)
self.lights_on_button.clicked.connect(self.on_lights_on_clicked_slot)
self.lights_off_button.clicked.connect(self.on_lights_off_clicked_slot)
self.motors_on_button.clicked.connect(self.on_motors_on_clicked_slot)
self.motors_off_button.clicked.connect(self.on_motors_off_clicked_slot)
self.setting_saved_messagebox_show_signal.connect(self.on_setting_saved_show_message_box_slot)
# ########## External connections ##########
self.system_location_request_signal.connect(self.main_window.controller.broadcast_location_slot)
self.main_window.controller.tinyg_location_update_signal.connect(self.on_system_location_changed_slot)
self.x_y_move_relative_request_signal.connect(
self.main_window.controller.on_x_y_axis_move_relative_requested_slot)
self.x_y_move_request_signal.connect(self.main_window.controller.on_x_y_axis_move_requested_slot)
self.z_move_relative_request_signal.connect(self.main_window.controller.on_z_axis_move_relative_requested_slot)
self.z_move_request_signal.connect(self.main_window.controller.on_z_axis_move_requested_slot)
self.full_home_button.clicked.connect(self.main_window.controller.on_full_system_homing_requested_slot)
self.light_change_signal.connect(self.main_window.controller.on_light_change_request_signal_slot)
self.motor_state_change_signal.connect(self.main_window.controller.on_motor_state_change_request_signal_slot)
def save_non_pick_head_changed_values_to_settings_slot(self):
self.settings.setValue("system/system_calibration/crop_x_center", self.crop_x_center_sb.value())
self.settings.setValue("system/system_calibration/crop_y_center", self.crop_y_center_sb.value())
self.settings.setValue("system/system_calibration/camera_focus", self.camera_focus_sb.value())
self.settings.setValue("system/system_calibration/camera_exposure", self.camera_exposure_sb.value())
self.settings.setValue("system/system_calibration/crop_dimension", self.crop_dimension_sb.value())
self.settings.setValue("system/system_calibration/distance_cal_x", self.distance_cal_x_sb.value())
self.settings.setValue("system/system_calibration/distance_cal_y", self.distance_cal_y_sb.value())
self.settings.setValue("system/system_calibration/usable_area_offset", self.usable_area_offset_sb.value())
def load_and_show_settings(self):
self.crop_x_center_sb.setValue(self.settings.value("system/system_calibration/crop_x_center").toInt()[0])
self.crop_y_center_sb.setValue(self.settings.value("system/system_calibration/crop_y_center").toInt()[0])
self.camera_focus_sb.setValue(self.settings.value("system/system_calibration/camera_focus").toInt()[0])
self.camera_exposure_sb.setValue(self.settings.value("system/system_calibration/camera_exposure").toInt()[0])
self.crop_dimension_sb.setValue(self.settings.value("system/system_calibration/crop_dimension").toInt()[0])
self.distance_cal_x_sb.setValue(self.settings.value("system/system_calibration/distance_cal_x").toInt()[0])
self.distance_cal_y_sb.setValue(self.settings.value("system/system_calibration/distance_cal_y").toInt()[0])
self.usable_area_offset_sb.setValue(self.settings.value("system/system_calibration/usable_area_offset")
.toInt()[0])
def on_x_positive_clicked_slot(self):
self.request_complete = False
resolution = float(self.res_combo_box.currentText())
self.x_y_move_relative_request_signal.emit(resolution, 0)
self.motor_state_change_signal.emit(True)
def on_x_negative_clicked_slot(self):
self.request_complete = False
resolution = float(self.res_combo_box.currentText())
self.x_y_move_relative_request_signal.emit(-resolution, 0)
self.motor_state_change_signal.emit(True)
def on_y_positive_clicked_slot(self):
self.request_complete = False
resolution = float(self.res_combo_box.currentText())
self.x_y_move_relative_request_signal.emit(0, resolution)
self.motor_state_change_signal.emit(True)
def on_y_negative_clicked_slot(self):
self.request_complete = False
resolution = float(self.res_combo_box.currentText())
self.x_y_move_relative_request_signal.emit(0, -resolution)
self.motor_state_change_signal.emit(True)
def on_z_positive_clicked_slot(self):
self.request_complete = False
resolution = float(self.res_combo_box.currentText())
self.z_move_relative_request_signal.emit(resolution)
self.motor_state_change_signal.emit(True)
def on_z_negative_clicked_slot(self):
self.request_complete = False
resolution = float(self.res_combo_box.currentText())
self.z_move_relative_request_signal.emit(-resolution)
self.motor_state_change_signal.emit(True)
def on_x_y_z_zero_clicked_slot(self):
if self.tinyg_full_home_done:
self.z_move_request_signal.emit(25)
self.x_y_move_request_signal.emit(0, 0)
self.z_move_request_signal.emit(0)
self.motor_state_change_signal.emit(True)
def on_z_max_clicked_slot(self):
if self.tinyg_full_home_done:
self.request_complete = False
self.z_move_request_signal.emit(25)
self.motor_state_change_signal.emit(True)
def on_do_full_homing_clicked_slot(self):
self.tinyg_full_home_done = True
self.motor_state_change_signal.emit(True)
def on_save_precision_z_center_clicked_slot(self):
self.settings.setValue("system/system_calibration/precision_z_x_center", self.tinyg_x_location)
self.settings.setValue("system/system_calibration/precision_z_y_center", self.tinyg_y_location)
self.setting_saved_messagebox_show_signal.emit()
def on_save_dish_center_clicked_slot(self):
self.settings.setValue("system/system_calibration/dish_x_center", self.tinyg_x_location)
self.settings.setValue("system/system_calibration/dish_y_center", self.tinyg_y_location)
self.setting_saved_messagebox_show_signal.emit()
def on_save_a1_center_clicked_slot(self):
self.settings.setValue("system/system_calibration/a1_x_center", self.tinyg_x_location)
self.settings.setValue("system/system_calibration/a1_y_center", self.tinyg_y_location)
self.setting_saved_messagebox_show_signal.emit()
def on_save_clean_center_clicked_slot(self):
self.settings.setValue("system/system_calibration/clean_x_center", self.tinyg_x_location)
self.settings.setValue("system/system_calibration/clean_y_center", self.tinyg_y_location)
self.setting_saved_messagebox_show_signal.emit()
def on_save_waste_center_clicked_slot(self):
self.settings.setValue("system/system_calibration/waste_x_center", self.tinyg_x_location)
self.settings.setValue("system/system_calibration/waste_y_center", self.tinyg_y_location)
self.setting_saved_messagebox_show_signal.emit()
def on_save_dish_min_clicked_slot(self):
self.settings.setValue("system/system_calibration/dish_z_min", self.tinyg_z_location)
self.setting_saved_messagebox_show_signal.emit()
def on_save_plate_min_clicked_slot(self):
self.settings.setValue("system/system_calibration/plate_z_min", self.tinyg_z_location)
self.setting_saved_messagebox_show_signal.emit()
@staticmethod
def on_setting_saved_show_message_box_slot():
msg = QtGui.QMessageBox()
msg.setWindowTitle("Location Saved Successfully!")
msg.setText("Press \"Continue\" to return to adjusting settings.")
msg.setModal(True)
msg.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
# Make and add custom labeled buttons
msg.addButton("Continue", QtGui.QMessageBox.ActionRole)
# Set stylesheet
msg.setStyleSheet("QLabel{ color:rgb(202, 202, 202); }" +
"QMessageBox{ background-color:rgb(55, 55, 55);}" +
"QPushButton{ background-color:rgb(15,15,15); color:rgb(202, 202, 202);}")
# Move box to center of screen
box_size = msg.sizeHint()
screen_size = QtGui.QDesktopWidget().screen().rect()
msg.move((screen_size.width()/2 - box_size.width()/2), (screen_size.height()/2 - box_size.height()/2) )
msg.exec_()
def on_lights_on_clicked_slot(self):
self.light_change_signal.emit(1000)
def on_lights_off_clicked_slot(self):
self.light_change_signal.emit(0)
def on_motors_on_clicked_slot(self):
self.motor_state_change_signal.emit(True)
def on_motors_off_clicked_slot(self):
self.motor_state_change_signal.emit(False)
def on_system_location_changed_slot(self, x, y, z, a):
self.tinyg_x_location = x
self.tinyg_y_location = y
self.tinyg_z_location = z
def on_focus_or_exposure_changed_slot(self):
self.camera_focus_exposure_changed_signal.emit(self.camera_focus_sb.value(), self.camera_exposure_sb.value())
| SARL-Engineering/PickAndPlate | Interface/System/SystemCalibration/SystemCalibrationCore.py | Python | gpl-3.0 | 17,344 |
#!/usr/bin/env python
# Pyctools - a picture processing algorithm development kit.
# http://github.com/jim-easterbrook/pyctools
# Copyright (C) 2014-18 Pyctools contributors
#
# This program is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see
# <http://www.gnu.org/licenses/>.
"""Turn a picture upside down or reflect it left to right.
"""
__all__ = ['Flip']
__docformat__ = 'restructuredtext en'
import PIL.Image
from pyctools.core.base import Transformer
from pyctools.core.config import ConfigEnum
class Flip(Transformer):
def initialise(self):
self.config['direction'] = ConfigEnum(choices=('vertical', 'horizontal'))
def transform(self, in_frame, out_frame):
self.update_config()
direction = self.config['direction']
if direction == 'vertical':
flip = PIL.Image.FLIP_TOP_BOTTOM
else:
flip = PIL.Image.FLIP_LEFT_RIGHT
in_data = in_frame.as_PIL()
out_frame.data = in_data.transpose(flip)
audit = out_frame.metadata.get('audit')
audit += 'data = Flip(data)\n'
audit += ' direction: %s\n' % direction
out_frame.metadata.set('audit', audit)
return True
| jim-easterbrook/pyctools | src/examples/simple/src/pyctools/components/example/flip.py | Python | gpl-3.0 | 1,741 |
#!/usr/bin/env python
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
import math
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)
def callback(self,messages):
self.sensor_values = messages
def wall_front(self,ls):
return ls.left_forward > 50 or ls.right_forward > 50
def too_right(self,ls):
return ls.right_side > 50
def too_left(self,ls):
return ls.left_side > 50
def run(self):
rate = rospy.Rate(20)
data = Twist()
data.linear.x = 0.3
data.angular.z = 0.0
while not rospy.is_shutdown():
if self.wall_front(self.sensor_values):
data.angular.z = - math.pi
elif self.too_right(self.sensor_values):
data.angular.z = math.pi
elif self.too_left(self.sensor_values):
data.angular.z = - math.pi
else:
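# Proportional control: steer so the left side sensor stays at a reading of 50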
e = 50 - self.sensor_values.left_side
data.angular.z = e * math.pi / 180.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
| sabomeister/pimouse_run_corridor | script/wall_around.py | Python | gpl-3.0 | 1,637 |
from django.contrib.auth.decorators import login_required
from django.db.models import Q
from django.http import Http404
from django.shortcuts import render
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from repanier.models.customer import Customer
from repanier.models.staff import Staff
from repanier.tools import get_repanier_template_name
@login_required()
@csrf_protect
@never_cache
def who_is_who_view(request):
from repanier.apps import REPANIER_SETTINGS_DISPLAY_WHO_IS_WHO
if not REPANIER_SETTINGS_DISPLAY_WHO_IS_WHO:
raise Http404
q = request.POST.get("q", None)
customer_list = Customer.objects.filter(
may_order=True, represent_this_buyinggroup=False
).order_by("long_basket_name")
if q is not None:
customer_list = customer_list.filter(
Q(long_basket_name__icontains=q) | Q(city__icontains=q)
)
staff_list = Staff.objects.filter(is_active=True, can_be_contacted=True)
template_name = get_repanier_template_name("who_is_who.html")
return render(
request,
template_name,
{"staff_list": staff_list, "customer_list": customer_list, "q": q},
)
| pcolmant/repanier | repanier/views/who_is_who_view.py | Python | gpl-3.0 | 1,225 |
import json
from django.http import QueryDict
from django.utils import timezone
from ajapaik.ajapaik_face_recognition.models import FaceRecognitionRectangle
from ajapaik.ajapaik_object_recognition.models import ObjectDetectionAnnotation
DELETION_EXPIRATION_THRESHOLD_IN_HOURS = 24
GENDER_FEMALE = 0
GENDER_MALE = 1
GENDER_NOT_SURE = 2
GENDER_STRING_FEMALE = 'FEMALE'
GENDER_STRING_MALE = 'MALE'
GENDER_STRING_UNSURE = 'UNSURE'
AGE_CHILD = 0
AGE_ADULT = 1
AGE_ELDERLY = 2
AGE_NOT_SURE = 3
AGE_STRING_CHILD = 'CHILD'
AGE_STRING_ADULT = 'ADULT'
AGE_STRING_ELDERLY = 'ELDERLY'
AGE_STRING_UNSURE = 'UNSURE'
def is_value_present(val):
return val is not None and len(val) > 0
def parse_parameter(parameter):
if is_value_present(parameter):
return int(parameter)
return 0
def convert_to_query_dictionary(dictionary):
query_dictionary = QueryDict('', mutable=True)
query_dictionary.update(dictionary)
return query_dictionary
def transform_annotation_queryset(user_id, query_set, transform_function, photo_id=None):
transformed_collection = []
for entry in query_set:
transformed_collection.append(json.dumps(transform_function(entry, user_id).__dict__))
return transformed_collection
def is_object_annotation_editable(user_id: int, object_annotation: ObjectDetectionAnnotation):
created_on = object_annotation.created_on
created_by_id = object_annotation.user_id
return is_annotation_editable_for_user(user_id, created_on, created_by_id)
def is_face_annotation_editable(user_id: int, annotation: FaceRecognitionRectangle):
created_on = annotation.created
created_by = annotation.user
is_without_name = annotation.get_subject_name() is None
is_created_by_system = created_by is None
return (is_without_name
        or (is_created_by_system and is_annotation_editable_time_wise(created_on))
        or is_annotation_editable_for_user(user_id, created_on, annotation.user_id))
def is_annotation_editable_for_user(user_id: int, created_on, created_by_id):
return user_id == created_by_id and is_annotation_editable_time_wise(created_on)
def is_annotation_editable_time_wise(created_on):
global DELETION_EXPIRATION_THRESHOLD_IN_HOURS
current_time = timezone.now()
time_difference = current_time - created_on
time_difference_in_hours = time_difference.total_seconds() / 3600
return time_difference_in_hours <= DELETION_EXPIRATION_THRESHOLD_IN_HOURS
def parse_boolean(value):
if is_value_present(value):
return value in ['True', 'true']
return None
def parse_gender_parameter(gender):
global GENDER_MALE
global GENDER_FEMALE
global GENDER_NOT_SURE
global GENDER_STRING_FEMALE
global GENDER_STRING_MALE
if gender is not None and gender.isdigit():
return gender
if gender == GENDER_STRING_MALE:
return GENDER_MALE
if gender == GENDER_STRING_FEMALE:
return GENDER_FEMALE
return GENDER_NOT_SURE
def parse_age_parameter(age):
global AGE_ADULT
global AGE_CHILD
global AGE_ELDERLY
global AGE_NOT_SURE
global AGE_STRING_CHILD
global AGE_STRING_ADULT
global AGE_STRING_ELDERLY
if age is not None and age.isdigit():
return age
if age == AGE_STRING_CHILD:
return AGE_CHILD
if age == AGE_STRING_ADULT:
return AGE_ADULT
if age == AGE_STRING_ELDERLY:
return AGE_ELDERLY
return AGE_NOT_SURE
def parse_age_to_constant(age):
global AGE_ADULT
global AGE_CHILD
global AGE_ELDERLY
global AGE_NOT_SURE
global AGE_STRING_ADULT
global AGE_STRING_CHILD
global AGE_STRING_ELDERLY
global AGE_STRING_UNSURE
if age is None:
return age
if age == AGE_CHILD:
return AGE_STRING_CHILD
if age == AGE_ADULT:
return AGE_STRING_ADULT
if age == AGE_ELDERLY:
return AGE_STRING_ELDERLY
return AGE_STRING_UNSURE
def parse_gender_to_constant(gender):
global GENDER_MALE
global GENDER_FEMALE
global GENDER_NOT_SURE
global GENDER_STRING_MALE
global GENDER_STRING_FEMALE
global GENDER_STRING_UNSURE
if gender is None:
return gender
if gender == GENDER_MALE:
return GENDER_STRING_MALE
if gender == GENDER_FEMALE:
return GENDER_STRING_FEMALE
return GENDER_STRING_UNSURE
| Ajapaik/ajapaik-web | ajapaik/ajapaik_object_recognition/object_annotation_utils.py | Python | gpl-3.0 | 4,371 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#This module contains statistical procedures
from scipy import stats as scstats
from scipy import special as sc
import calculus_functions as cf
import numpy as np
import loglikelihood as logl
import random_effects
def var_decomposition(XXNorm=None,X=None,concat=False):
"""Variance decomposition. Returns the matrix of condition indexes for each factor (rows) and each variable
(columns). Calculates the normalized sum of squares using square_and_norm if XXNorm is not supplied"""
if XXNorm is None:
XXNorm=square_and_norm(X)
ub=len(XXNorm)
d,EVec=np.linalg.eig(XXNorm)
if np.any(np.round(d.imag,15)!=len(d)*[0]):
pass
#print( "non-real XX matrix")
d=d.real
EVec=EVec.real
d=np.abs(d)**0.5+1e-100
MaxEv=np.max(d)
fi=np.abs(EVec*EVec/((d*d).reshape((1,ub))+1E-200))
fiTot=np.sum(fi,1)
pi=fi/fiTot.reshape((ub,1))
pi=pi.T
CondIx=MaxEv/d
ind=np.argsort(CondIx)
pi=pi[ind]
CondIx=CondIx[ind]
CondIx=CondIx.reshape((len(CondIx),1))
if concat:
return np.concatenate((CondIx,pi),1)
else:
return CondIx,pi
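# Interpretation (Belsley-style collinearity diagnostics): condition indexes
# above roughly 30, combined with two or more variables having variance
# proportions over 0.5 in the same row of pi, point to harmful collinearity.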
def square_and_norm(X):
"""Squares X and normalizes it to unit length.
Similar to a correlation matrix, except the
means are not subtracted"""
N,T,k=X.shape
Sumsq=np.sqrt(np.sum(np.sum(X**2,0),0))
Sumsq.resize((k,1))
Sumsq=Sumsq*Sumsq.T
norm=cf.dot(X,X)/(Sumsq+1e-200)
return norm
def singular_elim(panel,X):
"""Eliminates variables that cause singularity"""
N,T,k=X.shape
r=np.arange(k)
ci_threshold=50
keep,XXCorrel=find_singulars(panel,X)
XXNorm=square_and_norm(X)
cond_ix,pi=var_decomposition(XXNorm)
if max(cond_ix)<ci_threshold:
return keep,cond_ix
for cix in range(1,len(cond_ix)):
if (np.sum(pi[-cix]>0.5)>1) and cond_ix[-cix][0]>ci_threshold:
keep[pi[:,-cix]>0.5]=False
return keep,cond_ix
def find_singulars(panel,X):
"""Returns a list with True for variables that cause singularity and False otherwise.
for the main regression, singularity is detected by the constraints module"""
N,T,k=X.shape
XXCorrel=correl(X,panel)
keep=np.all(np.isnan(XXCorrel)==False,0)
keep=keep*np.all((np.abs(np.triu(XXCorrel,1))>0.99)==False,0)
x_dev=deviation(panel, X)
var_x=np.sum(np.sum(x_dev**2,0),0)
keep=keep*(var_x>0)#remove constants
keep[0]=True#allways keep the first constant term
return keep,XXCorrel
def adf_test(panel,ll,p):
"""Returns the augmented dickey fuller test statistic and critical value"""
N,T,k=panel.X.shape
y=ll.Y_st
yl1=cf.roll(y,1,1)
dy=y-yl1
date_var=np.arange(T).reshape((T,1))*panel.included[3] #date count
X=np.concatenate((panel.included[3],date_var,yl1),2)
dyL=[]
for i in range(p):
dyL.append(cf.roll(dy,i+1,1))
dyL=np.concatenate(dyL,2)
date_var=(date_var>p+1)
X=np.concatenate((X,dyL),2)
X=X*date_var
dy=dy*date_var
X[:,0:panel.lost_obs+10]=0
keep,c_ix=singular_elim(panel,X)
if not np.all(keep[0:3]):
return 'NA','NA','NA'
beta,se_robust,se=OLS(panel,X[:,:,keep],dy,robust_se_lags=10,c=date_var)
adf_stat=beta[2]/se_robust[2]
critval=adf_crit_values(panel.NT,True)
res=np.append(adf_stat,critval)
return res
def goodness_of_fit(ll,standarized,panel):
if standarized:
s_res=panel.var(ll.e_RE)
s_tot=panel.var(ll.Y_st)
else:
s_res=panel.var(ll.u)
s_tot=panel.var(panel.Y)
r_unexpl=s_res/s_tot
Rsq=1-r_unexpl
Rsqadj=1-r_unexpl*(panel.NT-1)/(panel.NT-panel.args.n_args-1)
panel.args.create_null_ll(panel)
LL_ratio_OLS=2*(ll.LL-panel.args.LL_OLS)
LL_ratio=2*(ll.LL-panel.args.LL_null)
return Rsq, Rsqadj, LL_ratio,LL_ratio_OLS
def breusch_godfrey_test(panel,ll, lags):
"""returns the probability that err_vec are not auto correlated"""
e=ll.e_norm_centered
X=ll.XIV_st
N,T,k=X.shape
X_u=X[:,lags:T]
u=e[:,lags:T]
c=panel.included[3][:,lags:T]
for i in range(1,lags+1):
X_u=np.append(X_u,e[:,lags-i:T-i],2)
Beta,Rsq=OLS(panel,X_u,u,False,True,c=c)
T=(panel.NT-k-1-lags)
BGStat=T*Rsq
rho=Beta[k:]
ProbNoAC=1.0-chisq_dist(BGStat,lags)
return ProbNoAC, rho, Rsq #The probability of no AC given H0 of AC.
def DurbinWatson(panel,ll):
"""returns the probability that err_vec are not auto correlated"""
X=ll.XIV_st
N,T,k=X.shape
e=ll.e_norm_centered
c=panel.included[3][:,:-1]
DW=np.sum((c*(e[:,1:]-e[:,:-1]))**2)/np.sum((e*panel.included[3])**2)
return DW
def correlogram(panel,e,lags,center=False):
N,T,k=e.shape
v=panel.var(e)
if center:
e=e-panel.mean(e)
rho=np.zeros(lags+1)
rho[0]=1
for i in range(1,lags+1):
a=panel.T_i-i-1>0
incl=(a*panel.included[3])[:,i:,:]
df=np.sum(incl)
rho[i]=np.sum(incl*e[:,i:]*e[:,0:-i])/(v*df)
return rho #The probability of no AC given H0 of AC.
def chisq_dist(X,df):
"""Returns the probability of drawing a number
less than X from a chi-square distribution with
df degrees of freedom"""
retval=1.0-sc.gammaincc(df/2.0,X/2.0)
return retval
def adf_crit_values(n,trend):
"""Returns 1 and 5 percent critical values respectively. Interpolated from table in
Fuller, W. A. (1976). Introduction to Statistical Time Series. New York: John Wiley and Sons. ISBN 0-471-28715-6
table is available at https://en.wikipedia.org/wiki/Augmented_Dickey%E2%80%93Fuller_test"""
if trend:
d={25:np.array([-3.75,-3.00]),50:np.array([-3.58,-2.93]),100:np.array([-3.51,-2.89]),
250:np.array([-3.46,-2.88]),500:np.array([-3.44,-2.87]),10000:np.array([-3.43,-2.86])}
else:
d={25:np.array([-4.38,-3.60]),50:np.array([-4.15,-3.50]),100:np.array([-4.04,-3.45]),
250:np.array([-3.99,-3.43]),500:np.array([-3.98,-3.42]),10000:np.array([-3.96,-3.41])}
if n<25:
print ("Warning: ADF critical values are not available for fewer than 25 observations")
return (0,0)
k=(25,50,100,250,500,10000)
r=None
for i in range(len(k)-1):
if n>=k[i] and n<500:
r=d[k[i]]+(n-k[i])*((d[k[i+1]]-d[k[i]])/(k[i+1]-k[i]))#interpolation
return r
if r is None:
return d[10000]
def JB_normality_test(e,panel):
"""Jarque-Bera test for normality.
returns the probability that a set of residuals are drawn from a normal distribution"""
e=e[panel.included[3]]
a=np.argsort(np.abs(e))[::-1]
ec=e[a][int(0.001*len(e)):]
df=len(ec)
ec=ec-np.mean(ec)
s=(np.sum(ec**2)/df)**0.5
mu3=np.sum(ec**3)/df
mu4=np.sum(ec**4)/df
S=mu3/s**3
C=mu4/s**4
JB=df*((S**2)+0.25*(C-3)**2)/6.0
p=1.0-chisq_dist(JB,2)
return p
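# The statistic above is the textbook form JB = n/6 * (S**2 + 0.25*(C - 3)**2),
# asymptotically chi-square with 2 degrees of freedom under normality.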
def correl(X,panel=None):
"""Returns the correlation of X. Assumes three dimensional matrices. """
if not panel is None:
X=X*panel.included[3]
N,T,k=X.shape
N=panel.NT
mean=np.sum(np.sum(X,0),0).reshape((1,k))/N
else:
N,k=X.shape
mean=np.sum(X,0).reshape((1,k))/N
cov=cf.dot(X,X)/N
cov=cov-(mean.T*mean)
stdx=(np.diag(cov)**0.5).reshape((1,k))
stdx=(stdx.T*stdx)
stdx[np.isnan(stdx)]=0
corr=(stdx>0)*cov/(stdx+(stdx==0)*1e-100)
corr[stdx<=0]=0
return corr
def deviation(panel,X):
N,T,k=X.shape
x=X*panel.included[3]
mean=np.sum(np.sum(x,0),0).reshape((1,1,k))/panel.NT
return (X-mean)*panel.included[3]
def correl_2dim(X,Y=None,covar=False):
"""Returns the correlation of X and Y. Assumes two dimensional matrixes. If Y is not supplied, the
correlation matrix of X is returned"""
if type(X)==list:
X=Concat(X)
single=Y is None
if single:
Y=X
T,k=X.shape
T,m=Y.shape
X_dev=X-np.mean(X,0)
Y_dev=Y-np.mean(Y,0)
cov=np.dot(X_dev.T,Y_dev)
if covar:
return cov/(len(X)-1)
stdx=np.sum(X_dev**2,0).reshape((1,k))**0.5
if single:
stdy=stdx
else:
stdy=np.sum(Y_dev**2,0).reshape((1,k))**0.5
std_matr=stdx.T*stdy
std_matr=std_matr+(std_matr==0)*1e-200
corr=cov/std_matr
if corr.shape==(1,1):
corr=corr[0][0]
return corr
def get_singular_list(panel,XX):
a,b=singular_elim(panel,XX)
names=np.array(panel.input.X_names)[a==False]
idx=np.array(range(len(a)))[a==False]
s=', '.join([f"{names[i]}" for i in range(len(idx))])
return s
def OLS(panel,X,Y,add_const=False,return_rsq=False,return_e=False,c=None,robust_se_lags=0):
"""runs OLS after adding const as the last variable"""
if c is None:
c=panel.included[3]
N,T,k=X.shape
NT=panel.NT
if add_const:
X=np.concatenate((c,X),2)
k=k+1
X=X*c
Y=Y*c
XX=cf.dot(X,X)
XY=cf.dot(X,Y)
try:
beta=np.linalg.solve(XX,XY)
except np.linalg.LinAlgError:
s=get_singular_list(panel,X)
raise RuntimeError("The following variables caused singularity runtime and must be removed: "+s)
if return_rsq or return_e or robust_se_lags:
e=(Y-cf.dot(X,beta))*c
if return_rsq:
v0=panel.var(e,included=c)
v1=panel.var(Y,included=c)
Rsq=1-v0/v1
#Rsqadj=1-(v0/v1)*(NT-1)/(NT-k-1)
return beta,Rsq
elif return_e:
return beta,e*c
elif robust_se_lags:
XXInv=np.linalg.inv(XX)
se_robust,se,V=robust_se(panel,robust_se_lags,XXInv,X*e)
return beta,se_robust.reshape(k,1),se.reshape(k,1)
return beta
def OLS_simple(Y,X,addconst=False,residuals=True):
"""Returns the OLS residuals if residuals. For use with two dimiensional arrays only"""
if addconst:
n=len(X)
X=np.concatenate((np.ones((n,1)),X),1)
XY=np.dot(X.T,Y)
XX=np.dot(X.T,X)
XXInv=np.linalg.inv(XX)
beta=np.dot(XXInv,XY)
e=Y-np.dot(X,beta)
if residuals:
return beta,e
else:
return beta
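# Usage sketch for OLS_simple on simulated data; the constant is prepended,
# so beta[0] is the intercept.
def _ols_simple_sketch():
    rng = np.random.RandomState(1)
    X = rng.randn(100, 2)
    Y = 1.0 + X @ np.array([[2.0], [-1.0]]) + 0.1*rng.randn(100, 1)
    beta, e = OLS_simple(Y, X, addconst=True)
    return beta, e  # beta close to [[1.0], [2.0], [-1.0]]; e are the residuals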
def newey_west_wghts(L,XErr):
"""Calculates the Newey-West autocorrelation consistent weighting matrix. Either err_vec or XErr is required"""
N,T,k=XErr.shape
S=np.zeros((k,k))
for i in range(1,min(L,T)):
w=1-(i+1)/(L)
XX=cf.dot(XErr[:,i:],XErr[:,0:T-i])
S+=w*(XX+XX.T)
return S
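# Worked weights: the loop above applies Bartlett-type weights w_i = 1-(i+1)/L
# to the lag-i score cross products. For L = 4 the lags 1..3 get
# [1 - (i+1)/4 for i in range(1, 4)]  ->  [0.5, 0.25, 0.0]
# (note the last lag gets weight zero, a slight variant of the textbook
# 1 - i/(L+1) Bartlett scheme).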
def robust_cluster_weights(panel,XErr,cluster_dim,whites):
"""Calculates the Newey-West autocorrelation consistent weighting matrix. Either err_vec or XErr is required"""
N,T,k=XErr.shape
if cluster_dim==0:#group cluster
if N<=1:
return 0
mean=panel.mean(XErr,0)
elif cluster_dim==1:#time cluster
mean=random_effects.mean_time(panel,XErr,True)
T,m,k=mean.shape
mean=mean.reshape((T,k))
S=cf.dot(mean,mean)-whites
return S
def robust_se(panel,L,hessin,XErr,nw_only=True):
"""Returns the maximum robust standard errors considering all combinations of sums of different combinations
of clusters and newy-west"""
    w,W=sandwich_var(hessin,cf.dot(XErr,XErr))           # White's (heteroskedasticity-robust) term
    nw,NW=sandwich_var(hessin,newey_west_wghts(L,XErr))  # Newey-West (autocorrelation-robust) term
if panel.N>1:
        c0,C0=sandwich_var(hessin,robust_cluster_weights(panel,XErr, 0, w))  # group clusters (dimension 0)
        c1,C1=sandwich_var(hessin,robust_cluster_weights(panel,XErr, 1, w))  # time clusters (dimension 1)
else:
c0,c1,C0,C1=0,0,0*W,0*W
v=np.array([
nw,
nw+c0,
nw+c1,
nw+c1+c0,
w*0
])
V=np.array([
NW,
NW+C0,
NW+C1,
NW+C1+C0,
W*0
])
V=V+W
s=np.max(w+v,0)
se_robust=np.maximum(s,0)**0.5
i=np.argmax(np.sum(w+v,1))
se_std=np.maximum(w,0)**0.5
return se_robust,se_std,V[i]
def sandwich_var(hessin,V):
hessinV=cf.dot(hessin,V)
V=cf.dot(hessinV,hessin)
v=np.diag(V)
return v,V
|
espensirnes/paneltime
|
paneltime/stat_functions.py
|
Python
|
gpl-3.0
| 10,747
|
import quizduell
import cookielib
import json
import os
# Load authenticated session from file to prevent unnecessary logins:
cookie_jar = cookielib.MozillaCookieJar('cookie_file')
api = quizduell.QuizduellApi(cookie_jar)
if os.access(cookie_jar.filename, os.F_OK):
cookie_jar.load()
else:
api.login_user('name', 'password')
api = quizduell.QuizduellApi(cookie_jar)
result = api.top_list_rating()
if 'access' in result:
# Session invalid, re-login:
api.login_user('name', 'password')
result = api.top_list_rating()
# Store authenticated session in file:
cookie_jar.save()
print json.dumps(result, sort_keys=True, indent=4)
|
mtschirs/quizduellapi
|
examples/top_list_rating.py
|
Python
|
gpl-3.0
| 648
|
# -*- coding: utf-8 -*-
"""
:copyright: (c) 2016 by the mediaTUM authors
:license: GPL3, see COPYING for details
Various functions for checking and converting unsafe user input.
"""
from __future__ import absolute_import
import logging
logg = logging.getLogger(__name__)
def string_to_int(data):
if data is None:
logg.warn("cannot convert None value to an integer")
return None
try:
nid = int(data)
except ValueError:
logg.warn("invalid user input for int: '%r'", data)
return None
return nid
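# Usage examples (behaviour of the helper above):
# string_to_int("42") -> 42
# string_to_int("4x") -> None (and logs a warning)
# string_to_int(None) -> None (and logs a warning)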
|
mediatum/mediatum
|
utils/userinput.py
|
Python
|
gpl-3.0
| 569
|
import moose
import numpy as np
from collections import Counter
def xyPosition(objInfo,xory):
try:
return(float(moose.element(objInfo).getField(xory)))
except ValueError:
return (float(0))
def setupMeshObj(modelRoot):
    ''' Set up each compartment and its member pools, reactions, enzymes and enzyme
    complexes in the meshEntry dictionary: the key is the compartment and the value is
    a dict mapping a moose object type to the list of objects of that type, e.g.
    meshEntry[meshEnt] = {'reaction': reaction_list, 'enzyme': enzyme_list, 'pool': poollist, 'cplx': cplxlist}
    '''
xmin = 0.0
xmax = 1.0
ymin = 0.0
ymax = 1.0
listOfitems = {}
positionInfoExist = True
meshEntry = {}
xcord = []
ycord = []
meshEntryWildcard = '/##[ISA=ChemCompt]'
if modelRoot != '/':
meshEntryWildcard = modelRoot+meshEntryWildcard
for meshEnt in moose.wildcardFind(meshEntryWildcard):
mollist = []
realist = []
enzlist = []
cplxlist = []
tablist = []
funclist = []
mol_cpl = moose.wildcardFind(meshEnt.path+'/##[ISA=PoolBase]')
funclist = moose.wildcardFind(meshEnt.path+'/##[ISA=Function]')
enzlist = moose.wildcardFind(meshEnt.path+'/##[ISA=EnzBase]')
realist = moose.wildcardFind(meshEnt.path+'/##[ISA=ReacBase]')
tablist = moose.wildcardFind(meshEnt.path+'/##[ISA=StimulusTable]')
if mol_cpl or funclist or enzlist or realist or tablist:
for m in mol_cpl:
if isinstance(moose.element(m.parent),moose.CplxEnzBase):
cplxlist.append(m)
objInfo = m.parent.path+'/info'
elif isinstance(moose.element(m),moose.PoolBase):
mollist.append(m)
objInfo =m.path+'/info'
if moose.exists(objInfo):
listOfitems[moose.element(moose.element(objInfo).parent)]={'x':xyPosition(objInfo,'x'),'y':xyPosition(objInfo,'y')}
xcord.append(xyPosition(objInfo,'x'))
ycord.append(xyPosition(objInfo,'y'))
getxyCord(xcord,ycord,funclist,listOfitems)
getxyCord(xcord,ycord,enzlist,listOfitems)
getxyCord(xcord,ycord,realist,listOfitems)
getxyCord(xcord,ycord,tablist,listOfitems)
meshEntry[meshEnt] = {'enzyme':enzlist,
'reaction':realist,
'pool':mollist,
'cplx':cplxlist,
'table':tablist,
'function':funclist
}
positionInfoExist = not(len(np.nonzero(xcord)[0]) == 0 \
and len(np.nonzero(ycord)[0]) == 0)
if positionInfoExist:
xmin = min(xcord)
xmax = max(xcord)
ymin = min(ycord)
ymax = max(ycord)
return meshEntry,xmin,xmax,ymin,ymax,positionInfoExist,listOfitems
def sizeHint(self):
    # Vestigial Qt widget method; needs QtCore (e.g. "from PyQt4 import QtCore") in scope to work.
    return QtCore.QSize(800,400)
def getxyCord(xcord,ycord,list1,listOfitems):
for item in list1:
# if isinstance(item,Function):
# objInfo = moose.element(item.parent).path+'/info'
# else:
# objInfo = item.path+'/info'
        if not isinstance(item,moose.Function):
            objInfo = item.path+'/info'
            xcord.append(xyPosition(objInfo,'x'))
            ycord.append(xyPosition(objInfo,'y'))
            if moose.exists(objInfo):
                listOfitems[moose.element(moose.element(objInfo).parent)]={'x':xyPosition(objInfo,'x'),'y':xyPosition(objInfo,'y')}
def setupItem(modelPath,cntDict):
'''This function collects information of what is connected to what. \
eg. substrate and product connectivity to reaction's and enzyme's \
sumtotal connectivity to its pool are collected '''
#print " setupItem"
sublist = []
prdlist = []
zombieType = ['ReacBase','EnzBase','Function','StimulusTable']
for baseObj in zombieType:
path = '/##[ISA='+baseObj+']'
if modelPath != '/':
path = modelPath+path
if ( (baseObj == 'ReacBase') or (baseObj == 'EnzBase')):
for items in moose.wildcardFind(path):
sublist = []
prdlist = []
uniqItem,countuniqItem = countitems(items,'subOut')
subNo = uniqItem
for sub in uniqItem:
sublist.append((moose.element(sub),'s',countuniqItem[sub]))
uniqItem,countuniqItem = countitems(items,'prd')
prdNo = uniqItem
if (len(subNo) == 0 or len(prdNo) == 0):
print ("Substrate Product is empty ",path, " ",items)
for prd in uniqItem:
prdlist.append((moose.element(prd),'p',countuniqItem[prd]))
                if (baseObj == 'CplxEnzBase'):  # note: never true here, since 'CplxEnzBase' is not in zombieType above
uniqItem,countuniqItem = countitems(items,'toEnz')
for enzpar in uniqItem:
sublist.append((moose.element(enzpar),'t',countuniqItem[enzpar]))
uniqItem,countuniqItem = countitems(items,'cplxDest')
for cplx in uniqItem:
prdlist.append((moose.element(cplx),'cplx',countuniqItem[cplx]))
if (baseObj == 'EnzBase'):
uniqItem,countuniqItem = countitems(items,'enzDest')
for enzpar in uniqItem:
sublist.append((moose.element(enzpar),'t',countuniqItem[enzpar]))
cntDict[items] = sublist,prdlist
elif baseObj == 'Function':
for items in moose.wildcardFind(path):
sublist = []
prdlist = []
item = items.path+'/x[0]'
uniqItem,countuniqItem = countitems(item,'input')
for funcpar in uniqItem:
sublist.append((moose.element(funcpar),'sts',countuniqItem[funcpar]))
uniqItem,countuniqItem = countitems(items,'valueOut')
for funcpar in uniqItem:
prdlist.append((moose.element(funcpar),'stp',countuniqItem[funcpar]))
cntDict[items] = sublist,prdlist
# elif baseObj == 'Function':
# #ZombieSumFunc adding inputs
# inputlist = []
# outputlist = []
# funplist = []
# nfunplist = []
# for items in moose.wildcardFind(path):
# for funplist in moose.element(items).neighbors['valueOut']:
# for func in funplist:
# funcx = moose.element(items.path+'/x[0]')
# uniqItem,countuniqItem = countitems(funcx,'input')
# for inPut in uniqItem:
# inputlist.append((inPut,'st',countuniqItem[inPut]))
# cntDict[func] = inputlist
else:
for tab in moose.wildcardFind(path):
tablist = []
uniqItem,countuniqItem = countitems(tab,'output')
for tabconnect in uniqItem:
tablist.append((moose.element(tabconnect),'tab',countuniqItem[tabconnect]))
cntDict[tab] = tablist
def countitems(mitems,objtype):
items = []
items = moose.element(mitems).neighbors[objtype]
uniqItems = set(items)
countuniqItems = Counter(items)
return(uniqItems,countuniqItems)
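# Usage sketch (kept as comments since it needs a loaded moose model; the
# '/model' root below is illustrative):
# cntDict = {}
# setupItem('/model', cntDict)   # cntDict maps each reaction/enzyme/table
#                                # to its (substrate, product) connection tuples
# meshEntry, xmin, xmax, ymin, ymax, hasPos, items = setupMeshObj('/model')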
|
subhacom/moose-core
|
python/moose/chemUtil/chemConnectUtil.py
|
Python
|
gpl-3.0
| 7,761
|
"""Unit tests for the ``organizations`` paths.
Each ``APITestCase`` subclass tests a single URL. A full list of URLs to be
tested can be found here:
http://theforeman.org/api/apidoc/v2/organizations.html
"""
import ddt
import httplib
import sys
from fauxfactory import gen_string
from nailgun import client
from requests.exceptions import HTTPError
from robottelo import entities
from robottelo.common.helpers import get_server_credentials
from robottelo.test import APITestCase
# (too-many-public-methods) pylint:disable=R0904
@ddt.ddt
class OrganizationTestCase(APITestCase):
"""Tests for the ``organizations`` path."""
def test_create_text_plain(self):
"""@Test Create an organization using a 'text/plain' content-type.
@Assert: HTTP 415 is returned.
@Feature: Organization
"""
organization = entities.Organization()
organization.create_missing()
response = client.post(
organization.path(),
organization.create_payload(),
auth=get_server_credentials(),
headers={'content-type': 'text/plain'},
verify=False,
)
self.assertEqual(httplib.UNSUPPORTED_MEDIA_TYPE, response.status_code)
def test_positive_create_1(self):
"""@Test: Create an organization and provide a name.
@Assert: The organization has the provided attributes and an
auto-generated label.
@Feature: Organization
"""
attrs = entities.Organization().create_json()
self.assertTrue('label' in attrs.keys())
        if sys.version_info[0] == 2:
self.assertIsInstance(attrs['label'], unicode)
else:
self.assertIsInstance(attrs['label'], str)
def test_positive_create_2(self):
"""@Test: Create an org and provide a name and identical label.
@Assert: The organization has the provided attributes.
        @Feature: Organization
"""
# A label has a more restrictive allowable charset than a name.
name_label = entities.Organization.label.gen_value()
attrs = entities.Organization(
name=name_label,
label=name_label,
).create_json()
self.assertEqual(attrs['name'], name_label)
self.assertEqual(attrs['label'], name_label)
def test_positive_create_3(self):
"""@Test: Create an organization and provide a name and label.
@Assert: The organization has the provided attributes.
@Feature: Organization
"""
name = entities.Organization.name.gen_value()
label = entities.Organization.label.gen_value()
attrs = entities.Organization(name=name, label=label).create_json()
self.assertEqual(attrs['name'], name)
self.assertEqual(attrs['label'], label)
@ddt.data(
# two-tuples of data
(gen_string(str_type='alpha'),
gen_string(str_type='alpha')),
(gen_string(str_type='alphanumeric'),
gen_string(str_type='alphanumeric')),
(gen_string(str_type='cjk'),
gen_string(str_type='cjk')),
(gen_string(str_type='latin1'),
gen_string(str_type='latin1')),
(gen_string(str_type='numeric'),
gen_string(str_type='numeric')),
(gen_string(str_type='utf8'),
gen_string(str_type='utf8')),
)
@ddt.unpack
def test_positive_create_4(self, name, description):
"""@Test: Create an organization and provide a name and description.
@Assert: The organization has the provided attributes and an
auto-generated label.
@Feature: Organization
"""
attrs = entities.Organization(
name=name,
description=description,
).create_json()
self.assertEqual(name, attrs['name'])
self.assertEqual(description, attrs['description'])
# Was a label auto-generated?
self.assertTrue('label' in attrs.keys())
        if sys.version_info[0] == 2:
self.assertIsInstance(attrs['label'], unicode)
else:
self.assertIsInstance(attrs['label'], str)
self.assertGreater(len(attrs['label']), 0)
def test_positive_create_5(self):
"""@Test: Create an org and provide a name, label and description.
@Assert: The organization has the provided name, label and description.
@Feature: Organization
"""
name = entities.Organization.name.gen_value()
label = entities.Organization.label.gen_value()
description = entities.Organization.description.gen_value()
attrs = entities.Organization(
name=name,
label=label,
description=description,
).create_json()
self.assertEqual(attrs['name'], name)
self.assertEqual(attrs['label'], label)
self.assertEqual(attrs['description'], description)
@ddt.data(
gen_string('utf8', length=256), # longer than 255
'',
' ',
)
def test_negative_create_name(self, name):
"""@Test: Create an org with an incorrect name.
@Assert: The organization cannot be created.
@Feature: Organization
"""
with self.assertRaises(HTTPError):
entities.Organization(name=name).create_json()
def test_negative_create_duplicate(self):
"""@Test: Create two organizations with identical names.
@Assert: The second organization cannot be created.
@Feature: Organization
"""
name = entities.Organization.name.gen_value()
entities.Organization(name=name).create_json()
with self.assertRaises(HTTPError):
entities.Organization(name=name).create_json()
def test_positive_search(self):
"""@Test: Create an organization, then search for it by name.
@Assert: Searching returns at least one result.
@Feature: Organization
"""
name = entities.Organization().create_json()['name']
response = client.get(
entities.Organization().path(),
auth=get_server_credentials(),
data={u'name': name},
verify=False,
)
response.raise_for_status()
results = response.json()['results']
self.assertGreaterEqual(len(results), 1)
@ddt.ddt
class OrganizationUpdateTestCase(APITestCase):
"""Tests for the ``organizations`` path."""
@classmethod
def setUpClass(cls): # noqa
"""Create an organization."""
cls.organization = entities.Organization(
id=entities.Organization().create_json()['id']
).read()
@ddt.data(
{'description': gen_string(str_type='alpha')},
{'description': gen_string(str_type='alphanumeric')},
{'description': gen_string(str_type='cjk')},
{'description': gen_string(str_type='latin1')},
{'description': gen_string(str_type='numeric')},
{'description': gen_string(str_type='utf8')},
{'name': gen_string(str_type='alpha')},
{'name': gen_string(str_type='alphanumeric')},
{'name': gen_string(str_type='cjk')},
{'name': gen_string(str_type='latin1')},
{'name': gen_string(str_type='numeric')},
{'name': gen_string(str_type='utf8')},
{ # can we update two attrs at once?
'description': entities.Organization.description.gen_value(),
'name': entities.Organization.name.gen_value(),
},
)
def test_positive_update(self, attrs):
"""@Test: Update an organization's attributes with valid values.
@Assert: The organization's attributes are updated.
@Feature: Organization
"""
client.put(
self.organization.path(),
attrs,
verify=False,
auth=get_server_credentials(),
).raise_for_status()
# Read the organization and validate its attributes.
new_attrs = self.organization.read_json()
for name, value in attrs.items():
self.assertIn(name, new_attrs.keys())
self.assertEqual(new_attrs[name], value)
@ddt.data(
{'name': gen_string(str_type='utf8', length=256)},
{'label': gen_string(str_type='utf8')}, # Immutable. See BZ 1089996.
)
def test_negative_update(self, attrs):
"""@Test: Update an organization's attributes with invalid values.
@Assert: The organization's attributes are not updated.
@Feature: Organization
"""
response = client.put(
self.organization.path(),
attrs,
verify=False,
auth=get_server_credentials(),
)
with self.assertRaises(HTTPError):
response.raise_for_status()
|
oshtaier/robottelo
|
tests/foreman/api/test_organization.py
|
Python
|
gpl-3.0
| 8,794
|
#!/usr/bin/python
#Conky-Fitbit by Exadrid
from fitbit.api import FitbitOauthClient
import os, sys, webbrowser, fitbit, time, datetime
import configparser
config = configparser.ConfigParser()
client_key = 'ba7c8a6d4376449f8ed481f0af25c7e2'
client_secret = '69f323833a91491cbac4fdc4ff219bdc'
#Get personal keys and put it in a config file
def gather_keys():
print('* Obtain a request token ...\n')
client = FitbitOauthClient(client_key, client_secret)
token = client.fetch_request_token()
stderr = os.dup(2)
os.close(2)
os.open(os.devnull, os.O_RDWR)
try:
webbrowser.get().open(client.authorize_token_url())
finally:
os.dup2(stderr, 2)
print('* Authorize the request token in your browser\n')
time.sleep(3)
verifier = input('\nPlease input PIN: ')
# get access token
print('\n* Obtain an access token ...\n')
token = client.fetch_access_token(verifier)
global user_id, user_key, user_secret
user_key = token['oauth_token']
user_secret = token['oauth_token_secret']
print('* Your user key is %s and your user secret is %s. These will be saved in config.cfg.' % (user_key, user_secret))
# lets create that config file for next time...
cfgfile = open("./config.cfg",'w')
# add the settings to the structure of the file, and lets write it out...
config.add_section('Passkey')
config.set('Passkey','user_key', user_key)
config.set('Passkey','user_secret', user_secret)
config.write(cfgfile)
cfgfile.close()
#Gather data by time period
def gather_data(auth, path, activity_type, time_input):
if time_input == '1d':
date_list = (auth.time_series('%s/%s' % (path, activity_type), period=time_input))
final_sum = next (iter (date_list.values()))[-1]['value']
    elif time_input in ('7d', '30d', '1w', '1m', '3m', '6m', '1y'):  # '1d' is handled above
date_list = (auth.time_series('%s/%s' % (path, activity_type), period=time_input))
final_sum = 0
for item in range(len(next (iter (date_list.values())))):
final_sum = final_sum + int(next (iter (date_list.values()))[item]['value'])
elif time_input == 'yesterday':
date_list = (auth.time_series('%s/%s' % (path, activity_type), period='1w'))
final_sum = next (iter (date_list.values()))[-2]['value']
    elif len(time_input) == 10:  # a specific date in 'YYYY-MM-DD' form
        date_list = (auth.time_series('%s/%s' % (path, activity_type), period='max'))
        date = next (iter (date_list.values()))
        final_sum = 0  # default if the requested date is absent, avoiding a NameError
        for item in range(len(date)):
            if (date[item]['dateTime']) == time_input:
                final_sum = (date[item]['value'])
elif time_input == 'last_week':
date_list = (auth.time_series('%s/%s' % (path, activity_type), period='max'))
date_list2 = next (iter (date_list.values()))
date_list3 = date_list2[-days_since_sunday:]
final_sum = 0
for item in range(len(date_list3)):
final_sum = final_sum + int(date_list3[item]['value'])
return(final_sum)
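#Usage examples for gather_data(), one per accepted time_input form
#(authd_client is created below; the date shown is illustrative):
# gather_data(authd_client, 'activities', 'steps', '1d')         #today's total
# gather_data(authd_client, 'activities', 'steps', 'yesterday')  #yesterday's total
# gather_data(authd_client, 'activities', 'steps', '7d')         #summed over the period
# gather_data(authd_client, 'activities', 'steps', '2016-01-31') #a single date, YYYY-MM-DD
# gather_data(authd_client, 'activities', 'steps', 'last_week')  #summed since last Sunday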
#Keys for the fitbit API
if not os.path.exists("/home/eric/.conky/Fitbit/config.cfg"):
gather_keys()
config.read('/home/eric/.conky/Fitbit/config.cfg')
user_key = config.get('Passkey', 'user_key')
user_secret = config.get('Passkey', 'user_secret')
authd_client = fitbit.Fitbit(client_key, client_secret, resource_owner_key=user_key, resource_owner_secret=user_secret)
#days since last sunday
d = datetime.datetime.today()
today = datetime.date(d.year, d.month, d.day)
days_since_sunday = today.weekday() + 1
#Information on how many steps today
steps_today = gather_data(authd_client, 'activities', 'steps', "1d")
today_ff = open('/home/eric/.conky/Fitbit/steps_format.txt', 'w')
today_ff.write(str(steps_today))
if int(steps_today) > 10000:
steps_today = 10000
today_f = open('/home/eric/.conky/Fitbit/steps.txt', 'w')
today_f.write(str(steps_today))
else:
today_f = open('/home/eric/.conky/Fitbit/steps.txt', 'w')
today_f.write(str(steps_today))
#Information on how many steps since last sunday
steps_this_week = gather_data(authd_client, 'activities', 'steps', "last_week")
week_ff = open('/home/eric/.conky/Fitbit/week_format.txt', 'w')
week_ff.write(str(steps_this_week))
if int(steps_this_week) > 70000:
steps_this_week = 70000
week_f = open('/home/eric/.conky/Fitbit/week.txt', 'w')
week_f.write(str(steps_this_week))
else:
week_f = open('/home/eric/.conky/Fitbit/week.txt', 'w')
week_f.write(str(steps_this_week))
#Information on how many floors this week
daily_floors = gather_data(authd_client, 'activities', 'floors', "1d")
floor_ff = open('/home/eric/.conky/Fitbit/floor_format.txt', 'w')
floor_ff.write(str(daily_floors))
|
Exadrid/Conky-Fitbit
|
Conky-Fitbit.py
|
Python
|
gpl-3.0
| 4,749
|
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 24 04:03:19 2015
@author: winpython
"""
from matplotlib.pyplot import imshow
import matplotlib.pyplot as plt
import numpy as np
from PIL import Image
import cPickle, pickle
import gzip
thelist = np.array(['8_7', '1_12', '2_8', '3_15', '8_16', '7_1', '0_3', '1_0', '9_18', '3_3', '5_0',
'7_5', '7_3', '2_18', '6_4', '0_11', '0_12', '5_1', '0_19', '2_10', '8_2', '9_19',
'4_5', '4_10', '7_9', '9_13', '8_14', '5_12', '3_1', '6_1', '4_13', '7_4', '7_11',
'9_11', '5_4', '4_19', '5_16', '5_19', '7_6', '6_13', '8_3', '1_8', '3_19', '3_8',
'8_1', '1_19', '1_14', '7_16', '8_0', '8_6', '2_11', '8_13', '7_13', '7_19', '9_9',
'4_1', '1_11', '8_17', '3_14', '9_14', '0_16', '4_6', '5_3', '6_12', '2_14', '5_17',
'7_7', '7_15', '1_1', '4_7', '0_14', '3_6', '1_5', '1_15', '6_19', '9_3', '3_7',
'8_9', '3_10', '5_9', '1_10', '4_3', '0_2', '9_10', '2_0', '0_0', '0_10', '3_11',
'0_8', '8_5', '3_16', '8_8', '9_17', '2_12', '0_1', '4_8', '9_6', '0_4', '9_4',
'6_2', '9_16', '1_3', '7_14', '4_0', '9_15', '0_6', '9_0', '2_5', '4_16', '2_13',
'5_14', '8_15', '1_7', '1_16', '1_2', '1_4', '2_17', '8_19', '5_13', '6_18', '2_16',
'6_16', '0_13', '4_17', '5_8', '4_4', '5_15', '3_17', '6_15', '3_4', '9_12', '4_15',
'4_9', '6_8', '0_9', '1_6', '5_11', '5_7', '4_18', '2_3', '5_6', '4_11', '2_4',
'0_17', '7_17', '1_18', '3_13', '6_3', '0_5', '2_1', '3_2', '1_13', '2_9', '4_14',
'6_14', '7_10', '5_2', '8_12', '2_19', '6_5', '9_7', '9_8', '9_1', '6_6', '1_17',
'7_2', '8_4', '9_2', '5_5', '8_18', '6_11', '3_5', '4_12', '2_7', '3_18', '4_2',
'6_9', '3_0', '3_12', '1_9', '8_10', '7_8', '7_18', '6_17', '7_12', '9_5', '3_9',
'0_7', '8_11', '6_0', '6_7', '2_6', '5_10', '5_18', '0_15', '0_18', '6_10', '7_0',
'2_15', '2_2'])
final_output = np.zeros((200,784),dtype=np.float32)
final_label = np.array([8, 1, 2, 3, 8, 7, 0, 1, 9, 3, 5, 7, 7, 2, 6, 0, 0, 5, 0, 2, 8, 9, 4, 4, 7, 9, 8, 5, 3, 6, 4, 7, 7, 9, 5, 4, 5, 5, 7, 6, 8, 1, 3, 3, 8, 1, 1, 7, 8, 8, 2, 8, 7, 7, 9, 4, 1, 8, 3, 9, 0, 4, 5, 6, 2, 5, 7, 7, 1, 4, 0, 3, 1, 1, 6, 9, 3, 8, 3, 5, 1, 4, 0, 9, 2, 0, 0, 3, 0, 8, 3, 8, 9, 2, 0, 4, 9, 0, 9, 6, 9, 1, 7, 4, 9, 0, 9, 2, 4, 2, 5, 8, 1, 1, 1, 1, 2, 8, 5, 6, 2, 6, 0, 4, 5, 4, 5, 3, 6, 3, 9, 4, 4, 6, 0, 1, 5, 5, 4, 2, 5, 4, 2, 0, 7, 1, 3, 6, 0, 2, 3, 1, 2, 4, 6, 7, 5, 8, 2, 6, 9, 9, 9, 6, 1, 7, 8, 9, 5, 8, 6, 3, 4, 2, 3, 4, 6, 3, 3, 1, 8, 7, 7, 6, 7, 9, 3, 0, 8, 6, 6, 2, 5, 5, 0, 0, 6, 7, 2, 2],dtype=np.int64)
for i in range(200):
print "reading", i, "..."
pil_im = Image.open( thelist[i] + ".jpg" ).convert('L')
#imshow(np.asarray(pil_im)) # before resize
pil_im = pil_im.resize((28, 28), Image.BILINEAR )
pil_im = np.array(pil_im)
fig = plt.figure()
plotwindow = fig.add_subplot()
plt.imshow(pil_im, cmap='gray')
plt.show()
#print("test")
#print(pil_im)
note = 0
for j in range(28):
for k in range(28):
            final_output[i][note]= ((255 - pil_im[j][k])/255.)  # invert and scale into [0, 1]
note += 1
print " in ", note, "...",
print " "
print "Finished Picture..."
print "Starting label"
print "Finished Labeling..."
print "Starting cpickle"
outputandlabel = final_output, final_label
f = gzip.open("training_data_200v2.pkl.gz", 'wb')
cPickle.dump(outputandlabel, f)
f.close()
print "Finished cPickle..."
print "\ ! congradulation ! /"
#f = open("pic1.txt", "r")
'''
imshow(np.asarray(pil_im)) # before resize
pil_im = pil_im.resize((28, 28), Image.BILINEAR )
pil_im = np.array(pil_im)
#print(np.array(pil_im))
#imshow(np.asarray(pil_im))
fig = plt.figure()
plotwindow = fig.add_subplot()
plt.imshow(pil_im, cmap='gray')
plt.show()
print("test")
print(pil_im)
'''
|
artmusic0/theano-learning.part02
|
Training_data 2.0/rd_file_resize_rand_gz.py
|
Python
|
gpl-3.0
| 4,053
|
Workflow_2 = Workflow(
Inputs=[
VariableGenerator_1
]
)
Flow123d_3 = Flow123dAction(
Inputs=[
Workflow_2.input()
],
YAMLFile='resources/test1.yaml'
)
Flow123d_4 = Flow123dAction(
Inputs=[
Flow123d_3
],
YAMLFile='resources/test2.yaml'
)
Workflow_2.set_config(
OutputAction=Flow123d_4,
InputAction=Flow123d_3
)
|
GeoMop/GeoMop
|
testing/Analysis/pipeline/results/workflow3.py
|
Python
|
gpl-3.0
| 373
|
#!/usr/bin/env python3
# pip3 install youtube_dl
# dnf install ffmpeg
from __future__ import unicode_literals
from mpd import MPDClient
from os import listdir, makedirs, path, system
from shutil import move
from sys import argv
from youtube_dl import YoutubeDL
from youtube_dl.utils import DownloadError
library_path = '/music/'
mpd = MPDClient()
new_musics = []
ydl_opts = {'format': 'bestaudio/best',
'postprocessors': [{
'key': 'FFmpegExtractAudio',
'preferredcodec': 'mp3'
}]
}
with YoutubeDL(ydl_opts) as ydl, open(argv[1], 'r') as openfile:
try:
for url in openfile.read().split('\n'):
if url:
ydl.download([url])
except DownloadError as error:
print("\033[31mFailed\033[0m: {0}".format(error))
for each in listdir('.'):
if path.isfile(each) and each[-4:] == '.mp3':
print("Moving '{0}'".format(each))
try:
artist, song, *rest = each.split('-')
except ValueError:
artist = song = ''
else:
artist = artist.strip()
song = song.strip()
artist_input = input("Artist [{0}]: ".format(artist))
song_input = input("Song [{0}]: ".format(song))
if artist_input:
artist = artist_input
if song_input:
song = song_input
if not path.exists('{0}{1}'.format(library_path, artist)):
makedirs('{0}{1}'.format(library_path, artist))
if not path.exists('{0}{1}/Unknown Album'.format(library_path, artist)):
makedirs('{0}{1}/Unknown Album'.format(library_path, artist))
move(each, '{0}{1}/Unknown Album/{2}.mp3'.format(library_path, artist, song))
new_musics.append('{0}/Unknown Album/{1}.mp3'.format(artist, song))
mpd.connect("localhost", 6600)
mpd.update()
system('restorecon -rv {0}; chown mpd:mpd -R {0}'.format(library_path))
for music in new_musics: # Add new songs to MPD playlist
mpd.playlistadd('New', music)
mpd.clear()
mpd.load('New')
|
flippym/spytify-server
|
Scripts/music-downloader.py
|
Python
|
gpl-3.0
| 2,070
|
import pygame
import configparser
import os
import time
import sys
import urllib.request
##
# Install:
# pip3 install pygame.whl
##
version = 0
pygame.init() # Get PyGame ready
config = configparser.ConfigParser() # Get configparser ready....
home = os.path.expanduser("~")
if not os.path.exists("/.CrackGames/"):
try:
os.makedirs("/.CrackGames/")
r = "/.CrackGames/Resources/"
except Exception:
print("Uh-oh we cannot write to / !")
print("Using home directory instead!")
r = home + "/.CrackGames/Resources/"
pass
if not os.path.exists(home + "/.CrackGames/"):
os.makedirs(home + "/.CrackGames/")
try:
print(r)
except Exception:
r = "/.CrackGames/Resources/"
print("Set resources directory to " + r)
if not os.path.exists(r):
os.makedirs(r)
if not os.path.exists(r + "/img"):
os.makedirs(r + "/img")
##########################################
# This is Crack Games' Molester Simulator#
##########################################
#########################################################################################
# BOMB TERRORIST ATTACK ISIS JOIN NOW FREE RECRUIT BOMB ATTACK DESTROY BOMB PLANE CRASH #
#########################################################################################
def dl(url, out):
print("Saving " + url + " to " + out + "...")
g = urllib.request.urlopen(url)
with open(r + "/img/" + out, 'wb+') as f:
f.write(g.read())
print("Done!")
def downloadr():
print("Downloading resources... Please standby...")
dl("https://raw.githubusercontent.com/CrackGames/MolesterSimulator/master/img/BackGround.png", "BackGround.png")
dl("https://github.com/CrackGames/MolesterSimulator/raw/master/img/DickExtension.png", "DickExtension.png")
dl("https://github.com/CrackGames/MolesterSimulator/raw/master/img/ManClothesOff.png", "ManClothesOff.png")
dl("https://github.com/CrackGames/MolesterSimulator/raw/master/img/ManClothesOff2.png", "ManClothesOff2.png")
dl("https://github.com/CrackGames/MolesterSimulator/raw/master/img/ManClothesOn.png", "ManClothesOn.png")
dl("https://github.com/CrackGames/MolesterSimulator/raw/master/img/OnionRing.png", "OnionRing.png")
dl("https://github.com/CrackGames/MolesterSimulator/raw/master/img/TOOTHPASTE.png", "TOOTHPASTE.png")
dl("https://github.com/CrackGames/MolesterSimulator/raw/master/img/Viagra.png", "Viagra.png")
dl("https://github.com/CrackGames/MolesterSimulator/raw/master/img/WD-40.png", "WD-40.png")
dl("https://github.com/CrackGames/MolesterSimulator/raw/master/img/WomanClothesOff.png", "WomanClothesOff.png")
dl("https://github.com/CrackGames/MolesterSimulator/raw/master/img/WomanClothesOff2.png", "WomanClothesOff2.png")
dl("https://github.com/CrackGames/MolesterSimulator/raw/master/img/cursor.png", "cursor.png")
dl("https://github.com/CrackGames/MolesterSimulator/raw/master/img/steamsalty.png", "steamsalty.png")
print("Resource download complete! Please enjoy your play!")
if not os.path.isfile(r + "/img/" + "BackGround.png") or not os.path.isfile(r + "/img/" + "DickExtension.png") or not os.path.isfile(r + "/img/" + "ManClothesOff.png") or not os.path.isfile(r + "/img/" + "ManClothesOff2.png") or not os.path.isfile(r + "/img/" + "ManClothesOn.png") or not os.path.isfile(r + "/img/" + "OnionRing.png") or not os.path.isfile(r + "/img/" + "TOOTHPASTE.png") or not os.path.isfile(r + "/img/" + "Viagra.png") or not os.path.isfile(r + "/img/" + "WD-40.png") or not os.path.isfile(r + "/img/" + "WomanClothesOff.png") or not os.path.isfile(r + "/img/" + "WomanClothesOff2.png") or not os.path.isfile(r + "/img/" + "cursor.png") or not os.path.isfile(r + "/img/" + "steamsalty.png"):
downloadr()
def reloadimg():
global WD_40
global WD_40rect
global TOOTHPASTE
global TOOTHPASTErect
global PLAYER
global PLAYERrect
global WOMAN
global WOMANrect
global VIAGRA
global VIAGRArect
global CURSOR
global CURSORrect
global bg
global bgr
thumby = pygame.display.set_mode()
thumby = pygame.image.load(r + "img/ManClothesOff.png")
pygame.display.set_icon(thumby)
WD_40 = pygame.image.load(r + "img/WD-40.png")
WD_40rect = WD_40.get_rect()
    TOOTHPASTE = pygame.image.load(r + "img/TOOTHPASTE.png")  # filename case must match the downloaded file
TOOTHPASTErect = TOOTHPASTE.get_rect()
PLAYER = pygame.image.load(r + "img/ManClothesOn.png")
PLAYERrect = PLAYER.get_rect()
WOMAN = pygame.image.load(r + "img/WomanClothesOff.png")
WOMANrect = WOMAN.get_rect()
    VIAGRA = pygame.image.load(r + "img/Viagra.png")  # filename case must match the downloaded file
VIAGRArect = VIAGRA.get_rect()
CURSOR = pygame.image.load(r + "img/cursor.png")
CURSORrect = CURSOR.get_rect()
bg = pygame.image.load(r + "img/BackGround.png")
bgr = bg.get_rect()
def resetconfig(cfg):
global config
print("Resetting " + cfg + "!")
config['screen'] = {'width': '320',
'height': '240'}
config['misc'] = {'profanelanguage': 'no'}
config['savedata'] = {'totalviagra': '0',
'totalwd40': '0',
'totaltoothpaste': '0',
'totalwomen': '0',
'totalchildren': '0',
'totalmen': '0',
'gender': 'none',
'totalfucked': '0',
'totalscore': '0',
'womenhumps': '0',
'menhumps': '0',
'childrenhumps': '0',
'totalhumps': '0',
'money': '0',
'scoremultiplier': '1',
'name': 'none',
'macemultiplier': '1',
'failedgirl': '0',
'failedman': '0',
'successfullmen': '0',
'successfullwomen': '0',
'demo': 'no',
'level': '0',
'world': '0',
'skin': 'none',
'reveal': 'False'}
try:
with open(cfg, 'w+') as configfile:
config.write(configfile)
except Exception:
print("Couldn't write to " + cfg + "! Trying to continue anyways...")
def reloadcfg(cfg):
config.read(cfg)
global screenwidth
global screenheight
global resolution
global profanelang
global screen
global womenhumps
global totalhumps
global reveal
print("Reloading " + cfg + "!")
screencfg = config['screen']
screenwidth = int(screencfg['width'])
screenheight = int(screencfg['height'])
resolution = [screenwidth, screenheight]
misccfg = config['misc']
profanelang = misccfg.getboolean('profanelanguage')
screen = pygame.display.set_mode(resolution) # Set our resolution
savecfg = config['savedata']
womenhumps = savecfg['womenhumps']
totalhumps = savecfg['totalhumps']
reveal = savecfg['reveal']
resetconfig("/.CrackGames/molester.ini")
resetconfig(home + "/.CrackGames/molester.ini")
touchingwoman = False
cfgfile = "None"
if os.path.isfile(home + "/.CrackGames/molester.ini"):
cfgfile = home + "/.molester.ini"
elif os.path.isfile("/.CrackGames/molester.ini"):
cfgfile = "/.CrackGames/molester.ini"
if cfgfile == "None":
print("Config file not found.")
print("Generating default config and loading it")
resetconfig("/.CrackGames/molester.ini")
resetconfig(home + "/.CrackGames/molester.ini")
reloadcfg(cfgfile)
SimRunning = True
reloadimg()
speed = [5, 0]
backcolor = 0, 0, 80
PLAYERrect.y = int(screenheight) - screenheight / 6
WOMANrect.y = int(screenheight) - screenheight / 6
WOMANrect.x = int(screenwidth) / 2
screen.fill(backcolor)
screen.blit(WOMAN, WOMANrect)
screen.blit(PLAYER, PLAYERrect)
PLAYERrect = PLAYERrect.move(speed)
screen.fill(backcolor)
screen.blit(PLAYER, PLAYERrect)
pygame.display.flip()
PLAYER = pygame.image.load(r + "img/ManClothesOff.png")
time.sleep(0.01)
PLAYERrect = PLAYERrect.move(speed)
screen.fill(backcolor)
screen.blit(PLAYER, PLAYERrect)
pygame.display.flip()
PLAYER = pygame.image.load(r + "img/ManClothesOff2.png")
time.sleep(0.01)
pygame.display.set_caption("Molester Simulator by Crack Games V." + str(version) + " (humped " + totalhumps + ") [DEMO VERSION]")
pygame.key.set_repeat(1, 10)
bg = pygame.transform.scale(bg, (int(screenwidth), int(screenheight)))
PLAYER = pygame.transform.scale(PLAYER, (int(screenheight * 0.08), int(screenwidth * 0.08)))
WOMAN = pygame.transform.scale(WOMAN, (int(screenheight * 0.08), int(screenwidth * 0.08)))
print("test")
revealdone = False
print(reveal)
while SimRunning: # Keeps the game going...
if reveal == "False":
print(reveal)
reveal = False
if reveal == "True":
print(reveal)
reveal = True
if reveal and not revealdone:
if not os.path.isfile(r + "/img/reveal.png"):
dl("https://github.com/CrackGames/MolesterSimulator/raw/master/img/reveal.png", "reveal.png")
revealer = pygame.image.load(r + "img/reveal.png")
revealerrect = revealer.get_rect()
revealerrect.x = PLAYERrect.x - 50
revealerrect.y = int(screenheight) - screenheight / 2.5
revealdone = True
if PLAYERrect.colliderect(WOMANrect) and not touchingwoman:
print("touched")
touchingwoman = True
womenhumps = int(womenhumps) + 1
totalhumps = 1 + int(totalhumps)
config['savedata']['womenhumps'] = str(womenhumps)
config['savedata']['totalhumps'] = str(totalhumps)
with open(cfgfile, 'w+') as configfile:
config.write(configfile)
pygame.display.set_caption("Molester Simulator by Crack Games V." + str(version) + " (humped " + str(totalhumps) + ") [DEMO VERSION]")
if not PLAYERrect.colliderect(WOMANrect) and touchingwoman:
print("Untouched")
touchingwoman = False
for event in pygame.event.get():
cursorx = pygame.mouse.get_pos()[0]
cursory = pygame.mouse.get_pos()[1]
if cursory == 0: pygame.mouse.set_visible(True)
if cursorx == 0: pygame.mouse.set_visible(True)
if cursorx > 0 and cursory > 0: pygame.mouse.set_visible(False)
CURSORrect.x = cursorx
CURSORrect.y = cursory
screen.fill(backcolor)
screen.blit(bg, bgr)
screen.blit(PLAYER, PLAYERrect)
screen.blit(WOMAN, WOMANrect)
if reveal: revealerrect.x = PLAYERrect.x - 50
if reveal: screen.blit(revealer, revealerrect)
screen.blit(CURSOR, CURSORrect)
pygame.display.flip()
if event.type == pygame.QUIT:
print(event)
pygame.display.quit()
pygame.quit()
SimRunning = False
if hasattr(event, 'key'):
print(event)
if event.key == pygame.K_b and totalhumps == 69:
print("easter...")
config['savedata']['reveal'] = 'True'
with open(cfgfile, 'w+') as configfile:
config.write(configfile)
reveal = True
if event.key == pygame.K_SPACE:
#PLAYERrect = PLAYERrect.move(speed)
screen.fill(backcolor)
screen.blit(bg, bgr)
screen.blit(PLAYER, PLAYERrect)
screen.blit(WOMAN, WOMANrect)
if reveal: revealerrect.x = PLAYERrect.x - 50
if reveal: screen.blit(revealer, revealerrect)
screen.blit(CURSOR, CURSORrect)
pygame.display.flip()
PLAYER = pygame.image.load(r + "img/ManClothesOff.png")
PLAYER = pygame.transform.scale(PLAYER, (int(screenheight * 0.08), int(screenwidth * 0.08)))
time.sleep(0.01)
oldx = PLAYERrect.x
#PLAYERrect = PLAYERrect.move(speed)
print("1st")
print(oldx)
PLAYERrect.x = oldx + 3
print(oldx + 3)
screen.fill(backcolor)
screen.blit(bg, bgr)
screen.blit(PLAYER, PLAYERrect)
screen.blit(WOMAN, WOMANrect)
if reveal: revealerrect.x = PLAYERrect.x - 50
if reveal: screen.blit(revealer, revealerrect)
screen.blit(CURSOR, CURSORrect)
pygame.display.flip()
#PLAYER = pygame.image.load(r + "img/ManClothesOff2.png")
#PLAYER = pygame.transform.scale(PLAYER, (int(screenheight * 0.08), int(screenwidth * 0.08)))
#time.sleep(0.01)
#PLAYERrect = PLAYERrect.move(speed)
#print(speed)
#screen.fill(backcolor)
#screen.blit(bg, bgr)
#screen.blit(PLAYER, PLAYERrect)
#screen.blit(WOMAN, WOMANrect)
#screen.blit(CURSOR, CURSORrect)
#pygame.display.flip()
PLAYER = pygame.image.load(r + "img/ManClothesOff.png")
PLAYER = pygame.transform.scale(PLAYER, (int(screenheight * 0.08), int(screenwidth * 0.08)))
time.sleep(0.01)
#PLAYERrect = PLAYERrect.move(speed)
print("2nd")
print(oldx)
PLAYERrect.x = oldx
print(PLAYERrect.x)
PLAYERrect.x = oldx
#PLAYERrect = PLAYERrect.move([speed[0] * -1, speed[1] * -1])
#print([speed[0] * -1, speed[1] * -1])
screen.fill(backcolor)
screen.blit(bg, bgr)
screen.blit(PLAYER, PLAYERrect)
screen.blit(WOMAN, WOMANrect)
if reveal: revealerrect.x = PLAYERrect.x - 50
if reveal: screen.blit(revealer, revealerrect)
screen.blit(CURSOR, CURSORrect)
pygame.display.flip()
#PLAYER = pygame.image.load(r + "img/ManClothesOff2.png")
#PLAYER = pygame.transform.scale(PLAYER, (int(screenheight * 0.08), int(screenwidth * 0.08)))
#time.sleep(0.01)
#PLAYERrect = PLAYERrect.move([speed[0] * -1, speed[1] * -1])
#print([speed[0] * -1, speed[1] * -1])
#screen.fill(backcolor)
#screen.blit(bg, bgr)
#screen.blit(PLAYER, PLAYERrect)
#screen.blit(WOMAN, WOMANrect)
#screen.blit(CURSOR, CURSORrect)
#pygame.display.flip()
if event.key == pygame.K_LEFT or event.key == pygame.K_RIGHT:
print(event)
if event.key == pygame.K_LEFT and event.type == pygame.KEYDOWN: speed = [-5, 0]
if event.key == pygame.K_RIGHT and event.type == pygame.KEYDOWN: speed = [5, 0]
print("Moving!" + str(speed))
PLAYERrect = PLAYERrect.move(speed)
screen.fill(backcolor)
screen.blit(bg, bgr)
screen.blit(PLAYER, PLAYERrect)
screen.blit(WOMAN, WOMANrect)
if reveal: revealerrect.x = PLAYERrect.x - 50
if reveal: screen.blit(revealer, revealerrect)
screen.blit(CURSOR, CURSORrect)
pygame.display.flip()
PLAYER = pygame.image.load(r + "img/ManClothesOff.png")
PLAYER = pygame.transform.scale(PLAYER, (int(screenheight * 0.08), int(screenwidth * 0.08)))
time.sleep(0.01)
PLAYERrect = PLAYERrect.move(speed)
screen.fill(backcolor)
screen.blit(bg, bgr)
screen.blit(PLAYER, PLAYERrect)
screen.blit(WOMAN, WOMANrect)
if reveal: revealerrect.x = PLAYERrect.x - 50
if reveal: screen.blit(revealer, revealerrect)
screen.blit(CURSOR, CURSORrect)
pygame.display.flip()
PLAYER = pygame.image.load(r + "img/ManClothesOff2.png")
PLAYER = pygame.transform.scale(PLAYER, (int(screenheight * 0.08), int(screenwidth * 0.08)))
time.sleep(0.01)
exit()
|
CrackGames/MolesterSimulator
|
MolesterSim.py
|
Python
|
gpl-3.0
| 16,500
|
from os.path import join
from setuptools import setup, find_packages
import sys
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def get_version():
with open(join('tinytag', '__init__.py')) as f:
for line in f:
if line.startswith('__version__ ='):
return line.split('=')[1].strip().strip('"\'')
long_description = None
if 'upload' in sys.argv or 'register' in sys.argv:
readmemd = "\n" + "\n".join([read('README.md')])
print("converting markdown to reStucturedText for upload to pypi.")
from urllib.request import urlopen
from urllib.parse import quote
import json
import codecs
url = 'http://johnmacfarlane.net/cgi-bin/trypandoc?text=%s&from=markdown&to=rst'
urlhandler = urlopen(url % quote(readmemd))
result = json.loads(codecs.decode(urlhandler.read(), 'utf-8'))
long_description = result['result']
else:
long_description = "\n" + "\n".join([read('README.md')])
setup(
name='tinytag',
version=get_version(),
description='Read music meta data and length of MP3, OGG, FLAC and Wave files',
long_description=long_description,
author='Tom Wallroth',
author_email='tomwallroth@gmail.com',
url='https://github.com/devsnd/tinytag/',
license='GPLv3',
packages=find_packages(),
install_requires=[],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Multimedia',
'Topic :: Multimedia :: Sound/Audio',
],
zip_safe=False,
tests_require=["nose"],
)
|
tilboerner/tinytag
|
setup.py
|
Python
|
gpl-3.0
| 2,098
|
import splicetestlib
from splicetestlib.splice_testcase import *
from splicetestlib.splice_testcase_web import *
import nose
class test_splice_deleted_2(SpliceTestcase, Splice_has_FAKE_SPACEWALK, Splice_has_Manifest, Splice_has_WebUI):
def _setup(self):
splicetestlib.fake_spacewalk_env(self.ss.Instances["FAKE_SPACEWALK"][0], "test_deleted_2")
# creating orgs
splicetestlib.sst_step(self.ss.Instances["KATELLO"][0], self.ss.Instances["FAKE_SPACEWALK"][0])
# uploading manifest
self.katello.upload_manifest("satellite-1", self.ss.config["manifest"])
for step in range(24):
splicetestlib.sst_step(self.ss.Instances["KATELLO"][0], self.ss.Instances["FAKE_SPACEWALK"][0])
self.ss.Instances["KATELLO"][0].recv_exit_status("ntpdate pool.ntp.org", timeout=60)
def test_01_system_was_removed(self):
"""
Checking if system is not present in Katello
"""
systems = self.katello.list_systems("satellite-1")
nose.tools.assert_equals(len(systems), 0)
def test_02_active_first_two_days(self):
"""
Active report first two days
Expecting 1 current subscription
"""
Splice_has_WebUI.splice_check_report(days_start=4, days_end=3, current=1, state=['Active'])
def test_03_inactive_first_two_days(self):
"""
Inactive report first two days
Expecting 0 subscriptions
"""
Splice_has_WebUI.splice_check_report(days_start=4, days_end=3, state=['Inactive'])
def test_04_deleted_first_two_days(self):
"""
Deleted report first two days
Expecting 0 subscriptions
"""
Splice_has_WebUI.splice_check_report(days_start=4, days_end=3, state=['Deleted'])
def test_05_consolidated_first_two_days(self):
"""
Consolidated report first two days
Expecting 1 current subscription
"""
Splice_has_WebUI.splice_check_report(days_start=4, days_end=3, current=1, state=['Active', 'Inactive', 'Deleted'])
def test_06_active_last_two_days(self):
"""
Active report last two days
Expecting 0 subscription
"""
Splice_has_WebUI.splice_check_report(days_start=1, days_end=0, state=['Active'])
def test_07_inactive_last_two_days(self):
"""
Inactive report last two days
Expecting 1 current subscription
"""
Splice_has_WebUI.splice_check_report(days_start=1, days_end=0, current=1, state=['Inactive'])
def test_08_deleted_last_two_days(self):
"""
Deleted report last two days
Expecting 0 subscriptions
"""
Splice_has_WebUI.splice_check_report(days_start=1, days_end=0, state=['Deleted'])
def test_09_consolidated_last_two_days(self):
"""
Consolidated report last two days
Expecting 1 current subscription
"""
Splice_has_WebUI.splice_check_report(days_start=1, days_end=0, current=1, state=['Active', 'Inactive', 'Deleted'])
def test_10_active_four_days(self):
"""
Active report four days
Expecting 1 current subscription
"""
Splice_has_WebUI.splice_check_report(days_start=4, days_end=0, current=1, state=['Active'])
def test_11_inactive_four_days(self):
"""
Inactive report four days
Expecting 0 subscriptions
"""
Splice_has_WebUI.splice_check_report(days_start=4, days_end=0, state=['Inactive'])
def test_12_deleted_four_days(self):
"""
Deleted report four days
Expecting 1 current subscription
"""
Splice_has_WebUI.splice_check_report(days_start=4, days_end=-1, current=1, state=['Deleted'])
def test_13_consolidated_four_days(self):
"""
Consolidated report four days
Expecting 1 current subscription
"""
Splice_has_WebUI.splice_check_report(days_start=4, days_end=-1, current=1, state=['Active', 'Inactive', 'Deleted'])
def test_14_active_history(self):
"""
Active report month ago
Expecting 0 subscriptions
"""
Splice_has_WebUI.splice_check_report(days_start=31, days_end=21, state=['Active'])
def test_15_inactive_history(self):
"""
Inactive report month ago
Expecting 0 subscriptions
"""
Splice_has_WebUI.splice_check_report(days_start=31, days_end=21, state=['Inactive'])
def test_16_deleted_history(self):
"""
Deleted report month ago
Expecting 0 subscriptions
"""
Splice_has_WebUI.splice_check_report(days_start=31, days_end=21, state=['Deleted'])
def test_17_consolidated_history(self):
"""
Consolidated report month ago
Expecting 0 subscriptions
"""
Splice_has_WebUI.splice_check_report(days_start=31, days_end=21, state=['Active', 'Inactive', 'Deleted'])
def test_18_active_future(self):
"""
Active report future month
Expecting 0 subscriptions
"""
Splice_has_WebUI.splice_check_report(days_start=-21, days_end=-31, state=['Active'])
def test_19_inactive_future(self):
"""
Inactive report future month
Expecting 0 subscriptions
"""
Splice_has_WebUI.splice_check_report(days_start=-21, days_end=-31, state=['Inactive'])
def test_20_deleted_future(self):
"""
Deleted report future month
Expecting 0 subscriptions
"""
Splice_has_WebUI.splice_check_report(days_start=-21, days_end=-31, state=['Deleted'])
def test_21_consolidated_future(self):
"""
Consolidated report future month
Expecting 0 subscriptions
"""
Splice_has_WebUI.splice_check_report(days_start=-21, days_end=-31, state=['Active', 'Inactive', 'Deleted'])
def _cleanup(self):
splicetestlib.cleanup_katello(self.ss.Instances["KATELLO"][0], self.katello, full_reset=False)
if __name__ == "__main__":
nose.run(defaultTest=__name__, argv=[__file__, '-v'])
|
RedHatQE/splice-testing-tools
|
splice-tests/test_deleted_2_webui.py
|
Python
|
gpl-3.0
| 6,106
|
# coding:utf-8
import logging
from utils.session import Session
from .BaseHandler import BaseHandler
from utils.image_storage import storage
from utils.common import require_logined
from utils.response_code import RET
from config import image_url_prefix
class AvatarHandler(BaseHandler):
"""头像"""
@require_logined
def post(self):
self.session = Session(self)
user_id = self.session.data["user_id"]
try:
avatar = self.request.files["avatar"][0]["body"]
except Exception as e:
logging.error(e)
            return self.write(dict(errno=RET.PARAMERR, errmsg="parameter error"))
try:
img_name = storage(avatar)
except Exception as e:
logging.error(e)
img_name = None
if not img_name:
return self.write({"errno":RET.THIRDERR, "errmsg":"qiniu error"})
try:
ret = self.db.execute("update ih_user_profile set up_avatar=%s where up_user_id=%s", img_name, user_id)
except Exception as e:
logging.error(e)
return self.write({"errno":RET.DBERR, "errmsg":"upload failed"})
img_url = image_url_prefix + img_name
self.write({"errno":RET.OK, "errmsg":"OK", "url":img_url})
|
wzj-python-wn/lhome
|
Tornado_Project/handlers/Profile.py
|
Python
|
gpl-3.0
| 1,267
|
from django.test import Client as ClientBase
class Client(ClientBase):
def __init__(self, logged_user=None, **kwargs):
super().__init__(**kwargs)
if logged_user:
assert self.login(
username=logged_user.username, password=logged_user._plaintext_password)
def extract_post_args(dom_elem):
"""Returns a ``post()``-ready dict of all input/select values in ``dom_elem``.
``dom_elem`` being an element extracted from an etree-parsed DOM.
If you have multiple forms in your response, be sure to supply a sub-element if you don't
want all inputs in the page to be included.
"""
result = {}
for input in dom_elem.iterdescendants('input'):
name = input.attrib['name']
value = input.attrib.get('value', '')
if input.attrib['type'] == 'checkbox':
if not input.attrib.get('checked'):
value = ''
if isinstance(result.get(name, None), list):
result[name].append(value)
else:
result[name] = [value]
else:
result[name] = value
for k, v in list(result.items()):
if isinstance(v, list):
# post-process checkbox values
if len(v) == 1:
                # single checkbox, convert into a simple 'on' or '' value
result[k] = 'on' if v[0] else ''
else:
result[k] = {x for x in v if x}
for select in dom_elem.iterdescendants('select'):
options = list(select.xpath('option[@selected=\'selected\']'))
if 'multiple' in select.attrib:
value = [elem.get('value') for elem in options]
else:
try:
value = options[0].get('value')
except IndexError:
value = ''
result[select.attrib['name']] = value
return result
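def _extract_post_args_example():
    """Usage sketch; the form markup is illustrative and assumes lxml is installed."""
    from lxml import html
    dom = html.fromstring(
        '<form><input type="text" name="q" value="hello"/>'
        '<input type="checkbox" name="flag" value="on" checked="checked"/></form>')
    return extract_post_args(dom)  # -> {'q': 'hello', 'flag': 'on'}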
|
erudit/zenon
|
eruditorg/base/test/testcases.py
|
Python
|
gpl-3.0
| 1,875
|
# -*- coding: UTF-8 -*-
"""
Basepaths test cases
@author: Aurélien Gâteau <mail@agateau.com>
@license: GPL v3 or later
"""
import os
import shutil
import tempfile
from pathlib import Path
from yokadi.core import basepaths
from yokadi.tests.yokaditestcase import YokadiTestCase
class BasePathsUnixTestCase(YokadiTestCase):
def setUp(self):
YokadiTestCase.setUp(self)
self._oldWindows = basepaths._WINDOWS
basepaths._WINDOWS = False
def tearDown(self):
basepaths._WINDOWS = self._oldWindows
YokadiTestCase.tearDown(self)
def testMigrateOldDb(self):
oldDb = Path(self.testHomeDir) / '.yokadi.db'
newDb = Path(basepaths.getDbPath(basepaths.getDataDir()))
oldDb.touch()
basepaths.migrateOldDb(str(newDb))
self.assertFalse(oldDb.exists())
self.assertTrue(newDb.exists())
def testMigrateNothingToDo(self):
newDb = Path(basepaths.getDbPath(basepaths.getDataDir()))
basepaths.migrateOldDb(str(newDb))
basepaths.migrateOldHistory()
self.assertFalse(newDb.exists())
def testMigrateOldDbFails(self):
oldDb = Path(self.testHomeDir) / '.yokadi.db'
newDb = Path(basepaths.getDbPath(basepaths.getDataDir()))
oldDb.touch()
newDb.parent.mkdir(parents=True)
newDb.touch()
self.assertRaises(basepaths.MigrationException, basepaths.migrateOldDb, str(newDb))
def testMigrateOldHistory(self):
old = Path(self.testHomeDir) / '.yokadi_history'
new = Path(basepaths.getHistoryPath())
old.touch()
basepaths.migrateOldHistory()
self.assertFalse(old.exists())
self.assertTrue(new.exists())
def testMigrateOldHistoryOverwriteNew(self):
old = Path(self.testHomeDir) / '.yokadi_history'
new = Path(basepaths.getHistoryPath())
with old.open('w') as f:
f.write('old')
new.parent.mkdir(parents=True)
with new.open('w') as f:
f.write('new')
basepaths.migrateOldHistory()
self.assertFalse(old.exists())
with new.open() as f:
newData = f.read()
self.assertEqual(newData, 'old')
def testHistoryEnvVar(self):
path = "foo"
os.environ["YOKADI_HISTORY"] = path
self.assertEqual(basepaths.getHistoryPath(), path)
def testDbEnvVar(self):
path = "foo"
os.environ["YOKADI_DB"] = path
self.assertEqual(basepaths.getDbPath(basepaths.getDataDir()), path)
class BasePathsWindowsTestCase(YokadiTestCase):
def setUp(self):
YokadiTestCase.setUp(self)
self._oldWindows = basepaths._WINDOWS
basepaths._WINDOWS = True
self.testAppDataDir = tempfile.mkdtemp(prefix="yokadi-basepaths-testcase")
os.environ["APPDATA"] = self.testAppDataDir
def tearDown(self):
shutil.rmtree(self.testAppDataDir)
basepaths._WINDOWS = self._oldWindows
YokadiTestCase.tearDown(self)
def testGetCacheDir(self):
expected = os.path.join(self.testAppDataDir, "yokadi", "cache")
self.assertEqual(basepaths.getCacheDir(), expected)
def testGetDataDir(self):
expected = os.path.join(self.testAppDataDir, "yokadi", "data")
self.assertEqual(basepaths.getDataDir(), expected)
def testOldHistoryPath(self):
expected = os.path.join(self.testAppDataDir, ".yokadi_history")
self.assertEqual(basepaths._getOldHistoryPath(), expected)
|
agateau/yokadi
|
yokadi/tests/basepathstestcase.py
|
Python
|
gpl-3.0
| 3,507
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import DrydenMusicApp.models
from django.utils.timezone import utc
import storages.backends.s3boto
import datetime
class Migration(migrations.Migration):
dependencies = [
('DrydenMusicApp', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='music',
name='added',
field=models.DateTimeField(default=datetime.datetime(2016, 8, 15, 18, 14, 43, 265487, tzinfo=utc), auto_now_add=True),
preserve_default=False,
),
migrations.AddField(
model_name='music',
name='first_line',
field=models.CharField(max_length=200, blank=True),
),
migrations.AddField(
model_name='music',
name='title',
field=models.CharField(max_length=200, blank=True),
),
migrations.AlterField(
model_name='music',
name='music_file',
field=models.FileField(null=True, upload_to=DrydenMusicApp.models.data_file_path, storage=storages.backends.s3boto.S3BotoStorage(acl='private', location='music_files'), blank=True),
),
]
|
jdovi/drydenmusic
|
DrydenMusicApp/migrations/0002_auto_20160815_1814.py
|
Python
|
gpl-3.0
| 1,253
|
from itertools import chain
import networkx as nx
__all__ = ["adjacency_data", "adjacency_graph"]
_attrs = dict(id="id", key="key")
def adjacency_data(G, attrs=_attrs):
"""Returns data in adjacency format that is suitable for JSON serialization
and use in Javascript documents.
Parameters
----------
G : NetworkX graph
attrs : dict
A dictionary that contains two keys 'id' and 'key'. The corresponding
values provide the attribute names for storing NetworkX-internal graph
data. The values should be unique. Default value:
:samp:`dict(id='id', key='key')`.
If some user-defined graph data use these attribute names as data keys,
they may be silently dropped.
Returns
-------
data : dict
A dictionary with adjacency formatted data.
Raises
------
NetworkXError
If values in attrs are not unique.
Examples
--------
>>> from networkx.readwrite import json_graph
>>> G = nx.Graph([(1,2)])
>>> data = json_graph.adjacency_data(G)
To serialize with json
>>> import json
>>> s = json.dumps(data)
Notes
-----
Graph, node, and link attributes will be written when using this format
but attribute keys must be strings if you want to serialize the resulting
data with JSON.
The default value of attrs will be changed in a future release of NetworkX.
See Also
--------
adjacency_graph, node_link_data, tree_data
"""
multigraph = G.is_multigraph()
id_ = attrs["id"]
# Allow 'key' to be omitted from attrs if the graph is not a multigraph.
key = None if not multigraph else attrs["key"]
if id_ == key:
raise nx.NetworkXError("Attribute names are not unique.")
data = {}
data["directed"] = G.is_directed()
data["multigraph"] = multigraph
data["graph"] = list(G.graph.items())
data["nodes"] = []
data["adjacency"] = []
for n, nbrdict in G.adjacency():
data["nodes"].append(dict(chain(G.nodes[n].items(), [(id_, n)])))
adj = []
if multigraph:
for nbr, keys in nbrdict.items():
for k, d in keys.items():
adj.append(dict(chain(d.items(), [(id_, nbr), (key, k)])))
else:
for nbr, d in nbrdict.items():
adj.append(dict(chain(d.items(), [(id_, nbr)])))
data["adjacency"].append(adj)
return data
def adjacency_graph(data, directed=False, multigraph=True, attrs=_attrs):
"""Returns graph from adjacency data format.
    Parameters
    ----------
    data : dict
        Adjacency list formatted graph data
    directed : bool
        If True, and direction not specified in data, return a directed graph.
    multigraph : bool
        If True, and multigraph not specified in data, return a multigraph.
    attrs : dict
        A dictionary that contains two keys 'id' and 'key'. The corresponding
        values provide the attribute names for storing NetworkX-internal graph
        data. The values should be unique. Default value:
        :samp:`dict(id='id', key='key')`.
    Returns
    -------
    G : NetworkX graph
        A NetworkX graph object
Examples
--------
>>> from networkx.readwrite import json_graph
>>> G = nx.Graph([(1,2)])
>>> data = json_graph.adjacency_data(G)
>>> H = json_graph.adjacency_graph(data)
Notes
-----
The default value of attrs will be changed in a future release of NetworkX.
See Also
--------
    adjacency_data, node_link_data, tree_data
"""
multigraph = data.get("multigraph", multigraph)
directed = data.get("directed", directed)
if multigraph:
graph = nx.MultiGraph()
else:
graph = nx.Graph()
if directed:
graph = graph.to_directed()
id_ = attrs["id"]
# Allow 'key' to be omitted from attrs if the graph is not a multigraph.
key = None if not multigraph else attrs["key"]
graph.graph = dict(data.get("graph", []))
mapping = []
for d in data["nodes"]:
node_data = d.copy()
node = node_data.pop(id_)
mapping.append(node)
graph.add_node(node)
graph.nodes[node].update(node_data)
for i, d in enumerate(data["adjacency"]):
source = mapping[i]
for tdata in d:
target_data = tdata.copy()
target = target_data.pop(id_)
if not multigraph:
graph.add_edge(source, target)
                # update with target_data (id key already popped) so the node
                # id does not leak into the edge attributes
                graph[source][target].update(target_data)
else:
ky = target_data.pop(key, None)
graph.add_edge(source, target, key=ky)
                graph[source][target][ky].update(target_data)
return graph
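# Hedged usage sketch (not part of the original module): round-trips a small
# graph through the adjacency format defined above.
if __name__ == "__main__":
    G = nx.Graph()
    G.add_edge(1, 2, weight=7)
    data = adjacency_data(G)       # serialize to the adjacency dict format
    H = adjacency_graph(data)      # rebuild an equivalent graph
    assert H.has_edge(1, 2) and H[1][2]["weight"] == 7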
|
SpaceGroupUCL/qgisSpaceSyntaxToolkit
|
esstoolkit/external/networkx/readwrite/json_graph/adjacency.py
|
Python
|
gpl-3.0
| 4,742
|
"""
Object containing dynamic simulation state.
"""
import opiniongame.IO as og_io
import opiniongame.coupling as og_coupling
import opiniongame.opinions as og_opinions
import opiniongame.adjacency as og_adj
import numpy as np
class WorldState:
def __init__(self, adj, couplingWeights, initialOpinions, initialHistorySize=100, historyGrowthScale=2):
self.adj = adj
self.couplingWeights = couplingWeights
self.initialOpinions = initialOpinions
self.individualCount = np.size(initialOpinions, 0)
self.initialHistorySize = initialHistorySize
self.historyGrowthScale = historyGrowthScale
self.initializeHistory()
self.iterCount = 0
@classmethod
def fromCmdlineArguments(cls, cmdline, config):
#
# check optional arguments and generate defaults if missing
#
weights = None
initialOpinions = None
adj = None
if cmdline.args.weights is not None:
weights = og_io.loadNamedMatrix(cmdline.args.weights, 'weights')
else:
weights = og_coupling.weights_no_coupling(config.popSize, config.ntopics)
if cmdline.args.initialOpinions is not None:
initialOpinions = og_io.loadNamedMatrix(cmdline.args.initialOpinions, 'initialOpinions')
else:
initialOpinions = og_opinions.initialize_opinions(config.popSize, config.ntopics)
if cmdline.args.adjacency is not None:
adj = og_io.loadNamedMatrix(cmdline.args.adjacency, 'adjacency')
else:
adj = og_adj.make_adj(config.popSize, 'full')
state = cls(adj, weights, initialOpinions)
state.validate()
#
# set popsize and ntopics based on current state. warn if config
# disagrees with loaded files.
#
wPopsize = np.shape(weights)[0]
wNtopics = np.shape(weights)[1]
if wPopsize != config.popSize:
print("WARNING: popsize from data files disagrees with cfg.")
config.popSize = wPopsize
if wNtopics != config.ntopics:
print("WARNING: ntopics from data files disagrees with cfg.")
config.ntopics = wNtopics
return state
def initializeHistory(self):
self.individualCount = np.size(self.initialOpinions, 0)
self.numTopics = np.size(self.initialOpinions, 1)
self.history = np.zeros([self.initialHistorySize, self.individualCount, self.numTopics])
self.history[0,:,:] = self.initialOpinions[:,:]
self.nextHistoryIndex = 1
def appendToHistory(self, newOpinions):
curHistorySize = np.size(self.history,0)
if curHistorySize == self.nextHistoryIndex:
newhist = np.zeros([int(curHistorySize * self.historyGrowthScale), self.individualCount, self.numTopics])
newhist[0:curHistorySize, :, :] = self.history
self.history = newhist
self.history[self.nextHistoryIndex,:,:] = newOpinions[:,:]
self.nextHistoryIndex += 1
def reset(self):
self.history[:,:,:] = 0
self.nextHistoryIndex = 0
self.iterCount = 0
def currentOpinions(self):
return self.history[self.nextHistoryIndex-1]
def validate(self):
# validation of data sizes
print("WEIGHT SHAPE : "+str(np.shape(self.couplingWeights)))
print("OPINION SHAPE : "+str(np.shape(self.initialOpinions)))
print("ADJACENCY SHAPE: "+str(np.shape(self.adj)))
wPopsize = np.shape(self.couplingWeights)[0]
wNtopics1 = np.shape(self.couplingWeights)[1]
wNtopics2 = np.shape(self.couplingWeights)[2]
oPopsize = np.shape(self.initialOpinions)[0]
oNtopics = np.shape(self.initialOpinions)[1]
aPopsize1 = np.shape(self.adj)[0]
aPopsize2 = np.shape(self.adj)[1]
if aPopsize1 != aPopsize2:
raise ValueError("Adjacency matrix must be square.")
if wNtopics1 != wNtopics2:
raise ValueError("Per-topic weight matrix must be square.")
if wPopsize != oPopsize or wPopsize != aPopsize1 or aPopsize1 != oPopsize:
raise ValueError("Weight tensor, opinion state, and adjacency matrix disagree on population size.")
if oNtopics != wNtopics1:
raise ValueError("Weight tensor and opinion state disagree on topic count.")
print("==> World state validation passed.")
print("")
|
mjsottile/PyOpinionGame
|
opiniongame/state.py
|
Python
|
gpl-3.0
| 4,455
|
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
# pylint: disable=no-init,too-many-instance-attributes
from __future__ import (absolute_import, division, print_function)
import mantid.simpleapi as s_api
from mantid.api import PythonAlgorithm, AlgorithmFactory, MatrixWorkspaceProperty, WorkspaceGroupProperty, \
PropertyMode, MatrixWorkspace, Progress, WorkspaceGroup
from mantid.kernel import Direction, logger
class ApplyPaalmanPingsCorrection(PythonAlgorithm):
_sample_ws_name = None
_corrections_ws_name = None
_use_can = False
_can_ws_name = None
_use_corrections = False
_can_scale_factor = 1.0
_scale_can = False
_output_ws_name = None
_corrections = None
_shift_can = False
_shifted_container = None
_can_shift_factor = 0.0
_sample_ws_wavelength = None
_rebin_container_ws = False
_factors = []
def category(self):
return "Workflow\\MIDAS"
def summary(self):
return "Applies a calculated absorption correction in the Paalman and Pings factor style."
def PyInit(self):
self.declareProperty(MatrixWorkspaceProperty('SampleWorkspace', '', direction=Direction.Input),
doc='Name for the input Sample workspace.')
self.declareProperty(WorkspaceGroupProperty('CorrectionsWorkspace', '',
optional=PropertyMode.Optional, direction=Direction.Input),
doc='Name for the input Corrections workspace.')
self.declareProperty(MatrixWorkspaceProperty('CanWorkspace', '',
optional=PropertyMode.Optional, direction=Direction.Input),
doc='Name for the input Can workspace.')
self.declareProperty(name='CanScaleFactor', defaultValue=1.0,
doc='Factor to scale the can data')
self.declareProperty(name='CanShiftFactor', defaultValue=0.0,
doc='Amount by which to shift the container data')
self.declareProperty(MatrixWorkspaceProperty('OutputWorkspace', '', direction=Direction.Output),
doc='The output corrections workspace.')
self.declareProperty(name='RebinCanToSample',
defaultValue=True,
doc='Enable or disable RebinToWorkspace on CanWorkspace.')
# pylint: disable=too-many-branches
def PyExec(self):
if not self._use_corrections:
logger.information('Not using corrections')
if not self._use_can:
logger.information('Not using container')
prog_container = Progress(self, start=0.0, end=0.2, nreports=4)
prog_container.report('Starting algorithm')
# Units should be wavelength
sample_unit = self._sample_workspace.getAxis(0).getUnit().unitID()
sample_ws_wavelength = self._convert_units_wavelength(self._sample_workspace)
container_ws_wavelength = (self._process_container_workspace(self._container_workspace, prog_container)
if self._use_can else None)
prog_corr = Progress(self, start=0.2, end=0.6, nreports=2)
if self._use_corrections:
prog_corr.report('Preprocessing corrections')
if self._use_can:
# Use container factors
prog_corr.report('Correcting sample and container')
factor_workspaces = self._get_factor_workspaces()
output_workspace = self._correct_sample_can(sample_ws_wavelength, container_ws_wavelength,
factor_workspaces)
correction_type = 'sample_and_can_corrections'
else:
# Use sample factor only
output_workspace = self._correct_sample(sample_ws_wavelength, self._corrections_workspace[0])
correction_type = 'sample_corrections_only'
# Add corrections filename to log values
prog_corr.report('Correcting sample')
s_api.AddSampleLog(Workspace=output_workspace,
LogName='corrections_filename',
LogType='String',
LogText=self._corrections_ws_name)
else:
# Do simple subtraction
output_workspace = self._subtract(sample_ws_wavelength, container_ws_wavelength)
correction_type = 'can_subtraction'
# Add container filename to log values
            can_base = self.getPropertyValue("CanWorkspace")
            # Guard against container names without an underscore, mirroring
            # the sample name handling further below
            if '_' in can_base:
                can_base = can_base[:can_base.index('_')]
prog_corr.report('Adding container filename')
s_api.AddSampleLog(Workspace=output_workspace,
LogName='container_filename',
LogType='String',
LogText=can_base)
prog_wrkflow = Progress(self, 0.6, 1.0, nreports=5)
# Record the container scale factor
if self._use_can and self._scale_can:
prog_wrkflow.report('Adding container scaling')
s_api.AddSampleLog(Workspace=output_workspace,
LogName='container_scale',
LogType='Number',
LogText=str(self._can_scale_factor))
# Record the container shift amount
if self._use_can and self._shift_can:
prog_wrkflow.report('Adding container shift')
s_api.AddSampleLog(Workspace=output_workspace,
LogName='container_shift',
LogType='Number',
LogText=str(self._can_shift_factor))
# Record the type of corrections applied
prog_wrkflow.report('Adding correction type')
s_api.AddSampleLog(Workspace=output_workspace,
LogName='corrections_type',
LogType='String',
LogText=correction_type)
# Add original sample as log entry
sam_base = self.getPropertyValue("SampleWorkspace")
if '_' in sam_base:
sam_base = sam_base[:sam_base.index('_')]
prog_wrkflow.report('Adding sample filename')
s_api.AddSampleLog(Workspace=output_workspace,
LogName='sample_filename',
LogType='String',
LogText=sam_base)
# Convert Units back to original
emode = str(output_workspace.getEMode())
efixed = 0.0
if emode == "Indirect":
efixed = self._get_e_fixed(output_workspace)
output_workspace = self._convert_units(output_workspace, sample_unit, emode, efixed)
self.setProperty('OutputWorkspace', output_workspace)
prog_wrkflow.report('Algorithm Complete')
def validateInputs(self):
"""
Validate user input.
"""
self._setup()
issues = dict()
# Need something to get corrections from
if not (self._use_can or self._use_corrections):
error_msg = 'Must provide either CorrectionsWorkspace or CanWorkspace or both'
issues['CorrectionsWorkspace'] = error_msg
issues['CanWorkspace'] = error_msg
if self._use_corrections:
if not isinstance(self._corrections_workspace, WorkspaceGroup):
issues['CorrectionsWorkspace'] = "The corrections workspace should be a workspace group."
if self._corrections_workspace.size() == 0:
issues['CorrectionsWorkspace'] = "No corrections found in the supplied corrections workspace group."
else:
corrections_issues = []
for factor in self._factors:
if not any(factor in correction_name for correction_name
in self._corrections_workspace.getNames()):
corrections_issues.append(factor + " workspace not present in corrections workspace group.\n")
if corrections_issues:
issues['CorrectionsWorkspace'] = "\n".join(corrections_issues)
sample_ws = self.getProperty("SampleWorkspace").value
if isinstance(sample_ws, MatrixWorkspace):
sample_unit_id = sample_ws.getAxis(0).getUnit().unitID()
# Check sample and container X axis units match
if self._use_can:
can_ws = self.getProperty("CanWorkspace").value
if isinstance(can_ws, MatrixWorkspace):
can_unit_id = can_ws.getAxis(0).getUnit().unitID()
if can_unit_id != sample_unit_id:
issues['CanWorkspace'] = 'X axis unit must match SampleWorkspace'
else:
issues['CanWorkspace'] = 'Must be a MatrixWorkspace'
else:
issues['SampleWorkspace'] = 'Must be a MatrixWorkspace'
return issues
def _setup(self):
"""
Get properties and setup instance variables.
"""
self._sample_workspace = self.getProperty('SampleWorkspace').value
# Get corrections workspace
self._corrections_workspace = self.getProperty('CorrectionsWorkspace').value
self._use_corrections = bool(self._corrections_workspace)
# Get container workspace
self._container_workspace = self.getProperty('CanWorkspace').value
self._use_can = bool(self._container_workspace)
self._can_scale_factor = self.getProperty('CanScaleFactor').value
self._scale_can = self._can_scale_factor != 1.0
self._can_shift_factor = self.getProperty('CanShiftFactor').value
self._shift_can = self._can_shift_factor != 0.0
self._rebin_container_ws = self.getProperty('RebinCanToSample').value
if self._use_corrections:
if self._corrections_workspace.size() == 1:
correction_name = self._corrections_workspace[0].getName()
if 'acc' in correction_name:
self._factors = ['acc']
elif 'ass' in correction_name:
self._factors = ['ass']
if self._corrections_workspace.size() == 2:
self._factors = ['acc', 'ass']
self._corrections_approximation = self._two_factor_corrections_approximation
elif self._corrections_workspace.size() >= 3:
self._factors = ['acc', 'assc', 'acsc']
self._corrections_approximation = self._three_factor_corrections_approximation
def _shift_workspace(self, workspace, shift_factor):
return s_api.ScaleX(InputWorkspace=workspace,
Factor=shift_factor,
OutputWorkspace="__shifted",
Operation="Add", StoreInADS=False)
def _convert_units_wavelength(self, workspace):
unit = workspace.getAxis(0).getUnit().unitID()
if unit != 'Wavelength':
# Configure conversion
if unit == 'dSpacing' or unit == 'DeltaE':
if unit == 'dSpacing':
emode = 'Elastic'
efixed = 0.0
else:
emode = 'Indirect'
efixed = self._get_e_fixed(workspace)
return self._convert_units(workspace, "Wavelength", emode, efixed)
else:
return workspace
def _convert_units(self, workspace, target, emode, efixed):
return s_api.ConvertUnits(InputWorkspace=workspace,
OutputWorkspace="__units_converted",
Target=target, EMode=emode,
EFixed=efixed, StoreInADS=False)
def _get_e_fixed(self, workspace):
from IndirectCommon import getEfixed
return getEfixed(workspace)
def _process_container_workspace(self, container_workspace, prog_container):
        # Apply container shift if needed
if self._shift_can:
# Use temp workspace so we don't modify data
prog_container.report('Shifting can')
shifted_container = self._shift_workspace(container_workspace, self._can_shift_factor)
logger.information('Container shifted by %f' % self._can_shift_factor)
else:
shifted_container = container_workspace
# Apply container scale factor if needed
if self._scale_can:
# Use temp workspace so we don't modify original data
prog_container.report('Scaling can')
scaled_container = self._convert_units_wavelength(shifted_container * self._can_scale_factor)
logger.information('Container scaled by %f' % self._can_scale_factor)
return scaled_container
else:
return self._convert_units_wavelength(shifted_container)
def _get_correction_factor_workspace(self, factor_type):
"""
Gets the full name for a correction factor workspace given the correction type.
        @param factor_type Factor type (ass, acc, acsc, assc)
@return Full name of workspace (None if not found)
"""
if factor_type == 'ass':
def predicate(workspace_name):
return factor_type in workspace_name and 'assc' not in workspace_name
else:
def predicate(workspace_name):
return factor_type in workspace_name
for workspace in self._corrections_workspace:
if predicate(workspace.getName()):
return workspace
return None
def _get_factor_workspaces(self):
"""
:return: A dictionary of the factors to the factor workspaces.
"""
return {factor: self._get_correction_factor_workspace(factor) for factor in self._factors}
def _subtract(self, minuend_workspace, subtrahend_workspace):
"""
Do a simple container subtraction (when no corrections are given).
"""
logger.information('Using simple container subtraction')
if self._rebin_container_ws:
            logger.information('Rebinning container to ensure Minus succeeds')
subtrahend_workspace = s_api.RebinToWorkspace(WorkspaceToRebin=subtrahend_workspace,
WorkspaceToMatch=minuend_workspace,
OutputWorkspace="__rebinned",
StoreInADS=False)
return minuend_workspace - subtrahend_workspace
def _clone(self, workspace):
"""
Clones the specified workspace.
:param workspace: The workspace to clone.
:return: A clone of the specified workspace.
"""
return s_api.CloneWorkspace(InputWorkspace=workspace,
OutputWorkspace="cloned",
StoreInADS=False)
def _correct_sample(self, sample_workspace, a_ss_workspace):
"""
Correct for sample only (when no container is given).
"""
logger.information('Correcting sample')
return sample_workspace / self._convert_units_wavelength(a_ss_workspace)
def _correct_sample_can(self, sample_workspace, container_workspace, factor_workspaces):
"""
Correct for sample and container.
"""
logger.information('Correcting sample and container')
factor_workspaces_wavelength = {factor: self._convert_units_wavelength(workspace) for factor, workspace
in factor_workspaces.items()}
if self._rebin_container_ws:
container_workspace = s_api.RebinToWorkspace(WorkspaceToRebin=container_workspace,
WorkspaceToMatch=factor_workspaces_wavelength['acc'],
OutputWorkspace="rebinned",
StoreInADS=False)
return self._corrections_approximation(sample_workspace, container_workspace, factor_workspaces_wavelength)
def _three_factor_corrections_approximation(self, sample_workspace, container_workspace, factor_workspaces):
acc = factor_workspaces['acc']
acsc = factor_workspaces['acsc']
assc = factor_workspaces['assc']
return (sample_workspace - container_workspace * (acsc / acc)) / assc
def _two_factor_corrections_approximation(self, sample_workspace, container_workspace, factor_workspaces):
acc = factor_workspaces['acc']
ass = factor_workspaces['ass']
return (sample_workspace / ass) - (container_workspace / acc)
# Register algorithm with Mantid
AlgorithmFactory.subscribe(ApplyPaalmanPingsCorrection)
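# Hedged usage sketch (not part of the algorithm): once subscribed, the
# algorithm is callable through mantid.simpleapi. All workspace names below
# are placeholders, not real data.
#
# corrected = s_api.ApplyPaalmanPingsCorrection(
#     SampleWorkspace='sample_red',
#     CorrectionsWorkspace='corrections_group',
#     CanWorkspace='container_red',
#     CanScaleFactor=1.0,
#     OutputWorkspace='sample_corrected')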
|
mganeva/mantid
|
Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ApplyPaalmanPingsCorrection.py
|
Python
|
gpl-3.0
| 17,291
|
# Importing common provides default settings, see:
# https://github.com/taigaio/taiga-back/blob/master/settings/common.py
from .common import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': os.getenv('TAIGA_DB_NAME'),
'HOST': os.getenv('TAIGA_DB_HOST'),
'USER': os.getenv('TAIGA_DB_USER'),
'PASSWORD': os.getenv('TAIGA_DB_PASSWORD')
}
}
TAIGA_HOSTNAME = os.getenv('TAIGA_HOSTNAME')
SITES['api']['domain'] = TAIGA_HOSTNAME
SITES['front']['domain'] = TAIGA_HOSTNAME
MEDIA_URL = 'http://' + TAIGA_HOSTNAME + '/media/'
STATIC_URL = 'http://' + TAIGA_HOSTNAME + '/static/'
# Default to '' so a missing variable does not raise AttributeError on .lower()
if os.getenv('TAIGA_SSL', '').lower() == 'true' or os.getenv('TAIGA_SSL_BY_REVERSE_PROXY', '').lower() == 'true':
SITES['api']['scheme'] = 'https'
SITES['front']['scheme'] = 'https'
MEDIA_URL = 'https://' + TAIGA_HOSTNAME + '/media/'
STATIC_URL = 'https://' + TAIGA_HOSTNAME + '/static/'
SECRET_KEY = os.getenv('TAIGA_SECRET_KEY')
if os.getenv('RABBIT_PORT') is not None and os.getenv('REDIS_PORT') is not None:
from .celery import *
BROKER_URL = 'amqp://guest:guest@rabbit:5672'
CELERY_RESULT_BACKEND = 'redis://redis:6379/0'
CELERY_ENABLED = True
EVENTS_PUSH_BACKEND = "taiga.events.backends.rabbitmq.EventsPushBackend"
EVENTS_PUSH_BACKEND_OPTIONS = {"url": "amqp://guest:guest@rabbit:5672//"}
if os.getenv('TAIGA_ENABLE_EMAIL', '').lower() == 'true':
DEFAULT_FROM_EMAIL = os.getenv('TAIGA_EMAIL_FROM')
CHANGE_NOTIFICATIONS_MIN_INTERVAL = 300 # in seconds
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
    if os.getenv('TAIGA_EMAIL_USE_TLS', '').lower() == 'true':
EMAIL_USE_TLS = True
else:
EMAIL_USE_TLS = False
EMAIL_HOST = os.getenv('TAIGA_EMAIL_HOST')
EMAIL_PORT = int(os.getenv('TAIGA_EMAIL_PORT'))
EMAIL_HOST_USER = os.getenv('TAIGA_EMAIL_USER')
EMAIL_HOST_PASSWORD = os.getenv('TAIGA_EMAIL_PASS')
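# Hedged reference (not part of the settings module): the environment
# variables this file reads, with placeholder values.
#
#   TAIGA_DB_NAME=taiga          TAIGA_DB_HOST=db
#   TAIGA_DB_USER=taiga          TAIGA_DB_PASSWORD=secret
#   TAIGA_HOSTNAME=taiga.example.com
#   TAIGA_SSL=False              TAIGA_SSL_BY_REVERSE_PROXY=False
#   TAIGA_SECRET_KEY=change-me
#   TAIGA_ENABLE_EMAIL=True      TAIGA_EMAIL_FROM=taiga@example.com
#   TAIGA_EMAIL_USE_TLS=True     TAIGA_EMAIL_HOST=smtp.example.com
#   TAIGA_EMAIL_PORT=587         TAIGA_EMAIL_USER / TAIGA_EMAIL_PASS
#   RABBIT_PORT / REDIS_PORT     (set both to enable celery and events)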
|
benhutchins/docker-taiga
|
docker-settings.py
|
Python
|
gpl-3.0
| 1,959
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Switch and Buzzer test file for 'Whizzy' - Line Follower Robot based on GoPiGo with Raspberry Pi.
# Intended to run on a Raspberry Pi on the actual robot.
#
# Copyright (C) 2017 Rob Probin.
# All original work.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# As of May 2017, this project is hosted at https://github.com/robzed/Whizzy_Robot
#
import RPi.GPIO as GPIO
import time
Buzzer_Pin = 24
Switch1_Pin = 25
Switch2_Pin = 23
Switch3_Pin = 18
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(Buzzer_Pin, GPIO.OUT)
GPIO.setup(Switch1_Pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(Switch2_Pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(Switch3_Pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.output(Buzzer_Pin, GPIO.HIGH)
time.sleep(0.2)
GPIO.output(Buzzer_Pin, GPIO.LOW)
Switch1_State = None
Switch2_State = None
Switch3_State = None
while True:
switch_input = GPIO.input(Switch1_Pin)
if (switch_input != Switch1_State):
Switch1_State = switch_input
print("Switch 1 ", switch_input)
switch_input = GPIO.input(Switch2_Pin)
if (switch_input != Switch2_State):
Switch2_State = switch_input
print("Switch 2 ", switch_input)
    switch_input = GPIO.input(Switch3_Pin)
    if (switch_input != Switch3_State):
        Switch3_State = switch_input
        print("Switch 3 ", switch_input)
    # Brief delay so the polling loop does not peg the CPU
    time.sleep(0.01)
|
robzed/Whizzy_Robot
|
tests/switch_buzzer_test.py
|
Python
|
gpl-3.0
| 1,997
|
# Arduboy Game Changer
# Server v0.1
# by Matthew Begg
# 2017-03-28
import serial
import serial.tools.list_ports
import time
import os
gameName = "evade.hex"
port = "/dev/ttyACM0"
# Upload a hex file to Arduboy
def uploadHex(fileName):
ser = serial.Serial(port, 1200, timeout=1, dsrdtr=True)
ser.close()
ser.open()
ser.close()
time.sleep(1)
os.system("avrdude -v -patmega32u4 -cavr109 -P" + port + " -b57600 -D -Uflash:w:" + fileName + ":i")
print(fileName + " uploaded.")
# Upon startup, send the menu to the Arduboy
uploadHex("agc.hex")
print("agc menu uploaded.")
# Wait for reboot
time.sleep(5)
# Read the name of the game
ser = serial.Serial(port, 9600)
ser.close()
ser.open()
print("Waiting for game name from ArduBoy...")
gameName = str(ser.readline().decode())
gameName = str(gameName.replace('\r\n',''))
print("Game name: " + gameName + ".hex")
ser.close()
time.sleep(2)
# Upload selected game
uploadHex("games/" + gameName + ".hex")
# Main loop (idle; sleep so the process does not busy-wait)
while True:
    time.sleep(1)
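# Hedged sketch (not part of the original script): serial.tools.list_ports is
# imported above but never used; it could replace the hard-coded port. The
# 'Arduino' description match below is an assumption about the USB descriptor.
#
# def find_arduboy_port(default="/dev/ttyACM0"):
#     for p in serial.tools.list_ports.comports():
#         if 'Arduino' in (p.description or ''):
#             return p.device
#     return default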
|
marvbloke/agc
|
agc.py
|
Python
|
gpl-3.0
| 1,022
|
################################################################################
## ##
## This file is a part of TADEK. ##
## ##
## TADEK - Test Automation in a Distributed Environment ##
## (http://tadek.comarch.com) ##
## ##
## Copyright (C) 2011 Comarch S.A. ##
## All rights reserved. ##
## ##
## TADEK is free software for non-commercial purposes. For commercial ones ##
## we offer a commercial license. Please check http://tadek.comarch.com for ##
## details or write to tadek-licenses@comarch.com ##
## ##
## You can redistribute it and/or modify it under the terms of the ##
## GNU General Public License as published by the Free Software Foundation, ##
## either version 3 of the License, or (at your option) any later version. ##
## ##
## TADEK is distributed in the hope that it will be useful, ##
## but WITHOUT ANY WARRANTY; without even the implied warranty of ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ##
## GNU General Public License for more details. ##
## ##
## You should have received a copy of the GNU General Public License ##
## along with TADEK bundled with this file in the file LICENSE. ##
## If not, see http://www.gnu.org/licenses/. ##
## ##
## Please notice that Contributor Agreement applies to any contribution ##
## you make to TADEK. The Agreement must be completed, signed and sent ##
## to Comarch before any contribution is made. You should have received ##
## a copy of Contribution Agreement along with TADEK bundled with this file ##
## in the file CONTRIBUTION_AGREEMENT.pdf or see http://tadek.comarch.com ##
## or write to tadek-licenses@comarch.com ##
## ##
################################################################################
import re
import string
from PySide import QtCore
from tadek.core import log
from tadek.core import config
from tadek.core import constants
from tadek.core.utils import decode
import utils
from dialogs import runWarning, runInformation
class SearchDialog(QtCore.QObject):
'''
A search dialog class.
'''
_SEARCH_UI = "search_dialog.ui"
_DEEP_METHOD = 0
_SIMPLE_METHOD = 1
_PARTIAL_MATCH = 0
_EXACT_MATCH = 1
_REGULAR_EXPRESSION_MATCH = 2
_CONFIG_NAME = "search"
_CONFIG_SECTION_OPTIONS = "options"
_CONFIG_SECTION_COMPLETERS = "completers"
def __init__(self, view):
QtCore.QObject.__init__(self)
self.__dict__.update(view.loadUi(self._SEARCH_UI))
self.buttonClose.clicked.connect(self.dialog.close)
self.buttonSearch.clicked.connect(self._startSearching)
self.buttonNext.clicked.connect(self._nextSearching)
self.buttonStop.clicked.connect(self._stopSearching)
self.dialog.finished.connect(self._onClose)
self.comboBoxName.editTextChanged.connect(self._setName)
self.comboBoxRole.editTextChanged.connect(self._setRole)
self.comboBoxState.editTextChanged.connect(self._setState)
self.plainTextEditText.textChanged.connect(self._setText)
self.radioButtonDeep.clicked.connect(self._setDeepMethod)
self.radioButtonSimple.clicked.connect(self._setSimpleMethod)
self.checkBoxExactMatch.toggled.connect(self._setExactMatch)
self.checkBoxCaseSensitive.toggled.connect(self._setCaseSensitiveMatch)
self._view = view
self._name = ''
self._role = ''
self._state = ''
self._text = ''
self._searching = False
self._manualUpdate = False
self._explorerDev = None
self._foundAccessibles = []
self.buttonNext.setVisible(False)
self.buttonStop.setEnabled(False)
self._lastNames = utils.LastValues(self._CONFIG_NAME,
self._CONFIG_SECTION_COMPLETERS,
"names")
self._lastRoles = utils.LastValues(self._CONFIG_NAME,
self._CONFIG_SECTION_COMPLETERS,
"roles")
self._lastStates = utils.LastValues(self._CONFIG_NAME,
self._CONFIG_SECTION_COMPLETERS,
"states")
self.comboBoxName.completer().setCaseSensitivity(
QtCore.Qt.CaseSensitive)
self.comboBoxRole.completer().setCaseSensitivity(
QtCore.Qt.CaseSensitive)
self.comboBoxState.completer().setCaseSensitivity(
QtCore.Qt.CaseSensitive)
#@QtCore.Slot(str)
def _setName(self, name):
'''
Sets the name of searched widgets.
'''
if self._manualUpdate:
return
self._stopSearching()
self._name = decode(name).strip()
#@QtCore.Slot(str)
def _setRole(self, role):
'''
Sets the role of searched widgets.
'''
if self._manualUpdate:
return
self._stopSearching()
self._role = decode(role).strip()
#@QtCore.Slot(str)
def _setState(self, state):
'''
Sets the state of searched widgets.
'''
if self._manualUpdate:
return
self._stopSearching()
self._state = decode(state).strip()
#@QtCore.Slot()
def _setDeepMethod(self):
'''
Sets the deep method of widgets searching.
'''
self._stopSearching()
self._method = self._DEEP_METHOD
self._saveState()
#@QtCore.Slot()
def _setSimpleMethod(self):
'''
Sets the simple method for widgets searching.
'''
self._stopSearching()
self._method = self._SIMPLE_METHOD
self._saveState()
#@QtCore.Slot()
def _setText(self):
'''
Sets text of searched widgets.
'''
self._stopSearching()
self._text = decode(self.plainTextEditText.toPlainText()).strip()
self._saveState()
#@QtCore.Slot()
def _setExactMatch(self, checked):
'''
Sets the exact match for widgets searching.
'''
self._stopSearching()
if checked:
self._matchType = self._EXACT_MATCH
else:
self._matchType = self._PARTIAL_MATCH
self._saveState()
#@QtCore.Slot(bool)
def _setCaseSensitiveMatch(self, checked):
'''
Sets the case sensitive match for widgets searching.
'''
self._stopSearching()
self._caseSensitiveMatch = checked
self._saveState()
#@QtCore.Slot()
def _onClose(self):
'''
Stops the searching process.
'''
self._stopSearching()
#@QtCore.Slot()
def _stopSearching(self):
'''
Stops the searching process and resets buttons.
'''
self.searchingStoppedUpdateState()
if self._searching:
self._explorerDev.stopSearching()
self._explorerDev.startItemChanged.disconnect(
self._startItemChanged)
self._explorerDev.itemFound.disconnect(self.itemFoundUpdateState)
self._explorerDev.itemNotFound.disconnect(
self.itemNotFoundUpdateState)
self._explorerDev.searchingStopped.disconnect(self._stopSearching)
self._searching = False
class Check(object):
'''
Tests whether the given accessible matches the current search criteria.
'''
def __init__(self, criteria):
'''
Initializes the criteria for searching which should be provided
as a dictionary with keys: name, role, text, type
'''
self.name = criteria['name'] or ''
self.role = criteria['role'] or ''
self.state = criteria['state'] or ''
self.text = criteria['text'] or ''
self._matchType = criteria['matchType']
self._caseSensitiveMatch = criteria['caseSensitiveMatch']
if self._matchType == SearchDialog._EXACT_MATCH:
if not self._caseSensitiveMatch:
self.name = self.name.upper()
self.text = self.text.upper()
self.role = self.role.upper()
self.state = self.state.upper()
self._matchName = lambda s: not self.name or s == self.name
self._matchRole = lambda s: not self.role or s == self.role
self._matchState = lambda s: not self.state or self.state in s
self._matchText = lambda s: not self.text or s == self.text
else:
flags = re.DOTALL
if not self._caseSensitiveMatch:
flags |= re.IGNORECASE
# name
if self.name and self.name[0] == '&':
self.name = self.name[1:]
compiledCriteriaName = self._compileExpression(
self.name+'\Z', flags, 'name')
self._matchName = lambda s: compiledCriteriaName.match(s)
else:
if not self._caseSensitiveMatch:
self.name = self.name.upper()
self._matchName = lambda s: (not self.name or
s.find(self.name)>=0)
# role
if self.role and self.role[0] == '&':
self.role = self.role[1:]
compiledCriteriaRole = self._compileExpression(
self.role+'\Z', flags, 'role')
self._matchRole = lambda s: compiledCriteriaRole.match(s)
else:
if not self._caseSensitiveMatch:
self.role = self.role.upper()
self._matchRole = lambda s: (not self.role or
s.find(self.role)>=0)
# state
if self.state and self.state[0] == '&':
self.state = self.state[1:]
compiledCriteriaState = self._compileExpression(
self.state+'\Z', flags, 'state')
def matchState(s):
for state in s:
if compiledCriteriaState.match(state):
return True
return False
self._matchState = matchState
else:
if not self._caseSensitiveMatch:
self.state = self.state.upper()
def matchState(s):
if not self.state:
return True
for state in s:
if state.find(self.state)>=0:
return True
return False
self._matchState = matchState
# text
if self.text and self.text[0] == '&':
self.text = self.text[1:]
compiledCriteriaText = self._compileExpression(
self.text+'\Z', flags, 'text')
self._matchText = lambda s: compiledCriteriaText.match(s)
else:
if not self._caseSensitiveMatch:
self.text = self.text.upper()
self._matchText = lambda s: (not self.text or
s.find(self.text)>=0)
def __call__(self, itemData):
'''
Compares the data in provided dictionary with the criteria.
'''
itemName = itemData['name'] or ''
itemRole = itemData['role'] or ''
itemStates = (itemData['states'] if itemData['states'] is not None
else [])
itemText = itemData['text'] or ''
if not self._caseSensitiveMatch:
if self._matchType == SearchDialog._EXACT_MATCH:
itemName = itemName.upper()
itemRole = itemRole.upper()
itemStates = map(string.upper, itemStates)
itemText = itemText.upper()
else:
if itemName and itemName[0] != '&':
itemName = itemName.upper()
if itemText and itemText[0] != '&':
itemText = itemText.upper()
return (self._matchName(itemName) and
self._matchRole(itemRole) and
self._matchState(itemStates) and
self._matchText(itemText))
def _compileExpression(self, pattern, flags, toLog):
'''
Compiles regular expression provided as pattern to check its
correctness (and remembers it) and logs a message on failure.
'''
msg = "Regular expression \"%s\" is incorrect"
msgToLog = "Regular expression for %s is incorrect"
try:
return re.compile(pattern, flags)
except:
            # pattern ends with the two-character '\Z' anchor; strip both
            runWarning(msg % pattern[:-2], "Incorrect expression")
log.warning(msgToLog, toLog)
raise
#@QtCore.Slot()
def _startSearching(self):
'''
Starts the searching process.
'''
self.buttonNext.setVisible(True)
self.buttonNext.setEnabled(False)
self.buttonSearch.setVisible(False)
self.buttonStop.setEnabled(True)
explorerDev = self._view.deviceTabAtIndex()
if explorerDev is None:
runWarning("No device is connected.", "Search unavailable")
self.searchingStoppedUpdateState()
return
currentPath = explorerDev.selectedItemPath()
if currentPath:
if not explorerDev.itemExists(currentPath):
self.searchingStoppedUpdateState()
return
else:
runInformation("No item is selected.", "Search unavailable")
self.searchingStoppedUpdateState()
log.warning("Search cannot be performed since no reference item"
" is selected")
return
self._explorerDev = explorerDev
self._explorerDev.startItemChanged.connect(self._startItemChanged)
self._explorerDev.itemFound.connect(self.itemFoundUpdateState)
self._explorerDev.itemNotFound.connect(self.itemNotFoundUpdateState)
self._explorerDev.searchingStopped.connect(self._stopSearching)
self._searching = True
criteria = {
'name': self._name,
'role': self._role,
'state': self._state,
'text': self._text,
'matchType': self._matchType,
'caseSensitiveMatch': self._caseSensitiveMatch
}
deep = False
if self._method == self._DEEP_METHOD:
deep = True
try:
explorerDev.find(self.Check(criteria), deep)
self._lastNames.add(self._name)
if self._role:
self._lastRoles.add(self._role)
if self._state:
self._lastStates.add(self._state)
self._manualUpdate = True
self._refreshCompleters()
self._manualUpdate = False
self._saveState()
except:
self._stopSearching()
#@QtCore.Slot()
def _nextSearching(self):
'''
Searches a next matching item.
'''
self.buttonNext.setEnabled(False)
self.buttonSearch.setVisible(False)
self.buttonStop.setEnabled(True)
self._explorerDev.findNext()
#@QtCore.Slot()
def itemFoundUpdateState(self):
'''
Updates buttons after the item is found.
'''
self.buttonNext.setVisible(True)
self.buttonNext.setEnabled(True)
self.buttonNext.setDefault(True)
self.buttonSearch.setVisible(False)
self.buttonSearch.setDefault(False)
self.buttonStop.setEnabled(False)
#@QtCore.Slot()
def itemNotFoundUpdateState(self):
'''
Updates buttons after the item is not found.
'''
self.buttonNext.setVisible(False)
self.buttonNext.setDefault(False)
self.buttonSearch.setVisible(True)
self.buttonSearch.setEnabled(True)
self.buttonSearch.setDefault(True)
self.buttonStop.setVisible(True)
self.buttonStop.setEnabled(False)
runInformation("No items found.", "Search finished")
#@QtCore.Slot()
def searchingStoppedUpdateState(self):
'''
Updates buttons after the searching is stopped by the user.
'''
self.buttonNext.setVisible(False)
self.buttonNext.setAutoDefault(False)
self.buttonSearch.setVisible(True)
self.buttonSearch.setEnabled(True)
self.buttonSearch.setDefault(True)
self.buttonStop.setVisible(True)
self.buttonStop.setEnabled(False)
#@QtCore.Slot(QtGui.QTreeWidgetItem, int)
def _startItemChanged(self):
'''
Handles the event when the user changes the reference item
'''
self._stopSearching()
log.info("User selected new reference item to search")
def _refreshCompleters(self):
'''
Refreshes completers for combo boxes so they are synchronized with
their last used values.
'''
self.comboBoxName.clear()
for name in self._lastNames.all():
self.comboBoxName.addItem(name)
self.comboBoxName.clearEditText()
if len(self._name):
index = self.comboBoxName.findText(self._name)
if index >= 0:
self.comboBoxName.setCurrentIndex(index)
self.comboBoxRole.clear()
self.comboBoxRole.addItems(self._lastRoles.all())
if self.comboBoxRole.count():
self.comboBoxRole.insertSeparator(self.comboBoxRole.count())
self.comboBoxRole.addItems(constants.ROLES)
self.comboBoxRole.clearEditText()
if len(self._role):
index = self.comboBoxRole.findText(self._role)
if index >= 0:
self.comboBoxRole.setCurrentIndex(index)
self.comboBoxState.clear()
self.comboBoxState.addItems(self._lastStates.all())
if self.comboBoxState.count():
self.comboBoxState.insertSeparator(self.comboBoxState.count())
self.comboBoxState.addItems(constants.STATES)
self.comboBoxState.clearEditText()
if len(self._state):
index = self.comboBoxState.findText(self._state)
if index >= 0:
self.comboBoxState.setCurrentIndex(index)
def _saveState(self):
'''
Saves the dialog state to configuration.
'''
config.set(self._CONFIG_NAME, self._CONFIG_SECTION_OPTIONS,
"match_type", self._matchType)
config.set(self._CONFIG_NAME, self._CONFIG_SECTION_OPTIONS,
"case_sensitive", self._caseSensitiveMatch)
config.set(self._CONFIG_NAME, self._CONFIG_SECTION_OPTIONS,
"method", self._method)
log.info("Search dialog state was saved to configuration")
def _loadState(self):
'''
Loads the dialog state from configuration.
'''
self._matchType = config.getInt(self._CONFIG_NAME,
self._CONFIG_SECTION_OPTIONS,
"match_type", self._PARTIAL_MATCH)
self._caseSensitiveMatch = config.getBool(self._CONFIG_NAME,
self._CONFIG_SECTION_OPTIONS,
"case_sensitive", False)
self._method = config.getInt(self._CONFIG_NAME,
self._CONFIG_SECTION_OPTIONS,
"method", self._DEEP_METHOD)
if self._matchType == self._EXACT_MATCH:
self.checkBoxExactMatch.setChecked(True)
elif self._matchType == self._PARTIAL_MATCH:
self.checkBoxExactMatch.setChecked(False)
self.checkBoxCaseSensitive.setChecked(self._caseSensitiveMatch)
if self._method == self._DEEP_METHOD:
self.radioButtonDeep.setChecked(True)
elif self._method == self._SIMPLE_METHOD:
self.radioButtonSimple.setChecked(True)
self._refreshCompleters()
log.info("Search dialog state was loaded from configuration")
# Public methods:
def run(self):
'''
Resets and shows the Search dialog.
'''
self._loadState()
self.comboBoxName.clearEditText()
self.plainTextEditText.clear()
self.dialog.show()
def stop(self):
'''
Stops searching.
'''
self._stopSearching()
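# Hedged usage sketch (not part of the original module): SearchDialog.Check is
# a plain callable over dictionaries, so it can be exercised in isolation.
#
# criteria = {'name': 'OK', 'role': 'button', 'state': '', 'text': '',
#             'matchType': SearchDialog._PARTIAL_MATCH,
#             'caseSensitiveMatch': False}
# check = SearchDialog.Check(criteria)
# item = {'name': 'OK', 'role': 'push button', 'states': ['ENABLED'], 'text': ''}
# assert check(item)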
|
tadek-project/tadek-ui
|
src/explore/search.py
|
Python
|
gpl-3.0
| 22,004
|
# -*- coding: utf-8 -*-
# rtsserver.py
# Main model for the server
# Author : Jean-Sébastien Beaulieu
import Pyro4
from random import randint
from threading import Timer
from servertools import ServerTools
from datetime import datetime
class GameData():
"""Used by **RTSTITLE** server to manage user data
and syncing game data between clients."""
def __init__(self):
self.registering = True
self.players = {}
self.chat = []
self.random_seed = randint(0, 30000)
def register_user(self, name):
"""Register a new player on the server."""
if self.registering and name not in self.players.keys():
self.players[name] = None
return self.random_seed
else:
return None
def game_packets(self, name):
"""Create packets to distribute to the clients.
Return (False, Player list, Messages) if in Lobby
Return (True, Game infos, Messages) if in Game"""
if self.registering:
return (False, list(self.players.keys()), self.chat)
else:
if self.players[name] is not None:
data_pack = self.players[name]
del self.players[name]
return (True, data_pack, self.chat)
else:
return (True, None, None)
def receive_info(self, actions):
"""Receiving information from everyone."""
if self.registering:
self.registering = False
for key in self.players:
self.players[key] = actions
def kick_players(self, players):
"""Kicks selected users out of the game."""
for p in players:
if p in self.players:
del self.players[p]
class Server():
"""Dedicated server instance for **RTSTITLE**."""
def __init__(self):
self.game_data = GameData()
self.create_server()
def create_server(self):
"""Attempts to create a new server instance."""
try:
ip = ServerTools.get_local_ip()
self.deamon = Pyro4.Daemon(host=ip, port=48280)
self.uri = self.deamon.register(self, "uri")
self.deamon.requestLoop()
except:
pass
############################################################################
# Interaction between clients and game_data #
############################################################################
def get_server_info(self):
return self.uri
def get_players(self):
return self.game_data.players
def empty_players(self):
self.game_data.players = {}
def get_new_chat(self):
return self.game_data.chat
def empty_new_chat(self):
self.game_data.chat = []
def chat_message(self, name, message):
time_ms = datetime.now()
chat_string = str(name) + ": " + str(message) + "\n"
self.game_data.chat.append([time_ms, chat_string])
def register_user(self, name, uri):
self.deamon.register(uri, objectId=name)
return self.game_data.register_user(name)
def game_packets(self, name):
return self.game_data.game_packets(name)
def receive_info(self, actions):
return self.game_data.receive_info(actions)
def client_quit(self, name):
del self.game_data.players[name]
def registered_clients(self):
return self.deamon.registered()
############################################################################
# Shutting down the server #
############################################################################
    def shutdown(self):
        self.deamon.shutdown()
if __name__ == '__main__':
s = Server()
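# Hedged client sketch (not part of the server): a client reaches the daemon
# through a Pyro4 proxy built from the URI the server registers under the
# "uri" object id. The host address below is a placeholder.
#
# client = Pyro4.Proxy("PYRO:uri@192.168.1.10:48280")
# client.chat_message("alice", "hello")
# print(client.get_new_chat())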
|
folkrav/rts-b51
|
src/server/rtsserver.py
|
Python
|
gpl-3.0
| 3,840
|
# RAVEn Plugin
#
# Author: Dnpwwo, 2016
#
#
# Plugin parameter definition below will be parsed during startup and copied into Manifest.xml, this will then drive the user interface in the Hardware web page
#
"""
<plugin key="RAVEn" name="RAVEn Zigbee energy monitor" author="dnpwwo" version="1.3.10" externallink="https://rainforestautomation.com/rfa-z106-raven/">
<params>
<param field="SerialPort" label="Serial Port" width="150px" required="true" default="/dev/ttyRAVEn"/>
<param field="Mode6" label="Debug" width="100px">
<options>
<option label="True" value="Debug"/>
<option label="False" value="Normal" default="true" />
<option label="Logging" value="File"/>
</options>
</param>
</params>
</plugin>
"""
import Domoticz
import xml.etree.ElementTree as ET
SerialConn = None
demandFreq=30 # seconds between demand events
summaryFreq=300 # seconds between summary updates
fScale = demandFreq / 3600.0
summation = 0.0
hasConnected = False
nextCommand = ""
def onStart():
global SerialConn
if Parameters["Mode6"] != "Normal":
Domoticz.Debugging(1)
if Parameters["Mode6"] == "Debug":
f = open(Parameters["HomeFolder"]+"plugin.log","w")
f.write("Plugin started.")
f.close()
if (len(Devices) == 0):
Domoticz.Device(Name="Usage", Unit=1, Type=243, Subtype=29, Switchtype=0, Image=0, Options="").Create()
Domoticz.Device("Total", 2, 113).Create()
Domoticz.Log("Devices created.")
Domoticz.Log("Plugin has " + str(len(Devices)) + " devices associated with it.")
DumpConfigToLog()
SerialConn = Domoticz.Connection(Name="RAVEn", Transport="Serial", Protocol="XML", Address=Parameters["SerialPort"], Baud=115200)
SerialConn.Connect()
return
def onConnect(Connection, Status, Description):
global SerialConn
if (Status == 0):
Domoticz.Log("Connected successfully to: "+Parameters["SerialPort"])
Connection.Send("<Command>\n <Name>restart</Name>\n</Command>")
SerialConn = Connection
else:
Domoticz.Log("Failed to connect ("+str(Status)+") to: "+Parameters["SerialPort"])
Domoticz.Debug("Failed to connect ("+str(Status)+") to: "+Parameters["SerialPort"]+" with error: "+Description)
return True
def onMessage(Connection, Data):
global hasConnected, nextCommand, fScale, summation
strData = Data.decode("utf-8", "ignore")
LogMessage(strData)
xmltree = ET.fromstring(strData)
if xmltree.tag == 'ConnectionStatus':
strLog = ""
if (xmltree.find('MeterMacId') != None): strLog = "MeterMacId: "+xmltree.find('MeterMacId').text+", "
connectStatus = xmltree.find('Status').text
strLog += "Connection Status = '"+connectStatus+"'"
if (xmltree.find('Description') != None): strLog += " - "+xmltree.find('Description').text
if (xmltree.find('LinkStrength') != None): strLog += ", Link Strength = "+str(int(xmltree.find('LinkStrength').text,16))
Domoticz.Log(strLog)
if connectStatus == 'Initializing...':
hasConnected = False
elif (connectStatus == 'Connected') and (hasConnected == False):
nextCommand = "get_device_info"
hasConnected = True
elif xmltree.tag == 'DeviceInfo':
Domoticz.Log( "Manufacturer: %s, Device ID: %s, Install Code: %s" % (xmltree.find('Manufacturer').text, xmltree.find('DeviceMacId').text, xmltree.find('InstallCode').text) )
Domoticz.Log( "Hardware: Version %s, Firmware Version: %s, Model: %s" % (xmltree.find('HWVersion').text, xmltree.find('FWVersion').text, xmltree.find('ModelId').text) )
nextCommand = "get_network_info"
elif xmltree.tag == 'NetworkInfo':
LogMessage( "NetworkInfo response, Status = '%s' - %s, Link Strength = %d" % (xmltree.find('Status').text, xmltree.find('Description').text, int(xmltree.find('LinkStrength').text,16)))
nextCommand = "get_meter_list"
elif xmltree.tag == 'MeterList':
nextCommand = ""
for meter in xmltree.iter('MeterMacId'):
LogMessage( "MeterMacId: %s, MeterList response" % meter.text)
Connection.Send("<Command>\n <Name>get_meter_info</Name>\n <MeterMacId>"+meter.text+"</MeterMacId>\n</Command>\n")
elif xmltree.tag == 'MeterInfo':
LogMessage( "MeterMacId: %s, MeterInfo response, Enabled = %s" % (xmltree.find('MeterMacId').text, xmltree.find('Enabled').text))
Connection.Send("<Command>\n <Name>get_schedule</Name>\n <MeterMacId>"+xmltree.find('MeterMacId').text+"</MeterMacId>\n</Command>\n")
elif xmltree.tag == 'ScheduleInfo':
iFreq = int(xmltree.find('Frequency').text,16)
LogMessage( "MeterMacId: %s, ScheduleInfo response: Type '%s', Frequency %d, Enabled %s" % (xmltree.find('MeterMacId').text, xmltree.find('Event').text, iFreq, xmltree.find('Enabled').text))
if (xmltree.find('Event').text == 'demand') and (iFreq != demandFreq):
LogMessage( "MeterMacId: %s, Setting 'demand' schedule to: Frequency %d" % (xmltree.find('MeterMacId').text, demandFreq))
Connection.Send("<Command>\n <Name>set_schedule</Name>\n <MeterMacId>"+xmltree.find('MeterMacId').text+"</MeterMacId>\n <Event>demand</Event>\n <Frequency>" + str(hex(demandFreq)) + "</Frequency>\n <Enabled>Y</Enabled>\n</Command>\n")
if (xmltree.find('Event').text == 'summation') and (iFreq != summaryFreq):
LogMessage( "MeterMacId: %s, Setting 'summation' schedule to: Frequency %d" % (xmltree.find('MeterMacId').text, summaryFreq))
Connection.Send("<Command>\n <Name>set_schedule</Name>\n <MeterMacId>"+xmltree.find('MeterMacId').text+"</MeterMacId>\n <Event>summation</Event>\n <Frequency>" + str(hex(summaryFreq)) + "</Frequency>\n <Enabled>Y</Enabled>\n</Command>\n")
if (xmltree.find('Event').text == 'summation'):
Connection.Send("<Command>\n <Name>get_current_summation_delivered</Name>\n <MeterMacId>"+xmltree.find('MeterMacId').text+"</MeterMacId>\n <Refresh>Y</Refresh>\n</Command>\n")
elif xmltree.tag == 'InstantaneousDemand':
demand = float(getInstantDemandKWh(xmltree))
if (summation == 0.0):
Domoticz.Log("MeterMacId: %s, Instantaneous Demand = %f, NO SUMMARY DATA" % (xmltree.find('MeterMacId').text, demand))
else:
delta = fScale * demand
summation = summation + delta
Domoticz.Log( "MeterMacId: %s, Instantaneous Demand = %.3f, Summary Total = %.3f, Delta = %f" % (xmltree.find('MeterMacId').text, demand, summation, delta))
sValue = "%.3f;%.3f" % (demand,summation)
Devices[1].Update(0, sValue.replace('.',''))
elif xmltree.tag == 'CurrentSummationDelivered':
total = float(getCurrentSummationKWh(xmltree))
if (total > summation):
summation = total
sValue = "%.3f" % (total)
Devices[2].Update(0, sValue.replace('.',''))
Domoticz.Log( "MeterMacId: %s, Current Summation = %.3f" % (xmltree.find('MeterMacId').text, total))
elif xmltree.tag == 'TimeCluster':
Domoticz.Debug( xmltree.tag + " response" )
elif xmltree.tag == 'PriceCluster':
Domoticz.Debug( xmltree.tag + " response" )
elif xmltree.tag == 'CurrentPeriodUsage':
Domoticz.Debug( xmltree.tag + " response" )
elif xmltree.tag == 'LastPeriodUsage':
Domoticz.Debug( xmltree.tag + " response" )
elif xmltree.tag == 'ProfileData':
Domoticz.Debug( xmltree.tag + " response" )
else:
Domoticz.Error("Unrecognised (not implemented) XML Fragment ("+xmltree.tag+").")
return
def onDisconnect(Connection):
Domoticz.Log("Connection '"+Connection.Name+"' disconnected.")
return
def onHeartbeat():
global hasConnected, nextCommand, SerialConn
if (SerialConn.Connected()):
if (nextCommand != ""):
Domoticz.Debug("Sending command: "+nextCommand)
SerialConn.Send("<Command>\n <Name>"+nextCommand+"</Name>\n</Command>\n")
else:
hasConnected = False
SerialConn.Connect()
return True
# RAVEn support functions
def getCurrentSummationKWh(xmltree):
'''Returns a single float value for the SummationDelivered from a Summation response from RAVEn'''
# Get the Current Summation (Meter Reading)
fReading = float(int(xmltree.find('SummationDelivered').text,16))
fResult = calculateRAVEnNumber(xmltree, fReading)
return formatRAVEnDigits(xmltree, fResult)
def getInstantDemandKWh(xmltree):
'''Returns a single float value for the Demand from an Instantaneous Demand response from RAVEn'''
# Get the Instantaneous Demand
fDemand = float(int(xmltree.find('Demand').text,16))
fResult = calculateRAVEnNumber(xmltree, fDemand)
return formatRAVEnDigits(xmltree, fResult)
def calculateRAVEnNumber(xmltree, value):
'''Calculates a float value from RAVEn using Multiplier and Divisor in XML response'''
# Get calculation parameters from XML - Multiplier, Divisor
fDivisor = float(int(xmltree.find('Divisor').text,16))
fMultiplier = float(int(xmltree.find('Multiplier').text,16))
if (fMultiplier > 0 and fDivisor > 0):
fResult = float( (value * fMultiplier) / fDivisor)
elif (fMultiplier > 0):
fResult = float(value * fMultiplier)
else: # (Divisor > 0) or anything else
fResult = float(value / fDivisor)
return fResult
def formatRAVEnDigits(xmltree, value):
'''Formats a float value according to DigitsRight, DigitsLeft and SuppressLeadingZero settings from RAVEn XML response'''
# Get formatting parameters from XML - DigitsRight, DigitsLeft
iDigitsRight = int(xmltree.find('DigitsRight').text,16)
iDigitsLeft = int(xmltree.find('DigitsLeft').text,16)
sResult = ("{:0%d.%df}" % (iDigitsLeft+iDigitsRight+1,iDigitsRight)).format(value)
# Suppress Leading Zeros if specified in XML
if xmltree.find('SuppressLeadingZero').text == 'Y':
while sResult[0] == '0':
sResult = sResult[1:]
if sResult[0] == '.':
sResult = '0' + sResult
return sResult
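# Hedged demo (not part of the plugin): the helpers above are pure functions
# over an ElementTree node, so they can be checked with synthetic XML. The hex
# values below are made up for illustration.
#
# fragment = ET.fromstring(
#     "<InstantaneousDemand><Demand>0x1db9</Demand>"
#     "<Multiplier>0x1</Multiplier><Divisor>0x3e8</Divisor>"
#     "<DigitsRight>0x3</DigitsRight><DigitsLeft>0x6</DigitsLeft>"
#     "<SuppressLeadingZero>Y</SuppressLeadingZero></InstantaneousDemand>")
# print(getInstantDemandKWh(fragment))  # 0x1db9/0x3e8 = 7609/1000 -> '7.609'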
# Generic helper functions
def LogMessage(Message):
if Parameters["Mode6"] == "File":
f = open(Parameters["HomeFolder"]+"plugin.log","a")
f.write(Message+"\r\n")
f.close()
Domoticz.Debug(Message)
def DumpConfigToLog():
for x in Parameters:
if Parameters[x] != "":
LogMessage( "'" + x + "':'" + str(Parameters[x]) + "'")
LogMessage("Device count: " + str(len(Devices)))
for x in Devices:
LogMessage("Device: " + str(x) + " - " + str(Devices[x]))
LogMessage("Device ID: '" + str(Devices[x].ID) + "'")
LogMessage("Device Name: '" + Devices[x].Name + "'")
LogMessage("Device nValue: " + str(Devices[x].nValue))
LogMessage("Device sValue: '" + Devices[x].sValue + "'")
LogMessage("Device LastLevel: " + str(Devices[x].LastLevel))
return
|
pperichon/domoticz
|
plugins/examples/RAVEn.py
|
Python
|
gpl-3.0
| 11,003
|
# oppia/av/admin.py
from django.contrib import admin
from oppia.av.models import UploadedMedia, UploadedMediaImage
class UploadedMediaAdmin(admin.ModelAdmin):
list_display = ('id', 'file', 'created_date', 'md5', 'length')
class UploadedMediaImageAdmin(admin.ModelAdmin):
list_display = ('id', 'image', 'uploaded_media', 'default_image', 'created_date')
admin.site.register(UploadedMedia, UploadedMediaAdmin)
admin.site.register(UploadedMediaImage, UploadedMediaImageAdmin)
|
DigitalCampus/django-nurhi-oppia
|
oppia/av/admin.py
|
Python
|
gpl-3.0
| 486
|
#!/usr/bin/env python
"""
dyndns.py -- Dynamic DNS for Linode
Copyright 2011 Michael S. Yanovich
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
This script is a dynamic dns script for Linode. Simply provide the
appropriate command line arguments and it will update the IP address in
Linode's Domain Manager.
"""
import argparse
import api
import sys
import urllib2
def getip():
u = urllib2.urlopen("http://yano.me/ip/")
    ip = u.read().strip()
u.close()
return ip
def updateip(apikey, domain_name, subdomain, port, domain_type):
    if port is None:
        port = 22
    if domain_type is None:
        domain_type = "A"
FLAG = True
linode = api.Api(apikey)
domains = linode.domain_list()
for domain in domains:
if domain["DOMAIN"] == domain_name:
did = domain["DOMAINID"]
for each in linode.domain_resource_list(DomainID=did):
if each["NAME"] == subdomain:
FLAG = False
resource_id = each["RESOURCEID"]
## grab IP address
ip = getip()
    if FLAG:
a = linode.domain_resource_create(Port=port, DomainID=did,
Type=domain_type, Name=subdomain, Target=ip)
else:
a = linode.domain_resource_update(Port=port, DomainID=did,
ResourceID=resource_id, Target=ip)
def main():
desc = "This script is a Dynamic DNS updater for Linode."
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('-p', type=int, dest='port',
help='specifies the port number')
parser.add_argument('-ak', action='store', dest='apikey',
required=True, help='specifies the api key')
parser.add_argument('-dt', action='store', dest='domain_type',
help='specifies the domain type')
parser.add_argument('-dn', action='store', dest='domain_name',
required=True, help='specifies the domain name')
parser.add_argument('-sd', action='store', dest='subdomain',
required=True, help='specifies the subdomain')
results = parser.parse_args()
updateip(results.apikey, results.domain_name, results.subdomain,
results.port, results.domain_type)
if __name__ == '__main__':
main()
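# Hedged invocation example (not part of the script); key, domain and
# subdomain values are placeholders:
#
#   python dyndns.py -ak YOUR_API_KEY -dn example.com -sd home
#   (optional: -p PORT and -dt TYPE; they default to 22 and "A")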
|
myano/linode-dyndns
|
dyndns.py
|
Python
|
gpl-3.0
| 2,806
|
"""
Create a simple, single-elimination demo tournament with 8 teams.
"""
from django.contrib.auth.models import User
from .utils import generate_tournament
teams = ['Team {}'.format(i) for i in range(1, 9)]
team_defaults = {
'creator': User.objects.get(pk=1),
}
generate_tournament(
teams=teams,
team_defaults=team_defaults
)
|
clemby/tournasite
|
scripts/create.py
|
Python
|
gpl-3.0
| 346
|
#!/usr/bin/python
# This file is part of Morse.
#
# Morse is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Morse is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Morse. If not, see <http://www.gnu.org/licenses/>.
from . import app
from flask import render_template
from flask.ext.login import current_user
from ..rights import possibly_banned, check_ban
from ..models import db
from ..models.discussion import Topic
from ..wrappers import TopicWrapper
from ..api.dispatchers import PostFilterDispatcher, FormatToolDispatcher
@app.route('/topic/<topic_str>', methods=['GET'])
@possibly_banned
def topic (topic_str):
"""
renders topic view
:param topic_id: indicates topic view to render
:rtype: html
"""
topic_id = int(topic_str.split("-")[0])
topic = Topic.query.get(topic_id)
if not topic:
return "nosuchtopic", 400
check_ban(topic.board_id)
if not current_user.may_read(topic.board):
return render_template('4xx/403-default.html'), 403
topic.view_count = Topic.view_count + 1
db.session.commit()
topic_with_user_context = TopicWrapper(topic)
post_filter_dispatcher = PostFilterDispatcher()
format_tool_dispatcher = FormatToolDispatcher()
return render_template("topic.html", topic = topic_with_user_context,
post_filter_dispatcher = post_filter_dispatcher,
format_tool_dispatcher = format_tool_dispatcher)
|
retooth/morse
|
morse/views/topic.py
|
Python
|
gpl-3.0
| 1,937
|
#
# Race Capture App
#
# Copyright (C) 2014-2016 Autosport Labs
#
# This file is part of the Race Capture App
#
# This is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License for more details. You should
# have received a copy of the GNU General Public License along with
# this code. If not, see <http://www.gnu.org/licenses/>.
import kivy
kivy.require('1.9.1')
from utils import *
from kivy.properties import ObjectProperty, NumericProperty, ListProperty, StringProperty
from kivy.uix.stacklayout import StackLayout
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.anchorlayout import AnchorLayout
from kivy.uix.progressbar import ProgressBar
from kivy.app import Builder
from kivy.clock import Clock
from iconbutton import IconButton
from kivy.logger import Logger
from fieldlabel import FieldLabel
from autosportlabs.racecapture.theme.color import ColorScheme
TOOLBAR_VIEW_KV = '''
<ProgressFieldLabel>:
RelativeLayout:
StencilView:
size_hint: (None, 1.0)
id: stencil
width: 0
canvas.after:
Color:
rgba: root.color
Rectangle:
pos: self.pos
size: self.size
FieldLabel:
id: value
halign: 'center'
color: root.text_color
font_size: self.height * 0.6
<ToolbarItem>:
canvas.before:
Color:
rgba: ColorScheme.get_dark_background()
Rectangle:
pos: self.pos
size: self.size
<ToolbarView>:
orientation: 'horizontal'
spacing: sp(2)
ToolbarItem:
padding: self.height * 0.5, 0
size_hint_x: 0.10
orientation: 'horizontal'
IconButton:
id: menu
text: '\357\203\211'
on_release: root.mainMenu()
size_hint_x: None
width: self.height * 1
font_size: self.height
Button:
background_color: [0.0, 0.0, 0.0, 0.0]
background_down: ''
text: ' '
font_name: "resource/fonts/ASL_light.ttf"
size_hint_x: 0.0
width: self.height * 2.4
font_size: self.height * 0.7
on_release: root.mainMenu()
ToolbarItem:
orientation: 'horizontal'
padding: self.height * 0.4, 0
size_hint_x: 0.50
FieldLabel:
halign: 'center'
text: ''
id: state
font_size: self.height * 0.7
ToolbarItem:
orientation: 'horizontal'
padding: self.height * 0.4, 0
size_hint_x: 0.3
ProgressFieldLabel:
text: ''
id: prog_status
ToolbarItem:
orientation: 'horizontal'
size_hint_x: 0.1
IconButton:
id: gps_status
text: u'\uf041'
font_size: self.height * 0.8
color: [0.3, 0.3, 0.3, 0.2]
IconButton:
id: tele_status
text: '\357\203\256'
color: [0.3, 0.3, 0.3, 0.2]
font_size: self.height * 0.8
IconButton:
id: data_rx_status
text: '\357\202\223'
color: [0.0, 0.8, 1.0, 0.2]
font_size: self.height * 0.8
'''
class ToolbarItem(BoxLayout):
pass
class ProgressFieldLabel(AnchorLayout):
"""
A specialized field for showing progress information
"""
minval = NumericProperty(0)
maxval = NumericProperty(100)
value = NumericProperty(0)
color = ColorScheme.get_accent()
text_color = ColorScheme.get_light_primary_text()
text = StringProperty('')
def __init__(self, **kwargs):
super(ProgressFieldLabel, self).__init__(**kwargs)
def on_minval(self, instance, value):
self._refresh_value()
def on_maxval(self, instance, value):
self._refresh_value()
def on_value(self, instance, value):
self._refresh_value()
def on_text(self, instance, value):
self.ids.value.text = str(value)
    def _refresh_value(self):
        stencil = self.ids.stencil
        value = self.value
        minval = self.minval
        maxval = self.maxval
        if maxval == minval:
            pct = 0.0  # degenerate range; avoid division by zero
        else:
            # float() guards against Python 2 integer truncation
            pct = float(value - minval) / (maxval - minval)
        stencil.width = self.width * pct
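    # Usage sketch (illustrative, not from the app): the widget is driven
    # entirely through its properties, e.g.
    #   label = ProgressFieldLabel(text='Downloading...')
    #   label.minval, label.maxval = 0, 100
    #   label.value = 42   # the stencil then reveals 42% of the label width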
class ToolbarView(BoxLayout):
"""
The Toolbar view provides various status indicators for the running application.
"""
status_pump = ObjectProperty(None)
track_manager = ObjectProperty(None)
TELEMETRY_IDLE = 0
TELEMETRY_ACTIVE = 1
TELEMETRY_CONNECTING = 2
TELEMETRY_ERROR = 3
telemetry_color = {TELEMETRY_IDLE:[0.0, 1.0, 0.0, 0.2],
TELEMETRY_ACTIVE:[0.0, 1.0, 0.0, 1.0],
TELEMETRY_CONNECTING:[1.0, 1.0, 0.0, 1.0],
TELEMETRY_ERROR:[1.0, 0.0, 0.0, 1.0]
}
DATA_NO_RX = 0
DATA_RX = 1
data_rx_color = {DATA_NO_RX:[0.0, 0.8, 1.0, 0.2],
DATA_RX:[0.0, 0.8, 1.0, 1.0]}
TOOLBAR_DATA_RX_DURATION = 0.1
GPS_NO_DATA = 0
GPS_NO_LOCK = 1
GPS_MARGINAL = 2
GPS_HIGH_QUALITY = 3
gps_color = { GPS_NO_DATA: [0.3, 0.3, 0.3, 0.2],
GPS_NO_LOCK: [1.0, 0.0, 0.0, 1.0],
GPS_MARGINAL: [1.0, 1.0, 0.0, 1.0],
GPS_HIGH_QUALITY: [0.0, 1.0, 0.0, 1.0]}
STATUS_LINGER_DURATION = 2.0
ACTIVITY_MESSAGE_LINGER_DURATION = 7.5
PROGRESS_COMPLETE_LINGER_DURATION = 7.0
normal_status_color = ColorScheme.get_light_primary_text()
alert_status_color = ColorScheme.get_alert()
Builder.load_string(TOOLBAR_VIEW_KV)
def __init__(self, **kwargs):
super(ToolbarView, self).__init__(**kwargs)
self.current_status = ''
self.register_event_type('on_main_menu')
self.register_event_type('on_progress')
self.register_event_type('on_data_rx')
self.register_event_type('on_tele_status')
self.register_event_type('on_status')
self.register_event_type('on_activity')
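        # One-shot Clock triggers that reset transient status indicators back
        # to their idle state after the configured linger/decay durations.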
self._data_rx_decay = Clock.create_trigger(self._on_data_rx_decay, ToolbarView.TOOLBAR_DATA_RX_DURATION)
self._activity_decay = Clock.create_trigger(self._on_activity_decay, ToolbarView.ACTIVITY_MESSAGE_LINGER_DURATION)
self._progress_decay = Clock.create_trigger(self._on_progress_decay, ToolbarView.PROGRESS_COMPLETE_LINGER_DURATION)
self._gps_decay = Clock.create_trigger(self._on_gps_decay, ToolbarView.STATUS_LINGER_DURATION)
def on_status_pump(self, instance, value):
value.add_listener(self.on_rc_status_updated)
def on_rc_status_updated(self, status_data):
self._update_track_status(status_data)
self._update_gps_status(status_data)
def on_activity(self, msg):
self._set_activity_message(msg)
self._activity_decay()
def on_main_menu(self, instance, *args):
pass
def mainMenu(self):
self.dispatch('on_main_menu', None)
def _set_state_message(self, msg):
self.ids.state.text = msg
def _set_activity_message(self, msg):
prog_status = self.ids.prog_status
prog_status.text = msg
def on_status(self, msg, isAlert):
status_label = self.ids.prog_status
status_label.text = msg
self.current_status = msg
        if isAlert:
            status_label.text_color = self.alert_status_color
        else:
            status_label.text_color = self.normal_status_color
def _update_progress(self, value):
self.ids.prog_status.value = value
if value == 100:
self._progress_decay()
def on_progress(self, value):
self._update_progress(value)
def _on_progress_decay(self, dt):
self._update_progress(0)
self.ids.prog_status.text = self.current_status
def _on_activity_decay(self, dt):
self._set_activity_message(self.current_status)
def _on_data_rx_decay(self, dt):
        self.ids.data_rx_status.color = ToolbarView.data_rx_color[ToolbarView.DATA_NO_RX]
def on_data_rx(self, value):
self._data_rx_decay.cancel()
self.ids.data_rx_status.color = ToolbarView.data_rx_color[int(value)]
self._data_rx_decay()
def on_tele_status(self, status):
        try:
            self.ids.tele_status.color = self.telemetry_color[status]
        except KeyError:
            Logger.error("ToolbarView: Invalid telemetry status: " + str(status))
def _on_gps_decay(self, dt):
self.ids.gps_status.color = ToolbarView.gps_color[ToolbarView.GPS_NO_DATA]
def _update_gps_status(self, status_data):
self._gps_decay.cancel()
gps_status = status_data['status']['GPS']
gps_quality_code = gps_status['qual']
gps_quality = ToolbarView.GPS_NO_DATA
if gps_quality_code == 0:
gps_quality = ToolbarView.GPS_NO_LOCK
elif gps_quality_code == 1:
gps_quality = ToolbarView.GPS_MARGINAL
elif gps_quality_code >= 2:
gps_quality = ToolbarView.GPS_HIGH_QUALITY
gps_color = ToolbarView.gps_color[gps_quality]
self.ids.gps_status.color = gps_color
self._gps_decay()
def _update_track_status(self, status_data):
try:
track_status = status_data['status']['track']
detection_status = track_status['status']
if detection_status == 0:
track_status_msg = 'Searching for Track'
            elif detection_status == 1 and track_status['trackId'] == 0:
track_status_msg = 'User defined Track'
else:
if track_status['trackId'] != 0:
track = self.track_manager.find_track_by_short_id(track_status['trackId'])
if track is None:
track_status_msg = '(Unknown Track)'
else:
track_status_msg = track.name
configuration_name = track.configuration
                        if configuration_name:
track_status_msg += ' (' + configuration_name + ')'
else:
track_status_msg = 'No track detected'
self._set_state_message(track_status_msg)
except Exception as e:
Logger.warn("ToolbarView: Could not retrieve track detection status " + str(e))
|
ddimensia/RaceCapture_App
|
autosportlabs/racecapture/views/toolbar/toolbarview.py
|
Python
|
gpl-3.0
| 10,857
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2017 Vincent Noel (vincent.noel@butantan.gov.br)
#
# This file is part of libSigNetSim.
#
# libSigNetSim is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# libSigNetSim is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with libSigNetSim. If not, see <http://www.gnu.org/licenses/>.
"""
Initialization of the module SigNetSim/CWriter
"""
|
vincent-noel/libSigNetSim
|
libsignetsim/cwriter/__init__.py
|
Python
|
gpl-3.0
| 849
|
# -*- coding: utf-8 -*-
from rules.rule import *
class Rule(KLCRule):
"""
Create the methods check and fix to use with the kicad lib files.
"""
def __init__(self, component):
super(Rule, self).__init__(component, 'Rule 3.1', 'Using a 100mils grid, pin ends and origin must lie on grid nodes (IEC-60617).')
def check(self):
"""
        Performs the check for this rule.
The following variables will be accessible after checking:
* violating_pins
"""
self.violating_pins = []
for pin in self.component.pins:
posx = int(pin['posx'])
posy = int(pin['posy'])
if (posx % 100) != 0 or (posy % 100) != 0:
self.violating_pins.append(pin)
self.verboseOut(Verbosity.HIGH, Severity.ERROR, 'pin: {0} ({1}), {2}'.format(pin['name'], pin['num'], positionFormater(pin)))
        return len(self.violating_pins) > 0
def fix(self):
"""
        Applies the fix for this rule, if possible.
"""
self.verboseOut(Verbosity.NORMAL, Severity.INFO, "FIX: not yet supported" )
# TODO
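        # A possible implementation (untested sketch): snap each violating pin
        # to the nearest 100 mil grid node, e.g.
        #   for pin in self.violating_pins:
        #       pin['posx'] = str(int(round(int(pin['posx']) / 100.0)) * 100)
        #       pin['posy'] = str(int(round(int(pin['posy']) / 100.0)) * 100)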
|
reportingsjr/kicad-library-utils
|
schlib/rules/rule3_1.py
|
Python
|
gpl-3.0
| 1,168
|
# coding=utf-8
"""
Embedded Demo story, start it with python -m tale.demo.story
'Tale' mud driver, mudlib and interactive fiction framework
Copyright by Irmen de Jong (irmen@razorvine.net)
"""
from __future__ import absolute_import, print_function, division, unicode_literals
import os
import sys
import tale
from tale.driver import StoryConfig
from tale.main import run_story
class Story(object):
config = StoryConfig(
name="Tale demo story",
author="Irmen de Jong",
author_address="irmen@razorvine.net",
version=tale.__version__, # arbitrary but is used to check savegames for compatibility
requires_tale=tale.__version__, # tale library required to run the game
supported_modes={"if", "mud"}, # what driver modes (if/mud) are supported by this story
player_name="julie", # set a name to create a prebuilt player, None to use the character builder
player_gender="f", # m/f/n
player_race="human", # default is "human" ofcourse, but you can select something else if you want
player_money=15.5, # starting money
money_type="modern", # money type modern/fantasy
server_tick_method="timer", # 'command' (waits for player entry) or 'timer' (async timer driven)
server_tick_time=1.0, # time between server ticks (in seconds) (usually 1.0 for 'timer' tick method)
gametime_to_realtime=5, # meaning: game time is X times the speed of real time (only used with "timer" tick method) (>=0)
max_wait_hours=2, # the max. number of hours (gametime) the player is allowed to wait (>=0)
display_gametime=True, # enable/disable display of the game time at certain moments
epoch=None, # start date/time of the game clock
startlocation_player="house.livingroom",
startlocation_wizard="house.livingroom",
savegames_enabled=False,
show_exits_in_look=True,
license_file=None,
mud_host=None,
mud_port=None
)
driver = None # will be set by driver init()
def init(self, driver):
self.driver = driver
self.driver.load_zones(["house"])
def init_player(self, player):
player.money = 12.65
def welcome(self, player):
player.tell("<bright>Welcome to '%s'.</>" % self.config.name, end=True)
player.tell("This is a tiny embedded story to check out a running Tale environment.")
player.tell("Try to fool around with your pet, and exit the house to win the game.")
player.tell("\n")
def welcome_savegame(self, player):
pass # not supported in demo
def goodbye(self, player):
player.tell("Thanks for trying out Tale!")
def completion(self, player):
"""congratulation text / finale when player finished the game (story_complete event)"""
player.tell("Congratulations on finding the exit! Someone else has to look after Garfield now though...")
if __name__ == "__main__":
# story is invoked as a script, start it in the Tale Driver.
gamedir = os.path.dirname(__file__)
gui = len(sys.argv) > 1 and sys.argv[1] == "--gui"
web = len(sys.argv) > 1 and sys.argv[1] == "--web"
mud = len(sys.argv) > 1 and sys.argv[1] == "--mud"
run_story(gamedir, gui, web, mud)
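    # e.g. "python -m tale.demo.story --gui" would select the GUI frontend;
    # with no flag the story runs in the default interface mode.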
|
sils1297/Tale
|
tale/demo/story.py
|
Python
|
gpl-3.0
| 3,500
|
# -*- coding: utf-8 -*-
"""
Administration interface options for ``critica.apps.videos`` models.
"""
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from critica.apps.custom_admin.sites import custom_site
from critica.apps.videos.models import Video
from critica.apps.issues.models import Issue
from critica.apps.videos.forms import VideoAdminModelForm
class VideoAdmin(admin.ModelAdmin):
"""
Administration interface for ``Video`` model.
"""
fieldsets = (
(_('Video'), {'fields': ('name', 'widget')}),
(_('Filling'), {'fields': ('issues', 'tags')}),
(_('Publication'), {'fields': ('is_reserved', 'is_ready_to_publish')}),
)
list_display = ('name', 'ald_issues', 'creation_date', 'modification_date', 'is_reserved', 'is_ready_to_publish', 'ald_submitter')
list_filter = ('issues',)
filter_horizontal = ('issues',)
search_fields = ('name',)
ordering = ('-creation_date',)
date_hierarchy = 'creation_date'
exclude = ['author']
form = VideoAdminModelForm
_object = None
def __call__(self, request, url):
"""
Adds current request object and current URL to this class.
"""
self.request = request
return super(VideoAdmin, self).__call__(request, url)
def change_view(self, request, object_id):
"""
Custom change view.
"""
self._request = request
self._object = None
        try:
            self._object = self.model._default_manager.get(pk=object_id)
        except self.model.DoesNotExist:
            self._object = None
        self._action = "change"
        return super(VideoAdmin, self).change_view(request, object_id)
def formfield_for_dbfield(self, db_field, **kwargs):
"""
Hook for specifying the form Field instance for a given database Field
instance. If kwargs are given, they're passed to the form Field's constructor.
"""
field = super(VideoAdmin, self).formfield_for_dbfield(db_field, **kwargs)
if db_field.name == 'issues':
            my_choices = [('', '---------')]
            my_choices.extend(Issue.objects.order_by('-number').values_list('id','number'))
            field.choices = my_choices
return field
def save_model(self, request, obj, form, change):
"""
Given a model instance save it to the database.
Auto-save submitter.
"""
        if not change:
obj.submitter = request.user
obj.save()
def ald_submitter(self, obj):
"""
Formatted submitter for admin list_display option.
"""
if obj.submitter.get_full_name():
return obj.submitter.get_full_name()
else:
return obj.submitter
ald_submitter.short_description = _('submitter')
def ald_issues(self, obj):
"""
        Formatted issue list for admin list_display option.
"""
issues = [issue.number for issue in obj.issues.all()]
return ', '.join(['%s' % issue for issue in issues])
ald_issues.short_description = _('issues')
# Registers
# ------------------------------------------------------------------------------
admin.site.register(Video, VideoAdmin)
custom_site.register(Video, VideoAdmin)
|
brunobord/critica
|
apps/videos/admin.py
|
Python
|
gpl-3.0
| 3,421
|
# The level editor GUI.
# The following workflow is expected:
# 1) User loads a level
# 2) main window displays the scene layout
#    right-side-top-dock displays:
#    - level scene tree
#    - level tree (a list in fact)
#    - level resources tree (a list in fact)
# 3) user selects an element in one of the trees; related properties are
#    displayed in the right-side-down-dock property list
# 4) user edits properties in the property list
#
# Later on, provide property editing via the scene layout display
# and add a toolbar to create new elements.
#
# In memory, we keep track of two things:
# - updated level
# - specific text/fx resources
import xml.etree.ElementTree #@UnresolvedImport
import os.path
import glob #@UnresolvedImport
import subprocess #@UnresolvedImport
import louie
import wogfile
import metaworld
import metawog
import metaworldui
import metatreeui
import metaelementui
import levelview
import wogeditor_rc #@UnusedImport
from shutil import copy2 #@UnresolvedImport
from PyQt4 import QtCore, QtGui #@UnresolvedImport
from PyQt4.QtCore import Qt #@UnresolvedImport
import qthelper
import editleveldialog
import newleveldialog_ui
import errors
from utils import * #@UnusedWildImport
from datetime import datetime
YAML_FORMAT = True
LOG_TO_FILE = False
APP_NAME_UPPER = 'DFG-AMY-EDITOR'
APP_NAME_LOWER = 'dfg-amy-editor'
APP_NAME_PROPER = 'Amy In Da Farm! Editor'
STR_DIR_STUB = 'levels'
CURRENT_VERSION = "v0.1"
CREATED_BY = 'Created by ' + APP_NAME_PROPER + ' ' + CURRENT_VERSION
ISSUE_LEVEL_NONE = 0
ISSUE_LEVEL_ADVICE = 1
ISSUE_LEVEL_WARNING = 2
ISSUE_LEVEL_CRITICAL = 4
MAXRECENTFILES = 4
#@DaB New actions for Add item toolbar
def _appendChildTag( parent_element, new_element_meta , mandatory_attributes, keepid = False ):
"""Adds the specified child tag to the specified element and update the tree view."""
assert parent_element is not None
# build the list of attributes with their initial values.
for attribute_meta in new_element_meta.attributes:
if attribute_meta.mandatory:
if attribute_meta.type == metaworld.IDENTIFIER_TYPE:
try:
given_id = mandatory_attributes[attribute_meta.name]
except KeyError:
given_id = None
if given_id is None or not keepid:
init_value = parent_element.world.generate_unique_identifier( attribute_meta )
mandatory_attributes[attribute_meta.name] = init_value
else:
init_value = attribute_meta.init
if init_value is not None:
if attribute_meta.name not in mandatory_attributes:
mandatory_attributes[attribute_meta.name] = init_value
if ( attribute_meta.default is not None and not attribute_meta.mandatory ):
if attribute_meta.name not in mandatory_attributes:
init_value = attribute_meta.default
mandatory_attributes[attribute_meta.name] = init_value
# Notes: when the element is added, the ElementAdded signal will cause the
# corresponding item to be inserted into the tree.
child_element = parent_element.make_child( new_element_meta,
mandatory_attributes )
# Select new item in tree view
if not keepid:
child_element.world.set_selection( child_element )
return child_element
class AddItemFactory( object ):
def __init__( self, window, parent, itemtag, attrib ):
self.window = window
self.itemtag = itemtag
self.attrib = attrib
self.parent = parent
    def _element_has_children(self, element_tag):
        meta_element = metawog.TREE_LEVEL_SCENE.find_immediate_child_by_tag( element_tag )
        return meta_element is not None and meta_element.children_count > 0
def __call__( self ):
assert self.parent is not None
model = self.window.getCurrentModel()
if model:
window = self.window.mdiArea.activeSubWindow()
if window:
cview = window.widget()
cp = cview.mapToScene( cview.width()*0.5, cview.height()*0.5 )
offsetx, offsety = 0, 0
if self.parent == 'level':
root = model.level_root
elif self.parent == 'scene':
root = model.scene_root
elif self.parent == 'resource':
root = model.resource_root
elif self._element_has_children( self.parent ):
thisworld = cview.world
selected_elements = thisworld.selected_elements
cgparent = None
for element in selected_elements:
meta_element = metawog.TREE_LEVEL_SCENE.find_immediate_child_by_tag( element.tag )
if meta_element.children_count > 0:
cgparent = element
break
else:
# check to see if they are part of a cg
pelement = element.parent
if pelement is not None:
if self._element_has_children( pelement.tag ):
cgparent = pelement
break
if cgparent is None:
QtGui.QMessageBox.warning( window, 'No composite geometry parent', 'You must select a CompositeGeom item to add this child to' )
return
root = cgparent
offsetx, offsety = root.get_native( 'center' )
else:
print "Unknown Parent in AddItemFactory", self.parent
return
rootmbt = root.meta.find_immediate_child_by_tag( self.itemtag )
if rootmbt is not None:
for attribute_meta in rootmbt.attributes:
if attribute_meta.type == metaworld.XY_TYPE:
self.attrib[attribute_meta.name] = str( cp.x() - offsetx ) + "," + str( -( cp.y() + offsety ) )
break
_appendChildTag( root, rootmbt, self.attrib )
def tr( context, message ):
return QtCore.QCoreApplication.translate( context, message )
def find_element_in_tree( root_element, element ):
"""Searchs the specified element in the root_element children and returns all its parent, and its index in its immediate parent.
Returns None if the element is not found, otherwise returns a tuple ([parent_elements], child_index)
root_element, element: must provides the interface xml.etree.ElementTree.
"""
for index, child_element in enumerate( root_element ):
if child_element is element:
return ( [root_element], index )
found = find_element_in_tree( child_element, element )
if found is not None:
found_parents, found_index = found
found_parents.insert( 0, root_element )
return found_parents, found_index
return None
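# Illustrative example for find_element_in_tree: given a tree root -> a -> b
# (b is a's first child), find_element_in_tree(root, b) returns ([root, a], 0),
# i.e. the chain of parents from the root down plus b's index within a.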
def flattened_element_children( element ):
"""Returns a list of all the element children, including its grand-children..."""
children = []
for child_element in element:
children.append( child_element )
children.extend( flattened_element_children( child_element ) )
return children
class GameModelException( Exception ):
pass
class PixmapCache( object ):
"""A global pixmap cache the cache the pixmap associated to each element.
Maintains the cache up to date by listening for element events.
"""
def __init__( self, amy_dir, universe ):
self._amy_dir = amy_dir
self._pixmaps_by_path = {}
self._filedate_by_path = {}
self.__event_synthetizer = metaworld.ElementEventsSynthetizer( universe,
None,
self._on_element_updated,
self._on_element_about_to_be_removed )
def get_pixmap( self, image_id ):
"""Returns a pixmap corresponding to the image id (actually image path).
The pixmap is loaded if not present in the cache.
None is returned on failure to load the pixmap.
"""
image_path = image_id
pixmap = self._pixmaps_by_path.get( image_path )
if pixmap:
return pixmap
path = os.path.join( self._amy_dir, image_path + '.png' )
if not os.path.isfile( path ):
print 'Warning: invalid image path "%(path)s"' % { 'path': image_path }
else:
return self._addToCache( path, image_id )
return None
def _addToCache( self, path, image_id ):
img = QtGui.QImage()
image_path = image_id
if not img.load( path ):
data = file( path, 'rb' ).read()
if not img.loadFromData( data ):
if image_path in self._pixmaps_by_path.keys():
del self._pixmaps_by_path[image_path]
del self._filedate_by_path[image_path]
print 'Warning: failed to load image "%(path)s"' % { 'path' : image_path }
return None
# assume source image is in premultiplied alpha format
# so, after converting image to ARGB32_Premultiplied
# we need to restore its pixels to the pixels of original image
img2 = img.convertToFormat( QtGui.QImage.Format_ARGB32_Premultiplied )
if img.hasAlphaChannel():
#img = img.convertToFormat( QtGui.QImage.Format_ARGB32 )
w = img.width()
for y in xrange( img.height() ):
pixels = img.scanLine( y )
pixels.setsize( 4 * w )
pixels_new = img2.scanLine( y )
pixels_new.setsize( 4 * w )
pixels_new[:] = pixels[:]
self._pixmaps_by_path[image_path] = img2
self._filedate_by_path[image_path] = os.path.getmtime( path )
return img2
def refresh( self ):
# check each file in the cache...
# if it's out of date then reload
for image_path, filedate in self._filedate_by_path.items():
path = os.path.normpath( os.path.join( self._amy_dir, image_path + '.png' ) )
if not os.path.isfile( path ):
if image_path in self._pixmaps_by_path.keys():
del self._pixmaps_by_path[image_path]
del self._filedate_by_path[image_path]
print 'Warning: File is missing %s' % path
elif os.path.getmtime( path ) > filedate:
# refresh
                self._addToCache( path, image_path )
    def _on_element_about_to_be_removed( self, element, index_in_parent ): #IGNORE:W0613
        if element.tag == 'Image':
            image_path = element.get( 'path', '' )
            if image_path in self._pixmaps_by_path:
                del self._pixmaps_by_path[image_path]
    def _on_element_updated( self, element, name, new_value, old_value ): #IGNORE:W0613
        if element.tag == 'Image':
            if old_value in self._pixmaps_by_path:
                del self._pixmaps_by_path[old_value]
class GameModel( QtCore.QObject ):
def __init__( self, amy_path, window ):
"""Loads text and global resources.
Loads Levels.
The following signals are provided:
QtCore.SIGNAL('selectedObjectChanged(PyQt_PyObject,PyQt_PyObject,PyQt_PyObject)')
"""
QtCore.QObject.__init__( self )
self._window = window
self._amy_path = amy_path
if ON_PLATFORM == PLATFORM_MAC:
# on Mac
# amydir is Contents\resources\game\
self._amy_dir = os.path.join( self._amy_path, u'Contents', u'Resources', u'game' )
else:
self._amy_dir = os.path.split( amy_path )[0]
metaworld.AMY_PATH = self._amy_dir
self._res_dir = os.path.join( self._amy_dir, u'Data' )
# On MAC
# enumerate all files in res folder
# convert all .png.binltl to .png
if ON_PLATFORM == PLATFORM_MAC:
window.statusBar().showMessage( self.tr( "Checking graphics files..." ) )
skipped, processed, found = 0, 0, 0
lresdir = len( self._res_dir )
toconvert = []
for ( path, dirs, files ) in os.walk( self._res_dir ): #@UnusedVariable
for name in files:
if name.endswith( '.png.binltl' ):
found += 1
output_path = os.path.join( path, name[:-11] ) + '.png'
if not os.path.isfile( output_path ):
toconvert.append( [os.path.join( path, name ), output_path, os.path.join( path, name )[lresdir:]] )
processed += 1
else:
skipped += 1
#print "png.binltl found",found,'processed',processed,'skipped',skipped
if processed > 0:
                progress = QtGui.QProgressDialog( "", QtCore.QString(), 0, processed, window )
                progress.setWindowTitle( window.tr( "Converting PNG.BINLTL files to PNG..." ) )
                progress.setWindowModality( Qt.WindowModal )
progress.setMinimumWidth( 300 )
progress.forceShow()
for filepair in toconvert:
if progress.wasCanceled():
break
                    progress.setValue( progress.value() + 1 )
progress.setLabelText( filepair[2] )
wogfile.pngbinltl2png( filepair[0], filepair[1] )
                progress.setValue( progress.value() + 1 )
window.statusBar().showMessage( self.tr( "Game Model : Initializing" ) )
self._universe = metaworld.Universe()
self.global_world = self._universe.make_world( metawog.WORLD_GLOBAL, 'game' )
window.statusBar().showMessage( self.tr( "Game Model : Loading Properties XMLs" ) )
self._readonly_resources = set()
self._levels = self._loadDirList( os.path.join( self._res_dir, 'levels' ),
filename_filter = '%s.scene' )
self.models_by_name = {}
self.__is_dirty = False
self.modified_worlds_to_check = set()
louie.connect( self._onElementAdded, metaworld.ElementAdded )
louie.connect( self._onElementAboutToBeRemoved, metaworld.ElementAboutToBeRemoved )
louie.connect( self._onElementUpdated, metaworld.AttributeUpdated )
self.pixmap_cache = PixmapCache( self._amy_dir, self._universe )
window.statusBar().showMessage( self.tr( "Game Model : Complete" ) )
@property
def is_dirty( self ):
worlds = self.modified_worlds_to_check
self.modified_worlds_to_check = set()
for world in worlds:
if world:
self.__is_dirty = self.__is_dirty or world.is_dirty
return self.__is_dirty
def getResourcePath( self, game_dir_relative_path ):
return os.path.join( self._amy_dir, game_dir_relative_path )
def _loadTree( self, world, meta_tree, directory, file_name ):
path = os.path.join( directory, file_name )
if not os.path.isfile( path ):
raise GameModelException( tr( 'LoadData',
'File "%1" does not exist. You likely provided an incorrect Amy In Da Farm! directory.' ).arg( path ) )
data = wogfile.decrypt_file_data( path )
try:
if YAML_FORMAT:
new_tree = world.make_tree_from_yaml( meta_tree, data )
else:
new_tree = world.make_tree_from_xml( meta_tree, data )
except IOError, e:
raise GameModelException( unicode( e ) + u' in file ' + file_name )
new_tree.setFilename( path )
return new_tree
def _loadUnPackedTree( self, world, meta_tree, directory, file_name ):
input_path = os.path.join( directory, file_name )
data = file( input_path, 'rb' ).read()
try:
if YAML_FORMAT:
new_tree = world.make_tree_from_yaml( meta_tree, data )
else:
new_tree = world.make_tree_from_xml( meta_tree, data )
except IOError, e:
raise GameModelException( unicode( e ) + u' in file ' + file_name )
new_tree.setFilename( input_path )
return new_tree
def _saveUnPackedTree( self, directory, file_name, tree ):
if not os.path.isdir( directory ):
os.makedirs( directory )
output_path = os.path.join( directory, file_name )
if YAML_FORMAT:
data = '## ' + CREATED_BY + '\n' + tree.to_yaml()
else:
data = tree.to_xml()
data = '<!-- ' + CREATED_BY + ' -->\n' + data.replace( '><', '>\n<' )
file( output_path, 'wb' ).write( data )
tree.setFilename( output_path )
def _saveTree( self, directory, file_name, tree ):
if not os.path.isdir( directory ):
os.makedirs( directory )
path = os.path.join( directory, file_name )
if YAML_FORMAT:
data = '## ' + CREATED_BY + '\n' + tree.to_yaml()
else:
data = tree.to_xml()
data = '<!-- ' + CREATED_BY + ' -->\n' + data.replace( '><', '>\n<' )
wogfile.encrypt_file_data( path, data )
tree.setFilename( path )
def _loadDirList( self, directory, filename_filter ):
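        # filename_filter may contain a '%s' placeholder that is substituted
        # with each directory entry's name (e.g. '%s.scene'); a plain filename
        # is used verbatim (see the TypeError fallback in is_valid_dir).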
if not os.path.isdir( directory ):
raise GameModelException( tr( 'LoadLevelList',
'Directory "%1" does not exist. You likely provided an incorrect Amy In Da Farm! directory.' ).arg( directory ) )
def is_valid_dir( entry ):
"""Accepts the directory only if it contains a specified file."""
dir_path = os.path.join( directory, entry )
if os.path.isdir( dir_path ):
try:
filter_file_path = filename_filter % entry
except TypeError:
filter_file_path = filename_filter
if os.path.isfile( os.path.join( dir_path, filter_file_path ) ):
return True
return False
dirs = [ entry for entry in os.listdir( directory ) if is_valid_dir( entry ) ]
dirs.sort( key = unicode.lower )
return dirs
def _loadFileList( self, directory, filename_filter ):
if not os.path.isdir( directory ):
raise GameModelException( tr( 'LoadFileList',
'Directory "%1" does not exist. You likely provided an incorrect Amy In Da Farm! directory.' ).arg( directory ) )
def is_valid_file( entry ):
"""Accepts the directory only if it contains a specified file."""
if entry.endswith( filename_filter ):
file_path = os.path.join( directory, entry )
return os.path.isfile( file_path )
return False
files = [ entry for entry in os.listdir( directory ) if is_valid_file( entry ) ]
files.sort( key = unicode.lower )
return files
@property
def names( self ):
return self._levels
def getModel( self, name ):
if name not in self.models_by_name:
folder = os.path.join( self._res_dir, STR_DIR_STUB, name )
world = self.global_world.make_world( metawog.WORLD_LEVEL,
name,
LevelWorld,
self )
self._loadUnPackedTree( world, metawog.TREE_LEVEL_GAME,
folder, name + '.level' )
self._loadUnPackedTree( world, metawog.TREE_LEVEL_SCENE,
folder, name + '.scene' )
self._loadUnPackedTree( world, metawog.TREE_LEVEL_RESOURCE,
folder, name + '.resrc' )
if world.isReadOnly:
world.clean_dirty_tracker()
world.clear_undo_queue()
self.models_by_name[name] = world
return self.models_by_name[name]
def selectLevel( self, name ):
"""Activate the specified level and load it if required.
Returns the activated LevelWorld.
"""
model = self.getModel( name )
assert model is not None
louie.send( metaworldui.ActiveWorldChanged, self._universe, model )
return model
def _onElementAdded( self, element, index_in_parent ): #IGNORE:W0613
self.modified_worlds_to_check.add( element.world )
def _onElementUpdated( self, element, attribute_name, new_value, old_value ): #IGNORE:W0613
self.modified_worlds_to_check.add( element.world )
def _onElementAboutToBeRemoved( self, element, index_in_parent ): #IGNORE:W0613
self.modified_worlds_to_check.add( element.world )
def hasModifiedReadOnly( self ):
"""Checks if the user has modified read-only """
for model in self.models_by_name.itervalues():
if model.is_dirty and model.isReadOnly:
return True
return False
def playLevel( self, level_model ):
"""Starts Amy to test the specified level."""
# remove PYTHONPATH from the environment of new process
env = os.environ.copy()
if 'PYTHONPATH' in env:
del env['PYTHONPATH']
if ON_PLATFORM == PLATFORM_MAC:
#print "ON MAC - Save and Play"
#Then run the program file itself with no command-line parameters
#print "launch ",os.path.join(self._amy_path,u'Contents',u'MacOS',u'Amy In Da Farm')
subprocess.Popen(
os.path.join( self._amy_path, u'Contents', u'MacOS', u'Amy In Da Farm' ),
cwd = self._amy_dir, env = env )
else:
#pid = subprocess.Popen( self._amy_path, cwd = self._amy_dir ).pid
try:
subprocess.Popen( [self._amy_path, level_model.name], cwd = self._amy_dir, env = env )
            except OSError:
                # debug builds keep the executable in a different place; try that
exe_path = os.path.join( os.path.dirname( self._amy_dir ), '_Debug', 'Launcher.exe' )
subprocess.Popen( [exe_path, level_model.name], cwd = self._amy_dir, env = env )
# Don't wait for process end...
# @Todo ? Monitor process so that only one can be launched ???
def newLevel( self, name ):
"""Creates a new blank level with the specified name.
        May fail with an IOError or OSError."""
return self._addNewLevel( name,
self._universe.make_unattached_tree_from_xml( metawog.TREE_LEVEL_GAME,
metawog.LEVEL_GAME_TEMPLATE ),
self._universe.make_unattached_tree_from_xml( metawog.TREE_LEVEL_SCENE,
metawog.LEVEL_SCENE_TEMPLATE ),
self._universe.make_unattached_tree_from_xml( metawog.TREE_LEVEL_RESOURCE,
metawog.LEVEL_RESOURCE_TEMPLATE ) )
def cloneLevel( self, cloned_name, new_name ):
#Clone an existing level and its resources.
model = self.getModel( cloned_name )
dir = os.path.join( self._res_dir, STR_DIR_STUB, new_name )
if not os.path.isdir( dir ):
os.mkdir( dir )
os.mkdir( os.path.join( dir, 'animations' ) )
os.mkdir( os.path.join( dir, 'fx' ) )
os.mkdir( os.path.join( dir, 'scripts' ) )
os.mkdir( os.path.join( dir, 'textures' ) )
os.mkdir( os.path.join( dir, 'sounds' ) )
#new cloning method... #2
# worked for balls... might be going back to the old Nitrozark way..
# which didn't work right... Hmmm.!
#get xml from existing
#make unattached trees from it
new_level_tree = self._universe.make_unattached_tree_from_xml( metawog.TREE_LEVEL_GAME,
model.level_root.tree.to_xml() )
new_scene_tree = self._universe.make_unattached_tree_from_xml( metawog.TREE_LEVEL_SCENE,
model.scene_root.tree.to_xml() )
new_res_tree = self._universe.make_unattached_tree_from_xml( metawog.TREE_LEVEL_RESOURCE,
model.resource_root.tree.to_xml() )
#change stuff
#TODO: copy level related resources to new folder and change their paths in scene
# self._res_swap( new_level_tree.root, '_' + cloned_name.upper() + '_', '_' + new_name.upper() + '_' )
# self._res_swap( new_scene_tree.root, '_' + cloned_name.upper() + '_', '_' + new_name.upper() + '_' )
#save out new trees
self._saveUnPackedTree( dir, new_name + '.level', new_level_tree )
self._saveUnPackedTree( dir, new_name + '.scene', new_scene_tree )
self._saveUnPackedTree( dir, new_name + '.resrc', new_res_tree )
self._levels.append( unicode( new_name ) )
self._levels.sort( key = unicode.lower )
self.__is_dirty = True
# def _res_swap( self, element, find, replace ):
# for attribute in element.meta.attributes:
# if attribute.type == metaworld.REFERENCE_TYPE:
# if attribute.reference_family in ['image', 'sound', 'TEXT_LEVELNAME_STR']:
# value = element.get( attribute.name, None )
# if value is not None:
# rv = ','.join( [v.replace( find, replace, 1 ) for v in value.split( ',' )] )
# element.set( attribute.name, rv )
# for child in element.getchildren():
# self._res_swap( child, find, replace )
def _isOriginalFile( self, filename, extension ):
        # Deliberately short-circuited: original-file detection is disabled,
        # so every file is treated as custom; the lookup code below is dead.
        return False
path_bits = filename.replace( '\\', '/' ).split( "/" )
if len( path_bits ) == 1:
print filename, path_bits
return False
path_bits.pop( 0 )
file = path_bits.pop( len( path_bits ) - 1 )
root_element = self._files_tree.root
return self._seekFile( root_element, path_bits, file, extension )
def _seekFile( self, element, path, file, ext ):
if path == []:
for fileitem in element.findall( 'file' ):
if fileitem.get( 'name' ) == file:
if fileitem.get( 'type' ) == ext:
return True
return False
else:
for folder in element.findall( 'folder' ):
if folder.get( 'name' ) == path[0]:
path.pop( 0 )
return self._seekFile( folder, path, file, ext )
return False
def _addNewLevel( self, name, level_tree, scene_tree, resource_tree ):
"""Adds a new level using the specified level, scene and resource tree.
The level directory is created, but the level xml files will not be saved immediately.
"""
dir_path = os.path.join( self._res_dir, STR_DIR_STUB, name )
if not os.path.isdir( dir_path ):
os.mkdir( dir_path )
os.mkdir( os.path.join( dir_path, 'animations' ) )
os.mkdir( os.path.join( dir_path, 'fx' ) )
os.mkdir( os.path.join( dir_path, 'scripts' ) )
os.mkdir( os.path.join( dir_path, 'textures' ) )
os.mkdir( os.path.join( dir_path, 'sounds' ) )
# Creates and register the new level
world = self.global_world.make_world( metawog.WORLD_LEVEL, name,
LevelWorld, self, is_dirty = True )
treestoadd = [level_tree, scene_tree, resource_tree]
world.add_tree( treestoadd )
self.models_by_name[name] = world
self._levels.append( unicode( name ) )
self._levels.sort( key = unicode.lower )
self.__is_dirty = True
class ThingWorld( metaworld.World,
metaworldui.SelectedElementsTracker,
metaworldui.ElementIssueTracker,
metaworldui.UndoWorldTracker ):
def __init__( self, universe, world_meta, name, game_model, is_dirty = False ):
metaworld.World.__init__( self, universe, world_meta, name )
metaworldui.SelectedElementsTracker.__init__( self, self )
metaworldui.ElementIssueTracker.__init__( self, self )
metaworldui.UndoWorldTracker.__init__( self, self, 100 )
self.game_model = game_model
@property
def name( self ):
return self.key
class LevelWorld( ThingWorld ):
def __init__( self, universe, world_meta, name, game_model, is_dirty = False ):
ThingWorld.__init__( self, universe, world_meta, name, game_model, is_dirty = is_dirty )
self.__dirty_tracker = metaworldui.DirtyWorldTracker( self, is_dirty )
self._importError = None
self._sceneissues = ''
self._levelissues = ''
self._resrcissues = ''
self._globalissues = ''
self._scene_issue_level = ISSUE_LEVEL_NONE
self._level_issue_level = ISSUE_LEVEL_NONE
self._resrc_issue_level = ISSUE_LEVEL_NONE
self._global_issue_level = ISSUE_LEVEL_NONE
self._view = None
@property
def level_root( self ):
return self.find_tree( metawog.TREE_LEVEL_GAME ).root
@property
def scene_root( self ):
return self.find_tree( metawog.TREE_LEVEL_SCENE ).root
@property
def resource_root( self ):
return self.find_tree( metawog.TREE_LEVEL_RESOURCE ).root
@property
def is_dirty( self ):
return self.__dirty_tracker.is_dirty
@property
def isReadOnly( self ):
return self.name.lower() in metawog.LEVELS_ORIGINAL_LOWER
@property
def view( self ):
return self._view
def setView ( self, newview ):
self._view = newview
#@DaB - Issue checking used when saving the level
def hasIssues ( self ):
        #Checks all 3 element trees for outstanding issues.
        # Returns a bitmask of ISSUE_LEVEL_* flags (ISSUE_LEVEL_NONE if clean).
tIssue = ISSUE_LEVEL_NONE
if self.element_issue_level( self.scene_root ):
tIssue |= ISSUE_LEVEL_CRITICAL
if self.element_issue_level( self.level_root ):
tIssue |= ISSUE_LEVEL_CRITICAL
if self.element_issue_level( self.resource_root ):
tIssue |= ISSUE_LEVEL_CRITICAL
#If we have a tree Issue.. don't perform the extra checks
#because that can cause rt errors (because of the tree issues)
#and then we don't see a popup.
if tIssue == ISSUE_LEVEL_CRITICAL:
            #ensure old issues don't get redisplayed if we do "bail" here
self._sceneissues = ''
self._levelissues = ''
self._resrcissues = ''
self._globalissues = ''
return tIssue
if self.haslevel_issue():
tIssue |= self._level_issue_level
if self.hasscene_issue():
tIssue |= self._scene_issue_level
if self.hasresrc_issue():
tIssue |= self._resrc_issue_level
if self.hasglobal_issue():
tIssue |= self._global_issue_level
return tIssue
def getIssues ( self ):
#Get a 'report' of outstanding Issues
#Used for Popup Message
txtIssue = ''
if self.element_issue_level( self.scene_root ):
txtIssue = txtIssue + '<p>Scene Tree:<br>' + self.element_issue_report( self.scene_root ) + '</p>'
if self.scene_issue_report != '':
txtIssue += '<p>Scene Checks:<br>' + self.scene_issue_report + '</p>'
if self.element_issue_level( self.level_root ):
txtIssue = txtIssue + '<p>Level Tree:<br>' + self.element_issue_report( self.level_root ) + '</p>'
if self.level_issue_report != '':
txtIssue += '<p>Level Checks:<br>' + self.level_issue_report + '</p>'
if self.element_issue_level( self.resource_root ):
txtIssue = txtIssue + '<p>Resource Tree:<br>' + self.element_issue_report( self.resource_root ) + '</p>'
if self.resrc_issue_report != '':
txtIssue += '<p>Resource Checks:<br>' + self.resrc_issue_report + '</p>'
if self.global_issue_report != '':
txtIssue += '<p>Global Checks:<br>' + self.global_issue_report + '</p>'
return txtIssue
#@DaB Additional Checking Level,Scene,Resource (at tree level)
def hasglobal_issue( self ):
# check for issues across trees
#if there's a levelexit it must be within the scene bounds
self._globalissues = ''
self._global_issue_level = ISSUE_LEVEL_NONE
levelexit = self.level_root.find( 'levelexit' )
if levelexit is not None:
exit_posx, exit_posy = levelexit.get_native( 'pos' )
minx, maxx = self.scene_root.get_native( 'minx' ), self.scene_root.get_native( 'maxx' )
miny, maxy = self.scene_root.get_native( 'miny' ), self.scene_root.get_native( 'maxy' )
if exit_posx > maxx or exit_posx < minx or exit_posy > maxy or exit_posy < miny:
# exit outside scene bounds warning
self.addGlobalError( 401, None )
return self._global_issue_level != ISSUE_LEVEL_NONE
def haslevel_issue( self ):
# rules for "DUMBASS" proofing (would normally use a much ruder word)
root = self.level_root
self._levelissues = ''
self._level_issue_level = ISSUE_LEVEL_NONE
normal_camera = False
widescreen_camera = False
#must have 1 normal camera and 1 widescreen camera
for camera in root.findall( 'camera' ):
c_aspect = camera.get( 'aspect' )
if c_aspect == 'normal':
normal_camera = True
elif c_aspect == 'widescreen':
widescreen_camera = True
#only Single poi travel time check
if len( camera._children ) == 1:
if camera._children[0].get_native( 'traveltime', 0 ) > 1:
self.addLevelError( 101, c_aspect )
if not normal_camera:
self.addLevelError( 102, None )
if not widescreen_camera:
self.addLevelError( 103, None )
        # TODO: populate end_conditions; this check is currently a no-op.
        end_conditions = []
if len( end_conditions ) > 1:
self.addLevelError( 111, ','.join( end_conditions ) )
return self._level_issue_level != ISSUE_LEVEL_NONE
def addSceneError( self, error_num, subst ):
error = errors.ERROR_INFO[error_num]
self._scene_issue_level, self._sceneissues = self.addError( self._scene_issue_level, self._sceneissues, error, error_num, subst )
def addLevelError( self, error_num, subst ):
error = errors.ERROR_INFO[error_num]
self._level_issue_level, self._levelissues = self.addError( self._level_issue_level, self._levelissues, error, error_num, subst )
def addResourceError( self, error_num, subst ):
error = errors.ERROR_INFO[error_num]
self._resrc_issue_level, self._resrcissues = self.addError( self._resrc_issue_level, self._resrcissues, error, error_num, subst )
def addGlobalError( self, error_num, subst ):
error = errors.ERROR_INFO[error_num]
self._global_issue_level, self._globalissues = self.addError( self._global_issue_level, self._globalissues, error, error_num, subst )
def addError( self, err_level, err_message, error, error_num, err_subst ):
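        # error comes from errors.ERROR_INFO as (issue_level, message_template);
        # issue levels are bit flags, which is why they are OR-ed together.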
err_level |= error[0]
err_message += errors.ERROR_FRONT[error[0]]
if err_subst is not None:
err_message += error[1] % err_subst
else:
err_message += error[1]
err_message += errors.ERROR_MORE_INFO % error_num
err_message += "<br>"
return err_level, err_message
def hasscene_issue( self ):
# TODO: check SceneLayer tiling applied to only pow2 textures
#rules
root = self.scene_root
self._scene_issue_level = ISSUE_LEVEL_NONE
self._sceneissues = ''
#motor attached to static body
motorbodys = set()
for motor in root.findall( 'motor' ):
motorbodys.add( motor.get( 'body' ) )
hingebodys = set()
for hinge in root.findall( 'hinge' ):
hingebodys.add( hinge.get( 'body1' ) )
body2 = hinge.get( 'body2', '' )
if body2 != '':
hingebodys.add( hinge.get( 'body2' ) )
rotspeedbodys = set()
geomitems = []
for geomitem in root.findall( 'rectangle' ):
geomitems.append( geomitem )
for geomitem in root.findall( 'circle' ):
geomitems.append( geomitem )
# # mass checks on rectangle and circles
# for geomitem in geomitems:
# geomstatic = geomitem.get_native( 'static', False )
# #static / masscheck!
# if not geomstatic:
# if geomitem.get_native( 'mass', 0 ) <= 0:
# self.addSceneError( 1, geomitem.get( 'id', '' ) )
# # check on composite geoms
geomchildren = set()
for geomitem in root.findall( 'compositegeom' ):
geomitems.append( geomitem )
# geomstatic = geomitem.get_native( 'static', False )
# if not geomstatic:
# if geomitem.get_native( 'rotation', 0 ) != 0:
# self.addSceneError( 2, geomitem.get( 'id', '' ) )
nchildren = 0
for geomchild in geomitem.getchildren():
nchildren += 1
geomchildren.add( geomchild.get( 'id', '' ) )
# if not geomstatic:
# if geomchild.get_native( 'mass', 0.0 ) <= 0:
# self.addSceneError( 3, ( geomitem.get( 'id', '' ), geomchild.get( 'id', '' ) ) )
# if geomchild.get( 'image' ):
# self.addSceneError( 4, geomchild.get( 'id', '' ) )
# if nchildren == 0:
# if not geomstatic:
# self.addSceneError( 5, geomitem.get( 'id', '' ) )
# else:
# self.addSceneError( 6, geomitem.get( 'id', '' ) )
# Get any radial forcefields.. ready for next check
rfflist = {}
for rff in root.findall( 'radialforcefield' ):
rffid = rff.get( 'id', len( rfflist ) )
rfflist[rffid] = rff.get_native( 'center' )
# check on ALL geometry bodies
# for geomitem in geomitems:
# id = geomitem.get( 'id', '' )
# if geomitem.get_native( 'rotspeed', 0 ) != 0:
# rotspeedbodys.add( id )
# geomstatic = geomitem.get_native( 'static', False )
# #static vs motor check
# if geomstatic and id in motorbodys:
# self.addSceneError( 7, id )
#
# if not geomstatic:
# gx, gy = geomitem.get_native( 'center', ( 0, 0 ) )
# for rffid, rffpos in rfflist.items():
# if abs( gx - rffpos[0] + gy - rffpos[1] ) < 0.001:
# self.addSceneError( 8, ( id, rffid ) )
# finally some checks on unfixed spinning things
spinning = motorbodys | rotspeedbodys
spinningnohinge = spinning - hingebodys
for body in spinningnohinge:
self.addSceneError( 9, body )
hingedchildren = hingebodys & geomchildren
for hingedchild in hingedchildren:
self.addSceneError( 10, hingedchild )
#linearforcefield can have center but no size
#but CANNOT have size, but no center
for lff in root.findall( 'linearforcefield' ):
if lff.get( 'size' ) is not None:
if lff.get( 'center', '' ) == '':
self.addSceneError( 11, lff.get( 'id', '' ) )
return self._scene_issue_level != ISSUE_LEVEL_NONE
def _get_all_resource_ids( self, root, tag ):
resource_ids = set()
for resource in root.findall( './/' + tag ):
resource_ids.add( resource.get( 'path' ) + resource.attribute_meta( 'path' ).strip_extension )
return resource_ids
def _get_unused_resources( self ):
used = self._get_used_resources()
resources = self._get_all_resource_ids( self.resource_root, "Image" ) | self._get_all_resource_ids( self.resource_root, "Sound" )
unused = resources - used
return unused
def _remove_unused_resources( self, element, unused ):
self.suspend_undo()
to_remove = []
        def _recursive_remove( element ):
            for attribute_meta in element.meta.attributes:
                if attribute_meta.type == metaworld.PATH_TYPE:
                    value = element.get( attribute_meta.name )
                    if value and value + attribute_meta.strip_extension in unused:
                        to_remove.append( element )
            for child in element:
                _recursive_remove( child )
_recursive_remove( element )
for element in to_remove:
element.parent.remove( element )
self.activate_undo()
def _get_used_resources( self ):
used = set()
        # walk the scene and level trees and record the resource id of every
        # path attribute that is actually referenced
for root in ( self.scene_root, self.level_root ):
for element in root:
for attribute_meta in element.meta.attributes:
if attribute_meta.type == metaworld.PATH_TYPE:
if element.get( attribute_meta.name ):
used.add( element.get( attribute_meta.name ) + attribute_meta.strip_extension )
return used
def hasresrc_issue( self ):
root = self.resource_root
self._resrcissues = ''
self._resrc_issue_level = ISSUE_LEVEL_NONE
# confirm every file referenced exists
used_resources = self._get_used_resources()
image_resources = set()
for resource in root.findall( './/Image' ):
image_resources.add( resource.get( 'path' ) )
full_filename = os.path.join( self.game_model._amy_dir, resource.get( 'path' ) + resource.attribute_meta( 'path' ).strip_extension )
if ON_PLATFORM == PLATFORM_WIN:
#confirm extension on drive is lower case
real_filename = getRealFilename( full_filename )
real_ext = os.path.splitext( real_filename )[1]
if real_ext != ".png":
self.addResourceError( 201, resource.get( 'path' ) + real_ext )
unused_images = image_resources.difference( used_resources )
if len( unused_images ) != 0:
for unused in unused_images:
self.addResourceError( 202, unused )
sound_resources = set()
for resource in root.findall( './/Sound' ):
sound_resources.add( resource.get( 'path' ) )
full_filename = os.path.join( self.game_model._amy_dir, resource.get( 'path' ) + ".ogg" )
if ON_PLATFORM == PLATFORM_WIN:
#confirm extension on drive is lower case
real_filename = getRealFilename( full_filename )
real_ext = os.path.splitext( real_filename )[1]
if real_ext != ".ogg":
self.addResourceError( 203, resource.get( 'path' ) + real_ext )
unused_sounds = sound_resources.difference( used_resources )
if len( unused_sounds ) != 0:
for unused in unused_sounds:
self.addResourceError( 204, unused )
return self._resrc_issue_level != ISSUE_LEVEL_NONE
@property
def scene_issue_report( self ):
return self._sceneissues
@property
def level_issue_report( self ):
return self._levelissues
@property
def resrc_issue_report( self ):
return self._resrcissues
@property
def global_issue_report( self ):
return self._globalissues
def _isNumber( self, input ):
try:
f = float( input ) #@UnusedVariable
return True
except ValueError:
return False
def _cleanleveltree( self ):
pass
def _cleanscenetree( self ):
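        # Re-append hinges and motors so they end up last in the scene tree,
        # presumably so they are serialized after the bodies they reference.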
self.suspend_undo()
for hinge in self.scene_root.findall( 'hinge' ):
self.scene_root.remove( hinge )
self.scene_root.append( hinge )
for motor in self.scene_root.findall( 'motor' ):
self.scene_root.remove( motor )
self.scene_root.append( motor )
self.activate_undo()
def _cleanresourcetree( self ):
#removes any unused resources from the resource and text resource trees
self.suspend_undo()
root = self.resource_root
#ensure cAsE sensitive path is stored in resource file
#Only required on windows...
#If path was not CaSe SenSitivE match on Linux / Mac would be File not found earlier
if ON_PLATFORM == PLATFORM_WIN:
for resource in root.findall( './/Image' ):
full_filename = os.path.normpath( os.path.join( self.game_model._amy_dir, resource.get( 'path' ) + ".png" ) )
if os.path.exists( full_filename ):
#confirm extension on drive is lower case
len_wogdir = len( os.path.normpath( self.game_model._amy_dir ) ) + 1
real_filename = os.path.normpath( getRealFilename( full_filename ) )
real_file = os.path.splitext( real_filename )[0][len_wogdir:]
full_file = os.path.splitext( full_filename )[0][len_wogdir:]
if real_file != full_file:
print "Correcting Path", resource.get( 'id' ), full_file, "-->", real_file
resource.attribute_meta( 'path' ).set( resource, real_file )
for resource in root.findall( './/Sound' ):
full_filename = os.path.normpath( os.path.join( self.game_model._amy_dir, resource.get( 'path' ) + ".ogg" ) )
if os.path.exists( full_filename ):
#confirm extension on drive is lower case
                    len_wogdir = len( os.path.normpath( self.game_model._amy_dir ) ) + 1  # +1 skips the path separator (matches the Image branch above)
real_filename = os.path.normpath( getRealFilename( full_filename ) )
real_file = os.path.splitext( real_filename )[0][len_wogdir:]
full_file = os.path.splitext( full_filename )[0][len_wogdir:]
if real_file != full_file:
print "Correcting Path", resource.get( 'id' ), full_file, "-->", real_file
resource.attribute_meta( 'path' ).set( resource, real_file )
self.activate_undo()
def saveModifiedElements( self ):
"""Save the modified scene, level, resource tree."""
if not self.isReadOnly: # Discards change made on read-only level
name = self.name
dir = os.path.join( self.game_model._res_dir, STR_DIR_STUB, name )
if not os.path.isdir( dir ):
os.mkdir( dir )
os.mkdir( os.path.join( dir, 'animations' ) )
os.mkdir( os.path.join( dir, 'fx' ) )
os.mkdir( os.path.join( dir, 'scripts' ) )
os.mkdir( os.path.join( dir, 'textures' ) )
os.mkdir( os.path.join( dir, 'sounds' ) )
if self.__dirty_tracker.is_dirty_tree( metawog.TREE_LEVEL_GAME ):
if not self.element_issue_level( self.level_root ):
#clean tree caused an infinite loop when there was a missing ball
# so only clean trees with no issues
self._cleanleveltree()
self.game_model._saveUnPackedTree( dir, name + '.level', self.level_root.tree )
if self.__dirty_tracker.is_dirty_tree( metawog.TREE_LEVEL_RESOURCE ):
self.game_model._saveUnPackedTree( dir, name + '.resrc', self.resource_root.tree )
# ON Mac
# Convert all "custom" png to .png.binltl
# Only works with REAL PNG
if ON_PLATFORM == PLATFORM_MAC:
for image in self.resource_root.findall( './/Image' ):
if not self.game_model._isOriginalFile( image.get( 'path' ), 'png' ):
in_path = os.path.join( self.game_model._amy_dir, image.get( 'path' ) )
out_path = in_path + '.png.binltl'
in_path += '.png'
wogfile.png2pngbinltl( in_path, out_path )
if self.__dirty_tracker.is_dirty_tree( metawog.TREE_LEVEL_SCENE ):
if not self.element_issue_level( self.scene_root ):
# so only clean trees with no issues
self._cleanscenetree()
self.game_model._saveUnPackedTree( dir, name + '.scene', self.scene_root.tree )
self.__dirty_tracker.clean()
def clean_dirty_tracker( self ):
self.__dirty_tracker.clean()
def getImagePixmap( self, image_id ):
pixmap = self.game_model.pixmap_cache.get_pixmap( image_id )
if pixmap is None:
print 'Warning: invalid image reference:|', image_id, '|'
return pixmap
def updateResources( self ):
"""Ensures all image/sound resource present in the level directory
are in the resource tree.
Adds new resource to the resource tree if required.
"""
game_dir = os.path.normpath( self.game_model._amy_dir )
dir = os.path.join( game_dir, 'Data', STR_DIR_STUB, self.name )
if not os.path.isdir( dir ):
print 'Warning: level directory does not exist'
return []
resource_element = self.resource_root.find( './/Resources' )
if resource_element is None:
print 'Warning: root element not found in resource tree'
return []
added_elements = []
for tag, extension, subfolder in ( ( 'Image', 'png', 'textures' ), ( 'Sound', 'ogg', 'sounds' ) ):
known_paths = set()
for element in self.resource_root.findall( './/' + tag ):
path = os.path.normpath( os.path.splitext( element.get( 'path', '' ).lower() )[0] )
                # known paths are relative to the WOG top dir, in unix format and lower case, without the file extension
known_paths.add( path )
existing_paths = glob.glob( os.path.join( dir, subfolder, '*.' + extension ) )
for existing_path in existing_paths:
existing_path = existing_path[len( game_dir ) + 1:] # makes path relative to top dir
existing_path = os.path.splitext( existing_path )[0] # strip file extension
path = os.path.normpath( existing_path ).lower()
if path not in known_paths:
resource_path = existing_path.replace( "\\", "/" )
meta_element = metawog.TREE_LEVEL_RESOURCE.find_element_meta_by_tag( tag )
new_resource = metaworld.Element( meta_element, {'path':resource_path} )
resource_element.append( new_resource )
added_elements.append( new_resource )
return added_elements
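    # Hedged sketch (illustration only, not called by the editor) of the path
    # convention used above: known paths are stored relative to the game dir,
    # lower-cased and extension-less, while resource paths keep their original
    # case with forward slashes. The sample path below is invented.
    @staticmethod
    def _example_resource_paths():
        existing_path = os.path.join( 'Data', STR_DIR_STUB, 'MyLevel', 'textures', 'Ball.png' )
        known_path = os.path.normpath( os.path.splitext( existing_path )[0] ).lower()
        resource_path = os.path.splitext( existing_path )[0].replace( "\\", "/" )
        return known_path, resource_path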
#@DaB New Functionality - Import resources direct from files
def importError( self ):
return self._importError
def importResources( self, importedfiles, res_dir ):
"""Import Resources direct from files into the level
If files are located outside the Wog/res folder it copies them
png -> Data/levels/{name}/textures
ogg -> Data/levels/{name}/sounds
"""
self._importError = None
res_dir = os.path.normpath( res_dir )
game_dir = os.path.split( res_dir )[0]
resource_element = self.resource_root.find( './/Resources' )
if resource_element is None:
print 'Warning: root element not found in resource tree'
return []
all_local = True
includesogg = False
for file in importedfiles:
file = os.path.normpath( file )
# "Are you Local?"
# Check if the files were imported from outside the Res folder
fileext = os.path.splitext( file )[1][1:4]
if fileext.lower() == "ogg":
includesogg = True
if file[:len( res_dir )] != res_dir:
all_local = False
if not all_local and self.isReadOnly:
self._importError = ["Cannot import external files...!", "You cannot import external files into the original levels.\nIf you really want to do this... Clone the level first!"]
return []
if not all_local:
level_path = os.path.join( res_dir, STR_DIR_STUB, self.name )
if not os.path.isdir( level_path ):
os.mkdir( level_path )
os.mkdir( os.path.join( level_path, 'animations' ) )
os.mkdir( os.path.join( level_path, 'fx' ) )
os.mkdir( os.path.join( level_path, 'scripts' ) )
os.mkdir( os.path.join( level_path, 'textures' ) )
os.mkdir( os.path.join( level_path, 'sounds' ) )
if includesogg:
#' confirm / create import folder'
music_path = os.path.join( res_dir, STR_DIR_STUB, 'sounds', self.name )
if not os.path.isdir( music_path ):
os.mkdir( music_path )
localfiles = []
resmap = {'png':( 'Image', 'textures' ), 'ogg':( 'Sound', 'sounds' )}
for file in importedfiles:
# "Are you Local?"
fileext = os.path.splitext( file )[1][1:4]
if file[:len( res_dir )] != res_dir:
                #@DaB - Ensure that if the file is copied, its new extension is always lower case
fname = os.path.splitext( os.path.split( file )[1] )[0]
fileext = fileext.lower()
newfile = os.path.join( res_dir, STR_DIR_STUB, self.name, resmap[fileext][1], fname + "." + fileext )
copy2( file, newfile )
localfiles.append( newfile )
else:
#@DaB - File Extension Capitalization Check
if fileext != fileext.lower():
#Must be png or ogg to be compatible with LINUX and MAC
self._importError = ["File Extension CAPITALIZATION Warning!", "To be compatible with Linux and Mac - All file extensions must be lower case.\nYou should rename the file below, and then import it again.\n\n" + file + " skipped!"]
else:
localfiles.append( file )
added_elements = []
known_paths = {'Image':set(), 'Sound':set()}
for ext in resmap:
for element in self.resource_root.findall( './/' + resmap[ext][0] ):
path = os.path.normpath( os.path.splitext( element.get( 'path', '' ).lower() )[0] )
                # known paths are relative to the game top dir, in unix format, lower case, without the file extension
known_paths[resmap[ext][0]].add( path )
for file in localfiles:
file = file[len( game_dir ) + 1:] # makes path relative to top dir
filei = os.path.splitext( file )
path = os.path.normpath( filei[0] ).lower()
ext = filei[1][1:4]
if path not in known_paths[resmap[ext][0]]:
resource_path = filei[0].replace( "\\", "/" )
meta_element = metawog.TREE_LEVEL_RESOURCE.find_element_meta_by_tag( resmap[ext][0] )
new_resource = metaworld.Element( meta_element, {'path':resource_path} )
resource_element.append( new_resource )
added_elements.append( new_resource )
return added_elements
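# Hedged usage sketch (illustration only, not part of the editor): driving
# importResources for a loaded level. 'level_world' and 'res_dir' stand in for
# a world returned by GameModel.selectLevel and the game's resource directory.
def _example_import_images( level_world, res_dir ):
    files = [os.path.join( res_dir, STR_DIR_STUB, level_world.name, 'textures', 'ball.png' )]
    added = level_world.importResources( files, res_dir )
    if not added and level_world.importError() is not None:
        print level_world.importError()[0]
    return added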
class MainWindow( QtGui.QMainWindow ):
def __init__( self, parent = None ):
QtGui.QMainWindow.__init__( self, parent )
self.setWindowIcon( QtGui.QIcon( ":/images/icon.png" ) )
self.setAttribute( Qt.WA_DeleteOnClose )
self.actionTimer = None
self.statusTimer = None
self._amy_path = None # Path to 'amy' executable
        self.recentFiles = None
self.createMDIArea()
self.createActions()
self.createMenus()
self.createToolBars()
self.createStatusBar()
self.createDockWindows()
self.setWindowTitle( self.tr( "Amy In Da Farm! Editor" ) )
self._readSettings()
self._game_model = None
if self._amy_path:
#Check that the stored path is still valid
if not os.path.exists( self._amy_path ):
self.changeAmyDir()
else:
self._reloadGameModel()
else:
# if amy_path is missing, prompt for it.
self.changeAmyDir()
def changeAmyDir( self ):
amy_path = QtGui.QFileDialog.getOpenFileName( self,
self.tr( 'Select Amy In Da Farm! program in the folder you want to edit' ),
r'',
self.tr( 'Amy In Da Farm! (Amy*)' ) )
if amy_path.isEmpty(): # user canceled action
return
self._amy_path = os.path.normpath( unicode( amy_path ) )
#print "_amy_path=",self._amy_path
self._reloadGameModel()
def _reloadGameModel( self ):
try:
self._game_model = GameModel( self._amy_path, self )
except GameModelException, e:
QtGui.QMessageBox.warning( self, self.tr( "Loading Amy In Da Farm! levels (" + APP_NAME_PROPER + " " + CURRENT_VERSION + ")" ),
unicode( e ) )
def _updateRecentFiles( self ):
if self.recentFiles is None:
numRecentFiles = 0
else:
numRecentFiles = min( len( self.recentFiles ), MAXRECENTFILES )
for i in range( 0, numRecentFiles ):
self.recentfile_actions[i].setText( self.recentFiles[i] )
self.recentfile_actions[i].setVisible( True )
for i in range( numRecentFiles, MAXRECENTFILES ):
self.recentfile_actions[i].setVisible( False )
        self.separatorRecent.setVisible( numRecentFiles > 0 )
def _setRecentFile( self, filename ):
self.recentFiles.removeAll( filename )
self.recentFiles.prepend( filename )
if len( self.recentFiles ) > MAXRECENTFILES:
self.recentFiles = self.recentFiles[:MAXRECENTFILES]
self._updateRecentFiles()
def on_recentfile_action( self ):
action = self.sender()
name = unicode( action.text() )
if self.open_level_view_by_name( name ):
self._setRecentFile( name )
def editLevel( self ):
if self._game_model:
dialog = QtGui.QDialog()
ui = editleveldialog.Ui_EditLevelDialog()
ui.setupUi( dialog , set( self._game_model.names ), metawog.LEVELS_ORIGINAL )
if dialog.exec_() and ui.levelList.currentItem:
settings = QtCore.QSettings()
settings.beginGroup( "MainWindow" )
settings.setValue( "level_filter", ui.comboBox.currentIndex() )
settings.endGroup()
name = unicode( ui.levelList.currentItem().text() )
if self.open_level_view_by_name( name ):
self._setRecentFile( name )
def open_level_view_by_name( self, name ):
try:
world = self._game_model.selectLevel( name )
except GameModelException, e:
QtGui.QMessageBox.warning( self, self.tr( "Failed to load level! (" + APP_NAME_PROPER + " " + CURRENT_VERSION + ")" ),
unicode( e ) )
else:
sub_window = self._findWorldMDIView( world )
if sub_window:
self.mdiArea.setActiveSubWindow( sub_window )
else:
self._addGraphicView( world )
return True
return False
def _addGraphicView( self, world ):
"""Adds a new MDI GraphicView window for the specified level."""
level_view = levelview.LevelGraphicView( world, self.view_actions, self.common_actions )
sub_window = self.mdiArea.addSubWindow( level_view )
self.connect( level_view, QtCore.SIGNAL( 'mouseMovedInScene(PyQt_PyObject,PyQt_PyObject)' ),
self._updateMouseScenePosInStatusBar )
self.connect( sub_window, QtCore.SIGNAL( 'aboutToActivate()' ),
level_view.selectLevelOnSubWindowActivation )
world.set_selection( world.scene_root )
world.setView( level_view )
level_view.show()
def _updateMouseScenePosInStatusBar( self, x, y ):
"""Called whenever the mouse move in the LevelView."""
# Round displayed coordinate to 2dp (0.01)
x = round( x, 2 )
y = -round( y, 2 ) # Reverse transformation done when mapping to scene (in Qt 0 = top, in WOG 0 = bottom)
self._mousePositionLabel.setText( self.tr( 'x: %1 y: %2' ).arg( x ).arg( y ) )
def _findWorldMDIView( self, world ):
"""Search for an existing MDI window for level name.
Return the LevelGraphicView widget, or None if not found."""
for window in self.mdiArea.subWindowList():
sub_window = window.widget()
if sub_window.world == world:
return window
return None
def get_active_view( self ):
"""Returns the view of the active MDI window.
Returns None if no view is active.
"""
window = self.mdiArea.activeSubWindow()
if window:
return window.widget()
return None
def getCurrentModel( self ):
"""Returns the level model of the active MDI window."""
window = self.mdiArea.activeSubWindow()
if window:
return window.widget().getModel()
return None
#@DaB - New save routines to save ONLY the current Level
def saveCurrent( self ):
if self._game_model:
model = self.getCurrentModel()
if model is not None:
if model.isReadOnly:
if model.is_dirty:
                        QtGui.QMessageBox.warning( self, self.tr( "Cannot save Amy In Da Farm! standard levels!" ),
                            self.tr( 'You cannot save changes made to levels that come with Amy In Da Farm!.\n'
                                     'Instead, clone the level using the "Clone selected level" tool.\n'
                                     'Do so now, or your changes will be lost once you quit the editor' ) )
return False
return True
else:
#Check for issues
try:
model.saveModifiedElements()
self.statusBar().showMessage( self.tr( "Saved " + model.name ), 2000 )
return True
except ( IOError, OSError ), e:
QtGui.QMessageBox.warning( self, self.tr( "Failed saving levels (" + APP_NAME_PROPER + " " + CURRENT_VERSION + ")" ), unicode( e ) )
return False
def saveIT( self ):
if self.saveCurrent():
QtGui.QApplication.setOverrideCursor( Qt.WaitCursor )
model = self.getCurrentModel()
issue_level = model.hasIssues()
QtGui.QApplication.restoreOverrideCursor()
if issue_level >= ISSUE_LEVEL_WARNING:
txtIssue = self.tr( """<p>There are unresolved issues with this level that may cause problems.<br>
                    You should fix these before you try to play the level.</p>""" )
txtIssue = txtIssue + self.tr( model.getIssues() )
txtIssue = txtIssue + self.tr( '<br>The level has been saved!' )
QtGui.QMessageBox.warning( self, self.tr( "This level has issues!" ),
txtIssue )
def saveAndPlayLevel( self ):
#@DaB only save current level, and don't "play" if it has "Issues"
if self.saveCurrent():
model = self.getCurrentModel()
if model:
issue_level = model.hasIssues()
if issue_level >= ISSUE_LEVEL_CRITICAL:
                    txtIssue = self.tr( """<p>There are CRITICAL issues with this level that will cause Amy In Da Farm! to crash.<br>
You must fix these before you try to play the level.</p>""" )
txtIssue = txtIssue + self.tr( model.getIssues() )
txtIssue = txtIssue + self.tr( '<br>The level has been saved!' )
QtGui.QMessageBox.warning( self, self.tr( "This level has CRITICAL issues!" ),
txtIssue )
elif issue_level > ISSUE_LEVEL_NONE:
txtIssue = self.tr( """<p>There are Advice/Warnings for this level that may cause problems.<br>
You should fix these before you try to play the level.</p>""" )
txtIssue = txtIssue + self.tr( model.getIssues() )
txtIssue = txtIssue + self.tr( '<br>Click OK to Play anyway, or click Cancel to go back.' )
ret = QtGui.QMessageBox.warning( self, self.tr( "This level has warnings!" ),
txtIssue, QtGui.QMessageBox.Ok | QtGui.QMessageBox.Cancel )
if ret == QtGui.QMessageBox.Ok:
self._game_model.playLevel( model )
else:
self._game_model.playLevel( model )
else:
self.statusBar().showMessage( self.tr( "You must select a level to play" ), 2000 )
def newLevel( self ):
"""Creates a new blank level."""
new_name = self._pickNewName( is_cloning = False )
if new_name:
try:
self._game_model.newLevel( new_name )
world = self._game_model.selectLevel( new_name )
self._addGraphicView( world )
except ( IOError, OSError ), e:
QtGui.QMessageBox.warning( self, self.tr( "Failed to create the new level! (" + APP_NAME_PROPER + " " + CURRENT_VERSION + ")" ),
unicode( e ) )
def _pickNewName( self, is_cloning = False ):
if self._game_model:
dialog = QtGui.QDialog()
ui = newleveldialog_ui.Ui_NewLevelDialog()
ui.setupUi( dialog )
reg_ex = QtCore.QRegExp( '[A-Za-z][0-9A-Za-z_][0-9A-Za-z_]+' )
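            # accepts names made of a letter followed by at least two more letters, digits or underscores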
validator = QtGui.QRegExpValidator( reg_ex, dialog )
ui.levelName.setValidator( validator )
if is_cloning:
dialog.setWindowTitle( tr( "NewLevelDialog", "Cloning Level" ) )
if dialog.exec_():
new_name = str( ui.levelName.text() )
existing_names = [name.lower() for name in self._game_model.names]
if new_name.lower() not in existing_names:
return new_name
                QtGui.QMessageBox.warning( self, self.tr( "Cannot create level!" ),
                    self.tr( "There is already a level named '%1'" ).arg( new_name ) )
return None
def cloneLevel( self ):
"""Clone the selected level."""
current_model = self.getCurrentModel()
if current_model:
new_name = self._pickNewName( is_cloning = True )
if new_name:
try:
self._game_model.cloneLevel( current_model.name, new_name )
world = self._game_model.selectLevel( new_name )
self._addGraphicView( world )
self._setRecentFile( new_name )
except ( IOError, OSError ), e:
QtGui.QMessageBox.warning( self, self.tr( "Failed to create the new cloned level! (" + APP_NAME_PROPER + " " + CURRENT_VERSION + ")" ), unicode( e ) )
def updateResources( self ):
"""Adds the required resource in the level based on existing file."""
model = self.getCurrentModel()
if model:
model.game_model.pixmap_cache.refresh()
added_resource_elements = model.updateResources()
if added_resource_elements:
model.set_selection( added_resource_elements )
model._view.refreshFromModel()
def cleanResources( self ):
model = self.getCurrentModel()
if model:
unused = model._get_unused_resources()
unusedlist = ''
for id in unused:
unusedlist += id + '\n'
if unusedlist != '':
unusedlist = "The following resources are unused\n" + unusedlist + "\nAre you sure you want to remove them?"
ret = QtGui.QMessageBox.warning( self, self.tr( "Remove unused resources" ),
unusedlist, QtGui.QMessageBox.Ok | QtGui.QMessageBox.Cancel )
if ret == QtGui.QMessageBox.Ok:
model._remove_unused_resources( model.resource_root, unused )
else:
QtGui.QMessageBox.warning( self, self.tr( "Remove unused resources" ),
self.tr( "There are no unused resources\n" ) )
def importResources( self ):
"""Adds the required resource in the level based on existing file."""
model = self.getCurrentModel()
if model:
#game_dir = os.path.normpath( os.path.split( self._amy_path )[0] )
#res_dir = os.path.join( game_dir, 'res' )
dir = os.path.join( self._game_model._res_dir, STR_DIR_STUB )
files = QtGui.QFileDialog.getOpenFileNames( self,
self.tr( 'Select the Images to import...' ),
dir,
self.tr( 'Images (*.png)' ) )
if files.isEmpty(): # user canceled action
return
safefiles = []
for file in files:
safefiles.append( os.path.normpath( str( file ) ) )
added_resource_elements = model.importResources( safefiles, self._game_model._res_dir )
if added_resource_elements:
model.set_selection( added_resource_elements )
else:
ie = model.importError()
if ie is not None:
QtGui.QMessageBox.warning( self, self.tr( ie[0] ),
self.tr( ie[1] ) )
def about( self ):
QtGui.QMessageBox.about( self, self.tr( "About Amy In Da Farm! Level Editor " + CURRENT_VERSION ),
self.tr( """<p>Amy In Da Farm! Level Editor helps you create new levels for Amy In Da Farm!.<p>
<p>Developer Page, Sources and Reference Guide:<br>
<a href="http://github.com/reven86/dfg-amy-editor">http://github.com/reven86/dfg-amy-editor</a></p>
<p>Copyright 2010, Andrew Karpushin <andrew.karpushin at dreamfarmgames.com></p>
<p> <br>Original based on World Of Goo Level Editor (WooGLE) by DaftasBrush: (v0.77)</p>
<p>Copyright 2010, DaftasBrush<br>
<a href="http://goofans.com/download/utility/world-of-goo-level-editor">http://goofans.com/download/utility/world-of-goo-level-editor</a></p>
<p> <br>Original Sourceforge project: (v0.5)
<a href="http://www.sourceforge.net/projects/wogedit">http://www.sourceforge.net/projects/wogedit</a><br>
Copyright 2008-2009, NitroZark <nitrozark at users.sourceforget.net></p>""" ) )
def on_cut_action( self ):
elements = self.on_copy_action( is_cut_action = True )
if elements:
for element in elements:
if element.meta.read_only:
#Messagebox
QtGui.QMessageBox.warning( self, self.tr( "Cannot Cut read only element!" ),
self.tr( 'This element is read only.\n'
'It cannot be cut' ) )
return
self.on_delete_action( is_cut_action = True )
self.statusBar().showMessage(
self.tr( 'Element "%s" cut to clipboard' %
elements[0].tag ), 1000 )
def on_copy_action( self, is_cut_action = False ):
world = self.getCurrentModel()
if world:
elements = list( world.selected_elements )
on_clipboard = set()
            # use the public Element() factory; _ElementInterface is gone from
            # Python 2.7's ElementTree
            clipboard_element = xml.etree.ElementTree.Element( 'WooGLEClipboard', {} )
for element in elements:
on_clipboard.add( element.tag )
xml_data = element.to_xml_with_meta()
clipboard_element.append( xml.etree.ElementTree.fromstring( xml_data ) )
clipboard = QtGui.QApplication.clipboard()
if len( on_clipboard ) == 1:
clipboard_element.set( 'type', list( on_clipboard )[0] )
else:
clipboard_element.set( 'type', "Various" )
scene = self.get_active_view().scene()
# bounding rect of selected items
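            # mybrect collects the extreme edges of the selected items' bounding
            # rects; the midpoint is stored on the clipboard ('posx'/'posy', y
            # negated for WOG coordinates) so a paste can offset elements
            # relative to the copy position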
i = 0
for item in scene.selectedItems():
if i == 0:
brect = item.mapToScene( item.boundingRect() ).boundingRect()
mybrect = [brect.left(), brect.right(), brect.bottom(), brect.top()]
else:
brect = item.mapToScene( item.boundingRect() ).boundingRect()
if brect.left() < mybrect[0]:
mybrect[0] = brect.left()
if brect.right() > mybrect[1]:
mybrect[1] = brect.right()
if brect.bottom() < mybrect[2]:
mybrect[2] = brect.bottom()
if brect.top() > mybrect[3]:
mybrect[3] = brect.top()
i += 1
clipboard_element.set( 'posx', str( ( mybrect[0] + mybrect[1] ) * 0.5 ) )
clipboard_element.set( 'posy', str( -( mybrect[2] + mybrect[3] ) * 0.5 ) )
xml_data = xml.etree.ElementTree.tostring( clipboard_element, 'utf-8' )
clipboard.setText( xml_data )
if not is_cut_action:
self.statusBar().showMessage(
self.tr( '%d Element "%s" copied to clipboard' %
( len( elements ), clipboard_element.get( 'type' ) ) ), 1000 )
self.common_actions['paste'].setText( "Paste In Place (" + clipboard_element.get( 'type' ) + ")" )
self.common_actions['pastehere'].setText( "Paste Here (" + clipboard_element.get( 'type' ) + ")" )
return elements
def on_pasteto_action( self ):
clipboard = QtGui.QApplication.clipboard()
xml_data = unicode( clipboard.text() )
world = self.getCurrentModel()
if world is None or not xml_data:
return
clipboard_element = xml.etree.ElementTree.fromstring( xml_data )
view = self.get_active_view()
paste_posx, paste_posy = view._last_pos.x(), -view._last_pos.y()
copy_posx, copy_posy = float( clipboard_element.get( 'posx', 0 ) ), float( clipboard_element.get( 'posy', 0 ) )
pasted_elements = []
for clip_child in clipboard_element.getchildren():
xml_data = xml.etree.ElementTree.tostring( clip_child, 'utf-8' )
for element in [tree.root for tree in world.trees]:
child_elements = element.make_detached_child_from_xml( xml_data )
if child_elements:
pasted_elements.extend( child_elements )
for child_element in child_elements:
# find the pos attribute in the meta
# set it to view._last_release_at
pos_attribute = self._getPositionAttribute( child_element )
if pos_attribute is not None:
old_pos = pos_attribute.get_native( child_element, ( 0, 0 ) )
                            if len( clipboard_element ) == 1:
pos_attribute.set_native( child_element, [view._last_pos.x(), -view._last_pos.y()] )
else:
pos_attribute.set_native( child_element, [old_pos[0] + paste_posx - copy_posx, old_pos[1] + paste_posy - copy_posy] )
element.safe_identifier_insert( len( element ), child_element )
break
if len( pasted_elements ) >= 1:
world.set_selection( pasted_elements )
def _getPositionAttribute( self, element ):
for attribute_meta in element.meta.attributes:
if attribute_meta.type == metaworld.XY_TYPE:
if attribute_meta.position:
return attribute_meta
return None
def on_paste_action( self ):
clipboard = QtGui.QApplication.clipboard()
xml_data = unicode( clipboard.text() )
world = self.getCurrentModel()
if world is None or not xml_data:
return
elements = list( world.selected_elements )
if len( elements ) == 0: # Allow pasting to root when no selection
elements = [tree.root for tree in world.trees]
        # Try to paste into one of the selected elements; stop as soon as one succeeds
clipboard_element = xml.etree.ElementTree.fromstring( xml_data )
pasted_elements = []
for clip_child in clipboard_element.getchildren():
xml_data = xml.etree.ElementTree.tostring( clip_child, 'utf-8' )
for element in elements:
while element is not None:
child_elements = element.make_detached_child_from_xml( xml_data )
if child_elements:
for child_element in child_elements:
element.safe_identifier_insert( len( element ), child_element )
pasted_elements.extend( child_elements )
break
element = element.parent
if len( pasted_elements ) >= 1:
element.world.set_selection( pasted_elements )
def on_undo_action( self ):
world = self.getCurrentModel()
if world is None:
return
world.undo()
def on_redo_action( self ):
world = self.getCurrentModel()
if world is None:
return
world.redo()
def on_delete_action( self, is_cut_action = False ):
world = self.getCurrentModel()
if world is None:
return
deleted_elements = []
previous_element = None
for element in list( world.selected_elements ):
if element.meta.read_only:
#messagebox
QtGui.QMessageBox.warning( self, self.tr( "Cannot delete read only element!" ),
self.tr( 'This element is read only.\n'
'It cannot be deleted' ) )
return 0
elif not element.is_root():
if element.previous_element() not in list( world.selected_elements ):
previous_element = element.previous_element()
deleted_elements.append( element.tag )
element.parent.remove( element )
if is_cut_action:
return len( deleted_elements )
if deleted_elements:
self.statusBar().showMessage(
self.tr( 'Deleted %d element(s)' % len( deleted_elements ) ), 1000 )
world.set_selection( previous_element )
    def _on_view_tool_activated( self, tool_name ):
active_view = self.get_active_view()
if active_view is not None:
active_view.tool_activated( tool_name )
def on_pan_tool_action( self ):
        self._on_view_tool_activated( levelview.TOOL_PAN )
def on_move_tool_action( self ):
        self._on_view_tool_activated( levelview.TOOL_MOVE )
def onRefreshAction( self ):
"""Called multiple time per second. Used to refresh enabled flags of actions."""
has_amy_dir = self._game_model is not None
        #@DaB - Now that save and "save and play" only act on the
        #       current level, it's better if the toolbar buttons
        #       change state based on the current level, rather than on all levels
currentModel = self.getCurrentModel()
is_selected = currentModel is not None
can_select = is_selected and self.view_actions[levelview.TOOL_MOVE].isChecked()
if is_selected:
can_save = has_amy_dir and currentModel.is_dirty
element_is_selected = can_select and len( currentModel.selected_elements ) > 0
can_import = is_selected and not currentModel.isReadOnly
can_undo = currentModel.can_undo
can_redo = currentModel.can_redo
if currentModel.is_dirty:
if currentModel.isReadOnly:
self.mdiArea.activeSubWindow().setWindowIcon( QtGui.QIcon ( ':/images/nosave.png' ) )
else:
self.mdiArea.activeSubWindow().setWindowIcon( QtGui.QIcon ( ':/images/dirty.png' ) )
else:
self.mdiArea.activeSubWindow().setWindowIcon( QtGui.QIcon ( ':/images/clean.png' ) )
else:
can_save = False
element_is_selected = False
can_import = False
can_undo = False
can_redo = False
self.editLevelAction.setEnabled( has_amy_dir )
self.newLevelAction.setEnabled( has_amy_dir )
self.cloneLevelAction.setEnabled( is_selected )
        self.saveAction.setEnabled( can_save )
self.playAction.setEnabled( is_selected )
#Edit Menu / ToolBar
self.common_actions['cut'].setEnabled ( element_is_selected )
self.common_actions['copy'].setEnabled ( element_is_selected )
self.common_actions['paste'].setEnabled ( is_selected )
self.common_actions['delete'].setEnabled ( element_is_selected )
self.undoAction.setEnabled ( can_undo )
self.redoAction.setEnabled ( can_redo )
#Resources
self.importResourcesAction.setEnabled ( can_import )
self.cleanResourcesAction.setEnabled ( can_import )
self.updateResourcesAction.setEnabled( can_import )
self.addItemToolBar.setEnabled( can_select )
self.showhideToolBar.setEnabled( is_selected )
active_view = self.get_active_view()
enabled_view_tools = set()
if active_view:
enabled_view_tools = active_view.get_enabled_view_tools()
for name, action in self.view_actions.iteritems():
is_enabled = name in enabled_view_tools
action.setEnabled( is_enabled )
if self.view_action_group.checkedAction() is None:
self.view_actions[levelview.TOOL_MOVE].setChecked( True )
def _on_refresh_element_status( self ):
# broadcast the event to all ElementIssueTracker
louie.send_minimal( metaworldui.RefreshElementIssues )
def createMDIArea( self ):
self.mdiArea = QtGui.QMdiArea()
self.mdiArea.setViewMode( QtGui.QMdiArea.TabbedView )
for thing in self.mdiArea.findChildren( QtGui.QTabBar ):
thing.setTabsClosable( True )
self.connect ( thing, QtCore.SIGNAL( "tabCloseRequested(int)" ), self.on_closeTab )
self.setCentralWidget( self.mdiArea )
def on_closeTab( self, index ):
sub = self.mdiArea.subWindowList()[index]
sub.close()
def createActions( self ):
self.changeAmyDirAction = qthelper.action( self, handler = self.changeAmyDir,
icon = ":/images/open.png",
text = "&Change Amy In Da Farm! directory...",
shortcut = QtGui.QKeySequence.Open,
status_tip = "Change Amy In Da Farm! top-directory" )
self.editLevelAction = qthelper.action( self, handler = self.editLevel,
icon = ":/images/icon-amy-level.png",
text = "&Edit existing level...",
shortcut = "Ctrl+L",
status_tip = "Select a level to edit" )
self.newLevelAction = qthelper.action( self, handler = self.newLevel,
icon = ":/images/icon-amy-new-level2.png",
text = "&New level...",
shortcut = QtGui.QKeySequence.New,
status_tip = "Creates a new level" )
self.cloneLevelAction = qthelper.action( self, handler = self.cloneLevel,
icon = ":/images/icon-amy-clone-level.png",
text = "&Clone selected level...",
shortcut = "Ctrl+D",
status_tip = "Clone the selected level" )
self.saveAction = qthelper.action( self, handler = self.saveIT,
icon = ":/images/save.png",
text = "&Save...",
shortcut = QtGui.QKeySequence.Save,
status_tip = "Saves the Level" )
self.playAction = qthelper.action( self, handler = self.saveAndPlayLevel,
icon = ":/images/play.png",
text = "&Save and play Level...",
shortcut = "Ctrl+P",
status_tip = "Save and play the selected level" )
self.updateResourcesAction = qthelper.action( self,
handler = self.updateResources,
icon = ":/images/update-level-resources.png",
text = "&Update level resources...",
shortcut = "Ctrl+U",
status_tip = "Adds automatically all .png & .ogg files in the level directory to the level resources" )
self.cleanResourcesAction = qthelper.action( self,
handler = self.cleanResources,
icon = ":/images/cleanres.png",
text = "&Clean Resources",
status_tip = "Removes any unused resource from the level." )
self.importResourcesAction = qthelper.action( self,
handler = self.importResources,
icon = ":/images/importres.png",
text = "&Import images...",
shortcut = "Ctrl+I",
status_tip = "Adds images (png) to the level resources" )
self.quitAct = qthelper.action( self, handler = self.close,
text = "&Quit",
shortcut = "Ctrl+Q",
status_tip = "Quit the application" )
self.aboutAct = qthelper.action( self, handler = self.about,
icon = ":/images/icon.png",
text = "&About",
status_tip = "Show the application's About box" )
self.recentfile_actions = [qthelper.action( self, handler = self.on_recentfile_action, visible = False )
for i in range( 0, MAXRECENTFILES )] #@UnusedVariable
self.common_actions = {
'cut': qthelper.action( self, handler = self.on_cut_action,
icon = ":/images/cut.png",
text = "Cu&t",
shortcut = QtGui.QKeySequence.Cut ),
'copy': qthelper.action( self, handler = self.on_copy_action,
icon = ":/images/copy.png",
text = "&Copy",
shortcut = QtGui.QKeySequence.Copy ),
'paste': qthelper.action( self, handler = self.on_paste_action,
icon = ":/images/paste.png",
text = "Paste &In Place",
shortcut = "Ctrl+Shift+V" ),
'pastehere': qthelper.action( self, handler = self.on_pasteto_action,
icon = ":/images/paste.png",
text = "&Paste Here", shortcut = QtGui.QKeySequence.Paste ),
'delete': qthelper.action( self, handler = self.on_delete_action,
icon = ":/images/delete.png",
text = "&Delete",
shortcut = QtGui.QKeySequence.Delete )
}
self.undoAction = qthelper.action( self, handler = self.on_undo_action,
icon = ":/images/undo.png",
text = "&Undo",
shortcut = QtGui.QKeySequence.Undo )
self.redoAction = qthelper.action( self, handler = self.on_redo_action,
icon = ":/images/redo.png",
text = "&Redo",
shortcut = QtGui.QKeySequence.Redo )
class ShowHideFactory( object ):
def __init__( self, window, elements ):
self.window = window
self.elements = elements
def __call__( self ):
lv = self.window.get_active_view()
if lv is not None:
for elementtype in self.elements:
currentstate = lv.get_element_state( elementtype )
newstate = 2 - currentstate
lv.set_element_state( elementtype, newstate )
lv.refreshFromModel()
self.showhide_actions = {
'camera': qthelper.action( self, handler = ShowHideFactory( self , ['camera', 'poi'] ),
text = "Show/Hide Camera" , icon = ":/images/show-camera.png" ),
'fields': qthelper.action( self, handler = ShowHideFactory( self , ['linearforcefield', 'radialforcefield'] ),
text = "Show/Hide Forcefields", icon = ":/images/show-physic.png" ),
'geom': qthelper.action( self, handler = ShowHideFactory( self , ['rectangle', 'circle', 'compositegeom', 'levelexit', 'line', 'hinge'] ),
text = "Show/Hide Geometry" , icon = ":/images/show-geom.png" ),
'gfx': qthelper.action( self, handler = ShowHideFactory( self , ['scenelayer', 'pixmap'] ),
text = "Show/Hide Graphics" , icon = ":/images/show-gfx.png" ),
'labels': qthelper.action( self, handler = ShowHideFactory( self , ['label'] ),
text = "Show/Hide Labels" , icon = ":/images/show-label.png" )
}
self.view_action_group = QtGui.QActionGroup( self )
self.view_actions = {
levelview.TOOL_PAN: qthelper.action( self,
handler = self.on_pan_tool_action,
icon = ":/images/zoom.png",
text = "&Zoom and Pan view (F)",
shortcut = 'F',
checkable = True ),
levelview.TOOL_MOVE: qthelper.action( self,
handler = self.on_move_tool_action,
icon = ":/images/tool-move.png",
text = "&Select, Move and Resize",
shortcut = 'T',
checked = True,
checkable = True )
}
for action in self.view_actions.itervalues():
self.view_action_group.addAction( action )
self.additem_actions = {
'line':qthelper.action( self,
handler = AddItemFactory( self, 'scene', 'line', {} ),
icon = ":/images/addline.png",
text = "&Add a Line" ),
'rectangle':qthelper.action( self,
handler = AddItemFactory( self, 'scene', 'rectangle', {} ),
icon = ":/images/addrect.png",
text = "&Add Rectangle" ),
'circle': qthelper.action( self,
handler = AddItemFactory( self, 'scene', 'circle', {} ),
icon = ":/images/addcircle.png",
text = "&Add Circle" ),
'image': qthelper.action( self,
handler = AddItemFactory( self, 'scene', 'scenelayer', {} ),
icon = ":/images/group-image.png",
text = "&Add Image (SceneLayer)" ),
'compgeom': qthelper.action( self,
handler = AddItemFactory( self, 'scene', 'compositegeom', {} ),
icon = ":/images/compgeom.png",
text = "&Add Composite Geometry (Parent)" ),
'childrect':qthelper.action( self,
handler = AddItemFactory( self, 'compositegeom', 'rectangle', {} ),
icon = ":/images/childrect.png",
text = "&Add Child Rectangle" ),
'childcircle':qthelper.action( self,
handler = AddItemFactory( self, 'compositegeom', 'circle', {} ),
icon = ":/images/childcircle.png",
text = "&Add Child Circle" ),
'hinge': qthelper.action( self,
handler = AddItemFactory( self, 'scene', 'hinge', {} ),
icon = ":/images/hinge.png",
text = "&Add Hinge" ),
'lff': qthelper.action( self,
handler = AddItemFactory( self, 'scene', 'linearforcefield', {'size':'100,100'} ),
icon = ":/images/lff.png",
text = "&Add Linear force Field" ),
'rff': qthelper.action( self,
handler = AddItemFactory( self, 'scene', 'radialforcefield', {} ),
icon = ":/images/rff.png",
text = "&Add Radial force Field" ),
'label': qthelper.action( self,
handler = AddItemFactory( self, 'scene', 'label', {} ),
icon = ":/images/label.png",
text = "&Add Label" )
}
self.actionTimer = QtCore.QTimer( self )
self.connect( self.actionTimer, QtCore.SIGNAL( "timeout()" ), self.onRefreshAction )
self.actionTimer.start( 250 ) # Refresh action enabled flag every 250ms.
self.statusTimer = QtCore.QTimer( self )
self.connect( self.statusTimer, QtCore.SIGNAL( "timeout()" ),
self._on_refresh_element_status )
self.statusTimer.start( 300 ) # Refresh element status every 300ms.
def createMenus( self ):
self.fileMenu = self.menuBar().addMenu( self.tr( "&File" ) )
self.fileMenu.addAction( self.newLevelAction )
self.fileMenu.addAction( self.editLevelAction )
self.fileMenu.addAction( self.cloneLevelAction )
self.fileMenu.addAction( self.saveAction )
self.fileMenu.addAction( self.playAction )
self.fileMenu.addSeparator()
self.fileMenu.addAction( self.changeAmyDirAction )
self.separatorRecent = self.fileMenu.addSeparator()
for recentaction in self.recentfile_actions:
self.fileMenu.addAction( recentaction )
self.fileMenu.addSeparator()
self.fileMenu.addAction( self.quitAct )
self.editMenu = self.menuBar().addMenu( self.tr( "&Edit" ) )
self.editMenu.addAction( self.undoAction )
self.editMenu.addAction( self.redoAction )
self.editMenu.addSeparator()
self.editMenu.addAction( self.common_actions['cut'] )
self.editMenu.addAction( self.common_actions['copy'] )
self.editMenu.addAction( self.common_actions['paste'] )
self.editMenu.addAction( self.common_actions['pastehere'] )
self.editMenu.addSeparator()
self.editMenu.addAction( self.common_actions['delete'] )
self.menuBar().addSeparator()
self.resourceMenu = self.menuBar().addMenu( self.tr( "&Resources" ) )
self.resourceMenu.addAction( self.updateResourcesAction )
self.resourceMenu.addAction( self.importResourcesAction )
self.resourceMenu.addSeparator()
self.resourceMenu.addAction( self.cleanResourcesAction )
self.resourceMenu.addSeparator()
self.menuBar().addSeparator()
# @todo add Windows menu. Take MDI example as model.
self.helpMenu = self.menuBar().addMenu( self.tr( "&Help" ) )
self.helpMenu.addAction( self.aboutAct )
def createToolBars( self ):
self.fileToolBar = self.addToolBar( self.tr( "File" ) )
self.fileToolBar.setObjectName( "fileToolbar" )
# self.fileToolBar.addAction(self.changeAmyDirAction)
self.fileToolBar.addAction( self.newLevelAction )
self.fileToolBar.addAction( self.editLevelAction )
self.fileToolBar.addAction( self.cloneLevelAction )
self.fileToolBar.addSeparator()
self.fileToolBar.addAction( self.saveAction )
self.fileToolBar.addAction( self.playAction )
self.fileToolBar.addSeparator()
self.editToolbar = self.addToolBar( self.tr( "Edit" ) )
self.editToolbar.setObjectName( "editToolbar" )
self.editToolbar.addAction( self.undoAction )
self.editToolbar.addAction( self.redoAction )
self.editToolbar.addSeparator()
self.editToolbar.addAction( self.common_actions['cut'] )
self.editToolbar.addAction( self.common_actions['copy'] )
self.editToolbar.addAction( self.common_actions['paste'] )
self.editToolbar.addSeparator()
self.editToolbar.addAction( self.common_actions['delete'] )
self.resourceToolBar = self.addToolBar( self.tr( "Resources" ) )
self.resourceToolBar.setObjectName( "resourceToolbar" )
self.resourceToolBar.addAction( self.updateResourcesAction )
self.resourceToolBar.addAction( self.importResourcesAction )
self.resourceToolBar.addSeparator()
self.resourceToolBar.addAction( self.cleanResourcesAction )
self.resourceToolBar.addSeparator()
self.levelViewToolBar = self.addToolBar( self.tr( "Level View" ) )
self.levelViewToolBar.setObjectName( "levelViewToolbar" )
for name in ( 'move', 'pan' ):
action = self.view_actions[name]
self.levelViewToolBar.addAction( action )
self.addItemToolBar = QtGui.QToolBar( self.tr( "Add Item" ) )
self.addItemToolBar.setObjectName( "addItemToolbar" )
self.addToolBar( Qt.LeftToolBarArea, self.addItemToolBar )
additem_action_list = ['line', 'rectangle', 'circle', 'image', 'compgeom', 'childrect', 'childcircle', 'hinge',
'sep1',
'lff', 'rff',
'sep2',
'label'
]
for name in additem_action_list:
if name not in self.additem_actions:
self.addItemToolBar.addSeparator()
else:
self.addItemToolBar.addAction( self.additem_actions[name] )
self.showhideToolBar = self.addToolBar( self.tr( "Show/Hide" ) )
self.showhideToolBar.setObjectName( "showhideToolbar" )
for elementtype in ( 'camera', 'fields', 'geom', 'gfx', 'labels' ):
self.showhideToolBar.addAction( self.showhide_actions[elementtype] )
def createStatusBar( self ):
self.statusBar().showMessage( self.tr( "Ready" ) )
self._mousePositionLabel = QtGui.QLabel()
self.statusBar().addPermanentWidget( self._mousePositionLabel )
def createElementTreeView( self, name, tree_meta, sibling_tabbed_dock = None ):
dock = QtGui.QDockWidget( self.tr( name ), self )
dock.setObjectName( name + '_tab' )
dock.setAllowedAreas( Qt.RightDockWidgetArea )
element_tree_view = metatreeui.MetaWorldTreeView( self.common_actions, self.group_icons, dock )
tree_model = metatreeui.MetaWorldTreeModel( tree_meta, self.group_icons,
element_tree_view )
element_tree_view.setModel( tree_model )
dock.setWidget( element_tree_view )
self.addDockWidget( Qt.RightDockWidgetArea, dock )
if sibling_tabbed_dock: # Stacks the dock widget together
self.tabifyDockWidget( sibling_tabbed_dock, dock )
dock.setFeatures( QtGui.QDockWidget.NoDockWidgetFeatures )
self.tree_view_by_element_world[tree_meta] = element_tree_view
return dock, element_tree_view
def createDockWindows( self ):
self.group_icons = {}
for group in 'camera game image physic resource shape text info material rect circle compgeom line anim'.split():
self.group_icons[group] = QtGui.QIcon( ":/images/group-%s.png" % group )
self.tree_view_by_element_world = {} # map of all tree views
scene_dock, self.sceneTree = self.createElementTreeView( 'Scene', metawog.TREE_LEVEL_SCENE )
level_dock, self.levelTree = self.createElementTreeView( 'Level', metawog.TREE_LEVEL_GAME, scene_dock )
resource_dock, self.levelResourceTree = self.createElementTreeView( 'Resource', #@UnusedVariable
metawog.TREE_LEVEL_RESOURCE,
level_dock )
scene_dock.raise_() # Makes the scene the default active tab
dock = QtGui.QDockWidget( self.tr( "Properties" ), self )
dock.setAllowedAreas( Qt.RightDockWidgetArea )
dock.setFeatures( QtGui.QDockWidget.NoDockWidgetFeatures )
dock.setObjectName( 'properties' )
self.propertiesList = metaelementui.MetaWorldPropertyListView( self.statusBar(),
dock )
self.propertiesListModel = metaelementui.MetaWorldPropertyListModel( 0, 2,
self.propertiesList ) # nb rows, nb cols
self.propertiesList.setModel( self.propertiesListModel )
dock.setWidget( self.propertiesList )
self.addDockWidget( Qt.RightDockWidgetArea, dock )
def _readSettings( self ):
"""Reads setting from previous session & restore window state."""
settings = QtCore.QSettings()
settings.beginGroup( "MainWindow" )
self._amy_path = unicode( settings.value( "amy_path", QtCore.QVariant( u'' ) ).toString() )
if self._amy_path == u'.':
self._amy_path = u''
elif self._amy_path != u'':
self._amy_path = os.path.normpath( self._amy_path )
if settings.value( "wasMaximized", False ).toBool():
self.showMaximized()
else:
self.resize( settings.value( "size", QtCore.QVariant( QtCore.QSize( 640, 480 ) ) ).toSize() )
self.move( settings.value( "pos", QtCore.QVariant( QtCore.QPoint( 200, 200 ) ) ).toPoint() )
        windowstate = settings.value( "windowState", None )
if windowstate is not None:
self.restoreState( windowstate.toByteArray() )
self.recentFiles = settings.value( "recent_files" ).toStringList()
self._updateRecentFiles()
settings.endGroup()
def _writeSettings( self ):
"""Persists the session window state for future restoration."""
        # On Windows, settings are stored under HKEY_CURRENT_USER\Software\DreamFarmGames\Amy In Da Farm! Editor
settings = QtCore.QSettings() #@todo makes helper to avoid QVariant conversions
settings.beginGroup( "MainWindow" )
settings.setValue( "amy_path", QtCore.QVariant( QtCore.QString( self._amy_path or u'' ) ) )
settings.setValue( "wasMaximized", QtCore.QVariant( self.isMaximized() ) )
settings.setValue( "size", QtCore.QVariant( self.size() ) )
settings.setValue( "pos", QtCore.QVariant( self.pos() ) )
settings.setValue( "windowState", self.saveState() )
settings.setValue( "recent_files", self.recentFiles )
settings.endGroup()
def closeEvent( self, event ):
"""Called when user close the main window."""
#@todo check if user really want to quit
for subwin in self.mdiArea.subWindowList():
if not subwin.close():
event.ignore()
return
self._writeSettings()
        self.actionTimer.stop()
        self.statusTimer.stop()
QtGui.QMainWindow.closeEvent( self, event )
event.accept()
if __name__ == "__main__":
app = QtGui.QApplication( sys.argv )
# Set keys for settings
app.setOrganizationName( "DreamFarmGames" )
app.setOrganizationDomain( "dreamfarmgames.com" )
app.setApplicationName( "Amy In Da Farm! Editor" )
if LOG_TO_FILE:
saveout = sys.stdout
saveerr = sys.stderr
fout = open( APP_NAME_LOWER + '.log', 'a' )
sys.stdout = fout
sys.stderr = fout
print ""
print "------------------------------------------------------"
print APP_NAME_PROPER + " started ", datetime.now(), "File Logging Enabled"
mainwindow = MainWindow()
mainwindow.show()
appex = app.exec_()
if LOG_TO_FILE:
sys.stdout = saveout
sys.stderr = saveerr
fout.close()
sys.exit( appex )
|
reven86/dfg-amy-editor
|
src/wogeditor.py
|
Python
|
gpl-3.0
| 105,116
|
# coding=utf8
#
import hashlib
from flask import request
from flask.views import MethodView
DEFAULT_TOKEN = "wechatter"
class WechatVerify(MethodView):
def get(self):
""" """
echostr = request.args.get('echostr')
signature = request.args.get('signature')
timestamp = request.args.get('timestamp')
nonce = request.args.get('nonce')
token = DEFAULT_TOKEN
base_str = "".join(sorted([token, timestamp, nonce]))
if hashlib.sha1(base_str).hexdigest() == signature:
return echostr
else:
return ""
class WechatMsg(MethodView):
""" """
class WechatMenu(MethodView):
    """Stubs for managing the WeChat custom menu."""
    def get(self):
        """Stub: fetch the custom menu."""
        pass
    def post(self):
        """Stub: create or update the custom menu."""
        pass
|
seraphln/wheel
|
wheel/core/plugins/wechat_plugin.py
|
Python
|
gpl-3.0
| 784
|
from __mailer import *
|
AdTechMedia/adtechmedia-website
|
component/ads-blocking-losses/mailer/__init__.py
|
Python
|
mpl-2.0
| 22
|
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'BIND 9 管理员参考手册'
copyright = u'2021, Internet Systems Consortium'
author = u"Internet Systems Consortium \\and 翻译: sunguonian@yahoo.com"
# The full version, including alpha/beta/rc tags
release = 'BIND 9.16.15(稳定版)'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'zh_CN'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [
'_build',
'Thumbs.db',
'.DS_Store',
'*.grammar.rst',
'*.zoneopts.rst',
'catz.rst',
'dlz.rst',
'dnssec.rst',
'dyndb.rst',
'logging-cattegories.rst',
'managed-keys.rst',
'pkcs11.rst',
'plugins.rst'
]
# The master toctree document.
master_doc = 'index'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
#html_theme = 'alabaster'
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
latex_engine = 'xelatex'
latex_elements = {
'fontpkg': r'''
\setmainfont{Source Han Serif CN:style=Regular}
\setsansfont{Source Han Sans CN Medium:style=Medium,Regular}
\setmonofont{Source Han Sans CN:style=Regular}
\setCJKfamilyfont{song}{Source Han Serif CN:style=Regular}
\setCJKfamilyfont{heiti}{Source Han Sans CN:style=Regular}
''',
'pointsize': '11pt',
'preamble': r'\input{../mystyle.tex.txt}'
}
latex_documents = [
(master_doc, 'Bv9ARM.tex', u'BIND 9管理员参考手册', author, 'manual'),
]
latex_logo = "isc-logo.pdf"
|
perlang/bv9arm-chinese
|
branches/9.16.15/arm/conf.py
|
Python
|
mpl-2.0
| 3,175
|
# Generated by Django 3.1.5 on 2021-01-04 17:42
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('vehicles', '0007_auto_20201130_1939'),
]
operations = [
migrations.AddField(
model_name='channel',
name='datetime',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='vehiclelocation',
name='occupancy',
field=models.CharField(blank=True, choices=[('seatsAvailable', 'Seats available'), ('standingAvailable', 'Standing available'), ('full', 'Full')], max_length=17),
),
migrations.AddField(
model_name='vehicletype',
name='electric',
field=models.BooleanField(null=True),
),
]
|
jclgoodwin/bustimes.org.uk
|
vehicles/migrations/0008_auto_20210104_1742.py
|
Python
|
mpl-2.0
| 844
|
import json
from nose.tools import eq_
from django.conf import settings
from django.core.urlresolvers import reverse
from crashstats.base.tests.testbase import DjangoTestCase
from ..browserid_mock import mock_browserid
class TestViews(DjangoTestCase):
def _login_attempt(self, email, assertion='fakeassertion123', next=None):
if not next:
next = self._home_url
with mock_browserid(email):
post_data = {
'assertion': assertion,
'next': next
}
return self.client.post(
'/browserid/login/',
post_data
)
@property
def _home_url(self):
return reverse('crashstats:home', args=(settings.DEFAULT_PRODUCT,))
def test_invalid(self):
"""Bad BrowserID form (i.e. no assertion) -> failure."""
response = self._login_attempt(None, None)
eq_(response.status_code, 403)
context = json.loads(response.content)
eq_(context['redirect'], self._home_url)
def test_bad_verification(self):
"""Bad verification -> failure."""
response = self._login_attempt(None)
eq_(response.status_code, 403)
context = json.loads(response.content)
eq_(context['redirect'], self._home_url)
def test_successful_redirect(self):
response = self._login_attempt(
'peter@example.com',
)
eq_(response.status_code, 200)
context = json.loads(response.content)
eq_(context['redirect'], self._home_url)
|
bsmedberg/socorro
|
webapp-django/crashstats/auth/tests/test_views.py
|
Python
|
mpl-2.0
| 1,566
|
# The contents of this file are subject to the Mozilla Public License
# Version 2.0 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# OS2Webscanner was developed by Magenta in collaboration with OS2 the
# Danish community of open source municipalities (http://www.os2web.dk/).
#
# The code is currently governed by OS2 the Danish community of open
# source municipalities ( http://www.os2web.dk/ )
"""Scrapy Items."""
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
from scrapy.item import Field
from scrapy_djangoitem import DjangoItem
class MatchItem(DjangoItem):
"""Scrapy Item using the Match object from the Django model as storage."""
from os2webscanner.models.match_model import Match
django_model = Match
"""Original text matched. Stored temporarily for the purposes of
replacing the original matched text.
Note that this is not stored in the DB."""
original_matched_data = Field()
|
os2webscanner/os2webscanner
|
scrapy-webscanner/scanners/items.py
|
Python
|
mpl-2.0
| 1,350
|
# -*- coding: utf-8 -*-
import datetime
import pytest
from django.utils.timezone import utc
from django.forms import ValidationError
from crashstats.base.tests.testbase import TestCase
from crashstats.supersearch import form_fields
class TestFormFields(TestCase):
def test_integer_field(self):
field = form_fields.IntegerField()
cleaned_value = field.clean(['>13'])
assert cleaned_value == [13]
assert field.prefixed_value == ['>13']
# With a ! prefix.
cleaned_value = field.clean(['!13'])
assert cleaned_value == [13]
assert field.prefixed_value == ['!13']
def test_datetime_field(self):
field = form_fields.DateTimeField()
cleaned_value = field.clean(['>12/31/2012 10:20:30'])
dt = datetime.datetime(2012, 12, 31, 10, 20, 30)
dt = dt.replace(tzinfo=utc)
assert cleaned_value == [dt]
assert field.prefixed_value == ['>2012-12-31T10:20:30+00:00']
field = form_fields.DateTimeField()
cleaned_value = field.clean(['>=2012-12-31'])
dt = datetime.datetime(2012, 12, 31)
dt = dt.replace(tzinfo=utc)
assert cleaned_value == [dt]
assert field.prefixed_value == ['>=2012-12-31T00:00:00+00:00']
field = form_fields.DateTimeField()
cleaned_value = field.clean(['>=2012-12-31T01:02:03+00:00'])
dt = datetime.datetime(2012, 12, 31, 1, 2, 3)
dt = dt.replace(tzinfo=utc)
assert cleaned_value == [dt]
assert field.prefixed_value == ['>=2012-12-31T01:02:03+00:00']
def test_several_fields(self):
field1 = form_fields.DateTimeField()
cleaned_value1 = field1.clean(['>12/31/2012 10:20:30'])
field2 = form_fields.DateTimeField()
cleaned_value2 = field2.clean(['<12/31/2012 10:20:40'])
dt = datetime.datetime(2012, 12, 31, 10, 20, 30)
dt = dt.replace(tzinfo=utc)
assert cleaned_value1 == [dt]
assert field1.prefixed_value == ['>2012-12-31T10:20:30+00:00']
dt = datetime.datetime(2012, 12, 31, 10, 20, 40)
dt = dt.replace(tzinfo=utc)
assert cleaned_value2 == [dt]
assert field2.prefixed_value == ['<2012-12-31T10:20:40+00:00']
assert field1.operator == '>'
assert field2.operator == '<'
def test_several_fields_illogically_integerfield(self):
field = form_fields.IntegerField()
with pytest.raises(ValidationError):
field.clean(['>10', '<10'])
with pytest.raises(ValidationError):
field.clean(['<10', '>10'])
with pytest.raises(ValidationError):
field.clean(['<10', '>=10'])
with pytest.raises(ValidationError):
field.clean(['<=10', '>10'])
with pytest.raises(ValidationError):
field.clean(['<10', '<10'])
def test_several_fields_illogically_datetimefield(self):
field = form_fields.DateTimeField()
with pytest.raises(ValidationError):
field.clean(['>2016-08-10', '<2016-08-10'])
with pytest.raises(ValidationError):
field.clean(['<2016-08-10', '<2016-08-10'])
with pytest.raises(ValidationError):
field.clean(['>=2016-08-10', '<2016-08-10'])
with pytest.raises(ValidationError):
field.clean(['>2016-08-10', '<=2016-08-10'])
with pytest.raises(ValidationError):
field.clean(['>=2016-08-10', '<=2016-08-09'])
# but note, this should work!
field.clean(['>=2016-08-10', '<=2016-08-10'])
# any use of the equal sign and a less or greater than
with pytest.raises(ValidationError):
field.clean(['=2016-08-10', '<2016-08-10'])
with pytest.raises(ValidationError):
field.clean(['=2016-08-10', '>2016-08-10'])
with pytest.raises(ValidationError):
field.clean(['>2016-08-10', '=2016-08-10'])
with pytest.raises(ValidationError):
field.clean(['<2016-08-10', '=2016-08-10'])
# more than two fields
with pytest.raises(ValidationError):
field.clean(['>2016-08-01', '<=2016-08-02', '>=2016-08-02'])
with pytest.raises(ValidationError):
field.clean(['<2016-08-01', '<2016-08-02', '<2016-08-03'])
def test_boolean_field(self):
field = form_fields.BooleanField(required=False)
# If the input is None, leave it as None
cleaned_value = field.clean(None)
assert cleaned_value is None
# The list of known truthy strings
for value in form_fields.BooleanField.truthy_strings:
cleaned_value = field.clean(value)
assert cleaned_value == '__true__'
# But it's also case insensitive, so check that it still works
for value in form_fields.BooleanField.truthy_strings:
cleaned_value = field.clean(value.upper()) # note
assert cleaned_value == '__true__'
# Any other string that is NOT in form_fields.BooleanField.truthy_strings
# should return `!__true__`
cleaned_value = field.clean('FALSE')
assert cleaned_value == '!__true__'
cleaned_value = field.clean('anything')
assert cleaned_value == '!__true__'
# But not choke on non-ascii strings
cleaned_value = field.clean(u'Nöö')
assert cleaned_value == '!__true__'
|
Tayamarn/socorro
|
webapp-django/crashstats/supersearch/tests/test_form_fields.py
|
Python
|
mpl-2.0
| 5,385
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""This is the base of the crashstorage system - a unified interfaces for
saving, fetching and iterating over raw crashes, dumps and processed crashes.
"""
import sys
import collections
import datetime
from configman import Namespace, RequiredConfig
from configman.converters import classes_in_namespaces_converter, \
class_converter
from configman.dotdict import DotDict
#==============================================================================
class Redactor(RequiredConfig):
"""This class is the implementation of a functor for in situ redacting
of sensitive keys from a mapping. Keys that are to be redacted are placed
in the configuration under the name 'forbidden_keys'. They may take the
form of dotted keys with subkeys. For example, "a.b.c" means that the key,
"c" is to be redacted."""
required_config = Namespace()
required_config.add_option(
name='forbidden_keys',
doc='a list of keys not allowed in a redacted processed crash',
default="url, email, user_id, exploitability,"
"json_dump.sensitive,"
"upload_file_minidump_flash1.json_dump.sensitive,"
"upload_file_minidump_flash2.json_dump.sensitive,"
"upload_file_minidump_browser.json_dump.sensitive",
reference_value_from='resource.redactor',
)
#--------------------------------------------------------------------------
def __init__(self, config):
self.config = config
self.forbidden_keys = [
x.strip() for x in self.config.forbidden_keys.split(',')
]
#--------------------------------------------------------------------------
def redact(self, a_mapping):
"""this is the function that does the redaction."""
for a_key in self.forbidden_keys:
sub_mapping = a_mapping
sub_keys = a_key.split('.')
try:
for a_sub_key in sub_keys[:-1]: # step through the subkeys
sub_mapping = sub_mapping[a_sub_key.strip()]
del sub_mapping[sub_keys[-1]]
except KeyError:
pass # this is okay, our key was already deleted by
# another pattern that matched at a higher level
#--------------------------------------------------------------------------
def __call__(self, a_mapping):
self.redact(a_mapping)
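    #--------------------------------------------------------------------------
    # Hedged usage sketch of the dotted-key behaviour documented above; the
    # mapping and key list are invented sample data, not the Socorro defaults.
    @staticmethod
    def _example_redaction():
        config = DotDict()
        config.forbidden_keys = 'email, json_dump.sensitive'
        a_crash = {'email': 'x@example.com',
                   'json_dump': {'sensitive': {'memory': '...'}, 'status': 'OK'}}
        Redactor(config)(a_crash)
        # a_crash is now {'json_dump': {'status': 'OK'}}
        return a_crash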
#==============================================================================
class CrashIDNotFound(Exception):
pass
#==============================================================================
class CrashStorageBase(RequiredConfig):
"""the base class for all crash storage classes"""
required_config = Namespace()
required_config.add_option(
name="redactor_class",
doc="the name of the class that implements a 'redact' method",
default=Redactor,
reference_value_from='resource.redactor',
)
#--------------------------------------------------------------------------
def __init__(self, config, quit_check_callback=None):
"""base class constructor
parameters:
config - a configman dot dict holding configuration information
quit_check_callback - a function to be called periodically during
long running operations. It should check
whatever the client app uses to detect a
quit request and raise a KeyboardInterrupt.
All derived classes should be prepared to
shut down cleanly on getting such an
exception from a call to this function
        instance variables:
            self.config - a reference to the config mapping
            self.quit_check - a reference to the quit detecting callback
            self.logger - convenience shortcut to the logger in the config
            self.exceptions_eligible_for_retry - a collection of non-fatal
                exceptions that can be raised by a given storage
                implementation. This may be fetched by a client of the
                crashstorage so that it can determine if it can retry a failed
                storage operation."""
self.config = config
if quit_check_callback:
self.quit_check = quit_check_callback
else:
self.quit_check = lambda: False
self.logger = config.logger
self.exceptions_eligible_for_retry = ()
self.redactor = config.redactor_class(config)
#--------------------------------------------------------------------------
def close(self):
"""some implementations may need explicit closing."""
pass
#--------------------------------------------------------------------------
def save_raw_crash(self, raw_crash, dumps, crash_id):
"""this method that saves both the raw_crash and the dump, must be
overridden in any implementation.
Why is does this base implementation just silently do nothing rather
than raise a NotImplementedError? Implementations of crashstorage
are not required to implement the entire api. Some may save only
processed crashes but may be bundled (see the PolyCrashStorage class)
with other crashstorage implementations. Rather than having a non-
implenting class raise an exeception that would derail the other
bundled operations, the non-implementing storageclass will just
quietly do nothing.
parameters:
raw_crash - a mapping containing the raw crash meta data. It is
often saved as a json file, but here it is in the form
of a dict.
dumps - a dict of dump name keys and binary blob values
crash_id - the crash key to use for this crash"""
pass
#--------------------------------------------------------------------------
def save_processed(self, processed_crash):
"""this method saves the processed_crash and must be overridden in
anything that chooses to implement it.
        Why does this base implementation just silently do nothing rather
        than raise a NotImplementedError? Implementations of crashstorage
        are not required to implement the entire api. Some may save only
        raw crashes but may be bundled (see the PolyCrashStorage class)
        with other crashstorage implementations. Rather than having a non-
        implementing class raise an exception that would derail the other
        bundled operations, the non-implementing storage class will just
        quietly do nothing.
parameters:
processed_crash - a mapping containing the processed crash"""
pass
#--------------------------------------------------------------------------
def save_raw_and_processed(self, raw_crash, dumps, processed_crash,
crash_id):
"""Mainly for the convenience and efficiency of the processor,
this unified method combines saving both raw and processed crashes.
parameters:
raw_crash - a mapping containing the raw crash meta data. It is
often saved as a json file, but here it is in the form
of a dict.
dumps - a dict of dump name keys and binary blob values
processed_crash - a mapping containing the processed crash
crash_id - the crash key to use for this crash"""
self.save_raw_crash(raw_crash, dumps, crash_id)
self.save_processed(processed_crash)
#--------------------------------------------------------------------------
def get_raw_crash(self, crash_id):
"""the default implementation of fetching a raw_crash
parameters:
crash_id - the id of a raw crash to fetch"""
raise NotImplementedError("get_raw_crash is not implemented")
#--------------------------------------------------------------------------
def get_raw_dump(self, crash_id, name=None):
"""the default implementation of fetching a dump
parameters:
crash_id - the id of a dump to fetch
name - the name of the dump to fetch"""
raise NotImplementedError("get_raw_dump is not implemented")
#--------------------------------------------------------------------------
def get_raw_dumps(self, crash_id):
"""the default implementation of fetching all the dumps
parameters:
crash_id - the id of a dump to fetch"""
raise NotImplementedError("get_raw_dumps is not implemented")
#--------------------------------------------------------------------------
def get_raw_dumps_as_files(self, crash_id):
"""the default implementation of fetching all the dumps as files on
a file system somewhere. returns a list of pathnames.
parameters:
crash_id - the id of a dump to fetch"""
raise NotImplementedError("get_raw_dumps is not implemented")
#--------------------------------------------------------------------------
def get_processed(self, crash_id):
"""the default implementation of fetching a processed_crash. This
method should not be overridden in subclasses unless the intent is to
alter the redaction process.
parameters:
crash_id - the id of a processed_crash to fetch"""
processed_crash = self.get_unredacted_processed(crash_id)
self.redactor(processed_crash)
return processed_crash
#--------------------------------------------------------------------------
def get_unredacted_processed(self, crash_id):
"""the implementation of fetching a processed_crash with no redaction
parameters:
crash_id - the id of a processed_crash to fetch"""
raise NotImplementedError(
"get_unredacted_processed is not implemented"
)
#--------------------------------------------------------------------------
def remove(self, crash_id):
"""delete a crash from storage
parameters:
crash_id - the id of a crash to fetch"""
raise NotImplementedError("remove is not implemented")
#--------------------------------------------------------------------------
def new_crashes(self):
"""a generator handing out a sequence of crash_ids of crashes that are
considered to be new. Each implementation can interpret the concept
of "new" in an implementation specific way. To be useful, derived
class ought to override this method.
"""
return []
#==============================================================================
class NullCrashStorage(CrashStorageBase):
"""a testing crashstorage that silently ignores everything it's told to do
"""
#--------------------------------------------------------------------------
def get_raw_crash(self, crash_id):
"""the default implementation of fetching a raw_crash
parameters:
crash_id - the id of a raw crash to fetch"""
return {}
#--------------------------------------------------------------------------
def get_raw_dump(self, crash_id, name):
"""the default implementation of fetching a dump
parameters:
crash_id - the id of a dump to fetch"""
return ''
#--------------------------------------------------------------------------
def get_raw_dumps(self, crash_id):
"""the default implementation of fetching all the dumps
parameters:
crash_id - the id of a dump to fetch"""
return {}
#--------------------------------------------------------------------------
def get_unredacted_processed(self, crash_id):
"""the default implementation of fetching a processed_crash
parameters:
crash_id - the id of a processed_crash to fetch"""
return {}
#--------------------------------------------------------------------------
def remove(self, crash_id):
"""delete a crash from storage
parameters:
crash_id - the id of a crash to fetch"""
pass
#==============================================================================
class PolyStorageError(Exception, collections.MutableSequence):
"""an exception container holding a sequence of exceptions with tracebacks.
parameters:
message - an optional over all error message
"""
    def __init__(self, message=''):
        super(PolyStorageError, self).__init__(message)
        self.exceptions = []  # the collection
def gather_current_exception(self):
"""append the currently active exception to the collection"""
self.exceptions.append(sys.exc_info())
    def has_exceptions(self):
        """return True if any exceptions have been collected"""
        return bool(self.exceptions)
def __len__(self):
"""how many exceptions are stored?
this method is required by the MutableSequence abstract base class"""
return len(self.exceptions)
def __iter__(self):
"""start an iterator over the squence.
this method is required by the MutableSequence abstract base class"""
return iter(self.exceptions)
def __contains__(self, value):
"""search the sequence for a value and return true if it is present
this method is required by the MutableSequence abstract base class"""
return self.exceptions.__contains__(value)
def __getitem__(self, index):
"""fetch a specific exception
this method is required by the MutableSequence abstract base class"""
return self.exceptions.__getitem__(index)
    def __setitem__(self, index, value):
        """change the value for an index in the sequence
        this method is required by the MutableSequence abstract base class"""
        self.exceptions.__setitem__(index, value)
    def __delitem__(self, index):
        """remove an exception from the sequence
        this method is required by the MutableSequence abstract base class"""
        self.exceptions.__delitem__(index)
    def insert(self, index, value):
        """insert an exception into the sequence
        this method is required by the MutableSequence abstract base class"""
        self.exceptions.insert(index, value)
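#------------------------------------------------------------------------------
# Illustrative sketch, not part of the original module: collecting an
# exception together with its traceback the way the Poly* classes do.  The
# failure below is simulated.
def _demo_poly_storage_error():
    poly_exception = PolyStorageError('one or more stores failed')
    try:
        raise ValueError('simulated store failure')
    except Exception:
        poly_exception.gather_current_exception()
    assert poly_exception.has_exceptions()
    assert len(poly_exception) == 1
    exc_type, exc_value, exc_tb = poly_exception[0]  # a sys.exc_info() tuple
    assert exc_type is ValueError
    return poly_exception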
#==============================================================================
class PolyCrashStorage(CrashStorageBase):
"""a crashstorage implementation that encapsulates a collection of other
crashstorage instances. Any save operation applied to an instance of this
class will be applied to all the crashstorge in the collection.
This class is useful for 'save' operations only. It does not implement
the 'get' operations.
The contained crashstorage instances are specified in the configuration.
Each class specified in the 'storage_classes' config option will be given
its own numbered namespace in the form 'storage%d'. With in the namespace,
the class itself will be referred to as just 'store'. Any configuration
requirements within the class 'store' will be isolated within the local
namespace. That allows multiple instances of the same storageclass to
avoid name collisions.
"""
required_config = Namespace()
required_config.add_option(
'storage_classes',
doc='a comma delimited list of storage classes',
default='',
from_string_converter=classes_in_namespaces_converter(
template_for_namespace='storage%d',
name_of_class_option='crashstorage_class',
instantiate_classes=False, # we instantiate manually for thread
# safety
)
)
#--------------------------------------------------------------------------
def __init__(self, config, quit_check_callback=None):
"""instantiate all the subordinate crashstorage instances
parameters:
config - a configman dot dict holding configuration information
quit_check_callback - a function to be called periodically during
long running operations.
instance variables:
            self.storage_namespaces - the list of the namespaces in which the
                                      subordinate instances are stored.
self.stores - instances of the subordinate crash stores
"""
super(PolyCrashStorage, self).__init__(config, quit_check_callback)
self.storage_namespaces = \
config.storage_classes.subordinate_namespace_names
self.stores = DotDict()
for a_namespace in self.storage_namespaces:
self.stores[a_namespace] = \
config[a_namespace].crashstorage_class(
config[a_namespace],
quit_check_callback
)
#--------------------------------------------------------------------------
def close(self):
"""iterate through the subordinate crash stores and close them.
Even though the classes are closed in sequential order, all are
assured to close even if an earlier one raises an exception. When all
are closed, any exceptions that were raised are reraised in a
PolyStorageError
raises:
PolyStorageError - an exception container holding a list of the
exceptions raised by the subordinate storage
systems"""
storage_exception = PolyStorageError()
for a_store in self.stores.itervalues():
try:
a_store.close()
except Exception, x:
self.logger.error('%s failure: %s', a_store.__class__,
str(x))
storage_exception.gather_current_exception()
if storage_exception.has_exceptions():
raise storage_exception
#--------------------------------------------------------------------------
def save_raw_crash(self, raw_crash, dumps, crash_id):
"""iterate through the subordinate crash stores saving the raw_crash
and the dump to each of them.
parameters:
raw_crash - the meta data mapping
dumps - a mapping of dump name keys to dump binary values
crash_id - the id of the crash to use"""
storage_exception = PolyStorageError()
for a_store in self.stores.itervalues():
self.quit_check()
try:
a_store.save_raw_crash(raw_crash, dumps, crash_id)
except Exception, x:
self.logger.error('%s failure: %s', a_store.__class__,
str(x))
storage_exception.gather_current_exception()
if storage_exception.has_exceptions():
raise storage_exception
#--------------------------------------------------------------------------
def save_processed(self, processed_crash):
"""iterate through the subordinate crash stores saving the
processed_crash to each of the.
parameters:
processed_crash - a mapping containing the processed crash"""
storage_exception = PolyStorageError()
for a_store in self.stores.itervalues():
self.quit_check()
try:
a_store.save_processed(processed_crash)
except Exception, x:
self.logger.error('%s failure: %s', a_store.__class__,
str(x), exc_info=True)
storage_exception.gather_current_exception()
if storage_exception.has_exceptions():
raise storage_exception
#--------------------------------------------------------------------------
def save_raw_and_processed(self, raw_crash, dump, processed_crash,
crash_id):
for a_store in self.stores.itervalues():
a_store.save_raw_and_processed(
raw_crash,
dump,
processed_crash,
crash_id
)
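#------------------------------------------------------------------------------
# Illustrative sketch, not part of the original module: the fan-out-and-gather
# save pattern used by the PolyCrashStorage save methods, shown with two
# hypothetical in-memory stores instead of configman-built ones.
def _demo_poly_save():
    class _GoodStore(object):
        def __init__(self):
            self.saved = []
        def save_processed(self, processed_crash):
            self.saved.append(processed_crash)
    class _BadStore(object):
        def save_processed(self, processed_crash):
            raise IOError('simulated store failure')
    good_store = _GoodStore()
    storage_exception = PolyStorageError()
    for a_store in (good_store, _BadStore()):
        try:
            a_store.save_processed({'uuid': 'abc123'})
        except Exception:
            storage_exception.gather_current_exception()
    # the good store saved the crash; the bad store's failure was collected
    # rather than aborting the loop, mirroring save_processed above
    assert good_store.saved
    assert storage_exception.has_exceptions()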
#==============================================================================
class FallbackCrashStorage(CrashStorageBase):
"""This storage system has a primary and fallback subordinate storage
systems. If an exception is raised by the primary storage system during
an operation, the operation is repeated on the fallback storage system.
This class is useful for 'save' operations only. It does not implement
the 'get' operations."""
required_config = Namespace()
required_config.primary = Namespace()
required_config.primary.add_option(
'storage_class',
doc='storage class for primary storage',
default='',
from_string_converter=class_converter
)
required_config.fallback = Namespace()
required_config.fallback.add_option(
'storage_class',
doc='storage class for fallback storage',
default='',
from_string_converter=class_converter
)
#--------------------------------------------------------------------------
def __init__(self, config, quit_check_callback=None):
"""instantiate the primary and secondary storage systems"""
super(FallbackCrashStorage, self).__init__(config, quit_check_callback)
self.primary_store = config.primary.storage_class(
config.primary,
quit_check_callback
)
self.fallback_store = config.fallback.storage_class(
config.fallback,
quit_check_callback
)
self.logger = self.config.logger
#--------------------------------------------------------------------------
def close(self):
"""close both storage systems. The second will still be closed even
if the first raises an exception. """
poly_exception = PolyStorageError()
for a_store in (self.primary_store, self.fallback_store):
try:
a_store.close()
except NotImplementedError:
pass
except Exception:
poly_exception.gather_current_exception()
if len(poly_exception.exceptions) > 1:
raise poly_exception
#--------------------------------------------------------------------------
def save_raw_crash(self, raw_crash, dumps, crash_id):
"""save raw crash data to the primary. If that fails save to the
fallback. If that fails raise the PolyStorageException
parameters:
raw_crash - the meta data mapping
dumps - a mapping of dump name keys to dump binary values
crash_id - the id of the crash to use"""
try:
self.primary_store.save_raw_crash(raw_crash, dumps, crash_id)
except Exception:
self.logger.critical('error in saving primary', exc_info=True)
poly_exception = PolyStorageError()
poly_exception.gather_current_exception()
try:
self.fallback_store.save_raw_crash(raw_crash, dumps, crash_id)
except Exception:
self.logger.critical('error in saving fallback', exc_info=True)
poly_exception.gather_current_exception()
raise poly_exception
#--------------------------------------------------------------------------
def save_processed(self, processed_crash):
"""save processed crash data to the primary. If that fails save to the
fallback. If that fails raise the PolyStorageException
parameters:
processed_crash - a mapping containing the processed crash"""
try:
self.primary_store.save_processed(processed_crash)
except Exception:
self.logger.critical('error in saving primary', exc_info=True)
poly_exception = PolyStorageError()
poly_exception.gather_current_exception()
try:
self.fallback_store.save_processed(processed_crash)
except Exception:
self.logger.critical('error in saving fallback', exc_info=True)
poly_exception.gather_current_exception()
raise poly_exception
#--------------------------------------------------------------------------
def get_raw_crash(self, crash_id):
"""get a raw crash 1st from primary and if not found then try the
fallback.
parameters:
crash_id - the id of a raw crash to fetch"""
try:
return self.primary_store.get_raw_crash(crash_id)
except CrashIDNotFound:
return self.fallback_store.get_raw_crash(crash_id)
#--------------------------------------------------------------------------
def get_raw_dump(self, crash_id, name=None):
"""get a named crash dump 1st from primary and if not found then try
the fallback.
parameters:
crash_id - the id of a dump to fetch"""
try:
return self.primary_store.get_raw_dump(crash_id, name)
except CrashIDNotFound:
return self.fallback_store.get_raw_dump(crash_id, name)
#--------------------------------------------------------------------------
def get_raw_dumps(self, crash_id):
"""get all crash dumps 1st from primary and if not found then try
the fallback.
parameters:
crash_id - the id of a dump to fetch"""
try:
return self.primary_store.get_raw_dumps(crash_id)
except CrashIDNotFound:
return self.fallback_store.get_raw_dumps(crash_id)
#--------------------------------------------------------------------------
def get_raw_dumps_as_files(self, crash_id):
"""get all crash dump pathnames 1st from primary and if not found then
try the fallback.
parameters:
crash_id - the id of a dump to fetch"""
try:
return self.primary_store.get_raw_dumps_as_files(crash_id)
except CrashIDNotFound:
return self.fallback_store.get_raw_dumps_as_files(crash_id)
#--------------------------------------------------------------------------
def get_unredacted_processed(self, crash_id):
"""fetch an unredacted processed_crash
parameters:
crash_id - the id of a processed_crash to fetch"""
try:
return self.primary_store.get_unredacted_processed(crash_id)
except CrashIDNotFound:
return self.fallback_store.get_unredacted_processed(crash_id)
#--------------------------------------------------------------------------
def remove(self, crash_id):
"""delete a crash from storage
parameters:
crash_id - the id of a crash to fetch"""
try:
self.primary_store.remove(crash_id)
except CrashIDNotFound:
self.fallback_store.remove(crash_id)
#--------------------------------------------------------------------------
def new_crashes(self):
"""return an iterator that yields a list of crash_ids of raw crashes
that were added to the file system since the last time this iterator
was requested."""
for a_crash in self.fallback_store.new_crashes():
yield a_crash
for a_crash in self.primary_store.new_crashes():
yield a_crash
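#------------------------------------------------------------------------------
# Illustrative sketch, not part of the original module: the primary/fallback
# read path used by the 'get' methods above, reduced to its essence with two
# hypothetical in-memory stores.
def _demo_fallback_read():
    class _MemoryStore(object):
        def __init__(self, crashes):
            self._crashes = crashes
        def get_raw_crash(self, crash_id):
            try:
                return self._crashes[crash_id]
            except KeyError:
                raise CrashIDNotFound(crash_id)
    primary_store = _MemoryStore({})
    fallback_store = _MemoryStore({'abc123': {'ProductName': 'Demo'}})
    try:
        crash = primary_store.get_raw_crash('abc123')
    except CrashIDNotFound:  # not in the primary, so try the fallback
        crash = fallback_store.get_raw_crash('abc123')
    assert crash['ProductName'] == 'Demo'
    return crash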
#==============================================================================
class PrimaryDeferredStorage(CrashStorageBase):
"""
PrimaryDeferredStorage reads information from a raw crash and, based on a
predicate function, selects either the primary or deferred storage to store
a crash in.
"""
required_config = Namespace()
required_config.primary = Namespace()
required_config.primary.add_option(
'storage_class',
doc='storage class for primary storage',
default='',
from_string_converter=class_converter
)
required_config.deferred = Namespace()
required_config.deferred.add_option(
'storage_class',
doc='storage class for deferred storage',
default='',
from_string_converter=class_converter
)
required_config.add_option(
'deferral_criteria',
doc='criteria for deferring a crash',
default='lambda crash: crash.get("legacy_processing")',
from_string_converter=eval
)
#--------------------------------------------------------------------------
def __init__(self, config, quit_check_callback=None):
"""instantiate the primary and deferred storage systems"""
super(PrimaryDeferredStorage, self).__init__(
config,
quit_check_callback
)
self.primary_store = config.primary.storage_class(
config.primary,
quit_check_callback
)
self.deferred_store = config.deferred.storage_class(
config.deferred,
quit_check_callback
)
self.logger = self.config.logger
#--------------------------------------------------------------------------
def close(self):
"""close both storage systems. The second will still be closed even
if the first raises an exception. """
poly_exception = PolyStorageError()
for a_store in (self.primary_store, self.deferred_store):
try:
a_store.close()
except NotImplementedError:
pass
except Exception:
poly_exception.gather_current_exception()
if len(poly_exception.exceptions) > 1:
raise poly_exception
#--------------------------------------------------------------------------
def save_raw_crash(self, raw_crash, dumps, crash_id):
"""save crash data into either the primary or deferred storage,
depending on the deferral criteria"""
if not self.config.deferral_criteria(raw_crash):
self.primary_store.save_raw_crash(raw_crash, dumps, crash_id)
else:
self.deferred_store.save_raw_crash(raw_crash, dumps, crash_id)
#--------------------------------------------------------------------------
def save_processed(self, processed_crash):
"""save processed crash data into either the primary or deferred
storage, depending on the deferral criteria"""
if not self.config.deferral_criteria(processed_crash):
self.primary_store.save_processed(processed_crash)
else:
self.deferred_store.save_processed(processed_crash)
#--------------------------------------------------------------------------
def get_raw_crash(self, crash_id):
"""get a raw crash 1st from primary and if not found then try the
deferred.
parameters:
crash_id - the id of a raw crash to fetch"""
try:
return self.primary_store.get_raw_crash(crash_id)
except CrashIDNotFound:
return self.deferred_store.get_raw_crash(crash_id)
#--------------------------------------------------------------------------
def get_raw_dump(self, crash_id, name=None):
"""get a named crash dump 1st from primary and if not found then try
the deferred.
parameters:
crash_id - the id of a dump to fetch
           name - the name of the dump to fetch, or omit for the default dump"""
try:
return self.primary_store.get_raw_dump(crash_id, name)
except CrashIDNotFound:
return self.deferred_store.get_raw_dump(crash_id, name)
#--------------------------------------------------------------------------
def get_raw_dumps(self, crash_id):
"""get all crash dumps 1st from primary and if not found then try
the deferred.
parameters:
crash_id - the id of a dump to fetch"""
try:
return self.primary_store.get_raw_dumps(crash_id)
except CrashIDNotFound:
return self.deferred_store.get_raw_dumps(crash_id)
#--------------------------------------------------------------------------
def get_raw_dumps_as_files(self, crash_id):
"""get all crash dump pathnames 1st from primary and if not found then
try the deferred.
parameters:
crash_id - the id of a dump to fetch"""
try:
return self.primary_store.get_raw_dumps_as_files(crash_id)
except CrashIDNotFound:
return self.deferred_store.get_raw_dumps_as_files(crash_id)
#--------------------------------------------------------------------------
def get_unredacted_processed(self, crash_id):
"""fetch an unredacted processed_crash
parameters:
crash_id - the id of a processed_crash to fetch"""
try:
return self.primary_store.get_unredacted_processed(crash_id)
except CrashIDNotFound:
return self.deferred_store.get_unredacted_processed(crash_id)
#--------------------------------------------------------------------------
def remove(self, crash_id):
"""delete a crash from storage
parameters:
crash_id - the id of a crash to fetch"""
try:
self.primary_store.remove(crash_id)
except CrashIDNotFound:
self.deferred_store.remove(crash_id)
#--------------------------------------------------------------------------
def new_crashes(self):
"""return an iterator that yields a list of crash_ids of raw crashes
that were added to the file system since the last time this iterator
was requested."""
return self.primary_store.new_crashes()
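#------------------------------------------------------------------------------
# Illustrative sketch, not part of the original module: the default
# 'deferral_criteria' option is an eval-ed lambda string, so routing reduces
# to a predicate call on the raw crash mapping.
def _demo_deferral_criteria():
    criteria = eval('lambda crash: crash.get("legacy_processing")')
    assert criteria({'legacy_processing': 1})  # would go to deferred storage
    assert not criteria({})                    # would go to primary storage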
#==============================================================================
class PrimaryDeferredProcessedStorage(PrimaryDeferredStorage):
"""
PrimaryDeferredProcessedStorage aggregates three methods of storage: it
uses a deferral criteria predicate to decide where to store a raw crash,
like PrimaryDeferredStorage -- but it stores all processed crashes in a
third, separate storage.
"""
required_config = Namespace()
required_config.processed = Namespace()
required_config.processed.add_option(
'storage_class',
doc='storage class for processed storage',
default='',
from_string_converter=class_converter
)
#--------------------------------------------------------------------------
def __init__(self, config, quit_check_callback=None):
super(PrimaryDeferredProcessedStorage, self).__init__(
config,
quit_check_callback
)
self.processed_store = config.processed.storage_class(
config.processed,
quit_check_callback
)
#--------------------------------------------------------------------------
def save_processed(self, processed_crash):
self.processed_store.save_processed(processed_crash)
#--------------------------------------------------------------------------
def get_unredacted_processed(self, crash_id):
"""fetch an unredacted processed crash from the underlying
storage implementation"""
return self.processed_store.get_unredacted_processed(crash_id)
#==============================================================================
class BenchmarkingCrashStorage(CrashStorageBase):
"""a wrapper around crash stores that will benchmark the calls in the logs
"""
required_config = Namespace()
required_config.add_option(
name="benchmark_tag",
doc="a tag to put on logged benchmarking lines",
default='Benchmark',
)
required_config.add_option(
name="wrapped_crashstore",
doc="another crash store to be benchmarked",
default='',
from_string_converter=class_converter
)
#--------------------------------------------------------------------------
def __init__(self, config, quit_check_callback=None):
super(BenchmarkingCrashStorage, self).__init__(
config,
quit_check_callback
)
self.wrapped_crashstore = config.wrapped_crashstore(
config,
quit_check_callback)
self.tag = config.benchmark_tag
self.start_timer = datetime.datetime.now
self.end_timer = datetime.datetime.now
#--------------------------------------------------------------------------
def close(self):
"""some implementations may need explicit closing."""
self.wrapped_crashstore.close()
#--------------------------------------------------------------------------
def save_raw_crash(self, raw_crash, dumps, crash_id):
start_time = self.start_timer()
self.wrapped_crashstore.save_raw_crash(raw_crash, dumps, crash_id)
end_time = self.end_timer()
self.config.logger.debug(
'%s save_raw_crash %s',
self.tag,
end_time - start_time
)
#--------------------------------------------------------------------------
def save_processed(self, processed_crash):
start_time = self.start_timer()
self.wrapped_crashstore.save_processed(processed_crash)
end_time = self.end_timer()
self.config.logger.debug(
'%s save_processed %s',
self.tag,
end_time - start_time
)
#--------------------------------------------------------------------------
def get_raw_crash(self, crash_id):
start_time = self.start_timer()
result = self.wrapped_crashstore.get_raw_crash(crash_id)
end_time = self.end_timer()
self.config.logger.debug(
'%s get_raw_crash %s',
self.tag,
end_time - start_time
)
return result
#--------------------------------------------------------------------------
    def get_raw_dump(self, crash_id, name=None):
        start_time = self.start_timer()
        result = self.wrapped_crashstore.get_raw_dump(crash_id, name)
end_time = self.end_timer()
self.config.logger.debug(
'%s get_raw_dump %s',
self.tag,
end_time - start_time
)
return result
#--------------------------------------------------------------------------
def get_raw_dumps(self, crash_id):
start_time = self.start_timer()
result = self.wrapped_crashstore.get_raw_dumps(crash_id)
end_time = self.end_timer()
self.config.logger.debug(
'%s get_raw_dumps %s',
self.tag,
end_time - start_time
)
return result
#--------------------------------------------------------------------------
def get_raw_dumps_as_files(self, crash_id):
start_time = self.start_timer()
result = self.wrapped_crashstore.get_raw_dumps_as_files(crash_id)
end_time = self.end_timer()
self.config.logger.debug(
'%s get_raw_dumps_as_files %s',
self.tag,
end_time - start_time
)
return result
#--------------------------------------------------------------------------
def get_unredacted_processed(self, crash_id):
start_time = self.start_timer()
result = self.wrapped_crashstore.get_unredacted_processed(crash_id)
end_time = self.end_timer()
self.config.logger.debug(
'%s get_unredacted_processed %s',
self.tag,
end_time - start_time
)
return result
#--------------------------------------------------------------------------
def remove(self, crash_id):
start_time = self.start_timer()
self.wrapped_crashstore.remove(crash_id)
end_time = self.end_timer()
self.config.logger.debug(
'%s remove %s',
self.tag,
end_time - start_time
)
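#------------------------------------------------------------------------------
# Illustrative sketch, not part of the original module: the measure-and-log
# pattern that every BenchmarkingCrashStorage method above follows, reduced to
# a single helper.  The logger name here is made up for illustration.
def _demo_benchmarked_call(fn, *args, **kwargs):
    import logging
    logger = logging.getLogger('Benchmark')
    start_time = datetime.datetime.now()
    result = fn(*args, **kwargs)
    end_time = datetime.datetime.now()
    logger.debug('Benchmark %s %s', fn.__name__, end_time - start_time)
    return result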
|
bsmedberg/socorro
|
socorro/external/crashstorage_base.py
|
Python
|
mpl-2.0
| 40,084
|
from astrodata.eti.pyrafetiparam import PyrafETIParam, IrafStdout
from pyraf import iraf
from astrodata.adutils import logutils
log = logutils.get_logger(__name__)
class GemcombineParam(PyrafETIParam):
"""This class coordinates the ETI parameters as it pertains to the IRAF
task gemcombine directly.
"""
rc = None
adinput = None
key = None
value = None
def __init__(self, rc=None, key=None, value=None):
"""
:param rc: Used to store reduction information
:type rc: ReductionContext
:param key: A parameter name that is added as a dict key in prepare
:type key: any
:param value: A parameter value that is added as a dict value
in prepare
:type value: any
"""
log.debug("GemcombineParam __init__")
PyrafETIParam.__init__(self, rc)
self.adinput = self.rc.get_inputs_as_astrodata()
self.key = key
self.value = value
def nonecheck(self, param=None):
if param is None or param == "None":
param = "none"
return param
def prepare(self):
log.debug("Gemcombine prepare()")
self.paramdict.update({self.key:self.value})
class FlVardq(GemcombineParam):
rc = None
fl_vardq = None
def __init__(self, rc=None):
log.debug("FlVardq __init__")
GemcombineParam.__init__(self, rc)
self.fl_vardq = iraf.no
for ad in self.adinput:
if ad["VAR"]:
self.fl_vardq = iraf.yes
break
def prepare(self):
log.debug("FlVardq prepare()")
self.paramdict.update({"fl_vardq":self.fl_vardq})
class FlDqprop(GemcombineParam):
rc = None
fl_dqprop = None
def __init__(self, rc=None):
log.debug("FlDqprop __init__")
GemcombineParam.__init__(self, rc)
self.fl_dqprop = iraf.no
for ad in self.adinput:
if ad["DQ"]:
self.fl_dqprop = iraf.yes
break
def prepare(self):
log.debug("FlDqprop prepare()")
self.paramdict.update({"fl_dqprop":self.fl_dqprop})
class Masktype(GemcombineParam):
rc = None
masktype = None
def __init__(self, rc=None):
log.debug("Masktype __init__")
GemcombineParam.__init__(self, rc)
if rc["mask"]:
self.masktype = "goodvalue"
else:
self.masktype = "none"
def prepare(self):
log.debug("Masktype prepare()")
self.paramdict.update({"masktype":self.masktype})
class Combine(GemcombineParam):
rc = None
operation = None
def __init__(self, rc=None):
log.debug("Combine __init__")
GemcombineParam.__init__(self, rc)
self.operation = self.nonecheck(rc["operation"])
def prepare(self):
log.debug("Combine prepare()")
self.paramdict.update({"combine":self.operation})
class Nlow(GemcombineParam):
rc = None
nlow = None
def __init__(self, rc=None):
log.debug("Nlow __init__")
GemcombineParam.__init__(self, rc)
self.nlow = self.nonecheck(rc["nlow"])
def prepare(self):
log.debug("Nlow prepare()")
self.paramdict.update({"nlow":self.nlow})
class Nhigh(GemcombineParam):
rc = None
nhigh = None
def __init__(self, rc=None):
log.debug("Nhigh __init__")
GemcombineParam.__init__(self, rc)
self.nhigh = self.nonecheck(rc["nhigh"])
def prepare(self):
log.debug("Nhigh prepare()")
self.paramdict.update({"nhigh":self.nhigh})
class Reject(GemcombineParam):
rc = None
reject_method = None
def __init__(self, rc=None):
log.debug("Reject __init__")
GemcombineParam.__init__(self, rc)
self.reject_method = self.nonecheck(rc["reject_method"])
def prepare(self):
log.debug("Reject prepare()")
self.paramdict.update({"reject":self.reject_method})
hardcoded_params = {'title':'DEFAULT','Stdout':IrafStdout(),'Stderr':IrafStdout()}
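# Illustrative sketch, not part of the original module: how a caller could
# fold the per-parameter prepare() results into one keyword dict for the IRAF
# task.  'param_objects' is a hypothetical list of already-built *Param
# instances, and the sketch assumes each exposes the paramdict attribute
# inherited from PyrafETIParam.
def _demo_collect_params(param_objects):
    paramdict = {}
    for par in param_objects:
        par.prepare()                   # each subclass fills in its own key
        paramdict.update(par.paramdict)
    paramdict.update(hardcoded_params)  # add the fixed title/Stdout/Stderr
    return paramdict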
|
pyrrho314/recipesystem
|
trunk/gempy/gemini/eti/gemcombineparam.py
|
Python
|
mpl-2.0
| 4,082
|
class config_eval_tanh(config_base):
mutable = 3
def eval_tanh(param):
if len(param) == 0:
return ''
try:
v = float(param[0])
except ValueError:
return ''
v = math.tanh(v)
return float_to_str(v)
# TESTS
# IN ['0']
# OUT '0'
# IN ['1']
# OUT_ROUND '0.76159'
# IN ['2']
# OUT_ROUND '0.96403'
|
plepe/pgmapcss
|
pgmapcss/eval/eval_tanh.py
|
Python
|
agpl-3.0
| 343
|
# Copyright 2014-2015 Luc Saffre
# This file is part of Lino Welfare.
#
# Lino Welfare is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Lino Welfare is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Lino Welfare. If not, see
# <http://www.gnu.org/licenses/>.
"""
This module is not actively used.
"""
from lino.api import ad
from django.utils.translation import ugettext_lazy as _
class Plugin(ad.Plugin):
verbose_name = _("Client projects")
def setup_config_menu(self, site, user_type, m):
m = m.add_menu(self.app_label, self.verbose_name)
m.add_action('projects.ProjectTypes')
def setup_explorer_menu(self, site, user_type, m):
m = m.add_menu(self.app_label, self.verbose_name)
m.add_action('projects.Projects')
|
khchine5/lino-welfare
|
lino_welfare/modlib/projects/__init__.py
|
Python
|
agpl-3.0
| 1,237
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
@authors: Sebastián Ortiz V. neoecos@gmail.com
SIIM Server is the web server of SIIM's Framework
Copyright (C) 2013 Infometrika Ltda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import unittest
import hashlib
#gevent
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler
#tinyrpc
from tinyrpc.protocols.jsonrpc import JSONRPCProtocol
from tinyrpc.client import RPCClient, RPCError
from tinyrpc.transports.http import HttpWebSocketClientTransport
#neo4j
from py2neo import neo4j
#flask
from flask import (Flask)
#simplekv
import redis
from simplekv.memory.redisstore import RedisStore
from cid import caliope_server
from cid.utils.DefaultDatabase import DefaultDatabase
class CaliopeServerTestCase(unittest.TestCase):
def setUp(self):
caliope_server.app.config['TESTING'] = True
caliope_server.init_flask_app()
caliope_server.configure_server_and_app("../../conf/test_caliope_server.json")
caliope_server.configure_logger("../../conf/tests_logger.json")
caliope_server.register_modules()
caliope_server.app.storekv = RedisStore(redis.StrictRedis())
self.http_server = WSGIServer((caliope_server.app.config['address'],
caliope_server.app.config['port']),
caliope_server.app,
handler_class=WebSocketHandler) # @IgnorePep8
self.http_server.start()
self.create_default_database()
def tearDown(self):
"""Get rid of the database again after each test."""
if self.rpc_client:
self.rpc_client.transport.close()
self.http_server.stop()
self.http_server = None
caliope_server.app = Flask('caliope_server')
#:Delete database
neo4j.GraphDatabaseService().clear()
def create_default_database(self):
DefaultDatabase().test_defaultUserGroupOne()
def login(self, username, password):
self.rpc_client = RPCClient(JSONRPCProtocol(),
HttpWebSocketClientTransport('ws://localhost:9001/api/ws'))
self.loginManager = self.rpc_client.get_proxy("login.")
hashed_password = hashlib.sha256(password).hexdigest()
return self.loginManager.authenticate(username=username,
password=hashed_password)
def logout(self, uuid):
if self.loginManager is None:
return
return self.loginManager.logout(uuid=uuid)
def test_login(self):
rv = self.login(u'user', u'123')
expected = \
{u'first_name': {u'value': u'User'},
u'last_name': {u'value': u'Test'},
u'image': {u'data': None},
u'user': {u'value': u'user'},
u'login': True,
}
self.assertDictContainsSubset(expected, rv)
self.assertIn("session_uuid", rv)
self.assertIn("user_uuid", rv)
def test_logout(self):
uuid = self.login(u'user', u'123')['user_uuid']['value']
rv = self.logout(uuid=uuid)
self.assertIn('logout', rv)
self.assertTrue(rv['logout'])
self.assertIn('uuid', rv)
self.assertEqual(uuid, rv['uuid'])
def test_accounts_get_public_info(self):
users = [self.login(u'user', u'123')['user_uuid']['value']]
accounts_proxy = self.rpc_client.get_proxy(prefix="accounts.")
info = accounts_proxy.getPublicInfo(users)
assert len(info) == 1
assert 'uuid' in info[0]
for user in users:
info_uuid = info[0]['uuid']['value']
assert user == info_uuid
def test_projects_create(self):
user = self.login(u'user', u'123')
projects_proxy = self.rpc_client.get_proxy(prefix="project.")
model = projects_proxy.getModel()
data = {"name": "PROYECTO 305",
"general_location": "<p><em><strong>ASDASDASD</strong></em><br></p>",
"locality": "suba",
"project_type": "py_gr_escala",
"profit_center": "ASDASDADS",
"areas": [{"tipo": "A1", "valor": "121"}, {"tipo": "A2", "valor": "13"}],
"uuid": model['data']['uuid']['value']
}
        #: TODO Check for real asserts
        try:
            rv = projects_proxy.create(data=data)
        except BaseException:
            self.fail("project.create raised an unexpected exception")
def test_form_find(self):
user = self.login(u'user', u'123')
projects_proxy = self.rpc_client.get_proxy(prefix="project.")
rv = projects_proxy.getAll()
self.assertIsNotNone(rv)
if __name__ == '__main__':
unittest.main()
|
CaliopeProject/CaliopeServer
|
src/test/CaliopeWebSocketAPI_test.py
|
Python
|
agpl-3.0
| 5,425
|
# Copyright (C) 2021 OpenMotics BV
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Para packet handler
"""
from esafe.rfid.idtronic_M890.para.para_packet import ParaPacketType, ParaPacket, CardType
from esafe.rfid.idtronic_M890.para.para_exception import ParaException
import logging
logger = logging.getLogger(__name__)
if False: # MyPy
from typing import Any, Dict, Callable
# @Injectable.named('para_packet_handler')
class ParaPacketHandler(object):
""" Packet handler for the ParaPackets, this can be extended in the future for multiple requests, and for keeping state of the reader"""
# At this point only one kind of packet is implemented, but can be extended in the future
def __init__(self, new_scan_callback):
self.handlers = {
ParaPacketType.AutoListCard.value: self.handle_auto_list
} # type: Dict[ParaPacketType.value, Callable[[ParaPacket], Any]]
self.new_scan_callback = new_scan_callback
def handle_packet(self, para_packet):
# type: (ParaPacket) -> Any
packet_type = para_packet.header.command_type
if packet_type in self.handlers:
handler = self.handlers[packet_type]
return handler(para_packet)
return None
def handle_auto_list(self, para_packet):
# type: (ParaPacket) -> Any
_ = self
# Ignore the empty list packets
if not para_packet.data:
return
# General scan parameters
card_type = para_packet.data[0]
# scan_period = para_packet.data[1]
# scanned_antenna = para_packet.data[2]
# notice_type = para_packet.data[3]
# reserved_future_use = para_packet.data[4]
if card_type in [CardType.ISO14443A.value, CardType.ISO14443B.value]:
# auto reporting format for ISO14443 cards
# ATQL = para_packet.data[5]
# ATQH = para_packet.data[6]
# SAK = para_packet.data[7]
uuid_length = para_packet.data[8]
logger.debug('Detected new ISO14443 card scan: Card type: {}, uuid_length: {}'.format(card_type, uuid_length))
scanned_uuid = para_packet.data[-uuid_length:]
elif card_type == CardType.ISO15693.value:
scanned_uuid = para_packet.data[5:]
else:
raise ParaException('Cannot handle auto list packet: Cannot detect card type')
scanned_uuid_str = ''.join('{:02X}'.format(x) for x in scanned_uuid)
self.new_scan_callback(scanned_uuid_str)
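# Illustrative sketch, not part of the original module: the byte layout that
# handle_auto_list() decodes for an ISO14443 auto-list payload.  The bytes
# below are made up for illustration.
def _demo_auto_list_payload():
    # [card_type, scan_period, antenna, notice_type, rfu,
    #  ATQL, ATQH, SAK, uuid_length, <uuid bytes>]
    data = [CardType.ISO14443A.value, 1, 0, 0, 0,
            0x44, 0x00, 0x08, 4,
            0xDE, 0xAD, 0xBE, 0xEF]
    uuid_length = data[8]
    scanned_uuid = data[-uuid_length:]       # the last uuid_length bytes
    return ''.join('{:02X}'.format(x) for x in scanned_uuid)  # 'DEADBEEF'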
|
openmotics/gateway
|
src/esafe/rfid/idtronic_M890/para/para_handler.py
|
Python
|
agpl-3.0
| 3,126
|
#!/usr/bin/env python
# connectionmon.py - Views your network connections.
# Copyright (C) 2013 Mark Wingerd <markwingerd@yahoo.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys, os
import xml.etree.ElementTree as ET
import math
from char import Character
from util import XmlRetrieval
class Module:
def __init__(self, skills, mod_name):
self.stats = {}
self.name = mod_name
self.skills = skills
module_data = XmlRetrieval('module.xml')
properties, effecting_skills = module_data.get_target(mod_name)
self._add_stats(properties,effecting_skills)
def show_stats(self):
print self.name
for key in self.stats:
print '{:<20} {:<15}'.format(key, self.stats[key])
def get(self, stat):
if stat in self.stats:
return self.stats[stat]
else:
return None
def _add_stats(self, properties, effecting_skills):
""" Uses the properties list and effecting_skills list to populate
the stats dictionary with all appropriate values.
THIS ALSO HANDLES SKILL BONUSES! """
def _get_skill_modifier(skill_list):
output = 0
for name in skill_list:
try:
output = output + self.skills[name]
except KeyError:
# Skill is not in self.skills
pass
return output
for key in properties:
if key in effecting_skills.keys():
# Skills effect this property. Get and apply the skill modifier.
skill_list = effecting_skills[key]
mod = _get_skill_modifier(skill_list)
self.stats[key] = math.floor(properties[key] * (1 + mod))
else:
self.stats[key] = properties[key]
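# Illustrative sketch, not part of the original module: the skill-bonus
# arithmetic applied in Module._add_stats.  The skill names and numbers are
# made up for illustration.
def _demo_skill_modifier():
    properties = {'cpu': 10.0}
    skills = {'Circuitry': 0.05, 'Engineering': 0.05}  # hypothetical skills
    mod = sum(skills.values())  # total modifier of 0.10
    # floor(10.0 * (1 + 0.10)) == 11.0, matching the formula in _add_stats
    return math.floor(properties['cpu'] * (1 + mod))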
class Weapon(Module):
""" Req dropsuit and fitting to test properly. """
def __init__(self, skills, weapon_name, module_list=[]):
self.stats = {}
self.name = weapon_name
self.skills = skills
self.module_list = module_list
weapon_data = XmlRetrieval('weapon.xml')
properties, effecting_skills = weapon_data.get_target(weapon_name)
self._add_stats(properties,effecting_skills)
self._add_module_bonus()
def _add_module_bonus(self):
""" Searching self.module_list for any modules which effect this
weapons slot type (found in the modules 'enhances' cell. If so, it will
add the bonuses for that module.
CURRENTLY ONLY WORKS FOR DAMAGE!!! """
slot_type = self.stats['slot_type']
for m in self.module_list:
try:
if slot_type == m.stats['enhances']:
self.stats['damage'] = self.stats['damage'] * (1 + m.stats['damage'])
except KeyError:
# Module does not have a key 'enhances' in its stats dictionary.
pass
class ModuleLibrary:
def __init__(self):
self.module_data = XmlRetrieval('module.xml')
self.names = self.module_data.get_list()
def get_names(self):
""" Returns module names as a tuple. """
return tuple(self.names)
def get_parents(self):
return self.module_data.get_parents()
def get_children(self, parent):
return self.module_data.get_children(parent)
class WeaponLibrary(ModuleLibrary):
def __init__(self):
self.weapon_data = XmlRetrieval('weapon.xml')
self.names = self.weapon_data.get_list()
def get_parents(self):
return self.weapon_data.get_parents()
def get_children(self, parent):
return self.weapon_data.get_children(parent)
if __name__ == '__main__':
modlib = ModuleLibrary()
print modlib.get_parents()
print modlib.get_children('shields')
|
markwingerd/dft_old
|
module.py
|
Python
|
agpl-3.0
| 3,891
|
# -*- coding: utf-8 -*-
#
# 2020-09-21 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add possibility of multiple questions and answers
# http://www.privacyidea.org
# 2015-12-16 Initial writeup.
# Cornelius Kölbel <cornelius@privacyidea.org>
#
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__doc__ = """The questionnaire token is a challenge response token.
The user can define a set of answers to questions. Within the challenge the
user is asked one of these questions and can respond with the corresponding
answer.
"""
from privacyidea.api.lib.utils import getParam
from privacyidea.lib.config import get_from_config
from privacyidea.lib.tokenclass import TokenClass
from privacyidea.lib.log import log_with
from privacyidea.lib.error import TokenAdminError
import logging
from privacyidea.models import Challenge
from privacyidea.lib.challenge import get_challenges
from privacyidea.lib import _
from privacyidea.lib.decorators import check_token_locked
from privacyidea.lib.policy import SCOPE, ACTION, GROUP, get_action_values_from_options
from privacyidea.lib.crypto import safe_compare
import random
import json
import datetime
log = logging.getLogger(__name__)
optional = True
required = False
DEFAULT_NUM_ANSWERS = 5
class QUESTACTION(object):
NUM_QUESTIONS = "number"
class QuestionnaireTokenClass(TokenClass):
"""
This is a Questionnaire Token. The token stores a list of questions and
answers in the tokeninfo database table. The answers are encrypted.
During authentication a random answer is selected and presented as
challenge.
The user has to remember and pass the right answer.
"""
@staticmethod
def get_class_type():
"""
Returns the internal token type identifier
        :return: question
:rtype: basestring
"""
return "question"
@staticmethod
def get_class_prefix():
"""
Return the prefix, that is used as a prefix for the serial numbers.
:return: QUST
:rtype: basestring
"""
return "QUST"
@classmethod
@log_with(log)
def get_class_info(cls, key=None, ret='all'):
"""
returns a subtree of the token definition
:param key: subsection identifier
:type key: string
:param ret: default return value, if nothing is found
:type ret: user defined
:return: subsection if key exists or user defined
:rtype: dict or scalar
"""
res = {'type': cls.get_class_type(),
'title': 'Questionnaire Token',
'description': _('Questionnaire: Enroll Questions for the '
'user.'),
'init': {},
'config': {},
'user': ['enroll'],
# This tokentype is enrollable in the UI for...
'ui_enroll': ["admin", "user"],
'policy': {
SCOPE.AUTH: {
QUESTACTION.NUM_QUESTIONS: {
'type': 'int',
'desc': _("The user has to answer this number of questions during authentication."),
'group': GROUP.TOKEN,
'value': list(range(1, 31))
}
},
SCOPE.ENROLL: {
ACTION.MAXTOKENUSER: {
'type': 'int',
'desc': _("The user may only have this maximum number of questionaire tokens assigned."),
'group': GROUP.TOKEN
},
ACTION.MAXACTIVETOKENUSER: {
'type': 'int',
'desc': _("The user may only have this maximum number of active questionaire tokens assigned."),
'group': GROUP.TOKEN
}
}
},
}
if key:
ret = res.get(key, {})
else:
if ret == 'all':
ret = res
return ret
@log_with(log)
def __init__(self, db_token):
"""
Create a new QUST Token object from a database token
:param db_token: instance of the orm db object
:type db_token: DB object
"""
TokenClass.__init__(self, db_token)
self.set_type(self.get_class_type())
self.hKeyRequired = False
def update(self, param):
"""
This method is called during the initialization process.
:param param: parameters from the token init
:type param: dict
:return: None
"""
j_questions = getParam(param, "questions", required)
try:
# If we have a string, we load the json format
questions = json.loads(j_questions)
except TypeError:
# Obviously we have a dict...
questions = j_questions
num_answers = get_from_config("question.num_answers",
DEFAULT_NUM_ANSWERS)
if len(questions) < int(num_answers):
raise TokenAdminError(_("You need to provide at least %s "
"answers.") % num_answers)
# Save all questions and answers and encrypt them
for question, answer in questions.items():
self.add_tokeninfo(question, answer, value_type="password")
TokenClass.update(self, param)
def is_challenge_request(self, passw, user=None, options=None):
"""
The questionnaire token is always a challenge response token.
The challenge is triggered by providing the PIN as the password.
:param passw: password, which might be pin or pin+otp
:type passw: string
:param user: The user from the authentication request
:type user: User object
:param options: dictionary of additional request parameters
:type options: dict
:return: true or false
:rtype: bool
"""
        options = options or {}
        pin_match = self.check_pin(passw, user=user, options=options)
        return pin_match
def create_challenge(self, transactionid=None, options=None):
"""
This method creates a challenge, which is submitted to the user.
The submitted challenge will be preserved in the challenge
database.
The challenge is a randomly selected question of the available
questions for this token.
If no transaction id is given, the system will create a transaction
id and return it, so that the response can refer to this transaction.
:param transactionid: the id of this challenge
:param options: the request context parameters / data
:type options: dict
:return: tuple of (bool, message, transactionid, reply_dict)
:rtype: tuple
The return tuple builds up like this:
``bool`` if submit was successful;
``message`` which is displayed in the JSON response;
additional challenge ``reply_dict``, which are displayed in the JSON challenges response.
"""
options = options or {}
questions = {}
# Get an integer list of the already used questions
used_questions = [int(x) for x in options.get("data", "").split(",") if options.get("data")]
# Fill the questions of the token
for tinfo in self.token.info_list:
if tinfo.Type == "password":
# Append a tuple of the DB Id and the actual question
questions[tinfo.id] = tinfo.Key
# if all questions are used up, make a new round
if len(questions) == len(used_questions):
log.info(u"User has only {0!s} questions in his token. Reusing questions now.".format(len(questions)))
used_questions = []
# Reduce the allowed questions
remaining_questions = {k: v for (k, v) in questions.items() if k not in used_questions}
message_id = random.choice(list(remaining_questions))
message = remaining_questions[message_id]
used_questions = (options.get("data", "") + ",{0!s}".format(message_id)).strip(",")
validity = int(get_from_config('DefaultChallengeValidityTime', 120))
tokentype = self.get_tokentype().lower()
# Maybe there is a QUESTIONChallengeValidityTime...
lookup_for = tokentype.capitalize() + 'ChallengeValidityTime'
validity = int(get_from_config(lookup_for, validity))
# Create the challenge in the database
db_challenge = Challenge(self.token.serial,
transaction_id=transactionid,
data=used_questions,
session=options.get("session"),
challenge=message,
validitytime=validity)
db_challenge.save()
expiry_date = datetime.datetime.now() + \
datetime.timedelta(seconds=validity)
reply_dict = {'attributes': {'valid_until': "{0!s}".format(expiry_date)}}
return True, message, db_challenge.transaction_id, reply_dict
def check_answer(self, given_answer, challenge_object):
"""
Check if the given answer is the answer to the sent question.
The question for this challenge response was stored in the
challenge_object.
Then we get the answer from the tokeninfo.
:param given_answer: The answer given by the user
:param challenge_object: The challenge object as stored in the database
:return: in case of success: 1
"""
res = -1
question = challenge_object.challenge
answer = self.get_tokeninfo(question)
# We need to compare two unicode strings
if safe_compare(answer, given_answer):
res = 1
else:
log.debug("The answer for token {0!s} does not match.".format(
self.get_serial()))
return res
@check_token_locked
def check_challenge_response(self, user=None, passw=None, options=None):
"""
This method verifies if there is a matching question for the given
passw and also verifies if the answer is correct.
        It then returns the otp_counter = 1
:param user: the requesting user
:type user: User object
:param passw: the password - in fact it is the answer to the question
:type passw: string
:param options: additional arguments from the request, which could
be token specific. Usually "transaction_id"
:type options: dict
:return: return 1 if the answer to the question is correct, -1 otherwise.
:rtype: int
"""
options = options or {}
r_success = -1
# fetch the transaction_id
transaction_id = options.get('transaction_id')
if transaction_id is None:
transaction_id = options.get('state')
# get the challenges for this transaction ID
if transaction_id is not None:
challengeobject_list = get_challenges(serial=self.token.serial,
transaction_id=transaction_id)
for challengeobject in challengeobject_list:
if challengeobject.is_valid():
# challenge is still valid
if self.check_answer(passw, challengeobject) > 0:
r_success = 1
# Set valid OTP to true. We must not delete the challenge now,
                    # Since we need it for further multichallenges
challengeobject.set_otp_status(True)
log.debug("The presented answer was correct.")
break
else:
# increase the received_count
challengeobject.set_otp_status()
self.challenge_janitor()
return r_success
@log_with(log)
def has_further_challenge(self, options=None):
"""
Check if there are still more questions to be asked.
:param options: Options dict
:return: True, if further challenge is required.
"""
transaction_id = options.get('transaction_id')
challengeobject_list = get_challenges(serial=self.token.serial,
transaction_id=transaction_id)
question_number = int(get_action_values_from_options(SCOPE.AUTH,
"{0!s}_{1!s}".format(self.get_class_type(),
QUESTACTION.NUM_QUESTIONS),
options) or 1)
if len(challengeobject_list) == 1:
session = int(challengeobject_list[0].session or "0") + 1
options["session"] = u"{0!s}".format(session)
# write the used questions to the data field
options["data"] = challengeobject_list[0].data or ""
if session < question_number:
return True
return False
@staticmethod
def get_setting_type(key):
"""
The setting type of questions is public, so that the user can also
read the questions.
:param key: The key of the setting
:return: "public" string
"""
if key.startswith("question.question."):
return "public"
|
privacyidea/privacyidea
|
privacyidea/lib/tokens/questionnairetoken.py
|
Python
|
agpl-3.0
| 14,293
|
# Generated by Django 2.2.24 on 2021-10-28 19:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('edx_proctoring', '0019_proctoredexamsoftwaresecurereview_encrypted_video_url'),
]
operations = [
migrations.AlterField(
model_name='proctoredexamsoftwaresecurereview',
name='video_url',
field=models.TextField(null=True),
),
migrations.AlterField(
model_name='proctoredexamsoftwaresecurereviewhistory',
name='video_url',
field=models.TextField(null=True),
),
]
|
edx/edx-proctoring
|
edx_proctoring/migrations/0020_auto_20211028_1915.py
|
Python
|
agpl-3.0
| 641
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# Thinkopen Brasil
# Copyright (C) Thinkopen Solutions Brasil (<http://www.tkobr.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'tko_partner_department_function_as_m2o',
'version': '0.004',
'category': 'Customizations',
'sequence': 38,
'complexity': 'normal',
'description': '''This module converts function to m2o
type and adds department for partners
''',
'author': 'ThinkOpen Solutions Brasil',
'website': 'http://www.tkobr.com',
'images': ['images/oerp61.jpeg',
],
'depends': [
'tko_partner_department_function',
'br_base',
],
'data': [
'res_partner_view.xml',
],
'init': [],
'demo': [],
'update': [],
'test': [], # YAML files with tests
'installable': True,
'application': False,
# If it's True, the modules will be auto-installed when all dependencies are installed
'auto_install': False,
'certificate': '',
}
|
thinkopensolutions/tkobr-addons
|
tko_partner_department_function_as_m2o/__manifest__.py
|
Python
|
agpl-3.0
| 1,918
|
import xarray as xr
from tikon.central.módulo import Módulo
from tikon.central.simul import SimulMódulo
from tikon.móds.clima.res import ResultadoClima
from tikon.utils import EJE_PARC, EJE_TIEMPO, EJE_COORD
from تقدیر.مقام import مقام, ذرائع_بنانا
class SimulClima(SimulMódulo):
def __init__(símismo, mód, simul_exper, ecs, vars_interés):
centroides = simul_exper.exper.controles['centroides']
elev = simul_exper.exper.controles['elevaciones']
parcelas = simul_exper.exper.controles['parcelas']
eje_t = simul_exper.t.eje
t_inic, t_final = eje_t[0], eje_t[-1]
variables = mód.variables
d_datos = {
prc: مقام(
centroides.loc[{EJE_PARC: prc, EJE_COORD: 'lat'}].matr[0],
centroides.loc[{EJE_PARC: prc, EJE_COORD: 'lon'}].matr[0],
elev.loc[{EJE_PARC: prc}].matr[0]
).کوائف_پانا(
سے=t_inic, تک=t_final, ذرائع=mód.fuentes, خاکے=mód.escenario
).روزانہ().loc[eje_t[0]:eje_t[-1]]
for prc in parcelas
}
símismo.datos = xr.Dataset({
res: xr.DataArray(
[d_datos[prc][res if res in d_datos[prc] else _vr_tikon_a_taqdir(res)] for prc in parcelas],
coords={EJE_PARC: parcelas, EJE_TIEMPO: eje_t},
dims=[EJE_PARC, EJE_TIEMPO]
) for res in variables
})
if all(x in símismo.datos.data_vars for x in ('temp_prom', 'temp_máx', 'temp_mín')):
símismo.datos['temp_prom'] = símismo.datos['temp_prom'].fillna(
(símismo.datos['temp_máx'] + símismo.datos['temp_mín']) / 2
)
for vr in símismo.datos.data_vars:
if símismo.datos[vr].isnull().any():
                raise ValueError('Missing data in {vr}.'.format(vr=vr))
super().__init__(Clima, simul_exper, ecs=ecs, vars_interés=vars_interés)
@property
def resultados(símismo):
l_res = []
for var in símismo.datos: # type: str
cls_res = type(var, (ResultadoClima,), {'nombre': var, 'unids': lambda: None})
l_res.append(cls_res)
return l_res
def requísitos(símismo, controles=False):
if controles:
return {'centroides', 'elevaciones'}
def incrementar(símismo, paso, f):
super().incrementar(paso, f)
diarios = símismo.datos.loc[{EJE_TIEMPO: f}].drop_vars(EJE_TIEMPO)
for res in símismo:
símismo[res].poner_valor(diarios[str(res)])
class Clima(Módulo):
nombre = 'clima'
cls_simul = SimulClima
def __init__(símismo, fuentes=None, variables=None, escenario=8.5):
símismo.fuentes = ذرائع_بنانا(fuentes)
símismo.escenario = escenario
símismo.variables = variables or list(set(vr for fnt in fuentes for vr in fnt.متغیرات))
super().__init__()
def _vr_tikon_a_taqdir(vr):
if vr in _conv_vars_taqdir:
return _conv_vars_taqdir[vr]
return vr
_conv_vars_taqdir = {
'precip': 'بارش',
'rad_solar': 'شمسی_تابکاری',
'temp_máx': 'درجہ_حرارت_زیادہ',
'temp_mín': 'درجہ_حرارت_کم',
'temp_prom': 'درجہ_حرارت_اوسط',
'fecha': 'تاریخ'
}
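# e.g. _vr_tikon_a_taqdir('precip') maps the Tikon name to taqdir's 'بارش';
# names missing from _conv_vars_taqdir pass through unchanged.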
|
julienmalard/Tikon
|
tikon/móds/clima/clima.py
|
Python
|
agpl-3.0
| 3,365
|
from .matrix_server import MatrixServer, MatrixClient
from .matrix_graphics import MatrixGraphics
from .matrix_controller import MatrixController, MatrixError
|
Mezgrman/ProjectANNAX
|
python/annax/__init__.py
|
Python
|
agpl-3.0
| 158
|
"""
Edit Subsection page in Studio
"""
from bok_choy.page_object import PageObject
class SubsectionPage(PageObject):
"""
Edit Subsection page in Studio
"""
name = "studio.subsection"
def url(self):
raise NotImplementedError
def is_browser_on_page(self):
return self.is_css_present('body.view-subsection')
|
pelikanchik/edx-platform
|
common/test/acceptance/edxapp_pages/studio/edit_subsection.py
|
Python
|
agpl-3.0
| 351
|
#!/usr/bin/env python
import os, sys, shutil, datetime
from fabric.api import run, cd, local, get, settings, lcd, put
from fabric_ssh_config import getSSHInfoForHost
from fabric.context_managers import shell_env
from fabric.utils import abort
username='test'
builddir = "/tmp/" + username + "Kits/buildtemp"
version = "UNKNOWN"
nativelibdir = "/nativelibs/obj" # ~test/libs/... usually
defaultlicensedays = 45 #default trial license length
################################################
# CHECKOUT CODE INTO A TEMP DIR
################################################
def checkoutCode(voltdbGit, proGit, rbmqExportGit):
    global builddir
# clean out the existing dir
run("rm -rf " + builddir)
# make the build dir again
run("mkdir -p " + builddir)
# change to it
with cd(builddir):
# do the checkouts, collect checkout errors on both community &
# pro repos so user gets status on both checkouts
message = ""
run("git clone git@github.com:VoltDB/voltdb.git")
result = run("cd voltdb; git checkout %s" % voltdbGit, warn_only=True)
if result.failed:
message = "VoltDB checkout failed. Missing branch %s." % rbmqExportGit
run("git clone git@github.com:VoltDB/pro.git")
result = run("cd pro; git checkout %s" % proGit, warn_only=True)
if result.failed:
message += "\nPro checkout failed. Missing branch %s." % rbmqExportGit
run("git clone git@github.com:VoltDB/export-rabbitmq.git")
result = run("cd export-rabbitmq; git checkout %s" % rbmqExportGit, warn_only=True)
# Probably ok to use master for export-rabbitmq.
if result.failed:
print "\nExport-rabbitmg branch %s checkout failed. Defaulting to master." % rbmqExportGit
if len(message) > 0:
abort(message)
return run("cat voltdb/version.txt").strip()
################################################
# MAKE A RELEASE DIR
################################################
def makeReleaseDir(releaseDir):
# handle the case where a release dir exists for this version
if os.path.exists(releaseDir):
shutil.rmtree(releaseDir)
# create a release dir
os.makedirs(releaseDir)
print "Created dir: " + releaseDir
################################################
# SEE IF HAS ZIP TARGET
###############################################
def versionHasZipTarget():
with settings(warn_only=True):
with cd(os.path.join(builddir,'pro')):
return run("ant -p -f mmt.xml | grep dist.pro.zip")
################################################
# BUILD THE COMMUNITY VERSION
################################################
def buildCommunity():
with cd(builddir + "/voltdb"):
run("pwd")
run("git status")
run("git describe --dirty")
run("ant -Djmemcheck=NO_MEMCHECK -Dkitbuild=true %s clean default dist" % build_args)
################################################
# BUILD THE ENTERPRISE VERSION
################################################
def buildPro():
with cd(builddir + "/pro"):
run("pwd")
run("git status")
run("git describe --dirty")
run("VOLTCORE=../voltdb ant -f mmt.xml -Djmemcheck=NO_MEMCHECK -Dallowreplication=true -DallowDrActiveActive=true -Dlicensedays=%d -Dkitbuild=true %s clean dist.pro" % (defaultlicensedays, build_args))
################################################
# BUILD THE RABBITMQ EXPORT CONNECTOR
################################################
def buildRabbitMQExport(version):
with cd(builddir + "/export-rabbitmq"):
run("pwd")
run("git status")
run("git describe --dirty", warn_only=True)
run("VOLTDIST=../pro/obj/pro/voltdb-ent-%s ant" % version)
# Repackage the pro tarball and zip file with the RabbitMQ connector Jar
with cd("%s/pro/obj/pro" % builddir):
run("pwd")
run("gunzip voltdb-ent-%s.tar.gz" % version)
run("tar uvf voltdb-ent-%s.tar voltdb-ent-%s/lib/extension/voltdb-rabbitmq.jar" % (version, version))
if versionHasZipTarget():
run("gzip voltdb-ent-%s.tar" % version)
run("zip -r voltdb-ent-%s.zip voltdb-ent-%s" % (version, version))
################################################
# MAKE AN ENTERPRISE TRIAL LICENSE
################################################
# Must be called after buildPro has been done
def makeTrialLicense(days=30):
with cd(builddir + "/pro/tools"):
run("./make_trial_licenses.pl -t %d -W" % (days))
################################################
# MAKE AN ENTERPRISE ZIP FILE FOR SOME PARTNER UPLOAD SITES
################################################
def makeEnterpriseZip():
with cd(builddir + "/pro"):
run("VOLTCORE=../voltdb ant -f mmt.xml dist.pro.zip")
################################################
# MAKE AN JAR FILES NEEDED TO PUSH TO MAVEN
################################################
def makeMavenJars():
with cd(builddir + "/voltdb"):
run("VOLTCORE=../voltdb ant -f build-client.xml maven-jars")
################################################
# COPY FILES
################################################
def copyCommunityFilesToReleaseDir(releaseDir, version, operatingsys):
get("%s/voltdb/obj/release/voltdb-%s.tar.gz" % (builddir, version),
"%s/voltdb-%s.tar.gz" % (releaseDir, version))
get("%s/voltdb/obj/release/voltdb-client-java-%s.tar.gz" % (builddir, version),
"%s/voltdb-client-java-%s.tar.gz" % (releaseDir, version))
get("%s/voltdb/obj/release/voltdb-tools-%s.tar.gz" % (builddir, version),
"%s/voltdb-tools-%s.tar.gz" % (releaseDir, version))
# add stripped symbols
if operatingsys == "LINUX":
os.makedirs(releaseDir + "/other")
get("%s/voltdb/obj/release/voltdb-%s.sym" % (builddir, version),
"%s/other/%s-voltdb-voltkv-%s.sym" % (releaseDir, operatingsys, version))
def copyEnterpriseFilesToReleaseDir(releaseDir, version, operatingsys):
get("%s/pro/obj/pro/voltdb-ent-%s.tar.gz" % (builddir, version),
"%s/voltdb-ent-%s.tar.gz" % (releaseDir, version))
def copyTrialLicenseToReleaseDir(releaseDir):
get("%s/pro/trial_*.xml" % (builddir),
"%s/license.xml" % (releaseDir))
def copyEnterpriseZipToReleaseDir(releaseDir, version, operatingsys):
get("%s/pro/obj/pro/voltdb-ent-%s.zip" % (builddir, version),
"%s/voltdb-ent-%s.zip" % (releaseDir, version))
def copyMavenJarsToReleaseDir(releaseDir, version):
#The .jars and upload file must be in a directory called voltdb - it is the projectname
mavenProjectDir = releaseDir + "/mavenjars/voltdb"
if not os.path.exists(mavenProjectDir):
os.makedirs(mavenProjectDir)
#Get the voltdbclient-n.n.jar from the recently built community build
get("%s/voltdb/obj/release/dist-client-java/voltdb/voltdbclient-%s.jar" % (builddir, version),
"%s/voltdbclient-%s.jar" % (mavenProjectDir, version))
#Get the upload.gradle file
get("%s/voltdb/tools/kit_tools/upload.gradle" % (builddir),
"%s/upload.gradle" % (mavenProjectDir))
#Get the src and javadoc .jar files
get("%s/voltdb/obj/release/voltdbclient-%s-javadoc.jar" % (builddir, version),
"%s/voltdbclient-%s-javadoc.jar" % (mavenProjectDir, version))
get("%s/voltdb/obj/release/voltdbclient-%s-sources.jar" % (builddir, version),
"%s/voltdbclient-%s-sources.jar" % (mavenProjectDir, version))
################################################
# COMPUTE CHECKSUMS
################################################
def computeChecksums(releaseDir):
md5cmd = "md5sum"
sha1cmd = "sha1sum"
if os.uname()[0] == "Darwin":
md5cmd = "md5 -r"
sha1cmd = "shasum -a 1"
with lcd(releaseDir):
local('echo "CRC checksums:" > checksums.txt')
local('echo "" >> checksums.txt')
local('cksum *.*z* >> checksums.txt')
local('echo "MD5 checksums:" >> checksums.txt')
local('echo "" >> checksums.txt')
local('%s *.*z* >> checksums.txt' % md5cmd)
local('echo "SHA1 checksums:" >> checksums.txt')
local('echo "" >> checksums.txt')
local('%s *.*z* >> checksums.txt' % sha1cmd)
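# checksums.txt then holds three labelled blocks (layout illustrative): CRC,
# MD5 and SHA1 digests, each computed over the *.*z* kit archives in releaseDir.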
################################################
# CREATE CANDIDATE SYMLINKS
################################################
def createCandidateSysmlink(releaseDir):
    candidateDir = os.getenv('HOME') + "/releases/candidate"
local("rm -rf " + candidateDir)
local("ln -s %s %s" % (releaseDir, candidateDir))
################################################
# BACKUP RELEASE DIR
################################################
def backupReleaseDir(releaseDir,archiveDir,version):
if not os.path.exists(archiveDir):
os.makedirs(archiveDir)
    # make a backup with the timestamp of the build
timestamp = datetime.datetime.now().strftime("%y%m%d-%H%M%S")
local("tar -czf %s/%s-%s.tgz %s" \
% (archiveDir, version, timestamp, releaseDir))
################################################
# REMOVE NATIVE LIBS FROM SHARED DIRECTORY
################################################
def rmNativeLibs():
# local("ls -l ~" + username + nativelibdir)
local("rm -rf ~" + username + nativelibdir)
################################################
# GET THE GIT TAGS OR SHAS TO BUILD FROM
################################################
if (len(sys.argv) > 3 or (len(sys.argv) == 2 and sys.argv[1] == "-h")):
print "usage:"
print " build-kit.py"
print " build-kit.py git-tag"
print " build-kit.py voltdb-git-SHA pro-git-SHA"
proTreeish = "master"
voltdbTreeish = "master"
rbmqExportTreeish = "master"
# pass -o if you want the build put in the one-offs directory
# passing different voltdb and pro trees also forces one-off
if '-o' in sys.argv:
oneOff = True
sys.argv.remove('-o')
else:
oneOff = False
if len(sys.argv) == 2:
createCandidate = False
proTreeish = sys.argv[1]
voltdbTreeish = sys.argv[1]
rbmqExportTreeish = sys.argv[1]
if len(sys.argv) == 3:
createCandidate = False
voltdbTreeish = sys.argv[1]
proTreeish = sys.argv[2]
rbmqExportTreeish = sys.argv[2]
if voltdbTreeish != proTreeish:
oneOff = True #force oneoff when not same tag/branch
rmNativeLibs()
try:
build_args = os.environ['VOLTDB_BUILD_ARGS']
except:
build_args=""
print "Building with pro: %s and voltdb: %s" % (proTreeish, voltdbTreeish)
build_errors=False
versionCentos = "unknown"
versionMac = "unknown"
releaseDir = "unknown"
# get ssh config [key_filename, hostname]
CentosSSHInfo = getSSHInfoForHost("volt15a")
MacSSHInfo = getSSHInfoForHost("voltmini")
UbuntuSSHInfo = getSSHInfoForHost("volt12d")
# build kits on the mini
try:
with settings(user=username,host_string=MacSSHInfo[1],disable_known_hosts=True,key_filename=MacSSHInfo[0]):
versionMac = checkoutCode(voltdbTreeish, proTreeish, rbmqExportTreeish)
buildCommunity()
except Exception as e:
print "Could not build MAC kit. Exception: " + str(e) + ", Type: " + str(type(e))
build_errors=True
# build kits on 5f
try:
with settings(user=username,host_string=CentosSSHInfo[1],disable_known_hosts=True,key_filename=CentosSSHInfo[0]):
versionCentos = checkoutCode(voltdbTreeish, proTreeish, rbmqExportTreeish)
assert versionCentos == versionMac
if oneOff:
releaseDir = "%s/releases/one-offs/%s-%s-%s" % \
(os.getenv('HOME'), versionCentos, voltdbTreeish, proTreeish)
else:
releaseDir = os.getenv('HOME') + "/releases/" + voltdbTreeish
makeReleaseDir(releaseDir)
print "VERSION: " + versionCentos
buildCommunity()
copyCommunityFilesToReleaseDir(releaseDir, versionCentos, "LINUX")
buildPro()
buildRabbitMQExport(versionCentos)
copyEnterpriseFilesToReleaseDir(releaseDir, versionCentos, "LINUX")
makeTrialLicense()
copyTrialLicenseToReleaseDir(releaseDir)
if versionHasZipTarget():
makeEnterpriseZip()
copyEnterpriseZipToReleaseDir(releaseDir, versionCentos, "LINUX")
makeMavenJars()
copyMavenJarsToReleaseDir(releaseDir, versionCentos)
except Exception as e:
print "Could not build LINUX kit. Exception: " + str(e) + ", Type: " + str(type(e))
build_errors=True
# build debian kit
try:
with settings(user=username,host_string=UbuntuSSHInfo[1],disable_known_hosts=True,key_filename=UbuntuSSHInfo[0]):
debbuilddir = "%s/deb_build/" % builddir
run("rm -rf " + debbuilddir)
run("mkdir -p " + debbuilddir)
with cd(debbuilddir):
put ("tools/voltdb-install.py",".")
commbld = "voltdb-%s.tar.gz" % (versionCentos)
put("%s/%s" % (releaseDir, commbld),".")
run ("sudo python voltdb-install.py -D " + commbld)
get("voltdb_%s-1_amd64.deb" % (versionCentos), releaseDir)
entbld = "voltdb-ent-%s.tar.gz" % (versionCentos)
put("%s/%s" % (releaseDir, entbld),".")
run ("sudo python voltdb-install.py -D " + entbld)
get("voltdb-ent_%s-1_amd64.deb" % (versionCentos), releaseDir)
except Exception as e:
print "Could not build debian kit. Exception: " + str(e) + ", Type: " + str(type(e))
build_errors=True
try:
# build rpm kit
with settings(user=username,host_string=CentosSSHInfo[1],disable_known_hosts=True,key_filename=CentosSSHInfo[0]):
rpmbuilddir = "%s/rpm_build/" % builddir
run("rm -rf " + rpmbuilddir)
run("mkdir -p " + rpmbuilddir)
with cd(rpmbuilddir):
put ("tools/voltdb-install.py",".")
commbld = "voltdb-%s.tar.gz" % (versionCentos)
put("%s/%s" % (releaseDir, commbld),".")
run ("python2.6 voltdb-install.py -R " + commbld)
get("voltdb-%s-1.x86_64.rpm" % (versionCentos), releaseDir)
entbld = "voltdb-ent-%s.tar.gz" % (versionCentos)
put("%s/%s" % (releaseDir, entbld),".")
run ("python2.6 voltdb-install.py -R " + entbld)
get("voltdb-ent-%s-1.x86_64.rpm" % (versionCentos), releaseDir)
except Exception as e:
print "Could not build rpm kit. Exception: " + str(e) + ", Type: " + str(type(e))
build_errors=True
computeChecksums(releaseDir)
rmNativeLibs() # cleanup imported native libs so not picked up unexpectedly by other builds
exit (build_errors)
#archiveDir = os.path.join(os.getenv('HOME'), "releases", "archive", voltdbTreeish, versionCentos)
#backupReleaseDir(releaseDir, archiveDir, versionCentos)
|
paulmartel/voltdb
|
tools/kit_tools/build_kits.py
|
Python
|
agpl-3.0
| 14,723
|
from collections import defaultdict
import math
def hits (graph, epsilon = 1e-5, max_its = 100):
auth = defaultdict(lambda:1)
hub = defaultdict(lambda:1)
g = graph._g
weight = { (a,b): data['gram'].get('weight', 1)
for a, b, data in g.edges_iter(data=True) }
delta = epsilon+1
its = 0
prev_total = 0
while delta > epsilon and its < max_its:
for a, b in g.edges_iter():
auth[b] += weight[(a,b)] * hub[a]
for a, b in g.edges_iter():
hub[a] += weight[(a,b)] * auth[b]
totauth = math.sqrt(sum(auth[n]**2 for n in g.nodes()))
tothub = math.sqrt(sum(hub[n]**2 for n in g.nodes()))
for n in g.nodes():
auth[n] /= totauth
hub[n] /= tothub
delta = abs(totauth + tothub - prev_total)
prev_total = totauth + tothub
its += 1
auth.default_factory = lambda:0
hub.default_factory = lambda:0
return auth, hub
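# Minimal usage sketch (hypothetical wrapper: `hits` expects an object whose
# `_g` attribute is a networkx-1.x style graph with edges_iter()/nodes() and
# per-edge data carrying a 'gram' dict with an optional 'weight'):
#
#   import networkx as nx  # networkx < 2.0 for edges_iter()
#
#   class GraphWrapper(object):
#       def __init__(self, g):
#           self._g = g
#
#   g = nx.DiGraph()
#   g.add_edge('a', 'b', gram={'weight': 2})
#   g.add_edge('b', 'c', gram={})
#   auth, hub = hits(GraphWrapper(g))  # 'b' accrues authority, 'a'/'b' hub score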
|
agarsev/grafeno
|
grafeno/operations/hits.py
|
Python
|
agpl-3.0
| 971
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emmanuel Mathier <emmanuel.mathier@gmail.com>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from . import partner_compassion
from . import correspondence_metadata
from . import correspondence
from . import correspondence_page
from . import correspondence_template_page
from . import correspondence_template
from . import import_config
from . import import_letters_history
from . import import_letter_line
from . import contracts
from . import correspondence_positioned_objects
from . import project_compassion
from . import correspondence_s2b_generator
from . import queue_job
from . import last_writing_report
from . import migration_10_0_1_6_0
|
ecino/compassion-modules
|
sbc_compassion/models/__init__.py
|
Python
|
agpl-3.0
| 967
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Please note that the function 'make_request' is provided for your reference only.
# You will not be able to to actually use it from within the Udacity web UI
# All your changes should be in the 'extract_data' function
from bs4 import BeautifulSoup
import requests
import json
html_page = "page_source.html"
def extract_data(page):
data = {"eventvalidation": "",
"viewstate": ""}
with open(page, "r") as html:
soup = BeautifulSoup(html)
_eventValid = soup.find(id="__EVENTVALIDATION")
data["eventvalidation"]=_eventValid["value"]
_viewState = soup.find(id="__VIEWSTATE")
data["viewstate"]=_viewState["value"]
return data
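# extract_data returns, e.g. (values truncated; the prefixes match the asserts
# in test() below):
#   {"eventvalidation": "/wEWjAkCoIj1ng0...", "viewstate": "/wEPDwUKLTI..."}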
def make_request(data):
eventvalidation = data["eventvalidation"]
viewstate = data["viewstate"]
r = requests.post("http://www.transtats.bts.gov/Data_Elements.aspx?Data=2",
data={'AirportList': "BOS",
'CarrierList': "VX",
'Submit': 'Submit',
"__EVENTTARGET": "",
"__EVENTARGUMENT": "",
"__EVENTVALIDATION": eventvalidation,
"__VIEWSTATE": viewstate
})
return r.text
def test():
data = extract_data(html_page)
assert data["eventvalidation"] != ""
assert data["eventvalidation"].startswith("/wEWjAkCoIj1ng0")
assert data["viewstate"].startswith("/wEPDwUKLTI")
test()
|
krzyste/ud032
|
Lesson_2_Data_in_More_Complex_Formats/18-Using_Beautiful_Soup/html_soup.py
|
Python
|
agpl-3.0
| 1,546
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Dashboard'
db.create_table(u'dashboard_dashboard', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
))
db.send_create_signal(u'dashboard', ['Dashboard'])
# Adding M2M table for field panels on 'Dashboard'
m2m_table_name = db.shorten_name(u'dashboard_dashboard_panels')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('dashboard', models.ForeignKey(orm[u'dashboard.dashboard'], null=False)),
('dashboardpanel', models.ForeignKey(orm[u'dashboard.dashboardpanel'], null=False))
))
db.create_unique(m2m_table_name, ['dashboard_id', 'dashboardpanel_id'])
# Adding model 'DashboardPanel'
db.create_table(u'dashboard_dashboardpanel', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('graphite_target', self.gf('django.db.models.fields.CharField')(max_length=1000)),
))
db.send_create_signal(u'dashboard', ['DashboardPanel'])
def backwards(self, orm):
# Deleting model 'Dashboard'
db.delete_table(u'dashboard_dashboard')
# Removing M2M table for field panels on 'Dashboard'
db.delete_table(db.shorten_name(u'dashboard_dashboard_panels'))
# Deleting model 'DashboardPanel'
db.delete_table(u'dashboard_dashboardpanel')
models = {
u'dashboard.dashboard': {
'Meta': {'object_name': 'Dashboard'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'panels': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['dashboard.DashboardPanel']", 'symmetrical': 'False'})
},
u'dashboard.dashboardpanel': {
'Meta': {'object_name': 'DashboardPanel'},
'graphite_target': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
}
}
complete_apps = ['dashboard']
|
akvo/butler
|
butler/dashboard/migrations/0001_initial.py
|
Python
|
agpl-3.0
| 2,690
|
# coding: utf-8
# This file is part of Supysonic.
#
# Supysonic is a Python implementation of the Subsonic server API.
# Copyright (C) 2013 Alban 'spl0k' Féron
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from flask import request
from storm.expr import Or
import uuid
from supysonic.web import app, store
from supysonic.db import Playlist, User, Track
from . import get_entity
@app.route('/rest/getPlaylists.view', methods = [ 'GET', 'POST' ])
def list_playlists():
query = store.find(Playlist, Or(Playlist.user_id == request.user.id, Playlist.public == True)).order_by(Playlist.name)
username = request.values.get('username')
if username:
if not request.user.admin:
return request.error_formatter(50, 'Restricted to admins')
query = store.find(Playlist, Playlist.user_id == User.id, User.name == username).order_by(Playlist.name)
return request.formatter({ 'playlists': { 'playlist': [ p.as_subsonic_playlist(request.user) for p in query ] } })
@app.route('/rest/getPlaylist.view', methods = [ 'GET', 'POST' ])
def show_playlist():
status, res = get_entity(request, Playlist)
if not status:
return res
info = res.as_subsonic_playlist(request.user)
info['entry'] = [ t.as_subsonic_child(request.user) for t in res.tracks ]
return request.formatter({ 'playlist': info })
@app.route('/rest/createPlaylist.view', methods = [ 'GET', 'POST' ])
def create_playlist():
# Only(?) method where the android client uses form data rather than GET params
playlist_id, name = map(request.values.get, [ 'playlistId', 'name' ])
# songId actually doesn't seem to be required
songs = request.values.getlist('songId')
try:
playlist_id = uuid.UUID(playlist_id) if playlist_id else None
songs = set(map(uuid.UUID, songs))
except:
return request.error_formatter(0, 'Invalid parameter')
if playlist_id:
playlist = store.get(Playlist, playlist_id)
if not playlist:
            return request.error_formatter(70, 'Unknown playlist')
if playlist.user_id != request.user.id and not request.user.admin:
return request.error_formatter(50, "You're not allowed to modify a playlist that isn't yours")
playlist.tracks.clear()
if name:
playlist.name = name
elif name:
playlist = Playlist()
playlist.user_id = request.user.id
playlist.name = name
store.add(playlist)
else:
return request.error_formatter(10, 'Missing playlist id or name')
for sid in songs:
track = store.get(Track, sid)
if not track:
return request.error_formatter(70, 'Unknown song')
playlist.tracks.add(track)
store.commit()
return request.formatter({})
@app.route('/rest/deletePlaylist.view', methods = [ 'GET', 'POST' ])
def delete_playlist():
status, res = get_entity(request, Playlist)
if not status:
return res
if res.user_id != request.user.id and not request.user.admin:
return request.error_formatter(50, "You're not allowed to delete a playlist that isn't yours")
res.tracks.clear()
store.remove(res)
store.commit()
return request.formatter({})
@app.route('/rest/updatePlaylist.view', methods = [ 'GET', 'POST' ])
def update_playlist():
status, res = get_entity(request, Playlist, 'playlistId')
if not status:
return res
if res.user_id != request.user.id and not request.user.admin:
        return request.error_formatter(50, "You're not allowed to modify a playlist that isn't yours")
playlist = res
name, comment, public = map(request.values.get, [ 'name', 'comment', 'public' ])
to_add, to_remove = map(request.values.getlist, [ 'songIdToAdd', 'songIndexToRemove' ])
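    # Example request shape (IDs hypothetical):
    #   updatePlaylist.view?playlistId=<uuid>&songIdToAdd=<uuid>&songIndexToRemove=0
    # Indexes in songIndexToRemove refer to the playlist as it was before this
    # update, i.e. the `tracks` snapshot taken below.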
try:
to_add = set(map(uuid.UUID, to_add))
to_remove = sorted(set(map(int, to_remove)))
except:
return request.error_formatter(0, 'Invalid parameter')
if name:
playlist.name = name
if comment:
playlist.comment = comment
if public:
playlist.public = public in (True, 'True', 'true', 1, '1')
tracks = list(playlist.tracks)
for sid in to_add:
track = store.get(Track, sid)
if not track:
return request.error_formatter(70, 'Unknown song')
if track not in playlist.tracks:
playlist.tracks.add(track)
for idx in to_remove:
if idx < 0 or idx >= len(tracks):
return request.error_formatter(0, 'Index out of range')
playlist.tracks.remove(tracks[idx])
store.commit()
return request.formatter({})
|
nwokeo/supysonic
|
supysonic/api/playlists.py
|
Python
|
agpl-3.0
| 4,858
|
#!/usr/bin/python2
# -*- coding: utf-8 -*-
########################################################################
#
# Odoo Tools by CLEARCORP S.A.
# Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
########################################################################
import os, shutil, stat
import platform
import subprocess
import sys
import logging
import odootools.lib.config
_logger = logging.getLogger('odootools.lib.tools')
def check_root():
uid = os.getuid()
_logger.debug('UID = %s' % uid)
return uid == 0
def exit_if_not_root(command_name):
if not check_root():
_logger.error("The command: \"%s\" must be used as root. Aborting.\n" % command_name)
sys.exit(1)
else:
return True
def get_os():
""" Returns a dict with os info:
key os: os name
key version: tuple with more info
"""
supported_dists = ['Ubuntu','arch','LinuxMint']
os_name = platform.system()
os_version = ""
known_os = False
if os_name == "Linux":
known_os = True
# for linux the os_version is in the form: (distro name, version, version code name)
os_version = platform.linux_distribution(supported_dists=supported_dists)
elif os_name == "Mac":
known_os = True
# for mac the os_version is in the form: (release, versioninfo, machine)
os_version = platform.mac_ver()
elif os_name == "Windows":
known_os = True
os_version = platform.win32_ver()
if known_os:
_logger.debug('OS: %s, Version: %s' % (os_name, os_version))
return {'os': os_name, 'version': os_version}
else:
return False
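# e.g. on Ubuntu 14.04 get_os() returns something like (values illustrative):
#   {'os': 'Linux', 'version': ('Ubuntu', '14.04', 'trusty')}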
def get_hostname():
import socket
short_name = socket.gethostname()
try:
long_name = socket.gethostbyaddr(socket.gethostname())[0]
except:
long_name = None
return (short_name, long_name)
def regenerate_ssh_keys():
regen_script = open('/etc/regen-ssh-keys.sh', 'w')
regen_script.write('''
#!/bin/bash
rm -f /etc/ssh/ssh_host_*
ssh-keygen -f /etc/ssh/ssh_host_rsa_key -t rsa -N ''
ssh-keygen -f /etc/ssh/ssh_host_dsa_key -t dsa -N ''
sed '/regen-ssh-keys.sh/d' /etc/rc.local > /etc/rc.local.tmp
cat /etc/rc.local.tmp > /etc/rc.local
rm -f /etc/rc.local.tmp
rm -f $0
''')
regen_script.close()
os.chmod('/etc/regen-ssh-keys.sh', stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
rc_local = open('/etc/rc.local', 'a')
rc_local.write('/etc/regen-ssh-keys.sh\n')
rc_local.close()
def exec_command(command, as_root=False):
_logger.debug('Executing command: %s' % command)
if as_root and not check_root():
command = 'sudo ' + command
process = subprocess.Popen(command,
shell=True,
stdin=sys.stdin.fileno(),
stdout=sys.stdout.fileno(),
stderr=sys.stderr.fileno())
process.wait()
_logger.debug('Command finished: %s' % process.returncode)
return process.returncode
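# e.g. exec_command('apt-get -qy update', as_root=True) prepends sudo when the
# current user is not root and returns the child's exit code (0 on success).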
def command_not_available():
_logger.error('The command %s is not implemented yet.' % odootools.lib.config.params['command'])
return
def ubuntu_install_package(packages, update=False):
_logger.info('Installing packages with apt-get.')
_logger.debug('Packages: %s' % str(packages))
if update:
if exec_command('apt-get -qy update', as_root=True):
_logger.warning('Failed to update apt-get database.')
if exec_command('apt-get -qy install %s' % ' '.join(packages), as_root=True):
_logger.error('Failed to install packages.')
return False
else:
return True
def arch_install_repo_package(packages):
_logger.info('Installing packages with pacman.')
_logger.debug('Packages: %s' % str(packages))
if exec_command('pacman -Sq --noconfirm --needed %s' % ' '.join(packages), as_root=True):
return False
else:
return True
def arch_install_aur_package(packages):
_logger.info('Installing packages from AUR.')
_logger.debug('Packages: %s' % str(packages))
#TODO: lp:1133341 check if base-devel group is installed
if exec_command('pacman -Sq --noconfirm --needed base-devel', as_root=True):
_logger.error('Error installing base-devel package group. Exiting.')
return False
if not arch_check_package_installed('wget'):
if exec_command('pacman -Sq --noconfirm --needed wget', as_root=True):
_logger.error('Error installing wget package. Exiting.')
return False
import tempfile, tarfile, copy
temp_dir = tempfile.mkdtemp(prefix='odootools-')
cwd = os.getcwd()
error = False
loop_packages = copy.copy(packages)
retry_packages = []
while loop_packages:
os.chdir(temp_dir)
for package in loop_packages:
            if exec_command('wget https://aur.archlinux.org/packages/%s/%s/%s.tar.gz' % (package[0:2], package, package)):
_logger.error('Failed to download AUR package: %s' % package)
error = True
continue
try:
                tar = tarfile.open(package + '.tar.gz', 'r')
tar.extractall()
tar.close()
os.chdir(package)
except:
_logger.error('Failed to extract AUR package: %s' % package)
error = True
continue
if exec_command('makepkg -s PKGBUILD', as_root=True):
_logger.warning('Failed to build AUR package: %s. Retrying later.' % package)
retry_packages.append(package)
continue
try:
os.chdir(package)
except:
_logger.error('Failed to install AUR package: %s' % package)
error = True
continue
if exec_command('pacman -U %s*' % package, as_root=True):
_logger.error('Failed to install AUR package: %s' % package)
error = True
continue
os.chdir('..')
if retry_packages and len(loop_packages) != len(retry_packages):
loop_packages = retry_packages
retry_packages = []
elif retry_packages:
_logger.error('Failed to install AUR packages: %s' % ', '.join(retry_packages))
error = True
else:
loop_packages = []
os.chdir(cwd)
shutil.rmtree(temp_dir)
return not error
def arch_check_package_installed(package):
if exec_command('pacman -Qq %s' % package):
return False
else:
return True
|
ClearCorp/odootools
|
odootools/odootools/lib/tools.py
|
Python
|
agpl-3.0
| 7,392
|
class ExperimentSyntaxError(Exception):
def __init__(self, message):
self.message = message
class ExperimentExecutionError(Exception):
def __init__(self, message):
self.message = message
class ExperimentSetupError(Exception):
def __init__(self, message):
self.message = message
class StopExperimentException(Exception):
def __init__(self, scope):
self.scope = scope
|
docmalloc/gplmt
|
src/error.py
|
Python
|
agpl-3.0
| 419
|
# -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import pytest
from shoop import configuration
from shoop.core import cache
from shoop.core.models import ConfigurationItem
from shoop.testing.factories import get_default_shop
@pytest.mark.django_db
def test_simple_set_and_get_with_shop():
shop = get_default_shop()
configuration.set(shop, "answer", 42)
assert configuration.get(shop, "answer") == 42
assert configuration.get(shop, "non-existing") is None
configuration.set(shop, "non-existing", "hello")
assert configuration.get(shop, "non-existing") == "hello"
@pytest.mark.django_db
def test_simple_set_and_get_without_shop():
configuration.set(None, "answer", 42)
assert configuration.get(None, "answer") == 42
assert configuration.get(None, "non-existing") is None
configuration.set(None, "non-existing", "hello")
assert configuration.get(None, "non-existing") == "hello"
@pytest.mark.django_db
def test_simple_set_and_get_cascading():
shop = get_default_shop()
configuration.set(None, "answer", 42)
assert configuration.get(None, "answer") == 42
    assert configuration.get(shop, "answer") == 42
assert configuration.get(None, "non-existing") is None
assert configuration.get(shop, "non-existing") is None
configuration.set(shop, "non-existing", "hello")
assert configuration.get(None, "non-existing") is None
assert configuration.get(shop, "non-existing") == "hello"
assert configuration.get(None, "foo") is None
assert configuration.get(shop, "foo") is None
configuration.set(None, "foo", "bar")
configuration.set(shop, "foo", "baz")
assert configuration.get(None, "foo") == "bar"
assert configuration.get(shop, "foo") == "baz"
@pytest.mark.django_db
def test_configuration_gets_saved():
configuration.set(None, "x", 1)
assert configuration.get(None, "x") == 1
configuration.set(None, "x", 2)
assert configuration.get(None, "x") == 2
configuration.set(None, "x", 3)
assert configuration.get(None, "x") == 3
conf_item = ConfigurationItem.objects.get(shop=None, key="x")
assert conf_item.value == 3
@pytest.mark.django_db
def test_configuration_set_and_get():
cache.clear()
shop = get_default_shop()
test_conf_data = {"data": "test"}
configuration.set(shop, "key", test_conf_data)
# Get the configuration via configuration API
assert configuration.get(shop, "key") == test_conf_data
# Check that configuration is saved to database
assert ConfigurationItem.objects.get(shop=shop, key="key").value == test_conf_data
@pytest.mark.django_db
def test_configuration_update():
cache.clear()
shop = get_default_shop()
configuration.set(shop, "key1", {"data": "test1"})
configuration.set(shop, "key2", {"data": "test2"})
configuration.set(shop, "key3", {"data": "test3"})
assert configuration.get(shop, "key1").get("data") == "test1"
assert configuration.get(shop, "key3").get("data") == "test3"
# Update configuration
configuration.set(shop, "key3", {"data": "test_bump"})
assert configuration.get(shop, "key3").get("data") == "test_bump"
@pytest.mark.django_db
def test_global_configurations():
cache.clear()
shop = get_default_shop()
configuration.set(None, "key1", {"data": "test1"})
configuration.set(shop, "key2", {"data": "test2"})
# key1 from shop should come from global configuration
assert configuration.get(shop, "key1").get("data") == "test1"
# key2 shouldn't be in global configurations
assert configuration.get(None, "key2") is None
# Update global configuration
configuration.set(None, "key1", {"data": "test_bump"})
assert configuration.get(shop, "key1").get("data") == "test_bump"
# Override shop data for global key1
configuration.set(shop, "key1", "test_data")
assert configuration.get(shop, "key1") == "test_data"
# Update shop configuration for global key1
configuration.set(shop, "key1", "test_data1")
assert configuration.get(shop, "key1") == "test_data1"
@pytest.mark.django_db
def test_configuration_cache():
cache.clear()
shop = get_default_shop()
configuration.set(None, "key1", "test1")
configuration.set(shop, "key2", "test2")
# Shop configurations cache should be bumped
assert cache.get(configuration._get_cache_key(shop)) is None
configuration.get(shop, "key1")
    # Now shop configurations and key2 should be found in the cache
assert cache.get(configuration._get_cache_key(shop)).get("key2") == "test2"
|
akx/shoop
|
shoop_tests/core/test_configurations.py
|
Python
|
agpl-3.0
| 4,756
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import setup
try:
from pypandoc import convert
read_md = lambda f: convert(f, 'rst')
except ImportError:
print("warning: pypandoc module not found, could not convert Markdown to RST")
read_md = lambda f: open(f, 'r').read()
setup(
name='thehive4py',
version='1.5.3',
description='Python API client for TheHive.',
long_description=read_md('README.md'),
author='TheHive-Project',
author_email='support@thehive-project.org',
maintainer='TheHive-Project',
url='https://github.com/TheHive-Project/Thehive4py',
license='AGPL-V3',
packages=['thehive4py'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: Software Development :: Libraries :: Python Modules'
],
include_package_data=True,
install_requires=['future', 'requests', 'python-magic']
)
|
CERT-BDF/TheHive4py
|
setup.py
|
Python
|
agpl-3.0
| 1,293
|
from coalib.bearlib.abstractions.Linter import linter
from coalib.bears.requirements.PipRequirement import PipRequirement
@linter(executable='pycodestyle',
output_format='regex',
output_regex=r'(?P<line>\d+) (?P<column>\d+) '
r'(?P<message>(?P<origin>\S+).*)')
class PycodestyleBear:
"""
A wrapper for the tool ``pycodestyle`` formerly known as ``pep8``.
"""
LANGUAGES = {"Python", "Python 2", "Python 3"}
REQUIREMENTS = {PipRequirement('pycodestyle')}
AUTHORS = {'The coala developers'}
AUTHORS_EMAILS = {'coala-devel@googlegroups.com'}
LICENSE = 'AGPL-3.0'
CAN_DETECT = {'Formatting'}
@staticmethod
def create_arguments(
filename, file, config_file,
pycodestyle_ignore: str="",
pycodestyle_select: str="",
max_line_length: int=79):
"""
:param pycodestyle_ignore:
Comma separated list of errors to ignore.
            See ``pycodestyle`` documentation for a complete list of errors.
:param pycodestyle_select:
            Comma separated list of errors to detect. If given, only
            these errors are going to be detected.
            See ``pycodestyle`` documentation for a complete list of errors.
:param max_line_length:
Limit lines to this length.
"""
arguments = [r"--format='%(row)d %(col)d %(code)s %(text)s'"]
if pycodestyle_ignore:
arguments.append("--ignore=" + pycodestyle_ignore)
if pycodestyle_select:
arguments.append("--select=" + pycodestyle_select)
arguments.append("--max-line-length=" + str(max_line_length))
arguments.append(filename)
return arguments
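        # With the defaults and filename='foo.py' this yields (illustrative):
        #   ["--format='%(row)d %(col)d %(code)s %(text)s'",
        #    '--max-line-length=79', 'foo.py']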
|
dosarudaniel/coala-bears
|
bears/python/PycodestyleBear.py
|
Python
|
agpl-3.0
| 1,751
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-03-09 07:51
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('seqr', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='SampleBatch',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('guid', models.CharField(db_index=True, max_length=30, unique=True)),
('created_date', models.DateTimeField(db_index=True, default=django.utils.timezone.now)),
('last_modified_date', models.DateTimeField(blank=True, db_index=True, null=True)),
('name', models.TextField()),
('description', models.TextField(blank=True, null=True)),
('sequencing_type', models.CharField(choices=[(b'WES', b'Exome'), (b'WGS', b'Whole Genome'), (b'RNA', b'RNA')], max_length=3)),
('genome_build_id', models.CharField(choices=[(b'b37', b'b37'), (b'b38', b'b38')], default=b'b37', max_length=5)),
('variant_callset_is_loaded', models.BooleanField(default=False)),
('variant_callset_loaded_date', models.DateTimeField(blank=True, null=True)),
('variant_callset_path', models.TextField(blank=True, null=True)),
('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'permissions': (('can_view', 'can_view'), ('can_edit', 'can_edit'), ('is_owner', 'is_owner')),
},
),
migrations.RemoveField(
model_name='dataset',
name='created_by',
),
migrations.AlterUniqueTogether(
name='sequencingsample',
unique_together=set([]),
),
migrations.RemoveField(
model_name='sequencingsample',
name='dataset',
),
migrations.AddField(
model_name='sequencingsample',
name='sample_batch',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='seqr.SampleBatch'),
),
migrations.DeleteModel(
name='Dataset',
),
]
|
macarthur-lab/xbrowse
|
seqr/migrations/0002_auto_20170309_0751.py
|
Python
|
agpl-3.0
| 2,584
|
# Generated by Django 1.11.15 on 2019-04-24 17:31
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
import simple_history.models
import uuid
class Migration(migrations.Migration):
dependencies = [
('core', '0011_remove_partner_lms_commerce_api_url'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('course_metadata', '0171_historicalcourserun'),
]
operations = [
migrations.CreateModel(
name='HistoricalCourse',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('draft', models.BooleanField(default=False, help_text='Is this a draft version?')),
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, verbose_name='UUID')),
('key', models.CharField(db_index=True, max_length=255)),
('title', models.CharField(blank=True, default=None, max_length=255, null=True)),
('short_description', models.TextField(blank=True, default=None, null=True)),
('full_description', models.TextField(blank=True, default=None, null=True)),
('outcome', models.TextField(blank=True, null=True)),
('prerequisites_raw', models.TextField(blank=True, null=True)),
('syllabus_raw', models.TextField(blank=True, null=True)),
('card_image_url', models.URLField(blank=True, null=True)),
('image', models.TextField(blank=True, help_text='Add the course image', max_length=100, null=True)),
('faq', models.TextField(blank=True, default=None, null=True, verbose_name='FAQ')),
('learner_testimonials', models.TextField(blank=True, default=None, null=True)),
('has_ofac_restrictions', models.BooleanField(default=False, verbose_name='Course Has OFAC Restrictions')),
('enrollment_count', models.IntegerField(blank=True, default=0, help_text='Total number of learners who have enrolled in this course', null=True)),
('recent_enrollment_count', models.IntegerField(blank=True, default=0, help_text='Total number of learners who have enrolled in this course in the last 6 months', null=True)),
('number', models.CharField(blank=True, help_text='Course number format e.g CS002x, BIO1.1x, BIO1.2x', max_length=50, null=True)),
('additional_information', models.TextField(blank=True, default=None, null=True, verbose_name='Additional Information')),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('canonical_course_run', models.ForeignKey(blank=True, db_constraint=False, default=None, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='course_metadata.CourseRun')),
('draft_version', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='course_metadata.Course')),
('extra_description', models.ForeignKey(blank=True, db_constraint=False, default=None, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='course_metadata.AdditionalPromoArea')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('level_type', models.ForeignKey(blank=True, db_constraint=False, default=None, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='course_metadata.LevelType')),
('partner', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='core.Partner')),
('video', models.ForeignKey(blank=True, db_constraint=False, default=None, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='course_metadata.Video')),
],
options={
'verbose_name': 'historical course',
'get_latest_by': 'history_date',
'ordering': ('-history_date', '-history_id'),
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
]
|
edx/course-discovery
|
course_discovery/apps/course_metadata/migrations/0172_historicalcourse.py
|
Python
|
agpl-3.0
| 4,942
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "vidan.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
mcuringa/vid-analyzer
|
web/manage.py
|
Python
|
agpl-3.0
| 248
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('instance', '0064_merge'),
]
operations = [
migrations.AddField(
model_name='mongodbserver',
name='description',
field=models.CharField(blank=True, max_length=250),
),
migrations.AddField(
model_name='mongodbserver',
name='name',
field=models.CharField(max_length=250, blank=True),
),
migrations.AddField(
model_name='mysqlserver',
name='description',
field=models.CharField(blank=True, max_length=250),
),
migrations.AddField(
model_name='mysqlserver',
name='name',
field=models.CharField(max_length=250, blank=True),
),
]
|
open-craft/opencraft
|
instance/migrations/0065_create_description.py
|
Python
|
agpl-3.0
| 923
|
###############################################################################
#
# Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
{
'name': 'Production Note',
'version': '0.1',
'category': 'Customization',
'description': '''
        Adds all the cases needed to manage notes in production
''',
'author': 'Micronaet S.r.l. - Nicola Riolini',
'website': 'http://www.micronaet.it',
'license': 'AGPL-3',
'depends': [
'base',
'product',
'sale',
'mrp',
'partner_product_partic_base',
],
'init_xml': [],
'demo': [],
'data': [
'note_view.xml',
],
'active': False,
'installable': True,
'auto_install': False,
}
|
Micronaet/micronaet-production
|
production_note/__openerp__.py
|
Python
|
agpl-3.0
| 1,525
|
#!/usr/bin/env python
import IPython
import os
import random
import re
import requests
import sys
randalpha = lambda n: ''.join([chr(ord('A')+random.randint(0,25)) for _ in range(n)])
new_account = '--regen-creds' in sys.argv or not os.path.exists('web500_creds.txt')
def make_creds():
creds = dict()
if new_account:
creds['username'] = randalpha(16)
creds['password'] = randalpha(16)
with open('web500_creds.txt', 'w') as f:
f.write(repr(creds))
else:
with open('web500_creds.txt', 'r') as f:
creds = eval(f.read())
return creds
def progress_hook(response, *args, **kwargs):
print('"%s": %d' % (response.url, response.status_code))
hooks = {'response': progress_hook}
baseurl = 'http://107.170.127.56'
session = requests.Session()
creds = make_creds()
if new_account:
resp1 = session.post(baseurl+'/register.php', hooks=hooks, data={
'username': creds['username'],
'firstname': creds['username'],
'lastname': creds['username'],
'password': creds['password'],
'repassword': creds['password'],
'submit': 'Sign up',
})
else:
resp1 = session.post(baseurl+'/login.php', hooks=hooks, data={
'username': creds['username'],
'password': creds['password'],
'submit': 'Login',
})
'''
<form name="xml-upload" id="upload" enctype="multipart/form-data" action="settings.php" method="POST">
<input type="hidden" name="MAX_FILE_SIZE" value="1000000" />
<input name="upl" type="file" accept=".xml" style="margin-left: 175px;" />
<p style="margin-top: 5px;">Max of 1 MB.</p>
<input type="submit" value="Upload">
</form>
'''
def do_xxe(session, payload):
resp = session.post(baseurl+'/settings.php', hooks=hooks, data={
'MAX_FILE_SIZE': 1000000,
'submit': 'Upload'
}, files={
'upl': ('xxe.xml', payload, 'text/xml')
})
#has_warnings = re.findall('(.*)<html>', resp.text, re.DOTALL)
position = resp.text.find('<html>')
if position > 0:
warnings = re.findall('(<b>Warning</b>: .*)', resp.text[:position])
for line in warnings:
print(line)
return re.findall('<textarea[^>]*name="bio">(.*)</textarea>\\s*<input type="submit" value="Update">\\s*</form>', resp.text, re.DOTALL)[0]
def arbitrary_file_read(session, fname):
file_payload = '''
<!DOCTYPE bio [ <!ELEMENT bio ANY >
<!ENTITY xxe SYSTEM "file://{filename}" >]>
<data>
<item xml:id="id">1</item>
<item xml:id="firstname">firstname</item>
<item xml:id="lastname">lastname</item>
<item xml:id="bio">&xxe;</item>
</data>
'''.format(filename=fname)
cdata_attempt = '''
<!DOCTYPE bio [ <!ELEMENT bio ANY >
<!ENTITY file SYSTEM "file://{filename}">
<!ENTITY passwd SYSTEM "file:///etc/passwd">
<!ENTITY test "foo">
<!ENTITY start "<![CDATA[">
<!ENTITY % end "]]>">
<!ENTITY xxe "&file;">
]>
<data>
<item xml:id="id">1</item>
<item xml:id="firstname">firstname</item>
<item xml:id="lastname">lastname</item>
<item xml:id="bio">&xxe;</item>
</data>
'''.format(filename=fname)
base64_payload = '''
<!DOCTYPE bio [ <!ELEMENT bio ANY >
<!ENTITY xxe SYSTEM "php://filter/convert.base64-encode/resource=file://{filename}">
]>
<data>
<item xml:id="id">1</item>
<item xml:id="firstname">firstname</item>
<item xml:id="lastname">lastname</item>
<item xml:id="bio">&xxe;</item>
</data>
'''.format(filename=fname)
return do_xxe(session, base64_payload).decode('base64')
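# e.g. arbitrary_file_read(session, '/etc/passwd') exfiltrates the file through
# the php://filter base64-encode XXE payload and decodes it locally.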
"""
def do_glob(session, path):
glob_payload = '''
<!DOCTYPE bio [ <!ELEMENT bio ANY >
<!ENTITY xxe SYSTEM "php://filter/convert.base64-encode/resource=glob://{path}">
]>
<data>
<item xml:id="id">1</item>
<item xml:id="firstname">firstname</item>
<item xml:id="lastname">lastname</item>
<item xml:id="bio">&xxe;</item>
</data>
'''.format(path=path)
return do_xxe(session, glob_payload)
"""
def list_files_for_userid(session, id_):
resp = session.post(baseurl, hooks=hooks, data={'accounts': id_, 'submit':'Show files'})
#return re.findall('<a href.*bytes\)</a>', resp.text)
has_things = re.findall('<h3 class="bars">Uploaded files</h3>\\s*<ul class="list-group">(.*)</ul>', resp.text, re.DOTALL)[0]
ids = re.findall('name="downfileid" value="(\\d*)"', has_things)
fnames = re.findall('\\)" >(.*) \\(\\d* bytes\\)</a>', has_things)
return zip(ids, fnames)
def download_file_by_id(session, id_):
resp = session.post(baseurl, hooks=hooks, data={'downfileid': id_, 'submit':'Show files'})
return resp.text
def download_all_files(session, maxuser):
files = dict()
for user in range(maxuser):
for (i, fname) in list_files_for_userid(session, user):
print(user, i, fname)
files[i] = (user, fname, download_file_by_id(session, i))
with open('web500_files_up_to_%d' % maxuser, 'w') as f:
f.write(repr(files))
return files
def request_keys(session):
session.headers['X-Forwarded-For'] = 'for=0.0.0.0; proto=http; by=253.254.255.256'
'''
function checkpw($code)
{
$len = strlen($code) - 1;
$last = intval($code[$len]);
if(!!$code && $code & 1 && $last % 2 == 0 && $code = "DirtyHarry99")
return true;
return false;
}
'''
'''
$ php -a
php > $x = [0];
php > echo $x & 1;
1
php > echo intval($x[strlen($x)-1]) % 2;
PHP Warning: strlen() expects parameter 1 to be string, array given in php shell code on line 1
PHP Notice: Undefined offset: -1 in php shell code on line 1
0
'''
# as far as I can tell:
# ($x & 1) is true because arrays are true, and true (as a number) is 1
# strlen($x) is undefined, (undefined - 1) is -1, $x[-1] is undefined, and intval(undefined) is 0, which is even
# yay PHP!
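    # Posting the parameter as 'passcode[]' is what makes PHP receive an
    # array instead of a string, triggering the behaviour demonstrated above.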
resp = session.post(baseurl+'/keys.php', hooks=hooks, data={'passcode[]': "0"})
return resp
print(download_file_by_id(session, 1))
print(arbitrary_file_read(session, '/etc/apache2/apache2.conf'))
print(arbitrary_file_read(session, '/etc/apache2/htpasswd'))
if not os.path.exists('web500_source.tar.gz'):
os.system('wget --user=therealadmin --password=SEXYLOVE http://107.170.127.56/backups/backup.tar.gz --output-document ./web500_source.tar.gz')
resp_keys = request_keys(session)
print(resp_keys.text)
IPython.embed()
|
aweinstock314/aweinstock-ctf-writeups
|
rc3ctf_2015/web500_megafile_exploit.py
|
Python
|
agpl-3.0
| 6,550
|
# Copyright 2019 Alfredo de la Fuente - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import models, fields
class SaleOrder(models.Model):
_inherit = 'sale.order'
sale_product_ids = fields.Many2many(
string='Sale order products', comodel_name='product.product',
compute='_compute_sale_product_ids')
    def _compute_sale_product_ids(self):
        for sale in self:
            lines = sale.order_line.filtered(lambda x: x.state != 'cancel')
            products = lines.mapped('product_id')
            # Compute methods must assign a value for every record; the
            # (6, 0, ids) command replaces the m2m with exactly these ids.
            sale.sale_product_ids = [(6, 0, products.ids)]
def action_view_products_stock_forecast_from_sale(self):
self.ensure_one()
if self.sale_product_ids:
            self.env['product.product.stock.forecast']._calc_qty_per_day(
                products_lst=self.sale_product_ids)
action = self.env.ref(
'stock_forecast.action_product_stock_forecast_from'
'_sale').read()[0]
action['domain'] = [
('product_id', 'in', self.sale_product_ids.ids)]
return action
|
oihane/odoo-addons
|
stock_forecast/models/sale_order.py
|
Python
|
agpl-3.0
| 1,158
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-08-25 09:51
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.postgres.operations import CITextExtension
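# CITextExtension runs CREATE EXTENSION citext; the database role running
# this migration therefore needs permission to install PostgreSQL extensions.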
class Migration(migrations.Migration):
dependencies = [
('core', '0042_auto_20200825_1051'),
]
operations = [CITextExtension()]
|
BirkbeckCTP/janeway
|
src/core/migrations/0043_install_ci_extension_pg.py
|
Python
|
agpl-3.0
| 363
|
###
# Tests the driver cursor API
###
import unittest
from os import getenv
from sys import path, argv
path.append("../../drivers/python")
import rethinkdb as r
num_rows = int(argv[2])
port = int(argv[1])
class TestCursor(unittest.TestCase):
def setUp(self):
c = r.connect(port=port)
tbl = r.table('test')
self.cur = tbl.run(c)
def test_type(self):
self.assertEqual(type(self.cur), r.Cursor)
def test_count(self):
i = 0
for row in self.cur:
i += 1
self.assertEqual(i, num_rows)
if __name__ == '__main__':
    print("Testing cursor for %d rows" % num_rows)
suite = unittest.TestSuite()
loader = unittest.TestLoader()
suite.addTest(loader.loadTestsFromTestCase(TestCursor))
unittest.TextTestRunner(verbosity=2).run(suite)
|
AtnNn/rethinkdb
|
test/rql_test/connections/cursor.py
|
Python
|
agpl-3.0
| 824
|
# -*- coding: UTF-8 -*-
# Copyright 2017 Luc Saffre
# License: BSD (see file COPYING for details)
"""The default :attr:`custom_layouts_module
<lino.core.site.Site.custom_layouts_module>` for Lino Cosi.
"""
from lino.api import rt
rt.models.products.Products.column_names = "id name cat sales_price *"
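# Each line of a Lino detail_layout string (used below) is one row of the
# detail form; a suffix like ref:10 sets that element's width.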
rt.models.products.Products.detail_layout = """
id cat sales_price vat_class delivery_unit
name
description
"""
rt.models.accounts.Accounts.column_names = (
    "ref name purchases_allowed group *")
rt.models.countries.Places.detail_layout = """
name country
type parent zip_code id
PlacesByPlace contacts.PartnersByCity
"""
rt.models.accounts.Accounts.detail_layout = """
ref:10 name
group type id default_amount:10 vat_column
needs_partner clearable purchases_allowed
ledger.MovementsByAccount
"""
rt.models.system.SiteConfigs.detail_layout = """
site_company next_partner_id:10
default_build_method simulate_today
"""
|
khchine5/lino-cosi
|
lino_cosi/lib/cosi/layouts.py
|
Python
|
agpl-3.0
| 925
|
# Amara, universalsubtitles.org
#
# Copyright (C) 2013 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
import functools
from django.conf import settings
from django.urls import reverse
from django.template.loader import render_to_string
from django.utils.html import format_html, format_html_join
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django.utils.translation import get_language
from django import template
register = template.Library()
from staticmedia import utils
from utils.basexconverter import base62
from videos.views import LanguageList
from videos.types import video_type_registrar, VideoTypeError
from videos import permissions, share_utils, video_size
def cached_by_video(cache_prefix):
"""Wrapper function for tags that cache their content per-video. """
def decorator(func):
@functools.wraps(func)
def wrapper(video, *args, **kwargs):
cache_key = '{}-{}'.format(cache_prefix, get_language())
cached = video.cache.get(cache_key)
if cached:
return mark_safe(cached)
computed = func(video, *args, **kwargs)
video.cache.set(cache_key, computed)
return computed
return wrapper
return decorator
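# Stack below @register.simple_tag, as language_list() and embedder_code()
# do further down in this module.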
@register.filter
def is_follower(obj, user):
# obj is Video or SubtitleLanguage
if not user.is_authenticated():
return False
if not obj:
return False
return obj.user_is_follower(user)
@register.filter
def can_user_edit_video_urls(video, user):
return permissions.can_user_edit_video_urls(video, user)
@register.filter
def can_user_resync(video, user):
return permissions.can_user_resync(video, user)
import urlparse
from django.template.defaulttags import url, URLNode
class VideoURLNode(URLNode):
    def render(self, context):
        # Render the wrapped {% url %} node, then absolutize the result
        # against the site domain (built as in shortlink_for_video below).
        domain = "%s://%s" % (getattr(settings, 'DEFAULT_PROTOCOL'),
                              settings.HOSTNAME)
        path = super(VideoURLNode, self).render(context)
        if self.asvar:
            context[self.asvar] = urlparse.urljoin(domain, context[self.asvar])
            return ''
        return urlparse.urljoin(domain, path)
def video_url(parser, token, node_cls=VideoURLNode):
    """
    Decides whether a video must have a secret url passed into it or not.
    If the video must be accessed through a private url, the 40-char hash
    is inserted instead of the video_id.
    """
    node_instance = url(parser, token)
return node_cls(view_name=node_instance.view_name,
args=node_instance.args,
kwargs=node_instance.kwargs,
asvar=node_instance.asvar)
video_url = register.tag(video_url)
@register.filter
def in_progress(language):
return (not language.get_tip(public=True) and
language.get_tip(public=False))
@register.filter
def format_duration(value):
"""
Based on a Template Tag by Dan Ward 2009 (http://d-w.me)
Usage: {{ VALUE|format_duration }}
"""
if value is None:
return _("Unknown")
# Place seconds in to integer
secs = int(value)
# If seconds are greater than 0
if secs > 0:
# Import math library
import math
# Place durations of given units in to variables
daySecs = 86400
hourSecs = 3600
minSecs = 60
        # Create string to hold output
durationString = ''
# Calculate number of hours from seconds
hours = int(math.floor(secs / int(hourSecs)))
# Subtract hours from seconds
secs = secs - (hours * int(hourSecs))
# Calculate number of minutes from seconds (minus number of hours)
minutes = int(math.floor(secs / int(minSecs)))
# Subtract minutes from seconds
secs = secs - (minutes * int(minSecs))
# Calculate number of seconds (minus hours and minutes)
seconds = secs
# Determine if next string is to be shown
if hours > 0:
durationString += '%02d' % (hours,) + ':'
# If number of minutes is greater than 0
if minutes > 0 or hours > 0:
durationString += '%02d' % (minutes,) + ':'
# If number of seconds is greater than 0
if seconds > 0 or minutes > 0 or hours > 0:
if minutes == 0 and hours == 0:
durationString += '0:%02d' % (seconds,)
else:
durationString += '%02d' % (seconds,)
# Return duration string
return durationString.strip()
# If seconds are not greater than 0
else:
# Provide 'No duration' message
return 'No duration'
def shortlink_for_video(video):
"""Return a shortlink string for the video.
The pattern is http://amara.org/v/<pk>
"""
protocol = getattr(settings, 'DEFAULT_PROTOCOL')
domain = settings.HOSTNAME
# don't www me, we'll redirect users and save three
# chars. Yay for our twitter-brave-new-world
domain = domain.replace("www.", '')
encoded_pk = base62.from_decimal(video.pk)
path = reverse('shortlink', args=[encoded_pk], no_locale=True)
return u"{0}://{1}{2}".format(unicode(protocol),
unicode(domain),
unicode(path))
@register.filter
def multi_video_create_subtitles_data_attrs(video):
attrs = [
('data-video-id', video.id),
('data-video-langs', ':'.join(l.language_code for l in
video.all_subtitle_languages())),
]
if video.primary_audio_language_code:
attrs.append(('data-video-primary-audio-lang-code',
video.primary_audio_language_code))
return mark_safe(' '.join('%s="%s"' % (key, value)
for (key, value) in attrs))
@register.simple_tag(name='language-list')
@cached_by_video('language-list')
def language_list(video):
video.prefetch_languages(with_public_tips=True,
with_private_tips=True)
return mark_safe(render_to_string('videos/_language-list.html', {
'video': video,
'language_list': LanguageList(video),
'STATIC_URL': utils.static_url(),
}))
@register.simple_tag(name='embedder-code')
@cached_by_video('embedder-code')
def embedder_code(video):
video.prefetch_languages(with_public_tips=True,
with_private_tips=True)
return mark_safe(render_to_string('videos/_embed_link.html', {
'video_url': video.get_video_url(),
'team': video.get_team(),
'height': video_size["large"]["height"],
'width': video_size["large"]["width"],
}))
@register.simple_tag(name='video-metadata', takes_context=True)
def video_metadata(context, video):
request = context['request']
metadata = video.get_metadata_for_locale(request.LANGUAGE_CODE)
return format_html_join(u'\n', u'<h4>{0}: {1}</h4>', [
(field['label'], field['content'])
for field in metadata.convert_for_display()
])
@register.simple_tag(name='sharing-widget-for-video')
@cached_by_video('sharing-widget')
def sharing_widget_for_video(video):
context = share_utils.share_panel_context_for_video(video)
content = mark_safe(render_to_string('_sharing_widget.html', context))
return content
@register.filter
def speaker_name(video):
return video.get_metadata().get('speaker-name')
|
pculture/unisubs
|
apps/videos/templatetags/videos_tags.py
|
Python
|
agpl-3.0
| 8,099
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2018: Alignak team, see AUTHORS.txt file for contributors
#
# This file is part of Alignak.
#
# Alignak is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Alignak is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Alignak. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
import time
import signal
import json
import configparser
import subprocess
import threading
from time import sleep
import requests
import shutil
import psutil
import pytest
from .alignak_test import AlignakTest
from alignak.http.generic_interface import GenericInterface
from alignak.http.arbiter_interface import ArbiterInterface
from alignak.http.scheduler_interface import SchedulerInterface
from alignak.http.broker_interface import BrokerInterface
class TestLaunchDaemons(AlignakTest):
def setUp(self):
super(TestLaunchDaemons, self).setUp()
self.cfg_folder = '/tmp/alignak'
self._prepare_configuration(copy=True, cfg_folder=self.cfg_folder)
files = ['%s/etc/alignak.ini' % self.cfg_folder,
'%s/etc/alignak.d/daemons.ini' % self.cfg_folder,
'%s/etc/alignak.d/modules.ini' % self.cfg_folder]
try:
cfg = configparser.ConfigParser()
cfg.read(files)
cfg.set('alignak-configuration', 'launch_missing_daemons', '1')
cfg.set('daemon.arbiter-master', 'alignak_launched', '1')
cfg.set('daemon.scheduler-master', 'alignak_launched', '1')
cfg.set('daemon.poller-master', 'alignak_launched', '1')
cfg.set('daemon.reactionner-master', 'alignak_launched', '1')
cfg.set('daemon.receiver-master', 'alignak_launched', '1')
cfg.set('daemon.broker-master', 'alignak_launched', '1')
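            # launch_missing_daemons together with the per-daemon
            # alignak_launched flags make the arbiter spawn and manage
            # every daemon needed by these tests.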
with open('%s/etc/alignak.ini' % self.cfg_folder, "w") as modified:
cfg.write(modified)
except Exception as exp:
print("* parsing error in config file: %s" % exp)
assert False
def tearDown(self):
# Restore the default test logger configuration
if 'ALIGNAK_LOGGER_CONFIGURATION' in os.environ:
del os.environ['ALIGNAK_LOGGER_CONFIGURATION']
print("Test terminated!")
def test_arbiter_missing_parameters(self):
""" Running the Alignak Arbiter with missing command line parameters
:return:
"""
print("Launching arbiter with missing parameters...")
args = [
os.path.join(self._test_dir, "../alignak/bin/alignak_arbiter.py")
]
arbiter = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print("%s launched (pid=%d)" % ('arbiter', arbiter.pid))
# Waiting for arbiter to parse the configuration
sleep(3)
ret = arbiter.poll()
        print("*** Arbiter exited with code: %s" % ret)
assert ret is not None, "Arbiter is still running!"
stderr = arbiter.stderr.read()
print(stderr)
assert b"usage: alignak_arbiter.py" in stderr
# Arbiter process must exit with a return code == 2
assert ret == 2
def test_arbiter_no_environment(self):
""" Running the Alignak Arbiter without environment file
:return:
"""
print("Launching arbiter without environment file...")
args = [
os.path.join(self._test_dir, "../alignak/bin/alignak_arbiter.py")
]
arbiter = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print("%s launched (pid=%d)" % ('arbiter', arbiter.pid))
# Waiting for arbiter to parse the configuration
sleep(3)
ret = arbiter.poll()
        print("*** Arbiter exited with code: %s" % ret)
assert ret is not None, "Arbiter is still running!"
stdout = arbiter.stdout.read()
print(stdout)
stderr = arbiter.stderr.read()
print(stderr)
assert b"usage: alignak_arbiter.py" in stderr
# Arbiter process must exit with a return code == 2
assert ret == 2
# @pytest.mark.skip("To be re-activated with spare mode")
def test_arbiter_class_no_environment(self):
""" Instantiate the Alignak Arbiter class without environment file
:return:
"""
from alignak.daemons.arbiterdaemon import Arbiter
print("Instantiate arbiter without environment file...")
# Using values that are usually provided by the command line parameters
args = {
'env_file': '',
'alignak_name': 'alignak-test',
'daemon_name': 'arbiter-master',
'log_filename': '/tmp/arbiter.log',
'legacy_cfg_files': [os.path.join(self._test_dir, '../etc/alignak.cfg')]
}
# Exception because the logger configuration file does not exist
self.arbiter = Arbiter(**args)
print("Arbiter: %s" % self.arbiter)
assert self.arbiter.env_filename == ''
assert self.arbiter.legacy_cfg_files == [os.path.abspath(os.path.join(self._test_dir, '../etc/alignak.cfg'))]
# Configure the logger
self.arbiter.log_level = 'ERROR'
self.arbiter.setup_alignak_logger()
# Setup our modules manager
# self.arbiter.load_modules_manager()
# Load and initialize the arbiter configuration
# This to check that the configuration is correct!
self.arbiter.load_monitoring_config_file()
def test_arbiter_class_env_default(self):
""" Instantiate the Alignak Arbiter class without legacy cfg files
:return:
"""
# Unset legacy configuration files
files = ['%s/etc/alignak.ini' % self.cfg_folder]
try:
cfg = configparser.ConfigParser()
cfg.read(files)
# Nagios legacy files - not configured
cfg.set('alignak-configuration', 'cfg', '')
with open('%s/etc/alignak.ini' % self.cfg_folder, "w") as modified:
cfg.write(modified)
except Exception as exp:
print("* parsing error in config file: %s" % exp)
assert False
from alignak.daemons.arbiterdaemon import Arbiter
print("Instantiate arbiter with default environment file...")
# Using values that are usually provided by the command line parameters
args = {
'env_file': "/tmp/alignak/etc/alignak.ini",
'daemon_name': 'arbiter-master'
}
self.arbiter = Arbiter(**args)
print("Arbiter: %s" % (self.arbiter))
print("Arbiter: %s" % (self.arbiter.__dict__))
assert self.arbiter.env_filename == '/tmp/alignak/etc/alignak.ini'
assert self.arbiter.legacy_cfg_files == []
assert len(self.arbiter.legacy_cfg_files) == 0
# Configure the logger
self.arbiter.log_level = 'INFO'
self.arbiter.setup_alignak_logger()
# Setup our modules manager
# self.arbiter.load_modules_manager()
# Load and initialize the arbiter configuration
# This to check that the configuration is correct!
self.arbiter.load_monitoring_config_file()
# No legacy files found
assert len(self.arbiter.legacy_cfg_files) == 0
def test_arbiter_unexisting_environment(self):
""" Running the Alignak Arbiter with a not existing environment file
:return:
"""
print("Launching arbiter with a not existing environment file...")
args = [
os.path.join(self._test_dir, "../alignak/bin/alignak_arbiter.py"),
"-e", "/tmp/etc/unexisting.ini"
]
arbiter = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print("%s launched (pid=%d)" % ('arbiter', arbiter.pid))
# Waiting for arbiter to parse the configuration
sleep(3)
ret = arbiter.poll()
        print("*** Arbiter exited with code: %s" % ret)
assert ret is not None, "Arbiter is still running!"
stdout = arbiter.stdout.read()
print(stdout)
assert b"Daemon 'arbiter-master' did not correctly read " \
b"Alignak environment file: /tmp/etc/unexisting.ini" in stdout
        # Arbiter process must exit with a return code == 99
assert ret == 99
def test_arbiter_no_monitoring_configuration(self):
""" Running the Alignak Arbiter with no monitoring configuration defined -
no legacy cfg files
:return:
"""
print("Launching arbiter with no monitoring configuration...")
# Unset legacy configuration files
files = ['%s/etc/alignak.ini' % self.cfg_folder]
try:
cfg = configparser.ConfigParser()
cfg.read(files)
# Nagios legacy files - not configured
cfg.set('alignak-configuration', 'cfg', '')
with open('%s/etc/alignak.ini' % self.cfg_folder, "w") as modified:
cfg.write(modified)
except Exception as exp:
print("* parsing error in config file: %s" % exp)
assert False
args = [
os.path.join(self._test_dir, "../alignak/bin/alignak_arbiter.py"),
"-e", '%s/etc/alignak.ini' % self.cfg_folder
]
ret = self._run_command_with_timeout(args, 30)
errors = 0
ok = False
        with open('/tmp/alignak/log/arbiter-master.log') as f:
            for line in f:
                if 'total number of hosts in all realms: 0' in line:
                    ok = True
                if 'ERROR:' in line or 'CRITICAL:' in line:
                    print("*** %s" % line.rstrip())
                    errors = errors + 1
        # Arbiter must not log any error
        assert errors == 0
        assert ok
def test_arbiter_unexisting_monitoring_configuration(self):
""" Running the Alignak Arbiter with a not existing monitoring configuration file
:return:
"""
print("Launching arbiter with no monitoring configuration...")
files = ['%s/etc/alignak.ini' % self.cfg_folder]
try:
cfg = configparser.ConfigParser()
cfg.read(files)
# Nagios legacy files
cfg.set('alignak-configuration', 'cfg', '%(etcdir)s/alignak-missing.cfg')
with open('%s/etc/alignak.ini' % self.cfg_folder, "w") as modified:
cfg.write(modified)
except Exception as exp:
print("* parsing error in config file: %s" % exp)
assert False
args = [
os.path.join(self._test_dir, "../alignak/bin/alignak_arbiter.py"),
"-e", '%s/etc/alignak.ini' % self.cfg_folder
]
ret = self._run_command_with_timeout(args, 20)
errors = 0
ok = False
with open('/tmp/alignak/log/arbiter-master.log') as f:
for line in f:
if 'WARNING:' in line and "cannot open main file '/tmp/alignak/etc/alignak-missing.cfg' for reading" in line:
ok = True
if 'ERROR:' in line or 'CRITICAL:' in line:
print("*** %s" % line.rstrip())
errors = errors + 1
        # Two errors are logged because of the missing configuration file
assert errors == 2
# Arbiter process must exit with a return code == 1
assert ret == 1
assert ok
def test_arbiter_bad_configuration(self):
""" Running the Alignak Arbiter with bad monitoring configuration (unknown sub directory)
:return:
"""
print("Launching arbiter with a bad monitoring configuration...")
files = ['%s/etc/alignak.ini' % self.cfg_folder]
try:
cfg = configparser.ConfigParser()
cfg.read(files)
# Nagios legacy files
cfg.set('alignak-configuration', 'cfg', '%(etcdir)s/alignak.cfg')
with open('%s/etc/alignak.ini' % self.cfg_folder, "w") as modified:
cfg.write(modified)
except Exception as exp:
print("* parsing error in config file: %s" % exp)
assert False
# Update configuration with a bad file name
files = ['%s/etc/alignak.cfg' % self.cfg_folder]
replacements = {
'cfg_dir=arbiter/templates': 'cfg_dir=unexisting/objects/realms'
}
self._files_update(files, replacements)
args = [
os.path.join(self._test_dir, "../alignak/bin/alignak_arbiter.py"),
"-e", '%s/etc/alignak.ini' % self.cfg_folder
]
ret = self._run_command_with_timeout(args, 20)
errors = 0
ok = False
with open('/tmp/alignak/log/arbiter-master.log') as f:
for line in f:
if 'ERROR:' in line and "*** One or more problems were encountered while " \
"processing the configuration (first check)..." in line:
ok = True
if 'ERROR:' in line or 'CRITICAL:' in line:
print("*** %s" % line.rstrip())
errors = errors + 1
        # Two errors are logged because of the bad configuration
assert errors == 2
# Arbiter process must exit with a return code == 1
assert ret == 1
assert ok
def test_arbiter_i_am_not_configured(self):
""" Running the Alignak Arbiter with missing arbiter configuration
:return:
"""
print("Launching arbiter with a missing arbiter configuration...")
# Current working directory for the default log file!
if os.path.exists('%s/my-arbiter-name.log' % os.getcwd()):
os.remove('%s/my-arbiter-name.log' % os.getcwd())
args = [
os.path.join(self._test_dir, "../alignak/bin/alignak_arbiter.py"),
"-e", '%s/etc/alignak.ini' % self.cfg_folder,
"-n", "my-arbiter-name"
]
ret = self._run_command_with_timeout(args, 20)
errors = 0
ok = False
# Note the log filename!
with open('%s/my-arbiter-name.log' % os.getcwd()) as f:
for line in f:
if "I cannot find my own configuration (my-arbiter-name)" in line:
ok = True
if 'ERROR:' in line or 'CRITICAL:' in line:
print("*** %s" % line.rstrip())
errors = errors + 1
        # Two errors are logged because the arbiter cannot find its own configuration
assert errors == 2
# Arbiter process must exit with a return code == 1
assert ret == 1
assert ok
def test_arbiter_verify(self):
""" Running the Alignak Arbiter in verify mode only with the default shipped configuration
:return:
"""
# Set a specific logger configuration - do not use the default test configuration
os.environ['ALIGNAK_LOGGER_CONFIGURATION'] = \
os.path.abspath('./etc/warning_alignak-logger.json')
print("Logger configuration file is: %s" % os.environ['ALIGNAK_LOGGER_CONFIGURATION'])
print("Launching arbiter in verification mode...")
args = [
os.path.join(self._test_dir, "../alignak/bin/alignak_arbiter.py"),
"-e", '%s/etc/alignak.ini' % self.cfg_folder,
"-V"
]
ret = self._run_command_with_timeout(args, 20)
errors = 0
specific_log = False
info_log = False
with open('/tmp/alignak/log/arbiter-master.log') as f:
for line in f:
if 'INFO:' in line:
info_log = True
if 'Arbiter is in configuration check mode' in line:
specific_log = True
if 'ERROR:' in line or 'CRITICAL:' in line:
print("*** %s" % line.rstrip())
errors = errors + 1
# Arbiter process must exit with a return code == 0 and no errors
# Arbiter changed the log level to INFO because of the verify mode
assert specific_log is True
assert info_log is True
assert errors == 0
assert ret == 0
def test_arbiter_parameters_pid(self):
""" Run the Alignak Arbiter with some parameters - set a pid file
:return:
"""
# All the default configuration files are in /tmp/etc
print("Launching arbiter with forced PID file...")
if os.path.exists('/tmp/arbiter.pid'):
os.remove('/tmp/arbiter.pid')
args = [
os.path.join(self._test_dir, "../alignak/bin/alignak_arbiter.py"),
"-e", '%s/etc/alignak.ini' % self.cfg_folder, "-V",
"--pid_file", "/tmp/arbiter.pid"
]
ret = self._run_command_with_timeout(args, 20)
# The arbiter unlinks the pid file - I cannot assert it exists!
# assert os.path.exists('/tmp/arbiter.pid')
errors = 0
# ok = False
with open('/tmp/alignak/log/arbiter-master.log') as f:
for line in f:
# if 'Unlinking /tmp/arbiter.pid' in line:
# ok = True
if 'ERROR:' in line or 'CRITICAL:' in line:
print("*** %s" % line.rstrip())
errors = errors + 1
# Arbiter process must exit with a return code == 0 and no errors
assert errors == 0
assert ret == 0
# assert ok
def test_arbiter_parameters_log(self):
""" Run the Alignak Arbiter with some parameters - log file name
Log file name and log level may be specified on the command line
:return:
"""
# All the default configuration files are in /tmp/etc
print("Launching arbiter with forced log file...")
if os.path.exists('/tmp/arbiter.log'):
os.remove('/tmp/arbiter.log')
args = [
os.path.join(self._test_dir, "../alignak/bin/alignak_arbiter.py"),
"-e", '%s/etc/alignak.ini' % self.cfg_folder,
"-V", "-vv",
"--log_level", "INFO", "--log_file", "/tmp/arbiter.log"
]
ret = self._run_command_with_timeout(args, 20)
# Log file created because of the -V option
assert os.path.exists("/tmp/arbiter.log")
errors = 0
with open('/tmp/arbiter.log') as f:
for line in f:
if 'ERROR:' in line or 'CRITICAL:' in line:
print("*** %s" % line.rstrip())
errors = errors + 1
# Arbiter process must exit with a return code == 0 and no errors
assert errors == 0
assert ret == 0
@pytest.mark.skip("To be re-activated with spare mode")
def test_arbiter_spare_missing_configuration(self):
""" Run the Alignak Arbiter in spare mode - missing spare configuration
:return:
"""
print("Launching arbiter in spare mode...")
        args = [
            os.path.join(self._test_dir, "../alignak/bin/alignak_arbiter.py"),
            "-a", self.cfg_folder + "/alignak.cfg",
            "-c", self.cfg_folder + "/daemons/arbiterd.ini",
            "-n", "arbiter-spare"
        ]
arbiter = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print("%s launched (pid=%d)" % ('arbiter', arbiter.pid))
sleep(5)
ret = arbiter.poll()
print("*** Arbiter exited with code: %s" % ret)
assert ret is not None, "Arbiter is still running!"
# Arbiter process must exit with a return code == 1
assert ret == 1
@pytest.mark.skip("To be re-activated with spare mode")
def test_arbiter_spare(self):
""" Run the Alignak Arbiter in spare mode - missing spare configuration
:return:
"""
print("Launching arbiter in spare mode...")
        args = [
            os.path.join(self._test_dir, "../alignak/bin/alignak_arbiter.py"),
            "-a", self.cfg_folder + "/alignak.cfg",
            "-c", self.cfg_folder + "/daemons/arbiterd.ini",
            "-n", "arbiter-spare"
        ]
arbiter = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print("%s launched (pid=%d)" % ('arbiter', arbiter.pid))
ret = arbiter.poll()
# Arbiter must still be running ... it is still trying to dispatch the configuration!
assert ret is None, "Arbiter exited!"
sleep(5)
# Arbiter never stops trying to send its configuration! We must kill it...
print("Asking arbiter to end...")
os.kill(arbiter.pid, signal.SIGTERM)
        ret = arbiter.poll()
        print("*** Arbiter killed, poll() returns: %s" % ret)
        assert ret is None, "Arbiter already exited before being killed!"
# No ERRORS because the daemons are not alive !
ok = 0
for line in iter(arbiter.stdout.readline, b''):
print(">>> %s" % line.rstrip())
if b'INFO:' in line:
# I must find this line
if b'[alignak.daemons.arbiterdaemon] I found myself in the configuration: arbiter-spare' in line:
ok += 1
# and this one also
if b'[alignak.daemons.arbiterdaemon] I am a spare Arbiter: arbiter-spare' in line:
ok += 1
if b'I am not the master arbiter, I stop parsing the configuration' in line:
ok += 1
if b'Waiting for master...' in line:
ok += 1
if b'Waiting for master death' in line:
ok += 1
assert b'CRITICAL:' not in line
for line in iter(arbiter.stderr.readline, b''):
print("*** %s" % line.rstrip())
if sys.version_info > (2, 7):
assert False, "stderr output!"
assert ok == 5
def test_arbiter_normal(self):
""" Running the Alignak Arbiter - normal verbosity
Expects log at the WARNING level - depends upon the logger configuration file
:return:
"""
self._arbiter(verbosity=None)
    def test_arbiter_verbose(self):
        """ Running the Alignak Arbiter - verbose
Expects log at the INFO level
:return:
"""
self._arbiter(verbosity='--verbose')
def test_arbiter_verbose2(self):
self._arbiter(verbosity='-v')
def test_arbiter_very_verbose(self):
""" Running the Alignak Arbiter - very verbose
Expects log at the DEBUG level
:return:
"""
self._arbiter(verbosity='--debug')
def test_arbiter_very_verbose2(self):
self._arbiter(verbosity='-vv')
def _arbiter(self, verbosity=None, log_file=None):
""" Running the Alignak Arbiter with a specific verbosity
:return:
"""
# Set a specific logger configuration - do not use the default test configuration
# to use the default shipped configuration
os.environ['ALIGNAK_LOGGER_CONFIGURATION'] = \
os.path.abspath('./etc/warning_alignak-logger.json')
print("Logger configuration file is: %s" % os.environ['ALIGNAK_LOGGER_CONFIGURATION'])
print("Launching arbiter ...")
args = [
os.path.join(self._test_dir, "../alignak/bin/alignak_arbiter.py"),
"-n", "arbiter-master",
"-e", '%s/etc/alignak.ini' % self.cfg_folder
]
if verbosity:
args.append(verbosity)
arbiter = subprocess.Popen(args)
print("%s launched (pid=%d)" % ('arbiter', arbiter.pid))
# Wait for the arbiter to get started
time.sleep(5)
# This function will request the arbiter daemon to stop
self._stop_alignak_daemons(request_stop_uri='http://127.0.0.1:7770')
errors = 0
info_log = False
debug_log = False
with open('/tmp/alignak/log/arbiter-master.log') as f:
for line in f:
if 'DEBUG:' in line:
debug_log = True
if 'INFO:' in line:
info_log = True
if 'ERROR:' in line or 'CRITICAL:' in line:
print("*** %s" % line.rstrip())
errors = errors + 1
# arbiter process may exit with no errors!
# assert errors == 0
        # Arbiter changed the log level to INFO because of the verbose option
if verbosity in ['-v', '--verbose']:
assert info_log is True
        # Arbiter changed the log level to DEBUG because of the debug option
if verbosity in ['-vv', '--debug']:
assert debug_log is True
def test_broker(self):
""" Running the Alignak Broker
:return:
"""
print("Launching broker ...")
args = [
os.path.join(self._test_dir, "../alignak/bin/alignak_broker.py"),
"-n", "broker-master",
"-e", '%s/etc/alignak.ini' % self.cfg_folder
]
broker = subprocess.Popen(args)
print("%s launched (pid=%d)" % ('broker', broker.pid))
# Wait for the broker to get started
time.sleep(2)
# This function will request the arbiter daemon to stop
self._stop_alignak_daemons(request_stop_uri='http://127.0.0.1:7772')
errors = 0
with open('/tmp/alignak/log/broker-master.log') as f:
for line in f:
if 'ERROR:' in line or 'CRITICAL:' in line:
print("*** %s" % line.rstrip())
errors = errors + 1
# broker process must exit with no errors
assert errors == 0
def test_poller(self):
""" Running the Alignak poller
:return:
"""
print("Launching poller ...")
args = [
os.path.join(self._test_dir, "../alignak/bin/alignak_poller.py"),
"-n", "poller-master",
"-e", '%s/etc/alignak.ini' % self.cfg_folder
]
poller = subprocess.Popen(args)
print("%s launched (pid=%d)" % ('poller', poller.pid))
# Wait for the poller to get started
time.sleep(2)
# This function will request the arbiter daemon to stop
self._stop_alignak_daemons(request_stop_uri='http://127.0.0.1:7771')
errors = 0
with open('/tmp/alignak/log/poller-master.log') as f:
for line in f:
if 'ERROR:' in line or 'CRITICAL:' in line:
print("*** %s" % line.rstrip())
errors = errors + 1
# poller process must exit with a return code == 0 and no errors
assert errors == 0
def test_reactionner(self):
""" Running the Alignak reactionner
:return:
"""
print("Launching reactionner ...")
args = [
os.path.join(self._test_dir, "../alignak/bin/alignak_reactionner.py"),
"-n", "reactionner-master",
"-e", '%s/etc/alignak.ini' % self.cfg_folder
]
reactionner = subprocess.Popen(args)
print("%s launched (pid=%d)" % ('reactionner', reactionner.pid))
# Wait for the reactionner to get started
time.sleep(2)
# This function will request the arbiter daemon to stop
self._stop_alignak_daemons(request_stop_uri='http://127.0.0.1:7769')
errors = 0
with open('/tmp/alignak/log/reactionner-master.log') as f:
for line in f:
if 'ERROR:' in line or 'CRITICAL:' in line:
print("*** %s" % line.rstrip())
errors = errors + 1
# reactionner process must exit with a return code == 0 and no errors
assert errors == 0
def test_receiver(self):
""" Running the Alignak receiver
:return:
"""
print("Launching receiver ...")
args = [
os.path.join(self._test_dir, "../alignak/bin/alignak_receiver.py"),
"-n", "receiver-master",
"-e", '%s/etc/alignak.ini' % self.cfg_folder
]
receiver = subprocess.Popen(args)
print("%s launched (pid=%d)" % ('receiver', receiver.pid))
# Wait for the receiver to get started
time.sleep(2)
# This function will request the arbiter daemon to stop
self._stop_alignak_daemons(request_stop_uri='http://127.0.0.1:7773')
errors = 0
with open('/tmp/alignak/log/receiver-master.log') as f:
for line in f:
if 'ERROR:' in line or 'CRITICAL:' in line:
print("*** %s" % line.rstrip())
errors = errors + 1
# receiver process must exit with a return code == 0 and no errors
assert errors == 0
def test_scheduler(self):
""" Running the Alignak scheduler
:return:
"""
print("Launching scheduler ...")
args = [
os.path.join(self._test_dir, "../alignak/bin/alignak_scheduler.py"),
"-n", "scheduler-master",
"-e", '%s/etc/alignak.ini' % self.cfg_folder
]
scheduler = subprocess.Popen(args)
print("%s launched (pid=%d)" % ('scheduler', scheduler.pid))
# Wait for the scheduler to get started
time.sleep(2)
# This function will request the arbiter daemon to stop
self._stop_alignak_daemons(request_stop_uri='http://127.0.0.1:7768')
errors = 0
with open('/tmp/alignak/log/scheduler-master.log') as f:
for line in f:
if 'ERROR:' in line or 'CRITICAL:' in line:
print("*** %s" % line.rstrip())
errors = errors + 1
# scheduler process must exit with a return code == 0 and no errors
assert errors == 0
|
Alignak-monitoring/alignak
|
tests_integ/test_launch_daemons.py
|
Python
|
agpl-3.0
| 30,105
|
"""
This is the default template for our main set of AWS servers.
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=wildcard-import, unused-wildcard-import
import codecs
import copy
import os
import warnings
import yaml
from corsheaders.defaults import default_headers as corsheaders_default_headers
from django.core.exceptions import ImproperlyConfigured
from django.urls import reverse_lazy
from edx_django_utils.plugins import add_plugins
from path import Path as path
from openedx.core.djangoapps.plugins.constants import ProjectType, SettingsType
from .common import *
from openedx.core.lib.derived import derive_settings # lint-amnesty, pylint: disable=wrong-import-order
from openedx.core.lib.logsettings import get_logger_config # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.modulestore.modulestore_settings import convert_module_store_setting_if_needed # lint-amnesty, pylint: disable=wrong-import-order
def get_env_setting(setting):
""" Get the environment setting or return exception """
try:
return os.environ[setting]
except KeyError:
error_msg = "Set the %s env variable" % setting
raise ImproperlyConfigured(error_msg) # lint-amnesty, pylint: disable=raise-missing-from
############### ALWAYS THE SAME ################################
DEBUG = False
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
# IMPORTANT: With this enabled, the server must always be behind a proxy that
# strips the header HTTP_X_FORWARDED_PROTO from client requests. Otherwise,
# a user can fool our server into thinking it was an https connection.
# See
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-proxy-ssl-header
# for other warnings.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
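# An nginx front-end would typically enforce this with, e.g.:
#   proxy_set_header X-Forwarded-Proto $scheme;
# (illustrative snippet; any proxy that fully controls the header works)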
############### END ALWAYS THE SAME ################################
# A file path to a YAML file from which to load all the configuration for the edx platform
try:
CONFIG_FILE = get_env_setting('CMS_CFG')
except ImproperlyConfigured:
CONFIG_FILE = get_env_setting('STUDIO_CFG')
warnings.warn(
"STUDIO_CFG environment variable is deprecated. Use CMS_CFG instead.",
DeprecationWarning,
stacklevel=2,
)
with codecs.open(CONFIG_FILE, encoding='utf-8') as f:
__config__ = yaml.safe_load(f)
# ENV_TOKENS and AUTH_TOKENS are included for backward compatibility.
# Removing them may break plugins that rely on them.
ENV_TOKENS = __config__
AUTH_TOKENS = __config__
# Add the key/values from config into the global namespace of this module.
# But don't override the FEATURES dict because we do that in an additive way.
__config_copy__ = copy.deepcopy(__config__)
KEYS_WITH_MERGED_VALUES = [
'FEATURES',
'TRACKING_BACKENDS',
'EVENT_TRACKING_BACKENDS',
'JWT_AUTH',
'CELERY_QUEUES',
'MKTG_URL_LINK_MAP',
'MKTG_URL_OVERRIDES',
]
for key in KEYS_WITH_MERGED_VALUES:
if key in __config_copy__:
del __config_copy__[key]
vars().update(__config_copy__)
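# Every top-level YAML key (e.g. a hypothetical MY_SETTING: value) becomes a
# module-level setting here, except the merged keys removed just above.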
try:
# A file path to a YAML file from which to load all the code revisions currently deployed
REVISION_CONFIG_FILE = get_env_setting('REVISION_CFG')
with codecs.open(REVISION_CONFIG_FILE, encoding='utf-8') as f:
REVISION_CONFIG = yaml.safe_load(f)
except Exception: # pylint: disable=broad-except
REVISION_CONFIG = {}
# Do NOT calculate this dynamically at startup with git because it's *slow*.
EDX_PLATFORM_REVISION = REVISION_CONFIG.get('EDX_PLATFORM_REVISION', EDX_PLATFORM_REVISION)
###################################### CELERY ################################
# Don't use a connection pool, since connections are dropped by ELB.
BROKER_POOL_LIMIT = 0
BROKER_CONNECTION_TIMEOUT = 1
# For the Result Store, use the django cache named 'celery'
CELERY_RESULT_BACKEND = 'django-cache'
# When the broker is behind an ELB, use a heartbeat to refresh the
# connection and to detect if it has been dropped.
BROKER_HEARTBEAT = ENV_TOKENS.get('BROKER_HEARTBEAT', 60.0)
BROKER_HEARTBEAT_CHECKRATE = ENV_TOKENS.get('BROKER_HEARTBEAT_CHECKRATE', 2)
# Each worker should only fetch one message at a time
CELERYD_PREFETCH_MULTIPLIER = 1
CELERY_ROUTES = "openedx.core.lib.celery.routers.route_task"
# STATIC_URL_BASE specifies the base url to use for static files
STATIC_URL_BASE = ENV_TOKENS.get('STATIC_URL_BASE', None)
if STATIC_URL_BASE:
STATIC_URL = STATIC_URL_BASE
if not STATIC_URL.endswith("/"):
STATIC_URL += "/"
STATIC_URL += 'studio/'
DEFAULT_COURSE_VISIBILITY_IN_CATALOG = ENV_TOKENS.get(
'DEFAULT_COURSE_VISIBILITY_IN_CATALOG',
DEFAULT_COURSE_VISIBILITY_IN_CATALOG
)
# DEFAULT_MOBILE_AVAILABLE specifies if the course is available for mobile by default
DEFAULT_MOBILE_AVAILABLE = ENV_TOKENS.get(
'DEFAULT_MOBILE_AVAILABLE',
DEFAULT_MOBILE_AVAILABLE
)
# How long to cache OpenAPI schemas and UI, in seconds.
OPENAPI_CACHE_TIMEOUT = ENV_TOKENS.get('OPENAPI_CACHE_TIMEOUT', 60 * 60)
# STATIC_ROOT specifies the directory where static files are
# collected
STATIC_ROOT_BASE = ENV_TOKENS.get('STATIC_ROOT_BASE', None)
if STATIC_ROOT_BASE:
STATIC_ROOT = path(STATIC_ROOT_BASE) / 'studio'
WEBPACK_LOADER['DEFAULT']['STATS_FILE'] = STATIC_ROOT / "webpack-stats.json"
WEBPACK_LOADER['WORKERS']['STATS_FILE'] = STATIC_ROOT / "webpack-worker-stats.json"
EMAIL_FILE_PATH = ENV_TOKENS.get('EMAIL_FILE_PATH', None)
# CMS_BASE: Public domain name of Studio (should be resolvable from the end-user's browser)
CMS_BASE = ENV_TOKENS.get('CMS_BASE')
LMS_BASE = ENV_TOKENS.get('LMS_BASE')
LMS_ROOT_URL = ENV_TOKENS.get('LMS_ROOT_URL')
LMS_INTERNAL_ROOT_URL = ENV_TOKENS.get('LMS_INTERNAL_ROOT_URL', LMS_ROOT_URL)
ENTERPRISE_API_URL = ENV_TOKENS.get('ENTERPRISE_API_URL', LMS_INTERNAL_ROOT_URL + '/enterprise/api/v1/')
ENTERPRISE_CONSENT_API_URL = ENV_TOKENS.get('ENTERPRISE_CONSENT_API_URL', LMS_INTERNAL_ROOT_URL + '/consent/api/v1/')
# Note that FEATURES['PREVIEW_LMS_BASE'] gets read in from the environment file.
# List of logout URIs for each IDA that the learner should be logged out of when they logout of
# Studio. Only applies to IDA for which the social auth flow uses DOT (Django OAuth Toolkit).
IDA_LOGOUT_URI_LIST = ENV_TOKENS.get('IDA_LOGOUT_URI_LIST', [])
SITE_NAME = ENV_TOKENS['SITE_NAME']
ALLOWED_HOSTS = [
# TODO: bbeggs remove this before prod, temp fix to get load testing running
"*",
CMS_BASE,
]
LOG_DIR = ENV_TOKENS['LOG_DIR']
DATA_DIR = path(ENV_TOKENS.get('DATA_DIR', DATA_DIR))
CACHES = ENV_TOKENS['CACHES']
# Cache used for location mapping -- called many times with the same key/value
# in a given request.
if 'loc_cache' not in CACHES:
CACHES['loc_cache'] = {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_location_mem_cache',
}
if 'staticfiles' in CACHES:
CACHES['staticfiles']['KEY_PREFIX'] = EDX_PLATFORM_REVISION
# In order to transition from local disk asset storage to S3 backed asset storage,
# we need to run asset collection twice, once for local disk and once for S3.
# Once we have migrated to service assets off S3, then we can convert this back to
# managed by the yaml file contents
STATICFILES_STORAGE = os.environ.get('STATICFILES_STORAGE', ENV_TOKENS.get('STATICFILES_STORAGE', STATICFILES_STORAGE))
# Load all AWS_ prefixed variables to allow an S3Boto3Storage to be configured
_locals = locals()
for key, value in ENV_TOKENS.items():
if key.startswith('AWS_'):
_locals[key] = value
SESSION_COOKIE_DOMAIN = ENV_TOKENS.get('SESSION_COOKIE_DOMAIN')
SESSION_COOKIE_HTTPONLY = ENV_TOKENS.get('SESSION_COOKIE_HTTPONLY', True)
REGISTRATION_EMAIL_PATTERNS_ALLOWED = ENV_TOKENS.get('REGISTRATION_EMAIL_PATTERNS_ALLOWED')
# allow for environments to specify what cookie name our login subsystem should use
# this is to fix a bug regarding simultaneous logins between edx.org and edge.edx.org which can
# happen with some browsers (e.g. Firefox)
if ENV_TOKENS.get('SESSION_COOKIE_NAME', None):
# NOTE, there's a bug in Django (http://bugs.python.org/issue18012) which necessitates this being a str()
SESSION_COOKIE_NAME = str(ENV_TOKENS.get('SESSION_COOKIE_NAME'))
# This is the domain that is used to set shared cookies between various sub-domains.
# By default, it's set to the same thing as the SESSION_COOKIE_DOMAIN, but we want to make it overrideable.
SHARED_COOKIE_DOMAIN = ENV_TOKENS.get('SHARED_COOKIE_DOMAIN', SESSION_COOKIE_DOMAIN)
# Determines whether the CSRF token can be transported on
# unencrypted channels. It is set to False here for backward compatibility,
# but it is highly recommended that this is True for environments accessed
# by end users.
CSRF_COOKIE_SECURE = ENV_TOKENS.get('CSRF_COOKIE_SECURE', False)
# Marketing URL overrides
MKTG_URL_LINK_MAP.update(ENV_TOKENS.get('MKTG_URL_LINK_MAP', {}))
MKTG_URL_OVERRIDES.update(ENV_TOKENS.get('MKTG_URL_OVERRIDES', MKTG_URL_OVERRIDES))
for name, value in ENV_TOKENS.get("CODE_JAIL", {}).items():
oldvalue = CODE_JAIL.get(name)
if isinstance(oldvalue, dict):
for subname, subvalue in value.items():
oldvalue[subname] = subvalue
else:
CODE_JAIL[name] = value
COURSES_WITH_UNSAFE_CODE = ENV_TOKENS.get("COURSES_WITH_UNSAFE_CODE", [])
# COMPREHENSIVE_THEME_LOCALE_PATHS contain the paths to themes locale directories e.g.
# "COMPREHENSIVE_THEME_LOCALE_PATHS" : [
# "/edx/src/edx-themes/conf/locale"
# ],
COMPREHENSIVE_THEME_LOCALE_PATHS = ENV_TOKENS.get('COMPREHENSIVE_THEME_LOCALE_PATHS', [])
# PREPEND_LOCALE_PATHS contain the paths to locale directories to load first e.g.
# "PREPEND_LOCALE_PATHS" : [
# "/edx/my-locale/"
# ],
PREPEND_LOCALE_PATHS = ENV_TOKENS.get('PREPEND_LOCALE_PATHS', [])
#Timezone overrides
TIME_ZONE = ENV_TOKENS.get('CELERY_TIMEZONE', CELERY_TIMEZONE)
##### REGISTRATION RATE LIMIT SETTINGS #####
REGISTRATION_VALIDATION_RATELIMIT = ENV_TOKENS.get(
'REGISTRATION_VALIDATION_RATELIMIT', REGISTRATION_VALIDATION_RATELIMIT
)
REGISTRATION_RATELIMIT = ENV_TOKENS.get('REGISTRATION_RATELIMIT', REGISTRATION_RATELIMIT)
# Push to LMS overrides
GIT_REPO_EXPORT_DIR = ENV_TOKENS.get('GIT_REPO_EXPORT_DIR', '/edx/var/edxapp/export_course_repos')
ENV_FEATURES = ENV_TOKENS.get('FEATURES', {})
for feature, value in ENV_FEATURES.items():
FEATURES[feature] = value
# Additional installed apps
for app in ENV_TOKENS.get('ADDL_INSTALLED_APPS', []):
INSTALLED_APPS.append(app)
LOGGING = get_logger_config(LOG_DIR,
logging_env=ENV_TOKENS['LOGGING_ENV'],
service_variant=SERVICE_VARIANT)
# The following variables use (or) instead of the default value inside (get). This enforces using the lazy
# translated values when the variable is an empty string. Setting these variables to an empty string in the
# related json files therefore makes the system read their values from the django translation files.
PLATFORM_NAME = ENV_TOKENS.get('PLATFORM_NAME') or PLATFORM_NAME
PLATFORM_DESCRIPTION = ENV_TOKENS.get('PLATFORM_DESCRIPTION') or PLATFORM_DESCRIPTION
STUDIO_NAME = ENV_TOKENS.get('STUDIO_NAME') or STUDIO_NAME
STUDIO_SHORT_NAME = ENV_TOKENS.get('STUDIO_SHORT_NAME') or STUDIO_SHORT_NAME
# Event Tracking
if "TRACKING_IGNORE_URL_PATTERNS" in ENV_TOKENS:
TRACKING_IGNORE_URL_PATTERNS = ENV_TOKENS.get("TRACKING_IGNORE_URL_PATTERNS")
# Heartbeat
HEARTBEAT_CELERY_ROUTING_KEY = ENV_TOKENS.get('HEARTBEAT_CELERY_ROUTING_KEY', HEARTBEAT_CELERY_ROUTING_KEY)
# Sometimes, OAuth2 clients want the user to redirect back to their site after logout. But to determine if the given
# redirect URL/path is safe for redirection, the following variable is used by edX.
LOGIN_REDIRECT_WHITELIST = ENV_TOKENS.get(
'LOGIN_REDIRECT_WHITELIST',
LOGIN_REDIRECT_WHITELIST
)
LOGIN_REDIRECT_WHITELIST.extend([reverse_lazy('home')])
############### XBlock filesystem field config ##########
if 'DJFS' in AUTH_TOKENS and AUTH_TOKENS['DJFS'] is not None:
DJFS = AUTH_TOKENS['DJFS']
if 'url_root' in DJFS:
DJFS['url_root'] = DJFS['url_root'].format(platform_revision=EDX_PLATFORM_REVISION)
AWS_SES_REGION_NAME = ENV_TOKENS.get('AWS_SES_REGION_NAME', 'us-east-1')
AWS_SES_REGION_ENDPOINT = ENV_TOKENS.get('AWS_SES_REGION_ENDPOINT', 'email.us-east-1.amazonaws.com')
# Note that this is the Studio key for Segment. There is a separate key for the LMS.
CMS_SEGMENT_KEY = AUTH_TOKENS.get('SEGMENT_KEY')
SECRET_KEY = AUTH_TOKENS['SECRET_KEY']
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
if AWS_ACCESS_KEY_ID == "":
AWS_ACCESS_KEY_ID = None
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
if AWS_SECRET_ACCESS_KEY == "":
AWS_SECRET_ACCESS_KEY = None
AWS_STORAGE_BUCKET_NAME = AUTH_TOKENS.get('AWS_STORAGE_BUCKET_NAME', 'edxuploads')
# Disabling querystring auth instructs Boto to exclude the querystring parameters (e.g. signature, access key) it
# normally appends to every returned URL.
AWS_QUERYSTRING_AUTH = AUTH_TOKENS.get('AWS_QUERYSTRING_AUTH', True)
AWS_DEFAULT_ACL = 'private'
AWS_BUCKET_ACL = AWS_DEFAULT_ACL
AWS_QUERYSTRING_EXPIRE = 7 * 24 * 60 * 60 # 7 days
AWS_S3_CUSTOM_DOMAIN = AUTH_TOKENS.get('AWS_S3_CUSTOM_DOMAIN', 'edxuploads.s3.amazonaws.com')
if AUTH_TOKENS.get('DEFAULT_FILE_STORAGE'):
DEFAULT_FILE_STORAGE = AUTH_TOKENS.get('DEFAULT_FILE_STORAGE')
elif AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
else:
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
COURSE_IMPORT_EXPORT_BUCKET = ENV_TOKENS.get('COURSE_IMPORT_EXPORT_BUCKET', '')
if COURSE_IMPORT_EXPORT_BUCKET:
COURSE_IMPORT_EXPORT_STORAGE = 'cms.djangoapps.contentstore.storage.ImportExportS3Storage'
else:
COURSE_IMPORT_EXPORT_STORAGE = DEFAULT_FILE_STORAGE
USER_TASKS_ARTIFACT_STORAGE = COURSE_IMPORT_EXPORT_STORAGE
COURSE_METADATA_EXPORT_BUCKET = ENV_TOKENS.get('COURSE_METADATA_EXPORT_BUCKET', '')
if COURSE_METADATA_EXPORT_BUCKET:
COURSE_METADATA_EXPORT_STORAGE = 'cms.djangoapps.export_course_metadata.storage.CourseMetadataExportS3Storage'
else:
COURSE_METADATA_EXPORT_STORAGE = DEFAULT_FILE_STORAGE
DATABASES = AUTH_TOKENS['DATABASES']
# The normal database user does not have enough permissions to run migrations.
# Migrations are run with separate credentials, given as DB_MIGRATION_*
# environment variables
for name, database in DATABASES.items():
if name != 'read_replica':
database.update({
'ENGINE': os.environ.get('DB_MIGRATION_ENGINE', database['ENGINE']),
'USER': os.environ.get('DB_MIGRATION_USER', database['USER']),
'PASSWORD': os.environ.get('DB_MIGRATION_PASS', database['PASSWORD']),
'NAME': os.environ.get('DB_MIGRATION_NAME', database['NAME']),
'HOST': os.environ.get('DB_MIGRATION_HOST', database['HOST']),
'PORT': os.environ.get('DB_MIGRATION_PORT', database['PORT']),
})
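# e.g. exporting DB_MIGRATION_USER=migrator (hypothetical value) swaps in the
# privileged account for every database except the read replica.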
MODULESTORE = convert_module_store_setting_if_needed(AUTH_TOKENS.get('MODULESTORE', MODULESTORE))
MODULESTORE_FIELD_OVERRIDE_PROVIDERS = ENV_TOKENS.get(
'MODULESTORE_FIELD_OVERRIDE_PROVIDERS',
MODULESTORE_FIELD_OVERRIDE_PROVIDERS
)
XBLOCK_FIELD_DATA_WRAPPERS = ENV_TOKENS.get(
'XBLOCK_FIELD_DATA_WRAPPERS',
XBLOCK_FIELD_DATA_WRAPPERS
)
CONTENTSTORE = AUTH_TOKENS['CONTENTSTORE']
DOC_STORE_CONFIG = AUTH_TOKENS['DOC_STORE_CONFIG']
############################### BLOCKSTORE #####################################
BLOCKSTORE_API_URL = ENV_TOKENS.get('BLOCKSTORE_API_URL', None) # e.g. "https://blockstore.example.com/api/v1/"
# Configure an API auth token at (blockstore URL)/admin/authtoken/token/
BLOCKSTORE_API_AUTH_TOKEN = AUTH_TOKENS.get('BLOCKSTORE_API_AUTH_TOKEN', None)
# Datadog for events!
DATADOG = AUTH_TOKENS.get("DATADOG", {})
DATADOG.update(ENV_TOKENS.get("DATADOG", {}))
# TODO: deprecated (compatibility with previous settings)
if 'DATADOG_API' in AUTH_TOKENS:
DATADOG['api_key'] = AUTH_TOKENS['DATADOG_API']
# Celery Broker
CELERY_ALWAYS_EAGER = ENV_TOKENS.get("CELERY_ALWAYS_EAGER", False)
CELERY_BROKER_TRANSPORT = ENV_TOKENS.get("CELERY_BROKER_TRANSPORT", "")
CELERY_BROKER_HOSTNAME = ENV_TOKENS.get("CELERY_BROKER_HOSTNAME", "")
CELERY_BROKER_VHOST = ENV_TOKENS.get("CELERY_BROKER_VHOST", "")
CELERY_BROKER_USER = AUTH_TOKENS.get("CELERY_BROKER_USER", "")
CELERY_BROKER_PASSWORD = AUTH_TOKENS.get("CELERY_BROKER_PASSWORD", "")
BROKER_URL = "{}://{}:{}@{}/{}".format(CELERY_BROKER_TRANSPORT,
CELERY_BROKER_USER,
CELERY_BROKER_PASSWORD,
CELERY_BROKER_HOSTNAME,
CELERY_BROKER_VHOST)
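# e.g. "amqp://celery:secret@rabbit.internal/edx" (hypothetical values)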
BROKER_USE_SSL = ENV_TOKENS.get('CELERY_BROKER_USE_SSL', False)
try:
BROKER_TRANSPORT_OPTIONS = {
'fanout_patterns': True,
'fanout_prefix': True,
**ENV_TOKENS.get('CELERY_BROKER_TRANSPORT_OPTIONS', {})
}
except TypeError as exc:
raise ImproperlyConfigured('CELERY_BROKER_TRANSPORT_OPTIONS must be a dict') from exc
# Message expiry time in seconds
CELERY_EVENT_QUEUE_TTL = ENV_TOKENS.get('CELERY_EVENT_QUEUE_TTL', None)
# Allow CELERY_QUEUES to be overwritten by ENV_TOKENS.
ENV_CELERY_QUEUES = ENV_TOKENS.get('CELERY_QUEUES', None)
if ENV_CELERY_QUEUES:
CELERY_QUEUES = {queue: {} for queue in ENV_CELERY_QUEUES}
# Then add alternate environment queues
ALTERNATE_QUEUE_ENVS = ENV_TOKENS.get('ALTERNATE_WORKER_QUEUES', '').split()
ALTERNATE_QUEUES = [
DEFAULT_PRIORITY_QUEUE.replace(QUEUE_VARIANT, alternate + '.')
for alternate in ALTERNATE_QUEUE_ENVS
]
CELERY_QUEUES.update(
{
alternate: {}
for alternate in ALTERNATE_QUEUES
if alternate not in list(CELERY_QUEUES.keys())
}
)
# Queue to use for updating grades due to grading policy change
POLICY_CHANGE_GRADES_ROUTING_KEY = ENV_TOKENS.get('POLICY_CHANGE_GRADES_ROUTING_KEY', DEFAULT_PRIORITY_QUEUE)
SOFTWARE_SECURE_VERIFICATION_ROUTING_KEY = ENV_TOKENS.get(
'SOFTWARE_SECURE_VERIFICATION_ROUTING_KEY',
HIGH_PRIORITY_QUEUE
)
# Event tracking
TRACKING_BACKENDS.update(AUTH_TOKENS.get("TRACKING_BACKENDS", {}))
EVENT_TRACKING_BACKENDS['tracking_logs']['OPTIONS']['backends'].update(AUTH_TOKENS.get("EVENT_TRACKING_BACKENDS", {}))
EVENT_TRACKING_BACKENDS['segmentio']['OPTIONS']['processors'][0]['OPTIONS']['whitelist'].extend(
AUTH_TOKENS.get("EVENT_TRACKING_SEGMENTIO_EMIT_WHITELIST", []))
##### ACCOUNT LOCKOUT DEFAULT PARAMETERS #####
MAX_FAILED_LOGIN_ATTEMPTS_ALLOWED = ENV_TOKENS.get(
"MAX_FAILED_LOGIN_ATTEMPTS_ALLOWED", MAX_FAILED_LOGIN_ATTEMPTS_ALLOWED
)
MAX_FAILED_LOGIN_ATTEMPTS_LOCKOUT_PERIOD_SECS = ENV_TOKENS.get(
"MAX_FAILED_LOGIN_ATTEMPTS_LOCKOUT_PERIOD_SECS", MAX_FAILED_LOGIN_ATTEMPTS_LOCKOUT_PERIOD_SECS
)
#### PASSWORD POLICY SETTINGS #####
AUTH_PASSWORD_VALIDATORS = ENV_TOKENS.get("AUTH_PASSWORD_VALIDATORS", AUTH_PASSWORD_VALIDATORS)
### INACTIVITY SETTINGS ####
SESSION_INACTIVITY_TIMEOUT_IN_SECONDS = AUTH_TOKENS.get("SESSION_INACTIVITY_TIMEOUT_IN_SECONDS")
################ PUSH NOTIFICATIONS ###############
PARSE_KEYS = AUTH_TOKENS.get("PARSE_KEYS", {})
# Video Caching. Pairing country codes with CDN URLs.
# Example: {'CN': 'http://api.xuetangx.com/edx/video?s3_url='}
VIDEO_CDN_URL = ENV_TOKENS.get('VIDEO_CDN_URL', {})
if FEATURES['ENABLE_COURSEWARE_INDEX'] or FEATURES['ENABLE_LIBRARY_INDEX'] or FEATURES['ENABLE_CONTENT_LIBRARY_INDEX']:
# Use ElasticSearch for the search engine
SEARCH_ENGINE = "search.elastic.ElasticSearchEngine"
# TODO: Once we have successfully upgraded to ES7, switch this back to ELASTIC_SEARCH_CONFIG.
ELASTIC_SEARCH_CONFIG = ENV_TOKENS.get('ELASTIC_SEARCH_CONFIG_ES7', [{}])
XBLOCK_SETTINGS = ENV_TOKENS.get('XBLOCK_SETTINGS', {})
XBLOCK_SETTINGS.setdefault("VideoBlock", {})["licensing_enabled"] = FEATURES.get("LICENSING", False)
XBLOCK_SETTINGS.setdefault("VideoBlock", {})['YOUTUBE_API_KEY'] = AUTH_TOKENS.get('YOUTUBE_API_KEY', YOUTUBE_API_KEY)
############################ OAUTH2 Provider ###################################
#### JWT configuration ####
JWT_AUTH.update(ENV_TOKENS.get('JWT_AUTH', {}))
JWT_AUTH.update(AUTH_TOKENS.get('JWT_AUTH', {}))
######################## CUSTOM COURSES for EDX CONNECTOR ######################
if FEATURES.get('CUSTOM_COURSES_EDX'):
INSTALLED_APPS.append('openedx.core.djangoapps.ccxcon.apps.CCXConnectorConfig')
############## Settings for CourseGraph ############################
COURSEGRAPH_JOB_QUEUE = ENV_TOKENS.get('COURSEGRAPH_JOB_QUEUE', LOW_PRIORITY_QUEUE)
########## Settings for video transcript migration tasks ############
VIDEO_TRANSCRIPT_MIGRATIONS_JOB_QUEUE = ENV_TOKENS.get('VIDEO_TRANSCRIPT_MIGRATIONS_JOB_QUEUE', DEFAULT_PRIORITY_QUEUE)
########## Settings youtube thumbnails scraper tasks ############
SCRAPE_YOUTUBE_THUMBNAILS_JOB_QUEUE = ENV_TOKENS.get('SCRAPE_YOUTUBE_THUMBNAILS_JOB_QUEUE', DEFAULT_PRIORITY_QUEUE)
########## Settings update search index task ############
UPDATE_SEARCH_INDEX_JOB_QUEUE = ENV_TOKENS.get('UPDATE_SEARCH_INDEX_JOB_QUEUE', DEFAULT_PRIORITY_QUEUE)
########################## Extra middleware classes #######################
# Allow extra middleware classes to be added to the app through configuration.
MIDDLEWARE.extend(ENV_TOKENS.get('EXTRA_MIDDLEWARE_CLASSES', []))
########################## Settings for Completion API #####################
# Once a user has watched this percentage of a video, mark it as complete:
# (0.0 = 0%, 1.0 = 100%)
COMPLETION_VIDEO_COMPLETE_PERCENTAGE = ENV_TOKENS.get(
'COMPLETION_VIDEO_COMPLETE_PERCENTAGE',
COMPLETION_VIDEO_COMPLETE_PERCENTAGE,
)
####################### Enterprise Settings ######################
# A default dictionary to be used for filtering out enterprise customer catalog.
ENTERPRISE_CUSTOMER_CATALOG_DEFAULT_CONTENT_FILTER = ENV_TOKENS.get(
'ENTERPRISE_CUSTOMER_CATALOG_DEFAULT_CONTENT_FILTER',
ENTERPRISE_CUSTOMER_CATALOG_DEFAULT_CONTENT_FILTER
)
ENTERPRISE_CATALOG_INTERNAL_ROOT_URL = ENV_TOKENS.get(
'ENTERPRISE_CATALOG_INTERNAL_ROOT_URL',
ENTERPRISE_CATALOG_INTERNAL_ROOT_URL
)
INTEGRATED_CHANNELS_API_CHUNK_TRANSMISSION_LIMIT = ENV_TOKENS.get(
'INTEGRATED_CHANNELS_API_CHUNK_TRANSMISSION_LIMIT',
INTEGRATED_CHANNELS_API_CHUNK_TRANSMISSION_LIMIT
)
############### Settings for Retirement #####################
RETIREMENT_SERVICE_WORKER_USERNAME = ENV_TOKENS.get(
'RETIREMENT_SERVICE_WORKER_USERNAME',
RETIREMENT_SERVICE_WORKER_USERNAME
)
############### Settings for edx-rbac ###############
SYSTEM_WIDE_ROLE_CLASSES = ENV_TOKENS.get('SYSTEM_WIDE_ROLE_CLASSES') or SYSTEM_WIDE_ROLE_CLASSES
######################## Setting for content libraries ########################
MAX_BLOCKS_PER_CONTENT_LIBRARY = ENV_TOKENS.get('MAX_BLOCKS_PER_CONTENT_LIBRARY', MAX_BLOCKS_PER_CONTENT_LIBRARY)
########################## Derive Any Derived Settings #######################
derive_settings(__name__)
####################### Plugin Settings ##########################
# This is at the bottom because it is going to load more settings after base settings are loaded
add_plugins(__name__, ProjectType.CMS, SettingsType.PRODUCTION)
############# CORS headers for cross-domain requests #################
if FEATURES.get('ENABLE_CORS_HEADERS'):
CORS_ALLOW_CREDENTIALS = True
CORS_ORIGIN_WHITELIST = ENV_TOKENS.get('CORS_ORIGIN_WHITELIST', ())
CORS_ORIGIN_ALLOW_ALL = ENV_TOKENS.get('CORS_ORIGIN_ALLOW_ALL', False)
CORS_ALLOW_INSECURE = ENV_TOKENS.get('CORS_ALLOW_INSECURE', False)
CORS_ALLOW_HEADERS = corsheaders_default_headers + (
'use-jwt-cookie',
)
################# Settings for brand logos. #################
LOGO_URL = ENV_TOKENS.get('LOGO_URL', LOGO_URL)
LOGO_URL_PNG = ENV_TOKENS.get('LOGO_URL_PNG', LOGO_URL_PNG)
LOGO_TRADEMARK_URL = ENV_TOKENS.get('LOGO_TRADEMARK_URL', LOGO_TRADEMARK_URL)
FAVICON_URL = ENV_TOKENS.get('FAVICON_URL', FAVICON_URL)
######################## CELERY ROUTING ########################
# Defines alternate environment tasks, as a dict of form { task_name: alternate_queue }
ALTERNATE_ENV_TASKS = {
'completion_aggregator.tasks.update_aggregators': 'lms',
'openedx.core.djangoapps.content.block_structure.tasks.update_course_in_cache': 'lms',
'openedx.core.djangoapps.content.block_structure.tasks.update_course_in_cache_v2': 'lms',
}
# Defines the task -> alternate worker queue to be used when routing.
EXPLICIT_QUEUES = {
'lms.djangoapps.grades.tasks.compute_all_grades_for_course': {
'queue': POLICY_CHANGE_GRADES_ROUTING_KEY},
'cms.djangoapps.contentstore.tasks.update_search_index': {
'queue': UPDATE_SEARCH_INDEX_JOB_QUEUE},
'openedx.core.djangoapps.coursegraph.tasks.dump_course_to_neo4j': {
'queue': COURSEGRAPH_JOB_QUEUE},
}
LOGO_IMAGE_EXTRA_TEXT = ENV_TOKENS.get('LOGO_IMAGE_EXTRA_TEXT', '')
############## XBlock extra mixins ############################
XBLOCK_MIXINS += tuple(XBLOCK_EXTRA_MIXINS)
############## Settings for course import olx validation ############################
COURSE_OLX_VALIDATION_STAGE = ENV_TOKENS.get('COURSE_OLX_VALIDATION_STAGE', COURSE_OLX_VALIDATION_STAGE)
COURSE_OLX_VALIDATION_IGNORE_LIST = ENV_TOKENS.get(
'COURSE_OLX_VALIDATION_IGNORE_LIST',
COURSE_OLX_VALIDATION_IGNORE_LIST
)
################# show account activate cta after register ########################
SHOW_ACCOUNT_ACTIVATION_CTA = ENV_TOKENS.get('SHOW_ACCOUNT_ACTIVATION_CTA', SHOW_ACCOUNT_ACTIVATION_CTA)
LANGUAGE_COOKIE_NAME = ENV_TOKENS.get('LANGUAGE_COOKIE', None) or ENV_TOKENS.get(
'LANGUAGE_COOKIE_NAME', LANGUAGE_COOKIE_NAME)
################# Discussions micro frontend URL ########################
DISCUSSIONS_MICROFRONTEND_URL = ENV_TOKENS.get('DISCUSSIONS_MICROFRONTEND_URL', DISCUSSIONS_MICROFRONTEND_URL)
################### Discussions micro frontend Feedback URL###################
DISCUSSIONS_MFE_FEEDBACK_URL = ENV_TOKENS.get('DISCUSSIONS_MFE_FEEDBACK_URL', DISCUSSIONS_MFE_FEEDBACK_URL)
|
eduNEXT/edx-platform
|
cms/envs/production.py
|
Python
|
agpl-3.0
| 26,216
|
# Copyright (c) 2013 "OKso http://okso.me"
#
# This file is part of Ra.
#
# Ra is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import serial
class Hawk:
def __init__(self):
self.serial = serial.Serial('/dev/ttyUSB0')
def network(self, id):
msg = 'n{}'.format(id)
self.serial.write(bytes(msg, 'utf-8'))
def plug(self, id):
msg = 'p{}'.format(id)
self.serial.write(bytes(msg, 'utf-8'))
def on(self):
        self.serial.write(b'w1')
def off(self):
        self.serial.write(b'w0')
def write(self, msg):
self.serial.write(msg)
class Plug:
def __init__(self, hawk, network_id, plug_id, status=None):
self.hawk = hawk
self.network_id = bytes('n' + network_id, 'utf-8')
self.plug_id = bytes('p' + plug_id, 'utf-8')
self.status = status
def on(self):
print(self.network_id + self.plug_id + b'w1')
self.hawk.write(self.network_id + self.plug_id + b'w1')
self.status = True
def off(self):
self.hawk.write(self.network_id + self.plug_id + b'w0')
self.status = False
def enforce(self):
assert self.status is not None
        if self.status:
self.on()
else:
self.off()
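# Usage sketch (hypothetical ids; assumes a Hawk RF bridge on /dev/ttyUSB0):
#   hawk = Hawk()
#   lamp = Plug(hawk, '1', '2', status=False)
#   lamp.on()       # writes b'n1p2w1' to the serial port
#   lamp.enforce()  # re-sends the last known state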
|
oksome/Home
|
mandjet/hawk.py
|
Python
|
agpl-3.0
| 1,892
|
"""Add table impact
Revision ID: 6f3aadb613f
Revises: 4adbb49a02f0
Create Date: 2014-07-02 16:58:02.545151
"""
# revision identifiers, used by Alembic.
revision = '6f3aadb613f'
down_revision = '4adbb49a02f0'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
impact_status = sa.Enum('published', 'archived', name='impact_status')
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('impact',
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('id', postgresql.UUID(), nullable=False),
sa.Column('disruption_id', postgresql.UUID(), nullable=True),
    sa.ForeignKeyConstraint(['disruption_id'], [u'disruption.id']),
sa.PrimaryKeyConstraint('id')
)
impact_status.create(op.get_bind(), True)
op.add_column('impact', sa.Column('status', impact_status, nullable=False, server_default='published', index=True))
op.create_index('ix_impact_status', 'impact', ['status'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index('ix_impact_status', 'impact')
op.drop_table('impact')
impact_status.drop(op.get_bind())
### end Alembic commands ###
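# Usage sketch (assumes a configured alembic.ini for this project):
#   alembic upgrade 6f3aadb613f     # create the impact table and its status enum
#   alembic downgrade 4adbb49a02f0  # revert to the previous revision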
|
CanalTP/Chaos
|
migrations/versions/6f3aadb613f_.py
|
Python
|
agpl-3.0
| 1,339
|
# -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from shoop.admin.utils.picotable import (
Column, TextFilter, true_or_false_filter
)
from shoop.admin.utils.views import PicotableListView
from shoop.core.models import CustomerTaxGroup, Tax, TaxClass
from shoop.utils.i18n import format_percent
def _format_rate(tax_rule):
if tax_rule.rate is None:
return ""
return format_percent(tax_rule.rate, digits=3)
class TaxListView(PicotableListView):
model = Tax
columns = [
Column(
"name", _("Name"), sort_field="translations__name",
filter_config=TextFilter(
filter_field="translations__name",
placeholder=_("Filter by name..."),
),
),
Column("code", _(u"Code")),
Column("rate", _("Rate"), display=_format_rate),
# Column("amount", _(u"Amount")),
Column("enabled", _(u"Enabled"), filter_config=true_or_false_filter),
]
class CustomerTaxGroupListView(PicotableListView):
model = CustomerTaxGroup
columns = [
Column(
"name", _("Name"), sort_field="translations__name",
filter_config=TextFilter(
filter_field="translations__name",
placeholder=_("Filter by name..."),
),
),
]
class TaxClassListView(PicotableListView):
model = TaxClass
columns = [
Column(
"name", _("Name"), sort_field="translations__name",
filter_config=TextFilter(
filter_field="translations__name",
placeholder=_("Filter by name..."),
),
),
]
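# Usage sketch for _format_rate (hypothetical value): a Tax with
# rate=Decimal("0.077") renders as a locale-aware string such as "7.7%",
# while a Tax whose rate is None renders as "".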
|
taedori81/shoop
|
shoop/admin/modules/taxes/views/list.py
|
Python
|
agpl-3.0
| 1,947
|
# _*_ coding: utf-8 _*_
import ckanapi
import os
from xlsxtocsv import xlsxtocsv, rfc4180
import tempfile
import csv
import shutil
import subprocess as sp
import io
import glob
import sys
import cStringIO
import json
## Curently this requires a script "pda2pdfa" that conevrts PDF to PDF/A-1b
## Here we use:
## gs -dPDFA -dBATCH -dNOPAUSE -sProcessColorModel=DeviceCMYK -sDEVICE=pdfwrite \
## -sPDFACompatibilityPolicy=1 -sOutputFile="$outfile" "$infile"
## Curently this requires a script pdfa_vali" that validates against PDF/A-1b.
## Here we use:
## java -jar $HOME/java/lib/preflight-app-2.0.4.jar "$1"
# measurement methods
class PrepareNADUF(object):
def __init__(self, basedir):
self.apikey = self.get_apikey()
#self.remote = 'https://eaw-ckan-dev1.eawag.wroot.emp-eaw.ch'
self.remote = 'http://localhost:5000'
self.targetdir = os.path.join(basedir, 'upload')
self.srcdir = os.path.join(basedir,'staging')
self.tmpdir = os.path.join(basedir, 'tmp')
self.metadata = json.load(open('metadata.json', 'r'))
rfc4180.RFC4180() # register dialect
self.connection = self.connect()
def connect(self):
return ckanapi.RemoteCKAN(self.remote, apikey=self.apikey)
def get_apikey(self):
apikey = os.environ['CKAN_APIKEY']
return apikey
def action(self, action):
metadata = self.prep_meta(action)
res = self.connection.call_action(action, metadata)
print(res)
def prep_meta(self, action):
if action == 'package_delete' or action == 'dataset_purge':
return {'id': self.metadata.get('id', None) or
self.metadata['name']}
return self.metadata
def check_pdf_A(self, pdffile):
pdffilepath = os.path.join(self.srcdir, pdffile)
try:
res = sp.check_output(['pdfa_vali', pdffilepath])
except sp.CalledProcessError:
print('{} is not valid PDF/A-1b. trying to convert ...'
.format(pdffile))
return self.pdf2pdfa(pdffile)
else:
return pdffile
def pdf2pdfa(self, pdffile):
pdffilepath = os.path.join(self.srcdir, pdffile)
try:
res = io.BufferedReader(io.BytesIO(sp.check_output(['pdf2pdfa',
pdffilepath, '-'])))
except sp.CalledProcessError as e:
            print('Conversion of {} to PDF/A failed'.format(pdffile))
            raise e
else:
outfile = os.path.basename(pdffile).partition('.pdf')[0] + '_A.pdf'
outfilepath = os.path.join(self.tmpdir, outfile)
with io.open(outfilepath, 'bw') as o:
o.write(res.read())
            print('converted {} to {}'.format(pdffile, outfile))
            return outfilepath
def cpfile(self, basedir, srcfn, destfn):
        src = os.path.join(getattr(self, basedir), srcfn)
target = os.path.join(self.targetdir, destfn)
shutil.copyfile(src, target)
print('Copied {}\n->{}'.format(src, target))
return target
def get_files(self, basedir, pattern, relative=False):
"""
Returns absolute paths (relative=False) or paths relative
to basedir (relative=True) determined by basedir and pattern.
pattern is a glob-pattern relative to the one indicated by basedir:
'tmpdir': self.tmpdir
'srcdir': self.srcdir
'targetdir': self.targetdir
"""
        pattern = os.path.join(getattr(self, basedir), pattern)
fs = glob.glob(pattern)
if not relative:
return fs
else:
return [os.path.relpath(f, self.srcdir) for f in fs]
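    # Usage sketch (hypothetical pattern): get_files('srcdir', 'Messdaten/*.xlsx')
    # returns absolute paths of the Excel files under <basedir>/staging/Messdaten;
    # with relative=True the paths are given relative to self.srcdir.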
def mktmpdir(self):
return os.path.relpath(tempfile.mkdtemp(dir=self.tmpdir), self.tmpdir)
def extract_xlsx(self, xlsxfiles, sheets=None, tmpdir=None, strip=False):
        if not isinstance(xlsxfiles, list):
xlsxfiles = [xlsxfiles]
if tmpdir:
out_dir = os.path.join(self.tmpdir, tmpdir)
else:
out_dir = tempfile.mkdtemp(dir=self.tmpdir)
for xlsxfile in xlsxfiles:
print('Extracting data from {} ...'.format(xlsxfile))
xlsxtocsv.main(xlsxfile, out_dir, sheets=sheets)
return os.path.basename(out_dir)
def strip_csv(self, csvfiles, killemptyrows=True):
"""Strips leading and trailing whitespace from cells.
csvfiles is the list of abs. paths to operate on.
Also removes rows with no data.
Files are overwritten.
pattern is a glob - pattern relative to the one indicated by basedir:
'tmpdir': self.tmpdir
'srcdir': self.srcdir
'targetdir': self.targetdir
"""
        if not isinstance(csvfiles, list):
csvfiles = [csvfiles]
for csvf in csvfiles:
print('{}:\nStripping leading and trailing whitespace from cells'
.format(os.path.basename(csvf)))
ftmp = tempfile.SpooledTemporaryFile(max_size=1048576, mode='w+b')
tmpwriter = csv.writer(ftmp, dialect='RFC4180')
with open(csvf, 'rb') as f:
for i, row in enumerate(csv.reader(f, dialect='RFC4180')):
if killemptyrows and all([c == '' for c in row]):
print('{}:\nremoved empty line {}\n'
.format(os.path.basename(csvf), i))
continue
tmpwriter.writerow([c.strip() if isinstance(c, basestring)
else c for c in row ])
ftmp.flush()
ftmp.seek(0)
with open(csvf, 'wb') as f:
shutil.copyfileobj(ftmp, f)
def crop_csv(self, csvfiles):
"""Strips columns that contain only empty cells."""
        if not isinstance(csvfiles, list):
csvfiles = [csvfiles]
for csvf in csvfiles:
print('{}:\nremoving empty columns'.format(os.path.basename(csvf)))
with open(csvf, 'rb') as f:
table = [row for row in csv.reader(f, dialect='RFC4180')]
table_inv = zip(*table)
goodcols = []
for i, col in enumerate(table_inv):
if all([c == '' for c in col]):
print('removing empty column {}'.format(i))
continue
goodcols.append(i)
table_inv = [table_inv[i] for i in goodcols]
table = zip(*table_inv)
with open(csvf, 'wb') as f:
csv.writer(f, dialect='RFC4180').writerows(table)
def check_column_compat(self, csvfiles):
"""Checks whether a set of csv files has the same column headings.
pattern is a glob - pattern relative to the one indicated by basedir:
tmpdir: self.tmpdir
srcdir: self.srcdir
targetdir: self.targetdir
"""
print('Checking compatibility of headers for')
checklist = {}
for fn in csvfiles:
fbase = os.path.basename(fn)
print(fbase)
with open(fn, 'rb') as f:
checklist[fbase] = tuple(csv.reader(f, dialect='RFC4180').next())
headerset = set(checklist.values())
if len(headerset) > 1:
print('Incompatibilies detected:')
incomp = {}
for header in headerset:
incfiles = [k for k in checklist if checklist[k] == header]
incomp.setdefault(header, []).extend(incfiles)
print(incomp)
print(len(incomp))
def cat_csv(self, csvfiles, outfilename):
"""Concatenates the files in csvfiles.
        No sanity checks; these have to be done beforehand.
"""
tmpd = os.path.join(self.tmpdir, self.mktmpdir())
ofpath = os.path.join(tmpd, outfilename)
with open(csvfiles[0], 'r') as f:
header = f.readline()
with open(ofpath, 'w') as f:
f.write(header)
for csvf in csvfiles:
with open(csvf, 'r') as srcfile:
srcfile.readline()
f.writelines(srcfile.readlines())
return ofpath
def extract_subtable(self, csvfile, row1=None, row2=None,
col1=None, col2=None, totxt=False):
"""Extracts a rectangular area from CSV - table. Coordinate parameter
with value None are interpreted to yield the maximum possible
size of the recatangle.
Indices start with 1.
If totxt=True, rows will be concatenated and written into a .txt file.
"""
res = []
row1 = row1 or 1
col1 = col1 or 1
        if isinstance(csvfile, list):
csvfile = csvfile[0]
with open(csvfile, 'rb') as f:
readr = csv.reader(f, dialect='RFC4180')
for c, row in enumerate(readr):
if c + 1 < row1:
continue
elif (row2 is not None) and (c + 1 > row2):
continue # not breaking just to count lines
else:
if [x for x in row if x]:
res.append(row)
row2 = row2 or readr.line_num
res_t = zip(*res)
col2 = col2 or len(res_t)
res_t = res_t[col1-1:col2]
res = zip(*res_t)
suffix = 'txt' if totxt else 'csv'
outfile = (os.path.basename(csvfile).partition('.csv')[0]
+ '_{}_{}_{}_{}.{}'.format(str(row1), str(row2),
str(col1), str(col2), suffix))
outfilepath = os.path.join(os.path.dirname(csvfile), outfile)
with io.open(outfilepath, 'bw') as o:
if totxt:
res = [' '.join(l) + '\r\n' for l in res]
o.writelines(res)
else:
wr = csv.writer(o, dialect='RFC4180')
wr.writerows(res)
print('wrote {}'.format(outfile))
return outfilepath
P = PrepareNADUF('/home/vonwalha/rdm/data/preparation/naduf')
#P.action('package_create')
#P.action('package_update')
#P.action('package_delete')
#P.action('dataset_purge')
## main data
dmain_tmp = P.extract_xlsx(P.get_files('srcdir','Messdaten/Daten 2015.xlsx'))
dmain1_tmp = P.extract_xlsx(P.get_files('srcdir', 'Messdaten/Jahresmittel-2.xlsx'))
P.strip_csv(P.get_files('tmpdir', os.path.join(dmain_tmp, '*.csv')))
P.strip_csv(P.get_files('tmpdir', os.path.join(dmain1_tmp, '*.csv')))
P.crop_csv(P.get_files('tmpdir', os.path.join(dmain_tmp, '*.csv')))
P.crop_csv(P.get_files('tmpdir', os.path.join(dmain1_tmp, '*.csv')))
## station information
dstations_tmp = P.extract_xlsx(
P.get_files('srcdir','Stationen/Stationszusammenstellung Jan17.xlsx'))
### Sheet "Bemerkungen Quellen"
notes_sources = P.get_files('tmpdir', os.path.join(dstations_tmp, '*Quellen*.csv'))
P.strip_csv(notes_sources, killemptyrows=False)
stations_description_legend = P.extract_subtable(notes_sources, 7, 18, 1, 4)
P.strip_csv(stations_description_legend)
P.crop_csv(stations_description_legend)
stations_description_sources = P.extract_subtable(notes_sources, 21, 38, 1, 3)
P.strip_csv(stations_description_sources)
P.crop_csv(stations_description_sources)
stations_description_notes = P.extract_subtable(notes_sources, 1, 5, 1, 1,
totxt=True)
### Sheet "Allgemeine Daten
general_data_and_classifications = (
P.get_files('tmpdir', os.path.join(dstations_tmp, '*Allgemeine*.csv'))
+ P.get_files('tmpdir', os.path.join(dstations_tmp, '*Klassifikation*.csv'))
)
P.strip_csv(general_data_and_classifications)
P.crop_csv(general_data_and_classifications)
## Logfiles and Stoerungen
dnotes = P.mktmpdir()
P.extract_xlsx(
P.get_files('srcdir', 'Hauptfiles (Instrument für mich)/*.xlsx'),
sheets=['Stoerungen','Logsheet'], tmpdir=dnotes)
stoerungen = P.get_files('tmpdir', os.path.join(dnotes, '*Stoerungen.csv'))
logsheets = P.get_files('tmpdir', os.path.join(dnotes, '*Logsheet.csv'))
P.strip_csv(stoerungen + logsheets)
P.crop_csv(stoerungen + logsheets)
P.check_column_compat(stoerungen)
P.check_column_compat(logsheets)
logfile = P.cat_csv(logsheets, 'log.csv')
stoerfile = P.cat_csv(stoerungen, 'stoer.csv')
sys.exit()
## copy files:
ftocopy = [
(P.check_pdf_A('Messmethoden/methods NADUF-english.pdf'),
'methods_chemical_analysis.pdf'),
(P.check_pdf_A('ReadMe.pdf'), 'measurements_notes.pdf'),
(os.path.join(dmain_tmp, 'Daten 2015_Onelinemessung.csv'),
'hourly_measurements_1990-1998.csv'),
(os.path.join(dmain_tmp, 'Daten 2015_Originaldaten.csv'),
'measurements_raw.csv'),
(os.path.join(dmain_tmp, 'Daten 2015_14tg_Daten.csv'),
'measurements_raw.csv'),
(os.path.join(dmain_tmp, 'Daten 2015_14tg_Daten.csv'),
'measurements_biweekly.csv'),
(os.path.join(dmain1_tmp, 'Jahresmittel-2_Sheet1.csv'),
'measurements_annual.csv'),
(os.path.join(dstations_tmp,
'Stationszusammenstellung Jan17_Allgemeine_Daten.csv'),
'stations_description.csv'),
(P.extract_subtable(
os.path.join(dstations_tmp,
'Stationszusammenstellung Jan17_Bemerkungen_Quellen.csv'),
7, 18, 1, 4),'stations_description_legend.csv'),
(P.extract_subtable(
os.path.join(dstations_tmp,
'Stationszusammenstellung Jan17_Bemerkungen_Quellen.csv'),
21, 38, 1, 3),'stations_description_sources.csv'),
(P.extract_subtable(
os.path.join(dstations_tmp,
'Stationszusammenstellung Jan17_Bemerkungen_Quellen.csv'),
1, 5, 1, 1, totxt=True),'stations_description_notes.txt'),
(os.path.join(dstations_tmp,
'Stationszusammenstellung Jan17_Klassifikation_AS_CH.csv'),
'stations_description_arealstats_mapping_CH.csv'),
(os.path.join(dstations_tmp,
'Stationszusammenstellung Jan17_Klassifikation_AS_EU.csv'),
'stations_description_arealstats_mapping_EU.csv'),
]
# for f in ftocopy:
# P.cpfile(f[0], f[1])
# csvfile = '/home/vonwalha/rdm/data/preparation/naduf/upload/stations_description.csv'
# row1 = 26
# row2 = None
# res = P.extract_subtable(csvfile, row1, row2, 3, None)
|
eawag-rdm/prepare_naduf
|
prepare_naduf.py
|
Python
|
agpl-3.0
| 14,470
|
import hashlib
import random
NUMBER_OF_ITERATIONS = 100001
def get_salt(user: bytes):
    # Derive a deterministic per-user salt: seeding the PRNG with the user
    # name makes the salt reproducible without having to store it.
    random.seed(user)
    h = hashlib.sha256()
    h.update(user)
    return h.digest() + bytes(str(random.uniform(-1024.0, 1023.0)), encoding='utf8')
def hash_password(password: bytes, user: bytes):
salt = get_salt(user)
return hashlib.pbkdf2_hmac('sha256', password, salt, NUMBER_OF_ITERATIONS)
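# Usage sketch:
#   digest = hash_password(b'correct horse battery staple', b'alice')
#   len(digest)  # 32 -- PBKDF2-HMAC-SHA256 derives a 32-byte key by default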
|
sanchopanca/make-me-better
|
mmb/hashing.py
|
Python
|
agpl-3.0
| 397
|
"""pharmaship URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/doc/', include('django.contrib.admindocs.urls')),
path('admin/', admin.site.urls),
path('api-auth/', include('rest_framework.urls', namespace='rest_framework')),
path('api/', include('pharmaship.inventory.urls')),
]
|
tuxite/pharmaship
|
pharmaship/app/urls.py
|
Python
|
agpl-3.0
| 967
|
#!/usr/bin/env python
# Copyright (C) 2009-2011 :
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# Gregory Starck, g.starck@gmail.com
# Hartmut Goebel, h.goebel@goebel-consult.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
# shinken.objects must be imported first:
import objects
|
baloo/shinken
|
shinken/__init__.py
|
Python
|
agpl-3.0
| 957
|
##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2018 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
import uuid
from copy import copy
from unittest.mock import patch
from django.conf import settings
from django.contrib.auth.models import Permission
from django.test import TestCase
from base.forms.learning_unit_pedagogy import LearningUnitPedagogyEditForm, TeachingMaterialModelForm
from base.models.enums.learning_unit_year_subtypes import FULL
from base.tests.factories.academic_year import create_current_academic_year
from base.tests.factories.business.learning_units import GenerateAcademicYear
from base.tests.factories.learning_unit_year import LearningUnitYearFactory
from base.tests.factories.person import PersonFactory
from base.tests.factories.teaching_material import TeachingMaterialFactory
from cms.enums import entity_name
from cms.tests.factories.text_label import TextLabelFactory
from cms.tests.factories.translated_text import TranslatedTextFactory
from reference.tests.factories.language import LanguageFactory
class LearningUnitPedagogyContextMixin(TestCase):
""""This mixin is used in this test file in order to setup an environment for testing pedagogy"""
def setUp(self):
self.language = LanguageFactory(code="EN")
self.person = PersonFactory()
self.person.user.user_permissions.add(Permission.objects.get(codename="can_edit_learningunit_pedagogy"))
self.current_ac = create_current_academic_year()
self.ac_years_containers = GenerateAcademicYear(start_year=self.current_ac.year + 1,
end_year=self.current_ac.year + 5)
self.current_luy = LearningUnitYearFactory(
learning_container_year__academic_year=self.current_ac,
academic_year=self.current_ac,
acronym="LAGRO1000",
subtype=FULL
)
self.luys = {self.current_ac.year: self.current_luy}
self.luys.update(
_duplicate_learningunityears(self.current_luy, academic_years=self.ac_years_containers.academic_years)
)
class TestValidation(LearningUnitPedagogyContextMixin):
def setUp(self):
super().setUp()
self.cms_translated_text = TranslatedTextFactory(
entity=entity_name.LEARNING_UNIT_YEAR,
reference=self.luys[self.current_ac.year].id,
language='EN',
text='Text random'
)
self.valid_form_data = _get_valid_cms_form_data(self.cms_translated_text)
def test_invalid_form(self):
del self.valid_form_data['cms_id']
form = LearningUnitPedagogyEditForm(self.valid_form_data)
self.assertFalse(form.is_valid())
def test_valid_form(self):
form = LearningUnitPedagogyEditForm(self.valid_form_data)
self.assertEqual(form.errors, {})
self.assertTrue(form.is_valid())
@patch("cms.models.translated_text.update_or_create")
def test_save_without_postponement(self, mock_update_or_create):
"""In this test, we ensure that if we modify UE of N or N-... => The postponement is not done for CMS data"""
form = LearningUnitPedagogyEditForm(self.valid_form_data)
self.assertTrue(form.is_valid(), form.errors)
form.save()
mock_update_or_create.assert_called_once_with(entity=self.cms_translated_text.entity,
reference=self.cms_translated_text.reference,
language=self.cms_translated_text.language,
text_label=self.cms_translated_text.text_label,
defaults={'text': self.cms_translated_text.text})
@patch("cms.models.translated_text.update_or_create")
def test_save_with_postponement(self, mock_update_or_create):
"""In this test, we ensure that if we modify UE of N+1 or N+X => The postponement until the lastest UE"""
luy_in_future = self.luys[self.current_ac.year + 1]
cms_pedagogy_future = TranslatedTextFactory(
entity=entity_name.LEARNING_UNIT_YEAR,
reference=luy_in_future.id,
language='EN',
text='Text in future'
)
form = LearningUnitPedagogyEditForm(data=_get_valid_cms_form_data(cms_pedagogy_future))
self.assertTrue(form.is_valid(), form.errors)
form.save()
# N+1 ===> N+6
self.assertEqual(mock_update_or_create.call_count, 5)
class TestTeachingMaterialForm(LearningUnitPedagogyContextMixin):
@patch('base.models.teaching_material.postpone_teaching_materials', side_effect=lambda *args: None)
def test_save_without_postponement(self, mock_postpone_teaching_materials):
"""In this test, we ensure that if we modify UE of N or N-... => The postponement is not done for teaching
materials"""
teaching_material = TeachingMaterialFactory.build(learning_unit_year=self.current_luy)
post_data = _get_valid_teaching_material_form_data(teaching_material)
teaching_material_form = TeachingMaterialModelForm(post_data)
self.assertTrue(teaching_material_form.is_valid(), teaching_material_form.errors)
teaching_material_form.save(learning_unit_year=self.current_luy)
self.assertFalse(mock_postpone_teaching_materials.called)
@patch('base.models.teaching_material.postpone_teaching_materials', side_effect=lambda *args: None)
def test_save_with_postponement(self, mock_postpone_teaching_materials):
"""In this test, we ensure that if we modify UE of N+1 or N+X => The postponement until the lastest UE"""
luy_in_future = self.luys[self.current_ac.year + 1]
teaching_material = TeachingMaterialFactory.build(learning_unit_year=luy_in_future)
post_data = _get_valid_teaching_material_form_data(teaching_material)
teaching_material_form = TeachingMaterialModelForm(post_data)
self.assertTrue(teaching_material_form.is_valid(), teaching_material_form.errors)
teaching_material_form.save(learning_unit_year=luy_in_future)
self.assertTrue(mock_postpone_teaching_materials.called)
class TestLearningUnitPedagogyEditForm(LearningUnitPedagogyContextMixin):
@patch("cms.models.translated_text.update_or_create")
def test_save_fr_bibliography_also_updates_en_bibliography(self, mock_update_or_create):
"""Ensure that if we modify bibliography in FR => bibliography in EN is updated with same text"""
text_label_bibliography = TextLabelFactory(
entity=entity_name.LEARNING_UNIT_YEAR,
label='bibliography'
)
cms_translated_text_fr = TranslatedTextFactory(
entity=entity_name.LEARNING_UNIT_YEAR,
reference=self.luys[self.current_ac.year].id,
language='fr-be',
text_label=text_label_bibliography,
text='Some random text'
)
valid_form_data_fr = _get_valid_cms_form_data(cms_translated_text_fr)
form = LearningUnitPedagogyEditForm(valid_form_data_fr)
self.assertTrue(form.is_valid(), form.errors)
form.save()
for language in settings.LANGUAGES:
mock_update_or_create.assert_any_call(
entity=cms_translated_text_fr.entity,
reference=cms_translated_text_fr.reference,
language=language[0],
text_label=cms_translated_text_fr.text_label,
defaults={'text': cms_translated_text_fr.text}
)
def _duplicate_learningunityears(luy_to_duplicate, academic_years):
# Duplicate learning unit year with different academic year
luys = {}
for ac_year in academic_years:
new_luy = copy(luy_to_duplicate)
new_luy.pk = None
new_luy.uuid = uuid.uuid4()
new_luy.academic_year = ac_year
new_luy.save()
luys[ac_year.year] = new_luy
return luys
def _get_valid_cms_form_data(cms_translated_text):
"""Valid data for form CMS form"""
return {
"trans_text": getattr(cms_translated_text, 'text'),
"cms_id": getattr(cms_translated_text, 'id'),
"reference": getattr(cms_translated_text, 'reference')
}
def _get_valid_teaching_material_form_data(teaching_material):
"""Valid data for teaching material form"""
data = {
'title': teaching_material.title,
}
if getattr(teaching_material, 'mandatory', False):
data['mandatory'] = 'on'
return data
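# Usage sketch (hypothetical values): for a TranslatedText with id=42,
# reference=7 and text='Text random', _get_valid_cms_form_data returns
# {'trans_text': 'Text random', 'cms_id': 42, 'reference': 7}.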
|
uclouvain/osis_louvain
|
base/tests/forms/test_learning_unit_pedagogy.py
|
Python
|
agpl-3.0
| 9,738
|