| code (string, length 2–1.05M) | repo_name (string, length 5–104) | path (string, length 4–251) | language (1 class) | license (15 classes) | size (int32, 2–1.05M) |
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
"""displays the root page"""
############################################################
#
# Copyright 2010, 2014 Mohammed El-Afifi
# This file is part of yabe.
#
# yabe is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# yabe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with yabe. If not, see <http://www.gnu.org/licenses/>.
#
# program: yet another blog engine
#
# file: views.py
#
# function: root view
#
# description: displays the site root page
#
# author: Mohammed Safwat (MS)
#
# environment: Kate 3.4.5, python 2.6.4, Fedora release 13 (Goddard)
#
# notes: This is a private program.
#
############################################################
from django import shortcuts
def index(request):
"""Display the site root page.
`request` is the request invoking this view.
"""
return shortcuts.render_to_response('index.html')
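# A minimal sketch (not part of the original file) of how this view might be
# wired into a Django 1.x urls.py; the module path and pattern name are
# assumptions:
# from django.conf.urls.defaults import patterns, url
# urlpatterns = patterns('yabe.views', url(r'^$', 'index', name='index'))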
| MSK61/django-yabe | yabe/views.py | Python | gpl-3.0 | 1,382 |
import subprocess
import os
import dialog
class RDP():
def __init__(self):
self.d = dialog.Dialog(dialog="dialog")
self.storage_path = os.path.expanduser("~/LidskjalvData")
def show_rdp_menu(self, site, host):
# """ """
# FIXME
# print("FIXME: rdp_menu")
# sys.exit(1)
while True:
choices = []
# Menu entries for the tags handled below; the labels are assumptions,
# since the original list only offered "Q".
choices.append(["F", "Fullscreen"])
choices.append(["90", "90% of screen"])
choices.append(["75", "75% of screen"])
choices.append(["50", "50% of screen"])
choices.append(["25", "25% of screen"])
choices.append(["", " "])
choices.append(["Q", "Quit"])
sz = os.get_terminal_size()
# width = sz.columns
# height = sz.lines
code, tag = self.d.menu(
"Choose an action",
height=sz.lines - 5,
width=sz.columns - 8,
menu_height=sz.lines - 15,
choices=choices)
if code == self.d.OK:
if tag == "Q":
return None
if tag == "F":
subprocess.Popen(["rdesktop", host])
if tag == "90":
subprocess.Popen(["rdesktop", host])
if tag == "75":
subprocess.Popen(["rdesktop", host])
if tag == "50":
subprocess.Popen(["rdesktop", host])
if tag == "25":
subprocess.Popen(["rdesktop", host])
"""
rdesktop
-g 1824x1026
-k da
-u USER: adusername
-d DOMAIN: myad
-p PASSWORD: password
-T 'NetworkAdmin'
-a 15
192.168.7.31
"""
| berserkerbernhard/Lidskjalv | code/networkmonitor/modules/serviceutilities/rdp.py | Python | gpl-3.0 | 1,463 |
import magic
from pytag.structures import PytagDict
from pytag.constants import FIELD_NAMES
from pytag.formats import OggVorbisReader, OggVorbis, Mp3Reader, Mp3
MIMETYPE = {'application/ogg': (OggVorbisReader, OggVorbis),
'audio/mpeg': (Mp3Reader, Mp3)
}
class Tag:
"""Descriptor class.
"""
def __init__(self, name):
self.name = name
def __get__(self, instance, cls):
if instance is None: # pragma: nocover
return self
try:
return instance.__dict__[self.name]
except KeyError:
tags = instance.get_tags()
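# Every field get_tags() did not provide is cached as None, so the
# lookup below succeeds either way.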
for name in (set(tags) ^ set(FIELD_NAMES)):
instance.__dict__[name] = None
return instance.__dict__[self.name]
def __set__(self, instance, value):
instance.__dict__[self.name] = value
class MetaAudio(type):
"""Set all the FIELD_NAMES as class descriptors.
"""
def __init__(cls, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in FIELD_NAMES:
setattr(cls, name, Tag(name))
class AudioReader(metaclass=MetaAudio):
"""High level interface for pytag. Creates a new object if the audio format
is supported, or raises a :py:exc:`pytag.FormatNotSupportedError` if not.
"""
_index = 0
def __init__(self, path):
with magic.Magic(flags=magic.MAGIC_MIME_TYPE) as m:
self.mimetype = m.id_filename(path)
try:
self._format = MIMETYPE[self.mimetype][self._index](path)
except KeyError:
raise FormatNotSupportedError(
'"{0}" type is not supported'.format(self.mimetype))
def get_tags(self):
tags = PytagDict(self._format.get_tags())
for name, value in tags.items():
setattr(self, name, value)
return tags
class Audio(AudioReader):
"""Extends :py:class:`pytag.AudioReader` and adds a ``write_tags`` method.
"""
_index = 1
def write_tags(self, tags):
self._format.write_tags(PytagDict(tags))
class FormatNotSupportedError(Exception):
pass
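# A short usage sketch (illustrative only; the file name is hypothetical):
# tags = AudioReader('song.ogg').get_tags() # read-only access
# Audio('song.ogg').write_tags({'title': 'New title'}) # adds write support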
| jlesquembre/pytag | pytag/interface.py | Python | gpl-3.0 | 2,131 |
# -*- coding: utf-8 -*-
class GetText():
_file_path = None
_body_list = None
_target = None
def __init__(self, file_path):
#self._file_path = open(file_path, "r+").read().replace("<br","\n<br")
self._file_path = file_path.replace("<br />", "<br />\n")
#self._file_path = (self._file_path.replace("\n",";;")).split(";;")
#self._file_path = file_path
#print(self._file_path)
self._file_path = ((self._file_path.replace("\n", "123")).replace(" ", "")).replace("> <", "")
self._file_path = (self._file_path).replace("<p", "\n<p")
#print (self._file_path)
self._body_list = self.get_body().split("123")
self.set_target_content(self.get_body())
self.get_beginning()
self.remove_tags()
#super(leGetText, self).__init__()
def get_html(self):
return self._file_path
def get_body(self):
return self.get_html().split("</head>", -1)[1]
def get_first_br_line(self):
br_index = 0
for i in self._body_list:
if(i.find("<br") > -1):
return (self._body_list.index(i))
else:
br_index += 1 # "++" is not an increment operator in Python
return br_index
def get_since_first_br(self):
since_first_br = self._body_list
del since_first_br[0:self.get_first_br_line()]
self.set_target_content(since_first_br)
return since_first_br
def set_target_content(self, content):
self._target = content
def get_target(self):
return self._target
def br_check(self, info):
if(info == "<br>"):
return True
elif(info == "<br />"):
return True
elif(info == "</ br>"):
return True
elif(info == "< br>"):
return True
else:
return False
def get_beginning(self):
# verifying whether there's any br in the next index
since_last_br = self.get_since_first_br()
#getting beginning of the lyrics
#print(since_last_br)
i = 0
for j in since_last_br:
if (
j.find("<br") > -1 and
since_last_br[i+1].find("<br") > -1 and
since_last_br[i+2].find("<br") > -1 and
since_last_br[i+3].find("<br") > -1 and
self.br_check(since_last_br[i]) == False and
self.br_check(since_last_br[i+1]) == False and
self.br_check(since_last_br[i+2]) == False and
self.br_check(since_last_br[i+3]) == False
):
del since_last_br[0:i]
break
else:
i = i +1
if (since_last_br[i].find("<br") > -1 and i+3< len(since_last_br) and self.br_check(since_last_br[i+3]) == False):
#print("i + 1 contains <br>")
#print(since_last_br[i])
del since_last_br[0:i]
# print (since_last_br)
break
self.set_target_content(since_last_br[0:200])
def remove_tags(self):
#removing tags
#removing b
html_file = "ç\n".join(self.get_target())
while(html_file.find("<b>") > -1):
#print("occur")
op = html_file.index("<b>")
cl = html_file.index("/b>")+3
html_file = list(html_file)
#for i in range(op, cl):
del html_file[op:cl]
html_file = "".join(html_file)
#removing [] (brackets) => from "[" to "\n"
while(html_file.find("[") > -1):
op = html_file.index("[")
cl = html_file.find("]")+1
bracket_line = html_file.split("ç")
l = 0
for k in bracket_line:
if(k.find("[") > -1):
break
l = l +1
del bracket_line[l]
html_file = ""
for k in bracket_line:
html_file = html_file + k+"ç"
'''
html_file = list(html_file)
#for i in range(op, cl):
del html_file[op:cl]
html_file = "".join(html_file)'''
self.set_target_content(html_file.split("ç"))
def get_end(self):
#getting the end of the lyrics (check whether the next tag
#being opened is the same as the one being closed)
broken_since = "".join(self.get_target())
broken_since = broken_since.split("\n")
new_broken_since = [] #turning <p> into <br>
for x in broken_since:
la = x.replace("<p", "<br")
la = la.replace("</p>", "")
new_broken_since.append(la)
broken_since = new_broken_since
#checking if we are still in the lyrics block
until_the_end = []
l = 0
for k in broken_since:
kk = list(k)
if len(k) > 0:
'''
print("\n\n")
print(broken_since[l+1].find("<br"))
print(broken_since[l+1])
print("< l1 \n l2 >")
print(broken_since[l + 2].find("<br"))
print("\n\n")'''
if(l < 3 or k[0] != "<" or k[1] == "b"
or (broken_since[l+1].find("<br") > -1 and broken_since[l+2].find("<br"))
):
if (k.find("><br") == -1):
#print(k)
until_the_end.append("\n"+k)
else:
break
else:
#print("\n\n")
break
l = l + 1
#removing tags
final = ""
block = False
for j in until_the_end:
i = 0
moral = list(j)
for i in range(0, len(moral)):
if(moral[i] == "<"):
block = True
elif(moral[i] == ">"):
block = False
if(block==False and moral[i]!="<" and moral[i]!=">"):
final=final+moral[i]
return final
'''
oo = open("../../tmp/lrc", "r").read()
#print(oo)
get_text = _GetText(oo)
#print(get_text.get_target())
final = get_text.get_end()
final = final.encode("latin1").decode("utf-8")
'''
#print(final)
'''
#rs = open("../../tmp/lrc", "w+")
#rs.write(final)'
'''
| henriquesouza/toply | src/objects/GetText.py | Python | gpl-3.0 | 6,417 |
# -*- coding: utf-8 -*-
import random
import textwrap
def print_bold(msg):
#Function that prints a string in bold
print("\033[1m"+msg+"\033[0m")
def print_linea_punteada(width=72):
print('-'*width)
def ocupar_chozas():
ocupantes = ['enemigo','amigo','no ocupada']
chozas = []
while len(chozas) < 5: #We define a fixed number of settlements and mark each as friend or enemy
eleccion_aleatoria = random.choice(ocupantes)
chozas.append(eleccion_aleatoria)
return chozas
def mostrar_mision():
print("\033[1m"+ "Ataca a los Orcos V0.0.5" + "\033[0m")
msg = ("La guerra entre los humanos y sus arqueros enemigos, los Orcos, estaba en el aire."
"Un enorme ejército de orcos se dirigía hacia los territos de los humanos. Destruían"
"prácticamente todo en su camino. Los grandes reyes de la raza humana, se unieron para"
" derrotar a su peor enemigo, era la gran batalla de su tiempo. Sir Gandorel, uno de los "
"valientes caballeros que guardan las llanuras meridionales, inició un largo viaje hacia el este"
", a través de un desconocido bosque espeso. Durante dos días y dos noches, se movió con cautela "
"a través del grueso bosque. En su camino, vio un pequeño asentamiento aislado. Cansado y con "
"la esperanza de reponer su stock de alimentos, decidió tomar un desvío. Cuando se acercó al pueblo,"
"vio cinco chozas. No había nadie alrededor. En ese instante, decidió entrar en un choza...")
print(textwrap.fill(msg, width = 72))
print("\033[1m"+"Misión:"+"\033[0m")
print("Elige una choza donde poder descansar...")
print("\033[1m"+"NOTA:"+"\033[0m")
print("¡Cuidado! Hay enemigos rondando la zona")
print_linea_punteada()
def mostrar_salud(medidor_salud, bold):
if bold:
print_bold("Salud Sir Gandorel:")
print_bold("%d"%(medidor_salud['jugador']))
print_bold("Salud Enemigo:")
print_bold("%d"%(medidor_salud['enemigo']))
else:
print("Salud Sir Gandorel:")
print("%d"%(medidor_salud['jugador']))
print("Salud Enemigo:")
print("%d"%(medidor_salud['enemigo']))
def procesar_decision_usuario():
msg = "\033[1m" + "Elige una choza, introduce un número entre 1 y 5: " + "\033[0m"
decision_usuario = input("\n"+msg)
idx = int(decision_usuario)
return idx
def reset_medidor_salud(medidor_salud):
medidor_salud['jugador']=40
medidor_salud['enemigo']=30
def atacar(medidor_salud):
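# The hit list is weighted 4:6, so each strike wounds the player 40% of
# the time and the enemy 60% of the time.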
lista_golpes = 4*['jugador']+6*['enemigo']
unidad_herida = random.choice(lista_golpes)
puntos_vida = medidor_salud[unidad_herida]
herida = random.randint(10,15)
medidor_salud[unidad_herida] = max(puntos_vida- herida,0)
print("¡Ataque!")
mostrar_salud(medidor_salud,bold=False)
def revelar_ocupantes(idx, chozas):
msg=""
print("Revelando los ocupantes...")
for i in range(len(chozas)):
ocupantes_info = "<%d:%s>"%(i+1, chozas[i])
if i+1 == idx:
ocupantes_info = "\033[1m" + ocupantes_info + "\033[0m"
msg += ocupantes_info + " "
print("\t" + msg)
print_linea_punteada()
#The following function implements an iterative combat system
def play_game(medidor_salud):
chozas = ocupar_chozas()
idx = procesar_decision_usuario()
revelar_ocupantes(idx, chozas)
if chozas[idx-1] != 'enemigo':
print_bold("¡Enhorabuena! ¡Has GANADO!")
else:
print_bold('¡Enemigo encontrado!')
mostrar_salud(medidor_salud, bold=True)
continuar_ataque = True
while continuar_ataque:
continuar_ataque = input("...continuar con el ataque? Si(1)/No(0)")
if continuar_ataque == '0': # input() returns a string, so compare against '0'
print_bold("Huyendo con el siguiente estado de salud...")
mostrar_salud(medidor_salud, bold=True)
print_bold("¡Game Over!")
break
atacar(medidor_salud)
if medidor_salud['enemigo'] <=0:
print_bold("¡Sir Gandorel ha derrotado a su enemigo!")
break
if medidor_salud['jugador'] <=0:
print_bold("Sir Gandorel ha muerto ...")
break
#Function that runs the main program loop
def run_application():
seguir_jugando = '1'
medidor_salud = {}
reset_medidor_salud(medidor_salud)
mostrar_mision()
while seguir_jugando == '1': # input() returns a string, so compare against '1'
reset_medidor_salud(medidor_salud)
play_game(medidor_salud)
seguir_jugando = input("¿Quieres jugar de nuevo? Si(1)/No(0):")
if __name__ == '__main__':
run_application()
| tidus747/Tutoriales_juegos_Python | Ataca a los orcos V0.0.5/Ataca_a_los_orcos_V0.0.5.py | Python | gpl-3.0 | 4,736 |
'''
Open Source Initiative OSI - The MIT License:Licensing
Tue, 2006-10-31 04:56 nelson
The MIT License
Copyright (c) 2009 BK Precision
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
This script talks to the DC load in two ways:
1. Using a DCLoad object (you'd use this method when you write a
python application that talks to the DC load).
2. Using the COM interface. This shows how python code uses the
COM interface. Other programming environments (e.g., Visual
Basic and Visual C++) would use very similar techniques to
talk to the DC load via COM.
Note that the DCLoad object and the COM server interface functions
always return strings.
$RCSfile: client.py $
$Revision: 1.0 $
$Date: 2008/05/16 21:02:50 $
$Author: Don Peterson $
'''
import sys, dcload
import time
try:
from win32com.client import Dispatch
except:
pass
err = sys.stderr.write
def TalkToLoad(load, port, baudrate):
'''load is either a COM object or a DCLoad object. They have the
same interface, so this code works with either.
port is the COM port on your PC that is connected to the DC load.
baudrate is a supported baud rate of the DC load.
'''
def test(cmd, results):
if results:
print cmd, "failed:"
print " ", results
exit(1)
else:
print cmd
load.Initialize(port, baudrate) # Open a serial connection
print "Time from DC Load =", load.TimeNow()
test("Set to remote control", load.SetRemoteControl())
test("Set max current to 1 A", load.SetMaxCurrent(1))
test("Set CC current to 0.0 A", load.SetCCCurrent(0.0))
print "Settings:"
print " Mode =", load.GetMode()
print " Max voltage =", load.GetMaxVoltage()
print " Max current =", load.GetMaxCurrent()
print " Max power =", load.GetMaxPower()
print " CC current =", load.GetCCCurrent()
print " CV voltage =", load.GetCVVoltage()
print " CW power =", load.GetCWPower()
print " CR resistance =", load.GetCRResistance()
print " Load on timer time =", load.GetLoadOnTimer()
print " Load on timer state =", load.GetLoadOnTimerState()
print " Trigger source =", load.GetTriggerSource()
print " Function =", load.GetFunction()
print
f = open("output.txt", 'w')
f.write("V\tA\tW\n")
test("Turn on load", load.TurnLoadOn())
i = 0.0
while i < 0.21:
test("Set CC current to %f A" % i, load.SetCCCurrent(i))
i += 0.005
time.sleep(0.2)
values = load.GetInputValues()
for value in values.split("\t"):
print " ", value
f.write(value.split(" ")[0])
f.write('\t')
f.write("\n")
f.close()
test("Turn off load", load.TurnLoadOff())
test("Set to local control", load.SetLocalControl())
def Usage():
name = sys.argv[0]
msg = '''Usage: %(name)s {com|obj} port baudrate
Demonstration python script to talk to a B&K DC load either via the COM
(component object model) interface or via a DCLoad object (in dcload.py).
port is the COM port number on your PC that the load is connected to.
baudrate is the baud rate setting of the DC load.
''' % locals()
print msg
exit(1)
def main():
if len(sys.argv) != 4:
Usage()
access_type = sys.argv[1]
port = int(sys.argv[2])
baudrate = int(sys.argv[3])
if access_type == "com":
load = Dispatch('BKServers.DCLoad85xx')
elif access_type == "obj":
load = dcload.DCLoad()
else:
Usage()
TalkToLoad(load, port, baudrate)
return 0
main()
| sbobovyc/LabNotes | bk_precision_8500/profile_solarcell.py | Python | gpl-3.0 | 4,756 |
#!/usr/bin/env python
#simple smileys icon builder..
SMILEYS_INDEX=(
"""
# Default smileys
[default]
happy.png :) :-)
excited.png :-D :-d :D :d
sad.png :-( :(
wink.png ;-) ;)
tongue.png :P :p :-P :-p
shocked.png =-O =-o
kiss.png :-*
glasses-cool.png 8-)
embarrassed.png :-[
crying.png :'( :'-(
thinking.png :-/ :-\\
angel.png O:-) o:-)
shut-mouth.png :-X
moneymouth.png :-$
foot-in-mouth.png :-!
shout.png >:o >:O
skywalker.png C:-) c:-) C:) c:)
monkey.png :-(|) :(|) 8-|)
cyclops.png O-) o-)
[XMPP]
# Following XEP-0038 + GTalk + our default set, in default set order
# The GTalk strings come from ticket #3307.
happy.png :) :-) =)
excited.png :-D :-d :D :d =D =d
sad.png :-( :(
wink.png ;-) ;) ;^)
tongue.png :P :p :-P :-p
shocked.png =-O =-o :-O :-o
kiss.png :kiss: :-*
glasses-cool.png 8-) B-)
embarrassed.png :-[
crying.png :'-( :'(
thinking.png :-/ :-\\
angel.png O:-) o:-)
shut-mouth.png :-X
moneymouth.png :-$
foot-in-mouth.png :-!
shout.png >:o >:O
# Following XEP-0038 + GTalk
angry.png >:-( >:( X-( x-(
good.png :yes:
bad.png :no:
stop.png :wait:
rose.png @->-- :rose:
phone.png :telephone:
mail.png :email:
lamp.png :jabber:
cake.png :cake:
in_love.png :heart: :love: <3
love-over.png :brokenheart:
musical-note.png :music:
beer.png :beer:
coffee.png :coffee:
coins.png :money:
moon.png :moon:
sun.png :sun:
star.png :star:
# Others
neutral.png :| :-|
victory.png \\m/
# Hidden icons from the default set.
skywalker.png C:-) c:-) C:) c:)
monkey.png :-(|) :(|) 8-|)
cyclops.png O-) o-)
# Following AIM 6.1
[AIM]
happy.png :-) :)
wink.png ;-) ;)
sad.png :-( :(
tongue.png :P :p :-P :-p
shocked.png =-O
kiss.png :-*
shout.png >:o
excited.png :-D :D
moneymouth.png :-$
foot-in-mouth.png :-!
embarrassed.png :-[
angel.png O:-)
thinking.png :-\\ :-/
crying.png :'(
shut-mouth.png :-X
glasses-cool.png 8-)
skywalker.png C:-) c:-) C:) c:)
monkey.png :-(|) :(|) 8-|)
cyclops.png O-) o-)
# Following Windows Live Messenger 8.1
[MSN]
happy.png :) :-)
excited.png :D :d :-D :-d
wink.png ;) ;-)
shocked.png :-O :-o :O :o
tongue.png :-P :P :-p :p
glasses-cool.png (H) (h)
angry.png :@ :-@
embarrassed.png :$ :-$
confused.png :S :s :-S :-s
sad.png :( :-(
crying.png :'(
neutral.png :| :-|
devil.png (6)
angel.png (A) (a)
in_love.png (L) (l)
love-over.png (U) (u)
msn.png (M) (m)
cat.png (@)
dog.png (&)
moon.png (S)
star.png (*)
film.png (~)
musical-note.png (8)
mail.png (E) (e)
rose.png (F) (f)
rose-dead.png (W) (w)
clock.png (O) (o)
kiss.png (K) (k)
present.png (G) (g)
cake.png (^)
camera.png (P) (p)
lamp.png (I) (i)
coffee.png (C) (c)
phone.png (T) (t)
hug-left.png ({)
hug-right.png (})
beer.png (B) (b)
drink.png (D) (d)
boy.png (Z) (z)
girl.png (X) (x)
good.png (Y) (y)
bad.png (N) (n)
vampire.png :[ :-[
goat.png (nah)
sun.png (#)
rainbow.png (R) (r)
quiet.png :-#
teeth.png 8o|
glasses-nerdy.png 8-|
sarcastic.png ^o)
secret.png :-*
sick.png +o(
snail.png (sn)
turtle.png (tu)
plate.png (pl)
bowl.png (||)
pizza.png (pi)
soccerball.png (so)
car.png (au)
airplane.png (ap)
umbrella.png (um)
island.png (ip)
computer.png (co)
mobile.png (mp)
brb.png (brb)
rain.png (st)
highfive.png (h5)
coins.png (mo)
sheep.png (bah)
dont-know.png :^)
thinking.png *-)
thunder.png (li)
party.png <:o)
eyeroll.png 8-)
sleepy.png |-)
bunny.png ('.')
skywalker.png C:-) c:-) C:) c:)
monkey.png :-(|) :(|) 8-|)
cyclops.png O-) o-)
# Hidden MSN emotes
cigarette.png (ci) (CI)
handcuffs.png (%)
console.png (xx) (XX)
fingers-crossed.png (yn) (YN)
# Following QQ 2006
[QQ]
shocked.png /:O /jy /surprised
curl-lip.png /:~ /pz /curl_lip
desire.png /:* /se /desire
dazed.png /:| /dazed
party.png /8-) /dy /revel
crying.png /:< /ll /cry
bashful.png /:$ /hx /bashful
shut-mouth.png /:X /bz /shut_mouth
sleeping.png /:Z /shui /sleep
weep.png /:'( /dk /weep
embarrassed.png /:-| /gg /embarassed
pissed-off.png /:@ /fn /pissed_off
act-up.png /:P /tp /act_up
excited.png /:D /cy /toothy_smile
happy.png /:) /wx /small_smile
sad.png /:( /ng /sad
glasses-cool.png /:+ /kuk /cool
doctor.png /:# /feid /SARS
silly.png /:Q /zk /crazy
sick.png /:T /tu /vomit
snicker.png /;p /tx /titter
cute.png /;-D /ka /cute
disdain.png /;d /by /disdain
arrogant.png /;o /am /arrogant
starving.png /:g /jie /starving
sleepy.png /|-) /kun /sleepy
terror.png /:! /jk /terror
hot.png /:L /sweat
smirk.png /:> /hanx /smirk
soldier.png /:; /db /soldier
struggle.png /;f /fendou /struggle
curse.png /:-S /zhm /curse
question.png /? /yiw /question
quiet.png /;x /xu /shh
hypnotized.png /;@ /yun /dizzy
excruciating.png /:8 /zhem /excrutiating
freaked-out.png /;! /shuai /freaked_out
skeleton.png /!!! /kl /skeleton
hammer.png /xx /qiao /hammer
bye.png /bye /zj /bye
go-away.png /go /shan /go
afraid.png /shake /fad /shake
amorous.png /love /aiq /love
jump.png /jump /tiao /jump
search.png /find /zhao /search
lashes.png /& /mm /beautiful_eyebrows
pig.png /pig /zt /pig
cat.png /cat /mm /cat
dog.png /dog /xg /dog
hug-left.png /hug /yb /hug
coins.png /$ /qianc /money
lamp.png /! /dp /lightbulb
bowl.png /cup /bei /cup
cake.png /cake /dg /cake
thunder.png /li /shd /lightning
bomb.png /bome /zhd /bomb
knife.png /kn /dao /knife
soccerball.png /footb /zq /soccer
musical-note.png /music /yy /music
poop.png /shit /bb /shit
coffee.png /coffee /kf /coffee
hungry.png /eat /fan /eat
pill.png /pill /yw /pill
rose.png /rose /mg /rose
wilt.png /fade /dx /wilt
kiss.png /kiss /wen /kiss
in_love.png /heart /xin /heart
love-over.png /break /xs /broken_heart
meeting.png /meeting /hy /meeting
present.png /gift /lw /gift
phone.png /phone /dh /phone
clock.png /time /sj /time
mail.png /email /yj /email
tv.png /TV /ds /TV
sun.png /sun /ty /sun
moon.png /moon /yl /moon
good.png /strong /qiang /thumbs_up
bad.png /weak /ruo /thumbs_down
handshake.png /share /ws /handshake
victory.png /v /shl /victory
beauty.png /<J> /mn /beauty
qq.png /<QQ> /qz /qq
blowkiss.png /<L> /fw /blow_kiss
angry.png /<O> /oh /angry
liquor.png /<B> /bj /baijiu
can.png /<U> /qsh /soda
watermelon.png /<W> /xigua /watermelon
rain.png /<!!> /xy /rain
cloudy.png /<~> /duoy /cloudy
snowman.png /<Z> /xr /snowman
star.png /<*> /xixing /star
girl.png /<00> /nv /woman
boy.png /<11> /nan /man
skywalker.png C:-) c:-) C:) c:)
monkey.png :-(|) :(|) 8-|)
cyclops.png O-) o-)
# Following ICQ 6.0
[ICQ]
happy.png :-) :)
neutral.png :-$
sad.png :-( :(
shocked.png =-O
wink.png ;-) ;)
tongue.png :-P :P :-p :p
music.png [:-}
laugh.png *JOKINGLY*
sleeping.png *TIRED*
crying.png :'( :'-(
sick.png :-!
kissed.png *KISSED*
stop.png *STOP*
kiss.png :-{} :-*
kissing.png *KISSING*
victory.png *YAHOO*
silly.png %)
embarrassed.png :-[
devil.png ]:->
angel.png O:-)
rose.png @}->--
shut-mouth.png :-X :X :-x :x
bomb.png @=
thinking.png :-\\ :-/
good.png *THUMBS\ UP*
shout.png >:o >:O :-@
beer.png *DRINK*
excited.png :-D :D
glasses-cool.png 8-)
amorous.png *IN\ LOVE*
skywalker.png C:-) c:-) C:) c:)
monkey.png :-(|) :(|) 8-|)
cyclops.png O-) o-)
# Following Yahoo! Messenger 8.1
[Yahoo]
happy.png :) :-)
question.png :-/ :-\\
shocked.png :-O :O :-o :o
devil.png >:)
angel.png O:-) o:-) 0:-)
sick.png :-&
sleepy.png (:|
hypnotized.png @-)
on-the-phone.png :)]
sad.png :( :-(
amorous.png :x :-x :X :-X
angry.png X-( x-( X( x(
crying.png :((
glasses-nerdy.png :-B :-b
quiet.png :-$
drool.png =P~ =p~
lying.png :^O :^o
call-me.png :-c
wink.png ;) ;-)
embarrassed.png :">
mean.png :-> :>
laugh.png :)) :-))
bye.png =;
arrogant.png [-(
thinking.png :-?
waiting.png :-w :-W
at-wits-end.png ~x( ~X(
excited.png :D :-D :d :-d
tongue.png :-P :P :-p :p
glasses-cool.png B-) b-)
neutral.png :| :-|
sleeping.png I-) i-) |-)
clown.png :o) :O)
doh.png #-o #-O
weep.png :-<
go-away.png :-h
lashes.png ;;)
kiss.png :-* :*
confused.png :-S :-s
sarcastic.png /:)
eyeroll.png 8-|
silly.png 8-}
clap.png =D> =d>
mad-tongue.png >:P >:p
time-out.png :-t :-T
hug-left.png >:D< >:d<
love-over.png =((
hot.png #:-S #:-s
rotfl.png =)) :-j :-J
loser.png L-) l-)
party.png <:-P <:-p
nervous.png :-SS :-Ss :-sS :-ss
cowboy.png <):)
desire.png 8->
skywalker.png C:-) c:-) C:) c:)
monkey.png :-(|) :(|) 8-|)
cyclops.png O-) o-)
# Hidden Yahoo emotes
alien.png =:) >-)
beat-up.png b-( B-(
chicken.png ~:>
coffee.png ~o) ~O)
cow.png 3:-O 3:-o
dance.png \\:D/ \\:d/
rose.png @};-
dont-know.png :-L :-l
skeleton.png 8-X 8-x
lamp.png *-:)
monkey.png :(|)
coins.png $-)
peace.png :)>-
pig.png :@)
pray.png [-o< [-O<
pumpkin.png (~~)
shame.png [-X [-x
flag.png **==
clover.png %%-
musical-note.png :-"
giggle.png ;))
worship.png ^:)^
star.png (*)
waving.png >:/
talktohand.png :-@
# Only available after activating the Yahoo! Fighter IMVironment
male-fighter1.png o-> O->
male-fighter2.png o=> O=>
female-fighter.png o-+ O-+
yin-yang.png (%)
# Following MySpaceIM Beta 1.0.697.0
[MySpaceIM]
excited.png :D :-D
devil.png }:)
confused.png :Z
glasses-nerdy.png B)
bulgy-eyes.png %)
freaked-out.png :E
smile.png :) :-)
amorous.png :X
laugh.png :))
mohawk.png -:
mad-tongue.png X(
messed.png X)
glasses-nerdy.png Q)
doh.png :G
pirate.png P)
shocked.png :O
sidefrown.png :{
sinister.png :B
smirk.png :,
neutral.png :|
tongue.png :P :p
pissed-off.png B|
wink.png ;-) ;)
sad.png :[
kiss.png :x
skywalker.png C:-) c:-) C:) c:)
monkey.png :-(|) :(|) 8-|)
cyclops.png O-) o-)
# MXit standard emoticons
[MXit]
happy.png :-) :)
sad.png :-( :(
wink.png ;-) ;)
excited.png :-D :D :-> :>
neutral.png :-| :|
shocked.png :-O :O
tongue.png :-P :P
embarrassed.png :-$ :$
glasses-cool.png 8-)
in_love.png (H)
rose.png (F)
### Added in v3.0
boy.png (m)
girl.png (f)
star.png (*)
chilli.png (c)
kiss.png (x)
lamp.png (i)
pissed-off.png :e :-e
shut-mouth.png :-x :x
thunder.png (z)
coffee.png (U)
mrgreen.png (G)
### Added in v5.0
sick.png :o(
excruciating.png :-{ :{
amorous.png :-} :}
eyeroll.png 8-o 8o
crying.png :'(
thinking.png :-? :?
drool.png :-~ :~
sleeping.png :-z :z
lying.png :L)
glasses-nerdy.png 8-| 8|
pirate.png P-)
### Added in v5.9.7
bored.png :-[ :[
cold.png :-< :<
confused.png :-, :,
hungry.png :-C :C
stressed.png :-s :s
""")
import sys
sys.path.append("../")
import yobotproto
import yobotops
from re import escape as re_escape
import re
from cgi import escape as html_escape
tbl = {}
#ok, some major cleanup here...
#some simple groupings...
class SmileyRegistry(object):
def __init__(self):
#indexed by improto
self.regexp = {}
#indexed by (smiley_code, improto)
self.resourcetable = {}
self.allsmileys = {}
def addsmiley(self, improto, smiley):
self.allsmileys.setdefault(improto, set()).add(smiley)
def getsmileys(self, improto):
return self.allsmileys[improto]
DEFAULT_SCHEME = -256
UNUSED = -16
htmlescaped = SmileyRegistry()
plain = SmileyRegistry()
def gensmileys():
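# SMILEYS_INDEX format: "[section]" lines select the protocol, "#" lines
# are comments, and every other line is "icon.png emote [emote ...]".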
current_protocol = None
for l in SMILEYS_INDEX.split("\n"):
l = l.strip()
if not l:
continue
if l.startswith("#"):
continue
if l.startswith("["):
proto_name = l.strip("[]").lower()
if proto_name == "xmpp":
current_protocol = yobotproto.YOBOT_JABBER
elif proto_name == "aim":
current_protocol = yobotproto.YOBOT_AIM
elif proto_name == "msn":
current_protocol = yobotproto.YOBOT_MSN
elif proto_name == "yahoo":
current_protocol = yobotproto.YOBOT_YAHOO
elif proto_name == "default":
current_protocol = DEFAULT_SCHEME
else:
current_protocol = UNUSED
continue
if not current_protocol:
continue
items = l.split()
name, emotes = items[0], items[1:]
for emote in emotes:
htmled = html_escape(emote)
htmlescaped.resourcetable[(htmled, current_protocol)] = name
htmlescaped.addsmiley(current_protocol, htmled)
plain.resourcetable[(emote, current_protocol)] = name
plain.addsmiley(current_protocol, emote)
for o in (plain, htmlescaped):
for k, v in o.allsmileys.items():
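# Alternatives are sorted longest first so that e.g. ":((" wins over its prefix ":(".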
o.regexp[k] = re.compile("(%s)" % ("|".join([re.escape(s) for s in sorted(v,key=len, reverse=True)])))
try:
import cPickle as pickle
plain, htmlescaped = pickle.load(open("/tmp/.yobot_smileys_cached.pickle", "r"))
print "imported from pickle"
except Exception, e:
print e
gensmileys()
try:
pickle.dump((plain, htmlescaped), open("/tmp/.yobot_smileys_cached.pickle", "w"))
except Exception, e:
print e
#gensmileys()
if __name__ == "__main__":
sys.exit()
for k, v in plain.resourcetable.items():
smiley, proto = k
print "%-15s %-15s -> %s" % (smiley, yobotops.imprototostr(proto), v)
for k, v in plain.allsmileys.items():
print yobotops.imprototostr(k)
items = "\t"
counter = 0
for i in v:
i += " "
items += i
counter += len(i)
if counter >= 70:
items += "\n\t"
counter = 0
print items
| mnunberg/yobot | py/gui/smiley.py | Python | gpl-3.0 | 18,464 |
import env
import numpy as np
import metaomr
import metaomr.kanungo as kan
from metaomr.page import Page
import glob
import pandas as pd
import itertools
import os.path
import sys
from datetime import datetime
from random import random, randint
IDEAL = [path for path in sorted(glob.glob('testset/modern/*.png'))
if 'nostaff' not in path]
def random_params():
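# Draws random Kanungo degradation parameters; in that model nu is a
# constant pixel-flip probability, (a0, a) and (b0, b) control the
# distance-dependent flips of foreground and background pixels, and k is
# the size of the closing disk.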
if random() < 0.25:
nu = 0
else:
nu = random() * 0.05
if random() < 0.25:
a0 = a = 0
else:
a0 = random() * 0.2
a = 0.5 + random() * 2
if random() < 0.25:
b0 = b = 0
else:
b0 = random() * 0.2
b = 0.5 + random() * 2
k = randint(0, 4)
return nu, a0, a, b0, b, k
columns = pd.MultiIndex.from_product([['real', 'estimate'], 'nu a0 a b0 b k'.split()])
columns = columns.append(pd.MultiIndex.from_product([['estimate'],['stat','time','status','nfev']]))
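# Columns: real and estimated values of the six parameters, plus the fit
# statistic, runtime, optimizer status and evaluation count of the estimate.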
cols = []
results = []
fun = 'ks'
method = 'Nelder-Mead'
for image in IDEAL:
name = os.path.basename(image).split('.')[0]
page, = metaomr.open(image)
kimg = kan.KanungoImage(kan.normalized_page(page)[0])
for i in xrange(3):
params = random_params()
synth = Page(kimg.degrade(params))
synth.staff_dist = 8
for maxfev in [25, 50]:
start = datetime.now()
est_params = kan.est_parameters(synth, test_fn=kan.test_hists_ks if fun == 'ks' else kan.test_hists_chisq, opt_method=method, maxfev=maxfev)
end = datetime.now()
cols.append((name, fun, maxfev, i))
results.append(list(params) + list(est_params.x) + [est_params.fun, (end - start).total_seconds(), est_params.status, est_params.nfev])
sys.stderr.write('.')
res = pd.DataFrame(results, columns=columns)
res.index = pd.MultiIndex.from_tuples(cols)
res.index.names = 'doc test maxfev num'.split()
res.to_csv('kanungo_eval.csv')
sys.stderr.write('\n')
| ringw/MetaOMR | metaomr_tests/eval_kanungo_est.py | Python | gpl-3.0 | 1,954 |
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import json
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.conf import settings
from guardian.shortcuts import get_anonymous_user
from geonode.groups.models import GroupProfile, GroupInvitation, GroupCategory
from geonode.documents.models import Document
from geonode.layers.models import Layer
from geonode.maps.models import Map
from geonode.base.populate_test_data import create_models
from geonode.security.views import _perms_info_json
class SmokeTest(TestCase):
"""
Basic checks to make sure pages load, etc.
"""
fixtures = ['initial_data.json', "group_test_data"]
def setUp(self):
create_models(type='layer')
create_models(type='map')
create_models(type='document')
self.norman = get_user_model().objects.get(username="norman")
self.norman.groups.add(Group.objects.get(name='anonymous'))
self.test_user = get_user_model().objects.get(username='test_user')
self.test_user.groups.add(Group.objects.get(name='anonymous'))
self.bar = GroupProfile.objects.get(slug='bar')
self.anonymous_user = get_anonymous_user()
def test_group_permissions_extend_to_user(self):
"""
Ensures that when a user is in a group, the group permissions
extend to the user.
"""
layer = Layer.objects.all()[0]
# Set the default permissions
layer.set_default_permissions()
# Test that the anonymous user can read
self.assertTrue(
self.anonymous_user.has_perm(
'view_resourcebase',
layer.get_self_resource()))
# Test that the default perms give Norman view permissions but not
# write permissions
self.assertTrue(
self.norman.has_perm(
'view_resourcebase',
layer.get_self_resource()))
self.assertFalse(
self.norman.has_perm(
'change_resourcebase',
layer.get_self_resource()))
# Make sure Norman is not in the bar group.
self.assertFalse(self.bar.user_is_member(self.norman))
# Add norman to the bar group.
self.bar.join(self.norman)
# Ensure Norman is in the bar group.
self.assertTrue(self.bar.user_is_member(self.norman))
# Give the bar group permissions to change the layer.
permissions = {
'groups': {
'bar': [
'view_resourcebase',
'change_resourcebase']}}
layer.set_permissions(permissions)
self.assertTrue(
self.norman.has_perm(
'view_resourcebase',
layer.get_self_resource()))
# check that now norman can change the layer
self.assertTrue(
self.norman.has_perm(
'change_resourcebase',
layer.get_self_resource()))
# Test adding a new user to the group after setting permissions on the layer.
# Make sure Test User is not in the bar group.
self.assertFalse(self.bar.user_is_member(self.test_user))
self.assertFalse(
self.test_user.has_perm(
'change_resourcebase',
layer.get_self_resource()))
self.bar.join(self.test_user)
self.assertTrue(
self.test_user.has_perm(
'change_resourcebase',
layer.get_self_resource()))
def test_group_resource(self):
"""
Tests the resources method on a Group object.
"""
layer = Layer.objects.all()[0]
map = Map.objects.all()[0]
perm_spec = {'groups': {'bar': ['change_resourcebase']}}
# Give the self.bar group write perms on the layer
layer.set_permissions(perm_spec)
map.set_permissions(perm_spec)
# Ensure the layer is returned in the group's resources
self.assertTrue(layer.get_self_resource() in self.bar.resources())
self.assertTrue(map.get_self_resource() in self.bar.resources())
# Test the resource filter
self.assertTrue(
layer.get_self_resource() in self.bar.resources(
resource_type='layer'))
self.assertTrue(
map.get_self_resource() not in self.bar.resources(
resource_type='layer'))
# Revoke permissions on the layer from the self.bar group
layer.set_permissions("{}")
# Ensure the layer is no longer returned in the groups resources
self.assertFalse(layer.get_self_resource() in self.bar.resources())
def test_perms_info(self):
"""
Tests the perms_info function (which passes permissions to the response context).
"""
# Add test to test perms being sent to the front end.
layer = Layer.objects.all()[0]
layer.set_default_permissions()
perms_info = layer.get_all_level_info()
# Ensure there is only one group 'anonymous' by default
self.assertEqual(len(perms_info['groups'].keys()), 1)
# Add the foo group to the layer object groups
layer.set_permissions({'groups': {'bar': ['view_resourcebase']}})
perms_info = _perms_info_json(layer)
# Ensure foo is in the perms_info output
self.assertItemsEqual(
json.loads(perms_info)['groups'], {
'bar': ['view_resourcebase']})
def test_resource_permissions(self):
"""
Tests that the client can get and set group permissions through the test_resource_permissions view.
"""
self.assertTrue(self.client.login(username="admin", password="admin"))
layer = Layer.objects.all()[0]
document = Document.objects.all()[0]
map_obj = Map.objects.all()[0]
layer.set_default_permissions()
document.set_default_permissions()
map_obj.set_default_permissions()
objects = layer, document, map_obj
for obj in objects:
response = self.client.get(
reverse(
'resource_permissions',
kwargs=dict(
resource_id=obj.id)))
self.assertEqual(response.status_code, 200)
js = json.loads(response.content)
permissions = js.get('permissions', dict())
if isinstance(permissions, unicode) or isinstance(
permissions, str):
permissions = json.loads(permissions)
# Ensure the groups value is empty by default
expected_permissions = {}
if settings.DEFAULT_ANONYMOUS_DOWNLOAD_PERMISSION:
expected_permissions.setdefault(
u'anonymous', []).append(u'download_resourcebase')
if settings.DEFAULT_ANONYMOUS_VIEW_PERMISSION:
expected_permissions.setdefault(
u'anonymous', []).append(u'view_resourcebase')
self.assertItemsEqual(
permissions.get('groups'),
expected_permissions)
permissions = {
'groups': {
'bar': ['change_resourcebase']
},
'users': {
'admin': ['change_resourcebase']
}
}
# Give the bar group permissions
response = self.client.post(
reverse(
'resource_permissions',
kwargs=dict(resource_id=obj.id)),
data=json.dumps(permissions),
content_type="application/json")
self.assertEqual(response.status_code, 200)
response = self.client.get(
reverse(
'resource_permissions',
kwargs=dict(
resource_id=obj.id)))
js = json.loads(response.content)
permissions = js.get('permissions', dict())
if isinstance(permissions, unicode) or isinstance(
permissions, str):
permissions = json.loads(permissions)
# Make sure the bar group now has write permissions
self.assertItemsEqual(
permissions['groups'], {
'bar': ['change_resourcebase']})
# Remove group permissions
permissions = {"users": {"admin": ['change_resourcebase']}}
# Update the object's permissions to remove the bar group
response = self.client.post(
reverse(
'resource_permissions',
kwargs=dict(resource_id=obj.id)),
data=json.dumps(permissions),
content_type="application/json")
self.assertEqual(response.status_code, 200)
response = self.client.get(
reverse(
'resource_permissions',
kwargs=dict(
resource_id=obj.id)))
js = json.loads(response.content)
permissions = js.get('permissions', dict())
if isinstance(permissions, unicode) or isinstance(
permissions, str):
permissions = json.loads(permissions)
# Assert the bar group no longer has permissions
self.assertItemsEqual(permissions['groups'], {})
def test_create_new_group(self):
"""
Tests creating a group through the group_create route.
"""
d = dict(title='TestGroup',
description='This is a test group.',
access='public',
keywords='testing, groups')
self.client.login(username="admin", password="admin")
response = self.client.post(reverse('group_create'), data=d)
# successful POSTS will redirect to the group's detail view.
self.assertEqual(response.status_code, 302)
self.assertTrue(GroupProfile.objects.get(title='TestGroup'))
def test_delete_group_view(self):
"""
Tests deleting a group through the group_delete route.
"""
# Ensure the group exists
self.assertTrue(GroupProfile.objects.get(id=self.bar.id))
self.client.login(username="admin", password="admin")
# Delete the group
response = self.client.post(
reverse(
'group_remove', args=[
self.bar.slug]))
# successful POSTS will redirect to the group list view.
self.assertEqual(response.status_code, 302)
self.assertFalse(
GroupProfile.objects.filter(
id=self.bar.id).count() > 0)
def test_delete_group_view_no_perms(self):
"""
Tests deleting a group through the group_delete with a non-manager.
"""
# Ensure the group exists
self.assertTrue(GroupProfile.objects.get(id=self.bar.id))
self.client.login(username="norman", password="norman")
# Delete the group
response = self.client.post(
reverse(
'group_remove', args=[
self.bar.slug]))
self.assertEqual(response.status_code, 403)
# Ensure the group still exists
self.assertTrue(GroupProfile.objects.get(id=self.bar.id))
def test_groupmember_manager(self):
"""
Tests the get_managers method.
"""
norman = get_user_model().objects.get(username="norman")
admin = get_user_model().objects.get(username='admin')
# Make sure norman is not a user
self.assertFalse(self.bar.user_is_member(norman))
# Add norman to the self.bar group
self.bar.join(norman)
# Ensure norman is now a member
self.assertTrue(self.bar.user_is_member(norman))
# Ensure norman is not in the managers queryset
self.assertTrue(norman not in self.bar.get_managers())
# Ensure admin is in the managers queryset
self.assertTrue(admin in self.bar.get_managers())
def test_public_pages_render(self):
"""
Verify pages that do not require login load without internal error
"""
response = self.client.get("/groups/")
self.assertEqual(200, response.status_code)
response = self.client.get("/groups/group/bar/")
self.assertEqual(200, response.status_code)
response = self.client.get("/groups/group/bar/members/")
self.assertEqual(200, response.status_code)
# 302 for auth failure since we redirect to login page
response = self.client.get("/groups/create/")
self.assertEqual(302, response.status_code)
response = self.client.get("/groups/group/bar/update/")
self.assertEqual(302, response.status_code)
# 405 - json endpoint, doesn't support GET
response = self.client.get("/groups/group/bar/invite/")
self.assertEqual(405, response.status_code)
def test_protected_pages_render(self):
"""
Verify pages that require login load without internal error
"""
self.assertTrue(self.client.login(username="admin", password="admin"))
response = self.client.get("/groups/")
self.assertEqual(200, response.status_code)
response = self.client.get("/groups/group/bar/")
self.assertEqual(200, response.status_code)
response = self.client.get("/groups/group/bar/members/")
self.assertEqual(200, response.status_code)
response = self.client.get("/groups/create/")
self.assertEqual(200, response.status_code)
response = self.client.get("/groups/group/bar/update/")
self.assertEqual(200, response.status_code)
# 405 - json endpoint, doesn't support GET
response = self.client.get("/groups/group/bar/invite/")
self.assertEqual(405, response.status_code)
def test_group_activity_pages_render(self):
"""
Verify Activity List pages
"""
self.assertTrue(self.client.login(username="admin", password="admin"))
response = self.client.get("/groups/")
self.assertEqual(200, response.status_code)
response = self.client.get("/groups/group/bar/activity/")
self.assertEqual(200, response.status_code)
self.assertContains(response,
'<a href="/layers/geonode:CA">CA</a>',
count=0,
status_code=200,
msg_prefix='',
html=False)
self.assertContains(response,
'uploaded',
count=0,
status_code=200,
msg_prefix='',
html=False)
try:
# Add test to test perms being sent to the front end.
layer = Layer.objects.all()[0]
layer.set_default_permissions()
perms_info = layer.get_all_level_info()
# Ensure there is only one group 'anonymous' by default
self.assertEqual(len(perms_info['groups'].keys()), 1)
# Add the foo group to the layer object groups
layer.set_permissions({'groups': {'bar': ['view_resourcebase']}})
perms_info = _perms_info_json(layer)
# Ensure foo is in the perms_info output
self.assertItemsEqual(
json.loads(perms_info)['groups'], {
'bar': ['view_resourcebase']})
layer.group = self.bar.group
layer.save()
response = self.client.get("/groups/group/bar/activity/")
self.assertEqual(200, response.status_code)
self.assertContains(response,
'<a href="/layers/geonode:CA">CA</a>',
count=2,
status_code=200,
msg_prefix='',
html=False)
self.assertContains(response,
'uploaded',
count=2,
status_code=200,
msg_prefix='',
html=False)
finally:
layer.set_default_permissions()
layer.group = None
layer.save()
class MembershipTest(TestCase):
"""
Tests membership logic in the geonode.groups models
"""
fixtures = ["group_test_data"]
def test_group_is_member(self):
"""
Tests checking group membership
"""
anon = get_anonymous_user()
normal = get_user_model().objects.get(username="norman")
group = GroupProfile.objects.get(slug="bar")
self.assert_(not group.user_is_member(anon))
self.assert_(not group.user_is_member(normal))
def test_group_add_member(self):
"""
Tests adding a user to a group
"""
anon = get_anonymous_user()
normal = get_user_model().objects.get(username="norman")
group = GroupProfile.objects.get(slug="bar")
group.join(normal)
self.assert_(group.user_is_member(normal))
self.assertRaises(ValueError, lambda: group.join(anon))
class InvitationTest(TestCase):
"""
Tests invitation logic in geonode.groups models
"""
fixtures = ["group_test_data"]
def test_invite_user(self):
"""
Tests inviting a registered user
"""
normal = get_user_model().objects.get(username="norman")
admin = get_user_model().objects.get(username="admin")
group = GroupProfile.objects.get(slug="bar")
group.invite(normal, admin, role="member", send=False)
self.assert_(
GroupInvitation.objects.filter(
user=normal,
from_user=admin,
group=group).exists())
invite = GroupInvitation.objects.get(
user=normal, from_user=admin, group=group)
# Test that the user can access the token url.
self.client.login(username="norman", password="norman")
response = self.client.get(
"/groups/group/{group}/invite/{token}/".format(group=group, token=invite.token))
self.assertEqual(200, response.status_code)
def test_accept_invitation(self):
"""
Tests accepting an invitation
"""
anon = get_anonymous_user()
normal = get_user_model().objects.get(username="norman")
admin = get_user_model().objects.get(username="admin")
group = GroupProfile.objects.get(slug="bar")
group.invite(normal, admin, role="member", send=False)
invitation = GroupInvitation.objects.get(
user=normal, from_user=admin, group=group)
self.assertRaises(ValueError, lambda: invitation.accept(anon))
self.assertRaises(ValueError, lambda: invitation.accept(admin))
invitation.accept(normal)
self.assert_(group.user_is_member(normal))
self.assert_(invitation.state == "accepted")
def test_decline_invitation(self):
"""
Tests declining an invitation
"""
anon = get_anonymous_user()
normal = get_user_model().objects.get(username="norman")
admin = get_user_model().objects.get(username="admin")
group = GroupProfile.objects.get(slug="bar")
group.invite(normal, admin, role="member", send=False)
invitation = GroupInvitation.objects.get(
user=normal, from_user=admin, group=group)
self.assertRaises(ValueError, lambda: invitation.decline(anon))
self.assertRaises(ValueError, lambda: invitation.decline(admin))
invitation.decline(normal)
self.assert_(not group.user_is_member(normal))
self.assert_(invitation.state == "declined")
class GroupCategoriesTestCase(TestCase):
"""
Group Categories tests
"""
def setUp(self):
c1 = GroupCategory.objects.create(name='test #1 category')
g = GroupProfile.objects.create(title='test')
g.categories.add(c1)
g.save()
User = get_user_model()
u = User.objects.create(username='test')
u.set_password('test')
u.save()
def test_api(self):
api_url = '/api/groupcategory/'
r = self.client.get(api_url)
self.assertEqual(r.status_code, 200)
data = json.loads(r.content)
self.assertEqual(
data['meta']['total_count'],
GroupCategory.objects.all().count())
# check if we have non-empty group category
self.assertTrue(
GroupCategory.objects.filter(
groups__isnull=False).exists())
for item in data['objects']:
self.assertTrue(
GroupCategory.objects.filter(
slug=item['slug']).count() == 1)
g = GroupCategory.objects.get(slug=item['slug'])
self.assertEqual(item['member_count'], g.groups.all().count())
def test_group_categories_list(self):
view_url = reverse('group_category_list')
r = self.client.get(view_url)
self.assertEqual(r.status_code, 200)
def test_group_categories_add(self):
view_url = reverse('group_category_create')
r = self.client.get(view_url)
self.assertEqual(r.status_code, 200)
r = self.client.post(view_url)
self.assertEqual(r.status_code, 200)
self.client.login(username='test', password='test')
category = 'test #3 category'
r = self.client.post(view_url, {'name': category})
self.assertEqual(r.status_code, 302)
q = GroupCategory.objects.filter(name=category)
self.assertEqual(q.count(), 1)
self.assertTrue(q.get().slug)
| ingenieroariel/geonode | geonode/groups/tests.py | Python | gpl-3.0 | 22,844 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# CherryMusic documentation build configuration file, created by
# sphinx-quickstart on Fri Mar 1 23:32:37 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
sys.path.insert(0, os.path.abspath(os.path.join('..', '..')))
import cherrymusicserver as cherry
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'CherryMusic'
copyright = '2013, Tom Wallroth, with Tilman Boerner'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = os.path.splitext(cherry.VERSION)[0]
# The full version, including alpha/beta/rc tags.
release = cherry.VERSION
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'haiku'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
html_theme_options = {
# 'textcolor': '#333333',
'headingcolor': '#892601',
'linkcolor': '#2c5792',
'visitedlinkcolor': '#0c3762',
# 'hoverlinkcolor': '#0c3762',
}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
html_title = 'CherryMusic %s documentation' % (cherry.VERSION,)
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'cherrymusicdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
'papersize': 'a4paper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'CherryMusic.tex', 'CherryMusic Documentation',
'Author', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'CherryMusic', 'CherryMusic Documentation',
['Author'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'CherryMusic', 'CherryMusic Documentation',
'Author', 'CherryMusic', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = 'CherryMusic'
epub_author = 'Author'
epub_publisher = 'Author'
epub_copyright = '2013, Author'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
|
SDM-OS/playlist
|
doc/sphinx/conf.py
|
Python
|
gpl-3.0
| 9,469
|
# set S-JTSK axes orientation
import matplotlib
matplotlib.use('GTKAgg')
import matplotlib.pyplot as plt
ax=plt.gca()
#ax.set_ylim(ax.get_ylim()[::-1])
# direction of axes
ax.invert_xaxis()
ax.invert_yaxis()
# ticks position
for tick in ax.xaxis.get_major_ticks():
tick.label1On = False
tick.label2On = True
for tick in ax.yaxis.get_major_ticks():
tick.label1On = False
tick.label2On = True
plt.plot([1,2,3,1],[3,1,2,3])
# ticks string formatter
import matplotlib.ticker as ticker
formatter = ticker.FormatStrFormatter('%.2f m')
ax.xaxis.set_major_formatter(formatter)
# ticks func formatter
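# FuncFormatter passes each major tick's data value and its position index to the callback below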
def format(x, pos):
return "%s - %s" % (x,pos)
formatter = ticker.FuncFormatter(format)
ax.xaxis.set_major_formatter(formatter)
plt.show()
|
gizela/gizela
|
example/matplotlib/axis.py
|
Python
|
gpl-3.0
| 782
|
# coding=utf-8
#https://developers.google.com/drive/v3/web/quickstart/python
from __future__ import print_function
import httplib2
import os
import io
from apiclient import discovery
import oauth2client
from oauth2client import client
from oauth2client import tools
from apiclient.http import MediaIoBaseDownload
from apiclient.http import MediaFileUpload
import sys
import argparse
from pyfcm import FCMNotification
import h5py
"""
DESCRIPTION
Script with a class that manages Google Drive operations:
uploading, downloading and listing files.
"""
class GoogleManager:
def __init__(self):
self.SCOPES = 'https://www.googleapis.com/auth/drive'
self.CLIENT_SECRET_FILE = 'GoogleDrive_Client_secret.json'
self.APPLICATION_NAME = 'pythonscript'
print("[GOOGLE MANAGER] Google Manager started")
def init_for_upload(self,upload_file=None,upload_file_name=None):
if upload_file and upload_file_name:
self.upload_manager(upload_file,upload_file_name)
print("[GOOGLE MANAGER] Will upload file")
else:
raise ValueError("[ERROR] Object initializer has to have file name to upload and name of uploaded file in upload mode. Initialize object with mode, upload filename and upload destination name")
def init_for_download(self,download_file=None):
if download_file:
self.download_manager(download_file)
print("[GOOGLE MANAGER] Will download file")
else:
raise ValueError("[ERROR] Object initializer has to have file name to download in download mode. Initialize object with mode and file name to download")
def init_for_list(self):
self.download_manager(list=True)
def download_file(self,file_id, mimeType, filename,drive_service):
if "google-apps" in mimeType:
return
request = drive_service.files().get_media(fileId=file_id)
fh = io.FileIO(filename, 'wb')
downloader = MediaIoBaseDownload(fh, request)
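# stream the file in chunks; next_chunk() returns (status, done) until the download completes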
done = False
while done is False:
status, done = downloader.next_chunk()
print("[PROGRESS] Download %d%%." % int(status.progress() * 100))
def get_credentials(self):
SCOPES = 'https://www.googleapis.com/auth/drive'
CLIENT_SECRET_FILE = 'GoogleDrive_Client_secret.json'
APPLICATION_NAME = 'pythonscript'
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,'drive-python-quickstart.json')
store = oauth2client.file.Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
credentials = tools.run_flow(flow, store)
return credentials
def upload_manager(self,fileToUpload,nameToUpload):
credentials = self.get_credentials()
http = credentials.authorize(httplib2.Http())
drive_service = discovery.build('drive', 'v3', http=http)
file_metadata = {'name' : nameToUpload}
media = MediaFileUpload(fileToUpload,resumable=True)
file = drive_service.files().create(body=file_metadata,media_body=media,fields='id').execute()
print(file)
print("[GOOGLE MANAGER] File with name {} uploaded to Google Drive".format(nameToUpload))
def download_manager(self,fileToDownload=None,list = False):
credentials = self.get_credentials()
http = credentials.authorize(httplib2.Http())
service = discovery.build('drive', 'v3', http=http)
results = service.files().list(pageSize=10,fields="nextPageToken, files(id, name)").execute()
items = results.get('files', [])
if not items:
print("[GOOGLE MANAGER] No files found.")
else:
for item in items:
name = str(item["name"].encode('ascii', 'ignore'))
print("[GOOGLE MANAGER] Found file -> {}".format(name))
if name == fileToDownload and not list:
credentials = self.get_credentials()
http = credentials.authorize(httplib2.Http())
drive_service = discovery.build('drive', 'v3', http=http)
self.download_file(item['id'],"text/plain",item['name'],drive_service)
#drive = GoogleManager()
#drive.init_for_download("weights.h5")
#drive.init_for_download("model.json")
#drive.init_for_upload("more_images.h5","weights.h5")
#drive.init_for_upload("model_more_images.json","model.json")
#drive.init_for_list()
|
aleixo/cnn_fire
|
googlemanager.py
|
Python
|
gpl-3.0
| 4,878
|
from math import *
inp = raw_input()
spl = inp.split()
n = int(spl[0])
m = int(spl[1])
a = int(spl[2])
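# each dimension needs ceil(side / a) flagstones, since stones may overhang the square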
i = int(ceil(n * 1.0 / a))
j = int(ceil(m * 1.0 / a))
print max(1, i * j)
|
yamstudio/Codeforces
|
000/1A - Theatre Square.py
|
Python
|
gpl-3.0
| 177
|
import warnings
warnings.filterwarnings( "ignore", message = "The sre module is deprecated, please import re." )
from simplejson import JSONEncoder
from datetime import datetime, date
class Json( JSONEncoder ):
def __init__( self, *args, **kwargs ):
JSONEncoder.__init__( self )
if args and kwargs:
raise ValueError( "Please provide either args or kwargs, not both." )
self.__args = args
self.__kwargs = kwargs
def __str__( self ):
if self.__args:
if len( self.__args ) == 1:
return self.encode( self.__args[ 0 ] )
return self.encode( self.__args )
return self.encode( self.__kwargs )
def default( self, obj ):
"""
Invoked by JSONEncoder.encode() for types that it doesn't know how to encode.
"""
if isinstance( obj, datetime ) or isinstance( obj, date ):
return unicode( obj )
if hasattr( obj, "to_dict" ):
return obj.to_dict()
raise TypeError
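# Illustrative usage (a sketch, not part of the original module):
# str( Json( success = True, created = date( 2013, 1, 1 ) ) ) yields a JSON
# object such as '{"success": true, "created": "2013-01-01"}' (key order may vary).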
|
osborne6/luminotes
|
view/Json.py
|
Python
|
gpl-3.0
| 946
|
"""Plotting module for Sympy.
A plot is represented by the ``Plot`` class that contains a reference to the
backend and a list of the data series to be plotted. The data series are
instances of classes meant to simplify getting points and meshes from sympy
expressions. ``plot_backends`` is a dictionary with all the backends.
This module gives only the essential. For all the fancy stuff use directly
the backend. You can get the backend wrapper for every plot from the
``_backend`` attribute. Moreover the data series classes have various useful
methods like ``get_points``, ``get_segments``, ``get_meshes``, etc, that may
be useful if you wish to use another plotting library.
Especially if you need publication ready graphs and this module is not enough
for you - just get the ``_backend`` attribute and add whatever you want
directly to it. In the case of matplotlib (the common way to graph data in
python) just copy ``_backend.fig`` which is the figure and ``_backend.ax``
which is the axis and work on them as you would on any other matplotlib object.
Simplicity of code takes much greater importance than performance. Don't use it
if you care at all about performance. A new backend instance is initialized
every time you call ``show()`` and the old one is left to the garbage collector.
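For example (an illustrative sketch; ``_backend`` is only set after
``show()`` or ``save()`` has been called, and ``x`` is assumed to be an
already defined ``Symbol``)::
    p = plot(x**2, show=False)
    p.show()                                  # instantiates the backend
    fig, ax = p._backend.fig, p._backend.ax   # raw matplotlib objects
    fig.savefig('parabola.png')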
"""
from inspect import getargspec
from itertools import chain
from sympy import sympify, Expr, Tuple, Dummy
from sympy.external import import_module
from sympy.core.compatibility import set_union
from sympy.utilities.decorator import doctest_depends_on
import warnings
from experimental_lambdify import (vectorized_lambdify, lambdify)
# N.B.
# When changing the minimum module version for matplotlib, please change
# the same in the `SymPyDocTestFinder`` in `sympy/utilities/runtests.py`
# Backend specific imports - textplot
from sympy.plotting.textplot import textplot
# Global variable
# Set to False when running tests / doctests so that the plots don't show.
_show = True
def unset_show():
global _show
_show = False
##############################################################################
# The public interface
##############################################################################
class Plot(object):
"""The central class of the plotting module.
For interactive work the function ``plot`` is better suited.
This class permits the plotting of sympy expressions using numerous
backends (matplotlib, textplot, the old pyglet module for sympy, Google
charts api, etc).
The figure can contain an arbitrary number of plots of sympy expressions,
lists of coordinates of points, etc. Plot has a private attribute _series that
contains all data series to be plotted (expressions for lines or surfaces,
lists of points, etc (all subclasses of BaseSeries)). Those data series are
instances of classes not imported by ``from sympy import *``.
The customization of the figure is on two levels. Global options that
concern the figure as a whole (eg title, xlabel, scale, etc) and
per-data series options (eg name) and aesthetics (eg. color, point shape,
line type, etc.).
The difference between options and aesthetics is that an aesthetic can be
a function of the coordinates (or parameters in a parametric plot). The
supported values for an aesthetic are:
- None (the backend uses default values)
- a constant
- a function of one variable (the first coordinate or parameter)
- a function of two variables (the first and second coordinate or
parameters)
- a function of three variables (only in nonparametric 3D plots)
Their implementation depends on the backend so they may not work in some
backends.
If the plot is parametric and the arity of the aesthetic function permits
it the aesthetic is calculated over parameters and not over coordinates.
If the arity does not permit calculation over parameters the calculation is
done over coordinates.
Only cartesian coordinates are supported for the moment, but you can use
the parametric plots to plot in polar, spherical and cylindrical
coordinates.
The arguments for the constructor Plot must be subclasses of BaseSeries.
Any global option can be specified as a keyword argument.
The global options for a figure are:
- title : str
- xlabel : str
- ylabel : str
- legend : bool
- xscale : {'linear', 'log'}
- yscale : {'linear', 'log'}
- axis : bool
- axis_center : tuple of two floats or {'center', 'auto'}
- xlim : tuple of two floats
- ylim : tuple of two floats
- aspect_ratio : tuple of two floats or {'auto'}
- autoscale : bool
- margin : float in [0, 1]
The per data series options and aesthetics are:
There are none in the base series. See below for options for subclasses.
Some data series support additional aesthetics or options:
ListSeries, LineOver1DRangeSeries, Parametric2DLineSeries,
Parametric3DLineSeries support the following:
Aesthetics:
- line_color : function which returns a float.
options:
- label : str
- steps : bool
- integers_only : bool
SurfaceOver2DRangeSeries, ParametricSurfaceSeries support the following:
aesthetics:
- surface_color : function which returns a float.
"""
def __init__(self, *args, **kwargs):
super(Plot, self).__init__()
# Options for the graph as a whole.
# The possible values for each option are described in the docstring of
# Plot. They are based purely on convention, no checking is done.
self.title = None
self.xlabel = None
self.ylabel = None
self.aspect_ratio = 'auto'
self.xlim = None
self.ylim = None
self.axis_center = 'auto'
self.axis = True
self.xscale = 'linear'
self.yscale = 'linear'
self.legend = False
self.autoscale = True
self.margin = 0
# Contains the data objects to be plotted. The backend should be smart
# enough to iterate over this list.
self._series = []
self._series.extend(args)
# The backend type. On every show() a new backend instance is created
# in self._backend which is tightly coupled to the Plot instance
# (thanks to the parent attribute of the backend).
self.backend = DefaultBackend
# The keyword arguments should only contain options for the plot.
for key, val in kwargs.iteritems():
if hasattr(self, key):
setattr(self, key, val)
def show(self):
# TODO move this to the backend (also for save)
if hasattr(self, '_backend'):
self._backend.close()
self._backend = self.backend(self)
self._backend.show()
def save(self, path):
if hasattr(self, '_backend'):
self._backend.close()
self._backend = self.backend(self)
self._backend.save(path)
def __str__(self):
series_strs = [('[%d]: ' % i) + str(s)
for i, s in enumerate(self._series)]
return 'Plot object containing:\n' + '\n'.join(series_strs)
def __getitem__(self, index):
return self._series[index]
def __setitem__(self, index, *args):
if len(args) == 1 and isinstance(args[0], BaseSeries):
self._series[index] = args
def __delitem__(self, index):
del self._series[index]
def append(self, *args):
"""Adds one more graph to the figure."""
if len(args) == 1 and isinstance(args[0], BaseSeries):
self._series.append(*args)
else:
self._series.append(Series(*args))
def extend(self, arg):
"""Adds the series from another plot or a list of series."""
if isinstance(arg, Plot):
self._series.extend(arg._series)
else:
self._series.extend(arg)
##############################################################################
# Data Series
##############################################################################
#TODO more general way to calculate aesthetics (see get_color_array)
### The base class for all series
class BaseSeries(object):
"""Base class for the data objects containing stuff to be plotted.
The backend should check if it supports the data series that it's given.
(eg TextBackend supports only LineOver1DRange).
It's the backend responsibility to know how to use the class of
data series that it's given.
Some data series classes are grouped (using a class attribute like is_2Dline)
according to the api they present (based only on convention). The backend is
not obliged to use that api (eg. The LineOver1DRange belongs to the
is_2Dline group and presents the get_points method, but the
TextBackend does not use the get_points method).
"""
# Some flags follow. The rationale for using flags instead of checking base
# classes is that setting multiple flags is simpler than multiple
# inheritance.
is_2Dline = False
# Some of the backends expect:
# - get_points returning 1D np.arrays list_x, list_y
# - get_segments returning np.array (done in Line2DBaseSeries)
# - get_color_array returning 1D np.array (done in Line2DBaseSeries)
# with the colors calculated at the points from get_points
is_3Dline = False
# Some of the backends expect:
# - get_points returning 1D np.arrays list_x, list_y, list_z
# - get_segments returning np.array (done in Line2DBaseSeries)
# - get_color_array returning 1D np.array (done in Line2DBaseSeries)
# with the colors calculated at the points from get_points
is_3Dsurface = False
# Some of the backends expect:
# - get_meshes returning mesh_x, mesh_y, mesh_z (2D np.arrays)
# - get_points an alias for get_meshes
is_contour = False
# Some of the backends expect:
# - get_meshes returning mesh_x, mesh_y, mesh_z (2D np.arrays)
# - get_points an alias for get_meshes
is_implicit = False
# Some of the backends expect:
# - get_meshes returning mesh_x (1D array), mesh_y (1D array),
# mesh_z (2D np.array)
# - get_points an alias for get_meshes
#Different from is_contour as the colormap in backend will be
#different
is_parametric = False
# The calculation of aesthetics expects:
# - get_parameter_points returning one or two np.arrays (1D or 2D)
# used for calculation aesthetics
def __init__(self):
super(BaseSeries, self).__init__()
@property
def is_3D(self):
flags3D = [
self.is_3Dline,
self.is_3Dsurface
]
return any(flags3D)
@property
def is_line(self):
flagslines = [
self.is_2Dline,
self.is_3Dline
]
return any(flagslines)
### 2D lines
class Line2DBaseSeries(BaseSeries):
"""A base class for 2D lines.
- adding the label, steps and only_integers options
- making is_2Dline true
- defining get_segments and get_color_array
"""
is_2Dline = True
_dim = 2
def __init__(self):
super(Line2DBaseSeries, self).__init__()
self.label = None
self.steps = False
self.only_integers = False
self.line_color = None
def get_segments(self):
np = import_module('numpy')
points = self.get_points()
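# When steps is enabled, duplicate and interleave the x and y samples so the segments form a staircase.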
if self.steps is True:
x = np.array((points[0], points[0])).T.flatten()[1:]
y = np.array((points[1], points[1])).T.flatten()[:-1]
points = (x, y)
points = np.ma.array(points).T.reshape(-1, 1, self._dim)
return np.ma.concatenate([points[:-1], points[1:]], axis=1)
def get_color_array(self):
np = import_module('numpy')
c = self.line_color
if hasattr(c, '__call__'):
f = np.vectorize(c)
arity = len(getargspec(c)[0])
if arity == 1 and self.is_parametric:
x = self.get_parameter_points()
return f(centers_of_segments(x))
else:
variables = map(centers_of_segments, self.get_points())
if arity == 1:
return f(variables[0])
elif arity == 2:
return f(*variables[:2])
else: # only if the line is 3D (otherwise raises an error)
return f(*variables)
else:
return c*np.ones(self.nb_of_points)
class List2DSeries(Line2DBaseSeries):
"""Representation for a line consisting of list of points."""
def __init__(self, list_x, list_y):
np = import_module('numpy')
super(List2DSeries, self).__init__()
self.list_x = np.array(list_x)
self.list_y = np.array(list_y)
self.label = 'list'
def __str__(self):
return 'list plot'
def get_points(self):
return (self.list_x, self.list_y)
class LineOver1DRangeSeries(Line2DBaseSeries):
"""Representation for a line consisting of a sympy expression over a range."""
def __init__(self, expr, var_start_end, **kwargs):
super(LineOver1DRangeSeries, self).__init__()
self.expr = sympify(expr)
self.label = str(self.expr)
self.var = sympify(var_start_end[0])
self.start = float(var_start_end[1])
self.end = float(var_start_end[2])
self.nb_of_points = kwargs.get('nb_of_points', 300)
self.adaptive = kwargs.get('adaptive', True)
self.depth = kwargs.get('depth', 12)
self.line_color = kwargs.get('line_color', None)
def __str__(self):
return 'cartesian line: %s for %s over %s' % (
str(self.expr), str(self.var), str((self.start, self.end)))
def get_segments(self):
"""
Adaptively gets segments for plotting.
The adaptive sampling is done by recursively checking if three
points are almost collinear. If they are not collinear, then more
points are added between those points.
References
==========
[1] Adaptive polygonal approximation of parametric curves,
Luiz Henrique de Figueiredo.
"""
if self.only_integers or not self.adaptive:
return super(LineOver1DRangeSeries, self).get_segments()
else:
f = lambdify([self.var], self.expr)
list_segments = []
def sample(p, q, depth):
""" Samples recursively if three points are almost collinear.
For depth < 6, points are added irrespective of whether they
satisfy the collinearity condition or not. The maximum depth
allowed is 12.
"""
np = import_module('numpy')
#Randomly sample to avoid aliasing.
random = 0.45 + np.random.rand() * 0.1
xnew = p[0] + random * (q[0] - p[0])
ynew = f(xnew)
new_point = np.array([xnew, ynew])
#Maximum depth
if depth > self.depth:
list_segments.append([p, q])
#Sample irrespective of whether the line is flat till the
#depth of 6. We are not using linspace to avoid aliasing.
elif depth < 6:
sample(p, new_point, depth + 1)
sample(new_point, q, depth + 1)
#Sample ten points if complex values are encountered
#at both ends. If there is a real value in between, then
#sample those points further.
elif p[1] is None and q[1] is None:
xarray = np.linspace(p[0], q[0], 10)
yarray = map(f, xarray)
if any(y is not None for y in yarray):
for i in range(len(yarray) - 1):
if yarray[i] is not None or yarray[i + 1] is not None:
sample([xarray[i], yarray[i]],
[xarray[i + 1], yarray[i + 1]], depth + 1)
#Sample further if one of the end points is None (i.e. a complex
#value) or the three points are not almost collinear.
elif (p[1] is None or q[1] is None or new_point[1] is None
or not flat(p, new_point, q)):
sample(p, new_point, depth + 1)
sample(new_point, q, depth + 1)
else:
list_segments.append([p, q])
f_start = f(self.start)
f_end = f(self.end)
sample([self.start, f_start], [self.end, f_end], 0)
return list_segments
def get_points(self):
np = import_module('numpy')
if self.only_integers is True:
list_x = np.linspace(int(self.start), int(self.end),
num=int(self.end) - int(self.start) + 1)
else:
list_x = np.linspace(self.start, self.end, num=self.nb_of_points)
f = vectorized_lambdify([self.var], self.expr)
list_y = f(list_x)
return (list_x, list_y)
class Parametric2DLineSeries(Line2DBaseSeries):
"""Representation for a line consisting of two parametric sympy expressions
over a range."""
is_parametric = True
def __init__(self, expr_x, expr_y, var_start_end, **kwargs):
super(Parametric2DLineSeries, self).__init__()
self.expr_x = sympify(expr_x)
self.expr_y = sympify(expr_y)
self.label = "(%s, %s)" % (str(self.expr_x), str(self.expr_y))
self.var = sympify(var_start_end[0])
self.start = float(var_start_end[1])
self.end = float(var_start_end[2])
self.nb_of_points = kwargs.get('nb_of_points', 300)
self.adaptive = kwargs.get('adaptive', True)
self.depth = kwargs.get('depth', 12)
self.line_color = kwargs.get('line_color', None)
def __str__(self):
return 'parametric cartesian line: (%s, %s) for %s over %s' % (
str(self.expr_x), str(self.expr_y), str(self.var),
str((self.start, self.end)))
def get_parameter_points(self):
np = import_module('numpy')
return np.linspace(self.start, self.end, num=self.nb_of_points)
def get_points(self):
param = self.get_parameter_points()
fx = vectorized_lambdify([self.var], self.expr_x)
fy = vectorized_lambdify([self.var], self.expr_y)
list_x = fx(param)
list_y = fy(param)
return (list_x, list_y)
def get_segments(self):
"""
Adaptively gets segments for plotting.
The adaptive sampling is done by recursively checking if three
points are almost collinear. If they are not collinear, then more
points are added between those points.
References
==========
[1] Adaptive polygonal approximation of parametric curves,
Luiz Henrique de Figueiredo.
"""
if not self.adaptive:
return super(Parametric2DLineSeries, self).get_segments()
f_x = lambdify([self.var], self.expr_x)
f_y = lambdify([self.var], self.expr_y)
list_segments = []
def sample(param_p, param_q, p, q, depth):
""" Samples recursively if three points are almost collinear.
For depth < 6, points are added irrespective of whether they
satisfy the collinearity condition or not. The maximum depth
allowed is 12.
"""
#Randomly sample to avoid aliasing.
np = import_module('numpy')
random = 0.45 + np.random.rand() * 0.1
param_new = param_p + random * (param_q - param_p)
xnew = f_x(param_new)
ynew = f_y(param_new)
new_point = np.array([xnew, ynew])
#Maximum depth
if depth > self.depth:
list_segments.append([p, q])
#Sample irrespective of whether the line is flat till the
#depth of 6. We are not using linspace to avoid aliasing.
elif depth < 6:
sample(param_p, param_new, p, new_point, depth + 1)
sample(param_new, param_q, new_point, q, depth + 1)
#Sample ten points if complex values are encountered
#at both ends. If there is a real value in between, then
#sample those points further.
elif ((p[0] is None and q[0] is None) or
(p[1] is None and q[1] is None)):
param_array = np.linspace(param_p, param_q, 10)
x_array = map(f_x, param_array)
y_array = map(f_y, param_array)
if any(x is not None and y is not None
for x, y in zip(x_array, y_array)):
for i in range(len(y_array) - 1):
if ((x_array[i] is not None and y_array[i] is not None) or
(x_array[i + 1] is not None and y_array[i + 1] is not None)):
point_a = [x_array[i], y_array[i]]
point_b = [x_array[i + 1], y_array[i + 1]]
sample(param_array[i], param_array[i + 1], point_a,
point_b, depth + 1)
#Sample further if one of the end points is None (i.e. a complex
#value) or the three points are not almost collinear.
elif (p[0] is None or p[1] is None
or q[1] is None or q[0] is None
or not flat(p, new_point, q)):
sample(param_p, param_new, p, new_point, depth + 1)
sample(param_new, param_q, new_point, q, depth + 1)
else:
list_segments.append([p, q])
f_start_x = f_x(self.start)
f_start_y = f_y(self.start)
start = [f_start_x, f_start_y]
f_end_x = f_x(self.end)
f_end_y = f_y(self.end)
end = [f_end_x, f_end_y]
sample(self.start, self.end, start, end, 0)
return list_segments
### 3D lines
class Line3DBaseSeries(Line2DBaseSeries):
"""A base class for 3D lines.
Most of the stuff is derived from Line2DBaseSeries."""
is_2Dline = False
is_3Dline = True
_dim = 3
def __init__(self):
super(Line3DBaseSeries, self).__init__()
class Parametric3DLineSeries(Line3DBaseSeries):
"""Representation for a 3D line consisting of two parametric sympy
expressions and a range."""
def __init__(self, expr_x, expr_y, expr_z, var_start_end, **kwargs):
super(Parametric3DLineSeries, self).__init__()
self.expr_x = sympify(expr_x)
self.expr_y = sympify(expr_y)
self.expr_z = sympify(expr_z)
self.label = "(%s, %s)" % (str(self.expr_x), str(self.expr_y))
self.var = sympify(var_start_end[0])
self.start = float(var_start_end[1])
self.end = float(var_start_end[2])
self.nb_of_points = kwargs.get('nb_of_points', 300)
self.line_color = kwargs.get('line_color', None)
def __str__(self):
return '3D parametric cartesian line: (%s, %s, %s) for %s over %s' % (
str(self.expr_x), str(self.expr_y), str(self.expr_z),
str(self.var), str((self.start, self.end)))
def get_parameter_points(self):
np = import_module('numpy')
return np.linspace(self.start, self.end, num=self.nb_of_points)
def get_points(self):
param = self.get_parameter_points()
fx = vectorized_lambdify([self.var], self.expr_x)
fy = vectorized_lambdify([self.var], self.expr_y)
fz = vectorized_lambdify([self.var], self.expr_z)
list_x = fx(param)
list_y = fy(param)
list_z = fz(param)
return (list_x, list_y, list_z)
### Surfaces
class SurfaceBaseSeries(BaseSeries):
"""A base class for 3D surfaces."""
is_3Dsurface = True
def __init__(self):
super(SurfaceBaseSeries, self).__init__()
self.surface_color = None
def get_color_array(self):
np = import_module('numpy')
c = self.surface_color
if callable(c):
f = np.vectorize(c)
arity = len(getargspec(c)[0])
if self.is_parametric:
variables = map(centers_of_faces, self.get_parameter_meshes())
if arity == 1:
return f(variables[0])
elif arity == 2:
return f(*variables)
variables = map(centers_of_faces, self.get_meshes())
if arity == 1:
return f(variables[0])
elif arity == 2:
return f(*variables[:2])
else:
return f(*variables)
else:
return c*np.ones(self.nb_of_points)
class SurfaceOver2DRangeSeries(SurfaceBaseSeries):
"""Representation for a 3D surface consisting of a sympy expression and 2D
range."""
def __init__(self, expr, var_start_end_x, var_start_end_y, **kwargs):
super(SurfaceOver2DRangeSeries, self).__init__()
self.expr = sympify(expr)
self.var_x = sympify(var_start_end_x[0])
self.start_x = float(var_start_end_x[1])
self.end_x = float(var_start_end_x[2])
self.var_y = sympify(var_start_end_y[0])
self.start_y = float(var_start_end_y[1])
self.end_y = float(var_start_end_y[2])
self.nb_of_points_x = kwargs.get('nb_of_points_x', 50)
self.nb_of_points_y = kwargs.get('nb_of_points_y', 50)
self.surface_color = kwargs.get('surface_color', None)
def __str__(self):
return ('cartesian surface: %s for'
' %s over %s and %s over %s') % (
str(self.expr),
str(self.var_x),
str((self.start_x, self.end_x)),
str(self.var_y),
str((self.start_y, self.end_y)))
def get_meshes(self):
np = import_module('numpy')
mesh_x, mesh_y = np.meshgrid(np.linspace(self.start_x, self.end_x,
num=self.nb_of_points_x),
np.linspace(self.start_y, self.end_y,
num=self.nb_of_points_y))
f = vectorized_lambdify((self.var_x, self.var_y), self.expr)
return (mesh_x, mesh_y, f(mesh_x, mesh_y))
class ParametricSurfaceSeries(SurfaceBaseSeries):
"""Representation for a 3D surface consisting of three parametric sympy
expressions and a range."""
is_parametric = True
def __init__(
self, expr_x, expr_y, expr_z, var_start_end_u, var_start_end_v,
**kwargs):
super(ParametricSurfaceSeries, self).__init__()
self.expr_x = sympify(expr_x)
self.expr_y = sympify(expr_y)
self.expr_z = sympify(expr_z)
self.var_u = sympify(var_start_end_u[0])
self.start_u = float(var_start_end_u[1])
self.end_u = float(var_start_end_u[2])
self.var_v = sympify(var_start_end_v[0])
self.start_v = float(var_start_end_v[1])
self.end_v = float(var_start_end_v[2])
self.nb_of_points_u = kwargs.get('nb_of_points_u', 50)
self.nb_of_points_v = kwargs.get('nb_of_points_v', 50)
self.surface_color = kwargs.get('surface_color', None)
def __str__(self):
return ('parametric cartesian surface: (%s, %s, %s) for'
' %s over %s and %s over %s') % (
str(self.expr_x),
str(self.expr_y),
str(self.expr_z),
str(self.var_u),
str((self.start_u, self.end_u)),
str(self.var_v),
str((self.start_v, self.end_v)))
def get_parameter_meshes(self):
np = import_module('numpy')
return np.meshgrid(np.linspace(self.start_u, self.end_u,
num=self.nb_of_points_u),
np.linspace(self.start_v, self.end_v,
num=self.nb_of_points_v))
def get_meshes(self):
mesh_u, mesh_v = self.get_parameter_meshes()
fx = vectorized_lambdify((self.var_u, self.var_v), self.expr_x)
fy = vectorized_lambdify((self.var_u, self.var_v), self.expr_y)
fz = vectorized_lambdify((self.var_u, self.var_v), self.expr_z)
return (fx(mesh_u, mesh_v), fy(mesh_u, mesh_v), fz(mesh_u, mesh_v))
### Contours
class ContourSeries(BaseSeries):
"""Representation for a contour plot."""
#The code is mostly repetition of SurfaceOver2DRange.
#XXX: Presently not used in any of those functions.
#XXX: Add contour plot and use this series.
is_contour = True
def __init__(self, expr, var_start_end_x, var_start_end_y):
super(ContourSeries, self).__init__()
self.nb_of_points_x = 50
self.nb_of_points_y = 50
self.expr = sympify(expr)
self.var_x = sympify(var_start_end_x[0])
self.start_x = float(var_start_end_x[1])
self.end_x = float(var_start_end_x[2])
self.var_y = sympify(var_start_end_y[0])
self.start_y = float(var_start_end_y[1])
self.end_y = float(var_start_end_y[2])
self.get_points = self.get_meshes
def __str__(self):
return ('contour: %s for '
'%s over %s and %s over %s') % (
str(self.expr),
str(self.var_x),
str((self.start_x, self.end_x)),
str(self.var_y),
str((self.start_y, self.end_y)))
def get_meshes(self):
np = import_module('numpy')
mesh_x, mesh_y = np.meshgrid(np.linspace(self.start_x, self.end_x,
num=self.nb_of_points_x),
np.linspace(self.start_y, self.end_y,
num=self.nb_of_points_y))
f = vectorized_lambdify((self.var_x, self.var_y), self.expr)
return (mesh_x, mesh_y, f(mesh_x, mesh_y))
##############################################################################
# Backends
##############################################################################
class BaseBackend(object):
def __init__(self, parent):
super(BaseBackend, self).__init__()
self.parent = parent
## don't have to check for the success of importing matplotlib in each case;
## we will only be using this backend if we can successfully import matplotlib
class MatplotlibBackend(BaseBackend):
def __init__(self, parent):
super(MatplotlibBackend, self).__init__(parent)
are_3D = [s.is_3D for s in self.parent._series]
self.matplotlib = import_module('matplotlib',
__import__kwargs={'fromlist': ['pyplot', 'cm', 'collections']},
min_module_version='1.1.0', catch=(RuntimeError,))
self.plt = self.matplotlib.pyplot
self.cm = self.matplotlib.cm
self.LineCollection = self.matplotlib.collections.LineCollection
if any(are_3D) and not all(are_3D):
raise ValueError('The matplotlib backend can not mix 2D and 3D.')
elif not any(are_3D):
self.fig = self.plt.figure()
self.ax = self.fig.add_subplot(111)
self.ax.spines['left'].set_position('zero')
self.ax.spines['right'].set_color('none')
self.ax.spines['bottom'].set_position('zero')
self.ax.spines['top'].set_color('none')
self.ax.spines['left'].set_smart_bounds(True)
self.ax.spines['bottom'].set_smart_bounds(True)
self.ax.xaxis.set_ticks_position('bottom')
self.ax.yaxis.set_ticks_position('left')
elif all(are_3D):
## mpl_toolkits.mplot3d is necessary for
## projection='3d'
mpl_toolkits = import_module('mpl_toolkits',
__import__kwargs={'fromlist': ['mplot3d']})
self.fig = self.plt.figure()
self.ax = self.fig.add_subplot(111, projection='3d')
def process_series(self):
parent = self.parent
for s in self.parent._series:
# Create the collections
if s.is_2Dline:
collection = self.LineCollection(s.get_segments())
self.ax.add_collection(collection)
elif s.is_contour:
self.ax.contour(*s.get_meshes())
elif s.is_3Dline:
# TODO too complicated, I blame matplotlib
mpl_toolkits = import_module('mpl_toolkits',
__import__kwargs={'fromlist': ['mplot3d']})
art3d = mpl_toolkits.mplot3d.art3d
collection = art3d.Line3DCollection(s.get_segments())
self.ax.add_collection(collection)
x, y, z = s.get_points()
self.ax.set_xlim((min(x), max(x)))
self.ax.set_ylim((min(y), max(y)))
self.ax.set_zlim((min(z), max(z)))
elif s.is_3Dsurface:
x, y, z = s.get_meshes()
collection = self.ax.plot_surface(x, y, z, cmap=self.cm.jet,
rstride=1, cstride=1,
linewidth=0.1)
elif s.is_implicit:
#Smart bounds have to be set to False for implicit plots.
self.ax.spines['left'].set_smart_bounds(False)
self.ax.spines['bottom'].set_smart_bounds(False)
points = s.get_raster()
if len(points) == 2:
#interval math plotting
x, y = _matplotlib_list(points[0])
self.ax.fill(x, y, facecolor='b', edgecolor='None' )
else:
# use contourf or contour depending on whether it is
# an inequality or equality.
#XXX: ``contour`` plots multiple lines. Should be fixed.
ListedColormap = self.matplotlib.colors.ListedColormap
colormap = ListedColormap(["white", "blue"])
xarray, yarray, zarray, plot_type = points
if plot_type == 'contour':
self.ax.contour(xarray, yarray, zarray,
contours=(0, 0), fill=False, cmap=colormap)
else:
self.ax.contourf(xarray, yarray, zarray, cmap=colormap)
else:
raise ValueError('The matplotlib backend supports only '
'is_2Dline, is_3Dline, is_3Dsurface and '
'is_contour objects.')
# Customise the collections with the corresponding per-series
# options.
if hasattr(s, 'label'):
collection.set_label(s.label)
if s.is_line and s.line_color:
if isinstance(s.line_color, (float, int)) or callable(s.line_color):
color_array = s.get_color_array()
collection.set_array(color_array)
else:
collection.set_color(s.line_color)
if s.is_3Dsurface and s.surface_color:
if self.matplotlib.__version__ < "1.2.0": # TODO in the distant future remove this check
warnings.warn('The version of matplotlib is too old to use surface coloring.')
elif isinstance(s.surface_color, (float, int)) or callable(s.surface_color):
color_array = s.get_color_array()
color_array = color_array.reshape(color_array.size)
collection.set_array(color_array)
else:
collection.set_color(s.surface_color)
# Set global options.
# TODO The 3D stuff
# XXX The order of those is important.
mpl_toolkits = import_module('mpl_toolkits',
__import__kwargs={'fromlist': ['mplot3d']})
Axes3D = mpl_toolkits.mplot3d.Axes3D
if parent.xscale and not isinstance(self.ax, Axes3D):
self.ax.set_xscale(parent.xscale)
if parent.yscale and not isinstance(self.ax, Axes3D):
self.ax.set_yscale(parent.yscale)
if parent.xlim:
self.ax.set_xlim(parent.xlim)
if parent.ylim:
self.ax.set_ylim(parent.ylim)
if not isinstance(self.ax, Axes3D) or self.matplotlib.__version__ >= '1.2.0': # XXX in the distant future remove this check
self.ax.set_autoscale_on(parent.autoscale)
if parent.axis_center:
val = parent.axis_center
if isinstance(self.ax, Axes3D):
pass
elif val == 'center':
self.ax.spines['left'].set_position('center')
self.ax.spines['bottom'].set_position('center')
elif val == 'auto':
xl, xh = self.ax.get_xlim()
yl, yh = self.ax.get_ylim()
pos_left = ('data', 0) if xl*xh <= 0 else 'center'
pos_bottom = ('data', 0) if yl*yh <= 0 else 'center'
self.ax.spines['left'].set_position(pos_left)
self.ax.spines['bottom'].set_position(pos_bottom)
else:
self.ax.spines['left'].set_position(('data', val[0]))
self.ax.spines['bottom'].set_position(('data', val[1]))
if not parent.axis:
self.ax.set_axis_off()
if parent.legend:
self.ax.legend()
self.ax.legend_.set_visible(parent.legend)
if parent.margin:
self.ax.set_xmargin(parent.margin)
self.ax.set_ymargin(parent.margin)
if parent.title:
self.ax.set_title(parent.title)
if parent.xlabel:
self.ax.set_xlabel(parent.xlabel, position=(1, 0))
if parent.ylabel:
self.ax.set_ylabel(parent.ylabel, position=(0, 1))
def show(self):
self.process_series()
#TODO after fixing https://github.com/ipython/ipython/issues/1255
# you can uncomment the next line and remove the pyplot.show() call
#self.fig.show()
if _show:
self.plt.show()
def save(self, path):
self.process_series()
self.fig.savefig(path)
def close(self):
self.plt.close(self.fig)
class TextBackend(BaseBackend):
def __init__(self, parent):
super(TextBackend, self).__init__(parent)
def show(self):
if len(self.parent._series) != 1:
raise ValueError(
'The TextBackend supports only one graph per Plot.')
elif not isinstance(self.parent._series[0], LineOver1DRangeSeries):
raise ValueError(
'The TextBackend supports only expressions over a 1D range')
else:
ser = self.parent._series[0]
textplot(ser.expr, ser.start, ser.end)
def close(self):
pass
class DefaultBackend(BaseBackend):
def __new__(cls, parent):
matplotlib = import_module('matplotlib', min_module_version='1.1.0', catch=(RuntimeError,))
if matplotlib:
return MatplotlibBackend(parent)
else:
return TextBackend(parent)
plot_backends = {
'matplotlib': MatplotlibBackend,
'text': TextBackend,
'default': DefaultBackend
}
##############################################################################
# Finding the centers of line segments or mesh faces
##############################################################################
def centers_of_segments(array):
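# Midpoints of consecutive samples; used when evaluating per-segment aesthetics.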
np = import_module('numpy')
return np.average(np.vstack((array[:-1], array[1:])), 0)
def centers_of_faces(array):
np = import_module('numpy')
return np.average(np.dstack((array[:-1, :-1],
array[1:, :-1],
array[:-1, 1:],
array[1:, 1:],
)), 2)
def flat(x, y, z, eps=1e-3):
"""Checks whether three points are almost collinear"""
np = import_module('numpy')
vector_a = x - y
vector_b = z - y
dot_product = np.dot(vector_a, vector_b)
vector_a_norm = np.linalg.norm(vector_a)
vector_b_norm = np.linalg.norm(vector_b)
cos_theta = dot_product / (vector_a_norm * vector_b_norm)
return abs(cos_theta + 1) < eps
def _matplotlib_list(interval_list):
"""
Returns lists for matplotlib ``fill`` command from a list of bounding
rectangular intervals
"""
xlist = []
ylist = []
if len(interval_list):
for intervals in interval_list:
intervalx = intervals[0]
intervaly = intervals[1]
xlist.extend([intervalx.start, intervalx.start,
intervalx.end, intervalx.end, None])
ylist.extend([intervaly.start, intervaly.end,
intervaly.end, intervaly.start, None])
else:
#XXX Ugly hack. Matplotlib does not accept empty lists for ``fill``
xlist.extend([None, None, None, None])
ylist.extend([None, None, None, None])
return xlist, ylist
####New API for plotting module ####
# TODO: Add color arrays for plots.
# TODO: Add more plotting options for 3d plots.
# TODO: Adaptive sampling for 3D plots.
@doctest_depends_on(modules=('numpy', 'matplotlib',))
def plot(*args, **kwargs):
"""
Plots a function of a single variable.
The plotting uses an adaptive algorithm which samples recursively in order
to plot the function accurately. The adaptive algorithm picks a random
point near the midpoint of each pair of points that requires further
sampling, so the same plot can appear slightly different between runs.
Usage
=====
Single Plot
``plot(expr, range, **kwargs)``
If the range is not specified, then a default range of (-10, 10) is used.
Multiple plots with same range.
``plot(expr1, expr2, ..., range, **kwargs)``
If the range is not specified, then a default range of (-10, 10) is used.
Multiple plots with different ranges.
``plot((expr1, range), (expr2, range), ..., **kwargs)``
Range has to be specified for every expression.
Default range may change in the future if a more advanced default range
detection algorithm is implemented.
Arguments
=========
``expr`` : Expression representing the function of single variable
``range``: (x, 0, 5), A 3-tuple denoting the range of the free variable.
Keyword Arguments
=================
Arguments for ``LineOver1DRangeSeries`` class:
``adaptive``: Boolean. The default value is set to True. Set adaptive to False and
specify ``nb_of_points`` if uniform sampling is required.
``depth``: int Recursion depth of the adaptive algorithm. A depth of value ``n``
samples a maximum of `2^{n}` points.
``nb_of_points``: int. Used when the ``adaptive`` is set to False. The function
is uniformly sampled at ``nb_of_points`` number of points.
Aesthetics options:
``line_color``: float. Specifies the color for the plot.
See ``Plot`` to see how to set color for the plots.
If there are multiple plots, then the same series arguments are applied to
all the plots. If you want to set these options separately, you can index
the ``Plot`` object returned and set it.
Arguments for ``Plot`` class:
``title`` : str. Title of the plot. It is set to the latex representation of
the expression, if the plot has only one expression.
``xlabel`` : str. Label for the x - axis.
``ylabel`` : str. Label for the y - axis.
``xscale``: {'linear', 'log'} Sets the scaling of the x - axis.
``yscale``: {'linear', 'log'} Sets the scaling of the y - axis.
``axis_center``: tuple of two floats denoting the coordinates of the center or
{'center', 'auto'}
``xlim`` : tuple of two floats, denoting the x - axis limits.
``ylim`` : tuple of two floats, denoting the y - axis limits.
Examples
========
>>> from sympy import symbols
>>> from sympy.plotting import plot
>>> x = symbols('x')
Single Plot
>>> plot(x**2, (x, -5, 5))
Plot object containing:
[0]: cartesian line: x**2 for x over (-5.0, 5.0)
Multiple plots with single range.
>>> plot(x, x**2, x**3, (x, -5, 5))
Plot object containing:
[0]: cartesian line: x for x over (-5.0, 5.0)
[1]: cartesian line: x**2 for x over (-5.0, 5.0)
[2]: cartesian line: x**3 for x over (-5.0, 5.0)
Multiple plots with different ranges.
>>> plot((x**2, (x, -6, 6)), (x, (x, -5, 5)))
Plot object containing:
[0]: cartesian line: x**2 for x over (-6.0, 6.0)
[1]: cartesian line: x for x over (-5.0, 5.0)
No adaptive sampling.
>>> plot(x**2, adaptive=False, nb_of_points=400)
Plot object containing:
[0]: cartesian line: x**2 for x over (-10.0, 10.0)
See Also
========
Plot, LineOver1DRangeSeries.
"""
args = map(sympify, args)
show = kwargs.pop('show', True)
series = []
plot_expr = check_arguments(args, 1, 1)
series = [LineOver1DRangeSeries(*arg, **kwargs) for arg in plot_expr]
plots = Plot(*series, **kwargs)
if show:
plots.show()
return plots
@doctest_depends_on(modules=('numpy', 'matplotlib',))
def plot_parametric(*args, **kwargs):
"""
Plots a 2D parametric plot.
The plotting uses an adaptive algorithm which samples recursively in order
to plot the curve accurately. The adaptive algorithm picks a random
point near the midpoint of each pair of points that requires further
sampling, so the same plot can appear slightly different between runs.
Usage
=====
Single plot.
``plot_parametric(expr_x, expr_y, range, **kwargs)``
If the range is not specified, then a default range of (-10, 10) is used.
Multiple plots with same range.
``plot_parametric((expr1_x, expr1_y), (expr2_x, expr2_y), range, **kwargs)``
If the range is not specified, then a default range of (-10, 10) is used.
Multiple plots with different ranges.
``plot_parametric((expr_x, expr_y, range), ..., **kwargs)``
Range has to be specified for every expression.
Default range may change in the future if a more advanced default range
detection algorithm is implemented.
Arguments
=========
``expr_x`` : Expression representing the function along x.
``expr_y`` : Expression representing the function along y.
``range``: (u, 0, 5), A 3-tuple denoting the range of the parameter
variable.
Keyword Arguments
=================
Arguments for ``Parametric2DLineSeries`` class:
``adaptive``: Boolean. The default value is set to True. Set adaptive to
False and specify ``nb_of_points`` if uniform sampling is required.
``depth``: int Recursion depth of the adaptive algorithm. A depth of
value ``n`` samples a maximum of `2^{n}` points.
``nb_of_points``: int. Used when the ``adaptive`` is set to False. The
function is uniformly sampled at ``nb_of_points`` number of points.
Aesthetics
----------
``line_color``: function which returns a float. Specifies the color for the
plot. See ``sympy.plotting.Plot`` for more details.
If there are multiple plots, then the same Series arguments are applied to
all the plots. If you want to set these options separately, you can index
the returned ``Plot`` object and set it.
Arguments for ``Plot`` class:
``xlabel`` : str. Label for the x - axis.
``ylabel`` : str. Label for the y - axis.
``xscale``: {'linear', 'log'} Sets the scaling of the x - axis.
``yscale``: {'linear', 'log'} Sets the scaling of the y - axis.
``axis_center``: tuple of two floats denoting the coordinates of the center
or {'center', 'auto'}
``xlim`` : tuple of two floats, denoting the x - axis limits.
``ylim`` : tuple of two floats, denoting the y - axis limits.
Examples
========
>>> from sympy import symbols, cos, sin
>>> from sympy.plotting import plot_parametric
>>> u = symbols('u')
Single Parametric plot
>>> plot_parametric(cos(u), sin(u), (u, -5, 5))
Plot object containing:
[0]: parametric cartesian line: (cos(u), sin(u)) for u over (-5.0, 5.0)
Multiple parametric plot with single range.
>>> plot_parametric((cos(u), sin(u)), (u, cos(u)))
Plot object containing:
[0]: parametric cartesian line: (cos(u), sin(u)) for u over (-10.0, 10.0)
[1]: parametric cartesian line: (u, cos(u)) for u over (-10.0, 10.0)
Multiple parametric plots.
>>> plot_parametric((cos(u), sin(u), (u, -5, 5)),
... (cos(u), u, (u, -5, 5)))
Plot object containing:
[0]: parametric cartesian line: (cos(u), sin(u)) for u over (-5.0, 5.0)
[1]: parametric cartesian line: (cos(u), u) for u over (-5.0, 5.0)
See Also
========
Plot, Parametric2DLineSeries
"""
args = map(sympify, args)
show = kwargs.pop('show', True)
series = []
plot_expr = check_arguments(args, 2, 1)
series = [Parametric2DLineSeries(*arg) for arg in plot_expr]
plots = Plot(*series, **kwargs)
if show:
plots.show()
return plots
@doctest_depends_on(modules=('numpy', 'matplotlib',))
def plot3d_parametric_line(*args, **kwargs):
"""
Plots a 3D parametric line plot.
Usage
=====
Single plot:
``plot3d_parametric_line(expr_x, expr_y, expr_z, range, **kwargs)``
If the range is not specified, then a default range of (-10, 10) is used.
Multiple plots.
``plot3d_parametric_line((expr_x, expr_y, expr_z, range), ..., **kwargs)``
Ranges have to be specified for every expression.
Default range may change in the future if a more advanced default range
detection algorithm is implemented.
Arguments
=========
``expr_x`` : Expression representing the function along x.
``expr_y`` : Expression representing the function along y.
``expr_z`` : Expression representing the function along z.
``range``: ``(u, 0, 5)``, A 3-tuple denoting the range of the parameter
variable.
Keyword Arguments
=================
Arguments for ``Parametric3DLineSeries`` class.
``nb_of_points``: The range is uniformly sampled at ``nb_of_points``
number of points.
Aesthetics:
``line_color``: function which returns a float. Specifies the color for the
plot. See ``sympy.plotting.Plot`` for more details.
If there are multiple plots, then the same series arguments are applied to
all the plots. If you want to set these options separately, you can index
the returned ``Plot`` object and set it.
Arguments for ``Plot`` class.
``title`` : str. Title of the plot.
Examples
========
>>> from sympy import symbols, cos, sin
>>> from sympy.plotting import plot3d_parametric_line
>>> u = symbols('u')
Single plot.
>>> plot3d_parametric_line(cos(u), sin(u), u, (u, -5, 5))
Plot object containing:
[0]: 3D parametric cartesian line: (cos(u), sin(u), u) for u over (-5.0, 5.0)
Multiple plots.
>>> plot3d_parametric_line((cos(u), sin(u), u, (u, -5, 5)),
... (sin(u), u**2, u, (u, -5, 5)))
Plot object containing:
[0]: 3D parametric cartesian line: (cos(u), sin(u), u) for u over (-5.0, 5.0)
[1]: 3D parametric cartesian line: (sin(u), u**2, u) for u over (-5.0, 5.0)
See Also
========
Plot, Parametric3DLineSeries
"""
args = map(sympify, args)
show = kwargs.pop('show', True)
series = []
plot_expr = check_arguments(args, 3, 1)
series = [Parametric3DLineSeries(*arg) for arg in plot_expr]
plots = Plot(*series, **kwargs)
if show:
plots.show()
return plots
@doctest_depends_on(modules=('numpy', 'matplotlib',))
def plot3d(*args, **kwargs):
"""
Plots a 3D surface plot.
Usage
=====
Single plot
``plot3d(expr, range_x, range_y, **kwargs)``
If the ranges are not specified, then a default range of (-10, 10) is used.
Multiple plot with the same range.
``plot3d(expr1, expr2, range_x, range_y, **kwargs)``
If the ranges are not specified, then a default range of (-10, 10) is used.
Multiple plots with different ranges.
``plot3d((expr1, range_x, range_y), (expr2, range_x, range_y), ..., **kwargs)``
Ranges have to be specified for every expression.
Default range may change in the future if a more advanced default range
detection algorithm is implemented.
Arguments
=========
``expr`` : Expression representing the function along x.
``range_x``: (x, 0, 5), A 3-tuple denoting the range of the x
variable.
``range_y``: (y, 0, 5), A 3-tuple denoting the range of the y
variable.
Keyword Arguments
=================
Arguments for ``SurfaceOver2DRangeSeries`` class:
``nb_of_points_x``: int. The x range is sampled uniformly at
``nb_of_points_x`` points.
``nb_of_points_y``: int. The y range is sampled uniformly at
``nb_of_points_y`` points.
Aesthetics:
``surface_color``: Function which returns a float. Specifies the color for
the surface of the plot. See ``sympy.plotting.Plot`` for more details.
If there are multiple plots, then the same series arguments are applied to
all the plots. If you want to set these options separately, you can index
the returned ``Plot`` object and set it.
Arguments for ``Plot`` class:
``title`` : str. Title of the plot.
Examples
========
>>> from sympy import symbols
>>> from sympy.plotting import plot3d
>>> x, y = symbols('x y')
Single plot
>>> plot3d(x*y, (x, -5, 5), (y, -5, 5))
Plot object containing:
[0]: cartesian surface: x*y for x over (-5.0, 5.0) and y over (-5.0, 5.0)
Multiple plots with same range
>>> plot3d(x*y, -x*y, (x, -5, 5), (y, -5, 5))
Plot object containing:
[0]: cartesian surface: x*y for x over (-5.0, 5.0) and y over (-5.0, 5.0)
[1]: cartesian surface: -x*y for x over (-5.0, 5.0) and y over (-5.0, 5.0)
Multiple plots with different ranges.
>>> plot3d((x**2 + y**2, (x, -5, 5), (y, -5, 5)),
... (x*y, (x, -3, 3), (y, -3, 3)))
Plot object containing:
[0]: cartesian surface: x**2 + y**2 for x over (-5.0, 5.0) and y over (-5.0, 5.0)
[1]: cartesian surface: x*y for x over (-3.0, 3.0) and y over (-3.0, 3.0)
See Also
========
Plot, SurfaceOver2DRangeSeries
"""
args = map(sympify, args)
show = kwargs.pop('show', True)
series = []
plot_expr = check_arguments(args, 1, 2)
series = [SurfaceOver2DRangeSeries(*arg) for arg in plot_expr]
plots = Plot(*series, **kwargs)
if show:
plots.show()
return plots
@doctest_depends_on(modules=('numpy', 'matplotlib',))
def plot3d_parametric_surface(*args, **kwargs):
"""
Plots a 3D parametric surface plot.
Usage
=====
Single plot.
``plot3d_parametric_surface(expr_x, expr_y, expr_z, range_u, range_v, **kwargs)``
If the ranges are not specified, then a default range of (-10, 10) is used.
Multiple plots.
``plot3d_parametric_surface((expr_x, expr_y, expr_z, range_u, range_v), ..., **kwargs)``
Ranges have to be specified for every expression.
Default range may change in the future if a more advanced default range
detection algorithm is implemented.
Arguments
=========
``expr_x``: Expression representing the function along ``x``.
``expr_y``: Expression representing the function along ``y``.
``expr_z``: Expression representing the function along ``z``.
``range_u``: ``(u, 0, 5)``, A 3-tuple denoting the range of the ``u``
variable.
``range_v``: ``(v, 0, 5)``, A 3-tuple denoting the range of the v
variable.
Keyword Arguments
=================
Arguments for ``ParametricSurfaceSeries`` class:
``nb_of_points_u``: int. The ``u`` range is sampled uniformly at
``nb_of_points_u`` points.
``nb_of_points_v``: int. The ``v`` range is sampled uniformly at
``nb_of_points_v`` points.
Aesthetics:
``surface_color``: Function which returns a float. Specifies the color for
the surface of the plot. See ``sympy.plotting.Plot`` for more details.
If there are multiple plots, then the same series arguments are applied for
all the plots. If you want to set these options separately, you can index
the returned ``Plot`` object and set it.
Arguments for ``Plot`` class:
``title`` : str. Title of the plot.
Examples
========
>>> from sympy import symbols, cos, sin
>>> from sympy.plotting import plot3d_parametric_surface
>>> u, v = symbols('u v')
Single plot.
>>> plot3d_parametric_surface(cos(u + v), sin(u - v), u - v,
... (u, -5, 5), (v, -5, 5))
Plot object containing:
[0]: parametric cartesian surface: (cos(u + v), sin(u - v), u - v) for u over (-5.0, 5.0) and v over (-5.0, 5.0)
See Also
========
Plot, ParametricSurfaceSeries
"""
args = map(sympify, args)
show = kwargs.pop('show', True)
series = []
plot_expr = check_arguments(args, 3, 2)
series = [ParametricSurfaceSeries(*arg) for arg in plot_expr]
plots = Plot(*series, **kwargs)
if show:
plots.show()
return plots
def check_arguments(args, expr_len, nb_of_free_symbols):
"""
Checks the arguments and converts into tuples of the
form (exprs, ranges)
>>> from sympy import plot, cos, sin, symbols
>>> from sympy.plotting.plot import check_arguments
>>> x,y,u,v = symbols('x y u v')
>>> check_arguments([cos(x), sin(x)], 2, 1)
[(cos(x), sin(x), (x, -10, 10))]
>>> check_arguments([x, x**2], 1, 1)
[(x, (x, -10, 10)), (x**2, (x, -10, 10))]
"""
if expr_len > 1 and isinstance(args[0], Expr):
# Multiple expressions same range.
# The arguments are tuples when the expression length is
# greater than 1.
assert len(args) >= expr_len
for i in range(len(args)):
if isinstance(args[i], Tuple):
break
else:
i = len(args) + 1
exprs = Tuple(*args[:i])
free_symbols = list(set_union(*[e.free_symbols for e in exprs]))
if len(args) == expr_len + nb_of_free_symbols:
#Ranges given
plots = [exprs + Tuple(*args[expr_len:])]
else:
default_range = Tuple(-10, 10)
ranges = []
for symbol in free_symbols:
ranges.append(Tuple(symbol) + default_range)
for i in range(len(free_symbols) - nb_of_free_symbols):
ranges.append(Tuple(Dummy()) + default_range)
plots = [exprs + Tuple(*ranges)]
return plots
if isinstance(args[0], Expr) or (isinstance(args[0], Tuple) and
len(args[0]) == expr_len and
expr_len != 3):
# Cannot handle the case where the number of expressions is 3. It is
# not possible to differentiate between expressions and ranges.
#Series of plots with same range
for i in range(len(args)):
if isinstance(args[i], Tuple) and len(args[i]) != expr_len:
break
if not isinstance(args[i], Tuple):
args[i] = Tuple(args[i])
else:
i = len(args) + 1
exprs = args[:i]
assert all(isinstance(e, Expr) for expr in exprs for e in expr)
free_symbols = list(set_union(*[e.free_symbols for expr in exprs
for e in expr]))
if len(free_symbols) > nb_of_free_symbols:
raise ValueError("The number of free_symbols in the expression "
"is greater than %d" % nb_of_free_symbols)
if len(args) == i + nb_of_free_symbols and isinstance(args[i], Tuple):
ranges = Tuple(*[range_expr for range_expr in args[
i:i + nb_of_free_symbols]])
plots = [expr + ranges for expr in exprs]
return plots
else:
            # Use default ranges.
default_range = Tuple(-10, 10)
ranges = []
for symbol in free_symbols:
ranges.append(Tuple(symbol) + default_range)
for i in range(len(free_symbols) - nb_of_free_symbols):
ranges.append(Tuple(Dummy()) + default_range)
ranges = Tuple(*ranges)
plots = [expr + ranges for expr in exprs]
return plots
elif isinstance(args[0], Tuple) and len(args[0]) == expr_len + nb_of_free_symbols:
        # Multiple plots with different ranges.
for arg in args:
for i in range(expr_len):
if not isinstance(arg[i], Expr):
raise ValueError("Expected an expression, given %s" %
str(arg[i]))
for i in range(nb_of_free_symbols):
if not len(arg[i + expr_len]) == 3:
raise ValueError("The ranges should be a tuple of "
"length 3, got %s" % str(arg[i + expr_len]))
return args
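# A hedged illustration of the three-expression / two-symbol branch used
# by plot3d_parametric_surface (the plotting functions sympify their
# arguments first, so the ranges arrive as sympy Tuples):
#
#     >>> from sympy import cos, sin, symbols, sympify
#     >>> u, v = symbols('u v')
#     >>> args = [sympify(a) for a in (cos(u + v), sin(u - v), u - v,
#     ...                              (u, -5, 5), (v, -5, 5))]
#     >>> check_arguments(args, 3, 2)
#     [(cos(u + v), sin(u - v), u - v, (u, -5, 5), (v, -5, 5))]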
|
lidavidm/mathics-heroku
|
venv/lib/python2.7/site-packages/sympy/plotting/plot.py
|
Python
|
gpl-3.0
| 61,671
|
# -*- encoding: utf-8 -*-
"""Test class for :class:`robottelo.cli.hostgroup.HostGroup` CLI.
@Requirement: Hostgroup
@CaseAutomation: Automated
@CaseLevel: Acceptance
@CaseComponent: CLI
@TestType: Functional
@CaseImportance: High
@Upstream: No
"""
from fauxfactory import gen_string
from robottelo.cli.base import CLIReturnCodeError
from robottelo.cli.contentview import ContentView
from robottelo.cli.hostgroup import HostGroup
from robottelo.cli.proxy import Proxy
from robottelo.cli.factory import (
make_architecture,
make_content_view,
make_domain,
make_environment,
make_hostgroup,
make_lifecycle_environment,
make_location,
make_medium,
make_org,
make_os,
make_partition_table,
make_subnet,
)
from robottelo.config import settings
from robottelo.datafactory import (
invalid_id_list,
invalid_values_list,
valid_hostgroups_list,
)
from robottelo.decorators import (
bz_bug_is_open,
run_only_on,
skip_if_bug_open,
tier1,
tier2,
)
from robottelo.test import CLITestCase
class HostGroupTestCase(CLITestCase):
"""Test class for Host Group CLI"""
@tier1
def test_positive_create_with_name(self):
"""Successfully creates an HostGroup.
@id: f5f2056f-d090-4e0d-8fb9-d29255a47908
@Assert: HostGroup is created.
"""
for name in valid_hostgroups_list():
with self.subTest(name):
hostgroup = make_hostgroup({'name': name})
self.assertEqual(hostgroup['name'], name)
@tier1
def test_negative_create_with_name(self):
"""Don't create an HostGroup with invalid data.
@id: 853a6d43-129a-497b-94f0-08dc622862f8
@Assert: HostGroup is not created.
"""
for name in invalid_values_list():
with self.subTest(name):
with self.assertRaises(CLIReturnCodeError):
HostGroup.create({'name': name})
@run_only_on('sat')
@tier1
def test_positive_create_with_env(self):
"""Check if hostgroup with environment can be created
@id: f1bfb333-90cf-4a9f-b183-cf77c1773247
@Assert: Hostgroup is created and has new environment assigned
"""
environment = make_environment()
hostgroup = make_hostgroup({'environment-id': environment['id']})
self.assertEqual(environment['name'], hostgroup['environment'])
@run_only_on('sat')
@tier1
def test_positive_create_with_loc(self):
"""Check if hostgroup with location can be created
@id: 84ae02a4-ea7e-43ce-87bd-7bbde3766b14
@Assert: Hostgroup is created and has new location assigned
"""
location = make_location()
hostgroup = make_hostgroup({'location-ids': location['id']})
self.assertIn(location['name'], hostgroup['locations'])
@run_only_on('sat')
@tier1
def test_positive_create_with_os(self):
"""Check if hostgroup with operating system can be created
@id: d12c5939-1aac-44f5-8aa3-a04a824f4e83
@Assert: Hostgroup is created and has operating system assigned
"""
os = make_os()
hostgroup = make_hostgroup({'operatingsystem-id': os['id']})
self.assertEqual(hostgroup['operating-system'], os['title'])
@run_only_on('sat')
@tier1
def test_positive_create_with_org(self):
"""Check if hostgroup with organization can be created
@id: 780d4b93-f35a-4c5b-a645-4053aed4c37b
@Assert: Hostgroup is created and has new organization assigned
"""
org = make_org()
hostgroup = make_hostgroup({'organization-ids': org['id']})
self.assertIn(org['name'], hostgroup['organizations'])
@tier1
def test_positive_create_with_orgs(self):
"""Check if hostgroup with multiple organizations can be created
@id: 32be4630-0032-4f5f-89d4-44f8d05fe585
@Assert: Hostgroup is created and has both new organizations assigned
"""
orgs = [make_org() for _ in range(2)]
hostgroup = make_hostgroup({
'organization-ids': [org['id'] for org in orgs],
})
self.assertEqual(
set(org['name'] for org in orgs),
set(hostgroup['organizations'])
)
@run_only_on('sat')
@tier1
def test_positive_create_with_puppet_ca_proxy(self):
"""Check if hostgroup with puppet CA proxy server can be created
@id: f7ea1c94-8a0e-4500-98b3-0ecd63b3ce3c
@Assert: Hostgroup is created and has puppet CA proxy server assigned
"""
puppet_proxy = Proxy.list({
'search': 'url = https://{0}:9090'.format(settings.server.hostname)
})[0]
hostgroup = make_hostgroup({'puppet-ca-proxy': puppet_proxy['name']})
self.assertEqual(puppet_proxy['id'], hostgroup['puppet-ca-proxy-id'])
@run_only_on('sat')
@tier1
def test_positive_create_with_puppet_proxy(self):
"""Check if hostgroup with puppet proxy server can be created
@id: 3a922d9f-7466-4565-b279-c1481f63a4ce
@Assert: Hostgroup is created and has puppet proxy server assigned
"""
puppet_proxy = Proxy.list({
'search': 'url = https://{0}:9090'.format(settings.server.hostname)
})[0]
hostgroup = make_hostgroup({'puppet-proxy': puppet_proxy['name']})
self.assertEqual(
puppet_proxy['id'],
hostgroup['puppet-master-proxy-id'],
)
@skip_if_bug_open('bugzilla', 1354544)
@run_only_on('sat')
@tier1
def test_positive_create_with_architecture(self):
"""Check if hostgroup with architecture can be created
@id: 21c619f4-7339-4fb0-9e29-e12dae65f943
@Assert: Hostgroup should be created and has architecture assigned
@BZ: 1354544
"""
arch = 'x86_64'
hostgroup = make_hostgroup({'architecture': arch})
self.assertEqual(arch, hostgroup['architecture'])
@run_only_on('sat')
@tier1
def test_positive_create_with_domain(self):
"""Check if hostgroup with domain can be created
@id: c468fcac-9e42-4ee6-a431-abe29b6848ce
@Assert: Hostgroup should be created and has domain assigned
"""
domain = make_domain()
hostgroup = make_hostgroup({'domain-id': domain['id']})
self.assertEqual(domain['name'], hostgroup['domain'])
@skip_if_bug_open('bugzilla', 1313056)
@run_only_on('sat')
@tier1
def test_positive_create_with_lifecycle_environment(self):
"""Check if hostgroup with lifecyle environment can be created
@id: 24bc3010-4e61-47d8-b8ae-0d66e1055aea
@Assert: Hostgroup should be created and has lifecycle env assigned
@BZ: 1359694
"""
org = make_org()
lc_env = make_lifecycle_environment({'organization-id': org['id']})
hostgroup = make_hostgroup({
'lifecycle-environment': lc_env['name'],
'organization-id': org['id'],
})
self.assertEqual(
lc_env['name'],
hostgroup['lifecycle-environment'],
)
@tier1
def test_positive_create_with_orgs_and_lce(self):
"""Check if hostgroup with multiple organizations can be created
if one of them is associated with lifecycle environment
@id: ca110a74-401d-48f9-9700-6c57f1c10f11
@Assert: Hostgroup is created, has both new organizations assigned
and has lifecycle env assigned
"""
orgs = [make_org() for _ in range(2)]
lce = make_lifecycle_environment({'organization-id': orgs[0]['id']})
hostgroup = make_hostgroup({
'organization-ids': [org['id'] for org in orgs],
'lifecycle-environment-id': lce['id'],
})
self.assertEqual(
set(org['name'] for org in orgs),
set(hostgroup['organizations'])
)
@run_only_on('sat')
@tier2
def test_positive_create_with_multiple_entities(self):
"""Check if hostgroup with multiple options can be created
@id: a3ef4f0e-971d-4307-8d0a-35103dff6586
@Assert: Hostgroup should be created and has all defined entities
assigned
@CaseLevel: Integration
"""
# Common entities
loc = make_location()
org = make_org()
env = make_environment({
'location-ids': loc['id'],
'organization-ids': org['id'],
})
lce = make_lifecycle_environment({'organization-id': org['id']})
puppet_proxy = Proxy.list({
'search': 'url = https://{0}:9090'.format(settings.server.hostname)
})[0]
# Content View should be promoted to be used with LC Env
cv = make_content_view({'organization-id': org['id']})
ContentView.publish({'id': cv['id']})
cv = ContentView.info({'id': cv['id']})
ContentView.version_promote({
'id': cv['versions'][0]['id'],
'to-lifecycle-environment-id': lce['id'],
})
# Network
domain = make_domain({
'location-ids': loc['id'],
'organization-ids': org['id'],
})
subnet = make_subnet({
'domain-ids': domain['id'],
'organization-ids': org['id'],
})
# Operating System
arch = make_architecture()
ptable = make_partition_table({
'location-ids': loc['id'],
'organization-ids': org['id'],
})
os = make_os({
'architecture-ids': arch['id'],
'partition-table-ids': ptable['id'],
})
media = make_medium({
'operatingsystem-ids': os['id'],
'location-ids': loc['id'],
'organization-ids': org['id'],
})
make_hostgroup_params = {
'location-ids': loc['id'],
'environment-id': env['id'],
'lifecycle-environment': lce['name'],
'puppet-proxy-id': puppet_proxy['id'],
'puppet-ca-proxy-id': puppet_proxy['id'],
'content-view-id': cv['id'],
'domain-id': domain['id'],
'subnet-id': subnet['id'],
'organization-ids': org['id'],
'architecture-id': arch['id'],
'partition-table-id': ptable['id'],
'medium-id': media['id'],
'operatingsystem-id': os['id'],
}
# If bug is open provide LCE id as parameter
# because LCE name cause errors
if bz_bug_is_open(1395254):
make_hostgroup_params.pop('lifecycle-environment')
make_hostgroup_params['lifecycle-environment-id'] = lce['id']
hostgroup = make_hostgroup(make_hostgroup_params)
self.assertIn(org['name'], hostgroup['organizations'])
self.assertIn(loc['name'], hostgroup['locations'])
self.assertEqual(env['name'], hostgroup['environment'])
self.assertEqual(
puppet_proxy['id'], hostgroup['puppet-master-proxy-id']
)
self.assertEqual(puppet_proxy['id'], hostgroup['puppet-ca-proxy-id'])
self.assertEqual(domain['name'], hostgroup['domain'])
self.assertEqual(subnet['name'], hostgroup['subnet'])
self.assertEqual(arch['name'], hostgroup['architecture'])
self.assertEqual(ptable['name'], hostgroup['partition-table'])
self.assertEqual(media['name'], hostgroup['medium'])
self.assertEqual(
"{0} {1}.{2}".format(
os['name'],
os['major-version'],
os['minor-version']
),
hostgroup['operating-system']
)
if not bz_bug_is_open('1313056'):
self.assertEqual(cv['name'], hostgroup['content-view'])
self.assertEqual(
lce['name'], hostgroup['lifecycle-environment']
)
@skip_if_bug_open('bugzilla', 1354568)
@run_only_on('sat')
@tier1
def test_negative_create_with_subnet_id(self):
"""Check if hostgroup with invalid subnet id raises proper error
@id: c352d7ea-4fc6-4b78-863d-d3ee4c0ad439
@Assert: Proper error should be raised
@BZ: 1354568
"""
subnet_id = gen_string('numeric', 4)
with self.assertRaises(CLIReturnCodeError) as exception:
HostGroup.create({
'name': gen_string('alpha'),
'subnet-id': subnet_id
})
        self.assertIn(
            'Could not find subnet {0}'.format(subnet_id),
            exception.exception.stderr
        )
@skip_if_bug_open('bugzilla', 1354568)
@run_only_on('sat')
@tier1
def test_negative_create_with_domain_id(self):
"""Check if hostgroup with invalid domain id raises proper error
@id: b36c83d6-b27c-4f1a-ac45-6c4999005bf7
@Assert: Proper error should be raised
@BZ: 1354568
"""
domain_id = gen_string('numeric', 4)
with self.assertRaises(CLIReturnCodeError) as exception:
HostGroup.create({
'name': gen_string('alpha'),
'domain-id': domain_id
})
        self.assertIn(
            'Could not find domain {0}'.format(domain_id),
            exception.exception.stderr
        )
@skip_if_bug_open('bugzilla', 1354568)
@run_only_on('sat')
@tier1
def test_negative_create_with_architecture_id(self):
"""Check if hostgroup with invalid architecture id raises proper error
@id: 7b7de0fa-aee9-4163-adc2-354c1e720d90
@Assert: Proper error should be raised
@BZ: 1354568
"""
arch_id = gen_string('numeric', 4)
with self.assertRaises(CLIReturnCodeError) as exception:
HostGroup.create({
'name': gen_string('alpha'),
'architecture-id': arch_id
})
        self.assertIn(
            'Could not find architecture {0}'.format(arch_id),
            exception.exception.stderr
        )
@tier1
def test_positive_update_name(self):
"""Successfully update an HostGroup.
@id: a36e3cbe-83d9-44ce-b8f7-5fab2a2cadf9
@Assert: HostGroup is updated.
"""
hostgroup = make_hostgroup()
for new_name in valid_hostgroups_list():
with self.subTest(new_name):
HostGroup.update({
'id': hostgroup['id'],
'new-name': new_name,
})
hostgroup = HostGroup.info({'id': hostgroup['id']})
self.assertEqual(hostgroup['name'], new_name)
@run_only_on('sat')
@tier1
def test_negative_update_name(self):
"""Create HostGroup then fail to update its name
@id: 42d208a4-f518-4ff2-9b7a-311adb460abd
        @Assert: HostGroup name is not updated
"""
hostgroup = make_hostgroup()
for new_name in invalid_values_list():
with self.subTest(new_name):
with self.assertRaises(CLIReturnCodeError):
HostGroup.update({
'id': hostgroup['id'],
'new-name': new_name,
})
result = HostGroup.info({'id': hostgroup['id']})
self.assertEqual(hostgroup['name'], result['name'])
@run_only_on('sat')
@tier1
def test_positive_delete_by_id(self):
"""Create HostGroup with valid values then delete it
by ID
@id: fe7dedd4-d7c3-4c70-b70d-c2deff357b76
        @Assert: HostGroup is deleted
"""
for name in valid_hostgroups_list():
with self.subTest(name):
hostgroup = make_hostgroup({'name': name})
HostGroup.delete({'id': hostgroup['id']})
with self.assertRaises(CLIReturnCodeError):
HostGroup.info({'id': hostgroup['id']})
@run_only_on('sat')
@tier1
def test_negative_delete_by_id(self):
"""Create HostGroup then delete it by wrong ID
@id: 047c9f1a-4dd6-4fdc-b7ed-37cc725c68d3
        @Assert: HostGroup is not deleted
"""
for entity_id in invalid_id_list():
with self.subTest(entity_id):
with self.assertRaises(CLIReturnCodeError):
HostGroup.delete({'id': entity_id})
|
sthirugn/robottelo
|
tests/foreman/cli/test_hostgroup.py
|
Python
|
gpl-3.0
| 16,401
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from .models import Board, BoardPermissions, User, UserSettings
@receiver(post_save, sender=Board)
def init_board_permissions(sender, **kwargs):
"""Link existing benchmark countries to newly created countries."""
instance = kwargs["instance"]
if kwargs["created"]:
BoardPermissions.objects.create(board=instance)
@receiver(post_save, sender=User)
def init_user_settings(sender, **kwargs):
"""Link existing benchmark countries to newly created countries."""
instance = kwargs["instance"]
if kwargs["created"]:
UserSettings.objects.create(user=instance)
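# Minimal sketch of the effect (Board field names are hypothetical):
# saving a new Board fires post_save, so the receiver above creates the
# matching BoardPermissions row automatically.
#
#     board = Board.objects.create(...)  # hypothetical creation call
#     assert BoardPermissions.objects.filter(board=board).exists()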
|
twschiller/open-synthesis
|
openach/signals.py
|
Python
|
gpl-3.0
| 681
|
import numpy as np
from ase import Hartree
from gpaw.aseinterface import GPAW
from gpaw.lcao.overlap import NewTwoCenterIntegrals
from gpaw.utilities import unpack
from gpaw.utilities.tools import tri2full, lowdin
from gpaw.lcao.tools import basis_subset2, get_bfi, get_bfi2
from gpaw.coulomb import get_vxc as get_ks_xc
from gpaw.utilities.blas import r2k, gemm
from gpaw.lcao.projected_wannier import dots, condition_number, eigvals, \
get_bfs, get_lcao_projections_HSP
def get_rot(F_MM, V_oM, L):
eps_M, U_MM = np.linalg.eigh(F_MM)
indices = eps_M.real.argsort()[-L:]
U_Ml = U_MM[:, indices]
U_Ml /= np.sqrt(dots(U_Ml.T.conj(), F_MM, U_Ml).diagonal())
U_ow = V_oM.copy()
U_lw = np.dot(U_Ml.T.conj(), F_MM)
for col1, col2 in zip(U_ow.T, U_lw.T):
norm = np.linalg.norm(np.hstack((col1, col2)))
col1 /= norm
col2 /= norm
return U_ow, U_lw, U_Ml
def get_lcao_xc(calc, P_aqMi, bfs=None, spin=0):
nq = len(calc.wfs.ibzk_qc)
nao = calc.wfs.setups.nao
dtype = calc.wfs.dtype
if bfs is None:
bfs = get_bfs(calc)
if calc.density.nt_sg is None:
calc.density.interpolate()
nt_sg = calc.density.nt_sg
vxct_sg = calc.density.finegd.zeros(calc.wfs.nspins)
calc.hamiltonian.xc.calculate(calc.density.finegd, nt_sg, vxct_sg)
vxct_G = calc.wfs.gd.zeros()
calc.hamiltonian.restrict(vxct_sg[spin], vxct_G)
Vxc_qMM = np.zeros((nq, nao, nao), dtype)
for q, Vxc_MM in enumerate(Vxc_qMM):
bfs.calculate_potential_matrix(vxct_G, Vxc_MM, q)
tri2full(Vxc_MM, 'L')
# Add atomic PAW corrections
for a, P_qMi in P_aqMi.items():
D_sp = calc.density.D_asp[a][:]
H_sp = np.zeros_like(D_sp)
calc.hamiltonian.xc.calculate_paw_correction(calc.wfs.setups[a],
D_sp, H_sp)
H_ii = unpack(H_sp[spin])
for Vxc_MM, P_Mi in zip(Vxc_qMM, P_qMi):
Vxc_MM += dots(P_Mi, H_ii, P_Mi.T.conj())
return Vxc_qMM * Hartree
def get_xc2(calc, w_wG, P_awi, spin=0):
if calc.density.nt_sg is None:
calc.density.interpolate()
nt_g = calc.density.nt_sg[spin]
vxct_g = calc.density.finegd.zeros()
calc.hamiltonian.xc.get_energy_and_potential(nt_g, vxct_g)
vxct_G = calc.wfs.gd.empty()
calc.hamiltonian.restrict(vxct_g, vxct_G)
# Integrate pseudo part
Nw = len(w_wG)
xc_ww = np.empty((Nw, Nw))
r2k(.5 * calc.wfs.gd.dv, w_wG, vxct_G * w_wG, .0, xc_ww)
tri2full(xc_ww, 'L')
# Add atomic PAW corrections
for a, P_wi in P_awi.items():
D_sp = calc.density.D_asp[a][:]
H_sp = np.zeros_like(D_sp)
calc.wfs.setups[a].xc_correction.calculate_energy_and_derivatives(
D_sp, H_sp)
H_ii = unpack(H_sp[spin])
xc_ww += dots(P_wi, H_ii, P_wi.T.conj())
return xc_ww * Hartree
class ProjectedWannierFunctionsFBL:
"""PWF in the finite band limit.
::
--N
|w_w> = > |psi_n> U_nw
--n=1
"""
def __init__(self, V_nM, No, ortho=False):
Nw = V_nM.shape[1]
assert No <= Nw
V_oM, V_uM = V_nM[:No], V_nM[No:]
F_MM = np.dot(V_uM.T.conj(), V_uM)
U_ow, U_lw, U_Ml = get_rot(F_MM, V_oM, Nw - No)
self.U_nw = np.vstack((U_ow, dots(V_uM, U_Ml, U_lw)))
# stop here ?? XXX
self.S_ww = self.rotate_matrix(np.ones(1))
if ortho:
lowdin(self.U_nw, self.S_ww)
self.S_ww = np.identity(Nw)
self.norms_n = np.dot(self.U_nw, np.linalg.solve(
self.S_ww, self.U_nw.T.conj())).diagonal()
def rotate_matrix(self, A_nn):
if A_nn.ndim == 1:
return np.dot(self.U_nw.T.conj() * A_nn, self.U_nw)
else:
return dots(self.U_nw.T.conj(), A_nn, self.U_nw)
def rotate_projections(self, P_ani):
P_awi = {}
for a, P_ni in P_ani.items():
P_awi[a] = np.tensordot(self.U_nw, P_ni, axes=[[0], [0]])
return P_awi
def rotate_function(self, psit_nG):
return np.tensordot(self.U_nw, psit_nG, axes=[[0], [0]])
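# Hedged usage sketch: ``V_nM`` is the (bands x basis functions)
# projection matrix and ``No`` the number of fixed occupied states; the
# names and sizes below are illustrative only.
#
#     pwf = ProjectedWannierFunctionsFBL(V_nM, No=4, ortho=True)
#     H_ww = pwf.rotate_matrix(eps_n)  # eps_n: 1-d array of eigenvalues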
class ProjectedWannierFunctionsIBL:
"""PWF in the infinite band limit.
::
--No --Nw
|w_w> = > |psi_o> U_ow + > |f_M> U_Mw
--o=1 --M=1
"""
def __init__(self, V_nM, S_MM, No, lcaoindices=None):
Nw = V_nM.shape[1]
assert No <= Nw
self.V_oM, V_uM = V_nM[:No], V_nM[No:]
F_MM = S_MM - np.dot(self.V_oM.T.conj(), self.V_oM)
U_ow, U_lw, U_Ml = get_rot(F_MM, self.V_oM, Nw - No)
self.U_Mw = np.dot(U_Ml, U_lw)
self.U_ow = U_ow - np.dot(self.V_oM, self.U_Mw)
if lcaoindices is not None:
for i in lcaoindices:
self.U_ow[:, i] = 0.0
self.U_Mw[:, i] = 0.0
self.U_Mw[i, i] = 1.0
# stop here ?? XXX
self.S_ww = self.rotate_matrix(np.ones(1), S_MM)
P_uw = np.dot(V_uM, self.U_Mw)
self.norms_n = np.hstack((
np.dot(U_ow, np.linalg.solve(self.S_ww, U_ow.T.conj())).diagonal(),
np.dot(P_uw, np.linalg.solve(self.S_ww, P_uw.T.conj())).diagonal()))
def rotate_matrix(self, A_o, A_MM):
assert A_o.ndim == 1
A_ww = dots(self.U_ow.T.conj() * A_o, self.V_oM, self.U_Mw)
A_ww += np.conj(A_ww.T)
A_ww += np.dot(self.U_ow.T.conj() * A_o, self.U_ow)
A_ww += dots(self.U_Mw.T.conj(), A_MM, self.U_Mw)
return A_ww
def rotate_projections(self, P_aoi, P_aMi, indices=None):
if indices is None:
U_ow = self.U_ow
U_Mw = self.U_Mw
else:
U_ow = self.U_ow[:, indices]
U_Mw = self.U_Mw[:, indices]
P_awi = {}
for a, P_oi in P_aoi.items():
P_awi[a] = np.tensordot(U_Mw, P_aMi[a], axes=[[0], [0]])
if len(U_ow) > 0:
P_awi[a] += np.tensordot(U_ow, P_oi, axes=[[0], [0]])
return P_awi
def rotate_function(self, psit_oG, bfs, q=-1, indices=None):
if indices is None:
U_ow = self.U_ow
U_Mw = self.U_Mw
else:
U_ow = self.U_ow[:, indices]
U_Mw = self.U_Mw[:, indices]
w_wG = np.zeros((U_ow.shape[1],) + psit_oG.shape[1:])
if len(U_ow) > 0:
gemm(1., psit_oG, U_ow.T.copy(), 0., w_wG)
bfs.lcao_to_grid(U_Mw.T.copy(), w_wG, q)
return w_wG
class PWFplusLCAO(ProjectedWannierFunctionsIBL):
def __init__(self, V_nM, S_MM, No, pwfmask, lcaoindices=None):
Nw = V_nM.shape[1]
self.V_oM = V_nM[:No]
dtype = V_nM.dtype
# Do PWF optimization for pwfbasis submatrix only!
Npwf = len(pwfmask.nonzero()[0])
pwfmask2 = np.outer(pwfmask, pwfmask)
s_MM = S_MM[pwfmask2].reshape(Npwf, Npwf)
v_oM = self.V_oM[:, pwfmask]
f_MM = s_MM - np.dot(v_oM.T.conj(), v_oM)
nw = len(s_MM)
assert No <= nw
u_ow, u_lw, u_Ml = get_rot(f_MM, v_oM, nw - No)
u_Mw = np.dot(u_Ml, u_lw)
u_ow = u_ow - np.dot(v_oM, u_Mw)
# Determine U for full lcao basis
self.U_ow = np.zeros((No, Nw), dtype)
for U_w, u_w in zip(self.U_ow, u_ow):
np.place(U_w, pwfmask, u_w)
self.U_Mw = np.identity(Nw, dtype)
np.place(self.U_Mw, pwfmask2, u_Mw.flat)
if lcaoindices is not None:
for i in lcaoindices:
self.U_ow[:, i] = 0.0
self.U_Mw[:, i] = 0.0
self.U_Mw[i, i] = 1.0
self.S_ww = self.rotate_matrix(np.ones(1), S_MM)
self.norms_n = None
def set_lcaoatoms(calc, pwf, lcaoatoms):
ind = get_bfi(calc, lcaoatoms)
for i in ind:
pwf.U_ow[:, i] = 0.0
pwf.U_Mw[:, i] = 0.0
        pwf.U_Mw[i, i] = 1.0
class PWF2:
def __init__(self, gpwfilename, fixedenergy=0., spin=0, ibl=True,
basis='sz', zero_fermi=False, pwfbasis=None, lcaoatoms=None,
projection_data=None):
calc = GPAW(gpwfilename, txt=None, basis=basis)
assert calc.wfs.gd.comm.size == 1
assert calc.wfs.kpt_comm.size == 1
assert calc.wfs.band_comm.size == 1
if zero_fermi:
try:
Ef = calc.get_fermi_level()
except NotImplementedError:
Ef = calc.get_homo_lumo().mean()
else:
Ef = 0.0
self.ibzk_kc = calc.get_ibz_k_points()
self.nk = len(self.ibzk_kc)
self.eps_kn = [calc.get_eigenvalues(kpt=q, spin=spin) - Ef
for q in range(self.nk)]
self.M_k = [sum(eps_n <= fixedenergy) for eps_n in self.eps_kn]
print 'Fixed states:', self.M_k
self.calc = calc
self.dtype = self.calc.wfs.dtype
self.spin = spin
self.ibl = ibl
self.pwf_q = []
self.norms_qn = []
self.S_qww = []
self.H_qww = []
if ibl:
if pwfbasis is not None:
pwfmask = basis_subset2(calc.atoms.get_chemical_symbols(),
basis, pwfbasis)
if lcaoatoms is not None:
lcaoindices = get_bfi2(calc.atoms.get_chemical_symbols(),
basis,
lcaoatoms)
else:
lcaoindices = None
self.bfs = get_bfs(calc)
if projection_data is None:
V_qnM, H_qMM, S_qMM, self.P_aqMi = get_lcao_projections_HSP(
calc, bfs=self.bfs, spin=spin, projectionsonly=False)
else:
V_qnM, H_qMM, S_qMM, self.P_aqMi = projection_data
H_qMM -= Ef * S_qMM
for q, M in enumerate(self.M_k):
if pwfbasis is None:
pwf = ProjectedWannierFunctionsIBL(V_qnM[q], S_qMM[q], M,
lcaoindices)
else:
pwf = PWFplusLCAO(V_qnM[q], S_qMM[q], M, pwfmask,
lcaoindices)
self.pwf_q.append(pwf)
self.norms_qn.append(pwf.norms_n)
self.S_qww.append(pwf.S_ww)
self.H_qww.append(pwf.rotate_matrix(self.eps_kn[q][:M],
H_qMM[q]))
else:
if projection_data is None:
V_qnM = get_lcao_projections_HSP(calc, spin=spin)
else:
V_qnM = projection_data
for q, M in enumerate(self.M_k):
pwf = ProjectedWannierFunctionsFBL(V_qnM[q], M, ortho=False)
self.pwf_q.append(pwf)
self.norms_qn.append(pwf.norms_n)
self.S_qww.append(pwf.S_ww)
self.H_qww.append(pwf.rotate_matrix(self.eps_kn[q]))
for S in self.S_qww:
print 'Condition number: %0.1e' % condition_number(S)
def get_hamiltonian(self, q=0, indices=None):
if indices is None:
return self.H_qww[q]
else:
return self.H_qww[q].take(indices, 0).take(indices, 1)
def get_overlap(self, q=0, indices=None):
if indices is None:
return self.S_qww[q]
else:
return self.S_qww[q].take(indices, 0).take(indices, 1)
def get_projections(self, q=0, indices=None):
kpt = self.calc.wfs.kpt_u[self.spin * self.nk + q]
if not hasattr(self, 'P_awi'):
if self.ibl:
M = self.M_k[q]
self.P_awi = self.pwf_q[q].rotate_projections(
dict([(a, P_ni[:M]) for a, P_ni in kpt.P_ani.items()]),
dict([(a, P_qMi[q]) for a, P_qMi in self.P_aqMi.items()]),
indices)
else:
                # The FBL variant takes no basis functions or index mask.
                self.P_awi = self.pwf_q[q].rotate_projections(kpt.P_ani)
return self.P_awi
def get_orbitals(self, q=0, indices=None):
self.calc.wfs.initialize_wave_functions_from_restart_file()
kpt = self.calc.wfs.kpt_u[self.spin * self.nk + q]
if not hasattr(self, 'w_wG'):
if self.ibl:
self.w_wG = self.pwf_q[q].rotate_function(
kpt.psit_nG[:self.M_k[q]], self.bfs, q, indices)
else:
                # The FBL variant takes no basis functions or index mask.
                self.w_wG = self.pwf_q[q].rotate_function(kpt.psit_nG)
return self.w_wG
def get_Fcore(self, q=0, indices=None):
if indices is None:
Fcore_ww = np.zeros_like(self.H_qww[q])
else:
Fcore_ww = np.zeros((len(indices), len(indices)))
for a, P_wi in self.get_projections(q, indices).items():
X_ii = unpack(self.calc.wfs.setups[a].X_p)
Fcore_ww -= dots(P_wi, X_ii, P_wi.T.conj())
return Fcore_ww * Hartree
def get_eigs(self, q=0):
        return eigvals(self.H_qww[q], self.S_qww[q])
def get_condition_number(self, q=0):
return condition_number(self.S_qww[q])
def get_xc(self, q=0, indices=None):
#self.calc.density.ghat.set_positions(
# self.calc.atoms.get_scaled_positions() % 1.)
#self.calc.hamiltonian.poisson.initialize()
if self.ibl:
return get_xc2(self.calc, self.get_orbitals(q, indices),
self.get_projections(q, indices), self.spin)
else:
return self.pwf_q[q].rotate_matrix(get_ks_xc(self.calc,
spin=self.spin))
class LCAOwrap:
def __init__(self, calc, spin=0):
assert calc.wfs.gd.comm.size == 1
assert calc.wfs.kpt_comm.size == 1
assert calc.wfs.band_comm.size == 1
from gpaw.lcao.tools import get_lcao_hamiltonian
H_skMM, S_kMM = get_lcao_hamiltonian(calc)
self.calc = calc
self.dtype = calc.wfs.dtype
self.spin = spin
self.H_qww = H_skMM[spin]
self.S_qww = S_kMM
self.P_aqwi = calc.wfs.P_aqMi
self.Nw = self.S_qww.shape[-1]
for S in self.S_qww:
print 'Condition number: %0.1e' % condition_number(S)
def get_hamiltonian(self, q=0, indices=None):
if indices is None:
return self.H_qww[q]
else:
return self.H_qww[q].take(indices, 0).take(indices, 1)
def get_overlap(self, q=0, indices=None):
if indices is None:
return self.S_qww[q]
else:
return self.S_qww[q].take(indices, 0).take(indices, 1)
def get_projections(self, q=0, indices=None):
if indices is None:
return dict([(a, P_qwi[q]) for a, P_qwi in self.P_aqwi.items()])
else:
return dict([(a, P_qwi[q].take(indices, 0))
for a, P_qwi in self.P_aqwi.items()])
def get_orbitals(self, q=-1, indices=None):
assert q == -1
if indices is None:
indices = range(self.Nw)
Ni = len(indices)
C_wM = np.zeros((Ni, self.Nw), self.dtype)
for i, C_M in zip(indices, C_wM):
C_M[i] = 1.0
w_wG = self.calc.wfs.gd.zeros(Ni, dtype=self.dtype)
self.calc.wfs.basis_functions.lcao_to_grid(C_wM, w_wG, q=-1)
return w_wG
def get_Fcore(self, q=0, indices=None):
if indices is None:
Fcore_ww = np.zeros_like(self.H_qww[q])
else:
Fcore_ww = np.zeros((len(indices), len(indices)))
for a, P_wi in self.get_projections(q, indices).items():
if self.calc.wfs.setups[a].type != 'ghost':
X_ii = unpack(self.calc.wfs.setups[a].X_p)
Fcore_ww -= dots(P_wi, X_ii, P_wi.T.conj())
return Fcore_ww * Hartree
def get_xc(self, q=0, indices=None):
if not hasattr(self, 'Vxc_qww'):
self.Vxc_qww = get_lcao_xc(self.calc, self.P_aqwi,
bfs=self.calc.wfs.basis_functions,
spin=self.spin)
if indices is None:
return self.Vxc_qww[q]
else:
return self.Vxc_qww[q].take(indices, 0).take(indices, 1)
|
ajylee/gpaw-rtxs
|
gpaw/lcao/pwf2.py
|
Python
|
gpl-3.0
| 16,299
|
# -*- coding: utf-8 -*-
__params__ = {'la': 32, 'lb': 32, 'da': 10}
def protocol(client, server, params):
    la = params['la']  # bit length of the vector elements
    lb = params['lb']  # bit length of the scalar factor
    da = params['da']  # dimension of the vector
    # Unsigned, UnsignedVec and driver are provided to the protocol
    # namespace by the tasty runtime.
    server.a = UnsignedVec(bitlen=la, dim=da).input(src=driver, desc="a")
    server.b = Unsigned(bitlen=lb).input(src=driver, desc="b")
    client.a <<= server.a  # transfer both server inputs to the client
    client.b <<= server.b
    client.c = client.a * client.b  # vector-scalar product on the client
    client.c.output(dest=driver, desc="c")
|
tastyproject/tasty
|
tasty/tests/functional/protocols/mul/unsignedvec_server_server_client/protocol.py
|
Python
|
gpl-3.0
| 442
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# res_core_data_mthd1.py
import unittest
import sys
import os
root_folder = os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + os.sep + ".." + os.sep + "aikif" )
sys.path.append(root_folder)
import core_data as mod_core
def get_method():
"""
    Return a description in RST format for the research paper.
"""
txt = 'Method 1 - Ontological Mapping\n'
txt += '---------------------------------------\n\n'
txt += 'Using an ontology, map the columns in raw data to a set of standard tables\n\n'
return txt
def get_results(fname, dat):
append_rst(fname, '\nMethod 1: running source data ' + dat + ' .... \n')
with open(dat, 'r') as f:
for line in f:
cols = parse_csv(line)
print(cols)
def append_rst(fname, txt):
with open(fname, 'a') as f:
f.write(txt)
def parse_csv(txt):
cols = txt.split(',')
return cols
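# Hedged usage sketch (file names are placeholders): write the method
# description into a report, then stream a raw CSV through the parser.
#
#     append_rst('report.rst', get_method())
#     get_results('report.rst', 'raw_data.csv')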
|
acutesoftware/AIKIF
|
scripts/res_core_data_mthd1.py
|
Python
|
gpl-3.0
| 961
|
"""Fonctionnal testing for "Python In HTML" (PIH) and "HTML In Python" (HIP).
"""
__author__ = "Didier Wenzek (didier.wenzek@free.fr)"
# The code under test is in the .. directory.
import sys
sys.path.append('..')
# We use the Python unit testing framework
import unittest
import thread, time
from util import *
class TemplateTest(unittest.TestCase):
"""Testing the Karrigell Scripting languages.
A test is prepared with the addScript() and removeScript() methods.
which are used to set up the root directory with scripts picked from
the data directory.
The Karrigell response is returned by the getPage() method
as a Page object with a status and a content attribute.
The expected response can be retrieved from the data repository with the
getGoldenFile() method.
The Karrigell response can then be tested against the expected response,
using any of the unittest methods like assertEqual().
"""
def setUp(self):
launchKarrigell()
def test_PIH_InsertPython(self):
"""<% and %> tags are used to insert python code in HTML."""
import time
addScript("time.pih")
page = getPage("/time.pih")
today = time.strftime("%d:%m:%y",time.localtime(time.time()))
expected = getGoldenFile("time.out", date=today)
self.assertEqual(page.status, 200)
self.assertEqual(page.content, expected)
def test_PIH_PrintValue(self):
"""<%= an %> tags are used to print python value."""
addScript("value.pih")
page = getPage("/value.pih")
expected = getGoldenFile("value.out")
self.assertEqual(page.status, 200)
self.assertEqual(page.content + '\n', expected)
def test_PIH_Indentation(self):
"""Python indentation is managed using the <% end %> tag."""
addScript("indent.pih")
page = getPage("/indent.pih", 'POST', params={'hour':'22'})
expected = getGoldenFile("indent.out")
self.assertEqual(page.status, 200)
self.assertEqual(page.content, expected)
def test_PIH_IndentTag(self):
"""Within <indent> tag HTML must follow Python indentation."""
addScript("indent_tag.pih")
page = getPage("/indent_tag.pih", 'POST', params={'hour':'22'})
expected = getGoldenFile("indent_tag.out")
self.assertEqual(page.status, 200)
self.assertEqual(page.content, expected)
def test_PIH_EmbeddedBlocks(self):
"""Python blocks may be embedded."""
addScript("embedded.pih")
page = getPage("/embedded.pih")
expected = getGoldenFile("embedded.out")
self.assertEqual(page.status, 200)
self.assertEqual(page.content, expected)
def test_HIP_Principes(self):
"""Literal text in python are printed to the response stream"""
addScript("the_smiths.hip")
page = getPage("/the_smiths.hip")
expected = getGoldenFile("the_smiths.out")
self.assertEqual(page.status, 200)
self.assertEqual(page.content, expected)
if __name__ == "__main__":
unittest.main()
|
jhjguxin/PyCDC
|
Karrigell-2.3.5/test/TemplateTest.py
|
Python
|
gpl-3.0
| 2,812
|
quantidade = int(input())
cidade = input()
qt_quartos = int(input())
if cidade.lower() == "pipa":
    if qt_quartos == 2:
        valor_total = (quantidade * 75) + 600
        valor_unitario = valor_total / quantidade
    else:
        valor_total = (quantidade * 75) + 900
        valor_unitario = valor_total / quantidade
else:
    if qt_quartos == 3:
        valor_total = (quantidade * 60) + 950
        valor_unitario = valor_total / quantidade
    else:
        valor_total = (quantidade * 60) + 1120
        valor_unitario = valor_total / quantidade
print("%.2f" % valor_total)
print("%.2f" % valor_unitario)
|
SANDEISON/The-Huxley
|
Python/Viagem de Amigos.py
|
Python
|
gpl-3.0
| 615
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2015 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
#--
import sys, argparse, os, numpy as np
from horton import log, __version__
from horton.scripts.common import parse_h5, store_args, check_output, \
write_script_output
from horton.scripts.espfit import load_charges, load_cost
# Raise on every floating point error except underflow.
np.seterr(divide='raise', over='raise', invalid='raise')
def parse_args():
parser = argparse.ArgumentParser(prog='horton-esp-test.py',
description='Test how well charges reproduce the ESP.')
parser.add_argument('-V', '--version', action='version',
version="%%(prog)s (HORTON version %s)" % __version__)
parser.add_argument('cost',
help='The location of the cost function in the form '
'"file.h5:group/cost". This argument must be the same as the '
'output argument of the script horton-esp-cost.py.')
parser.add_argument('charges', type=str,
help='The atomic charges to be used in the form '
'"file.h5:group/charges". ')
parser.add_argument('output', type=str,
help='The output destination in the form file.h5:group. The colon and '
'the group name are optional. When omitted, the root group of the '
'HDF5 file is used.')
parser.add_argument('--overwrite', default=False, action='store_true',
help='Overwrite existing output in the HDF5 file')
parser.add_argument('--qtot', '-q', default=None, type=float,
help='The total charge of the system. When given, the charges from the '
'HDF5 file are corrected.')
return parser.parse_args()
def main():
args = parse_args()
fn_h5, grp_name = parse_h5(args.output, 'output')
# check if the group is already present (and not empty) in the output file
if check_output(fn_h5, grp_name, args.overwrite):
return
# Load the cost function from the HDF5 file
cost, used_volume = load_cost(args.cost)
# Load the charges from the HDF5 file
charges = load_charges(args.charges)
# Fix total charge if requested
if args.qtot is not None:
charges -= (charges.sum() - args.qtot)/len(charges)
# Store parameters in output
results = {}
results['qtot'] = charges.sum()
# Fitness of the charges
results['cost'] = cost.value_charges(charges)
if results['cost'] < 0:
results['rmsd'] = 0.0
else:
results['rmsd'] = (results['cost']/used_volume)**0.5
# Worst case stuff
results['cost_worst'] = cost.worst(0.0)
if results['cost_worst'] < 0:
results['rmsd_worst'] = 0.0
else:
results['rmsd_worst'] = (results['cost_worst']/used_volume)**0.5
# Write some things on screen
if log.do_medium:
log('RMSD charges: %10.5e' % np.sqrt((charges**2).mean()))
log('RMSD ESP: %10.5e' % results['rmsd'])
log('Worst RMSD ESP: %10.5e' % results['rmsd_worst'])
log.hline()
# Store the results in an HDF5 file
write_script_output(fn_h5, grp_name, results, args)
if __name__ == '__main__':
main()
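# Example invocation (file and group names are placeholders), matching
# the arguments defined in parse_args() above:
#
#     horton-esp-test.py cost.h5:espfit/cost charges.h5:espfit/charges \
#         output.h5:espfit/test --qtot 0.0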
|
eustislab/horton
|
scripts/horton-esp-test.py
|
Python
|
gpl-3.0
| 3,917
|
from django.db import models
from django.conf import settings
from django.utils import timezone
from django.core.validators import MinValueValidator
from django.urls import reverse
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext as _
from .Committee import Committee
from .Registration import Registration
from lib.CommaSeparatedStringsField import CommaSeparatedStringsField
class Event(models.Model):
name = models.CharField(max_length=25)
description = models.TextField(max_length=255)
long_description = models.TextField(blank=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
published_at = models.DateTimeField(default=timezone.now, blank=True)
deadline_at = models.DateTimeField()
start_at = models.DateTimeField()
end_at = models.DateTimeField()
note_field = models.CharField(max_length=100, default='', blank=True)
note_field_options = CommaSeparatedStringsField(max_length=255, default='', blank=True)
note_field_required = models.BooleanField()
note_field_public = models.BooleanField()
location = models.CharField(max_length=25)
price = models.DecimalField(max_digits=5, decimal_places=2, default=0)
calendar_url = models.CharField(max_length=255, blank=True)
committee = models.ForeignKey(Committee, on_delete=models.PROTECT)
participants = models.ManyToManyField(settings.AUTH_USER_MODEL, through=Registration)
places = models.PositiveIntegerField(default=None, null=True, blank=True, validators=[MinValueValidator(1)])
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('event-detail', args=[self.pk])
def is_published(self):
"""
Return true if the event is published (past published date and not past end date)
"""
return self.published_at < timezone.now() < self.end_at
def is_expired(self):
"""
Return true if deadline is expired.
"""
return self.deadline_at is not None and self.deadline_at < timezone.now()
def is_full(self):
"""
Return true if there are no free places left.
"""
return self.get_free_places() is not None and self.get_free_places() <= 0
def get_free_places(self):
"""
Return the number of free places left.
"""
if self.places is None:
# If the event doesn't have a places limit, the value of this function is not defined
return None
else:
return self.places - Registration.objects.filter(event=self, withdrawn_at__isnull=True).count()
def get_active_registrations_count(self):
"""
Return the number of non-withdrawn registrations
"""
return self.registration_set.filter(withdrawn_at__isnull=True).count()
def is_almost_expired(self):
"""
Return true if the deadline is closer than a day.
"""
return self.deadline_at - timezone.now() < timezone.timedelta(days=1) and not self.is_expired()
def get_note_field_options(self):
"""
Return list of tuples from list of options
"""
return [('', self.note_field + ':')] + [(x, x) for x in self.note_field_options]
    def clean(self):
        if self.start_at > self.end_at:
            # "The start date is later than the end date!"
            raise ValidationError(_("Begindatum is later dan de einddatum!"))
        if self.start_at < timezone.now():
            # "The start date is in the past!"
            raise ValidationError({'start_at': _("Startdatum is in het verleden!")})
        if self.end_at < timezone.now():
            # "The end date is in the past!"
            raise ValidationError({'end_at': _("Einddatum is in het verleden!")})
        if self.note_field_options and len(self.note_field_options) < 2:
            # "Provide at least two options."
            raise ValidationError({'note_field_options': _("Geef minstens twee opties op.")})
class Meta:
ordering = ['created_at']
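# Hedged sketch of the capacity logic above (numbers are illustrative):
# with places=30 and 5 active registrations, get_free_places() returns
# 30 - 5 = 25 and is_full() is False; with places=None the event is
# unlimited, so get_free_places() returns None and is_full() is False.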
|
Compizfox/Inschrijflijst
|
app/models/Event.py
|
Python
|
gpl-3.0
| 3,577
|
clothes = ["T-Shirt", "Sweater"]
print("Hello, welcome to my shop\n")
while True:
    command = input("Welcome to our shop, what do you want (C, R, U, D)? ")
    if command.upper() == "C":
        new_item = input("Enter new item: ")
        clothes.append(new_item.capitalize())
    elif command.upper() == "R":
        pass  # the item list is printed at the end of every pass
    elif command.upper() == "U":
        pos = int(input("Update position? "))
        if 1 <= pos <= len(clothes):
            new_item = input("Enter new item: ")
            clothes[pos - 1] = new_item.capitalize()
        else:
            print("Sorry, your item is out of sale!")
    elif command.upper() == "D":
        pos = int(input("Delete position? "))
        if 1 <= pos <= len(clothes):
            clothes.pop(pos - 1)
        else:
            print("Sorry, your item is out of sale!")
    else:
        print("Sorry, we are renovating and cannot serve you. See you again!")
        break
    print("Our items: ", end='')
    for i, item in enumerate(clothes):
        if i < len(clothes) - 1:
            print(item, end=', ')
        else:
            print(item + "\n")
|
hanamvu/C4E11
|
SS3/clothes_shop.py
|
Python
|
gpl-3.0
| 1,117
|
import unittest
from card_validation import (
numberToMatrix,
getOddDigits,
getEvenDigits,
sumOfDoubleOddPlace,
sumOfEvenPlace,
getDigit,
isValid
)
class CardValidationTest(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(CardValidationTest, self).__init__(*args, **kwargs)
self.card_number = "4388576018410707"
self.matrix = numberToMatrix(self.card_number)
self.odds = getOddDigits(self.matrix)
self.evens = getEvenDigits(self.matrix)
def test_numberToMatrix(self):
self.assertEqual(self.matrix.__class__, list)
def test_getOddDigits(self):
self.assertEqual(self.odds.__class__, list)
def test_getEvenDigits(self):
self.assertEqual(self.evens.__class__, list)
def test_sumOfDoubleOddPlace(self):
self.assertEqual(sumOfDoubleOddPlace(self.odds), 29)
def test_getDigit(self):
self.assertEqual(getDigit(9), 9)
def test_isValid(self):
self.assertEqual(isValid(self.card_number), True)
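# The card_validation module under test is not shown here. For reference
# only, a Luhn check consistent with the expectations above (the doubled
# odd-place digits of "4388576018410707" sum to 29, and the card is
# valid) could look like this hedged sketch -- not the module's code:
def _reference_luhn_is_valid(number):
    """Return True if `number` passes the Luhn checksum."""
    digits = [int(d) for d in str(number)]
    checksum = 0
    for i, d in enumerate(reversed(digits)):
        if i % 2 == 1:
            # Double every second digit from the right; subtracting 9
            # from a two-digit product equals summing its digits.
            d = d * 2
            if d > 9:
                d -= 9
        checksum += d
    return checksum % 10 == 0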
if __name__ == "__main__":
unittest.main()
|
dennyb87/pycity
|
week5/card_validation_test.py
|
Python
|
gpl-3.0
| 1,101
|
# ========================== Start Copyright Notice ========================== #
# #
# Copyright 2014 F.D.I.S. #
# This file is part of Kinetic Gunner: Gunner of Angst #
# #
# For the latest version, please visit: #
# https://github.com/CertainlyUncertain/Kinetic-Gunner-Gunner-of-Angst #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
# =========================== End Copyright Notice =========================== #
# Input Manager -------------------------------------------------------------- #
import ogre.renderer.OGRE as ogre
import ogre.io.OIS as OIS
from vector import Vector3
import os
import time
class InputMgr(OIS.KeyListener, OIS.MouseListener, OIS.JoyStickListener):
''' Manages keyboard and mouse, with buffered and unbuffered input. '''
def __init__(self, engine):
''' Creates Input Listeners and Initializes Variables. '''
self.engine = engine
OIS.KeyListener.__init__(self)
OIS.MouseListener.__init__(self)
OIS.JoyStickListener.__init__(self)
self.move = 1000
self.rotate = 25
self.selectionRadius = 100
self.MB_Left_Down = False
self.MB_Right_Down = False
print "Input Manager Created."
def init(self):
''' Sets the Window and Creates Input System and Objects. '''
windowHandle = 0
renderWindow = self.engine.gfxMgr.root.getAutoCreatedWindow()
windowHandle = renderWindow.getCustomAttributeUnsignedLong("WINDOW")
paramList = [("WINDOW", str(windowHandle))]
if os.name == "nt":
#t = [("w32_mouse","DISCL_FOREGROUND"), ("w32_mouse", "DISCL_NONEXCLUSIVE")]
t = [("w32_mouse","DISCL_FOREGROUND"), ("w32_mouse", "DISCL_EXCLUSIVE")]
else:
t = [("x11_mouse_grab", "true"), ("x11_mouse_hide", "true")]
#t = [("x11_mouse_grab", "false"), ("x11_mouse_hide", "true")]
paramList.extend(t)
self.inputManager = OIS.createPythonInputSystem(paramList)
# Now InputManager is initialized for use. Keyboard and Mouse objects
# must still be initialized separately
self.keyboard = None
self.mouse = None
try:
self.keyboard = self.inputManager.createInputObjectKeyboard(OIS.OISKeyboard, True)
self.mouse = self.inputManager.createInputObjectMouse(OIS.OISMouse, True)
#Joystick
except Exception, e:
print "No Keyboard or mouse!!!!"
raise e
if self.keyboard:
self.keyboard.setEventCallback(self)
if self.mouse:
self.mouse.setEventCallback(self)
self.windowResized( renderWindow )
print "Input Manager Initialized."
def crosslink(self):
''' Links to other Managers. '''
pass
def tick(self, dtime):
''' Update keyboard and mouse. '''
self.keyboard.capture()
self.mouse.capture()
self.handleCamera(dtime)
self.handleModifiers(dtime)
# Quit
if self.keyboard.isKeyDown(OIS.KC_ESCAPE):
self.engine.keepRunning = False
pass
def stop(self):
''' Destory Input Objects and System. '''
self.inputManager.destroyInputObjectKeyboard(self.keyboard)
self.inputManager.destroyInputObjectMouse(self.mouse)
OIS.InputManager.destroyInputSystem(self.inputManager)
self.inputManager = None
print "Input Manager Stopped."
# Keyboard Listener ----------------------------------------------------- #
def keyPressed(self, evt):
'''Handles Toggleable Key Presses'''
# Swap Cameras (Between First-Person and Debug Views)
if self.keyboard.isKeyDown(OIS.KC_G):
self.engine.camMgr.swap()
# Pause ------------------------DEBUG-----------------------------------
if self.keyboard.isKeyDown(OIS.KC_SPACE):
time.sleep(10)
return True
def keyReleased(self, evt):
return True
def handleModifiers(self, dtime):
self.leftShiftDown = self.keyboard.isKeyDown(OIS.KC_LSHIFT)
self.leftCtrlDown = self.keyboard.isKeyDown(OIS.KC_LCONTROL)
pass
def handleCamera(self, dtime):
'''Move the camera using keyboard input.'''
# Forward
if self.keyboard.isKeyDown(OIS.KC_W):
self.engine.camMgr.transVector.z -= self.move
# Backward
if self.keyboard.isKeyDown(OIS.KC_S):
self.engine.camMgr.transVector.z += self.move
# Left
if self.keyboard.isKeyDown(OIS.KC_A):
self.engine.camMgr.transVector.x -= self.move
# Right
if self.keyboard.isKeyDown(OIS.KC_D):
self.engine.camMgr.transVector.x += self.move
# Up
if self.keyboard.isKeyDown(OIS.KC_3):
self.engine.camMgr.transVector.y += self.move
# Down
if self.keyboard.isKeyDown(OIS.KC_4):
self.engine.camMgr.transVector.y -= self.move
# Yaw
if self.keyboard.isKeyDown(OIS.KC_Q):
self.engine.camMgr.yawRot = -self.rotate
# Yaw
if self.keyboard.isKeyDown(OIS.KC_E):
self.engine.camMgr.yawRot = self.rotate
# Pitch
if self.keyboard.isKeyDown(OIS.KC_Z):
self.engine.camMgr.pitchRot = -self.rotate
# Pitch
if self.keyboard.isKeyDown(OIS.KC_X):
self.engine.camMgr.pitchRot = self.rotate
# Roll
if self.keyboard.isKeyDown(OIS.KC_R):
self.engine.camMgr.rollRot = self.rotate
# Roll
if self.keyboard.isKeyDown(OIS.KC_V):
self.engine.camMgr.rollRot = -self.rotate
pass
# MouseListener --------------------------------------------------------- #
def mouseMoved(self, evt):
currMouse = self.mouse.getMouseState()
self.engine.camMgr.yawRot += currMouse.X.rel
self.engine.camMgr.pitchRot += currMouse.Y.rel
return True
def mousePressed(self, evt, id):
#self.mouse.capture()
#self.ms = self.mouse.getMouseState()
#self.ms.width = self.engine.gfxMgr.viewPort.actualWidth
#self.ms.height = self.engine.gfxMgr.viewPort.actualHeight
#self.mousePos = (self.ms.X.abs/float(self.ms.width), self.ms.Y.abs/float(self.ms.height))
if id == OIS.MB_Left:
self.MB_Left_Down = True
elif id == OIS.MB_Right:
self.MB_Right_Down = True
return True
def mouseReleased(self, evt, id):
if id == OIS.MB_Left:
self.MB_Left_Down = False
elif id == OIS.MB_Right:
self.MB_Right_Down = False
return True
# JoystickListener ------------------------------------------------------ #
def buttonPressed(self, evt, button):
return True
def buttonReleased(self, evt, button):
return True
def axisMoved(self, evt, axis):
return True
def windowResized (self, rw):
temp = 0
width, height, depth, left, top= rw.getMetrics(temp,temp,temp, temp, temp) # Note the wrapped function as default needs unsigned int's
ms = self.mouse.getMouseState()
ms.width = width
ms.height = height
# Input Manager -------------------------------------------------------------- #
|
CertainlyUncertain/Kinetic-Gunner-Gunner-of-Angst
|
inputMgr.py
|
Python
|
gpl-3.0
| 8,769
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class Channel(models.Model):
_inherit = 'slide.channel'
nbr_certification = fields.Integer("Number of Certifications", compute='_compute_slides_statistics', store=True)
class Category(models.Model):
_inherit = 'slide.category'
nbr_certification = fields.Integer("Number of Certifications", compute='_count_presentations', store=True)
|
t3dev/odoo
|
addons/website_slides_survey/models/slide_channel.py
|
Python
|
gpl-3.0
| 488
|
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from __future__ import (absolute_import, division, print_function)
from Muon.GUI.Common.muon_load_data import MuonLoadData
import Muon.GUI.Common.utilities.load_utils as load_utils
class LoadRunWidgetModel(object):
"""Stores info on all currently loaded workspaces"""
def __init__(self, loaded_data_store=MuonLoadData(), context=None):
# Used with load thread
self._filenames = []
self._loaded_data_store = loaded_data_store
self._context = context
self._current_run = None
def remove_previous_data(self):
self._loaded_data_store.remove_last_added_data()
# Used with load thread
def loadData(self, filenames):
self._filenames = filenames
# Used with load thread
def execute(self):
failed_files = []
for filename in self._filenames:
try:
ws, run, filename = load_utils.load_workspace_from_filename(filename)
except Exception as error:
failed_files += [(filename, error)]
continue
self._loaded_data_store.remove_data(run=[run])
self._loaded_data_store.add_data(run=[run], workspace=ws, filename=filename, instrument=self._context.instrument)
if failed_files:
message = load_utils.exception_message_for_failed_files(failed_files)
raise ValueError(message)
# This is needed to work with thread model
def output(self):
pass
def cancel(self):
pass
def clear_loaded_data(self):
self._loaded_data_store.clear()
@property
def current_run(self):
return self._current_run
@current_run.setter
def current_run(self, run):
self._current_run = run
@property
def loaded_filenames(self):
return self._loaded_data_store.get_parameter("filename")
@property
def loaded_workspaces(self):
return self._loaded_data_store.get_parameter("workspace")
@property
def loaded_runs(self):
return self._loaded_data_store.get_parameter("run")
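# Hedged usage sketch (file name and context are placeholders), mirroring
# how the load thread drives the model: queue filenames, then execute.
#
#     model = LoadRunWidgetModel(context=some_context)
#     model.loadData(['MUSR00022725.nxs'])
#     model.execute()  # raises ValueError listing any failed files
#     runs = model.loaded_runs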
|
mganeva/mantid
|
scripts/Muon/GUI/Common/load_run_widget/load_run_model.py
|
Python
|
gpl-3.0
| 2,348
|
"""Tests for functions and classes in data/processing.py."""
import glob
import os
from absl.testing import absltest
import heatnet.data.processing as hdp
import heatnet.file_util as file_util
import heatnet.test.test_util as test_util
import numpy as np
import xarray as xr
class CDSPreprocessorTest(absltest.TestCase):
"""Tests for CDSPreprocesor."""
def test_init(self):
"""Tests CDSPreprocessor initialization."""
with file_util.mkdtemp() as tmp_dir:
data_paths = [
os.path.join(tmp_dir, 'temp_data.nc'),
os.path.join(tmp_dir, 'temp_data_2.nc')
]
proc_path = os.path.join(tmp_dir, 'temp_proc_data.nc')
variables = ['swvl1', 't2m']
for path, var in zip(data_paths, variables):
test_util.write_dummy_dataset(path, var)
pp = hdp.CDSPreprocessor(data_paths, base_out_path=proc_path, mode='ext')
self.assertEqual(pp.raw_files, data_paths)
self.assertEqual(pp.base_out_path, proc_path)
self.assertEqual(pp.lead_times, [1])
self.assertEqual(pp.past_times, [0])
pp.close()
pp = hdp.CDSPreprocessor(
data_paths[0], base_out_path=proc_path, mode='ext')
self.assertEqual(pp.raw_files, data_paths[0])
self.assertEqual(pp.base_out_path, proc_path)
self.assertEqual(pp.lead_times, [1])
self.assertEqual(pp.past_times, [0])
pp.close()
for path in data_paths:
os.remove(path)
def test_raw_to_batched_samples(self):
"""Tests default raw_to_batched_samples call."""
tol = 1.0e-4
with file_util.mkdtemp() as tmp_dir:
path = os.path.join(tmp_dir, 'temp_data.nc')
proc_path = os.path.join(tmp_dir, 'temp_proc_data.nc')
proc_path1 = os.path.join(tmp_dir, 'temp_proc_data.000000.nc')
test_util.write_dummy_dataset(path, 'swvl1')
pp = hdp.CDSPreprocessor(path, base_out_path=proc_path, mode='ext')
pp.raw_to_batched_samples()
self.assertEqual(pp.pred_varlev_time, ['swvl1/0'])
self.assertEqual(pp.tgt_varlev_time, ['swvl1/0/+1D'])
with xr.open_dataset(path) as ds, xr.open_dataset(proc_path1) as proc_ds:
self.assertTrue(
np.allclose(
ds.isel(time=0).swvl1.values,
proc_ds.isel(sample=0).sel(
pred_varlev='swvl1/0').predictors.values,
rtol=tol,
atol=tol))
os.remove(path)
for f in glob.glob(os.path.join(tmp_dir, 'temp_proc*')):
os.remove(f)
pp.close()
def test_offsets(self):
"""Tests correctness of time offsets from raw to processed data."""
tol = 1.0e-4
with file_util.mkdtemp() as tmp_dir:
data_paths = [
os.path.join(tmp_dir, 'temp_data.nc'),
os.path.join(tmp_dir, 'temp_data_3.nc'),
os.path.join(tmp_dir, 'temp_data_2.nc'),
]
variables = ['t2m', 'swvl1', 't2m_anom']
proc_path_1 = os.path.join(tmp_dir, 'temp_proc_data.000000.nc')
for path, var in zip(data_paths, variables):
test_util.write_dummy_dataset(path, var)
pp = hdp.CDSPreprocessor(
data_paths,
past_times=[1, 2],
lead_times=[1, 2],
base_out_path=os.path.join(tmp_dir, 'temp_proc_data.nc'),
mode='ext')
pp.raw_to_batched_samples()
with xr.open_dataset(proc_path_1) as proc_ds:
with xr.open_dataset(data_paths[0]) as ds:
# First possible target with lead time = 2
raw_data_slice = (ds.isel(time=4).t2m.values)
tgt_data_slice = (
proc_ds.sel(tgt_varlev='t2m/0/+1D').isel(sample=1).targets.values)
tgt2_data_slice = (
proc_ds.sel(tgt_varlev='t2m/0/+2D').isel(sample=0).targets.values)
pred0_data_slice = (
proc_ds.sel(pred_varlev='t2m/0').isel(sample=2).predictors.values)
pred1_data_slice = (
proc_ds.sel(pred_varlev='t2m/0/-1D').isel(
sample=3).predictors.values)
pred2_data_slice = (
proc_ds.sel(pred_varlev='t2m/0/-2D').isel(
sample=4).predictors.values)
self.assertTrue(
np.allclose(raw_data_slice, tgt_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, tgt2_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred0_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred1_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred2_data_slice, rtol=tol, atol=tol))
self.assertEqual(ds.time.values[2], proc_ds.sample.values[0])
with xr.open_dataset(data_paths[2]) as ds:
# First possible target with lead time = 2
raw_data_slice = (ds.isel(time=4).t2m_anom.values)
tgt_data_slice = (
proc_ds.sel(tgt_varlev='t2m_anom/0/+1D').isel(
sample=1).targets.values)
tgt2_data_slice = (
proc_ds.sel(tgt_varlev='t2m_anom/0/+2D').isel(
sample=0).targets.values)
pred0_data_slice = (
proc_ds.sel(pred_varlev='t2m_anom/0').isel(
sample=2).predictors.values)
pred1_data_slice = (
proc_ds.sel(pred_varlev='t2m_anom/0/-1D').isel(
sample=3).predictors.values)
pred2_data_slice = (
proc_ds.sel(pred_varlev='t2m_anom/0/-2D').isel(
sample=4).predictors.values)
self.assertTrue(
np.allclose(raw_data_slice, tgt_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, tgt2_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred0_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred1_data_slice, rtol=tol, atol=tol))
self.assertTrue(
np.allclose(raw_data_slice, pred2_data_slice, rtol=tol, atol=tol))
pp.close()
for path in data_paths:
os.remove(path)
for f in glob.glob(os.path.join(tmp_dir, 'temp_proc*')):
os.remove(f)
def test_mean_std_recovery(self):
"""Tests recovery of dimensional data from processed normalized data."""
tol = 1.0e-4
with file_util.mkdtemp() as tmp_dir:
data_paths = [
os.path.join(tmp_dir, 'temp_data.nc'),
os.path.join(tmp_dir, 'temp_data_3.nc'),
os.path.join(tmp_dir, 'temp_data_2.nc'),
]
variables = ['t2m', 'swvl1', 't2m_anom']
proc_path_1 = os.path.join(tmp_dir, 'temp_proc_data.000000.nc')
for path, var in zip(data_paths, variables):
test_util.write_dummy_dataset(path, var)
pp = hdp.CDSPreprocessor(
data_paths,
base_out_path=os.path.join(tmp_dir, 'temp_proc_data.nc'),
past_times=[1, 2],
lead_times=[1, 2],
mode='ext')
pp.raw_to_batched_samples(scale_variables=True)
with xr.open_dataset(proc_path_1) as proc_ds:
with xr.open_dataset(os.path.join(
tmp_dir, 'temp_proc_data.scales.nc')) as scale_ds:
with xr.open_dataset(data_paths[1]) as ds:
raw_values = ds.isel(time=4).swvl1.values
proc_values = proc_ds.isel(sample=2).sel(
pred_varlev='swvl1/0').predictors.values
proc_scaled_values = np.add(
np.multiply(
proc_values,
scale_ds.sel(pred_varlev='swvl1/0').pred_std.values),
scale_ds.sel(pred_varlev='swvl1/0').pred_mean.values)
self.assertTrue(
np.allclose(raw_values, proc_scaled_values, rtol=tol, atol=tol))
proc_values = proc_ds.isel(sample=4).sel(
pred_varlev='swvl1/0/-2D').predictors.values
proc_scaled_values = np.add(
np.multiply(
proc_values,
scale_ds.sel(pred_varlev='swvl1/0').pred_std.values),
scale_ds.sel(pred_varlev='swvl1/0').pred_mean.values)
self.assertTrue(
np.allclose(raw_values, proc_scaled_values, rtol=tol, atol=tol))
with xr.open_dataset(data_paths[2]) as ds:
raw_values = ds.isel(time=4).t2m_anom.values
proc_values = proc_ds.isel(sample=2).sel(
pred_varlev='t2m_anom/0').predictors.values
proc_scaled_values = np.add(
np.multiply(
proc_values,
scale_ds.sel(pred_varlev='t2m_anom/0').pred_std.values),
scale_ds.sel(pred_varlev='t2m_anom/0').pred_mean.values)
self.assertTrue(
np.allclose(raw_values, proc_scaled_values, rtol=tol, atol=tol))
proc_values = proc_ds.isel(sample=3).sel(
pred_varlev='t2m_anom/0/-1D').predictors.values
proc_scaled_values = np.add(
np.multiply(
proc_values,
scale_ds.sel(pred_varlev='t2m_anom/0').pred_std.values),
scale_ds.sel(pred_varlev='t2m_anom/0').pred_mean.values)
self.assertTrue(
np.allclose(raw_values, proc_scaled_values, rtol=tol, atol=tol))
pp.close()
for path in data_paths:
os.remove(path)
for f in glob.glob(os.path.join(tmp_dir, 'temp_proc*')):
os.remove(f)
if __name__ == '__main__':
absltest.main()
|
google-research/heatnet
|
test/test_processing.py
|
Python
|
gpl-3.0
| 9,568
|
from molpher.algorithms.functions import find_path
from molpher.core import ExplorationTree as ETree
class BasicPathfinder:
"""
:param settings: settings to use in the search
:type settings: `Settings`
A very basic pathfinder class that can be used to run exploration with
any combination of operations.
"""
class MaxItersReachedException(Exception):
def __init__(self, tree):
super(BasicPathfinder.MaxItersReachedException, self).__init__(
"Maximum number of iterations reached while searching "
"for a path\n\t source: {0}\n\t target: {1}".format(tree.source, tree.target))
def __init__(self, settings, operations):
self.settings = settings
"""a settings class (should be a subclass of `Settings`)"""
self.tree = ETree.create(source=self.settings.source, target=self.settings.target)
""":class:`~molpher.core.ExplorationTree.ExplorationTree` used in the search"""
if self.settings.tree_params:
self.tree.params = self.settings.tree_params
self.tree.thread_count = self.settings.max_threads
self._iteration = operations
self.path = None
"""a list of SMILES strings if a path was found, `None` otherwise"""
def __call__(self):
"""
Executes the search
:return: discovered path
:rtype: `list` of `str`
"""
counter = 0
while not self.tree.path_found:
            counter += 1
if counter > self.settings.max_iters:
raise BasicPathfinder.MaxItersReachedException(self.tree)
print('Iteration {0}'.format(counter))
for oper in self._iteration:
self.tree.runOperation(oper)
self.path = find_path(self.tree, self.tree.params['target'])
print('Path found:', self.path)
return self.path
|
lich-uct/molpher-lib
|
src/python/molpher/algorithms/pathfinders.py
|
Python
|
gpl-3.0
| 1,908
|
import convis
import numpy as np
import matplotlib.pylab as plt
plt.figure()
plt.imshow(convis.numerical_filters.gauss_filter_2d(4.0,4.0))
plt.figure()
plt.plot(convis.numerical_filters.exponential_filter_1d(tau=0.01))
|
jahuth/convis
|
docs/filters-1.py
|
Python
|
gpl-3.0
| 218
|
# -*- coding: utf-8 -*-
# (c) 2015 Tuomas Airaksinen
#
# This file is part of automate-wsgi.
#
# automate-wsgi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# automate-wsgi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with automate-wsgi. If not, see <http://www.gnu.org/licenses/>.
import threading
import socket
import tornado
import tornado.wsgi
import tornado.httpserver
import tornado.ioloop
import tornado.web
import tornado.websocket
from traits.api import Instance, Int, CStr, Dict, Str
from automate.common import threaded
from automate.service import AbstractUserService
class TornadoService(AbstractUserService):
"""
    Abstract service that provides an HTTP server for WSGI applications.
"""
    #: Which IP address to listen on. Use ``0.0.0.0`` (default) to listen on all local network interfaces.
http_ipaddr = CStr("0.0.0.0")
    #: HTTP (or HTTPS if using SSL) port to listen on
http_port = Int(3000)
#: Path to ssl certificate file. If set, SSL will be used.
#:
#: .. tip::
#:
#: You may use script scripts/generate_selfsigned_certificate.sh to generate a
#: self-signed openssl certificate.
ssl_certificate = CStr
#: Path to ssl private key file
ssl_private_key = CStr
#: Number of listener threads to spawn
num_threads = Int(5)
#: Extra static dirs you want to serve. Example::
#:
#: static_dirs = {'/my_static/(.*)': '/path/to/my_static'}
static_dirs = Dict(key_trait=Str, value_trait=Str)
_server = Instance(tornado.ioloop.IOLoop)
_web_thread = Instance(threading.Thread)
@property
def is_alive(self):
return self._web_thread and self._web_thread.is_alive()
def get_wsgi_application(self):
"""
Get WSGI function. Implement this in subclasses.
"""
raise NotImplementedError
def get_websocket(self):
return None
def get_filehandler_class(self):
return tornado.web.StaticFileHandler
def get_tornado_handlers(self):
tornado_handlers = []
websocket = self.get_websocket()
if websocket:
tornado_handlers.append(('/socket', websocket))
for entrypoint, path in self.static_dirs.iteritems():
tornado_handlers.append((entrypoint, self.get_filehandler_class(), {'path': path}))
wsgi_app = self.get_wsgi_application()
if wsgi_app:
wsgi_container = tornado.wsgi.WSGIContainer(wsgi_app)
tornado_handlers.append(('.*', tornado.web.FallbackHandler, dict(fallback=wsgi_container)))
return tornado_handlers
def setup(self):
tornado_app = tornado.web.Application(self.get_tornado_handlers())
if self.ssl_certificate and self.ssl_private_key:
ssl_options = {
"certfile": self.ssl_certificate,
"keyfile": self.ssl_private_key,
}
else:
ssl_options = None
server = tornado.httpserver.HTTPServer(tornado_app, ssl_options=ssl_options)
try:
server.listen(self.http_port, self.http_ipaddr)
except socket.error as e:
self.logger.error('Could not start server: %s', e)
self._server = tornado.ioloop.IOLoop.instance()
if not self._server._running:
self._web_thread = threading.Thread(target=threaded(self._server.start),
name="%s::%s" % (self.system.name, self.__class__.__name__))
self._web_thread.start()
else:
self.logger.debug('Tornado IOLoop already running, no need to start new')
def cleanup(self):
if self.is_alive:
self._server.stop()
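# Illustrative sketch (not part of automate-wsgi): the one method a concrete
# subclass must implement is get_wsgi_application(). The tiny WSGI callable
# below is hypothetical and only returns a static response.
class HelloWSGIService(TornadoService):
    def get_wsgi_application(self):
        def application(environ, start_response):
            start_response('200 OK', [('Content-Type', 'text/plain')])
            return ['hello from automate']
        return application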
|
tuomas2/automate-wsgi
|
automate_wsgi/abstractwsgi.py
|
Python
|
gpl-3.0
| 4,163
|
from collections import namedtuple
from model.flyweight import Flyweight
from model.static.database import database
class ControlTowerResource(Flyweight):
def __init__(self,control_tower_type_id):
        # prevents reinitializing
        if "_inited" in self.__dict__:
            return
        self._inited = None
self.control_tower_type_id = control_tower_type_id
cursor = database.get_cursor(
"select * from invControlTowerResources where controlTowerTypeID={};".format(self.control_tower_type_id))
self.resources = list()
resource_tuple = namedtuple("resource_tuple",
"resource_type_id purpose quantity min_security_level faction_id ")
for row in cursor:
self.resources.append(resource_tuple(
resource_type_id=row["resourceTypeID"],
purpose=row["purpose"],
quantity=row["quantity"],
min_security_level=row["minSecurityLevel"],
faction_id=row["factionID"]))
cursor.close()
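if __name__ == "__main__":
    # Illustrative usage sketch: the control tower type ID below is
    # hypothetical, and the static database must be reachable through
    # model.static.database for this to run.
    tower = ControlTowerResource(20059)
    for res in tower.resources:
        print("%s x%s (%s)" % (res.resource_type_id, res.quantity, res.purpose))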
|
Iconik/eve-suite
|
src/model/static/inv/control_tower_resources.py
|
Python
|
gpl-3.0
| 1,085
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2016-09-01 22:26:01
# @Author : Your Name (you@example.org)
# @Link : http://example.org
# @Version : $Id$
import os
import threading
import requests
import lxml
from threading import Thread
from bs4 import BeautifulSoup
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
pic_path = 'pic/'  # directory where downloaded images are saved
URL = 'http://www.nanrenwo.net/z/tupian/hashiqitupian/'
URL1 = 'http://www.nanrenwo.net/'
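# Added guard (sketch, not in the original script): the save directory must
# exist before the Worker threads try to open files inside it.
if not os.path.exists(pic_path):
    os.makedirs(pic_path)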
class Worker(threading.Thread):
def __init__(self, url, img, filename):
super(Worker, self).__init__()
self.url = url
self.img = img
self.filename = filename
def run(self):
try:
u = self.url + self.img
r = requests.get(u, stream=True)
with open(self.filename, 'wb') as fd:
for chunk in r.iter_content(4096):
fd.write(chunk)
except Exception, e:
raise
def get_imgs(url):
t = 1
r = requests.get(url, stream=True)
soup = BeautifulSoup(r.text, 'lxml')
    myimg = [img.get('src') for img in soup.find(id='brand-waterfall').find_all('img')]  # collect all img elements under the given id
print 'myimg:', myimg
for img in myimg:
pic_name = pic_path + str(t) + '.jpg'
# img_src = img.get('src')
print 'img: ', img
# self.download_pic(URL1,img,pic_name) #request Url,img src,picture name
w = Worker(URL1, img, pic_name)
w.start()
t += 1
get_imgs(URL)
|
xycfree/py_spider
|
spider/down_pic_thread.py
|
Python
|
gpl-3.0
| 1,358
|
"""To install: sudo python setup.py install
"""
import os
from setuptools import setup, find_packages
def read(fname):
"""Utility function to read the README file."""
return open(os.path.join(os.path.dirname(__file__), fname)).read()
VERSION = __import__('lintswitch').__version__
setup(
name='lintswitch',
version=VERSION,
author='Graham King',
author_email='graham@gkgk.org',
description='Lint your Python in real-time',
long_description=read('README.md'),
packages=find_packages(),
package_data={'lintswitch': ['index.html']},
entry_points={
'console_scripts': ['lintswitch=lintswitch.main:main']
},
url='https://github.com/grahamking/lintswitch',
install_requires=['setuptools'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Quality Assurance'
]
)
|
grahamking/lintswitch
|
setup.py
|
Python
|
gpl-3.0
| 1,119
|
import graph
import dot
from core import *
import dataflow
def make_inst(g, addr, dest, op, *args):
def make_arg(a):
if a is None:
return None
if isinstance(a, int):
return VALUE(a)
if isinstance(a, str):
return REG(a)
return a
b = BBlock(addr)
args = [make_arg(a) for a in args]
b.add(Inst(make_arg(dest), op, args, addr))
g.add_node(addr, val=b)
def test_nielson_2_1_4():
g = graph.Graph()
make_inst(g, 1, "x", "=", 2)
make_inst(g, 2, "y", "=", 4)
make_inst(g, 3, "x", "=", 1)
make_inst(g, 4, None, "if", COND(EXPR(">", REG("x"), REG("y"))))
make_inst(g, 5, "z", "=", REG("y"))
make_inst(g, 6, "z", "*", REG("y"), REG("y"))
make_inst(g, 7, "x", "=", REG("z"))
g.add_edge(1, 2)
g.add_edge(2, 3)
g.add_edge(3, 4)
g.add_edge(4, 5)
g.add_edge(4, 6)
g.add_edge(5, 7)
g.add_edge(6, 7)
#dot.dot(g)
#ana = dataflow.LiveVarAnalysis(g)
#ana.init()
#g.print_nodes()
#print("===")
ana = dataflow.LiveVarAnalysis(g)
ana.solve()
#g.print_nodes()
LV_entry = {
1: set(),
2: set(),
3: {REG("y")},
4: {REG("x"), REG("y")},
5: {REG("y")},
6: {REG("y")},
7: {REG("z")},
}
LV_exit = {
1: set(),
2: {REG("y")},
3: {REG("x"), REG("y")},
4: {REG("y")},
5: {REG("z")},
6: {REG("z")},
7: set(),
}
GEN_LV = {
1: set(),
2: set(),
3: set(),
4: {REG("x"), REG("y")},
5: {REG("y")},
6: {REG("y")},
7: {REG("z")},
}
KILL_LV = {
1: {REG("x")},
2: {REG("y")},
3: {REG("x")},
4: set(),
5: {REG("z")},
6: {REG("z")},
7: {REG("x")},
}
for i, info in g.iter_sorted_nodes():
assert info["live_gen"] == GEN_LV[i]
assert info["live_kill"] == KILL_LV[i]
assert info["live_in"] == LV_entry[i], (info["live_in"], LV_entry[i])
assert info["live_out"] == LV_exit[i]
|
pfalcon/ScratchABlock
|
tests_unit/test_liveness.py
|
Python
|
gpl-3.0
| 2,105
|
from base.iterativeRecommender import IterativeRecommender
import numpy as np
from util import config
from collections import defaultdict
from math import log,exp
from scipy.sparse import *
from scipy import *
class CoFactor(IterativeRecommender):
def __init__(self, conf, trainingSet=None, testSet=None, fold='[1]'):
super(CoFactor, self).__init__(conf, trainingSet, testSet, fold)
def readConfiguration(self):
super(CoFactor, self).readConfiguration()
extraSettings = config.OptionConf(self.config['CoFactor'])
self.negCount = int(extraSettings['-k']) #the number of negative samples
if self.negCount < 1:
self.negCount = 1
self.regR = float(extraSettings['-gamma'])
self.filter = int(extraSettings['-filter'])
def printAlgorConfig(self):
super(CoFactor, self).printAlgorConfig()
print('Specified Arguments of', self.config['model.name'] + ':')
print('k: %d' % self.negCount)
print('regR: %.5f' %self.regR)
print('filter: %d' %self.filter)
print('=' * 80)
def initModel(self):
super(CoFactor, self).initModel()
#constructing SPPMI matrix
self.SPPMI = defaultdict(dict)
print('Constructing SPPMI matrix...')
        # for larger datasets with many items, this step can be time-consuming
occurrence = defaultdict(dict)
i=0
for item1 in self.data.item:
i += 1
if i % 100 == 0:
print(str(i) + '/' + str(self.num_items))
uList1, rList1 = self.data.itemRated(item1)
if len(uList1) < self.filter:
continue
for item2 in self.data.item:
if item1 == item2:
continue
if item2 not in occurrence[item1]:
uList2, rList2 = self.data.itemRated(item2)
if len(uList2) < self.filter:
continue
count = len(set(uList1).intersection(set(uList2)))
if count > self.filter:
occurrence[item1][item2] = count
occurrence[item2][item1] = count
maxVal = 0
frequency = {}
for item1 in occurrence:
frequency[item1] = sum(occurrence[item1].values()) * 1.0
D = sum(frequency.values()) * 1.0
# maxx = -1
for item1 in occurrence:
for item2 in occurrence[item1]:
try:
val = max([log(occurrence[item1][item2] * D / (frequency[item1] * frequency[item2])) - log(
self.negCount), 0])
except ValueError:
print(self.SPPMI[item1][item2])
print(self.SPPMI[item1][item2] * D / (frequency[item1] * frequency[item2]))
if val > 0:
if maxVal < val:
maxVal = val
self.SPPMI[item1][item2] = val
self.SPPMI[item2][item1] = self.SPPMI[item1][item2]
#normalize
for item1 in self.SPPMI:
for item2 in self.SPPMI[item1]:
self.SPPMI[item1][item2] = self.SPPMI[item1][item2]/maxVal
def buildModel(self):
self.X=self.P*10 #Theta
self.Y=self.Q*10 #Beta
self.w = np.random.rand(self.num_items) / 10 # bias value of item
self.c = np.random.rand(self.num_items) / 10 # bias value of context
self.G = np.random.rand(self.num_items, self.emb_size) / 10 # context embedding
print('training...')
epoch = 0
while epoch < self.maxEpoch:
self.loss = 0
YtY = self.Y.T.dot(self.Y)
for user in self.data.user:
# C_u = np.ones(self.data.getSize(self.recType))
H = np.ones(self.num_items)
val, pos = [],[]
P_u = np.zeros(self.num_items)
uid = self.data.user[user]
for item in self.data.trainSet_u[user]:
iid = self.data.item[item]
r_ui = float(self.data.trainSet_u[user][item])
pos.append(iid)
val.append(10 * r_ui)
H[iid] += 10 * r_ui
P_u[iid] = 1
error = (P_u[iid] - self.X[uid].dot(self.Y[iid]))
self.loss += pow(error, 2)
# sparse matrix
C_u = coo_matrix((val, (pos, pos)), shape=(self.num_items, self.num_items))
A = (YtY + np.dot(self.Y.T, C_u.dot(self.Y)) + self.regU * np.eye(self.emb_size))
self.X[uid] = np.dot(np.linalg.inv(A), (self.Y.T * H).dot(P_u))
XtX = self.X.T.dot(self.X)
for item in self.data.item:
P_i = np.zeros(self.num_users)
iid = self.data.item[item]
H = np.ones(self.num_users)
val,pos = [],[]
for user in self.data.trainSet_i[item]:
uid = self.data.user[user]
r_ui = float(self.data.trainSet_i[item][user])
pos.append(uid)
val.append(10 * r_ui)
H[uid] += 10 * r_ui
P_i[uid] = 1
matrix_g1 = np.zeros((self.emb_size, self.emb_size))
matrix_g2 = np.zeros((self.emb_size, self.emb_size))
vector_m1 = np.zeros(self.emb_size)
vector_m2 = np.zeros(self.emb_size)
update_w = 0
update_c = 0
if len(self.SPPMI[item])>0:
for context in self.SPPMI[item]:
cid = self.data.item[context]
gamma = self.G[cid]
beta = self.Y[cid]
matrix_g1 += gamma.reshape(self.emb_size, 1).dot(gamma.reshape(1, self.emb_size))
vector_m1 += (self.SPPMI[item][context]-self.w[iid]-
self.c[cid])*gamma
matrix_g2 += beta.reshape(self.emb_size, 1).dot(beta.reshape(1, self.emb_size))
vector_m2 += (self.SPPMI[item][context] - self.w[cid]
- self.c[iid]) * beta
update_w += self.SPPMI[item][context]-self.Y[iid].dot(gamma)-self.c[cid]
update_c += self.SPPMI[item][context]-beta.dot(self.G[iid])-self.w[cid]
C_i = coo_matrix((val, (pos, pos)), shape=(self.num_users, self.num_users))
A = (XtX + np.dot(self.X.T, C_i.dot(self.X)) + self.regU * np.eye(self.emb_size) + matrix_g1)
self.Y[iid] = np.dot(np.linalg.inv(A), (self.X.T * H).dot(P_i)+vector_m1)
if len(self.SPPMI[item]) > 0:
self.G[iid] = np.dot(np.linalg.inv(matrix_g2 + self.regR * np.eye(self.emb_size)), vector_m2)
self.w[iid] = update_w/len(self.SPPMI[item])
self.c[iid] = update_c/len(self.SPPMI[item])
epoch += 1
print('epoch:', epoch, 'loss:', self.loss)
def predictForRanking(self,u):
'invoked to rank all the items for the user'
if self.data.containsUser(u):
u = self.data.getUserId(u)
return self.Y.dot(self.X[u])
else:
return [self.data.globalMean] * self.num_items
|
recq-cse/RecQ
|
model/ranking/CoFactor.py
|
Python
|
gpl-3.0
| 7,469
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Janice Cheng
import logging
from s13.Day5.conf import setting
logger = logging.getLogger('ATM-TRANSACTION-LOG')
logger.setLevel(logging.INFO) #configure a global logging level
console_handler = logging.StreamHandler() #print the log on the console
file_handler = logging.FileHandler("{}/log/access.log".format(setting.APP_DIR))
formatter = logging.Formatter('%(asctime)s - %(name)s - %(filename)s - %(levelname)s - %(message)s')
console_handler.setFormatter(formatter)
file_handler.setFormatter(formatter)
logger.addHandler(console_handler)
logger.addHandler(file_handler)
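# Illustrative usage sketch (not part of the original module): modules that
# import this logger inherit both handlers configured above.
if __name__ == '__main__':
    logger.info('ATM transaction log initialized')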
|
jcchoiling/learningPython
|
s13/Day05/modules/core/atm_log.py
|
Python
|
gpl-3.0
| 639
|
import numpy as np
from PyQt5.QtGui import QPainterPath, QPen
from PyQt5.QtWidgets import QGraphicsPathItem
from urh import settings
from urh.cythonext import path_creator
from urh.ui.painting.GridScene import GridScene
from urh.ui.painting.SceneManager import SceneManager
class FFTSceneManager(SceneManager):
def __init__(self, parent, graphic_view=None):
self.peak = []
super().__init__(parent)
self.scene = GridScene(parent=graphic_view)
self.scene.setBackgroundBrush(settings.BGCOLOR)
self.peak_item = self.scene.addPath(QPainterPath(), QPen(settings.PEAK_COLOR, 0)) # type: QGraphicsPathItem
def show_scene_section(self, x1: float, x2: float, subpath_ranges=None, colors=None):
start = int(x1) if x1 > 0 else 0
end = int(x2) if x2 < self.num_samples else self.num_samples
paths = path_creator.create_path(np.log10(self.plot_data), start, end)
self.set_path(paths, colors=None)
try:
if len(self.peak) > 0:
peak_path = path_creator.create_path(np.log10(self.peak), start, end)[0]
self.peak_item.setPath(peak_path)
except RuntimeWarning:
pass
def init_scene(self, draw_grid=True):
self.scene.draw_grid = draw_grid
self.peak = self.plot_data if len(self.peak) < self.num_samples else np.maximum(self.peak, self.plot_data)
self.scene.setSceneRect(0, -5, self.num_samples, 10)
def clear_path(self):
for item in self.scene.items():
if isinstance(item, QGraphicsPathItem) and item != self.peak_item:
self.scene.removeItem(item)
item.setParentItem(None)
del item
def clear_peak(self):
self.peak = []
if self.peak_item:
self.peak_item.setPath(QPainterPath())
def eliminate(self):
super().eliminate()
self.peak = None
self.peak_item = None
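# Usage sketch (comments only, since a live example would need a running Qt
# application): callers typically assign plot_data, call init_scene() to
# refresh the peak-hold trace, then show_scene_section(x1, x2) to draw the
# spectrum section on a log10 scale.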
|
jopohl/urh
|
src/urh/ui/painting/FFTSceneManager.py
|
Python
|
gpl-3.0
| 1,960
|
# This file is part of OpenStack Ansible driver for Kostyor.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# I feel incredibly wrong about these lines but it seems like the only
# working solution right now. Celery uses its own fork of native
# multiprocessing module, which is significantly diverged from the
# version of Python 2.7. So when it come to start 'multiprocessing.Process'
# instance from within Celery task, it simply fails due to inability to
# retrieve some properties (e.g. _authkey) from '_current_process' since
# they simply don't exist in 'billiard.Process'.
#
# This is essential part of this driver, since Ansible internally use
# multiprocessing.Process to do parallel execution.
#
# https://github.com/celery/billiard/pull/202
import multiprocessing
import billiard
multiprocessing.Process = billiard.Process # noqa
import os
import glob
from ansible.cli.playbook import PlaybookCLI
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.inventory import Inventory
from ansible.parsing.dataloader import DataLoader
from ansible.vars import VariableManager
from ansible.utils.vars import combine_vars
from kostyor.rpc.app import app
from . import base
class _setcwd(object):
"""Context manager for temporally changing current working directory.
Some of OpenStack Ansible playbooks require to be called from some
directory. Since Ansible doesn't support passing custom working
directory, we need to change current working directory before calling
this sort of playbooks.
Usage example:
with _setcwd('/opt/openstack-ansible/playbooks'):
_run_playbook(...)
:param cwd: current working directory to be set
:type cwd: str
"""
def __init__(self, cwd):
self._newcwd = cwd
self._oldcwd = None
def __enter__(self):
self._oldcwd = os.getcwd()
if self._newcwd:
os.chdir(self._newcwd)
def __exit__(self, *args):
if self._newcwd:
os.chdir(self._oldcwd)
def _get_user_settings(loader):
"""Read user settings from /etc/openstack_deploy.
OpenStack Ansible user settings are stored in /etc/openstack_deploy
directory. We need to read, combine and pass them to variable
    manager before executing any playbook. This is what happens under
    the hood when one calls the 'openstack-ansible' wrapper on the command line.
:param loader: an instance of ansible data loader to be used
:type loader: :class:`ansible.parsing.dataloader.DataLoader`
"""
settings = {}
# /etc/openstack_deploy is default and, by all means, hardcoded path
# to deployment settings. The dir contains user settings, where each
# file starts with 'user_' prefix and ends with '.yml' suffix.
pattern = os.path.join('/etc', 'openstack_deploy', 'user_*.yml')
for filename in glob.glob(pattern):
# Ansible may use different strategies of combining variables, so
# we need to use its function instead of '.update(...)' method.
settings = combine_vars(settings, loader.load_from_file(filename))
return settings
def _run_playbook_impl(playbook, hosts_fn=None, cwd=None, ignore_errors=False):
# Unfortunately, there's no good way to get the options instance
# with proper defaults since it's generated by argparse inside
    # PlaybookCLI. Because the options can't be empty and must contain
    # proper values, we have no choice but to extract them from the
    # PlaybookCLI instance.
playbook_cli = PlaybookCLI(['to-be-stripped', playbook])
playbook_cli.parse()
options = playbook_cli.options
# Get others required options.
loader = DataLoader()
variable_manager = VariableManager()
inventory = Inventory(loader, variable_manager)
variable_manager.set_inventory(inventory)
variable_manager.extra_vars = _get_user_settings(loader)
# Limit playbook execution to hosts returned by 'hosts_fn'.
if hosts_fn is not None:
inventory.subset([
host.get_vars()['inventory_hostname']
for host in hosts_fn(inventory)
])
# Finally, we can create a playbook executor and run the playbook.
executor = PlaybookExecutor(
playbooks=[playbook],
inventory=inventory,
variable_manager=variable_manager,
loader=loader,
options=options,
passwords={}
)
# Some playbooks may rely on current working directory, so better allow
# to change it before execution.
with _setcwd(cwd):
exitcode = executor.run()
# Celery treats exceptions from task as way to mark it failed. So let's
# throw one to do so in case return code is not zero.
if all([not ignore_errors, exitcode is not None, exitcode != 0]):
raise Exception('Playbook "%s" has been finished with errors. '
'Exit code is "%d".' % (playbook, exitcode))
return exitcode
@app.task
def _run_playbook(playbook, cwd=None, ignore_errors=False):
return _run_playbook_impl(
playbook,
cwd=cwd,
ignore_errors=ignore_errors
)
@app.task
def _run_playbook_for(playbook, hosts, service, cwd=None, ignore_errors=False):
return _run_playbook_impl(
playbook,
lambda inv: base.get_component_hosts_on_nodes(inv, service, hosts),
cwd=cwd,
ignore_errors=ignore_errors
)
class Driver(base.Driver):
_run_playbook = _run_playbook
_run_playbook_for = _run_playbook_for
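# Illustrative dispatch sketch (not part of the driver): because the helpers
# are Celery tasks, they can be queued asynchronously. The playbook path and
# working directory below are hypothetical.
#
# _run_playbook.delay(
#     '/opt/openstack-ansible/playbooks/setup-hosts.yml',
#     cwd='/opt/openstack-ansible/playbooks')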
|
ikalnytskyi/kostyor-openstack-ansible
|
kostyor_openstack_ansible/upgrades/ref.py
|
Python
|
gpl-3.0
| 6,099
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# **************************************************************************
# Copyright © 2016 jianglin
# File Name: forums.py
# Author: jianglin
# Email: xiyang0807@gmail.com
# Created: 2016-12-17 13:12:23 (CST)
# Last Update: Friday 2017-11-10 11:04:16 (CST)
# By:
# Description:
# **************************************************************************
from .views import BaseView
from forums.extension import db
from forums.api.forums.models import Board
from forums.api.tag.models import Tags
class BoardView(BaseView):
form_excluded_columns = ('topics')
class TagView(BaseView):
column_searchable_list = ['name']
form_excluded_columns = ('topics', 'followers')
def init_admin(admin):
admin.add_view(
BoardView(
Board,
db.session,
name='管理版块',
endpoint='admin_board',
category='管理社区'))
admin.add_view(
TagView(
Tags,
db.session,
name='管理节点',
endpoint='admin_tag',
category='管理社区'))
|
honmaple/maple-bbs
|
forums/admin/forums.py
|
Python
|
gpl-3.0
| 1,143
|
# -*- coding: utf-8 -*-
# __init__.py
# Copyright (C) 2015 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
A U1DB backend for encrypting data before sending to server and decrypting
after receiving.
"""
import os
from twisted.web.client import Agent
from twisted.internet import reactor
from leap.common.http import getPolicyForHTTPS
from leap.soledad.common.log import getLogger
from leap.soledad.client.http_target.send import HTTPDocSender
from leap.soledad.client.http_target.api import SyncTargetAPI
from leap.soledad.client.http_target.fetch import HTTPDocFetcher
from leap.soledad.client import crypto as old_crypto
logger = getLogger(__name__)
# we may want to collect statistics from the sync process
DO_STATS = False
if os.environ.get('SOLEDAD_STATS'):
DO_STATS = True
class SoledadHTTPSyncTarget(SyncTargetAPI, HTTPDocSender, HTTPDocFetcher):
"""
A SyncTarget that encrypts data before sending and decrypts data after
receiving.
Normally encryption will have been written to the sync database upon
document modification. The sync database is also used to write temporarily
the parsed documents that the remote send us, before being decrypted and
written to the main database.
"""
def __init__(self, url, source_replica_uid, creds, crypto, cert_file):
"""
Initialize the sync target.
:param url: The server sync url.
:type url: str
:param source_replica_uid: The source replica uid which we use when
deferring decryption.
:type source_replica_uid: str
:param creds: A dictionary containing the uuid and token.
:type creds: creds
:param crypto: An instance of SoledadCrypto so we can encrypt/decrypt
document contents when syncing.
:type crypto: soledad._crypto.SoledadCrypto
:param cert_file: Path to the certificate of the ca used to validate
the SSL certificate used by the remote soledad
server.
:type cert_file: str
"""
if url.endswith("/"):
url = url[:-1]
self._url = str(url) + "/sync-from/" + str(source_replica_uid)
self.source_replica_uid = source_replica_uid
self._auth_header = None
self._uuid = None
self.set_creds(creds)
self._crypto = crypto
# TODO: DEPRECATED CRYPTO
self._deprecated_crypto = old_crypto.SoledadCrypto(crypto.secret)
self._insert_doc_cb = None
# Twisted default Agent with our own ssl context factory
factory = getPolicyForHTTPS(cert_file)
self._http = Agent(reactor, factory)
if DO_STATS:
self.sync_exchange_phase = [0]
|
leapcode/soledad
|
src/leap/soledad/client/http_target/__init__.py
|
Python
|
gpl-3.0
| 3,365
|
(S'7f2210613c44962221805a1b28aa76d6'
p1
(ihappydoclib.parseinfo.moduleinfo
ModuleInfo
p2
(dp3
S'_namespaces'
p4
((dp5
S'TkDrawer'
p6
(ihappydoclib.parseinfo.classinfo
ClassInfo
p7
(dp8
g4
((dp9
(dp10
tp11
sS'_filename'
p12
S'../python/frowns/Depict/TkMoleculeDrawer.py'
p13
sS'_docstring'
p14
S''
sS'_class_member_info'
p15
(lp16
sS'_name'
p17
g6
sS'_parent'
p18
g2
sS'_comment_info'
p19
(dp20
sS'_base_class_info'
p21
(lp22
S'DrawMolHarness'
p23
aS'TkMixin'
p24
asS'_configuration_values'
p25
(dp26
sS'_class_info'
p27
g9
sS'_function_info'
p28
g10
sS'_comments'
p29
S''
sbsS'TkMixin'
p30
(ihappydoclib.parseinfo.classinfo
ClassInfo
p31
(dp32
g4
((dp33
(dp34
S'pack_forget'
p35
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p36
(dp37
g4
((dp38
(dp39
tp40
sS'_exception_info'
p41
(dp42
sS'_parameter_names'
p43
(S'self'
p44
tp45
sS'_parameter_info'
p46
(dp47
g44
(NNNtp48
ssg12
g13
sg14
S''
sg17
g35
sg18
g31
sg19
g20
sg25
(dp49
sg27
g38
sg28
g39
sg29
S''
sbsS'_resize'
p50
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p51
(dp52
g4
((dp53
(dp54
tp55
sg41
(dp56
sg43
(S'self'
p57
S'event'
p58
tp59
sg46
(dp60
g57
(NNNtp61
sg58
(NNNtp62
ssg12
g13
sg14
S'(event) -> resive the drawing to event.height, event.width'
p63
sg17
g50
sg18
g31
sg19
g20
sg25
(dp64
sg27
g53
sg28
g54
sg29
S''
sbsS'_clear'
p65
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p66
(dp67
g4
((dp68
(dp69
tp70
sg41
(dp71
sg43
(S'self'
p72
tp73
sg46
(dp74
g72
(NNNtp75
ssg12
g13
sg14
S''
sg17
g65
sg18
g31
sg19
g20
sg25
(dp76
sg27
g68
sg28
g69
sg29
S''
sbsS'_init'
p77
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p78
(dp79
g4
((dp80
(dp81
tp82
sg41
(dp83
sg43
(S'self'
p84
S'master'
p85
S'height'
p86
S'width'
p87
tp88
sg46
(dp89
g87
(NNNtp90
sg84
(NNNtp91
sg85
(NNNtp92
sg86
(NNNtp93
ssg12
g13
sg14
S''
sg17
g77
sg18
g31
sg19
g20
sg25
(dp94
sg27
g80
sg28
g81
sg29
S''
sbsS'postscript'
p95
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p96
(dp97
g4
((dp98
(dp99
tp100
sg41
(dp101
sg43
(S'self'
p102
S'*a'
p103
S'*kw'
p104
tp105
sg46
(dp106
g102
(NNNtp107
sg104
(NNNtp108
sg103
(NNNtp109
ssg12
g13
sg14
S'return a postscript image of the current molecule arguments\n are sent to the Tkinter canvas postscript method'
p110
sg17
g95
sg18
g31
sg19
g20
sg25
(dp111
sg27
g98
sg28
g99
sg29
S''
sbsS'_drawLine'
p112
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p113
(dp114
g4
((dp115
(dp116
tp117
sg41
(dp118
sg43
(S'self'
p119
S'x1'
p120
S'y1'
p121
S'x2'
p122
S'y2'
p123
S'color'
p124
tp125
sg46
(dp126
g123
(NNNtp127
sg124
(NNNtp128
sg119
(NNNtp129
sg122
(NNNtp130
sg121
(NNNtp131
sg120
(NNNtp132
ssg12
g13
sg14
S''
sg17
g112
sg18
g31
sg19
g20
sg25
(dp133
sg27
g115
sg28
g116
sg29
S''
sbsS'grid'
p134
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p135
(dp136
g4
((dp137
(dp138
tp139
sg41
(dp140
sg43
(S'self'
p141
S'*a'
p142
S'*kw'
p143
tp144
sg46
(dp145
g141
(NNNtp146
sg143
(NNNtp147
sg142
(NNNtp148
ssg12
g13
sg14
S''
sg17
g134
sg18
g31
sg19
g20
sg25
(dp149
sg27
g137
sg28
g138
sg29
S''
sbsS'_drawOval'
p150
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p151
(dp152
g4
((dp153
(dp154
tp155
sg41
(dp156
sg43
(S'self'
p157
S'x'
S'y'
S'xh'
p158
S'yh'
p159
tp160
sg46
(dp161
S'y'
(NNNtp162
sS'x'
(NNNtp163
sg157
(NNNtp164
sg158
(NNNtp165
sg159
(NNNtp166
ssg12
g13
sg14
S''
sg17
g150
sg18
g31
sg19
g20
sg25
(dp167
sg27
g153
sg28
g154
sg29
S''
sbsS'_drawText'
p168
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p169
(dp170
g4
((dp171
(dp172
tp173
sg41
(dp174
sg43
(S'self'
p175
S'text'
p176
S'font'
p177
S'fontsize'
p178
S'x'
S'y'
S'color'
p179
S'bg'
p180
tp181
sg46
(dp182
g180
(I1
S'"white"'
Ntp183
sg179
(NNNtp184
sg176
(NNNtp185
sg175
(NNNtp186
sg178
(NNNtp187
sS'y'
(NNNtp188
sS'x'
(NNNtp189
sg177
(NNNtp190
ssg12
g13
sg14
S''
sg17
g168
sg18
g31
sg19
g20
sg25
(dp191
sg27
g171
sg28
g172
sg29
S''
sbsS'pack'
p192
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p193
(dp194
g4
((dp195
(dp196
tp197
sg41
(dp198
sg43
(S'self'
p199
S'*a'
p200
S'*kw'
p201
tp202
sg46
(dp203
g199
(NNNtp204
sg201
(NNNtp205
sg200
(NNNtp206
ssg12
g13
sg14
S''
sg17
g192
sg18
g31
sg19
g20
sg25
(dp207
sg27
g195
sg28
g196
sg29
S''
sbstp208
sg12
g13
sg14
S''
sg15
(lp209
sg17
g30
sg18
g2
sg19
g20
sg21
(lp210
sg25
(dp211
sg27
g33
sg28
g34
sg29
S''
sbs(dp212
tp213
sS'_import_info'
p214
(ihappydoclib.parseinfo.imports
ImportInfo
p215
(dp216
S'_named_imports'
p217
(dp218
S'Tkinter'
p219
(lp220
S'*'
asS'MoleculeDrawer'
p221
(lp222
S'DrawMolHarness'
p223
assS'_straight_imports'
p224
(lp225
sbsg12
g13
sg14
S''
sg17
S'TkMoleculeDrawer'
p226
sg18
Nsg19
g20
sg25
(dp227
S'include_comments'
p228
I1
sS'cacheFilePrefix'
p229
S'.happydoc.'
p230
sS'useCache'
p231
I1
sS'docStringFormat'
p232
S'StructuredText'
p233
ssg27
g5
sg28
g212
sg29
S''
sbt.
|
tuffery/Frog2
|
frowns/Depict/.happydoc.TkMoleculeDrawer.py
|
Python
|
gpl-3.0
| 4,720
|
##
# Copyright 2015-2021 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for installing Cray toolchains, implemented as an easyblock
@author: Kenneth Hoste (Ghent University)
@author: Guilherme Peretti Pezzi (CSCS)
@author: Petar Forai (IMP/IMBA)
"""
from easybuild.easyblocks.generic.bundle import Bundle
from easybuild.tools.build_log import EasyBuildError
KNOWN_PRGENVS = ['PrgEnv-cray', 'PrgEnv-gnu', 'PrgEnv-intel', 'PrgEnv-nvidia', 'PrgEnv-pgi']
class CrayToolchain(Bundle):
"""
Compiler toolchain: generate module file only, nothing to build/install
"""
def prepare_step(self, *args, **kwargs):
"""Prepare build environment (skip loaded of dependencies)."""
kwargs['load_tc_deps_modules'] = False
super(CrayToolchain, self).prepare_step(*args, **kwargs)
def make_module_dep(self):
"""
Generate load/swap statements for dependencies in the module file
"""
prgenv_mod = None
# collect 'swap' statement for dependencies (except PrgEnv)
swap_deps = []
for dep in self.toolchain.dependencies:
mod_name = dep['full_mod_name']
# determine versionless module name, e.g. 'fftw/3.3.4.1' => 'fftw'
dep_name = '/'.join(mod_name.split('/')[:-1])
if mod_name.startswith('PrgEnv'):
prgenv_mod = mod_name
else:
swap_deps.append(self.module_generator.swap_module(dep_name, mod_name).lstrip())
self.log.debug("Swap statements for dependencies of %s: %s", self.full_mod_name, swap_deps)
if prgenv_mod is None:
raise EasyBuildError("Could not find a PrgEnv-* module listed as dependency: %s",
self.toolchain.dependencies)
# unload statements for other PrgEnv modules
prgenv_unloads = ['']
for prgenv in [prgenv for prgenv in KNOWN_PRGENVS if not prgenv_mod.startswith(prgenv)]:
is_loaded_guard = self.module_generator.is_loaded(prgenv)
unload_stmt = self.module_generator.unload_module(prgenv).strip()
prgenv_unloads.append(self.module_generator.conditional_statement(is_loaded_guard, unload_stmt))
# load statement for selected PrgEnv module (only when not loaded yet)
prgenv_load = self.module_generator.load_module(prgenv_mod, recursive_unload=False)
txt = '\n'.join(prgenv_unloads + [prgenv_load] + swap_deps)
return txt
|
eth-cscs/production
|
easybuild/easyblocks/craytoolchain.py
|
Python
|
gpl-3.0
| 3,472
|
# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# FIXME: copied mostly from old code, needs py3 improvements
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import textwrap
import os
import random
import subprocess
import sys
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.utils.color import stringc
class Display:
def __init__(self, verbosity=0):
self.verbosity = verbosity
# list of all deprecation messages to prevent duplicate display
self._deprecations = {}
self._warns = {}
self._errors = {}
self.cowsay = None
self.noncow = os.getenv("ANSIBLE_COW_SELECTION",None)
self.set_cowsay_info()
def set_cowsay_info(self):
if not C.ANSIBLE_NOCOWS:
if os.path.exists("/usr/bin/cowsay"):
self.cowsay = "/usr/bin/cowsay"
elif os.path.exists("/usr/games/cowsay"):
self.cowsay = "/usr/games/cowsay"
elif os.path.exists("/usr/local/bin/cowsay"):
# BSD path for cowsay
self.cowsay = "/usr/local/bin/cowsay"
elif os.path.exists("/opt/local/bin/cowsay"):
# MacPorts path for cowsay
self.cowsay = "/opt/local/bin/cowsay"
if self.cowsay and self.noncow == 'random':
cmd = subprocess.Popen([self.cowsay, "-l"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = cmd.communicate()
cows = out.split()
cows.append(False)
self.noncow = random.choice(cows)
def display(self, msg, color=None, stderr=False, screen_only=False, log_only=False):
msg2 = msg
if color:
msg2 = stringc(msg, color)
if not log_only:
if not stderr:
try:
print(msg2)
except UnicodeEncodeError:
print(msg2.encode('utf-8'))
else:
try:
print(msg2, file=sys.stderr)
except UnicodeEncodeError:
print(msg2.encode('utf-8'), file=sys.stderr)
if C.DEFAULT_LOG_PATH != '':
while msg.startswith("\n"):
msg = msg.replace("\n","")
# FIXME: logger stuff needs to be implemented
#if not screen_only:
# if color == 'red':
# logger.error(msg)
# else:
# logger.info(msg)
def vv(self, msg, host=None):
return self.verbose(msg, host=host, caplevel=1)
def vvv(self, msg, host=None):
return self.verbose(msg, host=host, caplevel=2)
def vvvv(self, msg, host=None):
return self.verbose(msg, host=host, caplevel=3)
def vvvvv(self, msg, host=None):
return self.verbose(msg, host=host, caplevel=4)
def vvvvvv(self, msg, host=None):
return self.verbose(msg, host=host, caplevel=5)
def verbose(self, msg, host=None, caplevel=2):
# FIXME: this needs to be implemented
#msg = utils.sanitize_output(msg)
if self.verbosity > caplevel:
if host is None:
self.display(msg, color='blue')
else:
self.display("<%s> %s" % (host, msg), color='blue', screen_only=True)
def deprecated(self, msg, version, removed=False):
''' used to print out a deprecation message.'''
if not removed and not C.DEPRECATION_WARNINGS:
return
if not removed:
if version:
new_msg = "\n[DEPRECATION WARNING]: %s. This feature will be removed in version %s." % (msg, version)
else:
new_msg = "\n[DEPRECATION WARNING]: %s. This feature will be removed in a future release." % (msg)
new_msg = new_msg + " Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.\n\n"
else:
raise AnsibleError("[DEPRECATED]: %s. Please update your playbooks." % msg)
wrapped = textwrap.wrap(new_msg, 79)
new_msg = "\n".join(wrapped) + "\n"
if new_msg not in self._deprecations:
self.display(new_msg, color='purple', stderr=True)
self._deprecations[new_msg] = 1
def warning(self, msg):
new_msg = "\n[WARNING]: %s" % msg
wrapped = textwrap.wrap(new_msg, 79)
new_msg = "\n".join(wrapped) + "\n"
if new_msg not in self._warns:
self.display(new_msg, color='bright purple', stderr=True)
self._warns[new_msg] = 1
def system_warning(self, msg):
if C.SYSTEM_WARNINGS:
self.warning(msg)
def banner(self, msg, color=None):
'''
Prints a header-looking line with stars taking up to 80 columns
of width (3 columns, minimum)
'''
if self.cowsay:
try:
self.banner_cowsay(msg)
return
except OSError:
# somebody cleverly deleted cowsay or something during the PB run. heh.
pass
msg = msg.strip()
star_len = (80 - len(msg))
if star_len < 0:
star_len = 3
stars = "*" * star_len
self.display("\n%s %s" % (msg, stars), color=color)
def banner_cowsay(self, msg, color=None):
if ": [" in msg:
msg = msg.replace("[","")
if msg.endswith("]"):
msg = msg[:-1]
runcmd = [self.cowsay,"-W", "60"]
if self.noncow:
runcmd.append('-f')
runcmd.append(self.noncow)
runcmd.append(msg)
cmd = subprocess.Popen(runcmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = cmd.communicate()
self.display("%s\n" % out, color=color)
def error(self, msg, wrap_text=True):
if wrap_text:
new_msg = "\n[ERROR]: %s" % msg
wrapped = textwrap.wrap(new_msg, 79)
new_msg = "\n".join(wrapped) + "\n"
else:
new_msg = msg
if new_msg not in self._errors:
self.display(new_msg, color='red', stderr=True)
self._errors[new_msg] = 1
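# Illustrative usage sketch (not part of Ansible): verbosity gates the vv/vvv
# helpers, while warning() and error() de-duplicate repeated messages.
if __name__ == '__main__':
    display = Display(verbosity=3)
    display.display("plain message")
    display.vv("shown because verbosity > 1")
    display.warning("shown once, even if repeated")
    display.warning("shown once, even if repeated")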
|
sean-/ansible
|
lib/ansible/utils/display.py
|
Python
|
gpl-3.0
| 6,949
|
'''
read the input data, parse to int list;
create mappings of (user,item) -> review int list
@author: roseck
@date Mar 15, 2017
'''
from __builtin__ import dict
import gzip
class DataPairMgr():
def _int_list(self,int_str):
'''utility fn for converting an int string to a list of int
'''
return [int(w) for w in int_str.split()]
def __init__(self, filename):
'''
filename: inits the UBRR data from the input file
'''
ub_map = dict()
ub_ratings = dict()
cnt = 0
#read the file
if filename.endswith('.gz'):
f = gzip.open(filename, 'r')
else:
f = open(filename, 'r')
for line in f:
vals = line.split("\t")
if len(vals) == 0:
continue
u = vals[0]
b = vals[1]
r = float(vals[2])
d = vals[3].strip()
ub_map[(u,b)] = self._int_list(d)
ub_ratings[(u,b)] = r
cnt += 1
self.user_item_map = ub_map
self.user_item_rating = ub_ratings
f.close()
print 'Data Pair Manager Initialized with ', cnt, ' reviews'
def get_int_review(self, user, item):
if (user,item) in self.user_item_map:
return self.user_item_map[(user,item)]
else:
return [0]
def get_int_review_rating(self, user, item):
if (user,item) in self.user_item_map:
return self.user_item_map[(user,item)], self.user_item_rating[(user,item)]
else:
return [0], 3.0 #average rating
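if __name__ == '__main__':
    # Illustrative usage sketch: the file name is hypothetical; input rows
    # are expected as tab-separated user, item, rating, space-separated ints.
    mgr = DataPairMgr('reviews.tsv.gz')
    review, rating = mgr.get_int_review_rating('user1', 'item1')
    print review, rating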
|
rosecatherinek/TransNets
|
src/DatasetUtils/DataPairMgr.py
|
Python
|
gpl-3.0
| 1,772
|
import os
DEBUG = True
TEMPLATE_DEBUG = True
# SETTINGS_DIR allows media paths and so to be relative to this settings file
# instead of hardcoded to c:\only\on\my\computer.
SETTINGS_DIR = os.path.dirname(os.path.realpath(__file__))
# BUILDOUT_DIR is for access to the "surrounding" buildout, for instance for
# BUILDOUT_DIR/var/static files to give staticfiles a proper place
# to place all collected static files.
BUILDOUT_DIR = os.path.abspath(os.path.join(SETTINGS_DIR, '..'))
# ENGINE: 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
# In case of geodatabase, prepend with:
# django.contrib.gis.db.backends.(postgis)
DATABASES = {
# If you want to use another database, consider putting the database
# settings in localsettings.py. Otherwise, if you change the settings in
# the current file and commit them to the repository, other developers will
# also use these settings whether they have that database or not.
# One of those other developers is Jenkins, our continuous integration
# solution. Jenkins can only run the tests of the current application when
# the specified database exists. When the tests cannot run, Jenkins sees
# that as an error.
'default': {
'NAME': 'lizard-security.db',
'ENGINE': 'django.contrib.gis.db.backends.spatialite',
'USER': '',
'PASSWORD': '',
'HOST': '', # empty string for localhost.
'PORT': '', # empty string for default.
}
}
SITE_ID = 1
INSTALLED_APPS = [
'lizard_security.testcontent',
'lizard_security',
'south',
'django_nose',
'django_extensions',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.gis',
'django.contrib.sites',
]
ROOT_URLCONF = 'lizard_security.testcontent.urls'
# ^^^^ THIS IS FOR THE TESTCONTENT.
SOUTH_TESTS_MIGRATE = False
USE_I18N = True
LANGUAGES = (
('nl', 'Nederlands'),
('en', 'English'),
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'lizard_security.middleware.SecurityMiddleware',
'tls.TLSRequestMiddleware',
)
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'lizard_security.backends.LizardPermissionBackend',)
TEMPLATE_CONTEXT_PROCESSORS = (
# Default django 1.3 processors.
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.contrib.messages.context_processors.messages"
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
# Used for staticfiles (and for media files
STATIC_URL = '/static_media/'
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(BUILDOUT_DIR, 'var', 'static')
MEDIA_ROOT = os.path.join(BUILDOUT_DIR, 'var', 'media')
SECRET_KEY = "These are unit test settings so we don't need real secrecy"""
try:
# Import local settings that aren't stored in svn/git.
from lizard_security.local_testsettings import *
except ImportError:
pass
|
lizardsystem/lizard-security
|
lizard_security/testsettings.py
|
Python
|
gpl-3.0
| 3,481
|
import sys
import os
extensions = [
'sphinx.ext.todo',
]
source_suffix = '.txt'
master_doc = 'index'
### part to update ###################################
project = u'domogik-plugin-daikcode'
copyright = u'2014, Nico0084'
version = '0.1'
release = version
######################################################
pygments_style = 'sphinx'
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = project
|
Nico0084/domogik-plugin-daikcode
|
docs/conf.py
|
Python
|
gpl-3.0
| 430
|
# -*- coding: utf-8 -*-
import os
import re
import select
import socket
import struct
import time
from module.plugins.internal.Hoster import Hoster
from module.plugins.internal.misc import exists, fsjoin
class XDCC(Hoster):
__name__ = "XDCC"
__type__ = "hoster"
__version__ = "0.42"
__status__ = "testing"
__pattern__ = r'xdcc://(?P<SERVER>.*?)/#?(?P<CHAN>.*?)/(?P<BOT>.*?)/#?(?P<PACK>\d+)/?'
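    #: Example of a matching link (hypothetical server, channel and bot):
    #: xdcc://irc.example.net/#channel/SomeBot/#42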
__config__ = [("nick", "str", "Nickname", "pyload" ),
("ident", "str", "Ident", "pyloadident" ),
("realname", "str", "Realname", "pyloadreal" ),
("ctcp_version", "str","CTCP version string", "pyLoad! IRC Interface")]
__description__ = """Download from IRC XDCC bot"""
__license__ = "GPLv3"
__authors__ = [("jeix", "jeix@hasnomail.com" ),
("GammaC0de", "nitzo2001[AT]yahoo[DOT]com")]
def setup(self):
self.timeout = 30
self.multiDL = False
def process(self, pyfile):
#: Change request type
self.req = self.pyload.requestFactory.getRequest(self.classname, type="XDCC")
for _i in xrange(0, 3):
try:
nmn = self.do_download(pyfile.url)
self.log_info("Download of %s finished." % nmn)
return
except socket.error, e:
if hasattr(e, "errno") and e.errno is not None:
err_no = e.errno
if err_no in (10054, 10061):
self.log_warning("Server blocked our ip, retry in 5 min")
self.wait(300)
continue
else:
self.log_error(_("Failed due to socket errors. Code: %s") % err_no)
self.fail(_("Failed due to socket errors. Code: %s") % err_no)
else:
err_msg = e.args[0]
self.log_error(_("Failed due to socket errors: '%s'") % err_msg)
self.fail(_("Failed due to socket errors: '%s'") % err_msg)
self.log_error(_("Server blocked our ip, retry again later manually"))
self.fail(_("Server blocked our ip, retry again later manually"))
def do_download(self, url):
self.pyfile.setStatus("waiting")
server, chan, bot, pack = re.match(self.__pattern__, url).groups()
nick = self.config.get('nick')
ident = self.config.get('ident')
realname = self.config.get('realname')
ctcp_version = self.config.get('ctcp_version')
temp = server.split(':')
ln = len(temp)
if ln == 2:
host, port = temp
elif ln == 1:
host, port = temp[0], 6667
else:
self.fail(_("Invalid hostname for IRC Server: %s") % server)
#######################
#: CONNECT TO IRC AND IDLE FOR REAL LINK
dl_time = time.time()
sock = socket.socket()
self.log_info(_("Connecting to: %s:%s") % (host, port))
sock.connect((host, int(port)))
if nick == "pyload":
nick = "pyload-%d" % (time.time() % 1000) #: last 3 digits
sock.send("NICK %s\r\n" % nick)
sock.send("USER %s %s bla :%s\r\n" % (ident, host, realname))
self.log_info(_("Connect success."))
self.wait(5) # Wait for logon to complete
sock.send("JOIN #%s\r\n" % chan)
sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack))
#: IRC recv loop
readbuffer = ""
retry = None
m = None
while m is None:
if retry:
if time.time() > retry:
retry = None
dl_time = time.time()
sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack))
else:
if (dl_time + self.timeout) < time.time(): #@TODO: add in config
sock.send("QUIT :byebye\r\n")
sock.close()
self.log_error(_("XDCC Bot did not answer"))
self.fail(_("XDCC Bot did not answer"))
fdset = select.select([sock], [], [], 0)
if sock not in fdset[0]:
continue
readbuffer += sock.recv(1024)
lines = readbuffer.split("\n")
readbuffer = lines.pop()
for line in lines:
# if self.pyload.debug:
# self.log_debug("*> " + decode(line))
line = line.rstrip()
first = line.split()
if first[0] == "PING":
sock.send("PONG %s\r\n" % first[1])
if first[0] == "ERROR":
self.fail(_("IRC-Error: %s") % line)
msg = line.split(None, 3)
if len(msg) != 4:
continue
msg = {'origin': msg[0][1:],
'action': msg[1],
'target': msg[2],
'text' : msg[3][1:]}
if msg['target'][0:len(nick)] == nick and msg['action'] == "PRIVMSG":
if msg['text'] == "\x01VERSION\x01":
self.log_debug(_("Sending CTCP VERSION"))
sock.send("NOTICE %s :%s\r\n" % (msg['origin'], ctcp_version))
elif msg['text'] == "\x01TIME\x01":
self.log_debug(_("Sending CTCP TIME"))
sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time()))
elif msg['text'] == "\x01LAG\x01":
pass #: don't know how to answer
if msg['origin'][0:len(bot)] != bot\
or msg['target'][0:len(nick)] != nick\
or msg['action'] not in ("PRIVMSG", "NOTICE"):
continue
self.log_debug(_("PrivMsg: <%s> - %s" % (msg['origin'], msg['text'])))
if "You already requested that pack" in msg['text']:
retry = time.time() + 300
elif "you must be on a known channel to request a pack" in msg['text']:
self.log_error(_("Invalid channel"))
self.fail(_("Invalid channel"))
                m = re.match('\x01DCC SEND (?P<NAME>.*?) (?P<IP>\d+) (?P<PORT>\d+)(?: (?P<SIZE>\d+))?\x01', msg['text'])
                if m is not None:
                    break  # stop scanning once the DCC SEND offer arrives, so a later line cannot reset m
#: Get connection data
ip = socket.inet_ntoa(struct.pack('!I', int(m.group('IP'))))
port = int(m.group('PORT'))
file_name = m.group('NAME')
if m.group('SIZE'):
self.req.filesize = long(m.group('SIZE'))
self.pyfile.name = file_name
dl_folder = fsjoin(self.pyload.config.get('general', 'download_folder'),
self.pyfile.package().folder if self.pyload.config.get("general",
"folder_per_package") else "")
dl_file = fsjoin(dl_folder, file_name)
if not exists(dl_folder):
os.makedirs(dl_folder)
self.set_permissions(dl_folder)
self.log_info(_("Downloading %s from %s:%d") % (file_name, ip, port))
self.pyfile.setStatus("downloading")
newname = self.req.download(ip, port, dl_file, sock, self.pyfile.setProgress)
if newname and newname != dl_file:
self.log_info(_("%(name)s saved as %(newname)s") % {'name': self.pyfile.name, 'newname': newname})
dl_file = newname
#: kill IRC socket
#: sock.send("QUIT :byebye\r\n")
sock.close()
self.last_download = dl_file
return self.last_download
|
kaarl/pyload
|
module/plugins/hoster/XDCC.py
|
Python
|
gpl-3.0
| 7,856
|
my_name = 'Zed A. Shaw'
my_age = 35 # not a lie
my_height = 74 # Inches
my_weight = 180 # lbs
my_eyes = 'Blue'
my_teeth = 'White'
my_hair = 'Brown'
print "Let's talk about %s." % my_name
print "He's %d inches tall." % my_height
print "He's %d pounds heavy." % my_weight
print "Actually that's not too heavy"
print "He's got %s eyes and %s hair." % (my_eyes, my_hair)
print "His teeth are usually %s depending on the coffee." % my_teeth
# this line is tricky, try to get it exactly right
print" If I add %d, %d and %d I get %d." % (my_age, my_height, my_weight, my_age + my_height + my_weight)
|
rdthomson/set09103
|
src/LPHW/ex5.py
|
Python
|
gpl-3.0
| 596
|
"""
Implementation of the trigsimp algorithm by Fu et al.
The idea behind the ``fu`` algorithm is to use a sequence of rules, applied
in what is heuristically known to be a smart order, to select a simpler
expression that is equivalent to the input.
There are transform rules in which a single rule is applied to the
expression tree. The following are just mnemonic in nature; see the
docstrings for examples.
TR0 - simplify expression
TR1 - sec-csc to cos-sin
TR2 - tan-cot to sin-cos ratio
TR2i - sin-cos ratio to tan
TR3 - angle canonicalization
TR4 - functions at special angles
TR5 - powers of sin to powers of cos
TR6 - powers of cos to powers of sin
TR7 - reduce cos power (increase angle)
TR8 - expand products of sin-cos to sums
TR9 - contract sums of sin-cos to products
TR10 - separate sin-cos arguments
TR10i - collect sin-cos arguments
TR11 - reduce double angles
TR12 - separate tan arguments
TR12i - collect tan arguments
TR13 - expand product of tan-cot
TRmorrie - prod(cos(x*2**i), (i, 0, k - 1)) -> sin(2**k*x)/(2**k*sin(x))
TR14 - factored powers of sin or cos to cos or sin power
TR15 - negative powers of sin to cot power
TR16 - negative powers of cos to tan power
TR22 - tan-cot powers to negative powers of sec-csc functions
TR111 - negative sin-cos-tan powers to csc-sec-cot
There are 4 combination transforms (CTR1 - CTR4) in which a sequence of
transformations is applied and the simplest expression is selected from
a few options.
Finally, there are the 2 rule lists (RL1 and RL2), which apply a
sequence of transformations and combined transformations, and the ``fu``
algorithm itself, which applies rules and rule lists and selects the
best expressions. There is also a function ``L`` which counts the number
of trigonometric functions that appear in the expression.
Other than TR0, re-writing of expressions is not done by the transformations.
e.g. TR10i finds pairs of terms in a sum that are in the form like
``cos(x)*cos(y) + sin(x)*sin(y)``. Such expressions are targeted in a bottom-up
traversal of the expression, but no manipulation to make them appear is
attempted. For example,
Set-up for examples below:
>>> from sympy.simplify.fu import fu, L, TR9, TR10i, TR11
>>> from sympy import factor, sin, cos, powsimp
>>> from sympy.abc import x, y, z, a
>>> from time import time
>>> eq = cos(x + y)/cos(x)
>>> TR10i(eq.expand(trig=True))
-sin(x)*sin(y)/cos(x) + cos(y)
If the expression is put in "normal" form (with a common denominator) then
the transformation is successful:
>>> TR10i(_.normal())
cos(x + y)/cos(x)
TR11's behavior is similar. It rewrites double angles as smaller angles but
doesn't do any simplification of the result.
>>> TR11(sin(2)**a*cos(1)**(-a), 1)
(2*sin(1)*cos(1))**a*cos(1)**(-a)
>>> powsimp(_)
(2*sin(1))**a
The temptation is to try to make these TR rules "smarter" but that should really
be done at a higher level; the TR rules should try to maintain the "do one thing
well" principle. There is one exception, however. In TR10i and TR9 terms are
recognized even when they are each multiplied by a common factor:
>>> fu(a*cos(x)*cos(y) + a*sin(x)*sin(y))
a*cos(x - y)
Factoring with ``factor_terms`` is used, but it is "JIT"-like, being delayed
until it is deemed necessary. Furthermore, if the factoring does not
help with the simplification, it is not retained, so
``a*cos(x)*cos(y) + a*sin(x)*sin(z)`` does not become the factored
(but unsimplified in the trigonometric sense) expression:
>>> fu(a*cos(x)*cos(y) + a*sin(x)*sin(z))
a*sin(x)*sin(z) + a*cos(x)*cos(y)
In some cases factoring might be a good idea, but the user is left
to make that decision. For example:
>>> expr=((15*sin(2*x) + 19*sin(x + y) + 17*sin(x + z) + 19*cos(x - z) +
... 25)*(20*sin(2*x) + 15*sin(x + y) + sin(y + z) + 14*cos(x - z) +
... 14*cos(y - z))*(9*sin(2*y) + 12*sin(y + z) + 10*cos(x - y) + 2*cos(y -
... z) + 18)).expand(trig=True).expand()
In the expanded state, there are nearly 1000 trig functions:
>>> L(expr)
932
If the expression were factored first, this would take time but the
resulting expression would be transformed very quickly:
>>> def clock(f, n=2):
... t=time(); f(); return round(time()-t, n)
...
>>> clock(lambda: factor(expr)) # doctest: +SKIP
0.86
>>> clock(lambda: TR10i(expr), 3) # doctest: +SKIP
0.016
If the unexpanded expression is used, the transformation takes longer but
not as long as it took to factor it and then transform it:
>>> clock(lambda: TR10i(expr), 2) # doctest: +SKIP
0.28
So neither expansion nor factoring is used in ``TR10i``: if the
expression is already factored (or partially factored) then expansion
with ``trig=True`` would destroy what is already known and take
longer; if the expression is expanded, factoring may take longer than
simply applying the transformation itself.
Although the algorithms should be canonical, always giving the same
result, they may not yield the best result. This, in general, is
the nature of simplification where searching all possible transformation
paths is very expensive. Here is a simple example. There are 6 terms
in the following sum:
>>> expr = (sin(x)**2*cos(y)*cos(z) + sin(x)*sin(y)*cos(x)*cos(z) +
... sin(x)*sin(z)*cos(x)*cos(y) + sin(y)*sin(z)*cos(x)**2 + sin(y)*sin(z) +
... cos(y)*cos(z))
>>> args = expr.args
Serendipitously, fu gives the best result:
>>> fu(expr)
3*cos(y - z)/2 - cos(2*x + y + z)/2
But if different terms were combined, a less optimal result might be
obtained; additional work might improve it, yet still fall short of
the best form. The following shows an alternative form
of ``expr`` that resists optimal simplification once a given step
is taken since it leads to a dead end:
>>> TR9(-cos(x)**2*cos(y + z) + 3*cos(y - z)/2 +
... cos(y + z)/2 + cos(-2*x + y + z)/4 - cos(2*x + y + z)/4)
sin(2*x)*sin(y + z)/2 - cos(x)**2*cos(y + z) + 3*cos(y - z)/2 + cos(y + z)/2
Here is a smaller expression that exhibits the same behavior:
>>> a = sin(x)*sin(z)*cos(x)*cos(y) + sin(x)*sin(y)*cos(x)*cos(z)
>>> TR10i(a)
sin(x)*sin(y + z)*cos(x)
>>> newa = _
>>> TR10i(expr - a) # this combines two more of the remaining terms
sin(x)**2*cos(y)*cos(z) + sin(y)*sin(z)*cos(x)**2 + cos(y - z)
>>> TR10i(_ + newa) == _ + newa # but now there is no more simplification
True
Without getting lucky or trying all possible pairings of arguments, the
final result may be less than optimal and impossible to find without
better heuristics or brute force trial of all possibilities.
Notes
=====
This work was started by Dimitar Vlahovski at the Technological School
"Electronic systems" (30.11.2011).
References
==========
http://rfdz.ph-noe.ac.at/fileadmin/Mathematik_Uploads/ACDCA/
DESTIME2006/DES_contribs/Fu/simplification.pdf
http://www.sosmath.com/trig/Trig5/trig5/pdf/pdf.html gives a formula sheet.
"""
from __future__ import print_function, division
from collections import defaultdict
from itertools import combinations
from sympy.simplify.simplify import (simplify, powsimp, ratsimp, combsimp,
_mexpand, bottom_up)
from sympy.core.sympify import sympify
from sympy.functions.elementary.trigonometric import (
cos, sin, tan, cot, sec, csc, sqrt)
from sympy.functions.elementary.hyperbolic import cosh, sinh, tanh, coth
from sympy.core.compatibility import ordered
from sympy.core.core import C
from sympy.core.mul import Mul
from sympy.core.power import Pow
from sympy.core.function import expand_mul, count_ops
from sympy.core.add import Add
from sympy.core.symbol import Dummy
from sympy.core.exprtools import Factors, gcd_terms
from sympy.core.rules import Transform
from sympy.core.basic import S
from sympy.core.numbers import Integer, pi, I
from sympy.strategies.tree import greedy
from sympy.strategies.core import identity, debug
from sympy.polys.polytools import factor
from sympy.ntheory.factor_ import perfect_power
from sympy import SYMPY_DEBUG
# ================== Fu-like tools ===========================
def TR0(rv):
"""Simplification of rational polynomials, trying to simplify
the expression, e.g. combine things like 3*x + 2*x, etc....
"""
# although it would be nice to use cancel, it doesn't work
# with noncommutatives
return rv.normal().factor().expand()
def TR1(rv):
"""Replace sec, csc with 1/cos, 1/sin
Examples
========
>>> from sympy.simplify.fu import TR1, sec, csc
>>> from sympy.abc import x
>>> TR1(2*csc(x) + sec(x))
1/cos(x) + 2/sin(x)
"""
def f(rv):
if rv.func is sec:
a = rv.args[0]
return S.One/cos(a)
elif rv.func is csc:
a = rv.args[0]
return S.One/sin(a)
return rv
return bottom_up(rv, f)
def TR2(rv):
"""Replace tan and cot with sin/cos and cos/sin
Examples
========
>>> from sympy.simplify.fu import TR2
>>> from sympy.abc import x
>>> from sympy import tan, cot, sin, cos
>>> TR2(tan(x))
sin(x)/cos(x)
>>> TR2(cot(x))
cos(x)/sin(x)
>>> TR2(tan(tan(x) - sin(x)/cos(x)))
0
"""
def f(rv):
if rv.func is tan:
a = rv.args[0]
return sin(a)/cos(a)
elif rv.func is cot:
a = rv.args[0]
return cos(a)/sin(a)
return rv
return bottom_up(rv, f)
def TR2i(rv, half=False):
"""Converts ratios involving sin and cos as follows::
sin(x)/cos(x) -> tan(x)
sin(x)/(cos(x) + 1) -> tan(x/2) if half=True
Examples
========
>>> from sympy.simplify.fu import TR2i
>>> from sympy.abc import x, a
>>> from sympy import sin, cos
>>> TR2i(sin(x)/cos(x))
tan(x)
Powers of the numerator and denominator are also recognized
>>> TR2i(sin(x)**2/(cos(x) + 1)**2, half=True)
tan(x/2)**2
The transformation does not take place unless assumptions allow
(i.e. the base must be positive or the exponent must be an integer
for both numerator and denominator)
>>> TR2i(sin(x)**a/(cos(x) + 1)**a)
(cos(x) + 1)**(-a)*sin(x)**a
"""
def f(rv):
if not rv.is_Mul:
return rv
n, d = rv.as_numer_denom()
if n.is_Atom or d.is_Atom:
return rv
def ok(k, e):
# initial filtering of factors
return (
(e.is_integer or k.is_positive) and (
k.func in (sin, cos) or (half and
k.is_Add and
len(k.args) >= 2 and
any(any(ai.func is cos or ai.is_Pow and ai.base is cos
for ai in Mul.make_args(a)) for a in k.args))))
n = n.as_powers_dict()
ndone = [(k, n.pop(k)) for k in list(n.keys()) if not ok(k, n[k])]
if not n:
return rv
d = d.as_powers_dict()
ddone = [(k, d.pop(k)) for k in list(d.keys()) if not ok(k, d[k])]
if not d:
return rv
# factoring if necessary
def factorize(d, ddone):
newk = []
for k in d:
if k.is_Add and len(k.args) > 2:
knew = factor(k) if half else factor_terms(k)
if knew != k:
newk.append((k, knew))
if newk:
for i, (k, knew) in enumerate(newk):
del d[k]
newk[i] = knew
newk = Mul(*newk).as_powers_dict()
for k in newk:
if ok(k, d[k]):
d[k] += newk[k]
else:
ddone.append((k, d[k]))
del newk
factorize(n, ndone)
factorize(d, ddone)
# joining
t = []
for k in n:
if k.func is sin:
a = cos(k.args[0], evaluate=False)
if a in d and d[a] == n[k]:
t.append(tan(k.args[0])**n[k])
n[k] = d[a] = None
elif half:
a1 = 1 + a
if a1 in d and d[a1] == n[k]:
t.append((tan(k.args[0]/2))**n[k])
n[k] = d[a1] = None
elif k.func is cos:
a = sin(k.args[0], evaluate=False)
if a in d and d[a] == n[k]:
t.append(tan(k.args[0])**-n[k])
n[k] = d[a] = None
elif half and k.is_Add and k.args[0] is S.One and \
k.args[1].func is cos:
a = sin(k.args[1].args[0], evaluate=False)
if a in d and d[a] == n[k] and (d[a].is_integer or \
a.is_positive):
t.append(tan(a.args[0]/2)**-n[k])
n[k] = d[a] = None
if t:
rv = Mul(*(t + [b**e for b, e in n.items() if e]))/\
Mul(*[b**e for b, e in d.items() if e])
rv *= Mul(*[b**e for b, e in ndone])/Mul(*[b**e for b, e in ddone])
return rv
return bottom_up(rv, f)
def TR3(rv):
"""Induced formula: example sin(-a) = -sin(a)
Examples
========
>>> from sympy.simplify.fu import TR3
>>> from sympy.abc import x, y
>>> from sympy import pi
>>> from sympy import cos
>>> TR3(cos(y - x*(y - x)))
cos(x*(x - y) + y)
>>> cos(pi/2 + x)
-sin(x)
>>> cos(30*pi/2 + x)
-cos(x)
"""
from sympy.simplify.simplify import signsimp
    # Negative argument (already automatic for funcs like sin(-x) -> -sin(x)
    # but more complicated expressions can use it, too). Also, trig angles
    # between pi/4 and pi/2 are not automatically reduced to an angle between
    # 0 and pi/4, so this rule handles that reduction.
# The following are automatically handled:
# Argument of type: pi/2 +/- angle
# Argument of type: pi +/- angle
# Argument of type : 2k*pi +/- angle
def f(rv):
if not isinstance(rv, C.TrigonometricFunction):
return rv
rv = rv.func(signsimp(rv.args[0]))
if (rv.args[0] - S.Pi/4).is_positive is (S.Pi/2 - rv.args[0]).is_positive is True:
fmap = {cos: sin, sin: cos, tan: cot, cot: tan, sec: csc, csc: sec}
rv = fmap[rv.func](S.Pi/2 - rv.args[0])
return rv
return bottom_up(rv, f)
def TR4(rv):
"""Identify values of special angles.
     a =    0      pi/6       pi/4       pi/3      pi/2
    ----------------------------------------------------
    cos(a)  1      sqrt(3)/2  sqrt(2)/2  1/2       0
    sin(a)  0      1/2        sqrt(2)/2  sqrt(3)/2 1
    tan(a)  0      sqrt(3)/3  1          sqrt(3)   --
Examples
========
>>> from sympy.simplify.fu import TR4
>>> from sympy import pi
>>> from sympy import cos, sin, tan, cot
>>> for s in (0, pi/6, pi/4, pi/3, pi/2):
... print('%s %s %s %s' % (cos(s), sin(s), tan(s), cot(s)))
...
1 0 0 zoo
sqrt(3)/2 1/2 sqrt(3)/3 sqrt(3)
sqrt(2)/2 sqrt(2)/2 1 1
1/2 sqrt(3)/2 sqrt(3) sqrt(3)/3
0 1 zoo 0
"""
# special values at 0, pi/6, pi/4, pi/3, pi/2 already handled
return rv
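# Descriptive note (not from the original source): TR4 can be a
# pass-through because SymPy's automatic evaluation already rewrites trig
# functions at these special angles on construction -- e.g. cos(pi/6)
# becomes sqrt(3)/2 the moment it is created -- so nothing is left for
# this rule to do explicitly.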
def _TR56(rv, f, g, h, max, pow):
"""Helper for TR5 and TR6 to replace f**2 with h(g**2)
Options
=======
max : controls size of exponent that can appear on f
e.g. if max=4 then f**4 will be changed to h(g**2)**2.
pow : controls whether the exponent must be a perfect power of 2
e.g. if pow=True (and max >= 6) then f**6 will not be changed
but f**8 will be changed to h(g**2)**4
>>> from sympy.simplify.fu import _TR56 as T
>>> from sympy.abc import x
>>> from sympy import sin, cos
>>> h = lambda x: 1 - x
>>> T(sin(x)**3, sin, cos, h, 4, False)
sin(x)**3
>>> T(sin(x)**6, sin, cos, h, 6, False)
(-cos(x)**2 + 1)**3
>>> T(sin(x)**6, sin, cos, h, 6, True)
sin(x)**6
>>> T(sin(x)**8, sin, cos, h, 10, True)
(-cos(x)**2 + 1)**4
"""
def _f(rv):
# I'm not sure if this transformation should target all even powers
# or only those expressible as powers of 2. Also, should it only
# make the changes in powers that appear in sums -- making an isolated
# change is not going to allow a simplification as far as I can tell.
if not (rv.is_Pow and rv.base.func == f):
return rv
if (rv.exp < 0) is True:
return rv
if (rv.exp > max) is True:
return rv
if rv.exp == 2:
return h(g(rv.base.args[0])**2)
else:
if rv.exp == 4:
e = 2
elif not pow:
if rv.exp % 2:
return rv
e = rv.exp//2
else:
p = perfect_power(rv.exp)
if not p:
return rv
e = rv.exp//2
return h(g(rv.base.args[0])**2)**e
return bottom_up(rv, _f)
def TR5(rv, max=4, pow=False):
"""Replacement of sin**2 with 1 - cos(x)**2.
See _TR56 docstring for advanced use of ``max`` and ``pow``.
Examples
========
>>> from sympy.simplify.fu import TR5
>>> from sympy.abc import x
>>> from sympy import sin
>>> TR5(sin(x)**2)
-cos(x)**2 + 1
>>> TR5(sin(x)**-2) # unchanged
sin(x)**(-2)
>>> TR5(sin(x)**4)
(-cos(x)**2 + 1)**2
"""
return _TR56(rv, sin, cos, lambda x: 1 - x, max=max, pow=pow)
def TR6(rv, max=4, pow=False):
"""Replacement of cos**2 with 1 - sin(x)**2.
See _TR56 docstring for advanced use of ``max`` and ``pow``.
Examples
========
>>> from sympy.simplify.fu import TR6
>>> from sympy.abc import x
>>> from sympy import cos
>>> TR6(cos(x)**2)
-sin(x)**2 + 1
>>> TR6(cos(x)**-2) #unchanged
cos(x)**(-2)
>>> TR6(cos(x)**4)
(-sin(x)**2 + 1)**2
"""
return _TR56(rv, cos, sin, lambda x: 1 - x, max=max, pow=pow)
def TR7(rv):
"""Lowering the degree of cos(x)**2
Examples
========
>>> from sympy.simplify.fu import TR7
>>> from sympy.abc import x
>>> from sympy import cos
>>> TR7(cos(x)**2)
cos(2*x)/2 + 1/2
>>> TR7(cos(x)**2 + 1)
cos(2*x)/2 + 3/2
"""
def f(rv):
if not (rv.is_Pow and rv.base.func == cos and rv.exp == 2):
return rv
return (1 + cos(2*rv.base.args[0]))/2
return bottom_up(rv, f)
def TR8(rv, first=True):
"""Converting products of ``cos`` and/or ``sin`` to a sum or
    difference of ``cos`` and/or ``sin`` terms.
Examples
========
>>> from sympy.simplify.fu import TR8, TR7
>>> from sympy import cos, sin
>>> TR8(cos(2)*cos(3))
cos(5)/2 + cos(1)/2
>>> TR8(cos(2)*sin(3))
sin(5)/2 + sin(1)/2
>>> TR8(sin(2)*sin(3))
-cos(5)/2 + cos(1)/2
"""
def f(rv):
if not (
rv.is_Mul or
rv.is_Pow and
rv.base.func in (cos, sin) and
(rv.exp.is_integer or rv.base.is_positive)):
return rv
if first:
n, d = [expand_mul(i) for i in rv.as_numer_denom()]
newn = TR8(n, first=False)
newd = TR8(d, first=False)
if newn != n or newd != d:
rv = gcd_terms(newn/newd)
if rv.is_Mul and rv.args[0].is_Rational and \
len(rv.args) == 2 and rv.args[1].is_Add:
rv = Mul(*rv.as_coeff_Mul())
return rv
args = {cos: [], sin: [], None: []}
for a in ordered(Mul.make_args(rv)):
if a.func in (cos, sin):
args[a.func].append(a.args[0])
elif (a.is_Pow and a.exp.is_Integer and a.exp > 0 and \
a.base.func in (cos, sin)):
# XXX this is ok but pathological expression could be handled
# more efficiently as in TRmorrie
args[a.base.func].extend([a.base.args[0]]*a.exp)
else:
args[None].append(a)
c = args[cos]
s = args[sin]
if not (c and s or len(c) > 1 or len(s) > 1):
return rv
args = args[None]
n = min(len(c), len(s))
for i in range(n):
a1 = s.pop()
a2 = c.pop()
args.append((sin(a1 + a2) + sin(a1 - a2))/2)
while len(c) > 1:
a1 = c.pop()
a2 = c.pop()
args.append((cos(a1 + a2) + cos(a1 - a2))/2)
if c:
args.append(cos(c.pop()))
while len(s) > 1:
a1 = s.pop()
a2 = s.pop()
args.append((-cos(a1 + a2) + cos(a1 - a2))/2)
if s:
args.append(sin(s.pop()))
return TR8(expand_mul(Mul(*args)))
return bottom_up(rv, f)
def TR9(rv):
"""Sum of ``cos`` or ``sin`` terms as a product of ``cos`` or ``sin``.
Examples
========
>>> from sympy.simplify.fu import TR9
>>> from sympy import cos, sin
>>> TR9(cos(1) + cos(2))
2*cos(1/2)*cos(3/2)
>>> TR9(cos(1) + 2*sin(1) + 2*sin(2))
cos(1) + 4*sin(3/2)*cos(1/2)
If no change is made by TR9, no re-arrangement of the
expression will be made. For example, though factoring
    of a common term is attempted, if the factored expression
    was not changed, the original expression will be returned:
>>> TR9(cos(3) + cos(3)*cos(2))
cos(3) + cos(2)*cos(3)
"""
def f(rv):
if not rv.is_Add:
return rv
def do(rv, first=True):
# cos(a)+/-cos(b) can be combined into a product of cosines and
# sin(a)+/-sin(b) can be combined into a product of cosine and
# sine.
#
# If there are more than two args, the pairs which "work" will
# have a gcd extractable and the remaining two terms will have
# the above structure -- all pairs must be checked to find the
# ones that work. args that don't have a common set of symbols
# are skipped since this doesn't lead to a simpler formula and
# also has the arbitrariness of combining, for example, the x
# and y term instead of the y and z term in something like
# cos(x) + cos(y) + cos(z).
if not rv.is_Add:
return rv
args = list(ordered(rv.args))
if len(args) != 2:
hit = False
for i in range(len(args)):
ai = args[i]
if ai is None:
continue
for j in range(i + 1, len(args)):
aj = args[j]
if aj is None:
continue
was = ai + aj
new = do(was)
if new != was:
args[i] = new # update in place
args[j] = None
hit = True
break # go to next i
if hit:
rv = Add(*[_f for _f in args if _f])
if rv.is_Add:
rv = do(rv)
return rv
# two-arg Add
split = trig_split(*args)
if not split:
return rv
gcd, n1, n2, a, b, iscos = split
# application of rule if possible
if iscos:
if n1 == n2:
return gcd*n1*2*cos((a + b)/2)*cos((a - b)/2)
if n1 < 0:
a, b = b, a
return -2*gcd*sin((a + b)/2)*sin((a - b)/2)
else:
if n1 == n2:
return gcd*n1*2*sin((a + b)/2)*cos((a - b)/2)
if n1 < 0:
a, b = b, a
return 2*gcd*cos((a + b)/2)*sin((a - b)/2)
return process_common_addends(rv, do) # DON'T sift by free symbols
return bottom_up(rv, f)
def TR10(rv, first=True):
"""Separate sums in ``cos`` and ``sin``.
Examples
========
>>> from sympy.simplify.fu import TR10
>>> from sympy.abc import a, b, c
>>> from sympy import cos, sin
>>> TR10(cos(a + b))
-sin(a)*sin(b) + cos(a)*cos(b)
>>> TR10(sin(a + b))
sin(a)*cos(b) + sin(b)*cos(a)
>>> TR10(sin(a + b + c))
(-sin(a)*sin(b) + cos(a)*cos(b))*sin(c) + \
(sin(a)*cos(b) + sin(b)*cos(a))*cos(c)
"""
def f(rv):
if not rv.func in (cos, sin):
return rv
f = rv.func
arg = rv.args[0]
if arg.is_Add:
if first:
args = list(ordered(arg.args))
else:
args = list(arg.args)
a = args.pop()
b = Add._from_args(args)
if b.is_Add:
if f == sin:
return sin(a)*TR10(cos(b), first=False) + \
cos(a)*TR10(sin(b), first=False)
else:
return cos(a)*TR10(cos(b), first=False) - \
sin(a)*TR10(sin(b), first=False)
else:
if f == sin:
return sin(a)*cos(b) + cos(a)*sin(b)
else:
return cos(a)*cos(b) - sin(a)*sin(b)
return rv
return bottom_up(rv, f)
def TR10i(rv):
"""Sum of products to function of sum.
Examples
========
>>> from sympy.simplify.fu import TR10i
>>> from sympy import cos, sin, pi, Add, Mul, sqrt, Symbol
>>> from sympy.abc import x, y
>>> TR10i(cos(1)*cos(3) + sin(1)*sin(3))
cos(2)
>>> TR10i(cos(1)*sin(3) + sin(1)*cos(3) + cos(3))
cos(3) + sin(4)
>>> TR10i(sqrt(2)*cos(x)*x + sqrt(6)*sin(x)*x)
2*sqrt(2)*x*sin(x + pi/6)
"""
global _ROOT2, _ROOT3, _invROOT3
if _ROOT2 is None:
_roots()
def f(rv):
if not rv.is_Add:
return rv
def do(rv, first=True):
# args which can be expressed as A*(cos(a)*cos(b)+/-sin(a)*sin(b))
# or B*(cos(a)*sin(b)+/-cos(b)*sin(a)) can be combined into
# A*f(a+/-b) where f is either sin or cos.
#
# If there are more than two args, the pairs which "work" will have
# a gcd extractable and the remaining two terms will have the above
# structure -- all pairs must be checked to find the ones that
# work.
if not rv.is_Add:
return rv
args = list(ordered(rv.args))
if len(args) != 2:
hit = False
for i in range(len(args)):
ai = args[i]
if ai is None:
continue
for j in range(i + 1, len(args)):
aj = args[j]
if aj is None:
continue
was = ai + aj
new = do(was)
if new != was:
args[i] = new # update in place
args[j] = None
hit = True
break # go to next i
if hit:
rv = Add(*[_f for _f in args if _f])
if rv.is_Add:
rv = do(rv)
return rv
# two-arg Add
split = trig_split(*args, two=True)
if not split:
return rv
gcd, n1, n2, a, b, same = split
# identify and get c1 to be cos then apply rule if possible
if same: # coscos, sinsin
gcd = n1*gcd
if n1 == n2:
return gcd*cos(a - b)
return gcd*cos(a + b)
else: #cossin, cossin
gcd = n1*gcd
if n1 == n2:
return gcd*sin(a + b)
return gcd*sin(b - a)
rv = process_common_addends(
rv, do, lambda x: tuple(ordered(x.free_symbols)))
# need to check for inducible pairs in ratio of sqrt(3):1 that
# appeared in different lists when sorting by coefficient
while rv.is_Add:
byrad = defaultdict(list)
for a in rv.args:
hit = 0
if a.is_Mul:
for ai in a.args:
if ai.is_Pow and ai.exp is S.Half and \
ai.base.is_Integer:
byrad[ai].append(a)
hit = 1
break
if not hit:
byrad[S.One].append(a)
        # no need to check all pairs -- just check for the ones
# that have the right ratio
args = []
for a in byrad:
for b in [_ROOT3*a, _invROOT3]:
if b in byrad:
for i in range(len(byrad[a])):
if byrad[a][i] is None:
continue
for j in range(len(byrad[b])):
if byrad[b][j] is None:
continue
was = Add(byrad[a][i] + byrad[b][j])
new = do(was)
if new != was:
args.append(new)
byrad[a][i] = None
byrad[b][j] = None
break
if args:
rv = Add(*(args + [Add(*[_f for _f in v if _f])
for v in byrad.values()]))
else:
rv = do(rv) # final pass to resolve any new inducible pairs
break
return rv
return bottom_up(rv, f)
def TR11(rv, base=None):
"""Function of double angle to product. The ``base`` argument can be used
to indicate what is the un-doubled argument, e.g. if 3*pi/7 is the base
then cosine and sine functions with argument 6*pi/7 will be replaced.
Examples
========
>>> from sympy.simplify.fu import TR11
>>> from sympy import cos, sin, pi
>>> from sympy.abc import x
>>> TR11(sin(2*x))
2*sin(x)*cos(x)
>>> TR11(cos(2*x))
-sin(x)**2 + cos(x)**2
>>> TR11(sin(4*x))
4*(-sin(x)**2 + cos(x)**2)*sin(x)*cos(x)
>>> TR11(sin(4*x/3))
4*(-sin(x/3)**2 + cos(x/3)**2)*sin(x/3)*cos(x/3)
If the arguments are simply integers, no change is made
unless a base is provided:
>>> TR11(cos(2))
cos(2)
>>> TR11(cos(4), 2)
-sin(2)**2 + cos(2)**2
There is a subtle issue here in that autosimplification will convert
some higher angles to lower angles
>>> cos(6*pi/7) + cos(3*pi/7)
-cos(pi/7) + cos(3*pi/7)
The 6*pi/7 angle is now pi/7 but can be targeted with TR11 by supplying
the 3*pi/7 base:
>>> TR11(_, 3*pi/7)
-sin(3*pi/7)**2 + cos(3*pi/7)**2 + cos(3*pi/7)
"""
def f(rv):
if not rv.func in (cos, sin):
return rv
if base:
f = rv.func
t = f(base*2)
co = S.One
if t.is_Mul:
co, t = t.as_coeff_Mul()
if not t.func in (cos, sin):
return rv
if rv.args[0] == t.args[0]:
c = cos(base)
s = sin(base)
if f is cos:
return (c**2 - s**2)/co
else:
return 2*c*s/co
return rv
elif not rv.args[0].is_Number:
# make a change if the leading coefficient's numerator is
# divisible by 2
c, m = rv.args[0].as_coeff_Mul(rational=True)
if c.p % 2 == 0:
arg = c.p//2*m/c.q
c = TR11(cos(arg))
s = TR11(sin(arg))
if rv.func == sin:
rv = 2*s*c
else:
rv = c**2 - s**2
return rv
return bottom_up(rv, f)
def TR12(rv, first=True):
"""Separate sums in ``tan``.
Examples
========
>>> from sympy.simplify.fu import TR12
>>> from sympy.abc import x, y
>>> from sympy import tan
>>> from sympy.simplify.fu import TR12
>>> TR12(tan(x + y))
(tan(x) + tan(y))/(-tan(x)*tan(y) + 1)
"""
def f(rv):
if not rv.func == tan:
return rv
arg = rv.args[0]
if arg.is_Add:
if first:
args = list(ordered(arg.args))
else:
args = list(arg.args)
a = args.pop()
b = Add._from_args(args)
if b.is_Add:
tb = TR12(tan(b), first=False)
else:
tb = tan(b)
return (tan(a) + tb)/(1 - tan(a)*tb)
return rv
return bottom_up(rv, f)
def TR12i(rv):
"""Combine tan arguments as
(tan(y) + tan(x))/(tan(x)*tan(y) - 1) -> -tan(x + y)
Examples
========
>>> from sympy.simplify.fu import TR12i
>>> from sympy import tan
>>> from sympy.abc import a, b, c
>>> ta, tb, tc = [tan(i) for i in (a, b, c)]
>>> TR12i((ta + tb)/(-ta*tb + 1))
tan(a + b)
>>> TR12i((ta + tb)/(ta*tb - 1))
-tan(a + b)
>>> TR12i((-ta - tb)/(ta*tb - 1))
tan(a + b)
>>> eq = (ta + tb)/(-ta*tb + 1)**2*(-3*ta - 3*tc)/(2*(ta*tc - 1))
>>> TR12i(eq.expand())
-3*tan(a + b)*tan(a + c)/(2*(tan(a) + tan(b) - 1))
"""
from sympy import factor, fraction, factor_terms
def f(rv):
if not (rv.is_Add or rv.is_Mul or rv.is_Pow):
return rv
n, d = rv.as_numer_denom()
if not d.args or not n.args:
return rv
dok = {}
def ok(di):
m = as_f_sign_1(di)
if m:
g, f, s = m
if s is S.NegativeOne and f.is_Mul and len(f.args) == 2 and \
all(fi.func is tan for fi in f.args):
return g, f
d_args = list(Mul.make_args(d))
for i, di in enumerate(d_args):
m = ok(di)
if m:
g, t = m
s = Add(*[_.args[0] for _ in t.args])
dok[s] = S.One
d_args[i] = g
continue
if di.is_Add:
di = factor(di)
if di.is_Mul:
d_args.extend(di.args)
d_args[i] = S.One
elif di.is_Pow and (di.exp.is_integer or di.base.is_positive):
m = ok(di.base)
if m:
g, t = m
s = Add(*[_.args[0] for _ in t.args])
dok[s] = di.exp
d_args[i] = g**di.exp
else:
di = factor(di)
if di.is_Mul:
d_args.extend(di.args)
d_args[i] = S.One
if not dok:
return rv
def ok(ni):
if ni.is_Add and len(ni.args) == 2:
a, b = ni.args
if a.func is tan and b.func is tan:
return a, b
n_args = list(Mul.make_args(factor_terms(n)))
hit = False
for i, ni in enumerate(n_args):
m = ok(ni)
if not m:
m = ok(-ni)
if m:
n_args[i] = S.NegativeOne
else:
if ni.is_Add:
ni = factor(ni)
if ni.is_Mul:
n_args.extend(ni.args)
n_args[i] = S.One
continue
elif ni.is_Pow and (
ni.exp.is_integer or ni.base.is_positive):
m = ok(ni.base)
if m:
n_args[i] = S.One
else:
ni = factor(ni)
if ni.is_Mul:
n_args.extend(ni.args)
n_args[i] = S.One
continue
else:
continue
else:
n_args[i] = S.One
hit = True
s = Add(*[_.args[0] for _ in m])
ed = dok[s]
newed = ed.extract_additively(S.One)
if newed is not None:
if newed:
dok[s] = newed
else:
dok.pop(s)
n_args[i] *= -tan(s)
if hit:
rv = Mul(*n_args)/Mul(*d_args)/Mul(*[(Add(*[
tan(a) for a in i.args]) - 1)**e for i, e in dok.items()])
return rv
return bottom_up(rv, f)
def TR13(rv):
"""Change products of ``tan`` or ``cot``.
Examples
========
>>> from sympy.simplify.fu import TR13
>>> from sympy import tan, cot, cos
>>> TR13(tan(3)*tan(2))
-tan(2)/tan(5) - tan(3)/tan(5) + 1
>>> TR13(cot(3)*cot(2))
cot(2)*cot(5) + 1 + cot(3)*cot(5)
"""
def f(rv):
if not rv.is_Mul:
return rv
# XXX handle products of powers? or let power-reducing handle it?
args = {tan: [], cot: [], None: []}
for a in ordered(Mul.make_args(rv)):
if a.func in (tan, cot):
args[a.func].append(a.args[0])
else:
args[None].append(a)
t = args[tan]
c = args[cot]
if len(t) < 2 and len(c) < 2:
return rv
args = args[None]
while len(t) > 1:
t1 = t.pop()
t2 = t.pop()
args.append(1 - (tan(t1)/tan(t1 + t2) + tan(t2)/tan(t1 + t2)))
if t:
args.append(tan(t.pop()))
while len(c) > 1:
t1 = c.pop()
t2 = c.pop()
args.append(1 + cot(t1)*cot(t1 + t2) + cot(t2)*cot(t1 + t2))
if c:
args.append(cot(c.pop()))
return Mul(*args)
return bottom_up(rv, f)
def TRmorrie(rv):
"""Returns cos(x)*cos(2*x)*...*cos(2**(k-1)*x) -> sin(2**k*x)/(2**k*sin(x))
Examples
========
>>> from sympy.simplify.fu import TRmorrie, TR8, TR3
>>> from sympy.abc import x
>>> from sympy import Mul, cos, pi
>>> TRmorrie(cos(x)*cos(2*x))
sin(4*x)/(4*sin(x))
>>> TRmorrie(7*Mul(*[cos(x) for x in range(10)]))
7*sin(12)*sin(16)*cos(5)*cos(7)*cos(9)/(64*sin(1)*sin(3))
    Sometimes autosimplification will cause a term not to be
    recognized. E.g. in the following, cos(4*pi/7) automatically
simplifies to -cos(3*pi/7) so only 2 of the 3 terms are
recognized:
>>> TRmorrie(cos(pi/7)*cos(2*pi/7)*cos(4*pi/7))
-sin(3*pi/7)*cos(3*pi/7)/(4*sin(pi/7))
A touch by TR8 resolves the expression to a Rational
>>> TR8(_)
-1/8
In this case, if eq is unsimplified, the answer is obtained
directly:
>>> eq = cos(pi/9)*cos(2*pi/9)*cos(3*pi/9)*cos(4*pi/9)
>>> TRmorrie(eq)
1/16
But if angles are made canonical with TR3 then the answer
is not simplified without further work:
>>> TR3(eq)
sin(pi/18)*cos(pi/9)*cos(2*pi/9)/2
>>> TRmorrie(_)
sin(pi/18)*sin(4*pi/9)/(8*sin(pi/9))
>>> TR8(_)
cos(7*pi/18)/(16*sin(pi/9))
>>> TR3(_)
1/16
    The original expression would have resolved to 1/16 directly with TR8,
however:
>>> TR8(eq)
1/16
References
==========
http://en.wikipedia.org/wiki/Morrie%27s_law
"""
def f(rv):
if not rv.is_Mul:
return rv
args = defaultdict(list)
coss = {}
other = []
for c in rv.args:
b, e = c.as_base_exp()
if e.is_Integer and b.func is cos:
co, a = b.args[0].as_coeff_Mul()
args[a].append(co)
coss[b] = e
else:
other.append(c)
new = []
for a in args:
c = args[a]
c.sort()
no = []
while c:
k = 0
cc = ci = c[0]
while cc in c:
k += 1
cc *= 2
if k > 1:
newarg = sin(2**k*ci*a)/2**k/sin(ci*a)
# see how many times this can be taken
take = None
ccs = []
for i in range(k):
cc /= 2
key = cos(a*cc, evaluate=False)
ccs.append(cc)
take = min(coss[key], take or coss[key])
# update exponent counts
for i in range(k):
cc = ccs.pop()
key = cos(a*cc, evaluate=False)
coss[key] -= take
if not coss[key]:
c.remove(cc)
new.append(newarg**take)
else:
no.append(c.pop(0))
c[:] = no
if new:
rv = Mul(*(new + other + [
cos(k*a, evaluate=False) for a in args for k in args[a]]))
return rv
return bottom_up(rv, f)
def TR14(rv, first=True):
"""Convert factored powers of sin and cos identities into simpler
expressions.
Examples
========
>>> from sympy.simplify.fu import TR14
>>> from sympy.abc import x, y
>>> from sympy import cos, sin
>>> TR14((cos(x) - 1)*(cos(x) + 1))
-sin(x)**2
>>> TR14((sin(x) - 1)*(sin(x) + 1))
-cos(x)**2
>>> p1 = (cos(x) + 1)*(cos(x) - 1)
>>> p2 = (cos(y) - 1)*2*(cos(y) + 1)
>>> p3 = (3*(cos(y) - 1))*(3*(cos(y) + 1))
>>> TR14(p1*p2*p3*(x - 1))
-18*(x - 1)*sin(x)**2*sin(y)**4
"""
def f(rv):
if not rv.is_Mul:
return rv
if first:
# sort them by location in numerator and denominator
# so the code below can just deal with positive exponents
n, d = rv.as_numer_denom()
if d is not S.One:
newn = TR14(n, first=False)
newd = TR14(d, first=False)
if newn != n or newd != d:
rv = newn/newd
return rv
other = []
process = []
for a in rv.args:
if a.is_Pow:
b, e = a.as_base_exp()
if not (e.is_integer or b.is_positive):
other.append(a)
continue
a = b
else:
e = S.One
m = as_f_sign_1(a)
if not m or m[1].func not in (cos, sin):
if e is S.One:
other.append(a)
else:
other.append(a**e)
continue
g, f, si = m
process.append((g, e.is_Number, e, f, si, a))
# sort them to get like terms next to each other
process = list(ordered(process))
# keep track of whether there was any change
nother = len(other)
# access keys
keys = (g, t, e, f, si, a) = list(range(6))
while process:
A = process.pop(0)
if process:
B = process[0]
if A[e].is_Number and B[e].is_Number:
# both exponents are numbers
if A[f] == B[f]:
if A[si] != B[si]:
B = process.pop(0)
take = min(A[e], B[e])
# reinsert any remainder
# the B will likely sort after A so check it first
if B[e] != take:
rem = [B[i] for i in keys]
rem[e] -= take
process.insert(0, rem)
elif A[e] != take:
rem = [A[i] for i in keys]
rem[e] -= take
process.insert(0, rem)
if A[f].func is cos:
t = sin
else:
t = cos
other.append((-A[g]*B[g]*t(A[f].args[0])**2)**take)
continue
elif A[e] == B[e]:
# both exponents are equal symbols
if A[f] == B[f]:
if A[si] != B[si]:
B = process.pop(0)
take = A[e]
if A[f].func is cos:
t = sin
else:
t = cos
other.append((-A[g]*B[g]*t(A[f].args[0])**2)**take)
continue
# either we are done or neither condition above applied
other.append(A[a]**A[e])
if len(other) != nother:
rv = Mul(*other)
return rv
return bottom_up(rv, f)
def TR15(rv, max=4, pow=False):
"""Convert sin(x)*-2 to 1 + cot(x)**2.
See _TR56 docstring for advanced use of ``max`` and ``pow``.
Examples
========
>>> from sympy.simplify.fu import TR15
>>> from sympy.abc import x
>>> from sympy import cos, sin
>>> TR15(1 - 1/sin(x)**2)
-cot(x)**2
"""
def f(rv):
if not (isinstance(rv, Pow) and rv.base.func is sin):
return rv
ia = 1/rv
a = _TR56(ia, sin, cot, lambda x: 1 + x, max=max, pow=pow)
if a != ia:
rv = a
return rv
return bottom_up(rv, f)
def TR16(rv, max=4, pow=False):
"""Convert cos(x)*-2 to 1 + tan(x)**2.
See _TR56 docstring for advanced use of ``max`` and ``pow``.
Examples
========
>>> from sympy.simplify.fu import TR16
>>> from sympy.abc import x
>>> from sympy import cos, sin
>>> TR16(1 - 1/cos(x)**2)
-tan(x)**2
"""
def f(rv):
if not (isinstance(rv, Pow) and rv.base.func is cos):
return rv
ia = 1/rv
a = _TR56(ia, cos, tan, lambda x: 1 + x, max=max, pow=pow)
if a != ia:
rv = a
return rv
return bottom_up(rv, f)
def TR111(rv):
"""Convert f(x)**-i to g(x)**i where either ``i`` is an integer
or the base is positive and f, g are: tan, cot; sin, csc; or cos, sec.
Examples
========
>>> from sympy.simplify.fu import TR111
>>> from sympy.abc import x
>>> from sympy import tan
>>> TR111(1 - 1/tan(x)**2)
-cot(x)**2 + 1
"""
def f(rv):
if not (
isinstance(rv, Pow) and
(rv.base.is_positive or rv.exp.is_integer and rv.exp.is_negative)):
return rv
if rv.base.func is tan:
return cot(rv.base.args[0])**-rv.exp
elif rv.base.func is sin:
return csc(rv.base.args[0])**-rv.exp
elif rv.base.func is cos:
return sec(rv.base.args[0])**-rv.exp
return rv
return bottom_up(rv, f)
def TR22(rv, max=4, pow=False):
"""Convert tan(x)**2 to sec(x)**2 - 1 and cot(x)**2 to csc(x)**2 - 1.
See _TR56 docstring for advanced use of ``max`` and ``pow``.
Examples
========
>>> from sympy.simplify.fu import TR22
>>> from sympy.abc import x
>>> from sympy import tan, cot
>>> TR22(1 + tan(x)**2)
sec(x)**2
>>> TR22(1 + cot(x)**2)
csc(x)**2
"""
def f(rv):
if not (isinstance(rv, Pow) and rv.base.func in (cot, tan)):
return rv
rv = _TR56(rv, tan, sec, lambda x: x - 1, max=max, pow=pow)
rv = _TR56(rv, cot, csc, lambda x: x - 1, max=max, pow=pow)
return rv
return bottom_up(rv, f)
def L(rv):
"""Return count of trigonometric functions in expression.
Examples
========
>>> from sympy.simplify.fu import L
>>> from sympy.abc import x
>>> from sympy import cos, sin
>>> L(cos(x)+sin(x))
2
"""
return S(rv.count(C.TrigonometricFunction))
# ============== end of basic Fu-like tools =====================
if SYMPY_DEBUG:
(TR0, TR1, TR2, TR3, TR4, TR5, TR6, TR7, TR8, TR9, TR10, TR11, TR12, TR13,
TR2i, TRmorrie, TR14, TR15, TR16, TR12i, TR111, TR22
)= list(map(debug,
(TR0, TR1, TR2, TR3, TR4, TR5, TR6, TR7, TR8, TR9, TR10, TR11, TR12, TR13,
TR2i, TRmorrie, TR14, TR15, TR16, TR12i, TR111, TR22)))
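# (Descriptive note: when SYMPY_DEBUG is set, each rule above is wrapped
# by ``debug`` from sympy.strategies.core, which prints the expression a
# rule receives and returns whenever it fires -- handy for tracing which
# transformations ``fu`` actually applies.)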
# tuples are chains -- (f, g) -> lambda x: g(f(x))
# lists are choices -- [f, g] -> lambda x: min(f(x), g(x), key=objective)
CTR1 = [(TR5, TR0), (TR6, TR0), identity]
CTR2 = (TR11, [(TR5, TR0), (TR6, TR0), TR0])
CTR3 = [(TRmorrie, TR8, TR0), (TRmorrie, TR8, TR10i, TR0), identity]
CTR4 = [(TR4, TR10i), identity]
RL1 = (TR4, TR3, TR4, TR12, TR4, TR13, TR4, TR0)
# XXX it's a little unclear how this one is to be implemented
# see the Fu paper referenced above, page 7. What is the Union symbol referring to?
# The diagram shows all these as one chain of transformations, but the
# text refers to them being applied independently. Also, a break
# if L starts to increase has not been implemented.
RL2 = [
(TR4, TR3, TR10, TR4, TR3, TR11),
(TR5, TR7, TR11, TR4),
(CTR3, CTR1, TR9, CTR2, TR4, TR9, TR9, CTR4),
identity,
]
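# Hedged illustration of the chain/choice convention above (the names
# here are for exposition only and are not part of this module):
#
#   chain = (TR5, TR0)        # composes: x -> TR0(TR5(x))
#   choice = [TR5, TR6, TR0]  # keeps whichever result minimizes the
#                             # objective handed to ``greedy``
#
# ``greedy`` (imported from sympy.strategies.tree above) turns such a
# nested description into a single callable, as done in ``fu`` below.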
def fu(rv, measure=lambda x: (L(x), x.count_ops())):
"""Attempt to simplify expression by using transformation rules given
in the algorithm by Fu et al.
:func:`fu` will try to minimize the objective function ``measure``.
By default this first minimizes the number of trig terms and then minimizes
the number of total operations.
Examples
========
>>> from sympy.simplify.fu import fu
>>> from sympy import cos, sin, tan, pi, S, sqrt
>>> from sympy.abc import x, y, a, b
>>> fu(sin(50)**2 + cos(50)**2 + sin(pi/6))
3/2
>>> fu(sqrt(6)*cos(x) + sqrt(2)*sin(x))
2*sqrt(2)*sin(x + pi/3)
CTR1 example
>>> eq = sin(x)**4 - cos(y)**2 + sin(y)**2 + 2*cos(x)**2
>>> fu(eq)
cos(x)**4 - 2*cos(y)**2 + 2
CTR2 example
>>> fu(S.Half - cos(2*x)/2)
sin(x)**2
CTR3 example
>>> fu(sin(a)*(cos(b) - sin(b)) + cos(a)*(sin(b) + cos(b)))
sqrt(2)*sin(a + b + pi/4)
CTR4 example
>>> fu(sqrt(3)*cos(x)/2 + sin(x)/2)
sin(x + pi/3)
Example 1
>>> fu(1-sin(2*x)**2/4-sin(y)**2-cos(x)**4)
-cos(x)**2 + cos(y)**2
Example 2
>>> fu(cos(4*pi/9))
sin(pi/18)
>>> fu(cos(pi/9)*cos(2*pi/9)*cos(3*pi/9)*cos(4*pi/9))
1/16
Example 3
>>> fu(tan(7*pi/18)+tan(5*pi/18)-sqrt(3)*tan(5*pi/18)*tan(7*pi/18))
-sqrt(3)
Objective function example
>>> fu(sin(x)/cos(x)) # default objective function
tan(x)
>>> fu(sin(x)/cos(x), measure=lambda x: -x.count_ops()) # maximize op count
sin(x)/cos(x)
References
==========
http://rfdz.ph-noe.ac.at/fileadmin/Mathematik_Uploads/ACDCA/
DESTIME2006/DES_contribs/Fu/simplification.pdf
"""
fRL1 = greedy(RL1, measure)
fRL2 = greedy(RL2, measure)
was = rv
rv = sympify(rv)
if not isinstance(rv, C.Expr):
return rv.func(*[fu(a, measure=measure) for a in rv.args])
rv = TR1(rv)
if rv.has(tan, cot):
rv1 = fRL1(rv)
if (measure(rv1) < measure(rv)):
rv = rv1
if rv.has(tan, cot):
rv = TR2(rv)
if rv.has(sin, cos):
rv1 = fRL2(rv)
rv2 = TR8(TRmorrie(rv1))
rv = min([was, rv, rv1, rv2], key=measure)
return min(TR2i(rv), rv, key=measure)
def process_common_addends(rv, do, key2=None, key1=True):
"""Apply ``do`` to addends of ``rv`` that (if key1=True) share at least
a common absolute value of their coefficient and the value of ``key2`` when
applied to the argument. If ``key1`` is False ``key2`` must be supplied and
will be the only key applied.
"""
# collect by absolute value of coefficient and key2
absc = defaultdict(list)
if key1:
for a in rv.args:
c, a = a.as_coeff_Mul()
if c < 0:
c = -c
a = -a # put the sign on `a`
absc[(c, key2(a) if key2 else 1)].append(a)
elif key2:
for a in rv.args:
absc[(S.One, key2(a))].append(a)
else:
raise ValueError('must have at least one key')
args = []
hit = False
for k in absc:
v = absc[k]
c, _ = k
if len(v) > 1:
e = Add(*v, evaluate=False)
new = do(e)
if new != e:
e = new
hit = True
args.append(c*e)
else:
args.append(c*v[0])
if hit:
rv = Add(*args)
return rv
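def _process_common_addends_demo():
    # Hedged usage sketch (demo only, not part of the original module):
    # the addends of sin(x) + 2*sin(y) - 2*sin(z) are grouped by the
    # absolute value of their coefficients; with an identity ``do``
    # nothing is rewritten, so the expression comes back unchanged --
    # the point is only to show the call structure.
    from sympy.abc import x, y, z
    expr = sin(x) + 2*sin(y) - 2*sin(z)
    return process_common_addends(expr, lambda e: e)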
fufuncs = '''
TR0 TR1 TR2 TR3 TR4 TR5 TR6 TR7 TR8 TR9 TR10 TR10i TR11
TR12 TR13 L TR2i TRmorrie TR12i
TR14 TR15 TR16 TR111 TR22'''.split()
FU = dict(list(zip(fufuncs, list(map(locals().get, fufuncs)))))
def _roots():
global _ROOT2, _ROOT3, _invROOT3
_ROOT2, _ROOT3 = sqrt(2), sqrt(3)
_invROOT3 = 1/_ROOT3
_ROOT2 = None
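# Descriptive note: the radicals are initialized lazily -- _ROOT2 stays
# None until the first caller (TR10i or trig_split) invokes _roots().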
def trig_split(a, b, two=False):
"""Return the gcd, s1, s2, a1, a2, bool where
If two is False (default) then::
a + b = gcd*(s1*f(a1) + s2*f(a2)) where f = cos if bool else sin
else:
if bool, a + b was +/- cos(a1)*cos(a2) +/- sin(a1)*sin(a2) and equals
n1*gcd*cos(a - b) if n1 == n2 else
n1*gcd*cos(a + b)
else a + b was +/- cos(a1)*sin(a2) +/- sin(a1)*cos(a2) and equals
        n1*gcd*sin(a + b) if n1 == n2 else
n1*gcd*sin(b - a)
Examples
========
>>> from sympy.simplify.fu import trig_split
>>> from sympy.abc import x, y, z
>>> from sympy import cos, sin, sqrt
>>> trig_split(cos(x), cos(y))
(1, 1, 1, x, y, True)
>>> trig_split(2*cos(x), -2*cos(y))
(2, 1, -1, x, y, True)
>>> trig_split(cos(x)*sin(y), cos(y)*sin(y))
(sin(y), 1, 1, x, y, True)
>>> trig_split(cos(x), -sqrt(3)*sin(x), two=True)
(2, 1, -1, x, pi/6, False)
>>> trig_split(cos(x), sin(x), two=True)
(sqrt(2), 1, 1, x, pi/4, False)
>>> trig_split(cos(x), -sin(x), two=True)
(sqrt(2), 1, -1, x, pi/4, False)
>>> trig_split(sqrt(2)*cos(x), -sqrt(6)*sin(x), two=True)
(2*sqrt(2), 1, -1, x, pi/6, False)
>>> trig_split(-sqrt(6)*cos(x), -sqrt(2)*sin(x), two=True)
(-2*sqrt(2), 1, 1, x, pi/3, False)
>>> trig_split(cos(x)/sqrt(6), sin(x)/sqrt(2), two=True)
(sqrt(6)/3, 1, 1, x, pi/6, False)
>>> trig_split(-sqrt(6)*cos(x)*sin(y), -sqrt(2)*sin(x)*sin(y), two=True)
(-2*sqrt(2)*sin(y), 1, 1, x, pi/3, False)
>>> trig_split(cos(x), sin(x))
>>> trig_split(cos(x), sin(z))
>>> trig_split(2*cos(x), -sin(x))
>>> trig_split(cos(x), -sqrt(3)*sin(x))
>>> trig_split(cos(x)*cos(y), sin(x)*sin(z))
>>> trig_split(cos(x)*cos(y), sin(x)*sin(y))
>>> trig_split(-sqrt(6)*cos(x), sqrt(2)*sin(x)*sin(y), two=True)
"""
global _ROOT2, _ROOT3, _invROOT3
if _ROOT2 is None:
_roots()
a, b = [Factors(i) for i in (a, b)]
ua, ub = a.normal(b)
gcd = a.gcd(b).as_expr()
n1 = n2 = 1
if S.NegativeOne in ua.factors:
ua = ua.quo(S.NegativeOne)
n1 = -n1
elif S.NegativeOne in ub.factors:
ub = ub.quo(S.NegativeOne)
n2 = -n2
a, b = [i.as_expr() for i in (ua, ub)]
def pow_cos_sin(a, two):
"""Return ``a`` as a tuple (r, c, s) such that
``a = (r or 1)*(c or 1)*(s or 1)``.
Three arguments are returned (radical, c-factor, s-factor) as
long as the conditions set by ``two`` are met; otherwise None is
returned. If ``two`` is True there will be one or two non-None
values in the tuple: c and s or c and r or s and r or s or c with c
being a cosine function (if possible) else a sine, and s being a sine
        function (if possible) else a cosine. If ``two`` is False then there
will only be a c or s term in the tuple.
        ``two`` also requires that either two cos and/or sin be present (with
the condition that if the functions are the same the arguments are
different or vice versa) or that a single cosine or a single sine
be present with an optional radical.
If the above conditions dictated by ``two`` are not met then None
is returned.
"""
c = s = None
co = S.One
if a.is_Mul:
co, a = a.as_coeff_Mul()
if len(a.args) > 2 or not two:
return None
if a.is_Mul:
args = list(a.args)
else:
args = [a]
a = args.pop(0)
if a.func is cos:
c = a
elif a.func is sin:
s = a
elif a.is_Pow and a.exp is S.Half: # autoeval doesn't allow -1/2
co *= a
else:
return None
if args:
b = args[0]
if b.func is cos:
if c:
s = b
else:
c = b
elif b.func is sin:
if s:
c = b
else:
s = b
elif b.is_Pow and b.exp is S.Half:
co *= b
else:
return None
return co if co is not S.One else None, c, s
elif a.func is cos:
c = a
elif a.func is sin:
s = a
if c is None and s is None:
return
co = co if co is not S.One else None
return co, c, s
# get the parts
m = pow_cos_sin(a, two)
if m is None:
return
coa, ca, sa = m
m = pow_cos_sin(b, two)
if m is None:
return
cob, cb, sb = m
# check them
if (not ca) and cb or ca and ca.func is sin:
coa, ca, sa, cob, cb, sb = cob, cb, sb, coa, ca, sa
n1, n2 = n2, n1
if not two: # need cos(x) and cos(y) or sin(x) and sin(y)
c = ca or sa
s = cb or sb
if c.func is not s.func:
return None
return gcd, n1, n2, c.args[0], s.args[0], c.func is cos
else:
if not coa and not cob:
if (ca and cb and sa and sb):
if not ((ca.func is sa.func) is (cb.func is sb.func)):
return
args = set([j.args for j in (ca, sa)])
if not all(i.args in args for i in (cb, sb)):
return
return gcd, n1, n2, ca.args[0], sa.args[0], ca.func is sa.func
if ca and sa or cb and sb or \
two and (ca is None and sa is None or cb is None and sb is None):
return
c = ca or sa
s = cb or sb
if c.args != s.args:
return
if not coa:
coa = S.One
if not cob:
cob = S.One
if coa is cob:
gcd *= _ROOT2
return gcd, n1, n2, c.args[0], pi/4, False
elif coa/cob == _ROOT3:
gcd *= 2*cob
return gcd, n1, n2, c.args[0], pi/3, False
elif coa/cob == _invROOT3:
gcd *= 2*coa
return gcd, n1, n2, c.args[0], pi/6, False
def as_f_sign_1(e):
"""If ``e`` is a sum that can be written as ``g*(a + s)`` where
``s`` is ``+/-1``, return ``g``, ``a``, and ``s`` where ``a`` does
not have a leading negative coefficient.
Examples
========
>>> from sympy.simplify.fu import as_f_sign_1
>>> from sympy.abc import x
>>> as_f_sign_1(x + 1)
(1, x, 1)
>>> as_f_sign_1(x - 1)
(1, x, -1)
>>> as_f_sign_1(-x + 1)
(-1, x, -1)
>>> as_f_sign_1(-x - 1)
(-1, x, 1)
>>> as_f_sign_1(2*x + 2)
(2, x, 1)
"""
if not e.is_Add or len(e.args) != 2:
return
# exact match
a, b = e.args
if a in (S.NegativeOne, S.One):
g = S.One
if b.is_Mul and b.args[0].is_Number and b.args[0] < 0:
a, b = -a, -b
g = -g
return g, b, a
# gcd match
a, b = [Factors(i) for i in e.args]
ua, ub = a.normal(b)
gcd = a.gcd(b).as_expr()
if S.NegativeOne in ua.factors:
ua = ua.quo(S.NegativeOne)
n1 = -1
n2 = 1
elif S.NegativeOne in ub.factors:
ub = ub.quo(S.NegativeOne)
n1 = 1
n2 = -1
else:
n1 = n2 = 1
a, b = [i.as_expr() for i in (ua, ub)]
if a is S.One:
a, b = b, a
n1, n2 = n2, n1
if n1 == -1:
gcd = -gcd
n2 = -n2
if b is S.One:
return gcd, a, n2
def _osborne(e):
"""Replace all hyperbolic functions with trig functions using
the Osborne rule.
References
==========
http://en.wikipedia.org/wiki/Hyperbolic_function
"""
def f(rv):
if not isinstance(rv, C.HyperbolicFunction):
return rv
if rv.func is sinh:
return I*sin(rv.args[0])
elif rv.func is cosh:
return cos(rv.args[0])
elif rv.func is tanh:
return I*tan(rv.args[0])
elif rv.func is coth:
return cot(rv.args[0])/I
else:
raise NotImplementedError('unhandled %s' % rv.func)
return bottom_up(e, f)
def _osbornei(e):
"""Replace all trig functions with hyperbolic functions using
the Osborne rule.
References
==========
http://en.wikipedia.org/wiki/Hyperbolic_function
"""
def f(rv):
if not isinstance(rv, C.TrigonometricFunction):
return rv
if rv.func is sin:
return sinh(rv.args[0])/I
elif rv.func is cos:
return cosh(rv.args[0])
elif rv.func is tan:
return tanh(rv.args[0])/I
elif rv.func is cot:
return coth(rv.args[0])*I
elif rv.func is sec:
return 1/cosh(rv.args[0])
elif rv.func is csc:
return I/sinh(rv.args[0])
else:
raise NotImplementedError('unhandled %s' % rv.func)
return bottom_up(e, f)
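def _osborne_roundtrip_demo():
    # Hedged sketch (demo only, not part of the original module): applying
    # _osborne and then _osbornei should reproduce the starting hyperbolic
    # expression, e.g. sinh(x) -> I*sin(x) -> sinh(x).
    from sympy.abc import x
    e = sinh(x) + cosh(x)
    return _osbornei(_osborne(e)) == e  # expected: True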
def hyper_as_trig(rv):
"""Return an expression containing hyperbolic functions in terms
of trigonometric functions. Any trigonometric functions initially
present are replaced with Dummy symbols and the function to undo
the masking and the conversion back to hyperbolics is also returned. It
should always be true that::
t, f = hyper_as_trig(expr)
expr == f(t)
Examples
========
>>> from sympy.simplify.fu import hyper_as_trig, fu
>>> from sympy.abc import x
>>> from sympy import cosh, sinh
>>> eq = sinh(x)**2 + cosh(x)**2
>>> t, f = hyper_as_trig(eq)
>>> f(fu(t))
cosh(2*x)
References
==========
http://en.wikipedia.org/wiki/Hyperbolic_function
"""
from sympy.simplify.simplify import signsimp
# mask of trig functions
trigs = rv.atoms(C.TrigonometricFunction)
reps = [(t, Dummy()) for t in trigs]
masked = rv.xreplace(dict(reps))
# get inversion substitutions in place
reps = [(v, k) for k, v in reps]
return _osborne(masked), lambda x: signsimp(
_osbornei(x).xreplace(dict(reps)))
|
alephu5/Soundbyte
|
environment/lib/python3.3/site-packages/sympy/simplify/fu.py
|
Python
|
gpl-3.0
| 63,469
|
import logging
from unittest.mock import MagicMock, call
from ert_logging._log_util_abort import _log_util_abort
def test_log_util_abort(caplog, monkeypatch):
shutdown_mock = MagicMock()
monkeypatch.setattr(logging, "shutdown", shutdown_mock)
with caplog.at_level(logging.ERROR):
_log_util_abort("fname", 1, "some_func", "err_message", "my_backtrace")
assert (
"C trace:\nmy_backtrace \nwith message: err_message \nfrom file: "
"fname in some_func at line: 1\n\nPython backtrace:"
) in caplog.text
shutdown_mock.assert_called_once_with() # must shutdown to propagate message
|
joakim-hove/ert
|
tests/ert_tests/shared/test_log_abort.py
|
Python
|
gpl-3.0
| 626
|
# -*- coding: utf-8 -*-
"""Colour class.
This module contains a class implementing an RGB colour.
"""
__author__ = 'Florian Krause <florian@expyriment.org>, \
Oliver Lindemann <oliver@expyriment.org>'
__version__ = ''
__revision__ = ''
__date__ = ''
import colorsys
from . import round
# The named colours are the 140 HTML colour names:
# see https://www.w3schools.com/colors/colors_names.asp
_colours = {
'aliceblue': (240, 248, 255),
'antiquewhite': (250, 235, 215),
'aqua': (0, 255, 255),
'aquamarine': (127, 255, 212),
'azure': (240, 255, 255),
'beige': (245, 245, 220),
'bisque': (255, 228, 196),
'black': (0, 0, 0),
'blanchedalmond': (255, 235, 205),
'blue': (0, 0, 255),
'blueviolet': (138, 43, 226),
'brown': (165, 42, 42),
'burlywood': (222, 184, 135),
'cadetblue': (95, 158, 160),
'chartreuse': (127, 255, 0),
'chocolate': (210, 105, 30),
'coral': (255, 127, 80),
'cornflowerblue': (100, 149, 237),
'cornsilk': (255, 248, 220),
'crimson': (220, 20, 60),
'cyan': (0, 255, 255),
'darkblue': (0, 0, 139),
'darkcyan': (0, 139, 139),
'darkgoldenrod': (184, 134, 11),
'darkgray': (169, 169, 169),
'darkgreen': (0, 100, 0),
'darkkhaki': (189, 183, 107),
'darkmagenta': (139, 0, 139),
'darkolivegreen': (85, 107, 47),
'darkorange': (255, 140, 0),
'darkorchid': (153, 50, 204),
'darkred': (139, 0, 0),
'darksalmon': (233, 150, 122),
'darkseagreen': (143, 188, 143),
'darkslateblue': (72, 61, 139),
'darkslategray': (47, 79, 79),
'darkturquoise': (0, 206, 209),
'darkviolet': (148, 0, 211),
'deeppink': (255, 20, 147),
'deepskyblue': (0, 191, 255),
'dimgray': (105, 105, 105),
'dodgerblue': (30, 144, 255),
'firebrick': (178, 34, 34),
'floralwhite': (255, 250, 240),
'forestgreen': (34, 139, 34),
'fuchsia': (255, 0, 255),
'gainsboro': (220, 220, 220),
'ghostwhite': (248, 248, 255),
'gold': (255, 215, 0),
'goldenrod': (218, 165, 32),
'gray': (128, 128, 128),
'green': (0, 128, 0),
'greenyellow': (173, 255, 47),
'honeydew': (240, 255, 240),
'hotpink': (255, 105, 180),
'indianred': (205, 92, 92),
'indigo': (75, 0, 130),
'ivory': (255, 255, 240),
'khaki': (240, 230, 140),
'lavender': (230, 230, 250),
'lavenderblush': (255, 240, 245),
'lawngreen': (124, 252, 0),
'lemonchiffon': (255, 250, 205),
'lightblue': (173, 216, 230),
'lightcoral': (240, 128, 128),
'lightcyan': (224, 255, 255),
'lightgoldenrodyellow': (250, 250, 210),
'lightgray': (211, 211, 211),
'lightgreen': (144, 238, 144),
'lightpink': (255, 182, 193),
'lightsalmon': (255, 160, 122),
'lightseagreen': (32, 178, 170),
'lightskyblue': (135, 206, 250),
'lightslategray': (119, 136, 153),
'lightsteelblue': (176, 196, 222),
'lightyellow': (255, 255, 224),
'lime': (0, 255, 0),
'limegreen': (50, 205, 50),
'linen': (250, 240, 230),
'magenta': (255, 0, 255),
'maroon': (128, 0, 0),
'mediumaquamarine': (102, 205, 170),
'mediumblue': (0, 0, 205),
'mediumorchid': (186, 85, 211),
'mediumpurple': (147, 112, 219),
'mediumseagreen': (60, 179, 113),
'mediumslateblue': (123, 104, 238),
'mediumspringgreen': (0, 250, 154),
'mediumturquoise': (72, 209, 204),
'mediumvioletred': (199, 21, 133),
'midnightblue': (25, 25, 112),
'mintcream': (245, 255, 250),
'mistyrose': (255, 228, 225),
'moccasin': (255, 228, 181),
'navajowhite': (255, 222, 173),
'navy': (0, 0, 128),
'oldlace': (253, 245, 230),
'olive': (128, 128, 0),
'olivedrab': (107, 142, 35),
'orange': (255, 165, 0),
'orangered': (255, 69, 0),
'orchid': (218, 112, 214),
'palegoldenrod': (238, 232, 170),
'palegreen': (152, 251, 152),
'paleturquoise': (175, 238, 238),
'palevioletred': (219, 112, 147),
'papayawhip': (255, 239, 213),
'peachpuff': (255, 218, 185),
'peru': (205, 133, 63),
'pink': (255, 192, 203),
'plum': (221, 160, 221),
'powderblue': (176, 224, 230),
'purple': (128, 0, 128),
'red': (255, 0, 0),
'rosybrown': (188, 143, 143),
'royalblue': (65, 105, 225),
'saddlebrown': (139, 69, 19),
'salmon': (250, 128, 114),
'sandybrown': (244, 164, 96),
'seagreen': (46, 139, 87),
'seashell': (255, 245, 238),
'sienna': (160, 82, 45),
'silver': (192, 192, 192),
'skyblue': (135, 206, 235),
'slateblue': (106, 90, 205),
'slategray': (112, 128, 144),
'snow': (255, 250, 250),
'springgreen': (0, 255, 127),
'steelblue': (70, 130, 180),
'tan': (210, 180, 140),
'teal': (0, 128, 128),
'thistle': (216, 191, 216),
'tomato': (255, 99, 71),
'turquoise': (64, 224, 208),
'violet': (238, 130, 238),
'wheat': (245, 222, 179),
'white': (255, 255, 255),
'whitesmoke': (245, 245, 245),
'yellow': (255, 255, 0),
'yellowgreen': (154, 205, 50),
}
class Colour(object):
"""Implements a class representing an RGB colour."""
@staticmethod
def get_colour_names():
"""Get a dictionary of all known colour names."""
from collections import OrderedDict
return OrderedDict(sorted(_colours.items(), key=lambda t: t[0]))
@staticmethod
def is_rgb(value):
"""Check for valid RGB tuple value.
Parameters
----------
value : iterable of length 3 (e.g. [255, 0, 0])
the value to be checked
Returns
-------
valid : bool
whether the value is valid or not
"""
if not len(value) == 3:
return False
elif False in [isinstance(x, int) for x in value]:
return False
elif False in [0 <= x <= 255 for x in value]:
return False
else:
return True
@staticmethod
def is_name(value):
"""Check for valid colour name value.
Parameters
----------
value : str (e.g. "red")
the value to be checked
Returns
-------
valid : bool
whether the value is valid or not
"""
if not value in _colours.keys():
return False
else:
return True
@staticmethod
def is_hex(value):
"""Check for valid Hex triplet value.
Parameters
----------
value : string (e.g. "#FF0000")
the value to be checked
Returns
-------
valid : bool
whether the value is valid or not
"""
if not isinstance(value, str):
return False
value = value.lstrip("#")
if len(value) != 6:
return False
else:
for x in value.upper():
if x not in "0123456789ABCDEF":
return False
return True
@staticmethod
def is_hsv(value):
"""Check for valid HSV tuple value.
Parameters
----------
value : iterable of length 3 (e.g. [0, 100, 100])
the value to be checked
Returns
-------
valid : bool
whether the value is valid or not
"""
if not len(value) == 3:
return False
elif False in [isinstance(x, int) for x in value]:
return False
elif not 0 <= value[0] <= 360:
return False
elif False in [0 <= x <= 100 for x in value[1:]]:
return False
else:
return True
@staticmethod
def is_hsl(value):
"""Check for valid HSL tuple value.
Parameters
----------
value : iterable of length 3 (e.g. [0, 100, 50])
the value to be checked
Returns
-------
valid : bool
whether the value is valid or not
"""
return Colour.is_hsv(value)
@staticmethod
def is_colour(value):
"""Check for valid colour value.
Parameters
----------
value : any type
the value to be checked
Returns
-------
valid : bool
whether the value is valid or not
"""
return Colour.is_rgb(value) or\
Colour.is_name(value) or \
Colour.is_hex(value) or \
Colour.is_hsv(value) or\
Colour.is_hsl(value)
def __init__(self, colour):
"""Create an RGB colour.
Parameters
----------
colour : list or tuple or str
the colour to be created as either an RGB tuple (e.g.[255, 0, 0]),
a Hex triplet (e.g. "#FF0000") or a colour name (e.g. "red").
Notes
-----
All methods in Expyriment that have a colour parameter require RGB
colours. This class also allows RGB colours to be defined via HSV/HSL
values (hue [0-360], saturation [0-100], value/lightness [0-100]). To
do so, use the hsv or hsl property.
"""
if Colour.is_rgb(colour):
self.rgb = colour
elif Colour.is_hex(colour):
self.hex = colour
elif Colour.is_name(colour):
self.name = colour
else:
raise ValueError("'{0}' is not a valid colour!".format(colour) + \
"\nUse RGB tuple, Hex triplet or colour name.")
def __str__(self):
return "Colour(red={0}, green={1}, blue={2})".format(self._rgb[0],
self._rgb[1],
self._rgb[2])
def __eq__(self, other):
return self._rgb == other._rgb
def __ne__(self, other):
return self._rgb != other._rgb
def __getitem__(self, i):
return self._rgb[i]
def __len__(self):
return len(self._rgb)
@property
def rgb(self):
"""Getter for colour in RGB format [red, green, blue]."""
return self._rgb
@rgb.setter
def rgb(self, value):
"""Setter for colour in RGB format [red, green, blue]."""
if Colour.is_rgb(value):
self._rgb = tuple(value)
else:
raise ValueError("'{0}' is not a valid RGB colour!".format(value))
@property
def hex(self):
"""Getter for colour in Hex format "#RRGGBB"."""
return '#{:02X}{:02X}{:02X}'.format(self._rgb[0],
self._rgb[1],
self._rgb[2])
@hex.setter
def hex(self, value):
"""Setter for colour in Hex format "#RRGGBB"."""
if Colour.is_hex(value):
c = value.lstrip("#")
self._rgb = tuple(int(c[i:i + 2], 16) for i in (0, 2, 4))
else:
raise ValueError("'{0}' is not a valid Hex colour!".format(value))
@property
def name(self):
"""Getter for colour name (if available)."""
for name, rgb in _colours.items():
if rgb == self.rgb:
return name
return None
@name.setter
def name(self, value):
"""Setter for colour name."""
if Colour.is_name(value):
self._rgb = _colours[value.lower()]
else:
raise ValueError("'{0}' is not a valid colour name!".format(value))
@property
def hsv(self):
"""Getter for colour in HSV format [hue, saturation, value]."""
hsv = colorsys.rgb_to_hsv(*divide(self.rgb, 255.0))
rtn = list(multiply([hsv[0]], 360))
rtn.extend(multiply(hsv[1:], 100))
return rtn
@hsv.setter
def hsv(self, value):
"""Setter for colour in HSV format [hue, saturation, value]."""
if Colour.is_hsv(value):
hsv = list(divide([value[0]], 360))
hsv.extend(divide(value[1:], 100))
self._rgb = multiply(colorsys.hsv_to_rgb(*hsv), 255)
else:
raise ValueError("'{0}' is not a valid HSV colour!".format(value))
@property
def hsl(self):
"""Getter for colour in HSL format [hue, saturation, lightness]."""
hsl = colorsys.rgb_to_hls(*divide(self.rgb, 255.0))
rtn = list(multiply([hsl[0]], 360))
rtn.extend(multiply(hsl[1:], 100))
return rtn
@hsl.setter
def hsl(self, value):
"""Setter for colour in HSL format [hue, saturation, lightness]."""
if Colour.is_hsl(value):
hsl = list(divide([value[0]], 360))
hsl.extend(divide(value[1:], 100))
self._rgb = multiply(colorsys.hls_to_rgb(*hsl), 255)
else:
raise ValueError("'{0}' is not a valid HSL colour!".format(value))
# Helper functions
def multiply(v, d):
return tuple(map(lambda x:int(round(x*d)), v))
def divide(v, d):
return tuple(map(lambda x:x/float(d), v))
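# Hedged usage sketch (demo only, not part of the original module):
#
# >>> c = Colour("crimson")
# >>> c.rgb
# (220, 20, 60)
# >>> c.hex
# '#DC143C'
# >>> Colour([255, 0, 0]) == Colour("#FF0000") == Colour("red")
# True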
|
expyriment/expyriment
|
expyriment/misc/_colour.py
|
Python
|
gpl-3.0
| 15,022
|
# -*- encoding: utf-8 -*-
"""Test class for Smart/Puppet Variables
:Requirement: Smart_Variables
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: API
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import json
from random import choice, uniform
import yaml
from fauxfactory import gen_integer, gen_string
from nailgun import entities
from requests import HTTPError
from robottelo.api.utils import publish_puppet_module
from robottelo.constants import CUSTOM_PUPPET_REPO
from robottelo.datafactory import (
filtered_datapoint,
generate_strings_list,
invalid_values_list,
valid_data_list,
)
from robottelo.decorators import (
run_only_on,
skip_if_bug_open,
stubbed,
tier1,
tier2,
)
from robottelo.test import APITestCase
@filtered_datapoint
def valid_sc_variable_data():
"""Returns a list of valid smart class variable types and values"""
return [
{
u'sc_type': 'string',
u'value': choice(generate_strings_list()),
},
{
u'sc_type': 'boolean',
u'value': choice([True, False]),
},
{
u'sc_type': 'integer',
u'value': gen_integer(),
},
{
u'sc_type': 'real',
u'value': uniform(-1000, 1000),
},
{
u'sc_type': 'array',
u'value': u'["{0}","{1}","{2}"]'.format(
gen_string('alpha'),
gen_string('numeric').lstrip('0'),
gen_string('html'),
),
},
{
u'sc_type': 'hash',
u'value': '{{ "{0}": "{1}" }}'.format(
gen_string('alpha'), gen_string('alpha')),
},
{
u'sc_type': 'yaml',
u'value': '--- {0}=>{1} ...'.format(
gen_string('alpha'), gen_string('alpha')),
},
{
u'sc_type': 'json',
u'value': u'{{"{0}":"{1}","{2}":"{3}"}}'.format(
gen_string('alpha'),
gen_string('numeric').lstrip('0'),
gen_string('alpha'),
gen_string('alphanumeric')
),
},
]
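# Illustrative sketch (editorial addition): each datapoint above is consumed
# via subTest in the test cases below, roughly as:
#     for data in valid_sc_variable_data():
#         with self.subTest(data):
#             entities.SmartVariable(puppetclass=...,
#                                    variable_type=data['sc_type'],
#                                    default_value=data['value']).create()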
@filtered_datapoint
def invalid_sc_variable_data():
"""Returns a list of invalid smart class variable type and values"""
return [
{
u'sc_type': 'boolean',
u'value': gen_string('alphanumeric'),
},
{
u'sc_type': 'integer',
u'value': gen_string('utf8'),
},
{
u'sc_type': 'real',
u'value': gen_string('alphanumeric'),
},
{
u'sc_type': 'array',
u'value': gen_string('alpha'),
},
{
u'sc_type': 'hash',
u'value': gen_string('alpha'),
},
{
u'sc_type': 'yaml',
u'value': '{{{0}:{1}}}'.format(
gen_string('alpha'), gen_string('alpha')),
},
{
u'sc_type': 'json',
u'value': u'{{{0}:{1},{2}:{3}}}'.format(
gen_string('alpha'),
gen_string('numeric').lstrip('0'),
gen_string('alpha'),
gen_string('alphanumeric')
),
}
]
class SmartVariablesTestCase(APITestCase):
"""Implements Smart Variables tests in API"""
@classmethod
def setUpClass(cls):
"""Import some parametrized puppet classes. This is required to make
sure that we have data to be able to perform interactions with smart
class variables.
"""
super(SmartVariablesTestCase, cls).setUpClass()
cls.puppet_modules = [
{'author': 'robottelo', 'name': 'api_test_variables'},
]
cls.org = entities.Organization().create()
cv = publish_puppet_module(
cls.puppet_modules, CUSTOM_PUPPET_REPO, cls.org)
cls.env = entities.Environment().search(
query={'search': u'content_view="{0}"'.format(cv.name)}
)[0]
# Find imported puppet class
cls.puppet_class = entities.PuppetClass().search(query={
'search': u'name = "{0}" and environment = "{1}"'.format(
cls.puppet_modules[0]['name'], cls.env.name)
})[0]
# And all its subclasses
cls.puppet_subclasses = entities.PuppetClass().search(query={
'search': u'name ~ "{0}::" and environment = "{1}"'.format(
cls.puppet_modules[0]['name'], cls.env.name)
})
    # TearDown breaks parallel test runs, as every test depends on the same
    # puppet class, which would be removed during TearDown.
# Uncomment for developing or debugging and do not forget to import
# `robottelo.api.utils.delete_puppet_class`.
#
# @classmethod
# def tearDownClass(cls):
# """Removes puppet class."""
# super(SmartVariablesTestCase, cls).tearDownClass()
# delete_puppet_class(cls.puppet_class.name)
@run_only_on('sat')
@tier1
def test_positive_create(self):
"""Create a Smart Variable with valid name
:id: 4cd20cca-d419-43f5-9734-e9ae1caae4cb
:steps: Create a smart Variable with Valid name and valid default value
:expectedresults: The smart Variable is created successfully
:CaseImportance: Critical
"""
for name in valid_data_list():
with self.subTest(name):
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
variable=name,
).create()
self.assertEqual(smart_variable.variable, name)
@run_only_on('sat')
@tier1
def test_negative_create(self):
"""Create a Smart Variable with invalid name
:id: d92f8bdd-93de-49ba-85a3-685aac9eda0a
:steps: Create a smart Variable with invalid name and valid default
value
:expectedresults: The smart Variable is not created
:CaseImportance: Critical
"""
for name in invalid_values_list():
with self.subTest(name), self.assertRaises(HTTPError):
entities.SmartVariable(
puppetclass=self.puppet_class,
variable=name,
).create()
@run_only_on('sat')
@tier1
def test_positive_delete_smart_variable_by_id(self):
"""Delete a Smart Variable by id
:id: 6d8354db-a028-4ae0-bcb6-87aa1cb9ec5d
:steps: Delete a smart Variable by id
:expectedresults: The smart Variable is deleted successfully
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class
).create()
smart_variable.delete()
with self.assertRaises(HTTPError) as context:
smart_variable.read()
self.assertRegexpMatches(
context.exception.response.text,
"Smart variable not found by id"
)
@run_only_on('sat')
@skip_if_bug_open('bugzilla', 1375857)
@tier1
def test_positive_update_variable_puppet_class(self):
"""Update Smart Variable's puppet class.
:id: 2312cb28-c3b0-4fbc-84cf-b66f0c0c64f0
:steps:
1. Create a smart variable with valid name.
2. Update the puppet class associated to the smart variable created
in step1.
:expectedresults: The variable is updated with new puppet class.
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
).create()
self.assertEqual(smart_variable.puppetclass.id, self.puppet_class.id)
new_puppet = entities.PuppetClass().search(query={
'search': 'name="{0}"'.format(choice(self.puppet_subclasses).name)
})[0]
smart_variable.puppetclass = new_puppet
updated_sv = smart_variable.update(['puppetclass'])
self.assertEqual(updated_sv.puppetclass.id, new_puppet.id)
@run_only_on('sat')
@tier1
def test_positive_update_name(self):
"""Update Smart Variable's name
:id: b8214eaa-e276-4fc4-8381-fb0386cda6a5
:steps:
1. Create a smart variable with valid name.
2. Update smart variable name created in step1.
:expectedresults: The variable is updated with new name.
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
).create()
for new_name in valid_data_list():
with self.subTest(new_name):
smart_variable.variable = new_name
smart_variable = smart_variable.update(['variable'])
self.assertEqual(smart_variable.variable, new_name)
@run_only_on('sat')
@tier1
def test_negative_duplicate_name_variable(self):
"""Create Smart Variable with an existing name.
:id: c49ad14d-913f-4adc-8ebf-88493556c027
:steps:
1. Create a smart Variable with Valid name and default value.
2. Attempt to create a variable with same name from same/other
class.
        :expectedresults: Variables with the same name are not allowed,
            whether created from the same or another class.
:CaseImportance: Critical
"""
name = gen_string('alpha')
entities.SmartVariable(
variable=name,
puppetclass=self.puppet_class,
).create()
with self.assertRaises(HTTPError) as context:
entities.SmartVariable(
variable=name,
puppetclass=self.puppet_class,
).create()
self.assertRegexpMatches(
context.exception.response.text,
"Key has already been taken"
)
@run_only_on('sat')
@tier2
def test_positive_list_variables_by_host_id(self):
"""List all the variables associated to Host by host id
:id: 4fc1f249-5da7-493b-a1d3-4ce7b625ad96
:expectedresults: All variables listed for Host
:CaseLevel: Integration
"""
entities.SmartVariable(puppetclass=self.puppet_class).create()
host = entities.Host(organization=self.org).create()
host.environment = self.env
host.update(['environment'])
host.add_puppetclass(data={'puppetclass_id': self.puppet_class.id})
self.assertGreater(len(host.list_smart_variables()['results']), 0)
@run_only_on('sat')
@tier2
def test_positive_list_variables_by_hostgroup_id(self):
"""List all the variables associated to HostGroup by hostgroup id
:id: db6861cc-b390-45bc-8c7d-cf10f46aecb3
:expectedresults: All variables listed for HostGroup
:CaseLevel: Integration
"""
entities.SmartVariable(puppetclass=self.puppet_class).create()
hostgroup = entities.HostGroup().create()
hostgroup.add_puppetclass(
data={'puppetclass_id': self.puppet_class.id})
self.assertGreater(len(hostgroup.list_smart_variables()['results']), 0)
@run_only_on('sat')
@tier1
def test_positive_list_variables_by_puppetclass_id(self):
"""List all the variables associated to puppet class by puppet class id
:id: cd743329-b354-4ddc-ada0-3ddd774e2701
:expectedresults: All variables listed for puppet class
:CaseImportance: Critical
"""
self.assertGreater(len(self.puppet_class.list_smart_variables()), 0)
@run_only_on('sat')
@tier1
def test_positive_create_variable_type(self):
"""Create variable for variable types - Valid Value
Types - string, boolean, integer, real, array, hash, yaml, json
:id: 4c8b4134-33c1-4f7f-83f9-a751c49ae2da
:steps: Create a variable with all valid key types and default values
:expectedresults: Variable created with all given types successfully
:CaseImportance: Critical
"""
for data in valid_sc_variable_data():
with self.subTest(data):
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
variable_type=data['sc_type'],
default_value=data['value'],
).create()
self.assertEqual(smart_variable.variable_type, data['sc_type'])
if data['sc_type'] in ('json', 'hash', 'array'):
self.assertEqual(
smart_variable.default_value, json.loads(data['value'])
)
elif data['sc_type'] == 'yaml':
self.assertEqual(
smart_variable.default_value, yaml.load(data['value']))
else:
self.assertEqual(
smart_variable.default_value, data['value'])
@run_only_on('sat')
@tier1
def test_negative_create_variable_type(self):
"""Negative variable Update for variable types - Invalid Value
Types - string, boolean, integer, real, array, hash, yaml, json
:id: 9709d67c-682f-4e6c-8b8b-f02f6c2d3b71
:steps: Create a variable with all valid key types and invalid default
values
:expectedresults: Variable is not created for invalid value
:CaseImportance: Critical
"""
for data in invalid_sc_variable_data():
with self.subTest(data):
with self.assertRaises(HTTPError) as context:
entities.SmartVariable(
puppetclass=self.puppet_class,
variable_type=data['sc_type'],
default_value=data['value'],
).create()
self.assertRegexpMatches(
context.exception.response.text,
"Default value is invalid"
)
@run_only_on('sat')
@tier1
def test_positive_create_matcher_empty_value(self):
"""Create matcher with empty value with string type
:id: a90b5bcd-f76c-4663-bf41-2f96e7e15c0f
:steps: Create a matcher for variable with empty value and type string
:expectedresults: Matcher is created with empty value
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
variable_type='string',
override_value_order='is_virtual',
).create()
entities.OverrideValue(
smart_variable=smart_variable,
match='is_virtual=true',
value='',
).create()
smart_variable = smart_variable.read()
self.assertEqual(
smart_variable.override_values[0]['match'], 'is_virtual=true')
self.assertEqual(
smart_variable.override_values[0]['value'], '')
@run_only_on('sat')
@tier1
def test_negative_create_matcher_empty_value(self):
"""Create matcher with empty value with type other than string
:id: ad24999f-1bed-4abb-a01f-3cb485d67968
:steps: Create a matcher for variable with empty value and type any
other than string
:expectedresults: Matcher is not created for empty value
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
default_value=gen_integer(),
variable_type='integer',
override_value_order='is_virtual',
).create()
with self.assertRaises(HTTPError) as context:
entities.OverrideValue(
smart_variable=smart_variable,
match='is_virtual=true',
value='',
).create()
self.assertEqual(len(smart_variable.read().override_values), 0)
self.assertRegexpMatches(
context.exception.response.text,
"Validation failed: Value is invalid integer"
)
@run_only_on('sat')
@tier1
def test_negative_create_with_invalid_match_value(self):
"""Attempt to create matcher with invalid match value.
:id: 625e3221-237d-4440-ab71-6d98cff67713
:steps: Create a matcher for variable with invalid match value
:expectedresults: Matcher is not created
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
).create()
with self.assertRaises(HTTPError) as context:
entities.OverrideValue(
smart_variable=smart_variable,
match='invalid_value',
value=gen_string('alpha'),
).create()
self.assertRegexpMatches(
context.exception.response.text,
"Validation failed: Match is invalid"
)
@run_only_on('sat')
@tier1
def test_negative_create_default_value_with_regex(self):
"""Create variable with non matching regex validator
:id: 0c80bd58-26aa-4c2a-a087-ed3b88b226a7
:steps:
            1. Create variable with default value that doesn't match the
               regex of step 2
2. Validate this value with regexp validator type and rule
:expectedresults: Variable is not created for non matching value with
regex
:CaseImportance: Critical
"""
value = gen_string('alpha')
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
default_value=value,
).create()
smart_variable.default_value = gen_string('alpha')
smart_variable.validator_type = 'regexp'
smart_variable.validator_rule = '[0-9]'
with self.assertRaises(HTTPError) as context:
smart_variable.update([
'default_value', 'validator_type', 'validator_rule'
])
self.assertRegexpMatches(
context.exception.response.text,
"Validation failed: Default value is invalid"
)
self.assertEqual(smart_variable.read().default_value, value)
@run_only_on('sat')
@tier1
def test_positive_create_default_value_with_regex(self):
"""Create variable with matching regex validator
:id: aa9803b9-9a45-4ad8-b502-e0e32fc4b7d8
:steps:
1. Create variable with default value that matches the regex of
step 2
2. Validate this value with regex validator type and rule
:expectedresults: Variable is created for matching value with regex
:CaseImportance: Critical
"""
value = gen_string('numeric')
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
default_value=gen_string('alpha'),
).create()
smart_variable.default_value = value
smart_variable.validator_type = 'regexp'
smart_variable.validator_rule = '[0-9]'
smart_variable.update([
'default_value', 'validator_type', 'validator_rule'
])
smart_variable = smart_variable.read()
self.assertEqual(smart_variable.default_value, value)
self.assertEqual(smart_variable.validator_type, 'regexp')
self.assertEqual(smart_variable.validator_rule, '[0-9]')
@run_only_on('sat')
@tier1
def test_negative_create_matcher_value_with_regex(self):
"""Create matcher with non matching regexp validator
:id: 8a0f9251-7992-4d1e-bace-7e32637bf56f
:steps:
            1. Create a matcher with value that doesn't match the regex of
               step 2
2. Validate this value with regex validator type and rule
:expectedresults: Matcher is not created for non matching value with
regexp
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
default_value=gen_string('numeric'),
validator_type='regexp',
validator_rule='[0-9]',
).create()
with self.assertRaises(HTTPError) as context:
entities.OverrideValue(
smart_variable=smart_variable,
match='domain=example.com',
value=gen_string('alpha'),
).create()
self.assertRegexpMatches(
context.exception.response.text,
"Validation failed: Value is invalid"
)
self.assertEqual(len(smart_variable.read().override_values), 0)
@run_only_on('sat')
@tier1
def test_positive_create_matcher_value_with_regex(self):
"""Create matcher with matching regex validator
:id: 3ad09261-eb55-4758-b915-84006c9e527c
:steps:
1. Create a matcher with value that matches the regex of step 2
2. Validate this value with regex validator type and rule
:expectedresults: Matcher is created for matching value with regex
:CaseImportance: Critical
"""
value = gen_string('numeric')
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
default_value=gen_string('numeric'),
validator_type='regexp',
validator_rule='[0-9]',
).create()
entities.OverrideValue(
smart_variable=smart_variable,
match='domain=example.com',
value=value,
).create()
smart_variable = smart_variable.read()
self.assertEqual(smart_variable.validator_type, 'regexp')
self.assertEqual(smart_variable.validator_rule, '[0-9]')
self.assertEqual(
smart_variable.override_values[0]['match'], 'domain=example.com')
self.assertEqual(
smart_variable.override_values[0]['value'], value)
@run_only_on('sat')
@tier1
def test_negative_create_default_value_with_list(self):
"""Create variable with non matching list validator
:id: cacb83a5-3e50-490b-b94f-a5d27f44ae12
:steps:
            1. Create variable with default value that doesn't match the list
               validator of step 2
2. Validate this value with list validator type and rule
:expectedresults: Variable is not created for non matching value with
list validator
:CaseImportance: Critical
"""
with self.assertRaises(HTTPError) as context:
entities.SmartVariable(
puppetclass=self.puppet_class,
default_value=gen_string('alphanumeric'),
validator_type='list',
validator_rule='5, test',
).create()
self.assertRegexpMatches(
context.exception.response.text,
r"Default value \w+ is not one of"
)
@run_only_on('sat')
@tier1
def test_positive_create_default_value_with_list(self):
"""Create variable with matching list validator
:id: 6bc2caa0-1300-4751-8239-34b96517465b
:steps:
1. Create variable with default value that matches the list
validator of step 2
2. Validate this value with list validator type and rule
:expectedresults: Variable is created for matching value with list
:CaseImportance: Critical
"""
# Generate list of values
values_list = [
gen_string('alpha'),
gen_string('alphanumeric'),
gen_integer(min_value=100),
choice(['true', 'false']),
]
# Generate string from list for validator_rule
values_list_str = ", ".join(str(x) for x in values_list)
value = choice(values_list)
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
default_value=value,
validator_type='list',
validator_rule=values_list_str,
).create()
self.assertEqual(smart_variable.default_value, value)
self.assertEqual(smart_variable.validator_type, 'list')
self.assertEqual(smart_variable.validator_rule, values_list_str)
@run_only_on('sat')
@tier1
def test_negative_create_matcher_value_with_list(self):
"""Create matcher with non matching list validator
:id: 0aff0fdf-5a62-49dc-abe1-b727459d030a
:steps:
            1. Create a matcher with value that doesn't match the list
               validator of step 2
2. Validate this value with list validator type and rule
:expectedresults: Matcher is not created for non matching value with
list validator
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
default_value='example',
validator_type='list',
validator_rule='test, example, 30',
).create()
with self.assertRaises(HTTPError) as context:
entities.OverrideValue(
smart_variable=smart_variable,
match='domain=example.com',
value='not_in_list',
).create()
self.assertRegexpMatches(
context.exception.response.text,
r"Validation failed: Value \w+ is not one of"
)
self.assertEqual(len(smart_variable.read().override_values), 0)
@run_only_on('sat')
@tier1
def test_positive_create_matcher_value_with_list(self):
"""Create matcher with matching list validator
:id: f5eda535-6623-4130-bea0-97faf350a6a6
:steps:
1. Create a matcher with value that matches the list validator of
step 2
2. Validate this value with list validator type and rule
:expectedresults: Matcher is created for matching value with list
validator
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
default_value='example',
validator_type='list',
validator_rule='test, example, 30',
).create()
entities.OverrideValue(
smart_variable=smart_variable,
match='domain=example.com',
value=30,
).create()
smart_variable = smart_variable.read()
self.assertEqual(smart_variable.validator_type, 'list')
self.assertEqual(smart_variable.validator_rule, 'test, example, 30')
self.assertEqual(
smart_variable.override_values[0]['match'], 'domain=example.com')
self.assertEqual(
smart_variable.override_values[0]['value'], 30)
@run_only_on('sat')
@skip_if_bug_open('bugzilla', 1375643)
@tier1
def test_negative_create_matcher_value_with_default_type(self):
"""Create matcher with non matching type of default value
:id: 790c63d7-4e8a-4187-8566-3d85d57f9a4f
:steps:
1. Create variable with valid type and value
            2. Create a matcher with a value that doesn't match the default
               type
        :expectedresults: Matcher is not created when its value does not match
            the type of the default value
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
default_value=True,
variable_type='boolean',
).create()
with self.assertRaises(HTTPError) as context:
entities.OverrideValue(
smart_variable=smart_variable,
match='domain=example.com',
value=50,
).create()
self.assertRegexpMatches(
context.exception.response.text,
"Validation failed: Value is invalid"
)
self.assertEqual(smart_variable.read().default_value, True)
@run_only_on('sat')
@tier1
def test_positive_create_matcher_value_with_default_type(self):
"""Create matcher with matching type of default value
:id: 99057f05-62cb-4230-b16c-d96ca6a5ae91
:steps:
1. Create variable with valid type and value
2. Create a matcher with value that matches the default value type
        :expectedresults: Matcher is created when its value matches the type
            of the default value
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
default_value=True,
variable_type='boolean',
override_value_order='is_virtual',
).create()
entities.OverrideValue(
smart_variable=smart_variable,
match='is_virtual=true',
value=False,
).create()
smart_variable = smart_variable.read()
self.assertEqual(
smart_variable.override_values[0]['match'], 'is_virtual=true')
self.assertEqual(
smart_variable.override_values[0]['value'], False)
@run_only_on('sat')
@tier1
def test_negative_create_matcher_non_existing_attribute(self):
"""Create matcher for non existing attribute
:id: 23b16e7f-0626-467e-b53b-35e1634cc30d
:steps: Create matcher for non existing attribute
:expectedresults: Matcher is not created for non existing attribute
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
).create()
with self.assertRaises(HTTPError) as context:
entities.OverrideValue(
smart_variable=smart_variable,
match='hostgroup=nonexistingHG',
value=gen_string('alpha')
).create()
self.assertRegexpMatches(
context.exception.response.text,
"Validation failed: Match hostgroup=nonexistingHG does not match "
"an existing host group"
)
self.assertEqual(len(smart_variable.read().override_values), 0)
@run_only_on('sat')
@tier1
def test_positive_create_matcher(self):
"""Create matcher for attribute in variable
:id: f0b3d51a-cf9a-4b43-9567-eb12cd973299
:steps: Create a matcher with all valid values
:expectedresults: The matcher has been created successfully
:CaseImportance: Critical
"""
value = gen_string('alpha')
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
).create()
entities.OverrideValue(
smart_variable=smart_variable,
match='domain=example.com',
value=value,
).create()
smart_variable = smart_variable.read()
self.assertEqual(
smart_variable.override_values[0]['match'], 'domain=example.com')
self.assertEqual(
smart_variable.override_values[0]['value'], value)
@run_only_on('sat')
@stubbed()
@tier1
def test_positive_update_variable_attribute_priority(self):
"""Variable value set on Attribute Priority for Host
:id: 78474b5e-7a50-4de0-b22c-3413ac06d067
:bz: 1362372
:steps:
1. Create variable with some valid value and type
2. Set fqdn as top priority attribute
3. Create first matcher for fqdn with valid details
4. Create second matcher for some attribute with valid details Note
- The FQDN/host should have this attribute
5. Check ENC output of associated host.
:expectedresults: The ENC output shows variable value of fqdn matcher
only
:caseautomation: notautomated
:CaseImportance: Critical
"""
@run_only_on('sat')
@stubbed()
@tier1
def test_negative_update_variable_attribute_priority(self):
"""Matcher Value set on Attribute Priority
for Host - alternate priority
:id: f6ef2193-5d63-43f1-8d91-e30984b2c0c5
:bz: 1362372
:steps:
1. Create variable with valid value and type
2. Set some attribute(other than fqdn) as top priority attribute
Note - The fqdn/host should have this attribute
3. Create first matcher for fqdn with valid details
4. Create second matcher for attribute of step 3 with valid details
5. Check ENC output of associated host.
:expectedresults: The ENC output shows variable value of step 4 matcher
only
:caseautomation: notautomated
:CaseImportance: Critical
"""
@run_only_on('sat')
@stubbed()
@tier1
def test_positive_update_variable_merge_override(self):
"""Merge the values of all the associated matchers
Note - This TC is only for array and hash key types
:id: bb37995e-71f9-441c-b4d5-79e5b5ff3973
:bz: 1362372
:steps:
1. Create variable with valid value and type
2. Create first matcher for attribute fqdn with valid details
3. Create second matcher for other attribute with valid details.
Note - The fqdn/host should have this attribute
4. Create more matchers for some more attributes if any Note - The
fqdn/host should have this attributes
5. Set 'merge overrides' to True
6. Check ENC output of associated host
:expectedresults:
1. The ENC output shows variable values merged from all the
associated matchers
2. The variable doesn't show the default value of variable.
            3. Duplicate values, if any, are displayed
:caseautomation: notautomated
:CaseImportance: Critical
"""
@run_only_on('sat')
@stubbed()
@tier1
def test_negative_update_variable_merge_override(self):
"""Merge the override values from non associated matchers
Note - This TC is only for array and hash key types
:id: afcb7ef4-38dd-484b-8a02-bc4e3d027204
:bz: 1362372
:steps:
1. Create variable with valid value and type
2. Create first matcher for attribute fqdn with valid details
3. Create second matcher for other attribute with valid details
Note - The fqdn/host should not have this attribute
4. Create more matchers for some more attributes if any Note - The
fqdn/host should not have this attributes
5. Set 'merge overrides' to True
6. Check ENC output of associated host
:expectedresults:
1. The ENC output shows variable values only for fqdn
2. The variable doesn't have the values for attribute which are not
associated to host
3. The variable doesn't have the default value of variable
4. Duplicate values if any are displayed
:caseautomation: notautomated
:CaseImportance: Critical
"""
@run_only_on('sat')
@stubbed()
@tier1
def test_positive_update_variable_merge_default(self):
"""Merge the values of all the associated matchers + default value
Note - This TC is only for array and hash key types
:id: 9607c52c-f4c7-468b-a741-d179de144646
:bz: 1362372
:steps:
1. Create variable with valid value and type
2. Create first matcher for attribute fqdn with valid details
3. Create second matcher for other attribute with valid details
Note - The fqdn/host should have this attribute
4. Create more matchers for some more attributes if any Note - The
fqdn/host should have this attributes
5. Set 'merge overrides' to True
6. Set 'merge default' to True
7. Check ENC output of associated host
:expectedresults:
1. The ENC output shows the variable values merged from all the
associated matchers
2. The variable values has the default value of variable
3. Duplicate values if any are displayed
:caseautomation: notautomated
:CaseImportance: Critical
"""
@run_only_on('sat')
@stubbed()
@tier1
def test_negative_update_variable_merge_default(self):
"""Empty default value is not shown in merged values
Note - This TC is only for array and hash key types
:id: 9033de15-f7e8-42be-b2be-c04c13aa039b
:bz: 1362372
:steps:
1. Create variable with empty value and type
2. Create first matcher for attribute fqdn with valid details
3. Create second matcher for other attribute with valid details
Note - The fqdn/host should have this attribute
4. Create more matchers for some more attributes if any Note - The
fqdn/host should have this attributes
5. Set 'merge overrides' to True
6. Set 'merge default' to True
7. Check ENC output of associated host
:expectedresults:
1. The ENC output shows variable values merged from all the
associated matchers
2. The variable doesn't have the empty default value of variable
3. Duplicate values if any are displayed
:caseautomation: notautomated
:CaseImportance: Critical
"""
@run_only_on('sat')
@stubbed()
@tier1
def test_positive_update_variable_avoid_duplicate(self):
"""Merge the values of all the associated matchers, remove duplicates
Note - This TC is only for array and hash key types
:id: fcb2dfb9-64d6-4647-bbcc-3e5c900aca1b
:bz: 1362372
:steps:
1. Create variable with valid value and type
2. Create first matcher for attribute fqdn with some value
3. Create second matcher for other attribute with same value as
fqdn matcher. Note - The fqdn/host should have this attribute
4. Set 'merge overrides' to True
5. Set 'merge default' to True
6. Set 'avoid duplicate' to True
7. Check ENC output of associated host
:expectedresults:
1. The ENC output shows the variable values merged from all the
associated matchers
2. The variable shows the default value of variable
3. Duplicate values are removed / not displayed
:caseautomation: notautomated
:CaseImportance: Critical
"""
@run_only_on('sat')
@stubbed()
@tier1
def test_negative_update_variable_avoid_duplicate(self):
"""Duplicates are not removed as they were not really present
Note - This TC is only for array and hash key types
:id: 1f8a06de-0c53-424e-b2c9-b48a580d6298
:bz: 1362372
:steps:
1. Create variable with valid value and type
2. Create first matcher for attribute fqdn with some value
3. Create second matcher for other attribute with other value than
fqdn matcher and default value. Note - The fqdn/host should
have this attribute
4. Set 'merge overrides' to True
5. Set 'merge default' to True
6. Set 'avoid duplicates' to True
7. Check ENC output of associated host
:expectedresults:
1. The ENC output shows the variable values merged from all
matchers
2. The variable shows default value of variable
3. No value removed as duplicate value
:caseautomation: notautomated
:CaseImportance: Critical
"""
@run_only_on('sat')
@tier1
def test_positive_enable_merge_overrides_and_default_flags(self):
"""Enable Merge Overrides, Merge Default flags for supported types
:id: af2c16e1-9a78-4615-9bc3-34fadca6a179
:steps: Set variable type to array/hash
        :expectedresults: The Merge Overrides and Merge Default flags can be
            enabled
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
default_value=[gen_integer()],
variable_type='array',
).create()
smart_variable.merge_overrides = True
smart_variable.merge_default = True
smart_variable.update(['merge_overrides', 'merge_default'])
        smart_variable = smart_variable.read()
self.assertEqual(smart_variable.merge_overrides, True)
self.assertEqual(smart_variable.merge_default, True)
@run_only_on('sat')
@tier1
def test_negative_enable_merge_overrides_default_flags(self):
"""Disable Merge Overrides, Merge Default flags for non supported types
:id: f62a7e23-6fb4-469a-8589-4c987ff589ef
:steps: Set variable type other than array/hash
        :expectedresults: The Merge Overrides and Merge Default flags cannot
            be enabled
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
default_value='50',
variable_type='string',
).create()
with self.assertRaises(HTTPError) as context:
smart_variable.merge_overrides = True
smart_variable.update(['merge_overrides'])
self.assertRegexpMatches(
context.exception.response.text,
"Validation failed: Merge overrides can only be set for "
"array or hash"
)
with self.assertRaises(HTTPError) as context:
smart_variable.merge_default = True
smart_variable.update(['merge_default'])
self.assertRegexpMatches(
context.exception.response.text,
"Validation failed: Merge default can only be set when merge "
"overrides is set"
)
smart_variable = smart_variable.read()
self.assertEqual(smart_variable.merge_overrides, False)
self.assertEqual(smart_variable.merge_default, False)
@run_only_on('sat')
@tier1
def test_positive_enable_avoid_duplicates_flag(self):
"""Enable Avoid duplicates flag for supported type
:id: 98fb1884-ad2b-45a0-b376-66bbc5ef6f72
:steps:
1. Set variable type to array
2. Set 'merge overrides' to True
        :expectedresults: The Avoid Duplicates flag can be set to True
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
default_value=[gen_integer()],
variable_type='array',
).create()
smart_variable.merge_overrides = True
smart_variable.avoid_duplicates = True
        smart_variable = smart_variable.update(
            ['merge_overrides', 'avoid_duplicates'])
self.assertEqual(smart_variable.merge_overrides, True)
self.assertEqual(smart_variable.avoid_duplicates, True)
@run_only_on('sat')
@tier1
def test_negative_enable_avoid_duplicates_flag(self):
"""Disable Avoid duplicates flag for non supported types
:id: c7a2f718-6346-4851-b5f1-ab36c2fa8c6a
:steps: Set variable type other than array
:expectedresults:
            1. The Merge Overrides flag can be enabled only for the array and
               hash types
            2. The Avoid Duplicates flag cannot be enabled for any type other
               than array
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
default_value=True,
variable_type='boolean',
).create()
with self.assertRaises(HTTPError) as context:
smart_variable.merge_overrides = True
smart_variable.update(['merge_overrides'])
self.assertRegexpMatches(
context.exception.response.text,
"Validation failed: Merge overrides can only be set for "
"array or hash"
)
with self.assertRaises(HTTPError) as context:
smart_variable.avoid_duplicates = True
smart_variable.update(['avoid_duplicates'])
self.assertRegexpMatches(
context.exception.response.text,
"Validation failed: Avoid duplicates can only be set for arrays "
"that have merge_overrides set to true"
)
smart_variable = smart_variable.read()
self.assertEqual(smart_variable.merge_overrides, False)
self.assertEqual(smart_variable.avoid_duplicates, False)
@run_only_on('sat')
@tier1
def test_positive_remove_matcher(self):
"""Removal of matcher from variable
:id: 7a932a99-2bd9-43ee-bcda-2b01a389787c
:steps:
1. Create the variable and create a matcher for some attribute
2. Remove the matcher created in step 1
        :expectedresults: The matcher is removed from the variable
:CaseImportance: Critical
"""
value = gen_string('alpha')
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
override_value_order='is_virtual',
).create()
matcher = entities.OverrideValue(
smart_variable=smart_variable,
match='is_virtual=true',
value=value,
).create()
smart_variable = smart_variable.read()
self.assertEqual(
smart_variable.override_values[0]['match'], 'is_virtual=true')
self.assertEqual(
smart_variable.override_values[0]['value'], value)
matcher.delete()
self.assertEqual(len(smart_variable.read().override_values), 0)
@run_only_on('sat')
@tier2
def test_positive_impact_variable_delete_attribute(self):
"""Impact on variable after deleting associated attribute
:id: d4faec04-be29-48e6-8585-10ff1c361a9e
:steps:
1. Create a variable and matcher for some attribute
2. Delete the attribute
3. Recreate the attribute with same name as earlier
:expectedresults:
1. The matcher for deleted attribute removed from variable
2. On recreating attribute, the matcher should not reappear in
variable
:CaseLevel: Integration
"""
hostgroup_name = gen_string('alpha')
matcher_value = gen_string('alpha')
# Create variable
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
).create()
# Create hostgroup and add puppet class to it
hostgroup = entities.HostGroup(
name=hostgroup_name,
environment=self.env,
).create()
hostgroup.add_puppetclass(
data={'puppetclass_id': self.puppet_class.id})
# Create matcher
entities.OverrideValue(
smart_variable=smart_variable,
match='hostgroup={0}'.format(hostgroup_name),
value=matcher_value,
).create()
smart_variable = smart_variable.read()
self.assertEqual(
smart_variable.override_values[0]['match'],
'hostgroup={0}'.format(hostgroup_name)
)
self.assertEqual(
smart_variable.override_values[0]['value'], matcher_value)
# Delete hostgroup
hostgroup.delete()
self.assertEqual(len(smart_variable.read().override_values), 0)
# Recreate hostgroup
hostgroup = entities.HostGroup(
name=hostgroup_name,
environment=self.env,
).create()
hostgroup.add_puppetclass(
data={'puppetclass_id': self.puppet_class.id})
self.assertEqual(len(smart_variable.read().override_values), 0)
@run_only_on('sat')
@tier1
def test_positive_hide_variable_default_value(self):
"""Hide the default value of variable
:id: 04bed7fa8-a5be-4fc0-8e9b-d68da00f8de0
:steps:
1. Create variable with valid type and value
2. Set 'Hidden Value' flag to true
:expectedresults: The 'hidden value' flag is set
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
hidden_value=True,
).create()
self.assertEqual(getattr(smart_variable, 'hidden_value?'), True)
self.assertEqual(smart_variable.default_value, u'*****')
@run_only_on('sat')
@tier1
def test_positive_unhide_variable_default_value(self):
"""Unhide the default value of variable
:id: e8b3ec03-1abb-48d8-9409-17178bb887cb
:steps:
1. Create variable with valid type and value
2. Set 'Hidden Value' flag to True
3. After hiding, set the 'Hidden Value' flag to False
:expectedresults: The 'hidden value' flag set to false
:CaseImportance: Critical
"""
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
hidden_value=True,
).create()
self.assertEqual(getattr(smart_variable, 'hidden_value?'), True)
smart_variable.hidden_value = False
smart_variable.update(['hidden_value'])
smart_variable = smart_variable.read()
self.assertEqual(getattr(smart_variable, 'hidden_value?'), False)
@run_only_on('sat')
@tier1
def test_positive_update_hidden_value_in_variable(self):
"""Update the hidden default value of variable
:id: 21b5586e-9434-45ea-ae85-12e24c549412
:steps:
1. Create variable with valid type and value
2. Set 'Hidden Value' flag to true
3. Now in hidden state, update the default value
        :expectedresults:
            1. The variable default value is updated
            2. The 'hidden value' flag is set to True
:CaseImportance: Critical
"""
value = gen_string('alpha')
smart_variable = entities.SmartVariable(
puppetclass=self.puppet_class,
default_value=gen_string('alpha'),
hidden_value=True,
).create()
self.assertEqual(getattr(smart_variable, 'hidden_value?'), True)
self.assertEqual(smart_variable.default_value, u'*****')
smart_variable.default_value = value
smart_variable.update(['default_value'])
smart_variable = smart_variable.read(params={'show_hidden': 'true'})
self.assertEqual(smart_variable.default_value, value)
self.assertEqual(getattr(smart_variable, 'hidden_value?'), True)
|
ares/robottelo
|
tests/foreman/api/test_variables.py
|
Python
|
gpl-3.0
| 50,980
|
#!/usr/bin/env python
import os
from setuptools import setup
from distutils.cmd import Command
import django_auth_iam
def read(*rnames):
    with open(os.path.join(os.path.dirname(__file__), *rnames)) as f:
        return f.read()
setup(
name='django-auth-iam',
version=django_auth_iam.__version__,
description='Django authentication backend using Amazon IAM',
long_description=read('README.rst'),
url='https://github.com/viewworld/django-auth-iam/',
author='Michael Budde',
author_email='mb@viewworld.dk',
license='GPL v3',
packages=['django_auth_iam'],
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: System :: Systems Administration :: Authentication/Directory',
],
keywords=['django', 'amazon', 'authentication', 'auth'],
install_requires=['boto', 'PyCrypto', 'py_bcrypt'],
)
|
viewworld/django-auth-iam
|
setup.py
|
Python
|
gpl-3.0
| 1,017
|
""" Package for tesseract cube controllers. """
from .sim import SimulatedController
from .gpio import GPIOController
from .c_gpio import CffiGpioController
CUBES = [
SimulatedController,
GPIOController,
CffiGpioController,
]
|
decoydavid/tesseract
|
ract/cubes/__init__.py
|
Python
|
gpl-3.0
| 241
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Region Fixer.
# Fix your region files with a backup copy of your Minecraft world.
# Copyright (C) 2020 Alejandro Aguilera (Fenixin)
# https://github.com/Fenixin/Minecraft-Region-Fixer
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from cmd import Cmd
import regionfixer_core.constants as c
from regionfixer_core import world
from regionfixer_core.scan import console_scan_world, console_scan_regionset
class InteractiveLoop(Cmd):
def __init__(self, world_list, regionset, options, backup_worlds):
Cmd.__init__(self)
self.world_list = world_list
self.regionset = regionset
self.world_names = [str(i.name) for i in self.world_list]
# if there's only one world use it
if len(self.world_list) == 1 and len(self.regionset) == 0:
self.current = world_list[0]
elif len(self.world_list) == 0 and len(self.regionset) > 0:
self.current = self.regionset
else:
self.current = None
self.options = options
self.backup_worlds = backup_worlds
self.prompt = "#-> "
self.intro = ("Minecraft Region-Fixer interactive mode.\n(Use tab to "
"autocomplete. Type help for a list of commands.)\n")
        # Possible args for chunks stuff
        self.possible_chunk_args_text = ", ".join(
            list(c.CHUNK_PROBLEMS_ARGS.values()) + ['all'])
        # Possible args for region stuff
        self.possible_region_args_text = ", ".join(
            list(c.REGION_PROBLEMS_ARGS.values()) + ['all'])
#################################################
# Do methods
#################################################
def do_set(self, arg):
""" Command to change some options and variables in interactive
mode """
args = arg.split()
if len(args) > 2:
print("Error: too many parameters.")
elif len(args) == 0:
print("Write \'help set\' to see a list of all possible variables")
else:
if args[0] == "entity-limit":
if len(args) == 1:
print("entity-limit = {0}".format(self.options.entity_limit))
else:
try:
if int(args[1]) >= 0:
self.options.entity_limit = int(args[1])
print("entity-limit = {0}".format(args[1]))
print("Updating chunk status...")
self.current.rescan_entities(self.options)
else:
print("Invalid value. Valid values are positive integers and zero")
except ValueError:
print("Invalid value. Valid values are positive integers and zero")
elif args[0] == "workload":
if len(args) == 1:
if self.current:
print("Current workload:\n{0}\n".format(self.current.__str__()))
print("List of possible worlds and region-sets (determined by the command used to run region-fixer):")
number = 1
for w in self.world_list:
print(" ### world{0} ###".format(number))
number += 1
# add a tab and print
for i in w.__str__().split("\n"):
print("\t" + i)
print()
print(" ### regionset ###")
for i in self.regionset.__str__().split("\n"):
print("\t" + i)
print("\n(Use \"set workload world1\" or name_of_the_world or regionset to choose one)")
else:
a = args[1]
                    if len(a) == 6 and a[:5] == "world" and a[-1].isdigit() and int(a[-1]) >= 1:
                        # get the number and choose the correct world from the list
number = int(args[1][-1]) - 1
try:
self.current = self.world_list[number]
print("workload = {0}".format(self.current.world_path))
except IndexError:
print("This world is not in the list!")
elif a in self.world_names:
for w in self.world_list:
if w.name == args[1]:
self.current = w
print("workload = {0}".format(self.current.world_path))
break
else:
print("This world name is not on the list!")
elif args[1] == "regionset":
if len(self.regionset):
self.current = self.regionset
print("workload = set of region files")
else:
print("The region set is empty!")
else:
print("Invalid world number, world name or regionset.")
elif args[0] == "processes":
if len(args) == 1:
print("processes = {0}".format(self.options.processes))
else:
try:
if int(args[1]) > 0:
self.options.processes = int(args[1])
print("processes = {0}".format(args[1]))
else:
print("Invalid value. Valid values are positive integers.")
except ValueError:
print("Invalid value. Valid values are positive integers.")
elif args[0] == "verbose":
if len(args) == 1:
print("verbose = {0}".format(str(self.options.verbose)))
else:
if args[1] == "True":
self.options.verbose = True
print("verbose = {0}".format(args[1]))
elif args[1] == "False":
self.options.verbose = False
print("verbose = {0}".format(args[1]))
else:
print("Invalid value. Valid values are True and False.")
else:
print("Invalid argument! Write \'help set\' to see a list of valid variables.")
def do_summary(self, arg):
""" Prints a summary of all the problems found in the region
files. """
if len(arg) == 0:
if self.current:
if self.current.scanned:
text = self.current.generate_report(True)
if text:
print(text)
else:
print("No problems found!")
else:
print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.")
else:
print("No world/region-set is set! Use \'set workload\' to set a world/regionset to work with.")
else:
print("This command doesn't use any arguments.")
def do_current_workload(self, arg):
""" Prints the info of the current workload """
if len(arg) == 0:
if self.current:
print(self.current)
else:
print("No world/region-set is set! Use \'set workload\' to set a world/regionset to work with.")
else:
print("This command doesn't use any arguments.")
def do_scan(self, arg):
""" Scans the current workload. """
        # TODO: what about scanning while deleting entities, as done in
        # non-interactive mode? This would need an option to choose which of
        # the two methods to use.
o = self.options
if len(arg.split()) > 0:
print("Error: too many parameters.")
else:
if self.current:
if isinstance(self.current, world.World):
self.current = world.World(self.current.path)
console_scan_world(self.current, o.processes,
o.entity_limit, o.delete_entities,
o.verbose)
elif isinstance(self.current, world.RegionSet):
print("\n{0:-^60}".format(' Scanning region files '))
console_scan_regionset(self.current, o.processes,
o.entity_limit, o.delete_entities,
o.verbose)
else:
print("No world set! Use \'set workload\'")
def do_count_chunks(self, arg):
""" Counts the number of chunks with the given problem and
prints the result """
if self.current and self.current.scanned:
if len(arg.split()) == 0:
print("Possible counters are: {0}".format(self.possible_chunk_args_text))
elif len(arg.split()) > 1:
print("Error: too many parameters.")
else:
if arg in list(c.CHUNK_PROBLEMS_ARGS.values()) or arg == 'all':
total = self.current.count_chunks(None)
for problem, status_text, a in c.CHUNK_PROBLEMS_ITERATOR:
if arg == 'all' or arg == a:
n = self.current.count_chunks(problem)
print("Chunks with status \'{0}\': {1}".format(status_text, n))
print("Total chunks: {0}".format(total))
else:
print("Unknown counter.")
else:
print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.")
def do_count_regions(self, arg):
""" Counts the number of regions with the given problem and
prints the result """
if self.current and self.current.scanned:
if len(arg.split()) == 0:
print("Possible counters are: {0}".format(self.possible_region_args_text))
elif len(arg.split()) > 1:
print("Error: too many parameters.")
else:
if arg in list(c.REGION_PROBLEMS_ARGS.values()) or arg == 'all':
total = self.current.count_regions(None)
for problem, status_text, a in c.REGION_PROBLEMS_ITERATOR:
if arg == 'all' or arg == a:
n = self.current.count_regions(problem)
print("Regions with status \'{0}\': {1}".format(status_text, n))
print("Total regions: {0}".format(total))
else:
print("Unknown counter.")
else:
print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.")
def do_count_all(self, arg):
""" Print all the counters for chunks and regions. """
if self.current and self.current.scanned:
if len(arg.split()) > 0:
print("This command doesn't requiere any arguments")
else:
print("{0:#^60}".format("Chunk problems:"))
self.do_count_chunks('all')
print("\n")
print("{0:#^60}".format("Region problems:"))
self.do_count_regions('all')
else:
print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.")
def do_remove_entities(self, arg):
if self.current and self.current.scanned:
if len(arg.split()) > 0:
print("Error: too many parameters.")
else:
print("WARNING: This will delete all the entities in the chunks that have more entities than entity-limit, make sure you know what entities are!.\nAre you sure you want to continue? (yes/no):")
answer = input()
if answer == 'yes':
counter = self.current.remove_entities()
print("Deleted {0} entities.".format(counter))
if counter:
self.current.scanned = False
self.current.rescan_entities(self.options)
elif answer == 'no':
print("Ok!")
else:
print("Invalid answer, use \'yes\' or \'no\' the next time!.")
else:
print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.")
def do_remove_chunks(self, arg):
if self.current and self.current.scanned:
if len(arg.split()) == 0:
print("Possible arguments are: {0}".format(self.possible_chunk_args_text))
elif len(arg.split()) > 1:
print("Error: too many parameters.")
else:
if arg in list(c.CHUNK_PROBLEMS_ARGS.values()) or arg == 'all':
for problem, status_text, a in c.CHUNK_PROBLEMS_ITERATOR:
if arg == 'all' or arg == a:
n = self.current.remove_problematic_chunks(problem)
if n:
self.current.scanned = False
print("Removed {0} chunks with status \'{1}\'.\n".format(n, status_text))
else:
print("Unknown argument.")
else:
print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.")
def do_replace_chunks(self, arg):
el = self.options.entity_limit
de = self.options.delete_entities
if self.current and self.current.scanned:
if len(arg.split()) == 0:
print("Possible arguments are: {0}".format(self.possible_chunk_args_text))
elif len(arg.split()) > 1:
print("Error: too many parameters.")
else:
if arg in list(c.CHUNK_PROBLEMS_ARGS.values()) or arg == 'all':
for problem, status_text, a in c.CHUNK_PROBLEMS_ITERATOR:
if arg == 'all' or arg == a:
n = self.current.replace_problematic_chunks(self.backup_worlds, problem, el, de)
if n:
self.current.scanned = False
print("\nReplaced {0} chunks with status \'{1}\'.".format(n, status_text))
else:
print("Unknown argument.")
else:
print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.")
def do_replace_regions(self, arg):
el = self.options.entity_limit
de = self.options.delete_entities
if self.current and self.current.scanned:
if len(arg.split()) == 0:
print("Possible arguments are: {0}".format(self.possible_region_args_text))
elif len(arg.split()) > 1:
print("Error: too many parameters.")
else:
if arg in list(c.REGION_PROBLEMS_ARGS.values()) or arg == 'all':
for problem, status_text, a in c.REGION_PROBLEMS_ITERATOR:
if arg == 'all' or arg == a:
n = self.current.replace_problematic_regions(self.backup_worlds, problem, el, de)
if n:
self.current.scanned = False
print("\nReplaced {0} regions with status \'{1}\'.".format(n, status_text))
else:
print("Unknown argument.")
else:
print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.")
def do_remove_regions(self, arg):
if self.current and self.current.scanned:
if len(arg.split()) == 0:
print("Possible arguments are: {0}".format(self.possible_region_args_text))
elif len(arg.split()) > 1:
print("Error: too many parameters.")
else:
if arg in list(c.REGION_PROBLEMS_ARGS.values()) or arg == 'all':
for problem, status_text, a in c.REGION_PROBLEMS_ITERATOR:
if arg == 'all' or arg == a:
n = self.current.remove_problematic_regions(problem)
if n:
self.current.scanned = False
print("\nRemoved {0} regions with status \'{1}\'.".format(n, status_text))
else:
print("Unknown argument.")
else:
print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.")
def do_quit(self, arg):
print("Quitting.")
return True
def do_exit(self, arg):
print("Exiting.")
return True
def do_EOF(self, arg):
print("Quitting.")
return True
#################################################
# Complete methods
#################################################
def complete_arg(self, text, possible_args):
l = []
for arg in possible_args:
if text in arg and arg.find(text) == 0:
l.append(arg + " ")
return l
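    # Illustrative example (editorial addition) of the helper above:
    #     self.complete_arg("cor", ["corrupted", "wrong-located"])
    #     -> ["corrupted "]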
def complete_set(self, text, line, begidx, endidx):
if "workload " in line:
# return the list of world names plus 'regionset' plus a list of world1, world2...
possible_args = tuple(self.world_names) + ('regionset',) + tuple(['world' + str(i + 1) for i in range(len(self.world_names))])
elif 'verbose ' in line:
possible_args = ('True', 'False')
else:
possible_args = ('entity-limit', 'verbose', 'processes', 'workload')
return self.complete_arg(text, possible_args)
def complete_count_chunks(self, text, line, begidx, endidx):
possible_args = list(c.CHUNK_PROBLEMS_ARGS.values()) + ['all']
return self.complete_arg(text, possible_args)
def complete_remove_chunks(self, text, line, begidx, endidx):
possible_args = list(c.CHUNK_PROBLEMS_ARGS.values()) + ['all']
return self.complete_arg(text, possible_args)
def complete_replace_chunks(self, text, line, begidx, endidx):
possible_args = list(c.CHUNK_PROBLEMS_ARGS.values()) + ['all']
return self.complete_arg(text, possible_args)
def complete_count_regions(self, text, line, begidx, endidx):
possible_args = list(c.REGION_PROBLEMS_ARGS.values()) + ['all']
return self.complete_arg(text, possible_args)
def complete_remove_regions(self, text, line, begidx, endidx):
possible_args = list(c.REGION_PROBLEMS_ARGS.values()) + ['all']
return self.complete_arg(text, possible_args)
def complete_replace_regions(self, text, line, begidx, endidx):
possible_args = list(c.REGION_PROBLEMS_ARGS.values()) + ['all']
return self.complete_arg(text, possible_args)
#################################################
# Help methods
#################################################
    # TODO: it would be a good idea to add a help article on how to use the
    # program in a typical case.
    # TODO: the help texts need normalizing
    def help_set(self):
        print("\nSets some variables used for the scan in interactive mode. "
              "If you run this command without an argument for a variable "
              "you can see the current state of the variable. You can set:\n"
              " verbose\n"
              "If True prints a line per scanned region file instead of "
              "showing a progress bar.\n"
              " entity-limit\n"
              "If a chunk has more than this number of entities it will be "
              "added to the list of chunks with the too-many-entities "
              "problem.\n"
              " processes\n"
              "Number of cores used while scanning the world.\n"
              " workload\n"
              "If you input several worlds you can choose which one will be "
              "scanned using this command.\n")
def help_current_workload(self):
print("\nPrints information of the current region-set/world. This will be the region-set/world to scan and fix.\n")
def help_scan(self):
print("\nScans the current world set or the region set.\n")
    def help_count_chunks(self):
        print("\nPrints out the number of chunks with the given status. For example")
        print("\'count_chunks corrupted\' prints the number of corrupted chunks in the world.")
        print()
        print("Possible statuses are: {0}\n".format(self.possible_chunk_args_text))
    def help_remove_entities(self):
        print("\nRemoves all the entities in chunks that have more than entity-limit entities.")
        print()
        print("These chunks are the ones with status \'too many entities\'.\n")
    def help_remove_chunks(self):
        print("\nRemoves bad chunks with the given status.")
        print()
        print("Please be careful: when used with the status too-many-entities this will")
        print("REMOVE THE CHUNKS with the too-many-entities problem, not the entities.")
        print("To remove only the entities see the command remove_entities.")
        print()
        print("For example, \'remove_chunks corrupted\' will remove corrupted chunks.")
        print()
        print("Possible statuses are: {0}\n".format(self.possible_chunk_args_text))
        print()
def help_replace_chunks(self):
print("\nReplaces bad chunks with the given status using the backups directories.")
print()
print("Exampe: \"replace_chunks corrupted\"")
print()
print("this will replace the corrupted chunks with the given backups.")
print()
print("Possible status are: {0}\n".format(self.possible_chunk_args_text))
print()
print("Note: after replacing any chunks you have to rescan the world.\n")
def help_count_regions(self):
print("\n Prints out the number of regions with the given status. For example ")
print("\'count_regions too-small\' prints the number of region with \'too-small\' status.")
print()
print("Possible status are: {0}\n".format(self.possible_region_args_text))
def help_remove_regions(self):
print("\nRemoves regions with the given status.")
print()
print("Example: \'remove_regions too-small\'")
print()
print("this will remove the region files with status \'too-small\'.")
print()
print("Possible status are: {0}".format(self.possible_region_args_text))
print()
print("Note: after removing any regions you have to rescan the world.\n")
def help_replace_regions(self):
print("\nReplaces regions with the given status.")
print()
print("Example: \"replace_regions too-small\"")
print()
print("this will try to replace the region files with status \'too-small\'")
print("with the given backups.")
print()
print("Possible status are: {0}".format(self.possible_region_args_text))
print()
print("Note: after replacing any regions you have to rescan the world.\n")
def help_summary(self):
print("\nPrints a summary of all the problems found in the current workload.\n")
def help_quit(self):
print("\nQuits interactive mode, exits region-fixer. Same as \'EOF\' and \'exit\' commands.\n")
def help_EOF(self):
print("\nQuits interactive mode, exits region-fixer. Same as \'quit\' and \'exit\' commands\n")
def help_exit(self):
print("\nQuits interactive mode, exits region-fixer. Same as \'quit\' and \'EOF\' commands\n")
def help_help(self):
print("Prints help help.")
|
Fenixin/Minecraft-Region-Fixer
|
regionfixer_core/interactive.py
|
Python
|
gpl-3.0
| 24,997
|
from dotenv import load_dotenv
load_dotenv('./.env')
|
mbernson/iscp-search-engine
|
retrouve/__init__.py
|
Python
|
gpl-3.0
| 53
|
"""
Django settings for todo project.
Generated by 'django-admin startproject' using Django 1.9.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '-b9xx8+eul3#8q&c@tv^5e!u66j=a6@377$y^b2q!0a%vj+!ny'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
DJANGO_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
THIRD_PARTY_APPS = []
CUSTOM_APPS = [
'tasks.apps.TasksConfig',
]
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + CUSTOM_APPS
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'todo.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['todo/templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'todo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
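# Illustrative sketch (not part of the original settings): one common way to
# honour the SECURITY WARNINGs above is to read the secret key and debug flag
# from the environment instead of hard-coding them; 'DJANGO_SECRET_KEY' and
# 'DJANGO_DEBUG' are hypothetical variable names:
#
#   SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', SECRET_KEY)
#   DEBUG = os.environ.get('DJANGO_DEBUG', '') == '1'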
|
jacoboamn87/todolist
|
todo/settings.py
|
Python
|
gpl-3.0
| 3,308
|
#!/usr/bin/python
'''
Study Common Emitter Characteristics of NPN transistors.
Saturation currents, and their dependence on base current
can be easily visualized.
'''
from __future__ import print_function
import time,sys,os
from SEEL_Apps.utilitiesClass import utilitiesClass
from .templates import ui_NFET as NFET
from PyQt4 import QtCore, QtGui
import pyqtgraph as pg
import numpy as np
params = {
'image' : 'transistorCE.png',
'name':'N-FET Transfer\nCharacteristics',
'hint':''
}
class AppWindow(QtGui.QMainWindow, NFET.Ui_MainWindow,utilitiesClass):
def __init__(self, parent=None,**kwargs):
super(AppWindow, self).__init__(parent)
self.setupUi(self)
self.I=kwargs.get('I',None)
self.I.set_gain('CH1',2)
self.setWindowTitle(self.I.H.version_string+' : '+params.get('name','').replace('\n',' ') )
self.plot=self.add2DPlot(self.plot_area,enableMenu=False)
self.sig = self.rightClickToZoomOut(self.plot)
labelStyle = {'color': 'rgb(255,255,255)', 'font-size': '11pt'}
self.plot.setLabel('left','Drain Current', units='A',**labelStyle)
self.plot.setLabel('bottom','Gate-Source Voltage', units='V',**labelStyle)
self.startV.setMinimum(-3.3); self.startV.setMaximum(0); self.startV.setValue(-3.3)
self.stopV.setMinimum(-3.3); self.stopV.setMaximum(0)
self.biasV.setMinimum(-5); self.biasV.setMaximum(5); self.biasV.setValue(5)
self.sweepLabel.setText('Gate Voltage Range(PV2)')
self.biasLabel.setText('Drain Voltage(PV1)')
self.totalpoints=2000
self.X=[]
self.Y=[]
self.RESISTANCE = 560
self.traceName =''
self.curves=[]
self.curveLabels=[]
self.looptimer = self.newTimer()
self.looptimer.timeout.connect(self.acquire)
self.running = True
def savePlots(self):
self.saveDataWindow(self.curves,self.plot)
def run(self):
self.looptimer.stop()
self.X=[];self.Y=[]
self.VCC = self.I.set_pv1(self.biasV.value())
self.traceName = 'Vcc = %s'%(self.applySIPrefix(self.VCC,'V'))
self.curves.append( self.addCurve(self.plot ,self.traceName) )
self.START = self.startV.value()
self.STOP = self.stopV.value()
self.STEP = (self.STOP-self.START)/self.totalPoints.value()
self.V = self.START
self.I.set_pv2(self.V)
time.sleep(0.2)
P=self.plot.getPlotItem()
self.plot.setXRange(self.V,self.stopV.value()*1.2)
self.plot.setYRange(0,20e-3)
if len(self.curves)>1:P.enableAutoRange(True,True)
if self.running:self.looptimer.start(20)
def acquire(self):
VG=self.I.set_pv2(self.V)
self.X.append(VG)
VC = self.I.get_voltage('CH1',samples=10)
self.Y.append((self.VCC-VC)/self.RESISTANCE) # list( ( np.linspace(V,V+self.stepV.value(),1000)-VC)/1.e3)
self.curves[-1].setData(self.X,self.Y)
self.V+=self.STEP
if self.V>self.stopV.value():
self.looptimer.stop()
txt='<div style="text-align: center"><span style="color: #FFF;font-size:8pt;">%s</span></div>'%(self.traceName)
text = pg.TextItem(html=txt, anchor=(0,0), border='w', fill=(0, 0, 255, 100))
self.plot.addItem(text)
text.setPos(self.X[-1],self.Y[-1])
self.curveLabels.append(text)
self.tracesBox.addItem(self.traceName)
def delete_curve(self):
c = self.tracesBox.currentIndex()
if c>-1:
self.tracesBox.removeItem(c)
self.removeCurve(self.plot,self.curves[c]);
self.plot.removeItem(self.curveLabels[c]);
self.curves.pop(c);self.curveLabels.pop(c);
if len(self.curves)==0: # reset counter for plot numbers
self.plotnum=0
def __del__(self):
self.looptimer.stop()
print ('bye')
def closeEvent(self, event):
self.looptimer.stop()
self.finished=True
if __name__ == "__main__":
from SEEL import interface
app = QtGui.QApplication(sys.argv)
myapp = AppWindow(I=interface.connect())
myapp.show()
sys.exit(app.exec_())
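# Worked example (added for illustration, not in the original app): acquire()
# infers the drain current from the voltage drop across the 560 ohm sense
# resistor, I_D = (VCC - VC) / R. With VCC = 5 V and a measured VC = 4.44 V:
#
#   (5.0 - 4.44) / 560   # -> 0.001 A, i.e. 1 mA of drain current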
|
jithinbp/SEELablet-apps
|
seel_res/GUI/B_ELECTRONICS/A_BJT_AND_FET/D_NFET_GS_ID.py
|
Python
|
gpl-3.0
| 3,753
|
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import datetime
import threading
import traceback
import sickbeard
from sickbeard import logger
from sickbeard import db
from sickbeard import common
from sickbeard import helpers
from sickbeard import exceptions
from sickbeard import network_timezones
from sickbeard.exceptions import ex
from sickbeard.common import SKIPPED
from common import Quality, qualityPresetStrings, statusStrings
class DailySearcher():
def __init__(self):
self.lock = threading.Lock()
self.amActive = False
def run(self, force=False):
if self.amActive:
return
self.amActive = True
logger.log(u"Searching for new released episodes ...")
if not network_timezones.network_dict:
network_timezones.update_network_dict()
if network_timezones.network_dict:
curDate = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
else:
curDate = (datetime.date.today() + datetime.timedelta(days=2)).toordinal()
curTime = datetime.datetime.now(network_timezones.sb_timezone)
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND (airdate <= ? and airdate > 1)",
[common.UNAIRED, curDate])
sql_l = []
show = None
for sqlEp in sqlResults:
try:
if not show or int(sqlEp["showid"]) != show.indexerid:
show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
# for when there are orphaned series in the database that are not loaded into our showlist
if not show or show.paused:
continue
except exceptions.MultipleShowObjectsException:
logger.log(u"ERROR: expected to find a single show matching " + str(sqlEp['showid']))
continue
try:
end_time = network_timezones.parse_date_time(sqlEp['airdate'], show.airs,
show.network) + datetime.timedelta(
minutes=helpers.tryInt(show.runtime, 60))
# filter out any episodes that haven't aired yet
if end_time > curTime:
continue
except:
# if an error occurred assume the episode hasn't aired yet
continue
UpdateWantedList = 0
ep = show.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
with ep.lock:
if ep.season == 0:
logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to SKIPPED because is a special season")
ep.status = common.SKIPPED
elif sickbeard.TRAKT_USE_ROLLING_DOWNLOAD and sickbeard.USE_TRAKT:
ep.status = common.SKIPPED
UpdateWantedList = 1
else:
logger.log(u"New episode %s airs today, setting to default episode status for this show: %s" % (ep.prettyName(), common.statusStrings[ep.show.default_ep_status]))
ep.status = ep.show.default_ep_status
sql_l.append(ep.get_sql())
if len(sql_l) > 0:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
else:
logger.log(u"No new released episodes found ...")
sickbeard.traktRollingScheduler.action.updateWantedList()
# queue episode for daily search
dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem()
sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)
self.amActive = False
|
keen99/SickRage
|
sickbeard/dailysearcher.py
|
Python
|
gpl-3.0
| 4,533
|
import sys
class DualModulusPrescaler:
def __init__(self,p):
self.m_p = p
return
def set_prescaler(self):
return
# may be internal
def set_a(self,a):
self.m_a = a
return
# may be internal
def set_n(self,n):
self.m_n = n
return
def set_ref_divider(self, r):
self.m_r = r
return
def get_ref_divider(self):
return self.m_r
def get_division_ratio(self):
v = (self.m_p * self.m_n) + self.m_a
return v
class Osc:
def __init__(self, ref_freq, prescaler):
self.m_ref = ref_freq
self.m_prescaler = prescaler
return
def get_freq(self):
# print self.m_prescaler.get_division_ratio()
return (self.m_ref/self.m_prescaler.get_ref_divider()) * self.m_prescaler.get_division_ratio()
def calc_a(self):
return
def calc_n(self):
return
def get_counter_params(self,freq):
x = freq * self.m_prescaler.get_ref_divider()/self.m_ref
n = int(x/32)
a = int(round(x-n*32))
encoded = (n<<7) + a
return (n, a, encoded)
def set_freq(self,freq):
return
class Danphone:
def __init__(self,datalink):
dmps = DualModulusPrescaler(32)
#dmps.set_ref_divider(2048)
dmps.set_ref_divider(1088)
osc = Osc(12.8e6,dmps)
print osc.get_counter_params(70.3529E6)
sys.exit(0)
for i in range(128):
dmps.set_a(i)
dmps.set_n(456)
osc = Osc(12.8e6,dmps)
print osc.get_freq()/1000000
return
if __name__=="__main__":
d = Danphone(None)
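# Worked example (added for illustration): with the values above, P = 32,
# R = 1088 and f_ref = 12.8 MHz, get_counter_params() inverts
# f = (f_ref / R) * (P*N + A). For f = 70.3529 MHz:
#
#   x = 70.3529e6 * 1088 / 12.8e6      # ~= 5980
#   n = int(x / 32)                    # -> 186
#   a = int(round(x - n * 32))         # -> 28
#   encoded = (n << 7) + a             # -> 23836
#
# and (12.8e6 / 1088) * (32 * 186 + 28) ~= 70.3529e6 recovers the frequency.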
|
johngumb/danphone
|
junkbox/dm.py
|
Python
|
gpl-3.0
| 1,692
|
import logging
import os
from lib import exception
from lib import repository
from lib.constants import REPOSITORIES_DIR
LOG = logging.getLogger(__name__)
def get_versions_repository(config):
"""
Get the packages metadata Git repository, cloning it if does not
yet exist.
Args:
config (dict): configuration dictionary
Raises:
exception.RepositoryError: if the clone is unsuccessful
"""
path = os.path.join(config.get('work_dir'),
REPOSITORIES_DIR)
url = config.get('packages_metadata_repo_url')
name = "versions_{subcommand}".format(
subcommand=config.get('subcommand'))
try:
versions_repo = repository.get_git_repository(url, path, name)
except exception.RepositoryError:
LOG.error("Failed to clone versions repository")
raise
return versions_repo
def setup_versions_repository(config):
"""
Prepare the packages metadata Git repository, cloning it and
checking out at the chosen branch.
Args:
config (dict): configuration dictionary
Raises:
exception.RepositoryError: if the clone or checkout are
unsuccessful
"""
versions_repo = get_versions_repository(config)
branch = config.get('packages_metadata_repo_branch')
refspecs = config.get('packages_metadata_repo_refspecs')
try:
versions_repo.checkout(branch, refspecs)
except exception.RepositoryError:
LOG.error("Failed to checkout versions repository")
raise
return versions_repo
def read_version_and_milestone(versions_repo):
"""
Read current version and milestone (alpha or beta) from VERSION file
Args:
versions_repo (GitRepository): packages metadata git repository
Returns:
version_milestone (str): version and milestone. Format:
<version>-<milestone>, valid milestone values: alpha, beta
"""
version_file_path = os.path.join(versions_repo.working_tree_dir, 'open-power-host-os', 'CentOS', '7', 'SOURCES', 'VERSION')
version_milestone = ""
with open(version_file_path, 'r') as version_file:
#ignore first line with file format information
version_file.readline()
version_milestone = version_file.readline().strip('\n')
return version_milestone
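# Illustrative usage sketch (not part of the original module; the config keys
# mirror the ones read above, but every value here is hypothetical):
#
#   config = {'work_dir': '/tmp/builds',
#             'subcommand': 'build',
#             'packages_metadata_repo_url': 'https://example.com/versions.git',
#             'packages_metadata_repo_branch': 'master',
#             'packages_metadata_repo_refspecs': None}
#   repo = setup_versions_repository(config)
#   print(read_version_and_milestone(repo))   # e.g. "2.0-beta"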
|
olavph/builds
|
lib/versions_repository.py
|
Python
|
gpl-3.0
| 2,326
|
import uuid
from flask import g
# Utility method to get source_ip from a request - first checks headers for forwarded IP, then uses remote_addr if not
def get_source_ip(my_request):
try:
# First check for an X-Forwarded-For header provided by a proxy / router e.g. on Heroku
source_ip = my_request.headers['X-Forwarded-For']
except KeyError:
try:
# If that header is absent, check for an X-Client-IP header, which some proxies / routers set instead
source_ip = my_request.headers['X-Client-IP']
except KeyError:
# If that header is not present, attempt to get the Source IP address from the request itself
source_ip = my_request.remote_addr
g.source_ip = source_ip
return source_ip
# Utility method to get the request_id from the X-Request-Id header, and if not present generate one
def get_request_id(my_request):
try:
request_id = my_request.headers['X-Request-Id']
except KeyError:
request_id = str(uuid.uuid4())
g.request_id = request_id
return request_id
# Utility method which takes a dict of request parameters and writes them out as pipe-delimited kv pairs
def dict_to_piped_kv_pairs(dict_for_conversion):
output_string = ""
for key, value in sorted(dict_for_conversion.items()):
output_string += "{0}={1}|".format(key, value)
return output_string
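# Illustrative sketch (not part of the original module): the helper above
# sorts the keys and emits "key=value|" pairs, including a trailing pipe:
#
#   dict_to_piped_kv_pairs({'b': 2, 'a': 1})   # -> 'a=1|b=2|'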
|
open-ods/open-ods
|
openods/request_utils.py
|
Python
|
gpl-3.0
| 1,400
|
import os, re, shutil
for (base, _, files) in os.walk("essays",):
for f in files:
if f.endswith(".markdown"):
fp = os.path.join(base, f)
_, np = os.path.split(base)
np = re.sub(r"_def$", "", np)
np = os.path.join("essays", np+".markdown")
# print fp, "=>", np
# shutil.copy(fp, np)
cmd = 'git mv "{0}" "{1}"'.format(fp, np)
print cmd
os.system(cmd)
|
DigitalPublishingToolkit/Society-of-the-Query-Reader
|
scripts/gather_essays.py
|
Python
|
gpl-3.0
| 470
|
# (c) 2014 James Cammarata, <jcammarata@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import codecs
from ansible.errors import AnsibleParserError
from ansible.parsing.quoting import unquote
# Decode escapes adapted from rspeer's answer here:
# http://stackoverflow.com/questions/4020539/process-escape-sequences-in-a-string-in-python
_HEXCHAR = '[a-fA-F0-9]'
_ESCAPE_SEQUENCE_RE = re.compile(r'''
( \\U{0} # 8-digit hex escapes
| \\u{1} # 4-digit hex escapes
| \\x{2} # 2-digit hex escapes
| \\N\{{[^}}]+\}} # Unicode characters by name
| \\[\\'"abfnrtv] # Single-character escapes
)'''.format(_HEXCHAR*8, _HEXCHAR*4, _HEXCHAR*2), re.UNICODE | re.VERBOSE)
def _decode_escapes(s):
def decode_match(match):
return codecs.decode(match.group(0), 'unicode-escape')
return _ESCAPE_SEQUENCE_RE.sub(decode_match, s)
def parse_kv(args, check_raw=False):
'''
Convert a string of key/value items to a dict. If any free-form params
are found and the check_raw option is set to True, they will be added
to a new parameter called '_raw_params'. If check_raw is not enabled,
they will simply be ignored.
'''
### FIXME: args should already be a unicode string
from ansible.utils.unicode import to_unicode
args = to_unicode(args, nonstring='passthru')
options = {}
if args is not None:
try:
vargs = split_args(args)
except ValueError as ve:
if 'no closing quotation' in str(ve).lower():
raise AnsibleParserError("error parsing argument string, try quoting the entire line.")
else:
raise
raw_params = []
for orig_x in vargs:
x = _decode_escapes(orig_x)
if "=" in x:
pos = 0
try:
while True:
pos = x.index('=', pos + 1)
if pos > 0 and x[pos - 1] != '\\':
break
except ValueError:
# ran out of string, but we must have some escaped equals,
# so replace those and append this to the list of raw params
raw_params.append(x.replace('\\=', '='))
continue
k = x[:pos]
v = x[pos + 1:]
# FIXME: make the retrieval of this list of shell/command
# options a function, so the list is centralized
if check_raw and k not in ('creates', 'removes', 'chdir', 'executable', 'warn'):
raw_params.append(orig_x)
else:
options[k.strip()] = unquote(v.strip())
else:
raw_params.append(orig_x)
# recombine the free-form params, if any were found, and assign
# them to a special option for use later by the shell/command module
if len(raw_params) > 0:
options[u'_raw_params'] = ' '.join(raw_params)
return options
def _get_quote_state(token, quote_char):
'''
the goal of this block is to determine if the quoted string
is unterminated in which case it needs to be put back together
'''
# the char before the current one, used to see if
# the current character is escaped
prev_char = None
for idx, cur_char in enumerate(token):
if idx > 0:
prev_char = token[idx-1]
if cur_char in '"\'' and prev_char != '\\':
if quote_char:
if cur_char == quote_char:
quote_char = None
else:
quote_char = cur_char
return quote_char
def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
'''
this function counts the number of opening/closing blocks for a
given opening/closing type and adjusts the current depth for that
block based on the difference
'''
num_open = token.count(open_token)
num_close = token.count(close_token)
if num_open != num_close:
cur_depth += (num_open - num_close)
if cur_depth < 0:
cur_depth = 0
return cur_depth
def split_args(args):
'''
Splits args on whitespace, but intelligently reassembles
those that may have been split over a jinja2 block or quotes.
When used in a remote module, we won't ever have to be concerned about
jinja2 blocks, however this function is/will be used in the
core portions as well before the args are templated.
example input: a=b c="foo bar"
example output: ['a=b', 'c="foo bar"']
Basically this is a variation of shlex that has some more intelligence for
how Ansible needs to use it.
'''
# the list of params parsed out of the arg string
# this is going to be the result value when we are done
params = []
# Initial split on newlines; each line is further split on spaces below
args = args.strip()
items = args.strip().split('\n')
# iterate over the tokens, and reassemble any that may have been
# split on a space inside a jinja2 block.
# e.g. if tokens are "{{", "foo", "}}" these go together
# These variables are used
# to keep track of the state of the parsing, since blocks and quotes
# may be nested within each other.
quote_char = None
inside_quotes = False
print_depth = 0 # used to count nested jinja2 {{ }} blocks
block_depth = 0 # used to count nested jinja2 {% %} blocks
comment_depth = 0 # used to count nested jinja2 {# #} blocks
# now we loop over each split chunk, coalescing tokens if the white space
# split occurred within quotes or a jinja2 block of some kind
for itemidx,item in enumerate(items):
# we split on spaces and newlines separately, so that we
# can tell which character we split on for reassembly
# inside quotation characters
tokens = item.strip().split(' ')
line_continuation = False
for idx,token in enumerate(tokens):
# if we hit a line continuation character, but
# we're not inside quotes, ignore it and continue
# on to the next token while setting a flag
if token == '\\' and not inside_quotes:
line_continuation = True
continue
# store the previous quoting state for checking later
was_inside_quotes = inside_quotes
quote_char = _get_quote_state(token, quote_char)
inside_quotes = quote_char is not None
# multiple conditions may append a token to the list of params,
# so we keep track with this flag to make sure it only happens once
# append means add to the end of the list, don't append means concatenate
# it to the end of the last token
appended = False
# if we're inside quotes now, but weren't before, append the token
# to the end of the list, since we'll tack on more to it later
# otherwise, if we're inside any jinja2 block, inside quotes, or we were
# inside quotes (but aren't now) concat this token to the last param
if inside_quotes and not was_inside_quotes:
params.append(token)
appended = True
elif print_depth or block_depth or comment_depth or inside_quotes or was_inside_quotes:
if idx == 0 and was_inside_quotes:
params[-1] = "%s%s" % (params[-1], token)
elif len(tokens) > 1:
spacer = ''
if idx > 0:
spacer = ' '
params[-1] = "%s%s%s" % (params[-1], spacer, token)
else:
params[-1] = "%s\n%s" % (params[-1], token)
appended = True
# if the number of paired block tags is not the same, the depth has changed, so we calculate that here
# and may append the current token to the params (if we haven't previously done so)
prev_print_depth = print_depth
print_depth = _count_jinja2_blocks(token, print_depth, "{{", "}}")
if print_depth != prev_print_depth and not appended:
params.append(token)
appended = True
prev_block_depth = block_depth
block_depth = _count_jinja2_blocks(token, block_depth, "{%", "%}")
if block_depth != prev_block_depth and not appended:
params.append(token)
appended = True
prev_comment_depth = comment_depth
comment_depth = _count_jinja2_blocks(token, comment_depth, "{#", "#}")
if comment_depth != prev_comment_depth and not appended:
params.append(token)
appended = True
# finally, if we're at zero depth for all blocks and not inside quotes, and have not
# yet appended anything to the list of params, we do so now
if not (print_depth or block_depth or comment_depth) and not inside_quotes and not appended and token != '':
params.append(token)
# if this was the last token in the list, and we have more than
# one item (meaning we split on newlines), add a newline back here
# to preserve the original structure
if len(items) > 1 and itemidx != len(items) - 1 and not line_continuation:
params[-1] += '\n'
# always clear the line continuation flag
line_continuation = False
# If we're done and things are not at zero depth or we're still inside quotes,
# raise an error to indicate that the args were unbalanced
if print_depth or block_depth or comment_depth or inside_quotes:
raise AnsibleParserError("failed at splitting arguments, either an unbalanced jinja2 block or quotes: {}".format(args))
return params
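# Illustrative sketch (not part of the original module): how the two parsers
# above behave on a simple argument string:
#
#   split_args(u'a=b c="foo bar"')   # -> [u'a=b', u'c="foo bar"']
#   parse_kv(u'a=b c="foo bar"')     # -> {u'a': u'b', u'c': u'foo bar'}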
|
dr0pz0ne/sibble
|
lib/ansible/parsing/splitter.py
|
Python
|
gpl-3.0
| 10,657
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# async - A tool to manage and sync different machines
# Copyright 2012,2013 Abdó Roig-Maranges <abdo.roig@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import glob
import shlex
from collections import OrderedDict
from configparser import ConfigParser
class AsyncConfigError(Exception):
def __init__(self, msg=None):
super(AsyncConfigError, self).__init__(msg)
# Parsers
# --------------------------------------------------------------------
# All these parsers accept key, val, dic. They parse key and val and store the result in dic.
# key: key for the entry
# val: value to be parsed. If None, ignore it silently
# dic: dictionary containing previously parsed values; the result is stored here
def parse_string(key, val, dic):
if val:
dic[key] = val.strip()
return dic.get(key, None)
def parse_bool(key, val, dic):
if val:
val = val.strip().lower()
bval = False
if val in set(['on', 'true', '1', 'yes']): bval = True
elif val in set(['off', 'false', '0', 'no']): bval = False
else: raise ValueError("Unrecognized boolean value: %s" % val)
dic[key] = bval
return dic.get(key, None)
def parse_path(key, val, dic):
if val:
dic[key] = os.path.expandvars(os.path.expanduser(val.strip()))
return dic.get(key, None)
def parse_list(key, val, dic, parse_val=parse_string):
if not key in dic: dic[key] = []
if val:
L = val.split(',')
# do not accumulate with previous values if the list starts with a comma.
if len(L[0].strip()) == 0: L=L[1:]
else: dic[key] = []
for it in L:
v = parse_val('key', it, {})
if v:
dic[key].append(v)
return dic[key]
def parse_list_args(key, val, dic):
if not key in dic: dic[key] = []
if val:
dic[key] = [] # never accumulate args
for it in shlex.split(val):
dic[key].append(it.strip())
return dic[key]
def parse_dict(key, val, dic, parse_val=parse_string):
"""ex. dict=A:1,B:2,C:3"""
if not key in dic: dic[key] = {}
if val:
L = val.split(',')
# do not accumulate with previous values if the list starts with a comma.
if len(L[0].strip()) == 0: L=L[1:]
else: dic[key] = {}
for it in L:
spl = it.strip().split(':', 1)
if len(spl) == 0:
continue
elif len(spl) == 2:
parse_val(spl[0].strip(), spl[1].strip(), dic[key])
else:
raise ValueError("Wrong value for dict field: %s" % key)
return dic[key]
def parse_keyval(key, val, dic, parse_val=parse_string):
"""ex. uuid:directory = blahblah"""
ret = {}
spl = key.strip().split('.', 1)
if len(spl) == 0:
return
elif len(spl) == 1:
parse_val(spl[0], val, dic)
elif len(spl) >= 2:
key = spl[0]
rest = spl[1]
if not key in dic: dic[key] = {}
parse_keyval(rest, val, dic[key], parse_val)
def parse_dict_path(key, val, dic):
return parse_dict(key, val, dic, parse_path)
def parse_list_path(key, val, dic):
return parse_list(key, val, dic, parse_path)
def parse_keyval_path(key, val, dic):
return parse_keyval(key, val, dic, parse_path)
class AsyncConfig(ConfigParser):
# default values and parsing functions
FIELDS={
'host': {
'dirs' : ([], parse_list),
'ignore' : ([], parse_list_path),
'symlinks' : ({}, parse_dict_path), # key:val. 'key' is dirname, 'val' target path.
'hostname' : (None, parse_string),
'user' : (None, parse_string),
'mac_address' : (None, parse_string),
'ssh_key' : (None, parse_path),
'ssh_trust' : (False, parse_bool),
'unison_as_rsync': (False, parse_bool),
'kill_systemd_user' : (False, parse_bool),
'swapfile' : (None, parse_path),
'mounts' : ({}, parse_dict),
'luks' : ({}, parse_dict),
'ecryptfs' : ({}, parse_dict),
'vol_keys' : (None, parse_path),
'mount_options' : (None, parse_string),
'default_remote' : (None, parse_string),
'path' : (None, parse_path),
'check_mounts' : ([], parse_list),
'type' : (None, parse_string),
'instance' : (None, parse_string),
'annex_pull' : ([], parse_list), # directories where we pull annexed files from remote
'annex_push' : ([], parse_list), # directories where we push annexed files to remote
'log_cmd' : (None, parse_path), # parse as a path to expand shell vars
'update_cmd' : (None, parse_path), # parse as a path to expand shell vars
'save_lastsync' : (True, parse_bool), # store .async.last with last sync metadata
'asynclast_file' : (".async.last", parse_string),
'skip_missing' : (False, parse_bool),
},
'instance': {
'ec2_ami' : (None, parse_string),
'ec2_owner' : (None, parse_string),
'ec2_region' : (None, parse_string),
'ec2_itype' : (None, parse_string),
'ec2_keypair' : (None, parse_string),
'ec2_security_group' : (None, parse_string),
'aws_keys' : (None, parse_path),
'volumes' : ({}, parse_dict),
'zone' : (None, parse_string),
'user' : (None, parse_string),
},
'remote': {
'url' : (None, parse_string),
'host' : (None, parse_string),
'dead' : (False, parse_bool),
'git_hooks\..*' : ({}, parse_keyval_path),
'uuid\..*' : ({}, parse_keyval),
},
'directory': {
'perms' : ('700', parse_string), # directory perms
'type' : (None, parse_string), # sync method
'symlink' : (None, parse_path), # the directory is a symlink to this target
'path' : (None, parse_path), # relative path of the dir. None means same as name.
'path_rename' : ({}, parse_dict_path), # rename path on specific hosts
'save_lastsync' : (True, parse_bool), # store .async.last with last sync metadata
'asynclast_file' : (".async.last", parse_string),
'subdirs' : ([], parse_list_path),
'check_paths' : ([], parse_list),
'ignore' : ([], parse_list_path),
'unison_profile' : (None, parse_string),
'unison_args' : ([], parse_list_args),
'rsync_args' : ([], parse_list_args),
'githooks_dir' : ("", parse_path),
'pre_init_hook' : ([], parse_list_path), # scripts to run before initialization
'post_init_hook' : ([], parse_list_path), # scripts to run after initialization
'pre_sync_hook' : ([], parse_list_path), # scripts to run before sync
'post_sync_hook' : ([], parse_list_path), # scripts to run after sync
'pre_sync_remote_hook' : ([], parse_list_path), # scripts to run on the remote before sync
'post_sync_remote_hook' : ([], parse_list_path), # scripts to run on the remote after sync
'pre_check_hook' : ([], parse_list_path), # scripts to run before check
'post_check_hook' : ([], parse_list_path), # scripts to run after check
},
'async': {
'color' : (True, parse_bool), # color UI
'logfile' : (None, parse_path), # logfile
'pager_cmd' : ("", parse_path), # parse as a path to expand shell vars
},
}
def _parse_config(self, conf, fields, defaults):
dic = {'conf_path': self.path}
for key, pair in fields.items():
func = pair[1]
initval = pair[0]
# if key contains a star, it is a regexp. match it!
if '*' in key:
Lconf = [k for k, v in conf.items() if re.match(key, k)]
Ldef = [k for k, v in defaults.items() if re.match(key, k)]
else:
Lconf = [key]
Ldef = [key]
if not key in dic: dic[key] = initval
# parse defaults
for key in Ldef:
defval = defaults.get(key, None)
func(key, defval, dic)
# parse config
for key in Lconf:
val = conf.get(key, None)
func(key, val, dic)
return dic
def __init__(self, cfgdir):
ConfigParser.__init__(self)
self.read(glob.glob(os.path.join(cfgdir, '*.conf')))
self.path = cfgdir
self.host = {}
self.remote = {}
self.instance = {}
self.directory = {}
self.async = {}
# put objects in a dict for easier access
objects = {
'host': self.host,
'remote': self.remote,
'instance': self.instance,
'directory': self.directory,
}
# parse async settings
self.async = self._parse_config(dict(self.items('async')), AsyncConfig.FIELDS['async'], {})
# parse sections
sec_re = re.compile(r'^\s*(.*)\s+([^"]*|"[^"]*")\s*$')
for sec in self.sections():
m = sec_re.match(sec)
if m:
obj = m.group(1).strip()
name = m.group(2).strip('"')
if not obj in objects.keys():
raise AsyncConfigError("Unknown object section '%s'" % obj)
defaults = dict(self.items('%s_defaults' % obj))
conf = dict(self.items(sec))
objects[obj][name] = self._parse_config(conf, AsyncConfig.FIELDS[obj], defaults)
objects[obj][name]['name'] = name
# match instance to ec2 hosts
for k, val in self.host.items():
if val['type'] == 'ec2':
if k in self.instance:
val['instance'] = dict(self.instance[k])
else:
raise AsyncConfigError("Unknown instance for host: %s" % k)
# match hosts to remotes
for k, val in self.remote.items():
host = val['host']
if host != None:
if host in self.host:
val['host'] = self.host[host]
else:
raise AsyncConfigError("Unknown host %s in remote %s" % (host, k))
# match remotes to git or annex dirs
for k, val in self.directory.items():
if val['type'] == 'annex' or val['type'] == 'git':
val['git_remotes'] = self.remote
# attach dirs data to hosts
for k, val in self.host.items():
dirs = val['dirs']
if dirs:
# check that all dirs are known
for k in dirs:
if not k in self.directory: raise AsyncConfigError("Unknown directory: %s" % k)
# copy dirs objects to the host
val['dirs'] = OrderedDict([(k, dict(self.directory[k])) for k in dirs])
# override symlink settings from host
for k, p in val['symlinks'].items():
if k in val['dirs']:
val['dirs'][k]['symlink'] = p
# vim: expandtab:shiftwidth=4:tabstop=4:softtabstop=4:textwidth=80
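# Illustrative sketch (not part of the original module): how the small parsers
# above store values into a dict:
#
#   d = {}
#   parse_list('dirs', 'music, photos', d)     # d == {'dirs': ['music', 'photos']}
#   parse_dict('mounts', 'data:/mnt/data', d)  # d['mounts'] == {'data': '/mnt/data'}
#   parse_bool('dead', 'yes', d)               # d['dead'] == True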
|
aroig/async
|
async/config.py
|
Python
|
gpl-3.0
| 12,584
|
# -*- coding: utf-8 -*-
# Copyright(C) 2010 Roger Philibert
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import datetime
import lxml.html
import re
from weboob.tools.browser import BasePage
from weboob.tools.misc import to_unicode
from weboob.tools.parsers.lxmlparser import select, SelectElementException
from ..video import YoujizzVideo
__all__ = ['VideoPage']
class VideoPage(BasePage):
def get_video(self, video=None):
_id = to_unicode(self.group_dict['id'])
if video is None:
video = YoujizzVideo(_id)
title_el = select(self.document.getroot(), 'title', 1)
video.title = to_unicode(title_el.text.strip())
# youjizz HTML is crap, we must parse it with regexps
data = lxml.html.tostring(self.document.getroot())
m = re.search(r'<strong>.*?Runtime.*?</strong> (.+?)<br.*>', data)
try:
if m:
minutes, seconds = (int(v) for v in to_unicode(m.group(1).strip()).split(':'))
video.duration = datetime.timedelta(minutes=minutes, seconds=seconds)
else:
raise Exception()
except Exception:
raise SelectElementException('Could not retrieve video duration')
video_file_urls = re.findall(r'"(http://media[^ ,]+\.flv)"', data)
if len(video_file_urls) == 0:
raise SelectElementException('Video URL not found')
elif len(video_file_urls) > 1:
raise SelectElementException('Many video file URL found')
else:
video.url = video_file_urls[0]
return video
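# Illustrative sketch (not part of the original page class): the duration
# parsing above turns a "MM:SS" runtime string into a timedelta:
#
#   minutes, seconds = (int(v) for v in u'12:34'.split(':'))
#   datetime.timedelta(minutes=minutes, seconds=seconds)   # -> 12 min 34 s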
|
jocelynj/weboob
|
weboob/backends/youjizz/pages/video.py
|
Python
|
gpl-3.0
| 2,211
|
"""Tests for Oscapcontent
:Requirement: Oscapcontent
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: UI
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import unittest2
from fauxfactory import gen_string
from nailgun import entities
from robottelo.config import settings
from robottelo.constants import OSCAP_DEFAULT_CONTENT
from robottelo.datafactory import invalid_values_list, valid_data_list
from robottelo.decorators import (
skip_if_bug_open,
skip_if_not_set,
tier1,
tier2,
)
from robottelo.helpers import get_data_file
from robottelo.test import UITestCase
from robottelo.ui.factory import make_oscapcontent
from robottelo.ui.locators import common_locators
from robottelo.ui.session import Session
class OpenScapContentTestCase(UITestCase):
"""Implements Oscap Content tests in UI."""
@classmethod
@skip_if_not_set('oscap')
def setUpClass(cls):
super(OpenScapContentTestCase, cls).setUpClass()
path = settings.oscap.content_path
cls.content_path = get_data_file(path)
org = entities.Organization(name=gen_string('alpha')).create()
cls.org_name = org.name
proxy = entities.SmartProxy().search(
query={
u'search': u'name={0}'.format(
settings.server.hostname)
}
)[0]
proxy.organization = [org]
@tier1
def test_positive_create(self):
"""Create OpenScap content.
:id: 6580cffa-da37-40d5-affa-cfb1ff27c545
:Steps:
1. Create an openscap content.
2. Provide all the appropriate parameters.
:expectedresults: Whether creating content for OpenScap is successful.
:CaseImportance: Critical
"""
with Session(self.browser) as session:
for content_name in valid_data_list():
with self.subTest(content_name):
make_oscapcontent(
session,
name=content_name,
content_path=self.content_path,
content_org=self.org_name,
)
self.assertIsNotNone(
self.oscapcontent.search(content_name))
@skip_if_bug_open('bugzilla', 1289571)
@tier1
def test_negative_create_with_invalid_name(self):
"""Create OpenScap content with negative values
:id: 8ce0e8b4-396a-43cd-8cbe-fb60fcf853b0
:Steps:
1. Create an openscap content.
2. Provide all the appropriate parameters.
:expectedresults: Creating content for OpenScap is not successful.
:BZ: 1289571
:CaseImportance: Critical
"""
with Session(self.browser) as session:
for content_name in invalid_values_list(interface='ui'):
with self.subTest(content_name):
make_oscapcontent(
session,
name=content_name,
content_path=self.content_path,
content_org=self.org_name,
)
self.assertIsNotNone(session.nav.wait_until_element(
common_locators['haserror']))
@tier1
@unittest2.skip('oscap contents are not installed by default. '
'Installer needs to be fixed')
def test_positive_default(self):
"""Check whether OpenScap content exists by default.
:id: 0beca127-8294-4d85-bace-b9170215c0cd
:Steps:
1. Set Org as Any Org.
2. Navigate to oscap Content page.
:expectedresults: Whether oscap content exists by default.
:CaseImportance: Critical
"""
# see BZ 1336374
with Session(self.browser):
self.assertIsNotNone(self.oscapcontent.search(
OSCAP_DEFAULT_CONTENT['rhel7_content']))
self.assertIsNotNone(self.oscapcontent.search(
OSCAP_DEFAULT_CONTENT['rhel6_content']))
@tier2
def test_positive_update(self):
"""Update OpenScap content.
:id: 9870555d-0b60-41ab-a481-81d4d3f78fec
:Steps:
1. Create an openscap content.
2. Provide all the appropriate parameters.
3. Update the openscap content, here the Org.
:expectedresults: Whether creating content for OpenScap is successful.
:CaseLevel: Integration
"""
org = entities.Organization(name=gen_string('alpha')).create()
content_name = gen_string('alpha')
with Session(self.browser) as session:
make_oscapcontent(
session,
name=content_name,
content_path=self.content_path,
content_org=self.org_name,
)
self.oscapcontent.update(content_name, content_org=org.name)
session.nav.go_to_select_org(org.name)
self.assertIsNotNone(
self.oscapcontent.search(content_name))
@tier1
def test_positive_delete(self):
"""Create OpenScap content and then delete it.
:id: 8eade129-5666-4e90-ba3e-f0c51a3090ce
:Steps:
1. Create an openscap content.
2. Provide all the appropriate parameters.
3. Delete the openscap content.
:expectedresults: Deleting content for OpenScap is successful.
:CaseImportance: Critical
"""
with Session(self.browser) as session:
for content_name in valid_data_list():
with self.subTest(content_name):
make_oscapcontent(
session,
name=content_name,
content_path=self.content_path,
content_org=self.org_name,
)
self.assertIsNotNone(
self.oscapcontent.search(content_name))
self.oscapcontent.delete(content_name)
|
elyezer/robottelo
|
tests/foreman/ui/test_oscapcontent.py
|
Python
|
gpl-3.0
| 6,070
|
#!/usr/bin/env python
#
# Copyright 2012 <+YOU OR YOUR COMPANY+>.
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
#
from gnuradio import gr, gr_unittest
import flysky_swig
class qa_dumpsync (gr_unittest.TestCase):
def setUp (self):
self.tb = gr.top_block ()
def tearDown (self):
self.tb = None
def test_001_t (self):
# set up fg
self.tb.run ()
# check data
if __name__ == '__main__':
gr_unittest.main ()
|
zeza/gnuradio-rc-testcode
|
gr-flysky/python/qa_flysky_dumpsync.py
|
Python
|
gpl-3.0
| 1,124
|
from typing import Iterable, Mapping, Optional
from lib import data
from ..channel import pyramid
from ..channel import wall
def filterMessage() -> Iterable[data.ChatCommand]:
return []
def commands() -> Mapping[str, Optional[data.ChatCommand]]:
if not hasattr(commands, 'commands'):
setattr(commands, 'commands', {
'!pyramid': pyramid.commandPyramid,
'!rpyramid': pyramid.commandRandomPyramid,
'!wall': wall.commandWall,
})
return getattr(commands, 'commands')
def commandsStartWith() -> Mapping[str, Optional[data.ChatCommand]]:
if not hasattr(commandsStartWith, 'commands'):
setattr(commandsStartWith, 'commands', {
'!pyramid-': pyramid.commandPyramidLong,
'!wall-': wall.commandWallLong,
})
return getattr(commandsStartWith, 'commands')
def processNoCommand() -> Iterable[data.ChatCommand]:
return []
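# Illustrative sketch (not part of the original module): commands() and
# commandsStartWith() memoize their tables on the function object itself, so
# each dict is built once per process. A minimal generic form of the idiom:
#
#   def table() -> dict:
#       if not hasattr(table, 'cache'):
#           setattr(table, 'cache', {'built': True})
#       return getattr(table, 'cache')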
|
MeGotsThis/BotGotsThis
|
pkg/spam/items/channel.py
|
Python
|
gpl-3.0
| 941
|
import sqlite3
import sys
"""<Mindpass is a intelligent password manager written in Python3
that checks your mailbox for logins and passwords that you do not remember.>
Copyright (C) <2016> <Cantaluppi Thibaut, Garchery Martial, Domain Alexandre, Boulmane Yassine>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>."""
sys.path.append('../fenetres/')
from functools import partial
from PyQt5 import QtWidgets, QtGui, QtCore
from fenetreGestion import Ui_fenetreGestion
from requetes import *
import numpy as np
import colorsys
bdd = "../../../Traitement_mails/bdd.sq3"
def print_(arg):
"""
Args:
arg: the value to display
Returns: nothing; the value is printed followed by a row of '-' to space out the output
"""
print(arg)
print("-------------------------------------")
class LineEditWithFocusOut(QtWidgets.QLineEdit):
"""docstring for LineEditWithFocusOut
Re-implementation of QLineEdit(), changing its behaviour on a
focusOut event. Here, we update the identifier in the
sites_reconnus_<nom_table> table.
"""
def __init__(self, nom_table):
super().__init__()
self.nom_table = nom_table
def focusOutEvent(self, arg):
QtWidgets.QLineEdit.focusOutEvent(self, arg)
# self.id holds the id of the LineEdit, set in afficher_ligne_site()
requete= "UPDATE sites_reconnus_"+self.nom_table+" SET identifiant =? WHERE rowid=?"
bdd_update(requete, (self.text(), self.id +1))
if self.text() == "":
self.setPlaceholderText("Ajouter un pseudo")
class LigneSite(object):
"""docstring for LigneSite"""
def __init__(self, y, site_web, identifiant, mdp, categorie, objet, nom_table):
self.position = y
self.objet = objet
self.nom_site = site_web
self.nom_mdp = mdp
self.nom_cat = categorie
self.nom_table = nom_table
self.ligne = QtWidgets.QHBoxLayout()
self.site_web =QtWidgets.QLabel()
self.site_web.setAlignment(QtCore.Qt.AlignCenter)
self.site_web.setObjectName("site_web")
self.site_web.setText(site_web)
self.ligne.addWidget(self.site_web)
self.identifiant = LineEditWithFocusOut(self.nom_table)
self.identifiant.setAlignment(QtCore.Qt.AlignCenter)
self.identifiant.setObjectName('identifiant')
self.identifiant.id = y
if identifiant is None or identifiant == "":
self.identifiant.setPlaceholderText("Ajouter un pseudo")
else:
self.identifiant.setText(identifiant)
self.ligne.addWidget(self.identifiant)
self.mdp = QtWidgets.QComboBox()
self.mdp.setObjectName("mdp")
self.afficher_combo_pwd() # populate the combobox items from the database
self.ligne.addWidget(self.mdp)
self.categorie = QtWidgets.QComboBox()
self.categorie.setObjectName("categorie")
self.afficher_combo_cat() # populate the combobox items from the database
self.ligne.addWidget(self.categorie)
self.ligne.setStretch(0, 2)
self.ligne.setStretch(1, 2)
self.ligne.setStretch(2, 2)
self.ligne.setStretch(3, 2)
self.categorie.currentIndexChanged.connect(self.changement_cat)
self.mdp.currentIndexChanged.connect(self.changement_pwd)
def changement_cat(self, event):
requete ="SELECT categorie FROM sites_reconnus_"+self.nom_table+" WHERE rowid=?"
ancienne_categorie = toliste(bdd_select(requete, (self.position+1,)))[0]
# Add the site_web under the matching category
requete= "UPDATE sites_reconnus_"+self.nom_table+" SET categorie=? WHERE rowid=?"
bdd_update(requete, (self.categorie.currentText(), self.position +1))
print("Catégorie changée en"+ self.categorie.currentText())
for k in range(len(self.objet.cats)):
if(self.objet.cats[k].nom == self.categorie.currentText()):
liste_label_name =[]
for element in self.objet.cats[k].labels:
liste_label_name.append(element.text())
if(self.categorie.currentText() not in liste_label_name):
label = QtWidgets.QLabel()
font = QtGui.QFont()
font.setPointSize(9)
font.setItalic(True)
label.setFont(font)
label.setObjectName("sites_lies_cat")
label.setText(self.site_web.text())
self.objet.cats[k].labels.append(label)
self.objet.cats[k].verticalLayout_groupBox.addWidget(label)
break
# Refresh the groupBox of the old category
for k in range(len(self.objet.cats)):
if(self.objet.cats[k].nom == ancienne_categorie):
for label in self.objet.cats[k].labels:
label.deleteLater()
self.objet.cats[k].labels = []
requete ="SELECT site_web FROM sites_reconnus_"+self.nom_table+" WHERE categorie=?"
sites_lies= toliste(bdd_select(requete, (ancienne_categorie,)))
self.objet.cats[k].affichage_sites_lies(sites_lies)
# Update the label whose category was changed
for pwd in self.objet.pwds:
for label in pwd.labels:
if(label.texte == self.nom_site):
pwd.update(label, self.categorie.currentText())
# Update the colour of the groupBox_pwd containing the associated label
pwd.update_color_groupBox()
def changement_pwd(self):
requete ="SELECT mdp FROM sites_reconnus_"+self.nom_table+" WHERE rowid=?"
ancien_mdp = toliste(bdd_select(requete, (self.position+1,)))[0]
# Add the site_web under the matching password
requete= "UPDATE sites_reconnus_"+self.nom_table+" SET mdp=? WHERE rowid=?"
nouveau_mdp = self.mdp.currentText()
bdd_update(requete, (nouveau_mdp , self.position +1))
print("Mdp changée en"+ nouveau_mdp)
for k in range(len(self.objet.pwds)):
if(self.objet.pwds[k].nom == nouveau_mdp):
liste_label_name =[]
for element in self.objet.pwds[k].labels:
liste_label_name.append(element.text())
if(nouveau_mdp not in liste_label_name):
self.objet.pwds[k].label(self.site_web.text())
break
# Refresh the groupBox of the old password
for k in range(len(self.objet.pwds)):
if(self.objet.pwds[k].nom == ancien_mdp):
for label in self.objet.pwds[k].labels:
label.deleteLater()
self.objet.pwds[k].labels = []
requete ="SELECT site_web FROM sites_reconnus_"+self.nom_table+" WHERE mdp=?"
sites_lies= toliste(bdd_select(requete, (ancien_mdp,)))
self.objet.pwds[k].affichage_sites_lies(sites_lies)
for pwd in self.objet.pwds:
if(pwd.nom == ancien_mdp):
pwd.update_color_groupBox()
elif(pwd.nom == nouveau_mdp):
pwd.update_color_groupBox()
def update_pwd_combobox(self, complet):
print(self.mdp.maxCount())
def afficher_combo_pwd(self):
requete= "SELECT mdp FROM mdps_"+self.nom_table+""
tab = bdd_select(requete)
result = []
for k in range(len(tab)):
result.append(tab[k][0])
self.mdp.addItem(self.nom_mdp)
for pwd in result:
if pwd and pwd != self.nom_mdp:
self.mdp.addItem(pwd)
if(self.nom_mdp and self.nom_mdp != ""):
self.mdp.addItem("")
def afficher_combo_cat(self):
requete= "SELECT nom_categorie FROM categories_"+self.nom_table
tab = bdd_select(requete)
result = []
for k in range(len(tab)):
result.append(tab[k][0])
self.categorie.addItem(self.nom_cat)
for cat in result:
if cat and cat != self.nom_cat:
self.categorie.addItem(cat)
if(self.nom_cat and self.nom_cat != ""):
self.categorie.addItem("")
class Ligne(object):
"""docstring for ligneCategorie
(objet) is the object holding every element of the window.
It gives access to those elements so they can be modified.
"""
def __init__(self, position, nom, sites_lies, objet, nom_table):
self.position = position
self.nom = nom
self.sites_lies = sites_lies
self.objet = objet
self.nom_table =nom_table
self.ligne = QtWidgets.QHBoxLayout()
self.pushButton = QtWidgets.QPushButton()
self.pushButton.setMinimumSize(QtCore.QSize(24, 24))
self.groupBox = QtWidgets.QGroupBox()
self.colorHEX = "#757575"
self.labels = [] # will hold the list of labels (names of the linked sites)
self.groupBox.setGeometry(QtCore.QRect(20, 50, 91, 50))
font = QtGui.QFont()
font.setPointSize(11)
self.groupBox.setFont(font)
self.groupBox.setObjectName("groupBox")
self.verticalLayout_groupBox = QtWidgets.QVBoxLayout(self.groupBox)
self.verticalLayout_groupBox.setObjectName("verticalLayout_groupBox")
self.ligne.addWidget(self.groupBox)
self.ligne.addWidget(self.pushButton)
self.ligne.setStretch(0, 20)
self.ligne.setStretch(1, 1)
self.affichage_sites_lies(sites_lies)
# Event wiring
self.pushButton.clicked.connect(self.msgbox)
def msgbox(self):
msg = QtWidgets.QMessageBox()
msg.setIcon(QtWidgets.QMessageBox.Information)
msg.setText("Voulez-vous vraiment supprimer \""+ str(self.nom) + "\" ?")
msg.setIcon(2)
msg.setInformativeText("Les liens établis avec les sites seront perdus.")
msg.setWindowTitle("Confirmer suppression")
msg.addButton(QtWidgets.QPushButton('Oui'), QtWidgets.QMessageBox.YesRole)
msg.addButton(QtWidgets.QPushButton('Non'), QtWidgets.QMessageBox.NoRole)
msg.buttonClicked.connect(self.msgbtn)
ret = msg.exec_();
def msgbtn(self, buttonClicked):
if(buttonClicked.text() == "Oui"):
self.suppression()
def suppression(self):
self.suppression_bdd()
self.suppression_affichage()
def affichage_sites_lies(self, site_lies):
pass
class Categorie(Ligne):
"""docstring for Categorie"""
def __init__(self, position, nom, sites_lies, objet, nom_table):
# Run Ligne.__init__()
super().__init__(position, nom, sites_lies, objet, nom_table)
# Then add further attributes/properties
self.ligne.setObjectName("ligne_categorie")
self.groupBox.setObjectName("groupBox_cat")
self.groupBox.setTitle(nom)
self.pushButton.setObjectName("pushButton_cat")
def setColor(self, k, nb_cat):
num_colors=nb_cat
colors=[]
for i in np.arange(0., 360., 360. / num_colors):
hue = i/360.
lightness = (50 + np.random.rand() * 10)/100.
saturation = (55 + np.random.rand() * 10)/100.
colors.append(colorsys.hls_to_rgb(hue, lightness, saturation))
t= colors
self.colorRGB = (int(t[k][0]*255),int(t[k][1]*255),int(t[k][2]*255))
self.colorHEX ='#%02x%02x%02x' % self.colorRGB
self.groupBox.setStyleSheet("QGroupBox {\n"
"border: 2px solid rgb(" + str(self.colorRGB[0]) + "," + str(self.colorRGB[1]) + "," + str(self.colorRGB[2]) + ");\n"
"}\n"
"QGroupBox:title {\n"
"color: rgb(" + str(self.colorRGB[0]) + "," + str(self.colorRGB[1]) + "," + str(self.colorRGB[2]) + ");\n"
"}\n"
)
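# Illustrative sketch (added here, not in the original class): setColor()
# spreads the category hues evenly around the HSL wheel before converting to
# hex. A minimal form of the same idea, using only colorsys and numpy:
#
#   hues = [i / 360. for i in np.arange(0., 360., 360. / 4)]   # 4 categories
#   rgb = [colorsys.hls_to_rgb(h, 0.55, 0.60) for h in hues]
#   hexes = ['#%02x%02x%02x' % tuple(int(c * 255) for c in t) for t in rgb]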
def affichage_sites_lies(self, sites_lies):
for site in sites_lies:
label = QtWidgets.QLabel()
font = QtGui.QFont()
font.setPointSize(9)
font.setItalic(True)
label.setFont(font)
label.setObjectName("sites_lies_cat")
label.setText(site)
self.labels.append(label)
self.verticalLayout_groupBox.addWidget(label)
def suppression_bdd(self):
requete = "DELETE FROM categories_"+self.nom_table +" WHERE nom_categorie=?"
bdd_delete(requete, (self.nom,))
print("Categorie supprimée: "+ self.nom)
def suppression_affichage(self):
# remove the category from the comboboxes
for k in range(len(self.objet.sites)):
if self.objet.sites[k].categorie.currentText() == self.nom:
# if the deleted category was the site's, switch the site's category to the empty choice ""
if self.objet.sites[k].categorie.findText("") == -1:
# if the empty choice "" is missing, add it
self.objet.sites[k].categorie.addItem("")
self.objet.sites[k].categorie.setCurrentIndex(self.objet.sites[k].categorie.findText(""))
index = self.objet.sites[k].categorie.findText(self.nom)
self.objet.sites[k].categorie.removeItem(index)
# destroy the layouts inside the scroll_area
self.objet.scrollAreaWidgetContents_cat.deleteLater()
# clear the attributes
self.objet.cats = []
# Recreate an empty one
self.objet.scrollAreaWidgetContents_cat = QtWidgets.QWidget()
self.objet.scrollAreaWidgetContents_cat.setGeometry(QtCore.QRect(0, 0, 177, 767))
self.objet.scrollAreaWidgetContents_cat.setObjectName("scrollAreaWidgetContents_cat")
self.objet.verticalLayout_3 = QtWidgets.QVBoxLayout(self.objet.scrollAreaWidgetContents_cat)
self.objet.verticalLayout_3.setObjectName("verticalLayout_3")
self.objet.scrollArea_cat.setWidget(self.objet.scrollAreaWidgetContents_cat)
# on relance la méthode d'affichage des catégories
self.objet.afficher_categories()
self.objet.actualiser_couleur_pwd()
def ajout_combobox(self):
for k in range(len(self.objet.sites)):
self.objet.sites[k].categorie.addItem(self.nom)
class Password(Ligne):
"""docstring for Password"""
def __init__(self, position, nom, sites_lies, objet, nom_table):
super().__init__(position, nom, sites_lies, objet, nom_table)
self.ligne.setObjectName("ligne_pwd")
self.groupBox.setObjectName("groupBox_pwd")
self.groupBox.setTitle(nom)
self.pushButton.setObjectName("pushButton_pwd")
        # Update the color of groupBox_pwd
self.update_color_groupBox()
def update_title(self, titre):
self.groupBox.setTitle(titre)
def affichage_sites_lies(self, sites_lies):
for site in sites_lies:
self.label(site)
    def label(self, site):
        label = QtWidgets.QLabel()
        label.texte = site
        font = QtGui.QFont()
        font.setPointSize(9)
        font.setItalic(True)
        label.setFont(font)
        label.setObjectName("sites_lies_pwd")
        # Look the color up once instead of querying the database twice.
        couleur = self.getColor_label(site)
        label.colorRGB = couleur[0]
        label.colorHEX = couleur[1]
        texte = self.create_text_label(label.colorHEX, site)
        label.setText(texte)
        self.labels.append(label)
        self.verticalLayout_groupBox.addWidget(label)
    def create_text_label(self, couleur, site):
        # Colored bullet followed by the site name, as QLabel rich text.
        texte = "<font size='5' font-style='' color=" + couleur + ">•</font> " + site
        return(texte)
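    # For illustration (hypothetical input): create_text_label("#fff", "github.com")
    # returns "<font size='5' font-style='' color=#fff>•</font> github.com".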
    def update(self, label, categorie):
        couleur = "#fff"
        for k in range(len(self.objet.cats)):
            if self.objet.cats[k].nom == categorie:
                couleur = self.objet.cats[k].colorHEX
        label.colorHEX = couleur
        texte = self.create_text_label(couleur, label.texte)
        label.setText(texte)
    def update_color_groupBox(self):
        # The groupbox border takes the common color of its labels; if the
        # linked sites have mixed colors (or no color at all), fall back to
        # a neutral gray.
        colorGroupBox = self.colorHEX
        if self.labels != []:
            if self.labels[0].colorHEX != "#fff":
                colorGroupBox = self.labels[0].colorHEX
            # Check whether all labels share this color
            all_same = True
            for label in self.labels:
                if label.colorHEX != colorGroupBox:
                    all_same = False
            if not all_same:
                colorGroupBox = "#757575"
        else:
            colorGroupBox = "#757575"
        self.groupBox.setStyleSheet("QGroupBox {"
        "border-color:" + colorGroupBox + ";"
        "}")
def getColor_label(self, site):
"""En paramètre le site, retourne un tableau de couleur [RGB, HEX] (associée à la categorie
éventuellement assignées
"""
requete = "SELECT categorie FROM sites_reconnus_"+self.nom_table+" WHERE site_web=?"
categorie = toliste(bdd_select(requete, (site,)))[0]
tab = ["rgb(255,255,255)","#fff"]
for k in range(len(self.objet.cats)):
if(self.objet.cats[k].nom == categorie):
tab[0] = self.objet.cats[k].colorRGB
tab[1] = self.objet.cats[k].colorHEX
return(tab)
def suppression_bdd(self):
requete = "DELETE FROM mdps_"+self.nom_table+" WHERE mdp=?"
bdd_delete(requete, (self.nom,))
print("Pwd supprimée: "+ self.nom)
    def suppression_affichage(self):
        # Remove the password from every site's combobox
        for k in range(len(self.objet.sites)):
            if self.objet.sites[k].mdp.currentText() == self.nom:
                # If the deleted password was the site's current one, switch
                # the site to the empty choice ""
                if self.objet.sites[k].mdp.findText("") == -1:
                    # If the empty choice "" is not there yet, add it
                    self.objet.sites[k].mdp.addItem("")
                self.objet.sites[k].mdp.setCurrentIndex(self.objet.sites[k].mdp.findText(""))
            index = self.objet.sites[k].mdp.findText(self.nom)
            self.objet.sites[k].mdp.removeItem(index)
        # Destroy the layouts inside the scroll area
        self.objet.scrollAreaWidgetContents_pwd.deleteLater()
        # Clear the attribute
        self.objet.pwds = []
        # Recreate an empty container widget
        self.objet.scrollAreaWidgetContents_pwd = QtWidgets.QWidget()
        self.objet.scrollAreaWidgetContents_pwd.setGeometry(QtCore.QRect(0, 0, 177, 767))
        self.objet.scrollAreaWidgetContents_pwd.setObjectName("scrollAreaWidgetContents_pwd")
        self.objet.verticalLayout_2 = QtWidgets.QVBoxLayout(self.objet.scrollAreaWidgetContents_pwd)
        self.objet.verticalLayout_2.setObjectName("verticalLayout_2")
        self.objet.scrollArea_pwd.setWidget(self.objet.scrollAreaWidgetContents_pwd)
        # Re-run the password display method
        self.objet.afficher_pwds()
class ClasseGestion(Ui_fenetreGestion):
def __init__(self, fenetre):
self.setupUi(fenetre)
self.ajouter_cat.setPlaceholderText("Ajouter une catégorie")
self.ajouter_pwd.setPlaceholderText("Ajouter un mot de passe")
self.lineEdit_ajout_site.setPlaceholderText("Ajouter un site web")
        # Events
self.ajouter_cat.returnPressed.connect(self.check_if_exist_cat)
self.ajouter_pwd.returnPressed.connect(self.check_if_exist_pwd)
self.lineEdit_ajout_site.returnPressed.connect(self.check_new_site)
self.pushButton_ajout_site.clicked.connect(self.check_new_site)
self.sites = []
self.cats = []
self.pwds = []
def lancement(self, user_email, nom_table):
self.user_email = user_email
self.nom_table = nom_table
self.afficher_sites()
self.afficher_categories()
self.afficher_pwds()
self.setupMenu()
def setupMenu(self):
self.aide_url = "https://github.com/MindPass/Code/wiki/Aide"
self.apropos_url ="https://github.com/MindPass/Code"
self.actionObtenir_de_l_aide.triggered.connect(self.ouvrirAide)
self.actionA_propos_de_MindPass.triggered.connect(self.ouvrirApropos)
"""
self.actionMode_deux_lettres.triggered.connect(self.check_deux_lettres)
self.actionMode_complet.triggered.connect(self.check_complet)
self.menuAffichage()
"""
"""
def check_deux_lettres(self):
self.actionMode_deux_lettres.setChecked(True)
self.actionMode_complet.setChecked(False)
self.menuAffichage()
def check_complet(self):
self.actionMode_deux_lettres.setChecked(False)
self.actionMode_complet.setChecked(True)
self.menuAffichage()
def menuAffichage(self):
if(self.actionMode_deux_lettres.isChecked()):
self.affichage_deux_lettres()
else:
self.affichage_complet()
def affichage_complet(self):
for pwd in self.pwds:
pwd.update_title(pwd.nom)
for site in self.sites:
site.update_pwd_combobox(1)
def affichage_deux_lettres(self):
pass
"""
def ouvrirAide(self):
self.openURL(self.aide_url)
def ouvrirApropos(self):
self.openURL(self.apropos_url)
def openURL(self, given_url):
url = QtCore.QUrl(given_url)
if not QtGui.QDesktopServices.openUrl(url):
            QtWidgets.QMessageBox.warning(None, "Open Url", "Could not open url")
def check_if_exist_cat(self):
"""
Vérifier que la catégorie en question n'est pas déjà dans la base de donnée
"""
if self.ajouter_cat.displayText() != "":
requete = "SELECT nom_categorie FROM categories_"+self.nom_table +" WHERE nom_categorie=?"
categories_table = bdd_select(requete, (self.ajouter_cat.displayText(),))
conditions = not categories_table or categories_table[0][0] != self.ajouter_cat.displayText()
if conditions:
self.ajouter_categorie()
                # Refresh the category colors
                self.actualiser_couleur()
                # Refresh the label colors in the Passwords column
                self.actualiser_couleur_pwd()
def actualiser_couleur(self):
nb_cat = len(self.cats)
for i in range(nb_cat):
self.cats[i].setColor(i, nb_cat)
def afficher_categories(self):
requete= "SELECT nom_categorie FROM categories_"+self.nom_table
tab = bdd_select(requete)
if tab:
for k in range(len(tab)):
self.ajouter_ligne_categorie(k, tab[k][0])
self.actualiser_couleur()
    def ajouter_categorie(self):
        requete = "INSERT INTO categories_"+self.nom_table+" (nom_categorie) VALUES(?)"
        bdd_insert(requete, (self.ajouter_cat.displayText(),))
        # Add the new category to every site's combobox
        for k in range(len(self.sites)):
            self.sites[k].categorie.addItem(self.ajouter_cat.displayText())
        # Add the category to the Categories scroll area
        self.ajouter_ligne_categorie(len(self.cats), self.ajouter_cat.displayText())
        print("Category added: " + str(self.ajouter_cat.displayText()))
        self.ajouter_cat.setText("")
def ajouter_ligne_categorie(self, y, nom_categorie):
requete = "SELECT site_web FROM sites_reconnus_"+self.nom_table+" WHERE categorie=?"
sites_lies= toliste(bdd_select(requete, (nom_categorie,)))
self.cats.append(Categorie(y, nom_categorie, sites_lies, self , self.nom_table))
self.verticalLayout_3.addLayout(self.cats[y].ligne)
        # Keep the layout aligned to the top
        self.verticalLayout_3.setAlignment(QtCore.Qt.AlignTop)
def check_if_exist_pwd(self):
"""
Vérifier que le pwd en question n'est pas déjà dans la base de donnée
"""
if self.ajouter_pwd.displayText() != "":
requete = "SELECT mdp FROM mdps_"+self.nom_table+" WHERE mdp=?"
pwds_table = bdd_select(requete, (self.ajouter_pwd.displayText(),))
conditions = not pwds_table or pwds_table[0][0] != self.ajouter_pwd.displayText()
if conditions:
self.ajouter_password()
def afficher_pwds(self):
requete= "SELECT mdp FROM mdps_"+self.nom_table+""
tab = bdd_select(requete)
if tab:
for k in range(len(tab)):
self.ajouter_ligne_pwd(k, tab[k][0])
    def ajouter_password(self):
        requete = "INSERT INTO mdps_"+self.nom_table+" (mdp) VALUES(?)"
        bdd_insert(requete, (self.ajouter_pwd.displayText(),))
        # Add the new password to every site's combobox
        for k in range(len(self.sites)):
            self.sites[k].mdp.addItem(self.ajouter_pwd.displayText())
        # Add the password to the Passwords scroll area
        self.ajouter_ligne_pwd(len(self.pwds), self.ajouter_pwd.displayText())
        print("Password added: " + self.ajouter_pwd.displayText())
        self.ajouter_pwd.setText("")
def ajouter_ligne_pwd(self, y, nom_pwd):
requete = "SELECT site_web FROM sites_reconnus_"+self.nom_table+" WHERE mdp=?"
sites_lies= toliste(bdd_select(requete, (nom_pwd,)))
self.pwds.append(Password(y, nom_pwd, sites_lies, self, self.nom_table))
self.verticalLayout_2.addLayout(self.pwds[y].ligne)
        # Keep the layout aligned to the top
        self.verticalLayout_2.setAlignment(QtCore.Qt.AlignTop)
    def actualiser_couleur_pwd(self):
        # Destroy the layouts inside the scroll area
        self.scrollAreaWidgetContents_pwd.deleteLater()
        # Clear the attribute
        self.pwds = []
        # Recreate an empty container widget
        self.scrollAreaWidgetContents_pwd = QtWidgets.QWidget()
        self.scrollAreaWidgetContents_pwd.setGeometry(QtCore.QRect(0, 0, 177, 767))
        self.scrollAreaWidgetContents_pwd.setObjectName("scrollAreaWidgetContents_pwd")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.scrollAreaWidgetContents_pwd)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.scrollArea_pwd.setWidget(self.scrollAreaWidgetContents_pwd)
        # Re-run the password display method
        self.afficher_pwds()
def afficher_sites(self):
requete= "SELECT site_web, identifiant, mdp, categorie FROM sites_reconnus_"+self.nom_table+""
tab = bdd_select(requete)
for k in range(len(tab)):
self.sites.append(LigneSite(k,tab[k][0], tab[k][1], tab[k][2], tab[k][3], self, self.nom_table))
self.verticalLayout.addLayout(self.sites[k].ligne)
def check_new_site(self):
requete = "SELECT site_web FROM sites_reconnus_"+self.nom_table+""
sites_web = toliste(bdd_select(requete))
if(self.lineEdit_ajout_site.text() not in sites_web and self.lineEdit_ajout_site.text() != ""):
requete = "INSERT INTO sites_reconnus_"+self.nom_table+" VALUES(?,?,?,?,?)"
valeurs =("",self.lineEdit_ajout_site.text(),"", "", "")
bdd_insert(requete, valeurs)
self.sites.append(LigneSite(len(self.sites), self.lineEdit_ajout_site.text(), "", "", "", self, self.nom_table))
self.verticalLayout.addLayout(self.sites[len(self.sites)-1].ligne)
self.lineEdit_ajout_site.setText("")
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
fenetreGestion = QtWidgets.QMainWindow()
classGestion = ClasseGestion(fenetreGestion)
fenetreGestion.show()
sys.exit(app.exec_())
|
MindPass/Code
|
Interface_graphique/PyQt/application/classeGestion.py
|
Python
|
gpl-3.0
| 24,892
|
# scarf1{background-image: url('https://rollforfantasy.com/images/clothing/nmale/scarf1.png');}
scarf = ["scarf{}.png".format(i) for i in range(1, 31)]
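# Illustrative check (values inferred from the comprehension above):
# scarf[0] == "scarf1.png" and scarf[-1] == "scarf30.png", one entry per
# clothing image referenced by URLs like the commented CSS rule above.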
|
d2emon/generator-pack
|
src/fixtures/tools/outfit/scarf.py
|
Python
|
gpl-3.0
| 152
|
# coding=utf-8
from blueman.services.meta import SerialService
from blueman.Sdp import DIALUP_NET_SVCLASS_ID
class DialupNetwork(SerialService):
__group__ = 'serial'
__svclass_id__ = DIALUP_NET_SVCLASS_ID
__icon__ = "modem"
__priority__ = 50
|
rworkman/blueman
|
blueman/services/DialupNetwork.py
|
Python
|
gpl-3.0
| 260
|
'''
Created on Sep 30, 2017
@author: fernando
'''
import unittest
from chapter1.solution_1_1 import has_all_unique_characters
from chapter1.solution_1_4 import are_anagrams
class Test(unittest.TestCase):
    def testSolution1_1(self):
        self.assertFalse(has_all_unique_characters("cafdgbc"))
        self.assertTrue(has_all_unique_characters("adfegbc"))
    def testSolution1_4(self):
        self.assertTrue(are_anagrams('ovo', 'voo'))
        self.assertFalse(are_anagrams('voo', 'vo'))
        self.assertFalse(are_anagrams('ovo', 'novo'))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testSolution1_1']
unittest.main()
|
fernandojvdasilva/algorithm-studies
|
chapter1/test_chapter1_solutions.py
|
Python
|
gpl-3.0
| 852
|
# Demonstrates how SIGINT propagates from a parent process to a child
# process started through a shell.
import sys
import signal
import time
import subprocess
WHO = None
def handler(signum, frame):
    global WHO
    print('Signal handler', signum, WHO, frame)
    # Restore the default SIGINT behaviour after the first signal.
    print('Disable handler', signum, WHO, frame)
    signal.signal(signal.SIGINT, signal.SIG_DFL)
def main(argv):
global WHO
WHO = argv[1]
if WHO == 'parent':
signal.signal(signal.SIGINT, handler)
p = subprocess.Popen('python3 signal_propagation.py child',
shell=True)
for index in range(0, 10):
time.sleep(1)
print('Sleep', index, WHO)
if WHO == 'parent':
p.send_signal(signal.SIGINT)
p.communicate()
else:
while True:
time.sleep(1)
print('Sleep 1 infinity')
if __name__ == '__main__':
main(sys.argv)
|
munhyunsu/Hobby
|
Signal/signal_propagation_shell.py
|
Python
|
gpl-3.0
| 819
|
import unittest
from pyrogi import Screen, Backend
from pyrogi.util import Vec2
class TestScreen(Screen):
pass
class TestBackend(unittest.TestCase):
def test_screens(self):
backend = Backend(Vec2(0, 0), Vec2(0, 0), '')
self.assertEqual(len(backend.screens), 0)
backend.set_screen(TestScreen())
self.assertEqual(len(backend.screens), 1)
backend.set_screen(TestScreen())
self.assertEqual(len(backend.screens), 2)
backend.go_back_n_screens(1)
self.assertEqual(len(backend.screens), 1)
backend.set_screen(TestScreen())
self.assertEqual(len(backend.screens), 2)
backend.go_back_n_screens(2)
self.assertEqual(len(backend.screens), 0)
|
BenWeedon/pyrogi
|
pyrogi/test_core.py
|
Python
|
gpl-3.0
| 739
|
# -*- coding: utf-8 -*-
#
# Copyright 2011 Liftoff Software Corporation
#
__doc__ = """\
playback.py - A plugin for Gate One that adds support for saving and playing
back session recordings.
.. note:: Yes this only contains one function and it is exposed to clients through a WebSocket hook.
Hooks
-----
This Python plugin file implements the following hooks::
hooks = {
'WebSocket': {
'playback_save_recording': save_recording,
}
}
Docstrings
----------
"""
# Meta
__version__ = '1.0'
__license__ = "GNU AGPLv3 or Proprietary (see LICENSE.txt)"
__version_info__ = (1, 0)
__author__ = 'Dan McDougall <daniel.mcdougall@liftoffsoftware.com>'
# Python stdlib
import os
from applications.locale import get_translation
from applications.utils import render_string
import io
_ = get_translation()
# Globals
PLUGIN_PATH = os.path.split(__file__)[0]
def get_256_colors(self):
"""
Returns the rendered 256-color CSS.
"""
colors_256_path = self.render_256_colors()
mtime = os.stat(colors_256_path).st_mtime
cached_filename = "%s:%s" % (colors_256_path.replace('/', '_'), mtime)
cache_dir = self.ws.settings['cache_dir']
cached_file_path = os.path.join(cache_dir, cached_filename)
if os.path.exists(cached_file_path):
with open(cached_file_path) as f:
colors_256 = f.read()
else:
        # No cached copy for this mtime yet (e.g. when running in debug
        # mode); fall back to the pre-rendered 256_colors.css.
with open(os.path.join(cache_dir, '256_colors.css')) as f:
colors_256 = f.read()
return colors_256
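# Illustrative note on the caching scheme above (path and mtime are
# hypothetical): for colors_256_path '/opt/gateone/templates/256_colors.css'
# with mtime 1328731342.0, cached_filename becomes
# '_opt_gateone_templates_256_colors.css:1328731342.0', so the cache entry is
# automatically bypassed whenever the source CSS file changes.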
def save_recording(self, settings):
"""
Handles uploads of session recordings and returns them to the client in a
self-contained HTML file that will auto-start playback.
    .. note:: The real crux of the code that handles this is in the template.
"""
#import tornado.template
from datetime import datetime
    now = datetime.now().strftime('%Y%m%d%H%M%S') # e.g. '20120208200222'
out_dict = {
'result': 'Success',
'filename': 'GateOne_recording-%s.html' % now,
'data': None,
'mimetype': 'text/html'
}
recording = settings["recording"]
container = settings["container"]
prefix = settings["prefix"]
theme_css = settings['theme_css']
colors_css = settings['colors_css']
colors_256 = get_256_colors(self)
templates_path = os.path.join(PLUGIN_PATH, "templates")
recording_template_path = os.path.join(
templates_path, "self_contained_recording.html")
#with open(recording_template_path) as f:
#recording_template_data = f.read()
extra_theme_path = os.path.join(templates_path,'themes/black.css')
with io.open(extra_theme_path, mode='r',encoding='UTF-8') as f:
extra_theme = f.read()
    rendered_recording = render_string(
        recording_template_path,
        recording=recording,
        container=container,
        prefix=prefix,
        theme=theme_css,
        colors=colors_css,
        colors_256=colors_256,
        extra_theme=extra_theme)  # extra_theme added to fix a theming bug
#recording_template = tornado.template.Template(recording_template_data)
#rendered_recording = recording_template.generate(
#recording=recording,
#container=container,
#prefix=prefix,
#theme=theme_css,
#colors=colors_css,
#colors_256=colors_256
#)
out_dict['data'] = rendered_recording
message = {'go:save_file': out_dict}
self.write_message(message)
hooks = {
'WebSocket': {
'terminal:playback_save_recording': save_recording,
}
}
|
jimmy201602/django-gateone
|
applications/plugins/playback/playback.py
|
Python
|
gpl-3.0
| 3,927
|
# -*- coding: utf-8 -*-
# reference: http://www.tutorialspoint.com/python/python_lists.htm
lista_de_asignaturas_teoricas = ['matematicas', 'lengua castellana']
# the len() function returns the number of elements in a list
print "number of elements in the list lista_de_asignaturas_teoricas"
print len(lista_de_asignaturas_teoricas)
lista_de_numeros = [1, 2, 3, 4, 2, 3, 7, 8]
# find the element with the largest value in a list of numbers
print "element with the largest value"
print max(lista_de_numeros)
# 8
# find the element with the smallest value in a list of numbers
print "element with the smallest value"
print min(lista_de_numeros)
# 1
|
pyladiesmedellin/lessons
|
lesson_4/2_6_lists_functions.py
|
Python
|
gpl-3.0
| 664
|
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 28 16:52:18 2016 by emin
"""
import os
import sys
import theano
import theano.tensor as T
import numpy as np
from lasagne.layers import InputLayer, ReshapeLayer, DenseLayer
from generators import KalmanFilteringTaskFFWD
import lasagne.layers
import lasagne.nonlinearities
import lasagne.updates
import lasagne.objectives
import lasagne.init
import scipy.io as sio
os.chdir(os.path.dirname(sys.argv[0]))
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
job_idx = int(os.getenv('PBS_ARRAYID'))
np.random.seed(job_idx)
# Hyperparameter grid: 14 logarithmically spaced values between 10**0.5 and
# 10**2.5 for both the number of input units and the number of hidden units;
# each PBS array job picks one (n_in, n_hid) pair from the flattened grid.
nnn = np.ceil(np.logspace(.5, 2.5, 14))
nhu_vec, nin_vec = np.meshgrid(nnn, nnn)
nhu_vec = nhu_vec.flatten()
nin_vec = nin_vec.flatten()
n_in = int(nin_vec[job_idx-1])
n_hid = int(nhu_vec[job_idx-1])
def model(input_var, batch_size=10, n_in=50, n_out=1, n_hid=200, ei_ratio=0.8):
# Input Layer
l_in = InputLayer((batch_size, None, n_in), input_var=input_var)
_, seqlen, _ = l_in.input_var.shape
# Recurrent EI Net
l_in_hid = DenseLayer(lasagne.layers.InputLayer((None, n_in)), n_hid, b=None, nonlinearity=lasagne.nonlinearities.linear)
l_hid_hid = DenseLayer(lasagne.layers.InputLayer((None, n_hid)), n_hid, nonlinearity=lasagne.nonlinearities.linear)
l_rec = lasagne.layers.CustomRecurrentLayer(l_in, l_in_hid, l_hid_hid, nonlinearity=lasagne.nonlinearities.rectify)
# Output Layer
l_shp = ReshapeLayer(l_rec, (-1, n_hid))
l_dense = DenseLayer(l_shp, num_units=n_out, nonlinearity=lasagne.nonlinearities.linear)
# To reshape back to our original shape, we can use the symbolic shape variables we retrieved above.
l_out = ReshapeLayer(l_dense, (batch_size, seqlen, n_out))
return l_out, l_rec
if __name__ == '__main__':
# Define the input and expected output variable
input_var, target_var = T.tensor3s('input', 'target')
# The generator to sample examples from
tr_cond = 'all_gains'
test_cond = 'all_gains'
generator = KalmanFilteringTaskFFWD(max_iter=50001, batch_size=10, n_in=n_in, n_out=1, stim_dur=25, sigtc_sq=4.0, signu_sq=1.0, gamma=0.1, tr_cond=tr_cond)
# The model
l_out, l_rec = model(input_var, batch_size=generator.batch_size, n_in=generator.n_in, n_out=generator.n_out, n_hid=n_hid)
# The generated output variable and the loss function
# all_layers = lasagne.layers.get_all_layers(l_out)
# l2_penalty = lasagne.regularization.regularize_layer_params(all_layers, lasagne.regularization.l2) * 1e-6
pred_var = lasagne.layers.get_output(l_out)
loss = T.mean(lasagne.objectives.squared_error(pred_var[:,:,-1], target_var[:,:,-1])) # + l2_penalty
# Create the update expressions
params = lasagne.layers.get_all_params(l_out, trainable=True)
updates = lasagne.updates.adam(loss, params, learning_rate=0.001)
# Compile the function for a training step, as well as the prediction function and a utility function to get the inner details of the RNN
train_fn = theano.function([input_var, target_var], loss, updates=updates, allow_input_downcast=True)
pred_fn = theano.function([input_var], pred_var, allow_input_downcast=True)
rec_layer_fn = theano.function([input_var], lasagne.layers.get_output(l_rec, get_details=True), allow_input_downcast=True)
# If want to continue training an old model, uncomment below
# npzfile_lout = np.load('kf_lout_trained_model.npz')
# npzfile_lrec = np.load('kf_lrec_trained_model.npz')
# lasagne.layers.set_all_param_values(l_out,[npzfile_lout['arr_0'],npzfile_lout['arr_1'],npzfile_lout['arr_2'],npzfile_lout['arr_3'],npzfile_lout['arr_4'],npzfile_lout['arr_5']])
# lasagne.layers.set_all_param_values(l_rec,[npzfile_lout['arr_0'],npzfile_lout['arr_1'],npzfile_lout['arr_2'],npzfile_lout['arr_3'],npzfile_lout['arr_4']])
# TRAINING
success = 0.0
s_vec, opt_s_vec, ex_pred_vec, frac_rmse_vec = [], [], [], []
for i, (example_input, example_output, opt_s) in generator:
score = train_fn(example_input, example_output)
example_prediction = pred_fn(example_input)
s_vec.append(example_output[:,:,-1])
opt_s_vec.append(opt_s[:,:,-1])
ex_pred_vec.append(example_prediction[:,:,-1])
if i % 500 == 0:
rmse_opt = np.sqrt(np.nanmean((np.asarray(s_vec) - np.asarray(opt_s_vec))**2))
rmse_net = np.sqrt(np.nanmean((np.asarray(s_vec) - np.squeeze(np.asarray(ex_pred_vec)))**2))
frac_rmse = (rmse_net - rmse_opt) / rmse_opt
frac_rmse_vec.append(frac_rmse)
print 'Batch #%d; Frac. RMSE: %.6f; Opt. RMSE: %.6f; Net. RMSE: %.6f' % (i, frac_rmse, rmse_opt, rmse_net)
if frac_rmse < 0.1:
success = 1.0
                break
s_vec = []
opt_s_vec = []
ex_pred_vec = []
# SAVE TRAINED MODEL
sio.savemat('kf_nin%i_nhu%i_jobidx%i.mat'%(n_in,n_hid,job_idx), {'frac_rmse':frac_rmse, 'frac_rmse_vec':np.asarray(frac_rmse_vec), 'success':success } )
|
eminorhan/inevitable-probability
|
nin_nhu/ninnhu_kalman_filtering_expt.py
|
Python
|
gpl-3.0
| 5,329
|
"""
Module defining the Spline class, something easy to wrap around SciPy splines.
Includes BOK algorithms (Mollinari et al)
Some rules of splrep (k = 3)
- do not put more then 2 knots between data points.
- splrep wants inner knots only, do not give extremal knots, even only "once".
"""
import numpy as np
import sys
import pycs.gen.util
import copy as pythoncopy
import matplotlib.pyplot as plt
import scipy.optimize as spopt
import scipy.interpolate as si
class DataPoints():
"""
An ultralight version of a lightcurve, made for fast computations.
Can be "merged" from a list of lightcurves, see factory function below.
A Spline object has such a DataPoints object as attribute.
ATTENTION
Datapoints are expected to be ALWAYS SORTED BY JDS, and no two datapoints have the same jd !
See the splitup option of the constructor.
Note that this is not the case for lightcurves ! Hence the existence of datapoints.
Should be enforced in every function that builds datapoints.
ABOUT STAB POINTS
With scipy splines, we always get the last knots at the extrema of data points.
So to get knots "outside" of the real datapoints, we have to insert fake points.
And while we are at it, these fake points can also be used to stabilize the spline in
gaps.
The mask is used to differentiate between actual data points and "stabilization points"
that are inserted to make the spline behave well at the extrema and in season gaps.
    It is modified by the two methods addgappts and addextpts.
    The info about stab points is written into the object,
    so that they can be reconstructed from any new jds and mags.
"""
def __init__(self, jds, mags, magerrs, splitup=True, deltat=0.000001, sort=True, stab=False,
stabext=300.0, stabgap = 30.0, stabstep = 5.0, stabmagerr = -2.0, stabrampsize = 0, stabrampfact = 1.0):
"""
Constructor
Always leave splitup and sort on True ! Only if you know that you are already
sorted you can skip them.
You cannot specify a mask, I do this myself. (could be done in principle).
stab : do you want stabilization points ?
Don't forget to run splitup, sort, and addstab again if you change the data !
"""
self.jds = jds
self.mags = mags
self.magerrs = magerrs
self.stab = stab
self.stabext = stabext
self.stabgap = stabgap
self.stabstep = stabstep
self.stabmagerr = stabmagerr
self.stabrampsize = stabrampsize
self.stabrampfact = stabrampfact
self.mask = np.ones(len(self.jds), dtype=np.bool) # an array of True
self.deltat = deltat
if splitup:
self.splitup()
elif sort: # If we do the splitup, we sort anyway.
self.sort()
self.putstab()
# def update(self, jds, mags, magerrs):
# """
# NOT NEEDED ANYMORE, JUST CALL MERGE AND GIVE AN OLDDP. SAFER.
#
# Give me some new datapoints (no stabs) (already splitup and sorted, by definition), I'll update myself.
# In fact everything might move !
# """
# if newdatapoints.stab = True:
# raise RuntimeError("Give me points without stab !")
# self.jds = newdatapoints.jds
# self.mags = newdatapoints.mags
# self.magerrs = newdatapoints.magerrs
# self.mask = np.ones(len(self.jds), dtype=np.bool)
# self.addstab() # runs only if stab = True
def splitup(self):
"""
        We add a tiny random offset (deltat) to each jd, so that no two points share exactly the same jd.
Note that this might change the order of the jds,
but only of very close ones, so one day it would be ok to leave the mags as they are.
"""
self.jds += self.deltat * np.random.randn(len(self.jds))
self.sort()
def sort(self):
"""
Absolutely mandatory, called in the constructor.
"""
sortedindices = np.argsort(self.jds)
self.jds = self.jds[sortedindices]
self.mags = self.mags[sortedindices]
self.magerrs = self.magerrs[sortedindices]
self.mask = self.mask[sortedindices]
self.validate()
def validate(self):
"""
We check that the datapoint jds are increasing strictly :
"""
first = self.jds[:-1]
second = self.jds[1:]
if not np.alltrue(np.less(first,second)): # Not less_equal ! Strictly increasing !
raise RuntimeError, "These datapoints don't have strcitly increasing jds !"
def rmstab(self):
"""
Deletes all stabilization points
"""
self.jds = self.jds[self.mask]
self.mags = self.mags[self.mask]
self.magerrs = self.magerrs[self.mask]
self.mask = np.ones(len(self.jds), dtype=np.bool)
def putstab(self):
"""
Runs only if stab is True.
I will :
add datapoints (new jds, new mags, new magerrs)
modify the mask = False for all those new datapoints.
"""
if self.stab == True:
# We start by deleting any previous stab stuff :
self.rmstab()
self.addgappts()
self.addextpts()
else:
pass
def calcstabmagerr(self):
"""
Computes the mag err of the stabilisation points.
"""
if self.stabmagerr >= 0.0:
return self.stabmagerr
else:
return - self.stabmagerr * np.median(self.magerrs)
def addgappts(self):
"""
We add stabilization points with low weights into the season gaps
to avoid those big excursions of the splines.
This is done by a linear interpolation across the gaps.
"""
absstabmagerr = self.calcstabmagerr()
gaps = self.jds[1:] - self.jds[:-1] # has a length of len(self.jds) - 1
gapindices = np.arange(len(self.jds) - 1)[gaps > self.stabgap] # indices of those gaps that are larger than stabgap
for n in range(len(gapindices)):
i = gapindices[n]
a = self.jds[i]
b = self.jds[i+1]
newgapjds = np.linspace(a, b, float(b-a)/float(self.stabstep))[1:-1]
newgapindices = i + 1 + np.zeros(len(newgapjds))
newgapmags = np.interp(newgapjds, [a, b], [self.mags[i], self.mags[i+1]])
newgapmagerrs = absstabmagerr * np.ones(newgapmags.shape)
newgapmask = np.zeros(len(newgapjds), dtype=np.bool)
self.jds = np.insert(self.jds, newgapindices, newgapjds)
self.mags = np.insert(self.mags, newgapindices, newgapmags)
self.magerrs = np.insert(self.magerrs, newgapindices, newgapmagerrs)
self.mask = np.insert(self.mask, newgapindices, newgapmask)
gapindices += newgapjds.size # yes, as we inserted some points the indices change.
# If you change this structure, be sure to check SplineML.settargetmags as well !
self.validate()
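    # Illustrative note on addgappts (hypothetical numbers): with
    # stabgap = 30 and stabstep = 5, a 40-day gap between jds 100 and 140
    # gets filled with stab points at roughly 105, 110, ..., 135, whose mags
    # lie on the straight line joining the two real points and whose magerrs
    # are large (i.e. low weight), keeping the spline from oscillating
    # freely inside season gaps.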
def addextpts(self):
"""
We add stabilization points at both extrema of the lightcurves
This is done by "repeating" the extremal points, and a ramp in the magerrs
"""
absstabmagerr = self.calcstabmagerr()
extjds = np.arange(self.jds[0], self.jds[0] - self.stabext, -1*self.stabstep)[::-1][:-1]
extmags = self.mags[0] * np.ones(extjds.shape)
extmagerrs = absstabmagerr * np.ones(extjds.shape)
for i in range(1, self.stabrampsize+1):
extmagerrs[-i] += (self.stabrampsize +1 -i) * absstabmagerr * self.stabrampfact
extindices = np.zeros(extjds.shape)
mask = np.zeros(len(extjds), dtype=np.bool)
self.jds = np.insert(self.jds, extindices, extjds)
self.mags = np.insert(self.mags, extindices, extmags)
self.magerrs = np.insert(self.magerrs, extindices, extmagerrs)
self.mask = np.insert(self.mask, extindices, mask)
# And the same at the other end :
extjds = np.arange(self.jds[-1], self.jds[-1] + self.stabext, self.stabstep)[1:]
extmags = self.mags[-1] * np.ones(extjds.shape)
extmagerrs = absstabmagerr * np.ones(extjds.shape)
for i in range(0, self.stabrampsize):
extmagerrs[i] += (self.stabrampsize -i) * absstabmagerr * self.stabrampfact
extindices = len(self.jds) + np.zeros(extjds.shape)
mask = np.zeros(len(extjds), dtype=np.bool)
self.jds = np.insert(self.jds, extindices, extjds)
self.mags = np.insert(self.mags, extindices, extmags)
self.magerrs = np.insert(self.magerrs, extindices, extmagerrs)
self.mask = np.insert(self.mask, extindices, mask)
self.validate()
def getmaskbounds(self):
"""
Returns the upper and lower bounds of the regions containing stabilization points.
This is used when placing knots, so to put fewer knots in these regions.
Crazy stuff...
"""
maskindices = np.where(self.mask == False)[0]
#print maskindices
if len(maskindices) < 3:
print "Hmm, not much masked here ..."
return (np.array([]), np.array([]))
else:
lcuts = maskindices[np.where(maskindices[1:] - maskindices[:-1] > 1)[0] + 1]
lcuts = np.insert(lcuts, 0, maskindices[0])
ucuts = maskindices[np.where(maskindices[1:] - maskindices[:-1] > 1)[0]]
ucuts = np.insert(ucuts, len(ucuts), maskindices[-1])
return (lcuts, ucuts)
def ntrue(self):
"""
Returns the number of real datapoints (skipping stabilization points)
"""
return np.sum(self.mask)
def merge(lcs, olddp=None, splitup=True, deltat=0.000001, sort=True, stab=False,
stabext=300.0, stabgap = 30.0, stabstep = 5.0, stabmagerr = 2.0, stabrampsize = 0, stabrampfact = 1.0):
"""
Factory function for DataPoints objects, starting from lightcurves.
Takes a list of lightcurves and quickly concatenate the jds, mags, and magerrs.
Instead of specifying all the stab point parameters, you can give me an old datapoints object,
and I will reuse its settings... This is useful if you want to "update" the data points.
If overlap is True, I will keep only points that are "covered" by all four lightcurves !
This is useful when you want to build a first source spline, and your microlensing is messy at the borders.
NOT YET IMPLEMENTED ...
"""
jds = np.concatenate([l.getjds() for l in lcs])
mags = np.concatenate([l.getmags() for l in lcs])
magerrs = np.concatenate([l.getmagerrs() for l in lcs])
if olddp == None:
return DataPoints(jds, mags, magerrs, splitup=splitup, deltat=deltat, sort=sort,
stab=stab, stabext=stabext, stabgap=stabgap, stabstep=stabstep, stabmagerr=stabmagerr,
stabrampsize=stabrampsize, stabrampfact=stabrampfact)
else:
return DataPoints(jds, mags, magerrs, splitup=splitup, sort=sort,
deltat=olddp.deltat,
stab=olddp.stab, stabext=olddp.stabext, stabgap=olddp.stabgap, stabstep=olddp.stabstep, stabmagerr=olddp.stabmagerr,
stabrampsize=olddp.stabrampsize, stabrampfact=olddp.stabrampfact)
class Spline():
"""
A class to represent a spline, that is essentially a set of knots and coefficients.
As finding knots and coefficients requires access to some data points, these are included
in the form of a DataPoints object.
    About knots :
Spline.t are all the knots, including extremas with multiplicity.
But splrep wants internal knots only ! By internal we mean : not even the data extremas !
Spline.getintt() returns only these internal knots.
"""
def __init__(self, datapoints, t = None, c = None, k = 3, bokeps = 2.0, boktests = 5, bokwindow = None, plotcolour="black"):
"""
t : all the knots (not only internal ones !)
c : corresponding coeffs
        k : degree : default is cubic splines, k = 3 (i.e. "order 4").
            k = 3 means that the spline can be differentiated twice at the knots.
"""
#self.origdatapoints = datapoints
self.datapoints = datapoints
# At this point we know that your datapoint jds are monotonously increasing. This is tested
# by validate() of datapoints.
self.t = t # the array of knots
self.c = c # the coeffs
self.k = k
self.bokeps = bokeps
self.boktests = boktests
self.bokwindow = bokwindow
self.knottype = "none"
self.plotcolour = plotcolour
self.showknots = True
# Bounds, for BOK
self.lims = None
self.l = None
self.u = None
# We want to keep trace of the r2 of a spline.
self.lastr2nostab = 0.0 # without stab points (the real thing)
self.lastr2stab = 0.0 # with stab points (usually not so interesting)
# If you did not give me a t&c, I'll make some default ones for you :
try:
if (self.t is None):
self.uniknots(2) # This also puts self.c to 0s
except:
if (len(self.t) == 0):
self.uniknots(2) # This also puts self.c to 0s
def __str__(self):
"""
Returns a string with:
* degree
* knot placement
* number of intervals
"""
#return "Spline of degree %i, %i knots (%i inner knots), and %i intervals." % (self.k, len(self.t), len(self.getintt()), self.getnint())
if len(self.knottype) > 6: # That's a string
knottext = "%il%ib" % (self.knottype.count("l"), self.knottype.count("b"))
else:
knottext = self.knottype
return "~%i/%s/%i~" % (self.k, knottext, self.getnint())
def copy(self):
"""
Returns a "deep copy" of the spline.
"""
return pythoncopy.deepcopy(self)
def shifttime(self, timeshift):
"""
Hard-shifts your spline along the time axis.
By "hard-shift", I mean that unlike for a lightcurve, the spline will not know that it was shifted !
It's up to you to be sure that you want to move it.
We shift both the datapoints and the knots.
"""
self.t += timeshift
self.datapoints.jds += timeshift
def shiftmag(self, magshift):
"""
Hard-shifts your spline along the mag axis.
By "hard-shift", I mean that unlike for a lightcurve, the spline will not know that it was shifted !
It's up to you to be sure that you want to move it.
We shift both the datapoints and the knots.
"""
self.c += magshift
self.datapoints.mags += magshift
def updatedp(self, newdatapoints, dpmethod="stretch"):
"""
Replaces the datapoints of the spline, and makes sure that the knots
stay compatible.
If you tweaked your datapoints, I will have to tweak my knots to make sure
that my external knots fit. Hence this method !
Due to the splitup, this is needed even if you just tweaked the mags !
And anyway in this case I have to rebuild the stab points.
        .. warning :: IT'S UP TO YOU TO CHECK THAT YOU DON'T REPLACE DATAPOINTS WITH DIFFERENT STAB SETTINGS
        Anyway it would work, it would just look ugly !
        Replaces the datapoints (jds, mags, and magerrs) touching the knots and coeffs as little as possible.
        Note that we also have to deal with stab points here !
        This is made for instance for time shifts that only very slightly change the datapoints, and you don't want to
        optimize the knots all the time from scratch again.
        The current knots are "stretched" (keeping their relative spacings) across the new datapoints.
Options for "dpmethod" :
- "stretch" : changes all the knots
- "extadj" : does not touch the internal knots, but adjusts the external ones only, to
fit the new datapoints. Probably the method to use when optimizing time shifts.
- "leave" : does not touch the knots -> ok to evaluate the spline,
but you will not be able to fit it anymore, as the external knots don't correspond to datapoints.
.. todo:: In principle, why don't we just update the real datapoints here, and leave the stab as
they are ?
"""
if dpmethod == "stretch":
oldmin = self.datapoints.jds[0] # This includes potential stab points
oldmax = self.datapoints.jds[-1]
newmin = newdatapoints.jds[0] # Idem
newmax = newdatapoints.jds[-1]
oldknots = self.getinttex()
#print oldknots
# we will stretch the oldknots by a factor a :
a = (newmax - newmin)/(oldmax - oldmin)
newknots = newmin + a*(oldknots-oldmin)
# We set the new datapoints:
self.datapoints = newdatapoints
self.setinttex(newknots)
elif dpmethod == "extadj" :
intknots = self.getintt()
self.datapoints = newdatapoints
# Ok, now the newdatapoints might be narrower or wider than the knots, we have to deal with this.
            # If they are wider, it's easy : setintt will move the external knot onto the external datapoint.
# If they are narrower, it's trickier : we have to remove some extra knots, so to really just keep the "internal" ones.
# to feed into setintt.
#if True: # works as well, but maybe faster to test first :
if (self.datapoints.jds[0] >= intknots[0]) or (self.datapoints.jds[-1] <= intknots[-1]):
keepmask = np.ones(intknots.shape, dtype=np.bool)
for i in range(len(intknots)): # Starting from the left ...
if intknots[i] <= self.datapoints.jds[0]:
keepmask[i] = False
else:
break
for i in range(len(intknots))[::-1]: # And now the right ...
if intknots[i] >= self.datapoints.jds[-1]:
keepmask[i] = False
else:
break
#nkick = np.sum(keepmask == False)
#if nkick != 0:
# print "I'll kick %i knots !" % (nkick)
# And finally, we apply the mask .
intknots = intknots[keepmask]
self.setintt(intknots) # This automatically adjusts the extremal knots.
elif dpmethod == "leave" :
knots = self.getinttex()
self.datapoints = newdatapoints
# We quickly check the boundaries
if ( knots[0] >= self.datapoints.jds[0] ) or ( knots[-1] <= self.datapoints.jds[-1] ):
raise RuntimeError("Your newdatapoints are to wide for the current knots !")
else:
raise RuntimeError("Don't know this updatedp method !")
# We reset any bounds just to be sure.
self.lims = None
self.l = None
self.u = None
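    # Illustrative note on the "stretch" method (hypothetical numbers): with
    # old datapoints spanning jds 0..20, knots at [0, 10, 20], and new
    # datapoints spanning jds 0..40, the stretch factor is a = 2 and the
    # knots move to [0, 20, 40], preserving their relative spacings.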
def uniknots(self, nint, n=True):
"""
Uniform distribution of internal knots across the datapoints (including any stab points).
We don't make a difference between stab and real points.
:param nint: The number of intervals, or the step
:param n:
If True, nint is the number of intervals (== piecewise polynoms) you want.
If False : nint is a step in days you want between the knots (approximately).
:type n: boolean
.. note:: I also put all coeffs back to 0.0 !
"""
#intt = np.linspace(self.datapoints.jds[0], self.datapoints.jds[-1], step+1)[1:-1] # we remove the extremas
a = self.datapoints.jds[0]
b = self.datapoints.jds[-1]
if n:
intt = np.linspace(a, b, nint + 1)[1:-1]
else:
intt = np.linspace(a, b, float(b-a)/float(nint))[1:-1]
if len(intt) == 0:
raise RuntimeError("I am uniknots, and I have only 0 (zero) internal knots ! Increase this number !")
self.setintt(intt)
self.knottype = "u"
# Important : we put some 0 coeffs to go with the new knots
self.resetc()
def resetc(self):
"""
Sets all coeffs to 0.0 -- if you want to start again your fit, keeping the knot positions.
"""
self.c = np.zeros(len(self.t))
def reset(self):
"""
Calls uniknots, i.e. resets both coeffs and knot positions, keeping the same number of knots.
"""
self.uniknots(self.getnint() ,n=True)
def buildbounds(self, verbose = True):
"""
Build bounds for bok.
By default I will make those bounds as wide as possible, still respecting epsilon.
The parameter epsilon is the minimum distance two knots can have.
If you give me a window size, I will not make the bounds as wide as possible, but only put them
0.5*window days around the current knots (still respecting all this epsilon stuff of course).
I look where your current knots are, and for each knots I build the bounds so that
epsilon distance is respected between adjacent upper and lower bounds.
But, there might already be knots only epsilon apart.
So I'm a bit tricky, not so straightforward as my predecessors.
Knots at the extrema are not allowed to move.
Requires existing knots, puts lims in between them, and builds the bounds.
        @todo: Optimize me using numpy ! This is experimental code for now.
"""
if verbose:
print "Building BOK bounds (bokeps = %.3f, bokwindow = %s) ..." % (self.bokeps, self.bokwindow)
knots = self.getinttex() # Including extremal knots (once).
n = len(knots)
# We start by checking the knot spacing
knotspacings = knots[1:] - knots[:-1]
if not np.alltrue(knotspacings > 0.0):
raise RuntimeError("Ouch, your knots are not sorted !")
minspace = np.min(knotspacings)
if verbose :
print "Minimal knot spacing : %.3f" % (minspace)
if minspace < self.bokeps - 0.00001: # Rounding errors, we decrease epsilon a bit...
# If this does still happens, then it was not just a rounding error ...
# Yes it still happens, due to updatedp stretch ...
raise RuntimeError("Knot spacing min = %f, epsilon = %f" % (minspace, self.bokeps))
# Loop through the knots.
lowers = [knots[0]] # First knot is not allowed to move
uppers = [knots[0]]
for i in range(1, n-1): # Internal knots
tk = knots[i] # this knot
pk = knots[i-1] # previous knot
nk = knots[i+1] # next knot
# First we build the wide bounds :
guessl = 0.5*(pk + tk) + 0.5*self.bokeps
if guessl >= tk:
guessl = tk
guessu = 0.5*(nk + tk) - 0.5*self.bokeps
if guessu <= tk:
guessu = tk
            # Now we see if the user wants a narrower window within those bounds :
if self.bokwindow != None:
if tk - 0.5*self.bokwindow >= guessl:
guessl = tk - 0.5*self.bokwindow
if tk + 0.5*self.bokwindow <= guessu:
guessu = tk + 0.5*self.bokwindow
lowers.append(guessl)
uppers.append(guessu)
# And now this last knot, doesn't move, like the first one:
lowers.append(knots[-1])
uppers.append(knots[-1])
self.l = np.array(lowers)
self.u = np.array(uppers)
self.knottype += "l"
if verbose:
print "Buildbounds done."
def bok(self, bokmethod="BF", verbose=True, trace=False):
"""
We optimize the positions of knots by some various techniques.
We use fixed bounds for the exploration, run buildbounds (with low epsilon) first.
This means that I will not move my bounds.
For each knot, i will try ntestpos linearly spaced positions within its bounds.
In this version, the bounds are included : I might put a knot on a bound !
The way the bounds are placed by buildbounds ensures that in any case the minimal
distance of epsilon is respected.
        Using this scheme, it is now possible to iteratively call bok and buildbounds in a loop
        and still respect epsilon at any time.
        bokmethods :
            - MCBF : Monte Carlo brute force with ntestpos trial positions for each knot
            - BF : brute force, deterministic. Call me twice for better results.
            - fminind : fminbound on one knot after the other.
            - fmin : global optimization of all internal knots at once (fmin_l_bfgs_b).
        Exit is automatic, if the result does not improve anymore...
"""
intknots = self.getintt() # only internal, the ones we will move
nintknots = len(intknots)
weights = 1.0/self.datapoints.magerrs
def score(intknots, index, value):
modifknots = intknots.copy()
modifknots[index] = value
return si.splrep(self.datapoints.jds, self.datapoints.mags, w=weights, xb=None, xe=None, k=self.k, task=-1, s=None, t=modifknots, full_output=1, per=0, quiet=1)[1]
iniscore = score(intknots, 0, intknots[0])
lastchange = 1
lastscore = iniscore
iterations = 0
if verbose:
print "Starting BOK-%s on %i intknots (boktests = %i)" % (bokmethod, nintknots, self.boktests)
if bokmethod == "MCBF":
while True:
if lastchange >= 2*nintknots: # somewhat arbitrary, but why not.
break
i = np.random.randint(0, nintknots) # (inclusive, exclusive)
testknots = np.linspace(self.l[i+1], self.u[i+1], self.boktests)
# +1, as u and l include extremal knots...
# So we include the extremas in our range to test.
testscores = np.array([score(intknots, i, testknot) for testknot in testknots])
bestone = np.argmin(testscores)
bestscore = testscores[bestone]
if bestscore < lastscore:
lastchange = 0
intknots[i] = testknots[bestone] # WE UPDATE the intknots array !
lastscore = bestscore
lastchange += 1
iterations += 1
if trace:
self.optc()
pycs.gen.util.trace([], [self])
if bokmethod == "BF":
intknotindices = range(nintknots) # We could potentially change the order, just to see if that makes sense.
# No, it doesn't really help
#mid = int(len(intknotindices)/2.0)
#intknotindices = np.concatenate([intknotindices[mid:], intknotindices[:mid][::-1]])
for i in intknotindices:
testknots = np.linspace(self.l[i+1], self.u[i+1], self.boktests)
# +1, as u and l include extremal knots...
# So we include the extremas in our range to test.
testscores = np.array([score(intknots, i, testknot) for testknot in testknots])
bestone = np.argmin(testscores)
bestscore = testscores[bestone]
intknots[i] = testknots[bestone] # WE UPDATE the intknots array !
iterations += 1
if trace:
self.optc()
pycs.gen.util.trace([], [self])
if bokmethod == "fminind":
intknotindices = range(nintknots)
for i in intknotindices:
def target(value):
return score(intknots, i, value)
#inival = intknots[i]
#bounds = (self.l[i+1], self.u[i+1])
out = spopt.fminbound(target, self.l[i+1], self.u[i+1], xtol=0.01, maxfun=100, full_output=1, disp=1)
#print out
optval = out[0]
bestscore = out[1]
intknots[i] = optval # WE UPDATE the intknots array !
iterations += 1
if trace:
self.optc()
pycs.gen.util.trace([], [self])
if bokmethod == "fmin":
def target(modifknots):
#iterations += 1
#if trace:
# self.optc()
# pycs.gen.util.trace([], [self])
return si.splrep(self.datapoints.jds, self.datapoints.mags, w=weights, xb=None, xe=None, k=self.k, task=-1, s=None, t=modifknots, full_output=1, per=0, quiet=1)[1]
bounds = [(a, b) for (a, b) in zip(self.l[1:-1], self.u[1:-1])]
out = spopt.fmin_l_bfgs_b(target, intknots, approx_grad=True, bounds=bounds, m=10, factr=1e7, pgtol=1.e-05, epsilon=1e-04, iprint=-1, maxfun=15000)
#out = spopt.fminbound(target, self.l[1:-1], self.u[1:-1], xtol=0.01, maxfun=1000, full_output=1, disp=3)
#print out
intknots = out[0]
bestscore = out[1]
# relative improvement :
relimp = (iniscore - bestscore)/iniscore
self.knottype += "b"
self.setintt(intknots)
#pycs.gen.lc.display([],[self])
#self.display()
self.optc() # Yes, not yet done !
finalr2 = self.r2(nostab=True)
if verbose:
print "r2 = %f (without stab poins)" % finalr2
print "Done in %i iterations, relative improvement = %f" % (iterations, relimp)
# We count all datapoints here, as score returns the full chi2 including stab pts.
return finalr2
# Some stuff about knots :
def getintt(self):
"""
Returns the internal knots (i.e., not even the datapoints extrema)
This is what you need to feed into splrep !
There are nint - 1 such knots
"""
return self.t[(self.k+1):-(self.k+1)].copy() # We cut the outer knots.
def getinttex(self):
"""
Same as above, but we include the extremal points "once".
"""
return self.t[(self.k):-(self.k)].copy()
def knotstats(self):
"""
Returns a string describing the knot spacing
"""
knots = self.getinttex()
spacings = knots[1:] - knots[:-1]
return " ".join(["%.1f" % (spacing) for spacing in sorted(spacings)])
def setintt(self, intt):
"""
Give me some internal knots (not even containing the datapoints extrema),
and I build the correct total knot vector t for you.
I add the extremas, with appropriate multiplicity.
@TODO: check consistency of intt with datapoints !
"""
# Ok a quick test for consisency :
if len(intt) == 0:
raise RuntimeError("Your list of internal knots is empty !")
if not self.datapoints.jds[0] < intt[0]:
raise RuntimeError("Ouch.")
if not self.datapoints.jds[-1] > intt[-1]:
raise RuntimeError("Ouch.")
#assert self.datapoints.jds[0] < intt[0] # should we put <= here ?
#assert self.datapoints.jds[-1] > intt[-1]
pro = self.datapoints.jds[0] * np.ones(self.k+1)
post = self.datapoints.jds[-1] * np.ones(self.k+1)
self.t = np.concatenate((pro, intt, post))
def setinttex(self, inttex):
"""
Including extremal knots
"""
#pro = self.datapoints.jds[0] * np.ones(self.k)
#post = self.datapoints.jds[-1] * np.ones(self.k)
pro = inttex[0] * np.ones(self.k)
post = inttex[-1] * np.ones(self.k)
self.t = np.concatenate((pro, inttex, post))
def getnint(self):
"""
Returns the number of intervals
"""
return(len(self.t) - 2* (self.k + 1) + 1)
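    # Illustrative note (hypothetical numbers): for a cubic spline (k = 3)
    # with 2 internal knots, setintt prepends and appends the data extrema
    # with multiplicity k+1 = 4, so len(t) = 4 + 2 + 4 = 10 and
    # getnint() = 10 - 2*4 + 1 = 3 intervals.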
# Similar stuff about coeffs :
def getc(self, m=0):
"""
Returns all active coefficients of the spline, the ones it makes sense to play with.
The length of this guy is number of intervals - 2 !
"""
return self.c[m:-(self.k + 1 + m)].copy()
def setc(self, c, m=0):
"""
Puts the coeffs from getc back into place.
"""
self.c[m:-(self.k + 1 + m)] = c
def getco(self, m=0):
"""
Same as getc, but reorders the coeffs in a way more suited for nonlinear optimization
"""
c = self.getc(m=m)
mid = int(len(c)/2.0)
return np.concatenate([c[mid:], c[:mid][::-1]])
def setco(self, c, m=0):
"""
The inverse of getco.
"""
mid = int(len(c)/2.0)
self.setc(np.concatenate([c[mid+1:][::-1], c[:mid+1]]), m=m)
def setcflat(self, c):
"""
Give me coeffs like those from getc(m=1), I will set the coeffs so that the spline extremas
are flat (i.e. slope = 0).
"""
self.setc(c, m=1)
self.c[0] = self.c[1]
self.c[-(self.k + 2)] = self.c[-(self.k + 3)]
def setcoflat(self, c):
"""
idem, but for reordered coeffs.
"""
mid = int(len(c)/2.0)
self.setcflat(np.concatenate([c[mid:][::-1], c[:mid]]))
def r2(self, nostab=True, nosquare=False):
"""
Evaluates the spline, compares it with the data points and returns a weighted sum of residuals r2.
If nostab = False, stab points are included
This is precisely the same r2 as is used by splrep for the fit, and thus the same value as
returned by optc !
This method can set lastr2nostab, so be sure to end any optimization with it.
If nostab = True, we don't count the stab points
"""
if nostab == True :
splinemags = self.eval(nostab = True, jds = None)
errs = self.datapoints.mags[self.datapoints.mask] - splinemags
werrs = errs/self.datapoints.magerrs[self.datapoints.mask]
if nosquare:
r2 = np.sum(np.fabs(werrs))
else:
r2 = np.sum(werrs * werrs)
self.lastr2nostab = r2
else :
splinemags = self.eval(nostab = False, jds = None)
errs = self.datapoints.mags - splinemags
werrs = errs/self.datapoints.magerrs
if nosquare:
r2 = np.sum(np.fabs(werrs))
else:
r2 = np.sum(werrs * werrs)
self.lastr2stab = r2
return r2
#if red:
# return chi2/len(self.datapoints.jds)
def tv(self):
"""
Returns the total variation of the spline. Simple !
http://en.wikipedia.org/wiki/Total_variation
"""
# Method 1 : linear approximation
ptd = 5 # point density in days ... this is enough !
a = self.t[0]
b = self.t[-1]
x = np.linspace(a, b, int((b-a) * ptd))
y = self.eval(jds = x)
tv1 = np.sum(np.fabs(y[1:] - y[:-1]))
#print "TV1 : %f" % (tv1)
return tv1
# Method 2 : integrating the absolute value of the derivative ... hmm, splint does not integrate derivatives ..
#si.splev(jds, (self.t, self.c, self.k))
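    # Illustrative note (hypothetical numbers): if the spline evaluates to
    # y = [0., 1., 0.] on the sampling grid, tv1 = |1-0| + |0-1| = 2.0,
    # the discrete total variation of that sequence.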
def optc(self):
"""
Optimize the coeffs, don't touch the knots
This is the fast guy, one reason to use splines :-)
Returns the chi2 in case you want it (including stabilization points) !
Sets lastr2stab, but not lastr2nostab !
"""
out = si.splrep(self.datapoints.jds, self.datapoints.mags, w=1.0/self.datapoints.magerrs, xb=None, xe=None, k=self.k, task=-1, s=None, t=self.getintt(), full_output=1, per=0, quiet=1)
# We check if it worked :
if not out[2] <= 0:
raise RuntimeError("Problem with spline representation, message = %s" % (out[3]))
self.c = out[0][1] # save the coeffs
#import matplotlib.pyplot as plt
#plt.plot(self.datapoints.jds, self.datapoints.magerrs)
#plt.show()
self.lastr2stab = out[1]
return out[1]
def optcflat(self, verbose = False):
"""
Optimizes only the "border coeffs" so to get zero slope at the extrema
Run optc() first ...
This has to be done with an iterative optimizer
"""
full = self.getc(m=1)
inip = self.getc(m=1)[[0, 1, -2, -1]] # 4 coeffs
def setp(p):
full[[0, 1, -2, -1]] = p
self.setcflat(full)
if verbose:
print "Starting flat coeff optimization ..."
print "Initial pars : ", inip
def errorfct(p):
setp(p)
return self.r2(nostab=False) # To get the same as optc would return !
minout = spopt.fmin_powell(errorfct, inip, full_output=1, disp=verbose)
popt = minout[0]
if popt.shape == ():
popt = np.array([popt])
if verbose:
print "Optimal pars : ", popt
setp(popt)
return self.r2(nostab=False) # We include the stab points, like optc does.
# This last line also updates self.lastr2 ...
def eval(self, jds = None, nostab = True):
"""
Evaluates the spline at jds, and returns the corresponding mags-like vector.
By default, we exclude the stabilization points !
If jds is not None, we use them instead of our own jds (in this case excludestab makes no sense)
"""
if jds is None:
if nostab:
jds = self.datapoints.jds[self.datapoints.mask]
else:
jds = self.datapoints.jds
else:
# A minimal check for non-extrapolation condition should go here !
pass
fitmags = si.splev(jds, (self.t, self.c, self.k))
# By default ext=0 : we do return extrapolated values
return fitmags
def display(self, showbounds = True, showdatapoints = True, showerrorbars=True, figsize=(16,8)):
"""
A display of the spline object, with knots, jds, stab points, etc.
For debugging and checks.
"""
fig = plt.figure(figsize=figsize)
if showdatapoints:
if showerrorbars:
mask = self.datapoints.mask
plt.errorbar(self.datapoints.jds[mask], self.datapoints.mags[mask], yerr=self.datapoints.magerrs[mask], linestyle="None", color="blue")
if not np.alltrue(mask):
mask = mask == False
plt.errorbar(self.datapoints.jds[mask], self.datapoints.mags[mask], yerr=self.datapoints.magerrs[mask], linestyle="None", color="gray")
else:
plt.plot(self.datapoints.jds, self.datapoints.mags, "b,")
if (np.any(self.t) != None) :
if getattr(self, "showknots", True) == True:
for knot in self.t:
plt.axvline(knot, color="gray")
# We draw the spline :
xs = np.linspace(self.datapoints.jds[0], self.datapoints.jds[-1], 1000)
ys = self.eval(jds = xs)
plt.plot(xs, ys, "b-")
if showbounds :
if (np.any(self.l) != None) and (np.any(self.u) != None) :
for l in self.l:
plt.axvline(l, color="blue", dashes=(4, 4))
for u in self.u:
plt.axvline(u, color="red", dashes=(5, 5))
axes = plt.gca()
axes.set_ylim(axes.get_ylim()[::-1])
plt.show()
# Some functions to interact directly with lightcurves :
def fit(lcs, knotstep=20.0, n=None, knots=None, stab=True,
stabext=300.0, stabgap=20.0, stabstep=5.0, stabmagerr=-2.0, stabrampsize=0, stabrampfact=1.0,
bokit=1, bokeps=2.0, boktests=5, bokwindow=None, k=3, verbose=True):
"""
The highlevel function to make a spline fit.
lcs : a list of lightcurves (I will fit the spline through the merged curves)
Specify either
knotstep : spacing of knots
or
n : how many knots to place
or
knots : give me actual initial knot locations, for instance prepared by seasonknots.
stab : do you want to insert stabilization points ?
stabext : number of days to the left and right to fill with stabilization points
stabgap : interval of days considered as a gap to fill with stab points.
stabstep : step of stab points
    stabmagerr : if positive, this is the absolute mag err of the stab points. If negative, the error bar will be abs(stabmagerr) times the median error bar of the data points (see DataPoints.calcstabmagerr).
bokit : number of BOK iterations (put to 0 to not move knots)
bokeps : epsilon of BOK
boktests : number of test positions for each knot
"""
dp = merge(lcs, stab=stab, stabext=stabext, stabgap=stabgap, stabstep=stabstep, stabmagerr=stabmagerr, stabrampsize=stabrampsize, stabrampfact=stabrampfact)
s = Spline(dp, k=k, bokeps=bokeps, boktests=boktests, bokwindow=bokwindow)
if knots==None:
if n == None:
s.uniknots(nint = knotstep, n = False)
else :
s.uniknots(nint = n, n = True)
else:
s.setintt(knots)
#if stab:
# s.unistabknots(stabknotn,n=True)
for n in range(bokit):
s.buildbounds(verbose=verbose)
s.bok(bokmethod="BF", verbose=verbose)
s.optc()
s.r2(nostab=True) # This is to set s.lastr2nostab
return s
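# A hypothetical usage sketch of fit() (lc1, lc2 stand for pycs lightcurve
# objects; parameter values are illustrative only):
#
#   spline = fit([lc1, lc2], knotstep=20.0, stab=True, bokit=2, bokeps=2.0)
#   print spline        # e.g. "~3/ulblb/12~" : degree / knot history / intervals
#   spline.display()
#
# bokit=0 keeps the initial uniform knots; each extra iteration runs
# buildbounds() followed by a brute-force bok() pass.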
def seasonknots(lcs, knotstep, ingap, seasongap=60.0):
"""
A little helper to get some knot locations inside of seasons only
knotstep is for inside seasons
ingap is the number of knots inside gaps.
"""
knots = []
#knotstep = 10
dp = merge(lcs, splitup=True, deltat=0.000001, sort=True, stab=False)
gaps = dp.jds[1:] - dp.jds[:-1]
gapindices = list(np.arange(len(dp.jds)-1)[gaps > seasongap])
# knots inside of seasons :
a = dp.jds[0]
for gapi in gapindices:
b = dp.jds[gapi]
#print (a, b)
knots.append(np.linspace(a, b, float(b - a)/float(knotstep)))
a = dp.jds[gapi+1]
b = dp.jds[-1]
knots.append(np.linspace(a, b, float(b - a)/float(knotstep)))
# knots inside of gaps
for gapi in gapindices:
a = dp.jds[gapi]
b = dp.jds[gapi+1]
knots.append(np.linspace(a, b, ingap+2)[1:-1])
knots = np.concatenate(knots)
knots.sort()
return knots
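# Illustration (hypothetical numbers): with jds covering two 150-day seasons
# separated by a 100-day gap, seasonknots(lcs, knotstep=20.0, ingap=3) returns
# knots spaced roughly knotstep days apart inside each season, plus 3 evenly
# spaced knots inside the gap (the gap endpoints themselves are excluded).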
def r2(lcs, spline, nosquare=False):
"""
I do not modify the spline (not even its datapoints) !
Just evaluate the quality of the match, returning an r2 (without any stab points, of course).
This is used if you want to optimize something on the lightcurves without touching the spline.
Of course, I do not touch lastr2nostab or lastr2stab of the spline ! So this has really nothing
to do with source spline optimization !
"""
myspline = spline.copy()
newdp = pycs.gen.spl.merge(lcs, stab=False) # Indeed we do not care about stabilization points here.
myspline.updatedp(newdp, dpmethod="leave")
return myspline.r2(nostab=True, nosquare=nosquare)
def mltv(lcs, spline, weight=True):
"""
Calculates the TV norm of the difference between a lightcurve (disregarding any microlensing !) and the spline.
I return the sum over the curves in lcs.
    Also returns an abs(chi)-like distance between the lcs without ML and the spline.
    If weight is True, we weight the terms in the sums according to their error bars.
Idea : weight the total variation somehow by the error bars ! Not sure if needed, the spline is already weighted.
"""
#import matplotlib.pyplot as plt
tv = 0.0
dist = 0.0
for l in lcs:
# We have a spline, and a lightcurve
lmags = l.getmags(noml = True) # We get the mags without ML (but with mag and fluxshift !)
        ljds = l.getjds() # Including any time shifts.
# Evaluating the spline at those jds :
splinemags = spline.eval(ljds)
# The residues :
res = lmags - splinemags
#plt.plot(ljds, res, "r.")
#plt.show()
        if not weight:
tv += np.sum(np.fabs(res[1:] - res[:-1]))
dist += np.sum(np.fabs(res))
else:
magerrs = l.getmagerrs()
a = res[1:]
aerrs = magerrs[1:]
b = res[:-1]
berrs = magerrs[:-1]
vari = np.fabs(a - b)
varierrs = np.sqrt(aerrs * aerrs + berrs * berrs)
tv += np.sum(vari/varierrs)
dist += np.sum(np.fabs(res) / np.fabs(magerrs))
return (tv, dist)
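# In formulas (matching the weighted branch above): with residues r_i and error
# bars e_i, tv = sum_i |r_{i+1} - r_i| / sqrt(e_i^2 + e_{i+1}^2) and
# dist = sum_i |r_i| / e_i.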
def optcmltv(lcs, spline, verbose=True):
"""
    I will optimize the coefficients of the spline so as to minimize the mltv.
I do not use the microlensing of the lcs at all !
Simple powell optimization, slow. A pity.
Add BOK and time shifts in there and it might be bingo !
Would be more efficient if we add knots on the fly
"""
inic = spline.getc(m=2)
def setc(c):
spline.setc(c, m=2)
def errorfct(c):
setc(c)
(tv, dist) = mltv(lcs, spline, weight=False)
        # TODO ("put weight"): switch to the weighted mltv here once validated.
return tv + 0.1*spline.tv()
minout = spopt.fmin_powell(errorfct, inic, full_output=1, disp=verbose)
copt = minout[0]
# We find a common shift to all coeffs so that the level matches
meanc = np.mean(spline.getc(m=2))
meanmag = np.mean(np.concatenate([l.getmags(noml = True) for l in lcs]))
setc(copt)
spline.c += meanmag - meanc
|
COSMOGRAIL/PyCS
|
pycs/gen/spl.py
|
Python
|
gpl-3.0
| 40,478
|
identity = {
# https://www.census.gov/prod/cen2010/briefs/c2010br-03.pdf
'sex': [('M',49.2),('F',50.8)],
# https://en.wikipedia.org/wiki/Race_and_ethnicity_in_the_United_States
'race': [('O',72.4),('U',12.6)]
}
iq = {
# Class: (mu, sigma)
# http://www.iqcomparisonsite.com/sexdifferences.aspx
'M': (103.08, 14.54),
'F': (101.41, 13.55),
# https://commons.wikimedia.org/wiki/File:WAIS-IV_FSIQ_Scores_by_Race_and_Ethnicity.png
'O': (103.21, 13.77),
'U': (88.67, 13.68),
# http://isteve.blogspot.com/2005/12/do-black-women-have-higher-iqs-than.html
# See the URL above for the provenance of the figures. As heritable measures of IQ,
# they are probably mostly garbage. But they provide a representative basis for a
# certain kind of "scientific" view of the world. And they were the only ones
# I came across that broke down mu and sigma values by sex and race.
'UF': (90.8, 13.58),
'UM': (88.4, 13.30),
'OF': (103.6, 13.30),
'OM': (102.7, 14.75)
}
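# Sketch (an assumption about intended use, not part of the original file):
# draw an identity by weighted choice, then an IQ from the matching normal.
#
#     import random
#     sex = random.choices([v for v, _ in identity['sex']],
#                          [w for _, w in identity['sex']])[0]
#     race = random.choices([v for v, _ in identity['race']],
#                           [w for _, w in identity['race']])[0]
#     mu, sigma = iq[race + sex]          # e.g. 'UF', 'OM'
#     score = random.gauss(mu, sigma)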
|
tatwell/hiring-curve
|
config/distributions.py
|
Python
|
gpl-3.0
| 1,031
|
import traceback
from datetime import datetime
from itertools import count
import zmq
def serve(procs, port=None, addr='tcp://*', context=None, debug=False):
"""Make some procedures available for remote calls via ØMQ."""
if context is None:
context = zmq.Context.instance()
with context.socket(zmq.REP) as socket:
if port is None:
port = socket.bind_to_random_port(addr)
else:
socket.bind('{}:{}'.format(addr, port))
print('Serving at {}:{}'.format(addr, port))
print('sending and receiving JSON')
for i in count(1):
idle = datetime.now()
print('{}: waiting for request #{}...'.format(idle, i))
            socket.poll()  # block until a request arrives
start = datetime.now()
print('{}: received request #{} after {}'
.format(start, i, start - idle))
try:
request = socket.recv_json()
name, *args = request
result = procs[name](*args)
reply = {'result': result}
print(reply)
socket.send_json(reply)
except Exception as exc:
if debug:
traceback.print_exc()
message = '{}: {}'.format(exc.__class__.__name__, exc)
reply = {'error': message}
print(reply)
socket.send_json(reply)
end = datetime.now()
print('{}: replied to #{} after {}'
.format(end, i, end - start))
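# A minimal client sketch for the protocol implemented above (assumptions, not
# part of the original module: requests are JSON lists [name, *args] and
# replies are JSON dicts with a 'result' or an 'error' key, as serve() shows).
def call(port, name, *args, addr='tcp://localhost', context=None):
    """Call one remote procedure served by serve() and return the reply dict."""
    if context is None:
        context = zmq.Context.instance()
    with context.socket(zmq.REQ) as socket:
        socket.connect('{}:{}'.format(addr, port))
        socket.send_json([name, *args])
        return socket.recv_json()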
if __name__ == '__main__':
data = {}
procs = {
'GET': data.__getitem__,
'SET': data.__setitem__,
'DEL': data.__delitem__,
}
serve(procs, 6379) # Look Ma, Redis!
|
doctaphred/phredutils
|
zmqrpc.py
|
Python
|
gpl-3.0
| 1,764
|
# -*- coding: utf-8 -*-
# Maestro Music Manager - https://github.com/maestromusic/maestro
# Copyright (C) 2009-2015 Martin Altmayer, Michael Helmling
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import Qt
from maestro.gui import selection, actions
class TreeviewSelection(selection.Selection):
"""Objects of this class store a selection of nodes in a TreeView. Different than a QItemSelectionModel,
a Selection knows about Nodes, Elements etc and provides special methods to determine properties
of the selection. Actions can use this information to decide whether they are enabled or not.
*model* is a QItemSelectionModel.
"""
def __init__(self, level, model):
"""Initialize with the given *model* (instance of QItemSelectionModel). Computes and stores
all attributes."""
# Get the QAbstractItemModel from a QItemSelectionModel
super().__init__(level,[model.model().data(index) for index in model.selectedIndexes()])
self._model = model
def nodes(self, onlyToplevel=False):
"""Return all nodes that are currently selected. If *onlyToplevel* is True, nodes will be excluded
if an ancestor is also selected.
"""
if not onlyToplevel:
return self._nodes
else:
return [n for n in self._nodes
if not any(self._model.isSelected(self._model.model().getIndex(parent))
for parent in n.getParents())]
class TreeView(QtWidgets.QTreeView):
"""Base class for tree views that contain mostly wrappers. This class handles mainly the
ContextMenuProvider system, that allows plugins to insert entries into the context menus of playlist and
browser.
*level* is the level that contains all elements in the tree (never mix wrappers from different levels!)
*affectGlobalSelection* determines whether the treeview will change the global selection whenever nodes
in it are selected. This should be set to False for treeviews in dialogs.
"""
actionConf = actions.TreeActionConfiguration()
def __init__(self, level, parent=None, affectGlobalSelection=True):
super().__init__(parent)
self.level = level
self.affectGlobalSelection = affectGlobalSelection
self.setHeaderHidden(True)
self.setExpandsOnDoubleClick(False)
self.setAlternatingRowColors(True)
self.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
self.setDragEnabled(True)
self.setDefaultDropAction(Qt.CopyAction)
self.viewport().setMouseTracking(True)
self.treeActions = self.actionConf.createActions(self)
self.actionConf.actionDefinitionAdded.connect(self._handleActionDefAdded)
self.actionConf.actionDefinitionRemoved.connect(self._handleActionDefRemoved)
@classmethod
def addActionDefinition(cls, *args, **kwargs):
if 'actionConf' not in cls.__dict__:
cls.actionConf = actions.TreeActionConfiguration()
cls.actionConf.root.addActionDefinition(*args, **kwargs)
def _handleActionDefAdded(self, actionDef):
self.treeActions[actionDef.identifier] = actionDef.createAction(self)
self.addAction(self.treeActions[actionDef.identifier])
def _handleActionDefRemoved(self, name):
action = self.treeActions[name]
self.removeAction(action)
del self.treeActions[name]
def setModel(self, model):
super().setModel(model)
from . import delegates
if isinstance(self.itemDelegate(), delegates.abstractdelegate.AbstractDelegate):
self.itemDelegate().model = model
self.updateSelection()
def updateSelection(self):
selectionModel = self.selectionModel()
if selectionModel is not None: # happens if the view is empty
self.selection = TreeviewSelection(self.level, selectionModel)
for action in self.treeActions.values():
if isinstance(action, actions.TreeAction):
action.initialize(self.selection)
def localActions(self):
return [action for action in self.actions() if action not in self.treeActions.values()]
def contextMenuEvent(self, event):
menu = self.actionConf.createMenu(self)
for action in self.localActions():
menu.addAction(action)
if menu.isEmpty():
event.ignore()
else:
menu.popup(event.globalPos())
event.accept()
def selectionChanged(self, selected, deselected):
super().selectionChanged(selected, deselected)
self.updateSelection()
if self.affectGlobalSelection:
selection.setGlobalSelection(self.selection)
# def focusInEvent(self, event):
# super().focusInEvent(event)
# self.updateSelection() #TODO: raises a strange segfault bug without any exceptions
# if self.affectGlobalSelection:
# selection.setGlobalSelection(self.selection)
def currentNode(self):
current = self.currentIndex()
if current.isValid():
return current.internalPointer()
def selectedRanges(self):
"""Return the ranges of selected nodes. Each range is a 3-tuple of parent (which doesn't need to be
selected), first index of parent.contents that is selected and the last index that is selected.
"""
selection = self.selectionModel().selection()
return [(self.model().data(itemRange.parent()),itemRange.top(),itemRange.bottom())
for itemRange in selection]
class DraggingTreeView(TreeView):
"""This is the baseclass of tree views that allow to drag and drop wrappers, e.g. playlist and editor.
It handles the following issues:
- Drag&drop actions must be enclosed in one undo-macro.
- Drags between views of the same class default to a move, drags between different views to a copy.
Via the shift and control modifier this default can be overridden.
- Models might need to know when a drag&drop action is going on. For this DraggingTreeView will
call the methods startDrag and endDrag on models which provide them (both without arguments).
- Before dropMimeData is called a DraggingTreeView will set the attributes dndSource and dndTarget
of the receiving model to the sending widget and itself. If the drag was started in an external
application, dndSource will be None.
"""
def __init__(self, level, parent=None, affectGlobalSelection=True):
super().__init__(level, parent, affectGlobalSelection)
self.setDefaultDropAction(Qt.MoveAction)
self.setAcceptDrops(True)
self.setDropIndicatorShown(True)
@property
def stack(self):
"""Return the stack that is used for changes to this tree."""
from .. import stack
return stack.stack
def startDrag(self, supportedActions):
model = self.model()
self.stack.beginMacro("Drag and Drop")
if hasattr(model, 'startDrag'):
model.startDrag()
try:
super().startDrag(supportedActions)
finally:
if hasattr(model, 'endDrag'):
model.endDrag()
self.stack.endMacro(abortIfEmpty=True)
def _changeDropAction(self, event):
if event.keyboardModifiers() & Qt.ShiftModifier:
event.setDropAction(Qt.MoveAction)
elif event.keyboardModifiers() & Qt.ControlModifier:
event.setDropAction(Qt.CopyAction)
elif isinstance(event.source(), type(self)):
event.setDropAction(Qt.MoveAction)
else:
event.setDropAction(Qt.CopyAction)
def dragEnterEvent(self, event):
self._changeDropAction(event)
super().dragEnterEvent(event)
def dragMoveEvent(self, event):
self._changeDropAction(event)
super().dragMoveEvent(event)
def dropEvent(self, event):
# workaround due to bug #67
if event.mouseButtons() & Qt.LeftButton:
event.ignore()
return
self._changeDropAction(event)
self.model().dndSource = event.source()
self.model().dndTarget = self
super().dropEvent(event)
self.model().dndSource = None
self.model().dndTarget = None
self.updateSelection()
|
maestromusic/maestro
|
maestro/gui/treeview.py
|
Python
|
gpl-3.0
| 9,172
|
# (c) Nelen & Schuurmans. GPL licensed, see LICENSE.rst.
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import print_function
import logging
import requests
logger = logging.getLogger(__name__)
def collect_filters(url):
"""Return filters from FEWS, cleaned and ready for storing as json."""
from_fews = _download(url)
result = []
for filter_dict in from_fews:
result.append(_process_filter_dict(filter_dict))
return result
def collect_parameters(url):
from_fews = _download(url)
# TODO
return from_fews
def collect_locations(url):
from_fews = _download(url)
# TODO
return from_fews
def _download(url):
r = requests.get(url)
    r.raise_for_status()  # Raises an HTTPError when the request was not successful.
return r.json()
def _process_filter_dict(filter_dict):
# {'filter': {name, childfilters, etc}
content = filter_dict['filter']
name = content['name']
description = content['description']
if name == description:
# Description is only interesting if it is different from the name.
# Often it is the same, so we've got to filter it out.
description = ''
children = [_process_filter_dict(child_filter_dict)
for child_filter_dict in content.get('childFilters', [])]
result = {'id': content['id'],
'name': name,
'description': description,
'children': children}
return result
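# Illustration (made-up ids): _process_filter_dict turns a FEWS dict like
# {'filter': {'id': 'f1', 'name': 'Rivers', 'description': 'Rivers',
#             'childFilters': []}}
# into {'id': 'f1', 'name': 'Rivers', 'description': '', 'children': []}
# (the description is blanked here because it merely repeats the name).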
|
lizardsystem/lizard-fewsapi
|
lizard_fewsapi/collect.py
|
Python
|
gpl-3.0
| 1,482
|
from .chucky_neighborhood_tool import NeighborhoodTool
|
a0x77n/chucky-tools
|
src/chucky_tools/neighborhood/__init__.py
|
Python
|
gpl-3.0
| 55
|
# coding=utf-8
"""
a csv trace reader
Author: Jason Yang <peter.waynechina@gmail.com> 2016/06
"""
import string
from PyMimircache.const import ALLOW_C_MIMIRCACHE, INSTALL_PHASE
from PyMimircache.utils.printing import *
if ALLOW_C_MIMIRCACHE and not INSTALL_PHASE:
import PyMimircache.CMimircache.CacheReader as c_cacheReader
from PyMimircache.cacheReader.abstractReader import AbstractReader
class CsvReader(AbstractReader):
"""
CsvReader class
"""
all = ["read_one_req", "read_complete_req", "lines_dict",
"lines", "read_time_req", "reset", "copy", "get_params"]
def __init__(self, file_loc,
data_type='c',
init_params=None,
block_unit_size=0,
disk_sector_size=0,
open_c_reader=True,
**kwargs):
"""
:param file_loc: location of the file
:param data_type: type of data, can be "l" for int/long, "c" for string
:param init_params: the init_params for opening csv
:param block_unit_size: block size for storage system, 0 when disabled
:param disk_sector_size: size of disk sector
:param open_c_reader: bool for whether open reader in C backend
:param kwargs: not used now
"""
super(CsvReader, self).__init__(file_loc, data_type, block_unit_size, disk_sector_size,
open_c_reader, kwargs.get("lock", None))
assert init_params is not None, "please provide init_param for csvReader"
assert "label" in init_params, "please provide label for csv reader"
self.trace_file = open(file_loc, 'rb')
# self.trace_file = open(file_loc, 'r', encoding='utf-8', errors='ignore')
self.init_params = init_params
self.label_column = init_params['label']
        self.time_column = init_params.get("real_time", -1)
        self.size_column = init_params.get("size", -1)
if self.time_column != -1:
self.support_real_time = True
if self.size_column != -1:
self.support_size = True
if block_unit_size != 0:
assert "size" in init_params, "please provide size_column option to consider request size"
        self.header_bool = init_params.get('header', False)
self.delimiter = init_params.get('delimiter', ",")
if "delimiter" not in init_params:
INFO("open {} using default delimiter \",\" for CsvReader".format(file_loc))
if self.header_bool:
self.headers = [i.strip(string.whitespace) for i in
self.trace_file.readline().decode().split(self.delimiter)]
# self.trace_file.readline()
if ALLOW_C_MIMIRCACHE and open_c_reader:
self.c_reader = c_cacheReader.setup_reader(file_loc, 'c', data_type=data_type,
block_unit_size=block_unit_size,
disk_sector_size=disk_sector_size,
init_params=init_params)
def read_one_req(self):
"""
read one request, return the lbn/objID
:return:
"""
super().read_one_req()
line = self.trace_file.readline().decode('utf-8', 'ignore')
while line and len(line.strip()) == 0:
line = self.trace_file.readline().decode()
if line:
ret = line.split(self.delimiter)[self.label_column - 1].strip()
if self.data_type == 'l':
ret = int(ret)
if self.block_unit_size != 0 and self.disk_sector_size != 0:
ret = ret * self.disk_sector_size // self.block_unit_size
return ret
else:
return None
def read_complete_req(self):
"""
read the complete line, including request and its all related info
:return: a list of all info of the request
"""
super().read_one_req()
line = self.trace_file.readline().decode()
while line and len(line.strip()) == 0:
line = self.trace_file.readline().decode()
if line:
line_split = line.strip().split(self.delimiter)
if self.block_unit_size != 0 and self.disk_sector_size != 0:
line_split[self.label_column - 1] = line_split[self.label_column - 1] * \
self.disk_sector_size // self.block_unit_size
return line_split
else:
return None
def lines_dict(self):
"""
return a dict with column header->data
note this function does not convert lbn even if disk_sector_size and block_unit_size are set
:return:
"""
line = self.trace_file.readline().decode()
while line and len(line.strip()) == 0:
line = self.trace_file.readline().decode()
while line:
line_split = line.split(self.delimiter)
d = {}
if self.header_bool:
for i in range(len(self.headers)):
d[self.headers[i]] = line_split[i].strip(string.whitespace)
else:
for key, value in enumerate(line_split):
d[key] = value
            line = self.trace_file.readline().decode()
yield d
# raise StopIteration
def lines(self):
"""
a generator for reading all the information of current request/line
:return: a tuple of current request
"""
line = self.trace_file.readline().decode()
while line and len(line.strip()) == 0:
line = self.trace_file.readline().decode()
while line:
line_split = tuple(line.split(self.delimiter))
            line = self.trace_file.readline().decode()
yield line_split
# raise StopIteration
def read_time_req(self):
"""
return real_time information for the request in the form of (time, request)
:return:
"""
super().read_one_req()
line = self.trace_file.readline().strip().decode()
while line and len(line.strip()) == 0:
line = self.trace_file.readline().decode()
if line:
line = line.split(self.delimiter)
try:
time = float(line[self.time_column - 1].strip())
lbn = line[self.label_column - 1].strip()
if self.data_type == 'l':
lbn = int(lbn)
if self.block_unit_size != 0 and self.disk_sector_size != 0:
lbn = lbn * self.disk_sector_size // self.block_unit_size
return time, lbn
except Exception as e:
print("ERROR csvReader reading data: {}, current line: {}".format(e, line))
else:
return None
def skip_n_req(self, n):
"""
skip N requests from current position
:param n: the number of requests to skip
"""
        for _ in range(n):
            self.read_one_req()
def reset(self):
"""
reset reader to initial state
:return:
"""
super().reset()
if self.header_bool:
self.trace_file.readline()
def copy(self, open_c_reader=False):
"""
        Return a deep copy of the current reader with everything reset to its initial state;
        the returned reader should not interfere with the current reader.
        :param open_c_reader: whether to open the C reader as well, disabled by default
:return: a copied reader
"""
return CsvReader(self.file_loc, self.data_type, self.init_params,
self.block_unit_size, self.disk_sector_size, open_c_reader, lock=self.lock)
def get_params(self):
"""
return all the parameters for this reader instance in a dictionary
:return: a dictionary containing all parameters
"""
return {
"file_loc": self.file_loc,
"init_params": self.init_params,
"data_type": self.data_type,
"block_unit_size": self.block_unit_size,
"disk_sector_size": self.disk_sector_size,
"open_c_reader": self.open_c_reader,
"lock": self.lock
}
def __next__(self): # Python 3
super().__next__()
element = self.read_one_req()
if element is not None:
return element
else:
raise StopIteration
def __repr__(self):
return "csvReader for trace {}".format(self.file_loc)
|
1a1a11a/mimircache
|
PyMimircache/cacheReader/csvReader.py
|
Python
|
gpl-3.0
| 8,745
|
from __future__ import unicode_literals
from frappe import _
app_name = "erpnext"
app_title = "ERPNext"
app_publisher = "Frappe Technologies Pvt. Ltd."
app_description = """ERP made simple"""
app_icon = "fa fa-th"
app_color = "#e74c3c"
app_email = "info@erpnext.com"
app_license = "GNU General Public License (v3)"
source_link = "https://github.com/frappe/erpnext"
app_logo_url = "/assets/erpnext/images/erpnext-logo.svg"
develop_version = '13.x.x-develop'
app_include_js = "/assets/js/erpnext.min.js"
app_include_css = "/assets/css/erpnext.css"
web_include_js = "/assets/js/erpnext-web.min.js"
web_include_css = "/assets/css/erpnext-web.css"
doctype_js = {
"Address": "public/js/address.js",
"Communication": "public/js/communication.js",
"Event": "public/js/event.js",
"Newsletter": "public/js/newsletter.js"
}
override_doctype_class = {
'Address': 'erpnext.accounts.custom.address.ERPNextAddress'
}
welcome_email = "erpnext.setup.utils.welcome_email"
# setup wizard
setup_wizard_requires = "assets/erpnext/js/setup_wizard.js"
setup_wizard_stages = "erpnext.setup.setup_wizard.setup_wizard.get_setup_stages"
setup_wizard_test = "erpnext.setup.setup_wizard.test_setup_wizard.run_setup_wizard_test"
before_install = "erpnext.setup.install.check_setup_wizard_not_completed"
after_install = "erpnext.setup.install.after_install"
boot_session = "erpnext.startup.boot.boot_session"
notification_config = "erpnext.startup.notifications.get_notification_config"
get_help_messages = "erpnext.utilities.activation.get_help_messages"
leaderboards = "erpnext.startup.leaderboard.get_leaderboards"
filters_config = "erpnext.startup.filters.get_filters_config"
additional_print_settings = "erpnext.controllers.print_settings.get_print_settings"
on_session_creation = [
"erpnext.portal.utils.create_customer_or_supplier",
"erpnext.shopping_cart.utils.set_cart_count"
]
on_logout = "erpnext.shopping_cart.utils.clear_cart_count"
treeviews = ['Account', 'Cost Center', 'Warehouse', 'Item Group', 'Customer Group', 'Sales Person', 'Territory', 'Assessment Group', 'Department']
# website
update_website_context = ["erpnext.shopping_cart.utils.update_website_context", "erpnext.education.doctype.education_settings.education_settings.update_website_context"]
my_account_context = "erpnext.shopping_cart.utils.update_my_account_context"
calendars = ["Task", "Work Order", "Leave Application", "Sales Order", "Holiday List", "Course Schedule"]
domains = {
'Agriculture': 'erpnext.domains.agriculture',
'Distribution': 'erpnext.domains.distribution',
'Education': 'erpnext.domains.education',
'Healthcare': 'erpnext.domains.healthcare',
'Hospitality': 'erpnext.domains.hospitality',
'Manufacturing': 'erpnext.domains.manufacturing',
'Non Profit': 'erpnext.domains.non_profit',
'Retail': 'erpnext.domains.retail',
'Services': 'erpnext.domains.services',
}
website_generators = ["Item Group", "Item", "BOM", "Sales Partner",
"Job Opening", "Student Admission"]
website_context = {
"favicon": "/assets/erpnext/images/erpnext-favicon.svg",
"splash_image": "/assets/erpnext/images/erpnext-logo.svg"
}
website_route_rules = [
{"from_route": "/orders", "to_route": "Sales Order"},
{"from_route": "/orders/<path:name>", "to_route": "order",
"defaults": {
"doctype": "Sales Order",
"parents": [{"label": _("Orders"), "route": "orders"}]
}
},
{"from_route": "/invoices", "to_route": "Sales Invoice"},
{"from_route": "/invoices/<path:name>", "to_route": "order",
"defaults": {
"doctype": "Sales Invoice",
"parents": [{"label": _("Invoices"), "route": "invoices"}]
}
},
{"from_route": "/supplier-quotations", "to_route": "Supplier Quotation"},
{"from_route": "/supplier-quotations/<path:name>", "to_route": "order",
"defaults": {
"doctype": "Supplier Quotation",
"parents": [{"label": _("Supplier Quotation"), "route": "supplier-quotations"}]
}
},
{"from_route": "/purchase-orders", "to_route": "Purchase Order"},
{"from_route": "/purchase-orders/<path:name>", "to_route": "order",
"defaults": {
"doctype": "Purchase Order",
"parents": [{"label": _("Purchase Order"), "route": "purchase-orders"}]
}
},
{"from_route": "/purchase-invoices", "to_route": "Purchase Invoice"},
{"from_route": "/purchase-invoices/<path:name>", "to_route": "order",
"defaults": {
"doctype": "Purchase Invoice",
"parents": [{"label": _("Purchase Invoice"), "route": "purchase-invoices"}]
}
},
{"from_route": "/quotations", "to_route": "Quotation"},
{"from_route": "/quotations/<path:name>", "to_route": "order",
"defaults": {
"doctype": "Quotation",
"parents": [{"label": _("Quotations"), "route": "quotations"}]
}
},
{"from_route": "/shipments", "to_route": "Delivery Note"},
{"from_route": "/shipments/<path:name>", "to_route": "order",
"defaults": {
"doctype": "Delivery Note",
"parents": [{"label": _("Shipments"), "route": "shipments"}]
}
},
{"from_route": "/rfq", "to_route": "Request for Quotation"},
{"from_route": "/rfq/<path:name>", "to_route": "rfq",
"defaults": {
"doctype": "Request for Quotation",
"parents": [{"label": _("Request for Quotation"), "route": "rfq"}]
}
},
{"from_route": "/addresses", "to_route": "Address"},
{"from_route": "/addresses/<path:name>", "to_route": "addresses",
"defaults": {
"doctype": "Address",
"parents": [{"label": _("Addresses"), "route": "addresses"}]
}
},
{"from_route": "/jobs", "to_route": "Job Opening"},
{"from_route": "/admissions", "to_route": "Student Admission"},
{"from_route": "/boms", "to_route": "BOM"},
{"from_route": "/timesheets", "to_route": "Timesheet"},
{"from_route": "/material-requests", "to_route": "Material Request"},
{"from_route": "/material-requests/<path:name>", "to_route": "material_request_info",
"defaults": {
"doctype": "Material Request",
"parents": [{"label": _("Material Request"), "route": "material-requests"}]
}
},
]
standard_portal_menu_items = [
{"title": _("Personal Details"), "route": "/personal-details", "reference_doctype": "Patient", "role": "Patient"},
{"title": _("Projects"), "route": "/project", "reference_doctype": "Project"},
{"title": _("Request for Quotations"), "route": "/rfq", "reference_doctype": "Request for Quotation", "role": "Supplier"},
{"title": _("Supplier Quotation"), "route": "/supplier-quotations", "reference_doctype": "Supplier Quotation", "role": "Supplier"},
{"title": _("Purchase Orders"), "route": "/purchase-orders", "reference_doctype": "Purchase Order", "role": "Supplier"},
{"title": _("Purchase Invoices"), "route": "/purchase-invoices", "reference_doctype": "Purchase Invoice", "role": "Supplier"},
{"title": _("Quotations"), "route": "/quotations", "reference_doctype": "Quotation", "role":"Customer"},
{"title": _("Orders"), "route": "/orders", "reference_doctype": "Sales Order", "role":"Customer"},
{"title": _("Invoices"), "route": "/invoices", "reference_doctype": "Sales Invoice", "role":"Customer"},
{"title": _("Shipments"), "route": "/shipments", "reference_doctype": "Delivery Note", "role":"Customer"},
{"title": _("Issues"), "route": "/issues", "reference_doctype": "Issue", "role":"Customer"},
{"title": _("Addresses"), "route": "/addresses", "reference_doctype": "Address"},
{"title": _("Timesheets"), "route": "/timesheets", "reference_doctype": "Timesheet", "role":"Customer"},
{"title": _("Lab Test"), "route": "/lab-test", "reference_doctype": "Lab Test", "role":"Patient"},
{"title": _("Prescription"), "route": "/prescription", "reference_doctype": "Patient Encounter", "role":"Patient"},
{"title": _("Patient Appointment"), "route": "/patient-appointments", "reference_doctype": "Patient Appointment", "role":"Patient"},
{"title": _("Fees"), "route": "/fees", "reference_doctype": "Fees", "role":"Student"},
{"title": _("Newsletter"), "route": "/newsletters", "reference_doctype": "Newsletter"},
{"title": _("Admission"), "route": "/admissions", "reference_doctype": "Student Admission", "role": "Student"},
{"title": _("Certification"), "route": "/certification", "reference_doctype": "Certification Application", "role": "Non Profit Portal User"},
{"title": _("Material Request"), "route": "/material-requests", "reference_doctype": "Material Request", "role": "Customer"},
{"title": _("Appointment Booking"), "route": "/book_appointment"},
]
default_roles = [
{'role': 'Customer', 'doctype':'Contact', 'email_field': 'email_id'},
{'role': 'Supplier', 'doctype':'Contact', 'email_field': 'email_id'},
{'role': 'Student', 'doctype':'Student', 'email_field': 'student_email_id'},
]
sounds = [
{"name": "incoming-call", "src": "/assets/erpnext/sounds/incoming-call.mp3", "volume": 0.2},
{"name": "call-disconnect", "src": "/assets/erpnext/sounds/call-disconnect.mp3", "volume": 0.2},
]
has_website_permission = {
"Sales Order": "erpnext.controllers.website_list_for_contact.has_website_permission",
"Quotation": "erpnext.controllers.website_list_for_contact.has_website_permission",
"Sales Invoice": "erpnext.controllers.website_list_for_contact.has_website_permission",
"Supplier Quotation": "erpnext.controllers.website_list_for_contact.has_website_permission",
"Purchase Order": "erpnext.controllers.website_list_for_contact.has_website_permission",
"Purchase Invoice": "erpnext.controllers.website_list_for_contact.has_website_permission",
"Material Request": "erpnext.controllers.website_list_for_contact.has_website_permission",
"Delivery Note": "erpnext.controllers.website_list_for_contact.has_website_permission",
"Issue": "erpnext.support.doctype.issue.issue.has_website_permission",
"Timesheet": "erpnext.controllers.website_list_for_contact.has_website_permission",
"Lab Test": "erpnext.healthcare.web_form.lab_test.lab_test.has_website_permission",
"Patient Encounter": "erpnext.healthcare.web_form.prescription.prescription.has_website_permission",
"Patient Appointment": "erpnext.healthcare.web_form.patient_appointments.patient_appointments.has_website_permission",
"Patient": "erpnext.healthcare.web_form.personal_details.personal_details.has_website_permission"
}
dump_report_map = "erpnext.startup.report_data_map.data_map"
before_tests = "erpnext.setup.utils.before_tests"
standard_queries = {
"Customer": "erpnext.selling.doctype.customer.customer.get_customer_list",
"Healthcare Practitioner": "erpnext.healthcare.doctype.healthcare_practitioner.healthcare_practitioner.get_practitioner_list"
}
doc_events = {
"*": {
"on_submit": "erpnext.healthcare.doctype.patient_history_settings.patient_history_settings.create_medical_record",
"on_update_after_submit": "erpnext.healthcare.doctype.patient_history_settings.patient_history_settings.update_medical_record",
"on_cancel": "erpnext.healthcare.doctype.patient_history_settings.patient_history_settings.delete_medical_record"
},
"Stock Entry": {
"on_submit": "erpnext.stock.doctype.material_request.material_request.update_completed_and_requested_qty",
"on_cancel": "erpnext.stock.doctype.material_request.material_request.update_completed_and_requested_qty"
},
"User": {
"after_insert": "frappe.contacts.doctype.contact.contact.update_contact",
"validate": "erpnext.hr.doctype.employee.employee.validate_employee_role",
"on_update": ["erpnext.hr.doctype.employee.employee.update_user_permissions",
"erpnext.portal.utils.set_default_role"]
},
("Sales Taxes and Charges Template", 'Price List'): {
"on_update": "erpnext.shopping_cart.doctype.shopping_cart_settings.shopping_cart_settings.validate_cart_settings"
},
"Website Settings": {
"validate": "erpnext.portal.doctype.products_settings.products_settings.home_page_is_products"
},
"Tax Category": {
"validate": "erpnext.regional.india.utils.validate_tax_category"
},
"Sales Invoice": {
"on_submit": [
"erpnext.regional.create_transaction_log",
"erpnext.regional.italy.utils.sales_invoice_on_submit",
"erpnext.erpnext_integrations.taxjar_integration.create_transaction"
],
"on_cancel": [
"erpnext.regional.italy.utils.sales_invoice_on_cancel",
"erpnext.erpnext_integrations.taxjar_integration.delete_transaction"
],
"on_trash": "erpnext.regional.check_deletion_permission"
},
"Purchase Invoice": {
"validate": [
"erpnext.regional.india.utils.update_grand_total_for_rcm",
"erpnext.regional.united_arab_emirates.utils.update_grand_total_for_rcm",
"erpnext.regional.united_arab_emirates.utils.validate_returns"
]
},
"Payment Entry": {
"on_submit": ["erpnext.regional.create_transaction_log", "erpnext.accounts.doctype.payment_request.payment_request.update_payment_req_status", "erpnext.accounts.doctype.dunning.dunning.resolve_dunning"],
"on_trash": "erpnext.regional.check_deletion_permission"
},
'Address': {
'validate': ['erpnext.regional.india.utils.validate_gstin_for_india', 'erpnext.regional.italy.utils.set_state_code', 'erpnext.regional.india.utils.update_gst_category'],
'on_update':'erpnext.healthcare.utils.update_address_link'
},
'Supplier': {
'validate': 'erpnext.regional.india.utils.validate_pan_for_india'
},
('Sales Invoice', 'Sales Order', 'Delivery Note', 'Purchase Invoice', 'Purchase Order', 'Purchase Receipt'): {
'validate': ['erpnext.regional.india.utils.set_place_of_supply']
},
('Sales Invoice', 'Purchase Invoice'): {
'validate': ['erpnext.regional.india.utils.validate_document_name']
},
"Contact": {
"on_trash": "erpnext.support.doctype.issue.issue.update_issue",
"after_insert": "erpnext.telephony.doctype.call_log.call_log.link_existing_conversations",
"validate": "erpnext.crm.utils.update_lead_phone_numbers"
},
"Email Unsubscribe": {
"after_insert": "erpnext.crm.doctype.email_campaign.email_campaign.unsubscribe_recipient"
},
('Quotation', 'Sales Order', 'Sales Invoice'): {
'validate': ["erpnext.erpnext_integrations.taxjar_integration.set_sales_tax"]
}
}
# On a cancel event, Payment Entry is exempted while all other linked submittable doctypes get cancelled.
# Payment Entry is exempted to maintain data integrity: it is un-linked when the sales invoice gets cancelled.
# If Payment Entry were not in the auto-cancel exempted doctypes, it would be cancelled as well.
auto_cancel_exempted_doctypes = [
"Payment Entry",
"Inpatient Medication Entry"
]
scheduler_events = {
"cron": {
"0/30 * * * *": [
"erpnext.utilities.doctype.video.video.update_youtube_data",
]
},
"all": [
"erpnext.projects.doctype.project.project.project_status_update_reminder",
"erpnext.healthcare.doctype.patient_appointment.patient_appointment.send_appointment_reminder",
"erpnext.crm.doctype.social_media_post.social_media_post.process_scheduled_social_media_posts"
],
"hourly": [
'erpnext.hr.doctype.daily_work_summary_group.daily_work_summary_group.trigger_emails',
"erpnext.accounts.doctype.subscription.subscription.process_all",
"erpnext.erpnext_integrations.doctype.amazon_mws_settings.amazon_mws_settings.schedule_get_order_details",
"erpnext.accounts.doctype.gl_entry.gl_entry.rename_gle_sle_docs",
"erpnext.erpnext_integrations.doctype.plaid_settings.plaid_settings.automatic_synchronization",
"erpnext.projects.doctype.project.project.hourly_reminder",
"erpnext.projects.doctype.project.project.collect_project_status",
"erpnext.hr.doctype.shift_type.shift_type.process_auto_attendance_for_all_shifts",
"erpnext.support.doctype.issue.issue.set_service_level_agreement_variance",
"erpnext.erpnext_integrations.connectors.shopify_connection.sync_old_orders",
],
"daily": [
"erpnext.stock.reorder_item.reorder_item",
"erpnext.support.doctype.issue.issue.auto_close_tickets",
"erpnext.crm.doctype.opportunity.opportunity.auto_close_opportunity",
"erpnext.controllers.accounts_controller.update_invoice_status",
"erpnext.accounts.doctype.fiscal_year.fiscal_year.auto_create_fiscal_year",
"erpnext.hr.doctype.employee.employee.send_birthday_reminders",
"erpnext.projects.doctype.task.task.set_tasks_as_overdue",
"erpnext.assets.doctype.asset.depreciation.post_depreciation_entries",
"erpnext.hr.doctype.daily_work_summary_group.daily_work_summary_group.send_summary",
"erpnext.stock.doctype.serial_no.serial_no.update_maintenance_status",
"erpnext.buying.doctype.supplier_scorecard.supplier_scorecard.refresh_scorecards",
"erpnext.setup.doctype.company.company.cache_companies_monthly_sales_history",
"erpnext.assets.doctype.asset.asset.update_maintenance_status",
"erpnext.assets.doctype.asset.asset.make_post_gl_entry",
"erpnext.crm.doctype.contract.contract.update_status_for_contracts",
"erpnext.projects.doctype.project.project.update_project_sales_billing",
"erpnext.projects.doctype.project.project.send_project_status_email_to_users",
"erpnext.quality_management.doctype.quality_review.quality_review.review",
"erpnext.support.doctype.service_level_agreement.service_level_agreement.check_agreement_status",
"erpnext.crm.doctype.email_campaign.email_campaign.send_email_to_leads_or_contacts",
"erpnext.crm.doctype.email_campaign.email_campaign.set_email_campaign_status",
"erpnext.selling.doctype.quotation.quotation.set_expired_status",
"erpnext.healthcare.doctype.patient_appointment.patient_appointment.update_appointment_status",
"erpnext.buying.doctype.supplier_quotation.supplier_quotation.set_expired_status",
"erpnext.accounts.doctype.process_statement_of_accounts.process_statement_of_accounts.send_auto_email",
"erpnext.non_profit.doctype.membership.membership.set_expired_status"
],
"daily_long": [
"erpnext.setup.doctype.email_digest.email_digest.send",
"erpnext.manufacturing.doctype.bom_update_tool.bom_update_tool.update_latest_price_in_all_boms",
"erpnext.hr.doctype.leave_ledger_entry.leave_ledger_entry.process_expired_allocation",
"erpnext.hr.doctype.leave_policy_assignment.leave_policy_assignment.automatically_allocate_leaves_based_on_leave_policy",
"erpnext.hr.utils.generate_leave_encashment",
"erpnext.hr.utils.allocate_earned_leaves",
"erpnext.hr.utils.grant_leaves_automatically",
"erpnext.loan_management.doctype.process_loan_security_shortfall.process_loan_security_shortfall.create_process_loan_security_shortfall",
"erpnext.loan_management.doctype.process_loan_interest_accrual.process_loan_interest_accrual.process_loan_interest_accrual_for_term_loans",
"erpnext.crm.doctype.lead.lead.daily_open_lead"
],
"monthly_long": [
"erpnext.accounts.deferred_revenue.process_deferred_accounting",
"erpnext.loan_management.doctype.process_loan_interest_accrual.process_loan_interest_accrual.process_loan_interest_accrual_for_demand_loans"
]
}
email_brand_image = "assets/erpnext/images/erpnext-logo.jpg"
default_mail_footer = """
<span>
Sent via
<a class="text-muted" href="https://erpnext.com?source=via_email_footer" target="_blank">
ERPNext
</a>
</span>
"""
get_translated_dict = {
("doctype", "Global Defaults"): "frappe.geo.country_info.get_translated_dict"
}
bot_parsers = [
'erpnext.utilities.bot.FindItemBot',
]
get_site_info = 'erpnext.utilities.get_site_info'
payment_gateway_enabled = "erpnext.accounts.utils.create_payment_gateway_account"
communication_doctypes = ["Customer", "Supplier"]
accounting_dimension_doctypes = ["GL Entry", "Sales Invoice", "Purchase Invoice", "Payment Entry", "Asset",
"Expense Claim", "Expense Claim Detail", "Expense Taxes and Charges", "Stock Entry", "Budget", "Payroll Entry", "Delivery Note",
"Sales Invoice Item", "Purchase Invoice Item", "Purchase Order Item", "Journal Entry Account", "Material Request Item", "Delivery Note Item",
"Purchase Receipt Item", "Stock Entry Detail", "Payment Entry Deduction", "Sales Taxes and Charges", "Purchase Taxes and Charges", "Shipping Rule",
"Landed Cost Item", "Asset Value Adjustment", "Loyalty Program", "Fee Schedule", "Fee Structure", "Stock Reconciliation",
"Travel Request", "Fees", "POS Profile", "Opening Invoice Creation Tool", "Opening Invoice Creation Tool Item", "Subscription",
"Subscription Plan"
]
regional_overrides = {
'France': {
'erpnext.tests.test_regional.test_method': 'erpnext.regional.france.utils.test_method'
},
'India': {
'erpnext.tests.test_regional.test_method': 'erpnext.regional.india.utils.test_method',
'erpnext.controllers.taxes_and_totals.get_itemised_tax_breakup_header': 'erpnext.regional.india.utils.get_itemised_tax_breakup_header',
'erpnext.controllers.taxes_and_totals.get_itemised_tax_breakup_data': 'erpnext.regional.india.utils.get_itemised_tax_breakup_data',
'erpnext.accounts.party.get_regional_address_details': 'erpnext.regional.india.utils.get_regional_address_details',
'erpnext.controllers.taxes_and_totals.get_regional_round_off_accounts': 'erpnext.regional.india.utils.get_regional_round_off_accounts',
'erpnext.hr.utils.calculate_annual_eligible_hra_exemption': 'erpnext.regional.india.utils.calculate_annual_eligible_hra_exemption',
'erpnext.hr.utils.calculate_hra_exemption_for_period': 'erpnext.regional.india.utils.calculate_hra_exemption_for_period',
'erpnext.accounts.doctype.purchase_invoice.purchase_invoice.make_regional_gl_entries': 'erpnext.regional.india.utils.make_regional_gl_entries',
'erpnext.controllers.accounts_controller.validate_einvoice_fields': 'erpnext.regional.india.e_invoice.utils.validate_einvoice_fields'
},
'United Arab Emirates': {
'erpnext.controllers.taxes_and_totals.update_itemised_tax_data': 'erpnext.regional.united_arab_emirates.utils.update_itemised_tax_data',
'erpnext.accounts.doctype.purchase_invoice.purchase_invoice.make_regional_gl_entries': 'erpnext.regional.united_arab_emirates.utils.make_regional_gl_entries',
},
'Saudi Arabia': {
'erpnext.controllers.taxes_and_totals.update_itemised_tax_data': 'erpnext.regional.united_arab_emirates.utils.update_itemised_tax_data'
},
'Italy': {
'erpnext.controllers.taxes_and_totals.update_itemised_tax_data': 'erpnext.regional.italy.utils.update_itemised_tax_data',
'erpnext.controllers.accounts_controller.validate_regional': 'erpnext.regional.italy.utils.sales_invoice_validate',
}
}
user_privacy_documents = [
{
'doctype': 'Lead',
'match_field': 'email_id',
'personal_fields': ['phone', 'mobile_no', 'fax', 'website', 'lead_name'],
},
{
'doctype': 'Opportunity',
'match_field': 'contact_email',
'personal_fields': ['contact_mobile', 'contact_display', 'customer_name'],
}
]
# ERPNext doctypes for Global Search
global_search_doctypes = {
"Default": [
{"doctype": "Customer", "index": 0},
{"doctype": "Supplier", "index": 1},
{"doctype": "Item", "index": 2},
{"doctype": "Warehouse", "index": 3},
{"doctype": "Account", "index": 4},
{"doctype": "Employee", "index": 5},
{"doctype": "BOM", "index": 6},
{"doctype": "Sales Invoice", "index": 7},
{"doctype": "Sales Order", "index": 8},
{"doctype": "Quotation", "index": 9},
{"doctype": "Work Order", "index": 10},
{"doctype": "Purchase Order", "index": 11},
{"doctype": "Purchase Receipt", "index": 12},
{"doctype": "Purchase Invoice", "index": 13},
{"doctype": "Delivery Note", "index": 14},
{"doctype": "Stock Entry", "index": 15},
{"doctype": "Material Request", "index": 16},
{"doctype": "Delivery Trip", "index": 17},
{"doctype": "Pick List", "index": 18},
{"doctype": "Salary Slip", "index": 19},
{"doctype": "Leave Application", "index": 20},
{"doctype": "Expense Claim", "index": 21},
{"doctype": "Payment Entry", "index": 22},
{"doctype": "Lead", "index": 23},
{"doctype": "Opportunity", "index": 24},
{"doctype": "Item Price", "index": 25},
{"doctype": "Purchase Taxes and Charges Template", "index": 26},
{"doctype": "Sales Taxes and Charges", "index": 27},
{"doctype": "Asset", "index": 28},
{"doctype": "Project", "index": 29},
{"doctype": "Task", "index": 30},
{"doctype": "Timesheet", "index": 31},
{"doctype": "Issue", "index": 32},
{"doctype": "Serial No", "index": 33},
{"doctype": "Batch", "index": 34},
{"doctype": "Branch", "index": 35},
{"doctype": "Department", "index": 36},
{"doctype": "Employee Grade", "index": 37},
{"doctype": "Designation", "index": 38},
{"doctype": "Job Opening", "index": 39},
{"doctype": "Job Applicant", "index": 40},
{"doctype": "Job Offer", "index": 41},
{"doctype": "Salary Structure Assignment", "index": 42},
{"doctype": "Appraisal", "index": 43},
{"doctype": "Loan", "index": 44},
{"doctype": "Maintenance Schedule", "index": 45},
{"doctype": "Maintenance Visit", "index": 46},
{"doctype": "Warranty Claim", "index": 47},
],
"Healthcare": [
{'doctype': 'Patient', 'index': 1},
{'doctype': 'Medical Department', 'index': 2},
{'doctype': 'Vital Signs', 'index': 3},
{'doctype': 'Healthcare Practitioner', 'index': 4},
{'doctype': 'Patient Appointment', 'index': 5},
{'doctype': 'Healthcare Service Unit', 'index': 6},
{'doctype': 'Patient Encounter', 'index': 7},
{'doctype': 'Antibiotic', 'index': 8},
{'doctype': 'Diagnosis', 'index': 9},
{'doctype': 'Lab Test', 'index': 10},
{'doctype': 'Clinical Procedure', 'index': 11},
{'doctype': 'Inpatient Record', 'index': 12},
{'doctype': 'Sample Collection', 'index': 13},
{'doctype': 'Patient Medical Record', 'index': 14},
{'doctype': 'Appointment Type', 'index': 15},
{'doctype': 'Fee Validity', 'index': 16},
{'doctype': 'Practitioner Schedule', 'index': 17},
{'doctype': 'Dosage Form', 'index': 18},
{'doctype': 'Lab Test Sample', 'index': 19},
{'doctype': 'Prescription Duration', 'index': 20},
{'doctype': 'Prescription Dosage', 'index': 21},
{'doctype': 'Sensitivity', 'index': 22},
{'doctype': 'Complaint', 'index': 23},
{'doctype': 'Medical Code', 'index': 24},
],
"Education": [
{'doctype': 'Article', 'index': 1},
{'doctype': 'Video', 'index': 2},
{'doctype': 'Topic', 'index': 3},
{'doctype': 'Course', 'index': 4},
{'doctype': 'Program', 'index': 5},
{'doctype': 'Quiz', 'index': 6},
{'doctype': 'Question', 'index': 7},
{'doctype': 'Fee Schedule', 'index': 8},
{'doctype': 'Fee Structure', 'index': 9},
{'doctype': 'Fees', 'index': 10},
{'doctype': 'Student Group', 'index': 11},
{'doctype': 'Student', 'index': 12},
{'doctype': 'Instructor', 'index': 13},
{'doctype': 'Course Activity', 'index': 14},
{'doctype': 'Quiz Activity', 'index': 15},
{'doctype': 'Course Enrollment', 'index': 16},
{'doctype': 'Program Enrollment', 'index': 17},
{'doctype': 'Student Language', 'index': 18},
{'doctype': 'Student Applicant', 'index': 19},
{'doctype': 'Assessment Result', 'index': 20},
{'doctype': 'Assessment Plan', 'index': 21},
{'doctype': 'Grading Scale', 'index': 22},
{'doctype': 'Guardian', 'index': 23},
{'doctype': 'Student Leave Application', 'index': 24},
{'doctype': 'Student Log', 'index': 25},
{'doctype': 'Room', 'index': 26},
{'doctype': 'Course Schedule', 'index': 27},
{'doctype': 'Student Attendance', 'index': 28},
{'doctype': 'Announcement', 'index': 29},
{'doctype': 'Student Category', 'index': 30},
{'doctype': 'Assessment Group', 'index': 31},
{'doctype': 'Student Batch Name', 'index': 32},
{'doctype': 'Assessment Criteria', 'index': 33},
{'doctype': 'Academic Year', 'index': 34},
{'doctype': 'Academic Term', 'index': 35},
{'doctype': 'School House', 'index': 36},
{'doctype': 'Student Admission', 'index': 37},
{'doctype': 'Fee Category', 'index': 38},
{'doctype': 'Assessment Code', 'index': 39},
{'doctype': 'Discussion', 'index': 40},
],
"Agriculture": [
{'doctype': 'Weather', 'index': 1},
{'doctype': 'Soil Texture', 'index': 2},
{'doctype': 'Water Analysis', 'index': 3},
{'doctype': 'Soil Analysis', 'index': 4},
{'doctype': 'Plant Analysis', 'index': 5},
{'doctype': 'Agriculture Analysis Criteria', 'index': 6},
{'doctype': 'Disease', 'index': 7},
{'doctype': 'Crop', 'index': 8},
{'doctype': 'Fertilizer', 'index': 9},
{'doctype': 'Crop Cycle', 'index': 10}
],
"Non Profit": [
{'doctype': 'Certified Consultant', 'index': 1},
{'doctype': 'Certification Application', 'index': 2},
{'doctype': 'Volunteer', 'index': 3},
{'doctype': 'Membership', 'index': 4},
{'doctype': 'Member', 'index': 5},
{'doctype': 'Donor', 'index': 6},
{'doctype': 'Chapter', 'index': 7},
{'doctype': 'Grant Application', 'index': 8},
{'doctype': 'Volunteer Type', 'index': 9},
{'doctype': 'Donor Type', 'index': 10},
{'doctype': 'Membership Type', 'index': 11}
],
"Hospitality": [
{'doctype': 'Hotel Room', 'index': 0},
{'doctype': 'Hotel Room Reservation', 'index': 1},
{'doctype': 'Hotel Room Pricing', 'index': 2},
{'doctype': 'Hotel Room Package', 'index': 3},
{'doctype': 'Hotel Room Type', 'index': 4}
]
}
additional_timeline_content = {
'*': ['erpnext.telephony.doctype.call_log.call_log.get_linked_call_logs']
}
|
ESS-LLP/erpnext
|
erpnext/hooks.py
|
Python
|
gpl-3.0
| 28,711
|
import tkinter
tk = tkinter.Tk()
tk.title("Bounce")
tk.resizable(0, 0)
# Keep the window on the top
tk.wm_attributes("-topmost", 1)
canvas = tkinter.Canvas(tk, width=500, height=400)
# Remove border. Apparently no effect on Linux, but good on Mac
canvas.configure(bd=0)
# Make the 0 horizontal and vertical line apparent
canvas.configure(highlightthickness=0)
canvas.pack()
ball = canvas.create_oval(10, 10, 25, 25, fill='red')
def handle_timer_event():
canvas.move(ball, 10, 0)
tk.after(100, handle_timer_event)
handle_timer_event()
tk.mainloop()
|
amosnier/python_for_kids
|
course_code/13_039_animated_ball.py
|
Python
|
gpl-3.0
| 562
|
# -*- coding: utf-8 -*-
"""
Connection module
"""
from pyxmn.utils import support
# from psycopg2.pool import SimpleConnectionPool
import psycopg2.extras # load the psycopg extras module
class Pool():
"""
Connection Pool
"""
settings = None
connection = None
pool = None
@staticmethod
def commit():
(Pool.connection).commit()
@staticmethod
def connect(settings):
"""
Connects to a base
conn: host=localhost dbname=db_name user=postgres
"""
Pool.settings = settings
_conn_string = (
('host=%(HOST)s dbname=%(NAME)s user=%(USER)s ' +
'password=%(PASSWORD)s') %
settings.DATABASE[settings.HOSTNAME]
)
# Pool.conn_string = _conn_string
Pool.connection = psycopg2.connect(
dsn=_conn_string,
connection_factory=psycopg2.extras.NamedTupleConnection)
@staticmethod
def cursor():
cur = Pool.connection.cursor()
dbconf = Pool.settings.DATABASE[Pool.settings.HOSTNAME]
if 'SCHEMA' in dbconf:
Pool.execute(cur, 'SET search_path TO ' + dbconf['SCHEMA'])
return cur
@staticmethod
def execute(cursor, statement, arg=()):
"""
"""
if Pool.settings.DEBUG:
try:
support.log(statement % arg)
except:
pass
cursor.execute(statement, arg)
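# Usage sketch (hypothetical: "settings" carries HOSTNAME, DEBUG and a DATABASE
# dict keyed by hostname, as connect() above expects):
#
#     Pool.connect(settings)
#     cur = Pool.cursor()
#     Pool.execute(cur, 'SELECT * FROM mytable WHERE id = %s', (42,))
#     rows = cur.fetchall()
#     Pool.commit()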
|
xmnlab/pyxmn
|
db/conn.py
|
Python
|
gpl-3.0
| 1,460
|
def progress(current, total, percent=10, iteration=None):
"""
Used in a loop to indicate progress
"""
current += 1
if current:
previous = current - 1
else:
previous = current
    # trigger once every `percent` percent of the total
frac = percent/100.
value = max(1, frac*total)
return not (int(current/value) == int(previous/value))
if __name__ == "__main__":
for i in range(17):
print(i)
if progress(i, 17):
print(r"Another 10% completed")
|
CDNoyes/EDL-Py
|
Utils/progress.py
|
Python
|
gpl-3.0
| 521
|
#!/usr/bin/env python3
# dirtool.py - diff tool for directories
# Copyright (C) 2018 Ingo Ruhnke <grumbel@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import signal
import sys
from PyQt5.QtCore import QCoreApplication, QFileSystemWatcher
def directory_changed(path):
print("directory_changed: {}".format(path))
def file_changed(path):
print("file_changed: {}".format(path))
def main(argv):
signal.signal(signal.SIGINT, signal.SIG_DFL)
app = QCoreApplication([])
watcher = QFileSystemWatcher()
print("Watching /tmp/")
watcher.addPath("/tmp/")
watcher.addPath("/tmp/foo")
# Files have to be watched specifically for this to trigger.
# Deleting and recreating a file makes this no longer trigger.
watcher.fileChanged.connect(file_changed)
# This triggers on file creation and deletion
watcher.directoryChanged.connect(directory_changed)
print("files:", watcher.files())
print("directories:", watcher.directories())
sys.exit(app.exec())
if __name__ == "__main__":
main(sys.argv)
# EOF #
|
Grumbel/dirtool
|
experiments/qnotify/qnotify.py
|
Python
|
gpl-3.0
| 1,673
|
import unittest
import pandas as pd
from pandas_schema import Column
from pandas_schema.validation import CanConvertValidation, LeadingWhitespaceValidation, TrailingWhitespaceValidation
class SingleValidationColumn(unittest.TestCase):
"""
Test a column with one single validation
"""
NAME = 'col1'
col = Column(NAME, [CanConvertValidation(int)], allow_empty=False)
ser = pd.Series([
'a',
'b',
'c'
])
def test_name(self):
self.assertEqual(self.col.name, self.NAME, 'A Column does not store its name correctly')
def test_outputs(self):
results = self.col.validate(self.ser)
self.assertEqual(len(results), len(self.ser), 'A Column produces the wrong number of errors')
        for i in range(len(self.ser)):
self.assertTrue(any([r.row == i for r in results]), 'A Column does not report errors for every row')
class DoubleValidationColumn(unittest.TestCase):
"""
Test a column with two different validations
"""
NAME = 'col1'
col = Column(NAME, [TrailingWhitespaceValidation(), LeadingWhitespaceValidation()], allow_empty=False)
ser = pd.Series([
' a ',
' b ',
' c '
])
def test_outputs(self):
results = self.col.validate(self.ser)
# There should be 6 errors, 2 for each row
self.assertEqual(len(results), 2 * len(self.ser), 'A Column produces the wrong number of errors')
        for i in range(len(self.ser)):
in_row = [r for r in results if r.row == i]
self.assertEqual(len(in_row), 2, 'A Column does not report both errors for every row')
class AllowEmptyColumn(unittest.TestCase):
"""
Test a column with one single validation that allows empty columns
"""
NAME = 'col1'
col = Column(NAME, [CanConvertValidation(int)], allow_empty=True)
ser = pd.Series([
'',
])
def test_outputs(self):
results = self.col.validate(self.ser)
self.assertEqual(len(results), 0, 'allow_empty is not allowing empty columns')
|
TMiguelT/PandasSchema
|
test/test_column.py
|
Python
|
gpl-3.0
| 2,051
|
#!/usr/bin/env python
# Copyright (c) 2010 SubDownloader Developers - See COPYING - GPLv3
import languages.autodetect_lang as autodetect_lang
import re
import os.path
import logging
log = logging.getLogger("subdownloader.languages.Languages")
import __builtin__
__builtin__._ = lambda x : x
LANGUAGES = [{'locale':'sq', 'ISO639': 'sq', 'SubLanguageID': 'alb', 'LanguageName': _('Albanian')},
{'locale':'ar', 'ISO639': 'ar', 'SubLanguageID': 'ara', 'LanguageName': _('Arabic')},
{'locale':'hy', 'ISO639': 'hy', 'SubLanguageID': 'arm', 'LanguageName': _('Armenian')},
{'locale':'ms', 'ISO639': 'ms', 'SubLanguageID': 'may', 'LanguageName': _('Malay')},
{'locale':'bs', 'ISO639': 'bs', 'SubLanguageID': 'bos', 'LanguageName': _('Bosnian')},
{'locale':'bg', 'ISO639': 'bg', 'SubLanguageID': 'bul', 'LanguageName': _('Bulgarian')},
{'locale':'ca', 'ISO639': 'ca', 'SubLanguageID': 'cat', 'LanguageName': _('Catalan')},
{'locale':'eu', 'ISO639': 'eu', 'SubLanguageID': 'eus', 'LanguageName': _('Basque')},
{'locale':'zh_CN', 'ISO639': 'zh', 'SubLanguageID': 'chi', 'LanguageName': _('Chinese (China)')},
{'locale':'hr', 'ISO639': 'hr', 'SubLanguageID': 'hrv', 'LanguageName': _('Croatian')},
{'locale':'cs', 'ISO639': 'cs', 'SubLanguageID': 'cze', 'LanguageName': _('Czech')},
{'locale':'da', 'ISO639': 'da', 'SubLanguageID': 'dan', 'LanguageName': _('Danish')},
{'locale':'nl', 'ISO639': 'nl', 'SubLanguageID': 'dut', 'LanguageName': _('Dutch')},
{'locale':'en', 'ISO639': 'en', 'SubLanguageID': 'eng', 'LanguageName': _('English (US)')},
{'locale':'en_GB', 'ISO639': 'en', 'SubLanguageID': 'bre', 'LanguageName': _('English (UK)')},
{'locale':'eo', 'ISO639': 'eo', 'SubLanguageID': 'epo', 'LanguageName': _('Esperanto')},
{'locale':'et', 'ISO639': 'et', 'SubLanguageID': 'est', 'LanguageName': _('Estonian')},
{'locale':'fi', 'ISO639': 'fi', 'SubLanguageID': 'fin', 'LanguageName': _('Finnish')},
{'locale':'fr', 'ISO639': 'fr', 'SubLanguageID': 'fre', 'LanguageName': _('French')},
{'locale':'gl', 'ISO639': 'gl', 'SubLanguageID': 'glg', 'LanguageName': _('Galician')},
{'locale':'ka', 'ISO639': 'ka', 'SubLanguageID': 'geo', 'LanguageName': _('Georgian')},
{'locale':'de', 'ISO639': 'de', 'SubLanguageID': 'ger', 'LanguageName': _('German')},
{'locale':'el', 'ISO639': 'el', 'SubLanguageID': 'ell', 'LanguageName': _('Greek')},
{'locale':'he', 'ISO639': 'he', 'SubLanguageID': 'heb', 'LanguageName': _('Hebrew')},
{'locale':'hu', 'ISO639': 'hu', 'SubLanguageID': 'hun', 'LanguageName': _('Hungarian')},
{'locale':'id', 'ISO639': 'id', 'SubLanguageID': 'ind', 'LanguageName': _('Indonesian')},
{'locale':'it', 'ISO639': 'it', 'SubLanguageID': 'ita', 'LanguageName': _('Italian')},
{'locale':'ja', 'ISO639': 'ja', 'SubLanguageID': 'jpn', 'LanguageName': _('Japanese')},
{'locale':'kk', 'ISO639': 'kk', 'SubLanguageID': 'kaz', 'LanguageName': _('Kazakh')},
{'locale':'ko', 'ISO639': 'ko', 'SubLanguageID': 'kor', 'LanguageName': _('Korean')},
{'locale':'lv', 'ISO639': 'lv', 'SubLanguageID': 'lav', 'LanguageName': _('Latvian')},
{'locale':'lt', 'ISO639': 'lt', 'SubLanguageID': 'lit', 'LanguageName': _('Lithuanian')},
{'locale':'lb', 'ISO639': 'lb', 'SubLanguageID': 'ltz', 'LanguageName': _('Luxembourgish')},
{'locale':'mk', 'ISO639': 'mk', 'SubLanguageID': 'mac', 'LanguageName': _('Macedonian')},
{'locale':'no', 'ISO639': 'no', 'SubLanguageID': 'nor', 'LanguageName': _('Norwegian')},
{'locale':'fa', 'ISO639': 'fa', 'SubLanguageID': 'per', 'LanguageName': _('Persian')},
{'locale':'pl', 'ISO639': 'pl', 'SubLanguageID': 'pol', 'LanguageName': _('Polish')},
{'locale':'pt_PT', 'ISO639': 'pt', 'SubLanguageID': 'por', 'LanguageName': _('Portuguese (Portugal)')},
{'locale':'pt_BR', 'ISO639': 'pb', 'SubLanguageID': 'pob', 'LanguageName': _('Portuguese (Brazil)')},
{'locale':'ro', 'ISO639': 'ro', 'SubLanguageID': 'rum', 'LanguageName': _('Romanian')},
{'locale':'ru', 'ISO639': 'ru', 'SubLanguageID': 'rus', 'LanguageName': _('Russian')},
{'locale':'sr', 'ISO639': 'sr', 'SubLanguageID': 'scc', 'LanguageName': _('Serbian')},
{'locale':'sk', 'ISO639': 'sk', 'SubLanguageID': 'slo', 'LanguageName': _('Slovak')},
{'locale':'sl', 'ISO639': 'sl', 'SubLanguageID': 'slv', 'LanguageName': _('Slovenian')},
{'locale':'es_ES', 'ISO639': 'es', 'SubLanguageID': 'spa', 'LanguageName': _('Spanish (Spain)')},
{'locale':'sv', 'ISO639': 'sv', 'SubLanguageID': 'swe', 'LanguageName': _('Swedish')},
{'locale':'th', 'ISO639': 'th', 'SubLanguageID': 'tha', 'LanguageName': _('Thai')},
{'locale':'tr', 'ISO639': 'tr', 'SubLanguageID': 'tur', 'LanguageName': _('Turkish')},
{'locale':'uk', 'ISO639': 'uk', 'SubLanguageID': 'ukr', 'LanguageName': _('Ukrainian')},
{'locale':'vi', 'ISO639': 'vi', 'SubLanguageID': 'vie', 'LanguageName': _('Vietnamese')}]
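# Helper naming convention used below: xx = two-letter ISO639 code,
# xxx = three-letter SubLanguageID, locale = locale tag (e.g. 'pt_BR'),
# name = human-readable LanguageName.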
def ListAll_xx():
temp = []
for lang in LANGUAGES:
temp.append(lang['ISO639'])
return temp
def ListAll_xxx():
temp = []
for lang in LANGUAGES:
temp.append(lang['SubLanguageID'])
return temp
def ListAll_locale():
temp = []
for lang in LANGUAGES:
temp.append(lang['locale'])
return temp
def ListAll_names():
temp = []
for lang in LANGUAGES:
temp.append(lang['LanguageName'])
return temp
def xx2xxx(xx):
for lang in LANGUAGES:
if lang['ISO639'] == xx:
return lang['SubLanguageID']
def xxx2xx(xxx):
for lang in LANGUAGES:
if lang['SubLanguageID'] == xxx:
return lang['ISO639']
def xxx2name(xxx):
for lang in LANGUAGES:
if lang['SubLanguageID'] == xxx:
return lang['LanguageName']
def locale2name(locale):
for lang in LANGUAGES:
if lang['locale'] == locale:
return lang['LanguageName']
def xx2name(xx):
for lang in LANGUAGES:
if lang['ISO639'] == xx:
return lang['LanguageName']
def name2xx(name):
for lang in LANGUAGES:
if lang['LanguageName'].lower() == name.lower():
return lang['ISO639']
def name2xxx(name):
for lang in LANGUAGES:
if lang['LanguageName'].lower() == name.lower():
return lang['SubLanguageID']
def CleanTagsFile(text):
p = re.compile( '<.*?>')
return p.sub('',text)
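# Optional micro-optimization (a sketch, not in the original module): the
# conversion helpers above scan LANGUAGES linearly on every call. Precomputed
# tables keep the same first-match-wins semantics with O(1) lookups.
_XX2XXX = {}
_XXX2NAME = {}
for _lang in LANGUAGES:
    _XX2XXX.setdefault(_lang['ISO639'], _lang['SubLanguageID'])
    _XXX2NAME.setdefault(_lang['SubLanguageID'], _lang['LanguageName'])
def xx2xxx_fast(xx):
    return _XX2XXX.get(xx)
def xxx2name_fast(xxx):
    return _XXX2NAME.get(xxx)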
|
matachi/subdownloader
|
languages/Languages.py
|
Python
|
gpl-3.0
| 6,237
|
# -*- coding: utf-8 -*-
from patient_evaluation_report import *
|
kret0s/gnuhealth-live
|
tryton/server/trytond-3.8.3/trytond/modules/health_history/report/__init__.py
|
Python
|
gpl-3.0
| 66
|
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
# ------------------------------------------------------------
import re
from core import httptools
from core import logger
from core import scrapertools
from core.item import Item
HOST = "http://documentales-online.com/"
def mainlist(item):
logger.info()
itemlist = list()
itemlist.append(Item(channel=item.channel, title="Novedades", action="listado", url=HOST))
itemlist.append(Item(channel=item.channel, title="Destacados", action="seccion", url=HOST, extra="destacados"))
itemlist.append(Item(channel=item.channel, title="Series Destacadas", action="seccion", url=HOST, extra="series"))
# itemlist.append(Item(channel=item.channel, title="Top 100", action="categorias", url=HOST))
# itemlist.append(Item(channel=item.channel, title="Populares", action="categorias", url=HOST))
itemlist.append(Item(channel=item.channel, title="Buscar por:"))
itemlist.append(Item(channel=item.channel, title=" Título", action="search"))
itemlist.append(Item(channel=item.channel, title=" Categorías", action="categorias", url=HOST))
# itemlist.append(Item(channel=item.channel, title=" Series y Temas", action="categorias", url=HOST))
return itemlist
def seccion(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}|-\s", "", data)
if item.extra == "destacados":
patron_seccion = '<h4 class="widget-title">Destacados</h4><div class="textwidget"><ul>(.*?)</ul>'
action = "findvideos"
else:
patron_seccion = '<h4 class="widget-title">Series destacadas</h4><div class="textwidget"><ul>(.*?)</ul>'
action = "listado"
data = scrapertools.find_single_match(data, patron_seccion)
matches = re.compile('<a href="([^"]+)">(.*?)</a>', re.DOTALL).findall(data)
aux_action = action
for url, title in matches:
if item.extra != "destacados" and "Cosmos (Carl Sagan)" in title:
action = "findvideos"
else:
action = aux_action
itemlist.append(item.clone(title=title, url=url, action=action, fulltitle=title))
return itemlist
def listado(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}|-\s", "", data)
pagination = scrapertools.find_single_match(data, '<div class="older"><a href="([^"]+)"')
if not pagination:
pagination = scrapertools.find_single_match(data, '<span class=\'current\'>\d</span>'
'<a class="page larger" href="([^"]+)">')
patron = '<ul class="sp-grid">(.*?)</ul>'
data = scrapertools.find_single_match(data, patron)
matches = re.compile('<a href="([^"]+)">(.*?)</a>', re.DOTALL).findall(data)
for url, title in matches:
itemlist.append(item.clone(title=title, url=url, action="findvideos", fulltitle=title))
if pagination:
itemlist.append(item.clone(title=">> Página siguiente", url=pagination))
return itemlist
def categorias(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}|-\s", "", data)
data = scrapertools.find_single_match(data, 'a href="#">Categorías</a><ul class="sub-menu">(.*?)</ul>')
matches = re.compile('<a href="([^"]+)">(.*?)</a>', re.DOTALL).findall(data)
for url, title in matches:
itemlist.append(item.clone(title=title, url=url, action="listado", fulltitle=title))
return itemlist
def search(item, texto):
logger.info()
texto = texto.replace(" ", "+")
try:
item.url = HOST + "?s=%s" % texto
return listado(item)
# Catch the exception so a failing channel does not interrupt the global search
except:
import sys
for line in sys.exc_info():
logger.error("%s" % line)
return []
def findvideos(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}|-\s", "", data)
if item.fulltitle == "Cosmos (Carl Sagan)":
matches = scrapertools.find_multiple_matches(data, '<p><strong>(.*?)</strong><br /><iframe.+?src="(https://www\.youtube\.com/[^?]+)')
for title, url in matches:
new_item = item.clone(title=title, url=url)
from core import servertools
aux_itemlist = servertools.find_video_items(new_item)
for videoitem in aux_itemlist:
videoitem.title = new_item.title
videoitem.fulltitle = new_item.title
videoitem.channel = item.channel
# videoitem.thumbnail = item.thumbnail
itemlist.extend(aux_itemlist)
else:
data = scrapertools.find_multiple_matches(data, '<iframe.+?src="(https://www\.youtube\.com/[^?]+)')
from core import servertools
itemlist.extend(servertools.find_video_items(data=",".join(data)))
for videoitem in itemlist:
videoitem.fulltitle = item.fulltitle
videoitem.channel = item.channel
# videoitem.thumbnail = item.thumbnail
return itemlist
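# Illustrative call flow (hypothetical Item values; a sketch, not part of the
# original channel):
#   items = mainlist(Item(channel='documentalesonline'))   # top-level menu
#   docs = listado(items[0])      # scrapes the sp-grid list, follows pagination
#   videos = findvideos(docs[0])  # extracts YouTube iframes via servertools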
|
r0balo/pelisalacarta
|
python/main-classic/channels/documentalesonline.py
|
Python
|
gpl-3.0
| 5,391
|
#!/usr/bin/python3
import sys, os, shutil
from os import path
from urllib.request import pathname2url
import subprocess
from subprocess import call
import re
import zipfile
import config
os.chdir(config.root_dir)
SUPPORTED_OPERATING_SYSTEMS = ('windows_x64', 'linux_x64', 'mac')#, 'linux-arm32', 'linux-arm64')
def make_dir(dir_path):
"""
make_dir(dir_path)
creates a directory if it does not already exist, including parent
directories.
dir_path - directory to create
"""
if not path.exists(dir_path):
os.makedirs(dir_path)
def make_parent_dir(file_path):
"""
make_parent_dir(file_path)
Creates the parent directory for the specified filepath if it does not
already exist.
file_path - path to some file
"""
parent_dir = path.dirname(file_path)
if parent_dir == '': # means parent is working directory
return
if not path.isdir(parent_dir):
os.makedirs(parent_dir)
def _del(filepath):
"""
Deletes a file or recursively deletes a directory. Use with caution.
"""
if(path.isdir(filepath)):
for f in os.listdir(filepath):
_del(path.join(filepath,f))
os.rmdir(filepath)
elif(path.exists(filepath)):
os.remove(filepath)
def del_file(filepath):
    """
    del_file(filepath):
    Deletes a file or recursively deletes a directory. Use with caution.
    filepath - path to file or directory to delete
    """
    _del(filepath)
def del_contents(dirpath):
"""
del_contents(dirpath)
Recursively deletes the contents of a directory, but not the directory itself
dirpath - path to directory to clean-out
"""
if(path.isdir(dirpath)):
for f in os.listdir(dirpath):
del_file(path.join(dirpath,f))
def list_files(dirpath):
"""
list_filetree(dirpath)
Returns a list of all files inside a directory (recursive scan)
dirpath - filepath of directory to scan
"""
if(type(dirpath) == str):
dir_list = [dirpath]
else:
dir_list = dirpath
file_list = []
for _dir_ in dir_list:
for base, directories, files in os.walk(_dir_):
for f in files:
file_list.append(path.join(base,f))
return file_list
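# Example (illustrative): list_files(['src', 'res']) walks both trees and
# returns every file path found; a plain string argument is treated as a
# single root directory.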
def safe_quote_string(text):
"""
safe_quote_string(text)
returns the text in quotes, with escapes for any quotes in the text itself
text - input text to quote
returns: text in quotes with escapes
"""
if os.sep != '\\':
text2 = text.replace('\\', '\\\\')
text3 = text2.replace('"', '\\"')
else:
text3 = text.replace('\\', '/')
# windows does not allow " in file names anyway
return '"'+text3+'"'
def copy_tree(file_list, src_root, dest_root):
"""
copy_tree(file_list, src_root, dest_root)
Copies all files to directory dest_root (creating it if necessary),
preserving the folder structure relative to src_root
"""
for f in file_list:
rel_path = path.relpath(f, src_root)
dst_path = path.join(dest_root, rel_path)
make_parent_dir(dst_path)
shutil.copy(f, dst_path)
def zip_dir(dir_path, zip_path):
print('\nzipping %s to %s\n' % (dir_path, zip_path))
with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
# zipf is zipfile handle
for root, dirs, files in os.walk(dir_path):
for file in files:
fname = path.basename(dir_path)
src_file = path.join(root, file)
dst_file = path.join(fname, path.relpath(src_file, dir_path) )
zipf.write(src_file, arcname=dst_file)
# done
# make dirs
make_dir(config.local_cache_dir)
make_dir(config.compile_dir)
make_dir(config.jar_dir)
make_dir(config.deploy_dir)
make_dir(config.deploy_image_dir)
make_dir(config.run_dir)
make_dir(config.src_dir)
make_dir(config.resource_dir)
# clean
del_contents(config.run_dir)
del_contents(config.jar_dir)
del_contents(config.compile_dir)
del_contents(config.deploy_image_dir)
del_contents(config.deploy_dir)
# compile (with jmods)
for release_OS in SUPPORTED_OPERATING_SYSTEMS:
print('\n',release_OS,'\n')
module_src_path = path.join(config.src_dir, config.module_name)
if(release_OS == 'windows_x64'):
#java_home = 'D:\\CCHall\\Documents\\Programming\\OpenJDK_Distros\\windows-x64\\jdk-13.0.1'
jmod_dirs = [path.join('jmods','windows')] #[path.join(java_home,'jmods')] + config.jmod_dirs_windows_x64
elif(release_OS == 'linux_x64'):
#java_home = 'D:\\CCHall\\Documents\\Programming\\OpenJDK_Distros\\linux-x64\\jdk-13.0.1'
jmod_dirs = [path.join('jmods','linux')] #[path.join(java_home,'jmods')] + config.jmod_dirs_linux_x64
elif(release_OS == 'mac'):
#java_home = 'D:\\CCHall\\Documents\\Programming\\OpenJDK_Distros\\osx-x64\\jdk-13.0.1'
jmod_dirs = [path.join('jmods','mac')] #[path.join(java_home,'jmods')] + config.jmod_dirs_mac
    else:
        print('UNSUPPORTED OS: %s' % release_OS)
        continue  # do not fall through with the previous OS's jmod_dirs
arg_file = path.join(config.local_cache_dir, 'javac-args.txt')
command_list = []
command_list += ['-encoding', 'utf8']
command_list += ['-d', config.compile_dir]
command_list += ['--module-source-path', config.src_dir]
command_list += ['--module', config.module_name]
module_paths = jmod_dirs + [f for f in list_files(config.dependency_dirs) if str(f).endswith('.jar')] # a .jmod file is auto-discoverable by --module-path
command_list += ['--module-path', os.pathsep.join(module_paths)]
with open(arg_file, 'w') as fout:
file_content = ' '.join(map(safe_quote_string, command_list))
fout.write(file_content)
print('@%s: %s' % (arg_file, file_content))
call([config.javac_exec, '@'+str(arg_file)], cwd=config.root_dir)
print()
# need to copy resources separately
copy_tree(
list_files(config.resource_dir),
config.src_dir,
config.compile_dir
)
copy_tree(
[f for f in list_files(module_src_path) if str(f).endswith('.java') == False],
config.src_dir,
config.compile_dir
)
# jlink
arg_file = path.join(config.local_cache_dir, 'jlink-args.txt')
command_list = []
command_list += ['--module-path', os.pathsep.join(module_paths + [config.compile_dir])]
command_list += ['--add-modules', config.module_name]
image_dir = path.join(config.deploy_image_dir, release_OS, config.module_name)
command_list += ['--launcher', 'launch=%s/%s' % (config.module_name, config.main_class)]
command_list += ['--output', image_dir]
with open(arg_file, 'w') as fout:
file_content = ' '.join(map(safe_quote_string, command_list))
fout.write(file_content)
print('@%s: %s' % (arg_file, file_content))
call([config.jlink_exec, '@'+str(arg_file)], cwd=config.root_dir)
# launcher
if release_OS == 'windows_x64':
with open(path.join(image_dir, 'launch_%s.bat' % config.module_title),'w') as fout:
fout.write('"%~dp0\\bin\\launch.bat"\r\n')
if release_OS == 'linux_x64':
with open(path.join(image_dir, 'launch_%s.sh' % config.module_title),'w') as fout:
fout.write('#!/bin/bash\ncd "`dirname "$0"`"\n./bin/launch\n')
if release_OS == 'mac':
with open(path.join(image_dir, 'launch_%s.sh' % config.module_title),'w') as fout:
fout.write('#!/bin/sh\ncd "`dirname "$0"`"\n./bin/launch\n')
# package images
named_dir = path.join(config.deploy_image_dir, release_OS, config.module_title)
zip_file = path.join(config.deploy_image_dir, '%s_%s.zip' % (config.module_title, release_OS))
shutil.move(image_dir, named_dir)
zip_dir(dir_path=named_dir, zip_path=zip_file)
|
cyanobacterium/Cyanos-Planet-Factory
|
deploy.py
|
Python
|
gpl-3.0
| 7,423
|
# Authors:
# Jason Gerard DeRose <jderose@redhat.com>
#
# Copyright (C) 2009 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Defers gettext translation till request time.
IPA presents some tricky gettext challenges. On the one hand, most translatable
message are defined as class attributes on the plugins, which means these get
evaluated at module-load time. But on the other hand, each request to the
server can be in a different locale, so the actual translation must not occur
till request time.
The `text` module provides a mechanism for deferred gettext translation. It
was designed to:
1. Allow translatable strings to be marked with the usual ``_()`` and
``ngettext()`` functions so that standard tools like xgettext can still
be used
2. Allow programmers to mark strings in a natural way without burdening them
with details of the deferred translation mechanism
A typical plugin will use the deferred translation like this:
>>> from ipalib import Command, _, ngettext
>>> class my_plugin(Command):
... my_string = _('Hello, %(name)s.')
... my_plural = ngettext('%(count)d goose', '%(count)d geese', 0)
...
With normal gettext usage, the *my_string* and *my_plural* messages would be
translated at module-load-time when your ``my_plugin`` class is defined. This
would mean that all messages are translated in the locale of the server rather
than the locale of the request.
However, the ``_()`` function above is actually a `GettextFactory` instance,
which when called returns a `Gettext` instance. A `Gettext` instance stores the
message to be translated, and the gettext domain and localedir, but it doesn't
perform the translation till `Gettext.__unicode__()` is called. For example:
>>> my_plugin.my_string
Gettext('Hello, %(name)s.', domain='ipa', localedir=None)
>>> unicode(my_plugin.my_string)
u'Hello, %(name)s.'
Translation can also be performed via the `Gettext.__mod__()` convenience
method. For example, these two are equivalent:
>>> my_plugin.my_string % dict(name='Joe')
u'Hello, Joe.'
>>> unicode(my_plugin.my_string) % dict(name='Joe') # Long form
u'Hello, Joe.'
Similar to ``_()``, the ``ngettext()`` function above is actually an
`NGettextFactory` instance, which when called returns an `NGettext` instance.
An `NGettext` instance stores the singular and plural messages, and the gettext
domain and localedir, but it doesn't perform the translation till
`NGettext.__call__()` is called. For example:
>>> my_plugin.my_plural
NGettext('%(count)d goose', '%(count)d geese', domain='ipa', localedir=None)
>>> my_plugin.my_plural(1)
u'%(count)d goose'
>>> my_plugin.my_plural(2)
u'%(count)d geese'
Translation can also be performed via the `NGettext.__mod__()` convenience
method. For example, these two are equivalent:
>>> my_plugin.my_plural % dict(count=1)
u'1 goose'
>>> my_plugin.my_plural(1) % dict(count=1) # Long form
u'1 goose'
Lastly, 3rd-party plugins can create factories bound to a different gettext
domain. The default domain is ``'ipa'``, which is also the domain of the
standard ``ipalib._()`` and ``ipalib.ngettext()`` factories. But 3rd-party
plugins can create their own factories like this:
>>> from ipalib import GettextFactory, NGettextFactory
>>> _ = GettextFactory(domain='ipa_foo')
>>> ngettext = NGettextFactory(domain='ipa_foo')
>>> class foo(Command):
... msg1 = _('Foo!')
... msg2 = ngettext('%(count)d bar', '%(count)d bars', 0)
...
Notice that these messages are bound to the ``'ipa_foo'`` domain:
>>> foo.msg1
Gettext('Foo!', domain='ipa_foo', localedir=None)
>>> foo.msg2
NGettext('%(count)d bar', '%(count)d bars', domain='ipa_foo', localedir=None)
For additional details, see `GettextFactory` and `Gettext`, and for plural
forms, see `NGettextFactory` and `NGettext`.
"""
import threading
import locale
import gettext
import six
from ipalib.request import context
def create_translation(key):
assert key not in context.__dict__
(domain, localedir) = key
translation = gettext.translation(domain,
localedir=localedir,
languages=getattr(context, 'languages', None),
fallback=True,
)
context.__dict__[key] = translation
return translation
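# Note: the resolved gettext translation is cached on the per-request
# `context` object keyed by (domain, localedir), so the language negotiation
# above runs at most once per request and key.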
class LazyText(object):
"""
Base class for deferred translation.
This class is not used directly. See the `Gettext` and `NGettext`
subclasses.
Concatenating LazyText objects with the + operator gives
ConcatenatedLazyText objects.
"""
__slots__ = ('domain', 'localedir', 'key', 'args')
def __init__(self, domain=None, localedir=None):
"""
Initialize.
:param domain: The gettext domain in which this message will be
translated, e.g. ``'ipa'`` or ``'ipa_3rd_party'``; default is
``None``
:param localedir: The directory containing the gettext translations,
e.g. ``'/usr/share/locale/'``; default is ``None``, in which case
gettext will use the default system locale directory.
"""
self.domain = domain
self.localedir = localedir
self.key = (domain, localedir)
self.args = None
def __eq__(self, other):
"""
Return ``True`` if this instance is equal to *other*.
Note that this method cannot be used on the `LazyText` base class itself
as subclasses must define an *args* instance attribute.
"""
if type(other) is not self.__class__:
return False
return self.args == other.args
def __ne__(self, other):
"""
Return ``True`` if this instance is not equal to *other*.
Note that this method cannot be used on the `LazyText` base class itself
as subclasses must define an *args* instance attribute.
"""
return not self.__eq__(other)
def __add__(self, other):
return ConcatenatedLazyText(self) + other
def __radd__(self, other):
return other + ConcatenatedLazyText(self)
@six.python_2_unicode_compatible
class Gettext(LazyText):
"""
Deferred translation using ``gettext.ugettext()``.
Normally the `Gettext` class isn't used directly and instead is created via
a `GettextFactory` instance. However, for illustration, we can create one
like this:
>>> msg = Gettext('Hello, %(name)s.')
When you create a `Gettext` instance, the message is stored on the *msg*
attribute:
>>> msg.msg
'Hello, %(name)s.'
No translation is performed till `Gettext.__unicode__()` is called. This
will translate *msg* using ``gettext.ugettext()``, which will return the
translated string as a Python ``unicode`` instance. For example:
>>> unicode(msg)
u'Hello, %(name)s.'
`Gettext.__unicode__()` should be called at request time, which in a
nutshell means it should be called from within your plugin's
``Command.execute()`` method. `Gettext.__unicode__()` will perform the
translation based on the locale of the current request.
`Gettext.__mod__()` is a convenience method for Python "percent" string
formatting. It will translate your message using `Gettext.__unicode__()`
and then perform the string substitution on the translated message. For
example, these two are equivalent:
>>> msg % dict(name='Joe')
u'Hello, Joe.'
>>> unicode(msg) % dict(name='Joe') # Long form
u'Hello, Joe.'
See `GettextFactory` for additional details. If you need to pick between
singular and plural form, use `NGettext` instances via the
`NGettextFactory`.
"""
__slots__ = ('msg',)
def __init__(self, msg, domain=None, localedir=None):
super(Gettext, self).__init__(domain, localedir)
self.msg = msg
self.args = (msg, domain, localedir)
def __repr__(self):
return '%s(%r, domain=%r, localedir=%r)' % (self.__class__.__name__,
self.msg, self.domain, self.localedir)
def __str__(self):
"""
Translate this message and return as a ``unicode`` instance.
"""
if self.key in context.__dict__:
g = context.__dict__[self.key].ugettext
else:
g = create_translation(self.key).ugettext
return g(self.msg)
def __json__(self):
return self.__unicode__() #pylint: disable=no-member
def __mod__(self, kw):
return self.__unicode__() % kw #pylint: disable=no-member
@six.python_2_unicode_compatible
class FixMe(Gettext):
"""
Non-translated place-holder for UI labels.
`FixMe` is a subclass of `Gettext` and is used for automatically created
place-holder labels. It generally behaves exactly like `Gettext` except no
translation is ever performed.
`FixMe` allows programmers to get plugins working without first filling in
all the labels that will ultimately be required, while at the same time it
creates conspicuous looking UI labels that remind the programmer to
"fix-me!". For example, the typical usage would be something like this:
>>> class Plugin(object):
... label = None
... def __init__(self):
... self.name = self.__class__.__name__
... if self.label is None:
... self.label = FixMe(self.name + '.label')
... assert isinstance(self.label, Gettext)
...
>>> class user(Plugin):
... pass # Oops, we didn't set user.label yet
...
>>> u = user()
>>> u.label
FixMe('user.label')
Note that as `FixMe` is a subclass of `Gettext`, is passes the above type
check using ``isinstance()``.
Calling `FixMe.__unicode__()` performs no translation, but instead returns
said conspicuous looking label:
>>> unicode(u.label)
u'<user.label>'
For more examples of how `FixMe` is used, see `ipalib.parameters`.
"""
__slots__ = tuple()
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, self.msg)
def __str__(self):
return u'<%s>' % self.msg
class NGettext(LazyText):
"""
Deferred translation for plural forms using ``gettext.ungettext()``.
Normally the `NGettext` class isn't used directly and instead is created via
an `NGettextFactory` instance. However, for illustration, we can create one
like this:
>>> msg = NGettext('%(count)d goose', '%(count)d geese')
When you create an `NGettext` instance, the singular and plural forms of
your message are stored on the *singular* and *plural* instance attributes:
>>> msg.singular
'%(count)d goose'
>>> msg.plural
'%(count)d geese'
The translation and number selection isn't performed till
`NGettext.__call__()` is called. This will translate and pick the correct
number using ``gettext.ungettext()``. As a callable, an `NGettext` instance
takes a single argument, an integer specifying the count. For example:
>>> msg(0)
u'%(count)d geese'
>>> msg(1)
u'%(count)d goose'
>>> msg(2)
u'%(count)d geese'
`NGettext.__mod__()` is a convenience method for Python "percent" string
formatting. It can only be used if your substitution ``dict`` contains the
count in a ``'count'`` item. For example:
>>> msg % dict(count=0)
u'0 geese'
>>> msg % dict(count=1)
u'1 goose'
>>> msg % dict(count=2)
u'2 geese'
Alternatively, these longer forms have the same effect as the three examples
above:
>>> msg(0) % dict(count=0)
u'0 geese'
>>> msg(1) % dict(count=1)
u'1 goose'
>>> msg(2) % dict(count=2)
u'2 geese'
A ``KeyError`` is raised if your substitution ``dict`` doesn't have a
``'count'`` item. For example:
>>> msg2 = NGettext('%(num)d goose', '%(num)d geese')
>>> msg2 % dict(num=0)
Traceback (most recent call last):
...
KeyError: 'count'
However, in this case you can still use the longer, explicit form for string
substitution:
>>> msg2(0) % dict(num=0)
u'0 geese'
See `NGettextFactory` for additional details.
"""
__slots__ = ('singular', 'plural')
def __init__(self, singular, plural, domain=None, localedir=None):
super(NGettext, self).__init__(domain, localedir)
self.singular = singular
self.plural = plural
self.args = (singular, plural, domain, localedir)
def __repr__(self):
return '%s(%r, %r, domain=%r, localedir=%r)' % (self.__class__.__name__,
self.singular, self.plural, self.domain, self.localedir)
def __mod__(self, kw):
count = kw['count']
return self(count) % kw
def __call__(self, count):
if self.key in context.__dict__:
ng = context.__dict__[self.key].ungettext
else:
ng = create_translation(self.key).ungettext
return ng(self.singular, self.plural, count)
@six.python_2_unicode_compatible
class ConcatenatedLazyText(object):
"""Concatenation of multiple strings, or any objects convertible to unicode
Used to concatenate several LazyTexts together.
This allows large strings like help text to be split, so translators
do not have to re-translate the whole text when only a small part changes.
Additional strings may be added to the end with the + or += operators.
"""
def __init__(self, *components):
self.components = list(components)
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, self.components)
def __str__(self):
return u''.join(unicode(c) for c in self.components)
def __json__(self):
return unicode(self)
def __mod__(self, kw):
return unicode(self) % kw
def __add__(self, other):
if isinstance(other, ConcatenatedLazyText):
return ConcatenatedLazyText(*self.components + other.components)
else:
return ConcatenatedLazyText(*self.components + [other])
def __radd__(self, other):
if isinstance(other, ConcatenatedLazyText):
return ConcatenatedLazyText(*other.components + self.components)
else:
return ConcatenatedLazyText(*[other] + self.components)
class GettextFactory(object):
"""
Factory for creating ``_()`` functions.
A `GettextFactory` allows you to mark translatable messages that are
evaluated at initialization time, but deferred their actual translation till
request time.
When you create a `GettextFactory` you can provide a specific gettext
*domain* and *localedir*. By default the *domain* will be ``'ipa'`` and
the *localedir* will be ``None``. Both are available via instance
attributes of the same name. For example:
>>> _ = GettextFactory()
>>> _.domain
'ipa'
>>> _.localedir is None
True
When the *localedir* is ``None``, gettext will use the default system
localedir (typically ``'/usr/share/locale/'``). In general, you should
**not** provide a *localedir*... it is intended only to support in-tree
testing.
Third party plugins will most likely want to use a different gettext
*domain*. For example:
>>> _ = GettextFactory(domain='ipa_3rd_party')
>>> _.domain
'ipa_3rd_party'
When you call your `GettextFactory` instance, it will return a `Gettext`
instance associated with the same *domain* and *localedir*. For example:
>>> my_msg = _('Hello world')
>>> my_msg.domain
'ipa_3rd_party'
>>> my_msg.localedir is None
True
The message isn't translated till `Gettext.__unicode__()` is called, which
should be done during each request. See the `Gettext` class for additional
details.
"""
def __init__(self, domain='ipa', localedir=None):
"""
Initialize.
:param domain: The gettext domain in which this message will be
translated, e.g. ``'ipa'`` or ``'ipa_3rd_party'``; default is
``'ipa'``
:param localedir: The directory containing the gettext translations,
e.g. ``'/usr/share/locale/'``; default is ``None``, in which case
gettext will use the default system locale directory.
"""
self.domain = domain
self.localedir = localedir
def __repr__(self):
return '%s(domain=%r, localedir=%r)' % (self.__class__.__name__,
self.domain, self.localedir)
def __call__(self, msg):
return Gettext(msg, self.domain, self.localedir)
class NGettextFactory(GettextFactory):
"""
Factory for creating ``ngettext()`` functions.
`NGettextFactory` is similar to `GettextFactory`, except `NGettextFactory`
is for plural forms.
So that standard tools like xgettext can find your plural forms, you should
reference your `NGettextFactory` instance using a variable named
*ngettext*. For example:
>>> ngettext = NGettextFactory()
>>> ngettext
NGettextFactory(domain='ipa', localedir=None)
When you call your `NGettextFactory` instance to create a deferred
translation, you provide the *singular* message, the *plural* message, and
a dummy *count*. An `NGettext` instance will be returned. For example:
>>> my_msg = ngettext('%(count)d goose', '%(count)d geese', 0)
>>> my_msg
NGettext('%(count)d goose', '%(count)d geese', domain='ipa', localedir=None)
The *count* is ignored (because the translation is deferred), but you should
still provide it so parsing tools aren't confused. For consistency, it is
recommended to always provide ``0`` for the *count*.
See `NGettext` for details on how the deferred translation is later
performed. See `GettextFactory` for details on setting a different gettext
*domain* (likely needed for 3rd-party plugins).
"""
def __call__(self, singular, plural, count):
return NGettext(singular, plural, self.domain, self.localedir)
# Process wide factories:
_ = GettextFactory()
ngettext = NGettextFactory()
ugettext = _
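# Minimal usage sketch (assumes translations for the 'ipa' domain are
# installed; not part of the original module):
#   msg = _('Hello, %(name)s.')     # deferred; safe at class-definition time
#   msg % dict(name='Joe')          # translated at request time
#   plural = ngettext('%(count)d goose', '%(count)d geese', 0)
#   plural % dict(count=2)          # plural form picked for the request locale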
|
msrb/freeipa
|
ipalib/text.py
|
Python
|
gpl-3.0
| 18,653
|
#!/usr/bin/env python
# Copyright (C) 2008,2011 Lanedo GmbH
#
# Author: Tim Janik
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys, os, re, urllib, csv
pkginstall_configvars = {
'PACKAGE' : 'dummy', 'PACKAGE_NAME' : 'dummy', 'VERSION' : '0.0', 'REVISION' : 'uninstalled',
#@PKGINSTALL_CONFIGVARS_IN24LINES@ # configvars are substituted upon script installation
}
# TODO:
# - support mixing in comments.txt which has "bug# person: task"
bugurls = (
('gb', 'http://bugzilla.gnome.org/buglist.cgi?bug_id='),
('gnome', 'http://bugzilla.gnome.org/buglist.cgi?bug_id='),
('fd', 'https://bugs.freedesktop.org/buglist.cgi?bug_id='),
('freedesktop', 'https://bugs.freedesktop.org/buglist.cgi?bug_id='),
('mb', 'https://bugs.maemo.org/buglist.cgi?bug_id='),
('maemo', 'https://bugs.maemo.org/buglist.cgi?bug_id='),
('nb', 'https://projects.maemo.org/bugzilla/buglist.cgi?bug_id='),
('nokia', 'https://projects.maemo.org/bugzilla/buglist.cgi?bug_id='),
('gcc', 'http://gcc.gnu.org/bugzilla/buglist.cgi?bug_id='),
('libc', 'http://sources.redhat.com/bugzilla/buglist.cgi?bug_id='),
('moz', 'https://bugzilla.mozilla.org/buglist.cgi?bug_id='),
('mozilla', 'https://bugzilla.mozilla.org/buglist.cgi?bug_id='),
('xm', 'http://bugzilla.xamarin.com/buglist.cgi?id='),
('xamarin', 'http://bugzilla.xamarin.com/buglist.cgi?id='),
)
# URL authentication handling
def auth_urls():
import ConfigParser, os, re
cp = ConfigParser.SafeConfigParser()
cp.add_section ('authentication-urls')
cp.set ('authentication-urls', 'urls', '')
cp.read (os.path.expanduser ('~/.urlrc'))
urlstr = cp.get ('authentication-urls', 'urls') # space separated url list
urls = re.split (r"\s+", urlstr.strip()) # list urls
urls = [u for u in urls if u] # strip empty urls
global auth_urls; auth_urls = lambda : urls # cache result for the future
return urls
def add_auth (url):
for ai in auth_urls():
prefix = re.sub ('//[^:/@]*:[^:/@]*@', '//', ai)
if url.startswith (prefix):
pl = len (prefix)
return ai + url[pl:]
return url
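# Illustrative example (hypothetical host; assumes the authenticated URL is
# listed in ~/.urlrc):
#   auth entry: https://user:secret@bugs.example.org
#   add_auth('https://bugs.example.org/buglist.cgi?bug_id=1')
#     -> 'https://user:secret@bugs.example.org/buglist.cgi?bug_id=1'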
# carry out online bug queries
def bug_summaries (buglisturl):
if not buglisturl:
return []
# Bugzilla query to use
query = buglisturl + '&ctype=csv' # buglisturl.replace (',', '%2c')
query = add_auth (query)
f = urllib.urlopen (query)
csvdata = f.read()
f.close()
# read CSV lines
reader = csv.reader (csvdata.splitlines (1))
# parse head to interpret columns
col_bug_id = -1
col_description = -1
header = reader.next()
i = 0
for col in header:
col = col.strip()
if col == 'bug_id':
col_bug_id = i
if col == 'short_short_desc':
col_description = i
elif col_description < 0 and col == 'short_desc':
col_description = i
i = i + 1
if col_bug_id < 0:
print >>sys.stderr, 'Failed to identify bug_id from CSV data'
sys.exit (11)
if col_description < 0:
print >>sys.stderr, 'Failed to identify description columns from CSV data'
sys.exit (12)
# parse bug list
result = []
for row in reader:
bug_number = row[col_bug_id]
description = row[col_description]
result += [ (bug_number, description) ]
return result
# parse bug numbers and list bugs
def read_handle_bugs (config, url):
lines = sys.stdin.read()
# print >>sys.stderr, 'Using bugzilla URL: %s' % (bz, url)
for line in [ lines ]:
# find all bug numbers
bugs = re.findall (r'\b[0-9]+\b', line)
# int-convert, dedup and sort bug numbers
ibugs = []
if bugs:
bught = {}
for b in bugs:
b = int (b)
if not b or bught.has_key (b): continue
bught[b] = True
ibugs += [ b ]
del bugs
if config.get ('sort', False):
ibugs.sort()
# construct full query URL
fullurl = url + ','.join ([str (b) for b in ibugs])
# print fullurl
if len (ibugs) and config.get ('show-query', False):
print fullurl
# print bug summaries
if len (ibugs) and config.get ('show-list', False):
bught = {}
for bug in bug_summaries (fullurl):
bught[int (bug[0])] = bug[1] # bug summaries can have random order
for bugid in ibugs: # print bugs in user provided order
iid = int (bugid)
if bught.has_key (iid):
desc = bught[iid]
if len (desc) >= 70:
desc = desc[:67].rstrip() + '...'
print "% 7u - %s" % (iid, desc)
else:
print "% 7u (NOBUG)" % iid
def help (version = False, verbose = False):
print "buglist %s (%s, %s)" % (pkginstall_configvars['VERSION'],
pkginstall_configvars['PACKAGE_NAME'], pkginstall_configvars['REVISION'])
print "Redistributable under GNU GPLv3 or later: http://gnu.org/licenses/gpl.html"
if version: # version *only*
return
print "Usage: %s [options] <BUG-TRACKER> " % os.path.basename (sys.argv[0])
print "List or download bugs from a bug tracker. Bug numbers are read from stdin."
if not verbose:
print "Use the --help option for verbose usage information."
return
# 12345678911234567892123456789312345678941234567895123456789612345678971234567898
print "Options:"
print " -h, --help Print verbose help message."
print " -v, --version Print version information."
print " -U Keep bug list unsorted."
print " --bug-tracker-list List supported bug trackers."
print "Authentication:"
print " An INI-style config file is used to associate bugzilla URLs with account"
print " authentication for secured installations. The file should be unreadable"
print " by others to keep passwords secret, e.g. with: chmod 0600 ~/.urlrc"
print " A sample ~/.urlrc might look like this:"
print "\t# INI-style config file for URLs"
print "\t[authentication-urls]"
print "\turls =\thttps://USERNAME:PASSWORD@projects.maemo.org/bugzilla"
print "\t\thttp://BLOGGER:PASSWORD@blogs.gnome.org/BLOGGER/xmlrpc.php"
def main ():
import getopt
# default configuration
config = {
'sort' : True,
'show-query' : True,
'show-list' : True,
}
# parse options
try:
options, args = getopt.gnu_getopt (sys.argv[1:], 'vhU', [ 'help', 'version', 'bug-tracker-list' ])
except getopt.GetoptError, err:
print >>sys.stderr, "%s: %s" % (os.path.basename (sys.argv[0]), str (err))
help()
sys.exit (126)
for arg, val in options:
if arg == '-h' or arg == '--help': help (verbose=True); sys.exit (0)
if arg == '-v' or arg == '--version': help (version=True); sys.exit (0)
if arg == '-U': config['sort'] = False
if arg == '--bug-tracker-list':
print "Bug Tracker:"
for kv in bugurls:
print " %-20s %s" % kv
sys.exit (0)
if len (args) < 1:
print >>sys.stderr, "%s: Missing bug tracker argument" % os.path.basename (sys.argv[0])
help()
sys.exit (126)
trackerdict = dict (bugurls)
if not trackerdict.has_key (args[0]):
print >>sys.stderr, "%s: Unknown bug tracker: %s" % (os.path.basename (sys.argv[0]), args[0])
sys.exit (10)
# handle bugs
read_handle_bugs (config, trackerdict[args[0]])
if __name__ == '__main__':
main()
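# Typical invocation (illustrative):
#   echo "123 456" | buglist.py gnome
# prints the constructed buglist.cgi query URL followed by one summary line
# per bug number (both outputs are enabled by default in `config`).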
|
tim-janik/testbit-tools
|
buglist.py
|
Python
|
gpl-3.0
| 7,933
|