code stringlengths 2 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int32 2 1.05M |
|---|---|---|---|---|---|
#Section 1: Terminology
# 1) What is a recursive function?
# A recursive function is a function that calls itself.
#
#
# 2) What happens if there is no base case defined in a recursive function?
# The recursion would never stop: the function would keep calling itself until the maximum recursion depth is exceeded (a stack overflow / RecursionError).
#
#
# 3) What is the first thing to consider when designing a recursive function?
# What the base case is
#
#
# 4) How do we put data into a function call?
# Pass the data in as arguments when calling the function; they are bound to the function's parameters.
#
#
# 5) How do we get data out of a function call?
# Return
#
#
#Section 2: Reading
# Read the following function definitions and function calls.
# Then determine the values of the variables q1-q20.
#a1 = 8
#a2 = 8
#a3 = -1
#b1 = 2
#b2 = 2
#b3 = 4
#c1 = 3
#c2 = 4
#c3 = 5
#d1 = 6
#d2 = 8
#d3 = 4
#Section 3: Programming
#Write a script that asks the user to enter a series of numbers.
#When the user types in nothing, it should return the average of all the odd numbers
#that were typed in.
#In your code for the script, add a comment labeling the base case on the line BEFORE the base case.
#Also add a comment label BEFORE the recursive case.
#It is NOT NECESSARY to print out a running total with each user input.
import random
import math
def check(number,avg):
    """Accumulate ``number`` into the running total ``avg`` when it is odd.

    Bug fix: the original condition ``int(number) == 1 or int + 2`` was
    broken -- ``int + 2`` adds 2 to the builtin ``int`` type (TypeError);
    per the assignment the intent is to keep odd numbers only. Also guard
    against the empty-string quit sentinel, which made ``int("")`` raise.

    NOTE(review): ``avg`` here is a running *sum*, not an average, and the
    mutual recursion with user() re-prompts from inside this function --
    that structure is preserved as-is; confirm against the assignment.
    """
    if number != "" and int(number) % 2 == 1:
        avg += int(number)
        user(avg)
def user(avg):
    # Prompt for the next number; empty input ends the loop.
    # NOTE(review): Python 2 code (raw_input / print statement).
    number = raw_input("Next number:")
    #this is base case
    if number == "":
        check(number,avg)
        # NOTE(review): integers are immutable, so check() rebinding its
        # local ``avg`` cannot update this frame's ``avg`` -- the value
        # printed is whatever was passed into this call. Also, despite the
        # message, ``avg`` holds a running sum, not an average.
        print "The average of your odd numbers are {}".format(avg)
    #this is recursive case
    else:
        check(number,avg)
        user(avg)
def main():
    """Entry point: start the number-entry loop with a zero running total."""
    avg = 0
    user(avg)


# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
| tonsom1592-cmis/tonsom1592-cmis-cs2 | cs3quiz.py | Python | cc0-1.0 | 1,537 |
"""Builder for websites."""
import string
from regolith.dates import date_to_float
# PEP 8 (E731): these were lambdas assigned to names; plain functions are
# clearer, get real names in tracebacks, and can carry docstrings.
def doc_date_key(x):
    """Sort key: a document's (year, month), defaulting to the distant past."""
    return date_to_float(x.get("year", 1970), x.get("month", "jan"))


def ene_date_key(x):
    """Sort key: an entry's (end_year, end_month), defaulting to the far future."""
    return date_to_float(x.get("end_year", 4242), x.get("end_month", "dec"))


def category_val(x):
    """Return the entry's category, or a placeholder when absent."""
    return x.get("category", "<uncategorized>")


def level_val(x):
    """Return the entry's level, or a placeholder when absent."""
    return x.get("level", "<no-level>")


def id_key(x):
    """Return the entry's _id, or the empty string when absent."""
    return x.get("_id", "")
def date_key(x):
    """Sort key for a dated entry.

    Prefers an end date, then a plain date, then a begin date, converting
    whichever is found with date_to_float. Raises KeyError when the entry
    carries no recognizable year field at all.
    """
    if "end_year" in x:
        return date_to_float(
            x["end_year"], x.get("end_month", "jan"), x.get("end_day", 0)
        )
    if "year" in x:
        return date_to_float(x["year"], x.get("month", "jan"), x.get("day", 0))
    if "begin_year" in x:
        return date_to_float(
            x["begin_year"], x.get("begin_month", "jan"), x.get("begin_day", 0)
        )
    raise KeyError("could not find year in " + str(x))
# Ranking of research-group positions, used as the primary sort key below.
# Some keys look misspelled ("ajunct professor", "programer") -- they sit
# alongside the correct spellings, presumably to match typos in the data;
# kept as-is deliberately.
POSITION_LEVELS = {
    "": -1,
    "editor": -1,
    "unknown": -1,
    "undergraduate research assistant": 1,
    "intern": 1,
    "masters research assistant": 2,
    "visiting student": 1,
    "graduate research assistant": 3,
    "teaching assistant": 3,
    "research assistant": 2,
    "post-doctoral scholar": 4,
    "research fellow": 4,
    "assistant scientist": 4,
    "assistant lecturer": 4,
    "lecturer": 5,
    "research scientist": 4.5,
    "associate scientist": 5,
    "adjunct scientist": 5,
    "senior assistant lecturer": 5,
    "research associate": 5,
    "reader": 5,
    "ajunct professor": 5,
    "adjunct professor": 5,
    "consultant": 5,
    "programer": 5,
    "programmer": 5,
    "visiting scientist": 6,
    "research assistant professor": 4,
    "assistant professor": 8,
    "assistant physicist": 8,
    "associate professor": 9,
    "associate physicist": 9,
    "professor emeritus": 9,
    "visiting professor": 9,
    "manager": 10,
    "director": 10,
    "scientist": 10,
    "engineer": 10,
    "physicist": 10,
    "professor": 11,
    "president": 10,
    "distinguished professor": 12
}


def position_key(x):
    """Sort key for a person based on their position in the research group.

    Returns a tuple ``(level, backward_position)`` where ``level`` comes
    from POSITION_LEVELS (unknown positions map to -1) and
    ``backward_position`` ranks the first letter of the last name in
    reverse alphabetical order (A -> 26, Z -> 1).
    """
    pos = x.get("position", "").lower()
    # Fall back to "zappa" when the name is missing, None or empty, so we
    # always have a last-name initial (the old code raised IndexError on "").
    name = x.get("name") or "zappa"
    first_letter_last = name.rsplit(None, 1)[-1][0].upper()
    if first_letter_last in string.ascii_uppercase:
        backward_position = 26 - string.ascii_uppercase.index(first_letter_last)
    else:
        # Initials outside A-Z (digits, accented characters) used to raise
        # ValueError; rank them after "Z" instead.
        backward_position = 0
    return POSITION_LEVELS.get(pos, -1), backward_position
| scopatz/regolith | regolith/sorters.py | Python | cc0-1.0 | 2,389 |
# Read the SBML model document that ships in the same directory as this
# module into ``sbmlString``.
import os
path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'BIOMD0000000102.xml')
with open(sbmlFilePath,'r') as f:
    sbmlString = f.read()
def module_exists(module_name):
    """Return True when ``module_name`` can be imported, False otherwise."""
    try:
        __import__(module_name)
        return True
    except ImportError:
        return False
# Parse the SBML string only when the optional python-libsbml binding is
# installed; otherwise only ``sbmlString`` is available to callers.
if module_exists('libsbml'):
    import libsbml
    sbml = libsbml.readSBMLFromString(sbmlString)
| biomodels/BIOMD0000000102 | BIOMD0000000102/model.py | Python | cc0-1.0 | 427 |
#!/usr/bin/env python
import threading
import spam
def grab_cores(threads=1, count=int(1e9)):
    """Spin up ``threads`` worker threads, each running spam.busy(count).

    Every worker is started as soon as it is created; the call then blocks
    until all workers have finished.
    """
    workers = []
    for _ in range(threads):
        worker = threading.Thread(target=spam.busy, args=(count,))
        workers.append(worker)
        worker.start()
    # Wait for every busy worker to finish before returning.
    for worker in workers:
        worker.join()
# Command-line entry point: the first argument is the number of threads.
if __name__ == '__main__':
    import sys
    grab_cores(threads=int(sys.argv[1]))
| wking/cpython-extension | grab-cores.py | Python | cc0-1.0 | 403 |
# Note - to use this script you need Jeff Garzik's python-bitcoinrpc
# https://github.com/jgarzik/python-bitcoinrpc
import os
import sys;
import json;
from bitcoinrpc.authproxy import AuthServiceProxy;
# SET THESE VALUES
# RPC credentials and endpoint (used by the commented-out direct
# AuthServiceProxy construction below; normally read from bitcoin.conf).
rpc_user = "bitcoinrpc";
rpc_pass = "A7Xr149i7F6GxkhDbxWDTbmXooz1UZGhhyUYvaajA13Z";
rpc_host = "localhost";
rpc_port = 8332;
# Donation amounts are in satoshis (see to_satoshi/from_satoshi below).
donation_minimum = 0;
donation_per_input = 3000;
donation_address = "1ForFeesAndDonationsSpendHerdtWbWy";
# Locate bitcoin.conf: Windows per-user AppData when win32com is present,
# otherwise the standard ~/.bitcoin location.
# http://stackoverflow.com/questions/626796/how-do-i-find-the-windows-common-application-data-folder-using-python
try:
    from win32com.shell import shellcon, shell
    config_file = shell.SHGetFolderPath(0, shellcon.CSIDL_APPDATA, 0, 0) + "/Bitcoin/bitcoin.conf"
except ImportError: # quick semi-nasty fallback for non-windows/win32com case
    config_file = os.path.expanduser("~") + "/.bitcoin/bitcoin.conf"
# thanks ryan-c for this function
def asp_from_config(filename):
    """Build an AuthServiceProxy from a bitcoin.conf-style file.

    Reads rpcuser/rpcpassword (required) and rpcport/rpcconnect (optional,
    defaulting to 8332 / 127.0.0.1) from ``filename``. Returns None when no
    complete credentials are found.

    Fixes over the original: the bare ``except:`` (which swallowed even
    KeyboardInterrupt) is narrowed to ValueError, and the redundant
    ``f.close()`` inside the ``with`` block is gone.
    """
    rpcport = '8332'
    rpcconn = '127.0.0.1'
    rpcuser = None
    rpcpass = None
    with open(filename, 'r') as f:
        for line in f:
            try:
                (key, val) = line.rstrip().replace(' ', '').split('=')
            except ValueError:
                # Not a simple "key=value" line (blank, comment, or a
                # value containing '='): ignore it.
                (key, val) = ("", "")
            if key == 'rpcuser':
                rpcuser = val
            elif key == 'rpcpassword':
                rpcpass = val
            elif key == 'rpcport':
                rpcport = val
            elif key == 'rpcconnect':
                rpcconn = val
    if rpcuser is not None and rpcpass is not None:
        rpcurl = 'http://%s:%s@%s:%s' % (rpcuser, rpcpass, rpcconn, rpcport)
        print('RPC server: %s' % rpcurl)
        return AuthServiceProxy(rpcurl)
def to_satoshi(s):
    """Convert a BTC amount (string or number) to integer satoshis.

    Bug fix: uses round() instead of truncation, so decimal amounts that
    are not exactly representable as floats (e.g. "4.57" -> 456999999.99...)
    convert to the intended satoshi value instead of one satoshi short.
    """
    return int(round(100000000 * float(s)))
def from_satoshi(s):
    """Convert a satoshi amount (string or number) to a float BTC value."""
    return float(s) / 100000000.0
# Command line: <input size> <target output size in BTC>.
if len(sys.argv) < 3:
    print ("Usage: %s <input size> <target output size in BTC>" % sys.argv[0]);
    exit (0);
#service = AuthServiceProxy ("http://%s:%s@%s:%d" % (rpc_user, rpc_pass, rpc_host, rpc_port));
service = asp_from_config (config_file);
# All amounts from here on are integer satoshis.
balance = to_satoshi (service.getbalance());
unspent = service.listunspent();
target_in = to_satoshi (sys.argv[1]);
target_out = to_satoshi (sys.argv[2]);
# Sanity checks on the requested amounts.
if balance < target_in:
    print ("Cannot spend %f; only have %f in wallet." % (from_satoshi (target_in), from_satoshi (balance)));
    exit (0);
if target_out > target_in:
    print ("Please have a smaller target output than input value.");
    exit (0);
# FIND INPUTS
# TODO: have a smarter coin selection algo
# For now we just sort the coins by increasing abs(value - target output), then select in order
inputs = [];
donation = 0;
total_in = 0;
unspent.sort (key=lambda coin: abs(to_satoshi (coin['amount']) - target_in));
for coin in unspent:
    total_in += to_satoshi (coin['amount']);
    donation += donation_per_input;
    inputs.append (dict (txid = coin['txid'], vout = coin['vout']));
    if total_in > target_in:
        break;
if donation < donation_minimum:
    donation = donation_minimum;
# FIND OUTPUTS
# Split the selected coins into equal chunks of target_out, one donation
# output, and a final output for the remainder.
# NOTE(review): no explicit miner fee is set aside beyond the donation
# output -- confirm this is intended before broadcasting.
outputs = dict ();
outputs[donation_address] = from_satoshi (donation);
total_in -= donation;
while total_in > target_out:
    outputs[service.getnewaddress()] = from_satoshi (target_out);
    total_in -= target_out;
outputs[service.getnewaddress()] = from_satoshi (total_in);
# Make the transaction
# NOTE(review): Python 2 print statement -- this script is Python 2 only.
print service.createrawtransaction (inputs, outputs);
| apoelstra/coinjoin | generate-tx.py | Python | cc0-1.0 | 3,413 |
"""Assign Params to Attributes by Joel Hedlund <joel.hedlund at gmail.com>.
PyDev script for generating python code that assigns method parameter
values to attributes of self with the same name. Activates with 'a' by
default. Edit global constants ACTIVATION_STRING and WAIT_FOR_ENTER if this
does not suit your needs. See docs on the class AssignToAttribsOfSelf for
more details.
Contact the author for bug reports/feature requests.
Changed:Fabio Zadrozny (binded to Ctrl+1 too)
"""
__version__ = "1.0.1"
__copyright__ = """Available under the same conditions as PyDev.
See PyDev license for details.
http://pydev.sourceforge.net
"""
# Change this if the default does not suit your needs
ACTIVATION_STRING = 'a'
WAIT_FOR_ENTER = False
# For earlier Python versions
# NOTE(review): rebinding True/False like this only works on Python/Jython
# 2.x interpreters (it is a syntax error on Python 3); this is a Jython
# PyDev script, where that is expected.
True, False = 1,0
# Set to True to force Jython script interpreter restart on save events.
# Useful for Jython PyDev script development, not useful otherwise.
DEBUG = False
# This is a magic trick that tells the PyDev Extensions editor about the
# namespace provided for pydev scripts (the block is never executed; it
# only gives the editor symbols for code completion):
if False:
    from org.python.pydev.editor import PyEdit #@UnresolvedImport
    cmd = 'command string'
    editor = PyEdit
# ``cmd`` and ``editor`` are injected by the PyDev scripting host.
assert cmd is not None
assert editor is not None
if DEBUG and cmd == 'onSave':
    from org.python.pydev.jython import JythonPlugin #@UnresolvedImport
    editor.pyEditScripting.interpreter = JythonPlugin.newPythonInterpreter()
from org.eclipse.jface.action import Action #@UnresolvedImport
#=======================================================================================================================
# AssignToAttribsOfSelfAction
#=======================================================================================================================
class AssignToAttribsOfSelfAction(Action):
    """Eclipse Action that delegates its run() to an AssignToAttribsOfSelf helper."""

    def __init__(self, assign_to_attribs_helper):
        Action.__init__(self)
        # Helper that performs the actual param-to-attribute assignment.
        self.assign_to_attribs_helper = assign_to_attribs_helper

    def run(self):
        """Invoked by Eclipse when the action is triggered."""
        self.assign_to_attribs_helper.run()
#=======================================================================================================================
# Actually bind the actions
#=======================================================================================================================
# Register the action both on Ctrl+2,<ACTIVATION_STRING> and on the Ctrl+1
# quick-assist menu (also re-registers on save while DEBUG is set).
if cmd == 'onCreateActions' or (DEBUG and cmd == 'onSave'):
    from org.python.pydev.editor.correctionassist import PythonCorrectionProcessor #@UnresolvedImport
    import assign_params_to_attributes_action as helper
    import assign_params_to_attributes_assist
    #---------------------------------------------------------------------------------------------- Bind it to Ctrl+2, a
    sDescription = 'Assign method params to attribs of self'
    assign_to_attribs_helper = helper.AssignToAttribsOfSelf(editor)
    editor.addOfflineActionListener(
        ACTIVATION_STRING, AssignToAttribsOfSelfAction(assign_to_attribs_helper), sDescription, WAIT_FOR_ENTER)
    #------------------------------------------------------------------------------------------------- Bind it to Ctrl+1
    ASSIGN_PARAMS_TO_ATTRIBUTES_ASSIST = 'ASSIGN_PARAMS_TO_ATTRIBUTES_ASSIST'
    # Guard against double registration when the script runs more than once.
    if not PythonCorrectionProcessor.hasAdditionalAssist(ASSIGN_PARAMS_TO_ATTRIBUTES_ASSIST):
        assist = assign_params_to_attributes_assist.AssistAssignParamsToAttributes()
        PythonCorrectionProcessor.addAdditionalAssist(ASSIGN_PARAMS_TO_ATTRIBUTES_ASSIST, assist)
| smkr/pyclipse | plugins/org.python.pydev.jython/jysrc/pyedit_assign_params_to_attributes.py | Python | epl-1.0 | 3,468 |
# coding: iso-8859-1 -*-
"""
Created on Wed Oct 22 21:49:24 2014
@author: fábioandrews
"""
import facebook
from DadosDeAmigoEmComum import DadosDeAmigoEmComum
class AfinidadeLikesEscolaELocalidades:
    """Affinity between Facebook friends by page likes, home town and schools.

    On construction, fetches and caches each friend's liked pages,
    location and education history via the Graph API; the achar* methods
    then score how compatible other friends are with a given friend.
    (Portuguese identifiers kept; original comments translated to English.)
    """
    def __init__(self,ACCESS_TOKEN_FACEBOOK):
        # Eagerly fetch likes, locations and schools for all friends.
        self.token_do_facebook = ACCESS_TOKEN_FACEBOOK
        self.meusAmigos = []
        self.amigosECoisasQueGostam = dict()
        self.amigosELocalidades = dict()
        self.pegarMeusAmigosECoisasQueElesGostam(ACCESS_TOKEN_FACEBOOK)
        self.pegarAmigosELocalidades(ACCESS_TOKEN_FACEBOOK)
        self.pegarAmigosEEscolas(ACCESS_TOKEN_FACEBOOK)
    def pegarMeusAmigosECoisasQueElesGostam(self,ACCESS_TOKEN_FACEBOOK):
        # "Get my friends and the things they like": fills
        # self.amigosECoisasQueGostam as {friend name: [liked page names]}.
        g = facebook.GraphAPI(ACCESS_TOKEN_FACEBOOK)
        meusAmigosESeusIds = g.get_connections("me", "friends")['data'] # list of {name, id} records for each friend
        likesDeMeusAmigosComCategoriasDataECoisasInuteis = { friend['name'] : g.get_connections(friend['id'], "likes")['data'] for friend in meusAmigosESeusIds }
        # The map above holds each friend's raw like objects, which carry
        # name plus category, date and other fields we do not need.
        chaves_de_likes = likesDeMeusAmigosComCategoriasDataECoisasInuteis.keys() # list of friend names
        amigos_e_likes_simplificados = dict() # simplified map: friend name -> list of liked-page names only
        for nomeAmigo in chaves_de_likes:
            likes_de_um_amigo = likesDeMeusAmigosComCategoriasDataECoisasInuteis[nomeAmigo]
            for umLike in likes_de_um_amigo:
                umLikeSimplificado = umLike['name']
                nomeAmigoEmUTf8 = nomeAmigo.encode(encoding='utf_8',errors='ignore') # avoid u'...' unicode strings leaking through
                umLikeSimplificadoEmUtf8 = umLikeSimplificado.encode(encoding='utf_8',errors='ignore')
                if(nomeAmigoEmUTf8 not in amigos_e_likes_simplificados.keys()):
                    amigos_e_likes_simplificados[nomeAmigoEmUTf8] = [umLikeSimplificadoEmUtf8]
                else:
                    amigos_e_likes_simplificados[nomeAmigoEmUTf8].append(umLikeSimplificadoEmUtf8);
        self.amigosECoisasQueGostam = amigos_e_likes_simplificados
        self.meusAmigos = self.amigosECoisasQueGostam.keys()
    def pegarAmigosELocalidades(self,ACCESS_TOKEN_FACEBOOK):
        # Fills self.amigosELocalidades as {friend name: location name};
        # friends without a 'location' field are skipped.
        g = facebook.GraphAPI(ACCESS_TOKEN_FACEBOOK)
        amigosELocalizacoesComplexo = g.get_connections("me", "friends", fields="location, name")
        amigos_e_localidades = dict() # maps a friend's name to his/her location
        for fr in amigosELocalizacoesComplexo['data']:
            if 'location' in fr:
                nomeAmigoUtf8 = fr['name'].encode(encoding='utf_8',errors='ignore')
                localidadeUtf8 = fr['location']["name"].encode(encoding='utf_8',errors='ignore')
                amigos_e_localidades[nomeAmigoUtf8] = localidadeUtf8 # 'location' is a dict with 'id' and 'name' keys
        self.amigosELocalidades = amigos_e_localidades
    # After this runs, self.amigosEEscolas maps e.g.
    # {'Felipe': ['High School%Instituto X', 'College%University Y'], ...}
    def pegarAmigosEEscolas(self,ACCESS_TOKEN_FACEBOOK):
        g = facebook.GraphAPI(ACCESS_TOKEN_FACEBOOK)
        amigosEEscolasComplexo = g.get_connections("me","friends",fields="education, name")
        amigos_e_escolas = dict() # friend name -> list of "type%school" strings (a person may list several schools)
        for fr in amigosEEscolasComplexo['data']:
            if 'education' in fr:
                nomeAmigoUtf8 = fr['name'].encode(encoding='utf_8',errors='ignore')
                arranjoEducation = fr['education'] # a person may have several high school / college / graduate school entries
                arranjoEducacaoMeuAmigo = []
                for elementoArranjoEducation in arranjoEducation:
                    nomeEscola = elementoArranjoEducation['school']['name'].encode(encoding='utf_8',errors='ignore')
                    tipoEscola = elementoArranjoEducation['type'].encode(encoding='utf_8',errors='ignore') # "High School", "College" or "Graduate School"
                    arranjoEducacaoMeuAmigo.append(tipoEscola + "%" + nomeEscola)
                amigos_e_escolas[nomeAmigoUtf8] = arranjoEducacaoMeuAmigo
        self.amigosEEscolas = amigos_e_escolas
    # Given a friend, returns {other friend: DadosDeAmigoEmComum}, where the
    # DadosDeAmigoEmComum holds a 0-10 score and the likes shared with meuAmigo.
    def acharCompatibilidadeEntreLikesDePaginas(self,meuAmigo):
        meuAmigo = meuAmigo.encode(encoding='utf_8',errors='ignore')
        pessoasDeMesmoInteresseDeMeuAmigoEQuaisInteresses = dict()
        for outroAmigo in self.amigosECoisasQueGostam.keys():
            if(outroAmigo != meuAmigo):
                # Different friends: collect the likes they share.
                likesEmComumEntreOsDois = []
                for umLikeMeuAmigo in self.amigosECoisasQueGostam[meuAmigo]:
                    for umLikeOutroAmigo in self.amigosECoisasQueGostam[outroAmigo]:
                        if(umLikeMeuAmigo == umLikeOutroAmigo):
                            # Found a like both friends have in common.
                            likesEmComumEntreOsDois.append(umLikeMeuAmigo)
                if(len(likesEmComumEntreOsDois) > 0):
                    # The two friends share at least one like.
                    pessoasDeMesmoInteresseDeMeuAmigoEQuaisInteresses[outroAmigo] = likesEmComumEntreOsDois
        # So far we have e.g. {giovanni: ['games', 'music']}; now score each
        # match as 10 * shared_likes / total_likes_of_meuAmigo.
        quantasCoisasMeuAmigoGosta = len(self.amigosECoisasQueGostam[meuAmigo])
        pessoasCompativeisComMeuAmigoSegundoLikes = dict() # the return value
        for amigoParecidoComMeuAmigo in pessoasDeMesmoInteresseDeMeuAmigoEQuaisInteresses.keys():
            quantasCoisasEmComumEntreMeuAmigoEAmigoParecidoComMeuAmigo = len(pessoasDeMesmoInteresseDeMeuAmigoEQuaisInteresses[amigoParecidoComMeuAmigo])
            nota = (10.0 * quantasCoisasEmComumEntreMeuAmigoEAmigoParecidoComMeuAmigo) / quantasCoisasMeuAmigoGosta
            dadosDeAmigoEmComumAmigoParecido = DadosDeAmigoEmComum(nota,pessoasDeMesmoInteresseDeMeuAmigoEQuaisInteresses[amigoParecidoComMeuAmigo])
            pessoasCompativeisComMeuAmigoSegundoLikes[amigoParecidoComMeuAmigo] = dadosDeAmigoEmComumAmigoParecido
        return pessoasCompativeisComMeuAmigoSegundoLikes
    def acharCompatibilidadeEntreLocalidade(self,meuAmigo):
        # Friends living in the same location as meuAmigo; each match
        # scores a flat 10.
        meuAmigo = meuAmigo.encode(encoding='utf_8',errors='ignore')
        pessoasDeMesmaLocalidadeDeMeuAmigoEQualLocalidade = dict()
        for outroAmigo in self.amigosELocalidades.keys():
            if(outroAmigo != meuAmigo):
                # Different friends: do they share the same location?
                if(self.amigosELocalidades[outroAmigo] == self.amigosELocalidades[meuAmigo]):
                    pessoasDeMesmaLocalidadeDeMeuAmigoEQualLocalidade[outroAmigo] = self.amigosELocalidades[outroAmigo]
        # So far we have e.g. {giovanni: 'natal'}; now attach scores.
        pessoasCompativeisComMeuAmigoSegundoLocalidade = dict() # the return value
        for amigoParecidoComMeuAmigo in pessoasDeMesmaLocalidadeDeMeuAmigoEQualLocalidade.keys():
            nota = 10.0
            dadosDeAmigoEmComumAmigoParecido = DadosDeAmigoEmComum(nota,pessoasDeMesmaLocalidadeDeMeuAmigoEQualLocalidade[amigoParecidoComMeuAmigo])
            pessoasCompativeisComMeuAmigoSegundoLocalidade[amigoParecidoComMeuAmigo] = dadosDeAmigoEmComumAmigoParecido
        return pessoasCompativeisComMeuAmigoSegundoLocalidade
    def acharCompatibilidadeEntreEscolas(self,meuAmigo):
        # Friends who attended the same schools as meuAmigo; scored as
        # 10 * shared_schools / schools_attended_by_meuAmigo.
        meuAmigo = meuAmigo.encode(encoding='utf_8',errors='ignore')
        pessoasDeMesmasEscolasDeMeuAmigoEQuaisEscolas = dict()
        for outroAmigo in self.amigosEEscolas.keys():
            if(outroAmigo != meuAmigo):
                # Different friends: collect the schools they share.
                escolasEmComumEntreOsDois = []
                for umaEscolaMeuAmigo in self.amigosEEscolas[meuAmigo]:
                    for umaEscolaOutroAmigo in self.amigosEEscolas[outroAmigo]:
                        if(umaEscolaMeuAmigo == umaEscolaOutroAmigo):
                            # Found a school both friends attended.
                            escolasEmComumEntreOsDois.append(umaEscolaMeuAmigo)
                if(len(escolasEmComumEntreOsDois) > 0):
                    # The two friends share at least one school.
                    pessoasDeMesmasEscolasDeMeuAmigoEQuaisEscolas[outroAmigo] = escolasEmComumEntreOsDois
        # So far we have e.g. {giovanni: ['High School%X', 'College%Y']};
        # now attach scores.
        quantasEscolasMeuAmigoCursou = len(self.amigosEEscolas[meuAmigo])
        pessoasCompativeisComMeuAmigoSegundoEscolas = dict() # the return value
        for amigoParecidoComMeuAmigo in pessoasDeMesmasEscolasDeMeuAmigoEQuaisEscolas.keys():
            quantasEscolasEmComumEntreMeuAmigoEAmigoParecidoComMeuAmigo = len(pessoasDeMesmasEscolasDeMeuAmigoEQuaisEscolas[amigoParecidoComMeuAmigo])
            nota = (10.0 * quantasEscolasEmComumEntreMeuAmigoEAmigoParecidoComMeuAmigo) / quantasEscolasMeuAmigoCursou
            dadosDeAmigoEmComumAmigoParecido = DadosDeAmigoEmComum(nota,pessoasDeMesmasEscolasDeMeuAmigoEQuaisEscolas[amigoParecidoComMeuAmigo])
            pessoasCompativeisComMeuAmigoSegundoEscolas[amigoParecidoComMeuAmigo] = dadosDeAmigoEmComumAmigoParecido
        return pessoasCompativeisComMeuAmigoSegundoEscolas
#os testes...
"""calculaAfinidades = AfinidadeLikesEscolaELocalidades('CAACEdEose0cBAG2OxI7v1nXVTzIX4JCoPSZByGR4OOr9leuRT2cjmNYo7nLg1sf9lRQstvd0HaIZCa1T9mK68GynHqqzhD5u6cCZATHZBrX99fHpWPBrM6NpTVKEXYNi5l45fk6ZAi87i8psDDAtOtjzA8hnymZAeN77LV3p2DtODu9l1na4gCz8hkgeHBHDjFC6TnVVFd8iivK0uhZAZBre')
amigosDePhillipEmComum = calculaAfinidades.acharCompatibilidadeEntreLikesDePaginas("Fábio Phillip Rocha Marques")
#faltou pegar o jeito de imprimir esse resultado de phillip
print "!!!!!!!!!!!!!!!amigos com mesmos likes de meu amigo Fábio Phillip!!!!!!!!!!!!!!!"
for amigoEmComum in amigosDePhillipEmComum.keys():
print "######" , amigoEmComum
amigosDePhillipEmComum[amigoEmComum].imprimirDadosDeAmigoEmComum();"""
"""amigosDePhillipEmComumLocalidades = calculaAfinidades.acharCompatibilidadeEntreLocalidade("Fábio Phillip Rocha Marques")
print "!!!!!!!!!!!!!!!amigos com mesma localidade de meu amigo Fábio Phillip!!!!!!!!!!!!!!!"
for amigoEmComum in amigosDePhillipEmComumLocalidades.keys():
print "######" , amigoEmComum
amigosDePhillipEmComumLocalidades[amigoEmComum].imprimirDadosDeAmigoEmComum();"""
"""print "!!!!!!!!!!!!!!!!! ESCOLAS DE FÁBIO PHILLIP !!!!!!!!!!!!!!!!!"
print calculaAfinidades.amigosEEscolas["Fábio Phillip Rocha Marques"]
amigosDePhillipEmComumEscolas = calculaAfinidades.acharCompatibilidadeEntreEscolas("Fábio Phillip Rocha Marques")
print "!!!!!!!!!!!!!!!amigos com mesmas escolas de meu amigo Fábio Phillip!!!!!!!!!!!!!!!"
for amigoEmComum in amigosDePhillipEmComumEscolas.keys():
print "######" , amigoEmComum
amigosDePhillipEmComumEscolas[amigoEmComum].imprimirDadosDeAmigoEmComum();""" | Topicos-3-2014/friendlyadvice | AfinidadeLikesEscolaELocalidades.py | Python | epl-1.0 | 12,463 |
# Copyright 2005-2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
import logging
import subprocess
import portage
from portage import os
from portage.util import writemsg_level
from portage.output import create_color_func
good = create_color_func("GOOD")
bad = create_color_func("BAD")
warn = create_color_func("WARN")
from portage.sync.syncbase import NewBase
class GitSync(NewBase):
    '''Git sync class'''

    short_desc = "Perform sync operations on git based repositories"

    @staticmethod
    def name():
        return "GitSync"

    def __init__(self):
        NewBase.__init__(self, "git", portage.const.GIT_PACKAGE_ATOM)

    def exists(self, **kwargs):
        '''Tests whether the repo actually exists'''
        return os.path.exists(os.path.join(self.repo.location, '.git'))

    def new(self, **kwargs):
        '''Do the initial clone of the repository.

        Returns a (exitcode, updated) tuple; updated is True on success.
        '''
        if kwargs:
            self._kwargs(kwargs)
        try:
            if not os.path.exists(self.repo.location):
                os.makedirs(self.repo.location)
                self.logger(self.xterm_titles,
                    'Created new directory %s' % self.repo.location)
        # Bug fix: os.makedirs() raises OSError, which the previous
        # "except IOError" never caught under Python 2, so a failed mkdir
        # crashed instead of returning an error tuple. EnvironmentError
        # is the common base of IOError and OSError on both 2 and 3.
        except EnvironmentError:
            return (1, False)

        sync_uri = self.repo.sync_uri
        if sync_uri.startswith("file://"):
            # Bug fix: "file://" is 7 characters, not 6; slicing [6:]
            # left a spurious doubled slash on the path ("//usr/...").
            sync_uri = sync_uri[7:]

        git_cmd_opts = ""
        if self.settings.get("PORTAGE_QUIET") == "1":
            git_cmd_opts += " --quiet"
        if self.repo.sync_depth is not None:
            git_cmd_opts += " --depth %d" % self.repo.sync_depth
        git_cmd = "%s clone%s %s ." % (self.bin_command, git_cmd_opts,
            portage._shell_quote(sync_uri))
        writemsg_level(git_cmd + "\n")

        exitcode = portage.process.spawn_bash("cd %s ; exec %s" % (
            portage._shell_quote(self.repo.location), git_cmd),
            **portage._native_kwargs(self.spawn_kwargs))
        if exitcode != os.EX_OK:
            msg = "!!! git clone error in %s" % self.repo.location
            self.logger(self.xterm_titles, msg)
            writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
            return (exitcode, False)
        return (os.EX_OK, True)

    def update(self):
        ''' Update existing git repository, and ignore the syncuri. We are
        going to trust the user and assume that the user is in the branch
        that he/she wants updated. We'll let the user manage branches with
        git directly.

        Returns a (exitcode, updated) tuple; updated is True when HEAD
        actually moved.
        '''
        git_cmd_opts = ""
        if self.settings.get("PORTAGE_QUIET") == "1":
            git_cmd_opts += " --quiet"
        git_cmd = "%s pull%s" % (self.bin_command, git_cmd_opts)
        writemsg_level(git_cmd + "\n")

        # Record the current HEAD so we can report whether the pull
        # changed anything.
        rev_cmd = [self.bin_command, "rev-list", "--max-count=1", "HEAD"]
        previous_rev = subprocess.check_output(rev_cmd,
            cwd=portage._unicode_encode(self.repo.location))

        exitcode = portage.process.spawn_bash("cd %s ; exec %s" % (
            portage._shell_quote(self.repo.location), git_cmd),
            **portage._native_kwargs(self.spawn_kwargs))
        if exitcode != os.EX_OK:
            msg = "!!! git pull error in %s" % self.repo.location
            self.logger(self.xterm_titles, msg)
            writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
            return (exitcode, False)

        current_rev = subprocess.check_output(rev_cmd,
            cwd=portage._unicode_encode(self.repo.location))

        return (os.EX_OK, current_rev != previous_rev)
| gmt/portage | pym/portage/sync/modules/git/git.py | Python | gpl-2.0 | 3,109 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
#
# Copyright (c) 2008-2014 University of Dundee.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Aleksandra Tarkowska <A(dot)Tarkowska(at)dundee(dot)ac(dot)uk>,
# 2008-2013.
#
# Version: 1.0
#
''' A view functions is simply a Python function that takes a Web request and
returns a Web response. This response can be the HTML contents of a Web page,
or a redirect, or the 404 and 500 error, or an XML document, or an image...
or anything.'''
import traceback
import logging
import datetime
import omeroweb.webclient.views
from omero_version import build_year
from omero_version import omero_version
from django.template import loader as template_loader
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.template import RequestContext as Context
from django.utils.translation import ugettext as _
from django.utils.encoding import smart_str
from forms import ForgottonPasswordForm, ExperimenterForm, GroupForm
from forms import GroupOwnerForm, MyAccountForm, ChangePassword
from forms import UploadPhotoForm, EmailForm
from omeroweb.http import HttpJPEGResponse
from omeroweb.webclient.decorators import login_required, render_response
from omeroweb.connector import Connector
logger = logging.getLogger(__name__)
##############################################################################
# decorators
class render_response_admin(omeroweb.webclient.decorators.render_response):
    """
    Subclass for adding additional data to the 'context' dict passed to
    templates
    """

    def prepare_context(self, request, context, *args, **kwargs):
        """
        We extend the webclient render_response to check if any groups are
        created.
        If not, add an appropriate message to the template context
        """
        super(render_response_admin, self).prepare_context(request, context,
                                                           *args, **kwargs)

        if 'conn' not in kwargs:
            return
        conn = kwargs['conn']

        # NOTE(review): despite the name, this mirrors the original logic --
        # a truthy conn.isAnythingCreated() triggers the "no groups yet"
        # message; confirm the gateway method's semantics.
        noGroupsCreated = conn.isAnythingCreated()
        if noGroupsCreated:
            msg = _('User must be in a group - You have not created any'
                    ' groups yet. Click <a href="%s">here</a> to create a'
                    ' group') % (reverse(viewname="wamanagegroupid",
                                         args=["new"]))
            context['ome']['message'] = msg
        # Bug fix: the previous default of False made
        # False.get('email', ...) raise AttributeError whenever the
        # session had no 'server_settings' key; default to {} instead.
        context['ome']['email'] = request.session \
            .get('server_settings', {}) \
            .get('email', False)
##############################################################################
# utils
import omero
from omero.model import PermissionsI
def prepare_experimenter(conn, eid=None):
    """Fetch an Experimenter plus derived display data.

    ``eid`` defaults to the current session user. Returns a 5-tuple:
    (experimenter, default group, other groups as a list,
    is-LDAP-user flag, has-avatar flag).
    """
    if eid is None:
        eid = conn.getEventContext().userId
    experimenter = conn.getObject("Experimenter", eid)
    default_group = experimenter.getDefaultGroup()
    other_groups = list(experimenter.getOtherGroups())
    has_avatar = conn.hasExperimenterPhoto()
    is_ldap = experimenter.isLdapUser()
    return experimenter, default_group, other_groups, is_ldap, has_avatar
def otherGroupsInitialList(groups, excluded_names=("user", "guest"),
                           excluded_ids=()):
    """Filter and sort groups for an initial form list.

    Drops any group whose ``name`` is in ``excluded_names`` or whose ``id``
    is in ``excluded_ids``, then sorts the rest case-insensitively by name.

    Fix: the old ``excluded_ids=list()`` default was a mutable-default-
    argument trap; an empty tuple is behaviorally equivalent here (it is
    only used for membership tests).
    """
    formGroups = [gr for gr in groups
                  if gr.name not in excluded_names and
                  gr.id not in excluded_ids]
    formGroups.sort(key=lambda x: x.getName().lower())
    return formGroups
def ownedGroupsInitial(conn, excluded_names=("user", "guest", "system"),
                       excluded_ids=()):
    """Return the groups owned by the current user, filtered and sorted.

    Drops groups whose ``name`` is in ``excluded_names`` or whose ``id``
    is in ``excluded_ids``, then sorts case-insensitively by name.

    Fix: the old ``excluded_ids=list()`` default was a mutable-default-
    argument trap; an empty tuple is behaviorally equivalent here.
    """
    ownedGroups = [gr for gr in conn.listOwnedGroups()
                   if gr.name not in excluded_names and
                   gr.id not in excluded_ids]
    ownedGroups.sort(key=lambda x: x.getName().lower())
    return ownedGroups
# myphoto helpers
def attach_photo(conn, newFile):
    """Upload ``newFile`` as the current user's photo via ``conn``.

    For image uploads the format string is the upper-cased MIME subtype
    (e.g. "image/png" -> "PNG"); any other content type is passed through
    unchanged.
    """
    content_type = newFile.content_type
    if content_type.startswith("image"):
        format = content_type.split("/")[1].upper()
    else:
        format = content_type
    conn.uploadMyUserPhoto(smart_str(newFile.name), format, newFile.read())
# permission helpers
def setActualPermissions(permissions):
    """Map a numeric permission level to a PermissionsI instance.

    0 -> private (rw----), 1 -> read-only (rwr---),
    2 -> read-annotate (rwra--), 3 -> read-write (rwrw--);
    any other value yields a default PermissionsI().
    """
    perm_strings = {
        0: "rw----",
        1: "rwr---",
        2: "rwra--",
        3: "rwrw--",
    }
    level = int(permissions)
    if level in perm_strings:
        return PermissionsI(perm_strings[level])
    return PermissionsI()
def getActualPermissions(group):
    """Return the numeric permission level of ``group``.

    3 = group-write, 2 = group-annotate, 1 = group-read, 0 = user-read
    (private); None when none of those flags is set. Raises
    AttributeError when the group carries no permissions object.

    Cleanup: the permissions were previously fetched twice (once for the
    None check, once for use) and a dead ``p = None`` initializer removed.
    """
    p = group.details.getPermissions()
    if p is None:
        raise AttributeError('Object has no permissions')
    if p.isGroupWrite():
        return 3
    if p.isGroupAnnotate():
        return 2
    if p.isGroupRead():
        return 1
    if p.isUserRead():
        return 0
    return None
# getters
def getSelectedGroups(conn, ids):
    """Resolve ExperimenterGroup objects for ``ids``; [] when None/empty."""
    if not ids:
        return list()
    return list(conn.getObjects("ExperimenterGroup", ids))
def getSelectedExperimenters(conn, ids):
    """Resolve Experimenter objects for ``ids``; [] when None/empty."""
    if not ids:
        return list()
    return list(conn.getObjects("Experimenter", ids))
def mergeLists(list1, list2):
    # Merge two lists into a single collection of their elements.
    # NOTE(review): the return type is inconsistent -- an empty list when
    # both inputs are falsy, a *list* (possibly containing duplicates)
    # when only one is non-empty, but a *set* (order lost, duplicates
    # removed) when both are non-empty. Callers appear to only iterate
    # the result; confirm before changing or relying on the type.
    if not list1 and not list2:
        return list()
    if not list1:
        return list(list2)
    if not list2:
        return list(list1)
    result = list()
    result.extend(list1)
    result.extend(list2)
    return set(result)
@login_required()
@render_response()
def drivespace_json(request, query=None, groupId=None, userId=None, conn=None,
                    **kwargs):
    """
    Returns a json list of {"label":<Name>, "data": <Value>, "groupId /
    userId": <id>} for plotting disk usage by users or groups.
    If 'query' is "groups" or "users", this is for an Admin to show all data
    on server divided into groups or users.
    Else, if groupId is not None, we return data for that group, split by user.
    Else, if userId is not None, we return data for that user, split by group.
    """
    diskUsage = []
    # diskUsage.append({"label": "Free space", "data":conn.getFreeSpace()})
    queryService = conn.getQueryService()
    ctx = conn.SERVICE_OPTS.copy()
    params = omero.sys.ParametersI()
    params.theFilter = omero.sys.Filter()
    # Helper: total bytes used within the current group context,
    # optionally restricted to a single experimenter (eid).
    def getBytes(ctx, eid=None):
        bytesInGroup = 0
        # Pixel data is only counted for images without a fileset;
        # fileset-backed images are accounted via OriginalFile below.
        pixelsQuery = "select sum(cast( p.sizeX as double ) * p.sizeY * p.sizeZ * p.sizeT * p.sizeC * pt.bitSize / 8) " \
            "from Pixels p join p.pixelsType as pt join p.image i left outer join i.fileset f " \
            "join p.details.owner as owner " \
            "where f is null"
        filesQuery = "select sum(origFile.size) from OriginalFile as origFile " \
            "join origFile.details.owner as owner"
        if eid is not None:
            params.add('eid', omero.rtypes.rlong(eid))
            pixelsQuery = pixelsQuery + " and owner.id = (:eid)"
            filesQuery = filesQuery + " where owner.id = (:eid)"
        # Calculate disk usage via Pixels
        result = queryService.projection(pixelsQuery, params, ctx)
        if len(result) > 0 and len(result[0]) > 0:
            bytesInGroup += result[0][0].val
        # Now get Original File usage
        result = queryService.projection(filesQuery, params, ctx)
        if len(result) > 0 and len(result[0]) > 0:
            bytesInGroup += result[0][0]._val
        return bytesInGroup
    sr = conn.getAdminService().getSecurityRoles()
    if query == 'groups':
        # Admin overview: one entry per group (skipping system groups).
        for g in conn.listGroups():
            # ignore 'user' and 'guest' groups
            if g.getId() in (sr.guestGroupId, sr.userGroupId):
                continue
            ctx.setOmeroGroup(g.getId())
            b = getBytes(ctx)
            if b > 0:
                diskUsage.append({"label": g.getName(), "data": b,
                                  "groupId": g.getId()})
    elif query == 'users':
        # Admin overview: one entry per user, across all groups (-1).
        ctx.setOmeroGroup('-1')
        for e in conn.getObjects("Experimenter"):
            b = getBytes(ctx, e.getId())
            if b > 0:
                diskUsage.append({"label": e.getNameWithInitial(), "data": b,
                                  "userId": e.getId()})
    elif userId is not None:
        # Single user: split usage by each of their groups.
        eid = long(userId)
        for g in conn.getOtherGroups(eid):
            # ignore 'user' and 'guest' groups
            if g.getId() in (sr.guestGroupId, sr.userGroupId):
                continue
            ctx.setOmeroGroup(g.getId())
            b = getBytes(ctx, eid)
            if b > 0:
                diskUsage.append({"label": g.getName(), "data": b,
                                  "groupId": g.getId()})
    # users within a single group
    elif groupId is not None:
        ctx.setOmeroGroup(groupId)
        for e in conn.getObjects("Experimenter"):
            b = getBytes(ctx, e.getId())
            if b > 0:
                diskUsage.append({"label": e.getNameWithInitial(),
                                  "data": b, "userId": e.getId()})
    # Largest consumers first.
    diskUsage.sort(key=lambda x: x['data'], reverse=True)
    return diskUsage
##############################################################################
# views control
def forgotten_password(request, **kwargs):
    """Public (pre-login) view: submit a server-side password reset.

    On a valid POST, connects as guest and submits a ResetPasswordRequest
    for the given username/email; the server emails the new password.
    Renders the form (plus any error/status message) in all cases.
    """
    request.session.modified = True
    template = "webadmin/forgotten_password.html"
    conn = None
    error = None
    # A guest (anonymous) connection is sufficient for the reset request.
    def getGuestConnection(server_id):
        return Connector(server_id, True).create_guest_connection('OMERO.web')
    if request.method == 'POST':
        form = ForgottonPasswordForm(data=request.POST.copy())
        if form.is_valid():
            server_id = form.cleaned_data['server']
            try:
                conn = getGuestConnection(server_id)
            except Exception:
                logger.error(traceback.format_exc())
                error = "Internal server error, please contact administrator."
            if conn is not None:
                try:
                    req = omero.cmd.ResetPasswordRequest(
                        smart_str(form.cleaned_data['username']),
                        smart_str(form.cleaned_data['email']))
                    handle = conn.c.sf.submit(req)
                    try:
                        conn._waitOnCmd(handle)
                    finally:
                        handle.close()
                    # 'error' doubles as the status message shown to the user.
                    error = "Password was reset. Check your mailbox."
                    form = None
                except omero.CmdError, exp:
                    logger.error(exp.err)
                    try:
                        # Surface the first server-supplied parameter message.
                        error = exp.err.parameters[
                            exp.err.parameters.keys()[0]]
                    except:
                        error = exp
    else:
        form = ForgottonPasswordForm()
    context = {'error': error, 'form': form, 'build_year': build_year,
               'omero_version': omero_version}
    t = template_loader.get_template(template)
    c = Context(request, context)
    rsp = t.render(c)
    return HttpResponse(rsp)
@login_required()
def index(request, **kwargs):
    """Redirect to the appropriate webadmin landing page.

    Admins are sent to the experimenter list, regular users to "My
    account". The 'conn' connection is normally injected into kwargs by
    the login_required decorator.
    """
    # Previously a bare 'except:' swallowed the missing-key error and the
    # view crashed later on conn.isAdmin(); look the key up explicitly
    # and log a clear message instead.
    conn = kwargs.get("conn")
    if conn is None:
        logger.error("index view called without a connection in kwargs")
    if conn.isAdmin():
        return HttpResponseRedirect(reverse("waexperimenters"))
    else:
        return HttpResponseRedirect(reverse("wamyaccount"))
@login_required()
def logout(request, **kwargs):
    """End the webclient session, then return to the webadmin index."""
    omeroweb.webclient.views.logout(request, **kwargs)
    return HttpResponseRedirect(reverse("waindex"))
@login_required(isAdmin=True)
@render_response_admin()
def experimenters(request, conn=None, **kwargs):
    """List every experimenter on the server for the admin UI."""
    privileges = conn.getCurrentAdminPrivileges()
    return {
        'experimenterList': list(conn.getObjects("Experimenter")),
        'can_modify_user': 'ModifyUser' in privileges,
        'template': "webadmin/experimenters.html",
    }
@login_required(isAdmin=True)
@render_response_admin()
def manage_experimenter(request, action, eid=None, conn=None, **kwargs):
    """Dispatch experimenter administration by 'action'.

    Actions: 'new' (blank creation form), 'create' (POST of that form),
    'edit' (populated form for experimenter 'eid'), 'save' (POST of the
    edit form). Any other action redirects back to the experimenter list.
    """
    template = "webadmin/experimenter_form.html"
    groups = list(conn.getObjects("ExperimenterGroup"))
    groups.sort(key=lambda x: x.getName().lower())
    if action == 'new':
        user_id = conn.getUserId()
        user_privileges = conn.getCurrentAdminPrivileges()
        # Only Full Admin can set 'Role' of new experimenter
        user_full_admin = 'ReadSession' in user_privileges
        can_modify_user = 'ModifyUser' in user_privileges
        form = ExperimenterForm(
            can_edit_role=user_full_admin,
            can_modify_user=can_modify_user,
            initial={'with_password': True,
                     'active': True,
                     'groups': otherGroupsInitialList(groups)})
        admin_groups = [
            conn.getAdminService().getSecurityRoles().systemGroupId]
        context = {'form': form,
                   'admin_groups': admin_groups,
                   'can_modify_user': can_modify_user}
    elif action == 'create':
        if request.method != 'POST':
            return HttpResponseRedirect(
                reverse(viewname="wamanageexperimenterid", args=["new"]))
        else:
            # Server-side uniqueness checks for login name and email.
            name_check = conn.checkOmeName(request.POST.get('omename'))
            email_check = conn.checkEmail(request.POST.get('email'))
            my_groups = getSelectedGroups(
                conn,
                request.POST.getlist('other_groups'))
            initial = {'with_password': True,
                       'my_groups': my_groups,
                       'groups': otherGroupsInitialList(groups)}
            form = ExperimenterForm(
                initial=initial, data=request.POST.copy(),
                name_check=name_check, email_check=email_check)
            if form.is_valid():
                logger.debug("Create experimenter form:" +
                             str(form.cleaned_data))
                omename = form.cleaned_data['omename']
                firstName = form.cleaned_data['first_name']
                middleName = form.cleaned_data['middle_name']
                lastName = form.cleaned_data['last_name']
                email = form.cleaned_data['email']
                institution = form.cleaned_data['institution']
                role = form.cleaned_data['role']
                admin = role in ('administrator', 'restricted_administrator')
                active = form.cleaned_data['active']
                defaultGroup = form.cleaned_data['default_group']
                otherGroups = form.cleaned_data['other_groups']
                password = form.cleaned_data['password']
                # default group
                # if default group was not selected take first from the list.
                if defaultGroup is None:
                    defaultGroup = otherGroups[0]
                for g in groups:
                    if long(defaultGroup) == g.id:
                        dGroup = g
                        break
                listOfOtherGroups = set()
                # rest of groups
                for g in groups:
                    for og in otherGroups:
                        # remove defaultGroup from otherGroups if contains
                        if long(og) == long(dGroup.id):
                            pass
                        elif long(og) == g.id:
                            listOfOtherGroups.add(g)
                expId = conn.createExperimenter(
                    omename, firstName, lastName, email, admin, active,
                    dGroup, listOfOtherGroups, password, middleName,
                    institution)
                # Update 'AdminPrivilege' config roles for user
                conn.setConfigRoles(expId, form)
                return HttpResponseRedirect(reverse("waexperimenters"))
            context = {'form': form}
    elif action == 'edit':
        experimenter, defaultGroup, otherGroups, isLdapUser, hasAvatar = \
            prepare_experimenter(conn, eid)
        try:
            defaultGroupId = defaultGroup.id
        except:
            # Experimenter may have no default group set.
            defaultGroupId = None
        initial = {
            'omename': experimenter.omeName,
            'first_name': experimenter.firstName,
            'middle_name': experimenter.middleName,
            'last_name': experimenter.lastName,
            'email': experimenter.email,
            'institution': experimenter.institution,
            'active': experimenter.isActive(),
            'default_group': defaultGroupId,
            'my_groups': otherGroups,
            'other_groups': [g.id for g in otherGroups],
            'groups': otherGroupsInitialList(groups)}
        # Load 'AdminPrivilege' roles for 'initial'
        delete_perms = []
        write_perms = []
        script_perms = []
        privileges = conn.getAdminPrivileges(experimenter.id)
        for privilege in privileges:
            if privilege in ('DeleteOwned', 'DeleteFile', 'DeleteManagedRepo'):
                delete_perms.append(privilege)
            elif privilege in ('WriteOwned', 'WriteFile', 'WriteManagedRepo'):
                write_perms.append(privilege)
            elif privilege in ('WriteScriptRepo', 'DeleteScriptRepo'):
                script_perms.append(privilege)
            else:
                initial[privilege] = True
        # if ALL the Delete/Write permissions are found, Delete/Write is True
        if set(delete_perms) == \
                set(('DeleteOwned', 'DeleteFile', 'DeleteManagedRepo')):
            initial['Delete'] = True
        if set(write_perms) == \
                set(('WriteOwned', 'WriteFile', 'WriteManagedRepo')):
            initial['Write'] = True
        if set(script_perms) == \
                set(('WriteScriptRepo', 'DeleteScriptRepo')):
            initial['Script'] = True
        # 'ReadSession' distinguishes a full admin from a restricted one.
        role = 'user'
        if experimenter.isAdmin():
            if 'ReadSession' in privileges:
                role = 'administrator'
            else:
                role = 'restricted_administrator'
        initial['role'] = role
        root_id = [conn.getAdminService().getSecurityRoles().rootId]
        user_id = conn.getUserId()
        user_privileges = conn.getCurrentAdminPrivileges()
        experimenter_root = long(eid) == root_id
        experimenter_me = long(eid) == user_id
        user_full_admin = 'ReadSession' in user_privileges
        can_modify_user = 'ModifyUser' in user_privileges
        # Only Full Admin can edit 'Role' of experimenter
        can_edit_role = user_full_admin and not (experimenter_me
                                                 or experimenter_root)
        form = ExperimenterForm(
            can_modify_user=can_modify_user,
            can_edit_role=can_edit_role,
            experimenter_me=experimenter_me,
            experimenter_root=experimenter_root,
            initial=initial)
        password_form = ChangePassword()
        admin_groups = [
            conn.getAdminService().getSecurityRoles().systemGroupId]
        context = {'form': form, 'eid': eid, 'ldapAuth': isLdapUser,
                   'can_modify_user': can_modify_user,
                   'password_form': password_form,
                   'admin_groups': admin_groups}
    elif action == 'save':
        experimenter, defaultGroup, otherGroups, isLdapUser, hasAvatar = \
            prepare_experimenter(conn, eid)
        if request.method != 'POST':
            return HttpResponseRedirect(
                reverse(viewname="wamanageexperimenterid",
                        args=["edit", experimenter.id]))
        else:
            # Uniqueness checks exclude the experimenter's current values.
            name_check = conn.checkOmeName(request.POST.get('omename'),
                                           experimenter.omeName)
            email_check = conn.checkEmail(request.POST.get('email'),
                                          experimenter.email)
            my_groups = getSelectedGroups(
                conn,
                request.POST.getlist('other_groups'))
            initial = {'my_groups': my_groups,
                       'groups': otherGroupsInitialList(groups)}
            form = ExperimenterForm(initial=initial, data=request.POST.copy(),
                                    name_check=name_check,
                                    email_check=email_check)
            if form.is_valid():
                logger.debug("Update experimenter form:" +
                             str(form.cleaned_data))
                omename = form.cleaned_data['omename']
                firstName = form.cleaned_data['first_name']
                middleName = form.cleaned_data['middle_name']
                lastName = form.cleaned_data['last_name']
                email = form.cleaned_data['email']
                institution = form.cleaned_data['institution']
                role = form.cleaned_data['role']
                admin = role in ('administrator', 'restricted_administrator')
                active = form.cleaned_data['active']
                rootId = conn.getAdminService().getSecurityRoles().rootId
                # User can't disable themselves or 'root'
                if experimenter.getId() in [conn.getUserId(), rootId]:
                    # disabled checkbox not in POST: do it manually
                    active = True
                defaultGroup = form.cleaned_data['default_group']
                otherGroups = form.cleaned_data['other_groups']
                # default group
                # if default group was not selected take first from the list.
                if defaultGroup is None:
                    defaultGroup = otherGroups[0]
                for g in groups:
                    if long(defaultGroup) == g.id:
                        dGroup = g
                        break
                listOfOtherGroups = set()
                # rest of groups
                for g in groups:
                    for og in otherGroups:
                        # remove defaultGroup from otherGroups if contains
                        if long(og) == long(dGroup.id):
                            pass
                        elif long(og) == g.id:
                            listOfOtherGroups.add(g)
                # Update 'AdminPrivilege' config roles for user
                # If role is empty, roles section of form is disabled - ignore
                # since disabled privileges will not show up in POST data
                if role != '':
                    conn.setConfigRoles(long(eid), form)
                conn.updateExperimenter(
                    experimenter, omename, firstName, lastName, email, admin,
                    active, dGroup, listOfOtherGroups, middleName,
                    institution)
                return HttpResponseRedirect(reverse("waexperimenters"))
            context = {'form': form, 'eid': eid, 'ldapAuth': isLdapUser}
    # elif action == "delete":
    #    conn.deleteExperimenter()
    #    return HttpResponseRedirect(reverse("waexperimenters"))
    else:
        return HttpResponseRedirect(reverse("waexperimenters"))
    context['template'] = template
    return context
@login_required()
@render_response_admin()
def manage_password(request, eid, conn=None, **kwargs):
template = "webadmin/password.html"
error = None
if request.method == 'POST':
password_form = ChangePassword(data=request.POST.copy())
if not password_form.is_valid():
error = password_form.errors
else:
old_password = password_form.cleaned_data['old_password']
password = password_form.cleaned_data['password']
# if we're trying to change our own password...
if conn.getEventContext().userId == int(eid):
try:
conn.changeMyPassword(password, old_password)
except Exception, x:
error = x.message # E.g. old_password not valid
elif conn.isAdmin():
exp = conn.getObject("Experimenter", eid)
try:
conn.changeUserPassword(exp.omeName, password,
old_password)
except Exception, x:
error = x.message
else:
raise AttributeError("Can't change another user's password"
" unless you are an Admin")
context = {'error': error, 'password_form': password_form, 'eid': eid}
context['template'] = template
return context
@login_required(isAdmin=True)
@render_response_admin()
def groups(request, conn=None, **kwargs):
    """List all experimenter groups for the admin UI."""
    can_modify = 'ModifyGroup' in conn.getCurrentAdminPrivileges()
    return {
        'groups': conn.getObjects("ExperimenterGroup"),
        'can_modify_group': can_modify,
        'template': "webadmin/groups.html",
    }
@login_required(isAdmin=True)
@render_response_admin()
def manage_group(request, action, gid=None, conn=None, **kwargs):
    """Administer groups by 'action'.

    Actions: 'new' (blank form), 'create' (POST of new form), 'edit'
    (populated form for group 'gid'), 'save' (POST of edit form). Any
    other action redirects to the group list. Accumulated warning
    messages are joined into context['ome']['message'].
    """
    template = "webadmin/group_form.html"
    msgs = []
    experimenters = list(conn.getObjects("Experimenter"))
    experimenters.sort(key=lambda x: x.getLastName().lower())
    # Build the populated edit-form context for group 'gid'.
    def getEditFormContext():
        group = conn.getObject("ExperimenterGroup", gid)
        ownerIds = [e.id for e in group.getOwners()]
        memberIds = [m.id for m in group.getMembers()]
        permissions = getActualPermissions(group)
        can_modify_group = 'ModifyGroup' in conn.getCurrentAdminPrivileges()
        system_groups = [
            conn.getAdminService().getSecurityRoles().systemGroupId,
            conn.getAdminService().getSecurityRoles().userGroupId,
            conn.getAdminService().getSecurityRoles().guestGroupId]
        group_is_current_or_system = (
            (conn.getEventContext().groupId == long(gid)) or
            (long(gid) in system_groups))
        form = GroupForm(initial={
            'name': group.name,
            'description': group.description,
            'permissions': permissions,
            'owners': ownerIds,
            'members': memberIds,
            'experimenters': experimenters},
            can_modify_group=can_modify_group,
            group_is_current_or_system=group_is_current_or_system)
        admins = [conn.getAdminService().getSecurityRoles().rootId]
        if long(gid) in system_groups:
            # prevent removing 'root' or yourself from group if it's a system
            # group
            admins.append(conn.getUserId())
        return {'form': form, 'gid': gid, 'permissions': permissions,
                'admins': admins, 'can_modify_group': can_modify_group}
    if action == 'new':
        can_modify_group = 'ModifyGroup' in conn.getCurrentAdminPrivileges()
        form = GroupForm(initial={'experimenters': experimenters,
                                  'permissions': 0})
        context = {'form': form, 'can_modify_group': can_modify_group}
    elif action == 'create':
        if request.method != 'POST':
            return HttpResponseRedirect(reverse(viewname="wamanagegroupid",
                                                args=["new"]))
        else:
            # Server-side uniqueness check for the group name.
            name_check = conn.checkGroupName(request.POST.get('name'))
            form = GroupForm(initial={'experimenters': experimenters},
                             data=request.POST.copy(), name_check=name_check)
            if form.is_valid():
                logger.debug("Create group form:" + str(form.cleaned_data))
                name = form.cleaned_data['name']
                description = form.cleaned_data['description']
                owners = form.cleaned_data['owners']
                members = form.cleaned_data['members']
                permissions = form.cleaned_data['permissions']
                perm = setActualPermissions(permissions)
                listOfOwners = getSelectedExperimenters(conn, owners)
                gid = conn.createGroup(name, perm, listOfOwners, description)
                # Owners are implicitly members too.
                new_members = getSelectedExperimenters(
                    conn, mergeLists(members, owners))
                group = conn.getObject("ExperimenterGroup", gid)
                conn.setMembersOfGroup(group, new_members)
                return HttpResponseRedirect(reverse("wagroups"))
            context = {'form': form}
    elif action == 'edit':
        context = getEditFormContext()
    elif action == 'save':
        group = conn.getObject("ExperimenterGroup", gid)
        if request.method != 'POST':
            return HttpResponseRedirect(reverse(viewname="wamanagegroupid",
                                                args=["edit", group.id]))
        else:
            permissions = getActualPermissions(group)
            name_check = conn.checkGroupName(request.POST.get('name'),
                                             group.name)
            form = GroupForm(initial={'experimenters': experimenters},
                             data=request.POST.copy(), name_check=name_check)
            context = {'form': form, 'gid': gid, 'permissions': permissions}
            if form.is_valid():
                logger.debug("Update group form:" + str(form.cleaned_data))
                name = form.cleaned_data['name']
                description = form.cleaned_data['description']
                owners = form.cleaned_data['owners']
                permissions = form.cleaned_data['permissions']
                members = form.cleaned_data['members']
                listOfOwners = getSelectedExperimenters(conn, owners)
                # Only push a permissions change if the level differs.
                if permissions != int(permissions):
                    perm = setActualPermissions(permissions)
                else:
                    perm = None
                context = getEditFormContext()
                context['ome'] = {}
                try:
                    msgs = conn.updateGroup(group, name, perm, listOfOwners,
                                            description)
                except omero.SecurityViolation, ex:
                    if ex.message.startswith('Cannot change permissions'):
                        msgs.append("Downgrade to private group not currently"
                                    " possible")
                    else:
                        msgs.append(ex.message)
                new_members = getSelectedExperimenters(
                    conn, mergeLists(members, owners))
                removalFails = conn.setMembersOfGroup(group, new_members)
                if len(removalFails) == 0 and len(msgs) == 0:
                    return HttpResponseRedirect(reverse("wagroups"))
                # If we've failed to remove user...
                # prepare error messages
                for e in removalFails:
                    url = reverse("wamanageexperimenterid",
                                  args=["edit", e.id])
                    msgs.append("Can't remove user <a href='%s'>%s</a> from"
                                " their only group" % (url, e.getFullName()))
                # refresh the form and add messages
                context = getEditFormContext()
    else:
        return HttpResponseRedirect(reverse("wagroups"))
    context['userId'] = conn.getEventContext().userId
    context['template'] = template
    if len(msgs) > 0:
        context['ome'] = {}
        context['ome']['message'] = "<br>".join(msgs)
    return context
@login_required(isGroupOwner=True)
@render_response_admin()
def manage_group_owner(request, action, gid, conn=None, **kwargs):
    """Group-owner (non-admin) view to edit a group they own.

    Actions: 'edit' shows the populated form; 'save' applies a POSTed
    form (members, owners, permissions). Anything else redirects to
    "My account". Warnings accumulate in context['ome']['message'].
    """
    template = "webadmin/group_form_owner.html"
    group = conn.getObject("ExperimenterGroup", gid)
    experimenters = list(conn.getObjects("Experimenter"))
    userId = conn.getEventContext().userId
    # Build the populated owner-edit form context for this group.
    def getEditFormContext():
        group = conn.getObject("ExperimenterGroup", gid)
        memberIds = [m.id for m in group.getMembers()]
        ownerIds = [e.id for e in group.getOwners()]
        permissions = getActualPermissions(group)
        form = GroupOwnerForm(initial={'permissions': permissions,
                                       'members': memberIds,
                                       'owners': ownerIds,
                                       'experimenters': experimenters})
        context = {'form': form, 'gid': gid, 'permissions': permissions,
                   "group": group}
        # Users whose default group is this one (cannot simply be removed).
        experimenterDefaultIds = list()
        for e in experimenters:
            if (e != userId and e.getDefaultGroup() is not None and
                    e.getDefaultGroup().id == group.id):
                experimenterDefaultIds.append(str(e.id))
        context['experimenterDefaultGroups'] = ",".join(experimenterDefaultIds)
        context['ownerIds'] = (",".join(str(x) for x in ownerIds
                               if x != userId))
        return context
    msgs = []
    if action == 'edit':
        context = getEditFormContext()
    elif action == "save":
        if request.method != 'POST':
            return HttpResponseRedirect(
                reverse(viewname="wamanagegroupownerid",
                        args=["edit", group.id]))
        else:
            form = GroupOwnerForm(data=request.POST.copy(),
                                  initial={'experimenters': experimenters})
            if form.is_valid():
                members = form.cleaned_data['members']
                owners = form.cleaned_data['owners']
                permissions = form.cleaned_data['permissions']
                listOfOwners = getSelectedExperimenters(conn, owners)
                conn.setOwnersOfGroup(group, listOfOwners)
                new_members = getSelectedExperimenters(conn, members)
                removalFails = conn.setMembersOfGroup(group, new_members)
                # Only push a permissions change if the level differs.
                permissions = int(permissions)
                if getActualPermissions(group) != permissions:
                    perm = setActualPermissions(permissions)
                    try:
                        msg = conn.updatePermissions(group, perm)
                        if msg is not None:
                            msgs.append(msg)
                    except omero.SecurityViolation, ex:
                        if ex.message.startswith('Cannot change permissions'):
                            msgs.append("Downgrade to private group not"
                                        " currently possible")
                        else:
                            msgs.append(ex.message)
                if len(removalFails) == 0 and len(msgs) == 0:
                    return HttpResponseRedirect(reverse("wamyaccount"))
                # If we've failed to remove user...
                # prepare error messages
                for e in removalFails:
                    url = reverse("wamanageexperimenterid",
                                  args=["edit", e.id])
                    msgs.append("Can't remove user <a href='%s'>%s</a> from"
                                " their only group" % (url, e.getFullName()))
                # refresh the form and add messages
                context = getEditFormContext()
            else:
                context = {'gid': gid, 'form': form}
    else:
        return HttpResponseRedirect(reverse("wamyaccount"))
    context['userId'] = userId
    context['template'] = template
    if len(msgs) > 0:
        context['ome'] = {}
        context['ome']['message'] = "<br>".join(msgs)
    return context
@login_required()
@render_response_admin()
def my_account(request, action=None, conn=None, **kwargs):
    """Show and (on action == 'save') update the current user's details."""
    template = "webadmin/myaccount.html"
    experimenter, defaultGroup, otherGroups, isLdapUser, hasAvatar = \
        prepare_experimenter(conn)
    try:
        defaultGroupId = defaultGroup.id
    except:
        # User may have no default group set.
        defaultGroupId = None
    ownedGroups = ownedGroupsInitial(conn)
    password_form = ChangePassword()
    form = None
    if action == "save":
        if request.method != 'POST':
            return HttpResponseRedirect(reverse(viewname="wamyaccount",
                                                args=["edit"]))
        else:
            # Email uniqueness check excludes the user's current address.
            email_check = conn.checkEmail(request.POST.get('email'),
                                          experimenter.email)
            form = MyAccountForm(data=request.POST.copy(),
                                 initial={'groups': otherGroups},
                                 email_check=email_check)
            if form.is_valid():
                firstName = form.cleaned_data['first_name']
                middleName = form.cleaned_data['middle_name']
                lastName = form.cleaned_data['last_name']
                email = form.cleaned_data['email']
                institution = form.cleaned_data['institution']
                defaultGroupId = form.cleaned_data['default_group']
                conn.updateMyAccount(
                    experimenter, firstName, lastName, email, defaultGroupId,
                    middleName, institution)
                return HttpResponseRedirect(reverse("wamyaccount"))
    else:
        # Plain view (or invalid action): show the populated form.
        form = MyAccountForm(initial={
            'omename': experimenter.omeName,
            'first_name': experimenter.firstName,
            'middle_name': experimenter.middleName,
            'last_name': experimenter.lastName,
            'email': experimenter.email,
            'institution': experimenter.institution,
            'default_group': defaultGroupId,
            'groups': otherGroups})
    context = {'form': form, 'ldapAuth': isLdapUser,
               'experimenter': experimenter, 'ownedGroups': ownedGroups,
               'password_form': password_form}
    context['freeSpace'] = conn.getFreeSpace()
    context['template'] = template
    return context
@login_required()
def myphoto(request, conn=None, **kwargs):
    """Serve the current user's avatar photo as a JPEG response."""
    photo = conn.getExperimenterPhoto()
    return HttpJPEGResponse(photo)
@login_required()
@render_response_admin()
def manage_avatar(request, action=None, conn=None, **kwargs):
    """Upload, crop, edit or delete the current user's avatar photo.

    Actions: 'upload' (POST of a photo file), 'crop' (POST of crop box
    coordinates), 'editphoto' (switch the page into crop mode),
    'deletephoto'. With no action, just renders the avatar page.
    """
    template = "webadmin/avatar.html"
    edit_mode = False
    photo_size = None
    form_file = UploadPhotoForm()
    if action == "upload":
        if request.method == 'POST':
            form_file = UploadPhotoForm(request.POST, request.FILES)
            if form_file.is_valid():
                attach_photo(conn, request.FILES['photo'])
                return HttpResponseRedirect(
                    reverse(viewname="wamanageavatar",
                            args=[conn.getEventContext().userId]))
    elif action == "crop":
        # Crop box comes from the client-side cropper as pixel coords.
        x1 = long(request.POST.get('x1'))
        x2 = long(request.POST.get('x2'))
        y1 = long(request.POST.get('y1'))
        y2 = long(request.POST.get('y2'))
        box = (x1, y1, x2, y2)
        conn.cropExperimenterPhoto(box)
        return HttpResponseRedirect(reverse("wamyaccount"))
    elif action == "editphoto":
        photo_size = conn.getExperimenterPhotoSize()
        if photo_size is not None:
            edit_mode = True
    elif action == "deletephoto":
        conn.deleteExperimenterPhoto()
        return HttpResponseRedirect(reverse("wamyaccount"))
    photo_size = conn.getExperimenterPhotoSize()
    context = {'form_file': form_file, 'edit_mode': edit_mode,
               'photo_size': photo_size}
    context['template'] = template
    return context
@login_required()
@render_response_admin()
def stats(request, conn=None, **kwargs):
    """Render the statistics page showing the server's free disk space."""
    return {
        'template': "webadmin/statistics.html",
        'freeSpace': conn.getFreeSpace(),
    }
# @login_required()
# def load_drivespace(request, conn=None, **kwargs):
# offset = request.POST.get('offset', 0)
# rv = usersData(conn, offset)
# return HttpJsonResponse(rv)
@login_required(isAdmin=True)
@render_response_admin()
def email(request, conn=None, **kwargs):
    """
    View to gather recipients, subject and message for sending email
    announcements
    """
    # Check that the appropriate web settings are available.
    # Bug fix: default to an empty dict, not False - chaining .get() on
    # False raised AttributeError whenever 'server_settings' was missing.
    if (not request.session.get('server_settings', {})
            .get('email', False)):
        return {'template': 'webadmin/noemail.html'}
    context = {'template': 'webadmin/email.html'}

    # Get experimenters and groups.
    experimenter_list = list(conn.getObjects("Experimenter"))
    group_list = list(conn.getObjects("ExperimenterGroup"))

    # Sort experimenters and groups
    experimenter_list.sort(key=lambda x: x.getFirstName().lower())
    group_list.sort(key=lambda x: x.getName().lower())

    if request.method == 'POST':  # If the form has been submitted...
        # ContactForm was defined in the the previous section
        form = EmailForm(experimenter_list, group_list, conn, request,
                         data=request.POST.copy())
        if form.is_valid():  # All validation rules pass
            subject = form.cleaned_data['subject']
            message = form.cleaned_data['message']
            experimenters = form.cleaned_data['experimenters']
            groups = form.cleaned_data['groups']
            everyone = form.cleaned_data['everyone']
            inactive = form.cleaned_data['inactive']

            req = omero.cmd.SendEmailRequest(subject=subject, body=message,
                                             groupIds=groups,
                                             userIds=experimenters,
                                             everyone=everyone,
                                             inactive=inactive)
            handle = conn.c.sf.submit(req)
            if handle is not None:
                # Track the asynchronous send so the activities window
                # can report its progress.
                request.session.modified = True
                request.session['callback'][str(handle)] = {
                    'job_type': 'send_email',
                    'status': 'in progress', 'error': 0,
                    'start_time': datetime.datetime.now()}
            form = EmailForm(experimenter_list, group_list, conn, request)
            # Bug fix: the two literals previously concatenated without a
            # separating space ("Email sent.Check status...").
            context['non_field_errors'] = ("Email sent. "
                                           "Check status in activities.")
        else:
            context['non_field_errors'] = "Email wasn't sent."
    else:
        form = EmailForm(experimenter_list, group_list, conn, request)
    context['form'] = form
    return context
# Problem where render_response_admin was not populating required
# admin details:
# Explanation is that the CBV FormView returns an http response so the
# decorator render_response_admin simply bails out and returns this
# I think maybe the render_response decorator should not be adding context
# because it fails in situations like this, better to insert that context
# using a template tag when required
| simleo/openmicroscopy | components/tools/OmeroWeb/omeroweb/webadmin/views.py | Python | gpl-2.0 | 43,638 |
"""
Author: Seyed Hamidreza Mohammadi
This file is part of the shamidreza/uniselection software.
Please refer to the LICENSE provided alongside the software (which is GPL v2,
http://www.gnu.org/licenses/gpl-2.0.html).
This file includes the code for putting all the pieces together.
"""
from utils import *
from extract_unit_info import *
from search import *
from generate_speech import *
if __name__ == "__main__":
    # Experiment driver: the 'if 0:' / 'if 1:' toggles select which test
    # path runs; paths below are hard-coded to the author's HTS demo data.
    if 0: # test pit2gci
        pit_file='/Users/hamid/Code/hts/HTS-demo_CMU-ARCTIC-SLT2/gen/qst001/ver1/2mix/2/alice01.lf0'
        target_gci = pit2gci(pit_file)
    if 1: # test read_dur,pit,for methods
        dur_file='/Users/hamid/Code/hts/HTS-demo_CMU-ARCTIC-SLT2/gen/qst001/ver1/2mix/2/alice01.dur'
        for_file='/Users/hamid/Code/hts/HTS-demo_CMU-ARCTIC-SLT2/gen/qst001/ver1/2mix/2/alice01.for'
        pit_file='/Users/hamid/Code/hts/HTS-demo_CMU-ARCTIC-SLT2/gen/qst001/ver1/2mix/2/alice01.lf0'
        #a=read_hts_for(for_file)
        #b=read_hts_pit(pit_file)
        #c=read_hts_dur(dur_file)
        fname = 'arctic_a0001'
        lab_name=corpus_path+'/lab/'+fname+'.lab'
        wav_name=corpus_path+'/wav/'+fname+'.wav'
        pm_name=corpus_path+'/pm/'+fname+'.pm'
        ##target_units = load_input(lab_name)
        #times, labs = read_lab(lab_name)
        ##tmp_units=extract_info(lab_name, wav_name, 0,0)
        # Derive pitch marks (GCIs) and formants from the HTS output files,
        # then build the target unit sequence from the duration/pitch files.
        times, pits, vox_times, vox_vals = read_hts_pit(pit_file)
        frm_time, frm_val = read_hts_for(for_file)
        gcis=pit2gci(times, pits, vox_times, vox_vals)
        tmp_units, times=read_input_lab(dur_file, pit_file)
        #tmp_units = tmp_units[128:140]##
        target_units = np.zeros(len(tmp_units), 'object')
        for j in xrange(len(tmp_units)):
            target_units[j] = tmp_units[j]
    if 0:
        # Run the (slow) unit-selection search and cache the result.
        units, fnames=load_units()
        units = units[:int(units.shape[0]*(100.0/100.0))]
        best_units_indice=search(target_units, units,limit=20)
        best_units = units[best_units_indice]
        f=open('tmp2.pkl','w+')
        import pickle
        pickle.dump(best_units,f)
        pickle.dump(fnames,f)
        f.flush()
        f.close()
    else:
        # Reuse the cached search result from a previous run.
        f=open('tmp2.pkl','r')
        import pickle
        best_units=pickle.load(f)
        fnames=pickle.load(f)
        #best_units = best_units[128:140]##
        f.close()
    for i in xrange(target_units.shape[0]):
        print target_units[i].phone, best_units[i].phone, best_units[i].unit_id
    #wavs=concatenate_units_overlap(best_units, fnames)
    #gcis = gcis[(gcis>times[128]) * (gcis<times[140])]
    #gcis -= times[128]
    ##$frm_time, frm_val = units2for(best_units, fnames, times, frm_time, frm_val)
    # Convert times from seconds to samples (16 kHz) for synthesis.
    frm_time *= 16000.0
    gcis=units2gci(best_units, fnames)##$
    gcis = np.array(gcis)
    ##$gcis *= 16000
    gcis = gcis.astype(np.uint32)
    old_times = np.array(times).copy()
    old_times *= 16000.0
    times=units2dur(best_units, fnames)##$
    times = np.array(times)
    ##$times *= 16000
    times = times.astype(np.uint32)
    #times = times[128:141]##
    #aa=times[0]##
    #for i in range(len(times)):##
    #times[i] -= aa##
    #frm_time *= 16000
    # PSOLA-style concatenation with 50% overlap, then write the result.
    wavs=concatenate_units_psola_har_overlap(best_units, fnames, old_times, times, gcis, frm_time, frm_val, overlap=0.5)
    #wavs=concatenate_units_nooverlap(best_units, fnames)
    ftime, fval = get_formant(wavs, 16000)
    from scipy.io.wavfile import write as wwrite
    wwrite('out.wav', 16000, wavs)
    print 'successfully saved out.wav'
#!/adsc/DDEA_PROTO/bin/python
from df_data_analysis_ddea import ddea_analysis
from datetime import datetime
import traceback
import sys
if __name__ == '__main__':
    # CLI entry point: expects a start and end date in YY-MM-DD form and
    # runs the DDEA analysis over that interval.
    try:
        if 3 <= len(sys.argv):
            ###urls = open(sys.argv[1]).readlines()
            start_time = sys.argv[1]
            end_time = sys.argv[2]
            stime = datetime.strptime(start_time, "%y-%m-%d")
            etime = datetime.strptime(end_time, "%y-%m-%d")
            ddea_analysis('', stime, etime)
        else:
            # Bug fix: raising a plain string is a TypeError since
            # Python 2.6; raise a real exception type instead.
            raise ValueError("Invalid Arguments")
    except Exception:
        # Bug fix: 'print traceback.print_exc()' printed a spurious
        # 'None' (print_exc returns None); also avoid a bare 'except:'.
        traceback.print_exc()
        print("Example: %s 14-01-01 14-02-02" % sys.argv[0])
        raise SystemExit
| TinyOS-Camp/DDEA-DEV | Archive/[14_10_11] Dr_Jung_Update/ddea_cli.py | Python | gpl-2.0 | 688 |
# Elasticsearch index and document-type names under which Brainiak
# stored queries are persisted.
ES_INDEX_NAME = "brainiak"
ES_TYPE_NAME = "query"
| bmentges/brainiak_api | src/brainiak/stored_query/__init__.py | Python | gpl-2.0 | 50 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# UI label constants, grouped by the widget they appear on.
# Menu bar labels.
MENU_FILE = "File"
# "File" menu item labels.
FILE_NEW = "New"
FILE_OPEN = "Open"
FILE_EXIT = "Exit"
# Notebook tab labels.
TAB_DATA = "Data"
TAB_SQL = "SQL"
# Button labels.
BUTTON_EXIT = "Exit"
# Symantec BackupExec
# CVE-2007-6016,CVE-2007-6017
import logging
log = logging.getLogger("Thug")
def Set_DOWText0(self, val):
    """Setter for the _DOWText0 property.

    Values longer than 255 characters overflow the control's buffer
    (CVE-2007-6016), so they are logged and scanned for shellcode.
    """
    self.__dict__['_DOWText0'] = val
    if len(val) <= 255:
        return
    log.ThugLogging.log_exploit_event(self._window.url,
                                      "Symantec BackupExec ActiveX",
                                      "Overflow in property _DOWText0",
                                      cve = 'CVE-2007-6016')
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-6016", None)
    log.DFT.check_shellcode(val)
def Set_DOWText6(self, val):
    """Setter for the _DOWText6 property.

    Values longer than 255 characters overflow the control's buffer
    (CVE-2007-6016), so they are logged and scanned for shellcode.
    """
    self.__dict__['_DOWText6'] = val
    if len(val) <= 255:
        return
    log.ThugLogging.log_exploit_event(self._window.url,
                                      "Symantec BackupExec ActiveX",
                                      "Overflow in property _DOWText6",
                                      cve = 'CVE-2007-6016')
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-6016", None)
    log.DFT.check_shellcode(val)
def Set_MonthText0(self, val):
    """Setter for the _MonthText0 property.

    Values longer than 255 characters overflow the control's buffer
    (CVE-2007-6016), so they are logged and scanned for shellcode.
    """
    self.__dict__['_MonthText0'] = val
    if len(val) > 255:
        # Bug fix: the logged property name was '_MonthText6' (copy-paste
        # error); this setter handles _MonthText0.
        log.ThugLogging.log_exploit_event(self._window.url,
                                          "Symantec BackupExec ActiveX",
                                          "Overflow in property _MonthText0",
                                          cve = 'CVE-2007-6016')
        log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2007-6016", None)
        log.DFT.check_shellcode(val)
def Set_MonthText11(self, val):
    """Setter for the _MonthText11 property.

    Values longer than 255 characters indicate the CVE-2007-6016 stack
    overflow attempt; log it and scan the payload for shellcode.
    """
    self.__dict__['_MonthText11'] = val
    if len(val) <= 255:
        return
    thug_log = log.ThugLogging
    thug_log.log_exploit_event(self._window.url,
                               "Symantec BackupExec ActiveX",
                               "Overflow in property _MonthText11",
                               cve = 'CVE-2007-6016')
    thug_log.log_classifier("exploit", thug_log.url, "CVE-2007-6016", None)
    log.DFT.check_shellcode(val)
def Save(self, a, b):
    """No-op stub for the ActiveX control's Save() method.

    Both arguments are accepted and ignored; returns None.
    """
    pass
| fedelemantuano/thug | thug/ActiveX/modules/SymantecBackupExec.py | Python | gpl-2.0 | 2,143 |
# -*- coding: utf-8 -*-
'''
Created on Nov 14, 2014
Implementation of a hand speed-based segmentation module
@author: Arturo Curiel
'''
import zope.interface as zi
try:
import magic
except:
import nixtla.core.tools.magic_win as magic
from nixtla.core.base_module import BaseModule
from nixtla.segmentation.interface import ISegmentationModule
from nixtla.segmentation.handspeed_based.adapters import FromTextTracking
from nixtla.segmentation.handspeed_based.markers import IHandSpeedBasedSegments
from nixtla.segmentation.handspeed_based.tools import IntervalList
class SegmentationModule(BaseModule):
    """Hand speed-based segmentation.

    Splits a tracked sign-language stream into segments wherever hand speed
    drops below a configurable threshold, emitting one interval per
    articulator through the module's output channels.
    """
    zi.implements(ISegmentationModule)
    def __init__(self, **args):
        # Configuration values arrive as strings; each bare except falls back
        # to the documented default when the key is missing or unparsable.
        try:
            self.analysis_window = int(args['analysis_window'])
        except:
            self.analysis_window = 5
        try:
            self.speed_threshold = int(args['speed_threshold'])
        except:
            self.speed_threshold = 6.0
        try:
            # 'articulators' is given as a bracketed, comma-separated string,
            # e.g. "[right_hand, left_hand]"; strip the decoration and split.
            self.articulators = args['articulators'].\
                                replace('[','').\
                                replace(']','').\
                                replace(' ','').\
                                split(",")
        except:
            self.articulators = ['right_hand', 'left_hand']
        self.numeric_data = None
        # Register adapters
        self.register_module_adapters(FromTextTracking)
        # The first articulator drives the interval detection.
        self.interval_list = IntervalList(self.speed_threshold,
                                          articulators=self.articulators,
                                          driver=self.articulators[0])
        super(SegmentationModule, self).__init__(IHandSpeedBasedSegments)
    def callable(self, input_data):
        # input_data is a (signer_id, numeric_data) pair; numeric_data is
        # sliced row by row (presumably a pandas DataFrame — TODO confirm).
        signer_id, numeric_data = input_data
        self.numeric_data = numeric_data
        for i in range(len(numeric_data)-1):
            row = numeric_data[i:i+1]
            # We get each information row and pass it to determine where in
            # the segmentation it belongs
            for articulator in self.articulators:
                results = self.interval_list.\
                          include_in_articulator_interval_2window(
                                                           articulator,
                                                           row)
                # A non-empty result means one or more intervals closed;
                # forward each to downstream modules.
                if results:
                    for result in results:
                        self.send_to_channels((signer_id,
                                               {articulator:result}
                                               ))
        return True
    def check_input_compliance(self, input_data):
        """Assert that input_data is segmentable"""
        # Check that we are still passing a valid
        # video file and an id
        assert len(input_data) == 2
        assert "str" in str(type(input_data[0]))
        # Check that we are passing speeds, the only
        # measure truly needed to calculate segments
        for articulator in self.articulators:
            assert not input_data[1][articulator+'_v'].empty
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright: Red Hat Inc. 2015
# Author: Cleber Rosa <cleber@redhat.com>
"""
This is the main entry point for the rest client cli application
"""
import sys
import types
import importlib
import functools
from . import parser
from .. import connection
from ... import settings
from ... import output
from ... import exit_codes
__all__ = ['App']
class App(object):
    """
    Base class for CLI application
    """
    def __init__(self):
        """
        Initializes a new app instance.
        This class is intended both to be used by the stock client application
        and also to be reused by custom applications. If you want, say, to
        limit the amount of command line actions and its arguments, you can
        simply supply another argument parser class to this constructor. Of
        course another way to customize it is to inherit from this and modify
        its members at will.
        """
        self.connection = None
        self.parser = parser.Parser()
        self.parser.add_arguments_on_all_modules()
        self.view = output.View()
    def initialize_connection(self):
        """
        Initialize the connection instance
        """
        # Connection parameters come from the already-parsed CLI args; any
        # failure is fatal and exits with the job-failure code.
        try:
            self.connection = connection.Connection(
                hostname=self.args.hostname,
                port=self.args.port,
                username=self.args.username,
                password=self.args.password)
        except connection.InvalidConnectionError:
            self.view.notify(event="error",
                             msg="Error: could not connect to the server")
            sys.exit(exit_codes.AVOCADO_JOB_FAIL)
        except connection.InvalidServerVersionError:
            # NOTE(review): "higher than than" typo in this user-facing
            # message — fix in a behavior-changing patch.
            self.view.notify(event="error",
                             msg=("REST server version is higher than "
                                  "than this client can support."))
            self.view.notify(event="error",
                             msg=("Please use a more recent version "
                                  "of the REST client application."))
            sys.exit(exit_codes.AVOCADO_JOB_FAIL)
    def dispatch_action(self):
        """
        Calls the actions that was specified via command line arguments.
        This involves loading the relevant module file.
        """
        module_name = "%s.%s" % ('avocado.core.restclient.cli.actions',
                                 self.args.top_level_action)
        try:
            module = importlib.import_module(module_name)
        except ImportError:
            # Unknown action module: silently return None (caller exits 0/1
            # only for int/bool results).
            return
        # Filter out the attributes out of the loaded module that look
        # like command line actions, based on type and 'is_action' attribute
        module_actions = {}
        for attribute_name in module.__dict__:
            attribute = module.__dict__[attribute_name]
            if (isinstance(attribute, types.FunctionType) and
                    hasattr(attribute, 'is_action')):
                if attribute.is_action:
                    module_actions[attribute_name] = attribute
        # The chosen action is the first action whose flag is set in args.
        chosen_action = None
        for action in module_actions.keys():
            if getattr(self.args, action, False):
                chosen_action = action
                break
        kallable = module_actions.get(chosen_action, None)
        if kallable is not None:
            # Only connect once we know there is something to run.
            self.initialize_connection()
            return kallable(self)
        else:
            self.view.notify(event="error",
                             msg="Action specified is not implemented")
    def run(self):
        """
        Main entry point for application
        """
        action_result = None
        try:
            self.args = self.parser.parse_args()
            action_result = self.dispatch_action()
        except KeyboardInterrupt:
            print 'Interrupted'
        # int results are used directly as exit status; bools map to 0/1.
        if isinstance(action_result, int):
            sys.exit(action_result)
        elif isinstance(action_result, bool):
            if action_result is True:
                sys.exit(0)
            else:
                sys.exit(1)
# -*- coding: utf-8 -*-
from gi.overrides.keysyms import musicalflat
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
from PIL import Image
from pygame.mixer import music
class BancoReserva:
    """Substitutes' bench: three benches in a row, compiled as a display list."""
    def __init__(self):
        self.textura1 = glGenTextures(1)   # texture handle for the logo decal
        self.obj = GLuint()                # display-list id, filled by desenhar()
    def carrega_imagem(self):
        # Upload ../objs/logo.png as the currently bound GL_TEXTURE_2D image.
        im = Image.open("../objs/logo.png", "r")
        try:
            ix, iy, image = im.size[0], im.size[1], im.tostring("raw", "RGBA", 0, -1)
        except SystemError:
            # Image modes without alpha raise SystemError; repack as RGBX.
            ix, iy, image = im.size[0], im.size[1], im.tostring("raw", "RGBX", 0, -1)
        #glBindTexture(GL_TEXTURE_2D, textura1)
        glPixelStorei(GL_UNPACK_ALIGNMENT, 1)
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
        glTexImage2D(
            GL_TEXTURE_2D, 0, 3, ix, iy, 0,
            GL_RGBA, GL_UNSIGNED_BYTE, image
        )
    def desenhar(self):
        # Compile the geometry once; executar() replays the display list.
        self.obj = glGenLists(43)
        glNewList(self.obj,GL_COMPILE)
        glPushMatrix()
        glTranslate(0,0,-15)
        glPushMatrix()
        # Three benches spaced 5 units apart along Z.
        for s in range(3):
            glTranslate(0.0, 0.0, 5)
            self.banco()
        glPopMatrix()
        glPopMatrix()
        glEndList()
    def banco(self):
        # One bench: white base, dark back and top, light seats, logo decal.
        glPushMatrix()
        glTranslate(-1.8,-0.85,0)
        glRotate(-90, 0,1,0)
        glScalef(.05,.05,.05)
        glPushMatrix()
        # base
        glPushMatrix()
        glColor3f(1,1,1)
        glScalef(16,0.2,4)
        glTranslate(0,-17,-0.5)
        glutSolidCube(1)
        glPopMatrix()
        # backrest
        glPushMatrix()
        glColor3f(0,0,0)
        glScalef(15,7,1)
        glTranslate(0,0,0)
        glutSolidCube(1)
        glPopMatrix()
        # top cover
        glPushMatrix()
        glColor3f(0,0,0)
        glScalef(15,0.2,4)
        glTranslate(0,17,-0.6)
        glutSolidCube(1)
        glPopMatrix()
        # seats
        glPushMatrix()
        glColor3f(0.9,0.9,0.9)
        glScalef(13,2,1.5)
        glTranslate(0,-1.2,-0.6)
        glutSolidCube(1)
        glPopMatrix()
        # textured logo quad
        self.carrega_imagem()
        glEnable(GL_TEXTURE_2D)
        glRotate(90, 0,1,0)
        glRotate(90, 0,0,1)
        glTranslate(1.55,-2,0)
        glScale(2,2.2,2)
        glBegin(GL_QUADS)
        glColor3f(1,1,1)
        glTexCoord2f(1.0, 0.0)
        glVertex3f( 1.0, -1.0, -1.0)
        glTexCoord2f(1.0, 1.0)
        glVertex3f( 1.0, 1.0, -1.0)
        glTexCoord2f(0.0, 1.0)
        glVertex3f( 1.0, 1.0, 1.0)
        glTexCoord2f(0.0, 0.0)
        glVertex3f( 1.0, -1.0, 1.0)
        glEnd()
        glDisable(GL_TEXTURE_2D)
        glPopMatrix()
        glPopMatrix()
    def executar(self):
        # Replay the precompiled display list.
        glCallList(self.obj)
class Ceu:
    """Sky dome: a large textured sphere slowly rotating around the Y axis."""
    def __init__(self):
        self.obj = GLuint()
        self.quad = gluNewQuadric()
        self.texturaID = GLuint()
        self._textureID = self.carrega_textura("../objs/ceu.jpg")
        self.rotate = 0   # accumulated rotation angle, advanced by executar()
    def carrega_textura(self, caminho):
        # Load the image at *caminho* and upload it as a 2D texture.
        # Returns the texture id.
        im = Image.open(caminho, "r")
        try:
            ix, iy, image = im.size[0], im.size[1], im.tostring("raw", "RGBA", 0, -1)
        except SystemError:
            ix, iy, image = im.size[0], im.size[1], im.tostring("raw", "RGBX", 0, -1)
        self.textura1 = glGenTextures(1, self.texturaID)
        glBindTexture(GL_TEXTURE_2D, self.texturaID)
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, ix, iy, 0, GL_RGBA, GL_UNSIGNED_BYTE, image)
        return self.texturaID
    def desenhar(self):
        # NOTE(review): the texture is re-read from disk and re-uploaded on
        # every frame; it could be uploaded once in __init__ and only bound
        # here. Confirm before changing — behavior kept as-is.
        self._textureID = self.carrega_textura("../objs/ceu.jpg")
        glEnable(GL_TEXTURE_2D)
        glEnable(GL_DEPTH_TEST)
        glEnable(GL_NORMALIZE)
        glEnable(GL_COLOR_MATERIAL)
        #glBindTexture(GL_TEXTURE_2D, self._textureID)
        glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
        glFrontFace(GL_CW)
        #glMaterial(GL_FRONT_AND_BACK)
        glRotatef(-45,1,1,1)
        glPushMatrix()
        glPushMatrix()
        gluQuadricTexture(self.quad, 1)
        glDisable(GL_CULL_FACE)
        glRotate(self.rotate,0,1,0)
        gluSphere(self.quad, 50, 50, 50)
        glEnable(GL_DEPTH_TEST)
        glDisable(GL_TEXTURE_2D)
        glFrontFace(GL_CCW)
        glPopMatrix()
        glPopMatrix()
        glutSwapBuffers()
    def executar(self):
        # Draw, then advance the rotation slightly and request a redisplay.
        self.desenhar()
        self.rotate += .15
        glutPostRedisplay()
class Placar:
    """Scoreboard: two support columns, a main panel and a textured face."""
    def __init__(self):
        self.textura1 = glGenTextures(1)   # texture handle for the board face
        self.obj = GLuint()                # display-list id, filled by desenhar()
    def carrega_imagem(self):
        # Upload ../objs/placar.jpg as the currently bound GL_TEXTURE_2D image.
        im = Image.open("../objs/placar.jpg", "r")
        try:
            ix, iy, image = im.size[0], im.size[1], im.tostring("raw", "RGBA", 0, -1)
        except SystemError:
            # Image modes without alpha raise SystemError; repack as RGBX.
            ix, iy, image = im.size[0], im.size[1], im.tostring("raw", "RGBX", 0, -1)
        #glBindTexture(GL_TEXTURE_2D, textura1)
        glPixelStorei(GL_UNPACK_ALIGNMENT, 1)
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
        glTexImage2D(
            GL_TEXTURE_2D, 0, 3, ix, iy, 0,
            GL_RGBA, GL_UNSIGNED_BYTE, image
        )
    def desenhar(self):
        # Compile the scoreboard geometry into a display list.
        self.obj = glGenLists(3)
        glNewList(self.obj,GL_COMPILE)
        glPushMatrix()
        glTranslatef(10, -.15, 5)
        glScale(.05,.05,.05)
        glRotate(35, 0, 1, 0)
        # left column
        glPushMatrix()
        glColor3f(1,1,1)
        glScalef(2,70,1.5)
        glTranslate(-0.7,0,0)
        glutSolidCube(0.5)
        glPopMatrix()
        # right column
        glPushMatrix()
        glColor3f(1,1,1)
        glScalef(2,70,1.5)
        glTranslate(6.9,0,0)
        glutSolidCube(0.5)
        glPopMatrix()
        # main panel
        glPushMatrix()
        glColor3f(0.3,0.3,0.3)
        glScalef(25,14,1)
        glTranslate(0.25,1.7,0)
        glutSolidCube(1)
        glPopMatrix()
        # textured scoreboard face
        self.carrega_imagem()
        glEnable(GL_TEXTURE_2D)
        glRotate(90, 0,1,0)
        glTranslate(-12.3,24,6)
        glScale(13,7,12)
        glBegin(GL_QUADS)
        glColor3f(1,1,1)
        glTexCoord2f(1.0, 0.0)
        glVertex3f( 1.0, -1.0, -1.0)
        glTexCoord2f(1.0, 1.0)
        glVertex3f( 1.0, 1.0, -1.0)
        glTexCoord2f(0.0, 1.0)
        glVertex3f( 1.0, 1.0, 1.0)
        glTexCoord2f(0.0, 0.0)
        glVertex3f( 1.0, -1.0, 1.0)
        glEnd()
        glDisable(GL_TEXTURE_2D)
        glPopMatrix()
        glEndList()
    def executar(self):
        # Replay the precompiled display list.
        glCallList(self.obj)
class Campo:
def __init__(self):
self.textura1 = glGenTextures(1)
self.obj = GLuint()
def carrega_textura(self):
im = Image.open("../objs/campo.jpg", "r")
try:
ix, iy, image = im.size[0], im.size[1], im.tostring("raw", "RGBA", 0, -1)
except SystemError:
ix, iy, image = im.size[0], im.size[1], im.tostring("raw", "RGBX", 0, -1)
glPixelStorei(GL_UNPACK_ALIGNMENT, 1)
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
glTexImage2D(GL_TEXTURE_2D, 0, 3, ix, iy, 0, GL_RGBA, GL_UNSIGNED_BYTE, image)
def trave(self):
glTranslate(4,0,0)
glScalef(0.4, 1, 3)
glPushMatrix()
self.rede()
glPushMatrix(0, 0, -5)
glScalef(1.5, 1, 1)
glTranslate(0.0, 5.0, 0.0)
glRotatef(90, 0, 1, 0)
self.rede()
glPopMatrix()
glPushMatrix()
glScalef(1.5, 1, 1)
glTranslate(0.0, 5.0, 0.0)
glRotatef(90, 0, 0, 1)
self.rede()
glTranslate(-5.0, 5.0, 0.0)
self.rede()
glPopMatrix()
glPopMatrix()
def rede(self):
glPushMatrix()
glColor3f(1, 1, 1)
#Isolando para que nao aja problemas quando rotacionar e transladar.
glPushMatrix()
glutSolidCylinder(0.05, 5, 30, 10)
glTranslate( 0.0, -5.0, 0.0)
glutSolidCylinder(0.05, 5, 30, 10)
glPopMatrix()
glPushMatrix()
glRotatef(90, 1.0, 0.0, 0.0)
glutSolidCylinder(0.05, 5, 30, 10)
glTranslate( 0.0, 5.0, 0.0)
glutSolidCylinder(0.05, 5, 30, 10)
glPopMatrix()
glPopMatrix()
#Estrura interna vertical.
glPushMatrix()
glScalef(0.0, 10, 0.5)
glTranslate(0.0, -0.25, -0.7)
for s in range(11):
glTranslate( 0.0, 0.0, 0.95)
glutWireCube(0.5)
glPopMatrix()
#Estrura interna horizontal.
glRotatef(90, .1, .0, .0)
glPushMatrix()
glScalef(0.0, 10, 0.5)
glTranslate(0.0, 0.25, -0.6)
for s in range(10):
glTranslate(0.0, 0.0, 0.95)
glutWireCube(0.5)
glPopMatrix()
def redeTrasGol(self):
glPushMatrix()
glScalef(1, 1, 3)
glColor3f(0.2, 0.2, 0.2)
glutSolidCylinder(0.02, 5, 100, 50)
distanciaEntreTraves = -15
glTranslate( 0.0, distanciaEntreTraves, 0.0)
glutSolidCylinder(0.015, 5, 30, 30)
glPopMatrix()
#Estrura interna horizontal.
glPushMatrix()
glScalef(0.0, 30, 0.67)
glTranslate(0.0, -0.25, -0.55)
for s in range(23):
glTranslate(0.0, 0.0, 1.0)
glutWireCube(0.5)
glPopMatrix()
#Estrura interna .vertical
glRotatef(90, .1, .0, .0)
glPushMatrix()
glScalef(0.0, 30, 0.64)
glTranslate( 0.0, 0.255, -0.5)
for s in range(23):
glTranslate(0.0, 0.0, 1.0)
glutWireCube(0.5)
glPopMatrix()
def Campo(self):
self.carrega_textura()
glEnable(GL_TEXTURE_2D)
glBegin(GL_POLYGON)# objeto
glColor3f(1, 1, 1)
glVertex3f(8.0, -1, 3.0) # ponto de vertice
glTexCoord2f(1.0, 0.0)
glVertex3f(8.0, -1, -14.0) # ponto de vertice
glTexCoord2f(1.0, 1.0)
glVertex3f(-2.0, -1, -14.0) # ponto de vertice
glTexCoord2f(0.0, 1.0)
glVertex3f(-2.0, -1, 3.0) # ponto de vertice
glTexCoord2f(0.0, 0.0)
glEnd()
glDisable(GL_TEXTURE_2D)
def bandeirinha(self):
glPushMatrix()
glColor3f(0.8, 0.8, 0.8)
glRotatef(90, 1.0, 0.0, 0.0)
glTranslate(-1.1, 1.8, 0.7)
glutSolidCylinder(0.004, 0.3, 30, 30)
glPopMatrix()
glPushMatrix()
glColor3f(1, 1, 0)
glScalef(0.01, 0.1, 0.2)
glTranslate(-110, -7, 8.6)
glutSolidCube(1)
glPopMatrix()
def desenhar(self):
self.obj = glGenLists(4)
glNewList(self.obj,GL_COMPILE)
glPushMatrix()
glPushMatrix()
glRotatef(90, 1, 0, 0)
glRotatef(90, 0, 0, 1)
glTranslate(2.7, -1.6, -0.2)
alturaTrave, larguraTrave = 0.08, 0.2
glScalef(1, larguraTrave, alturaTrave)
self.redeTrasGol()
glPopMatrix()
glPushMatrix()
glRotatef(90, 1, 0, 0)
glRotatef(90, 0, 0, 1)
glTranslate(-13.7, -1.6, -0.2)
alturaTrave, larguraTrave = 0.08, 0.2
glScalef(1, larguraTrave, alturaTrave)
self.redeTrasGol()
glPopMatrix()
glPushMatrix()
glScalef(0.1, 0.1, 0.1)
glRotate(90, 0.0, 1.0, 0.0)
glTranslate(-25, -5, 23)
self.trave()
glPopMatrix()
glPushMatrix()
glScalef(0.1, 0.1, 0.1)
glRotate(-90, 0.0, 1.0, 0.0)
glTranslate(-135, -5, -38)
self.trave()
glPopMatrix()
glPushMatrix()
self.Campo()
glPopMatrix()
glPushMatrix()
self.bandeirinha()
glPopMatrix()
glPushMatrix()
glTranslate(8.2, 0, 0)
self.bandeirinha()
glPopMatrix()
glPushMatrix()
glTranslate(0, 0, -14.7)
self.bandeirinha()
glPopMatrix()
glPushMatrix()
glTranslate(8.2, 0, -14.7)
self.bandeirinha()
glPopMatrix()
glPopMatrix()
glEndList()
def executar(self):
glCallList(self.obj)
class ArqAlta:
def __init__(self):
self.obj = GLuint()
def degrau(self):
glPushMatrix()
glScalef(2, 80, 1.0)
glutSolidCube(0.5)
glPopMatrix()
def degrau2(self):
glPushMatrix()
glScalef(2, 700, 1.0)
glutSolidCube(0.5)
glPopMatrix()
def haste(self):
# Coluna
glPushMatrix()
glColor3f(0.1,0.1,0.1)
glRotate(90, 1.0, 0.0, 0.0)
glTranslate(-6.4,3.6,-7.5)
glutSolidCylinder(0.05, 6.0, 40, 10)
glPopMatrix()
def janela(self):
glPushMatrix()
glColor3f(0,0,0)
glTranslate(-6.9,6.3,2.85)
glScalef(1,1,1.4)
glutSolidCube(1)
glPopMatrix()
glPushMatrix()
glColor3f(0.8,0.8,0.8)
glTranslate(-6.8,6.3,2.85)
glScalef(1,1,1.4)
glutSolidCube(0.9)
glPopMatrix()
def muroDireito(self):
# MURO
glPushMatrix()
glColor3f(1,1,1)
glScalef(0.1,1,2.5)
glTranslate(-100,5.3,-3.3)
glutSolidCube(1)
glPopMatrix()
glPushMatrix()
glColor3f(1,1,1)
glTranslate(-10,6,-9.6)
glutSolidCylinder(0.02, 2.6, 40, 10)
glPopMatrix()
glPushMatrix()
glColor3f(1,1,1)
glRotate(90, 1.0, 0.0, 0.0)
glTranslate(-10,-8.3,-6)
glutSolidCylinder(0.04, 0.5, 40, 10)
glPopMatrix()
def muroEsquerdo(self):
glPushMatrix()
glColor3f(1,1,1)
glScalef(0.1,1,2.5)
glTranslate(-100,5.3,3.3)
glutSolidCube(1)
glPopMatrix()
glPushMatrix()
glColor3f(1,1,1)
glTranslate(-10,6,7)
glutSolidCylinder(0.02, 2.6, 40, 10)
glPopMatrix()
glPushMatrix()
glColor3f(1,1,1)
glRotate(90, 1.0, 0.0, 0.0)
glTranslate(-10,8.3,-6)
glutSolidCylinder(0.04, 0.5, 40, 10)
glPopMatrix()
def corrimao(self):
# CORRIMÃO
glPushMatrix()
glColor3f(0.3,0.3,0.3)
glTranslate(-0.6,3.5,17)
glutSolidCylinder(0.02, 3.0, 40, 10)
glPopMatrix()
glPushMatrix()
glColor3f(0.8,0.8,0.8)
glTranslate(-0.6,3.4,17)
glutSolidCylinder(0.02, 3.0, 40, 10)
glPopMatrix()
glPushMatrix()
glColor3f(0.8,0.8,0.8)
glTranslate(-0.6,3.3,17)
glutSolidCylinder(0.02, 3.0, 40, 10)
glPopMatrix()
glPushMatrix()
glColor3f(0.3,0.3,0.3)
glTranslate(-0.6,3.2,17)
glutSolidCylinder(0.02, 3.0, 40, 10)
glPopMatrix()
glPushMatrix()
glColor3f(0.3,0.3,0.3)
glRotate(90, 1.0, 0.0, 0.0)
glTranslate(-0.6,18,-3.5)
glutSolidCylinder(0.02, 0.5, 40, 10)
glPopMatrix()
def coluna(self):
glPushMatrix()
glColor3f(1,1,1)
glRotatef(90, 1,0,0)
glScalef(0.3,0.3,10)
glTranslate(-28.5,-120,0.1)
glutSolidCube(1)
glPopMatrix()
def refletor(self):
#Base Vertical.
glRotatef(90, 1.0, 0.0 , 0.0)
glColor3f(0.5,0.5,0.5)
glutSolidCylinder(0.07, 15.0, 40, 10)
glRotatef(5, 1.0, 0.0 , 0.0)
#Base Luzes.
glTranslate( -0.75, 0.0, -1.02)
glColor3f(0.8, 0.8, 0.8) # cor RGB
#Luzes do meio.
contador = 0
glTranslate( -0.1, 0.1, 0.5)
while(contador < 6):
glTranslate( 0.25, 0.0, 0.0)
glutWireCube(0.18,100)
glutSolidCube(0.15)
contador += 1
#Luzes de cima.
contador = 0
glTranslate( -1.5, 0.0, 0.3)
while(contador < 6):
glTranslate( 0.25, 0.0, 0.0)#Nao altera. Definir espaco entre as lampadas.
glutWireCube(0.18,100)
glutSolidCube(0.15)
contador += 1
#Luzes de baixo.
contador = 0
glTranslate( -1.5, 0.0, -0.6)
while(contador < 6):
glTranslate( 0.25, 0.0, 0.0)#Nao altera.Definir espaco entre as lampadas.
glutWireCube(0.18,100)
glutSolidCube(0.15)
contador += 1
glTranslate( -0.65, -0.1, 0.3)
glPushMatrix()
glColor3f(1, 1, 1) # cor RGB
glScalef(3.5, 0.2, 2.0)
glutWireCube(0.52)
glPopMatrix()
#Fios que seguram as lampadas.
glPushMatrix()
glRotatef(90, 0.0, 1.0 , 0.0)
glTranslate( -0.3, 0.0, -0.92)
for s in range(3):
glutSolidCylinder(0.008, 1.83, 10, 1)
glTranslate( 0.3, 0, 0)
glPopMatrix()
def executar(self):
glCallList(self.obj)
def desenhar(self):
self.obj = glGenLists(1)
glNewList(self.obj, GL_COMPILE)
glPushMatrix()
glScalef(.2,.2,.2)
glTranslate(-15,-4,-25)
# Parte Baixa
glPushMatrix()
glScalef(1,1,2)
glRotatef(90, 1.0, 0.0, 0.0)
contador = 0
while contador <= 4:
if contador%2==0:
glColor3f(0.1,0.1,0.1)
self.degrau()
glTranslate(0.5,0,0.2)
else:
glColor3f(1,1,1)
self.degrau()
glTranslate(0.5,0,0.2)
contador+=1
glPopMatrix()
# Divisão
glPushMatrix()
glRotatef(90, 1.0, 0.0, 0.0)
glColor3f(1,1,1)
glScalef(0.1,80,7)
glScalef(1,2,1)
glTranslate(-6,0,-0.2)
glutSolidCube(0.5)
glPopMatrix()
# Piso parte alta
glPushMatrix()
glColor3f(0.5,0.5,0.5)
glScalef(2.5,1,1)
glScalef(1,1,2)
glTranslate(-0.75,2,0)
glRotatef(90, 1.0, 0.0, 0.0)
self.degrau()
glPopMatrix()
# Parte Alta Esquerda
glPushMatrix()
glTranslate(-9.5,4.6,23.2)
glRotatef(90, 1.0, 0.0, 0.0)
glScalef(1,2.7,1)
contador = 0
glScalef(1, 0.3, 1)
while contador <= 12:
if contador%2==0:
glColor3f(1,1,1)
self.degrau()
glTranslate(0.5,0,0.2)
else:
glColor3f(0.1,0.1,0.1)
self.degrau()
glTranslate(0.5,0,0.2)
contador+=1
glPopMatrix()
# Parte Alta Direita
glPushMatrix()
glTranslate(-9.5,4.6,-23.2)
glRotatef(90, 1.0, 0.0, 0.0)
glScalef(1,2.7,1)
contador = 0
glScalef(1, 0.3, 1)
while contador <= 12:
if contador%2==0:
glColor3f(1,1,1)
self.degrau()
glTranslate(0.5,0,0.2)
else:
glColor3f(0.1,0.1,0.1)
self.degrau()
glTranslate(0.5,0,0.2)
contador+=1
glPopMatrix()
# Parte Alta Meio
glPushMatrix()
glTranslate(-6,3.2,0)
glRotatef(90, 1.0, 0.0, 0.0)
glScalef(1,1.17,1)
contador = 0
glScalef(1, 0.3, 1)
while contador <= 5:
if contador%2==0:
glColor3f(0.1,0.1,0.1)
self.degrau()
glTranslate(0.5,0,0.2)
else:
glColor3f(1,1,1)
self.degrau()
glTranslate(0.5,0,0.2)
contador+=1
glPopMatrix()
# Cabines de imprensa
# BLOCO
glPushMatrix()
glColor3f(0.9,0.9,0.9)
glTranslate(-8.0,5,-0.15)
glScalef(1,1.5,2.6)
glutSolidCube(3)
glPopMatrix()
# JANELAS
glPushMatrix()
contador = 0
while contador <= 4:
self.janela()
glTranslate(0,0,-1.5)
contador+=1
glPopMatrix()
# HASTES
glPushMatrix()
contador = 0
while contador <= 5:
self.haste()
glTranslate(0,0,-1.5)
contador+=1
glPopMatrix()
# TETO
glPushMatrix()
glColor3f(0.1,0.1,0.1)
glTranslate(-4.2,7.4,0)
glScalef(1.5,0.05,2.7)
glutSolidCube(3)
glPopMatrix()
# BALCÃO
glPushMatrix()
glColor3f(1,1,1)
glTranslate(-6,4,3.7)
glScalef(1,1.2,0.1)
glutSolidCube(1)
glPopMatrix()
glPushMatrix()
glColor3f(1,1,1)
glTranslate(-6,4,-3.7)
glScalef(1,1.2,0.1)
glutSolidCube(1)
glPopMatrix()
glPushMatrix()
glColor3f(1,1,1)
glTranslate(-5.5,4,-2.1)
glScalef(0.1,1.2,3.1)
glutSolidCube(1)
glPopMatrix()
glPushMatrix()
glColor3f(1,1,1)
glTranslate(-5.5,4,2.1)
glScalef(0.1,1.2,3.1)
glutSolidCube(1)
glPopMatrix()
glPushMatrix()
glColor3f(0,0,0)
glTranslate(-5.7,4.6,2.1)
glScalef(0.3,0.1,3.1)
glutSolidCube(1)
glPopMatrix()
glPushMatrix()
glColor3f(0,0,0)
glTranslate(-5.7,4.6,-2.1)
glScalef(0.3,0.1,3.1)
glutSolidCube(1)
glPopMatrix()
# CORRIMÕES
glPushMatrix()
glTranslate(0,0,20)
contador = 0
while contador <= 51:
self.corrimao()
glTranslate(0,0,-1.5)
contador+=1
glPopMatrix()
# MURO DIR
glPushMatrix()
contador = 0
while contador <= 20:
self.muroDireito()
glTranslate(0,0,-1.5)
contador+=1
glPopMatrix()
# perto cabine
glPushMatrix()
glTranslate(3.5,-1.5,2.8)
self.muroDireito()
glColor3f(1,0,0)
glPopMatrix()
# MURO ESQ
glPushMatrix()
contador = 0
while contador <= 20:
self.muroEsquerdo()
glTranslate(0,0,1.5)
contador+=1
glPopMatrix()
glPushMatrix()
glTranslate(3.5,-1.5,-2.8)
self.muroEsquerdo()
glColor3f(1,0,0)
glPopMatrix()
# COLUNA SUSTENTAÇÃO
glPushMatrix()
contador = 0
while contador <= 8:
self.coluna()
glTranslate(0,0,9)
contador+=1
glPopMatrix()
# REFLETOR DIREITO
glPushMatrix()
glTranslate(-11,20,-20)
glRotatef(90, 0,1,0)
glScalef(2,2,2)
self.refletor()
glPopMatrix()
# REFLETOR ESQUERDO
glPushMatrix()
glTranslate(-11,20,20)
glRotatef(90, 0,1,0)
glScalef(2,2,2)
self.refletor()
glPopMatrix()
glPopMatrix()
glEndList()
#------------------------------------------------------------------------------
class Terreno:
    """Ground plane: a 200x200 textured quad beneath the whole scene."""
    def __init__(self):
        self.textura1 = glGenTextures(1)   # texture handle for the ground image
        self.obj = GLuint()                # display-list id, filled by desenhar()
    def carrega_imagem(self):
        # Upload ../objs/terreno.jpg as the currently bound GL_TEXTURE_2D image.
        im = Image.open("../objs/terreno.jpg", "r")
        try:
            ix, iy, image = im.size[0], im.size[1], im.tostring("raw", "RGBA", 0, -1)
        except SystemError:
            # Image modes without alpha raise SystemError; repack as RGBX.
            ix, iy, image = im.size[0], im.size[1], im.tostring("raw", "RGBX", 0, -1)
        #glBindTexture(GL_TEXTURE_2D, textura1)
        glPixelStorei(GL_UNPACK_ALIGNMENT, 1)
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
        glTexImage2D(
            GL_TEXTURE_2D, 0, 3, ix, iy, 0,
            GL_RGBA, GL_UNSIGNED_BYTE, image
        )
    def desenhar(self):
        # Compile the ground quad (at y = -1.1, texture tiled 150 times).
        self.obj = glGenLists(97)
        glNewList(self.obj, GL_COMPILE)
        self.carrega_imagem()
        glEnable(GL_TEXTURE_2D)
        glPushMatrix()
        glTranslatef(0,-1.1,0)
        glBegin(GL_QUADS)
        glColor3f(1,1,1)
        glVertex3f(-100.0, 0.0, -100.0)
        glTexCoord2f(0.0, 150)
        glVertex3f(-100.0, 0.0, 100.0)
        glTexCoord2f(150, 150)
        glVertex3f(100.0, 0.0, 100.0)
        glTexCoord2f(150, 0.0)
        glVertex3f(100.0, 0.0, -100.0)
        glTexCoord2f(0.0, 0.0)
        glEnd()
        glPopMatrix()
        glDisable(GL_TEXTURE_2D)
        glEndList()
    def executar(self):
        # Replay the precompiled display list.
        glCallList(self.obj)
class Bola:
    """The ball: a textured sphere moved with WASD keys.

    Crossing either goal line between the posts triggers the crowd audio.
    """
    def __init__(self):
        music.load("../objs/torcidaASA.mp3")
        self.obj = GLuint()
        glEnable(GL_DEPTH_TEST)
        glEnable(GL_NORMALIZE)
        glEnable(GL_COLOR_MATERIAL)
        self.quad = gluNewQuadric()
        self.textura1 = ''
        self.axisX = 3.5        # ball position along X
        self.axisZ = -5         # ball position along Z
        self.esqdir = 0         # rolling angle for left/right movement
        self.cimabaixo = 0      # rolling angle for forward/back movement
        self.texturaID = GLuint()
        self._textureID = self.carrega_textura("../objs/soccer_ball.jpeg")
    def carrega_textura(self, caminho):
        # Load the image at *caminho* and upload it as a 2D texture.
        # Returns the texture id.
        im = Image.open(caminho, "r")
        try:
            ix, iy, image = im.size[0], im.size[1], im.tostring("raw", "RGBA", 0, -1)
        except SystemError:
            ix, iy, image = im.size[0], im.size[1], im.tostring("raw", "RGBX", 0, -1)
        self.textura1 = glGenTextures(1, self.texturaID)
        glBindTexture(GL_TEXTURE_2D, self.texturaID)
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, ix, iy, 0, GL_RGBA, GL_UNSIGNED_BYTE, image)
        return self.texturaID
    def desenhar(self):
        # NOTE(review): the texture is re-read from disk on every frame; it
        # could be uploaded once in __init__ and merely bound here.
        self._textureID = self.carrega_textura("../objs/soccer_ball.jpeg")
        glEnable(GL_TEXTURE_2D)
        glBindTexture(GL_TEXTURE_2D, self._textureID)
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST)
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST)
        # The LINEAR settings below override the NEAREST ones just set.
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
        glPushMatrix()
        glTranslatef(self.axisX, -.95, self.axisZ)
        glScale(.02, .02, .02)
        glPushMatrix()
        glRotatef(90, 1.0, 0.0, 0.0)
        glRotatef(self.esqdir, 0, 0, 1)
        glRotatef(self.cimabaixo, 0, 1, 0)
        gluQuadricTexture(self.quad, 1)
        gluSphere(self.quad, 2, 20, 20)
        glPopMatrix()
        glPopMatrix()
        glDisable(GL_TEXTURE_2D)
        glutSwapBuffers()
    def teclado(self, tecla, x, y):
        # Keyboard handler: a/d move along X, w/s along Z, each clamped to
        # the pitch bounds; the rolling angles make the texture spin.
        if tecla == b'a' and self.axisX >= -1:
            self.esqdir += - 20
            self.axisX -= .1
        if tecla == b's'and self.axisZ <= 1.7:
            self.cimabaixo += - 20
            self.axisZ += .1
        if tecla == b'w' and self.axisZ >= -12.8:
            self.cimabaixo += + 20
            self.axisZ -= .1
        if tecla == b'd' and self.axisX <= 6.9:
            self.esqdir += + 20
            self.axisX += .1
        # GOAL POSTS - X from 2.4 to 3.7: past the far goal line
        if self.axisZ < -12.8 and 2.4 <= self.axisX <= 3.7:
            music.play(0)
        # Past the near goal line (Z > 1.7) between the same posts.
        if self.axisZ > 1.7 and 2.4 <= self.axisX <= 3.7:
            music.play(0)
        # Python 2 debug prints of the current ball position.
        print self.axisX
        print self.axisZ
        glutPostRedisplay()
class ArqGrade:
def __init__(self):
self.obj = GLuint()
def grade(self, qtd):
glRotate(-90,1,0,0)
glPushMatrix()
glColor(0,0,0)
for i in range(qtd):
glutSolidCylinder(0.08, (i+1), 10, 10)
glTranslate(1,0,0)
glPopMatrix()
glRotate(90,1,0,0)
def bancos(self, qtd):
glPushMatrix()
glScale(.5,.4,2)
glColor(1,1,1)
for i in range(qtd):
glutSolidCube(0.5)
glTranslate(0.5,0,0)
glPopMatrix()
def refletor(self):
#Base Vertical.
glRotatef(90, 1.0, 0.0 , 0.0)
glColor3f(0.5,0.5,0.5)
glutSolidCylinder(0.07, 10.0, 40, 10)
glRotatef(5, 1.0, 0.0 , 0.0)
#Base Luzes.
glTranslate( -0.75, 0.0, -1.02)
glColor3f(0.8, 0.8, 0.8) # cor RGB
#Luzes do meio.
contador = 0
glTranslate( -0.1, 0.1, 0.5)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
while(contador < 6):
glTranslate( 0.25, 0.0, 0.0)
glutWireCube(0.18,100)
glutSolidCube(0.15)
contador += 1
#Luzes de cima.
contador = 0
glTranslate( -1.5, 0.0, 0.3)
while(contador < 6):
glTranslate( 0.25, 0.0, 0.0)#Nao altera. Definir espaco entre as lampadas.
glutWireCube(0.18,100)
glutSolidCube(0.15)
contador += 1
#Luzes de baixo.
contador = 0
glTranslate( -1.5, 0.0, -0.6)
while(contador < 6):
glTranslate( 0.25, 0.0, 0.0)#Nao altera.Definir espaco entre as lampadas.
glutWireCube(0.18,100)
glutSolidCube(0.15)
contador += 1
glTranslate( -0.65, -0.1, 0.3)
glPushMatrix()
glColor3f(1, 1, 1) # cor RGB
glScalef(3.5, 0.2, 2.0)
glutWireCube(0.52)
glPopMatrix()
#Fios que seguram as lampadas.
glPushMatrix()
glRotatef(90, 0.0, 1.0 , 0.0)
glTranslate( -0.3, 0.0, -0.92)
for s in range(3):
glutSolidCylinder(0.008, 1.83, 10, 1)
glTranslate( 0.3, 0, 0)
glPopMatrix()
def corrimao(self):
# CORRIMÃO
glPushMatrix()
glColor3f(0.3,0.3,0.3)
glTranslate(-0.6,3.5,17)
glutSolidCylinder(0.01, 3.4, 40, 10)
glPopMatrix()
glPushMatrix()
glColor3f(0.8,0.8,0.8)
glTranslate(-0.6,3.4,17)
glutSolidCylinder(0.01, 3.4, 40, 10)
glPopMatrix()
glPushMatrix()
glColor3f(0.8,0.8,0.8)
glTranslate(-0.6,3.3,17)
glutSolidCylinder(0.01, 3.4, 40, 10)
glPopMatrix()
glPushMatrix()
glColor3f(0.3,0.3,0.3)
glRotate(90, 1.0, 0.0, 0.0)
glTranslate(-0.6,18,-3.5)
glutSolidCylinder(0.01, 0.5, 40, 10)
glPopMatrix()
def corrimaoCima(self):
# CORRIMÃO
glPushMatrix()
glColor3f(0.3,0.3,0.3)
glTranslate(-0.6,3.5,17)
glutSolidCylinder(0.02, 3.4, 40, 10)
glPopMatrix()
glPushMatrix()
glColor3f(0.8,0.8,0.8)
glTranslate(-0.6,3.4,17)
glutSolidCylinder(0.02, 3.4, 40, 10)
glPopMatrix()
glPushMatrix()
glColor3f(0.8,0.8,0.8)
glTranslate(-0.6,3.3,17)
glutSolidCylinder(0.02, 3.4, 40, 10)
glPopMatrix()
def desenhar(self):
self.obj = glGenLists(11)
glNewList(self.obj, GL_COMPILE)
glPushMatrix()
glScale(.2,.2,.2)
glTranslate(45,-5,-70)
# PISO PASSAGEM
glPushMatrix()
glTranslate(0,1,82)
glRotate(90,0,1,0)
for i in range(1):
glScale(1,1,2)
self.bancos(327)
glPopMatrix()
glPushMatrix()
glTranslate(2,-15,-85)
glScale(5,5,5)
for i in range(14):
self.corrimao()
glTranslate(0,0,1)
glPopMatrix()
# CORRIMAO DE CIMA
glPushMatrix()
glTranslate(12.4,-7,-85)
glScale(5,5,5)
for i in range(14):
self.corrimaoCima()
glTranslate(0,0,1)
glPopMatrix()
glPushMatrix()
glTranslate(0.4,1,82)
glRotate(90,0,1,0)
for i in range(9):
if i % 2 == 0:
glColor3f(0,0,0)
self.bancos(328)
glTranslate(0,1,1)
else:
glColor3f(1,1,1)
self.bancos(328)
glTranslate(0,1,1)
glPopMatrix()
# CORRIMAO LADO ESQ
glPushMatrix()
glTranslate(-43.2,-53.5,-2.2)
glScale(4,4,4)
glRotate(90, 0,1,0)
glRotate(-41, 1,0,0)
for i in range(1):
self.corrimaoCima()
glTranslate(0,0,1)
glPopMatrix()
# CORRIMAO LADO DIR
glPushMatrix()
glTranslate(-43.2,-53.5,79.7)
glScale(4,4,4)
glRotate(90, 0,1,0)
glRotate(-41, 1,0,0)
for i in range(1):
self.corrimaoCima()
glTranslate(0,0,1)
glPopMatrix()
for i in range(42):
glPushMatrix()
self.grade(10)
glRotate(-180,0,1,0)
glRotate(-90,0,0,1)
glTranslate(-9,-9,0)
self.grade(10)
glPopMatrix()
glTranslate(0,0,2)
glPopMatrix()
# REFLETOR DIREITO
glPushMatrix()
glTranslate(12.5,3,-10)
glRotatef(-90, 0,1,0)
glScalef(0.5,0.5,0.5)
self.refletor()
glPopMatrix()
# REFLETOR ESQUERDO
glPushMatrix()
glTranslate(12.5,3,0)
glRotatef(-90, 0,1,0)
glScalef(0.5,0.5,0.5)
self.refletor()
glPopMatrix()
glEndList()
def executar(self):
glCallList(self.obj)
class ArqFrente:
    """Front grandstand: stepped tiers, access stairways, back wall and a
    staircase-profile roof, all recorded into one OpenGL display list."""
    def __init__(self):
        # Handle of the display list; filled in by desenhar().
        self.obj = GLuint()
    def cobertura(self):
        # Roof profile: a staircase-shaped polygon drawn in the XY plane.
        glPushMatrix()
        glBegin(GL_POLYGON)
        glColor3f(1,1,1)
        glVertex3f(0.0, 0.0, 0.0)
        glVertex3f(14.0, 0.0, 0.0)
        glVertex3f(14.0, 1.0, 0.0)
        glVertex3f(13.0, 1.0, 0.0)
        glVertex3f(13.0, 2.0, 0.0)
        glVertex3f(12.0, 2.0, 0.0)
        glVertex3f(12.0, 3.0, 0.0)
        glVertex3f(11.0, 3.0, 0.0)
        glVertex3f(11.0, 4.0, 0.0)
        glVertex3f(10.0, 4.0, 0.0)
        glVertex3f(10.0, 5.0, 0.0)
        glVertex3f(9.0, 5.0, 0.0)
        glVertex3f(9.0, 6.0, 0.0)
        glVertex3f(8.0, 6.0, 0.0)
        glVertex3f(8.0, 7.0, 0.0)
        glVertex3f(7.0, 7.0, 0.0)
        glVertex3f(7.0, 8.0, 0.0)
        glVertex3f(6.0, 8.0, 0.0)
        glVertex3f(6.0, 9.0, 0.0)
        glVertex3f(5.0, 9.0, 0.0)
        glVertex3f(5.0, 10.0, 0.0)
        glVertex3f(4.0, 10.0, 0.0)
        glVertex3f(4.0, 11.0, 0.0)
        glVertex3f(3.0, 11.0, 0.0)
        glVertex3f(3.0, 12.0, 0.0)
        glVertex3f(2.0, 12.0, 0.0)
        glVertex3f(2.0, 13.0, 0.0)
        glVertex3f(1.0, 13.0, 0.0)
        glVertex3f(1.0, 14.0, 0.0)
        glVertex3f(0.0, 14.0, 0.0)
        glEnd()
        glPopMatrix()
    def degrau(self,tamanho):
        # One bench step: a 0.5 cube stretched along Y by 'tamanho'.
        glPushMatrix()
        glScalef(2, tamanho, 1.0)
        glutSolidCube(0.5)
        glPopMatrix()
    def escadinha(self):
        # Small access stairway: 13 thin steps climbing diagonally.
        contador = 0
        glPushMatrix()
        glTranslate(-10,0,-6.95)
        while contador < 13:
            glPushMatrix()
            glScalef(0.5,0.05,0.5)
            self.degrau(100)
            glPopMatrix()
            glTranslate(0.8,0,0.5)
            contador+=1
        glPopMatrix()
    def arquibancada(self):
        # Only for testing with zoom, but it could become definitive.
        glScalef(0.2,0.2,0.2)
        # Upper left section
        glPushMatrix()
        glScalef(0.9,1.1,1.1)
        glTranslate(0,0,0)
        glRotatef(90, 1.0, 0.0, 0.0)
        contador = 0
        glScalef(1, 0.3, 1)
        # 15 tiers with alternating white/black coloring.
        while contador <= 14:
            if contador%2==0:
                glColor3f(1,1,1)
                self.degrau(300)
                glTranslate(0.8,0,0.5)
            else:
                glColor3f(0.0,0.0,0.0)
                self.degrau(300)
                glTranslate(0.8,0,0.5)
            contador+=1
        # Auxiliary stairways.
        glPushMatrix()
        glColor3f(1,1,0)
        glTranslate(-0.5,50,0.1)
        self.escadinha()
        glTranslate(0,-100,0)
        self.escadinha()
        glPopMatrix()
        # Back wall.
        glPushMatrix()
        glColor3f(1,1,1)
        glTranslate(-12.5,-0.5,-4)
        glScalef(0.5, 1.02, 15)
        self.degrau(300)
        glPopMatrix()
        glPopMatrix()
        # Side roof panels.
        glPushMatrix()
        glTranslate(-0.6,-8.05,24.4) # x unchanged; y: up/down, z: front/back.
        glScalef(0.75, 0.6,0.9)
        self.cobertura()
        glTranslate(0.0,0.0,-54.6)
        self.cobertura()
        glPopMatrix()
    def arquibancada2(self):
        # Only for testing with zoom, but it could become definitive.
        glScalef(0.2,0.2,0.2)
        # Upper left section (shorter variant: 9 tiers, narrower steps).
        glPushMatrix()
        glScalef(0.9,1.1,1.1)
        glTranslate(0,0,0)
        glRotatef(90, 1.0, 0.0, 0.0)
        contador = 0
        glScalef(1, 0.3, 1)
        while contador <= 8:
            if contador%2==0:
                glColor3f(1,1,1)
                self.degrau(125)
                glTranslate(0.8,0,0.5)
            else:
                glColor3f(0.0,0.0,0.0)
                self.degrau(125)
                glTranslate(0.8,0,0.5)
            contador+=1
        glPopMatrix()
        # Flat base slab under the tiers.
        glPushMatrix()
        glColor3f(1,1,1)
        glTranslate(-0,-2,-2)
        glScalef(1, 0.06, 32)
        self.degrau(150)
        glPopMatrix()
    def desenho(self):
        # Assemble the full stand: two long sections plus the short one.
        glPushMatrix()
        self.arquibancada()
        glPopMatrix()
        glPushMatrix()
        glTranslate(0,0,13)
        self.arquibancada()
        glPopMatrix()
        glTranslate(0,0,7)
        self.arquibancada2()
    def desenhar(self):
        # Compile the whole stand into a display list for fast redraws.
        self.obj = glGenLists(1)
        glNewList(self.obj, GL_COMPILE)
        glPushMatrix()
        glTranslate(6,.1,-16.3)
        glScalef(0.45,.7,.5)
        glRotatef(-90,0,1,0)
        self.desenho()
        glPopMatrix()
        glEndList()
    def executar(self):
        # Replay the compiled display list.
        glCallList(self.obj)
class ArqTras:
    """Rear grandstand: a single wider section with three stairways and a
    staircase-profile roof, recorded into an OpenGL display list."""
    def __init__(self):
        # Handle of the display list; filled in by desenhar().
        self.obj = GLuint()
    def cobertura(self):
        # Roof profile: a staircase-shaped polygon drawn in the XY plane.
        glPushMatrix()
        glBegin(GL_POLYGON)
        glColor3f(1,1,1)
        glVertex3f(0.0, 0.0, 0.0)
        glVertex3f(14.0, 0.0, 0.0)
        glVertex3f(14.0, 1.0, 0.0)
        glVertex3f(13.0, 1.0, 0.0)
        glVertex3f(13.0, 2.0, 0.0)
        glVertex3f(12.0, 2.0, 0.0)
        glVertex3f(12.0, 3.0, 0.0)
        glVertex3f(11.0, 3.0, 0.0)
        glVertex3f(11.0, 4.0, 0.0)
        glVertex3f(10.0, 4.0, 0.0)
        glVertex3f(10.0, 5.0, 0.0)
        glVertex3f(9.0, 5.0, 0.0)
        glVertex3f(9.0, 6.0, 0.0)
        glVertex3f(8.0, 6.0, 0.0)
        glVertex3f(8.0, 7.0, 0.0)
        glVertex3f(7.0, 7.0, 0.0)
        glVertex3f(7.0, 8.0, 0.0)
        glVertex3f(6.0, 8.0, 0.0)
        glVertex3f(6.0, 9.0, 0.0)
        glVertex3f(5.0, 9.0, 0.0)
        glVertex3f(5.0, 10.0, 0.0)
        glVertex3f(4.0, 10.0, 0.0)
        glVertex3f(4.0, 11.0, 0.0)
        glVertex3f(3.0, 11.0, 0.0)
        glVertex3f(3.0, 12.0, 0.0)
        glVertex3f(2.0, 12.0, 0.0)
        glVertex3f(2.0, 13.0, 0.0)
        glVertex3f(1.0, 13.0, 0.0)
        glVertex3f(1.0, 14.0, 0.0)
        glVertex3f(0.0, 14.0, 0.0)
        glEnd()
        glPopMatrix()
    def degrau(self):
        # One bench step: a 0.5 cube stretched along Y (fixed length 300).
        glPushMatrix()
        glScalef(2, 300, 1.0)
        glutSolidCube(0.5)
        glPopMatrix()
    def escadinha(self):
        # Small access stairway: 13 thin steps climbing diagonally.
        contador = 0
        glPushMatrix()
        glTranslate(-10,0,-6.95)
        while contador < 13:
            glPushMatrix()
            glScalef(0.5,0.05,0.5)
            self.degrau()
            glPopMatrix()
            glTranslate(0.8,0,0.5)
            contador+=1
        glPopMatrix()
    def arquibancada(self):
        # Only for testing with zoom.
        glScalef(0.3,0.3,0.3)
        # Upper left section
        glPushMatrix()
        glScalef(0.9,1.1,1.1)
        glTranslate(0,0,0)
        glRotatef(90, 1.0, 0.0, 0.0)
        contador = 0
        glScalef(1, 0.3, 1)
        # 15 tiers with alternating white/black coloring.
        while contador <= 14:
            if contador%2==0:
                glColor3f(1,1,1)
                self.degrau()
                glTranslate(0.8,0,0.5)
            else:
                glColor3f(0.0,0.0,0.0)
                self.degrau()
                glTranslate(0.8,0,0.5)
            contador+=1
        # Auxiliary stairways (three of them, spaced along the stand).
        glPushMatrix()
        glColor3f(1,1,0)
        glTranslate(-0.5,50,0.1)
        glPushMatrix()
        glScalef(1,.3,1)
        self.escadinha()
        glPopMatrix()
        glTranslate(0,-50,0)
        glPushMatrix()
        glScalef(1,.3,1)
        self.escadinha()
        glPopMatrix()
        glTranslate(0,-50,0)
        glPushMatrix()
        glScalef(1,.3,1)
        self.escadinha()
        glPopMatrix()
        glPopMatrix()
        # Back wall.
        glPushMatrix()
        glColor3f(1,1,1)
        glTranslate(-12.5,-0.5,-4)
        glScalef(0.5, 1.02, 15)
        self.degrau()
        glPopMatrix()
        glPopMatrix()
        # Side roof panels.
        glPushMatrix()
        glTranslate(-0.5,-8.1,24.8) # x unchanged; y: up/down, z: front/back.
        glScalef(0.8, 0.6,1)
        self.cobertura()
        glTranslate(0.0,0.0,-49.6)
        self.cobertura()
        glPopMatrix()
    def desenhar(self):
        # Compile the stand into a display list.
        # NOTE(review): glGenLists(9) reserves nine contiguous list names but
        # only the first is ever recorded -- confirm 1 was intended.
        self.obj = glGenLists(9)
        glNewList(self.obj, GL_COMPILE)
        glPushMatrix()
        glTranslate(3,.2,5.5)
        glScalef(0.65,.5,.5)
        glRotatef(90,0,1,0)
        self.arquibancada()
        glPopMatrix()
        glEndList()
    def executar(self):
        # Replay the compiled display list.
        glCallList(self.obj)
#----------------------------------------------------------------
class Grade:
    """Perimeter fence around the pitch: straight segments plus rounded
    corners, compiled into an OpenGL display list."""
    def __init__(self):
        # Handle of the display list; filled in by desenhar().
        self.obj = GLuint()
    def estrutura(self):
        # Outer frame.
        glPushMatrix()
        # Isolated so rotations/translations do not leak between parts.
        glPushMatrix()
        glColor3f(.1, .1, .1)
        glutSolidCylinder(0.05, 5, 30, 10)
        glTranslate( 0.0, -5.0, 0.0)
        glutSolidCylinder(0.05, 5, 30, 10)
        glPopMatrix()
        glPushMatrix()
        glRotatef(90, 1.0, 0.0, 0.0)
        glutSolidCylinder(0.05, 5, 30, 10)
        glTranslate( 0.0, 5.0, 0.0)
        glutSolidCylinder(0.05, 5, 30, 10)
        glPopMatrix()
        glPopMatrix()
        # Inner vertical bars.
        # NOTE(review): the X scale factor is 0.0, flattening the wire cubes
        # into the plane -- confirm this is the intended "bar" look.
        glPushMatrix()
        glScalef(0.0, 10, 0.6)
        glTranslate( 0.0, -0.25, -0.5)
        for s in range(8):
            glTranslate( 0.0, 0.0, 1.0)
            glutWireCube(0.5)
        glPopMatrix()
        # Inner horizontal bars.
        glRotatef(90,.1,.0,.0)
        glPushMatrix()
        glScalef(0.0, 10, 1)
        glTranslate( 0.0, 0.25, -0.5)
        for s in range(5):
            glTranslate( 0.0, 0.0, 1.0)
            glutWireCube(0.5)
        glPopMatrix()
    def grade(self):
        # One fence panel: frame + tilted top section + base slab.
        glPushMatrix()
        glPushMatrix()
        self.estrutura()
        glTranslate( 0.0, 5.0, 0.0)
        glRotatef(125,.0,.1,.0)
        glScalef(1.0, 1.0, 0.4)
        self.estrutura()
        glPopMatrix()
        glTranslate( 0.0, -5.0, 2.5)
        glScalef(1,3,10.5)
        glColor(0,0,0)
        glutSolidCube(0.5)
        glPopMatrix()
    def curva(self):
        # Rounded corner: six panels rotated in 15-degree increments.
        glPushMatrix()
        self.grade()
        glTranslate(0,0,5.1)
        glPushMatrix()
        glScalef(1,1,0.5)
        glRotatef(15,0,1,0)
        self.grade()
        glPopMatrix()
        glTranslate(1.3,0,2.4)
        glPushMatrix()
        glScalef(1,1,0.5)
        glRotatef(30,0,1,0)
        self.grade()
        glPopMatrix()
        glTranslate(2.6,0,2.1)
        glPushMatrix()
        glScalef(1,1,0.5)
        glRotatef(45,0,1,0)
        self.grade()
        glPopMatrix()
        glTranslate(3.5,0,1.6)
        glPushMatrix()
        glScalef(1,1,0.5)
        glRotatef(60,0,1,0)
        self.grade()
        glPopMatrix()
        glTranslate(4.3,0,1.2)
        glPushMatrix()
        glScalef(1,1,0.5)
        glRotatef(75,0,1,0)
        self.grade()
        glPopMatrix()
        glTranslate(4.7,0,.5)
        glPushMatrix()
        glScalef(1,1,0.5)
        glRotatef(90,0,1,0)
        self.grade()
        glPopMatrix()
        glPopMatrix()
    def seguimento(self, qtd):
        # Straight run of 'qtd' consecutive fence panels.
        for i in range(qtd):
            glTranslate(0,0,5)
            glPushMatrix()
            glPushMatrix()
            glColor(0,0,0)
            self.estrutura()
            glTranslate( 0.0, 5.0, 0.0)
            glRotatef(125,.0,.1,.0)
            glScalef(1.0, 1.0, 0.4)
            self.estrutura()
            glPopMatrix()
            glTranslate( 0.0, -5.0, 2.5)
            glScalef(1,3,10.5)
            glColor(0,0,0)
            glutSolidCube(0.5)
            glPopMatrix()
    def desenhar(self):
        # Compile the full rectangle: alternating long/short straight runs
        # (36 and 15 panels) joined by four rounded corners.
        self.obj = glGenLists(2)
        glNewList(self.obj,GL_COMPILE)
        glPushMatrix()
        glColor(0,0,0)
        glScale(.2,.1,.175)
        glTranslate(-11,-7.45,-77)
        glScalef(.5,.5,.5)
        glPushMatrix()
        qtd = 0
        for i in range(4):
            if i%2 == 0:
                qtd = 36
            else:
                qtd = 15
            self.seguimento(qtd)
            self.curva()
            glTranslate(16,0,13)
            glRotatef(90,0,1,0)
        glPopMatrix()
        glPopMatrix()
        glEndList()
def executar(self):
glCallList(self.obj) | diogenesfilho/Estadio | Estádio/projeto_orientado/app/Complex.py | Python | gpl-2.0 | 45,005 |
#!/usr/bin/env python
import os,sys
import glob
def mysystem(s):
    """Echo the shell command, run it, and return its os.system() status."""
    print(s)
    return os.system(s)
def main():
    """Backfill missing second frames by copying frame 01 over frame 02.

    Walks every (order, mach, alpha) combination and, whenever the
    ``_02`` image is absent, duplicates the ``_01`` image in its place.
    """
    orders = [1]
    machs = [0.55, 0.65, 0.75, 0.85, 0.95, 1.05]
    alphas = range(-8, 9)
    # One template for both frame paths; the final %s selects 01 or 02.
    frame = ('/home/vbetro/online_edu/images/order%d/mach%0.2f/alpha%+03d'
             '/naca0012_%d_%0.2f_%+03d_%s.png')
    for order in orders:
        for mach in machs:
            for alpha in alphas:
                key = (order, mach, alpha, order, mach, alpha)
                second = frame % (key + ('02',))
                if not os.path.exists(second):
                    first = frame % (key + ('01',))
                    mysystem('cp %s %s' % (first, second))
if __name__ == "__main__":
    # Run only when invoked as a script, not when imported.
    main()
| vincentbetro/NACA-SIM | imagescripts/plotfixEND1.py | Python | gpl-2.0 | 873 |
# -*- coding: utf-8 -*-
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings module, then hand the
    # command-line arguments (runserver, migrate, ...) to Django.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ludacity.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| branchard/ludacity | manage.py | Python | gpl-2.0 | 253 |
## simple file type detection.
## if i had unlimited api access, i would send every single md5 to something like VirusTotal
# MIME types treated as risky (scripts, executables, office documents,
# archives); any transfer with one of these types is reported below.
forbidden_types = ['text/x-bash', 'text/scriptlet', 'application/x-opc+zip', 'application/com', 'application/x-msmetafile', 'application/x-shellscript', 'text/x-sh', 'text/x-csh', 'application/x-com', 'application/x-helpfile', 'application/hta', 'application/x-bat', 'application/x-php', 'application/x-winexe', 'application/x-msdownload', 'text/x-javascript', 'application/x-msdos-program', 'application/bat', 'application/x-winhelp', 'application/vnd.ms-powerpoint', 'text/x-perl', 'application/x-javascript', 'application/x-ms-shortcut', 'application/vnd.msexcel', 'application/x-msdos-windows', 'text/x-python', 'application/x-download', 'text/javascript', 'text/x-php', 'application/exe', 'application/x-exe', 'application/x-winhlp', 'application/msword', 'application/zip']
from config import *
import bparser
import notifiers
# Report every entry from Bro's files.log that either did not originate
# locally or matches one of the risky MIME types above.
# NOTE(review): the inline comment says "scan all of these types", yet the
# condition also matches *every* non-local transfer regardless of its
# type -- confirm that is intended.
for i in bparser.parseentries('files.log'):
    if not i['local_orig'] or i['mime_type'] in forbidden_types: ## scan all of these types
        print
        print "{} downloaded a file from {} via {}".format(i['rx_hosts'],i['tx_hosts'],i['source'])
        print "Filename: {} length: {} mime type: {}".format(i['filename'],i['total_bytes'],i['mime_type'])
        print "MD5: {} SHA1: {}".format(i['md5'],i['sha1'])
| red-green/broscanner | file_scanning.py | Python | gpl-2.0 | 1,354 |
#!/usr/bin/python
#Covered by GPL V2.0
from encoders import *
from payloads import *
# generate_dictio evolution
class dictionary:
    """Payload/encoder combination generator (Python 2 iterator protocol).

    Iterates over every payload item, yielding each item passed through
    every configured encoder callable in turn.
    """
    def __init__(self,dicc=None):
        # Clone payload/encoders from an existing dictionary when given,
        # otherwise start with an empty payload and one default encoder.
        if dicc:
            self.__payload=dicc.getpayload()
            self.__encoder=dicc.getencoder()
        else:
            self.__payload=payload()
            self.__encoder = [lambda x: encoder().encode(x)]
        self.restart()
    def count (self):
        # Total combinations produced: payload items x encoders.
        return self.__payload.count() * len(self.__encoder)
    def setpayload(self,payl):
        self.__payload = payl
        self.restart()
    def setencoder(self,encd):
        # 'encd' must be a list of callables taking one payload item.
        self.__encoder=encd
        self.generator = self.gen()
    def getpayload (self):
        return self.__payload
    def getencoder (self):
        return self.__encoder
    def generate_all(self):
        # Materialise every encoded payload into a list.
        # BUGFIX: __encoder is a *list* of callables (see __init__ and
        # setencoder); the old code called self.__encoder.encode(i), which
        # raised AttributeError. Apply each encoder, as gen() does.
        dicc=[]
        for i in self.__payload:
            for encode in self.__encoder:
                dicc.append(encode(i))
        return dicc
    def __iter__(self):
        self.restart()
        return self
    def gen(self):
        # Yield each payload item passed through every encoder in turn;
        # terminates when the payload iterator raises StopIteration.
        while 1:
            pl=self.iter.next()
            for encode in self.__encoder:
                yield encode(pl)
    def next(self):
        return self.generator.next()
    def restart(self):
        # Rewind both the payload iterator and the combination generator.
        self.iter=self.__payload.__iter__()
        self.generator = self.gen()
| GHubgenius/wfuzz-1 | dictio.py | Python | gpl-2.0 | 1,098 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
SuroLeveling
A QGIS plugin
todo
-------------------
begin : 2016-02-12
git sha : $Format:%H$
copyright : (C) 2016 by Ondřej Pešek
email : ondra.lobo@seznam.cz
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import QSettings, QTranslator, qVersion, QCoreApplication, Qt
from PyQt4.QtGui import QAction, QIcon
# Initialize Qt resources from file resources.py
import resources
# Import the code for the DockWidget
#from suro_leveling_dockwidget import PositionCorrectionDockWidget#SuroLevelingDockWidget
from position_correction_dockwidget import PositionCorrectionDockWidget
import os.path
class PositionCorrection:#SuroLeveling:
"""QGIS Plugin Implementation."""
    def __init__(self, iface):
        """Constructor.

        :param iface: An interface instance that will be passed to this class
            which provides the hook by which you can manipulate the QGIS
            application at run time.
        :type iface: QgsInterface
        """
        # Save reference to the QGIS interface
        self.iface = iface
        # initialize plugin directory
        self.plugin_dir = os.path.dirname(__file__)
        # initialize locale: first two characters of the QGIS user locale,
        # used to pick the matching .qm translation file.
        locale = QSettings().value('locale/userLocale')[0:2]
        locale_path = os.path.join(
            self.plugin_dir,
            'i18n',
            'SuroLeveling_{}.qm'.format(locale))
        if os.path.exists(locale_path):
            self.translator = QTranslator()
            self.translator.load(locale_path)
            if qVersion() > '4.3.3':
                QCoreApplication.installTranslator(self.translator)
        # Declare instance attributes
        self.actions = []
        self.menu = self.tr(u'&GPS position lag correction')
        #print "** INITIALIZING SuroLeveling"
        # pluginIsActive guards against re-entrant run(); dockwidget is
        # created lazily on first run().
        self.pluginIsActive = False
        self.dockwidget = None
    # noinspection PyMethodMayBeStatic
    def tr(self, message):
        """Get the translation for a string using Qt translation API.

        We implement this ourselves since we do not inherit QObject.

        :param message: String for translation.
        :type message: str, QString

        :returns: Translated version of message.
        :rtype: QString
        """
        # noinspection PyTypeChecker,PyArgumentList,PyCallByClass
        return QCoreApplication.translate('GPS position lag correction', message)
    def add_action(
        self,
        icon_path,
        text,
        callback,
        enabled_flag=True,
        add_to_menu=True,
        status_tip=None,
        whats_this=None,
        parent=None):
        """Add a toolbar icon to the toolbar.

        :param icon_path: Path to the icon for this action. Can be a resource
            path (e.g. ':/plugins/foo/bar.png') or a normal file system path.
        :type icon_path: str

        :param text: Text that should be shown in menu items for this action.
        :type text: str

        :param callback: Function to be called when the action is triggered.
        :type callback: function

        :param enabled_flag: A flag indicating if the action should be enabled
            by default. Defaults to True.
        :type enabled_flag: bool

        :param add_to_menu: Flag indicating whether the action should also
            be added to the menu. Defaults to True.
        :type add_to_menu: bool

        :param status_tip: Optional text to show in a popup when mouse pointer
            hovers over the action.
        :type status_tip: str

        :param whats_this: Optional text to show in the status bar when the
            mouse pointer hovers over the action.
        :type whats_this: str

        :param parent: Parent widget for the new action. Defaults None.
        :type parent: QWidget

        :returns: The action that was created. Note that the action is also
            added to self.actions list.
        :rtype: QAction
        """
        icon = QIcon(icon_path)
        action = QAction(icon, text, parent)
        action.triggered.connect(callback)
        action.setEnabled(enabled_flag)
        if status_tip is not None:
            action.setStatusTip(status_tip)
        if whats_this is not None:
            action.setWhatsThis(whats_this)
        if add_to_menu:
            self.iface.addPluginToMenu(
                self.menu,
                action)
        # Track the action so unload() can remove it again.
        self.actions.append(action)
        self.iface.addToolBarIcon(action)
        return action
    def initGui(self):
        """Create the menu entries and toolbar icons inside the QGIS GUI."""
        # Single toolbar/menu entry that opens the plugin's dock widget.
        icon_path = ':/plugins/SuroLeveling/icon.png'
        self.add_action(
            icon_path,
            text=self.tr(u'GPS position lag correction'),
            callback=self.run,
            parent=self.iface.mainWindow())
#--------------------------------------------------------------------------
#def onClosePlugin(self): CAUSE OF ENABLE SECOND OPENING
# """Cleanup necessary items here when plugin dockwidget is closed"""
# print "** CLOSING SuroLeveling"
# disconnects
# self.dockwidget.closingPlugin.disconnect(self.onClosePlugin)
# remove this statement if dockwidget is to remain
# for reuse if plugin is reopened
# Commented next statement since it causes QGIS crashe
# when closing the docked window:
# self.dockwidget = None
# self.pluginIsActive = False
    def unload(self):
        """Removes the plugin menu item and icon from QGIS GUI."""
        #print "** UNLOAD SuroLeveling"
        # Remove every action registered by add_action().
        for action in self.actions:
            self.iface.removePluginMenu(
                self.tr(u'&GPS position lag correction'),
                action)
            self.iface.removeToolBarIcon(action)
#--------------------------------------------------------------------------
def run(self):
"""Run method that loads and starts the plugin"""
if not self.pluginIsActive:
self.pluginIsActive = True
#print "** STARTING SuroLeveling"
# dockwidget may not exist if:
# first run of plugin
# removed on close (see self.onClosePlugin method)
if self.dockwidget == None:
# Create the dockwidget (after translation) and keep reference
self.dockwidget = PositionCorrectionDockWidget()#SuroLevelingDockWidget()
# connect to provide cleanup on closing of dockwidget
# self.dockwidget.closingPlugin.connect(self.onClosePlugin) CAUSE OF ENABLE SECOND OPENING
# show the dockwidget
self.iface.addDockWidget(Qt.LeftDockWidgetArea, self.dockwidget)
self.dockwidget.show()
self.pluginIsActive = False | ctu-osgeorel-proj/bp-pesek-2016 | src/position_correction.py | Python | gpl-2.0 | 7,629 |
#
# Copyright 2015 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
import json
import libvirt
import netaddr
import os
import os.path
import subprocess
from lxml import etree
from ..driverbase import VirtDeployDriverBase
from ..errors import InstanceNotFound
from ..errors import VirtDeployException
from ..utils import execute
from ..utils import random_password
# Default libvirt network and storage pool names.
DEFAULT_NET = 'default'
DEFAULT_POOL = 'default'
# Base template image format and size used by _create_base().
BASE_FORMAT = 'qcow2'
BASE_SIZE = '20G'
# Defaults merged into the kwargs of instance_create().
INSTANCE_DEFAULTS = {
    'cpus': 2,
    'memory': 1024,
    'arch': 'x86_64',
    'network': DEFAULT_NET,
    'pool': DEFAULT_POOL,
    'password': None,
}
# Short aliases for the libvirt network-update command/section constants.
_NET_ADD_LAST = libvirt.VIR_NETWORK_UPDATE_COMMAND_ADD_LAST
_NET_MODIFY = libvirt.VIR_NETWORK_UPDATE_COMMAND_MODIFY
_NET_DELETE = libvirt.VIR_NETWORK_UPDATE_COMMAND_DELETE
_NET_DNS_HOST = libvirt.VIR_NETWORK_SECTION_DNS_HOST
_NET_DHCP_HOST = libvirt.VIR_NETWORK_SECTION_IP_DHCP_HOST
# Apply network updates to both the persistent config and the live network.
_NET_UPDATE_FLAGS = (
    libvirt.VIR_NETWORK_UPDATE_AFFECT_CONFIG |
    libvirt.VIR_NETWORK_UPDATE_AFFECT_LIVE
)
# virt-builder template name -> virt-install os-variant overrides.
_IMAGE_OS_TABLE = {
    'centos-6': 'centos6.5',  # TODO: fix versions
    'centos-7.1': 'centos7.0',  # TODO: fix versions
    'centos-7.2': 'centos7.0',  # TODO: fix versions
    'rhel-6.7': 'rhel6',  # TODO: fix versions
    'rhel-7.2': 'rhel7',  # TODO: fix versions
}
class VirtDeployLibvirtDriver(VirtDeployDriverBase):
    """libvirt-backed implementation of the virt-deploy driver interface."""

    def __init__(self, uri='qemu:///system'):
        self._uri = uri

    def _libvirt_open(self):
        def libvirt_callback(ctx, err):
            pass  # add logging only when required
        # Silence libvirt's default stderr error reporting.
        libvirt.registerErrorHandler(libvirt_callback, ctx=None)
        return libvirt.open(self._uri)

    def template_list(self):
        """Return [{'id', 'name'}] for each template known to virt-builder."""
        templates = _get_virt_templates()
        if templates['version'] != 1:
            raise VirtDeployException('Unsupported template list version')
        return [{'id': x['os-version'], 'name': x['full-name']}
                for x in templates['templates']]

    def instance_create(self, vmid, template, **kwargs):
        """Create and define a new VM from a virt-builder template.

        Returns a dict with name, password, mac, hostname and ipaddress.
        """
        kwargs = dict(INSTANCE_DEFAULTS.items() + kwargs.items())
        name = '{0}-{1}-{2}'.format(vmid, template, kwargs['arch'])
        image = '{0}.qcow2'.format(name)
        conn = self._libvirt_open()
        pool = conn.storagePoolLookupByName(kwargs['pool'])
        net = conn.networkLookupByName(kwargs['network'])
        repository = _get_pool_path(pool)
        path = os.path.join(repository, image)
        if os.path.exists(path):
            raise OSError(os.errno.EEXIST, "Image already exists")
        # Thin-provisioned disk backed by the shared template base image.
        base = _create_base(template, kwargs['arch'], repository)
        execute(('qemu-img', 'create', '-f', 'qcow2', '-b', base, image),
                cwd=repository)
        hostname = 'vm-{0}'.format(vmid)
        domainname = _get_network_domainname(net)
        if domainname is None:
            fqdn = hostname
        else:
            fqdn = '{0}.{1}'.format(hostname, domainname)
        if kwargs['password'] is None:
            kwargs['password'] = random_password()
        password_string = 'password:{0}'.format(kwargs['password'])
        execute(('virt-customize',
                 '-a', path,
                 '--hostname', fqdn,
                 '--root-password', password_string))
        network = 'network={0}'.format(kwargs['network'])
        # Attach the clean-traffic nwfilter only when it is available.
        try:
            conn.nwfilterLookupByName('clean-traffic')
        except libvirt.libvirtError as e:
            if e.get_error_code() != libvirt.VIR_ERR_NO_NWFILTER:
                raise
        else:
            network += ',filterref=clean-traffic'
        disk = 'path={0},format=qcow2,bus=scsi,discard=unmap'.format(path)
        channel = 'unix,name=org.qemu.guest_agent.0'
        execute(('virt-install',
                 '--quiet',
                 '--connect={0}'.format(self._uri),
                 '--name', name,
                 '--cpu', 'host-model-only,+vmx',
                 '--vcpus', str(kwargs['cpus']),
                 '--memory', str(kwargs['memory']),
                 '--controller', 'scsi,model=virtio-scsi',
                 '--disk', disk,
                 '--network', network,
                 '--graphics', 'spice',
                 '--channel', channel,
                 '--os-variant', _get_image_os(template),
                 '--import',
                 '--noautoconsole',
                 '--noreboot'))
        netmac = _get_domain_mac_addresses(_get_domain(conn, name)).next()
        ipaddress = _new_network_ipaddress(net)
        # TODO: fix race between _new_network_ipaddress and ip reservation
        _add_network_host(net, hostname, ipaddress)
        _add_network_dhcp_host(net, hostname, netmac['mac'], ipaddress)
        return {
            'name': name,
            'password': kwargs['password'],
            'mac': netmac['mac'],
            'hostname': fqdn,
            'ipaddress': ipaddress,
        }

    def instance_address(self, vmid, network=None):
        """Return the IP addresses leased to the instance.

        When ``network`` is given, only interfaces attached to that
        network are considered.
        """
        conn = self._libvirt_open()
        dom = _get_domain(conn, vmid)
        netmacs = _get_domain_macs_by_network(dom)
        if network:
            # BUGFIX: the comprehension previously copied the mapping
            # unchanged; the filter on the network name was missing.
            netmacs = {k: v for k, v in netmacs.iteritems() if k == network}
        addresses = set()
        for name, macs in netmacs.iteritems():
            net = conn.networkLookupByName(name)
            for lease in _get_network_dhcp_leases(net):
                if lease['mac'] in macs:
                    addresses.add(lease['ip'])
        return list(addresses)

    def instance_start(self, vmid):
        """Start the domain; already-running domains are ignored."""
        dom = _get_domain(self._libvirt_open(), vmid)
        try:
            dom.create()
        except libvirt.libvirtError as e:
            if e.get_error_code() != libvirt.VIR_ERR_OPERATION_INVALID:
                raise

    def instance_stop(self, vmid):
        """Request a guest shutdown (agent first, then ACPI); already-stopped
        domains are ignored."""
        dom = _get_domain(self._libvirt_open(), vmid)
        try:
            dom.shutdownFlags(
                libvirt.VIR_DOMAIN_SHUTDOWN_GUEST_AGENT |
                libvirt.VIR_DOMAIN_SHUTDOWN_ACPI_POWER_BTN
            )
        except libvirt.libvirtError as e:
            if e.get_error_code() != libvirt.VIR_ERR_OPERATION_INVALID:
                raise

    def instance_delete(self, vmid):
        """Destroy the domain, delete its disks and DNS/DHCP reservations,
        then undefine it."""
        conn = self._libvirt_open()
        dom = _get_domain(conn, vmid)
        try:
            dom.destroy()
        except libvirt.libvirtError as e:
            if e.get_error_code() != libvirt.VIR_ERR_OPERATION_INVALID:
                raise
        xmldesc = etree.fromstring(dom.XMLDesc())
        for disk in xmldesc.iterfind('./devices/disk/source'):
            try:
                os.remove(disk.get('file'))
            except OSError as e:
                if e.errno != os.errno.ENOENT:
                    raise
        netmacs = _get_domain_macs_by_network(dom)
        for network, macs in netmacs.iteritems():
            net = conn.networkLookupByName(network)
            for x in _get_network_dhcp_hosts(net):
                if x['mac'] in macs:
                    _del_network_host(net, x['name'])
                    _del_network_dhcp_host(net, x['name'])
        dom.undefineFlags(libvirt.VIR_DOMAIN_UNDEFINE_SNAPSHOTS_METADATA)
def _get_image_os(image):
    """Map a virt-builder template name onto a virt-install os-variant.

    Known irregular names come from the override table; everything else
    simply has its dashes removed.
    """
    return _IMAGE_OS_TABLE.get(image, image.replace('-', ''))
def _create_base(template, arch, repository):
    """Ensure the shared base image for (template, arch) exists.

    Builds it with virt-builder on first use, then returns its file name
    (relative to the repository directory).
    """
    name = '_{0}-{1}.{2}'.format(template, arch, BASE_FORMAT)
    path = os.path.join(repository, name)
    if os.path.exists(path):
        return name
    execute(('virt-builder', template,
             '-o', path,
             '--size', BASE_SIZE,
             '--format', BASE_FORMAT,
             '--arch', arch,
             '--root-password', 'locked:disabled'))
    # As mentioned in the virt-builder man page (section "CLONES") the
    # resulting image should be cleaned before being used as a template.
    # TODO: handle half-backed templates
    execute(('virt-sysprep', '-a', path))
    return name
def _get_virt_templates():
    """Return the parsed JSON template catalogue from `virt-builder -l`."""
    output, _ = execute(('virt-builder', '-l', '--list-format', 'json'),
                        stdout=subprocess.PIPE)
    return json.loads(output)
def _get_domain(conn, name):
    """Look up a libvirt domain by name.

    Raises InstanceNotFound when the domain does not exist; any other
    libvirt error is propagated unchanged.
    """
    try:
        return conn.lookupByName(name)
    except libvirt.libvirtError as e:
        if e.get_error_code() != libvirt.VIR_ERR_NO_DOMAIN:
            raise
        raise InstanceNotFound(name)
def _get_domain_mac_addresses(dom):
    """Yield {'mac', 'network'} for each network interface of the domain."""
    desc = etree.fromstring(dom.XMLDesc())
    for nic in desc.iterfind('./devices/interface[@type="network"]'):
        yield {
            'mac': nic.find('./mac').get('address'),
            'network': nic.find('./source').get('network'),
        }
def _get_domain_macs_by_network(dom):
    """Group the domain's MAC addresses by the network they attach to."""
    grouped = {}
    for entry in _get_domain_mac_addresses(dom):
        grouped.setdefault(entry['network'], []).append(entry['mac'])
    return grouped
def _get_pool_path(pool):
    """Return the filesystem path of a directory-backed storage pool.

    Raises OSError(ENOENT) when the pool is not directory-backed.
    """
    desc = etree.fromstring(pool.XMLDesc())
    node = next(desc.iterfind('.[@type="dir"]/target/path'), None)
    if node is None:
        raise OSError(os.errno.ENOENT, 'Path not found for pool')
    return node.text
def _get_network_domainname(net):
    """Return the network's DNS domain name, or None when unset."""
    desc = etree.fromstring(net.XMLDesc())
    node = next(desc.iterfind('./domain'), None)
    return node.get('name') if node is not None else None
def _add_network_host(net, hostname, ipaddress):
    # Register a DNS host record (hostname -> ip) on the libvirt network,
    # replacing any previous record for the same hostname.
    xmlhost = etree.Element('host')
    xmlhost.set('ip', ipaddress)
    etree.SubElement(xmlhost, 'hostname').text = hostname
    # Attempt to delete if present
    _del_network_host(net, hostname)
    net.update(_NET_ADD_LAST, _NET_DNS_HOST, 0, etree.tostring(xmlhost),
               _NET_UPDATE_FLAGS)
def _del_network_host(net, hostname):
    # Remove the DNS host record for 'hostname'; a missing record
    # (OPERATION_INVALID) is silently ignored.
    xmlhost = etree.Element('host')
    etree.SubElement(xmlhost, 'hostname').text = hostname
    try:
        net.update(_NET_DELETE, _NET_DNS_HOST, 0, etree.tostring(xmlhost),
                   _NET_UPDATE_FLAGS)
    except libvirt.libvirtError as e:
        if e.get_error_code() != libvirt.VIR_ERR_OPERATION_INVALID:
            raise
def _add_network_dhcp_host(net, hostname, mac, ipaddress):
    # Register a static DHCP reservation (mac/name -> ip) on the network,
    # replacing any previous reservation for the same hostname.
    xmlhost = etree.Element('host')
    xmlhost.set('mac', mac)
    xmlhost.set('name', hostname)
    xmlhost.set('ip', ipaddress)
    # Attempt to delete if present
    _del_network_dhcp_host(net, hostname)
    net.update(_NET_ADD_LAST, _NET_DHCP_HOST, 0, etree.tostring(xmlhost),
               _NET_UPDATE_FLAGS)
def _del_network_dhcp_host(net, hostname):
    # Remove the static DHCP reservation for 'hostname'; a missing
    # reservation (OPERATION_INVALID) is silently ignored.
    xmlhost = etree.Element('host')
    xmlhost.set('name', hostname)
    try:
        net.update(_NET_DELETE, _NET_DHCP_HOST, 0, etree.tostring(xmlhost),
                   _NET_UPDATE_FLAGS)
    except libvirt.libvirtError as e:
        if e.get_error_code() != libvirt.VIR_ERR_OPERATION_INVALID:
            raise
def _get_network_dhcp_hosts(net):
    """Yield the statically configured DHCP hosts of a libvirt network."""
    desc = etree.fromstring(net.XMLDesc())
    for host in desc.iterfind('./ip/dhcp/host'):
        yield {'name': host.get('name'),
               'mac': host.get('mac'),
               'ip': host.get('ip')}
def _get_network_dhcp_leases(net):
    """Yield static DHCP reservations first, then the dynamic leases."""
    for host in _get_network_dhcp_hosts(net):
        yield host
    for lease in net.DHCPLeases():
        yield {'name': lease['hostname'],
               'mac': lease['mac'],
               'ip': lease['ipaddr']}
def _new_network_ipaddress(net):
    # Pick the first free IP address on the network: collect every address
    # already reserved or leased (plus the gateway itself) and scan the
    # subnet, skipping the network and broadcast addresses via [1:-1].
    xmldesc = etree.fromstring(net.XMLDesc())
    hosts = _get_network_dhcp_leases(net)
    addresses = set(netaddr.IPAddress(x['ip']) for x in hosts)
    localip = xmldesc.find('./ip').get('address')
    netmask = xmldesc.find('./ip').get('netmask')
    addresses.add(netaddr.IPAddress(localip))
    for ip in netaddr.IPNetwork(localip, netmask)[1:-1]:
        if ip not in addresses:
            return str(ip)
| lyarwood/virt-deploy | virtdeploy/drivers/libvirt.py | Python | gpl-2.0 | 12,400 |
from Tools.Profile import profile
profile("LOAD:ElementTree")
import xml.etree.cElementTree
import os
profile("LOAD:enigma_skin")
from enigma import eSize, ePoint, eRect, gFont, eWindow, eLabel, ePixmap, eWindowStyleManager, addFont, gRGB, eWindowStyleSkinned, getDesktop
from Components.config import ConfigSubsection, ConfigText, config, ConfigYesNo, ConfigSelection, ConfigNothing
from Components.Converter.Converter import Converter
from Components.Sources.Source import Source, ObsoleteSource
from Components.SystemInfo import SystemInfo
from Tools.Directories import resolveFilename, SCOPE_SKIN, SCOPE_SKIN_IMAGE, SCOPE_FONTS, SCOPE_ACTIVE_SKIN, SCOPE_ACTIVE_LCDSKIN, SCOPE_CURRENT_SKIN, SCOPE_CONFIG, fileExists
from Tools.Import import my_import
from Tools.LoadPixmap import LoadPixmap
from Components.RcModel import rc_model
from boxbranding import getBoxType
# VFD text mode: channel name or clock; collapses to ConfigNothing when the
# text skin file is not installed.
config.vfd = ConfigSubsection()
config.vfd.show = ConfigSelection([("skin_text.xml", _("Channel Name")), ("skin_text_clock.xml", _("Clock"))], "skin_text.xml")
if not os.path.exists("/usr/share/enigma2/skin_text.xml"):
    config.vfd.show = ConfigNothing()
# Skin-defined lookup tables, filled while parsing skin XML.
colorNames = {}
colorNamesHuman = {}
# Font aliases: (face, point size, line height, character width); indexes
# 2 and 3 are used by parseCoordinate() for 'h' and 'w' units.
fonts = {
    "Body": ("Regular", 18, 22, 16),
    "ChoiceList": ("Regular", 20, 24, 18),
}
parameters = {}
constant_widgets = {}
variables = {}
DEFAULT_SKIN = "OPD-Blue-Line/skin.xml"
DEFAULT_DISPLAY_SKIN = "skin_display.xml"
def dump(x, i=0):
    # Debug helper: recursively print a DOM node tree, indenting one space
    # per nesting level (Python 2 print statement).
    print " " * i + str(x)
    try:
        for n in x.childNodes:
            dump(n, i + 1)
    except:
        # Leaf node without a childNodes attribute -- nothing to recurse.
        None
class SkinError(Exception):
    """Error in the primary skin; str() includes the configured skin name."""
    def __init__(self, message):
        self.msg = message
    def __str__(self):
        return "[Skin] {%s}: %s. Please contact the skin's author!" % (config.skin.primary_skin.value, self.msg)
class DisplaySkinError(Exception):
    """Error in the display skin; str() includes the configured skin name."""
    def __init__(self, message):
        self.msg = message
    def __str__(self):
        return "[Skin] {%s}: %s. Please contact the skin's author!" % (config.skin.display_skin.value, self.msg)
# Parsed skin XML roots as (base path, root element), appended by addSkin().
dom_skins = [ ]
def addSkin(name, scope = SCOPE_SKIN):
if name is None or not len(name):
print "[SKIN ERROR] attempt to add a skin without filename"
return False
filename = resolveFilename(scope, name)
if fileExists(filename):
mpath = os.path.dirname(filename) + "/"
try:
file = open(filename, 'r')
dom_skins.append((mpath, xml.etree.cElementTree.parse(file).getroot()))
except:
print "[SKIN ERROR] error in %s" % filename
return False
else:
return True
return False
def get_modular_files(name, scope = SCOPE_SKIN):
    """Return the sorted list of 'mySkin/skin_*.xml' add-on files of a skin.

    The returned entries are relative paths ('mySkin/<file>'); an empty
    list is returned when the skin has no mySkin directory.
    """
    dirname = resolveFilename(scope, name + 'mySkin/')
    if not fileExists(dirname):
        return []
    candidates = os.listdir(dirname)
    file_list = ["mySkin/" + entry for entry in candidates
                 if entry.startswith('skin_') and entry.endswith('.xml')]
    return sorted(file_list, key=str.lower)
def skin_user_skinname():
    """Return the per-skin user override filename if it exists, else None.

    The override lives in the config directory and is named
    'skin_user_<skin directory>.xml'.
    """
    skindir = config.skin.primary_skin.value[:config.skin.primary_skin.value.rfind('/')]
    name = "skin_user_%s.xml" % skindir
    if fileExists(resolveFilename(SCOPE_CONFIG, name)):
        return name
    return None
# Skin selection settings; the display skin default depends on whether the
# box has any front panel / LCD / OLED display.
config.skin = ConfigSubsection()
config.skin.primary_skin = ConfigText(default = DEFAULT_SKIN)
if SystemInfo["FrontpanelDisplay"] or SystemInfo["LcdDisplay"] or SystemInfo["OledDisplay"] or SystemInfo["FBLCDDisplay"]:
    config.skin.display_skin = ConfigText(default = "skin_display.xml")
else:
    config.skin.display_skin = ConfigText(default = "skin_display_text.xml")
def skinExists(skin = False):
    # Validate the given skin name (defaulting to the configured primary
    # skin); when the skin file is missing, fall back to DEFAULT_SKIN or
    # the plain 'skin.xml', save the setting and re-validate.
    # BUGFIX: the argument check was 'isinstance(skin, skin)' -- comparing
    # the value against itself, which raises TypeError for any truthy
    # non-class argument. The intended check is against str.
    if not skin or not isinstance(skin, str):
        skin = config.skin.primary_skin.value
    skin = resolveFilename(SCOPE_SKIN, skin)
    if not fileExists(skin):
        if fileExists(resolveFilename(SCOPE_SKIN, DEFAULT_SKIN)):
            config.skin.primary_skin.value = DEFAULT_SKIN
        else:
            config.skin.primary_skin.value = "skin.xml"
        config.skin.primary_skin.save()
        skinExists()
def getSkinPath():
    """Return the primary skin's directory path, always ending in '/'."""
    path = config.skin.primary_skin.value.replace('skin.xml', '')
    if not path.endswith('/'):
        path += '/'
    return path
primary_skin_path = getSkinPath()
profile("LoadSkin")
# Load the per-user skin override first; fall back to the generic
# skin_user.xml when there is none (or it fails to load).
res = None
name = skin_user_skinname()
if name:
    res = addSkin(name, SCOPE_CONFIG)
if not name or not res:
    addSkin('skin_user.xml', SCOPE_CONFIG)
addSkin('skin_box.xml')
addSkin('skin_second_infobar.xml')
# display_skin_id selects which desktop/display the LCD skin renders on.
display_skin_id = 1
if SystemInfo["FrontpanelDisplay"] or SystemInfo["LcdDisplay"] or SystemInfo["OledDisplay"] or SystemInfo["FBLCDDisplay"]:
    if fileExists('/usr/share/enigma2/display/skin_display.xml'):
        # Prefer a user-provided display skin from the config directory.
        if fileExists(resolveFilename(SCOPE_CONFIG, config.skin.display_skin.value)):
            addSkin(config.skin.display_skin.value, SCOPE_CONFIG)
        else:
            addSkin('display/' + config.skin.display_skin.value)
    if addSkin('skin_display.xml'):
        display_skin_id = 2
# VFD text skin (channel name vs clock); config.vfd.show may be
# ConfigNothing on boxes without the text skin, hence the fallback.
try:
    addSkin(config.vfd.show.value)
except:
    addSkin('skin_text.xml')
addSkin('skin_subtitles.xml')
# Optional user-defined color / header overrides shipped inside the skin
# directory (Python 2 'except ..., err' syntax).
try:
    addSkin(primary_skin_path + 'skin_user_colors.xml', SCOPE_SKIN)
    print "[SKIN] loading user defined colors for skin", (primary_skin_path + 'skin_user_colors.xml')
except (SkinError, IOError, AssertionError), err:
    print "[SKIN] not loading user defined colors for skin"
try:
    addSkin(primary_skin_path + 'skin_user_header.xml', SCOPE_SKIN)
    print "[SKIN] loading user defined header file for skin", (primary_skin_path + 'skin_user_header.xml')
except (SkinError, IOError, AssertionError), err:
    print "[SKIN] not loading user defined header file for skin"
def load_modular_files():
modular_files = get_modular_files(primary_skin_path, SCOPE_SKIN)
if len(modular_files):
for f in modular_files:
try:
addSkin(primary_skin_path + f, SCOPE_SKIN)
print "[SKIN] loading modular skin file : ", (primary_skin_path + f)
except (SkinError, IOError, AssertionError), err:
print "[SKIN] failed to load modular skin file : ", err
load_modular_files()
try:
if not addSkin(config.skin.primary_skin.value):
raise SkinError, "primary skin not found"
except Exception, err:
print "SKIN ERROR:", err
skin = DEFAULT_SKIN
if config.skin.primary_skin.value == skin:
skin = 'skin.xml'
print "defaulting to standard skin...", skin
config.skin.primary_skin.value = skin
addSkin(skin)
del skin
addSkin('skin_default.xml')
profile("LoadSkinDefaultDone")
def parseCoordinate(s, e, size=0, font=None):
	"""Parse one skin coordinate expression.

	s    -- coordinate string: an integer, 'center', '*', or an expression
	        optionally prefixed with 'e' (parent extent) or 'c' (parent
	        center) and/or suffixed with '%' (percent of extent),
	        'w' (font width units) or 'h' (font height units).
	e    -- extent of the parent along this axis.
	size -- size of the object itself (used only by 'center').
	font -- font alias name, required for the 'w'/'h' units.

	Returns the resolved coordinate clamped to >= 0, or None for '*'.
	"""
	s = s.strip()
	if s == "center":
		if not size:
			val = 0
		else:
			val = (e - size)/2
	elif s == '*':
		return None
	else:
		# BUGFIX: use equality instead of identity ('is') for the prefix and
		# suffix tests -- 'is' on strings only worked by virtue of CPython's
		# small-string interning and is not a reliable comparison.
		if s[0] == 'e':
			val = e
			s = s[1:]
		elif s[0] == 'c':
			val = e/2
			s = s[1:]
		else:
			val = 0
		if s:
			if s[-1] == '%':
				val += e * int(s[:-1]) / 100
			elif s[-1] == 'w':
				val += fonts[font][3] * int(s[:-1])
			elif s[-1] == 'h':
				val += fonts[font][2] * int(s[:-1])
			else:
				val += int(s)
	if val < 0:
		val = 0
	return val
def getParentSize(object, desktop):
	"""Return the size of the nearest sized ancestor of *object*.

	A zero-sized immediate parent is skipped in favour of its own parent.
	When no parent can be found, the desktop size is used; with no object
	at all an empty eSize() is returned."""
	if not object:
		return eSize()
	parent = object.getParent()
	# Skip over an empty-sized parent widget.
	if parent and parent.size().isEmpty():
		parent = parent.getParent()
	if parent:
		return parent.size()
	if desktop:
		return desktop.size()
	return eSize()
def parsePosition(s, scale, object = None, desktop = None, size = None):
	"""Parse an 'x,y' skin position string into a scaled ePoint.

	Skin variable references are expanded first; coordinates prefixed with
	'c' or 'e' are resolved against the parent (or desktop) size.
	"""
	if s in variables:
		s = variables[s]
	x, y = s.split(',')
	parentsize = eSize()
	# Only query the parent size when a coordinate actually needs it.
	if object and (x[0] in ('c', 'e') or y[0] in ('c', 'e')):
		parentsize = getParentSize(object, desktop)
	xval = parseCoordinate(x, parentsize.width(), size and size.width())
	yval = parseCoordinate(y, parentsize.height(), size and size.height())
	# Apply the (numerator, denominator) scale ratios per axis.
	return ePoint(xval * scale[0][0] / scale[0][1], yval * scale[1][0] / scale[1][1])
def parseSize(s, scale, object = None, desktop = None):
	"""Parse a 'w,h' skin size string into a scaled eSize.

	Mirrors parsePosition(): variable references are expanded and 'c'/'e'
	coordinates are resolved against the parent (or desktop) size.
	"""
	if s in variables:
		s = variables[s]
	x, y = s.split(',')
	parentsize = eSize()
	# Only query the parent size when a coordinate actually needs it.
	if object and (x[0] in ('c', 'e') or y[0] in ('c', 'e')):
		parentsize = getParentSize(object, desktop)
	xval = parseCoordinate(x, parentsize.width())
	yval = parseCoordinate(y, parentsize.height())
	# Apply the (numerator, denominator) scale ratios per axis.
	return eSize(xval * scale[0][0] / scale[0][1], yval * scale[1][0] / scale[1][1])
def parseFont(s, scale):
	"""Resolve a font specification into a scaled gFont.

	s is either a registered font alias (looked up in the global 'fonts'
	table) or a literal 'Name;size' string.  Only the horizontal
	(numerator, denominator) scale ratio is applied to the size.
	"""
	try:
		f = fonts[s]
		name = f[0]
		size = f[1]
	except KeyError:
		# Not a registered alias -- parse a literal "name;size" spec.
		# (Narrowed from a bare except so real bugs are not swallowed.)
		name, size = s.split(';')
	return gFont(name, int(size) * scale[0][0] / scale[0][1])
def parseColor(s):
	"""Parse a '#aarrggbb' literal or a named skin color into a gRGB."""
	if s[0] != '#':
		try:
			return colorNames[s]
		except KeyError:
			# Only a missing color name is a skin error; the former bare
			# except would also have masked unrelated failures.
			raise SkinError("color '%s' must be #aarrggbb or valid named color" % s)
	return gRGB(int(s[1:], 0x10))
def collectAttributes(skinAttributes, node, context, skin_path_prefix=None, ignore=(), filenames=frozenset(("pixmap", "pointer", "seek_pointer", "backgroundPixmap", "selectionPixmap", "sliderPixmap", "scrollbarbackgroundPixmap"))):
	"""Collect the XML attributes of *node* into the skinAttributes list.

	File-name attributes are resolved against the active skin (the LCD skin
	scope wins when the file exists there).  'size', 'position' and 'font'
	are buffered because resolving 'position' through the layout context
	may need the other two; the resolved position/size are appended last.
	"""
	# walk all attributes
	size = None
	pos = None
	font = None
	for attrib, value in node.items():
		if attrib not in ignore:
			if attrib in filenames:
				pngfile = resolveFilename(SCOPE_ACTIVE_SKIN, value, path_prefix=skin_path_prefix)
				# An LCD-skin copy of the file takes precedence.
				if fileExists(resolveFilename(SCOPE_ACTIVE_LCDSKIN, value, path_prefix=skin_path_prefix)):
					pngfile = resolveFilename(SCOPE_ACTIVE_LCDSKIN, value, path_prefix=skin_path_prefix)
				value = pngfile
			if attrib == 'size':
				size = value.encode("utf-8")
			elif attrib == 'position':
				pos = value.encode("utf-8")
			elif attrib == 'font':
				font = value.encode("utf-8")
				skinAttributes.append((attrib, font))
			else:
				skinAttributes.append((attrib, value.encode("utf-8")))
	if pos is not None:
		# Resolve the position within the layout context (consumes size/font).
		pos, size = context.parse(pos, size, font)
		skinAttributes.append(('position', pos))
	if size is not None:
		skinAttributes.append(('size', size))
def morphRcImagePath(value):
	"""Map the default skin's remote-control images to the model-specific
	rc.png when the box does not use the default remote."""
	if rc_model.rcIsDefault() is False:
		default_rc_images = (
			'/usr/share/enigma2/skin_default/rc.png',
			'/usr/share/enigma2/skin_default/rcold.png',
			'/usr/share/enigma2/skin_default/rc0.png',
			'/usr/share/enigma2/skin_default/rc1.png',
			'/usr/share/enigma2/skin_default/rc2.png',
		)
		if value in default_rc_images:
			value = rc_model.getRcLocation() + 'rc.png'
	return value
def loadPixmap(path, desktop):
	"""Load a pixmap, honouring an optional '#opt1,opt2' suffix on the path
	(currently only the 'cached' option is recognised)."""
	cached = False
	marker = path.find("#")
	if marker != -1:
		opts = path[marker+1:].split(',')
		path = path[:marker]
		cached = "cached" in opts
	ptr = LoadPixmap(morphRcImagePath(path), desktop, cached)
	if ptr is None:
		print("pixmap file %s not found!" % path)
	return ptr
pngcache = []
def cachemenu():
pixmaplist = []
for (path, skin) in dom_skins:
for x in skin.findall("screen"):
if x.attrib.get('name') == 'menu_mainmenu':
print x.attrib.get('name')
for s in x.findall("ePixmap"):
if s.attrib.get('pixmap','') is not '':
pixmaplist.append(s.attrib.get('pixmap',''))
for s in x.findall('widget'):
if s.attrib.get('pixmap','') is not '':
pixmaplist.append(s.attrib.get('pixmap',''))
desktop = getDesktop(0)
for s in pixmaplist:
value ='/usr/share/enigma2/'+s
ptr = loadPixmap(value, desktop)
pngcache.append((value,ptr))
try:
if config.skin.primary_skin.value == "OPD-Blue-Line/skin.xml" or config.skin.primary_skin.value == DEFAULT_SKIN:
cachemenu()
except:
print "fail cache main menu"
class AttributeParser:
	"""Applies parsed skin attributes to an enigma GUI object.

	Each method name corresponds to one skin attribute; applyOne/applyAll
	dispatch by name via getattr.  Unknown attributes or bad values are
	logged and skipped so a broken attribute never aborts the skin load.
	"""
	def __init__(self, guiObject, desktop, scale=((1,1),(1,1))):
		self.guiObject = guiObject
		self.desktop = desktop
		# ((x_num, x_den), (y_num, y_den)) coordinate scaling ratios.
		self.scaleTuple = scale
	def applyOne(self, attrib, value):
		# Dispatch a single attribute to the same-named method.
		try:
			getattr(self, attrib)(value)
		except AttributeError:
			print "[SKIN] Attribute \"%s\" with value \"%s\" in object of type \"%s\" is not implemented" % (attrib, value, self.guiObject.__class__.__name__)
		except SkinError, ex:
			print "\033[91m[SKIN] Error:", ex,
			print "\033[0m"
		except:
			print "[Skin] attribute \"%s\" with wrong (or unknown) value \"%s\" in object of type \"%s\"" % (attrib, value, self.guiObject.__class__.__name__)
	def applyAll(self, attrs):
		# Same dispatch as applyOne, for a list of (attribute, value) pairs.
		for attrib, value in attrs:
			try:
				getattr(self, attrib)(value)
			except AttributeError:
				print "[SKIN] Attribute \"%s\" with value \"%s\" in object of type \"%s\" is not implemented" % (attrib, value, self.guiObject.__class__.__name__)
			except SkinError, ex:
				print "\033[91m[Skin] Error:", ex,
				print "\033[0m"
			except:
				print "[Skin] attribute \"%s\" with wrong (or unknown) value \"%s\" in object of type \"%s\"" % (attrib, value, self.guiObject.__class__.__name__)
	def conditional(self, value):
		# Handled during screen processing, not as a GUI attribute.
		pass
	def position(self, value):
		# value is either a pre-parsed (x, y) tuple or a skin string.
		if isinstance(value, tuple):
			self.guiObject.move(ePoint(*value))
		else:
			self.guiObject.move(parsePosition(value, self.scaleTuple, self.guiObject, self.desktop, self.guiObject.csize()))
	def size(self, value):
		# value is either a pre-parsed (w, h) tuple or a skin string.
		if isinstance(value, tuple):
			self.guiObject.resize(eSize(*value))
		else:
			self.guiObject.resize(parseSize(value, self.scaleTuple, self.guiObject, self.desktop))
	def animationPaused(self, value):
		pass
	def NoAnimationAfter(self, value):
		pass
	def Animation(self, value):
		# Map the symbolic animation mode onto the show/hide bitmask.
		self.guiObject.setAnimationMode(
			{ "disable": 0x00,
				"off": 0x00,
				"offshow": 0x10,
				"offhide": 0x01,
				"onshow": 0x01,
				"onhide": 0x10,
				"disable_onshow": 0x10,
				"disable_onhide": 0x01,
			}[value])
	def animationMode(self, value):
		# Same mapping as Animation (attribute name alias).
		self.guiObject.setAnimationMode(
			{ "disable": 0x00,
				"off": 0x00,
				"offshow": 0x10,
				"offhide": 0x01,
				"onshow": 0x01,
				"onhide": 0x10,
				"disable_onshow": 0x10,
				"disable_onhide": 0x01,
			}[value])
	def title(self, value):
		# Titles and texts are run through gettext for translation.
		self.guiObject.setTitle(_(value))
	def text(self, value):
		self.guiObject.setText(_(value))
	def font(self, value):
		self.guiObject.setFont(parseFont(value, self.scaleTuple))
	def secondfont(self, value):
		self.guiObject.setSecondFont(parseFont(value, self.scaleTuple))
	def zPosition(self, value):
		self.guiObject.setZPosition(int(value))
	def itemHeight(self, value):
		self.guiObject.setItemHeight(int(value))
	def pixmap(self, value):
		# Serve from pngcache when possible, otherwise load from disk,
		# resolving relative paths against the skin image scope.
		global pngcache
		ptr = None
		for cvalue, cptr in pngcache:
			if cvalue== value:
				ptr=cptr
		if ptr is None:
			if not fileExists(value):
				ptr = loadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, value), self.desktop)
			else:
				ptr = loadPixmap(value, self.desktop)
		self.guiObject.setPixmap(ptr)
	def backgroundPixmap(self, value):
		# Same cache-then-load strategy as pixmap().
		global pngcache
		ptr = None
		for cvalue, cptr in pngcache:
			if cvalue== value:
				ptr=cptr
		if ptr is None:
			if not fileExists(value):
				ptr = loadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, value), self.desktop)
			else:
				ptr = loadPixmap(value, self.desktop)
		self.guiObject.setBackgroundPicture(ptr)
	def selectionPixmap(self, value):
		# Same cache-then-load strategy as pixmap().
		global pngcache
		ptr = None
		for cvalue, cptr in pngcache:
			if cvalue== value:
				ptr=cptr
		if ptr is None:
			if not fileExists(value):
				ptr = loadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, value), self.desktop)
			else:
				ptr = loadPixmap(value, self.desktop)
		self.guiObject.setSelectionPicture(ptr)
	def sliderPixmap(self, value):
		# Same cache-then-load strategy as pixmap().
		global pngcache
		ptr = None
		for cvalue, cptr in pngcache:
			if cvalue== value:
				ptr=cptr
		if ptr is None:
			if not fileExists(value):
				ptr = loadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, value), self.desktop)
			else:
				ptr = loadPixmap(value, self.desktop)
		self.guiObject.setSliderPicture(ptr)
	def scrollbarbackgroundPixmap(self, value):
		# Same cache-then-load strategy as pixmap().
		global pngcache
		ptr = None
		for cvalue, cptr in pngcache:
			if cvalue== value:
				ptr=cptr
		if ptr is None:
			if not fileExists(value):
				ptr = loadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, value), self.desktop)
			else:
				ptr = loadPixmap(value, self.desktop)
		self.guiObject.setScrollbarBackgroundPicture(ptr)
	def scrollbarSliderPicture(self, value):
		# Same cache-then-load strategy as pixmap().
		global pngcache
		ptr = None
		for cvalue, cptr in pngcache:
			if cvalue== value:
				ptr=cptr
		if ptr is None:
			if not fileExists(value):
				ptr = loadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, value), self.desktop)
			else:
				ptr = loadPixmap(value, self.desktop)
		self.guiObject.setScrollbarSliderPicture(ptr)
	def scrollbarBackgroundPicture(self, value):
		# Same cache-then-load strategy as pixmap().
		global pngcache
		ptr = None
		for cvalue, cptr in pngcache:
			if cvalue== value:
				ptr=cptr
		if ptr is None:
			if not fileExists(value):
				ptr = loadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, value), self.desktop)
			else:
				ptr = loadPixmap(value, self.desktop)
		self.guiObject.setScrollbarBackgroundPicture(ptr)
	def alphatest(self, value):
		self.guiObject.setAlphatest(
			{ "on": 1,
				"off": 0,
				"blend": 2,
			}[value])
	def scale(self, value):
		self.guiObject.setScale(1)
	def orientation(self, value):
		# (orientation constant, invert flag)
		try:
			self.guiObject.setOrientation(*
				{ "orVertical": (self.guiObject.orVertical, False),
					"orTopToBottom": (self.guiObject.orVertical, False),
					"orBottomToTop": (self.guiObject.orVertical, True),
					"orHorizontal": (self.guiObject.orHorizontal, False),
					"orLeftToRight": (self.guiObject.orHorizontal, False),
					"orRightToLeft": (self.guiObject.orHorizontal, True),
				}[value])
		except KeyError:
			print "[Skin] Orientation must be either Vertical or Horizontal!, not %s. Please contact the skin's author!" % value
	def valign(self, value):
		try:
			self.guiObject.setVAlign(
				{ "top": self.guiObject.alignTop,
					"center": self.guiObject.alignCenter,
					"bottom": self.guiObject.alignBottom
				}[value])
		except KeyError:
			print "[Skin] Valign must be either top, center or bottom!, not %s. Please contact the skin's author!" % value
	def halign(self, value):
		try:
			self.guiObject.setHAlign(
				{ "left": self.guiObject.alignLeft,
					"center": self.guiObject.alignCenter,
					"right": self.guiObject.alignRight,
					"block": self.guiObject.alignBlock
				}[value])
		except KeyError:
			print "[Skin] Halign must be either left, center, right or block!, not %s. Please contact the skin's author!" % value
	def textOffset(self, value):
		if value in variables:
			value = variables[value]
		x, y = value.split(',')
		self.guiObject.setTextOffset(ePoint(int(x) * self.scaleTuple[0][0] / self.scaleTuple[0][1], int(y) * self.scaleTuple[1][0] / self.scaleTuple[1][1]))
		if isVTISkin:
			self.guiObject.setUseVTIWorkaround()
	def flags(self, value):
		# Comma-separated list of eWindow flag names.
		if value in variables:
			value = variables[value]
		flags = value.split(',')
		for f in flags:
			try:
				fv = eWindow.__dict__[f]
				self.guiObject.setFlag(fv)
			except KeyError:
				print "illegal flag %s!" % f
	def backgroundColor(self, value):
		self.guiObject.setBackgroundColor(parseColor(value))
	def backgroundColorSelected(self, value):
		self.guiObject.setBackgroundColorSelected(parseColor(value))
	def foregroundColor(self, value):
		self.guiObject.setForegroundColor(parseColor(value))
	def foregroundColorSelected(self, value):
		self.guiObject.setForegroundColorSelected(parseColor(value))
	def foregroundNotCrypted(self, value):
		self.guiObject.setForegroundColor(parseColor(value))
	def backgroundNotCrypted(self, value):
		self.guiObject.setBackgroundColor(parseColor(value))
	def foregroundCrypted(self, value):
		self.guiObject.setForegroundColor(parseColor(value))
	def backgroundCrypted(self, value):
		self.guiObject.setBackgroundColor(parseColor(value))
	def foregroundEncrypted(self, value):
		self.guiObject.setForegroundColor(parseColor(value))
	def backgroundEncrypted(self, value):
		self.guiObject.setBackgroundColor(parseColor(value))
	def shadowColor(self, value):
		self.guiObject.setShadowColor(parseColor(value))
	def selectionDisabled(self, value):
		self.guiObject.setSelectionEnable(0)
	def transparent(self, value):
		self.guiObject.setTransparent(int(value))
	def borderColor(self, value):
		self.guiObject.setBorderColor(parseColor(value))
	def borderWidth(self, value):
		self.guiObject.setBorderWidth(int(value))
	def scrollbarSliderBorderWidth(self, value):
		self.guiObject.setScrollbarSliderBorderWidth(int(value))
	def scrollbarWidth(self, value):
		self.guiObject.setScrollbarWidth(int(value))
	def scrollbarSliderBorderColor(self, value):
		self.guiObject.setSliderBorderColor(parseColor(value))
	def scrollbarSliderForegroundColor(self, value):
		self.guiObject.setSliderForegroundColor(parseColor(value))
	def scrollbarMode(self, value):
		# value names a constant on the widget (e.g. showOnDemand).
		self.guiObject.setScrollbarMode(getattr(self.guiObject, value))
	def enableWrapAround(self, value):
		self.guiObject.setWrapAround(True)
	def pointer(self, value):
		# "filename:pos" -- pointer pixmap and hotspot position.
		(name, pos) = value.split(':')
		pos = parsePosition(pos, self.scaleTuple)
		ptr = loadPixmap(name, self.desktop)
		self.guiObject.setPointer(0, ptr, pos)
	def seek_pointer(self, value):
		# "filename:pos" -- seek pointer pixmap and hotspot position.
		(name, pos) = value.split(':')
		pos = parsePosition(pos, self.scaleTuple)
		ptr = loadPixmap(name, self.desktop)
		self.guiObject.setPointer(1, ptr, pos)
	def shadowOffset(self, value):
		self.guiObject.setShadowOffset(parsePosition(value, self.scaleTuple))
	def noWrap(self, value):
		self.guiObject.setNoWrap(int(value))
	def linelength(self, value):
		pass
	def OverScan(self, value):
		self.guiObject.setOverscan(value)
def applySingleAttribute(guiObject, desktop, attrib, value, scale = ((1,1),(1,1))):
	"""Apply one skin attribute to a GUI object."""
	parser = AttributeParser(guiObject, desktop, scale)
	parser.applyOne(attrib, value)
def applyAllAttributes(guiObject, desktop, attributes, scale):
	"""Apply a list of (attribute, value) pairs to a GUI object."""
	parser = AttributeParser(guiObject, desktop, scale)
	parser.applyAll(attributes)
def loadSingleSkinData(desktop, skin, path_prefix):
	"""loads skin data like colors, windowstyle etc.

	Processes the non-screen sections of one skin DOM: output resolution,
	includes, color/font/parameter/variable tables, constant widgets,
	subtitle styles, window styles and desktop margins.
	"""
	assert skin.tag == "skin", "root element in skin must be 'skin'!"
	# <output id="0"> carries the main framebuffer resolution.
	for c in skin.findall("output"):
		id = c.attrib.get('id')
		if id:
			id = int(id)
		else:
			id = 0
		if id == 0:
			for res in c.findall("resolution"):
				get_attr = res.attrib.get
				xres = get_attr("xres")
				if xres:
					xres = int(xres)
				else:
					xres = 720
				yres = get_attr("yres")
				if yres:
					yres = int(yres)
				else:
					yres = 576
				bpp = get_attr("bpp")
				if bpp:
					bpp = int(bpp)
				else:
					bpp = 32
				from enigma import gMainDC
				gMainDC.getInstance().setResolution(xres, yres)
				desktop.resize(eSize(xres, yres))
				if bpp != 32:
					# load palette (not yet implemented)
					pass
	# <include filename="..."> pulls in another skin XML file.
	for skininclude in skin.findall("include"):
		filename = skininclude.attrib.get("filename")
		if filename:
			skinfile = resolveFilename(SCOPE_ACTIVE_SKIN, filename, path_prefix=path_prefix)
			if not fileExists(skinfile):
				skinfile = resolveFilename(SCOPE_SKIN_IMAGE, filename, path_prefix=path_prefix)
			if fileExists(skinfile):
				print "[SKIN] loading include:", skinfile
				loadSkin(skinfile)
	# <colors>: register named colors; also track a human-readable numeric
	# value per name (resolving aliases of already-known names).
	for c in skin.findall("colors"):
		for color in c.findall("color"):
			get_attr = color.attrib.get
			name = get_attr("name")
			color = get_attr("value")
			if name and color:
				colorNames[name] = parseColor(color)
				if color[0] != '#':
					for key in colorNames:
						if key == color:
							colorNamesHuman[name] = colorNamesHuman[key]
							break
				else:
					humancolor = color[1:]
					if len(humancolor) >= 6:
						colorNamesHuman[name] = int(humancolor,16)
			else:
				print("need color and name, got %s %s" % (name, color))
	# <fonts>: register font files and aliases.
	for c in skin.findall("fonts"):
		for font in c.findall("font"):
			get_attr = font.attrib.get
			filename = get_attr("filename", "<NONAME>")
			name = get_attr("name", "Regular")
			scale = get_attr("scale")
			if scale:
				scale = int(scale)
			else:
				scale = 100
			is_replacement = get_attr("replacement") and True or False
			render = get_attr("render")
			if render:
				render = int(render)
			else:
				render = 0
			# Look for the font file in the fonts scope first, then the
			# active skin, the current skin, and finally the LCD skin.
			resolved_font = resolveFilename(SCOPE_FONTS, filename, path_prefix=path_prefix)
			if not fileExists(resolved_font):
				resolved_font = resolveFilename(SCOPE_ACTIVE_SKIN, filename)
				if fileExists(resolveFilename(SCOPE_CURRENT_SKIN, filename)):
					resolved_font = resolveFilename(SCOPE_CURRENT_SKIN, filename)
				elif fileExists(resolveFilename(SCOPE_ACTIVE_LCDSKIN, filename)):
					resolved_font = resolveFilename(SCOPE_ACTIVE_LCDSKIN, filename)
			addFont(resolved_font, name, scale, is_replacement, render)
		# <alias>: symbolic font names with size/height/width defaults.
		for alias in c.findall("alias"):
			get = alias.attrib.get
			try:
				name = get("name")
				font = get("font")
				size = int(get("size"))
				height = int(get("height", size))
				width = int(get("width", size))
				global fonts
				fonts[name] = (font, size, height, width)
			except Exception, ex:
				print "[SKIN] bad font alias", ex
	# <parameters>: named tuples of ints, or (font, size) pairs.
	for c in skin.findall("parameters"):
		for parameter in c.findall("parameter"):
			get = parameter.attrib.get
			try:
				name = get("name")
				value = get("value")
				if name.find('Font') != -1:
					font = value.split(";")
					if isinstance(font, list) and len(font) == 2:
						parameters[name] = (str(font[0]), int(font[1]))
				else:
					parameters[name] = map(int, value.split(","))
			except Exception, ex:
				print "[SKIN] bad parameter", ex
	# <constant-widgets>: widget templates referenced by name from screens.
	for c in skin.findall("constant-widgets"):
		for constant_widget in c.findall("constant-widget"):
			get = constant_widget.attrib.get
			name = get("name")
			if name:
				constant_widgets[name] = constant_widget
	# <variables>: named "x,y" coordinate shortcuts.
	for c in skin.findall("variables"):
		for parameter in c.findall("variable"):
			get = parameter.attrib.get
			name = get("name")
			value = get("value")
			x, y = value.split(',')
			if value and name:
				variables[name] = str(x) + "," + str(y)
	# <subtitles>: per-style subtitle font/colors/border.
	for c in skin.findall("subtitles"):
		from enigma import eSubtitleWidget
		scale = ((1,1),(1,1))
		for substyle in c.findall("sub"):
			get_attr = substyle.attrib.get
			font = parseFont(get_attr("font"), scale)
			col = get_attr("foregroundColor")
			if col:
				foregroundColor = parseColor(col)
				haveColor = 1
			else:
				foregroundColor = gRGB(0xFFFFFF)
				haveColor = 0
			col = get_attr("borderColor")
			if col:
				borderColor = parseColor(col)
			else:
				borderColor = gRGB(0)
			borderwidth = get_attr("borderWidth")
			if borderwidth is None:
				# default: use a border size of 3
				borderWidth = 3
			else:
				borderWidth = int(borderwidth)
			face = eSubtitleWidget.__dict__[get_attr("name")]
			eSubtitleWidget.setFontStyle(face, font, haveColor, foregroundColor, borderColor, borderWidth)
	# <windowstyle>: title font/offset, border pixmaps and style colors.
	for windowstyle in skin.findall("windowstyle"):
		style = eWindowStyleSkinned()
		style_id = windowstyle.attrib.get("id")
		if style_id:
			style_id = int(style_id)
		else:
			style_id = 0
		font = gFont("Regular", 20)
		offset = eSize(20, 5)
		for title in windowstyle.findall("title"):
			get_attr = title.attrib.get
			offset = parseSize(get_attr("offset"), ((1,1),(1,1)))
			font = parseFont(get_attr("font"), ((1,1),(1,1)))
		style.setTitleFont(font)
		style.setTitleOffset(offset)
		for borderset in windowstyle.findall("borderset"):
			bsName = str(borderset.attrib.get("name"))
			for pixmap in borderset.findall("pixmap"):
				get_attr = pixmap.attrib.get
				bpName = get_attr("pos")
				filename = get_attr("filename")
				if filename and bpName:
					pngfile = resolveFilename(SCOPE_ACTIVE_SKIN, filename, path_prefix=path_prefix)
					if fileExists(resolveFilename(SCOPE_SKIN_IMAGE, filename, path_prefix=path_prefix)):
						pngfile = resolveFilename(SCOPE_SKIN_IMAGE, filename, path_prefix=path_prefix)
					png = loadPixmap(pngfile, desktop)
					try:
						style.setPixmap(eWindowStyleSkinned.__dict__[bsName], eWindowStyleSkinned.__dict__[bpName], png)
					except:
						pass
		for color in windowstyle.findall("color"):
			get_attr = color.attrib.get
			colorType = get_attr("name")
			color = parseColor(get_attr("color"))
			try:
				style.setColor(eWindowStyleSkinned.__dict__["col" + colorType], color)
			except:
				raise SkinError("Unknown color %s" % colorType)
		x = eWindowStyleManager.getInstance()
		x.setStyle(style_id, style)
	# <margin>: per-desktop safe-area margins.
	for margin in skin.findall("margin"):
		style_id = margin.attrib.get("id")
		if style_id:
			style_id = int(style_id)
		else:
			style_id = 0
		r = eRect(0,0,0,0)
		v = margin.attrib.get("left")
		if v:
			r.setLeft(int(v))
		v = margin.attrib.get("top")
		if v:
			r.setTop(int(v))
		v = margin.attrib.get("right")
		if v:
			r.setRight(int(v))
		v = margin.attrib.get("bottom")
		if v:
			r.setBottom(int(v))
		getDesktop(style_id).setMargins(r)
# name -> (screen element, skin directory) for every registered screen.
dom_screens = {}
def loadSkin(name, scope = SCOPE_SKIN):
	"""Parse a skin XML file and register its <screen> elements in dom_screens.

	Screens carrying an 'id' that does not match display_skin_id are
	discarded; a screen already registered under the same name is replaced.
	"""
	global display_skin_id
	global dom_screens
	filename = resolveFilename(scope, name)
	if fileExists(filename):
		path = os.path.dirname(filename) + "/"
		# Use try/finally so the handle is closed even if the XML parse
		# raises; also avoid shadowing the builtin 'file'.
		f = open(filename, 'r')
		try:
			for elem in xml.etree.cElementTree.parse(f).getroot():
				if elem.tag == 'screen':
					name = elem.attrib.get('name', None)
					if name:
						sid = elem.attrib.get('id', None)
						if sid and (sid != display_skin_id):
							# Screen for a different display -- drop it.
							elem.clear()
							continue
						if name in dom_screens:
							dom_screens[name][0].clear()
						dom_screens[name] = (elem, path)
					else:
						elem.clear()
				else:
					elem.clear()
		finally:
			f.close()
def loadSkinData(desktop):
	"""Load the global data (colors, fonts, styles, ...) of every queued
	skin and register their screens, then release the skin DOM list."""
	global dom_skins, dom_screens, display_skin_id
	skins = dom_skins[:]
	# Process in reverse so skins queued first take priority over later ones.
	skins.reverse()
	for (path, dom_skin) in skins:
		loadSingleSkinData(desktop, dom_skin, path)
		for elem in dom_skin:
			if elem.tag == 'screen':
				name = elem.attrib.get('name', None)
				if name:
					sid = elem.attrib.get('id', None)
					if sid and (sid != display_skin_id):
						# Screen for a different display -- drop it.
						elem.clear()
						continue
					if name in dom_screens:
						dom_screens[name][0].clear()
					dom_screens[name] = (elem, path)
				else:
					elem.clear()
			else:
				elem.clear()
	# The DOM list is no longer needed; free the memory.
	del dom_skins
class additionalWidget:
	"""Plain container describing an extra non-component widget (eLabel or
	ePixmap) declared directly in a screen's skin; its 'widget' and
	'skinAttributes' fields are filled in by readSkin."""
	def __init__(self):
		pass
class SizeTuple(tuple):
	"""An (x, y) tuple that also answers the string-style calls
	(split/strip/str) the skin coordinate parsers may make on it."""
	def split(self, *args):
		return str(self[0]), str(self[1])
	def strip(self, *args):
		return self.__str__()
	def __str__(self):
		return '%s,%s' % (self[0], self[1])
class SkinContext:
	"""Rectangular layout context for nested skin panels.

	Tracks the remaining free area (x, y, w, h).  parse() carves a widget
	rectangle out of that area ("fill", "top", "bottom", "left", "right"
	or an explicit position) and shrinks the remaining free area
	accordingly."""
	def __init__(self, parent=None, pos=None, size=None, font=None):
		if parent is not None:
			if pos is not None:
				pos, size = parent.parse(pos, size, font)
				self.x, self.y = pos
				self.w, self.h = size
			else:
				self.x = None
				self.y = None
				self.w = None
				self.h = None
	def __str__(self):
		return "Context (%s,%s)+(%s,%s) " % (self.x, self.y, self.w, self.h)
	def parse(self, pos, size, font):
		"""Resolve (pos, size) strings within this context; returns
		(SizeTuple(pos), SizeTuple(size)) and consumes the claimed area."""
		if size in variables:
			size = variables[size]
		if pos == "fill":
			# Claim everything that is left.
			pos = (self.x, self.y)
			size = (self.w, self.h)
			self.w = 0
			self.h = 0
		else:
			w,h = size.split(',')
			w = parseCoordinate(w, self.w, 0, font)
			h = parseCoordinate(h, self.h, 0, font)
			if pos == "bottom":
				pos = (self.x, self.y + self.h - h)
				size = (self.w, h)
				self.h -= h
			elif pos == "top":
				pos = (self.x, self.y)
				size = (self.w, h)
				self.h -= h
				self.y += h
			elif pos == "left":
				pos = (self.x, self.y)
				size = (w, self.h)
				self.x += w
				self.w -= w
			elif pos == "right":
				pos = (self.x + self.w - w, self.y)
				size = (w, self.h)
				self.w -= w
			else:
				# Explicit "x,y" position (possibly a skin variable);
				# does not shrink the free area.
				if pos in variables:
					pos = variables[pos]
				size = (w, h)
				pos = pos.split(',')
				pos = (self.x + parseCoordinate(pos[0], self.w, size[0], font), self.y + parseCoordinate(pos[1], self.h, size[1], font))
		return SizeTuple(pos), SizeTuple(size)
class SkinContextStack(SkinContext):
	"""Like SkinContext, but widgets are stacked on top of each other:
	parse() resolves positions/sizes without shrinking the free area."""
	def parse(self, pos, size, font):
		if size in variables:
			size = variables[size]
		if pos == "fill":
			pos = (self.x, self.y)
			size = (self.w, self.h)
		else:
			w,h = size.split(',')
			w = parseCoordinate(w, self.w, 0, font)
			h = parseCoordinate(h, self.h, 0, font)
			if pos == "bottom":
				pos = (self.x, self.y + self.h - h)
				size = (self.w, h)
			elif pos == "top":
				pos = (self.x, self.y)
				size = (self.w, h)
			elif pos == "left":
				pos = (self.x, self.y)
				size = (w, self.h)
			elif pos == "right":
				pos = (self.x + self.w - w, self.y)
				size = (w, self.h)
			else:
				if pos in variables:
					pos = variables[pos]
				size = (w, h)
				pos = pos.split(',')
				pos = (self.x + parseCoordinate(pos[0], self.w, size[0], font), self.y + parseCoordinate(pos[1], self.h, size[1], font))
		return SizeTuple(pos), SizeTuple(size)
def readSkin(screen, skin, names, desktop):
	"""Apply a skin to a Screen instance.

	Looks the screen up by name in dom_screens (first match in *names*
	wins), falling back to the screen's embedded 'skin' attribute.  Then
	walks the screen's XML, collecting attributes for named components,
	wiring source/render/convert chains, and creating additional eLabel/
	ePixmap widgets and nested panels.
	"""
	if not isinstance(names, list):
		names = [names]
	global dom_screens
	# First name with a registered screen wins.
	for n in names:
		myscreen, path = dom_screens.get(n, (None,None))
		if myscreen is not None:
			name = n
			break
	else:
		name = "<embedded-in-'%s'>" % screen.__class__.__name__
	if myscreen is None:
		# Re-use a previously parsed embedded skin, if any.
		myscreen = getattr(screen, "parsedSkin", None)
	if myscreen is None and getattr(screen, "skin", None):
		skin = screen.skin
		print "[SKIN] Parsing embedded skin", name
		if isinstance(skin, tuple):
			# Several variants: pick the one matching display_skin_id.
			for s in skin:
				candidate = xml.etree.cElementTree.fromstring(s)
				if candidate.tag == 'screen':
					sid = candidate.attrib.get('id', None)
					if (not sid) or (int(sid) == display_skin_id):
						myscreen = candidate
						break
			else:
				print "[SKIN] Hey, no suitable screen!"
		else:
			myscreen = xml.etree.cElementTree.fromstring(skin)
		if myscreen:
			screen.parsedSkin = myscreen
	if myscreen is None:
		# Last resort: an empty screen element.
		print "[SKIN] No skin to read..."
		myscreen = screen.parsedSkin = xml.etree.cElementTree.fromstring("<screen></screen>")
	screen.skinAttributes = [ ]
	skin_path_prefix = getattr(screen, "skin_path", path)
	# Root context covers the whole desktop.
	context = SkinContextStack()
	s = desktop.bounds()
	context.x = s.left()
	context.y = s.top()
	context.w = s.width()
	context.h = s.height()
	del s
	collectAttributes(screen.skinAttributes, myscreen, context, skin_path_prefix, ignore=("name",))
	context = SkinContext(context, myscreen.attrib.get('position'), myscreen.attrib.get('size'))
	screen.additionalWidgets = [ ]
	screen.renderer = [ ]
	visited_components = set()
	def process_constant_widget(constant_widget, context):
		# Expand a <constant-widget name="..."/> reference in place.
		get_attr = constant_widget.attrib.get
		wname = get_attr('name')
		if wname:
			try:
				cwvalue = constant_widgets[wname]
			except KeyError:
				if config.crash.skin_error_crash.value:
					print "[SKIN] ERROR - given constant-widget: '%s' not found in skin" % wname
				else:
					print "\033[91m[SKIN] ERROR - given constant-widget: '%s' not found in skin\033[0m" % wname
				return
		if cwvalue:
			for x in cwvalue:
				myscreen.append((x))
			try:
				myscreen.remove(constant_widget)
			except ValueError:
				pass
	def process_none(widget, context):
		pass
	def process_widget(widget, context):
		# <widget name=...> binds to a screen component; <widget source=...
		# render=...> builds a source -> [converters] -> renderer chain.
		get_attr = widget.attrib.get
		wname = get_attr('name')
		wsource = get_attr('source')
		if wname is None and wsource is None:
			print "widget has no name and no source!"
			return
		if wname:
			visited_components.add(wname)
			try:
				attributes = screen[wname].skinAttributes = [ ]
			except:
				print "component with name '" + wname + "' was not found in skin of screen '" + name + "'!"
			collectAttributes(attributes, widget, context, skin_path_prefix, ignore=('name',))
		elif wsource:
			while True: # until we found a non-obsolete source
				scr = screen
				# A dotted source path walks through related screens.
				path = wsource.split('.')
				while len(path) > 1:
					scr = screen.getRelatedScreen(path[0])
					if scr is None:
						print("specified related screen '" + wsource + "' was not found in screen '" + name + "'!")
					path = path[1:]
				source = scr.get(path[0])
				if isinstance(source, ObsoleteSource):
					# Follow the replacement chain of obsolete sources.
					print "WARNING: SKIN '%s' USES OBSOLETE SOURCE '%s', USE '%s' INSTEAD!" % (name, wsource, source.new_source)
					print "OBSOLETE SOURCE WILL BE REMOVED %s, PLEASE UPDATE!" % source.removal_date
					if source.description:
						print source.description
					wsource = source.new_source
				else:
					break
			if source is None:
				if config.crash.skin_error_crash.value:
					raise SkinError("source '" + wsource + "' was not found in screen '" + name + "'!")
				else:
					print("\033[91m[Skin] Error: Source '" + wsource + "' was not found in screen '" + name + "'!")
			wrender = get_attr('render')
			if not wrender:
				print("you must define a renderer with render= for source '%s'" % wsource)
			for converter in widget.findall("convert"):
				ctype = converter.get('type')
				assert ctype, "'convert'-tag needs a 'type'-attribute"
				try:
					parms = converter.text.strip()
				except:
					parms = ""
				try:
					converter_class = my_import('.'.join(("Components", "Converter", ctype))).__dict__.get(ctype)
				except ImportError:
					if config.crash.skin_error_crash.value:
						raise SkinError("[Skin] Error: Converter '%s' not found" % ctype)
					else:
						print("\033[91m[Skin] Error: Converter '%s' not found\033[0m" % ctype)
				# Re-use an identical converter already attached downstream.
				c = None
				for i in source.downstream_elements:
					if isinstance(i, converter_class) and i.converter_arguments == parms:
						c = i
				if c is None:
					c = converter_class(parms)
					c.connect(source)
				source = c
			try:
				renderer_class = my_import('.'.join(("Components", "Renderer", wrender))).__dict__.get(wrender)
			except ImportError:
				if config.crash.skin_error_crash.value:
					raise SkinError("[Skin] Error: Renderer '%s' not found" % wrender)
				else:
					print("\033[91m[Skin] Error: Renderer '%s' not found\033[0m" % wrender)
					return
			renderer = renderer_class()
			renderer.connect(source)
			attributes = renderer.skinAttributes = [ ]
			collectAttributes(attributes, widget, context, skin_path_prefix, ignore=('render', 'source'))
			screen.renderer.append(renderer)
	def process_applet(widget, context):
		# <applet type="onLayoutFinish"> compiles inline python code.
		try:
			codeText = widget.text.strip()
			widgetType = widget.attrib.get('type')
			code = compile(codeText, "skin applet", "exec")
		except Exception, ex:
			raise SkinError("applet failed to compile: " + str(ex))
		if widgetType == "onLayoutFinish":
			screen.onLayoutFinish.append(code)
		else:
			raise SkinError("[Skin] applet type '%s' unknown!" % widgetType)
	def process_elabel(widget, context):
		w = additionalWidget()
		w.widget = eLabel
		w.skinAttributes = []
		collectAttributes(w.skinAttributes, widget, context, skin_path_prefix, ignore=('name',))
		screen.additionalWidgets.append(w)
	def process_epixmap(widget, context):
		w = additionalWidget()
		w.widget = ePixmap
		w.skinAttributes = []
		collectAttributes(w.skinAttributes, widget, context, skin_path_prefix, ignore=('name',))
		screen.additionalWidgets.append(w)
	def process_screen(widget, context):
		def process(w):
			# Skip widgets whose 'conditional' components are all absent.
			conditional = w.attrib.get('conditional')
			if conditional and not [i for i in conditional.split(",") if i in screen.keys()]:
				return
			p = processors.get(w.tag, process_none)
			try:
				p(w, context)
			except SkinError, e:
				print "[SKIN] SKIN ERROR in screen '%s' widget '%s':" % (name, w.tag), e
		# constant-widgets first (they may append widgets to myscreen).
		cw = widget.findall("constant-widget")
		if cw:
			for w in cw:
				process(w)
			for w in myscreen.findall("widget"):
				process(w)
		for w in widget.getchildren():
			if cw and w.tag in ("constant-widget","widget"):
				continue
			process(w)
	def process_panel(widget, context):
		# <panel name=...> embeds another registered screen; a panel may
		# also open a nested layout context ('stack' or carving).
		n = widget.attrib.get('name')
		if n:
			try:
				s = dom_screens.get(n, None)
				if s is None:
					print "[SKIN] Unable to find screen '%s' referred in screen '%s'" % (n, name)
					return
			except KeyError:
				print "[SKIN] Unable to find screen '%s' referred in screen '%s'" % (n, name)
			else:
				process_screen(s[0], context)
		layout = widget.attrib.get('layout')
		if layout == 'stack':
			cc = SkinContextStack
		else:
			cc = SkinContext
		try:
			c = cc(context, widget.attrib.get('position'), widget.attrib.get('size'), widget.attrib.get('font'))
		except Exception, ex:
			raise SkinError("Failed to create skincontext (%s,%s,%s) in %s: %s" % (widget.attrib.get('position'), widget.attrib.get('size'), widget.attrib.get('font'), context, ex) )
		process_screen(widget, c)
	# Tag -> handler dispatch table for screen children.
	processors = {
		None: process_none,
		"constant-widget": process_constant_widget,
		"widget": process_widget,
		"applet": process_applet,
		"eLabel": process_elabel,
		"ePixmap": process_epixmap,
		"panel": process_panel
	}
	try:
		print "[SKIN] processing screen %s:" % name
		context.x = 0 # reset offsets, all components are relative to screen
		context.y = 0 # coordinates.
		process_screen(myscreen, context)
	except Exception, e:
		print "[SKIN] SKIN ERROR in %s:" % name, e
	from Components.GUIComponent import GUIComponent
	nonvisited_components = [x for x in set(screen.keys()) - visited_components if isinstance(x, GUIComponent)]
	assert not nonvisited_components, "the following components in %s don't have a skin entry: %s" % (name, ', '.join(nonvisited_components))
	# This may look pointless, but it unbinds 'screen' from the nested
	# scopes above, allowing the screen to be garbage-collected.
	screen = None
	visited_components = None
def parseAvailableSkinColor(color):
if color in colorNamesHuman:
return colorNamesHuman[color]
else:
print "color %s ist not available at used skin" % color
return None
| trunca/enigma2 | skin.py | Python | gpl-2.0 | 39,906 |
# Add any code that updates the current probability
# values of any of the nodes here.
# For example, here is a method that updates the probability of
# a single node, where this node is assumed to have a single parent.
def update_node_with_one_parent(n):
'''
For all possible values pv of the current node,
For all possible values ppv of the parent,
Look up the conditional probability of pv given ppv.
and multiply it by the current prob. of that parent state (ppv)
and accumulate these to get the current probability of pv.
'''
if len(n.parents)!= 1:
print "The function update_node_with_one_parent cannot handle node "+n.name
print "It does not have exactly one parent."
return
parent = n.parents[0]
for pv in n.possible_values:
n.current_prob[pv] = 0.0
for ppv in n.parents[0].possible_values:
conditional = n.name+'='+str(pv)+'|'+parent.name+'='+str(ppv)
n.current_prob[pv] += n.p[conditional] * parent.current_prob[ppv]
def gen_cartesian_product(sets):
    '''Return the cartesian product of a list of sets (lists of values),
    as a list of lists.

    Example: [['a','b'],[0,1],[7,8,9]] gives a 12-element list of triples.
    The product of zero sets is [[]] (one empty assignment), matching the
    mathematical convention.
    '''
    # Base case for empty input; the original recursed forever here.
    if not sets:
        return [[]]
    if len(sets) == 1:
        # Wrap each element in a singleton list.  A comprehension works on
        # both Python 2 and 3; the original relied on py2's list-returning
        # map(), which breaks under py3 (list + map-object is a TypeError).
        return [[elt] for elt in sets[0]]
    subproduct = gen_cartesian_product(sets[1:])
    prod = []
    for elt in sets[0]:
        # Prepend elt to every tuple of the product of the remaining sets.
        prod += [[elt] + tup for tup in subproduct]
    return prod
def update_node_with_k_parents(n):
'''
For all possible values pv of the current node,
For all possible values ppv of each of the parents,
Look up the conditional probability of pv given ppv.
and multiply it by the current prob. of that parent state (ppv)
and accumulate these to get the current probability of pv.
'''
print "Updating node: "+n.name
if len(n.parents) < 1:
print "The function update_node_with_k_parents cannot handle node "+n.name
print "It does not have any parents."
return
cartesian_prod = gen_cartesian_product(map(lambda p: p.possible_values, n.parents))
parent_names = map(lambda p: p.name, n.parents)
for pv in n.possible_values:
n.current_prob[pv] = 0.0
print " Updating current prob. of "+pv
for ppv_tuple in cartesian_prod:
print " Adding the contribution for "+str(ppv_tuple)
conditional = n.name+'='+pv+'|'+str(parent_names) +'='+str(ppv_tuple)
parent_vector_prob = reduce(lambda a,b:a*b, map(lambda p, pv:p.current_prob[pv], n.parents, ppv_tuple))
n.current_prob[pv] += n.p[conditional] * parent_vector_prob
#update_node_with_one_parent(nodeB)
| uraplutonium/adtree-py | src/BayesUpdating.py | Python | gpl-2.0 | 2,726 |
#!bpy
"""
to run:
(aAtually I have not been able to run this from command line - the thing
exits without rendering. If I add bpy.ops.render, it alwys renders layers, rather
then compositing output)
blender -b --python this_fnm.py
"""
import bpy
from math import radians
import fnmatch
import os
###################################
def delete_old_stuff():
    """Reset the current Blender scene so the script can be re-run:
    delete all mesh/lamp/font objects, drop every render layer but one,
    and remove all materials and textures."""
    # escape edit mode
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')
    # delete all mesh objects
    bpy.ops.object.select_by_type(type='MESH')
    bpy.ops.object.delete()
    # delete all lamps
    bpy.ops.object.select_by_type(type='LAMP')
    bpy.ops.object.delete()
    # delete all font objects
    bpy.ops.object.select_by_type(type='FONT')
    bpy.ops.object.delete()
    # delete all render layers but one
    # NOTE(review): removing while stepping active_index forward may skip
    # entries as the collection shrinks -- confirm against the bpy API.
    render_layers = bpy.context.scene.render.layers
    for active_index in range (1,len(render_layers)):
        render_layers.active_index = active_index
        render_layers.remove(render_layers.active)
    # delete all materials
    for i in bpy.data.materials.values():
        bpy.data.materials.remove(i)
    # delete all textures
    for i in bpy.data.textures.values():
        bpy.data.textures.remove(i)
#####################################################################
def makeGlossyTextured (object, image_loaded, material_name):
    """Create and return a Cycles node material named *material_name* that
    maps *image_loaded* onto *object* via its active UV layer, mixed with a
    white zero-roughness glossy shader.

    The node graph built here is:
      UVMap -> ImageTexture -> DiffuseBSDF -+
                                            +-> MixShader -> MaterialOutput
                               GlossyBSDF --+
    """
    material = bpy.data.materials.new(material_name)
    # as soon as we do this we have Diffuse BSDF and Material Output nodes, linked:
    material.use_nodes = True
    nodes = material.node_tree.nodes
    links = material.node_tree.links
    # uv map node
    uv_node = nodes.new('ShaderNodeUVMap')
    uv_node.uv_map = object.data.uv_textures.active.name
    # image texture node
    image_texture_node = nodes.new(type='ShaderNodeTexImage')
    image_texture_node.image = image_loaded
    links.new(uv_node.outputs['UV'], image_texture_node.inputs['Vector'])
    # diffuse node and Material output are already generated, and linked
    # so we just need to pipe in the texture into Diffuse BSDF node
    diffuse_node = nodes.get("Diffuse BSDF")
    links.new(image_texture_node.outputs[0], diffuse_node.inputs[0])
    # add a glossy BSDF (pure white, perfectly sharp reflection)
    glossy_node = nodes.new(type='ShaderNodeBsdfGlossy')
    glossy_node.inputs["Color"].default_value = [1.0, 1.0, 1.0, 1.0]
    glossy_node.inputs["Roughness"].default_value = 0.0
    # add a mix node blending diffuse and glossy
    mix_node = nodes.new(type='ShaderNodeMixShader')
    links.new(diffuse_node.outputs[0], mix_node.inputs[1]) # whats mix.inputs[0]?
    links.new(glossy_node.outputs[0], mix_node.inputs[2])
    # output of the mix node into Material Output
    mat_output = nodes.get("Material Output")
    links.new(mix_node.outputs[0], mat_output.inputs[0])
    return material
#####################################################################
def makeEmission (material_name):
    """Create and return a Cycles node material named *material_name* that
    emits a fixed light-blue light at strength 20 (used for an area-light
    style emission plane)."""
    material = bpy.data.materials.new(material_name)
    # as soon as we do this we'll have Diffuse BSDF and Material Output nodes, linked:
    material.use_nodes = True
    nodes = material.node_tree.nodes
    links = material.node_tree.links
    # add an emission node; RGBA colour and strength are hard-coded
    emission_node = nodes.new(type='ShaderNodeEmission')
    emission_node.inputs["Color"].default_value = [0.335, 0.583, 0.8, 1.0]
    emission_node.inputs["Strength"].default_value = 20.0
    # wire the emission straight into the material output
    mat_output = nodes.get("Material Output")
    links.new(emission_node.outputs[0], mat_output.inputs[0])
    return material
#####################################################################
def set_camera(scene):
    """Position and orient the scene camera with hard-coded values chosen
    for this render (slight downward tilt, raised and pulled back)."""
    # Set camera rotation in euler angles
    scene.camera.rotation_mode = 'XYZ'
    scene.camera.rotation_euler[0] = radians(12)
    scene.camera.rotation_euler[1] = 0.0
    scene.camera.rotation_euler[2] = 0.0
    # Set camera translation
    scene.camera.location.x = 0.34
    scene.camera.location.y = -1.2
    scene.camera.location.z = 6.7
#####################################################################
def set_lights(scene):
    """Light the scene: enable ambient occlusion, add a tinted sun lamp,
    and add a large emission plane (area-light substitute).  Both lights
    are excluded from shadow casting."""
    # ambient
    scene.world.light_settings.use_ambient_occlusion = True
    scene.world.light_settings.distance = 2
    # spotlight - sun
    bpy.ops.object.lamp_add(type='SUN', location=(-2.0, 0.32, 6.5), rotation=(radians(-21), radians(-5), radians(69)))
    lamp = bpy.context.object
    #lamp.color = (0.43, 0.78,1.0,1.0) # the code does not complain, but not sure if it does anything
    # lamp does not have strength, but the associated rendering node does (sigh)
    # lamp.strength = 5.0 # this does not work
    lamp.data.node_tree.nodes['Emission'].inputs['Strength'].default_value= 5.0
    lamp.data.node_tree.nodes['Emission'].inputs['Color'].default_value= (0.43, 0.78,1.0,1.0)
    lamp.cycles_visibility.shadow = False
    # light emission plane, scaled into a long strip and aimed at the scene
    bpy.ops.mesh.primitive_plane_add(location=(-10.0, 3.5, 12.0),
                                     rotation=(radians(-104), radians(-98), radians(80)))
    emission_plane = bpy.context.object
    emission_plane.scale = (3.3, -5.5, -28.3)
    emission_plane.name = "emission plane"
    emission_plane.data.materials.append (makeEmission ("emission mat"))
    emission_plane.cycles_visibility.shadow = False
#####################################################################
def create_object():
    """Create and return the subject of the render: a subdivided cube named
    "cube" on layer 1, with a Catmull-Clark subsurface modifier, smooth
    shading, and a cube-projected UV texture layer."""
    bpy.ops.mesh.primitive_cube_add(location=(0.0, 0.0, 0.7), enter_editmode=True, layers= [l==0 for l in range(20)])
    bpy.ops.mesh.subdivide(number_cuts=4)
    obj = bpy.context.object
    obj.name = "cube"
    # lift the cube so it sits higher relative to the floor plane
    obj.location.z += obj.dimensions.z/4
    obj.modifiers.new("cube_subsurf", "SUBSURF")
    obj.modifiers["cube_subsurf"].subdivision_type = 'CATMULL_CLARK'
    obj.modifiers["cube_subsurf"].render_levels = 4
    mesh = obj.data
    bpy.ops.object.editmode_toggle()
    # show mesh as smooth
    for p in mesh.polygons:
        p.use_smooth = True
    # texture layer: cube projection onto the new UV layer
    bpy.ops.mesh.uv_texture_add()
    bpy.ops.object.mode_set(mode='EDIT')
    bpy.ops.uv.cube_project(cube_size=0.1*obj.dimensions.x)
    bpy.ops.object.mode_set(mode='OBJECT')
    return obj
#####################################################################
def compositing(scene, outdir, outfile_base_name):
    """Split the render into "main" (layer 0) and "shadow" (layer 1) render
    layers, then build a compositor node graph that blurs the shadow pass
    and alpha-composites the main render over it, writing the result to
    *outdir*/*outfile_base_name* via a File Output node."""
    # let's try to work in some layers
    render_layers = scene.render.layers
    render_layers.active.name = "main"
    render_layers.new("shadow")
    render_layers["main"].layers = [l==0 for l in range(20)]
    render_layers["shadow"].layers = [l==1 for l in range(20)]
    render_layers["shadow"].use_pass_shadow = True
    scene.layers[0] = True
    scene.layers[1] = True
    # and now ... compositing!
    # I'll want transparent background
    scene.cycles.film_transparent = True
    # switch on nodes and get reference
    scene.use_nodes = True
    tree = scene.node_tree
    # the default nodes are Composite and RenderLayers, that contains out main layer already
    # I better remove them if I am going to run this script repeatedly
    for node in tree.nodes:
        tree.nodes.remove(node)
    links = tree.links
    main_layer_node = tree.nodes.new('CompositorNodeRLayers')
    main_layer_node.layer = "main"
    main_layer_node.location = 200, -100
    shadow_layer_node = tree.nodes.new('CompositorNodeRLayers')
    shadow_layer_node.layer = "shadow"
    shadow_layer_node.location = -400, 100
    # note here: mix, not math
    subtract_node = tree.nodes.new('CompositorNodeMixRGB')
    subtract_node.blend_type = "SUBTRACT" # the default is add
    subtract_node.location = -200, 200
    # inputs[0] here is 'Fac' (?)
    links.new(shadow_layer_node.outputs['Alpha'], subtract_node.inputs[1])
    links.new(shadow_layer_node.outputs['Shadow'], subtract_node.inputs[2])
    # alpha = (layer alpha) - (shadow pass), i.e. opaque only where shadowed
    set_alpha_node = tree.nodes.new('CompositorNodeSetAlpha')
    set_alpha_node.location = 0, 200
    links.new(subtract_node.outputs['Image'], set_alpha_node.inputs['Alpha'])
    # soften the shadow edges
    blur_node = tree.nodes.new('CompositorNodeBlur')
    blur_node.filter_type = 'FAST_GAUSS'
    blur_node.size_x = 5
    blur_node.size_y = 5
    blur_node.location = 200, 200
    links.new(set_alpha_node.outputs['Image'], blur_node.inputs['Image'])
    # composite the main render over the blurred shadow
    alpha_over_node = tree.nodes.new('CompositorNodeAlphaOver')
    alpha_over_node.location = 400, 0
    # inputs[0] here is 'Fac' (?)
    links.new(blur_node.outputs['Image'], alpha_over_node.inputs[1])
    links.new(main_layer_node.outputs['Image'], alpha_over_node.inputs[2])
    # create output node
    #out_node = tree.nodes.new('CompositorNodeComposite')
    out_node = tree.nodes.new('CompositorNodeOutputFile')
    out_node.base_path = outdir
    out_node.file_slots[0].path = outfile_base_name
    out_node.location = 600,0
    links.new(alpha_over_node.outputs['Image'], out_node.inputs['Image'])
    # create Viewer Node
    # viewer_node = tree.nodes.new('CompositorNodeViewer')
    # viewer_node.location = 600, 200
    # links.new(alpha_over_node.outputs['Image'], viewer_node.inputs['Image'])
###################################
if __name__ == '__main__':
    # Script driver: reset the scene, set up camera/lights, build the
    # textured cube, and configure compositing so the shadow lands on a
    # transparent background.
    delete_old_stuff()
    scene = bpy.context.scene
    scene.render.engine="CYCLES"
    scene.unit_settings.system='METRIC'
    # texture image is expected next to the current working directory
    image_loaded = bpy.data.images.load(os.path.abspath('Bck_v1.png'))
    set_camera(scene)
    set_lights(scene)
    # floor - that's where the shadows are cast
    # we are placing the floor in the second layer
    bpy.ops.mesh.primitive_plane_add(location=(0.0, 0.0, 0.0), layers=[l==1 for l in range(20)])
    floor = bpy.context.object
    floor.scale = ((4,4,4))
    floor.name = "floor"
    floor.data.materials.append (bpy.data.materials.new('Plain Diffuse'))
    # cube
    object = create_object()
    # image as a texture on the surface + gloss
    material = makeGlossyTextured (object, image_loaded, 'Img Txtr Material')
    # bind object and material
    object.data.materials.append(material)
    # do some compositing work to get shadow on transparent background;
    # render width follows the cube's x/y aspect ratio
    scene.render.resolution_y = 512
    scene.render.resolution_x = int (scene.render.resolution_y*(object.dimensions.x/object.dimensions.y))
    compositing(scene, os.getcwd(), "test")
    # not working:
    bpy.ops.render.render(use_viewport=True)
| ivanamihalek/blender | texture_shadow_no_bg/cube_to_png.py | Python | gpl-2.0 | 10,222 |
#Aditya Joshi
#Enumerating Oriented Gene Ordering
from itertools import permutations,product
from math import fabs
# Length of the gene ordering to enumerate, read from stdin (Python 2 raw_input).
n = int(raw_input())
def make_set(n):
    """Return the list [1, 2, ..., n] (empty for n < 1).

    Uses list(range(...)) instead of repeated concatenation; portable
    across Python 2 and 3 and linear instead of quadratic.
    """
    return list(range(1, n + 1))
def plusAndMinusPermutations(items):
    """Yield every permutation of *items* combined with every possible
    assignment of +/- signs to its positions (n! * 2**n results)."""
    for perm in permutations(items, len(items)):
        for signs in product([-1, 1], repeat=len(items)):
            yield [value * sign for value, sign in zip(perm, signs)]
def array_to_string(list):
    """Return the elements of *list* joined by single spaces.

    Bug fix: the original hard-coded list[0] and list[1], silently
    dropping elements whenever the ordering length n != 2.
    """
    return " ".join(str(item) for item in list)
count = 0
# Print every signed permutation of [1..n], one per line, followed by the
# total number printed (should equal n! * 2**n).
for x in plusAndMinusPermutations(make_set(n)):
    print array_to_string(x)
    count += 1
print count
| adijo/rosalind | old/gene_enumerations.py | Python | gpl-2.0 | 682 |
from __future__ import division
from django.conf import settings
from django.core.management.base import BaseCommand
import time
import datetime
import logging
from cali_water.usage_data_tasks import TasksForMonthlyWaterUseReport
logger = logging.getLogger("accountability_tracker")
class Command(BaseCommand):
    # Django management command: synchronously runs the monthly water-use
    # report tasks and prints a completion timestamp to stdout.
    help = "Begin a request to State Water Resources Board for latest usage report"
    def handle(self, *args, **options):
        """Entry point invoked by manage.py; args/options are unused."""
        task_run = TasksForMonthlyWaterUseReport()
        # NOTE(review): _init is a private method of the task class --
        # presumably it kicks off the fetch/parse pipeline; confirm there.
        task_run._init()
        self.stdout.write("\nTask finished at %s\n" % str(datetime.datetime.now()))
| SCPR/accountability-tracker | cali_water/management/commands/usage_tasks.py | Python | gpl-2.0 | 597 |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# vim:ai:sta:et:ts=4:sw=4:sts=4
"""kernelng 0.x
Tool for maintaining customized overlays of kernel-ng.eclass-based ebuilds
Copyright 2005-2014 Gentoo Foundation
Copyright (C) 2014 Gregory M. Turner <gmt@be-evil.net>
Distributed under the terms of the GNU General Public License v2
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, version 2 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
"""
import os
import sys
import re
from collections import OrderedDict
from itertools import chain, islice, count, repeat
import click
from click._compat import iteritems
from .output import has_verbose_level, echov, sechov, trace, suppress_tracing
import portage
# Lazily import portage uid/gid; fall back to Gentoo's conventional 250/250
# when portage is unavailable.
try:
    portage.proxy.lazyimport.lazyimport(globals(),
        'portage.data:portage_uid,portage_gid')
except ImportError:
    portage_uid = 250
    portage_gid = 250
# eprefixifiable dummy value
EPREFIX = "@GENTOO_PORTAGE_EPREFIX@"
# non-eprefixified fallback behavior: ask portage or assume empty
if EPREFIX == "@GENTOO_%s_EPREFIX@" % "PORTAGE":
    try:
        from portage.const import EPREFIX as _EPREFIX
    except ImportError:
        _EPREFIX = ''
    EPREFIX = _EPREFIX
# Program name taken from argv[0]'s basename; 'kernelng' if argv is empty.
PROGNAME = sys.argv[0].split(os.path.sep)[-1] if len(sys.argv) >= 1 else 'kernelng'
PROGDESC = 'kernel-ng-util'
FRAMEWORK = 'kernel-ng'
PORTAGE_CONF_DIR = '/etc/portage'
REPOS_CONF = 'repos.conf'
# <EPREFIX>/etc/portage/repos.conf
REPOS_CONF_FILE = ''.join((
    EPREFIX,
    PORTAGE_CONF_DIR,
    os.path.sep,
    REPOS_CONF
))
KERNELNG_CONF = '%s.conf' % FRAMEWORK
KERNELNG_CONF_DIR = '/etc/%s' % FRAMEWORK
EKERNELNG_CONF_DIR = '%s%s' % (EPREFIX, KERNELNG_CONF_DIR)
# <EPREFIX>/etc/kernel-ng/kernel-ng.conf
KERNELNG_CONF_FILE = ''.join((
    EKERNELNG_CONF_DIR,
    os.path.sep,
    KERNELNG_CONF,
))
# Matches "%(name)X" substitution tokens (X: any letter) used by subconsts().
CONST_RE = re.compile('%\([^)]*\)[^\W\d_]', re.UNICODE)
# Substitution dictionary consumed by subconsts().
SUBCONSTS = {
    'prog': PROGNAME,
    'progdesc': PROGDESC,
    'framework': FRAMEWORK,
    'kngconf': KERNELNG_CONF,
    'kngconffile': KERNELNG_CONF_FILE,
    'eprefix': EPREFIX,
    'lc': '%s%s' % (
        click.style('LOADCONFIG', fg='blue', bold=True),
        click.style(':', fg='white', bold=True)
    )
}
# Config-file parsing regexes: comment/blank line, "[section]" header,
# and "name = value" assignment (value may be empty).
CONFIG_COMMENT_RE = re.compile('\s*#|\s*$', re.UNICODE)
CONFIG_SECTION_RE = re.compile('\s*\[\s*([^][]*[^][\s]+)\s*\]\s*$', re.UNICODE)
CONFIG_SETTING_RE = re.compile('\s*([^\d\W][\w-]*)\s*=\s*($|.*\S+)\s*$', re.UNICODE)
def subconsts(text, subconsts=SUBCONSTS):
    """Substitute %(name)X constants from *subconsts* into *text*.

    Text containing no CONST_RE token is returned unchanged; substitution
    failures are reported via echov() and re-raised.
    """
    if not re.search(CONST_RE, text):
        return text
    try:
        return text % subconsts
    except ValueError as e:
        echov('subconsts: error substituting in "%s": %s.' % (text, str(e)), err=True)
        raise
# convenience alias
_sc = subconsts
class KNGConfigItemUnknownReason(Exception):
    """Raised when a KNGConfigItem is assigned a reason outside the valid set."""
    def __init__(self, key, value, reason):
        message = 'Unknown KNGConfigItem reason "%s", assigning "%s" to "%s"' % (
            reason, value, key)
        super(KNGConfigItemUnknownReason, self).__init__(message)
VALID_KNGCONFIGITEMREASONS=['stored', 'default', 'override']
def ValidateKNGConfigItemReason(key, value, reason):
    """Raise KNGConfigItemUnknownReason unless *reason* is a recognized value."""
    if reason in VALID_KNGCONFIGITEMREASONS:
        return
    raise KNGConfigItemUnknownReason(key, value, reason)
# KNGConfigItem, KNGConfigItems, KNGConfig, and fetal-ness/daddy
# ==============================================================
# The interface here is tolerable but the plumbing is ugly and inelegant
# due to code evolution by incremental hacking. The whole thing should probably be
# scrapped and re-coded from scratch, truth be told, now that I've figued out
# what it is I'm trying to accomplish.
#
# The basic data-structure we are building could be thought of as a dict of {<str>: <list>}
# items; the lists could be thought of as containing (<str>, <str>) tuples. In fact,
# that's an oversimplification. The dict is actually a KNGConfig, which is an OrderedDict
# subclass that logically represents the entire contents of a kernel-ng.conf file, with
# each dictionary key representing a section. The list is actually a KNGConfigItems instance
# and the list-items are KNGConfigItem instances (the analogue of the (<str>, <str>) tuples).
# Each KNGConfigItem either represents a configuration-file comment or a standard configuration-file
# line-item (i.e.: key=value).
#
# We use the OrderedDict so that we can round-trip the configuration file without re-ordering
# the sections. Initially this will be fairly broken, but the enhancements to achieve full
# .conf => OO => .conf round-trip capabilities are simply to saving off some formatting metadata
# at the KNGConfigItem level during "deserialization" -- aka parsing, what-have-you. First,
# .conf-file deserialization of /any/ sort will need to be implemented :S.
#
# The motivation for much of the crazyness below is that I wanted consumers to be able to say:
# "kngconfig['foo']['bar'] = 'baz'", and have the bar setting in the foo section recieve a value of
# 'baz'. Even so, thereafter, kngconfig['foo']['bar'] would not be 'baz', but a KNGConfigItem
# with value 'baz' and key 'bar', but that's fine, kngconfig['foo']['bar'].value would be our 'baz'.
#
# To achieve this, I used the __missing__ feature at the top dict level, added hybrid-dict features
# to KNGConfigItems (so that KNGConfigItems.__getattr__ will search the KNGConfigItem instances
# it contains for the provided index, or otherwise call a "_missing" API which works just like
# "__missing__" but, obviously is not a built-in magic name thingy so-preferably-not-to-speak.
# BUT, crap, I thought, this would mean that as soon as the API consumer simply looks at
# kngconfig['foo'], the 'foo' section must come into being. Which wouldn't be a problem except
# that a 'kernelng_foo' package would fail to be generated during "kernelng overlay update" due
# to (amazingly!) there being no */foo package in all of portage. Clearly this would not be what
# most API consumers meant by kngconfig['foo'].
#
# To solve this dilemma, I created the concept of "fetal" KNGConfigItem and KNGConfigItems
# instances. In this scheme, two new properties are created: "daddy" and "fetal". Daddy maps back
# to the container that contains the instance (nb: implications wrt. i.e., deepclone() are not
# dealt with yet); meanwhile, fetal tells us:
#
# KNGConfigItem: if the instance has never had a non-None "value" property set
# KNGConfigItems: if the instance has ever had any non-fetal KNGConfigItem instances in it.
#
# Once these are "born", there is back-propogation through the "daddy"s so that the KNGConfigItems
# get born themselves, the instant they become grandparents, if necessary.
#
# The purpose of all these acrobatics is to censor the fetuses during deserialization, ensuring
# that no gross side effects occur due to the objects generated by __missing__ and _missing.
#
# Yes, I know this is all kinds of ugly but the interface is almost reasonable (eliminating the
# requirement to pass a "daddy" keyword argument to constructors would be nice and will eventually
# get done; the ability for multiple containers to be pregnant with the same fetus is not
# needed but my implementation also sort-of breaks the ability for multiple containers to contain
# the same non-fetal containee, which clearly sucks and should also be fixed).
#
# Each KNGConfigItem has a "reason" property which explains its semantic purpose. Three "reasons"
# are supported: "stored" is the standard reason and simply means the KNGConfigItem represents
# a setting which should persist when the KNGConfig containing it is deserialized. The "default"
# reason signifies that the key=>value mapping is not stored in the configuration file, and serves
# only as an in-memory means of tracking the default value (a default property also stores the
# default value if applicable; in this case, del(conf['foo']['bar']) will not delete the
# conf['foo']['bar'] KNGConfigItem from conf['foo'] -- instead it will set its reason to "default"
# which will cause the KNGConfigItem to disappear in the deserialized .conf file). The third
# "reason" is as-yet unused and probably broken: "override" is intended to represent a temporary
# in-memory # change to the configuration that will not persist. The problem is that there is no
# provisions yet in place to track the persistent value being overriden. Perhaps the "override"
# reason is not needed and can be removed.
class KNGConfigItem(object):
    """One line of a kernel-ng.conf file: either a comment (key '__comment__')
    or a key=value setting.

    A setting carries a "reason" (see VALID_KNGCONFIGITEMREASONS): 'stored'
    items are serialized, 'default' items only record the in-memory default,
    'override' is a temporary in-memory value.  An item with value None is
    "fetal" -- a placeholder created by lookup machinery that becomes real
    (and christens its containing KNGConfigItems, its "daddy") on first
    assignment.  See the long design comment above this class.
    """
    @trace
    def __init__(self, key, value='__comment__', default=None, reason=None, daddy=None):
        '''
        This constructor has two forms: KNGConfigItem(<comment-str>) and
        KNGConfigItem(<key>, <value>). default and reason apply only to the second
        form -- for comments, the default is always None and the reason is always 'stored'
        '''
        if reason is not None:
            ValidateKNGConfigItemReason(key, value, reason)
        if value == '__comment__':
            # comment form: the single positional argument is the comment text
            key, value = value, key
            default=None
            reason='stored'
        elif reason is None and default is None:
            reason = 'stored'
        elif reason is None: # and default is set
            if value == default:
                # note: value is not None because default is not None
                reason = 'default'
            elif value is not None:
                reason = 'stored'
            # else, None is the right thing to have in reason for now, we'll have
            # to figure it out when we are born.
        self._key = key
        self._value = value
        if reason == 'default' and default is None:
            self._default = value
        else:
            self._default = default
        self._reason = reason
        self._daddy = daddy
    @suppress_tracing
    def __repr__(self):
        if self.iscomment:
            return 'KNGConfigItem(%r, reason=%r)' % (self.comment, self.reason)
        else:
            return 'KNGConfigItem(%r, %r, default=%r, reason=%r)' % (
                self.key, self.value, self.default, self.reason)
    @property
    def key(self):
        # setting name, or '__comment__' for comment items
        return self._key
    # note: "value" as a name for a property makes for confusing reading here but
    # foo.key/foo.value is imo a nice self-evident naming scheme for our consumers
    @property
    def value(self):
        return self._value
    @value.setter
    @trace
    def value(self, newvalue):
        if newvalue is None:
            # We need to know if we have left "fetal mode" during an assignment;
            # we track "fetal mode" using a convention that value == None <==> is_fetal
            # Values should always be strings anyhow (nb: I've deliberately opted not
            # to enforce that for pythonicicity reasons).
            raise ValueError('None is not an allowed value for KNGConfigItems.')
        if self._value == newvalue:
            # avoid any side-effects as no change is required.
            return
        if self._value is None:
            # leaving fetal mode: a fetal item must have a daddy to notify
            if self._daddy is None:
                raise ValueError('fetal-mode state-machine thinko')
            else:
                # it is possible that determining reason has been deferred 'till now
                if self._reason is None:
                    if self._default is None:
                        self._reason = 'stored'
                    elif newvalue == self._default:
                        self._reason = 'default'
                    else:
                        self._reason = 'stored'
                self._daddy.christen()
        if self.reason == 'default':
            # if the value has changed to a non-default value, then
            # reason will need to change to 'stored'. Pretty sure the
            # newvalue != self._default is a noop but relying on that
            # here seems obscure and future-fragile.
            if self._default is not None and newvalue != self._default:
                self.reason = 'stored'
            # else: nothing to do: once stored, always stored.
        self._value = newvalue
    @value.deleter
    @trace
    def value(self):
        # deleting a setting reverts it to its default (if any) rather than
        # removing it; otherwise the item is removed from its container
        if self._default is not None:
            self._value = self._default
            self._reason = 'default'
        elif self._daddy is not None:
            del self._daddy[self.key]
        else:
            raise ValueError('Unanticipated wierd corner case. This is a bug.')
    @property
    def default(self):
        return self._default
    @property
    def reason(self):
        return self._reason
    @reason.setter
    @trace
    def reason(self, value):
        ValidateKNGConfigItemReason(self.key, self.value, value)
        self._reason = value
    @property
    def fetal(self):
        # fetal <=> value never assigned (None by convention)
        return self._value is None
    @property
    @trace
    def isexplicit(self):
        # True when this item should appear in the serialized .conf file
        if self.reason == 'default':
            return False
        elif self.reason == 'override':
            # FIXME: This result suggests "isexplicit" is the wrong name for this.
            return False
        elif self.value is None:
            # fetal mode
            return False
        else:
            return True
    @property
    def iscomment(self):
        return (self.key == '__comment__')
    @property
    def comment(self):
        # comment text (only meaningful when iscomment is True)
        return self.value
    @property
    def daddy(self):
        # owning KNGConfigItems container, or None
        return self._daddy
    @trace
    def __eq__(self, other):
        # equality compares key, value and reason (default is ignored)
        if isinstance(other, KNGConfigItem):
            if other.key != self.key:
                return False
            if other.value != self.value:
                return False
            if other.reason != self.reason:
                return False
            return True
        else:
            # comparisons against non-KNGConfigItem objects are unsupported
            return NotImplemented
    @trace
    def __ne__(self, other):
        return not (self == other)
    @trace
    def __gt__(self, other):
        # lexicographic ordering on (key, value, reason)
        if isinstance(other, KNGConfigItem):
            return self.key > other.key or (self.key == other.key and self.value > other.value) \
                or (self.key == other.key and self.value == other.value and self.reason > other.reason)
        else:
            return NotImplemented
    @trace
    def __le__(self, other):
        return not self.__gt__(other)
    @trace
    def __lt__(self, other):
        return (not self.__eq__(other)) and self.__le__(other)
    @trace
    def __ge__(self, other):
        return self.__eq__(other) or self.__gt__(other)
# module-level cache for KNGExampleConfigData()
kng_example_config_data = None
@trace
def KNGExampleConfigData():
    '''Build (once) and return a copy of the example kernel-ng.conf content.

    The result is an OrderedDict mapping section name -> tuple of entries;
    each entry is either a comment/blank-line string or an inner tuple (see
    the format comment below).  Every string is passed through subconsts()
    for %(...)s constant substitution before being cached.
    '''
    global kng_example_config_data
    if kng_example_config_data:
        return kng_example_config_data.copy()
    result = OrderedDict()
    # format of the innermost key=>val tuples:
    # ( key, val, [force_stored=False, [no_default=False]] )
    result['implicit_global'] = (
        '# %(framework)s.conf',
        '',
        '# This file is designed to contain sensible default values for',
        '# a plurality of real-world systems; however, it can and often should',
        '# be modified to match your system\'s needs.',
        '#',
        '# %(framework)s.conf has a "Windows .ini"-style syntax, consisting of',
        '# name => value mappings, i.e.:',
        '#',
        '#     <name> = <value>',
        '#',
        '# and section headings enclosed in brackets, i.e.:',
        '#',
        '#     [<section>]',
        '#',
        '# Each section (with one exception, described below) corresponds to',
        '# a portage package atom.  For example, the header:',
        '#',
        '#     [=sys-kernel/gentoo-sources-3.15*]',
        '#',
        '# would contain specifics about how to map from portage packages',
        '# matching the "=sys-kernel/gentoo-sources-3.15*" portage "atom"',
        '# to %(framework)s packages in the site-specific %(framework)s',
        '# overlay (n.b.: the %(prog)s utility contains all the secret sauce to',
        '# create and maintain these site-specific overlays.  Run "%(prog)s -h",',
        '# or "man %(prog)s" if that\'s Greek to you, and you\'re not Greek).',
        '#',
        '# Lines beginning with a "#" are treated as comments.  Empty lines',
        '# are ignored.  Quotation marks are not needed and will not be',
        '# preserved by the %(prog)s utility -- their use is discouraged.',
        '#',
        '# A "[global]" section is also supported.  Any "<name> = <value>"',
        '# pairs appearing before any section header are considered',
        '# implicitly to be in the global section, so the "[global]" header',
        '# may be omitted, so long as all global settings come first.',
        '',
    )
    result['global'] = (
        '',
        '# overlay',
        '# -------',
        '# default value: site-%(framework)s',
        '# scope: global only',
        '#',
        '# Name of the site-wide %(framework)s portage overlay.',
        '# The overlay need not exist to be named here.  If it does',
        '# not exist it will be created automatically as required or',
        '# when the "%(prog)s overlay create" command is executed.',
        '',
        ( 'overlay', 'site-%(framework)s', True ),
        '',
        '# name_prefix',
        '# ==========',
        '# default value: %(prog)s_',
        '# scope: any',
        '#',
        '# Prefix applied to ng-sources package names in the overlay.  For',
        '# example, if name_prefix is "foo", then the %(framework)s package',
        '# mirroring portage kernel package sys-kernel/bar-sources in the',
        '# %(framework)s overlay would be named sys-kernel/foobar-sources.',
        '# Making this empty would result in identically named packages and',
        '# is therefore strongly discouraged, although not technocratically',
        '# prohibited by %(progdesc)s.',
        '',
        ( 'name_prefix', '%(prog)s_' ),
        '',
        '# no_name_prefix',
        '# ==============',
        '# default value: no_',
        '# scope: any',
        '#',
        '# Prefix applied to no-sources package names in the overlay.  For',
        '# example, if no_name_prefix is "no_", then the no-sources package',
        '# mirroring the portage kernel package sys-kernel/shit-sources in',
        '# the %(framework)s overlay would be named sys-kernel/no_shit-sources.',
        '# Making this empty would result in identically named packages and',
        '# is therefore strongly discouraged, although not technocratically',
        '# prohibited by %(progdesc)s.',
        '',
        ( 'no_name_prefix', 'no_' ),
        '',
        '# repos_conf',
        '# ==========',
        '# default value: %(eprefix)s/etc/portage/repos.conf',
        '# scope: global only',
        '#',
        '# Location of portage\'s repos.conf file.  If empty, i.e.:',
        '#',
        '#     repos_conf =',
        '#',
        '# %(framework)s will not automatically maintain the repos.conf file;',
        '# otherwise, when the overlay is created, this file will be',
        '# automatically modified to activate the %(framework)s overlay in',
        '# portage if and when the overlay is created.',
        '',
        ( 'repos_conf', '%(eprefix)s/etc/portage/repos.conf' ),
        '',
    )
    result['sys-kernel/gentoo-sources'] = (
        '',
        '# name_override',
        '# =============',
        '# No default value',
        '# scope: sectional only',
        '#',
        '# Instead of the name_prefix scheme, it is possible to specify a',
        '# name explicitly for the overlay packages generated by %(progdesc)s',
        '# to mirror the portage package in a given section.  For example,',
        '# if we put name_override = %(prog)s in the [sys-kernel/gentoo-sources]',
        '# section, then the overlay package mirroring sys-kernel/gentoo-sources',
        '# generated by %(progdesc)s would be named sys-kernel/%(prog)s.',
        '',
        ( 'name_override', '%(prog)s-sources', True, True ),
        '',
        '# no_name_override',
        '# ================',
        '# No default value',
        '# scope: sectional only',
        '#',
        '# Instead of the no_name_prefix scheme, it is possible to specify a',
        '# name explicitly for the no-sources overlay packages generated by',
        '# %(progdesc)s to mirror the portage package in a given section.  For',
        '# example if we put no_name_override = nope in the',
        '# [sys-kernel/gentoo-sources] section, then the no-sources package',
        '# mirroring sys-kernel/gentoo-sources in the overlay generated by',
        '# %(progdesc)s would be named sys-kernel/nope.',
        '',
        ( 'no_name_override', 'no-sources', True, True ),
        '',
    )
    # Apply subconsts() to every string (including strings inside the inner
    # tuples), leaving booleans untouched.
    for key in result.keys():
        val = result[key]
        result[key] = tuple(
            tuple(
                valsubitem if isinstance(valsubitem, bool) else subconsts(valsubitem)
                for valsubitem in valitem
            ) if isinstance(valitem, tuple) else subconsts(valitem)
            for valitem in val
        )
    kng_example_config_data = result.copy()
    return result
# module-level cache for KNGGlobalDefaults()
kng_global_defaults = None
@trace
def KNGGlobalDefaults():
    '''Return a copy of the {key: default-value} mapping for the global section.

    Derived from KNGExampleConfigData(): every (key, val, ...) tuple in the
    implicit_global and global sections contributes key -> val, except
    entries flagged no_default (truthy fourth tuple element).  Cached at
    module level after the first call.
    '''
    global kng_global_defaults
    if kng_global_defaults:
        return kng_global_defaults.copy()
    ecd = KNGExampleConfigData()
    implicit = ecd['implicit_global'] if 'implicit_global' in ecd else ()
    explicit = ecd['global'] if 'global' in ecd else ()
    result = {
        valitem[0]: valitem[1]
        for valitem in chain(implicit, explicit)
        if isinstance(valitem, tuple) and (len(valitem) < 4 or not valitem[3])
    }
    kng_global_defaults = result.copy()
    return result
class KNGConfigItems(list):
    '''
    Implements a list of KNGConfigItem instances with some dict-like interfaces
    for, i.e., determining whether a particular configuration key is already in
    the list, or setting the key in-place via __getitem__. For dict-like behaviors,
    the comments are ignored.

    Terminology used throughout:

    * "fetal": the container (or an item) exists only speculatively and does
      not yet hold real, user-visible configuration data.  A fetal container
      is "christened" (becomes non-fetal) once a non-fetal item is stored.
    * "daddy": the owning object -- for this list, the KNGConfig that maps
      section names to KNGConfigItems instances; used to resolve section
      names and default values.
    '''
    @trace
    def __init__(self, *args, **kwargs):
        # Consume our private keyword arguments so they are not forwarded to
        # the underlying list constructor.
        if 'fetal' in kwargs:
            self._fetal = kwargs.pop('fetal')
        else:
            self._fetal = False
        if 'daddy' in kwargs:
            self._daddy = kwargs.pop('daddy')
        else:
            self._daddy = None
        # A fetal list must know its owner so it can be found/christened later.
        if self._fetal and self._daddy is None:
            raise TypeError('KNGConfigItems.__init__: fetal requires daddy.')
        super(KNGConfigItems, self).__init__(*args, **kwargs)
    @property
    def fetal(self):
        # Convenience property wrapper around is_fetal().
        return self.is_fetal()
    def is_fetal(self):
        return self._fetal
    @trace
    def __contains__(self, key):
        # Dict-like membership: match on item keys first, then fall back to
        # plain list containment (so both keys and items can be tested).
        for item in self:
            if item.key == key:
                return True
        return super(KNGConfigItems, self).__contains__(key)
    @suppress_tracing
    def __repr__(self):
        return 'KNGConfigItems(%s)' % super(KNGConfigItems, self).__repr__()
    @trace
    def iterkeypairs(self):
        # (key, value) pairs of real settings only -- fetal items and
        # comments are skipped.
        return ( (item.key, item.value) for item in self if (not item.fetal) and (not item.iscomment) )
    @trace
    def iterkeys(self):
        return ( item[0] for item in self.iterkeypairs() )
    @trace
    def itervalues(self):
        return ( item[1] for item in self.iterkeypairs() )
    @trace
    def iterexplicit(self):
        # Items that should actually be written out to the .conf file.
        return ( item for item in self if item.isexplicit )
    @trace
    def find_default(self, key):
        '''
        Returns any default that would be associated with the provided key in
        the current section or None, if none can be found, using the global
        defaults dict. Raises TypeError if we have no daddy.
        '''
        if self._daddy is None:
            raise TypeError('find_default requires daddy')
        # Only the (implicit_)global sections have defaults in KNGGlobalDefaults.
        if self._daddy.section_of(self) in ['global', 'implicit_global']:
            if key in KNGGlobalDefaults():
                return KNGGlobalDefaults()[key]
        return None
    @trace
    def __getitem__(self, index):
        # Integer/slice indexing behaves like a plain list; string indexing
        # behaves like a dict keyed on item.key.
        if isinstance(index, slice) or isinstance(index, int):
            return super(KNGConfigItems, self).__getitem__(index)
        for item in self:
            if (not item.iscomment) and item.key == index:
                # note: this will return any existing "fetus" with the requested key.
                return item
        return self._missing(index)
    @trace
    def _missing(self, key):
        # add a "fetal" KNGConfigItem for the provided key, analogous to __missing__ in dict
        rv = KNGConfigItem(key, None, default=self.find_default(key), daddy=self)
        self.append(rv)
        return rv
    @trace
    def __setitem__(self, index, value):
        if value is None:
            raise ValueError('KNGConfigItems.__setitem__: use del instead? assigning None is prohibited.')
        elif index == '__comment__':
            # always treat this as a request to append a new comment
            self._fetal = False
            self.append(KNGConfigItem(value, daddy=self))
            return
        elif isinstance(index, slice) or isinstance(index, int):
            # List-style assignment; storing a real (non-fetal) item
            # christens this container.
            if self._fetal and isinstance(value, KNGConfigItem) and not value.fetal:
                self._fetal = False
            super(KNGConfigItems, self).__setitem__(index, value)
            return
        # Dict-style assignment: replace the value of an existing key in
        # place, preserving its position among the other items.
        for itemindex, item in enumerate(self):
            if (not item.iscomment) and item.key == index:
                if isinstance(value, KNGConfigItem):
                    if not value.fetal:
                        self._fetal = False
                    self[itemindex] = value
                    return
                else:
                    item.value = value
                    return
        # Key not present yet: append as a new item.
        if isinstance(value, KNGConfigItem):
            self.append(value)
        else:
            self.append(KNGConfigItem(index, value, daddy=self))
    @trace
    def __delitem__(self, index):
        if isinstance(index, slice) or isinstance(index, int):
            super(KNGConfigItems, self).__delitem__(index)
        else:
            # Dict-style deletion by key; only the first match is removed.
            for itemindex, item in enumerate(self):
                if (not item.iscomment) and item.key == index:
                    super(KNGConfigItems, self).__delitem__(itemindex)
                    return
            raise IndexError('Could not find item matching index "%s" in %s to delete' % (index, self))
    @trace
    def insert(self, index, value):
        if isinstance(index, int):
            super(KNGConfigItems, self).insert(index, value)
        else:
            # Key-style insert: place value just before the item whose key
            # matches index.
            for itemindex, item in enumerate(self):
                if (not item.iscomment) and item.key == index:
                    super(KNGConfigItems, self).insert(itemindex, value)
                    return
            raise IndexError('Could not find item matching insertion index "%s" in %s' % (index, self))
    @trace
    def append(self, value):
        # Enforce key uniqueness: drop any existing item with the same key
        # before appending.
        # NOTE(review): the loop keeps iterating after the del, over a list
        # whose indices have shifted; harmless if keys are unique (at most
        # one match), but worth confirming that invariant always holds.
        for itemindex, item in enumerate(self):
            if (not item.iscomment) and item.key == value.key:
                del(self[itemindex])
        super(KNGConfigItems, self).append(value)
        if isinstance(value, KNGConfigItem):
            if not value.fetal:
                self._fetal = False
    @trace
    def appendnew(self, *args, **kwargs):
        '''
        Constructs a new KNGConfigItem using the provided arguments.
        If no daddy keyword argument is provided, then daddy=<this KNGConfigItems>
        will be added to the provided KNGConfigItem constructor arguments. The
        constructed item is then appended to this KNGConfigItems and returned.
        '''
        kwargs['daddy'] = kwargs.pop('daddy', self)
        rv = KNGConfigItem(*args, **kwargs)
        self.append(rv)
        return rv
    @trace
    def extend(self, values):
        # Route through append() so key-deduplication applies to each value.
        for v in values:
            self.append(v)
    @trace
    def pop(self, index=-1):
        v = self[index]
        del self[index]
        return v
    @trace
    def christen(self):
        # Notification that we now have at least one nonfetal item, which is
        # enough to consider this container "born".
        self._fetal = False
    @trace
    def __iadd__(self, values):
        self.extend(values)
        return self
    # In-place/sequence multiplication makes no sense for a keyed collection.
    def __imul__(self, value):
        raise NotImplementedError('KNGConfigItems.__imul__')
    def __mul__ (self, other):
        raise NotImplementedError('KNGConfigItems.__mul__')
    def __rmul__ (self, other):
        raise NotImplementedError('KNGConfigItems.__rmul__')
class KNGGlobalConfigItemsProxy(KNGConfigItems):
    '''
    A virtual KNGConfigItems that presents the 'implicit_global' and 'global'
    sections of a KNGConfig as one unified section.  Queries see the
    concatenation implicit + explicit; mutations are routed to whichever
    underlying section is appropriate (see append_destination_guess).
    Integer/slice operations that would straddle the boundary between the
    two sections are deliberately unsupported.
    '''
    @trace
    def __init__(self, daddy):
        # Keep direct references to the two real sections; accessing them via
        # daddy[...] will create them (fetal) on demand.
        self._implicit = daddy['implicit_global']
        self._explicit = daddy['global']
        super(KNGGlobalConfigItemsProxy, self).__init__(daddy=daddy, fetal=self.fetal)
    @trace
    def __contains__(self, key):
        return self._implicit.__contains__(key) or self._explicit.__contains__(key)
    @trace
    def __len__(self):
        return len(self._implicit) + len(self._explicit)
    def _fake_self_for_query(self):
        # Materialize the unified view (implicit first, then explicit) for
        # read-only operations.
        return list(self._implicit) + list(self._explicit)
    def append_destination_guess(self):
        # Prefer the explicit global section if it is "real"; otherwise fall
        # back to a real implicit section; if both are fetal, default to
        # explicit (it will be christened by the append).
        if not self._explicit.fetal:
            return self._explicit
        elif not self._implicit.fetal:
            return self._implicit
        else:
            return self._explicit
    @suppress_tracing
    def __repr__(self):
        return 'KNGConfigItems(%s)' % self._fake_self_for_query()
    def is_fetal(self):
        # The proxy is fetal only while both underlying sections are.
        return self._implicit.fetal and self._explicit.fetal
    @trace
    def iterkeypairs(self):
        return (
            (item.key, item.value)
            for item in self._fake_self_for_query()
            if (not item.fetal) and (not item.iscomment)
        )
    @trace
    def iterkeys(self):
        return ( item[0] for item in self.iterkeypairs() )
    @trace
    def itervalues(self):
        return ( item[1] for item in self.iterkeypairs() )
    @trace
    def iterexplicit(self):
        return ( item for item in self._fake_self_for_query() if item.isexplicit )
    @trace
    def find_default(self, key):
        '''
        Returns any default that would be associated with the provided key in
        the current section or None, if none can be found, using the global
        defaults dict. Raises TypeError if we have no daddy.
        '''
        # section_of won't work but thankfully we don't need it!
        if key in KNGGlobalDefaults():
            return KNGGlobalDefaults()[key]
        return None
    @trace
    def __getitem__(self, index):
        if isinstance(index, slice) or isinstance(index, int):
            return self._fake_self_for_query().__getitem__(index)
        for item in self._fake_self_for_query():
            if (not item.iscomment) and item.key == index:
                # note: this will return any existing "fetus" with the requested key.
                return item
        return self._missing(index)
    @trace
    def _missing(self, key):
        # add a "fetal" KNGConfigItem for the provided key, analogous to __missing__ in dict.
        # The item's daddy is the real destination section, not this proxy.
        real_daddy=self.append_destination_guess()
        rv = KNGConfigItem(key, None, default=self.find_default(key), daddy=real_daddy)
        real_daddy.append(rv)
        return rv
    @trace
    def __setitem__(self, index, value):
        if value is None:
            raise ValueError('KNGGlobalConfigItemsProxy.__setitem__: use del instead? assigning None is prohibited.')
        elif index == '__comment__':
            # always treat this as a request to append a new comment
            real_daddy = self.append_destination_guess()
            # NOTE(review): pokes the destination's private _fetal directly
            # instead of calling christen() -- confirm this is intentional.
            real_daddy._fetal = False
            real_daddy.append(KNGConfigItem(value, daddy=real_daddy))
            return
        elif isinstance(index, int):
            # Integer indices < len(implicit) address the implicit section;
            # the remainder address the explicit one.
            if index >= len(self._implicit):
                self._explicit[index - len(self._implicit)] = value
            else:
                self._implicit[index] = value
            return
        elif isinstance(index, slice):
            start, stop, step = index.indices(len(self))
            if step != 1:
                raise NotImplementedError('Fancy stepping behavior not supported here.')
            # Slices spanning the implicit/explicit boundary are unsupported.
            if start < len(self._implicit) and stop > len(self._implicit):
                raise NotImplementedError('No soap, honky-lips: %s, %s.' % (slice(start,stop,step), len(self._implicit)))
            if start < len(self._implicit):
                self._implicit[slice(start,stop,step)] = value
            else:
                start -= len(self._implicit)
                stop -= len(self._implicit)
                self._explicit[slice(start, stop, step)] = value
            return
        # done!
        # Key-style assignment: update an existing item in whichever real
        # section holds it.
        for (itemindex, item), realdeal in chain(zip(enumerate(self._implicit), repeat(self._implicit)),
                                                 zip(enumerate(self._explicit), repeat(self._explicit))):
            if (not item.iscomment) and item.key == index:
                if isinstance(value, KNGConfigItem):
                    # FIXME: if value.daddy does not match the section that
                    # actually holds the item, only the value is copied here;
                    # the provided item object itself is discarded.
                    realdeal[itemindex].value = value.value
                    return
                else:
                    item.value = value
                    return
        if isinstance(value, KNGConfigItem):
            self.append_destination_guess().append(value)
        else:
            # NOTE(review): daddy=self (the proxy) here, whereas _missing uses
            # the real destination section as daddy -- confirm which is intended.
            self.append_destination_guess().append(KNGConfigItem(index, value, daddy=self))
    @trace
    def __delitem__(self, index):
        if isinstance(index, slice):
            start, stop, step = index.indices(len(self))
            if step != 1:
                raise NotImplementedError('Fancy stepping behavior not supported here.')
            if start < len(self._implicit) and stop > len(self._implicit):
                raise NotImplementedError('No soap, honky-lips: %s, %s.' % (slice(start,stop,step), len(self._implicit)))
            if start < len(self._implicit):
                del(self._implicit[slice(start,stop,step)])
            else:
                start -= len(self._implicit)
                stop -= len(self._implicit)
                del(self._explicit[slice(start, stop, step)])
            return
        elif isinstance(index, int):
            if index >= len(self._implicit):
                del(self._explicit[index - len(self._implicit)])
            else:
                del(self._implicit[index])
            return
        # Key-style deletion: remove from whichever real section holds it.
        for (itemindex, item), realdeal in chain(zip(enumerate(self._implicit), repeat(self._implicit)),
                                                 zip(enumerate(self._explicit), repeat(self._explicit))):
            if (not item.iscomment) and item.key == index:
                del(realdeal[itemindex])
                return
        raise IndexError('Could not find item matching index "%s" in %s to delete' % (index, self))
    @trace
    def insert(self, index, value):
        if isinstance(index, int):
            if index < len(self._implicit):
                self._implicit.insert(index, value)
            else:
                self._explicit.insert(index - len(self._implicit), value)
            return
        for (itemindex, item), realdeal in chain(zip(enumerate(self._implicit), repeat(self._implicit)),
                                                 zip(enumerate(self._explicit), repeat(self._explicit))):
            if (not item.iscomment) and item.key == index:
                realdeal.insert(itemindex, value)
                return
        raise IndexError('Could not find item matching insertion index "%s" in %s' % (index, self))
    @trace
    def append(self, value):
        # If the key already exists in either real section, replace it there
        # (keeping it in its current section); otherwise append to the
        # guessed destination.
        for (itemindex, item), realdeal in chain(zip(enumerate(self._implicit), repeat(self._implicit)),
                                                 zip(enumerate(self._explicit), repeat(self._explicit))):
            if (not item.iscomment) and item.key == value.key:
                del(realdeal[itemindex])
                realdeal.append(value)
                return
        self.append_destination_guess().append(value)
    @trace
    def appendnew(self, *args, **kwargs):
        # NOTE(review): unlike KNGConfigItems.appendnew, this does not return
        # the created item -- confirm callers never rely on the return value.
        self.append_destination_guess().appendnew(*args, **kwargs)
    @trace
    def clear(self):
        # NOTE(review): list.clear() requires Python 3.3+ -- confirm the
        # supported runtime for this code path.
        self._implicit.clear()
        self._explicit.clear()
    @trace
    def index(self, *args):
        return self._fake_self_for_query().index(*args)
    @trace
    def pop(self, index=None):
        if index is None:
            index = len(self) - 1
        if index >= len(self._implicit):
            return self._explicit.pop(index - len(self._implicit))
        else:
            return self._implicit.pop(index)
    @trace
    def remove(self, value):
        if value in self._implicit:
            self._implicit.remove(value)
        else:
            self._explicit.remove(value)
    def reverse(self):
        # NOTE(review): typo "Cojnfig" in the original message preserved below.
        raise NotImplementedError('KNGGlobalCojnfigItemsProxy.reverse')
    # Comparison/query dunders simply delegate to the materialized view.
    def __eq__(self, other):
        return self._fake_self_for_query().__eq__(other)
    def __ge__(self, other):
        return self._fake_self_for_query().__ge__(other)
    def __gt__(self, other):
        return self._fake_self_for_query().__gt__(other)
    def __hash__(self):
        return self._fake_self_for_query().__hash__()
    @trace
    def __iter__(self, *args, **kwargs):
        return self._fake_self_for_query().__iter__(*args, **kwargs)
    def __le__(self, other):
        return self._fake_self_for_query().__le__(other)
    def __lt__(self, other):
        return self._fake_self_for_query().__lt__(other)
    def __ne__(self, other):
        return self._fake_self_for_query().__ne__(other)
    def sort(self):
        raise NotImplementedError('KNGGlobalConfigItemsProxy.sort')
    def __reversed__(self):
        raise NotImplementedError('KNGGlobalConfigItemsProxy.__reversed__')
    def __sizeof__(self):
        return self._implicit.__sizeof__() + self._explicit.__sizeof__()
    @trace
    def christen(self, item):
        # should never happen since the KNGConfigItems should have the "real" daddys
        raise NotImplementedError('KNGGlobalConfigItemsProxy.christen!?')
class KNGConfig(OrderedDict):
    '''
    Ordered mapping of section name -> KNGConfigItems, representing a whole
    kernelng configuration file.  Missing sections are created on demand as
    fetal KNGConfigItems (see __missing__).
    '''
    @trace
    def __init__(self, kernelng_conf_file=KERNELNG_CONF_FILE, repos_conf_file=REPOS_CONF_FILE):
        self._kernelng_conf_file = kernelng_conf_file
        self._repos_conf_file = repos_conf_file
        # Lazily-created unified global-section proxy (see the globals property).
        self._globals = None
        super(KNGConfig, self).__init__()
    @trace
    def section_of(self, configitems):
        # Reverse lookup: find the section name owning a given KNGConfigItems
        # instance (identity comparison, not equality).
        for section, cfgitems in list(self.items()):
            if cfgitems is configitems:
                return section
        raise ValueError(configitems)
    @trace
    def loadExampleConfig(self):
        '''Replace the current contents with the built-in example configuration.'''
        self.clear()
        ecd = KNGExampleConfigData()
        for key in ecd.keys():
            self[key] = KNGConfigItems(daddy=self)
            val = ecd[key]
            for item in val:
                if isinstance(item, tuple):
                    if len(item) > 3 and item[3]:
                        # when item[3] is true (no default), then this config. parameter will
                        # not appear in KNGGlobalDefaults and therefore stored, no default is the only
                        # sensible interpretation regardless of item[2] (force-stored).
                        self[key].append(KNGConfigItem(item[0], item[1], reason='stored', daddy=self[key]))
                    elif len(item) > 2 and item[2]:
                        # When item[3] is False (meaning, the config. parameter item[0] does have
                        # a default value and it's item[1]), but item[2] is true, this amounts to
                        # saying "item[0] is set to item[1], which happens to be the default value,
                        # but despite this, please force the config. parameter to appear in the .conf
                        # file anyhow. We achieve this miracle like so:
                        self[key].append(KNGConfigItem(item[0], item[1], default=item[1], reason='stored', daddy=self[key]))
                    else:
                        # add a comment item "illustrating" the default value in "pseudo-prose", as, otherwise,
                        # the KNGConfigItem for the item[0] => item[1] setting would not appear anywhere in the
                        # example configuration file (because its reason will be 'default', not 'stored')
                        self[key].append(KNGConfigItem('# %(confkey)s = %(confval)s' % {
                            'confkey': item[0], 'confval': item[1] }))
                        # add the KNGConfigItem mapping the config. parameter to its default value
                        self[key].append(KNGConfigItem(item[0], item[1], default=item[1], reason='default', daddy=self[key]))
                else:
                    # Plain strings are comment lines.
                    self[key].append(KNGConfigItem(item, daddy=self[key]))
    @property
    def globals(self):
        '''
        Returns a virtualized KNGConfigItems proxy which treats the 'global' and 'implicit_global'
        sections as a unified section. This helps prevent accidental mistakes like adding the
        same configuration key to both sections, and simplifies various usages. When both global
        and implicit_global sections exist, new items go into the explicit global section; when
        only one of these sections exist, new items go into it; when neither section exists, new
        items go into an explicit global section which will be created on demand.
        '''
        if self._globals is None:
            self._globals = KNGGlobalConfigItemsProxy(self)
        return self._globals
    @trace
    def writeConfigText(self, file=None, no_comments=False):
        '''
        Write the currently loaded configuration to a given file.
        :param file: If provided, the output will be written into the provided click.File object.
                     If not provided, output will go to standard output.
        :param no_comments: If True, comment items are omitted from the output.
        '''
        keys = self.keys()
        for key in keys:
            vlist = self[key]
            # Fetal (never-populated) sections are not written out; the
            # implicit_global section gets no [header] line.
            if vlist and not vlist.fetal:
                if key != 'implicit_global':
                    click.echo('[%s]' % key, file=file)
                for item in vlist.iterexplicit():
                    if item.iscomment:
                        if not no_comments:
                            click.echo(item.comment, file=file)
                    else:
                        click.echo('%(itemkey)s = %(itemvalue)s' % { 'itemkey': item.key, 'itemvalue': item.value }, file=file)
    @trace
    def loadConfigText(self, file=None, dirty=False):
        '''
        Loads the active configuration from a configuration file. If the file cannot be parsed, then
        raises a SyntaxError.
        :param file: If provided, this file will be used.  It can be a python stream, a filename, or
                     omitted entirely, in which case loadConfigText will look the default filename of
                     kernelng.config.KERNELNG_CONF_FILE.
        :param dirty: If True, the active configuration object will not be cleaned before loading
                      from the specified file.  This will overwrite any settings which conflict and
                      append any new settings values to the end of their corresponding sections.
        '''
        # NOTE(review): the dirty flag is documented but self.clear() below runs
        # unconditionally -- confirm whether dirty handling is still pending.
        if file is None:
            file = click.open_file(KERNELNG_CONF_FILE, mode='r')
        with file:
            self.clear()
            # Lines before the first [section] header belong to implicit_global.
            section = 'implicit_global'
            for lineindex, line in enumerate((line.rstrip('\n') for line in file)):
                if CONFIG_COMMENT_RE.match(line):
                    self[section].appendnew(line)
                    continue
                m = CONFIG_SECTION_RE.match(line)
                if m:
                    section = m.group(1)
                    self[section].christen()
                    echov(_sc('%s read section header: "%s"' % ('%(lc)s', click.style(section, fg='yellow', bold=True))), 2)
                    continue
                m = CONFIG_SETTING_RE.match(line)
                if m:
                    key, val = m.groups()
                    # Re-assigning a key within one section is a hard error.
                    if key in self[section]:
                        raise KeyError('%s (line %s): [%s].%s first assigned as '
                            '"%s", then re-assigned as "%s".' % (click.format_filename(file.name), lineindex, section,
                            key, self[section][key].value, val))
                    self[section][key] = val
                    echov(_sc('%s loaded configuration setting: %s%s%s%s%s %s %s%s%s' % (
                        '%(lc)s',
                        click.style('[', fg='white', bold=True),
                        click.style(section, fg='yellow', bold=True),
                        click.style(']', fg='white', bold=True),
                        click.style('.', fg='white', bold=True),
                        click.style(key, fg='blue', bold=True),
                        click.style('=', fg='white', bold=True),
                        click.style('"', fg='white', bold=True),
                        click.style(val, fg='blue', bold=True),
                        click.style('"', fg='white', bold=True)
                    )), 2)
                    continue
                raise SyntaxError('%s (line %s): Syntax error: "%s" unrecognized.' % (click.format_filename(file.name), lineindex, line))
            # ATM we need these dummy default settings around... maybe later they should be
            # virtualized or something, this is pretty gross....?
            gd = KNGGlobalDefaults()
            for key in gd.keys():
                if not key in self.globals:
                    self.globals.appendnew(key=key, value=gd[key], reason='default')
    @trace
    def createOverlay(self, uid, gid, perm):
        # Not implemented yet; placeholder for overlay-creation logic.
        pass
    @trace
    def __missing__(self, index):
        # Auto-vivify unknown sections as fetal KNGConfigItems.
        rv=KNGConfigItems(fetal=True, daddy=self)
        self[index] = rv
        return rv
| gmt/kernel-ng-util | kernelng/config.py | Python | gpl-2.0 | 46,364 |
# encoding: utf-8
# module samba.dcerpc.drsuapi
# from /usr/lib/python2.7/dist-packages/samba/dcerpc/drsuapi.so
# by generator 1.135
""" drsuapi DCE/RPC """
# imports
import dcerpc as __dcerpc
import talloc as __talloc
class DsReplicaObjMetaData2Ctr(__talloc.Object):
    # Auto-generated stub for the drsuapi DsReplicaObjMetaData2Ctr structure;
    # the real implementation lives in the compiled samba C extension, so
    # signatures and property behaviors here are placeholders only.
    def __init__(self, *args, **kwargs): # real signature unknown
        pass
    @staticmethod # known case of __new__
    def __new__(S, *more): # real signature unknown; restored from __doc__
        """ T.__new__(S, ...) -> a new object with type S, a subtype of T """
        pass
    # Placeholder properties; actual getters/setters are provided by the extension.
    array = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    enumeration_context = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
| ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/samba/dcerpc/drsuapi/DsReplicaObjMetaData2Ctr.py | Python | gpl-2.0 | 880 |
import pandas
# hash() requires exactly one argument (calling it bare raises TypeError),
# and even a valid hash() result is an int, which cannot be subscripted.
# The subscript assignment below expects a dict.
array = {}
array['a'] = 'b'
| michaelpantic/tolScreenCleaner | test.py | Python | gpl-2.0 | 45 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints comments posted for the record
"""
__revision__ = "$Id$"
from invenio.webcomment import get_first_comments_or_remarks
def format_element(bfo, nbReviews='all', nbComments='all'):
    """
    Prints comments posted for the record.
    @param nbReviews: The max number of reviews to print ('all' or a number)
    @param nbComments: The max number of comments to print ('all' or a number)
    """
    def _as_limit(raw):
        # Numeric strings become ints; anything else (e.g. 'all') passes through.
        return int(raw) if raw.isdigit() else raw

    (comments, reviews) = get_first_comments_or_remarks(recID=bfo.recID,
                                                        ln=bfo.lang,
                                                        nb_comments=_as_limit(nbComments),
                                                        nb_reviews=_as_limit(nbReviews),
                                                        voted=-1,
                                                        reported=-1,
                                                        user_info=bfo.user_info)
    return comments + reviews
def escape_values(bfo):
    """
    Called by BibFormat in order to check if output of this element
    should be escaped.
    """
    # 0 => never escape: this element emits ready-made HTML from the
    # comments/reviews subsystem.
    return 0
| pombredanne/invenio | modules/bibformat/lib/elements/bfe_comments.py | Python | gpl-2.0 | 2,111 |
from datetime import datetime
from django.http import HttpResponse, HttpResponseRedirect
from django.views.generic import View, ListView, DetailView
from django.views.generic.edit import CreateView, UpdateView
from content.models import Sub, SubFollow, Post, Commit
from content.forms import SubForm, PostForm, CommitForm
from notify.models import Noty
from core.core import random_avatar_sub
class CreateSubView(CreateView):
    '''Create a new Sub (community) and assign it a randomly generated avatar.'''
    template_name = 'content/sub_create.html'
    form_class = SubForm
    def form_valid(self, form):
        obj = form.save(commit=False)
        # First save persists the object; assumes obj.slug is populated by
        # Sub.save() -- TODO confirm against the model.
        obj.save()
        obj.image = 'sub/%s.png' % (obj.slug)
        obj.save()
        # Write the avatar image file at the path recorded above.
        random_avatar_sub(obj.slug)
        return HttpResponseRedirect('/sub')
class SubView(ListView):
    '''List all subs (communities).'''
    template_name = 'content/sub.html'
    model = Sub
class FrontView(ListView):
    '''Front-page post list; 'tab' URL kwarg selects top (last commented) or new.'''
    template_name = 'layouts/post_list.html'
    paginate_by = 4
    def get(self, request, *args, **kwargs):
        # AJAX requests get a partial template for in-place pagination.
        if request.is_ajax(): self.template_name = 'ajax/post_list.html'
        return super(FrontView, self).get(request, *args, **kwargs)
    def get_queryset(self):
        if self.kwargs['tab'] == 'top': return Post.objects.last_commited()
        else: return Post.objects.created()
    def get_context_data(self, **kwargs):
        context = super(FrontView, self).get_context_data(**kwargs)
        # 'portada' (Spanish: front page) labels the list in the template.
        context['list'] = 'portada'
        context['tab_show'] = self.kwargs['tab']
        # Base URL used by the template to build pagination links.
        if self.kwargs['tab'] == 'top': context['list_url'] = '/'
        else: context['list_url'] = '/new'
        return context
class SubPostListView(ListView):
    '''Post list for a single sub; 'tab' selects top/new/followers views.'''
    template_name = 'content/sub_post_list.html'
    paginate_by = 4
    def get(self, request, *args, **kwargs):
        # AJAX requests get a partial template for in-place pagination.
        if request.is_ajax(): self.template_name = 'ajax/post_list.html'
        return super(SubPostListView, self).get(request, *args, **kwargs)
    def get_queryset(self):
        if self.kwargs['tab'] == 'top': return Post.objects.sub_last_commited(self.kwargs['sub'])
        else: return Post.objects.sub_created(self.kwargs['sub'])
    def get_context_data(self, **kwargs):
        context = super(SubPostListView, self).get_context_data(**kwargs)
        sub = Sub.objects.get(pk=self.kwargs['sub'])
        user = self.request.user
        if self.kwargs['tab'] == 'followers': context['followers'] = True
        context['tab_show'] = self.kwargs['tab']
        context['list'] = sub
        context['tab'] = self.kwargs['tab']
        # Base URL used by the template to build pagination links.
        if self.kwargs['tab'] == 'top': context['list_url'] = '/sub/%s' % sub
        else: context['list_url'] = '/sub/%s/new' % sub
        # Tell the template whether the follow button should follow or unfollow.
        context['action'] = 'follow'
        if user.is_authenticated():
            follow_state = SubFollow.objects.by_id(sub_followid='%s>%s' % (user.pk, sub.pk))
            if follow_state: context['action'] = 'unfollow'
            else: context['action'] = 'follow'
        return context
class PostCommitView(CreateView):
    '''Post detail page that also accepts a new commit (comment) on the post.'''
    template_name = 'layouts/post_detail.html'
    form_class = CommitForm
    def get_context_data(self, **kwargs):
        context = super(PostCommitView, self).get_context_data(**kwargs)
        pk, slug = self.kwargs['pk'], self.kwargs['slug']
        context['object'] = Post.objects.by_post(pk, slug)
        return context
    def form_valid(self, form):
        if self.request.user.is_authenticated():
            user = self.request.user
            post = Post.objects.get(postid=self.kwargs['pk'])
            obj = form.save(commit=False)
            obj.create_commit(user, post)
            # Notify the post author -- unless they commented on their own post.
            if not obj.post.user.pk == user.pk:
                noty = Noty.objects.create(user_id=obj.post.user_id, category='C', commit=obj)
                noty.create_noty()
            return HttpResponseRedirect(obj.get_commit_url())
        else:
            # Anonymous users are bounced to login, then back to this post.
            commit_url = '/post/%s/%s/' % (self.kwargs['pk'], self.kwargs['slug'])
            return HttpResponseRedirect('/login/?next=%s' % (commit_url))
class CreatePostView(CreateView):
    '''Create a post; drafts skip the activity-timestamp bookkeeping.'''
    template_name = 'layouts/post_create.html'
    form_class = PostForm
    def form_valid(self, form):
        obj = form.save(commit=False)
        obj.user = self.request.user
        obj.save()
        if obj.draft: return HttpResponseRedirect('/created')
        else:
            # Publishing stamps the author, the sub, and the post itself with
            # the post's creation time so "top" orderings pick it up.
            obj.user.last_commited = obj.created
            obj.user.save()
            obj.sub.last_commited = obj.created
            obj.sub.save()
            obj.last_commited = obj.created
            obj.save()
            return HttpResponseRedirect(obj.get_absolute_url())
class UpdatePostView(UpdateView):
    '''Edit a post; first publication of a former draft stamps activity times.'''
    template_name = 'layouts/post_create.html'
    form_class = PostForm
    def get_queryset(self):
        # Users may only edit their own posts.
        return Post.objects.by_user(self.request.user)
    def form_valid(self, form):
        obj = form.save(commit=False)
        # A draft being published for the first time has no last_commited yet.
        if not obj.last_commited and not obj.draft:
            now = datetime.now()
            obj.last_commited = now
            obj.user.last_commited = now
            obj.user.save()
            obj.sub.last_commited = now
            obj.sub.save()
        obj.save()
        if obj.draft: return HttpResponseRedirect('/created')
        else: return HttpResponseRedirect(obj.get_absolute_url())
class PostUserCreatedView(ListView):
    '''List the logged-in user's own posts (including drafts).'''
    template_name = 'content/post_user_created.html'
    def get_queryset(self):
        return Post.objects.by_user(self.request.user)
class SubFollowCreate(View):
    '''Follow a sub on behalf of the logged-in user and update both follower counters.'''
    def post(self, request, *args, **kwargs):
        user = self.request.user
        sub_followed = self.kwargs['followed']
        # objects.create() already persists the new row; the extra save()
        # the original code performed immediately afterwards was redundant.
        sub_followed_obj = SubFollow.objects.create(follower=user, sub_id=sub_followed)
        # Denormalized counters on both sides of the relation.
        sub_followed_obj.follower.sub_following_number += 1
        sub_followed_obj.follower.save()
        sub_followed_obj.sub.follower_number += 1
        sub_followed_obj.sub.save()
        return HttpResponse(status=200)
class SubFollowDelete(View):
    '''Unfollow a sub and decrement both denormalized follower counters.'''
    def post(self, request, *args, **kwargs):
        sub_unfollowed = self.kwargs['unfollowed']
        sub_unfollowed_obj = SubFollow.objects.get(follower=self.request.user, sub_id=sub_unfollowed)
        # Adjust counters before removing the relation row itself.
        sub_unfollowed_obj.follower.sub_following_number -= 1
        sub_unfollowed_obj.follower.save()
        sub_unfollowed_obj.sub.follower_number -= 1
        sub_unfollowed_obj.sub.save()
        sub_unfollowed_obj.delete()
        return HttpResponse(status=200)
| ellipticaldoor/dfiid | project/content/views.py | Python | gpl-2.0 | 5,667 |
import time
import pygame
import colors
class Spritesheet():
    '''Carves fixed-size frames out of a larger sprite-sheet surface.'''
    surface = None
    size = (0, 0)
    def __init__(self, surface, size):
        self.surface = surface
        self.size = size
    def get(self, x, y):
        '''Return the frame at grid cell (x, y) as a new per-pixel-alpha surface.'''
        w, h = self.size
        frame = pygame.Surface(self.size, pygame.SRCALPHA, 32)
        # Blit only the (w x h) region of the sheet at grid position (x, y).
        frame.blit(self.surface, (0, 0), (x * w, y * h, w, h))
        return frame.convert_alpha()
class Animation():
    '''A fixed-frame-rate animation over a sequence of frame surfaces.'''
    frames = []
    starttime = 0
    fps = 0
    hasStarted = False
    def __init__(self, frames, fps):
        self.frames = frames
        self.fps = fps
    def copy(self):
        '''Return a fresh, not-yet-started animation over the same frames.'''
        return Animation(self.frames, self.fps)
    def start(self):
        '''Begin playback, timed from the moment of this call.'''
        self.hasStarted = True
        self.starttime = time.time()
    def getFrames(self):
        return len(self.frames)
    def getFrame(self):
        '''Index of the frame that should be visible right now.'''
        elapsed = time.time() - self.starttime
        return int(elapsed * self.fps)
    def getSurface(self):
        '''Current frame surface; sticks on the final frame once playback ends.'''
        index = self.getFrame()
        if index < self.getFrames():
            return self.frames[index]
        return self.frames[self.getFrames() - 1]
    def isFinished(self):
        return self.hasStarted and self.getFrame() >= self.getFrames()
    def getRect(self):
        return self.frames[0].get_rect()
def spriteAnimation(surf, fps):
    '''Build an Animation from a horizontal strip of square frames.'''
    width, height = surf.get_size()
    # Each frame is height x height, laid out left to right across the strip.
    sheet = Spritesheet(surf, (height, height))
    frames = [sheet.get(column, 0) for column in range(int(width / height))]
    return Animation(frames, fps)
def main():
    # Manual smoke test: open a window and draw the explosion animation forever.
    import assets
    pygame.init()
    d = pygame.display.set_mode((640, 480))
    explosionAnimation = spriteAnimation(assets.explosionEnd1, 30)
    explosionAnimation.start()
    while True:
        # NOTE(review): no event handling/pump in this loop; the window may be
        # reported unresponsive on some platforms and cannot be closed -- confirm.
        d.fill(colors.white)
        d.blit(explosionAnimation.getSurface(), (0, 0))
        pygame.display.update()
if __name__ == '__main__':
    main()
| Plenglin/math-game | src/sprites.py | Python | gpl-2.0 | 1,911 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------------#
# #
# This file is part of the Horus Project #
# #
# Copyright (C) 2014-2015 Mundo Reader S.L. #
# Copyright (C) 2013 David Braam from Cura Project #
# #
# Date: June 2014 #
# Author: Jesús Arroyo Torrens <jesus.arroyo@bq.com> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 2 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
#-----------------------------------------------------------------------#
__author__ = "Jesús Arroyo Torrens <jesus.arroyo@bq.com>"
__license__ = "GNU General Public License v2 http://www.gnu.org/licenses/gpl.html"
import os
import traceback
import math
import numpy
import re
import zlib
import base64
import sys
import types
if sys.version_info[0] < 3:
import ConfigParser
else:
import configparser as ConfigParser
from horus.util import validators, system
#The settings dictionary contains a key/value reference to all possible settings, with the setting name as key.
settingsDictionary = {}
#The settings list is used to keep a full list of all the settings. This is needed to keep the settings in the proper order,
# as the dictionary will not contain insertion order.
settingsList = []
#Currently selected machine (by index). Horus/Cura-style preferences can hold multiple machines and switch between them.
# Each machine has its own index and unique name.
_selectedMachineIndex = 0
class setting(object):
    """
    A setting object contains a configuration setting. These are globally accessible through the quick access functions
    and through the settingsDictionary function.
    Settings can be:
    * profile settings (settings that affect the scan process and the scan result)
    * preferences (settings that affect how horus works and acts)
    * machine settings (settings that relate to the physical configuration of your machine)
    Settings have validators that check if the value is valid, but do not prevent invalid values!
    Settings have conditions that enable/disable this setting depending on other settings.
    """
    def __init__(self, name, default, type, category, subcategory, store=True, tag=None):
        # NOTE(review): types.FloatType/IntType and unicode() below are
        # Python-2-only, despite the py2/py3 ConfigParser shim at the top of
        # the file -- confirm the supported interpreter version.
        self._name = name
        self._label = subcategory
        self._tooltip = ''
        self._default = unicode(default)
        # Per-machine values; index 0 is used for preferences (see getValueIndex).
        self._values = []
        self._type = type
        self._category = category
        self._subcategory = subcategory
        self._validators = []
        self._conditions = []
        self._store = store
        self._tag = tag
        # Numeric settings automatically get a range validator attached.
        if type is types.FloatType:
            validators.validFloat(self)
        elif type is types.IntType:
            validators.validInt(self)
        # Register in the global lookup structures declared above.
        global settingsDictionary
        settingsDictionary[name] = self
        global settingsList
        settingsList.append(self)
    def setLabel(self, label, tooltip = ''):
        # Returns self to allow fluent chaining during setting declaration.
        self._label = label
        self._tooltip = tooltip
        return self
    def setRange(self, minValue=None, maxValue=None):
        # Adjusts the bounds of the first (auto-created numeric) validator.
        if len(self._validators) < 1:
            return
        self._validators[0].minValue = minValue
        self._validators[0].maxValue = maxValue
        return self
    def getMinValue(self):
        # NOTE(review): wraps a numeric bound in the gettext _() call, like the
        # label getters do -- confirm this is intentional.
        if len(self._validators) > 0:
            return _(self._validators[0].minValue)
    def getMaxValue(self):
        if len(self._validators) > 0:
            return _(self._validators[0].maxValue)
    def getLabel(self):
        # Translated via the (possibly fake) gettext _() function.
        return _(self._label)
    def getTooltip(self):
        return _(self._tooltip)
    def getCategory(self):
        return self._category
    def getTag(self):
        return self._tag
    def getSubCategory(self):
        return self._subcategory
    def isPreference(self):
        return self._category == 'preference'
    def isMachineSetting(self):
        return self._category == 'machine'
    def isProfile(self):
        # Anything that is neither a preference nor a machine setting.
        return not self.isPreference() and not self.isMachineSetting()
    def isStorable(self):
        return self._store
    def getName(self):
        return self._name
    def getType(self):
        return self._type
    def getValue(self, index = None):
        # Falls back to the default when no value was stored for this machine.
        if index is None:
            index = self.getValueIndex()
        if index >= len(self._values):
            return self._default
        return self._values[index]
    def getDefault(self):
        return self._default
    def setValue(self, value, index = None):
        # Grow the per-machine value list with defaults until index is valid.
        if index is None:
            index = self.getValueIndex()
        while index >= len(self._values):
            self._values.append(self._default)
        self._values[index] = unicode(value)
    def getValueIndex(self):
        # Machine settings and profile settings are stored per selected
        # machine; preferences are always at slot 0.
        if self.isMachineSetting() or self.isProfile():
            global _selectedMachineIndex
            return _selectedMachineIndex
        return 0
    def validate(self):
        # Runs all validators; the worst severity wins and messages accumulate.
        result = validators.SUCCESS
        msgs = []
        for validator in self._validators:
            res, err = validator.validate()
            if res == validators.ERROR:
                result = res
            elif res == validators.WARNING and result != validators.ERROR:
                result = res
            if res != validators.SUCCESS:
                msgs.append(err)
        return result, '\n'.join(msgs)
    def addCondition(self, conditionFunction):
        self._conditions.append(conditionFunction)
    def checkConditions(self):
        # All conditions must pass for the setting to be enabled.
        for condition in self._conditions:
            if not condition():
                return False
        return True
#########################################################
## Settings
#########################################################
#Define a fake _() function to fake the gettext tools in to generating strings for the profile settings.
def _(n):
    return n
#-- Settings
# Connection settings
setting('serial_name', '/dev/ttyUSB0', str, 'basic', _('Serial Name'))
setting('baud_rate', 115200, [9600, 14400, 19200, 38400, 57600, 115200], 'basic', _('Baud rate'))
setting('camera_id', '/dev/video0', str, 'basic', _('Camera Id'))
setting('board', 'BT ATmega328', ['Arduino Uno', 'BT ATmega328'], 'basic', _('Board'))
setting('invert_motor', False, bool, 'basic', _('Invert motor'))
# Hack to translate combo boxes:
_('High')
_('Medium')
_('Low')
setting('luminosity', 'Medium', ['High', 'Medium', 'Low'], 'basic', _('Luminosity'))
# Camera/motion settings for the Control workbench
setting('brightness_control', 128, int, 'advanced', _('Brightness')).setRange(0, 255)
setting('contrast_control', 32, int, 'advanced', _('Contrast')).setRange(0, 255)
setting('saturation_control', 32, int, 'advanced', _('Saturation')).setRange(0, 255)
setting('exposure_control', 16, int, 'basic', _('Exposure')).setRange(1, 512)
setting('framerate_control', str('30'), [str('30'), str('25'), str('20'), str('15'), str('10'), str('5')], 'advanced', _('Framerate'))
setting('resolution_control', str('1280x960'), [str('1280x960'), str('960x720'), str('800x600'), str('320x240'), str('160x120')], 'advanced', _('Resolution'))
setting('use_distortion_control', False, bool, 'advanced', _('Use Distortion'))
setting('step_degrees_control', -0.45, float, 'basic', _('Step Degrees')).setRange(0.01)
setting('feed_rate_control', 200, int, 'advanced', _('Feed Rate')).setRange(1, 1000)
setting('acceleration_control', 200, int, 'advanced', _('Acceleration')).setRange(1, 1000)
# Camera settings for the Calibration workbench
setting('brightness_calibration', 100, int, 'advanced', _('Brightness')).setRange(0, 255)
setting('contrast_calibration', 32, int, 'advanced', _('Contrast')).setRange(0, 255)
setting('saturation_calibration', 100, int, 'advanced', _('Saturation')).setRange(0, 255)
setting('exposure_calibration', 16, int, 'basic', _('Exposure')).setRange(1, 512)
setting('framerate_calibration', str('30'), [str('30'), str('25'), str('20'), str('15'), str('10'), str('5')], 'advanced', _('Framerate'))
setting('resolution_calibration', str('1280x960'), [str('1280x960'), str('960x720'), str('800x600'), str('320x240'), str('160x120')], 'advanced', _('Resolution'))
setting('use_distortion_calibration', False, bool, 'advanced', _('Use Distortion'))
# Hack to translate combo boxes:
_('Simple Scan')
_('Texture Scan')
setting('scan_type', 'Texture Scan', ['Simple Scan', 'Texture Scan'], 'basic', _('Scan'))
# Hack to translate combo boxes:
_('Left')
_('Right')
_('Both')
setting('use_laser', 'Both', ['Left', 'Right', 'Both'], 'basic', _('Use Laser'))
# Camera/motion settings for the Scanning workbench
setting('fast_scan', False, bool, 'advanced', _('Fast Scan (experimental)'))
setting('step_degrees_scanning', 0.45, float, 'basic', _('Step Degrees')).setRange(0.01)
setting('feed_rate_scanning', 200, int, 'advanced', _('Feed Rate')).setRange(1, 1000)
setting('acceleration_scanning', 300, int, 'advanced', _('Acceleration')).setRange(1, 1000)
setting('brightness_scanning', 100, int, 'advanced', _('Brightness')).setRange(0, 255)
setting('contrast_scanning', 32, int, 'advanced', _('Contrast')).setRange(0, 255)
setting('saturation_scanning', 32, int, 'advanced', _('Saturation')).setRange(0, 255)
setting('laser_exposure_scanning', 6, int, 'basic', _('Exposure'), tag='simple').setRange(1, 512)
setting('color_exposure_scanning', 10, int, 'basic', _('Exposure'), tag='texture').setRange(1, 512)
setting('framerate_scanning', str('30'), [str('30'), str('25'), str('20'), str('15'), str('10'), str('5')], 'advanced', _('Framerate'))
setting('resolution_scanning', str('1280x960'), [str('1280x960'), str('960x720'), str('800x600'), str('320x240'), str('160x120')], 'advanced', _('Resolution'))
setting('use_distortion_scanning', False, bool, 'advanced', _('Use Distortion'))
# Hack to translate combo boxes:
_('Laser')
_('Gray')
_('Line')
_('Color')
# Image-processing settings (tags select simple/texture scan variants)
setting('img_type', 'Laser', ['Laser', 'Gray', 'Line', 'Color'], 'advanced', _('Image Type'))
setting('use_open', True, bool, 'advanced', _('Use Open'), tag='texture')
setting('open_value', 2, int, 'advanced', _('Open'), tag='texture').setRange(1, 10)
setting('use_threshold', True, bool, 'advanced', _('Use Threshold'), tag='texture')
setting('threshold_value', 25, int, 'advanced', _('Threshold'), tag='texture').setRange(0, 255)
setting('use_cr_threshold', True, bool, 'advanced', _('Use Threshold'), tag='simple')
setting('cr_threshold_value', 140, int, 'advanced', _('Threshold'), tag='simple').setRange(0, 255)
setting('view_roi', False, bool, 'advanced', _('View ROI'))
setting('roi_diameter', 200, int, 'advanced', _('Diameter')).setRange(0, 250)
setting('roi_height', 200, int, 'advanced', _('Height')).setRange(0, 250)
setting('point_cloud_color', 'AAAAAA', str, 'advanced', _('Choose Point Cloud Color'))
setting('adjust_laser', True, bool, 'advanced', _('Adjust Laser'))
# Calibration results; numpy arrays are persisted as repr()'d nested lists
# (see putProfileSettingNumpy / getProfileSettingNumpy).
setting('camera_matrix', ([[1425.0,0.0,480.0],[0.0,1425.0,640.0],[0.0,0.0,1.0]]), numpy.ndarray, 'advanced', _('Calibration Matrix'))
setting('distortion_vector',([0.0,0.0,0.0,0.0,0.0]),numpy.ndarray,'advanced',_('Distortion Vector'))
setting('laser_threshold_value', 120., int, 'advanced', _('Laser Threshold')).setRange(0, 255)
setting('distance_left', 0.0, float, 'advanced', _('Distance'))
setting('normal_left', ([0.0,0.0,0.0]), numpy.ndarray, 'advanced', _('Normal'))
setting('distance_right', 0.0, float, 'advanced', _('Distance'))
setting('normal_right', ([0.0,0.0,0.0]), numpy.ndarray, 'advanced', _('Normal'))
setting('rotation_matrix', ([[0.0,1.0,0.0],[0.0,0.0,-1.0],[-1.0,0.0,0.0]]), numpy.ndarray, 'advanced', _('Rotation Matrix'))
setting('translation_vector', ([5.0,80.0,320.0]), numpy.ndarray, 'advanced', _('Translation Matrix'))
setting('pattern_rows', 6, int, 'advanced', _('Pattern Rows'))
setting('pattern_columns', 11, int, 'advanced', _('Pattern Columns'))
setting('square_width', 13, int, 'advanced', _('Square width'))
setting('pattern_distance', 0, float, 'advanced', _('Pattern Distance'))
setting('extrinsics_step', -5.0, float, 'advanced', _('Extrinsics Step'), False)
setting('laser_coordinates', ([[480.0,480.0],[480.0,480.0]]), numpy.ndarray, 'advanced', _('Laser Coordinates'))
setting('laser_origin', ([0.0,0.0,0.0]), numpy.ndarray, 'advanced', _('Laser Origin'))
setting('laser_normal', ([0.0,0.0,0.0]), numpy.ndarray, 'advanced', _('Laser Normal'))
# GUI-only settings (store=False: not persisted by saveProfile)
setting('left_button', '', str, 'basic', _('Left'), False)
setting('right_button', '', str, 'basic', _('Right'), False)
setting('move_button', '', str, 'basic', _('Move'), False)
setting('enable_button', '', str, 'basic', _('Enable'), False)
setting('gcode_gui', '', str, 'advanced', _('Send'), False)
setting('ldr_value', '', str, 'advanced', _('Send'), False)
# Machine description settings (one value per configured machine)
setting('machine_name', '', str, 'machine', 'hidden')
setting('machine_type', 'ciclop', str, 'machine', 'hidden')
setting('machine_width', '200', float, 'machine', 'hidden').setLabel(_("Maximum width (mm)"), _("Size of the machine in mm"))
setting('machine_depth', '200', float, 'machine', 'hidden').setLabel(_("Maximum depth (mm)"), _("Size of the machine in mm"))
setting('machine_height', '200', float, 'machine', 'hidden').setLabel(_("Maximum height (mm)"), _("Size of the machine in mm"))
setting('machine_center_is_zero', 'True', bool, 'machine', 'hidden').setLabel(_("Machine center 0,0"), _("Machines firmware defines the center of the bed as 0,0 instead of the front left corner."))
setting('machine_shape', 'Circular', ['Square','Circular'], 'machine', 'hidden').setLabel(_("Build area shape"), _("The shape of machine build area."))
##-- Preferences
setting('language', 'English', str, 'preference', 'hidden').setLabel(_('Language'), _('Change the language in which Horus runs. Switching language requires a restart of Horus'))
# Hack to translate combo boxes:
_('Control workbench')
_('Calibration workbench')
_('Scanning workbench')
setting('workbench', 'Scanning workbench', ['Control workbench', 'Calibration workbench', 'Scanning workbench'], 'preference', 'hidden')
setting('show_welcome', True, bool, 'preference', 'hidden')
setting('check_for_updates', True, bool, 'preference', 'hidden')
setting('basic_mode', False, bool, 'preference', 'hidden')
setting('view_control_panel', True, bool, 'preference', 'hidden')
setting('view_control_video', True, bool, 'preference', 'hidden')
setting('view_calibration_panel', True, bool, 'preference', 'hidden')
setting('view_calibration_video', True, bool, 'preference', 'hidden')
setting('view_scanning_panel', False, bool, 'preference', 'hidden')
setting('view_scanning_video', False, bool, 'preference', 'hidden')
setting('view_scanning_scene', True, bool, 'preference', 'hidden')
# TODO: change default last file
setting('last_files', [], str, 'preference', 'hidden')
setting('last_file', os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'resources', 'example', 'default.stl')), str, 'preference', 'hidden')
setting('last_profile', os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'resources', 'example', 'default.ini')), str, 'preference', 'hidden')
setting('model_color', '#888899', str, 'preference', 'hidden').setLabel(_('Model color'), _('Display color for first extruder'))
#Remove fake defined _() because later the localization will define a global _()
del _
#########################################################
## Profile and preferences functions
#########################################################
def getSubCategoriesFor(category):
    """Return the sub categories of *category*, de-duplicated, in declaration order."""
    seen = set()
    subcategories = []
    for entry in settingsList:
        if entry.getCategory() != category:
            continue
        sub = entry.getSubCategory()
        # Only evaluate the (possibly expensive) conditions for unseen sub categories,
        # matching the short-circuit order of the original implementation.
        if sub not in seen and entry.checkConditions():
            seen.add(sub)
            subcategories.append(sub)
    return subcategories
def getSettingsForCategory(category, subCategory = None):
    """Return every setting in *category*, optionally narrowed to one sub category.

    Settings whose conditions evaluate to False are excluded.
    """
    return [s for s in settingsList
            if s.getCategory() == category
            and (subCategory is None or s.getSubCategory() == subCategory)
            and s.checkConditions()]
## Profile functions
def getBasePath():
"""
:return: The path in which the current configuration files are stored. This depends on the used OS.
"""
if system.isWindows():
basePath = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))
#If we have a frozen python install, we need to step out of the library.zip
if hasattr(sys, 'frozen'):
basePath = os.path.normpath(os.path.join(basePath, ".."))
else:
basePath = os.path.expanduser('~/.horus/')
if not os.path.isdir(basePath):
try:
os.makedirs(basePath)
except:
print "Failed to create directory: %s" % (basePath)
return basePath
def getAlternativeBasePaths():
    """
    Search for alternative installations of Horus and their preference files. Used to load configuration from older versions of Horus.
    """
    paths = []
    # Fix: hoist the loop-invariant getBasePath() call; it was previously invoked
    # for every directory entry (and it may create the config directory each time).
    ownPath = getBasePath()
    basePath = os.path.normpath(os.path.join(ownPath, '..'))
    for subPath in os.listdir(basePath):
        path = os.path.join(basePath, subPath)
        if os.path.isdir(path) and os.path.isfile(os.path.join(path, 'preferences.ini')) and path != ownPath:
            paths.append(path)
        path = os.path.join(basePath, subPath, 'Horus')
        if os.path.isdir(path) and os.path.isfile(os.path.join(path, 'preferences.ini')) and path != ownPath:
            paths.append(path)
    return paths
def getDefaultProfilePath():
    """
    :return: The default path where the currently used profile is stored and loaded on open and close of Horus.
    """
    base = getBasePath()
    return os.path.join(base, 'current_profile.ini')
def loadProfile(filename, allMachines = False):
    """
    Read a profile file as active profile settings.
    :param filename: The ini filename to load the profile from.
    :param allMachines: When False only the current active profile is loaded. If True all profiles for all machines are loaded.
    """
    global settingsList
    profileParser = ConfigParser.ConfigParser()
    try:
        profileParser.read(filename)
    except ConfigParser.ParsingError:
        # A corrupt profile file is silently ignored; settings keep their current values.
        return
    if allMachines:
        # One 'profile_<n>' section per machine index.
        n = 0
        while profileParser.has_section('profile_%d' % (n)):
            for set in settingsList:
                if set.isPreference():
                    continue
                section = 'profile_%d' % (n)
                if profileParser.has_option(section, set.getName()):
                    set.setValue(unicode(profileParser.get(section, set.getName()), 'utf-8', 'replace'), n)
            n += 1
    else:
        # Single 'profile' section applied to the currently selected machine.
        for set in settingsList:
            if set.isPreference():
                continue
            section = 'profile'
            if profileParser.has_option(section, set.getName()):
                set.setValue(unicode(profileParser.get(section, set.getName()), 'utf-8', 'replace'))
def saveProfile(filename, allMachines = False):
    """
    Save the current profile to an ini file.
    :param filename: The ini filename to save the profile in.
    :param allMachines: When False only the current active profile is saved. If True all profiles for all machines are saved.
    """
    global settingsList
    profileParser = ConfigParser.ConfigParser()
    if allMachines:
        # NOTE(review): unlike the single-profile branch below, this branch does not
        # honour isStorable() -- confirm whether that asymmetry is intentional.
        for set in settingsList:
            if set.isPreference() or set.isMachineSetting():
                continue
            for n in xrange(0, getMachineCount()):
                section = 'profile_%d' % (n)
                if not profileParser.has_section(section):
                    profileParser.add_section(section)
                profileParser.set(section, set.getName(), set.getValue(n).encode('utf-8'))
    else:
        profileParser.add_section('profile')
        for set in settingsList:
            if set.isPreference() or set.isMachineSetting():
                continue
            if set.isStorable():
                profileParser.set('profile', set.getName(), set.getValue().encode('utf-8'))
    # Fix: close the file handle deterministically; 'profileParser.write(open(...))'
    # leaked the handle and relied on garbage collection to flush the data.
    with open(filename, 'w') as profileFile:
        profileParser.write(profileFile)
def resetProfile():
    """Reset every profile setting for the current machine to its default value."""
    global settingsList
    for entry in settingsList:
        if entry.isProfile():
            entry.setValue(entry.getDefault())
def resetProfileSetting(name):
    """Reset a single profile setting to its default value; unknown names are ignored."""
    global settingsDictionary
    entry = settingsDictionary.get(name)
    if entry is not None and entry.isProfile():
        entry.setValue(entry._default)
def setProfileFromString(options):
    """
    Parse an encoded string which has all the profile settings stored inside of it.
    Used in combination with getProfileString to ease sharing of profiles.
    """
    # The payload is zlib-compressed, base64-encoded text of the form
    #   key=value \b key=value ... \f key=value ...
    # where '\f' separates the profile section from the "alt" section and
    # '\b' separates individual key=value pairs (see getProfileString).
    options = base64.b64decode(options)
    options = zlib.decompress(options)
    (profileOpts, alt) = options.split('\f', 1)
    global settingsDictionary
    for option in profileOpts.split('\b'):
        if len(option) > 0:
            (key, value) = option.split('=', 1)
            if key in settingsDictionary:
                # Only profile settings are accepted in the first section.
                if settingsDictionary[key].isProfile():
                    settingsDictionary[key].setValue(value)
    for option in alt.split('\b'):
        if len(option) > 0:
            (key, value) = option.split('=', 1)
            # NOTE(review): the alt section applies values without a category
            # check -- confirm preferences/machine settings are meant to be settable here.
            if key in settingsDictionary:
                settingsDictionary[key].setValue(value)
def getProfileString():
    """
    Get an encoded string which contains all profile settings.
    Used in combination with setProfileFromString to share settings in files, forums or other text based ways.
    """
    p = []
    # 'alt' is always empty here but kept so the '\f'-separated format stays
    # compatible with setProfileFromString.
    alt = []
    global settingsList
    for set in settingsList:
        if set.isProfile():
            p.append(set.getName() + "=" + set.getValue().encode('utf-8'))
    # Pairs are joined with '\b', sections with '\f', then compressed and base64-encoded.
    ret = '\b'.join(p) + '\f' + '\b'.join(alt)
    ret = base64.b64encode(zlib.compress(ret, 9))
    return ret
def insertNewlines(string, every=64): #This should be moved to a better place then profile.
    """Return *string* with a newline inserted after every *every* characters."""
    # Fix: use range() instead of the Python-2-only xrange(); the behaviour is
    # identical for these sizes and the helper no longer breaks under Python 3.
    lines = []
    for i in range(0, len(string), every):
        lines.append(string[i:i+every])
    return '\n'.join(lines)
def getPreferencesString():
    """
    :return: An encoded string which contains all the current preferences.
    """
    global settingsList
    # Same wire format as getProfileString, but without the '\f' alt section.
    pairs = [entry.getName() + "=" + entry.getValue().encode('utf-8')
             for entry in settingsList if entry.isPreference()]
    return base64.b64encode(zlib.compress('\b'.join(pairs), 9))
def getProfileSettingObject(name):
    """Return the setting object with the given name, or None when not found."""
    global settingsList
    for set in settingsList:
        # Fix: compare names with '==' instead of 'is'. Identity comparison of
        # strings only works when both happen to be interned, so the lookup
        # silently failed for names built at runtime.
        if set.getName() == name:
            return set
#TODO: get profile setting using getType
def getProfileSetting(name):
    """
    Get the value of an profile setting.
    :param name: Name of the setting to retrieve.
    :return: Value of the current setting, or '' (with a logged error) when unknown.
    """
    global settingsDictionary
    entry = settingsDictionary.get(name)
    if entry is not None and entry.isProfile():
        return entry.getValue()
    traceback.print_stack()
    sys.stderr.write('Error: "%s" not found in profile settings\n' % (name))
    return ''
def getDefaultProfileSetting(name):
    """
    Get the default value of a profile setting.
    :param name: Name of the setting to retrieve.
    :return: Default value of the setting, or '' (with a logged error) when unknown.
    """
    global settingsDictionary
    entry = settingsDictionary.get(name)
    if entry is not None and entry.isProfile():
        return entry.getDefault()
    traceback.print_stack()
    sys.stderr.write('Error: "%s" not found in profile settings\n' % (name))
    return ''
def getProfileSettingInteger(name):
    """Return the named profile setting as an int, or 0 when missing or invalid."""
    try:
        setting = getProfileSetting(name)
        # NOTE(review): eval() is kept for parity with the sibling accessors, but it
        # will execute arbitrary expressions stored in the profile file.
        return int(eval(setting, {}, {}))
    except:
        # Fix: return an int fallback; the previous code returned the float 0.0
        # from an integer accessor.
        return 0
def getProfileSettingFloat(name):
    """Return the named profile setting as a float, or 0.0 when missing or invalid.

    Accepts a comma as decimal separator.
    """
    try:
        raw = getProfileSetting(name).replace(',', '.')
        return float(eval(raw, {}, {}))
    except:
        return 0.0
def getProfileSettingBool(name):
    """Return the named profile setting as a bool, or False when missing or invalid."""
    try:
        raw = getProfileSetting(name)
        return bool(eval(raw, {}, {}))
    except:
        return False
def getProfileSettingNumpy(name):
    """Return the named profile setting evaluated as a numpy array.

    The setting is stored as a repr()'d nested list (see putProfileSettingNumpy).
    Returns an empty list (not an empty array) when missing or unparsable.
    """
    try:
        setting = getProfileSetting(name)
        return numpy.array(eval(setting, {}, {}))
    except:
        return []
def putProfileSetting(name, value):
    """Store a value in the named profile setting; unknown names are ignored."""
    global settingsDictionary
    entry = settingsDictionary.get(name)
    if entry is not None and entry.isProfile():
        entry.setValue(value)
def putProfileSettingNumpy(name, value):
    """Serialise a numpy array into a profile setting as a bare nested-list literal."""
    serialised = repr(value)
    # Strip the repr() decoration so only the nested list literal remains.
    for fragment in ('\n', 'array(', ')', ' '):
        serialised = serialised.replace(fragment, '')
    putProfileSetting(name, serialised)
def isProfileSetting(name):
    """ Check if a certain key name is actually a profile value. """
    global settingsDictionary
    return name in settingsDictionary and settingsDictionary[name].isProfile()
## Preferences functions
def getPreferencePath():
    """
    :return: The full path of the preference ini file.
    """
    base = getBasePath()
    return os.path.join(base, 'preferences.ini')
def getPreferenceFloat(name):
    """
    Get the float value of a preference; returns 0.0 if the preference is missing or not a valid float.
    Accepts a comma as decimal separator.
    """
    try:
        setting = getPreference(name).replace(',', '.')
        return float(eval(setting, {}, {}))
    except:
        return 0.0
def getPreferenceBool(name):
    """
    Get the boolean value of a preference; returns False if the preference is missing or not a valid bool.
    """
    try:
        setting = getPreference(name)
        return bool(eval(setting, {}, {}))
    except:
        return False
def getPreferenceColor(name):
    """
    Get a preference setting value as a color array. The color is stored as #RRGGBB hex string in the setting.
    :return: [r, g, b, 1.0] with each channel in [0.0, 1.0].
    """
    hexColor = getPreference(name)
    channels = [int(hexColor[i:i + 2], 16) / 255.0 for i in (1, 3, 5)]
    return channels + [1.0]
def loadPreferences(filename):
    """
    Read a configuration file as global config
    """
    global settingsList
    profileParser = ConfigParser.ConfigParser()
    try:
        profileParser.read(filename)
    except ConfigParser.ParsingError:
        # A corrupt preferences file is silently ignored.
        return
    # Global preferences live in the 'preference' section.
    for set in settingsList:
        if set.isPreference():
            if profileParser.has_option('preference', set.getName()):
                set.setValue(unicode(profileParser.get('preference', set.getName()), 'utf-8', 'replace'))
    # Per-machine settings live in consecutive 'machine_<n>' sections.
    n = 0
    while profileParser.has_section('machine_%d' % (n)):
        for set in settingsList:
            if set.isMachineSetting():
                if profileParser.has_option('machine_%d' % (n), set.getName()):
                    set.setValue(unicode(profileParser.get('machine_%d' % (n), set.getName()), 'utf-8', 'replace'), n)
        n += 1
def savePreferences(filename):
    """Save all preferences and per-machine settings to an ini file."""
    global settingsList
    parser = ConfigParser.ConfigParser()
    parser.add_section('preference')
    for set in settingsList:
        if set.isPreference():
            parser.set('preference', set.getName(), set.getValue().encode('utf-8'))
    # One 'machine_<n>' section per configured machine; stop at the first unnamed slot.
    n = 0
    while getMachineSetting('machine_name', n) != '':
        parser.add_section('machine_%d' % (n))
        for set in settingsList:
            if set.isMachineSetting():
                parser.set('machine_%d' % (n), set.getName(), set.getValue(n).encode('utf-8'))
        n += 1
    # Fix: close the file handle deterministically instead of leaking it to the GC.
    with open(filename, 'w') as preferenceFile:
        parser.write(preferenceFile)
def getPreference(name):
    """Return the value of a preference, or '' (with a logged error) when unknown."""
    global settingsDictionary
    entry = settingsDictionary.get(name)
    if entry is not None and entry.isPreference():
        return entry.getValue()
    traceback.print_stack()
    sys.stderr.write('Error: "%s" not found in preferences\n' % (name))
    return ''
def putPreference(name, value):
    """Store a preference value and immediately persist all preferences to disk."""
    #Check if we have a configuration file loaded, else load the default.
    global settingsDictionary
    entry = settingsDictionary.get(name)
    if entry is not None and entry.isPreference():
        entry.setValue(value)
        savePreferences(getPreferencePath())
        return
    traceback.print_stack()
    sys.stderr.write('Error: "%s" not found in preferences\n' % (name))
def isPreference(name):
    """Check whether the given key names a preference setting."""
    global settingsDictionary
    return name in settingsDictionary and settingsDictionary[name].isPreference()
def getMachineSettingFloat(name, index = None):
    """Return a machine setting as a float, or 0.0 when missing or invalid.

    Accepts a comma as decimal separator.
    """
    try:
        raw = getMachineSetting(name, index).replace(',', '.')
        return float(eval(raw, {}, {}))
    except:
        return 0.0
def loadMachineSettings(filename):
    """Read the 'machine' section of an ini file into the machine settings."""
    global settingsList
    #Read a configuration file as global config
    profileParser = ConfigParser.ConfigParser()
    try:
        profileParser.read(filename)
    except ConfigParser.ParsingError:
        return
    for set in settingsList:
        if set.isMachineSetting():
            if profileParser.has_option('machine', set.getName()):
                set.setValue(unicode(profileParser.get('machine', set.getName()), 'utf-8', 'replace'))
    # checkAndUpdateMachineName is defined elsewhere in this module (outside this
    # excerpt); presumably it ensures the loaded machine has a unique name -- TODO confirm.
    checkAndUpdateMachineName()
def getMachineSetting(name, index = None):
    """Return the value of a machine setting, or '' (with a logged error) when unknown."""
    global settingsDictionary
    entry = settingsDictionary.get(name)
    if entry is not None and entry.isMachineSetting():
        return entry.getValue(index)
    traceback.print_stack()
    sys.stderr.write('Error: "%s" not found in machine settings\n' % (name))
    return ''
def putMachineSetting(name, value):
    """Store a machine setting value and immediately persist preferences to disk."""
    #Check if we have a configuration file loaded, else load the default.
    global settingsDictionary
    entry = settingsDictionary.get(name)
    if entry is not None and entry.isMachineSetting():
        entry.setValue(value)
        savePreferences(getPreferencePath())
def isMachineSetting(name):
    """Check whether the given key names a machine setting."""
    global settingsDictionary
    return name in settingsDictionary and settingsDictionary[name].isMachineSetting()
def getMachineCenterCoords():
    """Return the XY centre of the machine bed in machine coordinates."""
    if getMachineSetting('machine_center_is_zero') != 'True':
        return [getMachineSettingFloat('machine_width') / 2,
                getMachineSettingFloat('machine_depth') / 2]
    return [0, 0]
#Returns a list of convex polygons, first polygon is the allowed area of the machine,
# the rest of the polygons are the dis-allowed areas of the machine.
def getMachineSizePolygons():
    """Build the outline polygons from the current machine's width/depth/height settings."""
    size = numpy.array([getMachineSettingFloat('machine_width'), getMachineSettingFloat('machine_depth'), getMachineSettingFloat('machine_height')], numpy.float32)
    return getSizePolygons(size)
def getSizePolygons(size):
    """
    Build the 2D outline polygons for a machine of the given size.
    :param size: array-like [width, depth, height] in mm.
    :return: list of polygons; the first (for circular machines) is the allowed
             build area, the rest are keep-out areas (the Ciclop turntable clamps).
    """
    ret = []
    if getMachineSetting('machine_shape') == 'Circular':
        circle = []
        steps = 32
        for n in xrange(0, steps):
            circle.append([math.cos(float(n)/steps*2*math.pi) * size[0]/2, math.sin(float(n)/steps*2*math.pi) * size[1]/2])
        ret.append(numpy.array(circle, numpy.float32))
        # NOTE(review): no outline polygon is produced for the 'Square' shape --
        # confirm whether square machines are expected to return an empty list here.
    if getMachineSetting('machine_type') == 'ciclop':
        # Four corner clamp keep-out rectangles (w x h mm each).
        w = 20
        h = 20
        ret.append(numpy.array([[-size[0]/2,-size[1]/2],[-size[0]/2+w+2,-size[1]/2], [-size[0]/2+w,-size[1]/2+h], [-size[0]/2,-size[1]/2+h]], numpy.float32))
        ret.append(numpy.array([[ size[0]/2-w-2,-size[1]/2],[ size[0]/2,-size[1]/2], [ size[0]/2,-size[1]/2+h],[ size[0]/2-w,-size[1]/2+h]], numpy.float32))
        ret.append(numpy.array([[-size[0]/2+w+2, size[1]/2],[-size[0]/2, size[1]/2], [-size[0]/2, size[1]/2-h],[-size[0]/2+w, size[1]/2-h]], numpy.float32))
        ret.append(numpy.array([[ size[0]/2, size[1]/2],[ size[0]/2-w-2, size[1]/2], [ size[0]/2-w, size[1]/2-h],[ size[0]/2, size[1]/2-h]], numpy.float32))
    # Fix: removed stray repository-metadata text that had been fused onto this
    # return line and broke parsing.
    return ret
# logging.py
# DNF Logging Subsystem.
#
# Copyright (C) 2013-2016 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from __future__ import absolute_import
from __future__ import unicode_literals
import dnf.exceptions
import dnf.const
import dnf.util
import libdnf.repo
import logging
import os
import sys
import time
import warnings
# :api loggers are: 'dnf', 'dnf.plugin', 'dnf.rpm'
SUPERCRITICAL = 100 # do not use this for logging
CRITICAL = logging.CRITICAL
ERROR = logging.ERROR
WARNING = logging.WARNING
INFO = logging.INFO
DEBUG = logging.DEBUG
# DNF-specific levels below logging.DEBUG (10); lower value == more verbose.
DDEBUG = 8
SUBDEBUG = 6
TRACE = 4
def only_once(func):
    """Method decorator turning the method into a no-op on second or later calls."""
    def _disabled(*_args, **_kwargs):
        pass

    def _first_call(self, *args, **kwargs):
        # Run the real implementation, then shadow it on the instance with a
        # no-op so every later call through this name does nothing.
        func(self, *args, **kwargs)
        setattr(self, func.__name__, _disabled)
    return _first_call
class _MaxLevelFilter(object):
def __init__(self, max_level):
self.max_level = max_level
def filter(self, record):
if record.levelno >= self.max_level:
return 0
return 1
# Maps the 'debuglevel' config value to a stdout logging level; values above 6
# fall through to DDEBUG in _cfg_verbose_val2level. Duplicate targets are intentional.
_VERBOSE_VAL_MAPPING = {
    0 : SUPERCRITICAL,
    1 : logging.INFO,
    2 : logging.INFO, # the default
    3 : logging.DEBUG,
    4 : logging.DEBUG,
    5 : logging.DEBUG,
    6 : logging.DEBUG, # verbose value
    }
def _cfg_verbose_val2level(cfg_errval):
    """Translate the config 'debuglevel' value (0-10) into a stdout logging level."""
    assert 0 <= cfg_errval <= 10
    # Values not covered by the mapping (7-10) mean maximum verbosity.
    level = _VERBOSE_VAL_MAPPING.get(cfg_errval)
    return DDEBUG if level is None else level
# Both the DNF default and the verbose default are WARNING. Note that ERROR has
# no specific level.
# Maps the 'errorlevel' config value to a stderr logging level; values above 2
# fall through to WARNING in _cfg_err_val2level.
_ERR_VAL_MAPPING = {
    0: SUPERCRITICAL,
    1: logging.CRITICAL,
    2: logging.ERROR
}
def _cfg_err_val2level(cfg_errval):
    """Translate the config 'errorlevel' value (0-10) into a stderr logging level."""
    assert 0 <= cfg_errval <= 10
    # Values not covered by the mapping (3-10) mean the WARNING default.
    level = _ERR_VAL_MAPPING.get(cfg_errval)
    return logging.WARNING if level is None else level
def _create_filehandler(logfile):
    """Create *logfile* (and its directory) if needed and return a FileHandler for it.

    Timestamps are formatted in UTC with a 'Z' suffix.
    """
    if not os.path.exists(logfile):
        dnf.util.ensure_dir(os.path.dirname(logfile))
        dnf.util.touch(logfile)
        # By default, make logfiles readable by the user (so the reporting ABRT
        # user can attach root logfiles).
        os.chmod(logfile, 0o644)
    handler = logging.FileHandler(logfile)
    formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s",
                                  "%Y-%m-%dT%H:%M:%SZ")
    # Log record times are rendered in UTC, matching the 'Z' in the date format.
    formatter.converter = time.gmtime
    handler.setFormatter(formatter)
    return handler
def _paint_mark(logger):
    """Write the session separator marker into *logger*'s output at INFO level."""
    logger.log(INFO, dnf.const.LOG_MARKER)
class Logging(object):
    """Owns DNF's logging setup: console handlers first, file handlers on demand.

    _presetup() and _setup() are @only_once, so repeated calls are harmless.
    """
    def __init__(self):
        # Kept as attributes so verbosity can be re-tuned after setup.
        self.stdout_handler = self.stderr_handler = None

    @only_once
    def _presetup(self):
        """Register DNF's custom levels and attach the stdout/stderr handlers."""
        logging.addLevelName(DDEBUG, "DDEBUG")
        logging.addLevelName(SUBDEBUG, "SUBDEBUG")
        logging.addLevelName(TRACE, "TRACE")
        logger_dnf = logging.getLogger("dnf")
        logger_dnf.setLevel(TRACE)

        # setup stdout
        stdout = logging.StreamHandler(sys.stdout)
        stdout.setLevel(INFO)
        # Records at WARNING and above go to stderr only (see _MaxLevelFilter).
        stdout.addFilter(_MaxLevelFilter(logging.WARNING))
        logger_dnf.addHandler(stdout)
        self.stdout_handler = stdout

        # setup stderr
        stderr = logging.StreamHandler(sys.stderr)
        stderr.setLevel(WARNING)
        logger_dnf.addHandler(stderr)
        self.stderr_handler = stderr

    @only_once
    def _setup(self, verbose_level, error_level, logdir):
        """Attach file handlers and set the final console verbosity levels."""
        self._presetup()
        logger_dnf = logging.getLogger("dnf")

        # setup file logger
        logfile = os.path.join(logdir, dnf.const.LOG)
        handler = _create_filehandler(logfile)
        logger_dnf.addHandler(handler)
        # temporarily turn off stdout/stderr handlers:
        self.stdout_handler.setLevel(SUPERCRITICAL)
        self.stderr_handler.setLevel(SUPERCRITICAL)
        # put the marker in the file now (console is muted, so it only lands in the file):
        _paint_mark(logger_dnf)

        # setup Python warnings
        logging.captureWarnings(True)
        logger_warnings = logging.getLogger("py.warnings")
        logger_warnings.addHandler(self.stderr_handler)
        logger_warnings.addHandler(handler)

        # route librepo's native logging into its own file
        lr_logfile = os.path.join(logdir, dnf.const.LOG_LIBREPO)
        libdnf.repo.LibrepoLog.addHandler(lr_logfile, verbose_level <= DEBUG)

        # setup RPM callbacks logger (does not propagate to the 'dnf' logger)
        logger_rpm = logging.getLogger("dnf.rpm")
        logger_rpm.propagate = False
        logger_rpm.setLevel(SUBDEBUG)
        logfile = os.path.join(logdir, dnf.const.LOG_RPM)
        handler = _create_filehandler(logfile)
        logger_rpm.addHandler(self.stdout_handler)
        logger_rpm.addHandler(self.stderr_handler)
        logger_rpm.addHandler(handler)
        _paint_mark(logger_rpm)

        # bring std handlers to the preferred level
        self.stdout_handler.setLevel(verbose_level)
        self.stderr_handler.setLevel(error_level)
        # Never let a broken handler raise into application code.
        logging.raiseExceptions = False

    def _setup_from_dnf_conf(self, conf):
        """Run _setup() with levels derived from a dnf configuration object."""
        verbose_level_r = _cfg_verbose_val2level(conf.debuglevel)
        error_level_r = _cfg_err_val2level(conf.errorlevel)
        logdir = conf.logdir
        return self._setup(verbose_level_r, error_level_r, logdir)
class Timer(object):
    """Wall-clock stopwatch: started on construction, reported when called."""
    def __init__(self, what):
        self.what = what
        self.start = time.time()

    def __call__(self):
        # Report the elapsed time since construction at DDEBUG level.
        elapsed_ms = (time.time() - self.start) * 1000
        message = 'timer: %s: %d ms' % (self.what, elapsed_ms)
        logging.getLogger("dnf").log(DDEBUG, message)
# Translates libdnf's log levels to Python logging levels. NOTICE and INFO both
# collapse to INFO; TRACE maps to DNF's custom level 4.
_LIBDNF_TO_DNF_LOGLEVEL_MAPPING = {
    libdnf.utils.Logger.Level_CRITICAL: CRITICAL,
    libdnf.utils.Logger.Level_ERROR: ERROR,
    libdnf.utils.Logger.Level_WARNING: WARNING,
    libdnf.utils.Logger.Level_NOTICE: INFO,
    libdnf.utils.Logger.Level_INFO: INFO,
    libdnf.utils.Logger.Level_DEBUG: DEBUG,
    libdnf.utils.Logger.Level_TRACE: TRACE
}
class LibdnfLoggerCB(libdnf.utils.Logger):
    """Bridge that forwards libdnf/librepo log messages into the 'dnf' logger."""
    def __init__(self):
        super(LibdnfLoggerCB, self).__init__()
        self._logger = logging.getLogger("dnf")

    def write(self, source, *args):
        """Log message.

        source -- integer, defines origin (libdnf, librepo, ...) of message, 0 - unknown
        """
        if len(args) == 2:
            level, message = args
        elif len(args) == 4:
            # Fix: don't shadow the module-level 'time' import with the local
            # timestamp (the timestamp and pid are unused here).
            _time, _pid, level, message = args
        else:
            # Fix: fail with a clear error instead of the NameError on
            # 'level'/'message' the fall-through previously produced.
            raise TypeError("unexpected number of arguments: %d" % len(args))
        self._logger.log(_LIBDNF_TO_DNF_LOGLEVEL_MAPPING[level], message)
# Module-level singleton: register the bridge so libdnf logs through Python logging.
libdnfLoggerCB = LibdnfLoggerCB()
libdnf.utils.Log.setLogger(libdnfLoggerCB)
"""
The singleton class that allows metadata and other attachables to be
attached to the entire website.
As the website at this level is one item of data rather than an entire
model, we have to use a singleton class to attach metadata to it.
"""
from django.conf import settings
from django.contrib.sites.models import Site
from metadata.models import PackageEntry, ImageMetadata, TextMetadata
from metadata.mixins import MetadataSubjectMixin
class Website(MetadataSubjectMixin):
    """
    Class representing the website itself.

    No data is stored at this level; instantiate Website(request)
    whenever a metadata query against the site as a whole is needed.
    """

    def __init__(self, request):
        """
        Initialises a Website object.

        :param request: The HttpRequest object of the current page.
        :type request: HttpRequest
        :rtype: Website
        """
        self.request = request
        # The metadata system expects its subjects to expose a primary key.
        self.pk = 1

    def metadata_strands(self):
        """Map strand names to the metadata managers for this site."""
        return {
            "text": WebsiteTextMetadata.objects,
            "image": WebsiteImageMetadata.objects,
        }

    def packages(self):
        """Return the manager for package entries attached to the site."""
        return WebsitePackageEntry.objects

    ## Template-exposed API ##

    def root(self):
        """
        Returns the URI of the root of the website, for concatenating
        things like STATIC_URL onto it.

        Please please PLEASE try using decoupling-friendly features
        such as 'get_absolute_uri' and whatnot before this.
        """
        absolute = self.request.build_absolute_uri('/')
        return absolute.rstrip('/')

    def site(self):
        """
        Returns the current Django Sites Framework site, or None if no
        site is configured.
        """
        try:
            return Site.objects.get_current()
        except Site.DoesNotExist:
            return None
# Text metadata strand for the website subject.  Table and id-column names
# may be overridden via settings; fkey is None because the Website subject
# is a singleton rather than a DB-backed model row (see Website.pk).
WebsiteTextMetadata = TextMetadata.make_model(
    Website,
    'website',
    table=getattr(
        settings,
        'WEBSITE_TEXT_METADATA_DB_TABLE',
        None
    ),
    id_column=getattr(
        settings,
        'WEBSITE_TEXT_METADATA_DB_ID_COLUMN',
        None
    ),
    fkey=None,
)

# Image metadata strand for the website subject; same override scheme.
WebsiteImageMetadata = ImageMetadata.make_model(
    Website,
    'website',
    table=getattr(
        settings,
        'WEBSITE_IMAGE_METADATA_DB_TABLE',
        None
    ),
    id_column=getattr(
        settings,
        'WEBSITE_IMAGE_METADATA_DB_ID_COLUMN',
        None
    ),
    fkey=None,
)

# Package entries attached to the website subject; same override scheme.
WebsitePackageEntry = PackageEntry.make_model(
    Website,
    'website',
    table=getattr(
        settings,
        'WEBSITE_PACKAGE_ENTRY_DB_TABLE',
        None
    ),
    id_column=getattr(
        settings,
        'WEBSITE_PACKAGE_ENTRY_DB_ID_COLUMN',
        None
    ),
    fkey=None,
)
| CaptainHayashi/lass | website/models/website_class.py | Python | gpl-2.0 | 2,747 |
"""
BitBake 'Fetch' implementations
Classes for obtaining upstream sources for the
BitBake build tools.
"""
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2012 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
import os, re
import signal
import logging
import urllib.request, urllib.parse, urllib.error
# Teach urllib.parse that git:// URLs carry a network location so they are
# split into hostname/port/path the same way http:// URLs are.
if 'git' not in urllib.parse.uses_netloc:
    urllib.parse.uses_netloc.append('git')
import operator
import collections
import subprocess
import pickle
import errno
import bb.persist_data, bb.utils
import bb.checksum
import bb.process
import bb.event
# Version identifier for the fetch2 implementation.
__version__ = "2"
# Shared cache of per-file checksums; initialised in fetcher_init() and
# flushed by fetcher_parse_save()/fetcher_parse_done().
_checksum_cache = bb.checksum.FileChecksumCache()

logger = logging.getLogger("BitBake.Fetcher")
class BBFetchException(Exception):
    """Base class of every exception raised by the fetcher."""

    def __init__(self, message):
        # Keep the text so str(exc) reproduces it verbatim.
        self.msg = message
        super().__init__(message)

    def __str__(self):
        return self.msg
class UntrustedUrl(BBFetchException):
    """Raised for hosts that are not listed in BB_ALLOWED_NETWORKS."""

    def __init__(self, url, message=''):
        # A caller-supplied message wins over the canned description.
        msg = message or "The URL: '%s' is not trusted and cannot be used" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)
class MalformedUrl(BBFetchException):
    """Raised when a URL string cannot be parsed at all."""

    def __init__(self, url, message=''):
        # A caller-supplied message wins over the canned description.
        msg = message or "The URL: '%s' is invalid and cannot be interpreted" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)
class FetchError(BBFetchException):
    """General fetcher failure, optionally tied to a specific URL."""

    def __init__(self, message, url=None):
        msg = ("Fetcher failure for URL: '%s'. %s" % (url, message)
               if url else "Fetcher failure: %s" % message)
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)
class ChecksumError(FetchError):
    """Raised when a downloaded file's checksum does not match the recipe."""

    def __init__(self, message, url=None, checksum=None):
        FetchError.__init__(self, message, url)
        # The offending checksum, used e.g. by rename_bad_checksum().
        self.checksum = checksum
class NoChecksumError(FetchError):
    """Raised when BB_STRICT_CHECKSUM is enabled but the SRC_URI entry
    provides no checksum at all (see verify_checksum)."""
class UnpackError(BBFetchException):
    """Raised when unpacking a fetched source fails."""

    def __init__(self, message, url):
        self.url = url
        BBFetchException.__init__(
            self, "Unpack failure for URL: '%s'. %s" % (url, message))
        # Preserve the original arguments on the exception object.
        self.args = (message, url)
class NoMethodError(BBFetchException):
    """Raised when no registered fetch method can handle a URL."""

    def __init__(self, url):
        self.url = url
        BBFetchException.__init__(
            self, "Could not find a fetcher which supports the URL: '%s'" % url)
        self.args = (url,)
class MissingParameterError(BBFetchException):
    """Raised when a URL lacks a parameter its fetch method requires."""

    def __init__(self, missing, url):
        self.url = url
        # Record which parameter was absent for programmatic inspection.
        self.missing = missing
        BBFetchException.__init__(
            self, "URL: '%s' is missing the required parameter '%s'" % (url, missing))
        self.args = (missing, url)
class ParameterError(BBFetchException):
    """Raised when a URL carries parameters that cannot be processed."""

    def __init__(self, message, url):
        self.url = url
        BBFetchException.__init__(
            self, "URL: '%s' has invalid parameters. %s" % (url, message))
        self.args = (message, url)
class NetworkAccess(BBFetchException):
    """Raised when the network is required but access has been disabled."""

    def __init__(self, url, cmd):
        self.url = url
        # The command that attempted the access, for the error report.
        self.cmd = cmd
        BBFetchException.__init__(
            self,
            "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url))
        self.args = (url, cmd)
class NonLocalMethod(Exception):
    """Marker exception; carries no message of its own."""

    def __init__(self):
        super().__init__()
class MissingChecksumEvent(bb.event.Event):
    """Event fired when a download has no SRC_URI checksums to verify against."""

    def __init__(self, url, md5sum, sha256sum):
        self.url = url
        # Expose the computed sums so a handler can suggest recipe lines.
        self.checksums = dict(md5sum=md5sum, sha256sum=sha256sum)
        bb.event.Event.__init__(self)
class URI(object):
    """
    A class representing a generic URI, with methods for
    accessing the URI components, and stringifies to the
    URI.

    It is constructed by calling it with a URI, or setting
    the attributes manually:

     uri = URI("http://example.com/")

     uri = URI()
     uri.scheme = 'http'
     uri.hostname = 'example.com'
     uri.path = '/'

    It has the following attributes:

     * scheme (read/write)
     * userinfo (authentication information) (read/write)
       * username (read/write)
       * password (read/write)

       Note, password is deprecated as of RFC 3986.

     * hostname (read/write)
     * port (read/write)
     * hostport (read only)
       "hostname:port", if both are set, otherwise just "hostname"
     * path (read/write)
     * path_quoted (read/write)
       A URI quoted version of path
     * params (dict) (read/write)
     * query (dict) (read/write)
     * relative (bool) (read only)
       True if this is a "relative URI", (e.g. file:foo.diff)

    It stringifies to the URI itself.

    Some notes about relative URIs: while it's specified that
    a URI beginning with <scheme>:// should either be directly
    followed by a hostname or a /, the old URI handling of the
    fetch2 library did not comform to this. Therefore, this URI
    class has some kludges to make sure that URIs are parsed in
    a way comforming to bitbake's current usage. This URI class
    supports the following:

     file:relative/path.diff (IETF compliant)
     git:relative/path.git (IETF compliant)
     git:///absolute/path.git (IETF compliant)
     file:///absolute/path.diff (IETF compliant)

     file://relative/path.diff (not IETF compliant)

    But it does not support the following:

     file://hostname/absolute/path.diff (would be IETF compliant)

    Note that the last case only applies to a list of
    "whitelisted" schemes (currently only file://), that requires
    its URIs to not have a network location.
    """

    _relative_schemes = ['file', 'git']
    _netloc_forbidden = ['file']

    def __init__(self, uri=None):
        self.scheme = ''
        self.userinfo = ''
        self.hostname = ''
        self.port = None
        self._path = ''
        self.params = {}
        self.query = {}
        self.relative = False

        if not uri:
            return

        # We hijack the URL parameters, since the way bitbake uses
        # them are not quite RFC compliant.
        uri, param_str = (uri.split(";", 1) + [None])[:2]

        urlp = urllib.parse.urlparse(uri)
        self.scheme = urlp.scheme

        reparse = 0

        # Coerce urlparse to make URI scheme use netloc
        if not self.scheme in urllib.parse.uses_netloc:
            # BUGFIX: this previously appended the scheme to
            # urllib.parse.uses_params, which has no effect on netloc
            # handling; register it with uses_netloc as the comment above
            # intends, then reparse below.
            urllib.parse.uses_netloc.append(self.scheme)
            reparse = 1

        # Make urlparse happy(/ier) by converting local resources
        # to RFC compliant URL format. E.g.:
        #   file://foo.diff -> file:foo.diff
        if urlp.scheme in self._netloc_forbidden:
            uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
            reparse = 1

        if reparse:
            urlp = urllib.parse.urlparse(uri)

        # Identify if the URI is relative or not
        if urlp.scheme in self._relative_schemes and \
           re.compile(r"^\w+:(?!//)").match(uri):
            self.relative = True

        if not self.relative:
            self.hostname = urlp.hostname or ''
            self.port = urlp.port

            self.userinfo += urlp.username or ''

            if urlp.password:
                self.userinfo += ':%s' % urlp.password

        self.path = urllib.parse.unquote(urlp.path)

        if param_str:
            self.params = self._param_str_split(param_str, ";")
        if urlp.query:
            self.query = self._param_str_split(urlp.query, "&")

    def __str__(self):
        userinfo = self.userinfo
        if userinfo:
            userinfo += '@'

        return "%s:%s%s%s%s%s%s" % (
            self.scheme,
            '' if self.relative else '//',
            userinfo,
            self.hostport,
            self.path_quoted,
            self._query_str(),
            self._param_str())

    def _param_str(self):
        # ';key=value' suffix string, empty when there are no params.
        return (
            ''.join([';', self._param_str_join(self.params, ";")])
            if self.params else '')

    def _query_str(self):
        # '?key=value' query string, empty when there is no query.
        return (
            ''.join(['?', self._param_str_join(self.query, "&")])
            if self.query else '')

    def _param_str_split(self, string, elmdelim, kvdelim="="):
        # Split 'a=1<elmdelim>b=2' into an insertion-ordered mapping.
        ret = collections.OrderedDict()
        for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
            ret[k] = v
        return ret

    def _param_str_join(self, dict_, elmdelim, kvdelim="="):
        return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])

    @property
    def hostport(self):
        if not self.port:
            return self.hostname
        return "%s:%d" % (self.hostname, self.port)

    @property
    def path_quoted(self):
        return urllib.parse.quote(self.path)

    @path_quoted.setter
    def path_quoted(self, path):
        self.path = urllib.parse.unquote(path)

    @property
    def path(self):
        return self._path

    @path.setter
    def path(self, path):
        self._path = path

        # A path not starting with '/' makes the whole URI relative.
        if not path or re.compile("^/").match(path):
            self.relative = False
        else:
            self.relative = True

    @property
    def username(self):
        if self.userinfo:
            return (self.userinfo.split(":", 1))[0]
        return ''

    @username.setter
    def username(self, username):
        password = self.password
        self.userinfo = username
        if password:
            self.userinfo += ":%s" % password

    @property
    def password(self):
        if self.userinfo and ":" in self.userinfo:
            return (self.userinfo.split(":", 1))[1]
        return ''

    @password.setter
    def password(self, password):
        self.userinfo = "%s:%s" % (self.username, password)
def decodeurl(url):
    """Split *url* into its component tokens.

    Returns a 6-tuple (scheme, host, path, user, password, parameters)
    where parameters is an ordered mapping built from ';key=value'
    suffixes.  Raises MalformedUrl when the string cannot be parsed.
    """
    m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
    if not m:
        raise MalformedUrl(url)

    scheme = m.group('type')
    location = m.group('location')
    if not location:
        raise MalformedUrl(url)
    user = m.group('user')
    parm = m.group('parm')

    # Everything before the first '/' is the host and the rest the path --
    # except for file URLs, which have no host component at all.
    slash = location.find('/')
    if slash != -1 and scheme.lower() != 'file':
        host, path = location[:slash], location[slash:]
    elif scheme.lower() == 'file':
        host, path = "", location
    else:
        host, path = location, "/"

    if user:
        # Separate 'user:password' credentials.
        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
        if m:
            user = m.group('user')
            pswd = m.group('pswd')
    else:
        user = ''
        pswd = ''

    p = collections.OrderedDict()
    if parm:
        for s in parm.split(';'):
            if not s:
                continue
            if not '=' in s:
                raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
            s1, s2 = s.split('=')
            p[s1] = s2

    return scheme, host, urllib.parse.unquote(path), user, pswd, p
def encodeurl(decoded):
    """Reassemble a URL string from the token tuple produced by decodeurl().

    Raises MissingParameterError when the scheme component is empty.
    """
    scheme, host, path, user, pswd, p = decoded

    if not scheme:
        raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
    parts = ['%s://' % scheme]
    # file URLs carry neither credentials nor a host component.
    if user and scheme != "file":
        parts.append("%s" % user)
        if pswd:
            parts.append(":%s" % pswd)
        parts.append("@")
    if host and scheme != "file":
        parts.append("%s" % host)
    if path:
        # Standardise path to ensure comparisons work
        while '//' in path:
            path = path.replace("//", "/")
        parts.append("%s" % urllib.parse.quote(path))
    if p:
        for parm in p:
            parts.append(";%s=%s" % (parm, p[parm]))

    return ''.join(parts)
def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
    """Apply one mirror rule (uri_find -> uri_replace) to ud.url.

    Each decoded URL component is matched against the corresponding regexp
    in uri_find; on a match it is rewritten using uri_replace after the
    keys in *replacements* have been substituted.  Returns the rewritten
    URL string, or None when the rule does not apply or would produce the
    original URL unchanged.
    """
    if not ud.url or not uri_find or not uri_replace:
        logger.error("uri_replace: passed an undefined value, not replacing")
        return None
    uri_decoded = list(decodeurl(ud.url))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
    result_decoded = ['', '', '', '', '', {}]
    # Components are: 0=type, 1=host, 2=path, 3=user, 4=password, 5=params.
    for loc, i in enumerate(uri_find_decoded):
        result_decoded[loc] = uri_decoded[loc]
        regexp = i
        if loc == 0 and regexp and not regexp.endswith("$"):
            # Leaving the type unanchored can mean "https" matching "file" can become "files"
            # which is clearly undesirable.
            regexp += "$"
        if loc == 5:
            # Handle URL parameters
            if i:
                # Any specified URL parameters must match
                for k in uri_find_decoded[loc]:
                    if uri_decoded[loc][k] != uri_find_decoded[loc][k]:
                        return None
            # Overwrite any specified replacement parameters
            for k in uri_replace_decoded[loc]:
                for l in replacements:
                    uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
                result_decoded[loc][k] = uri_replace_decoded[loc][k]
        elif (re.match(regexp, uri_decoded[loc])):
            if not uri_replace_decoded[loc]:
                result_decoded[loc] = ""
            else:
                for k in replacements:
                    uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
                #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1)
            if loc == 2:
                # Handle path manipulations
                basename = None
                if uri_decoded[0] != uri_replace_decoded[0] and mirrortarball:
                    # If the source and destination url types differ, must be a mirrortarball mapping
                    basename = os.path.basename(mirrortarball)
                    # Kill parameters, they make no sense for mirror tarballs
                    uri_decoded[5] = {}
                elif ud.localpath and ud.method.supports_checksum(ud):
                    basename = os.path.basename(ud.localpath)
                if basename and not result_decoded[loc].endswith(basename):
                    result_decoded[loc] = os.path.join(result_decoded[loc], basename)
        else:
            # Component regexp did not match: the rule does not apply.
            return None
    result = encodeurl(result_decoded)
    if result == ud.url:
        return None
    logger.debug(2, "For url %s returning %s" % (ud.url, result))
    return result
# Registered FetchMethod instances; iterated e.g. by fetcher_init().
methods = []
# Cache keyed by URL — presumably of FetchData objects; populated outside
# this chunk (verify against Fetch.__init__).
urldata_cache = {}
# Snapshot of the persisted SCM head revisions taken in fetcher_init(),
# consumed by fetcher_compare_revisions().
saved_headrevs = {}
def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.

    Honours BB_SRCREV_POLICY ("cache" keeps the persisted SCM head
    revisions, "clear" — the default — snapshots then clears them),
    initialises the file checksum cache and each registered fetch method.
    Raises FetchError for an unknown policy value.
    """
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
    if srcrev_policy == "cache":
        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        try:
            bb.fetch2.saved_headrevs = revs.items()
        except Exception:
            # Snapshotting the old revisions is best-effort only; never let a
            # broken persist DB abort initialisation.  (Was a bare 'except:',
            # which also swallowed KeyboardInterrupt/SystemExit.)
            pass
        revs.clear()
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    _checksum_cache.init_cache(d)

    for m in methods:
        if hasattr(m, "init"):
            m.init(d)
def fetcher_parse_save():
    # Write out checksum entries accumulated while parsing recipes.
    _checksum_cache.save_extras()
def fetcher_parse_done():
    # Merge and persist the checksum cache (see bb.checksum.FileChecksumCache).
    _checksum_cache.save_merge()
def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistent cache with the saved values from
    fetcher_init() and return True if they have changed, False otherwise.
    """
    data = bb.persist_data.persist('BB_URI_HEADREVS', d).items()
    data2 = bb.fetch2.saved_headrevs

    # Removed a dead 'changed' flag: the original assigned it and then
    # immediately returned, so it could never be read.
    for key in data:
        if key not in data2 or data2[key] != data[key]:
            logger.debug(1, "%s changed", key)
            return True
        else:
            logger.debug(2, "%s did not change", key)
    return False
def mirror_from_string(data):
    """Parse a whitespace-separated mirror spec into (find, replace) pairs."""
    entries = (data or "").replace('\\n', ' ').split()
    if len(entries) % 2 != 0:
        # An unpaired trailing entry is silently dropped by the zip below.
        bb.warn('Invalid mirror data %s, should have paired members.' % data)
    it = iter(entries)
    return list(zip(it, it))
def verify_checksum(ud, d, precomputed={}):
    """
    verify the MD5 and SHA256 checksum for downloaded src

    Raises a FetchError if one or both of the SRC_URI checksums do not match
    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
    checksums specified.

    Returns a dict of checksums that can be stored in a done stamp file and
    passed in as precomputed parameter in a later call to avoid re-computing
    the checksums from the file. This allows verifying the checksums of the
    file against those in the recipe each time, rather than only after
    downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.

    NOTE(review): the mutable default for 'precomputed' is safe here because
    the dict is only ever read, never mutated.
    """

    _MD5_KEY = "md5"
    _SHA256_KEY = "sha256"

    if ud.ignore_checksums or not ud.method.supports_checksum(ud):
        return {}
    # Reuse the cached digests when supplied, otherwise hash the local file.
    if _MD5_KEY in precomputed:
        md5data = precomputed[_MD5_KEY]
    else:
        md5data = bb.utils.md5_file(ud.localpath)
    if _SHA256_KEY in precomputed:
        sha256data = precomputed[_SHA256_KEY]
    else:
        sha256data = bb.utils.sha256_file(ud.localpath)
    if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected:
        # If strict checking enabled and neither sum defined, raise error
        strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
        if strict == "1":
            logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
                         'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
                         (ud.localpath, ud.md5_name, md5data,
                          ud.sha256_name, sha256data))
            raise NoChecksumError('Missing SRC_URI checksum', ud.url)

        bb.event.fire(MissingChecksumEvent(ud.url, md5data, sha256data), d)

        if strict == "ignore":
            return {
                _MD5_KEY: md5data,
                _SHA256_KEY: sha256data
            }

        # Log missing sums so user can more easily add them
        logger.warning('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
                       'SRC_URI[%s] = "%s"',
                       ud.localpath, ud.md5_name, md5data)
        logger.warning('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
                       'SRC_URI[%s] = "%s"',
                       ud.localpath, ud.sha256_name, sha256data)

    # We want to alert the user if a checksum is defined in the recipe but
    # it does not match.
    msg = ""
    mismatch = False
    if ud.md5_expected and ud.md5_expected != md5data:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
        mismatch = True;

    if ud.sha256_expected and ud.sha256_expected != sha256data:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
        mismatch = True;

    if mismatch:
        msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)

    if len(msg):
        raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)

    return {
        _MD5_KEY: md5data,
        _SHA256_KEY: sha256data
    }
def verify_donestamp(ud, d, origud=None):
    """
    Check whether the done stamp file has the right checksums (if the fetch
    method supports them). If it doesn't, delete the done stamp and force
    a re-download.

    Returns True, if the donestamp exists and is valid, False otherwise. When
    returning False, any existing done stamps are removed.
    """
    if not ud.needdonestamp or (origud and not origud.needdonestamp):
        return True

    if not os.path.exists(ud.localpath):
        # local path does not exist
        if os.path.exists(ud.donestamp):
            # done stamp exists, but the downloaded file does not; the done stamp
            # must be incorrect, re-trigger the download
            bb.utils.remove(ud.donestamp)
        return False

    if (not ud.method.supports_checksum(ud) or
        (origud and not origud.method.supports_checksum(origud))):
        # if done stamp exists and checksums not supported; assume the local
        # file is current
        return os.path.exists(ud.donestamp)

    precomputed_checksums = {}
    # Only re-use the precomputed checksums if the donestamp is newer than the
    # file. Do not rely on the mtime of directories, though. If ud.localpath is
    # a directory, there will probably not be any checksums anyway.
    # NOTE(review): donestamps are pickle files written by this module itself
    # (see update_stamp), not untrusted input.
    if os.path.exists(ud.donestamp) and (os.path.isdir(ud.localpath) or
            os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
        try:
            with open(ud.donestamp, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                precomputed_checksums.update(pickled.load())
        except Exception as e:
            # Avoid the warnings on the upgrade path from emtpy done stamp
            # files to those containing the checksums.
            if not isinstance(e, EOFError):
                # Ignore errors, they aren't fatal
                logger.warning("Couldn't load checksums from donestamp %s: %s "
                               "(msg: %s)" % (ud.donestamp, type(e).__name__,
                                              str(e)))

    try:
        checksums = verify_checksum(ud, d, precomputed_checksums)
        # If the cache file did not have the checksums, compute and store them
        # as an upgrade path from the previous done stamp file format.
        if checksums != precomputed_checksums:
            with open(ud.donestamp, "wb") as cachefile:
                p = pickle.Pickler(cachefile, 2)
                p.dump(checksums)
        return True
    except ChecksumError as e:
        # Checksums failed to verify, trigger re-download and remove the
        # incorrect stamp file.
        logger.warning("Checksum mismatch for local file %s\n"
                       "Cleaning and trying again." % ud.localpath)
        if os.path.exists(ud.localpath):
            rename_bad_checksum(ud, e.checksum)
        bb.utils.remove(ud.donestamp)
    return False
def update_stamp(ud, d):
    """
    donestamp is file stamp indicating the whole fetching is done
    this function update the stamp after verifying the checksum

    Re-raises ChecksumError (after cleaning up the bad download and stamp)
    so the caller sees the failure.
    """
    if not ud.needdonestamp:
        return

    if os.path.exists(ud.donestamp):
        # Touch the done stamp file to show active use of the download
        try:
            os.utime(ud.donestamp, None)
        except:
            # Errors aren't fatal here -- deliberately best-effort; the stamp
            # already exists, only its mtime update failed.
            pass
    else:
        try:
            checksums = verify_checksum(ud, d)
            # Store the checksums for later re-verification against the recipe
            with open(ud.donestamp, "wb") as cachefile:
                p = pickle.Pickler(cachefile, 2)
                p.dump(checksums)
        except ChecksumError as e:
            # Checksums failed to verify, trigger re-download and remove the
            # incorrect stamp file.
            logger.warning("Checksum mismatch for local file %s\n"
                           "Cleaning and trying again." % ud.localpath)
            if os.path.exists(ud.localpath):
                rename_bad_checksum(ud, e.checksum)
            bb.utils.remove(ud.donestamp)
            raise
def subprocess_setup():
    """Restore default SIGPIPE handling (intended for child process setup)."""
    # Python installs a SIGPIPE handler by default. This is usually not what
    # non-Python subprocesses expect.
    # SIGPIPE errors are known issues with gzip/bash
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def get_autorev(d):
    """Return the "AUTOINC" sentinel revision, disabling recipe caching
    unless BB_SRCREV_POLICY is "cache"."""
    # only not cache src rev in autorev case
    if d.getVar('BB_SRCREV_POLICY') != "cache":
        d.setVar('BB_DONT_CACHE', '1')
    return "AUTOINC"
def get_srcrev(d, method_name='sortable_revision'):
    """
    Return the revision string, usually for use in the version string (PV) of the current package
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.

    The idea here is that we put the string "AUTOINC+" into return value if the revisions are not
    incremental, other code is then responsible for turning that into an increasing value (if needed)

    A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
    that fetcher provides a method with the given name and the same signature as sortable_revision.
    """

    scms = []
    fetcher = Fetch(d.getVar('SRC_URI').split(), d)
    urldata = fetcher.ud
    # Collect only the SRC_URI entries whose fetch method tracks revisions.
    for u in urldata:
        if urldata[u].method.supports_srcrev():
            scms.append(u)

    if len(scms) == 0:
        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")

    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
        autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
        # Revisions are shortened to at most 10 characters.
        if len(rev) > 10:
            rev = rev[:10]
        if autoinc:
            return "AUTOINC+" + rev
        return rev

    #
    # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = d.getVar('SRCREV_FORMAT')
    if not format:
        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.\n"\
                         "The SCMs are:\n%s" % '\n'.join(scms))

    name_to_rev = {}
    seenautoinc = False
    for scm in scms:
        ud = urldata[scm]
        for name in ud.names:
            autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
            seenautoinc = seenautoinc or autoinc
            if len(rev) > 10:
                rev = rev[:10]
            name_to_rev[name] = rev
    # Replace names by revisions in the SRCREV_FORMAT string. The approach used
    # here can handle names being prefixes of other names and names appearing
    # as substrings in revisions (in which case the name should not be
    # expanded). The '|' regular expression operator tries matches from left to
    # right, so we need to sort the names with the longest ones first.
    names_descending_len = sorted(name_to_rev, key=len, reverse=True)
    name_to_rev_re = "|".join(re.escape(name) for name in names_descending_len)
    format = re.sub(name_to_rev_re, lambda match: name_to_rev[match.group(0)], format)

    if seenautoinc:
        format = "AUTOINC+" + format

    return format
def localpath(url, d):
    """Convenience wrapper: the local path a single *url* downloads to."""
    single_fetcher = bb.fetch2.Fetch([url], d)
    return single_fetcher.localpath(url)
def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    Optionally remove the files/directories listed in cleanup upon failure
    """

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export varaiables?
    exportvars = ['HOME', 'PATH',
                  'HTTP_PROXY', 'http_proxy',
                  'HTTPS_PROXY', 'https_proxy',
                  'FTP_PROXY', 'ftp_proxy',
                  'FTPS_PROXY', 'ftps_proxy',
                  'NO_PROXY', 'no_proxy',
                  'ALL_PROXY', 'all_proxy',
                  'GIT_PROXY_COMMAND',
                  'GIT_SSH',
                  'GIT_SSL_CAINFO',
                  'GIT_SMART_HTTP',
                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
                  'SOCKS5_USER', 'SOCKS5_PASSWD',
                  'DBUS_SESSION_BUS_ADDRESS',
                  'P4CONFIG']

    if not cleanup:
        cleanup = []

    # If PATH contains WORKDIR which contains PV-PR which contains SRCPV we
    # can end up in circular recursion here so give the option of breaking it
    # in a data store copy.
    try:
        d.getVar("PV")
        d.getVar("PR")
    except bb.data_smart.ExpansionError:
        d = bb.data.createCopy(d)
        d.setVar("PV", "fetcheravoidrecurse")
        d.setVar("PR", "fetcheravoidrecurse")

    # Prepend 'export VAR="..."' statements so the command sees the proxy
    # and authentication environment from the datastore (or original env).
    origenv = d.getVar("BB_ORIGENV", False)
    for var in exportvars:
        val = d.getVar(var) or (origenv and origenv.getVar(var))
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    # Ensure that a _PYTHON_SYSCONFIGDATA_NAME value set by a recipe
    # (for example via python3native.bbclass since warrior) is not set for
    # host Python (otherwise tools like git-make-shallow will fail)
    cmd = 'unset _PYTHON_SYSCONFIGDATA_NAME; ' + cmd

    # Disable pseudo as it may affect ssh, potentially causing it to hang.
    cmd = 'export PSEUDO_DISABLED=1; ' + cmd

    if workdir:
        logger.debug(1, "Running '%s' in %s" % (cmd, workdir))
    else:
        logger.debug(1, "Running %s", cmd)

    success = False
    error_message = ""

    try:
        (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
        success = True
    except bb.process.NotFoundError as e:
        error_message = "Fetch command %s" % (e.command)
    except bb.process.ExecutionError as e:
        if e.stdout:
            output = "output:\n%s\n%s" % (e.stdout, e.stderr)
        elif e.stderr:
            output = "output:\n%s" % e.stderr
        else:
            output = "no output"
        error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
    except bb.process.CmdError as e:
        error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
    if not success:
        # On failure, remove any partial results the caller asked us to clean.
        for f in cleanup:
            try:
                bb.utils.remove(f, True)
            except OSError:
                pass

        raise FetchError(error_message)

    return output
def check_network_access(d, info, url):
    """
    log remote network access, and error if BB_NO_NETWORK is set or the given
    URI is untrusted
    """
    if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
        raise NetworkAccess(url, info)
    if not trusted_network(d, url):
        raise UntrustedUrl(url, info)
    # Access is permitted; record it for the debug log.
    logger.debug(1, "Fetcher accessed the network with the command %s" % info)
def build_mirroruris(origud, mirrors, ld):
    """Expand mirror rules into concrete candidate URLs for origud.

    Returns (uris, uds): candidate URL strings and their matching FetchData
    objects, in the order they should be tried.  Rules are applied
    recursively (a mirror of a mirror), with the current rule removed at
    each step to prevent direct and indirect recursion.
    """
    uris = []
    uds = []

    # Substitution keys available to uri_replace() rules.
    replacements = {}
    replacements["TYPE"] = origud.type
    replacements["HOST"] = origud.host
    replacements["PATH"] = origud.path
    replacements["BASENAME"] = origud.path.split("/")[-1]
    replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')

    def adduri(ud, uris, uds, mirrors, tarballs):
        for line in mirrors:
            try:
                (find, replace) = line
            except ValueError:
                continue

            for tarball in tarballs:
                newuri = uri_replace(ud, find, replace, replacements, ld, tarball)
                if not newuri or newuri in uris or newuri == origud.url:
                    continue

                if not trusted_network(ld, newuri):
                    logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri))
                    continue

                # Create a local copy of the mirrors minus the current line
                # this will prevent us from recursively processing the same line
                # as well as indirect recursion A -> B -> C -> A
                localmirrors = list(mirrors)
                localmirrors.remove(line)

                try:
                    newud = FetchData(newuri, ld)
                    newud.setup_localpath(ld)
                except bb.fetch2.BBFetchException as e:
                    logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
                    logger.debug(1, str(e))
                    try:
                        # setup_localpath of file:// urls may fail, we should still see
                        # if mirrors of the url exist
                        adduri(newud, uris, uds, localmirrors, tarballs)
                    except UnboundLocalError:
                        pass
                    continue
                uris.append(newuri)
                uds.append(newud)

                adduri(newud, uris, uds, localmirrors, tarballs)

    # [None] stands in for "no mirror tarball" when the method has none.
    adduri(origud, uris, uds, mirrors, origud.mirrortarballs or [None])

    return uris, uds
def rename_bad_checksum(ud, suffix):
    """Move a download whose checksum failed aside, tagging it with *suffix*."""
    source = ud.localpath
    if source is None:
        return

    target = "%s_bad-checksum_%s" % (source, suffix)
    bb.warn("Renaming %s to %s" % (source, target))
    if not bb.utils.movefile(source, target):
        bb.warn("Renaming %s to %s failed, grep movefile in log.do_fetch to see why" % (source, target))
def try_mirror_url(fetch, origud, ud, ld, check = False):
    """
    Try a single mirror candidate `ud` on behalf of the original `origud`.

    Returns:
      - a truthy value (local path / checkstatus result) when finished,
      - None when finished with nothing to report,
      - False when this mirror failed and the caller should try the next one.
    """
    # Return of None or a value means we're finished
    # False means try another url
    if ud.lockfile and ud.lockfile != origud.lockfile:
        # The mirror download target differs from the original's, so it needs
        # its own lock to serialise concurrent tasks.
        lf = bb.utils.lockfile(ud.lockfile)
    try:
        if check:
            # checkstatus-only mode: no download, just report reachability.
            found = ud.method.checkstatus(fetch, ud, ld)
            if found:
                return found
            return False
        if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
            ud.method.download(ud, ld)
            if hasattr(ud.method,"build_mirror_data"):
                ud.method.build_mirror_data(ud, ld)
        if not ud.localpath or not os.path.exists(ud.localpath):
            return False
        if ud.localpath == origud.localpath:
            return ud.localpath
        # We may be obtaining a mirror tarball which needs further processing by the real fetcher
        # If that tarball is a local file:// we need to provide a symlink to it
        dldir = ld.getVar("DL_DIR")
        if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
            # Create donestamp in old format to avoid triggering a re-download
            if ud.donestamp:
                bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
                open(ud.donestamp, 'w').close()
            dest = os.path.join(dldir, os.path.basename(ud.localpath))
            if not os.path.exists(dest):
                # In case this is executing without any file locks held (as is
                # the case for file:// URLs), two tasks may end up here at the
                # same time, in which case we do not want the second task to
                # fail when the link has already been created by the first task.
                try:
                    os.symlink(ud.localpath, dest)
                except FileExistsError:
                    pass
            # Hand the mirror tarball to the original fetcher (e.g. git) so it
            # can unpack/update from it.
            if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
                origud.method.download(origud, ld)
                if hasattr(origud.method, "build_mirror_data"):
                    origud.method.build_mirror_data(origud, ld)
            return origud.localpath
        # Otherwise the result is a local file:// and we symlink to it
        ensure_symlink(ud.localpath, origud.localpath)
        update_stamp(origud, ld)
        return ud.localpath
    except bb.fetch2.NetworkAccess:
        raise
    except IOError as e:
        if e.errno in [errno.ESTALE]:
            # Stale NFS handle: treat as a soft failure and try the next mirror.
            logger.warning("Stale Error Observed %s." % ud.url)
            return False
        raise
    except bb.fetch2.BBFetchException as e:
        if isinstance(e, ChecksumError):
            logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
            logger.warning(str(e))
            if os.path.exists(ud.localpath):
                rename_bad_checksum(ud, e.checksum)
        elif isinstance(e, NoChecksumError):
            raise
        else:
            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
            logger.debug(1, str(e))
        try:
            # Remove any partial download from this mirror attempt. clean()
            # may itself hit unbound locals on some fetchers; tolerate that.
            ud.method.clean(ud, ld)
        except UnboundLocalError:
            pass
        return False
    finally:
        if ud.lockfile and ud.lockfile != origud.lockfile:
            bb.utils.unlockfile(lf)
def ensure_symlink(target, link_name):
    """
    Guarantee that link_name is a symlink resolving to target.

    An existing, resolvable path at link_name is left untouched; a dangling
    (broken) symlink is removed and re-created.
    """
    if os.path.exists(link_name):
        # Already present and resolvable - nothing to do.
        return
    if os.path.islink(link_name):
        # Dangling symbolic link left over from a previous run - drop it.
        os.unlink(link_name)
    # Two tasks may race to create the link when no file lock is held
    # (as with file:// URLs); tolerate losing that race.
    try:
        os.symlink(target, link_name)
    except FileExistsError:
        pass
def try_mirrors(fetch, d, origud, mirrors, check = False):
    """
    Attempt to satisfy origud from a mirrored location.

    Expands the configured (find, replace) mirror list into concrete
    candidate URLs and tries each one in order, returning the first
    successful result (a local path, or a checkstatus value when check is
    set), or None when every candidate fails. d is a bb.data instance;
    a copy of it is used so mirror handling cannot pollute the original.
    """
    ld = d.createCopy()
    mirror_uris, mirror_uds = build_mirroruris(origud, mirrors, ld)
    for mirror_ud in mirror_uds:
        result = try_mirror_url(fetch, origud, mirror_ud, ld, check)
        # False means "try the next candidate"; anything else is final.
        if result != False:
            return result
    return None
def trusted_network(d, url):
    """
    Decide whether url may be fetched from the network.

    Returns True when the check is irrelevant (BB_NO_NETWORK is set, or no
    BB_ALLOWED_NETWORKS is configured globally or for this recipe, or the
    url has no host component), or when the url's host matches one of the
    allowed hosts. Hosts starting with '*.' match any subdomain.
    """
    if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
        # Networking is disabled entirely, so trust is moot.
        return True
    pkgname = d.expand(d.getVar('PN', False))
    allowed = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False) if pkgname else None
    if not allowed:
        allowed = d.getVar('BB_ALLOWED_NETWORKS')
    if not allowed:
        # Feature not enabled - every network is trusted.
        return True
    scheme, network, path, user, passwd, param = decodeurl(url)
    if not network:
        return True
    # Strip any port and normalise case before comparing.
    netname = network.split(':')[0].lower()
    for host in allowed.split(" "):
        host = host.lower()
        if host.startswith("*.") and ("." + netname).endswith(host[1:]):
            return True
        if host == netname:
            return True
    return False
def srcrev_internal_helper(ud, d, name):
    """
    Return:
        a) a source revision if specified
        b) latest revision if SRCREV="AUTOINC"
        c) None if not specified

    Resolution order for the SRCREV variable (most specific wins):
    SRCREV_<name>_pn-<PN>, SRCREV_<name>, SRCREV_pn-<PN>, SRCREV.
    A ;rev= or ;tag= URL parameter overrides (and must agree with) SRCREV.
    """
    srcrev = None
    pn = d.getVar("PN")
    attempts = []
    if name != '' and pn:
        attempts.append("SRCREV_%s_pn-%s" % (name, pn))
    if name != '':
        attempts.append("SRCREV_%s" % name)
    if pn:
        attempts.append("SRCREV_pn-%s" % pn)
    attempts.append("SRCREV")
    # First non-"INVALID", non-empty candidate wins.
    for a in attempts:
        srcrev = d.getVar(a)
        if srcrev and srcrev != "INVALID":
            break
    if 'rev' in ud.parm and 'tag' in ud.parm:
        raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))
    if 'rev' in ud.parm or 'tag' in ud.parm:
        if 'rev' in ud.parm:
            parmrev = ud.parm['rev']
        else:
            parmrev = ud.parm['tag']
        if srcrev == "INVALID" or not srcrev:
            return parmrev
        if srcrev != parmrev:
            raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
        return parmrev
    if srcrev == "INVALID" or not srcrev:
        raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
    if srcrev == "AUTOINC":
        # AUTOINC means "track the latest upstream revision".
        srcrev = ud.method.latest_revision(ud, d, name)
    return srcrev
def get_checksum_file_list(d):
    """ Get a list of files checksum in SRC_URI

    Returns the resolved local paths of all local file entries in
    SRC_URI as a space-separated string. Each entry has the form
    "<path>:<True|False>" where the boolean records whether the file
    currently exists on disk.
    """
    fetch = Fetch([], d, cache = False, localonly = True)
    dl_dir = d.getVar('DL_DIR')
    filelist = []
    for u in fetch.urls:
        ud = fetch.ud[u]
        # Only local file:// entries contribute to the checksum list;
        # non-local entries are None here because of localonly=True.
        if ud and isinstance(ud.method, local.Local):
            paths = ud.method.localpaths(ud, d)
            for f in paths:
                pth = ud.decodedurl
                if '*' in pth:
                    # Wildcard entries: keep the pattern attached to the
                    # resolved directory so it stays part of the signature.
                    f = os.path.join(os.path.abspath(f), pth)
                if f.startswith(dl_dir):
                    # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                    if os.path.exists(f):
                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
                    else:
                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
                filelist.append(f + ":" + str(os.path.exists(f)))
    return " ".join(filelist)
def get_file_checksums(filelist, pn):
    """Get a list of the checksums for a list of local files

    Returns the checksums for a list of local files, caching the results as
    it proceeds. Thin wrapper over the module-level _checksum_cache; pn is
    the recipe name used for cache bookkeeping.
    """
    return _checksum_cache.get_checksums(filelist, pn)
class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.

    Decodes the URL, selects the fetch method that supports it, resolves
    expected checksums, and computes the .done stamp / .lock file paths
    used to coordinate downloads.
    """
    def __init__(self, url, d, localonly = False):
        # localpath is the location of a downloaded result. If not set, the file is local.
        self.donestamp = None
        self.needdonestamp = True
        self.localfile = ""
        self.localpath = None
        self.lockfile = None
        self.mirrortarballs = []
        self.basename = None
        self.basepath = None
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(d.expand(url))
        self.date = self.getSRCDate(d)
        self.url = url
        # URL parameters may override credentials missing from the URL itself.
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False
        # Checksum variable names are namespaced by the ;name= parameter so
        # multiple SRC_URI entries can carry independent checksums.
        if "name" in self.parm:
            self.md5_name = "%s.md5sum" % self.parm["name"]
            self.sha256_name = "%s.sha256sum" % self.parm["name"]
        else:
            self.md5_name = "md5sum"
            self.sha256_name = "sha256sum"
        # Expected checksums: URL parameter wins, then the SRC_URI varflag;
        # only remote-file schemes consult the varflag at all.
        if self.md5_name in self.parm:
            self.md5_expected = self.parm[self.md5_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
            self.md5_expected = None
        else:
            self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
        if self.sha256_name in self.parm:
            self.sha256_expected = self.parm[self.sha256_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
            self.sha256_expected = None
        else:
            self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
        self.ignore_checksums = False
        self.names = self.parm.get("name",'default').split(',')
        # Probe the registered fetchers in order; the first that claims
        # support for this URL wins.
        self.method = None
        for m in methods:
            if m.supports(self, d):
                self.method = m
                break
        if not self.method:
            raise NoMethodError(url)
        if localonly and not isinstance(self.method, local.Local):
            raise NonLocalMethod()
        # Legacy ;proto= parameter is accepted but mapped to ;protocol=.
        if self.parm.get("proto", None) and "protocol" not in self.parm:
            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN'))
            self.parm["protocol"] = self.parm.get("proto", None)
        if hasattr(self.method, "urldata_init"):
            self.method.urldata_init(self, d)
        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        elif self.localfile:
            self.localpath = self.method.localpath(self, d)
        dldir = d.getVar("DL_DIR")
        if not self.needdonestamp:
            return
        # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
        if self.localpath and self.localpath.startswith(dldir):
            basepath = self.localpath
        elif self.localpath:
            basepath = dldir + os.sep + os.path.basename(self.localpath)
        elif self.basepath or self.basename:
            basepath = dldir + os.sep + (self.basepath or self.basename)
        else:
            bb.fatal("Can't determine lock path for url %s" % url)
        self.donestamp = basepath + '.done'
        self.lockfile = basepath + '.lock'
    def setup_revisions(self, d):
        """Resolve the source revision for each ;name= entry via SRCREV."""
        self.revisions = {}
        for name in self.names:
            self.revisions[name] = srcrev_internal_helper(self, d, name)
        # add compatibility code for non name specified case
        if len(self.names) == 1:
            self.revision = self.revisions[self.names[0]]
    def setup_localpath(self, d):
        """Lazily compute localpath via the fetch method if not yet set."""
        if not self.localpath:
            self.localpath = self.method.localpath(self, d)
    def getSRCDate(self, d):
        """
        Return the SRC Date for the component

        d the bb.data module
        Precedence: ;srcdate= URL parameter, then SRCDATE_<PN>, SRCDATE, DATE.
        """
        if "srcdate" in self.parm:
            return self.parm['srcdate']
        pn = d.getVar("PN")
        if pn:
            return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE")
        return d.getVar("SRCDATE") or d.getVar("DATE")
class FetchMethod(object):
    """Base class for 'fetch'ing data

    Concrete fetchers (git, wget, local, ...) subclass this and override
    supports(), download(), and friends. Instances are registered in the
    module-level `methods` list and probed per-URL.
    """
    def __init__(self, urls=None):
        self.urls = []
    def supports(self, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        return 0
    def localpath(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return os.path.join(d.getVar("DL_DIR"), urldata.localfile)
    def supports_checksum(self, urldata):
        """
        Is localpath something that can be represented by a checksum?
        """
        # We cannot compute checksums for directories
        if os.path.isdir(urldata.localpath) == True:
            return False
        if urldata.localpath.find("*") != -1:
            return False
        return True
    def recommends_checksum(self, urldata):
        """
        Is the backend on where checksumming is recommended (should warnings
        be displayed if there is no checksum)?
        """
        return False
    def _strip_leading_slashes(self, relpath):
        """
        Remove leading slash as os.path.join can't cope
        """
        while os.path.isabs(relpath):
            relpath = relpath[1:]
        return relpath
    def setUrls(self, urls):
        # Backing setter for the `urls` property below.
        self.__urls = urls
    def getUrls(self):
        # Backing getter for the `urls` property below.
        return self.__urls
    urls = property(getUrls, setUrls, None, "Urls property")
    def need_update(self, ud, d):
        """
        Force a fetch, even if localpath exists?
        """
        if os.path.exists(ud.localpath):
            return False
        return True
    def supports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False
    def download(self, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        raise NoMethodError(urldata.url)
    def unpack(self, urldata, rootdir, data):
        """
        Unpack the downloaded file into rootdir.

        Picks an extraction command from the file extension (tar/zip/rpm/
        deb/...), honouring the ;unpack=, ;dos=, ;extract= and ;subdir= URL
        parameters; files that need no unpacking are copied into place.
        """
        iterate = False
        file = urldata.localpath
        # Localpath can't deal with 'dir/*' entries, so it converts them to '.',
        # but it must be corrected back for local files copying
        if urldata.basename == '*' and file.endswith('/.'):
            file = '%s/%s' % (file.rstrip('/.'), urldata.path)
        try:
            unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
        except ValueError as exc:
            bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
                     (file, urldata.parm.get('unpack')))
        base, ext = os.path.splitext(file)
        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
            # Plain compressed file (no tar): decompress into rootdir.
            efile = os.path.join(rootdir, os.path.basename(base))
        else:
            efile = file
        cmd = None
        if unpack:
            if file.endswith('.tar'):
                cmd = 'tar x --no-same-owner -f %s' % file
            elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
                cmd = 'tar xz --no-same-owner -f %s' % file
            elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
                cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
                cmd = 'gzip -dc %s > %s' % (file, efile)
            elif file.endswith('.bz2'):
                cmd = 'bzip2 -dc %s > %s' % (file, efile)
            elif file.endswith('.txz') or file.endswith('.tar.xz'):
                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.xz'):
                cmd = 'xz -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.lz'):
                cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.lz'):
                cmd = 'lzip -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.7z'):
                cmd = '7z x -so %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.7z'):
                cmd = '7za x -y %s 1>/dev/null' % file
            elif file.endswith('.zip') or file.endswith('.jar'):
                try:
                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
                except ValueError as exc:
                    bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
                             (file, urldata.parm.get('dos')))
                cmd = 'unzip -q -o'
                if dos:
                    # ;dos=1 asks unzip to convert text files' line endings.
                    cmd = '%s -a' % cmd
                cmd = "%s '%s'" % (cmd, file)
            elif file.endswith('.rpm') or file.endswith('.srpm'):
                if 'extract' in urldata.parm:
                    unpack_file = urldata.parm.get('extract')
                    cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
                    # The extracted member is itself unpacked on a second pass.
                    iterate = True
                    iterate_file = unpack_file
                else:
                    cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
            elif file.endswith('.deb') or file.endswith('.ipk'):
                # deb/ipk are ar archives containing a data.tar.* payload.
                output = subprocess.check_output(['ar', '-t', file], preexec_fn=subprocess_setup)
                datafile = None
                if output:
                    for line in output.decode().splitlines():
                        if line.startswith('data.tar.'):
                            datafile = line
                            break
                    else:
                        raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
                else:
                    raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
                cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile)
        # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
        if 'subdir' in urldata.parm:
            subdir = urldata.parm.get('subdir')
            if os.path.isabs(subdir):
                if not os.path.realpath(subdir).startswith(os.path.realpath(rootdir)):
                    raise UnpackError("subdir argument isn't a subdirectory of unpack root %s" % rootdir, urldata.url)
                unpackdir = subdir
            else:
                unpackdir = os.path.join(rootdir, subdir)
            bb.utils.mkdirhier(unpackdir)
        else:
            unpackdir = rootdir
        if not unpack or not cmd:
            # If file == dest, then avoid any copies, as we already put the file into dest!
            dest = os.path.join(unpackdir, os.path.basename(file))
            if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
                destdir = '.'
                # For file:// entries all intermediate dirs in path must be created at destination
                if urldata.type == "file":
                    # Trailing '/' does a copying to wrong place
                    urlpath = urldata.path.rstrip('/')
                    # Want files places relative to cwd so no leading '/'
                    urlpath = urlpath.lstrip('/')
                    if urlpath.find("/") != -1:
                        destdir = urlpath.rsplit("/", 1)[0] + '/'
                        bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
                cmd = 'cp -fpPRH %s %s' % (file, destdir)
        if not cmd:
            return
        path = data.getVar('PATH')
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (file, unpackdir))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=unpackdir)
        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
        if iterate is True:
            # Second pass: unpack the member extracted from the rpm/srpm.
            iterate_urldata = urldata
            iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
            self.unpack(urldata, rootdir, data)
        return
    def clean(self, urldata, d):
        """
        Clean any existing full or partial download
        """
        bb.utils.remove(urldata.localpath)
    def try_premirror(self, urldata, d):
        """
        Should premirrors be used?
        """
        return True
    def checkstatus(self, fetch, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        """
        logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
        return True
    def latest_revision(self, ud, d, name):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        key = self.generate_revision_key(ud, d, name)
        try:
            return revs[key]
        except KeyError:
            # Cache miss: query the SCM once and memoise the answer.
            revs[key] = rev = self._latest_revision(ud, d, name)
            return rev
    def sortable_revision(self, ud, d, name):
        """Return (resolved, revision-string) for use in sortable PV values."""
        latest_rev = self._build_revision(ud, d, name)
        return True, str(latest_rev)
    def generate_revision_key(self, ud, d, name):
        """Build the persistent-cache key for this url/name, scoped by PN."""
        key = self._revision_key(ud, d, name)
        return "%s-%s" % (key, d.getVar("PN") or "")
    def latest_versionstring(self, ud, d):
        """
        Compute the latest release name like "x.y.x" in "x.y.x+gitHASH"
        by searching through the tags output of ls-remote, comparing
        versions and returning the highest match as a (version, revision) pair.
        """
        return ('', '')
class Fetch(object):
    """
    High-level driver that manages the FetchData for a set of SRC_URI
    entries and orchestrates download/checkstatus/unpack/clean across
    premirrors, upstream and mirrors.
    """
    def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
        # cache and localonly are mutually exclusive: the per-recipe
        # urldata cache would otherwise be polluted with partial entries.
        if localonly and cache:
            raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
        if len(urls) == 0:
            urls = d.getVar("SRC_URI").split()
        self.urls = urls
        self.d = d
        self.ud = {}
        self.connection_cache = connection_cache
        fn = d.getVar('FILE')
        mc = d.getVar('__BBMULTICONFIG') or ""
        # Reuse previously-built FetchData for this recipe file if cached.
        if cache and fn and mc + fn in urldata_cache:
            self.ud = urldata_cache[mc + fn]
        for url in urls:
            if url not in self.ud:
                try:
                    self.ud[url] = FetchData(url, d, localonly)
                except NonLocalMethod:
                    # In localonly mode, non-local URLs are recorded as None
                    # rather than treated as an error.
                    if localonly:
                        self.ud[url] = None
                        pass
        if fn and cache:
            urldata_cache[mc + fn] = self.ud
    def localpath(self, url):
        """Return the (expanded) local path for a single url."""
        if url not in self.urls:
            self.ud[url] = FetchData(url, self.d)
        self.ud[url].setup_localpath(self.d)
        return self.d.expand(self.ud[url].localpath)
    def localpaths(self):
        """
        Return a list of the local filenames, assuming successful fetch
        """
        local = []
        for u in self.urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            local.append(ud.localpath)
        return local
    def download(self, urls=None):
        """
        Fetch all urls

        Source order per url: existing verified download, then PREMIRRORS,
        then upstream (if the network is trusted/allowed), then MIRRORS.
        Raises FetchError when every source fails.
        """
        if not urls:
            urls = self.urls
        network = self.d.getVar("BB_NO_NETWORK")
        premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))
        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            localpath = ""
            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)
            try:
                # Restore the caller's BB_NO_NETWORK each iteration; the
                # premirroronly branch below may temporarily force it to "1".
                self.d.setVar("BB_NO_NETWORK", network)
                if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
                    localpath = ud.localpath
                elif m.try_premirror(ud, self.d):
                    logger.debug(1, "Trying PREMIRRORS")
                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
                    localpath = try_mirrors(self, self.d, ud, mirrors, False)
                    if localpath:
                        try:
                            # early checksum verification so that if the checksum of the premirror
                            # contents mismatch the fetcher can still try upstream and mirrors
                            update_stamp(ud, self.d)
                        except ChecksumError as e:
                            logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
                            logger.debug(1, str(e))
                            localpath = ""
                if premirroronly:
                    self.d.setVar("BB_NO_NETWORK", "1")
                firsterr = None
                verified_stamp = verify_donestamp(ud, self.d)
                if not localpath and (not verified_stamp or m.need_update(ud, self.d)):
                    try:
                        if not trusted_network(self.d, ud.url):
                            raise UntrustedUrl(ud.url)
                        logger.debug(1, "Trying Upstream")
                        m.download(ud, self.d)
                        if hasattr(m, "build_mirror_data"):
                            m.build_mirror_data(ud, self.d)
                        localpath = ud.localpath
                        # early checksum verify, so that if checksum mismatched,
                        # fetcher still have chance to fetch from mirror
                        update_stamp(ud, self.d)
                    except bb.fetch2.NetworkAccess:
                        raise
                    except BBFetchException as e:
                        if isinstance(e, ChecksumError):
                            logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
                            logger.debug(1, str(e))
                            if os.path.exists(ud.localpath):
                                rename_bad_checksum(ud, e.checksum)
                        elif isinstance(e, NoChecksumError):
                            raise
                        else:
                            logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
                            logger.debug(1, str(e))
                        # Remember the upstream failure so it can be reported
                        # if the mirrors also fail.
                        firsterr = e
                        # Remove any incomplete fetch
                        if not verified_stamp:
                            m.clean(ud, self.d)
                        logger.debug(1, "Trying MIRRORS")
                        mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                        localpath = try_mirrors(self, self.d, ud, mirrors)
                    if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
                        if firsterr:
                            logger.error(str(firsterr))
                        raise FetchError("Unable to fetch URL from any source.", u)
                    update_stamp(ud, self.d)
            except IOError as e:
                if e.errno in [errno.ESTALE]:
                    logger.error("Stale Error Observed %s." % u)
                    raise ChecksumError("Stale Error Detected")
            except BBFetchException as e:
                if isinstance(e, ChecksumError):
                    logger.error("Checksum failure fetching %s" % u)
                raise
            finally:
                if ud.lockfile:
                    bb.utils.unlockfile(lf)
    def checkstatus(self, urls=None):
        """
        Check all urls exist upstream

        Probed in PREMIRRORS -> original URI -> MIRRORS order; raises
        FetchError for any url that no source can satisfy.
        """
        if not urls:
            urls = self.urls
        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            logger.debug(1, "Testing URL %s", u)
            # First try checking uri, u, from PREMIRRORS
            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
            ret = try_mirrors(self, self.d, ud, mirrors, True)
            if not ret:
                # Next try checking from the original uri, u
                ret = m.checkstatus(self, ud, self.d)
                if not ret:
                    # Finally, try checking uri, u, from MIRRORS
                    mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                    ret = try_mirrors(self, self.d, ud, mirrors, True)
            if not ret:
                raise FetchError("URL %s doesn't work" % u, u)
    def unpack(self, root, urls=None):
        """
        Unpack urls to root
        """
        if not urls:
            urls = self.urls
        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)
            ud.method.unpack(ud, root, self.d)
            if ud.lockfile:
                bb.utils.unlockfile(lf)
    def clean(self, urls=None):
        """
        Clean files that the fetcher gets or places
        """
        if not urls:
            urls = self.urls
        for url in urls:
            if url not in self.ud:
                self.ud[url] = FetchData(url, self.d)
            ud = self.ud[url]
            ud.setup_localpath(self.d)
            if not ud.localfile and ud.localpath is None:
                # Nothing was ever materialised for this url.
                continue
            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)
            ud.method.clean(ud, self.d)
            if ud.donestamp:
                bb.utils.remove(ud.donestamp)
            if ud.lockfile:
                bb.utils.unlockfile(lf)
class FetchConnectionCache(object):
    """
    Pool of open socket connections, keyed by "host:port".

    Stored connections are expected to expose a close() method; removal
    and shutdown close them before discarding.
    """
    def __init__(self):
        self.cache = {}
    def get_connection_name(self, host, port):
        """Build the cache key for a host/port pair."""
        return "%s:%s" % (host, port)
    def add_connection(self, host, port, connection):
        """Store connection unless one is already pooled for this key."""
        key = self.get_connection_name(host, port)
        self.cache.setdefault(key, connection)
    def get_connection(self, host, port):
        """Return the pooled connection for host/port, or None."""
        return self.cache.get(self.get_connection_name(host, port))
    def remove_connection(self, host, port):
        """Close and drop the pooled connection for host/port, if any."""
        key = self.get_connection_name(host, port)
        # Sentinel keeps a stored None distinguishable from "absent".
        missing = object()
        connection = self.cache.pop(key, missing)
        if connection is not missing:
            connection.close()
    def close_connections(self):
        """Close and drop every pooled connection."""
        while self.cache:
            _, connection = self.cache.popitem()
            connection.close()
from . import cvs
from . import git
from . import gitsm
from . import gitannex
from . import local
from . import svn
from . import wget
from . import ssh
from . import sftp
from . import s3
from . import perforce
from . import bzr
from . import hg
from . import osc
from . import repo
from . import clearcase
from . import npm
# Instantiate and register one fetcher per supported URL scheme.
# Order matters: FetchData.__init__ walks this list and the first method
# whose supports() accepts the URL is chosen.
methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(gitsm.GitSM())
methods.append(gitannex.GitANNEX())
methods.append(cvs.Cvs())
methods.append(ssh.SSH())
methods.append(sftp.SFTP())
methods.append(s3.S3())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())
methods.append(clearcase.ClearCase())
methods.append(npm.Npm())
| schleichdi2/OPENNFR-6.3-CORE | bitbake/lib/bb/fetch2/__init__.py | Python | gpl-2.0 | 67,666 |
import sys
import os
#For baseline and redundancy-detection, to prepare the message-size picture
def MessageSize(typePrefix, directory):
    """Tabulate overall message size per (data amount, redundancy) pair.

    Reads <directory><typePrefix>_da<amount>_r<redundancy>_overhead.data for
    every combination and writes a gnuplot-ready table to
    <typePrefix>-msgsize.data: one row per data amount, one column per
    redundancy level.
    """
    wf = open("%(typePrefix)s-msgsize.data"%vars(), "w")
    wf.write("#Suggest Filename: %(typePrefix)s-message.data\n#Data for drawing message overall size in different Amount/Redundancy\n"%vars())
    wf.write("#row: amount(10000, 20000, 30000, 40000, 50000)\n#col: redundancy(1, 2, 3, 4, 5)\n")
    wf.write('#amount\tRedundancy 1\tRedundancy 2\tRedundancy 3\tRedundancy 4\tRedundancy 5\n')
    for amount in [10000,20000,30000,40000,50000]:
        wf.write(str(amount) + " ")
        for redundancy in [1,2,3,4,5]:
            file = open("%(directory)s%(typePrefix)s_da%(amount)s_r%(redundancy)s_overhead.data"%vars())
            for line in file:
                # Skip comment lines; column 8 holds the overall message size.
                if line[0] != "#":
                    numbers = line.split(' ')
                    wf.write(numbers[7]+" ")
            # Bug fix: close each log inside the loop (previously only the
            # last file opened per row was ever closed).
            file.close()
        wf.write("\n")
    # Bug fix: close the output so the table is fully flushed to disk.
    wf.close()
def RecvToSendRatio(typePrefix, directory):
writefile = open("%(typePrefix)s-rsratio-nonce.data"%vars(), "w")
writefile.write("#Suggest Filename: %(typePrefix)s-rsratio.data\n#Data for drawing each package in different Amount/Redundancy\n"%vars())
writefile.write("#MPM100 ratio MPM200 ratio MPM300 ratio MPM400 ratio MPM500 ratio MPM600 ratio NoLimit ratio\n")
writefile.write("0 0 0 0 0 0 0 0 0 0 \n")
backofftime = 2.5 # may subject to change by the data amount wanted to observe
da=50000
msgcount = {}
ratiotemp = {}
ratio = {}
for redundancy in [1,2,3,4,5]:
msgcount[redundancy] = {}
for logcontent in ['SI','SD']:
file = open("%(directory)s%(typePrefix)s_da%(da)s_r%(redundancy)s_%(logcontent)s.data"%vars())
for line in file:
if line[0:2] == logcontent:
info = line.split(' ')
for x in info:
if x[0:2] == "Ho":
nonce = x.split(':')[1]
if msgcount[redundancy].has_key(nonce):
msgcount[redundancy][nonce]["s"] += 1
else:
msgcount[redundancy][nonce] = {}
msgcount[redundancy][nonce]["s"] = 1
msgcount[redundancy][nonce]["r"] = 0
msgcount[redundancy][nonce]["rs"] = 0
for logcontent in ['RI','DRD']:
file = open("%(directory)s%(typePrefix)s_da%(da)s_r%(redundancy)s_%(logcontent)s.data"%vars())
for line in file:
if line[0:2] == logcontent or line[0:3] == logcontent:
info = line.split(' ')
for x in info:
if x[0:2] == "Ho":
nonce = x.split(':')[1]
if(msgcount[redundancy].has_key(nonce)):
msgcount[redundancy][nonce]["r"] += 1
else:
print logcontent, redundancy, nonce
for nonce in msgcount[redundancy]:
msgcount[redundancy][nonce]['rs'] = float(msgcount[redundancy][nonce]['r']) / float(msgcount[redundancy][nonce]['s'])
msg = sorted(msgcount[redundancy].iteritems(), key=lambda s: s[1]['rs'])
for x in range(len(msg)):
ratiotemp[msg[x][1]["rs"]] = float(x+1) / len(msg);
ratio[redundancy] = sorted(ratiotemp.iteritems())
ratiotemp.clear()
length = max(len(ratio[1]),len(ratio[2]),len(ratio[3]),len(ratio[4]),len(ratio[5]))
for j in range(length):
for i in [1,2,3,4,5]:
if(len(ratio[i])<=j):
writefile.write("null null")
else:
writefile.write(str(ratio[i][j][0])+" "+str(ratio[i][j][1])+ " ")
writefile.write("\n")
def RecvToSendRatioHopnonce(typePrefix, directory):
writefile = open("%(typePrefix)s-rsratio-hopnonce.data"%vars())
writefile.write("#Suggest Filename: %(typePrefix)s-rsratio.data\n#Data for drawing each package in different Amount/Redundancy\n"%vars())
writefile.write("#MPM100 ratio MPM200 ratio MPM300 ratio MPM400 ratio MPM500 ratio MPM600 ratio NoLimit ratio\n")
writefile.write("0 0 0 0 0 0 0 0 0 0 0 0 0 0\n")
backofftime = 2.5 # may subject to change by the data amount wanted to observe
msgcount = {}
ratiotemp = {}
ratio = {}
for mms in [100,200,300,400,500,600,-1]:
msgcount[mms] = {}
for logcontent in ['SI','SD']:
file = open("%(directory)s%(typePrefix)s_mb%(backofftime)s_mms%(mms)s_%(logcontent)s.data"%vars())
for line in file:
if line[0:2] == logcontent:
info = line.split(' ')
for x in info:
if x[0:2] == "Ho":
nonce = x.split(':')[1]
if msgcount[mms].has_key(nonce):
msgcount[mms][nonce]["s"] += 1
else:
msgcount[mms][nonce] = {}
msgcount[mms][nonce]["s"] = 1
msgcount[mms][nonce]["r"] = 0
msgcount[mms][nonce]["rs"] = 0
for logcontent in ['RI','DRD']:
file = open("%(directory)s%(typePrefix)s_mb%(backofftime)s_mms%(mms)s_%(logcontent)s.data"%vars())
for line in file:
if line[0:2] == logcontent or line[0:3] == logcontent:
info = line.split(' ')
for x in info:
if x[0:2] == "Ho":
nonce = x.split(':')[1]
if(msgcount[mms].has_key(nonce)):
msgcount[mms][nonce]["r"] += 1
else:
print logcontent, mms, nonce
for nonce in msgcount[mms]:
msgcount[mms][nonce]['rs'] = float(msgcount[mms][nonce]['r']) / float(msgcount[mms][nonce]['s'])
msg = sorted(msgcount[mms].iteritems(), key=lambda s: s[1]['rs'])
for x in range(len(msg)):
ratiotemp[msg[x][1]["rs"]] = float(x+1) / len(msg);
ratio[mms] = sorted(ratiotemp.iteritems())
ratiotemp.clear()
length = max(len(ratio[100]),len(ratio[200]),len(ratio[300]),len(ratio[400]),len(ratio[500]),len(ratio[-1]))
for j in range(length):
for i in [100,200,300,400,500,600,-1]:
if(len(ratio[i])<=j):
writefile.write("null null")
else:
writefile.write(str(ratio[i][j][0])+" "+str(ratio[i][j][1]))
writefile.write("\n")
#Get recall and latency
def RecallAndLatency(typePrefix, directory):
recallf = open("./%(typePrefix)s-recall.data"%vars(), "w")
latencyf = open("./%(typePrefix)s-latency.data"%vars(), "w")
recallf.write("#Data for recall of the %(typePrefix)s\n"%vars())
latencyf.write("#Data for latency of the %(typePrefix)s\n"%vars())
recallf.write("# row: max_backoff(0 0.5 1 1.5 2 2.5 3)\n")
recallf.write("# col: max_message_size(-1, 200, 400, 600, 800, 1000)\n")
recallf.write("#MaxBackoff No Limits 100 200 300 400 500\n")
latencyf.write("# row: max_backoff(0 0.5 1 1.5 2 2.5 3)\n")
latencyf.write("# col: max_message_size(-1, 200, 400, 600, 800, 1000)\n")
latencyf.write("#MaxBackoff No Limits 100 200 300 400 500\n")
for amount in [10000,20000,30000,40000,50000]:
recallf.write(str(amount)+" ")
latencyf.write(str(amount)+" ")
for redundancy in [1,2,3,4,5]:
file = open("%(directory)s%(typePrefix)s_da%(amount)s_r%(redundancy)s_0.data"%vars())
line = file.readlines()[-1].split()
recallf.write(str(float(line[1])/amount)+" ")
latencyf.write(line[0]+" ")
file.close()
recallf.write("\n")
latencyf.write("\n")
recallf.close()
latencyf.close()
# os.system("gnuplot collision-avoidance-recall.gp")
# os.system("gnuplot collision-avoidance-latency.gp")
def RSRHeatmap(typePrefix, directory):
    """Compute a per-node receive/send ratio heatmap on a 10x10 node grid
    (data amount 50000, redundancy 1) and write it as a 10x10 table to
    <typePrefix>-heatmap.data for gnuplot.

    For each message a node sent, receives of that hop nonce are counted
    on the node's grid neighbours; the node's value is the average number
    of neighbour receives per sent message.
    """
    amount = 50000
    redundancy = 1
    sendList = []
    recvList = []
    ratiolist = []
    # One slot per node of the 10x10 grid (row-major indexing).
    for i in xrange(100):
        sendList.append([])
        recvList.append([])
        ratiolist.append(0)
    # Gather the hop nonces each node sent ('SI'/'SD' log lines; the node
    # id is column 1 of the line).
    for logcontent in ['SI','SD']:
        file = open("%(directory)s%(typePrefix)s_da%(amount)s_r%(redundancy)s_%(logcontent)s.data"%vars())
        for line in file:
            if(line[0:2] == logcontent):
                info = line.split(" ")
                hopnonce = 0
                for x in info:
                    if x[0:2] == "Ho":
                        hopnonce = int(x.split(":")[1])
                if hopnonce != 0:
                    sendList[int(info[1])].append(hopnonce)
        file.close()
    # Gather the hop nonces each node received ('RI'/'DRD' log lines).
    for logcontent in ['RI','DRD']:
        file = open("%(directory)s%(typePrefix)s_da%(amount)s_r%(redundancy)s_%(logcontent)s.data"%vars())
        for line in file:
            if(line[0:2] == logcontent or line[0:3] == logcontent):
                info = line.split(" ")
                hopnonce = 0
                for x in info:
                    if x[0:2] == "Ho":
                        hopnonce = int(x.split(":")[1])
                if hopnonce != 0:
                    recvList[int(info[1])].append(hopnonce)
        file.close()
    for i in xrange(100):
        for x in sendList[i]:
            recv = 0
            # Index offsets of the 8 grid neighbours on a 10-wide row-major
            # grid: -11/-10/-9 above, -1/+1 beside, +9/+10/+11 below.
            for ki in [-11,-10,-9,-1,1,9,10,11]:
                if (i+ki >99 or i+ki<0):
                    # Off the top/bottom edge of the grid.
                    continue
                elif(i %10 == 0 and (ki == -1 or ki == -11 or ki == 9)):
                    # Left-column node: no neighbours wrapping to its left.
                    continue
                elif(i % 10 == 9 and (ki == 1 or ki == 11 or ki == -9)):
                    # Right-column node: no neighbours wrapping to its right.
                    continue
                recv += recvList[i+ki].count(x)
            ratiolist[i] += recv
        ratiolist[i] /= float(len(sendList[i]))
    writefile = open("./%(typePrefix)s-heatmap.data"%vars(), "w")
    writefile.write("#Data for receive send ratio on each ndoe of the %(typePrefix)s\n"%vars())
    for i in xrange(10):
        for j in xrange(10):
            writefile.write(str(ratiolist[i*10 + j])+"\t")
        writefile.write("\n")
    writefile.close()
    # os.system("gnuplot collision-avoidance-heatmap.gp")
#MessageSize("baseline", "F:\\Data_baseline\\")
#MessageSize("redundancy_detection", "F:\\Data_redundancy\\")
#RecvToSendRatio("collision_avoidance", "/home/theodore/pecns3/")
#RecallAndLatency("collision_avoidance", "/home/theodore/pecns3/")
#RecvToSendRatioHopnonce("collision_avoidance", "/home/theodore/pecns3/")
#ReceivedConsumer("baseline", "F:\\Data_baseline\\", 44)
#RSRHeatmap("collision_avoidance", "/home/theodore/pecns3/")
#main
if len(sys.argv) <=1:
print "This python program is the automatic processing for the data log of the ns3 simulator of the pec. \nThis particular for the collision avoidance.\n"
print "Useage: choose the function by the first parameter."
print "a. Receive send ratio of each message."
print "b. Receive send ratio of each message by hop."
print "c. Receive send ratio heatmap for each node by each message hop."
print "d. Recall and latency."
print "e. Message overall size"
print "Put this file into the same directory of the data files. Then run \"python\" + filename + chiose to get the picture direcely."
else:
if sys.argv[1] == "a":
RecvToSendRatio("baseline", "./")
elif sys.argv[1] == "b":
RecvToSendRatioHopnonce("baseline", "./")
elif sys.argv[1] == "c":
RSRHeatmap("baseline", "./")
elif sys.argv[1] == "d":
RecallAndLatency("baseline", "./")
elif sys.argv[1] == "e":
MessageSize("baseline", "./")
| momingsong/ns-3 | bash-py-gp/baseline_picdata.py | Python | gpl-2.0 | 12,175 |
#!/usr/bin/env python
# Django management entry point for the "decommentariis" project.
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings unless the caller already
    # exported DJANGO_SETTINGS_MODULE, then hand the argv straight to
    # Django's command dispatcher (runserver, migrate, ...).
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "decommentariis.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| scotartt/commentarius | decommentariis/manage.py | Python | gpl-2.0 | 256 |
## mappingsPage.py - show selinux mappings
## Copyright (C) 2006 Red Hat, Inc.
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
## Author: Dan Walsh
import string
import gtk
import gtk.glade
import os
import gobject
import sys
import seobject
##
## I18N
##
PROGNAME = "policycoreutils"
try:
    import gettext
    kwargs = {}
    if sys.version_info < (3,):
        # Python 2's gettext.install() needs unicode=True to install a
        # unicode-returning _(); the keyword no longer exists on Python 3.
        kwargs['unicode'] = True
    gettext.install(PROGNAME,
                    localedir="/usr/share/locale",
                    codeset='utf-8',
                    **kwargs)
except:
    # Translation setup failed -- install an identity _() so the rest of
    # the module can still call _("...") safely.
    try:
        import builtins
        builtins.__dict__['_'] = str
    except ImportError:
        # Python 2 fallback: builtins is spelled __builtin__ there
        import __builtin__
        __builtin__.__dict__['_'] = unicode
class loginsPage:
    """Page showing the SELinux login mappings.

    Backs the "mappingsView" tree view with a three-column ListStore and
    dumps the current login records to stdout.
    """
    def __init__(self, xml):
        """Build the page from the glade tree.

        xml -- gtk.glade XML object containing the "mappingsView" widget.
        """
        self.xml = xml
        self.view = xml.get_widget("mappingsView")
        self.store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING)
        self.store.set_sort_column_id(0, gtk.SORT_ASCENDING)
        self.view.set_model(self.store)
        # BUG FIX: loginRecords and translate live in the seobject module.
        # The module is imported as "import seobject" (there is no
        # "from seobject import *"), so the bare names were NameErrors.
        self.login = seobject.loginRecords()
        mappings = self.login.get_all(0)  # renamed from "dict" to avoid shadowing the builtin
        for k in sorted(mappings.keys()):
            print("%-25s %-25s %-25s" % (k, mappings[k][0], seobject.translate(mappings[k][1])))
| jpacg/su-binary | jni/selinux/gui/mappingsPage.py | Python | gpl-2.0 | 1,873 |
#
# Copyright (C) 2007, One Laptop Per Child
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import logging
import dbus
from dbus.gobject_service import ExportedGObject
import base64
SERVICE = "org.laptop.WebActivity"
IFACE = SERVICE
PATH = "/org/laptop/WebActivity"
_logger = logging.getLogger('messenger')
class Messenger(ExportedGObject):
    """D-Bus object exported over a Telepathy tube.

    Keeps the shared-link collection of a collaborative Web activity in
    sync between participants: new joiners pull missing links from every
    existing member, and link additions are broadcast via the _add_link
    signal.
    """
    def __init__(self, tube, is_initiator, model):
        """Export this object on *tube* at PATH and watch membership.

        tube -- the Telepathy tube connection
        is_initiator -- True when this instance started the shared session
        model -- local data model holding the shared links
        """
        ExportedGObject.__init__(self, tube, PATH)
        self.tube = tube
        self.is_initiator = is_initiator
        self.members = []
        self.entered = False
        self.model = model
        self.bus_name = None
        self.tube.watch_participants(self.participant_change_cb)
    def participant_change_cb(self, added, removed):
        """Track member joins/leaves; on the first callback, hook up the
        link-broadcast signal and (for joiners) sync with existing members."""
        _logger.debug('Participants change add=%s rem=%s'
                      %(added, removed))
        for handle, bus_name in added:
            _logger.debug('Add member handle=%s bus_name=%s'
                          %(str(handle), str(bus_name)))
            self.members.append(bus_name)
        for handle in removed:
            _logger.debug('Remove member %r', handle)
            try:
                self.members.remove(self.tube.participants[handle])
            except ValueError:
                # member already gone from our list -- nothing to do
                _logger.debug('Remove member %r - already absent', handle)
        if not self.entered:
            # first time through: subscribe to link broadcasts from peers
            self.tube.add_signal_receiver(self._add_link_receiver, '_add_link',
                                          IFACE, path=PATH,
                                          sender_keyword='sender',
                                          byte_arrays=True)
            self.bus_name = self.tube.get_unique_name()
            if self.is_initiator:
                _logger.debug('Initialising a new shared browser, I am %s .'
                              %self.tube.get_unique_name())
            else:
                # sync with other members
                _logger.debug('Joined I am %s .'%self.bus_name)
                for member in self.members:
                    if member != self.bus_name:
                        _logger.debug('Get info from %s' %member)
                        # ask each peer which links we are missing
                        self.tube.get_object(member, PATH).sync_with_members(
                            self.model.get_links_ids(), dbus_interface=IFACE,
                            reply_handler=self.reply_sync, error_handler=lambda
                            e:self.error_sync(e, 'transfering file'))
            self.entered = True
    def reply_sync(self, a_ids, sender):
        """Reply handler for sync_with_members: push *sender* the links it
        reported missing (a_ids carries a trailing '' sentinel)."""
        a_ids.pop()
        for link in self.model.data['shared_links']:
            if link['hash'] not in a_ids:
                self.tube.get_object(sender, PATH).send_link(
                    link['hash'], link['url'], link['title'], link['color'],
                    link['owner'], link['thumb'], link['timestamp'])
    def error_sync(self, e, when):
        # log sync failures; nothing is retried
        _logger.error('Error %s: %s'%(when, e))
    @dbus.service.method(dbus_interface=IFACE, in_signature='as',
                         out_signature='ass', sender_keyword='sender')
    def sync_with_members(self, b_ids, sender=None):
        '''Sync with members '''
        b_ids.pop()
        # links the caller wants from me
        for link in self.model.data['shared_links']:
            if link['hash'] not in b_ids:
                self.tube.get_object(sender, PATH).send_link(
                    link['hash'], link['url'], link['title'], link['color'],
                    link['owner'], link['thumb'], link['timestamp'])
        a_ids = self.model.get_links_ids()
        # '' sentinel keeps the list non-empty for the dbus signature
        a_ids.append('')
        # links I want from the caller
        return (a_ids, self.bus_name)
    @dbus.service.method(dbus_interface=IFACE, in_signature='ssssssd',
                         out_signature='')
    def send_link(self, identifier, url, title, color, owner, buf, timestamp):
        '''Send link'''
        a_ids = self.model.get_links_ids()
        # ignore links we already hold; buf is a base64-encoded thumbnail
        if identifier not in a_ids:
            thumb = base64.b64decode(buf)
            self.model.add_link(url, title, thumb, owner, color, timestamp)
    @dbus.service.signal(IFACE, signature='sssssd')
    def _add_link(self, url, title, color, owner, thumb, timestamp):
        '''Signal to send the link information (add)'''
        _logger.debug('Add Link: %s '%url)
    def _add_link_receiver(self, url, title, color, owner, buf, timestamp,
                           sender=None):
        '''Member sent a link'''
        handle = self.tube.bus_name_to_handle[sender]
        # ignore our own broadcast echo
        if self.tube.self_handle != handle:
            thumb = base64.b64decode(buf)
            self.model.add_link(url, title, thumb, owner, color, timestamp)
            _logger.debug('Added link: %s to linkbar.'%(url))
| lucian1900/Webified | messenger.py | Python | gpl-2.0 | 5,665 |
#! /usr/bin/env python
import sys
import os
import gtk
from arithmetic import Parser
class Editor(object):
    'A minimal editor'
    def __init__(self):
        """Load the glade UI, wire signals, and open the file named on the
        command line (if any) into the text buffer."""
        # path to UI file
        scriptPath = os.path.split( sys.argv[0] )[0]
        uiFilePath = os.path.join( scriptPath,'editor.ui' )
        self.builder = gtk.Builder()
        self.builder.add_from_file( uiFilePath )
        self.builder.connect_signals(self)
        self.textview = self.builder.get_object( 'textview1' )
        self.buffer = self.textview.get_buffer()
        # F5 inside the text view triggers the arithmetic evaluator
        self.textview.connect( "key_press_event", calculate, self.buffer )
        if len( sys.argv ) > 1:
            text = open( sys.argv[1] ).read()
            self.buffer.set_text( text )
    def run(self):
        """Enter the GTK main loop; Ctrl-C exits quietly."""
        try:
            gtk.main()
        except KeyboardInterrupt:
            pass
    def quit(self):
        """Stop the GTK main loop."""
        gtk.main_quit()
    def on_window1_delete_event(self, *args):
        """Window close button handler: shut the application down."""
        self.quit()
class ParserGTK(Parser):
    'Arithmetic Parser adapted to read from and write into a gtk.TextBuffer.'
    def parse( self, textBuffer ):
        'Evaluate every line of textBuffer in order, sharing one variable/function namespace.'
        for i in range( self.countLines( textBuffer ) ):
            self.parseLine( i, textBuffer, variables=self.variables, functions=self.functions )
    def countLines( self, textBuffer ):
        'Number of lines currently in the buffer.'
        return textBuffer.get_line_count()
    def readLine( self, i, textBuffer ):
        'Return the text of line i, or the empty string for a blank line.'
        iter_start = textBuffer.get_iter_at_line( i )
        if iter_start.ends_line():
            # the iterator is already at end-of-line => line i is empty
            return ''
        else:
            iter_end = textBuffer.get_iter_at_line( i )
            iter_end.forward_to_line_end()
            return textBuffer.get_text( iter_start, iter_end )
    def writeResult( self, i, textBuffer, start, end, text ):
        'Write text in line i of lines from start to end offset.'
        # Delete
        if end > start:
            # handle start at end of line or beyond
            iter_line = textBuffer.get_iter_at_line( i )
            nchars = iter_line.get_chars_in_line()
            if start > nchars-1:
                # clamp so the offset stays inside the line
                start = nchars-1
            iter_start = textBuffer.get_iter_at_line_offset( i, start )
            iter_end = textBuffer.get_iter_at_line_offset( i, end )
            textBuffer.delete( iter_start, iter_end )
        # Insert
        iter_start = textBuffer.get_iter_at_line_offset( i, start )
        textBuffer.insert( iter_start, text )
def calculate(widget, event, textbuffer):
    """Key handler: re-evaluate the buffer's arithmetic when F5 is pressed."""
    if event.keyval != gtk.keysyms.F5:
        return
    ParserGTK().parse(textbuffer)
if __name__ == '__main__':
    # Launch the editor when run as a script.
    editor = Editor()
    editor.run()
| ppaez/arithmetic | editor-gtk.py | Python | gpl-2.0 | 2,609 |
# -*- coding: utf-8 -*-
from __future__ import print_function
import re
import sys
from datetime import datetime, timedelta
import numpy as np
import pandas as pd
import pandas.compat as compat
import pandas.core.common as com
import pandas.util.testing as tm
from pandas import (Series, Index, DatetimeIndex, TimedeltaIndex, PeriodIndex,
Timedelta)
from pandas.compat import u, StringIO
from pandas.core.base import (FrozenList, FrozenNDArray, PandasDelegate,
NoNewAttributesMixin)
from pandas.tseries.base import DatetimeIndexOpsMixin
from pandas.util.testing import (assertRaisesRegexp, assertIsInstance)
class CheckStringMixin(object):
    """Mixin: smoke-test that a container's string conversions don't raise.

    The host TestCase must provide ``self.container`` and may provide a
    ``unicode_container`` attribute holding a non-ASCII variant.
    """
    def test_string_methods_dont_fail(self):
        # Only completion matters here, not the values produced.
        repr(self.container)
        str(self.container)
        bytes(self.container)
        if not compat.PY3:
            unicode(self.container)  # noqa

    def test_tricky_container(self):
        if not hasattr(self, 'unicode_container'):
            # BUG FIX: this module never imports ``nose``, so the old
            # ``raise nose.SkipTest(...)`` was a NameError.  nose's
            # SkipTest is unittest.SkipTest re-exported, so raising the
            # unittest one preserves the skip semantics under any runner.
            import unittest
            raise unittest.SkipTest('Need unicode_container to test with this')
        repr(self.unicode_container)
        str(self.unicode_container)
        bytes(self.unicode_container)
        if not compat.PY3:
            unicode(self.unicode_container)  # noqa
class CheckImmutable(object):
    """Mixin verifying a container rejects every mutation attempt.

    Hosts must provide ``self.container``, ``self.lst`` (expected
    contents) and ``self.klass``; they may define a ``mutable_methods``
    tuple of method names that must raise.
    """
    # every mutation attempt must raise a TypeError matching this message
    mutable_regex = re.compile('does not support mutable operations')
    def check_mutable_error(self, *args, **kwargs):
        # pass whatever functions you normally would to assertRaises (after the
        # Exception kind)
        assertRaisesRegexp(TypeError, self.mutable_regex, *args, **kwargs)
    def test_no_mutable_funcs(self):
        """Item/slice assignment, deletion, and every declared mutable
        method must raise the immutability TypeError."""
        def setitem():
            self.container[0] = 5
        self.check_mutable_error(setitem)
        def setslice():
            self.container[1:2] = 3
        self.check_mutable_error(setslice)
        def delitem():
            del self.container[0]
        self.check_mutable_error(delitem)
        def delslice():
            del self.container[0:3]
        self.check_mutable_error(delslice)
        mutable_methods = getattr(self, "mutable_methods", [])
        for meth in mutable_methods:
            self.check_mutable_error(getattr(self.container, meth))
    def test_slicing_maintains_type(self):
        # slicing an immutable container must return the immutable class
        result = self.container[1:2]
        expected = self.lst[1:2]
        self.check_result(result, expected)
    def check_result(self, result, expected, klass=None):
        # klass defaults to the host-provided container class
        klass = klass or self.klass
        assertIsInstance(result, klass)
        self.assertEqual(result, expected)
class TestFrozenList(CheckImmutable, CheckStringMixin, tm.TestCase):
    """Immutability and string-conversion checks for FrozenList."""
    mutable_methods = ('extend', 'pop', 'remove', 'insert')
    unicode_container = FrozenList([u("\u05d0"), u("\u05d1"), "c"])
    def setUp(self):
        self.lst = [1, 2, 3, 4, 5]
        self.container = FrozenList(self.lst)
        self.klass = FrozenList
    def test_add(self):
        # concatenation on either side must produce a new FrozenList
        extra = (1, 2, 3)
        self.check_result(self.container + extra,
                          FrozenList(self.lst + [1, 2, 3]))
        self.check_result(extra + self.container,
                          FrozenList([1, 2, 3] + self.lst))
    def test_inplace(self):
        q = r = self.container
        q += [5]
        self.check_result(q, self.lst + [5])
        # the other reference to the original must stay unchanged
        self.check_result(r, self.lst)
class TestFrozenNDArray(CheckImmutable, CheckStringMixin, tm.TestCase):
    """Immutability and copy/view semantics checks for FrozenNDArray."""
    mutable_methods = ('put', 'itemset', 'fill')
    unicode_container = FrozenNDArray([u("\u05d0"), u("\u05d1"), "c"])
    def setUp(self):
        self.lst = [3, 5, 7, -2]
        self.container = FrozenNDArray(self.lst)
        self.klass = FrozenNDArray
    def test_shallow_copying(self):
        """Views keep the frozen class (except explicit ndarray views),
        and element assignment through any view must raise."""
        original = self.container.copy()
        assertIsInstance(self.container.view(), FrozenNDArray)
        self.assertFalse(isinstance(
            self.container.view(np.ndarray), FrozenNDArray))
        self.assertIsNot(self.container.view(), self.container)
        self.assert_numpy_array_equal(self.container, original)
        # shallow copy should be the same too
        assertIsInstance(self.container._shallow_copy(), FrozenNDArray)
        # setting should not be allowed
        def testit(container):
            container[0] = 16
        self.check_mutable_error(testit, self.container)
    def test_values(self):
        """values() must return an independent mutable copy."""
        original = self.container.view(np.ndarray).copy()
        n = original[0] + 15
        vals = self.container.values()
        self.assert_numpy_array_equal(original, vals)
        self.assertIsNot(original, vals)
        # mutating the copy must not leak back into the frozen array
        vals[0] = n
        self.assert_numpy_array_equal(self.container, original)
        self.assertEqual(vals[0], n)
class TestPandasDelegate(tm.TestCase):
    """Checks for the PandasDelegate accessor-forwarding machinery."""
    def setUp(self):
        pass
    def test_invalida_delgation(self):
        # these show that in order for the delegation to work
        # the _delegate_* methods need to be overriden to not raise a TypeError
        class Delegator(object):
            # names forwarded by the delegate below
            _properties = ['foo']
            _methods = ['bar']
            def _set_foo(self, value):
                self.foo = value
            def _get_foo(self):
                return self.foo
            foo = property(_get_foo, _set_foo, doc="foo property")
            def bar(self, *args, **kwargs):
                """ a test bar method """
                pass
        class Delegate(PandasDelegate):
            def __init__(self, obj):
                self.obj = obj
        # wire up property and method forwarding WITHOUT overriding the
        # _delegate_* hooks, so every access must raise TypeError
        Delegate._add_delegate_accessors(delegate=Delegator,
                                         accessors=Delegator._properties,
                                         typ='property')
        Delegate._add_delegate_accessors(delegate=Delegator,
                                         accessors=Delegator._methods,
                                         typ='method')
        delegate = Delegate(Delegator())
        # each f below is intentionally redefined: get, set, then call
        def f():
            delegate.foo
        self.assertRaises(TypeError, f)
        def f():
            delegate.foo = 5
        self.assertRaises(TypeError, f)
        def f():
            delegate.foo()
        self.assertRaises(TypeError, f)
class Ops(tm.TestCase):
    """Shared fixture base: builds one Index and one Series per dtype
    flavour (bool, int, float, datetime, tz-aware, period, string,
    unicode) and exposes them via ``self.objs``."""
    def _allow_na_ops(self, obj):
        """Whether to skip test cases including NaN"""
        if (isinstance(obj, Index) and
                (obj.is_boolean() or not obj._can_hold_na)):
            # don't test boolean / int64 index
            return False
        return True
    def setUp(self):
        # one index fixture per dtype flavour, all named 'a'
        self.bool_index = tm.makeBoolIndex(10, name='a')
        self.int_index = tm.makeIntIndex(10, name='a')
        self.float_index = tm.makeFloatIndex(10, name='a')
        self.dt_index = tm.makeDateIndex(10, name='a')
        self.dt_tz_index = tm.makeDateIndex(10, name='a').tz_localize(
            tz='US/Eastern')
        self.period_index = tm.makePeriodIndex(10, name='a')
        self.string_index = tm.makeStringIndex(10, name='a')
        self.unicode_index = tm.makeUnicodeIndex(10, name='a')
        # matching Series built over a single shared random payload
        arr = np.random.randn(10)
        self.int_series = Series(arr, index=self.int_index, name='a')
        self.float_series = Series(arr, index=self.float_index, name='a')
        self.dt_series = Series(arr, index=self.dt_index, name='a')
        self.dt_tz_series = self.dt_tz_index.to_series(keep_tz=True)
        self.period_series = Series(arr, index=self.period_index, name='a')
        self.string_series = Series(arr, index=self.string_index, name='a')
        # collect every fixture that actually exists (some combinations,
        # e.g. unicode_series, are never created -- getattr default skips them)
        types = ['bool', 'int', 'float', 'dt', 'dt_tz', 'period', 'string',
                 'unicode']
        fmts = ["{0}_{1}".format(t, f)
                for t in types for f in ['index', 'series']]
        self.objs = [getattr(self, f)
                     for f in fmts if getattr(self, f, None) is not None]
    def check_ops_properties(self, props, filter=None, ignore_failures=False):
        """Assert each op in *props* matches the underlying index value on
        every valid fixture, and raises on every invalid one."""
        for op in props:
            for o in self.is_valid_objs:
                # if a filter, skip if it doesn't match
                if filter is not None:
                    filt = o.index if isinstance(o, Series) else o
                    if not filter(filt):
                        continue
                try:
                    if isinstance(o, Series):
                        expected = Series(
                            getattr(o.index, op), index=o.index, name='a')
                    else:
                        expected = getattr(o, op)
                except (AttributeError):
                    if ignore_failures:
                        continue
                result = getattr(o, op)
                # these couuld be series, arrays or scalars
                if isinstance(result, Series) and isinstance(expected, Series):
                    tm.assert_series_equal(result, expected)
                elif isinstance(result, Index) and isinstance(expected, Index):
                    tm.assert_index_equal(result, expected)
                elif isinstance(result, np.ndarray) and isinstance(expected,
                                                                   np.ndarray):
                    self.assert_numpy_array_equal(result, expected)
                else:
                    self.assertEqual(result, expected)
            # freq raises AttributeError on an Int64Index because its not
            # defined we mostly care about Series hwere anyhow
            if not ignore_failures:
                for o in self.not_valid_objs:
                    # an object that is datetimelike will raise a TypeError,
                    # otherwise an AttributeError
                    if issubclass(type(o), DatetimeIndexOpsMixin):
                        self.assertRaises(TypeError, lambda: getattr(o, op))
                    else:
                        self.assertRaises(AttributeError,
                                          lambda: getattr(o, op))
    def test_binary_ops_docs(self):
        """Every arithmetic method's docstring must show both the forward
        and reflected operator expression."""
        from pandas import DataFrame, Panel
        op_map = {'add': '+',
                  'sub': '-',
                  'mul': '*',
                  'mod': '%',
                  'pow': '**',
                  'truediv': '/',
                  'floordiv': '//'}
        for op_name in ['add', 'sub', 'mul', 'mod', 'pow', 'truediv',
                        'floordiv']:
            for klass in [Series, DataFrame, Panel]:
                operand1 = klass.__name__.lower()
                operand2 = 'other'
                op = op_map[op_name]
                expected_str = ' '.join([operand1, op, operand2])
                self.assertTrue(expected_str in getattr(klass,
                                                        op_name).__doc__)
                # reverse version of the binary ops
                expected_str = ' '.join([operand2, op, operand1])
                self.assertTrue(expected_str in getattr(klass, 'r' +
                                                        op_name).__doc__)
class TestIndexOps(Ops):
    def setUp(self):
        """Partition the fixtures by whether they support index ops."""
        super(TestIndexOps, self).setUp()
        self.is_valid_objs = [o for o in self.objs if o._allow_index_ops]
        self.not_valid_objs = [o for o in self.objs if not o._allow_index_ops]
    def test_none_comparison(self):
        """Comparing a Series against None must return all-False element-wise
        for ==/</>, all-True for != (no TypeError)."""
        # bug brought up by #1079
        # changed from TypeError in 0.17.0
        for o in self.is_valid_objs:
            if isinstance(o, Series):
                o[0] = np.nan
                # noinspection PyComparisonWithNone
                result = o == None  # noqa
                self.assertFalse(result.iat[0])
                self.assertFalse(result.iat[1])
                # noinspection PyComparisonWithNone
                result = o != None  # noqa
                self.assertTrue(result.iat[0])
                self.assertTrue(result.iat[1])
                result = None == o  # noqa
                self.assertFalse(result.iat[0])
                self.assertFalse(result.iat[1])
                # this fails for numpy < 1.9
                # and oddly for *some* platforms
                # result = None != o  # noqa
                # self.assertTrue(result.iat[0])
                # self.assertTrue(result.iat[1])
                result = None > o
                self.assertFalse(result.iat[0])
                self.assertFalse(result.iat[1])
                result = o < None
                self.assertFalse(result.iat[0])
                self.assertFalse(result.iat[1])
    def test_ndarray_compat_properties(self):
        """Index/Series expose the ndarray attribute surface (shape, dtype,
        strides, ...) plus item() semantics for length-1 objects."""
        for o in self.objs:
            # check that we work
            for p in ['shape', 'dtype', 'flags', 'T', 'strides', 'itemsize',
                      'nbytes']:
                self.assertIsNotNone(getattr(o, p, None))
            self.assertTrue(hasattr(o, 'base'))
            # if we have a datetimelike dtype then needs a view to work
            # but the user is responsible for that
            try:
                self.assertIsNotNone(o.data)
            except ValueError:
                pass
            self.assertRaises(ValueError, o.item)  # len > 1
            self.assertEqual(o.ndim, 1)
            self.assertEqual(o.size, len(o))
        # item() on a single-element container returns the scalar
        self.assertEqual(Index([1]).item(), 1)
        self.assertEqual(Series([1]).item(), 1)
    def test_ops(self):
        """min/max on every fixture must match the same reduction applied
        directly to the underlying values."""
        for op in ['max', 'min']:
            for o in self.objs:
                result = getattr(o, op)()
                if not isinstance(o, PeriodIndex):
                    expected = getattr(o.values, op)()
                else:
                    # PeriodIndex reductions work on ordinals, so rebuild
                    # the expected Period from the ordinal result
                    expected = pd.Period(ordinal=getattr(o.values, op)(),
                                         freq=o.freq)
                try:
                    self.assertEqual(result, expected)
                except TypeError:
                    # comparing tz-aware series with np.array results in
                    # TypeError
                    expected = expected.astype('M8[ns]').astype('int64')
                    self.assertEqual(result.value, expected)
    def test_nanops(self):
        """min/max skip NaN/NaT; argmin/argmax return -1 on all-missing
        input instead of raising."""
        # GH 7261
        for op in ['max', 'min']:
            for klass in [Index, Series]:
                obj = klass([np.nan, 2.0])
                self.assertEqual(getattr(obj, op)(), 2.0)
                # all-NaN and empty both reduce to null
                obj = klass([np.nan])
                self.assertTrue(pd.isnull(getattr(obj, op)()))
                obj = klass([])
                self.assertTrue(pd.isnull(getattr(obj, op)()))
                obj = klass([pd.NaT, datetime(2011, 11, 1)])
                # check DatetimeIndex monotonic path
                self.assertEqual(getattr(obj, op)(), datetime(2011, 11, 1))
                obj = klass([pd.NaT, datetime(2011, 11, 1), pd.NaT])
                # check DatetimeIndex non-monotonic path
                self.assertEqual(getattr(obj, op)(), datetime(2011, 11, 1))
        # argmin/max
        obj = Index(np.arange(5, dtype='int64'))
        self.assertEqual(obj.argmin(), 0)
        self.assertEqual(obj.argmax(), 4)
        obj = Index([np.nan, 1, np.nan, 2])
        self.assertEqual(obj.argmin(), 1)
        self.assertEqual(obj.argmax(), 3)
        # all-NaN: position is reported as -1
        obj = Index([np.nan])
        self.assertEqual(obj.argmin(), -1)
        self.assertEqual(obj.argmax(), -1)
        obj = Index([pd.NaT, datetime(2011, 11, 1), datetime(2011, 11, 2),
                     pd.NaT])
        self.assertEqual(obj.argmin(), 1)
        self.assertEqual(obj.argmax(), 2)
        obj = Index([pd.NaT])
        self.assertEqual(obj.argmin(), -1)
        self.assertEqual(obj.argmax(), -1)
    def test_value_counts_unique_nunique(self):
        """value_counts/unique/nunique on every fixture, with each value
        repeated n+1 times, both with and without missing values."""
        for o in self.objs:
            klass = type(o)
            values = o.values
            # create repeated values, 'n'th element is repeated by n+1 times
            if isinstance(o, PeriodIndex):
                # freq must be specified because repeat makes freq ambiguous
                # resets name from Index
                expected_index = pd.Index(o[::-1])
                expected_index.name = None
                # attach name to klass
                o = o.repeat(range(1, len(o) + 1))
                o.name = 'a'
            elif isinstance(o, DatetimeIndex):
                # resets name from Index
                expected_index = pd.Index(o[::-1])
                expected_index.name = None
                # attach name to klass
                o = o.repeat(range(1, len(o) + 1))
                o.name = 'a'
            # don't test boolean
            elif isinstance(o, Index) and o.is_boolean():
                continue
            elif isinstance(o, Index):
                expected_index = pd.Index(values[::-1])
                expected_index.name = None
                o = o.repeat(range(1, len(o) + 1))
                o.name = 'a'
            else:
                expected_index = pd.Index(values[::-1])
                idx = o.index.repeat(range(1, len(o) + 1))
                o = klass(
                    np.repeat(values, range(1,
                                            len(o) + 1)), index=idx, name='a')
            # counts come back largest-first: 10, 9, ..., 1
            expected_s = Series(
                range(10, 0, -
                      1), index=expected_index, dtype='int64', name='a')
            result = o.value_counts()
            tm.assert_series_equal(result, expected_s)
            self.assertTrue(result.index.name is None)
            self.assertEqual(result.name, 'a')
            result = o.unique()
            if isinstance(o, (DatetimeIndex, PeriodIndex)):
                # datetimelike unique() preserves the index class and freq
                self.assertTrue(isinstance(result, o.__class__))
                self.assertEqual(result.name, o.name)
                self.assertEqual(result.freq, o.freq)
            self.assert_numpy_array_equal(result, values)
            self.assertEqual(o.nunique(), len(np.unique(o.values)))
        # same exercise with the first two values replaced by a null
        for null_obj in [np.nan, None]:
            for o in self.objs:
                klass = type(o)
                values = o.values
                if not self._allow_na_ops(o):
                    continue
                # special assign to the numpy array
                if com.is_datetimetz(o):
                    if isinstance(o, DatetimeIndex):
                        v = o.asi8
                        v[0:2] = pd.tslib.iNaT
                        values = o._shallow_copy(v)
                    else:
                        o = o.copy()
                        o[0:2] = pd.tslib.iNaT
                        values = o.values
                elif o.values.dtype == 'datetime64[ns]' or isinstance(
                        o, PeriodIndex):
                    values[0:2] = pd.tslib.iNaT
                else:
                    values[0:2] = null_obj
                # create repeated values, 'n'th element is repeated by n+1
                # times
                if isinstance(o, PeriodIndex):
                    # freq must be specified because repeat makes freq
                    # ambiguous
                    # resets name from Index
                    expected_index = pd.Index(o, name=None)
                    # attach name to klass
                    o = klass(
                        np.repeat(values, range(
                            1, len(o) + 1)), freq=o.freq, name='a')
                elif isinstance(o, Index):
                    expected_index = pd.Index(values, name=None)
                    o = klass(
                        np.repeat(values, range(1, len(o) + 1)), name='a')
                else:
                    expected_index = pd.Index(values, name=None)
                    idx = np.repeat(o.index.values, range(1, len(o) + 1))
                    o = klass(
                        np.repeat(values, range(
                            1, len(o) + 1)), index=idx, name='a')
                # with dropna=False the null bucket (1+2=3 occurrences)
                # is appended to the counts
                expected_s_na = Series(list(range(10, 2, -1)) + [3],
                                       index=expected_index[9:0:-1],
                                       dtype='int64', name='a')
                expected_s = Series(list(range(10, 2, -1)),
                                    index=expected_index[9:1:-1],
                                    dtype='int64', name='a')
                result_s_na = o.value_counts(dropna=False)
                tm.assert_series_equal(result_s_na, expected_s_na)
                self.assertTrue(result_s_na.index.name is None)
                self.assertEqual(result_s_na.name, 'a')
                result_s = o.value_counts()
                tm.assert_series_equal(o.value_counts(), expected_s)
                self.assertTrue(result_s.index.name is None)
                self.assertEqual(result_s.name, 'a')
                # numpy_array_equal cannot compare arrays includes nan
                result = o.unique()
                self.assert_numpy_array_equal(result[1:], values[2:])
                if isinstance(o, (DatetimeIndex, PeriodIndex)):
                    self.assertTrue(result.asi8[0] == pd.tslib.iNaT)
                else:
                    self.assertTrue(pd.isnull(result[0]))
                # nunique excludes the null by default, includes it on request
                self.assertEqual(o.nunique(), 8)
                self.assertEqual(o.nunique(dropna=False), 9)
    def test_value_counts_inferred(self):
        """value_counts on object, binned-numeric, datetime and timedelta
        data, for both Index and Series, including NaN/NaT handling."""
        klasses = [Index, Series]
        for klass in klasses:
            s_values = ['a', 'b', 'b', 'b', 'b', 'c', 'd', 'd', 'a', 'a']
            s = klass(s_values)
            expected = Series([4, 3, 2, 1], index=['b', 'a', 'd', 'c'])
            tm.assert_series_equal(s.value_counts(), expected)
            self.assert_numpy_array_equal(s.unique(), np.unique(s_values))
            self.assertEqual(s.nunique(), 4)
            # don't sort, have to sort after the fact as not sorting is
            # platform-dep
            hist = s.value_counts(sort=False).sort_values()
            expected = Series([3, 1, 4, 2], index=list('acbd')).sort_values()
            tm.assert_series_equal(hist, expected)
            # sort ascending
            hist = s.value_counts(ascending=True)
            expected = Series([1, 2, 3, 4], index=list('cdab'))
            tm.assert_series_equal(hist, expected)
            # relative histogram.
            hist = s.value_counts(normalize=True)
            expected = Series([.4, .3, .2, .1], index=['b', 'a', 'd', 'c'])
            tm.assert_series_equal(hist, expected)
            # bins
            self.assertRaises(TypeError,
                              lambda bins: s.value_counts(bins=bins), 1)
            s1 = Series([1, 1, 2, 3])
            res1 = s1.value_counts(bins=1)
            # 0.998 is the left edge of the lowest bin (1 - 0.1% margin)
            exp1 = Series({0.998: 4})
            tm.assert_series_equal(res1, exp1)
            res1n = s1.value_counts(bins=1, normalize=True)
            exp1n = Series({0.998: 1.0})
            tm.assert_series_equal(res1n, exp1n)
            self.assert_numpy_array_equal(s1.unique(), np.array([1, 2, 3]))
            self.assertEqual(s1.nunique(), 3)
            res4 = s1.value_counts(bins=4)
            exp4 = Series({0.998: 2,
                           1.5: 1,
                           2.0: 0,
                           2.5: 1}, index=[0.998, 2.5, 1.5, 2.0])
            tm.assert_series_equal(res4, exp4)
            res4n = s1.value_counts(bins=4, normalize=True)
            exp4n = Series(
                {0.998: 0.5,
                 1.5: 0.25,
                 2.0: 0.0,
                 2.5: 0.25}, index=[0.998, 2.5, 1.5, 2.0])
            tm.assert_series_equal(res4n, exp4n)
            # handle NA's properly
            s_values = ['a', 'b', 'b', 'b', np.nan, np.nan, 'd', 'd', 'a', 'a',
                        'b']
            s = klass(s_values)
            expected = Series([4, 3, 2], index=['b', 'a', 'd'])
            tm.assert_series_equal(s.value_counts(), expected)
            self.assert_numpy_array_equal(s.unique(), np.array(
                ['a', 'b', np.nan, 'd'], dtype='O'))
            self.assertEqual(s.nunique(), 3)
            # empty input yields an empty int64 result
            s = klass({})
            expected = Series([], dtype=np.int64)
            tm.assert_series_equal(s.value_counts(), expected,
                                   check_index_type=False)
            self.assert_numpy_array_equal(s.unique(), np.array([]))
            self.assertEqual(s.nunique(), 0)
            # GH 3002, datetime64[ns]
            # don't test names though
            txt = "\n".join(['xxyyzz20100101PIE', 'xxyyzz20100101GUM',
                             'xxyyzz20100101EGG', 'xxyyww20090101EGG',
                             'foofoo20080909PIE', 'foofoo20080909GUM'])
            f = StringIO(txt)
            df = pd.read_fwf(f, widths=[6, 8, 3],
                             names=["person_id", "dt", "food"],
                             parse_dates=["dt"])
            s = klass(df['dt'].copy())
            s.name = None
            # NOTE(review): the trailing 'X' below looks like a typo for
            # 'Z' -- confirm against the original test before changing
            idx = pd.to_datetime(
                ['2010-01-01 00:00:00Z', '2008-09-09 00:00:00Z',
                 '2009-01-01 00:00:00X'])
            expected_s = Series([3, 2, 1], index=idx)
            tm.assert_series_equal(s.value_counts(), expected_s)
            expected = np.array(['2010-01-01 00:00:00Z',
                                 '2009-01-01 00:00:00Z',
                                 '2008-09-09 00:00:00Z'],
                                dtype='datetime64[ns]')
            if isinstance(s, DatetimeIndex):
                expected = DatetimeIndex(expected)
                self.assertTrue(s.unique().equals(expected))
            else:
                self.assert_numpy_array_equal(s.unique(), expected)
            self.assertEqual(s.nunique(), 3)
            # with NaT
            s = df['dt'].copy()
            s = klass([v for v in s.values] + [pd.NaT])
            result = s.value_counts()
            self.assertEqual(result.index.dtype, 'datetime64[ns]')
            tm.assert_series_equal(result, expected_s)
            # dropna=False adds a NaT bucket of size 1
            result = s.value_counts(dropna=False)
            expected_s[pd.NaT] = 1
            tm.assert_series_equal(result, expected_s)
            unique = s.unique()
            self.assertEqual(unique.dtype, 'datetime64[ns]')
            # numpy_array_equal cannot compare pd.NaT
            self.assert_numpy_array_equal(unique[:3], expected)
            self.assertTrue(unique[3] is pd.NaT or unique[3].astype('int64') ==
                            pd.tslib.iNaT)
            self.assertEqual(s.nunique(), 3)
            self.assertEqual(s.nunique(dropna=False), 4)
            # timedelta64[ns]
            td = df.dt - df.dt + timedelta(1)
            td = klass(td, name='dt')
            result = td.value_counts()
            expected_s = Series([6], index=[Timedelta('1day')], name='dt')
            tm.assert_series_equal(result, expected_s)
            expected = TimedeltaIndex(['1 days'])
            if isinstance(td, TimedeltaIndex):
                self.assertTrue(td.unique().equals(expected))
            else:
                self.assert_numpy_array_equal(td.unique(), expected.values)
            # same timedelta built the other way round must count identically
            td2 = timedelta(1) + (df.dt - df.dt)
            td2 = klass(td2, name='dt')
            result2 = td2.value_counts()
            tm.assert_series_equal(result2, expected_s)
    def test_factorize(self):
        """factorize() on unique and duplicated fixtures, with and without
        sort, checking both the label codes and the uniques."""
        for o in self.objs:
            if isinstance(o, Index) and o.is_boolean():
                exp_arr = np.array([0, 1] + [0] * 8)
                exp_uniques = o
                # NOTE(review): the assignment above is immediately
                # overwritten -- looks like dead code; confirm intent
                exp_uniques = Index([False, True])
            else:
                # all values distinct: labels are simply 0..n-1
                exp_arr = np.array(range(len(o)))
                exp_uniques = o
            labels, uniques = o.factorize()
            self.assert_numpy_array_equal(labels, exp_arr)
            if isinstance(o, Series):
                expected = Index(o.values)
                self.assert_numpy_array_equal(uniques, expected)
            else:
                self.assertTrue(uniques.equals(exp_uniques))
        for o in self.objs:
            # don't test boolean
            if isinstance(o, Index) and o.is_boolean():
                continue
            # sort by value, and create duplicates
            if isinstance(o, Series):
                o = o.sort_values()
                n = o.iloc[5:].append(o)
            else:
                indexer = o.argsort()
                o = o.take(indexer)
                n = o[5:].append(o)
            exp_arr = np.array([5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
            labels, uniques = n.factorize(sort=True)
            self.assert_numpy_array_equal(labels, exp_arr)
            if isinstance(o, Series):
                expected = Index(o.values)
                self.assert_numpy_array_equal(uniques, expected)
            else:
                self.assertTrue(uniques.equals(o))
            # unsorted factorization assigns codes in first-seen order
            exp_arr = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4])
            labels, uniques = n.factorize(sort=False)
            self.assert_numpy_array_equal(labels, exp_arr)
            if isinstance(o, Series):
                expected = Index(np.concatenate([o.values[5:10], o.values[:5]
                                                 ]))
                self.assert_numpy_array_equal(uniques, expected)
            else:
                expected = o[5:].append(o[:5])
                self.assertTrue(uniques.equals(expected))
    def test_duplicated_drop_duplicates(self):
        """duplicated()/drop_duplicates() agree between Index and Series.

        Covers keep='first' (default), keep='last', keep=False, the
        deprecated take_last alias, and inplace dropping (GH 4060).
        """
        # GH 4060
        for original in self.objs:
            if isinstance(original, Index):
                # special case
                if original.is_boolean():
                    result = original.drop_duplicates()
                    expected = Index([False, True], name='a')
                    tm.assert_index_equal(result, expected)
                    continue
                # original doesn't have duplicates
                expected = np.array([False] * len(original), dtype=bool)
                duplicated = original.duplicated()
                tm.assert_numpy_array_equal(duplicated, expected)
                self.assertTrue(duplicated.dtype == bool)
                result = original.drop_duplicates()
                tm.assert_index_equal(result, original)
                # drop_duplicates must return a new object, not the input
                self.assertFalse(result is original)
                # has_duplicates
                self.assertFalse(original.has_duplicates)
                # create repeated values, 3rd and 5th values are duplicated
                idx = original[list(range(len(original))) + [5, 3]]
                expected = np.array([False] * len(original) + [True, True],
                                    dtype=bool)
                duplicated = idx.duplicated()
                tm.assert_numpy_array_equal(duplicated, expected)
                self.assertTrue(duplicated.dtype == bool)
                tm.assert_index_equal(idx.drop_duplicates(), original)
                # keep='last': the earlier occurrences are flagged as dups
                base = [False] * len(idx)
                base[3] = True
                base[5] = True
                expected = np.array(base)
                duplicated = idx.duplicated(keep='last')
                tm.assert_numpy_array_equal(duplicated, expected)
                self.assertTrue(duplicated.dtype == bool)
                result = idx.drop_duplicates(keep='last')
                tm.assert_index_equal(result, idx[~expected])
                # deprecate take_last
                with tm.assert_produces_warning(FutureWarning):
                    duplicated = idx.duplicated(take_last=True)
                tm.assert_numpy_array_equal(duplicated, expected)
                self.assertTrue(duplicated.dtype == bool)
                with tm.assert_produces_warning(FutureWarning):
                    result = idx.drop_duplicates(take_last=True)
                tm.assert_index_equal(result, idx[~expected])
                # keep=False: every occurrence of a duplicated value is flagged
                base = [False] * len(original) + [True, True]
                base[3] = True
                base[5] = True
                expected = np.array(base)
                duplicated = idx.duplicated(keep=False)
                tm.assert_numpy_array_equal(duplicated, expected)
                self.assertTrue(duplicated.dtype == bool)
                result = idx.drop_duplicates(keep=False)
                tm.assert_index_equal(result, idx[~expected])
                # Index.drop_duplicates does not accept inplace
                with tm.assertRaisesRegexp(
                        TypeError, "drop_duplicates\(\) got an unexpected "
                        "keyword argument"):
                    idx.drop_duplicates(inplace=True)
            else:
                # Series: same scenarios as the Index branch above
                expected = Series([False] * len(original),
                                  index=original.index, name='a')
                tm.assert_series_equal(original.duplicated(), expected)
                result = original.drop_duplicates()
                tm.assert_series_equal(result, original)
                self.assertFalse(result is original)
                # repeat the 3rd and 5th values
                idx = original.index[list(range(len(original))) + [5, 3]]
                values = original._values[list(range(len(original))) + [5, 3]]
                s = Series(values, index=idx, name='a')
                expected = Series([False] * len(original) + [True, True],
                                  index=idx, name='a')
                tm.assert_series_equal(s.duplicated(), expected)
                tm.assert_series_equal(s.drop_duplicates(), original)
                # keep='last'
                base = [False] * len(idx)
                base[3] = True
                base[5] = True
                expected = Series(base, index=idx, name='a')
                tm.assert_series_equal(s.duplicated(keep='last'), expected)
                tm.assert_series_equal(s.drop_duplicates(keep='last'),
                                       s[~np.array(base)])
                # deprecate take_last
                with tm.assert_produces_warning(FutureWarning):
                    tm.assert_series_equal(
                        s.duplicated(take_last=True), expected)
                with tm.assert_produces_warning(FutureWarning):
                    tm.assert_series_equal(s.drop_duplicates(take_last=True),
                                           s[~np.array(base)])
                # keep=False
                base = [False] * len(original) + [True, True]
                base[3] = True
                base[5] = True
                expected = Series(base, index=idx, name='a')
                tm.assert_series_equal(s.duplicated(keep=False), expected)
                tm.assert_series_equal(s.drop_duplicates(keep=False),
                                       s[~np.array(base)])
                # Series supports inplace dropping
                s.drop_duplicates(inplace=True)
                tm.assert_series_equal(s, original)
    def test_fillna(self):
        """Index.fillna and Series.fillna behave identically (GH 11343).

        First verifies fillna on data without NA is a no-op returning a
        shallow copy, then fills two injected NA values and compares
        against the expected reconstruction.
        """
        # # GH 11343
        # though Index.fillna and Series.fillna has separate impl,
        # test here to confirm these works as the same
        def get_fill_value(obj):
            # Use an existing value from the object as the fill value.
            if isinstance(obj, pd.tseries.base.DatetimeIndexOpsMixin):
                return obj.asobject.values[0]
            else:
                return obj.values[0]
        for o in self.objs:
            klass = type(o)
            values = o.values
            # values will not be changed
            result = o.fillna(get_fill_value(o))
            if isinstance(o, Index):
                self.assert_index_equal(o, result)
            else:
                self.assert_series_equal(o, result)
            # check shallow_copied
            self.assertFalse(o is result)
        for null_obj in [np.nan, None]:
            for o in self.objs:
                klass = type(o)
                values = o.values.copy()
                if not self._allow_na_ops(o):
                    continue
                # value for filling
                fill_value = get_fill_value(o)
                # special assign to the numpy array
                if o.values.dtype == 'datetime64[ns]' or isinstance(
                        o, PeriodIndex):
                    # datetime-like NA is represented by iNaT, not np.nan
                    values[0:2] = pd.tslib.iNaT
                else:
                    values[0:2] = null_obj
                if isinstance(o, PeriodIndex):
                    # freq must be specified because repeat makes freq
                    # ambiguous
                    expected = [fill_value.ordinal] * 2 + list(values[2:])
                    expected = klass(ordinal=expected, freq=o.freq)
                    o = klass(ordinal=values, freq=o.freq)
                else:
                    expected = [fill_value] * 2 + list(values[2:])
                    expected = klass(expected)
                    o = klass(values)
                result = o.fillna(fill_value)
                if isinstance(o, Index):
                    self.assert_index_equal(result, expected)
                else:
                    self.assert_series_equal(result, expected)
                # check shallow_copied
                self.assertFalse(o is result)
    def test_memory_usage(self):
        """memory_usage(): deep vs. shallow accounting and index inclusion."""
        for o in self.objs:
            res = o.memory_usage()
            res_deep = o.memory_usage(deep=True)
            if (com.is_object_dtype(o) or (isinstance(o, Series) and
                                           com.is_object_dtype(o.index))):
                # if there are objects, only deep will pick them up
                self.assertTrue(res_deep > res)
            else:
                self.assertEqual(res, res_deep)
            if isinstance(o, Series):
                # total usage should decompose into values + index
                self.assertEqual(
                    (o.memory_usage(index=False) +
                        o.index.memory_usage()),
                    o.memory_usage(index=True)
                )
            # sys.getsizeof will call the .memory_usage with
            # deep=True, and add on some GC overhead
            diff = res_deep - sys.getsizeof(o)
            self.assertTrue(abs(diff) < 100)
class TestFloat64HashTable(tm.TestCase):
    """Tests for the low-level Float64HashTable."""

    def test_lookup_nan(self):
        """Values containing NaN still map back to their own positions."""
        from pandas.hashtable import Float64HashTable
        values = np.array([2.718, 3.14, np.nan, -7, 5, 2, 3])
        table = Float64HashTable()
        table.map_locations(values)
        expected = np.arange(len(values))
        self.assert_numpy_array_equal(table.lookup(values), expected)
class TestNoNewAttributesMixin(tm.TestCase):
    """Tests for NoNewAttributesMixin's attribute-freezing behavior."""

    def test_mixin(self):
        class Subject(NoNewAttributesMixin):
            pass

        obj = Subject()
        # Before freezing, the guard flag is absent and assignment works.
        self.assertFalse(hasattr(obj, "__frozen"))
        obj.a = "test"
        self.assertEqual(obj.a, "test")
        obj._freeze()
        # After freezing, the guard flag is set ...
        self.assertIs(getattr(obj, "__frozen"), True)

        def assign_new_attribute():
            obj.b = "test"

        # ... and creating new attributes raises.
        self.assertRaises(AttributeError, assign_new_attribute)
        self.assertFalse(hasattr(obj, "b"))
if __name__ == '__main__':
    # Run this test module directly under nose; drop into pdb on failures.
    import nose
    nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
                   # '--with-coverage', '--cover-package=pandas.core'],
                   exit=False)
| pjryan126/solid-start-careers | store/api/zillow/venv/lib/python2.7/site-packages/pandas/tests/test_base.py | Python | gpl-2.0 | 38,533 |
##
# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild
#
# Copyright:: Copyright 2012-2017 Uni.Lu/LCSB, NTUA
# Authors:: Cedric Laczny <cedric.laczny@uni.lu>, Fotis Georgatos <fotis@cern.ch>, Kenneth Hoste
# License:: MIT/GPL
# $Id$
#
# This work implements a part of the HPCBIOS project and is a component of the policy:
# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-94.html
##
"""
EasyBuild support for building and installing MetaVelvet, implemented as an easyblock
@author: Cedric Laczny (Uni.Lu)
@author: Fotis Georgatos (Uni.Lu)
@author: Kenneth Hoste (Ghent University)
"""
import os
import shutil
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.tools.build_log import EasyBuildError
class EB_MetaVelvet(ConfigureMake):
    """
    Support for building MetaVelvet.

    MetaVelvet has no configure step and no 'make install' target, so the
    install step simply copies the built executable into <installdir>/bin.
    """
    def configure_step(self):
        """
        No configure step: the build is driven entirely by the Makefile.
        """
        pass

    def install_step(self):
        """
        Install by copying the built executable(s) to the install dir.

        Raises an EasyBuildError if the copy fails.
        """
        srcdir = self.cfg['start_dir']
        destdir = os.path.join(self.installdir, 'bin')
        srcfile = None
        # Get executable files: for i in $(find . -maxdepth 1 -type f -perm +111 -print | sed -e 's/\.\///g' | awk '{print "\""$0"\""}' | grep -vE "\.sh|\.html"); do echo -ne "$i, "; done && echo
        try:
            os.makedirs(destdir)
            for filename in ["meta-velvetg"]:
                srcfile = os.path.join(srcdir, filename)
                shutil.copy2(srcfile, destdir)
        # catch IOError too: on Python 2, shutil.copy2 raises IOError (not a
        # subclass of OSError there) on copy failures; 'as' syntax works on
        # Python 2.6+ and Python 3, unlike the old 'except OSError, err' form
        except (IOError, OSError) as err:
            raise EasyBuildError("Copying %s to installation dir %s failed: %s", srcfile, destdir, err)

    def sanity_check_step(self):
        """Custom sanity check for MetaVelvet: the binary must be installed."""
        custom_paths = {
            'files': ['bin/meta-velvetg'],
            'dirs': []
        }
        super(EB_MetaVelvet, self).sanity_check_step(custom_paths=custom_paths)
| ULHPC/easybuild-easyblocks | easybuild/easyblocks/m/metavelvet.py | Python | gpl-2.0 | 2,000 |
import os
import pandas as pd
from input import Input
class Main(Input):
    """CSV-file input: loads one CSV per symbol into a pandas DataFrame."""

    name = "CSV File Input"

    def build_file_list(self):
        """Return a dict mapping each symbol to its CSV path.

        Symbols whose file does not exist in self.symbol_dir are skipped.
        """
        files = {}
        for symbol in self.symbols:
            normalized_file = "%s.%s" % (symbol, self.extension)
            # compute the join once instead of twice
            path = os.path.join(self.symbol_dir, normalized_file)
            if os.path.isfile(path):
                files[symbol] = path
        return files

    def main(self):
        """Read every available symbol CSV into self.data and return it."""
        self.extension = self.base_config['file_extension']
        self.symbol_dir = self.base_config['symbol_dir']
        files = self.build_file_list()
        for symbol in files.keys():
            df = pd.read_csv(files[symbol])
            df = df.reset_index()
            # BUG FIX: df.tail is a bound method -- it must be called to show
            # the last rows; the original printed the method's repr instead
            print(df.tail())
            self.data[symbol] = df
        return self.data
| hirodotp/mdata | inputs/csvfile.py | Python | gpl-2.0 | 678 |
from cellprofiler.gui.help import USING_METADATA_HELP_REF, USING_METADATA_GROUPING_HELP_REF, LOADING_IMAGE_SEQ_HELP_REF
# User-facing names of the available tracking methods.
TM_OVERLAP = 'Overlap'
TM_DISTANCE = 'Distance'
TM_MEASUREMENTS = 'Measurements'
TM_LAP = "LAP"
TM_ALL = [TM_OVERLAP, TM_DISTANCE, TM_MEASUREMENTS,TM_LAP]
# Link-type codes stored in the per-object "LinkType" measurement
# (LAP method only); see the module documentation below.
LT_NONE = 0  # object was not linked to a parent
LT_PHASE_1 = 1  # linked to a parent in the previous frame
LT_SPLIT = 2  # linked as the start of a split path
LT_MITOSIS = 3  # linked to its parent as a daughter of a mitotic pair
LT_GAP = 4  # linked to a parent in a frame prior to the previous one (gap)
# Kalman movement-model codes stored in the "MovementModel" measurement.
KM_VEL = 1  # velocity model was used
KM_NO_VEL = 0  # random (no-velocity) model was used
KM_NONE = -1  # neither model used (track start or start of a closed gap)
'''Random motion model, for instance Brownian motion'''
M_RANDOM = "Random"
'''Velocity motion model, object position depends on prior velocity'''
M_VELOCITY = "Velocity"
'''Random and velocity models'''
M_BOTH = "Both"
# Setting labels reused (via %-interpolation) in the module documentation.
RADIUS_STD_SETTING_TEXT = 'Number of standard deviations for search radius'
RADIUS_LIMIT_SETTING_TEXT = 'Search radius limit, in pixel units (Min,Max)'
ONLY_IF_2ND_PHASE_LAP_TEXT = '''<i>(Used only if the %(TM_LAP)s tracking method is applied and the second phase is run)</i>'''%globals()
import cellprofiler.icons
from cellprofiler.gui.help import PROTIP_RECOMEND_ICON, PROTIP_AVOID_ICON, TECH_NOTE_ICON
__doc__ = """
<b>Track Objects</b> allows tracking objects throughout sequential
frames of a series of images, so that from frame to frame
each object maintains a unique identity in the output measurements
<hr>
This module must be placed downstream of a module that identifies objects
(e.g., <b>IdentifyPrimaryObjects</b>). <b>TrackObjects</b> will associate each
object with the same object in the frames before and after. This allows the study
of objects' lineages and the timing and characteristics of dynamic events in
movies.
<p>Images in CellProfiler are processed sequentially by frame (whether loaded as a
series of images or a movie file). To process a collection of images/movies,
you will need to do the following:
<ul>
<li>Define each individual movie using metadata
either contained within the image file itself or as part of the images nomenclature
or folder structure. %(USING_METADATA_HELP_REF)s.</li>
<li>Group the movies to make sure
that each image sequence is handled individually. %(USING_METADATA_GROUPING_HELP_REF)s.
</li>
</ul>
For complete details, see <i>%(LOADING_IMAGE_SEQ_HELP_REF)s</i>.</p>
<p>For an example pipeline using TrackObjects, see the CellProfiler
<a href="http://www.cellprofiler.org/examples.shtml#Tracking">Examples</a> webpage.</p>
<h4>Available measurements</h4>
<b>Object measurements</b>
<ul>
<li><i>Label:</i> Each tracked object is assigned a unique identifier (label).
Child objects resulting from a split or merge are assigned the label of the ancestor.</li>
<li><i>ParentImageNumber, ParentObjectNumber:</i> The <i>ImageNumber</i> and
<i>ObjectNumber</i> of the parent object in the prior frame. For a split, each
child object will have the label of the object it split from. For a merge,
the child will have the label of the closest parent.</li>
<li><i>TrajectoryX, TrajectoryY:</i> The direction of motion (in x and y coordinates) of the
object from the previous frame to the current frame.</li>
<li><i>DistanceTraveled:</i> The distance traveled by the object from the
previous frame to the current frame (calculated as the magnitude of
the trajectory vectors).</li>
<li><i>Displacement:</i> The shortest distance traveled by the object from its
initial starting position to the position in the current frame. That is, it is
the straight-line path between the two points.</li>
<li><i>IntegratedDistance:</i> The total distance traveled by the object during
the lifetime of the object.</li>
<li><i>Linearity:</i> A measure of how linear the object trajectory is during the
object lifetime. Calculated as (displacement from initial to final
location)/(integrated object distance). Value is in range of [0,1].</li>
<li><i>Lifetime:</i> The number of frames an objects has existed. The lifetime starts
at 1 at the frame when an object appears, and is incremented with each frame that the
object persists. At the final frame of the image set/movie, the
lifetimes of all remaining objects are output.</li>
<li><i>FinalAge:</i> Similar to <i>LifeTime</i> but is only output at the final
frame of the object's life (or the movie ends, whichever comes first). At this point,
the final age of the object is output; no values are stored for earlier frames.
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s"> This value
is useful if you want to plot a histogram of the object lifetimes; all but the final age
can be ignored or filtered out.</dd>
</dl></li>
</ul>
The following object measurements are specific to the %(TM_LAP)s tracking method:
<ul>
<li><i>LinkType:</i> The linking method used to link the object to its parent.
Possible values are
<ul>
<li><b>%(LT_NONE)d</b>: The object was not linked to a parent.</li>
<li><b>%(LT_PHASE_1)d</b>: The object was linked to a parent in the previous frame.</li>
<li><b>%(LT_SPLIT)d</b>: The object is linked as the start of a split path.</li>
<li><b>%(LT_MITOSIS)s</b>: The object was linked to its parent as a daughter of
a mitotic pair.</li>
<li><b>%(LT_GAP)d</b>: The object was linked to a parent in a frame prior to the
previous frame (a gap).</li>
</ul>
Under some circumstances, multiple linking methods may apply to a given object, e.g, an
object may be both the beginning of a split path and not have a parent. However, only
one linking method is assigned.</li>
<li><i>MovementModel:</i>The movement model used to track the object.
<ul>
<li><b>%(KM_NO_VEL)d</b>: The <i>%(M_RANDOM)s</i> model was used.</li>
<li><b>%(KM_VEL)d</b>: The <i>%(M_VELOCITY)s</i> model was used.</li>
<li><b>-1</b>: Neither model was used. This can occur under two circumstances:
<ul>
<li>At the beginning of a trajectory, when there is no data to determine the model as
yet.</li>
<li>At the beginning of a closed gap, since a model was not actually applied to make
the link in the first phase.</li>
</ul></li>
</ul>
</li>
<li><i>LinkingDistance:</i>The difference between the propagated position of an
object and the object to which it is matched.
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s"> A slowly decaying histogram of
these distances indicates that the search radius is large enough. A cut-off histogram
is a sign that the search radius is too small.</dd>
</dl></li>
<li><i>StandardDeviation:</i>The Kalman filter maintains a running estimate
of the variance of the error in estimated position for each model.
This measurement records the linking distance divided by the standard deviation
of the error when linking the object with its parent.
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s"> This value is multiplied by
the <i>"%(RADIUS_STD_SETTING_TEXT)s"</i> setting to constrain the search distance.
A histogram of this value can help determine if the <i>"%(RADIUS_LIMIT_SETTING_TEXT)s"</i>
setting is appropriate.</dd>
</dl>
</li>
<li><i>GapLength:</i> The number of frames between an object and its parent.
For instance, an object in frame 3 with a parent in frame 1 has a gap length of
2.</li>
<li><i>GapScore:</i> If an object is linked to its parent by bridging a gap,
this value is the score for the gap.</li>
<li><i>SplitScore:</i> If an object linked to its parent via a split, this
value is the score for the split.</li>
<li><i>MergeScore:</i> If an object linked to a child via a merge, this value is
the score for the merge.</li>
<li><i>MitosisScore:</i> If an object linked to two children via a mitosis,
this value is the score for the mitosis.</li>
</ul>
<b>Image measurements</b>
<ul>
<li><i>LostObjectCount:</i> Number of objects that appear in the previous frame
but have no identifiable child in the current frame.</li>
<li><i>NewObjectCount:</i> Number of objects that appear in the current frame but
have no identifiable parent in the previous frame. </li>
<li><i>SplitObjectCount:</i> Number of objects in the current frame that
resulted from a split from a parent object in the previous frame.</li>
<li><i>MergedObjectCount:</i> Number of objects in the current frame that
resulted from the merging of child objects in the previous frame.</li>
</ul>
See also: Any of the <b>Measure</b> modules, <b>IdentifyPrimaryObjects</b>, <b>Groups</b>.
"""%globals()
# CellProfiler is distributed under the GNU General Public License.
# See the accompanying file LICENSE for details.
#
# Copyright (c) 2003-2009 Massachusetts Institute of Technology
# Copyright (c) 2009-2015 Broad Institute
#
# Please see the AUTHORS file for credits.
#
# Website: http://www.cellprofiler.org
import logging
logger = logging.getLogger(__name__)
import numpy as np
import numpy.ma
from scipy.ndimage import distance_transform_edt
import scipy.ndimage
import scipy.sparse
import cellprofiler.cpmodule as cpm
import cellprofiler.cpimage as cpi
import cellprofiler.pipeline as cpp
import cellprofiler.settings as cps
from cellprofiler.settings import YES, NO
import cellprofiler.measurements as cpmeas
import cellprofiler.preferences as cpprefs
from cellprofiler.cpmath.lapjv import lapjv
import cellprofiler.cpmath.filter as cpfilter
from cellprofiler.cpmath.cpmorphology import fixup_scipy_ndimage_result as fix
from cellprofiler.cpmath.cpmorphology import centers_of_labels
from cellprofiler.cpmath.cpmorphology import associate_by_distance
from cellprofiler.cpmath.cpmorphology import all_connected_components
from cellprofiler.cpmath.index import Indexes
from identify import M_LOCATION_CENTER_X, M_LOCATION_CENTER_Y
from cellprofiler.gui.help import HELP_ON_MEASURING_DISTANCES
# Display choices for how tracked objects are drawn in the module figure.
DT_COLOR_AND_NUMBER = 'Color and Number'
DT_COLOR_ONLY = 'Color Only'
DT_ALL = [DT_COLOR_AND_NUMBER, DT_COLOR_ONLY]
# Relationship name used when recording parent/child object links.
R_PARENT = "Parent"
# All measurements produced by this module are prefixed with this string.
F_PREFIX = "TrackObjects"
# Per-object measurement names (documented in the module __doc__ above).
F_LABEL = "Label"
F_PARENT_OBJECT_NUMBER = "ParentObjectNumber"
F_PARENT_IMAGE_NUMBER = "ParentImageNumber"
F_TRAJECTORY_X = "TrajectoryX"
F_TRAJECTORY_Y = "TrajectoryY"
F_DISTANCE_TRAVELED = "DistanceTraveled"
F_DISPLACEMENT = "Displacement"
F_INTEGRATED_DISTANCE = "IntegratedDistance"
F_LINEARITY = "Linearity"
F_LIFETIME = "Lifetime"
F_FINAL_AGE = "FinalAge"
# LAP-specific per-object measurement names.
F_MOVEMENT_MODEL = "MovementModel"
F_LINK_TYPE = "LinkType"
F_LINKING_DISTANCE = "LinkingDistance"
F_STANDARD_DEVIATION = "StandardDeviation"
F_GAP_LENGTH = "GapLength"
F_GAP_SCORE = "GapScore"
F_MERGE_SCORE = "MergeScore"
F_SPLIT_SCORE = "SplitScore"
F_MITOSIS_SCORE = "MitosisScore"
# Name pieces composed into Kalman-filter state feature names by
# kalman_feature() below.
F_KALMAN = "Kalman"
F_STATE = "State"
F_COV = "COV"
F_NOISE = "Noise"
F_VELOCITY_MODEL = "Vel"
F_STATIC_MODEL = "NoVel"
F_X = "X"
F_Y = "Y"
F_VX = "VX"
F_VY = "VY"
# Experiment-wide (per-run) measurement names.
F_EXPT_ORIG_NUMTRACKS = "%s_OriginalNumberOfTracks"%F_PREFIX
F_EXPT_FILT_NUMTRACKS = "%s_FilteredNumberOfTracks"%F_PREFIX
def kalman_feature(model, matrix_or_vector, i, j=None):
    '''Compose the measurement feature name for part of a Kalman filter state.

    model - model used for Kalman feature: velocity or static
    matrix_or_vector - the part of the Kalman state to save, vec, COV or noise
    i - the name for the first (or only for vec and noise) index into the vector
    j - the name of the second index into the matrix
    '''
    parts = [F_KALMAN, model, matrix_or_vector, i]
    if j is None:
        return "_".join(parts)
    return "_".join(parts + [j])
'''# of objects in the current frame without parents in the previous frame'''
F_NEW_OBJECT_COUNT = "NewObjectCount"
'''# of objects in the previous frame without parents in the new frame'''
F_LOST_OBJECT_COUNT = "LostObjectCount"
'''# of parents that split into more than one child'''
F_SPLIT_COUNT = "SplitObjectCount"
'''# of children that are merged from more than one parent'''
F_MERGE_COUNT = "MergedObjectCount"
'''Object area measurement for LAP method
The final part of the LAP method needs the object area measurement
which is stored using this name.'''
F_AREA = "Area"
# (feature name, database column type) pairs for the per-object measurements.
F_ALL_COLTYPE_ALL = [(F_LABEL, cpmeas.COLTYPE_INTEGER),
                     (F_PARENT_OBJECT_NUMBER, cpmeas.COLTYPE_INTEGER),
                     (F_PARENT_IMAGE_NUMBER, cpmeas.COLTYPE_INTEGER),
                     (F_TRAJECTORY_X, cpmeas.COLTYPE_INTEGER),
                     (F_TRAJECTORY_Y, cpmeas.COLTYPE_INTEGER),
                     (F_DISTANCE_TRAVELED, cpmeas.COLTYPE_FLOAT),
                     (F_DISPLACEMENT, cpmeas.COLTYPE_FLOAT),
                     (F_INTEGRATED_DISTANCE, cpmeas.COLTYPE_FLOAT),
                     (F_LINEARITY, cpmeas.COLTYPE_FLOAT),
                     (F_LIFETIME, cpmeas.COLTYPE_INTEGER),
                     (F_FINAL_AGE, cpmeas.COLTYPE_INTEGER)]
# (feature name, database column type) pairs for the per-image measurements.
F_IMAGE_COLTYPE_ALL = [(F_NEW_OBJECT_COUNT, cpmeas.COLTYPE_INTEGER),
                       (F_LOST_OBJECT_COUNT, cpmeas.COLTYPE_INTEGER),
                       (F_SPLIT_COUNT, cpmeas.COLTYPE_INTEGER),
                       (F_MERGE_COUNT, cpmeas.COLTYPE_INTEGER)]
# Plain feature-name lists derived from the (name, coltype) pairs above.
F_ALL = [feature for feature, coltype in F_ALL_COLTYPE_ALL]
F_IMAGE_ALL = [feature for feature, coltype in F_IMAGE_COLTYPE_ALL]
class TrackObjects(cpm.CPModule):
module_name = 'TrackObjects'
category = "Object Processing"
variable_revision_number = 6
def create_settings(self):
self.tracking_method = cps.Choice(
'Choose a tracking method',
TM_ALL, doc="""
When trying to track an object in an image,
<b>TrackObjects</b> will search within a maximum
specified distance (see the <i>distance within which to search</i> setting)
of the object's location in the previous image, looking for a "match".
Objects that match are assigned the same number, or label, throughout the
entire movie.
There are several options for the method used to find a match. Choose
among these options based on which is most consistent from frame
to frame of your movie.
<ul>
<li><i>%(TM_OVERLAP)s:</i> Compares the amount of spatial overlap between identified objects in
the previous frame with those in the current frame. The object with the
greatest amount of spatial overlap will be assigned the same number (label).
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s">
Recommended when there is a high degree of overlap of an object from one frame to the next,
which is the case for movies with high frame rates relative to object motion.</dd>
</dl></li>
<li><i>%(TM_DISTANCE)s:</i> Compares the distance between each identified
object in the previous frame with that of the current frame. The
closest objects to each other will be assigned the same number (label).
Distances are measured from the perimeter of each object.
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s">
Recommended for cases where the objects are not very crowded but where
<i>%(TM_OVERLAP)s</i> does not work sufficiently well, which is the case
for movies with low frame rates relative to object motion.</dd>
</dl></li>
<li><i>%(TM_MEASUREMENTS)s:</i> Compares each object in the
current frame with objects in the previous frame based on a particular
feature you have measured for the objects (for example, a particular intensity
or shape measurement that can distinguish nearby objects). The object
with the closest-matching measurement will be selected as a match and will be
assigned the same number (label). This selection requires that you run the
specified <b>Measure</b> module previous to this module in the pipeline so
that the measurement values can be used to track the objects.</li>
<li><i>%(TM_LAP)s:</i> Uses the linear assignment problem (LAP) framework. The
linear assignment problem (LAP) algorithm (<i>Jaqaman et al., 2008</i>)
addresses the challenges of high object density, motion heterogeneity,
temporary disappearances, and object merging and splitting.
The algorithm first links objects between consecutive frames and then links
the resulting partial trajectories into complete trajectories. Both steps are formulated
as global combinatorial optimization problems whose solution identifies the overall
most likely set of object trajectories throughout a movie.
<p>Tracks are constructed from an image sequence by detecting objects in each
frame and linking objects between consecutive frames as a first step. This step alone
may result in incompletely tracked objects due to the appearance and disappearance
of objects, either in reality or apparently because of noise and imaging limitations.
To correct this, you may apply an optional second step which closes temporal gaps
between tracked objects and captures merging and splitting events. This step takes
place at the end of the analysis run.</p>
<p><img src="memory:%(PROTIP_RECOMEND_ICON)s"> Some recommendations on optimizing
the LAP settings<br>
<ul>
<li><i>Work with a minimal subset of your data:</i> Attempting to optimize these settings
by examining a dataset containing many objects may be complicated and frustrating.
Therefore, it is a good idea to work with a smaller portion of the data containing the
behavior of interest.
<ul>
<li>For example, if splits characterize your data, try narrowing down to following just
one cell that undergoes a split and examine a few frames before and after the event.</li>
<li>You can insert the <b>Crop</b> module to zoom in a region of interest, optimize the
settings and then either remove or disable the module when done.</li>
<li>You can also use the <b>Input</b> modules to limit yourself to a few frames under
consideration. For example, use the filtering settings in the <b>Images</b> module to
use only certain files from the movie in the pipeline.</li>
</ul></li>
<li><i>Begin by optimizing the settings for the first phase of the LAP:</i> The 2nd phase of
the LAP method depends on the results of the first phase. Therefore, it is a good idea to
optimize the first phase settings as the initial step.
<ul>
<li>You can disable 2nd phase calculation by selecting <i>%(NO)s</i> for "Run the second
phase of the LAP algorithm?"</li>
<li>By maximizing the the number of correct frame-to-frame links in the first phase, the
2nd phase will have less candidates to consider for linking and have a better chance of
closing gaps correctly. </li>
<li>If tracks are not being linked in the first phase, you may need to adjust the number
of standard deviations for the search radius and/or the radius limits (most likely
the maximum limit). See the help for these settings for details.</li>
</ul></li>
<li><i>Use any visualization tools at your disposal:</i>Visualizing the data often allows for
easier decision making as opposed to sorting through tabular data alone.
<ul>
<li>The <a href="http://cran.r-project.org/">R</a> open-source software package has
analysis and visualization tools that can query a database. See <a href=
"http://www.broadinstitute.org/~leek/rtracking.html">here</a> for a use case by our
lead software engineer.</li>
<li><a href="http://cellprofiler.org/tracer/">CellProfiler Tracer</a> is a version of
CellProfiler Analyst that contains tools for visualizing time-lapse data that has been exported
using the <b>ExportToDatabase</b> module.</li>
</ul></li>
</ul>
</p>
<p><b>References</b>
<ul>
<li>Jaqaman K, Loerke D, Mettlen M, Kuwata H, Grinstein S, Schmid SL, Danuser G. (2008)
"Robust single-particle tracking in live-cell time-lapse sequences."
<i>Nature Methods</i> 5(8),695-702.
<a href="http://dx.doi.org/10.1038/nmeth.1237">(link)</a></li>
<li>Jaqaman K, Danuser G. (2009) "Computational image analysis of cellular dynamics:
a case study based on particle tracking." Cold Spring Harb Protoc. 2009(12):pdb.top65.
<a href="http://dx.doi.org/10.1101/pdb.top65">(link)</a></li>
</ul></p>
</li>
</ul>"""%globals())
self.object_name = cps.ObjectNameSubscriber(
'Select the objects to track',cps.NONE, doc="""
Select the objects to be tracked by this module.""")
self.measurement = cps.Measurement(
'Select object measurement to use for tracking',
lambda : self.object_name.value, doc="""
<i>(Used only if Measurements is the tracking method)</i><br>
Select which type of measurement (category) and which specific feature from the
<b>Measure</b> module will be used for tracking. Select the feature name from
the popup box or see each <b>Measure</b> module's help for the list of
the features measured by that module. If necessary, you will also be asked
to specify additional details such as the
image from which the measurements originated or the measurement scale.""")
self.pixel_radius = cps.Integer(
'Maximum pixel distance to consider matches',50,minval=1,doc="""
Objects in the subsequent frame will be considered potential matches if
they are within this distance. To determine a suitable pixel distance, you can look
at the axis increments on each image (shown in pixel units) or
use the distance measurement tool. %(HELP_ON_MEASURING_DISTANCES)s"""%globals())
self.model = cps.Choice(
"Select the movement model",[M_RANDOM, M_VELOCITY, M_BOTH], value=M_BOTH,doc = """
<i>(Used only if the %(TM_LAP)s tracking method is applied)</i><br>
This setting controls how to predict an object's position in
the next frame, assuming that each object moves randomly with
a frame-to-frame variance in position that follows a Gaussian
distribution.<br>
<ul>
<li><i>%(M_RANDOM)s:</i> A model in which objects move due to
Brownian Motion or a similar process where the variance in position
differs between objects.
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s">
Use this model if the objects move with some
random jitter around a stationary location.</dd>
</dl></li>
<li><i>%(M_VELOCITY)s:</i> A model in which the object moves with
a velocity. Both velocity and position (after correcting for
velocity) vary following a Gaussian distribution.
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s"> Use this model if
the objects move along a spatial trajectory in some direction over time.</dd>
</dl></li>
<li><i>%(M_BOTH)s:</i> <b>TrackObjects</b> will predict each
object's position using both models and use the model with the
lowest penalty to join an object in one frame with one in another.
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s"> Use this
option if both models above are applicable over time.</dd>
</dl></li>
</ul>""" % globals())
self.radius_std = cps.Float(
RADIUS_STD_SETTING_TEXT, 3, minval=1,doc = """
<i>(Used only if the %(TM_LAP)s tracking method is applied)</i>
<br>
<b>TrackObjects</b> will estimate the standard deviation of the error
between the observed and predicted positions of an object for
each movement model. It will constrain the search for matching
objects from one frame to the next to the standard deviation
of the error times the number of standard
deviations that you enter here.
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s"> Recommendations:
<ul>
<li>If the standard deviation is quite small, but the object makes a
large spatial jump, this value may need to be set higher in order
to increase the search area and thereby make the frame-to-frame
linkage.</li>
</ul></dd>
</dl>"""%globals())
self.radius_limit = cps.FloatRange(
RADIUS_LIMIT_SETTING_TEXT, (2, 10), minval = 0,doc = """
<i>(Used only if the %(TM_LAP)s tracking method is applied)</i><br>
<b>TrackObjects</b> derives a search radius based on the error
estimation. Potentially, the module can make an erroneous assignment
with a large error, leading to a large estimated error for
the object in the next frame. Conversely, the module can arrive
at a small estimated error by chance, leading to a maximum radius
that does not track the object in a subsequent frame. The radius
limit constrains the maximum radius to reasonable values.
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s"> Recommendations:
<ul>
<li>Special care must be taken to adjust the upper limit appropriate
to the data.</li>
<li>The lower limit should be set to a radius (in pixels) that is a
reasonable displacement for any object from one frame to the next. Hence,
if you notice that a frame-to-frame linkage is not being made for a
steadily-moving object, it may be that this value needs to be decreased
such that the displacement falls above the lower limit.</li>
<li>The upper limit should be set to the maximum reasonable
displacement (in pixels) under any circumstances. Hence, if you notice that
a frame-to-frame linkage is not being made in the case of a unusually
large displacement, this value may need to be increased.</li>
</ul></dd>
</dl>"""%globals())
self.wants_second_phase = cps.Binary(
"Run the second phase of the LAP algorithm?", True, doc="""
<i>(Used only if the %(TM_LAP)s tracking method is applied)</i><br>
Select <i>%(YES)s</i> to run the second phase of the LAP algorithm
after processing all images. Select <i>%(NO)s</i> to omit the
second phase or to perform the second phase when running the module
as a data tool.
<p>Since object tracks may start and end not only because of the true appearance
and disappearance of objects, but also because of apparent disappearances due
to noise and limitations in imaging, you may want to run the second phase
which attempts to close temporal gaps between tracked objects and tries to
capture merging and splitting events.</p>
<p>For additional details on optimizing the LAP settings, see the help for each
the settings.</p>"""%globals())
self.gap_cost = cps.Integer(
'Gap closing cost', 40, minval=1, doc = '''
%(ONLY_IF_2ND_PHASE_LAP_TEXT)s<br>
This setting assigns a cost to keeping a gap caused
when an object is missing from one of the frames of a track (the
alternative to keeping the gap is to bridge it by connecting
the tracks on either side of the missing frames).
The cost of bridging a gap is the distance, in pixels, of the
displacement of the object between frames.
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s"> Recommendations:
<ul>
<li>Set the gap closing cost higher if tracks from objects in previous
frames are being erroneously joined, across a gap, to tracks from
objects in subsequent frames. </li>
<li>Set the gap closing cost lower if tracks
are not properly joined due to gaps caused by mis-segmentation.</li>
</ul></dd>
</dl></p>'''%globals())
self.split_cost = cps.Integer(
'Split alternative cost', 40, minval=1, doc = '''
%(ONLY_IF_2ND_PHASE_LAP_TEXT)s<br>
This setting is the cost of keeping two tracks distinct
when the alternative is to make them into one track that
splits. A split occurs when an object in one frame is assigned
to the same track as two objects in a subsequent frame.
The split cost takes two components into account:
<ul>
<li>The area of the split object relative to the area of
the resulting objects.</li>
<li>The displacement of the resulting
objects relative to the position of the original object.</li>
</ul>
The split cost is roughly measured in pixels. The split alternative cost is
(conceptually) subtracted from the cost of making the split.
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s"> Recommendations:
<ul>
<li>The split cost should be set lower if objects are being split
that should not be split. </li>
<li>The split cost should be set higher if objects
that should be split are not.</li>
<li>If you are confident that there should be no splits present in the data,
the cost can be set to 1 (the minimum value possible)</li>
</ul></dd>
</dl>'''%globals())
self.merge_cost = cps.Integer(
'Merge alternative cost', 40, minval=1,doc = '''
%(ONLY_IF_2ND_PHASE_LAP_TEXT)s<br>
This setting is the cost of keeping two tracks
distinct when the alternative is to merge them into one.
A merge occurs when two objects in one frame are assigned to
the same track as a single object in a subsequent frame.
The merge score takes two components into account:
<ul>
<li>The area of the two objects
to be merged relative to the area of the resulting objects.</li>
<li>The displacement of the original objects relative to the final
object. </li>
</ul>
The merge cost is measured in pixels. The merge
alternative cost is (conceptually) subtracted from the
cost of making the merge.
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s"> Recommendations:
<ul>
<li>Set the merge alternative cost lower if objects are being
merged when they should otherwise be kept separate. </li>
<li>Set the merge alternative cost
higher if objects that are not merged should be merged.</li>
<li>If you are confident that there should be no merges present in the data,
the cost can be set to 1 (the minimum value possible)</li>
</ul></dd>
</dl>'''%globals())
self.mitosis_cost = cps.Integer(
'Mitosis alternative cost', 80, minval=1, doc = '''
%(ONLY_IF_2ND_PHASE_LAP_TEXT)s<br>
This setting is the cost of not linking a parent and two daughters
via the mitosis model. the %(TM_LAP)s tracking method weighs this
cost against the score of a potential mitosis. The model expects
the daughters to be equidistant from the parent after mitosis,
so the parent location is expected to be midway between the daughters.
In addition, the model expects the daughters' areas to be equal
to the parent's area. The mitosis score is the distance error
of the parent times the area inequality ratio of the parent and
daughters (the larger of Area(daughters) / Area(parent) and
Area(parent) / Area(daughters)).<br>
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s"> Recommendations:
<ul>
<li>An accepted mitosis closes two gaps, so all things being equal,
the mitosis alternative cost should be approximately double the
gap closing cost.</li>
<li>Increase the mitosis alternative cost to favor more mitoses
and decrease it to prevent more mitoses candidates from being
accepted.</li>
</ul></dd>
</dl>'''%globals())
self.mitosis_max_distance = cps.Integer(
'Maximum mitosis distance, in pixel units', 40, minval=1, doc= '''
%(ONLY_IF_2ND_PHASE_LAP_TEXT)s<br>
This setting is the maximum allowed distance in pixels of either
of the daughter candidate centroids after mitosis from the parent candidate.
'''%globals())
self.max_gap_score = cps.Integer(
'Maximum gap displacement, in pixel units', 5, minval=1, doc = '''
%(ONLY_IF_2ND_PHASE_LAP_TEXT)s<br>
This setting acts as a filter for unreasonably large
displacements during the second phase.
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s"> Recommendations:
<ul>
<li>The maximum gap displacement should be set to roughly
the maximum displacement of an object's center from frame to frame. An object that makes large
frame-to-frame jumps should have a higher value for this setting than one that only moves slightly.</li>
<li>Be aware that the LAP algorithm will run more slowly with a higher maximum gap displacement
value, since the higher this value, the more objects that must be compared at each step.</li>
<li>Objects that would have been tracked between successive frames for a lower maximum displacement
may not be tracked if the value is set higher.</li>
<li>This setting may be the culprit if an object is not tracked fame-to-frame despite optimizing
the LAP first-pass settings.</li>
</ul></dd>
</dl>'''%globals())
self.max_merge_score = cps.Integer(
'Maximum merge score', 50, minval=1, doc = '''
%(ONLY_IF_2ND_PHASE_LAP_TEXT)s<br>
This setting acts as a filter for unreasonably large
merge scores. The merge score has two components:
<ul>
<li>The area of the resulting merged object relative to the area of the
two objects to be merged.</li>
<li>The distances between the objects to be merged and the resulting object. </li>
</ul>
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s"> Recommendations:
<ul>
<li>The LAP algorithm will run more slowly with a higher maximum merge score value. </li>
<li>Objects that would have been merged at a lower maximum merge score will not be considered for merging.</li>
</ul></dd>
</dl>'''%globals())
self.max_split_score = cps.Integer(
'Maximum split score', 50, minval=1, doc = '''
%(ONLY_IF_2ND_PHASE_LAP_TEXT)s<br>
This setting acts as a filter for unreasonably large split scores. The split score has two components:
<ul>
<li>The area of the initial object relative to the area of the
two objects resulting from the split.</li>
<li>The distances between the original and resulting objects. </li>
</ul>
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s"> Recommendations:
<ul>
<li>The LAP algorithm will run more slowly with a maximum split score value. </li>
<li>Objects that would have been split at a lower maximum split score will not be considered for splitting.</li>
</ul></dd>
</dl>'''%globals())
self.max_frame_distance = cps.Integer(
'Maximum temporal gap, in frames', 5, minval=1, doc = '''
%(ONLY_IF_2ND_PHASE_LAP_TEXT)s<br>
<b>Care must be taken to adjust this setting appropriate to the data.</b><br>
This setting controls the maximum number of frames that can
be skipped when merging a temporal gap caused by an unsegmented object.
These gaps occur when an image is mis-segmented and identification
fails to find an object in one or more frames.
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s"> Recommendations:
<ul>
<li>Set the maximum gap higher in order to have more chance of correctly recapturing an object after
erroneously losing the original for a few frames.</li>
<li>Set the maximum gap lower to reduce the chance of erroneously connecting to the wrong object after
correctly losing the original object (e.g., if the cell dies or moves off-screen).</li>
</ul></dd>
</dl>'''%globals())
self.wants_lifetime_filtering = cps.Binary(
'Filter objects by lifetime?', False, doc = '''
Select <i>%(YES)s</i> if you want objects to be filtered by their
lifetime, i.e., total duration in frames. This is useful for
marking objects which transiently appear and disappear, such
as the results of a mis-segmentation. <br>
<dl>
<dd><img src="memory:%(PROTIP_RECOMEND_ICON)s"> Recommendations:
<ul>
<li>This operation does not actually delete the filtered object,
but merely removes its label from the tracked object list;
the filtered object's per-object measurements are retained.</li>
<li>An object can be filtered only if it is tracked as an unique object.
Splits continue the lifetime count from their parents, so the minimum
lifetime value does not apply to them.</li>
</ul></dd>
</dl>'''%globals())
self.wants_minimum_lifetime = cps.Binary(
'Filter using a minimum lifetime?', True, doc = '''
<i>(Used only if objects are filtered by lifetime)</i><br>
Select <i>%(YES)s</i> to filter the object on the basis of a minimum number of frames.'''%globals())
self.min_lifetime = cps.Integer(
'Minimum lifetime', 1, minval=1,doc="""
Enter the minimum number of frames an object is permitted to persist. Objects
which last this number of frames or lower are filtered out.""")
self.wants_maximum_lifetime = cps.Binary(
'Filter using a maximum lifetime?', False, doc = '''
<i>(Used only if objects are filtered by lifetime)</i><br>
Select <i>%(YES)s</i> to filter the object on the basis of a maximum number of frames.'''%globals())
self.max_lifetime = cps.Integer(
'Maximum lifetime', 100, doc="""
Enter the maximum number of frames an object is permitted to persist. Objects
which last this number of frames or more are filtered out.""")
self.display_type = cps.Choice(
'Select display option', DT_ALL, doc="""
The output image can be saved as:
<ul>
<li><i>%(DT_COLOR_ONLY)s:</i> A color-labeled image, with each tracked
object assigned a unique color</li>
<li><i>%(DT_COLOR_AND_NUMBER)s:</i> Same as above but with the tracked object
number superimposed.</li>
</ul>"""%globals())
self.wants_image = cps.Binary(
"Save color-coded image?", False, doc="""
Select <i>%(YES)s</i> to retain the image showing the tracked objects
for later use in the pipeline. For example, a common use is for quality control purposes
saving the image with the <b>SaveImages</b> module.
<p>Please note that if you are using the second phase of the %(TM_LAP)s method,
the final labels are not assigned until <i>after</i> the pipeline has
completed the analysis run. That means that saving the color-coded image
will only show the penultimate result and not the final product.</p>."""%globals())
self.image_name = cps.ImageNameProvider(
"Name the output image", "TrackedCells", doc = '''
<i>(Used only if saving the color-coded image)</i><br>
Enter a name to give the color-coded image of tracked labels.''')
def settings(self):
return [self.tracking_method, self.object_name, self.measurement,
self.pixel_radius, self.display_type, self.wants_image,
self.image_name, self.model,
self.radius_std, self.radius_limit,
self.wants_second_phase,
self.gap_cost, self.split_cost, self.merge_cost,
self.max_gap_score, self.max_split_score,
self.max_merge_score, self.max_frame_distance,
self.wants_lifetime_filtering, self.wants_minimum_lifetime,
self.min_lifetime, self.wants_maximum_lifetime,
self.max_lifetime, self.mitosis_cost, self.mitosis_max_distance]
def validate_module(self, pipeline):
'''Make sure that the user has selected some limits when filtering'''
if (self.tracking_method == TM_LAP and
self.wants_lifetime_filtering.value and
(self.wants_minimum_lifetime.value == False and self.wants_minimum_lifetime.value == False) ):
raise cps.ValidationError(
'Please enter a minimum and/or maximum lifetime limit',
self.wants_lifetime_filtering)
def visible_settings(self):
result = [self.tracking_method, self.object_name]
if self.tracking_method == TM_MEASUREMENTS:
result += [ self.measurement]
if self.tracking_method == TM_LAP:
result += [self.model, self.radius_std, self.radius_limit]
result += [self.wants_second_phase]
if self.wants_second_phase:
result += [
self.gap_cost, self.split_cost, self.merge_cost,
self.mitosis_cost,
self.max_gap_score, self.max_split_score,
self.max_merge_score, self.max_frame_distance,
self.mitosis_max_distance]
else:
result += [self.pixel_radius]
result += [ self.wants_lifetime_filtering]
if self.wants_lifetime_filtering:
result += [ self.wants_minimum_lifetime ]
if self.wants_minimum_lifetime:
result += [ self.min_lifetime ]
result += [ self.wants_maximum_lifetime ]
if self.wants_maximum_lifetime:
result += [ self.max_lifetime ]
result +=[ self.display_type, self.wants_image]
if self.wants_image.value:
result += [self.image_name]
return result
@property
def static_model(self):
return self.model in (M_RANDOM, M_BOTH)
@property
def velocity_model(self):
return self.model in (M_VELOCITY, M_BOTH)
def get_ws_dictionary(self, workspace):
return self.get_dictionary(workspace.image_set_list)
def __get(self, field, workspace, default):
if self.get_ws_dictionary(workspace).has_key(field):
return self.get_ws_dictionary(workspace)[field]
return default
def __set(self, field, workspace, value):
self.get_ws_dictionary(workspace)[field] = value
def get_group_image_numbers(self, workspace):
m = workspace.measurements
assert isinstance(m, cpmeas.Measurements)
d = self.get_ws_dictionary(workspace)
group_number = m.get_group_number()
if not d.has_key("group_number") or d["group_number"] != group_number:
d["group_number"] = group_number
group_indexes = np.array([
(m.get_measurement(cpmeas.IMAGE, cpmeas.GROUP_INDEX, i), i)
for i in m.get_image_numbers()
if m.get_measurement(cpmeas.IMAGE, cpmeas.GROUP_NUMBER, i) ==
group_number], int)
order = np.lexsort([group_indexes[:, 0]])
d["group_image_numbers"] = group_indexes[order, 1]
return d["group_image_numbers"]
    # ---- Per-group tracking state accessors --------------------------------
    # Each get/set pair wraps the workspace dictionary (via __get/__set) so
    # that tracking state persists from one image set to the next within a
    # group. Getters supply an empty default for the first frame.
    def get_saved_measurements(self, workspace):
        # Measurement values recorded for the previous frame's objects
        return self.__get("measurements", workspace, np.array([], float))
    def set_saved_measurements(self, workspace, value):
        self.__set("measurements", workspace, value)
    def get_saved_coordinates(self, workspace):
        # 2 x N array of the previous frame's object center coordinates
        return self.__get("coordinates", workspace, np.zeros((2,0), int))
    def set_saved_coordinates(self, workspace, value):
        self.__set("coordinates", workspace, value)
    def get_orig_coordinates(self, workspace):
        '''The coordinates of the first occurrence of an object's ancestor'''
        return self.__get("orig coordinates", workspace, np.zeros((2,0), int))
    def set_orig_coordinates(self, workspace, value):
        self.__set("orig coordinates", workspace, value)
    def get_saved_labels(self, workspace):
        # Previous frame's label matrix, or None before the first frame
        return self.__get("labels", workspace, None)
    def set_saved_labels(self, workspace, value):
        self.__set("labels", workspace, value)
    def get_saved_object_numbers(self, workspace):
        # Track numbers assigned to the previous frame's objects
        return self.__get("object_numbers", workspace, np.array([], int))
    def set_saved_object_numbers(self, workspace, value):
        # NOTE(review): the "return" is a stray — __set returns None, so it
        # is harmless but inconsistent with the sibling setters.
        return self.__set("object_numbers", workspace, value)
    def get_saved_ages(self, workspace):
        # Per-object count of frames each track has existed
        return self.__get("ages", workspace, np.array([], int))
    def set_saved_ages(self, workspace, values):
        self.__set("ages", workspace, values)
    def get_saved_distances(self, workspace):
        # Per-object distance traveled in the last frame-to-frame link
        return self.__get("distances", workspace, np.zeros((0,)))
    def set_saved_distances(self, workspace, values):
        self.__set("distances", workspace, values)
    def get_max_object_number(self, workspace):
        # Highest track number handed out so far (new tracks continue from it)
        return self.__get("max_object_number", workspace, 0)
    def set_max_object_number(self, workspace, value):
        self.__set("max_object_number", workspace, value)
    def get_kalman_states(self, workspace):
        # Kalman filter state from the previous frame (LAP method only)
        return self.__get("kalman_states", workspace, None)
    def set_kalman_states(self, workspace, value):
        self.__set("kalman_states", workspace, value)
def prepare_group(self, workspace, grouping, image_numbers):
'''Erase any tracking information at the start of a run'''
d = self.get_dictionary(workspace.image_set_list)
d.clear()
return True
def measurement_name(self, feature):
'''Return a measurement name for the given feature'''
if self.tracking_method == TM_LAP:
return "%s_%s" % (F_PREFIX, feature)
return "%s_%s_%s" % (F_PREFIX, feature, str(self.pixel_radius.value))
def image_measurement_name(self, feature):
'''Return a measurement name for an image measurement'''
if self.tracking_method == TM_LAP:
return "%s_%s_%s" % (F_PREFIX, feature, self.object_name.value)
return "%s_%s_%s_%s" % (F_PREFIX, feature, self.object_name.value,
str(self.pixel_radius.value))
def add_measurement(self, workspace, feature, values):
'''Add a measurement to the workspace's measurements
workspace - current image set's workspace
feature - name of feature being measured
values - one value per object
'''
workspace.measurements.add_measurement(
self.object_name.value,
self.measurement_name(feature),
values)
def add_image_measurement(self, workspace, feature, value):
measurement_name = self.image_measurement_name(feature)
workspace.measurements.add_image_measurement(measurement_name, value)
    def run(self, workspace):
        '''Track the selected objects for the current image set.

        Dispatches to the per-method tracker, optionally renders the
        color-coded label image, and stashes display data for the GUI.
        '''
        objects = workspace.object_set.get_objects(self.object_name.value)
        # Dispatch on the configured tracking method
        if self.tracking_method == TM_DISTANCE:
            self.run_distance(workspace, objects)
        elif self.tracking_method == TM_OVERLAP:
            self.run_overlap(workspace, objects)
        elif self.tracking_method == TM_MEASUREMENTS:
            self.run_measurements(workspace, objects)
        elif self.tracking_method == TM_LAP:
            self.run_lapdistance(workspace, objects)
        else:
            raise NotImplementedError("Unimplemented tracking method: %s" %
                                      self.tracking_method.value)
        if self.wants_image.value:
            # Render the tracked labels through an off-screen Agg canvas so
            # the image can be saved without a GUI. Imports are local to
            # avoid loading matplotlib on the headless no-image path.
            import matplotlib.figure
            import matplotlib.axes
            import matplotlib.backends.backend_agg
            import matplotlib.transforms
            from cellprofiler.gui.cpfigure_tools import figure_to_image, only_display_image
            figure = matplotlib.figure.Figure()
            canvas = matplotlib.backends.backend_agg.FigureCanvasAgg(figure)
            ax = figure.add_subplot(1,1,1)
            self.draw(objects.segmented, ax,
                      self.get_saved_object_numbers(workspace))
            #
            # This is the recipe for just showing the axis
            #
            only_display_image(figure, objects.segmented.shape)
            image_pixels = figure_to_image(figure, dpi=figure.dpi)
            image = cpi.Image(image_pixels)
            workspace.image_set.add(self.image_name.value, image)
        if self.show_window:
            # Stash what display() needs to redraw in the GUI thread
            workspace.display_data.labels = objects.segmented
            workspace.display_data.object_numbers = \
                self.get_saved_object_numbers(workspace)
def display(self, workspace, figure):
if hasattr(workspace.display_data, "labels"):
figure.set_subplots((1, 1))
subfigure = figure.figure
subfigure.clf()
ax = subfigure.add_subplot(1,1,1)
self.draw(workspace.display_data.labels, ax,
workspace.display_data.object_numbers)
else:
# We get here after running as a data tool
figure.figure.text(.5, .5, "Analysis complete",
ha="center", va="center")
    def draw(self, labels, ax, object_numbers):
        '''Paint the label matrix into ax, colored by track number.

        labels - segmentation label matrix
        ax - matplotlib axes to draw into
        object_numbers - track number for each label (1..N)
        '''
        import matplotlib
        indexer = np.zeros(len(object_numbers)+1,int)
        indexer[1:] = object_numbers
        #
        # We want to keep the colors stable, but we also want the
        # largest possible separation between adjacent colors. So, here
        # we reverse the significance of the bits in the indices so
        # that adjacent number (e.g. 0 and 1) differ by 128, roughly
        #
        pow_of_2 = 2**np.mgrid[0:8,0:len(indexer)][0]
        bits = (indexer & pow_of_2).astype(bool)
        indexer = np.sum(bits.transpose() * (2 ** np.arange(7,-1,-1)), 1)
        recolored_labels = indexer[labels]
        cm = matplotlib.cm.get_cmap(cpprefs.get_default_colormap())
        # Background (masked pixels) renders as black
        cm.set_bad((0,0,0))
        norm = matplotlib.colors.BoundaryNorm(range(256), 256)
        # NOTE(review): this uses "numpy.ma.array" while the rest of the
        # module uses the "np" alias — presumably the file also has a plain
        # "import numpy" at the top; confirm, else this line raises NameError.
        img = ax.imshow(numpy.ma.array(recolored_labels, mask=(labels==0)),
                        cmap=cm, norm=norm)
        if self.display_type == DT_COLOR_AND_NUMBER:
            # Overlay each track number at its object's centroid
            i,j = centers_of_labels(labels)
            for n, x, y in zip(object_numbers, j, i):
                if np.isnan(x) or np.isnan(y):
                    # This happens if there are missing labels
                    continue
                ax.annotate(str(n), xy=(x,y),color='white',
                            arrowprops=dict(visible=False))
    def run_distance(self, workspace, objects):
        '''Track objects based on distance.

        Links each previous-frame object to the nearest current-frame
        object (and vice versa) using Euclidean distance transforms,
        zeroing any link longer than the configured pixel radius.
        '''
        old_i, old_j = self.get_saved_coordinates(workspace)
        if len(old_i):
            # Distance transform of the background gives, for every pixel,
            # the indices (i,j) of the nearest labeled pixel
            distances, (i,j) = distance_transform_edt(objects.segmented == 0,
                                                      return_indices=True)
            #
            # Look up the coordinates of the nearest new object (given by
            # the transform i,j), then look up the label at that coordinate
            # (objects.segmented[#,#])
            #
            new_object_numbers = objects.segmented[i[old_i, old_j],
                                                   j[old_i, old_j]]
            #
            # Mask out any objects at too great of a distance
            #
            new_object_numbers[distances[old_i, old_j] >
                               self.pixel_radius.value] = 0
            #
            # Do the same with the new centers and old objects
            #
            i,j = (centers_of_labels(objects.segmented)+.5).astype(int)
            old_labels = self.get_saved_labels(workspace)
            distances, (old_i,old_j) = distance_transform_edt(
                old_labels == 0,
                return_indices=True)
            old_object_numbers = old_labels[old_i[i, j],
                                            old_j[i, j]]
            old_object_numbers[distances[i, j] > self.pixel_radius.value] = 0
            self.map_objects(workspace,
                             new_object_numbers,
                             old_object_numbers,
                             i,j)
        else:
            # First frame of the group: every object starts a new track
            i,j = (centers_of_labels(objects.segmented)+.5).astype(int)
            count = len(i)
            self.map_objects(workspace, np.zeros((0,),int),
                             np.zeros(count,int), i,j)
        self.set_saved_labels(workspace, objects.segmented)
def run_lapdistance(self, workspace, objects):
'''Track objects based on distance'''
m = workspace.measurements
old_i, old_j = self.get_saved_coordinates(workspace)
n_old = len(old_i)
#
# Automatically set the cost of birth and death above
# that of the largest allowable cost.
#
costBorn = costDie = self.radius_limit.max * 1.10
kalman_states = self.get_kalman_states(workspace)
if kalman_states == None:
if self.static_model:
kalman_states = [ cpfilter.static_kalman_model()]
else:
kalman_states = []
if self.velocity_model:
kalman_states.append(cpfilter.velocity_kalman_model())
areas = fix(scipy.ndimage.sum(
np.ones(objects.segmented.shape), objects.segmented,
np.arange(1, np.max(objects.segmented) + 1,dtype=np.int32)))
areas = areas.astype(int)
model_types = np.array(
[m for m, s in ((KM_NO_VEL, self.static_model),
(KM_VEL, self.velocity_model)) if s], int)
if n_old > 0:
new_i, new_j = centers_of_labels(objects.segmented)
n_new = len(new_i)
i,j = np.mgrid[0:n_old, 0:n_new]
##############################
#
# Kalman filter prediction
#
#
# We take the lowest cost among all possible models
#
minDist = np.ones((n_old, n_new)) * self.radius_limit.max
d = np.ones((n_old, n_new)) * np.inf
sd = np.zeros((n_old, n_new))
# The index of the Kalman filter used: -1 means not used
kalman_used = -np.ones((n_old, n_new), int)
for nkalman, kalman_state in enumerate(kalman_states):
assert isinstance(kalman_state, cpfilter.KalmanState)
obs = kalman_state.predicted_obs_vec
dk = np.sqrt((obs[i,0] - new_i[j])**2 +
(obs[i,1] - new_j[j])**2)
noise_sd = np.sqrt(np.sum(kalman_state.noise_var[:,0:2], 1))
radius = np.maximum(np.minimum(noise_sd * self.radius_std.value,
self.radius_limit.max),
self.radius_limit.min)
is_best = ((dk < d) & (dk < radius[:, np.newaxis]))
d[is_best] = dk[is_best]
minDist[is_best] = radius[i][is_best]
kalman_used[is_best] = nkalman
minDist = np.maximum(np.minimum(minDist, self.radius_limit.max),
self.radius_limit.min)
#
#############################
#
# Linear assignment setup
#
n = len(old_i)+len(new_i)
kk = np.zeros((n+10)*(n+10), np.int32)
first = np.zeros(n+10, np.int32)
cc = np.zeros((n+10)*(n+10), np.float)
t = np.argwhere((d < minDist))
x = np.sqrt((old_i[t[0:t.size, 0]]-new_i[t[0:t.size, 1]])**2 + (old_j[t[0:t.size, 0]]-new_j[t[0:t.size, 1]])**2)
t = t+1
t = np.column_stack((t, x))
a = np.arange(len(old_i))+2
x = np.searchsorted(t[0:(t.size/2),0], a)
a = np.arange(len(old_i))+1
b = np.arange(len(old_i))+len(new_i)+1
c = np.zeros(len(old_i))+costDie
b = np.column_stack((a, b, c))
t = np.insert(t, x, b, 0)
i,j = np.mgrid[0:len(new_i),0:len(old_i)+1]
i = i+len(old_i)+1
j = j+len(new_i)
j[0:len(new_i)+1,0] = i[0:len(new_i)+1,0]-len(old_i)
x = np.zeros((len(new_i),len(old_i)+1))
x[0:len(new_i)+1,0] = costBorn
i = i.flatten()
j = j.flatten()
x = x.flatten()
x = np.column_stack((i, j, x))
t = np.vstack((t, x))
# Tack 0 <-> 0 at the start because object #s start at 1
i = np.hstack([0,t[:,0].astype(int)])
j = np.hstack([0,t[:,1].astype(int)])
c = np.hstack([0,t[:,2]])
x, y = lapjv(i, j, c)
a = np.argwhere(x > len(new_i))
b = np.argwhere(y >len(old_i))
x[a[0:len(a)]] = 0
y[b[0:len(b)]] = 0
a = np.arange(len(old_i))+1
b = np.arange(len(new_i))+1
new_object_numbers = x[a[0:len(a)]].astype(int)
old_object_numbers = y[b[0:len(b)]].astype(int)
###############################
#
# Kalman filter update
#
model_idx = np.zeros(len(old_object_numbers), int)
linking_distance = np.ones(len(old_object_numbers)) * np.NaN
standard_deviation = np.ones(len(old_object_numbers)) * np.NaN
model_type = np.ones(len(old_object_numbers), int) * KM_NONE
link_type = np.ones(len(old_object_numbers), int) * LT_NONE
mask = old_object_numbers > 0
old_idx = old_object_numbers - 1
model_idx[mask] =\
kalman_used[old_idx[mask], mask]
linking_distance[mask] = d[old_idx[mask], mask]
standard_deviation[mask] = \
linking_distance[mask] / noise_sd[old_idx[mask]]
model_type[mask] = model_types[model_idx[mask]]
link_type[mask] = LT_PHASE_1
#
# The measurement covariance is the square of the
# standard deviation of the measurement error. Assume
# that the measurement error comes from not knowing where
# the center is within the cell, then the error is
# proportional to the radius and the square to the area.
#
measurement_variance = areas.astype(float) / np.pi
#
# Broadcast the measurement error into a diagonal matrix
#
r = (measurement_variance[:, np.newaxis, np.newaxis] *
np.eye(2)[np.newaxis,:,:])
new_kalman_states = []
for kalman_state in kalman_states:
#
# The process noise covariance is a diagonal of the
# state noise variance.
#
state_len = kalman_state.state_len
q = np.zeros((len(old_idx), state_len, state_len))
if np.any(mask):
#
# Broadcast into the diagonal
#
new_idx = np.arange(len(old_idx))[mask]
matching_idx = old_idx[new_idx]
i,j = np.mgrid[0:len(matching_idx),0:state_len]
q[new_idx[i], j, j] = \
kalman_state.noise_var[matching_idx[i],j]
new_kalman_state = cpfilter.kalman_filter(
kalman_state,
old_idx,
np.column_stack((new_i, new_j)),
q,r)
new_kalman_states.append(new_kalman_state)
self.set_kalman_states(workspace, new_kalman_states)
i,j = (centers_of_labels(objects.segmented)+.5).astype(int)
self.map_objects(workspace,
new_object_numbers,
old_object_numbers,
i,j)
else:
i,j = centers_of_labels(objects.segmented)
count = len(i)
link_type = np.ones(count, int) * LT_NONE
model_type = np.ones(count, int) * KM_NONE
linking_distance = np.ones(count) * np.NaN
standard_deviation = np.ones(count) * np.NaN
#
# Initialize the kalman_state with the new objects
#
new_kalman_states = []
r = np.zeros((count, 2, 2))
for kalman_state in kalman_states:
q = np.zeros((count, kalman_state.state_len, kalman_state.state_len))
new_kalman_state = cpfilter.kalman_filter(
kalman_state, -np.ones(count),
np.column_stack((i,j)), q, r)
new_kalman_states.append(new_kalman_state)
self.set_kalman_states(workspace, new_kalman_states)
i = (i+.5).astype(int)
j = (j+.5).astype(int)
self.map_objects(workspace, np.zeros((0,),int),
np.zeros(count,int), i,j)
m = workspace.measurements
assert isinstance(m, cpmeas.Measurements)
m.add_measurement(self.object_name.value,
self.measurement_name(F_AREA),
areas)
m[self.object_name.value,
self.measurement_name(F_LINKING_DISTANCE)] = linking_distance
m[self.object_name.value,
self.measurement_name(F_STANDARD_DEVIATION)] = standard_deviation
m[self.object_name.value,
self.measurement_name(F_MOVEMENT_MODEL)] = model_type
m[self.object_name.value,
self.measurement_name(F_LINK_TYPE)] = link_type
self.save_kalman_measurements(workspace)
self.set_saved_labels(workspace, objects.segmented)
def get_kalman_models(self):
'''Return tuples of model and names of the vector elements'''
if self.static_model:
models = [ (F_STATIC_MODEL, (F_Y, F_X))]
else:
models = []
if self.velocity_model:
models.append((F_VELOCITY_MODEL, (F_Y, F_X, F_VY, F_VX)))
return models
    def save_kalman_measurements(self, workspace):
        '''Save the first-pass state_vec, state_cov and state_noise

        For each Kalman model and each element of its state vector, writes
        three kinds of per-object measurements: the state vector value, the
        most recently recorded state noise (NaN when none was recorded for
        an object) and every entry of the state covariance matrix.
        '''
        m = workspace.measurements
        object_name = self.object_name.value
        for (model, elements), kalman_state in zip(
            self.get_kalman_models(), self.get_kalman_states(workspace)):
            assert isinstance(kalman_state, cpfilter.KalmanState)
            nobjs = len(kalman_state.state_vec)
            if nobjs > 0:
                #
                # Get the last state_noise entry for each object
                #
                # scipy.ndimage.maximum probably should return NaN if
                # no index exists, but, in 0.8.0, returns 0. So stack
                # a bunch of -1 values so every object will have a "-1"
                # index.
                last_idx = scipy.ndimage.maximum(
                    np.hstack((
                        -np.ones(nobjs),
                        np.arange(len(kalman_state.state_noise_idx)))),
                    np.hstack((
                        np.arange(nobjs), kalman_state.state_noise_idx)),
                    np.arange(nobjs))
                last_idx = last_idx.astype(int)
            # One measurement per state-vector element (e.g. Y, X, VY, VX)
            for i, element in enumerate(elements):
                #
                # state_vec
                #
                mname = self.measurement_name(
                    kalman_feature(model, F_STATE, element))
                values = np.zeros(0) if nobjs == 0 else kalman_state.state_vec[:,i]
                m.add_measurement(object_name, mname, values)
                #
                # state_noise
                #
                mname = self.measurement_name(
                    kalman_feature(model, F_NOISE, element))
                values = np.zeros(nobjs)
                if nobjs > 0:
                    # Objects with no recorded noise entry get NaN
                    values[last_idx == -1] = np.NaN
                    values[last_idx > -1] = kalman_state.state_noise[last_idx[last_idx > -1], i]
                m.add_measurement(object_name, mname, values)
                #
                # state_cov
                #
                # Covariance is saved pairwise against every other element
                for j, el2 in enumerate(elements):
                    mname = self.measurement_name(
                        kalman_feature(model, F_COV, element, el2))
                    values = kalman_state.state_cov[:, i, j]
                    m.add_measurement(object_name, mname, values)
    def run_overlap(self, workspace, objects):
        '''Track objects by maximum # of overlapping pixels'''
        current_labels = objects.segmented
        old_labels = self.get_saved_labels(workspace)
        # Integer pixel coordinates of each label's center of mass
        i,j = (centers_of_labels(objects.segmented)+.5).astype(int)
        if old_labels is None:
            # First image set of the group: every object is new
            count = len(i)
            self.map_objects(workspace, np.zeros((0,),int),
                             np.zeros(count,int), i,j)
        else:
            # Pixels that belong to an object in both this frame and the last
            mask = ((current_labels > 0) & (old_labels > 0))
            cur_count = np.max(current_labels)
            old_count = np.max(old_labels)
            count = np.sum(mask)
            if count == 0:
                # There's no overlap.
                self.map_objects(workspace,
                                 np.zeros(old_count, int),
                                 np.zeros(cur_count,int),
                                 i,j)
            else:
                # 2-d histogram of (new label, old label) over overlapping
                # pixels; row/column 0 corresponds to background.
                cur = current_labels[mask]
                old = old_labels[mask]
                histogram = scipy.sparse.coo_matrix(
                    (np.ones(count),(cur, old)),
                    shape=(cur_count+1,old_count+1)).toarray()
                # For each new object: old object sharing the most pixels
                old_of_new = np.argmax(histogram, 1)[1:]
                # For each old object: new object sharing the most pixels
                new_of_old = np.argmax(histogram, 0)[1:]
                #
                # The cast here seems to be needed to make scipy.ndimage.sum
                # work. See http://projects.scipy.org/numpy/ticket/1012
                #
                # NOTE(review): the intermediate int16 cast would truncate
                # label numbers above 32767 -- confirm object counts stay
                # below that before relying on this workaround.
                old_of_new = np.array(old_of_new, np.int16)
                old_of_new = np.array(old_of_new, np.int32)
                new_of_old = np.array(new_of_old, np.int16)
                new_of_old = np.array(new_of_old, np.int32)
                self.map_objects(workspace,
                                 new_of_old,
                                 old_of_new,
                                 i,j)
        self.set_saved_labels(workspace, current_labels)
    def run_measurements(self, workspace, objects):
        '''Track objects by minimizing the difference of a measurement

        Candidate old/new pairs within pixel_radius are scored by the
        absolute difference of the chosen per-object measurement; each old
        object keeps its best-matching child and each new object its
        best-matching parent.
        '''
        current_labels = objects.segmented
        new_measurements = workspace.measurements.get_current_measurement(
            self.object_name.value,
            self.measurement.value)
        old_measurements = self.get_saved_measurements(workspace)
        old_labels = self.get_saved_labels(workspace)
        # Integer pixel coordinates of each label's center of mass
        i,j = (centers_of_labels(objects.segmented)+.5).astype(int)
        if old_labels is None:
            # First image set of the group: every object is new
            count = len(i)
            self.map_objects(workspace, np.zeros((0,),int),
                             np.zeros(count,int), i,j)
        else:
            # Candidate (old, new) object pairs within the pixel radius
            associations = associate_by_distance(old_labels, current_labels,
                                                 self.pixel_radius.value)
            best_child = np.zeros(len(old_measurements), int)
            best_parent = np.zeros(len(new_measurements), int)
            # Best (smallest) measurement difference seen so far per old /
            # new object, initialized to a huge sentinel value
            best_child_measurement = (np.ones(len(old_measurements), int) *
                                      np.finfo(float).max)
            best_parent_measurement = (np.ones(len(new_measurements), int) *
                                       np.finfo(float).max)
            for old, new in associations:
                # Object numbers are 1-based; arrays are 0-based
                diff = abs(old_measurements[old-1] - new_measurements[new-1])
                if diff < best_child_measurement[old-1]:
                    best_child[old-1] = new
                    best_child_measurement[old-1] = diff
                if diff < best_parent_measurement[new-1]:
                    best_parent[new-1] = old
                    best_parent_measurement[new-1] = diff
            self.map_objects(workspace, best_child, best_parent, i,j)
        self.set_saved_labels(workspace,current_labels)
        self.set_saved_measurements(workspace, new_measurements)
def run_as_data_tool(self, workspace):
m = workspace.measurements
assert isinstance(m, cpmeas.Measurements)
group_numbers = {}
for i in m.get_image_numbers():
group_number = m.get_measurement(cpmeas.IMAGE,
cpmeas.GROUP_NUMBER, i)
group_index = m.get_measurement(cpmeas.IMAGE,
cpmeas.GROUP_INDEX, i)
if ((not group_numbers.has_key(group_number)) or
(group_numbers[group_number][1] > group_index)):
group_numbers[group_number] = (i, group_index)
for group_number in sorted(group_numbers.keys()):
m.image_set_number = group_numbers[group_number][0]
self.post_group(workspace, {})
def flood(self, i, at, a, b, c, d, z):
z[i] = at
if(a[i] != -1 and z[a[i]] == 0):
z = self.flood(a[i], at, a, b, c, d, z)
if(b[i] != -1 and z[b[i]] == 0):
z = self.flood(b[i], at, a, b, c, d, z)
if(c[i] != -1 and z[c[i]] == 0):
z = self.flood(c[i], at, a, b, c, d, z)
if(c[i] != -1 and z[c[i]] == 0):
z = self.flood(c[i], at, a, b, c, d, z)
return z
def is_aggregation_module(self):
'''We connect objects across imagesets within a group = aggregation'''
return True
    def post_group(self, workspace, grouping):
        """Close gaps, splits, merges and mitoses across the image group.

        For any tracking method other than LAP this only triggers a
        recalculation of the per-group measurements.  For LAP tracking with
        the second phase enabled, it builds a sparse linear assignment
        problem linking track ends to track starts (gap closing), to
        mid-track points (merges and splits) and to candidate mitoses,
        solves it with lapjv, then rewrites the per-object label / parent
        measurements, the per-image bookkeeping counts and the
        parent-child relationships accordingly.
        """
        # If any tracking method other than LAP, recalculate measurements
        # (Really, only the final age needs to be re-done)
        if self.tracking_method != TM_LAP:
            m = workspace.measurements
            assert(isinstance(m, cpmeas.Measurements))
            image_numbers = self.get_group_image_numbers(workspace)
            self.recalculate_group(workspace, image_numbers)
            return
        if (self.tracking_method != TM_LAP or
            not self.wants_second_phase):
            return
        gap_cost = float(self.gap_cost.value)
        split_alternative_cost = float(self.split_cost.value) / 2
        merge_alternative_cost = float(self.merge_cost.value)
        mitosis_alternative_cost = float(self.mitosis_cost.value)
        max_gap_score = self.max_gap_score.value
        max_merge_score = self.max_merge_score.value
        max_split_score = self.max_split_score.value / 2 # to match legacy
        max_frame_difference = self.max_frame_distance.value
        m = workspace.measurements
        assert(isinstance(m, cpmeas.Measurements))
        image_numbers = self.get_group_image_numbers(workspace)
        object_name = self.object_name.value
        # Fetch per-image-set arrays of labels, object numbers, centers,
        # areas and parent links for the whole group
        label, object_numbers, a, b, Area, \
            parent_object_numbers, parent_image_numbers = [
                [m.get_measurement(object_name, feature, i).astype(mtype)
                 for i in image_numbers]
                for feature, mtype in (
                    (self.measurement_name(F_LABEL), int),
                    (cpmeas.OBJECT_NUMBER, int),
                    (M_LOCATION_CENTER_X, float),
                    (M_LOCATION_CENTER_Y, float),
                    (self.measurement_name(F_AREA), float),
                    (self.measurement_name(F_PARENT_OBJECT_NUMBER), int),
                    (self.measurement_name(F_PARENT_IMAGE_NUMBER), int)
                )]
        group_indices, new_object_count, lost_object_count, merge_count, \
            split_count = [
                np.array([m.get_measurement(cpmeas.IMAGE, feature, i)
                          for i in image_numbers], int)
                for feature in (cpmeas.GROUP_INDEX,
                                self.image_measurement_name(F_NEW_OBJECT_COUNT),
                                self.image_measurement_name(F_LOST_OBJECT_COUNT),
                                self.image_measurement_name(F_MERGE_COUNT),
                                self.image_measurement_name(F_SPLIT_COUNT))]
        #
        # Map image number to group index and vice versa
        #
        image_number_group_index = np.zeros(np.max(image_numbers) + 1, int)
        image_number_group_index[image_numbers] = np.array(group_indices, int)
        group_index_image_number = np.zeros(np.max(group_indices) + 1, int)
        group_index_image_number[group_indices] = image_numbers
        if all([len(lll) == 0 for lll in label]):
            return # Nothing to do
        #sets up the arrays F, L, P, and Q
        #F is an array of all the cells that are the starts of segments
        #  F[:, :2] are the coordinates
        #  F[:, 2] is the image index
        #  F[:, 3] is the object index
        #  F[:, 4] is the object number
        #  F[:, 5] is the label
        #  F[:, 6] is the area
        #  F[:, 7] is the index into P
        #L is the ends
        #P includes all cells
        X = 0
        Y = 1
        IIDX = 2
        OIIDX = 3
        ONIDX = 4
        LIDX = 5
        AIDX = 6
        PIDX = 7
        P = np.vstack([
            np.column_stack((x, y, np.ones(len(x)) * i, np.arange(len(x)),
                             o, l, area, np.zeros(len(x))))
            for i, (x, y, o, l, area)
            in enumerate(zip(a, b, object_numbers, label, Area))])
        count_per_label = np.bincount(P[:, LIDX].astype(int))
        idx = np.hstack([0, np.cumsum(count_per_label)])
        unique_label = np.unique(P[:, LIDX].astype(int))
        # Sort all cells by label, then image index, then object index so
        # each track occupies a contiguous run of rows in P
        order = np.lexsort((P[:, OIIDX], P[:, IIDX], P[:, LIDX]))
        P = P[order, :]
        P[:, PIDX] = np.arange(len(P))
        F = P[idx[unique_label], :]
        L = P[idx[unique_label + 1] - 1, :]
        # Creates P1 and P2, which is P without the starts and ends
        # of segments respectively, representing possible
        # points of merges and splits respectively
        P1 = np.delete(P, idx[:-1], 0)
        P2 = np.delete(P, idx[1:] - 1, 0)
        ##################################################
        #
        # Addresses of supplementary nodes:
        #
        # The LAP array is composed of the following ranges
        #
        # Count | node type
        # ------------------
        # T     | segment starts and ends
        # T     | gaps
        # OB    | split starts
        # OB    | merge ends
        # M     | mitoses
        #
        # T = # tracks
        # OB = # of objects that can serve as merge or split points
        # M = # of mitoses
        #
        # The graph:
        #
        # Gap Alternatives (in other words, do nothing)
        # ----------------------------------------------
        # End[i] <----> Gap alternative[i]
        # Gap alternative[i] <----> Start[i]
        # Split[i] <----> Split[i]
        # Merge[j] <----> Merge[j]
        # Mitosis[i] <----> Mitosis[i]
        #
        #
        # Bridge gaps:
        # -----------------------------------------------
        #
        # End[i] <---> Start[j]
        # Gap alternative[i] <----> Gap alternative[j]
        #
        # Splits
        # -----------------------------------------------
        #
        # Split[i] <----> Start[j]
        # Gap alternative[j] <----> Split[i]
        #
        # Merges
        # -----------------------------------------------
        # End[i] <----> Merge[j]
        # Merge[j] <----> Gap alternative[i]
        #
        # Mitoses
        # -----------------------------------------------
        # The mitosis model is somewhat imperfect. The mitosis
        # caps the parent and makes it unavailable as a candidate
        # for a gap closing. In the best case, there is only one
        # mitosis candidate for the left and right child and
        # the left and right child are connected to gap alternatives,
        # but there may be competing splits, gap closings or
        # other mitoses.
        #
        # We take a greedy approach, ordering the mitoses by their
        # scores and fulfilling them. After processing the mitoses,
        # we run LAP again, keeping only the parent nodes of untaken
        # mitoses and child nodes connected to gap alternatives
        #
        # End[i] <----> Mitosis[j]
        #
        ##################################################
        end_nodes = []
        start_nodes = []
        scores = []
        #
        # The offsets and lengths of the start/end node ranges
        #
        start_end_off = 0
        start_end_len = len(L)
        gap_off = start_end_end = start_end_len
        gap_end = gap_off + start_end_len
        #-------------------------------------------
        #
        # Null model (do nothing)
        #
        #-------------------------------------------
        for first, second in ((end_nodes, start_nodes),
                              (start_nodes, end_nodes)):
            first.append(np.arange(start_end_len))
            second.append(np.arange(start_end_len) + gap_off)
            scores.append(np.ones(start_end_len) * gap_cost/2)
        #------------------------------------------
        #
        # Gap-closing model
        #
        #------------------------------------------
        #
        # Create the edges between ends and starts.
        # The edge weight is the gap pair cost.
        #
        a, gap_scores = self.get_gap_pair_scores(F, L, max_frame_difference)
        # filter by max gap score
        mask = gap_scores <= max_gap_score
        if np.sum(mask) > 0:
            a, gap_scores = a[mask], gap_scores[mask]
            end_nodes.append(a[:, 0])
            start_nodes.append(a[:, 1])
            scores.append(gap_scores)
            #
            # Hook the gap alternative ends of the starts to
            # the gap alternative starts of the ends
            #
            end_nodes.append(a[:, 1] + gap_off)
            start_nodes.append(a[:, 0] + gap_off)
            scores.append(np.zeros(len(gap_scores)))
        #---------------------------------------------------
        #
        # Merge model
        #
        #---------------------------------------------------
        #
        # The first column of z is the index of the track that ends. The second
        # is the index into P2 of the object to be merged into
        #
        merge_off = gap_end
        if len(P1) > 0:
            # Do the initial winnowing in chunks of 10m pairs
            lchunk_size = 10000000 / len(P1)
            chunks = []
            for lstart in range(0, len(L), lchunk_size):
                lend = min(len(L), lstart+lchunk_size)
                merge_p1idx, merge_lidx = \
                    [_.flatten() for _ in np.mgrid[0:len(P1), lstart:lend]]
                # z = # of frames between the track end and the merge point
                z = (P1[merge_p1idx, IIDX] - L[merge_lidx, IIDX]).astype(np.int32)
                mask = (z <= max_frame_difference) & (z > 0)
                if np.sum(mask) > 0:
                    chunks.append([_[mask] for _ in merge_p1idx, merge_lidx, z])
            if len(chunks) > 0:
                merge_p1idx, merge_lidx, z = [
                    np.hstack([_[i] for _ in chunks]) for i in range(3)]
            else:
                merge_p1idx = merge_lidx = z = np.zeros(0, np.int32)
        else:
            merge_p1idx = merge_lidx = z = np.zeros(0, np.int32)
        if len(z) > 0:
            # Calculate penalty = distance * area penalty
            AreaLast = L[merge_lidx, AIDX]
            AreaBeforeMerge = P[P1[merge_p1idx, PIDX].astype(int) - 1, AIDX]
            AreaAtMerge = P1[merge_p1idx, AIDX]
            rho = self.calculate_area_penalty(
                AreaLast + AreaBeforeMerge, AreaAtMerge)
            d = np.sqrt(np.sum((L[merge_lidx, :2]-P2[merge_p1idx, :2])**2, 1))
            merge_scores = d * rho
            mask = merge_scores <= max_merge_score
            merge_p1idx, merge_lidx, merge_scores = [
                _[mask] for _ in merge_p1idx, merge_lidx, merge_scores]
            merge_len = np.sum(mask)
            if merge_len > 0:
                #
                # The end nodes are the ends being merged to the intermediates
                # The start nodes are the intermediates and have node #s
                # that start at merge_off
                #
                end_nodes.append(merge_lidx)
                start_nodes.append(merge_off + np.arange(merge_len))
                scores.append(merge_scores)
                #
                # Hook the gap alternative starts for the ends to
                # the merge nodes
                #
                end_nodes.append(merge_off + np.arange(merge_len))
                start_nodes.append(merge_lidx + gap_off)
                scores.append(np.ones(merge_len) * gap_cost / 2)
                #
                # The alternative hypothesis is represented by merges hooked
                # to merges
                #
                end_nodes.append(merge_off + np.arange(merge_len))
                start_nodes.append(merge_off + np.arange(merge_len))
                scores.append(np.ones(merge_len) * merge_alternative_cost)
        else:
            merge_len = 0
        merge_end = merge_off+merge_len
        #------------------------------------------------------
        #
        # Split model
        #
        #------------------------------------------------------
        split_off = merge_end
        if len(P2) > 0:
            lchunk_size = 10000000 / len(P2)
            chunks = []
            for fstart in range(0, len(L), lchunk_size):
                fend = min(len(L), fstart+lchunk_size)
                split_p2idx, split_fidx = \
                    [_.flatten() for _ in np.mgrid[0:len(P2), fstart:fend]]
                # z = # of frames between the split point and the track start
                z = (F[split_fidx, IIDX] - P2[split_p2idx, IIDX]).astype(np.int32)
                mask = (z <= max_frame_difference) & (z > 0)
                if np.sum(mask) > 0:
                    chunks.append(
                        [_[mask] for _ in split_p2idx, split_fidx, z])
            if len(chunks) > 0:
                split_p2idx, split_fidx, z = [
                    np.hstack([_[i] for _ in chunks]) for i in range(3)]
            else:
                split_p2idx = split_fidx = z = np.zeros(0, np.int32)
        else:
            split_p2idx = split_fidx = z = np.zeros(0, int)
        if len(z) > 0:
            AreaFirst = F[split_fidx, AIDX]
            AreaAfterSplit = P[ P2[split_p2idx, PIDX].astype(int) + 1, AIDX]
            AreaAtSplit = P2[split_p2idx, AIDX]
            d = np.sqrt(np.sum((F[split_fidx, :2] - P2[split_p2idx, :2])**2, 1))
            rho = self.calculate_area_penalty(
                AreaFirst + AreaAfterSplit, AreaAtSplit)
            split_scores = d * rho
            mask = (split_scores <= max_split_score)
            split_p2idx, split_fidx, split_scores = \
                [_[mask] for _ in split_p2idx, split_fidx, split_scores]
            split_len = np.sum(mask)
            if split_len > 0:
                #
                # The end nodes are the intermediates (starting at split_off)
                # The start nodes are the F
                #
                end_nodes.append(np.arange(split_len) + split_off)
                start_nodes.append(split_fidx)
                scores.append(split_scores)
                #
                # Hook the alternate ends to the split starts
                #
                end_nodes.append(split_fidx + gap_off)
                start_nodes.append(np.arange(split_len) + split_off)
                scores.append(np.ones(split_len) * gap_cost/2)
                #
                # The alternate hypothesis is split nodes hooked to themselves
                #
                end_nodes.append(np.arange(split_len) + split_off)
                start_nodes.append(np.arange(split_len) + split_off)
                scores.append(np.ones(split_len) * split_alternative_cost)
        else:
            split_len = 0
        split_end = split_off + split_len
        #----------------------------------------------------------
        #
        # Mitosis model
        #
        #----------------------------------------------------------
        mitoses, mitosis_scores = self.get_mitotic_triple_scores(F, L)
        n_mitoses = len(mitosis_scores)
        if n_mitoses > 0:
            # Greedy: best-scoring mitoses are fulfilled first
            order = np.argsort(mitosis_scores)
            mitoses, mitosis_scores = mitoses[order], mitosis_scores[order]
        MDLIDX = 0 # index of left daughter
        MDRIDX = 1 # index of right daughter
        MPIDX = 2  # index of parent
        mitoses_parent_lidx = mitoses[:, MPIDX]
        mitoses_left_child_findx = mitoses[:, MDLIDX]
        mitoses_right_child_findx = mitoses[:, MDRIDX]
        #
        # Create the ranges for mitoses
        #
        mitosis_off = split_end
        mitosis_len = n_mitoses
        mitosis_end = mitosis_off + mitosis_len
        if n_mitoses > 0:
            #
            # Taking the mitosis score will cost us the parent gap at least.
            #
            end_nodes.append(mitoses_parent_lidx)
            start_nodes.append(np.arange(n_mitoses) + mitosis_off)
            scores.append(mitosis_scores)
            #
            # Balance the mitosis against the gap alternative.
            #
            end_nodes.append(np.arange(n_mitoses) + mitosis_off)
            start_nodes.append(mitoses_parent_lidx + gap_off)
            scores.append(np.ones(n_mitoses) * gap_cost / 2)
            #
            # The alternative hypothesis links mitosis to mitosis
            # We charge the alternative hypothesis the mitosis_alternative
            # cost.
            #
            end_nodes.append(np.arange(n_mitoses) + mitosis_off)
            start_nodes.append(np.arange(n_mitoses) + mitosis_off)
            scores.append(np.ones(n_mitoses) * mitosis_alternative_cost)
        i = np.hstack(end_nodes)
        j = np.hstack(start_nodes)
        c = scores = np.hstack(scores)
        #-------------------------------------------------------
        #
        # LAP Processing # 1
        #
        x, y = lapjv(i, j, c)
        score_matrix = scipy.sparse.coo.coo_matrix((c, (i, j))).tocsr()
        #---------------------------
        #
        # Useful debugging diagnostics
        #
        def desc(node):
            '''Describe a node for graphviz'''
            fl = F
            if node < start_end_end:
                fmt = "N%d:%d"
                idx = node
            elif node < gap_end:
                fmt = "G%d:%d"
                idx = node - gap_off
            elif node < merge_end:
                fmt = "M%d:%d"
                idx = merge_p1idx[node - merge_off]
                fl = P1
            elif node < split_end:
                fmt = "S%d:%d"
                idx = split_p2idx[node - split_off]
                fl = P2
            else:
                mitosis = mitoses[node - mitosis_off]
                (lin, lon), (rin, ron), (pin, pon) = [
                    (image_numbers[fl[idx, IIDX]], fl[idx, ONIDX])
                    for idx, fl in zip(mitosis, (F, F, L))]
                return "n%d[label=\"MIT%d:%d->%d:%d+%d:%d\"]" % (
                    node, pin, pon, lin, lon, rin, ron)
            return "n%d[label=\"%s\"]" % (
                node, fmt % (image_numbers[int(fl[idx, IIDX])],
                             int(fl[idx, ONIDX])))
        def write_graph(path, x, y):
            '''Write a graphviz DOT file'''
            with open(path, "w") as fd:
                fd.write("digraph trackobjects {\n")
                graph_idx = np.where(
                    (x != np.arange(len(x))) & (y != np.arange(len(y))))[0]
                for idx in graph_idx:
                    fd.write(desc(idx)+";\n")
                for idx in graph_idx:
                    fd.write("n%d -> n%d [label=%0.2f];\n" %
                             (idx, x[idx], score_matrix[idx, x[idx]]))
                fd.write("}\n")
        #
        #--------------------------------------------------------
        #
        # Mitosis fixup.
        #
        good_mitoses = np.zeros(len(mitoses), bool)
        for midx, (lidx, ridx, pidx) in enumerate(mitoses):
            #
            # If the parent was not accepted or either of the children
            # have been assigned to a mitosis, skip
            #
            if x[pidx] == midx + mitosis_off and not \
               any([y[idx] >= mitosis_off and y[idx] < mitosis_end
                    for idx in lidx, ridx]):
                alt_score = sum([score_matrix[y[idx], idx] for idx in lidx, ridx])
                #
                # Taking the alt score would cost us a mitosis alternative
                # cost, but would remove half of a gap alternative.
                #
                alt_score += mitosis_alternative_cost - gap_cost / 2
                #
                # Alternatively, taking the mitosis score would cost us
                # the gap alternatives of the left and right.
                #
                if alt_score > mitosis_scores[midx] + gap_cost:
                    for idx in lidx, ridx:
                        old_y = y[idx]
                        if old_y < start_end_end:
                            x[old_y] = old_y + gap_off
                        else:
                            x[old_y] = old_y
                    y[lidx] = midx + mitosis_off
                    y[ridx] = midx + mitosis_off
                    good_mitoses[midx] = True
                    continue
            x[pidx] = pidx + gap_off
            y[pidx+gap_off] = pidx
            x[midx+mitosis_off] = midx+mitosis_off
            y[midx+mitosis_off] = midx+mitosis_off
        if np.sum(good_mitoses) == 0:
            good_mitoses = np.zeros((0, 3), int)
            good_mitosis_scores = np.zeros(0)
        else:
            good_mitoses, good_mitosis_scores = \
                mitoses[good_mitoses], mitosis_scores[good_mitoses]
        #
        #-------------------------------------
        #
        # Rerun to see if reverted mitoses could close gaps.
        #
        if np.any(x[mitoses[:, MPIDX]] != np.arange(len(mitoses)) + mitosis_off):
            rerun_end = np.ones(mitosis_end, bool)
            rerun_start = np.ones(mitosis_end, bool)
            rerun_end[:start_end_end] = x[:start_end_end] < mitosis_off
            rerun_end[mitosis_off:] = False
            rerun_start[:start_end_end] = y[:start_end_end] < mitosis_off
            rerun_start[mitosis_off:] = False
            mask = rerun_end[i] & rerun_start[j]
            i, j, c = i[mask], j[mask], c[mask]
            i = np.hstack((i,
                           good_mitoses[:, MPIDX],
                           good_mitoses[:, MDLIDX] + gap_off,
                           good_mitoses[:, MDRIDX] + gap_off))
            j = np.hstack((j,
                           good_mitoses[:, MPIDX] + gap_off,
                           good_mitoses[:, MDLIDX],
                           good_mitoses[:, MDRIDX]))
            c = np.hstack((c, np.zeros(len(good_mitoses) *3)))
            x, y = lapjv(i, j, c)
        #
        # Fixups to measurements
        #
        # fixup[N] gets the fixup dictionary for image set, N
        #
        # fixup[N][FEATURE] gets a tuple of a list of object numbers and
        # values.
        #
        fixups = {}
        def add_fixup(feature, image_number, object_number, value):
            if image_number not in fixups:
                fixups[image_number] = { feature: ([object_number], [value])}
            else:
                fid = fixups[image_number]
                if feature not in fid:
                    fid[feature] = ([object_number], [value])
                else:
                    object_numbers, values = fid[feature]
                    object_numbers.append(object_number)
                    values.append(value)
        #attaches different segments together if they are matches through the LAP
        a = -np.ones(len(F)+1, dtype="int32")
        b = -np.ones(len(F)+1, dtype="int32")
        c = -np.ones(len(F)+1, dtype="int32")
        d = -np.ones(len(F)+1, dtype="int32")
        z = np.zeros(len(F)+1, dtype="int32")
        # relationships is a list of parent-child relationships. Each element
        # is a two-tuple of parent and child and each parent/child is a
        # two-tuple of image index and object number:
        #
        # [((<parent-image-index>, <parent-object-number>),
        #   (<child-image-index>, <child-object-number>))...]
        #
        relationships = []
        #
        # Starts can be linked to the following:
        # ends (start_end_off <= j < start_end_off+start_end_len)
        # gap alternatives (gap_off <= j < merge_off+merge_len)
        # splits (split_off <= j < split_off+split_len)
        # mitosis left (mitosis_left_child_off <= j < ....)
        # mitosis right (mitosis_right_child_off <= j < ....)
        #
        # Discard starts linked to self = "do nothing"
        #
        start_idxs = np.where(
            y[:start_end_end] != np.arange(gap_off, gap_end))[0]
        for i in start_idxs:
            my_image_index = int(F[i, IIDX])
            my_image_number = image_numbers[my_image_index]
            my_object_index = int(F[i, OIIDX])
            my_object_number = int(F[i, ONIDX])
            yi = y[i]
            if yi < gap_end:
                #-------------------------------
                #
                # GAP
                #
                # y[i] gives index of last hooked to first
                #
                b[i+1] = yi+1
                c[yi+1] = i+1
                #
                # Hook our parent image/object number to found parent
                #
                parent_image_index = int(L[yi, IIDX])
                parent_object_number = int(L[yi, ONIDX])
                parent_image_number = image_numbers[parent_image_index]
                parent_image_numbers[my_image_index][my_object_index] = \
                    parent_image_number
                parent_object_numbers[my_image_index][my_object_index] = \
                    parent_object_number
                relationships.append(
                    ((parent_image_index, parent_object_number),
                     (my_image_index, my_object_number)))
                add_fixup(F_LINK_TYPE, my_image_number, my_object_number,
                          LT_GAP)
                add_fixup(F_GAP_LENGTH, my_image_number, my_object_number,
                          my_image_index - parent_image_index)
                # NOTE(review): `scores` is indexed here by the node id `yi`,
                # which falls in the null-model range of the hstacked score
                # array -- verify this picks up the intended gap score.
                add_fixup(F_GAP_SCORE, my_image_number, my_object_number,
                          scores[yi])
                #
                # One less new object
                #
                new_object_count[my_image_index] -= 1
                #
                # One less lost object (the lost object is recorded in
                # the image set after the parent)
                #
                lost_object_count[parent_image_index + 1] -= 1
                logger.debug("Gap closing: %d:%d to %d:%d, score=%f" %
                             (parent_image_number, parent_object_number,
                              image_numbers[my_image_index],
                              object_numbers[my_image_index][my_object_index],
                              score_matrix[yi, i]))
            elif yi >= split_off and yi < split_end:
                #------------------------------------
                #
                # SPLIT
                #
                p2_idx = split_p2idx[yi - split_off]
                parent_image_index = int(P2[p2_idx, IIDX])
                parent_image_number = image_numbers[parent_image_index]
                parent_object_number = int(P2[p2_idx, ONIDX])
                b[i+1] = P2[p2_idx, LIDX]
                c[b[i+1]] = i+1
                parent_image_numbers[my_image_index][my_object_index] = \
                    parent_image_number
                parent_object_numbers[my_image_index][my_object_index] = \
                    parent_object_number
                relationships.append(
                    ((parent_image_index, parent_object_number),
                     (my_image_index, my_object_number)))
                add_fixup(F_LINK_TYPE, my_image_number, my_object_number,
                          LT_SPLIT)
                add_fixup(F_SPLIT_SCORE, my_image_number, my_object_number,
                          split_scores[yi - split_off])
                #
                # one less new object
                #
                new_object_count[my_image_index] -= 1
                #
                # one more split object
                #
                split_count[my_image_index] += 1
                logger.debug("split: %d:%d to %d:%d, score=%f" %
                             (parent_image_number, parent_object_number,
                              image_numbers[my_image_index],
                              object_numbers[my_image_index][my_object_index],
                              split_scores[y[i] - split_off]))
        #---------------------
        #
        # Process ends (parents)
        #
        end_idxs = np.where(
            x[:start_end_end] != np.arange(gap_off, gap_end))[0]
        for i in end_idxs:
            if(x[i] < start_end_end):
                a[i+1] = x[i]+1
                d[a[i+1]] = i+1
            elif(x[i] >= merge_off and x[i] < merge_end):
                #-------------------
                #
                # MERGE
                #
                # Handle merged objects. A merge hooks the end (L) of
                # a segment (the parent) to a gap alternative in P1 (the child)
                #
                p1_idx = merge_p1idx[x[i]-merge_off]
                a[i+1] = P1[p1_idx, LIDX]
                d[a[i+1]] = i+1
                parent_image_index = int(L[i, IIDX])
                parent_object_number = int(L[i, ONIDX])
                parent_image_number = image_numbers[parent_image_index]
                child_image_index = int(P1[p1_idx, IIDX])
                child_object_number = int(P1[p1_idx, ONIDX])
                relationships.append(
                    ((parent_image_index, parent_object_number),
                     (child_image_index, child_object_number)))
                add_fixup(F_MERGE_SCORE, parent_image_number,
                          parent_object_number,
                          merge_scores[x[i] - merge_off])
                lost_object_count[parent_image_index+1] -= 1
                merge_count[child_image_index] += 1
                logger.debug("Merge: %d:%d to %d:%d, score=%f" %
                             (image_numbers[parent_image_index]
                              , parent_object_number,
                              image_numbers[child_image_index],
                              child_object_number,
                              merge_scores[x[i] - merge_off]))
        for (mlidx, mridx, mpidx), score in\
            zip(good_mitoses, good_mitosis_scores):
            #
            # The parent is attached, one less lost object
            #
            lost_object_count[int(L[mpidx, IIDX])+1] -= 1
            a[mpidx+1] = F[mlidx, LIDX]
            d[a[mpidx+1]] = mpidx+1
            parent_image_index = int(L[mpidx, IIDX])
            parent_image_number = image_numbers[parent_image_index]
            parent_object_number = int(L[mpidx, ONIDX])
            # NOTE(review): `lidx` is not bound by this loop (its variables
            # are mlidx/mridx/mpidx); it leaks from the mitosis fixup loop
            # above. This looks like it should be `mlidx` -- confirm.
            split_count[int(F[lidx, IIDX])] += 1
            for idx in mlidx, mridx:
                #--------------------------------------
                #
                # MITOSIS child
                #
                my_image_index = int(F[idx, IIDX])
                my_image_number = image_numbers[my_image_index]
                my_object_index = int(F[idx, OIIDX])
                my_object_number = int(F[idx, ONIDX])
                b[idx+1] = int(L[mpidx, LIDX])
                c[b[idx+1]] = idx+1
                parent_image_numbers[my_image_index][my_object_index] = \
                    parent_image_number
                parent_object_numbers[my_image_index][my_object_index] = \
                    parent_object_number
                relationships.append(
                    ((parent_image_index, parent_object_number),
                     (my_image_index, my_object_number)))
                add_fixup(F_LINK_TYPE, my_image_number, my_object_number,
                          LT_MITOSIS)
                add_fixup(F_MITOSIS_SCORE, my_image_number, my_object_number,
                          score)
                new_object_count[my_image_index] -= 1
            logger.debug("Mitosis: %d:%d to %d:%d and %d, score=%f" %
                         (parent_image_number, parent_object_number,
                          image_numbers[F[mlidx, IIDX]],
                          F[mlidx, ONIDX],
                          F[mridx, ONIDX],
                          score))
        #
        # At this point a gives the label # of the track that connects
        # to the end of the indexed track. b gives the label # of the
        # track that connects to the start of the indexed track.
        # We convert these into edges.
        #
        # aa and bb are the vertices of an edge list and aa[i],bb[i]
        # make up an edge
        #
        connect_mask = (a != -1)
        aa = a[connect_mask]
        bb = np.argwhere(connect_mask).flatten()
        connect_mask = (b != -1)
        aa = np.hstack((aa, b[connect_mask]))
        bb = np.hstack((bb, np.argwhere(connect_mask).flatten()))
        #
        # Connect self to self for indices that do not connect
        #
        disconnect_mask = (a == -1) & (b == -1)
        aa = np.hstack((aa, np.argwhere(disconnect_mask).flatten()))
        bb = np.hstack((bb, np.argwhere(disconnect_mask).flatten()))
        z = all_connected_components(aa, bb)
        newlabel = [z[label[i]] for i in range(len(label))]
        #
        # Replace the labels for the image sets in the group
        # inside the list retrieved from the measurements
        #
        m_link_type = self.measurement_name(F_LINK_TYPE)
        for i, image_number in enumerate(image_numbers):
            n_objects = len(newlabel[i])
            m.add_measurement(cpmeas.IMAGE,
                              self.image_measurement_name(F_LOST_OBJECT_COUNT),
                              lost_object_count[i], True, image_number)
            m.add_measurement(cpmeas.IMAGE,
                              self.image_measurement_name(F_NEW_OBJECT_COUNT),
                              new_object_count[i], True, image_number)
            m.add_measurement(cpmeas.IMAGE,
                              self.image_measurement_name(F_MERGE_COUNT),
                              merge_count[i], True, image_number)
            m.add_measurement(cpmeas.IMAGE,
                              self.image_measurement_name(F_SPLIT_COUNT),
                              split_count[i], True, image_number)
            if n_objects == 0:
                continue
            m.add_measurement(object_name,
                              self.measurement_name(F_LABEL),
                              newlabel[i], can_overwrite = True,
                              image_set_number = image_number)
            m.add_measurement(object_name,
                              self.measurement_name(F_PARENT_IMAGE_NUMBER),
                              parent_image_numbers[i],
                              can_overwrite = True,
                              image_set_number = image_number)
            m.add_measurement(object_name,
                              self.measurement_name(F_PARENT_OBJECT_NUMBER),
                              parent_object_numbers[i],
                              can_overwrite = True,
                              image_set_number = image_number)
            is_fixups = fixups.get(image_number, None)
            if (is_fixups is not None) and (F_LINK_TYPE in is_fixups):
                link_types = m[object_name, m_link_type, image_number]
                object_numbers, values = [
                    np.array(_) for _ in is_fixups[F_LINK_TYPE]]
                link_types[object_numbers-1] = values
                m[object_name, m_link_type, image_number] = link_types
            for feature, data_type in (
                (F_GAP_LENGTH, np.int32),
                (F_GAP_SCORE, np.float32),
                (F_MERGE_SCORE, np.float32),
                (F_SPLIT_SCORE, np.float32),
                (F_MITOSIS_SCORE, np.float32)):
                if data_type == np.int32:
                    values = np.zeros(n_objects, data_type)
                else:
                    values = np.ones(n_objects, data_type) * np.NaN
                if (is_fixups is not None) and (feature in is_fixups):
                    object_numbers, fixup_values = [
                        np.array(_) for _ in is_fixups[feature]]
                    values[object_numbers-1] = fixup_values
                m[object_name, self.measurement_name(feature), image_number] =\
                    values
        #
        # Write the relationships.
        #
        if len(relationships) > 0:
            relationships = np.array(relationships)
            parent_image_numbers = image_numbers[relationships[:, 0, 0]]
            child_image_numbers = image_numbers[relationships[:, 1, 0]]
            parent_object_numbers = relationships[:, 0, 1]
            child_object_numbers = relationships[:, 1, 1]
            m.add_relate_measurement(
                self.module_num, R_PARENT, object_name, object_name,
                parent_image_numbers, parent_object_numbers,
                child_image_numbers, child_object_numbers)
        self.recalculate_group(workspace, image_numbers)
def calculate_area_penalty(self, a1, a2):
'''Calculate a penalty for areas that don't match
Ideally, area should be conserved while tracking. We divide the larger
of the two by the smaller of the two to get the area penalty
which is then multiplied by the distance.
Note that this differs from Jaqaman eqn 5 which has an asymmetric
penalty (sqrt((a1 + a2) / b) for a1+a2 > b and b / (a1 + a2) for
a1+a2 < b. I can't think of a good reason why they should be
asymmetric.
'''
result = a1 / a2
result[result < 1] = 1/result[result < 1]
result[np.isnan(result)] = np.inf
return result
    def get_gap_pair_scores(self, F, L, max_gap):
        '''Compute scores for matching last frame with first to close gaps
        F - an N x 3 (or more) array giving X, Y and frame # of the first object
            in each track
        L - an N x 3 (or more) array giving X, Y and frame # of the last object
            in each track
        max_gap - the maximum allowed # of frames between the last and first
        Returns: an M x 2 array of M pairs where the first element of the array
                is the index of the track whose last frame is to be joined to
                the track whose index is the second element of the array.
                an M-element vector of scores.
        '''
        #
        # There have to be at least two things to match
        #
        nothing = (np.zeros((0, 2), int), np.zeros(0))
        if F.shape[0] <= 1:
            return nothing
        # Column layout of the F and L rows: 0/1 = X/Y center, 2 = frame
        # index, 6 = object area (fed to the area penalty below)
        X = 0
        Y = 1
        IIDX = 2
        AIDX = 6
        #
        # Create an indexing ordered by the last frame index and by the first
        #
        i = np.arange(len(F))
        j = np.arange(len(F))
        f_iidx = F[:, IIDX].astype(int)
        l_iidx = L[:, IIDX].astype(int)
        i_lorder = np.lexsort((i, l_iidx))
        j_forder = np.lexsort((j, f_iidx))
        i = i[i_lorder]
        j = j[j_forder]
        # per-frame counts of track ends (i) and track starts (j)
        i_counts = np.bincount(l_iidx)
        j_counts = np.bincount(f_iidx)
        # Indexes is a project helper; its fwd_idx appears to give the start
        # offset of each frame's run in the sorted order - confirm in its docs
        i_indexes = Indexes([i_counts])
        j_indexes = Indexes([j_counts])
        #
        # The lowest possible F for each L is 1+L
        #
        j_self = np.minimum(np.arange(len(i_counts)),
                            len(j_counts) - 1)
        j_first_idx = j_indexes.fwd_idx[j_self] + j_counts[j_self]
        #
        # The highest possible F for each L is L + max_gap. j_end is the
        # first illegal value... just past that.
        #
        j_last = np.minimum(np.arange(len(i_counts)) + max_gap,
                            len(j_counts)-1)
        j_end_idx = j_indexes.fwd_idx[j_last] + j_counts[j_last]
        #
        # Structure the i and j block ranges
        #
        ij_counts = j_end_idx - j_first_idx
        ij_indexes = Indexes([i_counts, ij_counts])
        if ij_indexes.length == 0:
            return nothing
        #
        # The index into L of the first element of the pair
        #
        ai = i[i_indexes.fwd_idx[ij_indexes.rev_idx] + ij_indexes.idx[0]]
        #
        # The index into F of the second element of the pair
        #
        aj = j[j_first_idx[ij_indexes.rev_idx] + ij_indexes.idx[1]]
        #
        # The distances
        #
        d = np.sqrt((L[ai, X] - F[aj, X]) ** 2 +
                    (L[ai, Y] - F[aj, Y]) ** 2)
        #
        # Rho... the area penalty
        #
        rho = self.calculate_area_penalty(L[ai, AIDX], F[aj, AIDX])
        # final score = Euclidean gap distance scaled by the area mismatch
        return np.column_stack((ai, aj)), d * rho
    def get_mitotic_triple_scores(self, F, L):
        '''Compute scores for matching a parent to two daughters
        F - an N x 3 (or more) array giving X, Y and frame # of the first object
            in each track
        L - an N x 3 (or more) array giving X, Y and frame # of the last object
            in each track
        Returns: an M x 3 array of M triples where the first column is the
                 index in the L array of the parent cell and the remaining
                 columns are the indices of the daughters in the F array
                 an M-element vector of distances of the parent from the expected
        '''
        # Column layout: 0/1 = X/Y center, 2 = frame index, 6 = object area
        X = 0
        Y = 1
        IIDX = 2
        AIDX = 6
        if len(F) <= 1:
            return np.zeros((0, 3), np.int32), np.zeros(0, np.int32)
        max_distance = self.mitosis_max_distance.value
        # Find all daughter pairs within same frame
        i, j = np.where(F[:, np.newaxis, IIDX] == F[np.newaxis, :, IIDX])
        i, j = i[i < j], j[i < j] # get rid of duplicates and self-compares
        #
        # Calculate the maximum allowed distance before one or the other
        # daughter is farther away than the maximum allowed from the center
        #
        # That's the max_distance * 2 minus the distance
        #
        dmax = max_distance * 2 - np.sqrt(np.sum((F[i, :2] - F[j, :2]) ** 2, 1))
        mask = dmax >= 0
        i, j = i[mask], j[mask]
        if len(i) == 0:
            return np.zeros((0, 3), np.int32), np.zeros(0, np.int32)
        # hypothetical mitosis midpoint: halfway between the two daughters
        center_x = (F[i, X] + F[j, X]) / 2
        center_y = (F[i, Y] + F[j, Y]) / 2
        # NOTE(review): 'frame' is computed but never used below
        frame = F[i, IIDX]
        # Find all parent-daughter pairs where the parent
        # is in the frame previous to the daughters
        ij, k = [_.flatten() for _ in np.mgrid[0:len(i), 0:len(L)]]
        mask = F[i[ij], IIDX] == L[k, IIDX]+1
        ij, k = ij[mask], k[mask]
        if len(ij) == 0:
            return np.zeros((0, 3), np.int32), np.zeros(0, np.int32)
        # distance from the parent's last position to the daughters' midpoint
        d = np.sqrt((center_x[ij] - L[k, X]) ** 2 +
                    (center_y[ij] - L[k, Y]) ** 2)
        mask = d <= dmax[ij]
        ij, k, d = ij[mask], k[mask], d[mask]
        if len(ij) == 0:
            return np.zeros((0, 3), np.int32), np.zeros(0, np.int32)
        # penalize parents whose area differs from the daughters' combined area
        rho = self.calculate_area_penalty(
            F[i[ij], AIDX] + F[j[ij], AIDX], L[k, AIDX])
        return np.column_stack((i[ij], j[ij], k)), d * rho
    def recalculate_group(self, workspace, image_numbers):
        '''Recalculate all measurements once post_group has run
        workspace - the workspace being operated on
        image_numbers - the image numbers of the group's image sets' measurements

        After phase-2 relinking rewrites the parent measurements, all
        derived per-object features (trajectory, distances, displacement,
        linearity, lifetime, label filtering) must be recomputed for the
        whole group.
        '''
        m = workspace.measurements
        object_name = self.object_name.value
        assert isinstance(m, cpmeas.Measurements)
        # map image number -> position within this group (0 is "no image")
        image_index = np.zeros(np.max(image_numbers)+1, int)
        image_index[image_numbers] = np.arange(len(image_numbers))
        image_index[0] = -1
        index_to_imgnum = np.array(image_numbers)
        # per-image lists of the (possibly rewritten) parent measurements
        parent_image_numbers, parent_object_numbers = [
            [ m.get_measurement(
                object_name, self.measurement_name(feature), image_number)
              for image_number in image_numbers]
            for feature in (F_PARENT_IMAGE_NUMBER, F_PARENT_OBJECT_NUMBER)]
        #
        # Do all_connected_components on the graph of parents to find groups
        # that share the same ancestor
        #
        count = np.array([len(x) for x in parent_image_numbers])
        idx = Indexes(count)
        if idx.length == 0:
            # Nothing to do
            return
        parent_image_numbers = np.hstack(parent_image_numbers).astype(int)
        parent_object_numbers = np.hstack(parent_object_numbers).astype(int)
        parent_image_indexes = image_index[parent_image_numbers]
        parent_object_indexes = parent_object_numbers - 1
        # edges: each object with a parent -> flat index of its parent
        i = np.arange(idx.length)
        i = i[parent_image_numbers != 0]
        j = idx.fwd_idx[parent_image_indexes[i]] + parent_object_indexes[i]
        # Link self to self too
        i = np.hstack((i, np.arange(idx.length)))
        j = np.hstack((j, np.arange(idx.length)))
        labels = all_connected_components(i, j)
        nlabels = np.max(labels) + 1
        #
        # Set the ancestral index for each label
        #
        # objects with parent_image_number == 0 are track roots
        ancestral_index = np.zeros(nlabels, int)
        ancestral_index[labels[parent_image_numbers == 0]] =\
            np.argwhere(parent_image_numbers == 0).flatten().astype(int)
        ancestral_image_index = idx.rev_idx[ancestral_index]
        ancestral_object_index = \
            ancestral_index - idx.fwd_idx[ancestral_image_index]
        #
        # Blow these up to one per object for convenience
        #
        ancestral_index = ancestral_index[labels]
        ancestral_image_index = ancestral_image_index[labels]
        ancestral_object_index = ancestral_object_index[labels]
        def start(image_index):
            '''Return the start index in the array for the given image index'''
            return idx.fwd_idx[image_index]
        def end(image_index):
            '''Return the end index in the array for the given image index'''
            return start(image_index) + idx.counts[0][image_index]
        def slyce(image_index):
            '''Return the flat-array slice covering the given image index'''
            return slice(start(image_index), end(image_index))
        class wrapped(object):
            '''make an indexable version of a measurement, with parent and ancestor fetching'''
            def __init__(self, feature_name):
                self.feature_name = feature_name
                # flat concatenation of the per-image measurement arrays
                self.backing_store = np.hstack([
                    m.get_measurement(object_name, feature_name, i)
                    for i in image_numbers])
            def __getitem__(self, index):
                return self.backing_store[slyce(index)]
            def __setitem__(self, index, val):
                # write through to both the local cache and the measurements
                self.backing_store[slyce(index)] = val
                m.add_measurement(object_name, self.feature_name, val,
                                  image_set_number = image_numbers[index],
                                  can_overwrite=True)
            def get_parent(self, index, no_parent=None):
                '''Fetch each object's parent's value; no_parent fills orphans'''
                result = np.zeros(idx.counts[0][index],
                                  self.backing_store.dtype)
                my_slice = slyce(index)
                mask = parent_image_numbers[my_slice] != 0
                if not np.all(mask):
                    if np.isscalar(no_parent) or (no_parent is None):
                        result[~mask] = no_parent
                    else:
                        result[~mask] = no_parent[~mask]
                if np.any(mask):
                    result[mask] = self.backing_store[
                        idx.fwd_idx[parent_image_indexes[my_slice][mask]] +
                        parent_object_indexes[my_slice][mask]]
                return result
            def get_ancestor(self, index):
                '''Fetch the value of each object's track-root ancestor'''
                return self.backing_store[ancestral_index[slyce(index)]]
        #
        # Recalculate the trajectories
        #
        x = wrapped(M_LOCATION_CENTER_X)
        y = wrapped(M_LOCATION_CENTER_Y)
        trajectory_x = wrapped(self.measurement_name(F_TRAJECTORY_X))
        trajectory_y = wrapped(self.measurement_name(F_TRAJECTORY_Y))
        integrated = wrapped(self.measurement_name(F_INTEGRATED_DISTANCE))
        dists = wrapped(self.measurement_name(F_DISTANCE_TRAVELED))
        displ = wrapped(self.measurement_name(F_DISPLACEMENT))
        linearity = wrapped(self.measurement_name(F_LINEARITY))
        lifetimes = wrapped(self.measurement_name(F_LIFETIME))
        label = wrapped(self.measurement_name(F_LABEL))
        final_age = wrapped(self.measurement_name(F_FINAL_AGE))
        age = {} # Dictionary of per-label ages
        if self.wants_lifetime_filtering.value:
            minimum_lifetime = self.min_lifetime.value if self.wants_minimum_lifetime.value else -np.Inf
            maximum_lifetime = self.max_lifetime.value if self.wants_maximum_lifetime.value else np.Inf
        for image_number in image_numbers:
            index = image_index[image_number]
            this_x = x[index]
            if len(this_x) == 0:
                continue
            this_y = y[index]
            # orphans fall back to their own position (zero step distance)
            last_x = x.get_parent(index, no_parent=this_x)
            last_y = y.get_parent(index, no_parent=this_y)
            x_diff = this_x - last_x
            y_diff = this_y - last_y
            #
            # TrajectoryX,Y = X,Y distances traveled from step to step
            #
            trajectory_x[index] = x_diff
            trajectory_y[index] = y_diff
            #
            # DistanceTraveled = Distance traveled from step to step
            #
            dists[index] = np.sqrt(x_diff * x_diff + y_diff * y_diff)
            #
            # Integrated distance = accumulated distance for lineage
            #
            integrated[index] = integrated.get_parent(index, no_parent=0) + dists[index]
            #
            # Displacement = crow-fly distance from initial ancestor
            #
            x_tot_diff = this_x - x.get_ancestor(index)
            y_tot_diff = this_y - y.get_ancestor(index)
            tot_distance = np.sqrt(x_tot_diff * x_tot_diff +
                                   y_tot_diff * y_tot_diff)
            displ[index] = tot_distance
            #
            # Linearity = ratio of displacement and integrated
            # distance. NaN for new cells is ok.
            #
            linearity[index] = tot_distance / integrated[index]
            #
            # Add 1 to lifetimes / one for new
            #
            lifetimes[index] = lifetimes.get_parent(index, no_parent=0) + 1
            #
            # Age = overall lifetime of each label
            #
            # later images overwrite earlier ones, so this ends up holding
            # each label's final lifetime
            for this_label, this_lifetime in zip(label[index],lifetimes[index]):
                age[this_label] = this_lifetime
        all_labels = age.keys()
        all_ages = age.values()
        if self.wants_lifetime_filtering.value:
            labels_to_filter = [k for k, v in age.iteritems() if v <= minimum_lifetime or v >= maximum_lifetime]
        for image_number in image_numbers:
            index = image_index[image_number]
            # Fill in final object ages
            this_label = label[index]
            this_lifetime = lifetimes[index]
            this_age = final_age[index]
            # NOTE(review): searchsorted requires a sorted array, but
            # age.keys() order is arbitrary in Python 2 - confirm labels
            # are effectively inserted in sorted order upstream
            ind = np.array(all_labels).searchsorted(this_label)
            i = np.array(all_ages)[ind] == this_lifetime
            this_age[i] = this_lifetime[i]
            final_age[index] = this_age
            # Filter object ages below the minimum
            if self.wants_lifetime_filtering.value:
                if len(labels_to_filter) > 0:
                    # filtered labels become NaN (float cast needed for NaN)
                    this_label = label[index].astype(float)
                    this_label[np.in1d(this_label,np.array(labels_to_filter))] = np.NaN
                    label[index] = this_label
        m.add_experiment_measurement(F_EXPT_ORIG_NUMTRACKS, nlabels)
        if self.wants_lifetime_filtering.value:
            m.add_experiment_measurement(F_EXPT_FILT_NUMTRACKS, nlabels-len(labels_to_filter))
    def map_objects(self, workspace, new_of_old, old_of_new, i, j):
        '''Record the mapping of old to new objects and vice-versa
        workspace - workspace for current image set
        new_to_old - an array of the new labels for every old label
        old_to_new - an array of the old labels for every new label
        i, j - the coordinates for each new object.

        Writes the per-object tracking measurements (parent, label,
        trajectory, distances, lifetime) and the per-image counts, and
        registers the parent/child relationships for this cycle.
        '''
        m = workspace.measurements
        assert isinstance(m, cpmeas.Measurements)
        image_number = m.get_current_image_measurement(cpp.IMAGE_NUMBER)
        new_of_old = new_of_old.astype(int)
        old_of_new = old_of_new.astype(int)
        # persistent track labels assigned to the previous cycle's objects
        old_object_numbers = self.get_saved_object_numbers(workspace).astype(int)
        max_object_number = self.get_max_object_number(workspace)
        old_count = len(new_of_old)
        new_count = len(old_of_new)
        #
        # Record the new objects' parents
        #
        # parents = persistent track label of each new object's parent
        # (0 where there is no parent)
        parents = old_of_new.copy()
        parents[parents != 0] =\
            old_object_numbers[(old_of_new[parents!=0]-1)].astype(parents.dtype)
        # the measurement stores the parent's 1-based object number within
        # the previous image set (old_of_new), not the persistent label
        self.add_measurement(workspace, F_PARENT_OBJECT_NUMBER, old_of_new)
        parent_image_numbers = np.zeros(len(old_of_new))
        parent_image_numbers[parents != 0] = image_number - 1
        self.add_measurement(workspace, F_PARENT_IMAGE_NUMBER,
                             parent_image_numbers)
        #
        # Assign object IDs to the new objects
        #
        mapping = np.zeros(new_count, int)
        if old_count > 0 and new_count > 0:
            # tracked objects inherit their parent's persistent label
            mapping[old_of_new != 0] = \
                old_object_numbers[old_of_new[old_of_new != 0] - 1]
            miss_count = np.sum(old_of_new == 0)
            lost_object_count = np.sum(new_of_old == 0)
        else:
            miss_count = new_count
            lost_object_count = old_count
        # NOTE(review): miss_count is computed but never used below
        # untracked objects get fresh labels past the running maximum
        nunmapped = np.sum(mapping==0)
        new_max_object_number = max_object_number + nunmapped
        mapping[mapping == 0] = np.arange(max_object_number+1,
                                          new_max_object_number + 1)
        self.set_max_object_number(workspace, new_max_object_number)
        self.add_measurement(workspace, F_LABEL, mapping)
        self.set_saved_object_numbers(workspace, mapping)
        #
        # Compute distances and trajectories
        #
        diff_i = np.zeros(new_count)
        diff_j = np.zeros(new_count)
        distance = np.zeros(new_count)
        integrated_distance = np.zeros(new_count)
        displacement = np.zeros(new_count)
        linearity = np.ones(new_count)
        orig_i = i.copy()
        orig_j = j.copy()
        old_i, old_j = self.get_saved_coordinates(workspace)
        old_distance = self.get_saved_distances(workspace)
        old_orig_i, old_orig_j = self.get_orig_coordinates(workspace)
        has_old = (old_of_new != 0)
        if np.any(has_old):
            old_indexes = old_of_new[has_old]-1
            # track origin is inherited from the parent
            orig_i[has_old] = old_orig_i[old_indexes]
            orig_j[has_old] = old_orig_j[old_indexes]
            diff_i[has_old] = i[has_old] - old_i[old_indexes]
            diff_j[has_old] = j[has_old] - old_j[old_indexes]
            distance[has_old] = np.sqrt(diff_i[has_old]**2 + diff_j[has_old]**2)
            integrated_distance[has_old] = (old_distance[old_indexes] + distance[has_old])
            # crow-fly distance from the track origin
            displacement[has_old] = np.sqrt((i[has_old]-orig_i[has_old])**2 + (j[has_old]-orig_j[has_old])**2)
            linearity[has_old] = displacement[has_old] / integrated_distance[has_old]
        # note: i is the Y (row) axis and j the X (column) axis, hence the swap
        self.add_measurement(workspace, F_TRAJECTORY_X, diff_j)
        self.add_measurement(workspace, F_TRAJECTORY_Y, diff_i)
        self.add_measurement(workspace, F_DISTANCE_TRAVELED, distance)
        self.add_measurement(workspace, F_DISPLACEMENT, displacement)
        self.add_measurement(workspace, F_INTEGRATED_DISTANCE, integrated_distance)
        self.add_measurement(workspace, F_LINEARITY, linearity)
        self.set_saved_distances(workspace, integrated_distance)
        self.set_orig_coordinates(workspace, (orig_i, orig_j))
        self.set_saved_coordinates(workspace, (i,j))
        #
        # Update the ages
        #
        age = np.ones(new_count, int)
        if np.any(has_old):
            old_age = self.get_saved_ages(workspace)
            age[has_old] = old_age[old_of_new[has_old]-1]+1
        self.add_measurement(workspace, F_LIFETIME, age)
        final_age = np.NaN*np.ones(new_count, float) # Initialize to NaN; will re-calc later
        self.add_measurement(workspace, F_FINAL_AGE, final_age)
        self.set_saved_ages(workspace, age)
        self.set_saved_object_numbers(workspace, mapping)
        #
        # Add image measurements
        #
        self.add_image_measurement(workspace, F_NEW_OBJECT_COUNT,
                                   np.sum(parents==0))
        self.add_image_measurement(workspace, F_LOST_OBJECT_COUNT,
                                   lost_object_count)
        #
        # Find parents with more than one child. These are the progenetors
        # for daughter cells.
        #
        if np.any(parents != 0):
            h = np.bincount(parents[parents != 0])
            split_count = np.sum(h > 1)
        else:
            split_count = 0
        self.add_image_measurement(workspace, F_SPLIT_COUNT, split_count)
        #
        # Find children with more than one parent. These are the merges
        #
        if np.any(new_of_old != 0):
            h = np.bincount(new_of_old[new_of_old != 0])
            merge_count = np.sum(h > 1)
        else:
            merge_count = 0
        self.add_image_measurement(workspace, F_MERGE_COUNT, merge_count)
        #########################################
        #
        # Compile the relationships between children and parents
        #
        #########################################
        last_object_numbers = np.arange(1, len(new_of_old) + 1)
        new_object_numbers = np.arange(1, len(old_of_new)+1)
        r_parent_object_numbers = np.hstack((
            old_of_new[old_of_new != 0],
            last_object_numbers[new_of_old != 0]))
        r_child_object_numbers = np.hstack((
            new_object_numbers[parents != 0], new_of_old[new_of_old != 0]))
        if len(r_child_object_numbers) > 0:
            #
            # Find unique pairs
            #
            # sort then drop consecutive duplicates
            order = np.lexsort((r_child_object_numbers, r_parent_object_numbers))
            r_child_object_numbers = r_child_object_numbers[order]
            r_parent_object_numbers = r_parent_object_numbers[order]
            to_keep = np.hstack((
                [True],
                (r_parent_object_numbers[1:] != r_parent_object_numbers[:-1]) |
                (r_child_object_numbers[1:] != r_child_object_numbers[:-1])))
            r_child_object_numbers = r_child_object_numbers[to_keep]
            r_parent_object_numbers = r_parent_object_numbers[to_keep]
            r_image_numbers = np.ones(
                r_parent_object_numbers.shape[0],
                r_parent_object_numbers.dtype) * image_number
            if len(r_child_object_numbers) > 0:
                m.add_relate_measurement(
                    self.module_num, R_PARENT,
                    self.object_name.value, self.object_name.value,
                    r_image_numbers - 1, r_parent_object_numbers,
                    r_image_numbers, r_child_object_numbers)
def get_kalman_feature_names(self):
if self.tracking_method != TM_LAP:
return []
return sum(
[sum(
[[ kalman_feature(model, F_STATE, element),
kalman_feature(model, F_NOISE, element)] +
[ kalman_feature(model, F_COV, element, e2)
for e2 in elements]
for element in elements],[])
for model, elements in self.get_kalman_models()], [])
    def get_measurement_columns(self, pipeline):
        '''Return the measurement column definitions produced by this module'''
        # per-object tracking features
        result = [(self.object_name.value,
                   self.measurement_name(feature),
                   coltype)
                  for feature, coltype in F_ALL_COLTYPE_ALL]
        # per-image summary features
        result += [(cpmeas.IMAGE, self.image_measurement_name(feature), coltype)
                   for feature, coltype in F_IMAGE_COLTYPE_ALL]
        if self.tracking_method == TM_LAP:
            # LAP-only per-object features
            result += [( self.object_name.value,
                         self.measurement_name(name),
                         coltype) for name, coltype in (
                             (F_AREA, cpmeas.COLTYPE_INTEGER),
                             (F_LINK_TYPE, cpmeas.COLTYPE_INTEGER),
                             (F_LINKING_DISTANCE, cpmeas.COLTYPE_FLOAT),
                             (F_STANDARD_DEVIATION, cpmeas.COLTYPE_FLOAT),
                             (F_MOVEMENT_MODEL, cpmeas.COLTYPE_INTEGER))]
            # Kalman state / noise / covariance features
            result += [( self.object_name.value,
                         self.measurement_name(name),
                         cpmeas.COLTYPE_FLOAT) for name in
                       list(self.get_kalman_feature_names())]
            if self.wants_second_phase:
                # phase-2 (gap / merge / split / mitosis) scores
                result += [
                    (self.object_name.value, self.measurement_name(name), coltype)
                    for name, coltype in (
                        (F_GAP_LENGTH, cpmeas.COLTYPE_INTEGER),
                        (F_GAP_SCORE, cpmeas.COLTYPE_FLOAT),
                        (F_MERGE_SCORE, cpmeas.COLTYPE_FLOAT),
                        (F_SPLIT_SCORE, cpmeas.COLTYPE_FLOAT),
                        (F_MITOSIS_SCORE, cpmeas.COLTYPE_FLOAT))]
        # Add the post-group attribute to all measurements
        attributes = { cpmeas.MCA_AVAILABLE_POST_GROUP: True }
        result = [ ( c[0], c[1], c[2], attributes) for c in result]
        return result
def get_object_relationships(self, pipeline):
'''Return the object relationships produced by this module'''
object_name = self.object_name.value
if self.wants_second_phase and self.tracking_method == TM_LAP:
when = cpmeas.MCA_AVAILABLE_POST_GROUP
else:
when = cpmeas.MCA_AVAILABLE_EACH_CYCLE
return [(R_PARENT, object_name, object_name, when)]
def get_categories(self, pipeline, object_name):
if object_name in (self.object_name.value, cpmeas.IMAGE):
return [F_PREFIX]
elif object_name == cpmeas.EXPERIMENT:
return [F_PREFIX]
else:
return []
def get_measurements(self, pipeline, object_name, category):
if object_name == self.object_name.value and category == F_PREFIX:
result = list(F_ALL)
if self.tracking_method == TM_LAP:
result += [F_AREA, F_LINKING_DISTANCE, F_STANDARD_DEVIATION,
F_LINK_TYPE, F_MOVEMENT_MODEL]
if self.wants_second_phase:
result += [F_GAP_LENGTH, F_GAP_SCORE, F_MERGE_SCORE,
F_SPLIT_SCORE, F_MITOSIS_SCORE]
result += self.get_kalman_feature_names()
return result
if object_name == cpmeas.IMAGE:
result = F_IMAGE_ALL
return result
if object_name == cpmeas.EXPERIMENT and category == F_PREFIX:
return [F_EXPT_ORIG_NUMTRACKS, F_EXPT_FILT_NUMTRACKS]
return []
def get_measurement_objects(self, pipeline, object_name, category,
measurement):
if (object_name == cpmeas.IMAGE and category == F_PREFIX and
measurement in F_IMAGE_ALL):
return [ self.object_name.value]
return []
def get_measurement_scales(self, pipeline, object_name, category, feature,image_name):
if self.tracking_method == TM_LAP:
return []
if feature in self.get_measurements(pipeline, object_name, category):
return [str(self.pixel_radius.value)]
return []
    def upgrade_settings(self, setting_values, variable_revision_number,
                         module_name, from_matlab):
        '''Migrate saved settings from older module revisions

        Applies each revision's migration in sequence so that any old
        pipeline's setting list is brought up to the current layout.
        Returns the (possibly extended) setting values, the new revision
        number and the updated from_matlab flag.
        '''
        # Matlab rev 3 -> Python rev 1: re-pack the Matlab setting layout
        if from_matlab and variable_revision_number == 3:
            wants_image = setting_values[10] != cps.DO_NOT_USE
            measurement = '_'.join(setting_values[2:6])
            setting_values = [ setting_values[0], # tracking method
                               setting_values[1], # object name
                               measurement,
                               setting_values[6], # pixel_radius
                               setting_values[7], # display_type
                               wants_image,
                               setting_values[10]]
            variable_revision_number = 1
            from_matlab = False
        # rev 1 -> 2: two new numeric settings with default "100"
        if (not from_matlab) and variable_revision_number == 1:
            setting_values = setting_values + ["100","100"]
            variable_revision_number = 2
        if (not from_matlab) and variable_revision_number == 2:
            # Added phase 2 parameters
            setting_values = setting_values + [
                "40","40","40","50","50","50","5"]
            variable_revision_number = 3
        if (not from_matlab) and variable_revision_number == 3:
            # Added Kalman choices:
            # Model
            # radius std
            # radius limit
            setting_values = (setting_values[:7] +
                              [ M_BOTH, "3", "2,10"] +
                              setting_values[9:])
            variable_revision_number = 4
        if (not from_matlab) and variable_revision_number == 4:
            # Added lifetime filtering: Wants filtering + min/max allowed lifetime
            setting_values = setting_values + [cps.NO, cps.YES, "1", cps.NO, "100"]
            variable_revision_number = 5
        if (not from_matlab) and variable_revision_number == 5:
            # Added mitosis alternative score + mitosis_max_distance
            setting_values = setting_values + ["80", "40"]
            variable_revision_number = 6
        return setting_values, variable_revision_number, from_matlab
| sstoma/CellProfiler | cellprofiler/modules/trackobjects.py | Python | gpl-2.0 | 137,657 |
#!/usr/bin/env python
"""
Do whatever is needed to get a security upload released
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2010 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
################################################################################
import os
import sys
import time
import apt_pkg
import commands
import errno
import fcntl
from daklib import queue
from daklib import daklog
from daklib import utils
from daklib.dbconn import *
from daklib.regexes import re_taint_free
from daklib.config import Config
# Module-level state shared between main() and the helper functions below.
Options = None  # parsed Security::Options subtree from the dak config
Logger = None   # daklog logger; only created when running as 'dak' without -n
Queue = None    # appears unused in this module; kept for parity with other dak commands
changes = []    # .changes file paths collected from the command line
def usage():
    """Print the command-line help text and exit the process."""
    print """Usage: dak security-install [OPTIONS] changesfiles
Do whatever there is to do for a security release
 -h, --help show this help and exit
 -n, --no-action don't commit changes
 -s, --sudo dont bother, used internally
"""
    sys.exit()
def spawn(command):
    """Run a shell command, aborting the program if it fails.

    The command is first checked against re_taint_free to refuse shell
    metacharacters.  In no-action mode the command is only printed.
    """
    if not re_taint_free.match(command):
        utils.fubar("Invalid character in \"%s\"." % (command))
    if Options["No-Action"]:
        print "[%s]" % (command)
    else:
        (result, output) = commands.getstatusoutput(command)
        if (result != 0):
            utils.fubar("Invocation of '%s' failed:\n%s\n" % (command, output), result)
##################### ! ! ! N O T E ! ! ! #####################
#
# These functions will be reinvoked by semi-priveleged users, be careful not
# to invoke external programs that will escalate privileges, etc.
#
##################### ! ! ! N O T E ! ! ! #####################
def sudo(arg, fn, exit):
    """Re-invoke this command as user 'dak' via sudo, or call fn directly.

    arg - single-letter option (e.g. 'A') passed to the re-invoked command
    fn - function to call when already running with sufficient privileges
    exit - when true, quit() after the action completes
    """
    if Options["Sudo"]:
        os.spawnl(os.P_WAIT, "/usr/bin/sudo", "/usr/bin/sudo", "-u", "dak", "-H",
                  "/usr/local/bin/dak", "new-security-install", "-"+arg)
    else:
        fn()
    if exit:
        quit()
# Public entry point: runs _do_Approve (re-invoked as 'dak' via sudo if needed), then quits.
def do_Approve(): sudo("A", _do_Approve, True)
def _do_Approve():
    """Install accepted packages and publish the security archive.

    Takes the 'unchecked' lock first so no other process feeds the queue
    while the archive is being rebuilt; the lock is held for the whole run.
    """
    print "Locking unchecked"
    with os.fdopen(os.open('/srv/security-master.debian.org/lock/unchecked.lock', os.O_CREAT | os.O_RDWR ), 'r') as lock_fd:
        # poll until the exclusive lock is free
        while True:
            try:
                fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
                break
            except IOError as e:
                if e.errno in (errno.EACCES, errno.EAGAIN):
                    print "Another process keeping the unchecked lock, waiting."
                    time.sleep(10)
                else:
                    raise
        # 1. Install accepted packages
        print "Installing accepted packages into security archive"
        for queue in ("embargoed",):
            spawn("dak process-policy {0}".format(queue))
        # 2. Run all the steps that are needed to publish the changed archive
        print "Doing loadsa stuff in the archive, will take time, please be patient"
        os.environ['configdir'] = '/srv/security-master.debian.org/dak/config/debian-security'
        spawn("/srv/security-master.debian.org/dak/config/debian-security/cronscript unchecked-dinstall")
        print "Triggering metadata export for packages.d.o and other consumers"
        spawn("/srv/security-master.debian.org/dak/config/debian-security/export.sh")
########################################################################
########################################################################
def main():
    """Parse options, create ACCEPT comment files and approve the upload."""
    global Options, Logger, Queue, changes
    cnf = Config()
    Arguments = [('h', "Help", "Security::Options::Help"),
                 ('n', "No-Action", "Security::Options::No-Action"),
                 ('c', 'Changesfile', "Security::Options::Changesfile"),
                 ('s', "Sudo", "Security::Options::Sudo"),
                 ('A', "Approve", "Security::Options::Approve")
                 ]
    # make sure every option key exists in the config tree
    for i in ["Help", "No-Action", "Changesfile", "Sudo", "Approve"]:
        key = "Security::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""
    changes_files = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Security::Options")
    if Options['Help']:
        usage()
    # de-duplicate the .changes arguments via a dict
    changesfiles={}
    for a in changes_files:
        if not a.endswith(".changes"):
            utils.fubar("not a .changes file: %s" % (a))
        changesfiles[a]=1
    changes = changesfiles.keys()
    # non-dak users must re-invoke via sudo (except in no-action mode)
    username = utils.getusername()
    if username != "dak":
        print "Non-dak user: %s" % username
        Options["Sudo"] = "y"
    if Options["No-Action"]:
        Options["Sudo"] = ""
    if not Options["Sudo"] and not Options["No-Action"]:
        Logger = daklog.Logger("security-install")
    session = DBConn().session()
    # If we call ourselve to approve, we do just that and exit
    if Options["Approve"]:
        do_Approve()
        sys.exit()
    if len(changes) == 0:
        utils.fubar("Need changes files as arguments")
    # Yes, we could do this inside do_Approve too. But this way we see who exactly
    # called it (ownership of the file)
    acceptfiles={}
    for change in changes:
        dbchange=get_dbchange(os.path.basename(change), session)
        # strip epoch from version
        version=dbchange.version
        version=version[(version.find(':')+1):]
        # strip possible version from source (binNMUs)
        source = dbchange.source.split(None, 1)[0]
        acceptfilename="%s/COMMENTS/ACCEPT.%s_%s" % (os.path.dirname(os.path.abspath(changes[0])), source, version)
        acceptfiles[acceptfilename]=1
    print "Would create %s now and then go on to accept this package, if you allow me to." % (acceptfiles.keys())
    if Options["No-Action"]:
        sys.exit(0)
    else:
        raw_input("Press Enter to continue")
    # write the ACCEPT marker files, then hand off to the approval step
    for acceptfilename in acceptfiles.keys():
        accept_file = file(acceptfilename, "w")
        accept_file.write("OK\n")
        accept_file.close()
    do_Approve()
# Script entry point.
if __name__ == '__main__':
    main()
| purism/pdak | dak/new_security_install.py | Python | gpl-2.0 | 6,680 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# from http://homepage.hispeed.ch/py430/python/daemon.py
###########################################################################
# configure these paths:
LOGFILE = '/var/log/powerscout.log'
PIDFILE = '/var/run/powerscout.pid'
# and let USERPROG be the main function of your project
import powerscoutLogger
USERPROG = powerscoutLogger.main
###########################################################################
#based on Jürgen Hermanns http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66012
import sys, os
class Log:
    """Write-only wrapper around a file object that flushes after every
    write, so the log on disk stays complete even if the process dies
    without a clean shutdown."""
    def __init__(self, f):
        # keep a reference to the underlying writable file object
        self.f = f
    def write(self, s):
        # forward the text, then push it straight through to the OS
        stream = self.f
        stream.write(s)
        stream.flush()
def main():
    """Daemon body: set up logging and effective uid, then run USERPROG."""
    #change to data directory if needed
    os.chdir("/")
    #redirect outputs to a logfile
    sys.stdout = sys.stderr = Log(open(LOGFILE, 'a+'))
    #ensure that the daemon runs as a normal user (disabled below)
    #os.setegid(20) #set group first "dialout" so we can access port
    #os.seteuid(1000) #set user "pydaemon"
    # need root for logging in /var/log
    os.setegid(0)
    os.seteuid(0)
    #start the user program here:
    USERPROG()
if __name__ == "__main__":
    # do the UNIX double-fork magic, see Stevens' "Advanced
    # Programming in the UNIX Environment" for details (ISBN 0201563177)
    try:
        pid = os.fork()
        if pid > 0:
            # exit first parent
            sys.exit(0)
    except OSError, e:
        print >>sys.stderr, "fork #1 failed: %d (%s)" % (e.errno, e.strerror)
        sys.exit(1)
    # decouple from parent environment
    os.chdir("/") #don't prevent unmounting....
    os.setsid()
    os.umask(0)
    # do second fork
    try:
        pid = os.fork()
        if pid > 0:
            # exit from second parent, print eventual PID before
            #print "Daemon PID %d" % pid
            # record the daemon's PID for init scripts / kill
            open(PIDFILE,'w').write("%d"%pid)
            sys.exit(0)
    except OSError, e:
        print >>sys.stderr, "fork #2 failed: %d (%s)" % (e.errno, e.strerror)
        sys.exit(1)
    # start the daemon main loop
    main()
| amm042/pywattnode | pypowerscout.py | Python | gpl-2.0 | 2,227 |
#import webdemo
| 3dfxsoftware/cbss-addons | webdemo/__init__.py | Python | gpl-2.0 | 16 |
# -*- coding: utf-8 -*-
###############################################################################################
#
# MediaPortal for Dreambox OS
#
# Coded by MediaPortal Team (c) 2013-2017
#
# This plugin is open source but it is NOT free software.
#
# This plugin may only be distributed to and executed on hardware which
# is licensed by Dream Property GmbH. This includes commercial distribution.
# In other words:
# It's NOT allowed to distribute any parts of this plugin or its source code in ANY way
# to hardware which is NOT licensed by Dream Property GmbH.
# It's NOT allowed to execute this plugin and its source code or even parts of it in ANY way
# on hardware which is NOT licensed by Dream Property GmbH.
#
# This applies to the source code as a whole as well as to parts of it, unless
# explicitely stated otherwise.
#
# If you want to use or modify the code or parts of it,
# you have to keep OUR license and inform us about the modifications, but it may NOT be
# commercially distributed other than under the conditions noted above.
#
# As an exception regarding execution on hardware, you are permitted to execute this plugin on VU+ hardware
# which is licensed by satco europe GmbH, if the VTi image is used on that hardware.
#
# As an exception regarding modifcations, you are NOT permitted to remove
# any copy protections implemented in this plugin or change them for means of disabling
# or working around the copy protections, unless the change has been explicitly permitted
# by the original authors. Also decompiling and modification of the closed source
# parts is NOT permitted.
#
# Advertising with this plugin is NOT allowed.
# For other uses, permission from the authors is necessary.
#
###############################################################################################
from Plugins.Extensions.MediaPortal.plugin import _
from Plugins.Extensions.MediaPortal.resources.imports import *
from Plugins.Extensions.MediaPortal.resources.choiceboxext import ChoiceBoxExt
# User-Agent string used for outgoing HTTP requests made by this plugin.
myagent = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:40.0) Gecko/20100101 Firefox/40.0'
# Display name of the site this plugin browses.
BASE_NAME = "WickedPictures.com"
class wickedGenreScreen(MPScreen):
    """Top-level genre/category selection screen for the plugin."""

    def __init__(self, session):
        MPScreen.__init__(self, session, skin='MP_Plugin')

        # remote-control key bindings
        self["actions"] = ActionMap(["MP_Actions"], {
            "ok" : self.keyOK,
            "0" : self.closeAll,
            "up" : self.keyUp,
            "down" : self.keyDown,
            "right" : self.keyRight,
            "left" : self.keyLeft,
            "cancel" : self.keyCancel
        }, -1)

        self['title'] = Label(BASE_NAME)
        self['ContentTitle'] = Label("Genre:")

        self.genreliste = []          # list of (label, url, extra) entries
        self.suchString = ''          # last search term entered by the user

        self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
        self['liste'] = self.ml

        self.onLayoutFinish.append(self.layoutFinished)

    def layoutFinished(self):
        # Entries are inserted at position 0, so the visible list ends up
        # in reverse order of these statements (search entry first).
        self.genreliste.insert(0, ("Exclusive Girls", 'http://www.wicked.com/tour/pornstars/exclusive/', None))
        self.genreliste.insert(0, ("Most Active Girls", 'http://www.wicked.com/tour/pornstars/mostactive/', None))
        self.genreliste.insert(0, ("Most Liked Girls", 'http://www.wicked.com/tour/pornstars/mostliked/', None))
        self.genreliste.insert(0, ("Most Recent Girls", 'http://www.wicked.com/tour/pornstars/mostrecent/', None))
        self.genreliste.insert(0, ("Most Viewed Movies", 'http://www.wicked.com/tour/movies/mostviewed/', None))
        self.genreliste.insert(0, ("Top Rated Movies", 'http://www.wicked.com/tour/movies/toprated/', None))
        self.genreliste.insert(0, ("Latest Movies", 'http://www.wicked.com/tour/movies/latest/', None))
        self.genreliste.insert(0, ("Most Viewed Scenes", 'http://www.wicked.com/tour/videos/mostviewed/', None))
        self.genreliste.insert(0, ("Top Rated Scenes", 'http://www.wicked.com/tour/videos/toprated/', None))
        self.genreliste.insert(0, ("Latest Scenes", 'http://www.wicked.com/tour/videos/latest/', None))
        self.genreliste.insert(0, ("--- Search ---", "callSuchen", None))
        self.ml.setList(map(self._defaultlistcenter, self.genreliste))
        self.showInfos()

    def keyOK(self):
        # this plugin requires the premiumize.me option to be enabled
        if not config.mediaportal.premiumize_use.value:
            message = self.session.open(MessageBoxExt, _("%s only works with enabled MP premiumize.me option (MP Setup)!" % BASE_NAME), MessageBoxExt.TYPE_INFO, timeout=10)
            return
        Name = self['liste'].getCurrent()[0][0]
        Link = self['liste'].getCurrent()[0][1]
        # dispatch on the selected entry: search, girls listing or films
        if Name == "--- Search ---":
            self.suchen()
        elif re.match(".*?Girls", Name):
            self.session.open(wickedGirlsScreen, Link, Name)
        else:
            self.session.open(wickedFilmScreen, Link, Name)

    def SuchenCallback(self, callback = None, entry = None):
        # callback from the virtual keyboard with the entered search term
        if callback is not None and len(callback):
            self.suchString = callback
            Name = "--- Search ---"
            Link = self.suchString.replace(' ', '-')
            self.session.open(wickedFilmScreen, Link, Name)
class wickedGirlsScreen(MPScreen, ThumbsHelper):
    """Paged listing of models for the selected category.

    Selecting an entry asks whether to browse that model's scenes or movies,
    then opens wickedFilmScreen on the corresponding sub-URL.
    """

    def __init__(self, session, Link, Name):
        self.Link = Link
        self.Name = Name
        MPScreen.__init__(self, session, skin='MP_Plugin')
        ThumbsHelper.__init__(self)
        self["actions"] = ActionMap(["MP_Actions"], {
            "ok" : self.keyOK,
            "0" : self.closeAll,
            "cancel" : self.keyCancel,
            "5" : self.keyShowThumb,
            "up" : self.keyUp,
            "down" : self.keyDown,
            "right" : self.keyRight,
            "left" : self.keyLeft,
            "nextBouquet" : self.keyPageUp,
            "prevBouquet" : self.keyPageDown,
            "green" : self.keyPageNumber
        }, -1)
        self['title'] = Label(BASE_NAME)
        self['ContentTitle'] = Label("Genre: %s" % self.Name)
        self['F2'] = Label(_("Page"))
        self['Page'] = Label(_("Page:"))
        self.keyLocked = True  # blocks keyOK until a page has loaded
        self.page = 1
        self.lastpage = 1
        self.filmliste = []
        self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
        self['liste'] = self.ml
        self.onLayoutFinish.append(self.loadPage)

    def loadPage(self):
        """Asynchronously fetch the current result page."""
        self.keyLocked = True
        self['name'].setText(_('Please wait...'))
        self.filmliste = []
        url = "%s%s/" % (self.Link, str(self.page))
        getPage(url, agent=myagent).addCallback(self.loadData).addErrback(self.dataError)

    def loadData(self, data):
        """Scrape the model list out of the fetched HTML."""
        self.getLastPage(data, 'class="paginationui-container(.*?)</ul>', '.*(?:\/|>)(\d+)')
        parse = re.search('class="showcase-models(.*?)</section>', data, re.S)
        # Fix: the old code called parse.group(1) unconditionally and raised
        # AttributeError when the site layout changed; now we fall through to
        # the "No pornstars found!" entry instead.
        if parse:
            Movies = re.findall('<a\shref="(.*?)"\sclass="showcase-models.*?img\ssrc="(.*?)"\stitle="(.*?)".*?scenes">(\d+)\sScenes', parse.group(1), re.S)
        else:
            Movies = None
        if Movies:
            for (Url, Image, Title, Scenes) in Movies:
                Url = "http://www.wicked.com" + Url
                Title = Title + " - %s Scenes" % Scenes
                self.filmliste.append((decodeHtml(Title), Url, Image))
        if len(self.filmliste) == 0:
            self.filmliste.append((_('No pornstars found!'), None, None))
        self.ml.setList(map(self._defaultlistleft, self.filmliste))
        self.ml.moveToIndex(0)
        self.keyLocked = False
        self.th_ThumbsQuery(self.filmliste, 0, 1, 2, None, None, self.page, int(self.lastpage), mode=1)
        self.showInfos()

    def showInfos(self):
        """Show title and cover art of the highlighted entry."""
        title = self['liste'].getCurrent()[0][0]
        pic = self['liste'].getCurrent()[0][2]
        self['name'].setText(title)
        CoverHelper(self['coverArt']).getCover(pic)

    def keyOK(self):
        """Ask whether to open the model's scenes or movies."""
        if self.keyLocked:
            return
        Link = self['liste'].getCurrent()[0][1]
        if Link:
            rangelist = [['Scenes', 'videos/'], ['Movies', 'movies/']]
            self.session.openWithCallback(self.keyOK2, ChoiceBoxExt, title=_('Select Action'), list = rangelist)

    def keyOK2(self, result):
        """ChoiceBoxExt callback: open the chosen sub-listing."""
        if result:
            Name = self['liste'].getCurrent()[0][0]
            Link = self['liste'].getCurrent()[0][1]
            Link = Link + result[1]
            self.session.open(wickedFilmScreen, Link, Name)
class wickedFilmScreen(MPScreen, ThumbsHelper):
    """Paged listing of movies or scenes; OK resolves and plays the stream."""

    def __init__(self, session, Link, Name):
        self.Link = Link
        self.Name = Name
        MPScreen.__init__(self, session, skin='MP_Plugin')
        ThumbsHelper.__init__(self)
        self["actions"] = ActionMap(["MP_Actions"], {
            "ok" : self.keyOK,
            "0" : self.closeAll,
            "cancel" : self.keyCancel,
            "5" : self.keyShowThumb,
            "up" : self.keyUp,
            "down" : self.keyDown,
            "right" : self.keyRight,
            "left" : self.keyLeft,
            "nextBouquet" : self.keyPageUp,
            "prevBouquet" : self.keyPageDown,
            "green" : self.keyPageNumber
        }, -1)
        self['title'] = Label(BASE_NAME)
        self['ContentTitle'] = Label("Genre: %s" % self.Name)
        self['F2'] = Label(_("Page"))
        self['Page'] = Label(_("Page:"))
        self.keyLocked = True  # blocks keyOK until a page has loaded
        self.page = 1
        # Fallback page count used when the page carries no pagination block.
        self.lastpage = 9
        self.filmliste = []
        self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
        self['liste'] = self.ml
        self.onLayoutFinish.append(self.loadPage)

    def loadPage(self):
        """Asynchronously fetch the current result page (search or browse)."""
        self.keyLocked = True
        self['name'].setText(_('Please wait...'))
        self.filmliste = []
        if re.match(".*?Search", self.Name):
            url = "http://www.wicked.com/tour/search/videos/%s/%s/" % (self.Link, str(self.page))
        else:
            url = "%s%s/" % (self.Link, str(self.page))
        getPage(url, agent=myagent).addCallback(self.loadData).addErrback(self.dataError)

    def loadData(self, data):
        """Scrape movie or scene entries out of the fetched HTML."""
        if re.match(".*?Search", self.Name):
            self.getLastPage(data, 'class="paginationui-container(.*?)</ul>', '.*(?:\/|>)(\d+)')
        elif re.match(".*?/tour/pornstar", self.Link):
            self.getLastPage(data, 'class="paginationui-container(.*?)</ul>', '.*(?:\/|>)(\d+)')
        else:
            self['page'].setText(str(self.page) + ' / ' + str(self.lastpage))
        # Fix: initialize Movies so that a page matching neither section no
        # longer raises UnboundLocalError at the "if Movies:" check below.
        Movies = None
        parse = re.search('lass="showcase-movies">(.*?)</section>', data, re.S)
        if parse:
            Movies = re.findall('<a\shref="(.*?)"\sclass="showcase-movies.*?img\ssrc="(.*?)"\salt=".*?"\stitle="(.*?)"', parse.group(1), re.S)
        else:
            parse = re.search('class="showcase-scenes">(.*?)</section>', data, re.S)
            if parse:
                Movies = re.findall('<a\shref="(.*?)"\sclass="showcase-scenes.*?img\ssrc="(.*?)"\stitle=".*?"\salt="(.*?)"', parse.group(1), re.S)
        if Movies:
            for (Url, Image, Title) in Movies:
                # _1.jpg is the larger preview variant of the thumbnail.
                Image = Image.replace('_2.jpg','_1.jpg')
                Url = "http://www.wicked.com" + Url
                self.filmliste.append((decodeHtml(Title), Url, Image))
        if len(self.filmliste) == 0:
            self.filmliste.append((_('No videos found!'), '', None, ''))
        self.ml.setList(map(self._defaultlistleft, self.filmliste))
        self.ml.moveToIndex(0)
        self.keyLocked = False
        self.th_ThumbsQuery(self.filmliste, 0, 1, 2, None, None, self.page, int(self.lastpage), mode=1)
        self.showInfos()

    def showInfos(self):
        """Show title and cover art of the highlighted entry."""
        title = self['liste'].getCurrent()[0][0]
        pic = self['liste'].getCurrent()[0][2]
        self['name'].setText(title)
        CoverHelper(self['coverArt']).getCover(pic)

    def keyOK(self):
        """Resolve the selected entry's stream link and play it."""
        if self.keyLocked:
            return
        Link = self['liste'].getCurrent()[0][1]
        get_stream_link(self.session).check_link(Link, self.play)

    def play(self, url):
        """Start playback; re-escape characters the resolver decoded."""
        title = self['liste'].getCurrent()[0][0]
        self.session.open(SimplePlayer, [(title, url.replace('%2F','%252F').replace('%3D','%253D').replace('%2B','%252B'))], showPlaylist=False, ltype='wicked')
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import http
from django.shortcuts import render
from main import models
from main import myforms
from main import cfg
from main import tools
from main import data_render
from main import database_manager
from main import scripts
from main import errors
from main import user_settings
import logging
logger = logging.getLogger(__name__)
import csv
import datetime
def __getIds(raw_items, item_id):
    """Extract field *item_id* from each comma-separated string in *raw_items*."""
    return [entry.split(',')[item_id] for entry in raw_items]
import xlwt
def __export_xls(data_table, filename="tabella"):
    """Render *data_table* as an .xls attachment and return the HttpResponse.

    Columns come from the user's 'export_table' view settings; the file name
    is suffixed with today's date.
    """
    # Create the HttpResponse object with the appropriate Excel header.
    response = http.HttpResponse(mimetype='application/ms-excel; charset=utf-8')
    today = datetime.datetime.today().strftime("%d-%m-%Y")
    response['Content-Disposition'] = 'attachment; filename="%s_%s.xls"' % (filename, today)

    workbook = xlwt.Workbook(encoding='utf-8')
    sheet = workbook.add_sheet('Elenco')

    columns = user_settings.settings_columView('export_table')
    # Header row: prettified column names.
    for col_idx, col_name in enumerate(columns):
        sheet.write(0, col_idx, "%s" % col_name.replace('_', ' ').capitalize())
    # Data rows start at 1 to leave room for the header row.
    for row_idx, record in enumerate(data_table):
        for col_idx, col_name in enumerate(columns):
            sheet.write(row_idx + 1, col_idx, data_render.formatFields(record, col_name, default_text="-"))

    workbook.save(response)
    return response
def __export_csv(data_table, filename="tabella"):
    """Render *data_table* as a semicolon-separated CSV attachment.

    Output is UTF-8 with a BOM so spreadsheet applications detect the
    encoding; columns come from the 'export_table' view settings.
    """
    # Create the HttpResponse object with the appropriate CSV header.
    response = http.HttpResponse(mimetype='text/csv')
    today = datetime.datetime.today().strftime("%d-%m-%Y")
    response['Content-Disposition'] = 'attachment; filename="%s_%s.csv"' % (filename, today)

    columns = user_settings.settings_columView('export_table')
    # UTF-8 byte-order mark.
    response.write("\xEF\xBB\xBF")
    writer = tools.UnicodeWriter(response, delimiter=';')
    writer.writerow(["%s" % c.replace('_', ' ').capitalize() for c in columns])
    for record in data_table:
        writer.writerow([data_render.formatFields(record, c, default_text="-") for c in columns])
    return response
def export_table(request):
    """Export the full-text search result ('search_keys' GET param) as .xls."""
    keys = request.GET.get('search_keys', '')
    rows = database_manager.search_fullText(keys)
    return __export_xls(rows, "Anagrafe")
def home(request, d={}):
    """Main dashboard: search/filter interventions and run bulk actions.

    GET  -- validates a RangeDataSelect form and renders two tables: the
            interventions due in the selected month, then the closed ones.
    POST -- applies the chosen bulk action to the ticked rows ('Lettera' and
            'Scarica Tabella' return immediately; the status actions fall
            through and re-render with default filters).

    d, when non-empty, carries a notification dict with keys
    message_hdr/message/message_type.  NOTE(review): d={} is a mutable
    default; it is only read here, but confirm no caller mutates it.
    """
    form = myforms.RangeDataSelect()
    data = ''
    notification = ''
    # Use default at first time when the home page is never loaded
    form_dict = {
        'search_keys' : "",
        'filter_type' : None,
        'ref_month' : None,
        'ref_year' : None,
        'order_by_field' : "",
        'ordering' : "",
    }
    if request.method == 'POST':
        selected_rows = request.POST.getlist('row_select', [])
        action = request.POST.get('button_action', '')
        if action == 'Lettera':
            # Generate the RTF letters for the selected clients.
            ids = __getIds(selected_rows, data_render.CLIENTE_ID)
            data_to_render = database_manager.search_ids('main_cliente.id', ids)
            return generate_report(data_to_render)
        elif action == 'Scarica Tabella':
            # Export the selected clients as an .xls download.
            ids = __getIds(selected_rows, data_render.CLIENTE_ID)
            data_to_render = database_manager.search_ids('main_cliente.id', ids)
            return __export_xls(data_to_render, "Elenco")
        else:
            # Status-change actions on the Verifica linked to each ticked row.
            for i in selected_rows:
                ids = i.split(',')
                verifica_id = ids[data_render.VERIFICA_ID]
                # The literal string 'None' marks rows without a Verifica.
                if verifica_id != 'None':
                    _id = int(verifica_id)
                    if action == 'Apri':
                        models.Verifica.objects.filter(id=_id).update(stato_verifica='A')
                    if action == 'Chiudi':
                        models.Verifica.objects.filter(id=_id).update(stato_verifica='C')
                    if action == 'Sospendi':
                        models.Verifica.objects.filter(id=_id).update(stato_verifica='S')
    if request.method == 'GET' and request.GET != {}:
        form = myforms.RangeDataSelect(request.GET)
        if form.is_valid():
            form_dict['search_keys'] = form.cleaned_data['search_keys']
            form_dict['filter_type'] = form.cleaned_data['filter_type']
            form_dict['ref_month'] = form.cleaned_data['ref_month']
            form_dict['ref_year'] = form.cleaned_data['ref_year']
            form_dict['order_by_field'] = form.cleaned_data['order_by_field']
            form_dict['ordering'] = form.cleaned_data['ordering']
    # First table: interventions due in the selected (or default) month.
    data_to_render = database_manager.search_inMonth(**form_dict)
    dr = data_render.DataRender(data_to_render)
    dr.selectColums(user_settings.settings_columView('home_view'))
    # Bulk-action toolbar rendered above the table.
    tb_top = [
        "<button class=\"btn btn-info dropdown-toggle\" data-toggle=\"dropdown\">Seleziona \
        <span class=\"caret\"></span></button> \
        <ul class=\"dropdown-menu\"> \
        <li><a id=\"action\" href=\"#\">Aperti</a></li> \
        <li><a id=\"action\" href=\"#\">Sospesi</a></li> \
        <li><a id=\"action\" href=\"#\">Chiusi</a></li> \
        <li class=\"divider\"></li> \
        <li><a id=\"action\" href=\"#\">Tutti</a></li> \
        <li><a id=\"action\" href=\"#\">Nessuno</a></li> \
        </ul>",
        "<input class=\"btn btn-info\" type=\"submit\" name=\"button_action\" value=\"Apri\">",
        "<input class=\"btn btn-info\" type=\"submit\" name=\"button_action\" value=\"Chiudi\">",
        "<input class=\"btn btn-info\" type=\"submit\" name=\"button_action\" value=\"Sospendi\">",
        "<input class=\"btn btn-info\" type=\"submit\" name=\"button_action\" value=\"Lettera\">",
        "<input class=\"btn btn-info\" type=\"submit\" name=\"button_action\" value=\"Scarica Tabella\">",
    ]
    # Per-row checkbox; its value packs the ids consumed by __getIds above.
    tb_left = [
        "<input type=\"checkbox\" name=\"row_select\" id=\"{stato_verifica}\" value=\"{cliente_id},{impianto_id},{verifica_id},{intervento_id}\">"
    ]
    dr.toolbar(top=tb_top, left=tb_left)
    dr.msgItemsEmpty("<br><h3>La ricerca non ha prodotto risultati.</h3>")
    dr.msgStatistics(("<br><h2>Nel mese di %s " % myforms.monthStr(form_dict['ref_month'])) + "COUNT interventi in scadenza.</h2><br>")
    dr.showStatistics()
    dr.orderUrl('home', form_dict)
    data += dr.toTable()
    # Second table: the interventions already closed in the same month.
    form_dict['status'] = True
    data_to_render = database_manager.search_inMonth(**form_dict)
    dr = data_render.DataRender(data_to_render)
    dr.selectColums(user_settings.settings_columView('home_view'))
    dr.toolbar(top=tb_top, left=tb_left)
    dr.msgItemsEmpty("")
    dr.msgStatistics(("<br><h2>N.COUNT interventi chiusi nel mese di %s" % myforms.monthStr(form_dict['ref_month'])) + ".</h2><br>")
    dr.showStatistics()
    data += dr.toTable()
    if d:
        notification = data_render.notification(d['message_hdr'], d['message'], d['message_type'])
    return render(request, 'home.html',{'query_path':request.get_full_path(),
                                        'notification': notification,
                                        'data': data,
                                        'data_form': form,
                                        'scripts': scripts.HOME_ADD_JS,
                                        })
def populatedb(request):
    """One-off import of client records from a hard-coded CSV file.

    NOTE(review): the absolute path only exists on the deployment host, and
    _display_ok is not defined in this module -- confirm it is provided by
    one of the `from main import ...` statements.
    """
    #data = tools.insert_csv_files(cli_on=False)
    data = tools.load_csv('/home/asterix/gestionale_www/main/elenco2011.csv')
    return _display_ok(request, "DB aggiornato con sucesso\n" + data)
def test(request):
    """Scratch view used during development to exercise test.html."""
    # Debug dump of the 'or' POST list (Python 2 print statement).
    print request.POST.getlist('or', [])
    show = cfg.HOME_STD_VIEW
    hide = ["Vuota"]
    #print show, hide
    return render(request, 'test.html', {'items_show': show, 'items_hide':hide })
from functools import partial
import tempfile
import re
import os,sys
import gestionale
def tag_replace(m, item_dict):
    """re.sub callback: replace a matched '<field>' tag with the item's value.

    The tag name (angle brackets stripped, lowercased) is looked up through
    data_render.formatFields.  Non-ASCII characters are rewritten as
    \\uNNNN? escapes so the RTF output stays 7-bit clean.
    """
    k = m.group()
    # Strip the surrounding '<' and '>' to obtain the field name.
    field_name = k[1:-1].lower()
    field = data_render.formatFields(item_dict, field_name, default_text="-")
    return ''.join([c if ord(c) < 128 else u'\\u' + unicode(ord(c)) + u'?' for c in unicode(field)])
def generate_report(items, file_name=None):
    """Render the 'lettera.rtf' template once per item and return it as an
    RTF attachment.

    The template marks a repeatable region between lines containing
    >>START<< and >>END<<; that region is buffered and re-emitted for every
    item with '<field>' tags substituted via tag_replace().  All other
    template lines are copied through verbatim.

    file_name is currently unused -- NOTE(review): confirm before removing.
    """
    block = []            # buffered lines of the repeatable region
    block_copy = False    # True while inside >>START<< .. >>END<<
    add_page = False      # True on the line carrying the >>END<< marker
    date_str = datetime.date.today()
    date_str = date_str.strftime(cfg.DATA_FIELD_STR_FORMAT)
    tmp_file = tempfile.NamedTemporaryFile()
    with open(gestionale.local_settings.LOCAL_TEMPLATE_PATH + 'lettera.rtf', 'r') as in_tpl:
        for line in in_tpl:
            # Start marker: begin buffering the repeatable region.
            if '>>START<<' in line:
                print "Start"
                block_copy = True
                continue
            # End marker: stop buffering; expand the buffered region below.
            if '>>END<<' in line:
                block_copy = False
                add_page = True
                print "End"
            if block_copy and not add_page:
                block.append(line)
            elif add_page:
                # Emit the buffered region once per item, substituting tags.
                for item in items:
                    item['data'] = date_str  # NOTE: mutates the caller's dicts
                    for s in block:
                        s = re.sub('(<\w+>)', partial(tag_replace, item_dict=item), s)
                        tmp_file.write(s)
                add_page = False
                block_copy = False
            else:
                tmp_file.write(line)
    tmp_file.seek(0)
    response = http.HttpResponse(tmp_file, mimetype='application/rtf')
    response['Content-Disposition'] = 'attachment; filename="lettere.rtf"'
    return response
def err(request):
    """Debug endpoint: deliberately invoke the custom 500 error handler."""
    return errors.server_error(request)
def check_test(request):
    """Render the anagrafe template with empty data (layout smoke test)."""
    return render(request, 'anagrafe.html', {'data': "" })
def check_layout(request):
    """Render the fluid layout template (layout smoke test)."""
    return render(request, 'fluid.html', {})
| asterix24/GestionaleCaldaie | main/views.py | Python | gpl-2.0 | 9,759 |
#*************************************************************************************************
#
# File Name: gui.py
# Project: Erebus Labs Sensor
# Revision Date: 04/13/2014
# Description: This file contains the main Erebus Sensor gui class
#
#*************************************************************************************************
# System Imports
import tkinter as tk
import tkinter.messagebox as mb
import time
import copy
from datetime import datetime
import subprocess
# Project Imports
import erebus_sensor.interface as interface
class ErebusGUI(tk.Frame):
    """
    Provides the user interface for the Erebus Labs sensor.

    Members:
        <Too Many To List>       all GUI attributes
        sensorHandle             serial.Serial object that communicates with the sensor
        displayedSettings        the collection of sensor settings displayed in the user
                                 interface

    Methods:
        <one per user action>    see individual docstrings; configureWindow
                                 initializes the tkinter window
    """

    def __init__(self, master=None):
        """
        Initializes the ErebusGUI object and displays the user interface.

        Arguments:
            master      tkinter.Tk root object
        """
        # Configure GUI-wide variables
        self.sensorHandle = None
        self.displayedSettings = interface.Settings()

        # Initialize Window
        tk.Frame.__init__(self, master)
        self.configureWindow()
        master.wm_title('Erebus Labs Sensor')

    def configureWindow(self):
        """
        Configures and applies attributes of the GUI window.

        Arguments:
            <None>
        """
        # *** Configure Top-Level Window ***
        # NOTE(review): both grid() and pack() are called on this frame;
        # the pack() call wins -- confirm the grid() call is intentional.
        self.grid(sticky=tk.N+tk.S+tk.E+tk.W)
        self.pack()
        top = self.winfo_toplevel()
        top.rowconfigure(0, weight=1)
        top.columnconfigure(0, weight=1)
        self.rowconfigure(0, weight=1)
        self.columnconfigure(0, weight=1)
        self['padx'] = 10
        self['pady'] = 10

        # *** Create Top-level Menu ***
        self.menuBar = tk.Menu(top, tearoff=0)
        top['menu'] = self.menuBar

        # Create File Menu Entry
        self.subMenuFile = tk.Menu(self.menuBar, tearoff=0)
        self.menuBar.add_cascade(label='File', menu=self.subMenuFile)
        self.subMenuFile.add_command(label="Exit", command=self.quit)

        # Create Sensor Menu Entry
        self.subMenuSensor = tk.Menu(self.menuBar, tearoff=0)
        self.menuBar.add_cascade(label="Sensor", menu=self.subMenuSensor)
        self.subMenuSensor.add_command(label="Get Data",
                                       command=self.getData)
        self.subMenuSensor.add_command(label="Get Current Configuration",
                                       command=self.getSettings)
        self.subMenuSensor.add_command(label="Apply Current Configuration",
                                       command=self.applySettings)
        self.subMenuSensor.add_command(label="Reset Sensor",
                                       command=self.resetSensor)

        # Create Help Menu Entry
        self.subMenuHelp = tk.Menu(self.menuBar, tearoff=0)
        self.menuBar.add_cascade(label="Help", menu=self.subMenuHelp)
        self.subMenuHelp.add_command(label="User Manual",
                                     command=self.openManual)
        self.subMenuHelp.add_command(label="Technical Manual",
                                     command=self.openTRM)
        self.subMenuHelp.add_command(label="About",
                                     command=self.showAbout)

        # *** Create Sensor Button and Status Box ***
        # Create the Frame
        self.fStatus = tk.Frame(self)
        self.fStatus['borderwidth'] = 3
        self.fStatus['padx'] = 10
        self.fStatus['pady'] = 10
        self.fStatus['relief'] = tk.GROOVE
        self.fStatus.grid(row=0, sticky=tk.E+tk.W)

        # Create Sensor Status Box
        self.lStatus = tk.Label(self.fStatus)
        self.lStatus['font'] = ('Helvetica', 16, 'bold')
        self.lStatus['width'] = 24
        self.lStatus['height'] = 3
        self.lStatus['foreground'] = 'black'
        self.lStatus.grid(row=0, column=0, padx=10, pady=10, sticky=tk.E+tk.W)

        # Create Connect Button
        self.bConnectSensor = tk.Button(self.fStatus)
        self.bConnectSensor['font'] = ('Helvetica', 12, 'bold')
        self.bConnectSensor['command'] = self.toggleSensor
        self.bConnectSensor['relief'] = tk.RAISED
        self.bConnectSensor.grid(row=0, column=1, padx=10, pady=10)

        # Initialize Connected and Button Text
        self._disconnectedMessage()

        # *** Create Settings Area ***
        self.fSettings = tk.Frame(self)
        self.fSettings['borderwidth'] = 3
        self.fSettings['padx'] = 10
        self.fSettings['pady'] = 10
        self.fSettings['relief'] = tk.GROOVE
        self.fSettings.grid(row=1, sticky=tk.E+tk.W)

        # Create Header
        self.lStatusHeader = tk.Label(self.fSettings, text="Data Collection Settings")
        self.lStatusHeader['font'] = ('Helvetica', 14, 'bold')
        self.lStatusHeader.grid(row=0, column=0, columnspan=3, sticky=tk.W)

        # Create Section Labels
        self.lSettingSensor = tk.Label(self.fSettings, text="Sensor:")
        self.lSettingSensor['font'] = ('Helvetica', 12)
        self.lSettingSensor.grid(row=1, column=0, sticky=tk.E, padx=10)

        self.lSettingInterval = tk.Label(self.fSettings, text="Interval:")
        self.lSettingInterval['font'] = ('Helvetica', 12)
        self.lSettingInterval.grid(row=2, column=0, sticky=tk.E, padx=10)

        self.lSettingUnit = tk.Label(self.fSettings, text="Unit:")
        self.lSettingUnit['font'] = ('Helvetica', 12)
        self.lSettingUnit.grid(row=3, column=0, sticky=tk.E, padx=10)

        # Create Sensor Selection Dropdown
        self.sensorOptions = tk.StringVar()
        self.sensorOptions.set(self.displayedSettings.SENSOR)
        self.dSettingSensor = tk.OptionMenu(self.fSettings,
                                            self.sensorOptions,
                                            *interface.sensorOptions)
        self.dSettingSensor.grid(row=1, column=1, sticky=tk.E+tk.W, padx=20)

        # Create Interval Edit Box
        self.eSettingInterval = tk.Entry(self.fSettings)
        self.eSettingInterval["width"] = 25
        self.iSettingInterval = tk.StringVar()
        self.eSettingInterval["textvariable"] = self.iSettingInterval
        self.eSettingInterval["justify"] = tk.RIGHT
        self.eSettingInterval.insert(0, str(self.displayedSettings.SAMPLE_INTERVAL))
        self.eSettingInterval.grid(row=2, column=1, sticky=tk.E+tk.W, padx=20)

        # Fix: this hint label previously reused the name lSettingInterval,
        # silently clobbering the reference to the "Interval:" label above.
        self.lSettingIntervalHint = tk.Label(self.fSettings)
        self.lSettingIntervalHint["text"] = ("Interval must be between 0 and {}"
                                             .format(interface.maxInterval))
        self.lSettingIntervalHint["font"] = ('Helvetica', 10)
        self.lSettingIntervalHint.grid(row=2, column=2, padx=20)

        # Create Unit Selection Dropdown
        self.unitOptions = tk.StringVar()
        self.unitOptions.set(self.displayedSettings.SAMPLE_UNIT)
        self.dSettingUnit = tk.OptionMenu(self.fSettings,
                                          self.unitOptions,
                                          *interface.unitOptions)
        self.dSettingUnit.grid(row=3, column=1, sticky=tk.E+tk.W, padx=20)
        return

    def toggleSensor(self):
        """
        Toggles the sensor status box based on sensor connectivity state.

        Status window possible states:
            Red         Not Connected
            Yellow      Connecting
            Green       Connected

        Arguments:
            <None>
        """
        if self.sensorHandle != None:
            self.disconnectSensor()
            self._disconnectedMessage()
        else:
            self.lStatus['background'] = 'yellow'
            self.lStatus['text'] = "Connecting..."
            proceed = mb.askokcancel("Connecting...",
                                     "This may take up to 30 seconds and cannot be "
                                     "interrupted. Click OK to continue.")
            # connectSensor() returns 0 on success.
            if proceed and not self.connectSensor():
                self._connectedMessage()
                if self.sensorHandle.update_RTC():
                    mb.showwarning("RTC Not Updated",
                                   "Warning: The sensor is connected, but the "
                                   "device's time could not be updated. Consider "
                                   "disconnecting and reconnecting the sensor.")
            elif proceed:
                self._disconnectedMessage(warning="Could not establish communication "
                                                  "with Erebus Sensor.")
                self.sensorHandle = None
            else:
                self._disconnectedMessage()
                self.sensorHandle = None
        return

    def _connectedMessage(self):
        """
        Sets the status and connection buttons to the connected state.

        Arguments:
            <None>
        """
        self.lStatus['background'] = 'green'
        self.lStatus['text'] = "Sensor Connected"
        self.bConnectSensor["text"] = "Disconnect Sensor"
        return

    def _disconnectedMessage(self, warning=None):
        """
        Sets the status and connection buttons to the disconnected state and displays a
        warning dialog box if necessary.

        Arguments:
            warning     string to display as a warning message to the user
        """
        self.lStatus['background'] = 'red'
        self.lStatus['text'] = "Sensor Not Connected"
        self.bConnectSensor["text"] = "Connect to Erebus Sensor"
        if warning:
            mb.showwarning("Failure", warning)
        return

    def _showNotConnected(self):
        """
        Displays an error message to the user indicating that their command failed
        because the sensor is not currently connected.

        Arguments:
            <None>
        """
        mb.showerror("", "Erebus sensor is not currently connected. Please connect "
                         "the sensor and try again.")
        return

    def connectSensor(self):
        """
        Creates the ErebusSensor object, attempts to connect to the sensor, and reports
        the result (0 on success, 1 on failure).

        Arguments:
            <None>
        """
        connectLimit = 12
        status = 1
        for x in range(connectLimit):
            self.sensorHandle = interface.ErebusSensor()
            if self.sensorHandle.isConnected():
                status = 0
                break
            time.sleep(3)
        return status

    def disconnectSensor(self):
        """
        Severs the USBUART connection with the sensor.

        Note: This does not send any commands to the device. The sensor does not know
              the connection has been severed. It only considers itself disconnected
              when the USB cable is unplugged.

        Arguments:
            <None>
        """
        if self.sensorHandle != None:
            self.sensorHandle.close()
        self.sensorHandle = None
        return

    def getSettings(self):
        """
        Retrieves the current sample settings from the device and displays them for the
        user.

        Arguments:
            <None>
        """
        if self.sensorHandle == None:
            self._showNotConnected()
            return
        sensorSettings = self.sensorHandle.getSettings()
        if sensorSettings == -1:
            mb.showerror("", "Settings could not be retrieved. Please try again.")
            return
        self.sensorOptions.set(sensorSettings.SENSOR)
        self.eSettingInterval.delete(0, tk.END)
        self.eSettingInterval.insert(0, str(sensorSettings.SAMPLE_INTERVAL))
        self.unitOptions.set(sensorSettings.SAMPLE_UNIT)
        return

    def applySettings(self):
        """
        Retrieves the current settings from the GUI window that the user has selected
        and sends them to the sensor.

        Arguments:
            <None>
        """
        if self.sensorHandle == None:
            self._showNotConnected()
            return
        # Strip stray separators the user may have typed into the entry box.
        if self.sensorHandle.applySettings(self.sensorOptions.get(),
                                           self.unitOptions.get(),
                                           int(self.eSettingInterval.get().strip(' ,.'))):
            mb.showerror("", "Settings update failed. Please try again.")
        else:
            mb.showinfo("", "Settings update successful.")
        return

    def getData(self):
        """
        Initiates a dump of stored samples from the sensor and outputs the result in a
        text file called "datadump.txt".

        Arguments:
            <None>
        """
        if self.sensorHandle == None:
            self._showNotConnected()
            return
        dataBlocks = self.sensorHandle.getData()
        if isinstance(dataBlocks, list) and dataBlocks:
            # Append so repeated dumps accumulate in the same file.
            with open('datadump.txt', 'a') as fo:
                fo.write("".join(["\n\n", "*"*30,
                                  "\nErebus Sensor Data Dump",
                                  "\n{} Blocks of data samples".format(len(dataBlocks)),
                                  "\nDump Time: {}".format(str(datetime.now())),
                                  "\n", "*"*30]))
                for block in dataBlocks:
                    fo.write(str(block))
            mb.showinfo("", "Data export successful.")
        elif dataBlocks == []:
            mb.showinfo("", "There were no data samples to retrieve from the device.")
        else:
            mb.showerror("", "There was an error retrieving data from the device. "
                             "Please try again.")
        return

    def resetSensor(self):
        """
        Initiates a reset of the sample block pointers on the device. This function
        MUST be called after the device is reprogrammed, before any samples are
        collected.

        Arguments:
            <None>
        """
        if self.sensorHandle == None:
            self._showNotConnected()
            return
        if self.sensorHandle.hard_reset():
            # Fix: corrected "erorr" typo in the user-facing message.
            mb.showerror("", "There was an error resetting the device. Please try "
                             "again.")
        else:
            mb.showinfo("", "Reset successful.")
        return

    def showAbout(self):
        """
        Displays an information box with the GUI version.

        Arguments:
            <None>
        """
        mb.showinfo("About", "Erebus Labs Stem Sensor\n"
                             "User Application Version 1.0\n"
                             "(C) 2014 Erebus Labs")
        return

    def openTRM(self):
        """
        Opens the Erebus Labs Sensor Technical Reference Manual.

        This routine is not cross-platform. It must be modified to enable Windows
        support.

        Arguments:
            <None>
        """
        subprocess.call(['xdg-open', './documents/Technical_Reference_Manual.pdf'],
                        stderr=None,
                        stdout=None)
        return

    def openManual(self):
        """
        Opens the Erebus Labs Sensor User Manual.

        This routine is not cross-platform. It must be modified to enable Windows
        support.

        Arguments:
            <None>
        """
        subprocess.call(['xdg-open', './documents/User_Manual.pdf'],
                        stderr=None,
                        stdout=None)
        return
| erebus-labs/PSU-VOC-SENSOR-2K14 | Code/python_gui/erebus_sensor/gui.py | Python | gpl-2.0 | 15,795 |
# kate: replace-tabs on; indent-width 4; remove-trailing-spaces all; show-tabs on; newline-at-eof on;
# -*- coding:utf-8 -*-
'''
Copyright (C) 2014 Peter Urbanec
All Right Reserved
License: Proprietary / Commercial - contact enigma.licensing (at) urbanec.net
'''
import requests
import json
from fcntl import ioctl
from struct import pack
from socket import socket, create_connection, AF_INET, SOCK_DGRAM, SHUT_RDWR, error as sockerror
from . import config, saveConfigFile
from boxbranding import getMachineBrand, getMachineName
# Client version string reported to the server with every request.
_version_string = "20141027"
_protocol = "http://"
_server = "api.icetv.com.au"
# Device type id sent at registration -- TODO confirm its catalog meaning.
_device_type_id = 22
_debug_level = 0  # 1 = request/reply, 2 = 1+headers, 3 = 2+partial body, 4 = 2+full body
def isServerReachable():
try:
sock = create_connection((_server, 80), 3)
sock.shutdown(SHUT_RDWR)
sock.close()
return True
except sockerror as ex:
print "[IceTV] Can not connect to IceTV server:", str(ex)
return False
def getMacAddress(ifname):
    """Return the MAC address of *ifname* (e.g. 'eth0') as 'AA:BB:CC:DD:EE:FF'.

    Falls back to 00:00:00:00:00:00 when the interface query fails
    (interface missing, insufficient permissions).
    """
    result = "00:00:00:00:00:00"
    sock = socket(AF_INET, SOCK_DGRAM)
    try:
        iface = pack('256s', ifname[:15])
        # 0x8927 == SIOCGIFHWADDR; bytes 18..23 of the reply hold the address.
        info = ioctl(sock.fileno(), 0x8927, iface)
        result = ':'.join(['%02x' % ord(char) for char in info[18:24]]).upper()
    except Exception:
        # Fix: narrowed from a bare "except:" -- still best-effort, but no
        # longer swallows SystemExit/KeyboardInterrupt.
        pass
    finally:
        sock.close()
    return result
def haveCredentials():
    """True when a non-empty IceTV session token is stored in the config."""
    token = config.plugins.icetv.member.token.value
    return bool(token)
def getCredentials():
    """Return the stored login credentials as request query parameters."""
    member = config.plugins.icetv.member
    return {
        "email_address": member.email_address.value,
        "token": member.token.value,
    }
def clearCredentials():
    """Forget the stored session token and persist the change immediately."""
    token = config.plugins.icetv.member.token
    token.value = ""
    token.save()
    saveConfigFile()
def showIdToEventId(show_id):
    """Map an IceTV show id onto a 16-bit event id.

    The result is wrapped into 1..0xFFF7, avoiding the reserved values
    0 and 0xFFF8-0xFFFF.
    """
    wrapped = int(show_id) % 0xFFF7
    return wrapped + 1
class Request(object):
    """Base HTTP request to the IceTV API.

    Holds the common query parameters (api_key, application_version), JSON
    headers and the target URL.  send() performs the request via the
    `requests` library and logs according to the module-wide _debug_level.
    """

    def __init__(self, resource):
        super(Request, self).__init__()
        self.params = {
            # Application key identifying this client to the IceTV service.
            "api_key": "9019fa88-bd0c-4b1b-94ac-6761aa6a664f",
            "application_version": _version_string,
        }
        self.headers = {
            "Content-Type": "application/json",
            "Accept": "application/json",
            "User-Agent": "SystemPlugins.IceTV/%s (%s; %s)" % (_version_string, getMachineBrand(), getMachineName()),
        }
        self.url = _protocol + _server + resource
        self.data = {}       # request body, JSON-encoded by send()
        self.response = None # last requests.Response, kept for inspection

    def _shorten(self, text):
        # Truncate long bodies for debug logging (keeps head and tail).
        if len(text) < 4000:
            return text
        return text[:2000] + "\n...\n" + text[-2000:]

    def send(self, method):
        """Perform the HTTP request; log, store and return the Response.

        On HTTP 401 the stored credentials are cleared; any non-2xx status
        raises requests.HTTPError via raise_for_status().
        """
        data = json.dumps(self.data)
        # verify=False: TLS certificate checking disabled (plain http anyway).
        r = requests.request(method, self.url, params=self.params, headers=self.headers, data=data, verify=False)
        err = not r.ok
        # Errors are always logged in full; otherwise _debug_level decides.
        if err or _debug_level > 0:
            print "[IceTV]", r.request.method, r.request.url
        if err or _debug_level > 1:
            print "[IceTV] headers", r.request.headers
        if err or _debug_level == 3:
            print "[IceTV]", self._shorten(r.request.body)
        elif err or _debug_level > 3:
            print "[IceTV]", r.request.body
        if err or _debug_level > 0:
            print "[IceTV]", r.status_code, r.reason
        if err or _debug_level > 1:
            print "[IceTV] headers", r.headers
        if err or _debug_level == 3:
            print "[IceTV]", self._shorten(r.text)
        elif err or _debug_level > 3:
            print "[IceTV]", r.text
        self.response = r
        if r.status_code == 401:
            # Session token rejected: forget it so the user can re-login.
            clearCredentials()
        r.raise_for_status()
        return r
class AuthRequest(Request):
    """Request that adds the stored email/token credentials as query params."""

    def __init__(self, resource):
        super(AuthRequest, self).__init__(resource)
        self.params.update(getCredentials())
class Regions(Request):
    """GET /regions -- list all IceTV regions (no authentication needed)."""

    def __init__(self):
        super(Regions, self).__init__("/regions")

    def get(self):
        return self.send("get")
class Region(Request):
    """GET /regions/<id> -- details of a single region."""

    def __init__(self, region):
        super(Region, self).__init__("/regions/" + str(int(region)))

    def get(self):
        return self.send("get")
class Channels(Request):
    """GET /regions/channels or /regions/<id>/channels.

    Without a region id the channel list for all regions is requested.
    """

    def __init__(self, region=None):
        if region is None:
            super(Channels, self).__init__("/regions/channels")
        else:
            super(Channels, self).__init__("/regions/" + str(int(region)) + "/channels")

    def get(self):
        return self.send("get")
class Login(Request):
    """POST/PUT /login -- authenticate this member and register the device.

    Not an AuthRequest: credentials are supplied in the request body, and
    send() persists the member/device details returned by the server into
    the plugin configuration.
    """

    def __init__(self, email, password, region=None):
        super(Login, self).__init__("/login")
        # The device is identified by the MAC address of eth0.
        self.data["device"] = {
            "uid": getMacAddress('eth0'),
            "label": config.plugins.icetv.device.label.value,
            "type_id": config.plugins.icetv.device.type_id.value,
        }
        self.data["member"] = {
            "email_address": email,
            "password": password,
        }
        if region:
            self.data["member"]["region_id"] = region

    def post(self):
        return self.send("post")

    def put(self):
        return self.send("put")

    def send(self, method):
        """Send the request, then store the returned credentials/ids."""
        r = super(Login, self).send(method)
        result = r.json()
        config.plugins.icetv.member.email_address.value = result["member"]["email_address"]
        config.plugins.icetv.member.token.value = result["member"]["token"]
        config.plugins.icetv.member.id.value = result["member"]["id"]
        config.plugins.icetv.member.region_id.value = result["member"]["region_id"]
        config.plugins.icetv.device.id.value = result["device"]["id"]
        config.plugins.icetv.device.label.value = result["device"]["label"]
        config.plugins.icetv.device.type_id.value = result["device"]["type_id"]
        config.plugins.icetv.save()
        saveConfigFile()
        return r
class Logout(AuthRequest):
    """DELETE /logout -- end the server session and forget credentials."""

    def __init__(self):
        super(Logout, self).__init__("/logout")

    def delete(self):
        return self.send("delete")

    def send(self, method):
        r = super(Logout, self).send(method)
        # Clear the local credentials once the server session is gone.
        clearCredentials()
        return r
class Devices(AuthRequest):
    """GET/POST /devices -- list or register devices for this account."""

    def __init__(self):
        super(Devices, self).__init__("/devices")

    def get(self):
        return self.send("get")

    def post(self):
        return self.send("post")
class Device(AuthRequest):
    """GET/PUT/DELETE /devices/<id> -- operate on one registered device."""

    def __init__(self, deviceid):
        super(Device, self).__init__("/devices/" + str(int(deviceid)))

    def get(self):
        return self.send("get")

    def put(self):
        return self.send("put")

    def delete(self):
        return self.send("delete")
class DeviceTypes(AuthRequest):
    """GET /devices/types -- list the known device types."""

    def __init__(self):
        super(DeviceTypes, self).__init__("/devices/types")

    def get(self):
        return self.send("get")
class DeviceType(AuthRequest):
    """GET /devices/types/<id> -- details for a single device type."""

    def __init__(self, deviceid):
        super(DeviceType, self).__init__("/devices/types/" + str(int(deviceid)))

    def get(self):
        return self.send("get")
class DeviceManufacturers(AuthRequest):
    """GET /devices/manufacturers -- list the known device manufacturers."""

    def __init__(self):
        super(DeviceManufacturers, self).__init__("/devices/manufacturers")

    def get(self):
        return self.send("get")
class DeviceManufacturer(AuthRequest):
    """GET /devices/manufacturers/<id> -- one manufacturer's details."""

    def __init__(self, deviceid):
        super(DeviceManufacturer, self).__init__("/devices/manufacturers/" + str(int(deviceid)))

    def get(self):
        return self.send("get")
class Shows(AuthRequest):
    """GET /shows -- EPG show data for the member's region."""

    def __init__(self):
        super(Shows, self).__init__("/shows")

    def get(self):
        return self.send("get")
class Timers(AuthRequest):
    """GET/POST/PUT /shows/timers -- list, create or bulk-update timers."""

    def __init__(self):
        super(Timers, self).__init__("/shows/timers")

    def get(self):
        return self.send("get")

    def post(self):
        return self.send("post")

    def put(self):
        return self.send("put")
class Timer(AuthRequest):
    """GET/PUT/DELETE /shows/timers/<id> -- operate on one timer.

    Unlike Device, the id is not coerced to int -- timer ids are presumably
    opaque strings; confirm before tightening.
    """

    def __init__(self, timerid):
        super(Timer, self).__init__("/shows/timers/" + str(timerid))

    def get(self):
        return self.send("get")

    def put(self):
        return self.send("put")

    def delete(self):
        return self.send("delete")
| popazerty/beyonwiz-4.1 | lib/python/Plugins/SystemPlugins/IceTV/API.py | Python | gpl-2.0 | 8,110 |
import math
from .link import Link
from nimbus.reports import report as rp
from nimbus.reports import input as inp
from nimbus.network.links.sections import circle as cir
from nimbus.network.links.sections import rectangle as rct
class Weir(Link):
    """A weir/orifice link passing flow between two nodes.

    Flow is positive from node1 to node2.  The hydraulics switch between
    orifice flow (stage above the section crown) and weir flow (stage above
    the invert), with a submergence reduction when the downstream stage is
    above the invert.  `section.rise`/`section.span` are divided by 12,
    presumably inches converted to feet -- confirm against the section
    classes; 2.0 * 32.2 is 2g with g in ft/s^2.
    """

    def __init__(self, name=None, section=None, orif_coef=None, weir_coef=None, invert=None, node1=None, node2=None):
        super(Weir, self).__init__(name, node1, node2, section)
        self.orif_coef = orif_coef  # orifice discharge coefficient
        self.weir_coef = weir_coef  # weir discharge coefficient
        self.invert = invert        # crest/invert elevation
        self.report = inp.InputReport(self)

    def _one_way_flow(self, high, low):
        """Return the flow magnitude from the *high* stage side to *low*.

        Helper for get_flow(); both flow directions use the same formulas,
        so the duplicated branches were folded into this single routine.
        """
        crown = self.invert + self.section.rise / 12.0
        center = self.invert + self.section.rise / 12.0 / 2.0
        if high > crown:  # orifice flow
            if low < self.invert:  # free discharge
                eff_head = high - center
            else:  # submerged discharge
                eff_head = high - low
            area = self.section.get_flow_area(self.section.rise)
            return self.orif_coef * area * math.sqrt(2.0 * 32.2 * eff_head)
        elif high > self.invert:  # weir flow
            eff_head = high - self.invert
            flow = self.weir_coef * self.section.span / 12.0 * pow(eff_head, 1.5)
            if low > self.invert:  # submerged weir reduction
                # NOTE(review): the reduction uses the stage *ratio*, not the
                # ratio of heads above the invert as in the usual submerged
                # weir formula -- preserved as-is, confirm intent.
                flow *= 1.0 - pow(pow(low / high, 1.5), 0.385)
            return flow
        return 0.0  # stage below invert: no flow

    def get_flow(self, stage1, stage2):
        """Return the signed flow (positive node1 -> node2) for both stages.

        Equal stages follow the node2-side branch, matching the original
        implementation (the result is 0 in that case).
        """
        if stage1 > stage2:
            return self._one_way_flow(stage1, stage2)
        flow = self._one_way_flow(stage2, stage1)
        # Avoid returning -0.0 when there is no flow.
        return -flow if flow else flow

    def get_input_strings(self):
        """Return the human-readable input summary lines for reports."""
        if self.section:
            shape_type = rp.property_to_string(self.section.__class__, '__name__')
            shape_span = rp.float_to_string(self.section.span, 3)
            shape_rise = rp.float_to_string(self.section.rise, 3)
        else:
            shape_type = 'Undefined'
            shape_span = 'Undefined'
            shape_rise = 'Undefined'
        inputs = ['Name: ' + rp.property_to_string(self, 'name'),
                  'Shape Type: ' + shape_type,
                  'Span (in): ' + shape_span,
                  'Rise (in): ' + shape_rise,
                  'Orifice Coef.: ' + rp.float_to_string(self.orif_coef, 3),
                  'Weir. Coef: ' + rp.float_to_string(self.weir_coef, 3),
                  'Invert: ' + rp.float_to_string(self.invert, 3)]
        return inputs

    def set_shape_as_rectangle(self, span, rise, horizontal=False):
        """Replace the section with a rectangle of the given dimensions."""
        self.section = rct.Rectangle(span, rise, horizontal)
        return

    def set_shape_as_circle(self, diameter, horizontal=False):
        """Replace the section with a circle of the given diameter."""
        self.section = cir.Circle(diameter, horizontal)
        return
| benjiyamin/nimbus | nimbus/network/links/weir.py | Python | gpl-2.0 | 4,270 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Common/Shared code related to the Settings dialog
# Copyright (C) 2010-2018 Filipe Coelho <falktx@falktx.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# For a full copy of the GNU General Public License see the COPYING file
# ------------------------------------------------------------------------------------------------------------
# Imports (Global)
if True:
from PyQt5.QtCore import pyqtSlot, QSettings
from PyQt5.QtWidgets import QDialog, QDialogButtonBox
else:
from PyQt4.QtCore import pyqtSlot, QSettings
from PyQt4.QtGui import QDialog, QDialogButtonBox
# ------------------------------------------------------------------------------------------------------------
# Imports (Custom Stuff)
import ui_settings_app
from shared import *
from patchcanvas_theme import *
# ------------------------------------------------------------------------------------------------------------
# Global variables
# Tab indexes
TAB_INDEX_MAIN = 0
TAB_INDEX_CANVAS = 1
TAB_INDEX_LADISH = 2
TAB_INDEX_NONE = 3
# PatchCanvas defines
CANVAS_ANTIALIASING_SMALL = 1
CANVAS_EYECANDY_SMALL = 1
# LADISH defines
LADISH_CONF_KEY_DAEMON_NOTIFY = "/org/ladish/daemon/notify"
LADISH_CONF_KEY_DAEMON_SHELL = "/org/ladish/daemon/shell"
LADISH_CONF_KEY_DAEMON_TERMINAL = "/org/ladish/daemon/terminal"
LADISH_CONF_KEY_DAEMON_STUDIO_AUTOSTART = "/org/ladish/daemon/studio_autostart"
LADISH_CONF_KEY_DAEMON_JS_SAVE_DELAY = "/org/ladish/daemon/js_save_delay"
# LADISH defaults
LADISH_CONF_KEY_DAEMON_NOTIFY_DEFAULT = True
LADISH_CONF_KEY_DAEMON_SHELL_DEFAULT = "sh"
LADISH_CONF_KEY_DAEMON_TERMINAL_DEFAULT = "x-terminal-emulator"
LADISH_CONF_KEY_DAEMON_STUDIO_AUTOSTART_DEFAULT = True
LADISH_CONF_KEY_DAEMON_JS_SAVE_DELAY_DEFAULT = 0
# Internal defaults
global SETTINGS_DEFAULT_PROJECT_FOLDER
SETTINGS_DEFAULT_PROJECT_FOLDER = HOME
# ------------------------------------------------------------------------------------------------------------
# Change internal defaults
def setDefaultProjectFolder(folder):
    """Override the module-wide default project folder (initially HOME)."""
    global SETTINGS_DEFAULT_PROJECT_FOLDER
    SETTINGS_DEFAULT_PROJECT_FOLDER = folder
# ------------------------------------------------------------------------------------------------------------
# Settings Dialog
class SettingsW(QDialog):
    """Shared Settings dialog used by catarina, catia and claudia.

    Page/widget visibility depends on *appName*; values are read from
    QSettings in loadSettings() and written back in slot_saveSettings().
    An unknown appName hides every page and skips the rest of the setup.
    """

    def __init__(self, parent, appName, hasOpenGL=False):
        QDialog.__init__(self, parent)
        self.ui = ui_settings_app.Ui_SettingsW()
        self.ui.setupUi(self)

        # -------------------------------------------------------------
        # Default settings (may be overridden per application below)

        self.fRefreshInterval = 120
        self.fAutoHideGroups = True
        self.fUseSystemTray = True
        self.fCloseToTray = False

        # -------------------------------------------------------------
        # App-specific settings and page visibility

        if appName == "catarina":
            self.fAutoHideGroups = False
            self.ui.lw_page.hideRow(TAB_INDEX_MAIN)
            self.ui.lw_page.hideRow(TAB_INDEX_LADISH)
            self.ui.lw_page.setCurrentCell(TAB_INDEX_CANVAS, 0)

        elif appName == "catia":
            self.fUseSystemTray = False
            self.ui.group_main_paths.setEnabled(False)
            self.ui.group_main_paths.setVisible(False)
            self.ui.group_tray.setEnabled(False)
            self.ui.group_tray.setVisible(False)
            self.ui.lw_page.hideRow(TAB_INDEX_LADISH)
            self.ui.lw_page.setCurrentCell(TAB_INDEX_MAIN, 0)

        elif appName == "claudia":
            self.ui.cb_jack_port_alias.setEnabled(False)
            self.ui.cb_jack_port_alias.setVisible(False)
            self.ui.label_jack_port_alias.setEnabled(False)
            self.ui.label_jack_port_alias.setVisible(False)
            self.ui.lw_page.setCurrentCell(TAB_INDEX_MAIN, 0)

        else:
            # Unknown application: hide all pages and bail out early.
            self.ui.lw_page.hideRow(TAB_INDEX_MAIN)
            self.ui.lw_page.hideRow(TAB_INDEX_CANVAS)
            self.ui.lw_page.hideRow(TAB_INDEX_LADISH)
            self.ui.stackedWidget.setCurrentIndex(TAB_INDEX_NONE)
            return

        # -------------------------------------------------------------
        # Load settings

        self.loadSettings()

        # -------------------------------------------------------------
        # Set-up GUI

        if not hasOpenGL:
            self.ui.cb_canvas_use_opengl.setChecked(False)
            self.ui.cb_canvas_use_opengl.setEnabled(False)

        self.ui.lw_page.item(0, 0).setIcon(getIcon(appName, 48))
        self.ui.label_icon_main.setPixmap(getIcon(appName, 48).pixmap(48, 48))

        # -------------------------------------------------------------
        # Set-up connections

        self.accepted.connect(self.slot_saveSettings)
        self.ui.buttonBox.button(QDialogButtonBox.Reset).clicked.connect(self.slot_resetSettings)
        self.ui.b_main_def_folder_open.clicked.connect(self.slot_getAndSetProjectPath)

    def loadSettings(self):
        """Populate the widgets from QSettings, one group per visible page."""
        settings = QSettings()

        if not self.ui.lw_page.isRowHidden(TAB_INDEX_MAIN):
            self.ui.le_main_def_folder.setText(settings.value("Main/DefaultProjectFolder", SETTINGS_DEFAULT_PROJECT_FOLDER, type=str))
            self.ui.cb_tray_enable.setChecked(settings.value("Main/UseSystemTray", self.fUseSystemTray, type=bool))
            self.ui.cb_tray_close_to.setChecked(settings.value("Main/CloseToTray", self.fCloseToTray, type=bool))
            self.ui.sb_gui_refresh.setValue(settings.value("Main/RefreshInterval", self.fRefreshInterval, type=int))
            self.ui.cb_jack_port_alias.setCurrentIndex(settings.value("Main/JackPortAlias", 2, type=int))

        # ---------------------------------------

        if not self.ui.lw_page.isRowHidden(TAB_INDEX_CANVAS):
            self.ui.cb_canvas_hide_groups.setChecked(settings.value("Canvas/AutoHideGroups", self.fAutoHideGroups, type=bool))
            self.ui.cb_canvas_bezier_lines.setChecked(settings.value("Canvas/UseBezierLines", True, type=bool))
            # Tri-state checkboxes: the stored int maps onto Qt.CheckState.
            self.ui.cb_canvas_eyecandy.setCheckState(settings.value("Canvas/EyeCandy", CANVAS_EYECANDY_SMALL, type=int))
            self.ui.cb_canvas_use_opengl.setChecked(settings.value("Canvas/UseOpenGL", False, type=bool))
            self.ui.cb_canvas_render_aa.setCheckState(settings.value("Canvas/Antialiasing", CANVAS_ANTIALIASING_SMALL, type=int))
            self.ui.cb_canvas_render_hq_aa.setChecked(settings.value("Canvas/HighQualityAntialiasing", False, type=bool))

            themeName = settings.value("Canvas/Theme", getDefaultThemeName(), type=str)

            # Fill the theme combo and select the stored theme by name.
            for i in range(Theme.THEME_MAX):
                thisThemeName = getThemeName(i)
                self.ui.cb_canvas_theme.addItem(thisThemeName)
                if thisThemeName == themeName:
                    self.ui.cb_canvas_theme.setCurrentIndex(i)

        # ---------------------------------------

        if not self.ui.lw_page.isRowHidden(TAB_INDEX_LADISH):
            self.ui.cb_ladish_notify.setChecked(settings.value(LADISH_CONF_KEY_DAEMON_NOTIFY, LADISH_CONF_KEY_DAEMON_NOTIFY_DEFAULT, type=bool))
            self.ui.le_ladish_shell.setText(settings.value(LADISH_CONF_KEY_DAEMON_SHELL, LADISH_CONF_KEY_DAEMON_SHELL_DEFAULT, type=str))
            self.ui.le_ladish_terminal.setText(settings.value(LADISH_CONF_KEY_DAEMON_TERMINAL, LADISH_CONF_KEY_DAEMON_TERMINAL_DEFAULT, type=str))
            self.ui.cb_ladish_studio_autostart.setChecked(settings.value(LADISH_CONF_KEY_DAEMON_STUDIO_AUTOSTART, LADISH_CONF_KEY_DAEMON_STUDIO_AUTOSTART_DEFAULT, type=bool))
            self.ui.sb_ladish_jsdelay.setValue(settings.value(LADISH_CONF_KEY_DAEMON_JS_SAVE_DELAY, LADISH_CONF_KEY_DAEMON_JS_SAVE_DELAY_DEFAULT, type=int))

    @pyqtSlot()
    def slot_saveSettings(self):
        """Write widget values back to QSettings (visible pages only)."""
        settings = QSettings()

        if not self.ui.lw_page.isRowHidden(TAB_INDEX_MAIN):
            settings.setValue("Main/RefreshInterval", self.ui.sb_gui_refresh.value())

            if self.ui.group_tray.isEnabled():
                settings.setValue("Main/UseSystemTray", self.ui.cb_tray_enable.isChecked())
                settings.setValue("Main/CloseToTray", self.ui.cb_tray_close_to.isChecked())

            if self.ui.group_main_paths.isEnabled():
                settings.setValue("Main/DefaultProjectFolder", self.ui.le_main_def_folder.text())

            if self.ui.cb_jack_port_alias.isEnabled():
                settings.setValue("Main/JackPortAlias", self.ui.cb_jack_port_alias.currentIndex())

        # ---------------------------------------

        if not self.ui.lw_page.isRowHidden(TAB_INDEX_CANVAS):
            settings.setValue("Canvas/Theme", self.ui.cb_canvas_theme.currentText())
            settings.setValue("Canvas/AutoHideGroups", self.ui.cb_canvas_hide_groups.isChecked())
            settings.setValue("Canvas/UseBezierLines", self.ui.cb_canvas_bezier_lines.isChecked())
            settings.setValue("Canvas/UseOpenGL", self.ui.cb_canvas_use_opengl.isChecked())
            settings.setValue("Canvas/HighQualityAntialiasing", self.ui.cb_canvas_render_hq_aa.isChecked())

            # 0, 1, 2 match their enum variants
            settings.setValue("Canvas/EyeCandy", self.ui.cb_canvas_eyecandy.checkState())
            settings.setValue("Canvas/Antialiasing", self.ui.cb_canvas_render_aa.checkState())

        # ---------------------------------------

        if not self.ui.lw_page.isRowHidden(TAB_INDEX_LADISH):
            settings.setValue(LADISH_CONF_KEY_DAEMON_NOTIFY, self.ui.cb_ladish_notify.isChecked())
            settings.setValue(LADISH_CONF_KEY_DAEMON_SHELL, self.ui.le_ladish_shell.text())
            settings.setValue(LADISH_CONF_KEY_DAEMON_TERMINAL, self.ui.le_ladish_terminal.text())
            settings.setValue(LADISH_CONF_KEY_DAEMON_STUDIO_AUTOSTART, self.ui.cb_ladish_studio_autostart.isChecked())
            settings.setValue(LADISH_CONF_KEY_DAEMON_JS_SAVE_DELAY, self.ui.sb_ladish_jsdelay.value())

    @pyqtSlot()
    def slot_resetSettings(self):
        """Reset the currently shown page's widgets to their defaults."""
        if self.ui.lw_page.currentRow() == TAB_INDEX_MAIN:
            self.ui.le_main_def_folder.setText(SETTINGS_DEFAULT_PROJECT_FOLDER)
            self.ui.cb_tray_enable.setChecked(self.fUseSystemTray)
            self.ui.cb_tray_close_to.setChecked(self.fCloseToTray)
            self.ui.sb_gui_refresh.setValue(self.fRefreshInterval)
            self.ui.cb_jack_port_alias.setCurrentIndex(2)

        elif self.ui.lw_page.currentRow() == TAB_INDEX_CANVAS:
            self.ui.cb_canvas_theme.setCurrentIndex(0)
            self.ui.cb_canvas_hide_groups.setChecked(self.fAutoHideGroups)
            self.ui.cb_canvas_bezier_lines.setChecked(True)
            # `Qt` is not among this module's explicit imports, so referring
            # to Qt.PartiallyChecked risked a NameError (unless `shared`
            # re-exports it).  Qt.PartiallyChecked has the value 1, exactly
            # what the CANVAS_*_SMALL constants hold, so use them -- this is
            # also consistent with the defaults in loadSettings().
            self.ui.cb_canvas_eyecandy.setCheckState(CANVAS_EYECANDY_SMALL)
            self.ui.cb_canvas_use_opengl.setChecked(False)
            self.ui.cb_canvas_render_aa.setCheckState(CANVAS_ANTIALIASING_SMALL)
            self.ui.cb_canvas_render_hq_aa.setChecked(False)

        elif self.ui.lw_page.currentRow() == TAB_INDEX_LADISH:
            self.ui.cb_ladish_notify.setChecked(LADISH_CONF_KEY_DAEMON_NOTIFY_DEFAULT)
            self.ui.cb_ladish_studio_autostart.setChecked(LADISH_CONF_KEY_DAEMON_STUDIO_AUTOSTART_DEFAULT)
            self.ui.le_ladish_shell.setText(LADISH_CONF_KEY_DAEMON_SHELL_DEFAULT)
            self.ui.le_ladish_terminal.setText(LADISH_CONF_KEY_DAEMON_TERMINAL_DEFAULT)

    @pyqtSlot()
    def slot_getAndSetProjectPath(self):
        """Open a folder picker and store the result in the folder line edit."""
        getAndSetPath(self, self.ui.le_main_def_folder.text(), self.ui.le_main_def_folder)

    def done(self, r):
        QDialog.done(self, r)
        self.close()
| falkTX/Cadence | src/shared_settings.py | Python | gpl-2.0 | 12,056 |
# -*- coding: utf-8 -*-
"""Functions for video streaming."""
import cStringIO
import fcntl
import os
import signal
import struct
import subprocess
import sys
import time
import types
import json
from PIL import Image
import v4l2
VIDEO_DEVICE = None
VIDEO_STREAM_PROCESS = None
VIDEO_INITIALIZED = False
VIDEO_SIZE = "-1,-1"
VIDEO_RESTART = False
VIDEO_ORIGINAL_SIZE = 0,0
def open_video_device(path="/dev/video0"):
    """Open the v4l2 loopback device at *path* for unbuffered binary writes.

    The open file object is stored in the module-global VIDEO_DEVICE and
    returned.  Raises RuntimeError when the path does not exist (the
    v4l2loopback kernel module is probably not loaded).
    """
    global VIDEO_DEVICE
    if not os.path.exists(path):
        msg = ("Cannot open video device %s, path do not exist. " % path
               + "Make sure that the v4l2loopback kernel module is loaded (modprobe v4l2loopback). "
               + "Falling back to MJPEG.")
        raise RuntimeError(msg)
    # binary, unbuffered write
    VIDEO_DEVICE = open(path, "wb", 0)
    return VIDEO_DEVICE
def initialize_video_device(pixel_format, width, height, channels):
    """Program the v4l2 loopback output format to match the incoming frames.

    Issues a VIDIOC_S_FMT ioctl on the already-opened module-global
    VIDEO_DEVICE with the given pixel format, geometry and channel count
    (bytes per pixel).  Returns True on success, raises RuntimeError on a
    non-zero ioctl result.
    """
    f = v4l2.v4l2_format()
    f.type = v4l2.V4L2_BUF_TYPE_VIDEO_OUTPUT
    f.fmt.pix.pixelformat = pixel_format
    f.fmt.pix.width = width
    f.fmt.pix.height = height
    # Progressive (non-interlaced) frames.
    f.fmt.pix.field = v4l2.V4L2_FIELD_NONE
    f.fmt.pix.bytesperline = width * channels
    f.fmt.pix.sizeimage = width * height * channels
    f.fmt.pix.colorspace = v4l2.V4L2_COLORSPACE_SRGB
    res = fcntl.ioctl(VIDEO_DEVICE, v4l2.VIDIOC_S_FMT, f)
    if res != 0:
        raise RuntimeError("Could not initialize video device: %d" % res)
    return True
def set_video_size(width=-1, height=-1):
    """Record a new requested stream size and flag the streamer for restart.

    The size is stored as the "width,height" string expected by the
    streaming subprocess; -1 means "keep the source dimension".
    """
    global VIDEO_SIZE
    global VIDEO_RESTART
    VIDEO_RESTART = True
    VIDEO_SIZE = "{0},{1}".format(width, height)
def video_size():
    """Return [width, height, scale] for the current stream size.

    NOTE(review): width and height come from splitting the VIDEO_SIZE
    string and are therefore *strings*, while scale is a float computed
    against the original width -- confirm callers expect this mix before
    changing it.
    """
    current_size = VIDEO_SIZE.split(",")
    scale = float(current_size[0]) / VIDEO_ORIGINAL_SIZE[0]
    return current_size + list((scale,))
def new_frame_received(img, width, height, *args, **kwargs):
    """
    Executed when a new image is received, (new frame received callback).

    Normalises the incoming frame (QImage, JPEG bytes or raw RGB) to raw
    pixel data, writes it to the v4l2 loopback device -- initialising the
    device format on the first frame -- and (re)starts the external
    streaming subprocess when needed.
    """
    pixel_format = v4l2.V4L2_PIX_FMT_RGB24
    channels = 3
    global VIDEO_INITIALIZED
    global VIDEO_STREAM_PROCESS
    global VIDEO_RESTART
    # Assume that we are getting a qimage if we are not getting a str,
    # to be able to handle data sent by hardware objects used in MxCuBE 2.x
    if not isinstance(img, str):
        # 4 Channels with alpha
        channels = 4
        pixel_format = v4l2.V4L2_PIX_FMT_RGB32
        rawdata = img.bits().asstring(img.numBytes())
        img = rawdata
    else:
        # Is the image on JPEG format get the RGB data otherwise assume its
        # already RGB and do nothing with the data
        if img.startswith('\xff\xd8\xff\xe0\x00\x10JFIF'):
            # jpeg image
            strbuf = cStringIO.StringIO(img)
            img = Image.open(strbuf)
            img = img.tobytes()
    if VIDEO_DEVICE:
        if not VIDEO_INITIALIZED:
            # One-time format negotiation with the loopback device.
            VIDEO_INITIALIZED = \
                initialize_video_device(pixel_format, width, height, channels)
        VIDEO_DEVICE.write(img)
        # A size change was requested: terminate the streamer's children so
        # the process exits and is restarted below with the new size.
        if VIDEO_RESTART and VIDEO_STREAM_PROCESS:
            os.system('pkill -TERM -P {pid}'.format(pid=VIDEO_STREAM_PROCESS.pid))
            VIDEO_RESTART = False
            VIDEO_STREAM_PROCESS = None
        # start the streaming process if not started or restart if terminated
        if not VIDEO_STREAM_PROCESS or VIDEO_STREAM_PROCESS.poll() is not None:
            sfpath = os.path.join(os.path.dirname(__file__), "streaming_processes.py")
            # Presumably resolves the bin/python of the interpreter owning
            # the stdlib -- TODO confirm this holds inside virtualenvs.
            python_executable = os.sep.join(os.path.dirname(os.__file__).split(os.sep)[:-2]+["bin", "python"])
            VIDEO_STREAM_PROCESS = subprocess.Popen([python_executable, sfpath, VIDEO_DEVICE.name, VIDEO_SIZE], close_fds=True)
def get_available_sizes(camera):
    """Return the list of (width, height) stream sizes offered to clients.

    The camera's native size is rounded up to even dimensions; when the
    MPEG streaming subprocess is running, half and quarter sizes are
    offered as well.  Returns an empty list when the camera cannot report
    a size (not ready / attribute missing).
    """
    try:
        w, h = camera.getWidth(), camera.getHeight()

        # Some video decoders have difficulties to decode videos with odd
        # image dimensions (JSMPEG being one of them) so we make sure that
        # the size is even.
        w = w if w % 2 == 0 else w + 1
        h = h if h % 2 == 0 else h + 1

        # Calculate half and quarter of the size if MPEG streaming is used,
        # otherwise just return the original size.  Floor division keeps
        # the values integral under both Python 2 and Python 3 (identical
        # results here since w and h are even).
        if VIDEO_STREAM_PROCESS:
            video_sizes = [(w, h), (w // 2, h // 2), (w // 4, h // 4)]
        else:
            video_sizes = [(w, h)]
    except (ValueError, AttributeError):
        video_sizes = []

    return video_sizes
def set_initial_stream_size(camera, video_device_path):
    """Record the camera's (even-rounded) native size as the stream size.

    Sets both VIDEO_ORIGINAL_SIZE and VIDEO_SIZE module globals.
    *video_device_path* is unused here; kept for signature symmetry with
    init()/tango_lima_video_plugin().
    """
    global VIDEO_SIZE
    global VIDEO_ORIGINAL_SIZE
    w, h = camera.getWidth(), camera.getHeight()
    # Round odd dimensions up: some decoders (e.g. JSMPEG) cannot handle them.
    w = w if w % 2 == 0 else w + 1
    h = h if h % 2 == 0 else h + 1
    VIDEO_ORIGINAL_SIZE = w, h
    VIDEO_SIZE = "%s,%s" % VIDEO_ORIGINAL_SIZE
def tango_lima_video_plugin(camera, video_device):
    """
    Configures video frame handling for TangoLimaVideo devices.

    Switches the device to RGB24 acquisition and monkey-patches frame
    parsing / polling / snapshot helpers onto the hardware object.

    :param HardwareObject camera: Object providing frames to encode and stream
    :param str video_device: Video loopback path
    """
    if camera.__class__.__name__ == 'TangoLimaVideo':
        # patch hardware object to set acquisition to the right mode
        # and to get the right frames out of the video device
        if camera.isReady():
            camera.setLive(False)
            camera.device.video_mode = "RGB24"
            time.sleep(0.1)
            camera.setLive(True)

        def parse_image_data(self, img_data):
            # Big-endian header preceding the raw frame bytes; layout is
            # presumably Lima's video_last_image header -- TODO confirm
            # against the Lima documentation.
            hfmt = ">IHHqiiHHHH"
            hsize = struct.calcsize(hfmt)
            _, _, img_mode, frame_number, width, height, _, _, _, _ = \
                struct.unpack(hfmt, img_data[1][:hsize])
            raw_data = img_data[1][hsize:]
            return width, height, raw_data

        def do_polling(self, sleep_time):
            # Continuously forward the latest frame via the imageReceived
            # signal.  (Dead local hfmt/hsize computations removed; parsing
            # is fully delegated to parse_image_data.)
            while True:
                width, height, raw_data = \
                    self.parse_image_data(self.device.video_last_image)
                self.emit("imageReceived", raw_data, width, height, False)
                time.sleep(sleep_time)

        def take_snapshot(self, path, bw=False):
            width, height, raw_data = \
                self.parse_image_data(self.device.video_last_image)
            img = Image.frombytes("RGB", (width, height), raw_data)
            if bw:
                # Image.convert returns a *new* image; the original code
                # discarded the result, so bw snapshots were never actually
                # converted.  Keep the converted image.
                img = img.convert("1")
            img.save(path)

        # Bind the helpers as methods on the hardware object.
        camera._do_polling = types.MethodType(do_polling, camera)
        camera.takeSnapshot = types.MethodType(take_snapshot, camera)
        camera.parse_image_data = types.MethodType(parse_image_data, camera)
def init(camera, video_device_path):
    """
    Initialize video loopback device.

    Records the initial stream size, applies the TangoLimaVideo patches if
    applicable, opens the loopback device and wires the camera's frames
    into it.  Returns the opened device file object.

    :param HardwareObject camera: Object providing frames to encode and stream
    :param str video_device_path: Video loopback path
    """
    set_initial_stream_size(camera, video_device_path)
    tango_lima_video_plugin(camera, video_device_path)
    video_device = open_video_device(video_device_path)
    # Every frame emitted by the camera is forwarded to the loopback device.
    camera.connect("imageReceived", new_frame_received)
    return video_device
| meguiraun/mxcube3 | mxcube3/video/streaming.py | Python | gpl-2.0 | 7,123 |
class Solution(object):
    def isPerfectSquare(self, num):
        """Return True when num is a perfect square, without using ** 0.5.

        Uses integer Newton iteration (x -> (x + num // x) // 2), which
        avoids the floating-point precision issues of
        `num ** 0.5 == int(num ** 0.5)` for large inputs.

        :type num: int
        :rtype: bool
        """
        # Negative numbers are never perfect squares (the original
        # `num < 2` shortcut wrongly returned True for them).
        if num < 0:
            return False
        # 0 and 1 are perfect squares.
        if num < 2:
            return True
        x = num // 2
        # Newton's method converges monotonically down to floor(sqrt(num)).
        while x * x > num:
            x = (x + num // x) // 2
        return x * x == num
| xinqiu/My-LeetCode-Notes | Leetcode/367.py | Python | gpl-2.0 | 333 |
# Xlib.ext.__init__ -- X extension modules
#
# Copyright (C) 2000 Peter Liljenberg <petli@ctrl-c.liu.se>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# __extensions__ is a list of tuples: (extname, extmod)
# extname is the name of the extension according to the X
# protocol. extmod is the name of the module in this package.
# Each entry maps the extension's X-protocol name to the submodule in this
# package that implements it.
__extensions__ = [
    ('XTEST', 'xtest'),
    ('SHAPE', 'shape'),
    ('XINERAMA', 'xinerama'),
    ('RECORD', 'record'),
    ('Composite', 'composite'),
    ('RANDR', 'randr'),
]

# The public API of this package is the submodule names only.
__all__ = [module for _protocol_name, module in __extensions__]
| lubosch/productivity-classifier-linux | Xlib/ext/__init__.py | Python | gpl-2.0 | 1,249 |
# -*- coding: utf-8 -*-
#
# Cheroke-admin
#
# Authors:
# Alvaro Lopez Ortega <alvaro@alobbs.com>
#
# Copyright (C) 2009-2010 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import CTK
URL_APPLY = '/plugin/wildcard/apply'
NOTE_WILDCARD = N_("Accepted host name. Wildcard characters (* and ?) are allowed. Eg: *example.com")
WARNING_EMPTY = N_("At least one wildcard string must be defined.")
class Content (CTK.Container):
    """Renders the wildcard-domain entry list plus an 'Add New' dialog.

    *refreshable* is the CTK.Refreshable wrapping this widget so changes
    can re-render it; *key* is the configuration branch holding the domain
    entries; *url_apply* receives submitted changes.
    """

    def __init__ (self, refreshable, key, url_apply, **kwargs):
        CTK.Container.__init__ (self, **kwargs)

        entries = CTK.cfg.keys (key)

        # Warning message (shown when no entries are configured yet)
        if not entries:
            notice = CTK.Notice('warning')
            notice += CTK.RawHTML (_(WARNING_EMPTY))
            self += notice

        # List of existing entries
        else:
            table = CTK.Table()
            submit = CTK.Submitter(url_apply)
            submit += table
            self += CTK.Indenter(submit)

            table.set_header(1)
            table += [CTK.RawHTML(_('Domain pattern'))]

            for i in entries:
                e1 = CTK.TextCfg ("%s!%s"%(key,i))
                rm = None
                # At least one entry must remain: the delete icon is only
                # offered when two or more entries exist.
                if len(entries) >= 2:
                    rm = CTK.ImageStock('del')
                    # Deleting means submitting an empty value for the key,
                    # then refreshing this widget.
                    rm.bind('click', CTK.JS.Ajax (url_apply,
                                                  data = {"%s!%s"%(key,i): ''},
                                                  complete = refreshable.JS_to_refresh()))
                table += [e1, rm]

        # Add New (modal dialog with a single host-name field)
        table = CTK.PropsTable()
        next = CTK.cfg.get_next_entry_prefix (key)
        table.Add (_('New host name'), CTK.TextCfg(next, False, {'class':'noauto'}), _(NOTE_WILDCARD))

        submit = CTK.Submitter(url_apply)
        dialog = CTK.Dialog2Buttons ({'title': _('Add new entry')}, _('Add'), submit.JS_to_submit())
        submit += table
        submit.bind ('submit_success', refreshable.JS_to_refresh())
        submit.bind ('submit_success', dialog.JS_to_close())

        dialog += submit
        self += dialog

        add_new = CTK.Button(_('Add New'))
        add_new.bind ('click', dialog.JS_to_show())
        self += add_new
class Plugin_wildcard (CTK.Plugin):
    """Virtual-server match plugin: accept host names matching wildcards."""

    def __init__ (self, key, vsrv_num):
        CTK.Plugin.__init__ (self, key)

        pre       = '%s!domain' %(key)
        url_apply = '%s/%s' %(URL_APPLY, vsrv_num)

        self += CTK.RawHTML ("<h2>%s</h2>" % (_('Accepted Domains')))

        # Content (re-rendered whenever the entries change)
        refresh = CTK.Refreshable ({'id': 'plugin_wildcard'})
        refresh.register (lambda: Content(refresh, pre, url_apply).Render())
        self += refresh

        # Validation, and Public URLs
        CTK.publish ('^%s/[\d]+$'%(URL_APPLY), CTK.cfg_apply_post, method="POST")
| mdavid/cherokee-webserver-svnclone | admin/plugins/wildcard.py | Python | gpl-2.0 | 3,364 |
import inspect
import unittest
import mock
from pulp.server.db.connection import PulpCollection
from pulp.server.db.model.criteria import Criteria
from pulp.server.managers.content.query import ContentQueryManager
from test_cud import PulpContentTests, TYPE_1_DEF, TYPE_1_UNITS, TYPE_2_DEF, TYPE_2_UNITS
class PulpContentQueryTests(PulpContentTests):
    """Tests for ContentQueryManager against two populated test unit types.

    self.cud_manager and self.query_manager are presumably provided by the
    PulpContentTests base class (see test_cud) -- confirm there.
    """

    def setUp(self):
        # Insert both fixture unit sets and remember the generated ids.
        super(PulpContentQueryTests, self).setUp()
        self.type_1_ids = []
        self.type_2_ids = []
        for unit in TYPE_1_UNITS:
            unit_id = self.cud_manager.add_content_unit(TYPE_1_DEF.id, None, unit)
            self.type_1_ids.append(unit_id)
        for unit in TYPE_2_UNITS:
            unit_id = self.cud_manager.add_content_unit(TYPE_2_DEF.id, None, unit)
            self.type_2_ids.append(unit_id)

    def test_get_content_unit_collection(self):
        # The per-type collection is a PulpCollection named "units_<type>".
        manager = ContentQueryManager()
        collection = manager.get_content_unit_collection('deb')
        self.assertTrue(isinstance(collection, PulpCollection))
        self.assertEqual(collection.name, 'units_deb')

    @mock.patch.object(ContentQueryManager, 'get_content_unit_collection')
    def test_find_by_criteria(self, mock_get_collection):
        criteria = Criteria(limit=20)
        units = self.query_manager.find_by_criteria('deb', criteria)
        # make sure it tried to get the correct collection
        mock_get_collection.assert_called_once_with('deb')
        # make sure the query call itself was correct
        mock_query = mock_get_collection.return_value.query
        self.assertEqual(mock_query.call_count, 1)
        self.assertEqual(mock_query.call_args[0][0], criteria)
        self.assertEqual(mock_query.return_value, units)

    def test_list(self):
        # Listing each type returns exactly the units inserted in setUp.
        units = self.query_manager.list_content_units(TYPE_1_DEF.id)
        self.assertEqual(len(TYPE_1_UNITS), len(units))
        units = self.query_manager.list_content_units(TYPE_2_DEF.id)
        self.assertEqual(len(TYPE_2_UNITS), len(units))

    def test_get_by_id(self):
        unit = self.query_manager.get_content_unit_by_id(TYPE_1_DEF.id, self.type_1_ids[0])
        self.assertEqual(unit['key-1'], TYPE_1_UNITS[0]['key-1'])

    def test_get_by_ids(self):
        units = self.query_manager.get_multiple_units_by_ids(TYPE_2_DEF.id, self.type_2_ids)
        self.assertEqual(len(units), len(self.type_2_ids))

    def test_key_dict(self):
        # get_content_unit_keys returns parallel (ids, key-dicts) sequences.
        unit_ids, unit_keys = self.query_manager.get_content_unit_keys(TYPE_2_DEF.id,
                                                                       [self.type_2_ids[0]])
        self.assertEqual(len(unit_keys), 1)
        unit_id = unit_ids[0]
        unit_dict = unit_keys[0]
        unit_model = TYPE_2_UNITS[0]
        self.assertEqual(unit_id, self.type_2_ids[0])
        self.assertEqual(unit_dict['key-2a'], unit_model['key-2a'])
        self.assertEqual(unit_dict['key-2b'], unit_model['key-2b'])

    def test_get_by_key_dict(self):
        # A unit's key dict round-trips back to the same unit id.
        key_dict = self.query_manager.get_content_unit_keys(
            TYPE_2_DEF.id, [self.type_2_ids[0]])[1][0]
        unit = self.query_manager.get_content_unit_by_keys_dict(TYPE_2_DEF.id, key_dict)
        self.assertEqual(unit['_id'], self.type_2_ids[0])

    def test_multi_key_dicts(self):
        ids, key_dicts = self.query_manager.get_content_unit_keys(TYPE_2_DEF.id, self.type_2_ids)
        units = list(self.query_manager.get_multiple_units_by_keys_dicts(TYPE_2_DEF.id, key_dicts))
        self.assertEqual(len(units), len(self.type_2_ids))

    def __test_keys_dicts_query(self):
        # XXX this test proves my multi-dict query wrong, need to fix it
        # (name-mangled so the test runner skips it; kept as documentation
        # of the known defect)
        new_unit = {'key-2a': 'B', 'key-2b': 'B'}
        self.cud_manager.add_content_unit(TYPE_2_DEF.id, None, new_unit)
        keys_dicts = TYPE_2_UNITS[1:3]
        units = self.query_manager.get_multiple_units_by_keys_dicts(TYPE_2_DEF.id, keys_dicts)
        self.assertEqual(len(units), 2)
@mock.patch('pulp.plugins.types.database.type_units_unit_key', return_value=['a'])
@mock.patch('pulp.plugins.types.database.type_units_collection')
class TestGetContentUnitIDs(unittest.TestCase):
    """Tests for ContentQueryManager.get_content_unit_ids().

    The class-level patches apply to every test method; mock arguments
    arrive bottom-up, so the first mock parameter is the collection patch
    and the second is the unit-key patch.
    """

    def setUp(self):
        super(TestGetContentUnitIDs, self).setUp()
        self.manager = ContentQueryManager()

    def test_returns_generator(self, mock_type_collection, mock_type_unit_key):
        # The method must be lazy: even an empty find() yields a generator.
        mock_type_collection.return_value.find.return_value = []
        ret = self.manager.get_content_unit_ids('fake_type', [])
        self.assertTrue(inspect.isgenerator(ret))

    def test_returns_ids(self, mock_type_collection, mock_type_unit_key):
        # Only the '_id' values of the matched documents are yielded.
        mock_type_collection.return_value.find.return_value = [{'_id': 'abc'}, {'_id': 'def'}]
        ret = self.manager.get_content_unit_ids('fake_type', [{'a': 'foo'}, {'a': 'bar'}])
        self.assertEqual(list(ret), ['abc', 'def'])

    def test_calls_find(self, mock_type_collection, mock_type_unit_key):
        mock_find = mock_type_collection.return_value.find
        mock_find.return_value = [{'_id': 'abc'}, {'_id': 'def'}]
        ret = self.manager.get_content_unit_ids('fake_type', [{'a': 'foo'}, {'a': 'bar'}])
        # evaluate the generator so the code actually runs
        list(ret)
        # the unit keys are OR'd into a single spec, fetching ids only
        expected_spec = {'$or': ({'a': 'foo'}, {'a': 'bar'})}
        mock_find.assert_called_once_with(expected_spec, fields=['_id'])
| mhrivnak/pulp | server/test/unit/server/managers/content/test_query.py | Python | gpl-2.0 | 5,319 |
import Orange
import logging
import random
from discretization import *
from FeatureSelector import *
from utils import *
from sklearn import svm
from sklearn import cross_validation
from sklearn.metrics import f1_score, precision_recall_fscore_support
from sklearn.feature_extraction import DictVectorizer
import numpy as np
# Vars
testsetPercentage = .2        # fraction of the data held out as the final test set
validationsetPercentage = .3  # fraction of the working set used for validation
progress = False              # True: resume from previously saved intermediate files
baseline = .9496              # reference score to beat -- source unclear, TODO confirm

# Utilities
logging.basicConfig(filename='main.log',level=logging.DEBUG,format='%(levelname)s\t%(message)s')
def logmessage(message, color):
    """Print *message* wrapped by the *color* formatting function and log it at INFO level."""
    print color(message)
    logging.info(message)
def copyDataset(dataset):
    """Return a copy of *dataset* as a new Orange data Table."""
    return Orange.data.Table(dataset)
# Compute S Threshold
# =============================================================================
boxmessage("Start", warning)
# load the full labelled dataset and seed its random generator
data = Orange.data.Table("dataset.tab")
data.randomGenerator = Orange.orange.RandomGenerator(random.randint(0, 10))
logmessage("Main Dataset Loaded", success)
# =============================================================================
# Extracts Test Set
boxmessage("Extracting Test Set and Working Set", info)
testSet = None
workingSet = None
if progress:
    # resume: reuse the split written by a previous run, if the files exist
    try:
        with open("finaltestset.tab"):
            logmessage("Final Test Set found", info)
        with open("trainingset.tab"):
            logmessage("Working Set found", info)
        testSet = Orange.data.Table("finaltestset.tab")
        workingSet = Orange.data.Table("trainingset.tab")
    except IOError:
        logmessage("IOError in loading final and working sets", error)
        pass
else:
    # fresh split: testsetPercentage of the data becomes the final test set
    selection = Orange.orange.MakeRandomIndices2(data, testsetPercentage)
    testSet = data.select(selection, 0)
    testSet.save("finaltestset.tab")
    workingSet = data.select(selection, 1)
    workingSet.save("workingset.tab")
print success("Extraction performed")
print info("Test Instances: %s" % len(testSet))
print info("Training + Validation Instances: %s" % len(workingSet))
# =============================================================================
# Starts Iterations
K = 1
S = 0
C = 0
boxmessage("Starting main Loop", info)
#while(performanceIncrease):
# Split
if not progress:
    # NOTE(review): info(...) only builds a colored string; the result is
    # discarded here (no print/logmessage), so this message is never shown
    info("Splitting Working Dataset for training and validation (70-30)")
    selection = Orange.orange.MakeRandomIndices2(workingSet, validationsetPercentage)
    validationSet = workingSet.select(selection, 0)
    trainingSet = workingSet.select(selection, 1)
    trainingSet.save("trainingset.tab")
    validationSet.save("validationset.tab")
else:
    validationSet = Orange.data.Table("validationset.tab")
    trainingSet = Orange.data.Table("trainingset.tab")
# Discretization
ds = Discretizer(trainingSet, K, logging)
if progress:
    try:
        with open("discretizer.K.gains"):
            print info("Loading Previous Iteration")
            ds.load()
    except IOError:
        logmessage("IOError in loading found gains", error)
        pass
else:
    ds.findThresholds()
if progress:
    try:
        with open("discretized.tab"):
            trainingSet = Orange.data.Table("discretized.tab")
            print info("Discretized Dataset Loaded")
    except IOError:
        logmessage("IOError in loading discretized training dataset", error)
else:
    trainingSet = ds.discretizeDataset(trainingSet)
    trainingSet.save("discretized.tab")
# ============================================================================ #
# Feature Selection
fs = FeatureSelector()
if progress:
    try:
        with open("featureselected.tab"):
            trainingSet = Orange.data.Table("featureselected.tab")
            print info("Features Selected Dataset Loaded")
    except IOError:
        fs.computeThreshold(trainingSet)
        fs.save()
        trainingSet = fs.select(trainingSet)
        trainingSet.save("featureselected.tab")
print info("New training dataset is %s" %len(trainingSet))
print info("New training dataset features are %s" % len(trainingSet[0]))
# Model Training
# Convert Train Dataset
# Apply transformation, from labels to you know what I mean
converted_train_data = ([[ d[f].value for f in trainingSet.domain if f != trainingSet.domain.class_var] for d in trainingSet])
converted_train_data = [dict(enumerate(d)) for d in converted_train_data]
vector = DictVectorizer(sparse=False)
converted_train_data = vector.fit_transform(converted_train_data)
# binary targets: 0 for the 'ALL' class, 1 for everything else ('AML')
converted_train_targets = ([ 0 if d[trainingSet.domain.class_var].value == 'ALL' else 1 for d in trainingSet ])
clf = svm.SVC(kernel='linear')
clf.fit(converted_train_data, converted_train_targets)
logmessage("Model learnt", success)
# Performances
# Apply Discretization and feature selection to validation set
validationSet = ds.discretizeDataset(validationSet)
validationSet = fs.select(validationSet)
logmessage("Validation set length is %s" % len(validationSet), info)
logmessage("Validation feature length is %s" % len(validationSet[0]), info)
# Convert Test Dataset
converted_test_data = ([[ d[f].value for f in validationSet.domain if f != validationSet.domain.class_var] for d in validationSet])
converted_test_data = [dict(enumerate(d)) for d in converted_test_data]
converted_test_data = vector.fit_transform(converted_test_data)
converted_test_targets = ([0 if d[validationSet.domain.class_var].value == 'ALL' else 1 for d in validationSet ])
logmessage("Starting Prediction Task", info)
prediction = clf.predict(converted_test_data)
p, r, f1, support = precision_recall_fscore_support(converted_test_targets, prediction)
f1_avg = np.average(f1)
logmessage("Average F1(Over 2 classes): %s" % f1_avg, info)
if f1_avg > baseline:
    logmessage("Performance Increased", success)
    logmessage("Using K: %s, S: %s, C: default" % (ds.K, fs.threshold), info)
else:
    logmessage("Performance Decreased", error)
# =============================================================================
# Final Test
| Sh1n/AML-ALL-classifier | main.py | Python | gpl-2.0 | 5,647 |
#!/usr/bin/env python
# class to convert/process modis data
#
# (c) Copyright Ingmar Nitze 2013
# Authors: Ingmar Nitze, Luca Delucchi
# Email: initze at ucc dot ie
# Email: luca dot delucchi at iasma dot it
#
##################################################################
#
# This MODIS Python class is licensed under the terms of GNU GPL 2.
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
##################################################################
"""A class for the extraction and transformation of MODIS quality layers to
specific information
Classes:
* :class:`QualityModis`
"""
from __future__ import print_function
import os
import numpy as np
try:
    import osgeo.gdal as gdal
    import osgeo.gdal_array as gdal_array
except ImportError:
    try:
        import gdal
        import gdal_array
    except ImportError:
        # BUGFIX: the original raised a plain string, which is a TypeError
        # at runtime on any modern Python (string exceptions were removed);
        # raise a real ImportError so callers get a meaningful exception.
        raise ImportError('Python GDAL library not found, please install python-gdal')
# Quality types valid for each product group ('13' = vegetation indices,
# '11' = land surface temperature).
# BUGFIX: these were built with map(str, range(...)); under Python 3 (this
# module imports print_function, so Py3 is intended) map() returns a one-shot
# iterator, and repeated membership tests such as
# ``self.qType in VALIDTYPES[group]`` silently fail after the first use.
# Real lists behave identically on both Python versions.
VALIDTYPES = {'13': [str(n) for n in range(1, 10)],
              '11': [str(n) for n in range(1, 6)]}
# Per-product: ([subdataset indices of the QA layers], [their QA group names])
PRODUCTPROPS = {'MOD13Q1': ([2], ['QAGrp1']),
                'MYD13Q1': ([2], ['QAGrp1']),
                'MOD13A1': ([2], ['QAGrp1']),
                'MYD13A1': ([2], ['QAGrp1']),
                'MOD13A2': ([2], ['QAGrp1']),
                'MYD13A2': ([2], ['QAGrp1']),
                'MOD13A3': ([2], ['QAGrp1']),
                'MYD13A3': ([2], ['QAGrp1']),
                'MOD13C1': ([2], ['QAGrp1']),
                'MYD13C1': ([2], ['QAGrp1']),
                'MOD13C2': ([2], ['QAGrp1']),
                'MYD13C2': ([2], ['QAGrp1']),
                'MOD11A1': ([1, 5], ['QAGrp2', 'QAGrp2']),
                'MYD11A1': ([1, 5], ['QAGrp2', 'QAGrp2']),
                'MOD11A2': ([1, 5], ['QAGrp4', 'QAGrp4']),
                'MYD11A2': ([1, 5], ['QAGrp4', 'QAGrp4']),
                'MOD11B1': ([1, 5, -2], ['QAGrp2', 'QAGrp2', 'QAGrp3']),
                'MYD11B1': ([1, 5, -2], ['QAGrp2', 'QAGrp2', 'QAGrp3']),
                'MOD11C1': ([1, 5, -2], ['QAGrp2', 'QAGrp2', 'QAGrp3']),
                'MYD11C1': ([1, 5, -2], ['QAGrp2', 'QAGrp2', 'QAGrp3']),
                'MOD11C2': ([1, 6], ['QAGrp2', 'QAGrp2']),
                'MYD11C2': ([1, 6], ['QAGrp2', 'QAGrp2']),
                'MOD11C3': ([1, 6], ['QAGrp2', 'QAGrp2']),
                'MYD11C3': ([1, 6], ['QAGrp2', 'QAGrp2'])}
# Per QA group: (bit width of the QA word, [[start, end] slice indices into
# the binary representation for each quality type, counted from the right])
QAindices = {'QAGrp1': (16, [[-2, None], [-6, -2], [-8, -6], [-9, -8],
                             [-10, -9], [-11, -10], [-14, -11], [-15, -14],
                             [-16, -15]]),
             'QAGrp2': (7, [[-2, None], [-3, -2], [-4, -3], [-6, -4],
                            [-8, -6]]),
             'QAGrp3': (7, [[-3, None], [-6, -3], [-7, -6]]),
             'QAGrp4': (8, [[-2, None], [-4, -2], [-6, -4], [-8, -6]])}
class QualityModis():
    """A Class for the extraction and transformation of MODIS
    quality layers to specific information

    :param str infile: the full path to the hdf file
    :param str outfile: the full path to the parameter file
    :param str qType: quality information type to extract (see VALIDTYPES)
    :param str qLayer: 1-based index of the QA layer within the product
    :param str pType: optional product short name overriding the hdf metadata
    """
    def __init__(self, infile, outfile, qType=None, qLayer=None, pType=None):
        """Function to initialize the object"""
        self.infile = infile
        self.outfile = outfile
        self.qType = qType
        self.qLayer = qLayer
        self.qaGroup = None
        self.pType = pType
    def loadData(self):
        """loads the input file to the object"""
        # BUGFIX: the original called os.path.isfile() and discarded the
        # result; fail early with a clear message instead of letting
        # gdal.Open() return None and crash later.
        if not os.path.isfile(self.infile):
            raise IOError('Input file not found: %s' % self.infile)
        self.ds = gdal.Open(self.infile)
    def setProductType(self):
        """read productType from Metadata of hdf file"""
        if self.pType is None:  # was '== None' (PEP 8: identity test for None)
            self.productType = self.ds.GetMetadata()['SHORTNAME']
        else:
            self.productType = self.pType
    def setProductGroup(self):
        """read productGroup from Metadata of hdf file"""
        # characters 3-4 of the short name, e.g. 'MOD13Q1' -> '13'
        self.productGroup = self.productType[3:5]
    def setQAGroup(self):
        """set QA dataset group type"""
        if self.productType in PRODUCTPROPS.keys():
            self.qaGroup = PRODUCTPROPS[self.productType][1][int(self.qLayer)-1]
        else:
            print("Product version is currently not supported!")
    def setQALayer(self):
        """function sets the input path of the designated QA layer"""
        self.qaLayer = self.ds.GetSubDatasets()[PRODUCTPROPS[self.productType][0][int(self.qLayer)-1]][0]
    def loadQAArray(self):
        """loads the QA layer to the object"""
        self.qaArray = gdal_array.LoadFile(self.qaLayer)
    def qualityConvert(self, modisQaValue):
        """converts encoded Bit-Field values to designated QA information"""
        # slice the fixed-width binary representation with the per-group,
        # per-type [start, end] indices (counted from the right)
        startindex = QAindices[self.qaGroup][1][int(self.qType)-1][0]
        endindex = QAindices[self.qaGroup][1][int(self.qType)-1][1]
        return int(np.binary_repr(modisQaValue, QAindices[self.qaGroup][0])[startindex: endindex], 2)
    def exportData(self):
        """writes calculated QA values to physical .tif file"""
        qaDS = gdal.Open(self.qaLayer)
        dr = gdal.GetDriverByName('GTiff')
        outds = dr.Create(self.outfile, self.ncols, self.nrows, 1, gdal.GDT_Byte)
        outds.SetProjection(qaDS.GetProjection())
        outds.SetGeoTransform(qaDS.GetGeoTransform())
        outds.GetRasterBand(1).WriteArray(self.qaOut)
        # de-reference the datasets so GDAL flushes and closes the files
        outds = None
        qaDS = None
    def run(self):
        """Function defines the entire process"""
        self.loadData()
        self.setProductType()
        self.setProductGroup()
        #self.setDSversion()
        self.setQAGroup()
        self.setQALayer()
        self.loadQAArray()
        self.nrows, self.ncols = self.qaArray.shape
        print("Conversion started !")
        self.qaOut = np.zeros_like(self.qaArray, dtype=np.int8)
        if self.productGroup in ['11', '13'] and self.qType in VALIDTYPES[self.productGroup] and self.qaGroup is not None:
            # convert each distinct QA value once and broadcast the result
            for val in np.unique(self.qaArray):
                ind = np.where(self.qaArray == val)
                self.qaOut[ind] = self.qualityConvert(self.qaArray[ind][0])
            self.exportData()
            print("Export finished!")
        else:
            print("This MODIS type is currently not supported.")
| gilliM/MFQ | ModisFromQgis/mypymodis/qualitymodis.py | Python | gpl-2.0 | 6,568 |
"""
Django settings for dfiid project.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
from django.core.exceptions import ImproperlyConfigured
def get_env(setting):
    """Return the value of the *setting* environment variable.

    Raises Django's ImproperlyConfigured when the variable is not set, so
    missing deployment configuration fails loudly at startup.
    """
    if setting not in os.environ:
        raise ImproperlyConfigured('Set the %s env variable' % setting)
    return os.environ[setting]
SECRET_KEY = get_env('SECRET_KEY')
# WARNING: debug mode is hard-coded on and ALLOWED_HOSTS accepts any host;
# both must be overridden for any production deployment.
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['*']
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.sitemaps',
    'nocaptcha_recaptcha',
    'core',
    'user',
    'content',
    'notify',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'dfiid.urls'
WSGI_APPLICATION = 'dfiid.wsgi.application'
# all database credentials come from the environment (see get_env above)
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': get_env('DB_NAME'),
        'USER': get_env('DB_USER'),
        'PASSWORD': get_env('DB_PASSWORD'),
        'HOST': get_env('DB_HOST'),
        'PORT': get_env('DB_PORT'),
    }
}
LANGUAGE_CODE = get_env('LANGUAGE')
TIME_ZONE = 'Atlantic/Canary'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# static files are served under /s/, user uploads under /m/
STATIC_URL = '/s/'
STATICFILES_DIRS = ( os.path.join(BASE_DIR, 'static'), )
STATIC_ROOT = os.path.join(BASE_DIR, 's')
MEDIA_URL = '/m/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'm')
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
# custom user model and auth redirect targets
AUTH_USER_MODEL = 'user.User'
LOGIN_URL = '/login'
LOGIN_REDIRECT_URL = '/'
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
NORECAPTCHA_SITE_KEY = get_env('NORECAPTCHA_SITE_KEY')
NORECAPTCHA_SECRET_KEY = get_env('NORECAPTCHA_SECRET_KEY')
| ellipticaldoor/dfiid | project/dfiid/settings/base.py | Python | gpl-2.0 | 2,415 |
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
#
# Copyright (C) 2021 Philipp Wolfer
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import os
import os.path
from PyQt5.QtCore import (
QCoreApplication,
QStandardPaths,
)
from picard import (
PICARD_APP_NAME,
PICARD_ORG_NAME,
)
# Ensure the application is properly configured for the paths to work:
# QStandardPaths derives its per-application directories from the
# application/organization names registered with Qt.
QCoreApplication.setApplicationName(PICARD_APP_NAME)
QCoreApplication.setOrganizationName(PICARD_ORG_NAME)
def config_folder():
    """Return the configuration directory, honouring $PICARD_CONFIG_DIR."""
    default = QStandardPaths.writableLocation(QStandardPaths.AppConfigLocation)
    return os.path.normpath(os.environ.get('PICARD_CONFIG_DIR', default))
def cache_folder():
    """Return the cache directory, honouring $PICARD_CACHE_DIR."""
    default = QStandardPaths.writableLocation(QStandardPaths.CacheLocation)
    return os.path.normpath(os.environ.get('PICARD_CACHE_DIR', default))
def plugin_folder():
    """Return the plugin directory, honouring $PICARD_PLUGIN_DIR."""
    # FIXME: This really should be in QStandardPaths.AppDataLocation instead,
    # but this is a breaking change that requires data migration
    default = os.path.join(config_folder(), 'plugins')
    return os.path.normpath(os.environ.get('PICARD_PLUGIN_DIR', default))
| zas/picard | picard/const/appdirs.py | Python | gpl-2.0 | 1,741 |
# -*- coding: utf-8 -*-
'''
Template Add-on
Copyright (C) 2016 Demo
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib2, urllib, xbmcgui, xbmcplugin, xbmcaddon, xbmc, re, sys, os
try:
import json
except:
import simplejson as json
import yt
ADDON_NAME = 'Carrera'
addon_id = 'plugin.video.Carrera'
# remote base hosting the menu/listing .php files
Base_Url = 'http://herovision.x10host.com/carrera/'
Main_Menu_File_Name = 'main.php'
# file-name stems probed (as <stem>.php) by Search()
search_filenames = ['sv1','teh','s5']
########################################################################################
### FAVOURITES SECTION IS NOT THIS AUTHORS CODE, I COULD NOT GET IT TO REMOVE FAVOURITES SO ALL CREDIT DUE TO THEM, SORRY IM NOT SURE WHERE IT CAME FROM BUT GOOD WORK :) ###
ADDON = xbmcaddon.Addon(id=addon_id)
# NOTE(review): no trailing '/' after the addon id, so ICON/FANART are
# concatenated directly onto the folder name -- confirm the paths resolve
ADDON_PATH = xbmc.translatePath('special://home/addons/'+addon_id)
ICON = ADDON_PATH + 'icon.png'
FANART = ADDON_PATH + 'fanart.jpg'
PATH = 'Carrera'
VERSION = '0.0.1'
Dialog = xbmcgui.Dialog()
addon_data = xbmc.translatePath('special://home/userdata/addon_data/'+addon_id+'/')
favorites = os.path.join(addon_data, 'favorites.txt')
watched = addon_data + 'watched.txt'
source_file = Base_Url + 'source_file.php'
debug = ADDON.getSetting('debug')
# first-run bootstrap: make sure the data folder, watched list and
# favourites list exist before any handler touches them
if os.path.exists(addon_data)==False:
    os.makedirs(addon_data)
if not os.path.exists(watched):
    open(watched,'w+')
if os.path.exists(favorites)==True:
    FAV = open(favorites).read()
else: FAV = []
watched_read = open(watched).read()
def Main_Menu():
    """Build the top-level directory from the remote main.php listing.

    Each matched entry carries url, icon, description, fanart and name;
    the name/url decide whether it becomes the favourites view (mode 6),
    a submenu (mode 1), search (mode 3), the online-lists index (mode 10)
    or a directly playable item (mode 2).
    """
    OPEN = Open_Url(Base_Url+Main_Menu_File_Name)
    Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
    for url,icon,desc,fanart,name in Regex:
        if name == '[COLORskyblue]F[COLORblue]avourites[/COLOR]':
            Menu(name,url,6,icon,fanart,desc)
        elif 'php' in url:
            Menu(name,url,1,icon,fanart,desc)
        elif name == '[COLORskyblue]S[COLORblue]earch[/COLOR]':
            Menu('[COLORskyblue]S[COLORblue]earch[/COLOR]',url,3,icon,fanart,desc)
        elif name == '[COLORskyblue]i[COLORblue]dex[/COLOR]':
            Menu('[COLORskyblue]O[COLORblue]nline Lists[/COLOR]',url,10,icon,fanart,desc)
        else:
            Play(name,url,2,icon,fanart,desc)
    setView('tvshows', 'Media Info 3')
def Second_Menu(url):
    """Build a sub-directory from a remote .php listing at *url*.

    Items whose url appears in the watched file get a red '*' prefix.
    NOTE(review): the watched file stores urls (written by resolve()), but
    this loop appends the decorated *name* back under the same 'item=' key,
    and ``print_text_file.close`` is missing its call parentheses, so the
    handle is never explicitly closed.
    """
    OPEN = Open_Url(url)
    Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
    for url,icon,desc,fanart,name in Regex:
        Watched = re.compile('item="(.+?)"\n').findall(str(watched_read))
        for item in Watched:
            if item == url:
                name = '[COLORred]* [/COLOR]'+(name).replace('[COLORred]* [/COLOR][COLORred]* [/COLOR]','[COLORred]* [/COLOR]')
                print_text_file = open(watched,"a")
                print_text_file.write('item="'+name+'"\n')
                print_text_file.close
        if 'php' in url:
            Menu(name,url,1,icon,fanart,desc)
        else:
            Play(name,url,2,icon,fanart,desc)
    setView('tvshows', 'Media Info 3')
def index_Menu():
    """Small static menu that only exposes the online index list (mode 10)."""
    #Menu('Favourites','',5,'','','','','')
    Menu('List of Index\'s','',10,'','','')
    # Menu('Search','',6,ICON,FANART,'','','')
    # Menu('[COLORred]Press here to add a source url[/COLOR] ','',2,'','','','','')
def Index_List():
    """List the directory indexes published in the remote source_file.php;
    each entry opens the Main_Loop directory walker (mode 8)."""
    OPEN = Open_Url(source_file)
    Regex = re.compile('url="(.+?)">name="(.+?)"').findall(OPEN)
    for url,name in Regex:
        Menu(name,url,8,'','','')
#####################################MAIN REGEX LOOP ###############################
def Main_Loop(url):
    """Walk a plain HTTP directory index at *url*.

    Sub-folders recurse via mode 8; media files get their names cleaned
    and become playable items; archives, images, subtitles, parent links
    and Apache column-sort links ('C=') are skipped.
    NOTE(review): the Menu() call passes 8 positional args, so the last two
    land in showcontext/allinfo rather than being real descriptions.
    """
    HTML = Open_Url(url)
    match = re.compile('<a href="(.+?)">(.+?)</a>').findall(HTML)
    for url2,name in match:
        url3 = url + url2
        if '..' in url3:
            pass
        elif 'rar' in url3:
            pass
        elif 'jpg' in url3:
            pass
        elif 'vtx' in url3:
            pass
        elif 'srt' in url3:
            pass
        elif 'C=' in url2:
            pass
        elif '/' in url2:
            Menu((name).replace('/',''),url3,8,ICON,FANART,'','','')
        else:
            Clean_name(name,url3)
################################### TIDY UP NAME #############################
def Clean_name(name,url3):
    """Strip release-group tags, extensions, quality markers, years and
    season/episode markers from *name*, then hand the result to Play().

    clean_name (shown in the UI) stops at step 15 and keeps episode
    markers; search_name is scrubbed further.
    BUGFIX: the final scrub step used to reassign name25 from name24,
    discarding the previous line's uppercase E01-E09 removals; it now
    continues the chain through name26.
    """
    name1 = (name).replace('S01E','S01 E').replace('(MovIran).mkv','').replace('The.Walking.Dead','').replace('.mkv','').replace('Tehmovies.com.mkv','').replace('Nightsdl','').replace('Ganool','')
    name2=(name1).replace('.',' ').replace(' (ParsFilm).mkv','').replace('_TehMovies.Com.mkv','').replace(' (SaberFun.IR).mkv','').replace('[UpFilm].mkv','').replace('(Bia2Movies)','')
    name3=(name2).replace('.mkv','').replace('.Film2Movie_INFO.mkv','').replace('.HEVC.Film2Movie_INFO.mkv','').replace('.ParsFilm.mkv ','').replace('(SaberFunIR)','')
    name4=(name3).replace('.INTERNAL.','').replace('.Film2Movie_INFO.mkv','').replace('.web-dl.Tehmovies.net.mkv','').replace('S01E06','S01 E06').replace('S01E07','S01 E07')
    name5=(name4).replace('S01E08','S01 E08').replace('S01E09','S01 E09').replace('S01E10','S01 E10').replace('.Tehmovies.net','').replace('.WEBRip.Tehmovies.com.mkv','')
    name6=(name5).replace('.mp4','').replace('.mkv','').replace('.Tehmovies.ir','').replace('x265HEVC','').replace('Film2Movie_INFO','').replace('Tehmovies.com.mkv','')
    name7=(name6).replace(' (ParsFilm)','').replace('Tehmovies.ir.mkv','').replace('.480p',' 480p').replace('.WEBrip','').replace('.web-dl','').replace('.WEB-DL','')
    name8=(name7).replace('.','').replace('.Tehmovies.com','').replace('480p.Tehmovies.net</',' 480p').replace('720p.Tehmovies.net','720p').replace('.480p',' 480p')
    name9=(name8).replace('.480p.WEB-DL',' 480p').replace('.mkv','').replace('.INTERNAL.','').replace('720p',' 720p').replace('.Tehmovi..>','').replace('.Tehmovies.net.mkv','')
    name10=(name9).replace('..720p',' 720p').replace('.REPACK.Tehmovies..>','').replace('.Tehmovies.com.mkv','').replace('.Tehmovies..>','').replace('Tehmovies.ir..>','')
    name11=(name10).replace('Tehmovies.ne..>','').replace('.HDTV.x264-mRs','').replace('...>','').replace('.Tehmovies...>','').replace('.Tehmovies.com.mp4','')
    name12=(name11).replace('.Tehmovies.com.mp4','').replace('_MovieFarsi','').replace('_MovieFar','').replace('_com','').replace('>','').replace('avi','').replace('(1)','')
    name13=(name12).replace('(2)','').replace('cd 2','').replace('cd 1','').replace('-dos-xvid','').replace('divx','').replace('Xvid','').replace('DVD','').replace('DVDrip','')
    name14=(name13).replace('DvDrip-aXXo','').replace('[','').replace(']','').replace('(','').replace(')','').replace('XviD-TLF-','').replace('CD1','').replace('CD2','')
    name15=(name14).replace('CD3','').replace('mp4','').replace('&','&').replace('HDRip','').replace('-','').replace('  ',' ').replace('xvid','').replace('1080p','')
    name16=(name15).replace('1970','').replace('1971','').replace('1972','').replace('1973','').replace('1974','').replace('1975','').replace('1976','').replace('1977','')
    name17=(name16).replace('1978','').replace('1979','').replace('1980','').replace('1981','').replace('1982','').replace('1983','').replace('1984','').replace('1985','')
    name18=(name17).replace('1986','').replace('1987','').replace('1988','').replace('1989','').replace('1990','').replace('1991','').replace('1992','').replace('1993','')
    name19=(name18).replace('1994','').replace('1995','').replace('1996','').replace('1997','').replace('1998','').replace('1999','').replace('2000','').replace('2001','')
    name20=(name19).replace('2002','').replace('2003','').replace('2004','').replace('2005','').replace('2006','').replace('2007','').replace('2008','').replace('2009','')
    name21=(name20).replace('2010','').replace('2011','').replace('2012','').replace('2013','').replace('2014','').replace('2015','').replace('2016','').replace('720p','')
    name22=(name21).replace('360p','').replace('  ',' ').replace('BluRay','').replace('rip','').replace('WEBDL','').replace('s01','').replace('s02','').replace('S02','')
    name23=(name22).replace('s03','').replace('s04','').replace('s05','').replace('s06','').replace('s07','').replace('s08','').replace('s09','').replace('S01','')
    name24=(name23).replace('S03','').replace('S04',' ').replace('S05','').replace('S06','').replace('S07','').replace('S08','').replace('S09','').replace('E01','')
    name25=(name24).replace('E02','').replace('E03','').replace('E04','').replace('E05','').replace('E06','').replace('E07','').replace('E08','').replace('E09','').replace('e01','')
    # continue from name25 (not name24) so the uppercase removals survive
    name26=(name25).replace('e02','').replace('e03','').replace('e04','').replace('e05','').replace('e06','').replace('e07','').replace('e08','').replace('e09','').replace('e01','')
    clean_name = name15
    search_name = name26
    #if ADDON.getSetting('Data')=='true':
    #	Imdb_Scrape(url3,clean_name,search_name)
    #if ADDON.getSetting('Data')=='false':
    Play(clean_name,url3,2,ICON,FANART,'','','')
def Search():
    """Prompt for a query and scan every known listing file for matches.

    Each stem in ``search_filenames`` is fetched as <stem>.php (skipped
    when Open_Url() returns the 'Opened' failure sentinel) and its entries
    are matched case-insensitively against the query; hits become submenus
    or playable items just like in Second_Menu().
    """
    Search_Name = Dialog.input('Search', type=xbmcgui.INPUT_ALPHANUM)
    Search_Title = Search_Name.lower()
    if Search_Title == '':
        pass
    else:
        for file_Name in search_filenames:
            search_URL = Base_Url + file_Name + '.php'
            OPEN = Open_Url(search_URL)
            if OPEN != 'Opened':
                Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
                for url,icon,desc,fanart,name in Regex:
                    if Search_Title in name.lower():
                        # mark already-watched items with a red '*' prefix
                        Watched = re.compile('item="(.+?)"\n').findall(str(watched_read))
                        for item in Watched:
                            if item == url:
                                name = '[COLORred]* [/COLOR]'+(name).replace('[COLORred]* [/COLOR][COLORred]* [/COLOR]','[COLORred]* [/COLOR]')
                                print_text_file = open(watched,"a")
                                print_text_file.write('item="'+name+'"\n')
                                print_text_file.close
                        if 'php' in url:
                            Menu(name,url,1,icon,fanart,desc)
                        else:
                            Play(name,url,2,icon,fanart,desc)
        setView('tvshows', 'Media Info 3')
####################################################################PROCESSES###################################################
def Open_Url(url):
    """Fetch *url* with a desktop browser User-Agent.

    Returns the response body, or the sentinel string 'Opened' when the
    request failed or returned an empty body (callers such as Search()
    compare against 'Opened' to detect failure).
    """
    req = urllib2.Request(url)
    req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
    response = ''
    link = ''
    try:
        response = urllib2.urlopen(req)
        link=response.read()
        response.close()
    # NOTE(review): bare except hides all network errors; best-effort by design
    except: pass
    if link != '':
        return link
    else:
        link = 'Opened'
        return link
def setView(content, viewType):
    """Set the directory's content type for skinning; *viewType* is
    accepted for call-site compatibility but currently unused."""
    if content:
        xbmcplugin.setContent(int(sys.argv[1]), content)
def Menu(name,url,mode,iconimage,fanart,description,showcontext=True,allinfo={}):
    """Add a folder item to the Kodi directory listing.

    When *showcontext* is truthy a favourites context-menu entry is
    attached ('fav' marks items already stored, offering removal instead).
    NOTE(review): *description* and *allinfo* are unused, allinfo has a
    mutable default, and the trailing endOfDirectory call is unreachable
    after the return -- callers finish the directory themselves.
    """
    u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
    ok=True
    liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
    liz.setInfo( type="Video", infoLabels={ "Title": name } )
    liz.setProperty( "Fanart_Image", fanart )
    if showcontext:
        contextMenu = []
        if showcontext == 'fav':
            contextMenu.append(('Remove from '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=5&name=%s)'
                %(sys.argv[0], urllib.quote_plus(name))))
        if not name in FAV:
            contextMenu.append(('Add to '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=4&name=%s&url=%s&iconimage=%s&fav_mode=%s)'
                %(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), mode)))
        liz.addContextMenuItems(contextMenu)
    ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
    return ok
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
def Play(name,url,mode,iconimage,fanart,description,showcontext=True,allinfo={}):
    """Add a playable (non-folder) item to the Kodi directory listing.

    Identical to Menu() except isFolder=False.  NOTE(review): same caveats
    apply -- unused description/allinfo, mutable default, unreachable
    endOfDirectory after the return.
    """
    u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
    ok=True
    liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
    liz.setInfo( type="Video", infoLabels={ "Title": name } )
    liz.setProperty( "Fanart_Image", fanart )
    if showcontext:
        contextMenu = []
        if showcontext == 'fav':
            contextMenu.append(('Remove from '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=5&name=%s)'
                %(sys.argv[0], urllib.quote_plus(name))))
        if not name in FAV:
            contextMenu.append(('Add to '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=4&name=%s&url=%s&iconimage=%s&fav_mode=%s)'
                %(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), mode)))
        liz.addContextMenuItems(contextMenu)
    ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=False)
    return ok
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
def GetPlayerCore():
    """Map the 'core-player' setting to a Kodi player-core constant.

    NOTE(review): getSet() is not defined anywhere in this file, so the
    lookup raises NameError, the bare except fires, and the function
    effectively always returns PLAYER_CORE_AUTO; everything after the
    first return is unreachable.
    """
    try:
        PlayerMethod=getSet("core-player")
        if (PlayerMethod=='DVDPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_DVDPLAYER
        elif (PlayerMethod=='MPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_MPLAYER
        elif (PlayerMethod=='PAPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_PAPLAYER
        else: PlayerMeth=xbmc.PLAYER_CORE_AUTO
    except: PlayerMeth=xbmc.PLAYER_CORE_AUTO
    return PlayerMeth
    return True
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
def resolve(url):
    """Record *url* in the watched file and start playback.

    NOTE(review): ``print_text_file.close`` is missing its call
    parentheses, so the handle is never explicitly closed, and the
    urlresolver import is unused.
    """
    print_text_file = open(watched,"a")
    print_text_file.write('item="'+url+'"\n')
    print_text_file.close
    play=xbmc.Player(GetPlayerCore())
    import urlresolver
    try: play.play(url)
    except: pass
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
def addon_log(string):
    """Write *string* to the Kodi log when the addon's debug setting is on."""
    # BUGFIX: the original interpolated a two-item tuple (including an
    # undefined addon_version) into a single %s placeholder, which raised
    # at the first debug log call.
    if debug == 'true':
        xbmc.log("["+ADDON_NAME+"]: %s" % string)
def addFavorite(name,url,iconimage,fanart,mode,playlist=None,regexs=None):
    """Append an entry to the favorites JSON file, creating it on first use.

    NOTE(review): the append branch stores only a 5-tuple (no
    playlist/regexs) while the create branch stores 7 items;
    getFavorites() tolerates both via try/except.
    """
    favList = []
    try:
        # seems that after
        name = name.encode('utf-8', 'ignore')
    except:
        pass
    if os.path.exists(favorites)==False:
        addon_log('Making Favorites File')
        favList.append((name,url,iconimage,fanart,mode,playlist,regexs))
        a = open(favorites, "w")
        a.write(json.dumps(favList))
        a.close()
    else:
        addon_log('Appending Favorites')
        a = open(favorites).read()
        data = json.loads(a)
        data.append((name,url,iconimage,fanart,mode))
        b = open(favorites, "w")
        b.write(json.dumps(data))
        b.close()
def getFavorites():
    """List the stored favourites, creating a placeholder file on first use.

    NOTE(review): the computed fanArt value is never passed on -- Menu()
    is called with the module-level ``fanart`` global instead; i[4] is the
    stored mode (0/'' becomes a plain folder entry).
    """
    if os.path.exists(favorites)==False:
        favList = []
        addon_log('Making Favorites File')
        favList.append(('[COLORskyblue]C[COLORblue]arrera Favourites Section[/COLOR]','','','','','',''))
        a = open(favorites, "w")
        a.write(json.dumps(favList))
        a.close()
    else:
        items = json.loads(open(favorites).read())
        total = len(items)
        for i in items:
            name = i[0]
            url = i[1]
            iconimage = i[2]
            try:
                fanArt = i[3]
                if fanArt == None:
                    raise
            except:
                if ADDON.getSetting('use_thumb') == "true":
                    fanArt = iconimage
                else:
                    fanArt = fanart
            # older 5-item entries lack playlist/regexs -- default to None
            try: playlist = i[5]
            except: playlist = None
            try: regexs = i[6]
            except: regexs = None
            if i[4] == 0:
                Menu(name,url,'',iconimage,fanart,'','fav')
            else:
                Menu(name,url,i[4],iconimage,fanart,'','fav')
def rmFavorite(name):
    """Remove the first favourite whose stored name equals *name*,
    rewrite the JSON file and refresh the Kodi container."""
    data = json.loads(open(favorites).read())
    for index in range(len(data)):
        if data[index][0]==name:
            del data[index]
            b = open(favorites, "w")
            b.write(json.dumps(data))
            b.close()
            break
    xbmc.executebuiltin("XBMC.Container.Refresh")
def get_params():
    """Parse the plugin query string in sys.argv[2] ('?a=1&b=2') into a
    dict; a (near-)empty string yields an empty list, matching the
    historical return value callers expect.  Pairs that do not split into
    exactly name=value are ignored.
    """
    query = sys.argv[2]
    if len(query) < 2:
        return []
    result = {}
    for pair in query.replace('?', '').split('&'):
        parts = pair.split('=')
        if len(parts) == 2:
            result[parts[0]] = parts[1]
    return result
# decode the per-invocation plugin parameters; each one is optional and a
# failed lookup simply leaves the default None in place
params=get_params()
url=None
name=None
iconimage=None
mode=None
fanart=None
description=None
fav_mode=None
try:
    fav_mode=int(params["fav_mode"])
except:
    pass
try:
    url=urllib.unquote_plus(params["url"])
except:
    pass
try:
    name=urllib.unquote_plus(params["name"])
except:
    pass
try:
    iconimage=urllib.unquote_plus(params["iconimage"])
except:
    pass
try:
    mode=int(params["mode"])
except:
    pass
try:
    fanart=urllib.unquote_plus(params["fanart"])
except:
    pass
try:
    description=urllib.unquote_plus(params["description"])
except:
    pass
print str(PATH)+': '+str(VERSION)
print "Mode: "+str(mode)
print "URL: "+str(url)
print "Name: "+str(name)
print "IconImage: "+str(iconimage)
#####################################################END PROCESSES##############################################################
# mode dispatch: None=main menu, 1=submenu, 2=play, 3=search,
# 4/5/6=favourites add/remove/list, 7=index menu, 8=directory walk,
# 9=source file, 10=index list
if mode == None: Main_Menu()
elif mode == 1 : Second_Menu(url)
elif mode == 2 :
    # YouTube links are reduced to their video id and played via the yt module
    if 'youtube' in url:
        url = (url).replace('https://www.youtube.com/watch?v=','').replace('http://www.youtube.com/watch?v=','')
        yt.PlayVideo(url)
    else:
        resolve(url)
elif mode == 3 : Search()
elif mode==4:
    addon_log("addFavorite")
    # trim decorated names down to the bare title before storing
    try:
        name = name.split('\\ ')[1]
    except:
        pass
    try:
        name = name.split(' - ')[0]
    except:
        pass
    addFavorite(name,url,iconimage,fanart,fav_mode)
elif mode==5:
    addon_log("rmFavorite")
    try:
        name = name.split('\\ ')[1]
    except:
        pass
    try:
        name = name.split(' - ')[0]
    except:
        pass
    rmFavorite(name)
elif mode==6:
    addon_log("getFavorites")
    getFavorites()
elif mode == 7 : index_Menu()
elif mode == 8 : Main_Loop(url)
# NOTE(review): Source_File (mode 9) is not defined anywhere in this file
elif mode == 9 : Source_File()
elif mode ==10 : Index_List()
xbmcplugin.addSortMethod(int(sys.argv[1]), 1)
xbmcplugin.endOfDirectory(int(sys.argv[1]))
| dannyperry571/theapprentice | plugin.video.Carrera/default.py | Python | gpl-2.0 | 19,611 |
from ptrace.signames import signalName
class ProcessEvent(Exception):
"""
A process event: program exit, program killed by a signal, program
received a signal, etc.
The attribute "process" contains the related process.
"""
def __init__(self, process, message):
Exception.__init__(self, message)
self.process = process
class ProcessExit(ProcessEvent):
    """
    Process exit event. One of three situations, encoded in the attributes:
     - killed by a signal (signum is a non-zero signal number)
     - exited with a code (exitcode is not None)
     - terminated abnormally (neither of the above)
    """
    def __init__(self, process, signum=None, exitcode=None):
        pid = process.pid
        # Build the message for the three cases: signal kill, abnormal
        # termination, and regular exit (zero or non-zero code).
        if signum:
            message = "Process %s killed by signal %s" % (
                pid, signalName(signum))
        elif exitcode is None:
            message = "Process %s terminated abnormally" % pid
        elif exitcode:
            message = "Process %s exited with code %s" % (pid, exitcode)
        else:
            message = "Process %s exited normally" % pid
        ProcessEvent.__init__(self, process, message)
        self.signum = signum
        self.exitcode = exitcode
class ProcessExecution(ProcessEvent):
    """
    Event emitted just after a traced process calls the exec() syscall,
    when exec() tracing is enabled.
    """
    def __init__(self, process):
        message = "Process %s execution" % process.pid
        ProcessEvent.__init__(self, process, message)
class NewProcessEvent(ProcessEvent):
    """
    Event emitted when a traced process calls the fork() syscall, when
    fork() tracing is enabled. The "process" attribute holds the new
    child process.
    """
    def __init__(self, process):
        message = "New process %s" % process.pid
        ProcessEvent.__init__(self, process, message)
| pombredanne/python-ptrace | ptrace/debugger/process_event.py | Python | gpl-2.0 | 1,864 |
#
# corecd.py
#
# Copyright (C) 2014 Fabio Erculiani
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from pyanaconda.installclass import BaseInstallClass
from pyanaconda.i18n import N_
from pyanaconda.sabayon import Entropy
class InstallClass(BaseInstallClass):
    """Anaconda install class for the Sabayon Core live CD."""

    # Identification of this install class within anaconda.
    id = "sabayon_corecd"
    name = N_("Sabayon Core")
    sortPriority = 9998
    _l10n_domain = "anaconda"
    efi_dir = "sabayon"
    dmrc = None

    def __init__(self):
        BaseInstallClass.__init__(self)

    def configure(self, anaconda):
        # Apply the base configuration, then the default partitioning
        # scheme to the storage object.
        BaseInstallClass.configure(self, anaconda)
        BaseInstallClass.setDefaultPartitioning(self, anaconda.storage)

    def getBackend(self):
        # Imported lazily to avoid pulling in the livecd machinery unless
        # this install class is actually used.
        from pyanaconda.sabayon.livecd import LiveCDCopyBackend
        return LiveCDCopyBackend
| Sabayon/anaconda | pyanaconda/installclasses/corecd.py | Python | gpl-2.0 | 1,357 |
from sofia.step import Step
class MolecularWeights(Step):
    """
    Reads in a set of molecular weights. The file of molecular weights can be obtained from
    http://emboss.sourceforge.net/.
    """

    IN = ['molecular_weight_file']
    OUT = ['molecular_weight_set']

    def run(self, molecular_weight_file):
        """Yield a mapping from residue code to its molecular weights.

        :param molecular_weight_file: single-element list holding the path
            of an EMBOSS-style molecular weight table.
        :yields: dict mapping residue code to
            ``{'avg': <average weight>, 'mono': <monoisotopic weight>}``.
        """
        weights = {}
        # Context manager guarantees the file is closed even if a malformed
        # line makes float() raise; iterating the handle avoids reading the
        # whole file into memory first.
        with open(molecular_weight_file[0], encoding='utf-8') as infile:
            for line in infile:
                line = line.strip()
                # Skip blank lines, '#' comments and the "Mol..." header row.
                if not line or line.startswith('#') or line.startswith('Mol'):
                    continue
                parts = line.split()
                weights[parts[0]] = {'avg': float(parts[1]), 'mono': float(parts[2])}
        yield weights
| childsish/sofia | templates/genomics/steps/molecular_weights.py | Python | gpl-2.0 | 742 |
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.admin import dashboard
from openstack_dashboard.openstack.common.log import policy_is
class CheckHost(horizon.Panel):
    """Admin dashboard panel for instances, restricted by policy."""
    name = _("Instance")
    slug = 'checkhost'
    img = '/static/dashboard/img/nav/checkhost1.png'

    def nav(self, context):
        # The panel is only shown in the navigation when the logged-in
        # user passes the 'appradmin' policy check.
        return policy_is(context['request'].user.username, 'appradmin')


dashboard.Admin.register(CheckHost)
| ChinaMassClouds/copenstack-server | openstack/src/horizon-2014.2/openstack_dashboard/dashboards/admin/checkhost/panel.py | Python | gpl-2.0 | 495 |
# WARNING: This script copies all the files in this repo into the root
# WARNING: tree of the RaspberryPi and saves a backup file (.bak).
# WARNING: Need to be ran with "sudo".
import os
import shutil
try:
    raw_input  # Python 2
except NameError:
    raw_input = input  # Python 3

# Walk the repository copies of /home and /etc and install each file into
# the root filesystem, keeping a .bak backup of anything overwritten.
for directory in ('home', 'etc'):
    for root, dirnames, filenames in os.walk(directory):
        if 'home/pi/apps/' in root:
            # exclude all the apps
            continue
        for f in filenames:
            # Absolute path of the file inside this repo checkout
            # (was misleadingly named 'relative_path').
            source_path = os.path.abspath(os.path.join(root, f))
            # Destination path in the root tree.
            root_path = os.path.join('/', root, f)
            print('Copying: {} to {}'.format(source_path, root_path))
            answer = raw_input('Copy this file? [Y/n]')
            # "[Y/n]" convention: empty input defaults to yes and the check
            # is case-insensitive (previously only a literal 'Y' copied).
            if answer.strip().lower() not in ('', 'y'):
                print('Skipping...')
                continue
            try:
                # Back up the current system file before overwriting it.
                shutil.move(root_path, root_path + '.bak')
            except IOError as e:
                # Best effort: e.g. the destination may not exist yet, in
                # which case there is nothing to back up.
                print('There was an error backuping this file')
                print('Exception: {}'.format(e))
            shutil.copy(source_path, root_path)

# Some specific-configuration commands
os.system('chown root:root /etc/wpa_supplicant/wpa_roam.conf')
os.system('chmod 600 /etc/wpa_supplicant/wpa_roam.conf')
| humitos/pyfispot | raspberrypi/copy_config_files.py | Python | gpl-2.0 | 1,278 |
"""
NCL User Guide Python Example: PyNGL_unstructured_ICON_triangles.py
Grid type: unstructured
Model: ICON
Info: - colored triangles
- add labelbar (colorbar)
- wkColorMap
18.02.16 meier-fleischer(at)dkrz.de
"""
import numpy as np
import math, time, sys, os
import Nio, Ngl
#-- NOTE: Python 2 script ('print' statements below; 'levels' also relies on
#-- range() returning a list)
t1 = time.time() #-- retrieve start time
#-- define variables
diri = './'
fname = 'ta_ps_850.nc' #-- data path and file name
gname = 'r2b4_amip.nc' #-- grid info file
VarName = 'ta' #-- variable name
#---Test if files exist
if(not os.path.exists(diri+fname) or not os.path.exists(diri+gname)):
   print("You do not have the necessary files to run this example, '%s' and '%s'." % (diri+fname,diri+gname))
   print("You can get the files from the NCL website at:")
   print("http://www.ncl.ucar.edu/Document/Manuals/NCL_User_Guide/Data/")
   sys.exit()
#-- open file and read variables
f = Nio.open_file(diri + fname,'r') #-- add data file
g = Nio.open_file(diri + gname,'r') #-- add grid file (not contained in data file!!!)
#-- read a timestep of 'ta'
variable = f.variables['ta'] #-- first time step, lev, ncells
data = variable[0,0,:] #-- ta [time,lev,ncells]; miss _FillValue
var = data - 273.15 #-- convert to degrees Celsius; miss _FillValue
#-- define _FillValue and missing_value if not existing
missing = -1e20
if not hasattr(var,'_FillValue'):
   var._FillValue = missing #-- set _FillValue
if not hasattr(var,'missing_value'):
   var.missing_value = missing #-- set missing_value
#-- mask cells equal to the fill value so min/max and plotting ignore them
varM = np.ma.array(var, mask=np.equal(var,missing)) #-- mask array with missing values
nummissing = np.count_nonzero(varM.mask) #-- number of missing values
#-- set data intervals, levels, labels, color indices
varMin, varMax, varInt = -32, 28, 4 #-- set data minimum, maximum, interval
levels = range(varMin,varMax,varInt) #-- set levels array
nlevs = len(levels) #-- number of levels
labels = ['{:.2f}'.format(x) for x in levels] #-- convert list of floats to list of strings
#-- print info to stdout
print ''
print 'min/max: %.2f' %np.min(varM) + ' /' + ' %.2f' %np.max(varM)
print ''
print 'varMin: %3d' %varMin
print 'varMax: %3d' %varMax
print 'varInt: %3d' %varInt
print ''
print 'missing_value: ', missing
print 'missing values: ', nummissing
#-------------------------------------------------------------------
#-- define the x-, y-values and the polygon points
#-------------------------------------------------------------------
rad2deg = 45./np.arctan(1.) #-- radians to degrees
x, y = g.variables['clon'][:], g.variables['clat'][:]
vlon, vlat = g.variables['clon_vertices'][:], g.variables['clat_vertices'][:]
x, y = x*rad2deg, y*rad2deg #-- cell center, lon, lat
vlat, vlon = vlat*rad2deg, vlon * rad2deg #-- cell latitude/longitude vertices
ncells, nv = vlon.shape #-- ncells: number of cells; nv: number of edges
#-- print information to stdout
print ''
print 'cell points: ', nv
print 'cells: ', str(ncells)
print ''
#-- wrap longitude values into the interval [-180., 180.] degrees
def rearrange(vlon):
    """Shift out-of-range longitudes by one full turn, in place.

    Values below -180 get 360 added; values above 180 get 360
    subtracted. The (modified) input array is also returned so the
    call can be used in an assignment.
    """
    vlon[vlon < -180.] += 360.
    vlon[vlon > 180.] -= 360.
    return vlon
vlon = rearrange(vlon) #-- set longitude values to -180.-180. degrees
print 'min/max vlon: ', np.min(vlon), np.max(vlon)
print 'min/max vlat: ', np.min(vlat), np.max(vlat)
print ''
#-- open a workstation for second plot: triangles plot
wkres = Ngl.Resources()
wkres.wkWidth, wkres.wkHeight = 2500, 2500
wks_type = 'png'
wks = Ngl.open_wks(wks_type,'unstructured_ICON_triangles_ngl',wkres)
#-- define colormap
colormap = Ngl.read_colormap_file('WhiteBlueGreenYellowRed')[22::12,:] #-- RGB ! [256,4] -> [20,4]
#-- select every 12th color
colormap[19,:] = [1.,1.,1.,0.] #-- white for missing values
print ''
print 'levels: ',levels
print 'labels: ',labels
print ''
print 'nlevs: %3d' %nlevs
print ''
#-- set map resources
mpres = Ngl.Resources()
mpres.nglDraw = False #-- turn off plot draw and frame advance. We will
mpres.nglFrame = False #-- do it later after adding subtitles.
mpres.mpGridAndLimbOn = False
mpres.mpGeophysicalLineThicknessF = 2.
mpres.pmTitleDisplayMode = 'Always'
mpres.tiMainString = 'PyNGL: unstructured grid ICON'
#-- create only a map
map = Ngl.map(wks,mpres)
Ngl.draw(map)
#-- assign and initialize array which will hold the color indices of the cells
#-- NOTE(review): np.int is deprecated in newer NumPy releases; fine for the
#-- NumPy vintage this example targets
gscolors = -1*(np.ones((ncells,),dtype=np.int)) #-- initialise all cell color indices to -1 (transparent)
#-- set color index of all cells in between levels
for m in xrange(0,nlevs):
   vind = [] #-- empty list for color indices
   for i in xrange(0,ncells-1):
       if (varM[i] >= levels[m] and varM[i] < levels[m+1]):
          gscolors[i] = m+1 # 1 to nlevs
          vind.append(i)
   print 'finished level %3d' % m , ' -- %5d ' % len(vind) , ' polygons considered - gscolors %3d' % (m+1)
   del vind
gscolors[varM < varMin] = 0 #-- set color index for cells less than level[0]
gscolors[varM >= varMax] = nlevs+1 #-- set color index for cells greater than levels[nlevs-1]
gscolors[np.nonzero(varM.mask)] = -1 #-- set color index for missing locations
#-- set polygon resources
pgres = Ngl.Resources()
pgres.gsEdgesOn = True #-- draw the edges
pgres.gsFillIndex = 0 #-- solid fill
pgres.gsLineColor = 'black' #-- edge line color
pgres.gsLineThicknessF = 0.7 #-- line thickness
pgres.gsColors = colormap[gscolors,:] #-- use color array
pgres.gsSegments = range(0,len(vlon[:,0])*3,3) #-- define segments array for fast draw
lon1d, lat1d = np.ravel(vlon), np.ravel(vlat) #-- convert to 1D-arrays
#-- add polygons to map
polyg = Ngl.add_polygon(wks,map,lon1d,lat1d,pgres)
#-- add a labelbar
lbres = Ngl.Resources()
lbres.vpWidthF = 0.85
lbres.vpHeightF = 0.15
lbres.lbOrientation = 'Horizontal'
lbres.lbFillPattern = 'SolidFill'
lbres.lbMonoFillPattern = 21 #-- must be 21 for color solid fill
lbres.lbMonoFillColor = False #-- use multiple colors
lbres.lbFillColors = colormap
lbres.lbLabelFontHeightF= 0.014
lbres.lbLabelAlignment = 'InteriorEdges'
lbres.lbLabelStrings = labels
lb = Ngl.labelbar_ndc(wks,nlevs+1,labels,0.1,0.24,lbres)
#-- maximize and draw the plot and advance the frame
Ngl.draw(map)
Ngl.frame(wks)
#-- get wallclock time
t2 = time.time()
print ''
print 'Wallclock time: %0.3f seconds' % (t2-t1)
print ''
Ngl.end()
| likev/ncl | ncl_ncarg_src/ni/src/examples/nug/NUG_unstructured_ICON_triangles_PyNGL.py | Python | gpl-2.0 | 7,516 |
#!/usr/bin/python
#coding:utf-8
import requests
url="http://sendcloud.sohu.com/webapi/mail.send.json"
#files={ "file1": (u"1.pdf", open(u"1.pdf", "rb")),
# "file2": (u"2.pdf", open(u"2.pdf", "rb"))}
# 不同于登录SendCloud站点的帐号,您需要登录后台创建发信子帐号,使用子帐号和密码才可以进行邮件的发送。
params = {"api_user": "bargetor_test_C9Lnuz", \
"api_key" : "va1NbZRs1VIQPk1b",\
"to" : "madgin@qq.com", \
"from" : "hello@bargetor.com", \
"fromname" : "SendCloud测试邮件", \
"subject" : "来自SendCloud的第一封邮件!", \
"html": "你太棒了!你已成功的从SendCloud发送了一封测试邮件,接下来快登录前台去完善账户信息吧!" \
}
r = requests.post(url, files={}, data=params)
print r.text
| Bargetor/chestnut | bargetor/notifiction/test.py | Python | gpl-2.0 | 811 |
# This file is part of OpenDrift.
#
# OpenDrift is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 2
#
# OpenDrift is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OpenDrift. If not, see <https://www.gnu.org/licenses/>.
#
# Copyright 2015, Knut-Frode Dagestad, MET Norway
import logging; logger = logging.getLogger(__name__)
from opendrift.models.basemodel import OpenDriftSimulation
from opendrift.elements.passivetracer import PassiveTracer
class WindBlow(OpenDriftSimulation):
    """Demonstration trajectory model based on OpenDrift framework.

    Simply advects a particle (passive tracer with no properties except
    for position) with the ambient wind.
    """

    ElementType = PassiveTracer

    # Wind components default to zero when no reader provides them.
    required_variables = {
        'x_wind': {'fallback': 0},
        'y_wind': {'fallback': 0}
    }

    max_speed = 12  # m/s

    def update(self):
        """Move all elements with the local wind vector."""
        wind_x = self.environment.x_wind
        wind_y = self.environment.y_wind
        self.update_positions(wind_x, wind_y)
| OpenDrift/opendrift | opendrift/models/windblow.py | Python | gpl-2.0 | 1,401 |
'''
|marx| offers several different source shapes. Tests in this module exercise
those sources (except ``SAOSAC``, which is heavily used in
:ref:`sect-tests.PSF` already).
'''
import shutil
import subprocess
import os
from collections import OrderedDict
from marxtest import base
# Chapter title used when these tests are rendered as documentation.
title = 'Sources in |marx|'
# Test classes (defined below) to execute, in order; RayfileSource is
# currently commented out of the run list.
tests = ['GeometricSources', 'ImageSource',
         #'RayfileSource',
         'UserSource']
class GeometricSources(base.MarxTest):
    '''This test exercises build-in |marx| sources with different geometric
    shapes.
    Most source types have parameters, and not all parameters are tested here.
    See :ref:`sect-sourcemodels` for a detailed description of source
    parameters.
    '''

    title = 'Build-in geometric sources'

    figures = OrderedDict([('ds9', {'alternative': 'Six PSFs.',
                                    'caption': '`ds9`_ image of the simulated PSFs in alphabetical order (beta distribution, disk, disk with hole, Gauss, line, and point).'})
                           ])

    # One marx simulation per source shape; each run writes its own
    # OutputDir, which step_20 later converts to a fits file of the
    # same name.
    @base.Marx
    def step_10(self):
        return [{'OutputDir': 'point'},
                {'SourceType': 'GAUSS', 'S-GaussSigma': 20,
                 'OutputDir': 'gauss'},
                {'SourceType': 'BETA', 'S-BetaCoreRadius': 10,
                 'S-BetaBeta': 0.6, 'OutputDir': 'beta'},
                {'SourceType': 'DISK',
                 'S-DiskTheta0': 0, 'S-DiskTheta1': 20,
                 'OutputDir': 'disk'},
                {'SourceType': 'DISK',
                 'S-DiskTheta0': 10, 'S-DiskTheta1': 20,
                 'OutputDir': 'diskhole'},
                {'SourceType': 'LINE', 'S-LinePhi': 45, 'S-LineTheta': 30,
                 'OutputDir': 'line'},
                ]

    # more to come for SAOSAC, RAYFILE, SIMPUT, USER
    # but first make something work here
    @base.Marx2fits
    def step_20(self):
        # Same EDSER pixel adjustment for every simulated directory.
        dirs = ['point', 'gauss', 'beta', 'disk', 'diskhole',
                'line']
        return ['--pixadj=EDSER'] * len(dirs), dirs, [d + '.fits' for d in dirs]

    @base.Ciao
    def step_30(self):
        '''ds9 images of the PSF'''
        return ['''ds9 -width 800 -height 500 -log -cmap heat *.fits -pan to 4018 4141 physical -match frame wcs -saveimage {0} -exit'''.format(self.figpath(list(self.figures.keys())[0]))]
class ImageSource(base.MarxTest):
    '''An image can be used as |marx| input. In this case, the intensity of the
    X-ray radiation on that sky is taken to be proportional to the value of the
    image at that point.
    '''

    title = 'Image as source'

    figures = OrderedDict([('ds9', {'alternative': 'The simulated events generally follow the input image, but with significant noise because of the short observation time.',
                                    'caption': '`ds9`_ shows the input image (left) and the simulated event list (right).'})
                           ])

    @base.Python
    def step_0(self):
        '''Make input image
        In this example we use python to make a simple image as input.
        We setup a 3-d box and fill it with an emitting shell. We then
        integrate along one dimension to obtain a collapsed image.
        Physically, this represents the thin shell of a supernova
        explosion.
        '''
        import numpy as np
        from astropy.wcs import WCS
        from astropy.io import fits
        # Actually to make this run faster, we'll do only one quadrant here
        cube = np.zeros((201, 201, 201))
        mg = np.mgrid[0: 201., 0:201, 0:201 ]
        d = np.sqrt(mg[0, :, :, :]**2 + mg[1, :, :, :]**2 + mg[2, :, :, :]**2)
        # Mark a thin spherical shell (radius 160-170 voxels) as emitting.
        cube[(d > 160.) & (d < 170)] = 1
        im = cube.sum(axis=0)
        # Now rotate and put the four quarters together
        image = np.zeros((401, 401))
        image[:201, :201] = np.fliplr(np.flipud(im))
        image[:201, 200:] = np.flipud(im)
        image[200:, :201] = np.fliplr(im)
        image[200:, 200:] = im
        # Create a new WCS object.
        w = WCS(naxis=2)
        w.wcs.crpix = [100., 100.]
        # Pixel size of our image shall be 1 arcsec
        w.wcs.cdelt = [1. / 3600., 1. / 3600.]
        w.wcs.ctype = ["RA---TAN", "DEC--TAN"]
        # Now, write out the WCS object as a FITS header
        header = w.to_header()
        # header is an astropy.io.fits.Header object. We can use it to create a new
        # PrimaryHDU and write it to a file.
        hdu = fits.PrimaryHDU(header=header, data=image)
        # Save to FITS file
        # NOTE(review): 'clobber' was renamed 'overwrite' in newer astropy
        # releases -- confirm the pinned astropy version still accepts it.
        hdu.writeto(os.path.join(self.basepath, 'input_image.fits'), clobber=True)

    @base.Marx
    def step_1(self):
        '''Run |marx|.
        We run a monoenergetic simulation here for the Si XIII line at 6.65 Ang.
        '''
        return {'SourceType': "IMAGE", 'S-ImageFile': 'input_image.fits',
                'MinEnergy': 1.9, 'MaxEnergy': 1.9, 'GratingType': 'NONE',
                'OutputDir': 'image'}

    @base.Marx2fits
    def step_2(self):
        return '--pixadj=EDSER', 'image', 'image.fits'

    @base.Ciao
    def step_30(self):
        '''ds9 images of the PSF'''
        return ['''ds9 -width 800 -height 500 -log -cmap heat input_image.fits image.fits -pan to 4018 4141 physical -zoom 0.5 -sleep 1 -saveimage {0} -exit'''.format(self.figpath(list(self.figures.keys())[0]))]
# NOTE(review): this class is commented out of the module-level ``tests``
# list above, so it is currently not executed.
class RayfileSource(base.MarxTest):
    '''|marx| is a Monte-Carlo code, thus the exact distribution of photons
    on the sky will be different every time the code is run. Sometimes it
    can be useful to generate a list of photons with position, time and
    energy from the source on the sky and then "observe" the exact same list
    with different instrument configurations so that any differences in the
    result are only due to the different configuration and not to random
    fluctuations in the source.
    In this example, we look at a relatively large, diffuse emission region
    with a very soft spectrum (for simplicity we are using a flat spectrum).
    We compare simulations using ACIS-S and ACIS-I. ACIS-S has a better
    response to soft photons, but some parts of the source may not be in the
    field-of-view; ACIS-I is less efficient for soft photons, but has a
    larger field-of-view.
    '''

    title = 'Using a RAYFILE source'

    figures = OrderedDict([('ds9', {'alternative': 'As described above, ACIS-S shows more photons, but ACIS-I does include more the wings of the Gaussian source distribution',
                                    'caption': '`ds9`_ shows the ACIS-I (left) and ACIS-S image (right). Both sources are generated from the same photon list. Sometimes the same pattern of photons can be seen in both images, but with a few events missing on ACIS-I due to the lower soft response.'})
                           ])

    @base.Marx
    def step_1(self):
        '''Write ray file
        '''
        # Dump the generated photon list instead of simulating a detector;
        # steps 2 and 3 replay this same list on two detectors.
        return {'SourceType': 'GAUSS', 'S-GaussSigma': 300,
                'DumpToRayFile': 'yes', 'MinEnergy': 0.3, 'MaxEnergy': 0.5}

    @base.Marx
    def step_2(self):
        '''ACIS-S'''
        return {'SourceType': 'RAYFILE', 'RayFile': 'marx.output',
                'OutputDir': 'aciss', 'DetectorType': 'ACIS-S'}

    @base.Marx
    def step_3(self):
        '''ACIS-I'''
        return {'SourceType': 'RAYFILE', 'RayFile': 'marx.output',
                'OutputDir': 'acisi', 'DetectorType': 'ACIS-I'}

    @base.Marx2fits
    def step_4(self):
        '''Turn into fits files
        We use the ``EXACT`` setting here to make the comparison simpler.
        The default EDSER (energy-dependent sub-pixel event repositioning)
        shifts photons of the same energy by a different amount for ACIS-S and
        ACIS-I, which would make it harder to compare the resulting images.
        '''
        return ['--pixadj=EXACT', '--pixadj=EXACT'], ['acisi', 'aciss'], ['i.fits', 's.fits']

    @base.Ciao
    def step_30(self):
        '''ds9 images of the PSF'''
        return ['''ds9 -width 800 -height 500 -log -cmap heat i.fits s.fits -pan to 4018 4141 physical -match frame wcs -saveimage {0} -exit'''.format(self.figpath(list(self.figures.keys())[0]))]
# Placeholder for a future SIMPUT-source test; not listed in the
# module-level ``tests`` run list.
class SimputSource(base.MarxTest):
    pass
class UserSource(base.MarxTest):
    '''Run an example for a USER source.
    |marx| comes with several examples for user written source in C.
    These can be compiled as shared objects and dynamically linked into |marx|
    at run time.
    To test this, we copy one of the source files from the installed |marx|
    version and compile it with gcc. This particular case is not very useful,
    because |marx| already has a point source with the same properties
    build-in. The purpose of this test is only to have an automatic check that
    the dynamic linking works.
    '''

    title = 'Compiling a USER source'

    figures = OrderedDict([('ds9', {'alternative': 'A point source',
                                    'caption': '`ds9`_ shows that the distribution of source is indeed a point source.'})
                           ])

    @base.Python
    def step_1(self):
        '''compile USER code
        |marx| ships with a few examples of user sources. We pick one
        of them, copy them to the right directory and compile it with gcc.
        '''
        marxpath = self.conf.get('marx', 'path')
        src = os.path.join(marxpath,
                           'share', 'doc', 'marx', 'examples', 'user-source')
        for f in ['point.c', 'user.h']:
            shutil.copy(os.path.join(src, f),
                        os.path.join(self.basepath, f))
        jdmath_h = os.path.join(marxpath, 'include')
        jdmath_a = os.path.join(marxpath, 'lib', 'libjdmath.a')
        # NOTE(review): assumes 'gcc' is on PATH; the return code of the
        # compile call is not checked, so a failed build surfaces only
        # when step_2 cannot load point.so.
        subprocess.call(['gcc', '-I' + jdmath_h, jdmath_a,
                         '-shared', 'point.c', '-o', 'point.so'])

    @base.Marx
    def step_2(self):
        '''run USER source'''
        return {'SourceType': 'USER',
                'UserSourceFile': os.path.join(self.basepath, 'point.so')}

    @base.Marx2fits
    def step_3(self):
        'turn into fits file'
        return '--pixadj=EDSER', 'point', 'point.fits'

    @base.Ciao
    def step_30(self):
        '''ds9 images of the PSF'''
        return ['''ds9 -width 800 -height 500 -log -cmap heat point.fits -pan to 4018 4141 physical -zoom 8 -saveimage {0} -exit'''.format(self.figpath(list(self.figures.keys())[0]))]
| Chandra-MARX/marx-test | tests/source.py | Python | gpl-2.0 | 10,336 |
# This file is part of Rubber and thus covered by the GPL
# (c) Emmanuel Beffara, 2002--2006
"""
LaTeX document building system for Rubber.
This module contains all the code in Rubber that actually does the job of
building a LaTeX document from start to finish.
"""
import os, os.path, sys, imp
import re
import string
from rubber import _
from rubber.util import *
from rubber.depend import Node
from rubber.version import moddir
import rubber.latex_modules
from rubber.tex import Parser, EOF, OPEN, SPACE, END_LINE
#---- Module handler ----{{{1
class Modules:
    """
    This class gathers all operations related to the management of modules.
    The modules are searched for first in the current directory, then as
    scripts in the 'modules' directory in the program's data directort, then
    as a Python module in the package `rubber.latex'.
    """
    def __init__ (self, env):
        self.env = env
        self.objects = {}   # loaded module objects, keyed by module name
        self.commands = {}  # directives delayed until their module loads
    def __getitem__ (self, name):
        """
        Return the module object of the given name.
        """
        return self.objects[name]
    def has_key (self, name):
        """
        Check if a given module is loaded.
        """
        return self.objects.has_key(name)
    # NOTE(review): mutable default argument and shadowing of the built-in
    # name 'dict'; kept as-is since callers may rely on the signature.
    def register (self, name, dict={}):
        """
        Attempt to register a module with the specified name. If the module is
        already loaded, do nothing. If it is found and not yet loaded, then
        load it, initialise it (using the context passed as optional argument)
        and run any delayed commands for it.
        """
        if self.has_key(name):
            msg.debug(_("module %s already registered") % name, pkg='latex')
            return 2
        # First look for a script
        mod = None
        for path in "", os.path.join(moddir, "modules"):
            file = os.path.join(path, name + ".rub")
            if os.path.exists(file):
                mod = ScriptModule(self.env, file)
                msg.log(_("script module %s registered") % name, pkg='latex')
                break
        # Then look for a Python module
        if not mod:
            try:
                file, path, descr = imp.find_module(name,
                        rubber.latex_modules.__path__)
                pymodule = imp.load_module(name, file, path, descr)
                file.close()
                mod = PyModule(self.env, pymodule, dict)
                msg.log(_("built-in module %s registered") % name, pkg='latex')
            except ImportError:
                msg.debug(_("no support found for %s") % name, pkg='latex')
                return 0
        # Run any delayed commands.
        if self.commands.has_key(name):
            for (cmd, args, vars) in self.commands[name]:
                msg.push_pos(vars)
                try:
                    # put the variables as they were when the directive was
                    # found
                    saved_vars = self.env.vars
                    self.env.vars = vars
                    try:
                        # call the command
                        mod.command(cmd, args)
                    finally:
                        # restore the variables to their current state
                        self.env.vars = saved_vars
                except AttributeError:
                    msg.warn(_("unknown directive '%s.%s'") % (name, cmd))
                except TypeError:
                    msg.warn(_("wrong syntax for '%s.%s'") % (name, cmd))
                msg.pop_pos()
            del self.commands[name]
        self.objects[name] = mod
        return 1
    def command (self, mod, cmd, args):
        """
        Send a command to a particular module. If this module is not loaded,
        store the command so that it will be sent when the module is register.
        """
        if self.objects.has_key(mod):
            self.objects[mod].command(cmd, args)
        else:
            # Queue the directive together with the current variable context
            # so it can be replayed verbatim in register().
            if not self.commands.has_key(mod):
                self.commands[mod] = []
            self.commands[mod].append((cmd, args, self.env.vars))
#---- Log parser ----{{{1
# Patterns matching phrases in a (La)TeX log file; the named groups feed
# the dictionaries yielded by LogCheck.parse() below.
re_loghead = re.compile("This is [0-9a-zA-Z-]*")
re_rerun = re.compile("LaTeX Warning:.*Rerun")
re_file = re.compile("(\\((?P<file>[^ \n\t(){}]*)|\\))")
re_badbox = re.compile(r"(Ov|Und)erfull \\[hv]box ")
re_line = re.compile(r"(l\.(?P<line>[0-9]+)( (?P<code>.*))?$|<\*>)")
re_cseq = re.compile(r".*(?P<seq>(\\|\.\.\.)[^ ]*) ?$")
re_macro = re.compile(r"^(?P<macro>\\.*) ->")
re_page = re.compile("\[(?P<num>[0-9]+)\]")
re_atline = re.compile(
    "( detected| in paragraph)? at lines? (?P<line>[0-9]*)(--(?P<last>[0-9]*))?")
re_reference = re.compile("LaTeX Warning: Reference `(?P<ref>.*)' \
on page (?P<page>[0-9]*) undefined on input line (?P<line>[0-9]*)\\.$")
re_label = re.compile("LaTeX Warning: (?P<text>Label .*)$")
re_warning = re.compile(
    "(LaTeX|Package)( (?P<pkg>.*))? Warning: (?P<text>.*)$")
re_online = re.compile("(; reported)? on input line (?P<line>[0-9]*)")
re_ignored = re.compile("; all text was ignored after line (?P<line>[0-9]*).$")
class LogCheck (object):
"""
This class performs all the extraction of information from the log file.
For efficiency, the instances contain the whole file as a list of strings
so that it can be read several times with no disk access.
"""
#-- Initialization {{{2
def __init__ (self):
self.lines = None
def read (self, name):
"""
Read the specified log file, checking that it was produced by the
right compiler. Returns true if the log file is invalid or does not
exist.
"""
self.lines = None
try:
file = open(name)
except IOError:
return 2
line = file.readline()
if not line:
file.close()
return 1
if not re_loghead.match(line):
file.close()
return 1
self.lines = file.readlines()
file.close()
return 0
#-- Process information {{{2
def errors (self):
"""
Returns true if there was an error during the compilation.
"""
skipping = 0
for line in self.lines:
if line.strip() == "":
skipping = 0
continue
if skipping:
continue
m = re_badbox.match(line)
if m:
skipping = 1
continue
if line[0] == "!":
# We check for the substring "pdfTeX warning" because pdfTeX
# sometimes issues warnings (like undefined references) in the
# form of errors...
if string.find(line, "pdfTeX warning") == -1:
return 1
return 0
def run_needed (self):
"""
Returns true if LaTeX indicated that another compilation is needed.
"""
for line in self.lines:
if re_rerun.match(line):
return 1
return 0
#-- Information extraction {{{2
def continued (self, line):
"""
Check if a line in the log is continued on the next line. This is
needed because TeX breaks messages at 79 characters per line. We make
this into a method because the test is slightly different in Metapost.
"""
return len(line) == 79
def parse (self, errors=0, boxes=0, refs=0, warnings=0):
"""
Parse the log file for relevant information. The named arguments are
booleans that indicate which information should be extracted:
- errors: all errors
- boxes: bad boxes
- refs: warnings about references
- warnings: all other warnings
The function returns a generator. Each generated item is a dictionary
that contains (some of) the following entries:
- kind: the kind of information ("error", "box", "ref", "warning")
- text: the text of the error or warning
- code: the piece of code that caused an error
- file, line, last, pkg: as used by Message.format_pos.
"""
if not self.lines:
return
last_file = None
pos = [last_file]
page = 1
parsing = 0 # 1 if we are parsing an error's text
skipping = 0 # 1 if we are skipping text until an empty line
something = 0 # 1 if some error was found
prefix = None # the prefix for warning messages from packages
accu = "" # accumulated text from the previous line
macro = None # the macro in which the error occurs
cseqs = {} # undefined control sequences so far
for line in self.lines:
line = line[:-1] # remove the line feed
# TeX breaks messages at 79 characters, just to make parsing
# trickier...
if not parsing and self.continued(line):
accu += line
continue
line = accu + line
accu = ""
# Text that should be skipped (from bad box messages)
if prefix is None and line == "":
skipping = 0
continue
if skipping:
continue
# Errors (including aborted compilation)
if parsing:
if error == "Undefined control sequence.":
# This is a special case in order to report which control
# sequence is undefined.
m = re_cseq.match(line)
if m:
seq = m.group("seq")
if cseqs.has_key(seq):
error = None
else:
cseqs[seq] = None
error = "Undefined control sequence %s." % m.group("seq")
m = re_macro.match(line)
if m:
macro = m.group("macro")
m = re_line.match(line)
if m:
parsing = 0
skipping = 1
pdfTeX = string.find(line, "pdfTeX warning") != -1
if error is not None and ((pdfTeX and warnings) or (errors and not pdfTeX)):
if pdfTeX:
d = {
"kind": "warning",
"pkg": "pdfTeX",
"text": error[error.find(":")+2:]
}
else:
d = {
"kind": "error",
"text": error
}
d.update( m.groupdict() )
m = re_ignored.search(error)
if m:
d["file"] = last_file
if d.has_key("code"):
del d["code"]
d.update( m.groupdict() )
elif pos[-1] is None:
d["file"] = last_file
else:
d["file"] = pos[-1]
if macro is not None:
d["macro"] = macro
macro = None
yield d
elif line[0] == "!":
error = line[2:]
elif line[0:3] == "***":
parsing = 0
skipping = 1
if errors:
yield {
"kind": "abort",
"text": error,
"why" : line[4:],
"file": last_file
}
elif line[0:15] == "Type X to quit ":
parsing = 0
skipping = 0
if errors:
yield {
"kind": "error",
"text": error,
"file": pos[-1]
}
continue
if len(line) > 0 and line[0] == "!":
error = line[2:]
parsing = 1
continue
if line == "Runaway argument?":
error = line
parsing = 1
continue
# Long warnings
if prefix is not None:
if line[:len(prefix)] == prefix:
text.append(string.strip(line[len(prefix):]))
else:
text = " ".join(text)
m = re_online.search(text)
if m:
info["line"] = m.group("line")
text = text[:m.start()] + text[m.end():]
if warnings:
info["text"] = text
d = { "kind": "warning" }
d.update( info )
yield d
prefix = None
continue
# Undefined references
m = re_reference.match(line)
if m:
if refs:
d = {
"kind": "warning",
"text": _("Reference `%s' undefined.") % m.group("ref"),
"file": pos[-1]
}
d.update( m.groupdict() )
yield d
continue
m = re_label.match(line)
if m:
if refs:
d = {
"kind": "warning",
"file": pos[-1]
}
d.update( m.groupdict() )
yield d
continue
# Other warnings
if line.find("Warning") != -1:
m = re_warning.match(line)
if m:
info = m.groupdict()
info["file"] = pos[-1]
info["page"] = page
if info["pkg"] is None:
del info["pkg"]
prefix = ""
else:
prefix = ("(%s)" % info["pkg"])
prefix = prefix.ljust(m.start("text"))
text = [info["text"]]
continue
# Bad box messages
m = re_badbox.match(line)
if m:
if boxes:
mpos = { "file": pos[-1], "page": page }
m = re_atline.search(line)
if m:
md = m.groupdict()
for key in "line", "last":
if md[key]: mpos[key] = md[key]
line = line[:m.start()]
d = {
"kind": "warning",
"text": line
}
d.update( mpos )
yield d
skipping = 1
continue
# If there is no message, track source names and page numbers.
last_file = self.update_file(line, pos, last_file)
page = self.update_page(line, page)
def get_errors (self):
    """Return an iterator over error reports only (see parse)."""
    return self.parse(errors=1)
def get_boxes (self):
    """Return an iterator over bad-box reports only (see parse)."""
    return self.parse(boxes=1)
def get_references (self):
    """Return an iterator over undefined-reference warnings only (see parse)."""
    return self.parse(refs=1)
def get_warnings (self):
    """Return an iterator over generic warnings only (see parse)."""
    return self.parse(warnings=1)
def update_file (self, line, stack, last):
    """
    Scan one log line for file openings and closings and keep the list
    `stack' up to date. Newly opened files go at the end, so stack[1] is
    the main source and stack[-1] the current one; stack[0] holds None for
    errors that may happen outside any source. Return the last file from
    which text was read (the new stack top, or the one before the last
    closing parenthesis).
    """
    match = re_file.search(line)
    while match:
        opening = line[match.start()] == '('
        if opening:
            last = match.group("file")
            stack.append(last)
        else:
            # A closing parenthesis: pop the current file.
            last = stack.pop()
        line = line[match.end():]
        match = re_file.search(line)
    return last
def update_page (self, line, before):
    """
    Inspect one log line and return the number of the page being built
    after it, given that `before' was the current page beforehand.
    """
    found = re_page.findall(line)
    if not found:
        return before
    # The last page marker on the line wins; the page under construction
    # is the one after it.
    return 1 + int(found[-1])
#---- Parsing and compiling ----{{{1

# Matches "%rubber: <cmd> <arg>" directive comments embedded in TeX sources.
re_command = re.compile("%[% ]*rubber: *(?P<cmd>[^ ]*) *(?P<arg>.*).*")
class SourceParser (Parser):
    """
    Extends the general-purpose TeX parser to handle Rubber directives in the
    comment lines.
    """
    def __init__ (self, file, dep):
        Parser.__init__(self, file)
        # The LaTeXDep that receives the directives found in comments.
        self.latex_dep = dep

    def read_line (self):
        # Intercept "%rubber:" comment lines and execute them as directives
        # instead of handing them to the TeX parser.
        while Parser.read_line(self):
            match = re_command.match(self.line.strip())
            if match is None:
                return True
            vars = dict(self.latex_dep.vars.items())
            vars['line'] = self.pos_line
            args = parse_line(match.group("arg"), vars)
            self.latex_dep.command(match.group("cmd"), args, vars)
        return False

    def skip_until (self, expr):
        # Discard lines until one matches `expr', then resume parsing right
        # after the matched text (used to skip verbatim environments).
        regexp = re.compile(expr)
        while Parser.read_line(self):
            match = regexp.match(self.line)
            if match is None:
                continue
            self.line = self.line[match.end():]
            self.pos_char += match.end()
            return
class EndDocument:
    """ This is the exception raised when \\end{document} is found. """
    pass
class EndInput:
    """ This is the exception raised when \\endinput is found. """
    pass
class LaTeXDep (Node):
    """
    This class represents dependency nodes for LaTeX compilation. It handles
    the cyclic LaTeX compilation until a stable output, including actual
    compilation (with a parametrable executable) and possible processing of
    compilation results (e.g. running BibTeX).

    Before building (or cleaning) the document, the method `parse' must be
    called to load and configure all required modules. Text lines are read
    from the files and parsed to extract LaTeX macro calls. When such a macro
    is found, a handler is searched for in the `hooks' dictionary. Handlers
    are called with one argument: the dictionary for the regular expression
    that matches the macro call.
    """

    #-- Initialization {{{2

    def __init__ (self, env):
        """
        Initialize the environment. This prepares the processing steps for the
        given file (all steps are initialized empty) and sets the regular
        expressions and the hook dictionary.
        """
        Node.__init__(self, env.depends)
        self.env = env
        self.log = LogCheck()
        self.modules = Modules(self)

        # Document variables, layered over the environment's variables.
        self.vars = Variables(env.vars, {
            "program": "latex",
            "engine": "TeX",
            "paper": "",
            "arguments": [],
            "src-specials": "",
            "source": None,
            "target": None,
            "path": None,
            "base": None,
            "ext": None,
            "job": None,
            "graphics_suffixes" : [] })
        self.vars_stack = []

        # Tokens passed to the TeX engine; "%s" is replaced by the source
        # file name at compile time (see compile()).
        self.cmdline = ["\\nonstopmode", "\\input{%s}"]

        if self.vars.get('shell_escape', 0):
            self.cmdline.insert(0, '--shell-escape')

        # the initial hooks: macro name -> (argument format, handler).
        # In the format string, 'a' is a mandatory argument, 'o' an
        # optional one (see parse_file()).
        self.comment_mark = "%"
        self.hooks = {
            "begin": ("a", self.h_begin),
            "end": ("a", self.h_end),
            "pdfoutput": ("", self.h_pdfoutput),
            "input" : ("", self.h_input),
            "include" : ("a", self.h_include),
            "includeonly": ("a", self.h_includeonly),
            "usepackage" : ("oa", self.h_usepackage),
            "RequirePackage" : ("oa", self.h_usepackage),
            "documentclass" : ("oa", self.h_documentclass),
            "LoadClass" : ("oa", self.h_documentclass),
            "LoadClassWithOptions" : ("a", self.h_documentclass),
            "tableofcontents" : ("", self.h_tableofcontents),
            "listoffigures" : ("", self.h_listoffigures),
            "listoftables" : ("", self.h_listoftables),
            "bibliography" : ("a", self.h_bibliography),
            "bibliographystyle" : ("a", self.h_bibliographystyle),
            "endinput" : ("", self.h_endinput)
        }
        self.begin_hooks = {
            "verbatim": self.h_begin_verbatim,
            "verbatim*": lambda loc: self.h_begin_verbatim(loc, env="verbatim\\*")
        }
        self.end_hooks = {
            "document": self.h_end_document
        }
        # Set when hooks are added/changed so parse_file() re-registers them.
        self.hooks_changed = True

        self.include_only = {}

        # description of the building process:
        self.aux_md5 = {}       # checksums of .aux files after last run
        self.aux_old = {}       # checksums of .aux files before last run
        self.watched_files = {}
        self.onchange_md5 = {}
        self.onchange_cmd = {}
        self.removed_files = []
        self.not_included = []  # dependencies that don't trigger latex

        # state of the builder:
        self.processed_sources = {}

        self.must_compile = 0
        self.something_done = 0
        self.failed_module = None
def set_source (self, path, jobname=None):
    """
    Specify the main source for the document. The exact path and file name
    are determined, and the source building process is updated if needed,
    according to the source file's extension. The optional argument
    'jobname' can be used to set the job name to something other than the
    base of the file name. Return 0 on success, 1 on error.
    """
    name = self.env.find_file(path, ".tex")
    if not name:
        # NOTE(review): this reports the (empty) resolved name instead of
        # the requested `path' -- confirm whether `path' was intended.
        msg.error(_("cannot find %s") % name)
        return 1
    self.reset_sources()
    self.vars['source'] = name
    (src_path, name) = os.path.split(name)
    self.vars['path'] = src_path
    (job, self.vars['ext']) = os.path.splitext(name)
    if jobname is None:
        self.set_job = 0
    else:
        self.set_job = 1
        job = jobname
    self.vars['job'] = job
    if src_path == "":
        src_path = "."
        self.vars['base'] = job
    else:
        self.env.path.append(src_path)
        self.vars['base'] = os.path.join(src_path, job)

    source = self.source()
    prefix = os.path.join(self.vars["cwd"], "")
    if source[:len(prefix)] == prefix:
        comp_name = source[len(prefix):]
    else:
        comp_name = source
    if comp_name.find('"') >= 0:
        msg.error(_("The filename contains \", latex cannot handle this."))
        return 1
    for c in " \n\t()":
        if source.find(c) >= 0:
            msg.warn(_("Source path uses special characters, error tracking might get confused."))
            break

    self.vars['target'] = self.target = os.path.join(prefix, job)
    self.reset_products([self.target + ".dvi"])

    return 0
def includeonly (self, files):
    """
    Use partial compilation, by appending a call to \\includeonly on the
    command line on compilation.
    """
    if self.vars["engine"] == "VTeX":
        msg.error(_("I don't know how to do partial compilation on VTeX."))
        return
    # cmdline[-1] is the \input call; replace an existing \includeonly
    # just before it, or insert a new one.
    if self.cmdline[-2][:13] == "\\includeonly{":
        self.cmdline[-2] = "\\includeonly{" + ",".join(files) + "}"
    else:
        self.cmdline.insert(-1, "\\includeonly{" + ",".join(files) + "}")
    for f in files:
        self.include_only[f] = None
def source (self):
    """
    Return the main source's complete filename.
    """
    return self.vars['source']
def abspath (self, name, ref=None):
    """
    Return the absolute path of a given filename. Relative paths are
    considered relative to the file currently processed, the optional
    argument "ref" can be used to override the reference file name.
    """
    path = self.vars["cwd"]
    # Default the reference to the file currently being parsed, if any.
    if ref is None and self.vars.has_key("file"):
        ref = self.vars["file"]
    if ref is not None:
        path = os.path.join(path, os.path.dirname(ref))
    return os.path.abspath(os.path.join(path, os.path.expanduser(name)))
#-- LaTeX source parsing {{{2

def parse (self):
    """
    Parse the source for packages and supported macros.
    """
    try:
        self.process(self.source())
    except EndDocument:
        # \end{document} legitimately stops the whole parse.
        pass
    self.set_date()
    msg.log(_("dependencies: %r") % self.sources, pkg='latex')
def parse_file (self, file):
    """
    Process a LaTeX source. The file must be open, it is read to the end
    calling the handlers for the macro calls. This recursively processes
    the included sources.
    """
    parser = SourceParser(file, self)
    parser.set_hooks(self.hooks.keys())
    self.hooks_changed = False
    while True:
        # Handlers may register new hooks (e.g. do_alias); pick them up
        # before fetching the next macro.
        if self.hooks_changed:
            parser.set_hooks(self.hooks.keys())
            self.hooks_changed = False

        token = parser.next_hook()
        if token.cat == EOF:
            break

        # Collect the macro's arguments according to its format string:
        # 'a' = mandatory group, 'o' = optional [..] argument.
        format, function = self.hooks[token.val]
        args = []
        for arg in format:
            if arg == 'a':
                args.append(parser.get_argument_text())
            elif arg == 'o':
                args.append(parser.get_latex_optional_text())
        self.parser = parser
        self.vars['line'] = parser.pos_line
        function(self.vars, *args)
def process (self, path):
    """
    This method is called when an included file is processed. The argument
    must be a valid file name.
    """
    # Guard against parsing the same file twice (e.g. repeated \input).
    if self.processed_sources.has_key(path):
        msg.debug(_("%s already parsed") % path, pkg='latex')
        return
    self.processed_sources[path] = None
    if path not in self.sources:
        self.add_source(path)

    try:
        # Push a variable frame so `file'/`line' refer to this source
        # while it is being parsed, and restore it afterwards.
        saved_vars = self.vars
        try:
            msg.log(_("parsing %s") % path, pkg='latex')
            self.vars = Variables(saved_vars,
                { "file": path, "line": None })
            file = open(path)
            try:
                self.parse_file(file)
            finally:
                file.close()

        finally:
            self.vars = saved_vars
            msg.debug(_("end of %s") % path, pkg='latex')

    except EndInput:
        # \endinput stops this file only; parsing continues in the parent.
        pass
def input_file (self, name, loc=None):
    """
    Treat the given name as a source file to be read. If this source can
    be the result of some conversion, then the conversion is performed,
    otherwise the source is parsed. The returned value is a couple
    (name,dep) where `name' is the actual LaTeX source and `dep' is
    its dependency node. The return value is (None,None) if the source
    could neither be read nor built.

    The optional `loc' argument carries the location of the macro that
    requested the input; it is currently unused here. Its default used
    to be the mutable `{}', which is shared between calls -- it is now
    None, which is backward compatible since the value is never read.
    """
    # Names containing TeX macros or '#' cannot be resolved to a file.
    if name.find("\\") >= 0 or name.find("#") >= 0:
        return None, None

    for path in self.env.path:
        pname = os.path.join(path, name)
        # First try to produce the source through a registered converter.
        dep = self.env.convert(pname, suffixes=[".tex",""], context=self.vars)
        if dep:
            file = dep.products[0]
        else:
            file = self.env.find_file(name, ".tex")
            if not file:
                continue
            dep = None
        self.add_source(file)

        # Parse the file unless it still has to be built by its node.
        if dep is None or dep.is_leaf():
            self.process(file)

        if dep is None:
            return file, self.set[file]
        else:
            return file, dep

    return None, None
#-- Directives {{{2

def command (self, cmd, args, pos=None):
    """
    Execute the rubber command 'cmd' with arguments 'args'. This is called
    when a command is found in the source file or in a configuration file.
    A command name of the form 'foo.bar' is considered to be a command
    'bar' for module 'foo'. The argument 'pos' describes the position
    (file and line) where the command occurs.
    """
    if pos is None:
        pos = self.vars
    # Calls to this method are actually translated into calls to "do_*"
    # methods, except for calls to module directives.
    lst = string.split(cmd, ".", 1)
    #try:
    if len(lst) > 1:
        # "module.directive" form: forward to the module.
        self.modules.command(lst[0], lst[1], args)
    elif not hasattr(self, "do_" + cmd):
        msg.warn(_("unknown directive '%s'") % cmd, **pos)
    else:
        msg.log(_("directive: %s") % ' '.join([cmd]+args), pkg='latex')
        getattr(self, "do_" + cmd)(*args)
    #except TypeError:
    #	msg.warn(_("wrong syntax for '%s'") % cmd, **pos)
def do_alias (self, name, val):
    """Make macro `name' trigger the same hook as macro `val'."""
    hook = self.hooks.get(val)
    if hook is not None:
        self.hooks[name] = hook
        # Force parse_file() to re-register the hook names.
        self.hooks_changed = True
def do_clean (self, *args):
    """Register additional files to delete when cleaning."""
    for name in args:
        self.removed_files.append(self.abspath(name))
def do_depend (self, *args):
    """Declare extra file dependencies for the document."""
    for arg in args:
        found = self.env.find_file(arg)
        if not found:
            msg.warn(_("dependency '%s' not found") % arg, **self.vars)
        else:
            self.add_source(found)
def do_make (self, file, *args):
    """
    Handle the 'make' directive: register a conversion request for
    `file'. The argument list is a sequence of "from <source>" and
    "with <converter>" pairs.
    """
    file = self.abspath(file)
    vars = { "target": file }
    # Consume the argument pairs; anything unexpected stops the loop and
    # is reported as a syntax error below.
    while len(args) > 1:
        if args[0] == "from":
            vars["source"] = self.abspath(args[1])
        elif args[0] == "with":
            vars["name"] = args[1]
        else:
            break
        args = args[2:]
    if len(args) != 0:
        msg.error(_("invalid syntax for 'make'"), **self.vars)
        return
    self.env.conv_set(file, vars)
def do_module (self, mod, opt=None):
    """
    Handle the 'module' directive: load module `mod' with the optional
    package options `opt'.
    """
    # Renamed the local from `dict', which shadowed the builtin type.
    context = { 'arg': mod, 'opt': opt }
    self.modules.register(mod, context)
def do_onchange (self, file, cmd):
    """Run shell command `cmd' whenever `file' changes after a compilation."""
    path = self.abspath(file)
    self.onchange_cmd[path] = cmd
    # Record the current checksum (or None if the file does not exist
    # yet) so post_compile() can detect a change.
    if not os.path.exists(path):
        self.onchange_md5[path] = None
    else:
        self.onchange_md5[path] = md5_file(path)
def do_paper (self, arg):
    # Set the paper size variable used by output-producing modules.
    self.vars["paper"] = arg
def do_path (self, name):
    # Add a directory to the search path for sources and inputs.
    self.env.path.append(self.abspath(name))
def do_read (self, name):
    """
    Handle the 'read' directive: execute every non-empty, non-comment
    line of the given option file as a rubber directive.
    """
    path = self.abspath(name)
    self.push_vars(file=path, line=None)
    try:
        try:
            file = open(path)
            try:
                lineno = 0
                for line in file.readlines():
                    lineno += 1
                    line = line.strip()
                    if line == "" or line[0] == "%":
                        continue
                    self.vars["line"] = lineno
                    lst = parse_line(line, self.vars)
                    self.command(lst[0], lst[1:])
            finally:
                # Previously the file was only closed on the success path,
                # leaking the handle when reading or a directive failed.
                file.close()
        except IOError:
            msg.warn(_("cannot read option file %s") % name, **self.vars)
    finally:
        # Restore the variable frame even if a directive raised; the old
        # code skipped pop_vars() on any non-IOError exception.
        self.pop_vars()
def do_rules (self, file):
    """Load converter rules from the specified rule file."""
    path = self.env.find_file(file)
    if path is None:
        msg.warn(_("cannot read rule file %s") % file, **self.vars)
        return
    self.env.converter.read_ini(path)
def do_set (self, name, val):
    # Assign a single value to an existing document variable; unknown
    # names raise KeyError from Variables and are reported as warnings.
    try:
        self.vars[name] = val
    except KeyError:
        msg.warn(_("unknown variable: %s") % name, **self.vars)
def do_setlist (self, name, *val):
    # Like do_set, but stores all remaining arguments as a sequence.
    try:
        self.vars[name] = val
    except KeyError:
        msg.warn(_("unknown variable: %s") % name, **self.vars)
def do_watch (self, *args):
    """Register each named file to be watched for changes between runs."""
    for name in args:
        self.watch_file(self.abspath(name))
#-- Macro handling {{{2

def hook_macro (self, name, format, fun):
    # Register handler `fun' for macro `name'; `format' describes its
    # arguments ('a' mandatory, 'o' optional), see parse_file().
    self.hooks[name] = (format, fun)
    self.hooks_changed = True
def hook_begin (self, name, fun):
    # Register a handler for \begin{name}.
    self.begin_hooks[name] = fun
def hook_end (self, name, fun):
    # Register a handler for \end{name}.
    self.end_hooks[name] = fun
# Now the macro handlers:

def h_begin (self, loc, env):
    # Dispatch \begin{env} to its registered handler, if any.
    if self.begin_hooks.has_key(env):
        self.begin_hooks[env](loc)
def h_end (self, loc, env):
    # Dispatch \end{env} to its registered handler, if any.
    if self.end_hooks.has_key(env):
        self.end_hooks[env](loc)
def h_pdfoutput (self, loc):
    """
    Called when \\pdfoutput is found. Tries to guess if it is a definition
    that asks for the output to be in PDF or DVI.
    """
    parser = self.parser
    token = parser.get_token()
    # Accept "\pdfoutput=0", "\pdfoutput=1", "\pdfoutput 0" and
    # "\pdfoutput 1"; anything else is pushed back and ignored.
    if token.raw == '=':
        token2 = parser.get_token()
        if token2.raw == '0':
            mode = 0
        elif token2.raw == '1':
            mode = 1
        else:
            parser.put_token(token2)
            return
    elif token.raw == '0':
        mode = 0
    elif token.raw == '1':
        mode = 1
    else:
        parser.put_token(token)
        return

    # Switch the pdftex module (loading it if needed) to the right mode.
    if mode == 0:
        if 'pdftex' in self.modules:
            self.modules['pdftex'].pymodule.mode_dvi()
        else:
            self.modules.register('pdftex', {'opt': 'dvi'})
    else:
        if 'pdftex' in self.modules:
            self.modules['pdftex'].pymodule.mode_pdf()
        else:
            self.modules.register('pdftex')
def h_input (self, loc):
    """
    Called when an \\input macro is found. This calls the `process' method
    if the included file is found.
    """
    token = self.parser.get_token()
    if token.cat == OPEN:
        # Braced form: \input{file}
        file = self.parser.get_group_text()
    else:
        # Plain TeX form: the file name extends to the next space or
        # end of line.
        file = ""
        while token.cat not in (EOF, SPACE, END_LINE):
            file += token.raw
            token = self.parser.get_token()
    self.input_file(file, loc)
def h_include (self, loc, filename):
    """
    Called when an \\include macro is found. This includes files into the
    source in a way very similar to \\input, except that LaTeX also
    creates .aux files for them, so we have to notice this.
    """
    if self.include_only and not self.include_only.has_key(filename):
        return
    # Bind the unused dependency node to a named variable instead of `_',
    # which shadowed the gettext alias used for translated messages.
    file, dep = self.input_file(filename, loc)
    if file:
        # Track the .aux file LaTeX creates for this \include.
        aux = filename + ".aux"
        self.removed_files.append(aux)
        self.aux_old[aux] = None
        if os.path.exists(aux):
            self.aux_md5[aux] = md5_file(aux)
        else:
            self.aux_md5[aux] = None
def h_includeonly (self, loc, files):
    """
    Called when the macro \\includeonly is found: record the
    comma-separated list of files that should be included, so that the
    other \\include macros are ignored.
    """
    self.include_only = {}
    for raw in files.split(","):
        cleaned = raw.strip()
        if cleaned != "":
            self.include_only[cleaned] = None
def h_documentclass (self, loc, opt, name):
    """
    Called when the macro \\documentclass is found. It almost has the same
    effect as `usepackage': if the source's directory contains the class
    file, in which case this file is treated as an input, otherwise a
    module is searched for to support the class.
    """
    file = self.env.find_file(name + ".cls")
    if file:
        # A local .cls exists: parse it like any other input.
        self.process(file)
    else:
        dict = Variables(self.vars, { 'opt': opt })
        self.modules.register(name, dict)
def h_usepackage (self, loc, opt, names):
    """
    Called when a \\usepackage macro is found. If there is a package in the
    directory of the source file, then it is treated as an include file
    unless there is a supporting module in the current directory,
    otherwise it is treated as a package.
    """
    # \usepackage accepts a comma-separated list of package names.
    for name in string.split(names, ","):
        name = name.strip()
        file = self.env.find_file(name + ".sty")
        if file and not os.path.exists(name + ".py"):
            self.process(file)
        else:
            dict = Variables(self.vars, { 'opt': opt })
            self.modules.register(name, dict)
def h_tableofcontents (self, loc):
    # A change in the .toc after a run means another run is needed.
    self.watch_file(self.target + ".toc")
def h_listoffigures (self, loc):
    # A change in the .lof after a run means another run is needed.
    self.watch_file(self.target + ".lof")
def h_listoftables (self, loc):
    # A change in the .lot after a run means another run is needed.
    self.watch_file(self.target + ".lot")
def h_bibliography (self, loc, names):
    """
    Called when the macro \\bibliography is found. This method actually
    registers the module bibtex (if not already done) and registers the
    databases.
    """
    # NOTE(review): `dict' here is the *builtin type*, not a context
    # dictionary -- compare do_module/h_documentclass, which build a real
    # mapping. Confirm against Modules.register whether this is intended.
    self.modules.register("bibtex", dict)
    # This registers the actual hooks, so that subsequent occurrences of
    # \bibliography and \bibliographystyle will be caught by the module.
    # However, the first time, we have to call the hooks from here. The
    # line below assumes that the new hook has the same syntax.
    self.hooks['bibliography'][1](loc, names)
def h_bibliographystyle (self, loc, name):
    """
    Called when \\bibliographystyle is found. This registers the module
    bibtex (if not already done) and calls the method set_style() of the
    module.
    """
    # NOTE(review): passes the builtin `dict' type, like h_bibliography
    # above -- confirm against Modules.register whether this is intended.
    self.modules.register("bibtex", dict)
    # The same remark as in 'h_bibliography' applies here.
    self.hooks['bibliographystyle'][1](loc, name)
def h_begin_verbatim (self, dict, env="verbatim"):
    """
    Called when \\begin{verbatim} is found. This disables all macro
    handling and comment parsing until the end of the environment. The
    optional argument 'env' specifies the environment name (and thus the
    end marker), by default "verbatim".
    """
    self.parser.skip_until(r"[ \t]*\\end\{%s\}.*" % env)
def h_endinput (self, dict):
    """
    Called when \\endinput is found. This stops the processing of the
    current input file, thus ignoring any code that appears afterwards.
    """
    raise EndInput
def h_end_document (self, dict):
    """
    Called when \\end{document} is found. This stops the processing of any
    input file, thus ignoring any code that appears afterwards.
    """
    raise EndDocument
#-- Compilation steps {{{2

def compile (self):
    """
    Run one LaTeX compilation on the source. Return true on success or
    false if errors occured.
    """
    msg.progress(_("compiling %s") % msg.simplify(self.source()))

    # Pass the source relative to the working directory when possible,
    # quoting it if it contains spaces.
    file = self.source()
    prefix = os.path.join(self.vars["cwd"], "")
    if file[:len(prefix)] == prefix:
        file = file[len(prefix):]
    if file.find(" ") >= 0:
        file = '"%s"' % file

    cmd = [self.vars["program"]]

    if self.set_job:
        if self.vars["engine"] == "VTeX":
            msg.error(_("I don't know how set the job name with %s.")
                % self.vars["engine"])
        else:
            cmd.append("-jobname=" + self.vars["job"])

    specials = self.vars["src-specials"]
    if specials != "":
        if self.vars["engine"] == "VTeX":
            msg.warn(_("I don't know how to make source specials with %s.")
                % self.vars["engine"])
            self.vars["src-specials"] = ""
        elif specials == "yes":
            cmd.append("-src-specials")
        else:
            cmd.append("-src-specials=" + specials)

    cmd += self.vars["arguments"]

    cmd += [x.replace("%s",file) for x in self.cmdline]

    # Remove the CWD from elements in the path, to avoid potential problems
    # with special characters if there are any (except that ':' in paths
    # is not handled).
    prefix = self.env.vars["cwd"]
    prefix_ = os.path.join(prefix, "")
    paths = []
    for p in self.env.path:
        if p == prefix:
            paths.append(".")
        elif p[:len(prefix_)] == prefix_:
            paths.append("." + p[len(prefix):])
        else:
            paths.append(p)
    inputs = string.join(paths, ":")

    if inputs == "":
        env = {}
    else:
        # Append the inherited TEXINPUTS so system paths stay visible.
        inputs = inputs + ":" + os.getenv("TEXINPUTS", "")
        env = {"TEXINPUTS": inputs}

    self.env.execute(cmd, env, kpse=1)
    self.something_done = 1

    if self.log.read(self.target + ".log"):
        msg.error(_("Could not run %s.") % cmd[0])
        return False
    if self.log.errors():
        return False
    if not os.access(self.products[0], os.F_OK):
        msg.error(_("Output file `%s' was not produced.") %
            msg.simplify(self.products[0]))
        return False

    # Rotate the .aux checksums: current becomes old, then re-hash.
    for aux, md5 in self.aux_md5.items():
        self.aux_old[aux] = md5
        self.aux_md5[aux] = md5_file(aux)

    return True
def pre_compile (self, force):
    """
    Prepare the source for compilation using package-specific functions.
    This function must return False on failure. This function sets
    `must_compile' to True if we already know that a compilation is
    needed, because it may avoid some unnecessary preprocessing (e.g.
    BibTeXing).
    """
    aux = self.target + ".aux"
    if os.path.exists(aux):
        self.aux_md5[aux] = md5_file(aux)
    else:
        self.aux_md5[aux] = None
    self.aux_old[aux] = None

    self.log.read(self.target + ".log")

    # `force' is folded into must_compile: compile_needed() returns true
    # immediately when must_compile is already set.
    self.must_compile = force
    self.must_compile = self.compile_needed()

    msg.log(_("building additional files..."), pkg='latex')

    for mod in self.modules.objects.values():
        if not mod.pre_compile():
            self.failed_module = mod
            return False
    return True
def post_compile (self):
    """
    Run the package-specific operations that are to be performed after
    each compilation of the main source. Returns true on success or false
    on failure.
    """
    msg.log(_("running post-compilation scripts..."), pkg='latex')

    # Run registered "onchange" commands for files whose checksum changed.
    for file, md5 in self.onchange_md5.items():
        if not os.path.exists(file):
            continue
        new = md5_file(file)
        if md5 != new:
            msg.progress(_("running %s") % self.onchange_cmd[file])
            self.env.execute(["sh", "-c", self.onchange_cmd[file]])
        self.onchange_md5[file] = new

    for mod in self.modules.objects.values():
        if not mod.post_compile():
            self.failed_module = mod
            return False
    return True
def clean (self, all=0):
    """
    Remove all files that are produced by compilation.
    """
    self.remove_suffixes([".log", ".aux", ".toc", ".lof", ".lot"])

    for file in self.products + self.removed_files:
        if os.path.exists(file):
            msg.log(_("removing %s") % file, pkg='latex')
            os.unlink(file)

    msg.log(_("cleaning additional files..."), pkg='latex')

    # Let dependency nodes and modules clean their own products.
    for dep in self.source_nodes():
        dep.clean()

    for mod in self.modules.objects.values():
        mod.clean()
#-- Building routine {{{2

def force_run (self):
    # Build with at least one compilation, even if nothing changed.
    return self.run(True)
def run (self, force=False):
    """
    Run the building process until the last compilation, or stop on error.
    This method supposes that the inputs were parsed to register packages
    and that the LaTeX source is ready. If the second (optional) argument
    is true, then at least one compilation is done. As specified by the
    class depend.Node, the method returns True on success and False on
    failure.
    """
    if not self.pre_compile(force):
        return False

    # If an error occurs after this point, it will be while LaTeXing.
    self.failed_dep = self
    self.failed_module = None

    if force or self.compile_needed():
        self.must_compile = False
        if not self.compile():
            return False
        if not self.post_compile():
            return False

    # Iterate until the output is stable (e.g. cross-references settle).
    while self.recompile_needed():
        self.must_compile = False
        if not self.compile():
            return False
        if not self.post_compile():
            return False

    # Finally there was no error.
    self.failed_dep = None

    if self.something_done:
        self.date = int(time.time())
    return True
def compile_needed (self):
    """
    Returns true if a first compilation is needed. This method supposes
    that no compilation was done (by the script) yet.
    """
    if self.must_compile:
        return 1
    msg.log(_("checking if compiling is necessary..."), pkg='latex')
    if not os.path.exists(self.products[0]):
        msg.debug(_("the output file doesn't exist"), pkg='latex')
        return 1
    if not os.path.exists(self.target + ".log"):
        msg.debug(_("the log file does not exist"), pkg='latex')
        return 1
    if os.path.getmtime(self.products[0]) < os.path.getmtime(self.source()):
        msg.debug(_("the source is younger than the output file"), pkg='latex')
        return 1
    if self.log.read(self.target + ".log"):
        msg.debug(_("the log file is not produced by TeX"), pkg='latex')
        return 1
    # Nothing obviously stale; apply the finer re-compilation checks.
    return self.recompile_needed()
def recompile_needed (self):
    """
    Returns true if another compilation is needed. This method is used
    when a compilation has already been done.
    """
    if self.must_compile:
        self.update_watches()
        return 1
    if self.log.errors():
        msg.debug(_("last compilation failed"), pkg='latex')
        self.update_watches()
        return 1
    if self.deps_modified(os.path.getmtime(self.products[0])):
        msg.debug(_("dependencies were modified"), pkg='latex')
        self.update_watches()
        return 1
    suffix = self.update_watches()
    if suffix:
        msg.debug(_("the %s file has changed") % suffix, pkg='latex')
        return 1
    if self.log.run_needed():
        msg.debug(_("LaTeX asks to run again"), pkg='latex')
        # LaTeX often asks for a rerun spuriously; only obey if some
        # .aux file actually changed during the last run.
        aux_changed = 0
        for aux, md5 in self.aux_md5.items():
            if md5 is not None and md5 != self.aux_old[aux]:
                aux_changed = 1
                break
        if not aux_changed:
            msg.debug(_("but the aux files are unchanged"), pkg='latex')
            return 0
        return 1
    msg.debug(_("no new compilation is needed"), pkg='latex')
    return 0
def deps_modified (self, date):
    """
    Tell whether at least one dependency (ignoring the non-triggering
    ones listed in `not_included') is younger than the given date.
    """
    for name in self.sources:
        if name not in self.not_included:
            node = self.set[name]
            if node.date > date:
                return True
    return False
#-- Utility methods {{{2

def get_errors (self):
    # Report errors from the failing module if one failed, otherwise
    # from the LaTeX log.
    if self.failed_module is None:
        return self.log.get_errors()
    else:
        return self.failed_module.get_errors()
def watch_file (self, file):
    """
    Register `file' (typically "jobname.toc" or similar) to be watched:
    when it changes during a compilation, another compilation is needed.
    """
    if not os.path.exists(file):
        # No checksum yet; update_watches() will fill it in later.
        self.watched_files[file] = None
    else:
        self.watched_files[file] = md5_file(file)
def update_watches (self):
    """
    Refresh the MD5 sums of all watched files and return the name of one
    of the files whose sum changed, or None if none did.
    """
    changed = None
    for name in self.watched_files.keys():
        if not os.path.exists(name):
            continue
        digest = md5_file(name)
        if digest != self.watched_files[name]:
            changed = name
        self.watched_files[name] = digest
    return changed
def remove_suffixes (self, list):
    """
    Delete every file derived from the main target that carries one of
    the given suffixes.
    """
    for suffix in list:
        path = self.target + suffix
        if not os.path.exists(path):
            continue
        msg.log(_("removing %s") % path, pkg='latex')
        os.unlink(path)
#---- Base classes for modules ----{{{1

class Module (object):
    """
    This is the base class for modules. Each module should define a class
    named 'Module' that derives from this one. The default implementation
    provides all required methods with no effects.
    """
    def __init__ (self, env, dict):
        """
        The constructor receives two arguments: 'env' is the compiling
        environment, 'dict' is a dictionary that describes the command that
        caused the module to load.
        """

    def pre_compile (self):
        """
        This method is called before the first LaTeX compilation. It is
        supposed to build any file that LaTeX would require to compile the
        document correctly. The method must return true on success.
        """
        return True

    def post_compile (self):
        """
        This method is called after each LaTeX compilation. It is supposed to
        process the compilation results and possibly request a new
        compilation. The method must return true on success.
        """
        return True

    def clean (self):
        """
        This method is called when cleaning the compiled files. It is supposed
        to remove all the files that this modules generates.
        """

    def command (self, cmd, args):
        """
        This is called when a directive for the module is found in the source.
        The method can raise 'AttributeError' when the directive does not
        exist and 'TypeError' if the syntax is wrong. By default, when called
        with argument "foo" it calls the method "do_foo" if it exists, and
        fails otherwise.
        """
        getattr(self, "do_" + cmd)(*args)

    def get_errors (self):
        """
        This is called if something has failed during an operation performed
        by this module. The method returns a generator with items of the same
        form as in LaTeXDep.get_errors.
        """
        # The unreachable yield turns this method into a generator that
        # produces nothing (an empty error stream).
        if None:
            yield None
class ScriptModule (Module):
    """
    This class represents modules that are defined as Rubber scripts.
    """
    def __init__ (self, env, filename):
        """
        Execute the rubber script `filename': every non-empty line that
        is not a comment is parsed and run as a directive against `env'.
        """
        vars = Variables(env.vars, {
            'file': filename,
            'line': None })
        lineno = 0
        file = open(filename)
        try:
            for line in file.readlines():
                line = line.strip()
                lineno = lineno + 1
                if line == "" or line[0] == "%":
                    continue
                vars['line'] = lineno
                lst = parse_line(line, vars)
                env.command(lst[0], lst[1:], vars)
        finally:
            # Close the script even when a directive raises; the old code
            # leaked the file handle in that case.
            file.close()
class PyModule (Module):
    # Adapter that wraps a plain Python module object as a Module: each
    # Module operation is forwarded to the wrapped module only when the
    # corresponding attribute exists, otherwise the no-op default applies.
    def __init__ (self, document, pymodule, context):
        self.pymodule = pymodule
        if hasattr(pymodule, 'setup'):
            pymodule.setup(document, context)
    def pre_compile (self):
        if hasattr(self.pymodule, 'pre_compile'):
            return self.pymodule.pre_compile()
        return True
    def post_compile (self):
        if hasattr(self.pymodule, 'post_compile'):
            return self.pymodule.post_compile()
        return True
    def clean (self):
        if hasattr(self.pymodule, 'clean'):
            self.pymodule.clean()
    def command (self, cmd, args):
        # Prefer a generic command() entry point; otherwise fall back to
        # the "do_<cmd>" convention used by Module.
        if hasattr(self.pymodule, 'command'):
            self.pymodule.command(cmd, args)
        else:
            getattr(self.pymodule, "do_" + cmd)(*args)
    def get_errors (self):
        if hasattr(self.pymodule, 'get_errors'):
            return self.pymodule.get_errors()
        return []
| petrhosek/rubber | rubber/converters/latex.py | Python | gpl-2.0 | 42,452 |
"""
Social.py
Contains elements that enable connecting with external social sites.
Copyright (C) 2015 Timothy Edmund Crosley
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
import hashlib
import urllib
from . import ClientSide, Factory, Layout
from .Base import Node, TextNode
from .Buttons import Link
from .Display import Image
# Rebind the imported Factory module name to a Factory instance scoped to
# this "Social" collection of elements.
Factory = Factory.Factory("Social")
class Social(Node):
    """
    Base element for social-site widgets; adds an `account' attribute that
    concrete widgets (e.g. TwitterBadge) substitute into their markup.
    """
    # A trailing comma makes this a real 1-tuple; the previous
    # `('account')' was just a parenthesized string, which __slots__
    # accepts only incidentally (a bare string names a single slot).
    __slots__ = ('account',)
    properties = Node.properties.copy()
    properties['account'] = {'action':'classAttribute'}

    def _create(self, name=None, id=None, parent=None, html="", *kargs, **kwargs):
        # Name and id are deliberately not forwarded to Node.
        Node._create(self, None, None, parent, *kargs, **kwargs)
        self.account = ""
class TwitterBadge(Social):
    """
    Displays a clickable twitter badge.
    """
    def toHTML(self, formatted=False, *args, **kwargs):
        """
        Returns the twitter badge as defined by the api directly
        """
        # The markup is Twitter's stock follow-button snippet with the
        # account name substituted in; the inline <script> loads
        # widgets.js once per page (keyed on the "twitter-wjs" id).
        return ("""<a href="https://twitter.com/%(account)s" class="twitter-follow-button" """ + \
                """data-show-count="false">Follow @%(account)s</a><script>!function(d,s,id){""" + \
                """var js,fjs=d.getElementsByTagName(s)[0];if(!d.getElementById(id)){js=d.createElement""" + \
                """(s);js.id=id;js.src="//platform.twitter.com/widgets.js";fjs.parentNode.insertBefore(""" + \
                """js,fjs);}}(document,"script","twitter-wjs");</script>""") % {'account':self.account}

Factory.addProduct(TwitterBadge)
class TwitterAPI(Node):
    # Injects the Twitter widgets.js bootstrap script so that other
    # twitter elements on the page (e.g. Tweet buttons) are activated.
    __slots__ = ()

    def _create(self, name=None, id=None, parent=None, html="", *kargs, **kwargs):
        Node._create(self, name, id, parent, *kargs, **kwargs)
        self.addScript('window.twttr = (function (d,s,id) {'
                       'var t, js, fjs = d.getElementsByTagName(s)[0];'
                       'if (d.getElementById(id)) return; js=d.createElement(s); js.id=id;'
                       'js.src="https://platform.twitter.com/widgets.js"; fjs.parentNode.insertBefore(js, fjs);'
                       'return window.twttr || (t = { _e: [], ready: function(f){ t._e.push(f) } });'
                       '}(document, "script", "twitter-wjs"));')

Factory.addProduct(TwitterAPI)
class Tweet(Link):
    # A "Tweet" share button, rendered as a link that Twitter's widgets.js
    # (loaded via TwitterAPI) upgrades into the real button.
    __slots__ = ()
    properties = Link.properties.copy()
    properties['hideCount'] = {'action':'hideCount', 'type':'bool', 'info':"Don't show the number of re-tweets"}
    properties['largeButton'] = {'action':'useLargeButton', 'type':'bool', 'info':'User larger tweet button size'}
    properties['url'] = {'action':'attribute', 'name':'data-url', 'info':'Set the url the tweet will link to'}
    properties['hashtag'] = {'action':'attribute', 'name':'data-hashtags', 'info':'Associated a hashtag to the tweet'}
    properties['via'] = {'action':'attribute', 'name':'data-via', 'info':'Associated with another twitter account'}
    properties['message'] = {'action':'attribute', 'name':'data-text', 'info':'The tweet message text'}

    def _create(self, name=None, id=None, parent=None, *kargs, **kwargs):
        Link._create(self, name, id, parent, *kargs, **kwargs)
        self.setText("Tweet")
        self.addClass("twitter-share-button")
        self.setDestination("https://twitter.com/share")

    def hideCount(self, hide=True):
        # Twitter's API uses data-count="none" to suppress the counter.
        if hide:
            self.attributes['data-count'] = 'none'
        else:
            self.attributes.pop('data-count', None)

    def useLargeButton(self, use=True):
        if use:
            self.attributes['data-size'] = 'large'
        else:
            self.attributes.pop('data-size', None)

    def toHTML(self, formatted=False, *args, **kwargs):
        """
        Render the tweet link. NOTE(review): despite the old comment, no
        twitter script is injected here -- a TwitterAPI element must be
        present on the page for the button to activate.
        """
        html = Link.toHTML(self, formatted, *args, **kwargs)
        return html

Factory.addProduct(Tweet)
class GooglePlusAPI(Node):
    """
        Loads the google plusone.js API asynchronously - add once per page.
    """
    __slots__ = ()
    def _create(self, name=None, id=None, parent=None, html="", *kargs, **kwargs):
        Node._create(self, name, id, parent, *kargs, **kwargs)
        # 'parsetags':'explicit' means widgets are rendered on demand rather
        # than on page load.
        self.addScript("window.___gcfg = {lang:'en-US','parsetags':'explicit'};"
                       "(function() {var po = document.createElement('script');"
                       "po.type = 'text/javascript'; po.async = true;"
                       "po.src = 'https://apis.google.com/js/plusone.js';"
                       "var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(po, s);"
                       "})();")
Factory.addProduct(GooglePlusAPI)
class GooglePlusShare(Layout.Box):
    """
        A google+ share button; requires GooglePlusAPI on the page.
    """
    __slots__ = ()
    properties = Layout.Box.properties.copy()
    # NOTE(review): 'size' is mapped onto the 'data-height' attribute - looks
    # intentional for the g+ widget API, but verify against callers.
    properties['size'] = {'action':'attribute', 'name':'data-height', 'type':'int',
                          'info':"The Size of the of the button, 2 is large"}
    properties['url'] = {'action':'attribute', 'name':'data-href', 'info':"The url the google plus button points to"}
    def _create(self, name=None, id=None, parent=None, html="", *kargs, **kwargs):
        # NOTE(review): calls Node._create rather than Layout.Box._create,
        # skipping any Box-level initialization - confirm this is deliberate.
        Node._create(self, name, id, parent, *kargs, **kwargs)
        self.addClass("g-plus")
        self.attributes['data-action'] = "share"
        self.attributes['data-annotation'] = "none"
Factory.addProduct(GooglePlusShare)
class GooglePlusBadge(Social):
    """
        Displays a clickable google plus badge.
    """
    __slots__ = ('link', )
    def _create(self, name=None, id=None, parent=None, html="", *kargs, **kwargs):
        Social._create(self, None, None, parent, *kargs, **kwargs)
        # The badge is a rel="publisher" link wrapping the standard g+ icon.
        self.link = self.add(Link())
        self.link.attributes['rel'] = "publisher"
        self.link.addClass("WGooglePlusBadge")
        self.link += Image(src="https://ssl.gstatic.com/images/icons/gplus-32.png", alt="Google+")
    def _render(self):
        # Destination resolved at render time so account changes are picked up.
        self.link.setDestination("https://plus.google.com/%s?prsrc=3" % self.account)
Factory.addProduct(GooglePlusBadge)
class FacebookLike(Social):
    """
    Adds a facebook like link to your site
    """
    def toHTML(self, formatted=False, *args, **kwargs):
        """
        Returns the facebook like button markup pointed at this widget's account.
        """
        # Fix: the original concatenation emitted 'data-send="falsedata-layout='
        # (the closing quote and the space between the two attributes were
        # missing at the string-literal boundary), producing broken HTML.
        return ("""<div class="fb-like" data-href="https://www.facebook.com/%s" data-send="false" """ + \
                """data-layout="button_count" data-width="300" data-show-faces="false"></div>""") % self.account
Factory.addProduct(FacebookLike)
class FacebookAPI(Layout.Box):
    """
    Adds facebook api support to your site and optionally calls the init method on it - only add once.
    """
    __slots__ = ('loginURL', 'logoutURL', 'appId', 'init')
    properties = Node.properties.copy()
    properties['appId'] = {'action':'classAttribute'}
    properties['init'] = {'action':'classAttribute', 'type':'bool'}
    properties['loginURL'] = {'action':'classAttribute'}
    properties['logoutURL'] = {'action':'classAttribute'}
    class ClientSide(Layout.Box.ClientSide):
        def feed(self, name, caption, description, link, picture=None, redirect=None, callback=None):
            """
            Posts defined data to the users news feed.

            Fix: description is now added to the dialog arguments before the
            callback branch; previously a supplied callback caused an early
            return that silently dropped the description.
            """
            arguments = {'method':'feed', 'name':name, 'caption':caption, 'link':link}
            if description:
                arguments['description'] = description
            if picture:
                arguments['picture'] = picture
            if redirect:
                arguments['redirect_url'] = redirect
            # `ClientSide` here resolves to the module, not this nested class
            # (class scope is not consulted from inside a method).
            if callback:
                return ClientSide.call("FB.ui", arguments, callback)
            return ClientSide.call("FB.ui", arguments)
    def _create(self, id=None, name=None, parent=None, *kargs, **kwargs):
        # The box must render with id "fb-root"; the facebook SDK requires it.
        Layout.Box._create(self, "fb-root", name, parent, *kargs, **kwargs)
        self.appId = ""
        self.init = False
        self.loginURL = None
        self.logoutURL = None
    def _render(self):
        """
        Registers the SDK bootstrap script and, when `init` is set, the
        FB.init call plus optional auth login/logout redirects.
        """
        if self.init:
            extra = ""
            if self.loginURL:
                extra += "FB.Event.subscribe('auth.login', function(response){window.location = '%s'});" % \
                         self.loginURL
            if self.logoutURL:
                extra += "FB.Event.subscribe('auth.logout', function(response){window.location = '%s'});" % \
                         self.logoutURL
            self.addScript("""window.fbAsyncInit = function(){FB.init
                           ({appId: '%s', status: true, cookie: true, xfbml: true});
                            %s
                           }""" % (self.appId, extra))
        self.addScript("""(function(d, s, id){
                          var js, fjs = d.getElementsByTagName(s)[0];
                          if (d.getElementById(id)) {return;}
                          js = d.createElement(s); js.id = id;
                          js.async = true;
                          js.src = "//connect.facebook.net/en_US/all.js";
                          fjs.parentNode.insertBefore(js, fjs);
                          }(document, 'script', 'facebook-jssdk'));""")
Factory.addProduct(FacebookAPI)
class FacebookLogin(Node):
    """
    Adds a facebook login button to the page
    """
    __slots__ = ('text', )
    tagName = "fb:login-button"
    properties = Node.properties.copy()
    properties['show-faces'] = {'action':'attribute', 'type':'bool',
                                'info':'Specifies whether to show faces underneath the Login button.'}
    properties['width'] = {'action':'attribute', 'type':'int',
                           'info':'The width of the plugin in pixels. Default width: 200px.'}
    properties['size'] = {'action':'attribute',
                          'info':'Different sized buttons: small, medium, large, xlarge (default: medium).'}
    properties['max-rows'] = {'action':'attribute', 'type':'int',
                              'info':'The maximum number of rows of profile pictures to display. Default value: 1.'}
    properties['scope'] = {'action':'attribute', 'info':'a comma separated list of extended permissions to request.'}
    properties['registration-url '] = {'action':'attribute',
                                       'info':'URL to redirect to on initial registration.'}
    properties['text'] = {'action':'classAttribute', 'info':'Set a custom label for the facebook connect button.'}
    def _create(self, id=None, name=None, parent=None, *kargs, **kwargs):
        Node._create(self, id, name, parent, *kargs, **kwargs)
        # Optional custom label; applied lazily in _render.
        self.text = None
    def _render(self):
        # Keep a single text child in sync with the `text` attribute.
        # NOTE(review): assumes childElements[0] supports setText - confirm
        # no other child types are ever added to this node.
        if self.text:
            if not self.childElements:
                self += TextNode()
            self.childElements[0].setText(self.text)
        elif self.childElements:
            self.childElements[0].setText("")
    class ClientSide(Node.ClientSide):
        """
        Defines the client-side behavior of the facebook api.
        """
        def logout(self):
            # `ClientSide` resolves to the module here, not this nested class.
            return ClientSide.call("FB.logout")
Factory.addProduct(FacebookLogin)
class Gravatar(Image):
    """
    A Gravatar user image based on an email id
    """
    __slots__ = ('email', '_size', '_default', '_rating')
    properties = Image.properties.copy()
    properties['email'] = {'action':'classAttribute'}
    properties['size'] = {'action':'setSize', 'type':'int'}
    properties['rating'] = {'action':'setRating'}
    properties['default'] = {'action':'setDefault'}
    def _create(self, name=None, id=None, parent=None, html="", *kargs, **kwargs):
        Image._create(self, None, None, parent, *kargs, **kwargs)
        self.email = ""
        self._size = 80        # gravatar's default image size
        self._default = "mm"   # "mystery man" placeholder image
        self._rating = "g"
    def _render(self):
        # The image URL is derived from the md5 hex digest of the utf-8 email.
        self.attributes['src'] = "http://www.gravatar.com/avatar/%s?s=%s&r=%s&d=%s" % \
                                 (hashlib.md5(self.email.encode('utf-8')).hexdigest(), self.size(),
                                  self.rating(), self.default())
        self.style['width'] = "%spx" % self.size()
        self.style['height'] = "%spx" % self.size()
    def profileURL(self):
        """
        Returns the associated profile URL that can be used to modify the provided image
        """
        return "http://www.gravatar.com/%s" % hashlib.md5(self.email.encode('utf-8')).hexdigest()
    def setSize(self, size):
        """
        Set the width and height of the gravatar in pixels (1 - 2048).
        """
        # (Old docstring mentioned google charts' 1000px limit - a copy/paste
        # leftover; gravatar's documented range is 1 - 2048.)
        size = int(size)
        if size > 2048 or size < 1:
            raise ValueError("Gravatar only supports requesting image sizes 1 - 2048")
        self._size = size
    def size(self):
        """
        Returns the size of this gravatar
        """
        return self._size
    def setRating(self, rating):
        """
        Sets the maximum rating of the returned image (g, pg, r, or x)
        """
        rating = rating.lower()
        if rating not in ('g', 'pg', 'r', 'x'):
            raise ValueError("Gravatar only supports the ratings g, pg, r, and x")
        self._rating = rating
    def rating(self):
        """
        Returns the maximum rating allowed for this image
        """
        return self._rating
    def setDefault(self, default):
        """
        Sets the default image in the case the provided email does not have a gravatar
        can be a direct url or one of the included defaults:
            404, mm, identicon, monsterid, wavatar, retro, and blank
        """
        # Fix: urllib has no 'encode' function, so this raised AttributeError
        # on every call; quote() is the url-escaping function intended here.
        self._default = urllib.quote(default)
    def default(self):
        """
        Returns the image set to load if none is available for the specified email address
        """
        return self._default
Factory.addProduct(Gravatar)
| timothycrosley/thedom | thedom/social.py | Python | gpl-2.0 | 14,474 |
#
# Kickstart module for the storage.
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from pyanaconda.core.signal import Signal
from pyanaconda.core.dbus import DBus
from pyanaconda.modules.common.base import KickstartService
from pyanaconda.modules.common.constants.services import STORAGE
from pyanaconda.modules.common.containers import TaskContainer
from pyanaconda.modules.common.errors.storage import InvalidStorageError
from pyanaconda.modules.common.structures.requirement import Requirement
from pyanaconda.modules.storage.bootloader import BootloaderModule
from pyanaconda.modules.storage.checker import StorageCheckerModule
from pyanaconda.modules.storage.dasd import DASDModule
from pyanaconda.modules.storage.devicetree import DeviceTreeModule
from pyanaconda.modules.storage.disk_initialization import DiskInitializationModule
from pyanaconda.modules.storage.disk_selection import DiskSelectionModule
from pyanaconda.modules.storage.fcoe import FCOEModule
from pyanaconda.modules.storage.installation import MountFilesystemsTask, ActivateFilesystemsTask, \
WriteConfigurationTask
from pyanaconda.modules.storage.iscsi import ISCSIModule
from pyanaconda.modules.storage.kickstart import StorageKickstartSpecification
from pyanaconda.modules.storage.nvdimm import NVDIMMModule
from pyanaconda.modules.storage.partitioning.constants import PartitioningMethod
from pyanaconda.modules.storage.partitioning.factory import PartitioningFactory
from pyanaconda.modules.storage.partitioning.validate import StorageValidateTask
from pyanaconda.modules.storage.reset import ScanDevicesTask
from pyanaconda.modules.storage.snapshot import SnapshotModule
from pyanaconda.modules.storage.storage_interface import StorageInterface
from pyanaconda.modules.storage.teardown import UnmountFilesystemsTask, TeardownDiskImagesTask
from pyanaconda.modules.storage.zfcp import ZFCPModule
from pyanaconda.storage.initialization import enable_installer_mode, create_storage
from pyanaconda.anaconda_loggers import get_module_logger
log = get_module_logger(__name__)
class StorageService(KickstartService):
    """The Storage service.

    Owns the Blivet storage model and a set of child kickstart modules
    (checker, device tree, disk init/selection, snapshot, bootloader,
    FCoE, iSCSI, NVDIMM, DASD, zFCP) plus the partitioning modules
    created at runtime.
    """

    def __init__(self):
        """Create the service, its child modules and their signal wiring."""
        super().__init__()
        # Initialize Blivet.
        enable_installer_mode()
        # The storage model.
        # _current_storage: latest scanned model; _storage_playground: a
        # validated, partitioned copy used for installation (takes priority).
        self._current_storage = None
        self._storage_playground = None
        self.storage_changed = Signal()
        # The created partitioning modules.
        self._created_partitioning = []
        self.created_partitioning_changed = Signal()
        # The applied partitioning module.
        self._applied_partitioning = None
        self.applied_partitioning_changed = Signal()
        self.partitioning_reset = Signal()
        # Initialize modules.
        self._modules = []
        self._storage_checker_module = StorageCheckerModule()
        self._add_module(self._storage_checker_module)
        self._device_tree_module = DeviceTreeModule()
        self._add_module(self._device_tree_module)
        self._disk_init_module = DiskInitializationModule()
        self._add_module(self._disk_init_module)
        self._disk_selection_module = DiskSelectionModule()
        self._add_module(self._disk_selection_module)
        self._snapshot_module = SnapshotModule()
        self._add_module(self._snapshot_module)
        self._bootloader_module = BootloaderModule()
        self._add_module(self._bootloader_module)
        self._fcoe_module = FCOEModule()
        self._add_module(self._fcoe_module)
        self._iscsi_module = ISCSIModule()
        self._add_module(self._iscsi_module)
        self._nvdimm_module = NVDIMMModule()
        self._add_module(self._nvdimm_module)
        self._dasd_module = DASDModule()
        self._add_module(self._dasd_module)
        self._zfcp_module = ZFCPModule()
        self._add_module(self._zfcp_module)
        # Connect modules to signals.
        self.storage_changed.connect(
            self._device_tree_module.on_storage_changed
        )
        self.storage_changed.connect(
            self._disk_init_module.on_storage_changed
        )
        self.storage_changed.connect(
            self._disk_selection_module.on_storage_changed
        )
        self.storage_changed.connect(
            self._snapshot_module.on_storage_changed
        )
        self.storage_changed.connect(
            self._bootloader_module.on_storage_changed
        )
        self.storage_changed.connect(
            self._dasd_module.on_storage_changed
        )
        self._disk_init_module.format_unrecognized_enabled_changed.connect(
            self._dasd_module.on_format_unrecognized_enabled_changed
        )
        self._disk_init_module.format_ldl_enabled_changed.connect(
            self._dasd_module.on_format_ldl_enabled_changed
        )
        self._disk_selection_module.protected_devices_changed.connect(
            self.on_protected_devices_changed
        )

    def _add_module(self, storage_module):
        """Add a base kickstart module."""
        self._modules.append(storage_module)

    def publish(self):
        """Publish the module."""
        TaskContainer.set_namespace(STORAGE.namespace)
        for kickstart_module in self._modules:
            kickstart_module.publish()
        DBus.publish_object(STORAGE.object_path, StorageInterface(self))
        DBus.register_service(STORAGE.service_name)

    @property
    def kickstart_specification(self):
        """Return the kickstart specification."""
        return StorageKickstartSpecification

    def process_kickstart(self, data):
        """Process the kickstart data."""
        # Process the kickstart data in modules.
        for kickstart_module in self._modules:
            kickstart_module.process_kickstart(data)
        # Set the default filesystem type.
        if data.autopart.autopart and data.autopart.fstype:
            self.storage.set_default_fstype(data.autopart.fstype)
        # Create a new partitioning module.
        partitioning_method = PartitioningFactory.get_method_for_kickstart(data)
        if partitioning_method:
            partitioning_module = self.create_partitioning(partitioning_method)
            partitioning_module.process_kickstart(data)

    def setup_kickstart(self, data):
        """Set up the kickstart data."""
        for kickstart_module in self._modules:
            kickstart_module.setup_kickstart(data)
        if self.applied_partitioning:
            self.applied_partitioning.setup_kickstart(data)

    @property
    def storage(self):
        """The storage model.

        Prefers the validated playground over the scanned model; creates
        a fresh model lazily if none exists yet.

        :return: an instance of Blivet
        """
        if self._storage_playground:
            return self._storage_playground
        if not self._current_storage:
            self._set_storage(create_storage())
        return self._current_storage

    def _set_storage(self, storage):
        """Set the current storage model.

        The current storage is the latest model of
        the system’s storage configuration created
        by scanning all devices.

        :param storage: a storage
        """
        self._current_storage = storage
        # While a playground is applied, keep publishing its model rather
        # than the fresh scan.
        if self._storage_playground:
            return
        self.storage_changed.emit(storage)
        log.debug("The storage model has changed.")

    def _set_storage_playground(self, storage):
        """Set the storage playground.

        The storage playground is a model of a valid
        partitioned storage configuration, that can be
        used for an installation.

        :param storage: a storage or None
        """
        self._storage_playground = storage
        # Clearing the playground falls back to the scanned model.
        if storage is None:
            storage = self.storage
        self.storage_changed.emit(storage)
        log.debug("The storage model has changed.")

    def on_protected_devices_changed(self, protected_devices):
        """Update the protected devices in the storage model."""
        if not self._current_storage:
            return
        self.storage.protect_devices(protected_devices)

    def scan_devices_with_task(self):
        """Scan all devices with a task.

        We will reset a copy of the current storage model
        and switch the models if the reset is successful.

        :return: a task
        """
        # Copy the storage.
        storage = self.storage.copy()
        # Set up the storage.
        storage.ignored_disks = self._disk_selection_module.ignored_disks
        storage.exclusive_disks = self._disk_selection_module.exclusive_disks
        storage.protected_devices = self._disk_selection_module.protected_devices
        storage.disk_images = self._disk_selection_module.disk_images
        # Create the task.
        task = ScanDevicesTask(storage)
        # Only adopt the rescanned copy if the scan succeeds.
        task.succeeded_signal.connect(lambda: self._set_storage(storage))
        return task

    def create_partitioning(self, method: PartitioningMethod):
        """Create a new partitioning.

        Allowed values:
            AUTOMATIC
            CUSTOM
            MANUAL
            INTERACTIVE
            BLIVET

        :param PartitioningMethod method: a partitioning method
        :return: a partitioning module
        """
        module = PartitioningFactory.create_partitioning(method)
        # Update the module.
        module.on_storage_changed(
            self._current_storage
        )
        module.on_selected_disks_changed(
            self._disk_selection_module.selected_disks
        )
        # Connect the callbacks to signals.
        self.storage_changed.connect(
            module.on_storage_changed
        )
        self.partitioning_reset.connect(
            module.on_partitioning_reset
        )
        self._disk_selection_module.selected_disks_changed.connect(
            module.on_selected_disks_changed
        )
        # Update the list of modules.
        self._add_created_partitioning(module)
        return module

    @property
    def created_partitioning(self):
        """List of all created partitioning modules."""
        return self._created_partitioning

    def _add_created_partitioning(self, module):
        """Add a created partitioning module."""
        self._created_partitioning.append(module)
        self.created_partitioning_changed.emit(module)
        log.debug("Created the partitioning %s.", module)

    def apply_partitioning(self, module):
        """Apply a partitioning.

        :param module: a partitioning module
        :raise: InvalidStorageError if the partitioning is not valid
        """
        # Validate the partitioning.
        storage = module.storage.copy()
        task = StorageValidateTask(storage)
        report = task.run()
        if not report.is_valid():
            raise InvalidStorageError(" ".join(report.error_messages))
        # Apply the partitioning.
        self._set_storage_playground(storage)
        self._set_applied_partitioning(module)

    @property
    def applied_partitioning(self):
        """The applied partitioning."""
        return self._applied_partitioning

    def _set_applied_partitioning(self, module):
        """Set the applied partitioning.

        :param module: a partitioning module or None
        """
        self._applied_partitioning = module
        self.applied_partitioning_changed.emit()
        # Only for the log message below; the attribute stays None.
        if module is None:
            module = "NONE"
        log.debug("The partitioning %s is applied.", module)

    def reset_partitioning(self):
        """Reset the partitioning."""
        self._set_storage_playground(None)
        self._set_applied_partitioning(None)
        self.partitioning_reset.emit()

    def collect_requirements(self):
        """Return installation requirements for this module.

        :return: a list of requirements
        """
        requirements = []
        # Add the storage requirements.
        for name in self.storage.packages:
            requirements.append(Requirement.for_package(
                name, reason="Required to manage storage devices."
            ))
        # Add other requirements, for example for bootloader.
        for kickstart_module in self._modules:
            requirements.extend(kickstart_module.collect_requirements())
        return requirements

    def install_with_tasks(self):
        """Returns installation tasks of this module.

        :returns: list of installation tasks
        """
        storage = self.storage
        return [
            ActivateFilesystemsTask(storage),
            MountFilesystemsTask(storage)
        ]

    def write_configuration_with_task(self):
        """Write the storage configuration with a task.

        FIXME: This is a temporary workaround.

        :return: an installation task
        """
        return WriteConfigurationTask(self.storage)

    def teardown_with_tasks(self):
        """Returns teardown tasks for this module.

        :return: a list of installation tasks
        """
        storage = self.storage
        return [
            UnmountFilesystemsTask(storage),
            TeardownDiskImagesTask(storage)
        ]
| atodorov/anaconda | pyanaconda/modules/storage/storage.py | Python | gpl-2.0 | 13,962 |
# -*- coding: utf-8 -*-
#
# This file is part of CERN Analysis Preservation Framework.
# Copyright (C) 2016 CERN.
#
# CERN Analysis Preservation Framework is free software; you can redistribute
# it and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# CERN Analysis Preservation Framework is distributed in the hope that it will
# be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CERN Analysis Preservation Framework; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""CAP LHCb permissions"""
from invenio_access import DynamicPermission
from cap.modules.experiments.permissions.common import get_collaboration_group_needs, get_superuser_needs
# Needs granting access to LHCb content: all LHCb collaboration groups
# plus the superuser groups.  (The previous code wrapped each generator in
# a redundant list comprehension before building the set.)
lhcb_group_need = set(get_collaboration_group_needs('LHCb'))
lhcb_group_need |= set(get_superuser_needs())
# Permission combining the group needs collected above.
lhcb_permission = DynamicPermission(*lhcb_group_need)
| xchen101/analysis-preservation.cern.ch | cap/modules/experiments/permissions/lhcb.py | Python | gpl-2.0 | 1,489 |
# -*- coding: utf-8 -*-
from django.conf.urls import url
from . import views
# Bulletin-board routes: course/section feeds, post and comment CRUD,
# reactions, and meme submission/lookup endpoints.
urlpatterns = [
    url(r"^$", views.home, name="board"),
    url(r"^/all$", views.all_feed, name="board_all"),
    url(r"^/course/(?P<course_id>.*)?$", views.course_feed, name="board_course"),
    url(r"^/submit/course/(?P<course_id>.*)?$", views.course_feed_post, name="board_course_post"),
    url(r"^/meme/submit/course/(?P<course_id>.*)?$", views.course_feed_post_meme, name="board_course_post_meme"),
    url(r"^/section/(?P<section_id>.*)?$", views.section_feed, name="board_section"),
    url(r"^/submit/section/(?P<section_id>.*)?$", views.section_feed_post, name="board_section_post"),
    url(r"^/meme/submit/section/(?P<section_id>.*)?$", views.section_feed_post_meme, name="board_section_post_meme"),
    url(r"^/meme/get$", views.get_memes_json, name="board_get_memes_json"),
    url(r"^/post/(?P<post_id>\d+)?$", views.view_post, name="board_post"),
    url(r"^/post/(?P<post_id>\d+)/comment$", views.comment_view, name="board_comment"),
    # url(r"^/add$", views.add_post_view, name="add_boardpost"),
    url(r"^/post/(?P<id>\d+)/modify$", views.modify_post_view, name="board_modify_post"),
    url(r"^/post/(?P<id>\d+)/delete$", views.delete_post_view, name="board_delete_post"),
    url(r"^/comment/(?P<id>\d+)/delete$", views.delete_comment_view, name="board_delete_comment"),
    url(r"^/post/(?P<id>\d+)/react$", views.react_post_view, name="board_react_post"),
]
| jacobajit/ion | intranet/apps/board/urls.py | Python | gpl-2.0 | 1,474 |
# coding=utf-8
#Converts a csv contining country codes and numerical values, to json
#Years should be given in the header, like this:
#
# land, 1980, 1981, 1982
# se, 12, 13, 11
# fi 7, 10, 14
import csv
import json
import argparse
import os.path
import sys
import math
#Check if file exists
def is_valid_file(parser, arg):
    """Argparse type-helper: return `arg` unchanged when it names an existing
    path, otherwise abort argument parsing with a parser error."""
    if os.path.exists(arg):
        return arg
    parser.error("The file %s does not exist!" % arg)
#Check if values are numbers, for our purposes
def is_number(s):
    """Return True when `s` can be parsed as a float, for our purposes."""
    try:
        float(s)
    except ValueError:
        return False
    return True
def doRounding(s, d):
    """Round the numeric string `s` to `d` decimal places.

    d > 0 returns a float; d <= 0 returns an int.  (The original built the
    int by slicing the trailing '.0' off the rounded float's string repr,
    which is fragile; an int() conversion is equivalent and robust.)
    """
    if d > 0:
        return round(float(s), d)
    return int(round(float(s), d))
#Check if values are years
def isYear(s):
    """Return True when header string `s` is a four-digit year like "1980".

    Fixes two defects in the original: it compared lengths with the identity
    operator (`4 is len(s)`), and its float()-based check accepted non-years
    such as "12.4" or "1e99" whenever they happened to be four characters.
    """
    return len(s) == 4 and s.isdigit()
#Define command line arguments
parser = argparse.ArgumentParser(description='Converts a csv contining country codes and numerical values, to json.')
#Input file
parser.add_argument("-i", "--input", dest="infile", required=True,
                    help="input file", metavar="FILE",
                    type=lambda x: is_valid_file(parser,x))
#Output file
parser.add_argument("-o", "--output", dest="outfile",
                    help="output file", metavar="FILE")
#Column
parser.add_argument("-c", "--column", dest="column",
                    help="column containing nation codes. The first column is “0”", type=int, default=0)
#Rounding
parser.add_argument("-d", "--decimals", dest="decimals",
                    help="Number of decimals to keep. Default is -1, meaning “keep all”", type=int, default=-1)
args = parser.parse_args()
inputFile = args.infile #"/home/leo/Världen/demo/patents/raw-pre.csv"
# Default output path: input filename with its extension swapped for .json.
if args.outfile is None:
    outputFile = os.path.splitext(inputFile)[0] + ".json"
    print "No output file given, using %s" % outputFile
else:
    outputFile = args.outfile
# Ask before clobbering an existing output file.
if os.path.isfile(outputFile):
    print "File %s already exists. Overwrite? [y/N]" % outputFile
    choice = raw_input().lower()
    if not choice in ('y', 'yes'):
        sys.exit()
indataColumn = args.column
# NOTE(review): stray debug print - consider removing.
print indataColumn
outdata = {}
headers = []
#Open file
try:
    with open(inputFile, 'rb') as csvfile:
        datacsv = csv.reader(csvfile,delimiter=',',quotechar='"')
        firstRow = True
        for row in datacsv:
            if firstRow:
                # First row holds the column headers (years and labels).
                firstRow = False
                for col in row:
                    headers.append(col)
            else:
                # Remaining rows: nation code column keys a list of values,
                # one entry per year column, None for non-numeric cells.
                currentNation = row[indataColumn]
                outdata[currentNation] = []
                i = 0
                for col in row:
                    currentHeader = headers[i]
                    if isYear(currentHeader):
                        if is_number(col):
                            # NOTE(review): with rounding the value is stored
                            # as a number, without it as the raw string -
                            # mixed types in the output JSON; confirm intended.
                            if (args.decimals > -1):
                                outdata[currentNation].append(doRounding(col,args.decimals))
                            else:
                                outdata[currentNation].append(col)
                        else:
                            outdata[currentNation].append(None)
                    i += 1
except IOError:
    print ("Could not open input file")
print "Writing %s..." % outputFile
with open(outputFile, 'w') as outfile:
    json.dump(outdata, outfile)
print "done"
| jplusplus/thenmap-v0 | generators/utils/convert-csv-to-json.py | Python | gpl-2.0 | 2,991 |
def perfect_number():
sum, count, number = 0, 0, 1
top = input("enter a top value: ")
while number < top:
for i in range(1, number/2+1):
if number/i*i == number:
sum += i
if sum == number:
count += 1
print count, ".perfect number: ", number
number += 1
sum = 0
if count == 0:
print "any perfect number in this range"
if __name__ == '__main__':
perfect_number() | dogancankilment/UnixTools | utils/math/perfect_number.py | Python | gpl-2.0 | 477 |
#!/usr/bin/env python
#coding=utf8
'''
We are playing the Guess Game. The game is as follows:
I pick a number from 1 to n. You have to guess which number I picked.
Every time you guess wrong, I'll tell you whether the number I picked is higher or lower.
However, when you guess a particular number x, and you guess wrong, you pay $x. You win the game when you guess the number I picked.
Example:
n = 10, I pick 8.
First round: You guess 5, I tell you that it's higher. You pay $5.
Second round: You guess 7, I tell you that it's higher. You pay $7.
Third round: You guess 9, I tell you that it's lower. You pay $9.
Game over. 8 is the number I picked.
You end up paying $5 + $7 + $9 = $21.
Given a particular n ≥ 1, find out how much money you need to have to guarantee a win.
Related Topics
Dynamic Programming, Minimax
Similar Questions
Flip Game II, Guess Number Higher or Lower, Can I Win, Find K Closest Elements
f(i,j) = min(x + max(f(i,x-1), f(x+1,j) ) )
@author: Chauncey
beat 24.02%
'''
import heapq
import datetime
import time
import sys
import collections
class Solution(object):
    def getMoneyAmount(self, n):
        """
        :type n: int
        :rtype: int

        Minimax DP: f[lo][hi] is the least amount of money guaranteeing a
        win when the hidden number lies in lo..hi.  Guessing x costs x plus
        the worse of the two remaining sub-ranges; pick the guess that
        minimises that worst case.

        Fixes over the original: removed the stray debug print inside the
        DP loop, and replaced the convoluted 0-based offset indexing with a
        direct 1..n table (same O(n^3) algorithm, same results; also runs
        unchanged on both python 2 and 3).
        """
        if not n or n == 1:
            return 0
        # f[lo][hi] stays 0 whenever lo >= hi (one candidate costs nothing);
        # size n+2 so f[x+1][hi] is safe when guessing x == hi.
        f = [[0] * (n + 2) for _ in range(n + 2)]
        for lo in range(n - 1, 0, -1):
            for hi in range(lo + 1, n + 1):
                # f[lo][x-1] is 0 when x == lo; f[x+1][hi] is 0 when x == hi.
                f[lo][hi] = min(x + max(f[lo][x - 1], f[x + 1][hi])
                                for x in range(lo, hi + 1))
        return f[1][n]
# Simple self-check harness with wall-clock timing (python 2 prints);
# expected answers are noted in the trailing comments.
if __name__ == '__main__':
    solution = Solution()
    start_time = datetime.datetime.now()
    #print solution.getMoneyAmount(10) #
    print solution.getMoneyAmount(1) #0
    print solution.getMoneyAmount(2) #1
    print solution.getMoneyAmount(3) #2
    elapsed = datetime.datetime.now() - start_time
    print 'elapsed:', elapsed.total_seconds()
# localrepo.py - read/write repository class for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from node import hex, nullid, short
from i18n import _
import peer, changegroup, subrepo, discovery, pushkey, obsolete, repoview
import changelog, dirstate, filelog, manifest, context, bookmarks, phases
import lock, transaction, store, encoding
import scmutil, util, extensions, hook, error, revset
import match as matchmod
import merge as mergemod
import tags as tagsmod
from lock import release
import weakref, errno, os, time, inspect
import branchmap
# Short module-local aliases for the cache descriptors used below.
propertycache = util.propertycache
filecache = scmutil.filecache
class repofilecache(filecache):
    """All filecache usage on repo are done for logic that should be unfiltered
    """
    # Redirect every descriptor operation at the unfiltered repo so that all
    # filtered views share a single cached value per repository.
    def __get__(self, repo, type=None):
        return super(repofilecache, self).__get__(repo.unfiltered(), type)
    def __set__(self, repo, value):
        return super(repofilecache, self).__set__(repo.unfiltered(), value)
    def __delete__(self, repo):
        return super(repofilecache, self).__delete__(repo.unfiltered())
class storecache(repofilecache):
    """filecache for files in the store"""
    def join(self, obj, fname):
        # Resolve cache filenames inside .hg/store rather than .hg.
        return obj.sjoin(fname)
class unfilteredpropertycache(propertycache):
    """propertycache that apply to unfiltered repo only"""
    def __get__(self, repo, type=None):
        # Always compute and cache on the unfiltered repo.
        return super(unfilteredpropertycache, self).__get__(repo.unfiltered())
class filteredpropertycache(propertycache):
    """propertycache that must take filtering in account"""
    def cachevalue(self, obj, value):
        # Store on the object given (which may be a filtered view).
        object.__setattr__(obj, self.name, value)
def hasunfilteredcache(repo, name):
    """check if a repo has an unfilteredpropertycache value for <name>"""
    unfi = repo.unfiltered()
    return name in vars(unfi)
def unfilteredmethod(orig):
    """decorate method that always need to be run on unfiltered version"""
    def inner(repo, *args, **kwargs):
        # Swap the (possibly filtered) repo for its unfiltered counterpart.
        return orig(repo.unfiltered(), *args, **kwargs)
    return inner
# Wire-protocol capability sets advertised by local peers; the legacy set
# additionally supports changegroupsubset.
MODERNCAPS = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle'))
LEGACYCAPS = MODERNCAPS.union(set(['changegroupsubset']))
class localpeer(peer.peerrepository):
    '''peer for a local repo; reflects only the most recent API'''

    def __init__(self, repo, caps=MODERNCAPS):
        peer.peerrepository.__init__(self)
        # Expose only the 'served' view of the repository through the peer.
        self._repo = repo.filtered('served')
        self.ui = repo.ui
        self._caps = repo._restrictcapabilities(caps)
        self.requirements = repo.requirements
        self.supportedformats = repo.supportedformats

    def close(self):
        self._repo.close()

    def _capabilities(self):
        return self._caps

    def local(self):
        return self._repo

    def canpush(self):
        return True

    def url(self):
        return self._repo.url()

    def lookup(self, key):
        return self._repo.lookup(key)

    def branchmap(self):
        return self._repo.branchmap()

    def heads(self):
        return self._repo.heads()

    def known(self, nodes):
        return self._repo.known(nodes)

    def getbundle(self, source, heads=None, common=None, bundlecaps=None):
        # Fix: bundlecaps was previously hard-coded to None on the forwarded
        # call, silently discarding the caller's bundle capabilities.
        return self._repo.getbundle(source, heads=heads, common=common,
                                    bundlecaps=bundlecaps)

    # TODO We might want to move the next two calls into legacypeer and add
    # unbundle instead.

    def lock(self):
        return self._repo.lock()

    def addchangegroup(self, cg, source, url):
        return self._repo.addchangegroup(cg, source, url)

    def pushkey(self, namespace, key, old, new):
        return self._repo.pushkey(namespace, key, old, new)

    def listkeys(self, namespace):
        return self._repo.listkeys(namespace)

    def debugwireargs(self, one, two, three=None, four=None, five=None):
        '''used to test argument passing over the wire'''
        return "%s %s %s %s %s" % (one, two, three, four, five)
class locallegacypeer(localpeer):
    '''peer extension which implements legacy methods too; used for tests with
    restricted capabilities'''

    def __init__(self, repo):
        # advertise the legacy capability set (includes changegroupsubset)
        localpeer.__init__(self, repo, caps=LEGACYCAPS)

    # pre-getbundle wire methods; each delegates straight to the repo
    def branches(self, nodes):
        return self._repo.branches(nodes)

    def between(self, pairs):
        return self._repo.between(pairs)

    def changegroup(self, basenodes, source):
        return self._repo.changegroup(basenodes, source)

    def changegroupsubset(self, bases, heads, source):
        return self._repo.changegroupsubset(bases, heads, source)
class localrepository(object):
    """A repository backed by a local .hg directory."""

    # revlog formats this class knows how to write
    supportedformats = set(('revlogv1', 'generaldelta'))
    # everything we can open: formats plus store-layout requirements
    supported = supportedformats | set(('store', 'fncache', 'shared',
                                        'dotencode'))
    # requirements that are forwarded to revlog openers via sopener.options
    openerreqs = set(('revlogv1', 'generaldelta'))
    requirements = ['revlogv1']
    # name of the repoview filter applied to this instance; None = unfiltered
    filtername = None
def _baserequirements(self, create):
    # Hand back a fresh copy so callers may append to it without
    # mutating the class-level default list.
    return list(self.requirements)
def __init__(self, baseui, path=None, create=False):
    """Open (or, with create=True, initialize) the repository at *path*.

    Sets up the working-directory and .hg vfs objects, reads per-repo
    configuration, determines the requirements set, honours shared
    repositories via .hg/sharedpath, and wires up the store.
    """
    # working directory vfs: expand ~/env vars and resolve symlinks
    self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
    self.wopener = self.wvfs
    self.root = self.wvfs.base
    self.path = self.wvfs.join(".hg")
    self.origroot = path
    self.auditor = scmutil.pathauditor(self.root, self._checknested)
    self.vfs = scmutil.vfs(self.path)
    self.opener = self.vfs
    self.baseui = baseui
    self.ui = baseui.copy()
    # A list of callback to shape the phase if no data were found.
    # Callback are in the form: func(repo, roots) --> processed root.
    # This list it to be filled by extension during repo setup
    self._phasedefaults = []
    try:
        # per-repository configuration and extensions; a missing hgrc is fine
        self.ui.readconfig(self.join("hgrc"), self.root)
        extensions.loadall(self.ui)
    except IOError:
        pass

    if not self.vfs.isdir():
        if create:
            if not self.wvfs.exists():
                self.wvfs.makedirs()
            self.vfs.makedir(notindexed=True)
            requirements = self._baserequirements(create)
            if self.ui.configbool('format', 'usestore', True):
                self.vfs.mkdir("store")
                requirements.append("store")
                if self.ui.configbool('format', 'usefncache', True):
                    requirements.append("fncache")
                    if self.ui.configbool('format', 'dotencode', True):
                        requirements.append('dotencode')
                # create an invalid changelog
                self.vfs.append(
                    "00changelog.i",
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                )
            if self.ui.configbool('format', 'generaldelta', False):
                requirements.append("generaldelta")
            requirements = set(requirements)
        else:
            raise error.RepoError(_("repository %s not found") % path)
    elif create:
        raise error.RepoError(_("repository %s already exists") % path)
    else:
        try:
            requirements = scmutil.readrequires(self.vfs, self.supported)
        except IOError, inst:
            # a repo without a requires file is treated as empty-requirements
            if inst.errno != errno.ENOENT:
                raise
            requirements = set()

    self.sharedpath = self.path
    try:
        # shared repos: .hg/sharedpath points at the real store location
        vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
                          realpath=True)
        s = vfs.base
        if not vfs.exists():
            raise error.RepoError(
                _('.hg/sharedpath points to nonexistent directory %s') % s)
        self.sharedpath = s
    except IOError, inst:
        if inst.errno != errno.ENOENT:
            raise

    self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
    self.spath = self.store.path
    self.svfs = self.store.vfs
    self.sopener = self.svfs
    self.sjoin = self.store.join
    self.vfs.createmode = self.store.createmode
    self._applyrequirements(requirements)
    if create:
        self._writerequirements()

    self._branchcaches = {}
    self.filterpats = {}
    self._datafilters = {}
    self._transref = self._lockref = self._wlockref = None

    # A cache for various files under .hg/ that tracks file changes,
    # (used by the filecache decorator)
    #
    # Maps a property name to its util.filecacheentry
    self._filecache = {}

    # hold sets of revision to be filtered
    # should be cleared when something might have changed the filter value:
    # - new changesets,
    # - phase change,
    # - new obsolescence marker,
    # - working directory parent change,
    # - bookmark changes
    self.filteredrevcache = {}
def close(self):
    # local repositories hold no persistent connection; nothing to release
    pass
def _restrictcapabilities(self, caps):
    # hook point for subclasses/extensions to trim the advertised peer
    # capability set; the base implementation allows everything
    return caps
def _applyrequirements(self, requirements):
    """Record *requirements* and expose the revlog-relevant subset as
    opener options (each mapped to 1)."""
    self.requirements = requirements
    opts = {}
    for req in requirements:
        if req in self.openerreqs:
            opts[req] = 1
    self.sopener.options = opts
def _writerequirements(self):
    # persist the current requirements set to .hg/requires, one entry per
    # line, sorted for deterministic output
    reqfile = self.opener("requires", "w")
    for r in sorted(self.requirements):
        reqfile.write("%s\n" % r)
    reqfile.close()
def _checknested(self, path):
    """Determine if path is a legal nested repository.

    Returns True only when *path* lies under self.root and some prefix of
    it is registered as a subrepository in the working context's substate
    (either exactly, or recursively via the subrepo's own check).
    """
    if not path.startswith(self.root):
        return False
    subpath = path[len(self.root) + 1:]
    normsubpath = util.pconvert(subpath)

    # XXX: Checking against the current working copy is wrong in
    # the sense that it can reject things like
    #
    #   $ hg cat -r 10 sub/x.txt
    #
    # if sub/ is no longer a subrepository in the working copy
    # parent revision.
    #
    # However, it can of course also allow things that would have
    # been rejected before, such as the above cat command if sub/
    # is a subrepository now, but was a normal directory before.
    # The old path auditor would have rejected by mistake since it
    # panics when it sees sub/.hg/.
    #
    # All in all, checking against the working copy seems sensible
    # since we want to prevent access to nested repositories on
    # the filesystem *now*.
    ctx = self[None]
    parts = util.splitpath(subpath)
    while parts:
        # test successively shorter prefixes of the path against substate
        prefix = '/'.join(parts)
        if prefix in ctx.substate:
            if prefix == normsubpath:
                return True
            else:
                # the match is a strict prefix; defer to the subrepo
                sub = ctx.sub(prefix)
                return sub.checknested(subpath[len(prefix) + 1:])
        else:
            parts.pop()
    return False
def peer(self):
    # a new localpeer every call: not cached to avoid reference cycle
    return localpeer(self) # not cached to avoid reference cycle
def unfiltered(self):
    """Return unfiltered version of the repository

    Intended to be overwritten by filtered repo."""
    # the base class IS the unfiltered repo, so return self
    return self
def filtered(self, name):
    """Return a filtered version of a repository"""
    # build a new class with the mixin and the current class
    # (possibly subclass of the repo)
    class proxycls(repoview.repoview, self.unfiltered().__class__):
        pass
    return proxycls(self, name)
@repofilecache('bookmarks')
def _bookmarks(self):
    # mapping of bookmark name -> node, backed by .hg/bookmarks
    return bookmarks.bmstore(self)
@repofilecache('bookmarks.current')
def _bookmarkcurrent(self):
    # name of the active bookmark (or None), from .hg/bookmarks.current
    return bookmarks.readcurrent(self)
def bookmarkheads(self, bookmark):
    """Return the nodes of every bookmark sharing *bookmark*'s base name
    (the part before any '@' suffix)."""
    name = bookmark.split('@', 1)[0]
    return [n for mark, n in self._bookmarks.iteritems()
            if mark.split('@', 1)[0] == name]
@storecache('phaseroots')
def _phasecache(self):
    # phase information (public/draft/secret), backed by .hg/store/phaseroots
    return phases.phasecache(self, self._phasedefaults)
@storecache('obsstore')
def obsstore(self):
    # obsolescence markers; warn when markers exist but the (experimental)
    # feature flag is off, since they will not be honoured
    store = obsolete.obsstore(self.sopener)
    if store and not obsolete._enabled:
        # message is rare enough to not be translated
        msg = 'obsolete feature not enabled but %i markers found!\n'
        self.ui.warn(msg % len(list(store)))
    return store
@storecache('00changelog.i')
def changelog(self):
    c = changelog.changelog(self.sopener)
    # inside a pretxn hook, HG_PENDING points at the repo whose pending
    # (uncommitted-transaction) changesets should be visible
    if 'HG_PENDING' in os.environ:
        p = os.environ['HG_PENDING']
        if p.startswith(self.root):
            c.readpending('00changelog.i.a')
    return c
@storecache('00manifest.i')
def manifest(self):
    # the manifest revlog, lazily opened from the store
    return manifest.manifest(self.sopener)
@repofilecache('dirstate')
def dirstate(self):
    # mutable flag in a list so the closure below can update it (py2 has
    # no 'nonlocal'); ensures the unknown-parent warning prints only once
    warned = [0]
    def validate(node):
        # map unknown working-directory parents to nullid instead of
        # crashing; warn the first time it happens
        try:
            self.changelog.rev(node)
            return node
        except error.LookupError:
            if not warned[0]:
                warned[0] = True
                self.ui.warn(_("warning: ignoring unknown"
                               " working parent %s!\n") % short(node))
            return nullid

    return dirstate.dirstate(self.opener, self.ui, self.root, validate)
def __getitem__(self, changeid):
    # repo[None] is the working directory; anything else is a changeset
    if changeid is None:
        return context.workingctx(self)
    return context.changectx(self, changeid)
def __contains__(self, changeid):
    # True when *changeid* resolves to a known changeset
    try:
        return bool(self.lookup(changeid))
    except error.RepoLookupError:
        return False
def __nonzero__(self):
    # a repository is always truthy, even when empty
    return True
def __len__(self):
    # number of revisions in the repository
    return len(self.changelog)
def __iter__(self):
    # iterate over revision numbers, oldest first
    return iter(self.changelog)
def revs(self, expr, *args):
    '''Return a list of revisions matching the given revset'''
    spec = revset.formatspec(expr, *args)
    matcher = revset.match(None, spec)
    return list(matcher(self, list(self)))
def set(self, expr, *args):
    '''
    Yield a context for each matching revision, after doing arg
    replacement via revset.formatspec
    '''
    # lazy: contexts are created one at a time as the caller iterates
    for r in self.revs(expr, *args):
        yield self[r]
def url(self):
    # local repositories are identified by a file: URL
    return 'file:' + self.root
def hook(self, name, throw=False, **args):
    # run configured hooks for *name*; throw=True raises on hook failure
    return hook.hook(self.ui, self, name, throw, **args)
@unfilteredmethod
def _tag(self, names, node, message, local, user, date, extra={}):
    """Low-level tagging: write *names* -> *node* to localtags or .hgtags.

    Runs 'pretag'/'tag' hooks, warns about branch-name collisions, and
    for non-local tags commits the .hgtags change (returning the new tag
    changeset's node). Local tags write to .hg/localtags and return None.
    NOTE(review): 'extra={}' is a mutable default; it is never mutated
    here, but callers should still pass a fresh dict.
    """
    if isinstance(names, str):
        names = (names,)

    branches = self.branchmap()
    for name in names:
        self.hook('pretag', throw=True, node=hex(node), tag=name,
                  local=local)
        if name in branches:
            self.ui.warn(_("warning: tag %s conflicts with existing"
                           " branch name\n") % name)

    def writetags(fp, names, munge, prevtags):
        # append tag lines; record the tag's previous node first so the
        # file history shows the supersession
        fp.seek(0, 2)
        if prevtags and prevtags[-1] != '\n':
            fp.write('\n')
        for name in names:
            m = munge and munge(name) or name
            if (self._tagscache.tagtypes and
                name in self._tagscache.tagtypes):
                old = self.tags().get(name, nullid)
                fp.write('%s %s\n' % (hex(old), m))
            fp.write('%s %s\n' % (hex(node), m))
        fp.close()

    prevtags = ''
    if local:
        try:
            fp = self.opener('localtags', 'r+')
        except IOError:
            fp = self.opener('localtags', 'a')
        else:
            prevtags = fp.read()

        # local tags are stored in the current charset
        writetags(fp, names, None, prevtags)
        for name in names:
            self.hook('tag', node=hex(node), tag=name, local=local)
        return

    try:
        fp = self.wfile('.hgtags', 'rb+')
    except IOError, e:
        if e.errno != errno.ENOENT:
            raise
        fp = self.wfile('.hgtags', 'ab')
    else:
        prevtags = fp.read()

    # committed tags are stored in UTF-8
    writetags(fp, names, encoding.fromlocal, prevtags)

    fp.close()

    self.invalidatecaches()

    if '.hgtags' not in self.dirstate:
        self[None].add(['.hgtags'])

    m = matchmod.exact(self.root, '', ['.hgtags'])
    tagnode = self.commit(message, user, date, extra=extra, match=m)

    for name in names:
        self.hook('tag', node=hex(node), tag=name, local=local)

    return tagnode
def tag(self, names, node, message, local, user, date):
    '''tag a revision with one or more symbolic names.

    names is a list of strings or, when adding a single tag, names may be a
    string.

    if local is True, the tags are stored in a per-repository file.
    otherwise, they are stored in the .hgtags file, and a new
    changeset is committed with the change.

    keyword arguments:

    local: whether to store tags in non-version-controlled file
    (default False)

    message: commit message to use if committing

    user: name of user to use if committing

    date: date tuple to use if committing'''

    if not local:
        # refuse when .hgtags has uncommitted changes (appears in any of
        # the first five status lists: modified/added/removed/deleted/unknown)
        for x in self.status()[:5]:
            if '.hgtags' in x:
                raise util.Abort(_('working copy of .hgtags is changed '
                                   '(please commit .hgtags manually)'))

    self.tags() # instantiate the cache
    self._tag(names, node, message, local, user, date)
@filteredpropertycache
def _tagscache(self):
    '''Returns a tagscache object that contains various tags related
    caches.'''

    # This simplifies its cache management by having one decorated
    # function (this one) and the rest simply fetch things from it.
    class tagscache(object):
        def __init__(self):
            # These two define the set of tags for this repository. tags
            # maps tag name to node; tagtypes maps tag name to 'global' or
            # 'local'. (Global tags are defined by .hgtags across all
            # heads, and local tags are defined in .hg/localtags.)
            # They constitute the in-memory cache of tags.
            self.tags = self.tagtypes = None

            # lazily populated by nodetags() / tagslist() respectively
            self.nodetagscache = self.tagslist = None

    cache = tagscache()
    cache.tags, cache.tagtypes = self._findtags()

    return cache
def tags(self):
    '''return a mapping of tag to node'''
    t = {}
    if self.changelog.filteredrevs:
        # a filtered view cannot reuse the unfiltered cache: recompute
        # (tagtypes is discarded here)
        tags, tt = self._findtags()
    else:
        tags = self._tagscache.tags
    for k, v in tags.iteritems():
        try:
            # ignore tags to unknown nodes
            self.changelog.rev(v)
            t[k] = v
        except (error.LookupError, ValueError):
            pass
    return t
def _findtags(self):
    '''Do the hard work of finding tags.  Return a pair of dicts
    (tags, tagtypes) where tags maps tag name to node, and tagtypes
    maps tag name to a string like \'global\' or \'local\'.
    Subclasses or extensions are free to add their own tags, but
    should be aware that the returned dicts will be retained for the
    duration of the localrepo object.'''

    # XXX what tagtype should subclasses/extensions use?  Currently
    # mq and bookmarks add tags, but do not set the tagtype at all.
    # Should each extension invent its own tag type?  Should there
    # be one tagtype for all such "virtual" tags?  Or is the status
    # quo fine?

    alltags = {}                    # map tag name to (node, hist)
    tagtypes = {}

    tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
    tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

    # Build the return dicts.  Have to re-encode tag names because
    # the tags module always uses UTF-8 (in order not to lose info
    # writing to the cache), but the rest of Mercurial wants them in
    # local encoding.
    tags = {}
    for (name, (node, hist)) in alltags.iteritems():
        if node != nullid:
            tags[encoding.tolocal(name)] = node
    # 'tip' is always present, pointing at the newest changeset
    tags['tip'] = self.changelog.tip()
    tagtypes = dict([(encoding.tolocal(name), value)
                     for (name, value) in tagtypes.iteritems()])
    return (tags, tagtypes)
def tagtype(self, tagname):
    '''
    return the type of the given tag. result can be:

    'local'  : a local tag
    'global' : a global tag
    None     : tag does not exist
    '''
    # dict.get returns None for unknown tags
    return self._tagscache.tagtypes.get(tagname)
def tagslist(self):
    '''return a list of tags ordered by revision'''
    if not self._tagscache.tagslist:
        # sort (rev, tag, node) triples, then drop the rev key
        l = []
        for t, n in self.tags().iteritems():
            r = self.changelog.rev(n)
            l.append((r, t, n))
        self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]

    return self._tagscache.tagslist
def nodetags(self, node):
    '''return the tags associated with a node'''
    if not self._tagscache.nodetagscache:
        # invert the tag -> node mapping, lazily, once
        nodetagscache = {}
        for t, n in self._tagscache.tags.iteritems():
            nodetagscache.setdefault(n, []).append(t)
        for tags in nodetagscache.itervalues():
            tags.sort()
        self._tagscache.nodetagscache = nodetagscache
    return self._tagscache.nodetagscache.get(node, [])
def nodebookmarks(self, node):
    """Return a sorted list of every bookmark pointing at *node*."""
    return sorted(mark for mark, n in self._bookmarks.iteritems()
                  if n == node)
def branchmap(self):
    '''returns a dictionary {branch: [branchheads]}'''
    # refresh (if needed) and fetch the cache entry for this filter level
    branchmap.updatecache(self)
    return self._branchcaches[self.filtername]
def _branchtip(self, heads):
    '''return the tipmost branch head in heads'''
    # walk from the tipmost head downwards and take the first one whose
    # changeset does not close the branch; if every head is closed, the
    # tipmost (last) head wins
    for h in reversed(heads):
        if not self[h].closesbranch():
            return h
    return heads[-1]
def branchtip(self, branch):
    '''return the tip node for a given branch'''
    if branch not in self.branchmap():
        raise error.RepoLookupError(_("unknown branch '%s'") % branch)
    return self._branchtip(self.branchmap()[branch])
def branchtags(self):
    '''return a dict where branch names map to the tipmost head of
    the branch, open heads come before closed'''
    return dict((bn, self._branchtip(heads))
                for bn, heads in self.branchmap().iteritems())
def lookup(self, key):
    # resolve any changeset identifier (rev, hash, tag, ...) to its node
    return self[key].node()
def lookupbranch(self, key, remote=None):
    # if *key* names a branch (on the remote if given), return it as-is;
    # otherwise treat it as a changeset id and return that changeset's branch
    repo = remote or self
    if key in repo.branchmap():
        return key

    # prefer the remote's local repo when available for the changeset lookup
    repo = (remote and remote.local()) and remote or self
    return repo[key].branch()
def known(self, nodes):
    """For each node, report whether it is present locally and not secret."""
    nm = self.changelog.nodemap
    pc = self._phasecache
    result = []
    for n in nodes:
        rev = nm.get(n)
        # known == exists locally AND phase below secret
        result.append(rev is not None and
                      pc.phase(self, rev) < phases.secret)
    return result
def local(self):
    # this repo is local; returns self (truthy) rather than a bool
    return self
def cancopy(self):
    return self.local() # so statichttprepo's override of local() works
def join(self, f):
    # path of *f* inside the .hg directory
    return os.path.join(self.path, f)
def wjoin(self, f):
    # path of *f* inside the working directory
    return os.path.join(self.root, f)
def file(self, f):
    # return the filelog (per-file revlog) for tracked file *f*;
    # a single leading '/' is tolerated and stripped
    # NOTE(review): raises IndexError for f == '' — callers appear to
    # always pass a non-empty path; confirm before relying on it
    if f[0] == '/':
        f = f[1:]
    return filelog.filelog(self.sopener, f)
def changectx(self, changeid):
    # convenience alias for repo[changeid]
    return self[changeid]
def parents(self, changeid=None):
    '''get list of changectxs for parents of changeid'''
    # default (None) gives the working directory's parents
    return self[changeid].parents()
def setparents(self, p1, p2=nullid):
    """Set the working directory's parents, fixing up copy records."""
    copies = self.dirstate.setparents(p1, p2)
    pctx = self[p1]
    if copies:
        # Adjust copy records, the dirstate cannot do it, it
        # requires access to parents manifests. Preserve them
        # only for entries added to first parent.
        for f in copies:
            if f not in pctx and copies[f] in pctx:
                self.dirstate.copy(copies[f], f)
    if p2 == nullid:
        # when dropping the second parent, forget copies whose source and
        # destination are both unknown to the remaining parent
        for f, s in sorted(self.dirstate.copies().items()):
            if f not in pctx and s not in pctx:
                self.dirstate.copy(None, f)
def filectx(self, path, changeid=None, fileid=None):
    """changeid can be a changeset revision, node, or tag.
       fileid can be a file revision or node."""
    return context.filectx(self, path, changeid, fileid)
def getcwd(self):
    # current working directory, relative to the repo root
    return self.dirstate.getcwd()
def pathto(self, f, cwd=None):
    # repo-relative path *f* expressed relative to *cwd*
    return self.dirstate.pathto(f, cwd)
def wfile(self, f, mode='r'):
    # open file *f* in the working directory
    return self.wopener(f, mode)
def _link(self, f):
    # True when working-directory file *f* is a symlink
    return self.wvfs.islink(f)
def _loadfilter(self, filter):
    """Load (and memoize) the [encode]/[decode] filter patterns named by
    *filter*, returning a list of (matcher, filterfn, params) triples."""
    if filter not in self.filterpats:
        l = []
        for pat, cmd in self.ui.configitems(filter):
            if cmd == '!':
                # '!' disables a pattern inherited from another config file
                continue
            mf = matchmod.match(self.root, '', [pat])
            fn = None
            params = cmd
            # prefer a registered in-process data filter whose name
            # prefixes the command; the remainder becomes its params
            for name, filterfn in self._datafilters.iteritems():
                if cmd.startswith(name):
                    fn = filterfn
                    params = cmd[len(name):].lstrip()
                    break
            if not fn:
                # fall back to running the command as a shell pipe
                fn = lambda s, c, **kwargs: util.filter(s, c)
            # Wrap old filters not supporting keyword arguments
            if not inspect.getargspec(fn)[2]:
                oldfn = fn
                fn = lambda s, c, **kwargs: oldfn(s, c)
            l.append((mf, fn, params))

        self.filterpats[filter] = l
    return self.filterpats[filter]
def _filter(self, filterpats, filename, data):
    # apply the first matching filter only, then stop
    for mf, fn, cmd in filterpats:
        if mf(filename):
            self.ui.debug("filtering %s through %s\n" % (filename, cmd))
            data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
            break

    return data
@unfilteredpropertycache
def _encodefilterpats(self):
    # cached [encode] filters (applied when reading from the working dir)
    return self._loadfilter('encode')
@unfilteredpropertycache
def _decodefilterpats(self):
    # cached [decode] filters (applied when writing to the working dir)
    return self._loadfilter('decode')
def adddatafilter(self, name, filter):
    # register an in-process data filter usable from [encode]/[decode]
    self._datafilters[name] = filter
def wread(self, filename):
    """Read *filename* from the working directory, applying encode
    filters; symlinks yield their target path as the data."""
    if self._link(filename):
        data = self.wvfs.readlink(filename)
    else:
        data = self.wopener.read(filename)
    return self._filter(self._encodefilterpats, filename, data)
def wwrite(self, filename, data, flags):
    """Write *data* to *filename* in the working directory, applying
    decode filters and honouring the 'l' (symlink) / 'x' (exec) flags."""
    data = self._filter(self._decodefilterpats, filename, data)
    if 'l' in flags:
        self.wopener.symlink(data, filename)
    else:
        self.wopener.write(filename, data)
        if 'x' in flags:
            # setflags(path, islink, isexec)
            self.wvfs.setflags(filename, False, True)
def wwritedata(self, filename, data):
    # run *data* through the decode filters without touching the disk
    return self._filter(self._decodefilterpats, filename, data)
def transaction(self, desc):
    """Open (or nest into) a store transaction described by *desc*."""
    # reuse a still-running transaction as a nested one
    tr = self._transref and self._transref() or None
    if tr and tr.running():
        return tr.nest()

    # abort here if the journal already exists
    if self.svfs.exists("journal"):
        raise error.RepoError(
            _("abandoned transaction found - run hg recover"))

    self._writejournal(desc)
    renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]

    tr = transaction.transaction(self.ui.warn, self.sopener,
                                 self.sjoin("journal"),
                                 aftertrans(renames),
                                 self.store.createmode)
    # weakref so an open transaction does not keep the repo alive
    self._transref = weakref.ref(tr)
    return tr
def _journalfiles(self):
    # (vfs, name) pairs of every journal file; store files use svfs,
    # non-store files use the plain .hg vfs
    return ((self.svfs, 'journal'),
            (self.vfs, 'journal.dirstate'),
            (self.vfs, 'journal.branch'),
            (self.vfs, 'journal.desc'),
            (self.vfs, 'journal.bookmarks'),
            (self.svfs, 'journal.phaseroots'))
def undofiles(self):
    # full paths of the undo.* counterparts of the journal files
    return [vfs.join(undoname(x)) for vfs, x in self._journalfiles()]
def _writejournal(self, desc):
    # snapshot pre-transaction state so rollback can restore it;
    # tryread tolerates files that do not exist yet
    self.opener.write("journal.dirstate",
                      self.opener.tryread("dirstate"))
    self.opener.write("journal.branch",
                      encoding.fromlocal(self.dirstate.branch()))
    self.opener.write("journal.desc",
                      "%d\n%s\n" % (len(self), desc))
    self.opener.write("journal.bookmarks",
                      self.opener.tryread("bookmarks"))
    self.sopener.write("journal.phaseroots",
                       self.sopener.tryread("phaseroots"))
def recover(self):
    """Roll back an interrupted transaction, if any.

    Returns True when a journal was found and rolled back, else False.
    """
    lock = self.lock()
    try:
        if self.svfs.exists("journal"):
            self.ui.status(_("rolling back interrupted transaction\n"))
            transaction.rollback(self.sopener, self.sjoin("journal"),
                                 self.ui.warn)
            self.invalidate()
            return True
        else:
            self.ui.warn(_("no interrupted transaction available\n"))
            return False
    finally:
        lock.release()
def rollback(self, dryrun=False, force=False):
    """Undo the last transaction; returns 0 on success, 1 when there is
    no rollback information."""
    wlock = lock = None
    try:
        # both locks: rollback touches the store and the dirstate
        wlock = self.wlock()
        lock = self.lock()
        if self.svfs.exists("undo"):
            return self._rollback(dryrun, force)
        else:
            self.ui.warn(_("no rollback information available\n"))
            return 1
    finally:
        release(lock, wlock)
@unfilteredmethod # Until we get smarter cache management
def _rollback(self, dryrun, force):
    """Implementation of rollback(); both locks are already held.

    Restores the store from the undo files, then — if the working
    directory parents vanished — restores the saved dirstate, branch
    and reports the new working-directory base. Returns 0.
    """
    ui = self.ui
    try:
        # undo.desc: "<old repo length>\n<transaction desc>[\n<detail>]"
        args = self.opener.read('undo.desc').splitlines()
        (oldlen, desc, detail) = (int(args[0]), args[1], None)
        if len(args) >= 3:
            detail = args[2]
        oldtip = oldlen - 1

        if detail and ui.verbose:
            msg = (_('repository tip rolled back to revision %s'
                     ' (undo %s: %s)\n')
                   % (oldtip, desc, detail))
        else:
            msg = (_('repository tip rolled back to revision %s'
                     ' (undo %s)\n')
                   % (oldtip, desc))
    except IOError:
        msg = _('rolling back unknown transaction\n')
        desc = None

    # refuse to roll back a commit from another checkout unless forced
    if not force and self['.'] != self['tip'] and desc == 'commit':
        raise util.Abort(
            _('rollback of last commit while not checked out '
              'may lose data'), hint=_('use -f to force'))

    ui.status(msg)
    if dryrun:
        return 0

    parents = self.dirstate.parents()
    self.destroying()
    transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
    if self.vfs.exists('undo.bookmarks'):
        self.vfs.rename('undo.bookmarks', 'bookmarks')
    if self.svfs.exists('undo.phaseroots'):
        self.svfs.rename('undo.phaseroots', 'phaseroots')
    self.invalidate()

    # only restore dirstate/branch when a working-dir parent was stripped
    parentgone = (parents[0] not in self.changelog.nodemap or
                  parents[1] not in self.changelog.nodemap)
    if parentgone:
        self.vfs.rename('undo.dirstate', 'dirstate')
        try:
            branch = self.opener.read('undo.branch')
            self.dirstate.setbranch(encoding.tolocal(branch))
        except IOError:
            ui.warn(_('named branch could not be reset: '
                      'current branch is still \'%s\'\n')
                    % self.dirstate.branch())

        self.dirstate.invalidate()
        parents = tuple([p.rev() for p in self.parents()])
        if len(parents) > 1:
            ui.status(_('working directory now based on '
                        'revisions %d and %d\n') % parents)
        else:
            ui.status(_('working directory now based on '
                        'revision %d\n') % parents)
    # TODO: if we know which new heads may result from this rollback, pass
    # them to destroy(), which will prevent the branchhead cache from being
    # invalidated.
    self.destroyed()
    return 0
def invalidatecaches(self):
    """Drop the tags, branch and volatile-set caches."""
    if '_tagscache' in vars(self):
        # can't use delattr on proxy
        del self.__dict__['_tagscache']

    self.unfiltered()._branchcaches.clear()
    self.invalidatevolatilesets()
def invalidatevolatilesets(self):
    # drop per-filter revision sets and obsolescence-derived caches
    self.filteredrevcache.clear()
    obsolete.clearobscaches(self)
def invalidatedirstate(self):
    '''Invalidates the dirstate, causing the next call to dirstate
    to check if it was modified since the last time it was read,
    rereading it if it has.

    This is different to dirstate.invalidate() that it doesn't always
    rereads the dirstate. Use dirstate.invalidate() if you want to
    explicitly read the dirstate again (i.e. restoring it to a previous
    known good state).'''
    if hasunfilteredcache(self, 'dirstate'):
        # also clear the dirstate's own filecache entries before dropping it
        for k in self.dirstate._filecache:
            try:
                delattr(self.dirstate, k)
            except AttributeError:
                pass
        delattr(self.unfiltered(), 'dirstate')
def invalidate(self):
    """Drop every filecache-backed property (except dirstate) plus the
    derived caches, forcing a re-read on next access."""
    unfiltered = self.unfiltered() # all file caches are stored unfiltered
    for k in self._filecache:
        # dirstate is invalidated separately in invalidatedirstate()
        if k == 'dirstate':
            continue

        try:
            delattr(unfiltered, k)
        except AttributeError:
            pass
    self.invalidatecaches()
def _lock(self, lockname, wait, releasefn, acquirefn, desc):
    """Acquire *lockname*, optionally waiting for a holder to release it.

    First tries a non-blocking acquire; on contention with wait=True it
    warns and retries with the configured timeout. Runs *acquirefn*
    after a successful acquire and returns the lock object.
    """
    try:
        l = lock.lock(lockname, 0, releasefn, desc=desc)
    except error.LockHeld, inst:
        if not wait:
            raise
        self.ui.warn(_("waiting for lock on %s held by %r\n") %
                     (desc, inst.locker))
        # default to 600 seconds timeout
        l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                      releasefn, desc=desc)
    if acquirefn:
        acquirefn()
    return l
def _afterlock(self, callback):
    """add a callback to the current repository lock.

    The callback will be executed on lock release."""
    l = self._lockref and self._lockref()
    if l:
        l.postrelease.append(callback)
    else:
        # no lock currently held: run immediately
        callback()
def lock(self, wait=True):
    '''Lock the repository store (.hg/store) and return a weak reference
    to the lock. Use this before modifying the store (e.g. committing or
    stripping). If you are opening a transaction, get a lock as well.)'''
    # re-enter an already-held lock instead of deadlocking on ourselves
    l = self._lockref and self._lockref()
    if l is not None and l.held:
        l.lock()
        return l

    def unlock():
        # flush pending store/phase state and refresh filecache stamps
        self.store.write()
        if hasunfilteredcache(self, '_phasecache'):
            self._phasecache.write()
        for k, ce in self._filecache.items():
            if k == 'dirstate' or k not in self.__dict__:
                continue
            ce.refresh()

    l = self._lock(self.sjoin("lock"), wait, unlock,
                   self.invalidate, _('repository %s') % self.origroot)
    self._lockref = weakref.ref(l)
    return l
def wlock(self, wait=True):
    '''Lock the non-store parts of the repository (everything under
    .hg except .hg/store) and return a weak reference to the lock.
    Use this before modifying files in .hg.'''
    # re-enter an already-held wlock
    l = self._wlockref and self._wlockref()
    if l is not None and l.held:
        l.lock()
        return l

    def unlock():
        # persist the dirstate and refresh its cache stamp on release
        self.dirstate.write()
        self._filecache['dirstate'].refresh()

    l = self._lock(self.join("wlock"), wait, unlock,
                   self.invalidatedirstate, _('working directory of %s') %
                   self.origroot)
    self._wlockref = weakref.ref(l)
    return l
def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
    """
    commit an individual file as part of a larger transaction

    Appends *fname* to *changelist* when a new filelog revision is
    created and returns the resulting file node; otherwise returns the
    first parent's node (flag-only or unchanged cases).
    """

    fname = fctx.path()
    text = fctx.data()
    flog = self.file(fname)
    fparent1 = manifest1.get(fname, nullid)
    fparent2 = fparent2o = manifest2.get(fname, nullid)

    meta = {}
    copy = fctx.renamed()
    if copy and copy[0] != fname:
        # Mark the new revision of this file as a copy of another
        # file.  This copy data will effectively act as a parent
        # of this new revision.  If this is a merge, the first
        # parent will be the nullid (meaning "look up the copy data")
        # and the second one will be the other parent.  For example:
        #
        # 0 --- 1 --- 3   rev1 changes file foo
        #   \       /     rev2 renames foo to bar and changes it
        #    \- 2 -/      rev3 should have bar with all changes and
        #                      should record that bar descends from
        #                      bar in rev2 and foo in rev1
        #
        # this allows this merge to succeed:
        #
        # 0 --- 1 --- 3   rev4 reverts the content change from rev2
        #   \       /     merging rev3 and rev4 should use bar@rev2
        #    \- 2 --- 4        as the merge base
        #
        cfname = copy[0]
        crev = manifest1.get(cfname)
        newfparent = fparent2

        if manifest2: # branch merge
            if fparent2 == nullid or crev is None: # copied on remote side
                if cfname in manifest2:
                    crev = manifest2[cfname]
                    newfparent = fparent1

        # find source in nearest ancestor if we've lost track
        if not crev:
            self.ui.debug(" %s: searching for copy revision for %s\n" %
                          (fname, cfname))
            for ancestor in self[None].ancestors():
                if cfname in ancestor:
                    crev = ancestor[cfname].filenode()
                    break

        if crev:
            self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
            meta["copy"] = cfname
            meta["copyrev"] = hex(crev)
            fparent1, fparent2 = nullid, newfparent
        else:
            self.ui.warn(_("warning: can't find ancestor for '%s' "
                           "copied from '%s'!\n") % (fname, cfname))

    elif fparent2 != nullid:
        # is one parent an ancestor of the other?
        fparentancestor = flog.ancestor(fparent1, fparent2)
        if fparentancestor == fparent1:
            fparent1, fparent2 = fparent2, nullid
        elif fparentancestor == fparent2:
            fparent2 = nullid

    # is the file changed?
    if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
        changelist.append(fname)
        return flog.add(text, meta, tr, linkrev, fparent1, fparent2)

    # are just the flags changed during merge?
    if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
        changelist.append(fname)

    return fparent1
@unfilteredmethod
def commit(self, text="", user=None, date=None, match=None, force=False,
           editor=False, extra={}):
    """Add a new revision to current repository.

    Revision information is gathered from the working directory,
    match can be used to filter the committed files. If editor is
    supplied, it is called to get a commit message.

    Returns the new changeset's node, or None when there is nothing
    to commit. Raises util.Abort on partial merge commits, excluded
    or dirty subrepos, unmatched explicit files and unresolved merges.
    """

    def fail(f, msg):
        raise util.Abort('%s: %s' % (f, msg))

    if not match:
        match = matchmod.always(self.root, '')

    if not force:
        vdirs = []
        match.explicitdir = vdirs.append
        match.bad = fail

    wlock = self.wlock()
    try:
        wctx = self[None]
        merge = len(wctx.parents()) > 1

        if (not force and merge and match and
            (match.files() or match.anypats())):
            raise util.Abort(_('cannot partially commit a merge '
                               '(do not specify files or patterns)'))

        # status tuple indices used below: 0=modified, 1=added, 2=removed,
        # 3=deleted (missing), 6=clean
        changes = self.status(match=match, clean=force)
        if force:
            changes[0].extend(changes[6]) # mq may commit unchanged files

        # check subrepos
        subs = []
        commitsubs = set()
        newstate = wctx.substate.copy()
        # only manage subrepos and .hgsubstate if .hgsub is present
        if '.hgsub' in wctx:
            # we'll decide whether to track this ourselves, thanks
            if '.hgsubstate' in changes[0]:
                changes[0].remove('.hgsubstate')
            if '.hgsubstate' in changes[2]:
                changes[2].remove('.hgsubstate')

            # compare current state to last committed state
            # build new substate based on last committed state
            oldstate = wctx.p1().substate
            for s in sorted(newstate.keys()):
                if not match(s):
                    # ignore working copy, use old state if present
                    if s in oldstate:
                        newstate[s] = oldstate[s]
                        continue
                    if not force:
                        raise util.Abort(
                            _("commit with new subrepo %s excluded") % s)
                if wctx.sub(s).dirty(True):
                    if not self.ui.configbool('ui', 'commitsubrepos'):
                        raise util.Abort(
                            _("uncommitted changes in subrepo %s") % s,
                            hint=_("use --subrepos for recursive commit"))
                    subs.append(s)
                    commitsubs.add(s)
                else:
                    bs = wctx.sub(s).basestate()
                    newstate[s] = (newstate[s][0], bs, newstate[s][2])
                    if oldstate.get(s, (None, None, None))[1] != bs:
                        subs.append(s)

            # check for removed subrepos
            for p in wctx.parents():
                r = [s for s in p.substate if s not in newstate]
                subs += [s for s in r if match(s)]
            if subs:
                if (not match('.hgsub') and
                    '.hgsub' in (wctx.modified() + wctx.added())):
                    raise util.Abort(
                        _("can't commit subrepos without .hgsub"))
                changes[0].insert(0, '.hgsubstate')

        elif '.hgsub' in changes[2]:
            # clean up .hgsubstate when .hgsub is removed
            if ('.hgsubstate' in wctx and
                '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
                changes[2].insert(0, '.hgsubstate')

        # make sure all explicit patterns are matched
        if not force and match.files():
            matched = set(changes[0] + changes[1] + changes[2])

            for f in match.files():
                f = self.dirstate.normalize(f)
                if f == '.' or f in matched or f in wctx.substate:
                    continue
                if f in changes[3]: # missing
                    fail(f, _('file not found!'))
                if f in vdirs: # visited directory
                    d = f + '/'
                    for mf in matched:
                        if mf.startswith(d):
                            break
                    else:
                        fail(f, _("no match under directory!"))
                elif f not in self.dirstate:
                    fail(f, _("file not tracked!"))

        cctx = context.workingctx(self, text, user, date, extra, changes)

        # nothing to commit? (force, branch closing, merges and branch
        # changes still go through)
        if (not force and not extra.get("close") and not merge
            and not cctx.files()
            and wctx.branch() == wctx.p1().branch()):
            return None

        if merge and cctx.deleted():
            raise util.Abort(_("cannot commit merge with missing files"))

        ms = mergemod.mergestate(self)
        for f in changes[0]:
            if f in ms and ms[f] == 'u':
                raise util.Abort(_("unresolved merge conflicts "
                                   "(see hg help resolve)"))

        if editor:
            cctx._text = editor(self, cctx, subs)
        edited = (text != cctx._text)

        # commit subs and write new state
        if subs:
            for s in sorted(commitsubs):
                sub = wctx.sub(s)
                self.ui.status(_('committing subrepository %s\n') %
                    subrepo.subrelpath(sub))
                sr = sub.commit(cctx._text, user, date)
                newstate[s] = (newstate[s][0], sr)
            subrepo.writestate(self, newstate)

        # Save commit message in case this transaction gets rolled back
        # (e.g. by a pretxncommit hook).  Leave the content alone on
        # the assumption that the user will use the same editor again.
        msgfn = self.savecommitmessage(cctx._text)

        p1, p2 = self.dirstate.parents()
        hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
        try:
            self.hook("precommit", throw=True, parent1=hookp1,
                      parent2=hookp2)
            ret = self.commitctx(cctx, True)
        except: # re-raises
            if edited:
                self.ui.write(
                    _('note: commit message saved in %s\n') % msgfn)
            raise

        # update bookmarks, dirstate and mergestate
        bookmarks.update(self, [p1, p2], ret)
        cctx.markcommitted(ret)
        ms.reset()
    finally:
        wlock.release()

    # fire the 'commit' hook only after the lock is released; defaults
    # bind ret/hookp1/hookp2 at definition time
    def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
        self.hook("commit", node=node, parent1=parent1, parent2=parent2)
    self._afterlock(commithook)
    return ret
@unfilteredmethod
def commitctx(self, ctx, error=False):
    """Add a new revision to current repository.
    Revision information is passed via the context argument.

    Returns the node of the new changeset.  When error is False, an
    IOError with errno ENOENT while committing a file records that
    file as removed instead of aborting; when error is True any
    IOError aborts the commit.
    """

    tr = lock = None
    removed = list(ctx.removed())
    p1, p2 = ctx.p1(), ctx.p2()
    user = ctx.user()
    lock = self.lock()
    try:
        tr = self.transaction("commit")
        # hand out only a weak proxy to the transaction (avoids
        # reference cycles so destructors still run)
        trp = weakref.proxy(tr)

        if ctx.files():
            m1 = p1.manifest().copy()
            m2 = p2.manifest()

            # check in files
            new = {}
            changed = []
            linkrev = len(self)
            for f in sorted(ctx.modified() + ctx.added()):
                self.ui.note(f + "\n")
                try:
                    fctx = ctx[f]
                    new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
                                              changed)
                    m1.set(f, fctx.flags())
                except OSError, inst:
                    self.ui.warn(_("trouble committing %s!\n") % f)
                    raise
                except IOError, inst:
                    errcode = getattr(inst, 'errno', errno.ENOENT)
                    if error or errcode and errcode != errno.ENOENT:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    else:
                        # tolerated missing file: treat as removed
                        removed.append(f)

            # update manifest
            m1.update(new)
            removed = [f for f in sorted(removed) if f in m1 or f in m2]
            drop = [f for f in removed if f in m1]
            for f in drop:
                del m1[f]
            mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
                                   p2.manifestnode(), (new, drop))
            files = changed + removed
        else:
            # no file changes: reuse the first parent's manifest
            mn = p1.manifestnode()
            files = []

        # update changelog
        self.changelog.delayupdate()
        n = self.changelog.add(mn, files, ctx.description(),
                               trp, p1.node(), p2.node(),
                               user, ctx.date(), ctx.extra().copy())
        p = lambda: self.changelog.writepending() and self.root or ""
        xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
        self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                  parent2=xp2, pending=p)
        self.changelog.finalize(trp)

        # set the new commit is proper phase
        targetphase = phases.newcommitphase(self.ui)
        if targetphase:
            # retract boundary do not alter parent changeset.
            # if a parent have higher the resulting phase will
            # be compliant anyway
            #
            # if minimal phase was 0 we don't need to retract anything
            phases.retractboundary(self, targetphase, [n])
        tr.close()
        branchmap.updatecache(self.filtered('served'))
        return n
    finally:
        if tr:
            tr.release()
        lock.release()
@unfilteredmethod
def destroying(self):
    '''Inform the repository that nodes are about to be destroyed.
    Intended for use by strip and rollback, so there's a common
    place for anything that has to be done before destroying history.

    This is mostly useful for saving state that is in memory and waiting
    to be flushed when the current lock is released. Because a call to
    destroyed is imminent, the repo will be invalidated causing those
    changes to stay in memory (waiting for the next unlock), or vanish
    completely.
    '''
    # Committing under the same lock leaves the phasecache dirty; the
    # strip that follows invalidates the repo, so flush the cache to
    # disk now or those phase changes would be silently discarded.
    if '_phasecache' in self.__dict__:
        self._phasecache.write()
@unfilteredmethod
def destroyed(self):
    '''Inform the repository that nodes have been destroyed.
    Intended for use by strip and rollback, so there's a common
    place for anything that has to be done after destroying history.
    '''
    # The phasecache may still reference the nodes that were just
    # destroyed.  Filter the unknown nodes out now and persist the
    # result, so later users of the cache (e.g. commit) don't see
    # entries for revisions that no longer exist.
    self._phasecache.filterunknown(self)
    self._phasecache.write()

    # Rebuild the on-disk 'served' branch cache; branchcache
    # collaboration lets this start from the nearest filtered subset,
    # which keeps it fast and helps read-only server processes.
    branchmap.updatecache(self.filtered('served'))

    # Finally drop all cached in-memory state so subsequent accesses
    # reload a consistent view of the (now smaller) history.
    self.invalidate()
def walk(self, match, node=None):
    """Recursively walk the files of a changeset (or, with node=None,
    of self[None]) and return those matched by the match function.
    """
    ctx = self[node]
    return ctx.walk(match)
def status(self, node1='.', node2=None, match=None,
           ignored=False, clean=False, unknown=False,
           listsubrepos=False):
    """return status of files between two nodes or node and working
    directory.

    If node1 is None, use the first dirstate parent instead.
    If node2 is None, compare node1 with working directory.

    Returns a 7-tuple of sorted file lists:
    (modified, added, removed, deleted, unknown, ignored, clean);
    the unknown/ignored/clean lists are only filled in when the
    corresponding flag argument is set.
    """

    def mfmatches(ctx):
        # manifest of ctx restricted to the files selected by 'match'
        mf = ctx.manifest().copy()
        if match.always():
            return mf
        for fn in mf.keys():
            if not match(fn):
                del mf[fn]
        return mf

    if isinstance(node1, context.changectx):
        ctx1 = node1
    else:
        ctx1 = self[node1]
    if isinstance(node2, context.changectx):
        ctx2 = node2
    else:
        ctx2 = self[node2]

    working = ctx2.rev() is None
    parentworking = working and ctx1 == self['.']
    match = match or matchmod.always(self.root, self.getcwd())
    listignored, listclean, listunknown = ignored, clean, unknown

    # load earliest manifest first for caching reasons
    if not working and ctx2.rev() < ctx1.rev():
        ctx2.manifest()

    if not parentworking:
        def bad(f, msg):
            # 'f' may be a directory pattern from 'match.files()',
            # so 'f not in ctx1' is not enough
            if f not in ctx1 and f not in ctx1.dirs():
                self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
        match.bad = bad

    if working: # we need to scan the working dir
        subrepos = []
        if '.hgsub' in self.dirstate:
            subrepos = sorted(ctx2.substate)
        s = self.dirstate.status(match, subrepos, listignored,
                                 listclean, listunknown)
        cmp, modified, added, removed, deleted, unknown, ignored, clean = s

        # check for any possibly clean files
        if parentworking and cmp:
            fixup = []
            # do a full compare of any files that might have changed
            for f in sorted(cmp):
                if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
                    or ctx1[f].cmp(ctx2[f])):
                    modified.append(f)
                else:
                    fixup.append(f)

            # update dirstate for files that are actually clean
            if fixup:
                if listclean:
                    clean += fixup
                try:
                    # updating the dirstate is optional
                    # so we don't wait on the lock
                    wlock = self.wlock(False)
                    try:
                        for f in fixup:
                            self.dirstate.normal(f)
                    finally:
                        wlock.release()
                except error.LockError:
                    pass

    if not parentworking:
        mf1 = mfmatches(ctx1)
        if working:
            # we are comparing working dir against non-parent
            # generate a pseudo-manifest for the working dir
            mf2 = mfmatches(self['.'])
            for f in cmp + modified + added:
                mf2[f] = None
                mf2.set(f, ctx2.flags(f))
            for f in removed:
                if f in mf2:
                    del mf2[f]
        else:
            # we are comparing two revisions
            deleted, unknown, ignored = [], [], []
            mf2 = mfmatches(ctx2)

        modified, added, clean = [], [], []
        withflags = mf1.withflags() | mf2.withflags()
        for fn, mf2node in mf2.iteritems():
            if fn in mf1:
                if (fn not in deleted and
                    ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
                     (mf1[fn] != mf2node and
                      (mf2node or ctx1[fn].cmp(ctx2[fn]))))):
                    modified.append(fn)
                elif listclean:
                    clean.append(fn)
                del mf1[fn]
            elif fn not in deleted:
                added.append(fn)
        # whatever is left over in mf1 existed only on the mf1 side
        removed = mf1.keys()

    if working and modified and not self.dirstate._checklink:
        # Symlink placeholders may get non-symlink-like contents
        # via user error or dereferencing by NFS or Samba servers,
        # so we filter out any placeholders that don't look like a
        # symlink
        sane = []
        for f in modified:
            if ctx2.flags(f) == 'l':
                d = ctx2[f].data()
                if len(d) >= 1024 or '\n' in d or util.binary(d):
                    self.ui.debug('ignoring suspect symlink placeholder'
                                  ' "%s"\n' % f)
                    continue
            sane.append(f)
        modified = sane

    r = modified, added, removed, deleted, unknown, ignored, clean

    if listsubrepos:
        for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
            if working:
                rev2 = None
            else:
                rev2 = ctx2.substate[subpath][1]
            try:
                submatch = matchmod.narrowmatcher(subpath, match)
                s = sub.status(rev2, match=submatch, ignored=listignored,
                               clean=listclean, unknown=listunknown,
                               listsubrepos=True)
                # merge subrepo results, prefixing each file with its
                # subrepo path
                for rfiles, sfiles in zip(r, s):
                    rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
            except error.LookupError:
                self.ui.status(_("skipping missing subrepository: %s\n")
                               % subpath)
    for l in r:
        l.sort()
    return r
def heads(self, start=None):
    """Return the changelog heads (optionally limited to descendants
    of *start*), newest revision first."""
    cl = self.changelog
    found = cl.heads(start)
    # highest revision number first
    return sorted(found, key=cl.rev, reverse=True)
def branchheads(self, branch=None, start=None, closed=False):
    '''return a (possibly filtered) list of heads for the given branch

    Heads are returned in topological order, from newest to oldest.
    If branch is None, use the dirstate branch.
    If start is not None, return only heads reachable from start.
    If closed is True, return heads that are marked as closed as well.
    '''
    if branch is None:
        branch = self[None].branch()
    bmap = self.branchmap()
    if branch not in bmap:
        return []
    # the branch cache stores heads lowest-to-highest; flip them
    result = list(reversed(bmap[branch]))
    if start is not None:
        # drop heads that cannot be reached from startrev
        reachable = set(self.changelog.nodesbetween([start], result)[2])
        result = [h for h in result if h in reachable]
    if closed:
        return result
    return [h for h in result if not self[h].closesbranch()]
def branches(self, nodes):
    """For each starting node (default: the changelog tip), follow
    first parents until a merge or the root is reached, and return a
    list of (startnode, endnode, p1, p2) tuples describing each run."""
    cl = self.changelog
    if not nodes:
        nodes = [cl.tip()]
    out = []
    for start in nodes:
        n = start
        while True:
            p1, p2 = cl.parents(n)
            if p2 != nullid or p1 == nullid:
                # hit a merge (two parents) or the root: run ends here
                out.append((start, n, p1, p2))
                break
            n = p1
    return out
def between(self, pairs):
    """For each (top, bottom) pair, walk first parents from top toward
    bottom and collect the nodes at exponentially growing distances
    (1, 2, 4, ...); stop at bottom or the null node.  Returns one list
    of sample nodes per input pair."""
    cl = self.changelog
    result = []
    for top, bottom in pairs:
        sample = []
        node = top
        step = 0
        nexthit = 1
        while node != bottom and node != nullid:
            if step == nexthit:
                sample.append(node)
                nexthit *= 2
            node = cl.parents(node)[0]
            step += 1
        result.append(sample)
    return result
def pull(self, remote, heads=None, force=False):
    """Pull changes from *remote* into this repository.

    heads: optional list of remote heads to pull (None pulls
    everything missing).  force is forwarded to discovery.
    Returns addchangegroup()'s integer result, or 0 when there was
    nothing to fetch.
    """
    # don't open transaction for nothing or you break future useful
    # rollback call
    tr = None
    trname = 'pull\n' + util.hidepassword(remote.url())
    lock = self.lock()
    try:
        tmp = discovery.findcommonincoming(self, remote, heads=heads,
                                           force=force)
        common, fetch, rheads = tmp
        if not fetch:
            self.ui.status(_("no changes found\n"))
            added = []
            result = 0
        else:
            tr = self.transaction(trname)
            if heads is None and list(common) == [nullid]:
                self.ui.status(_("requesting all changes\n"))
            elif heads is None and remote.capable('changegroupsubset'):
                # issue1320, avoid a race if remote changed after discovery
                heads = rheads

            if remote.capable('getbundle'):
                # TODO: get bundlecaps from remote
                cg = remote.getbundle('pull', common=common,
                                      heads=heads or rheads)
            elif heads is None:
                cg = remote.changegroup(fetch, 'pull')
            elif not remote.capable('changegroupsubset'):
                raise util.Abort(_("partial pull cannot be done because "
                                   "other repository doesn't support "
                                   "changegroupsubset."))
            else:
                cg = remote.changegroupsubset(fetch, heads, 'pull')
            # we use unfiltered changelog here because hidden revision must
            # be taken in account for phase synchronization. They may
            # becomes public and becomes visible again.
            cl = self.unfiltered().changelog
            clstart = len(cl)
            result = self.addchangegroup(cg, 'pull', remote.url())
            clend = len(cl)
            added = [cl.node(r) for r in xrange(clstart, clend)]

        # compute target subset
        if heads is None:
            # We pulled every thing possible
            # sync on everything common
            subset = common + added
        else:
            # We pulled a specific subset
            # sync on this subset
            subset = heads

        # Get remote phases data from remote
        remotephases = remote.listkeys('phases')
        publishing = bool(remotephases.get('publishing', False))
        if remotephases and not publishing:
            # remote is new and unpublishing
            pheads, _dr = phases.analyzeremotephases(self, subset,
                                                     remotephases)
            phases.advanceboundary(self, phases.public, pheads)
            phases.advanceboundary(self, phases.draft, subset)
        else:
            # Remote is old or publishing all common changesets
            # should be seen as public
            phases.advanceboundary(self, phases.public, subset)

        def gettransaction():
            # lazily open a transaction for obsolescence marker exchange
            if tr is None:
                return self.transaction(trname)
            return tr

        obstr = obsolete.syncpull(self, remote, gettransaction)
        if obstr is not None:
            # syncpull opened (or reused) a transaction; adopt it so it
            # is closed/released below
            tr = obstr

        if tr is not None:
            tr.close()
    finally:
        if tr is not None:
            tr.release()
        lock.release()

    return result
def checkpush(self, force, revs):
    """Extension hook: run extra validation before pushing.

    Extensions may override this to perform additional pre-push
    checks, or call it when they override the push command.  The
    default implementation does nothing.
    """
    pass
def push(self, remote, force=False, revs=None, newbranch=False):
    '''Push outgoing changesets (limited by revs) from the current
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not remote.canpush():
        raise util.Abort(_("destination does not support push"))
    unfi = self.unfiltered()

    def localphasemove(nodes, phase=phases.public):
        """move <nodes> to <phase> in the local source repo"""
        # NOTE: reads 'locallock' from the enclosing scope; it is set
        # (or left None) just below.
        if locallock is not None:
            phases.advanceboundary(self, phase, nodes)
        else:
            # repo is not locked, do not change any phases!
            # Informs the user that phases should have been moved when
            # applicable.
            actualmoves = [n for n in nodes if phase < self[n].phase()]
            phasestr = phases.phasenames[phase]
            if actualmoves:
                self.ui.status(_('cannot lock source repo, skipping local'
                                 ' %s phase update\n') % phasestr)

    # get local lock as we might write phase data
    locallock = None
    try:
        locallock = self.lock()
    except IOError, err:
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        self.ui.debug(msg)
    try:
        self.checkpush(force, revs)
        lock = None
        unbundle = remote.capable('unbundle')
        if not unbundle:
            lock = remote.lock()
        try:
            # discovery
            fci = discovery.findcommonincoming
            commoninc = fci(unfi, remote, force=force)
            common, inc, remoteheads = commoninc
            fco = discovery.findcommonoutgoing
            outgoing = fco(unfi, remote, onlyheads=revs,
                           commoninc=commoninc, force=force)

            if not outgoing.missing:
                # nothing to push
                scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
                ret = None
            else:
                # something to push
                if not force:
                    # if self.obsstore == False --> no obsolete
                    # then, save the iteration
                    if unfi.obsstore:
                        # this message are here for 80 char limit reason
                        mso = _("push includes obsolete changeset: %s!")
                        mst = "push includes %s changeset: %s!"
                        # plain versions for i18n tool to detect them
                        _("push includes unstable changeset: %s!")
                        _("push includes bumped changeset: %s!")
                        _("push includes divergent changeset: %s!")
                        # If we are to push if there is at least one
                        # obsolete or unstable changeset in missing, at
                        # least one of the missinghead will be obsolete or
                        # unstable. So checking heads only is ok
                        for node in outgoing.missingheads:
                            ctx = unfi[node]
                            if ctx.obsolete():
                                raise util.Abort(mso % ctx)
                            elif ctx.troubled():
                                raise util.Abort(_(mst)
                                                 % (ctx.troubles()[0],
                                                    ctx))
                    discovery.checkheads(unfi, remote, outgoing,
                                         remoteheads, newbranch,
                                         bool(inc))

                # TODO: get bundlecaps from remote
                bundlecaps = None
                # create a changegroup from local
                if revs is None and not outgoing.excluded:
                    # push everything,
                    # use the fast path, no race possible on push
                    bundler = changegroup.bundle10(self, bundlecaps)
                    cg = self._changegroupsubset(outgoing,
                                                 bundler,
                                                 'push',
                                                 fastpath=True)
                else:
                    cg = self.getlocalbundle('push', outgoing, bundlecaps)

                # apply changegroup to remote
                if unbundle:
                    # local repo finds heads on server, finds out what
                    # revs it must push. once revs transferred, if server
                    # finds it has different heads (someone else won
                    # commit/push race), server aborts.
                    if force:
                        remoteheads = ['force']
                    # ssh: return remote's addchangegroup()
                    # http: return remote's addchangegroup() or 0 for error
                    ret = remote.unbundle(cg, remoteheads, 'push')
                else:
                    # we return an integer indicating remote head count
                    # change
                    ret = remote.addchangegroup(cg, 'push', self.url())

            if ret:
                # push succeed, synchronize target of the push
                cheads = outgoing.missingheads
            elif revs is None:
                # All out push fails. synchronize all common
                cheads = outgoing.commonheads
            else:
                # I want cheads = heads(::missingheads and ::commonheads)
                # (missingheads is revs with secret changeset filtered out)
                #
                # This can be expressed as:
                #     cheads = ( (missingheads and ::commonheads)
                #              + (commonheads and ::missingheads))"
                #              )
                #
                # while trying to push we already computed the following:
                #     common = (::commonheads)
                #     missing = ((commonheads::missingheads) - commonheads)
                #
                # We can pick:
                # * missingheads part of common (::commonheads)
                common = set(outgoing.common)
                cheads = [node for node in revs if node in common]
                # and
                # * commonheads parents on missing
                revset = unfi.set('%ln and parents(roots(%ln))',
                                  outgoing.commonheads,
                                  outgoing.missing)
                cheads.extend(c.node() for c in revset)
            # even when we don't push, exchanging phase data is useful
            remotephases = remote.listkeys('phases')
            if (self.ui.configbool('ui', '_usedassubrepo', False)
                and remotephases    # server supports phases
                and ret is None # nothing was pushed
                and remotephases.get('publishing', False)):
                # When:
                # - this is a subrepo push
                # - and remote support phase
                # - and no changeset was pushed
                # - and remote is publishing
                # We may be in issue 3871 case!
                # We drop the possible phase synchronisation done by
                # courtesy to publish changesets possibly locally draft
                # on the remote.
                remotephases = {'publishing': 'True'}
            if not remotephases: # old server or public only repo
                localphasemove(cheads)
                # don't push any phase data as there is nothing to push
            else:
                ana = phases.analyzeremotephases(self, cheads, remotephases)
                pheads, droots = ana
                ### Apply remote phase on local
                if remotephases.get('publishing', False):
                    localphasemove(cheads)
                else: # publish = False
                    localphasemove(pheads)
                    localphasemove(cheads, phases.draft)
                ### Apply local phase on remote

                # Get the list of all revs draft on remote by public here.
                # XXX Beware that revset break if droots is not strictly
                # XXX root we may want to ensure it is but it is costly
                outdated = unfi.set('heads((%ln::%ln) and public())',
                                    droots, cheads)
                for newremotehead in outdated:
                    r = remote.pushkey('phases',
                                       newremotehead.hex(),
                                       str(phases.draft),
                                       str(phases.public))
                    if not r:
                        self.ui.warn(_('updating %s to public failed!\n')
                                     % newremotehead)
            self.ui.debug('try to push obsolete markers to remote\n')
            obsolete.syncpush(self, remote)
        finally:
            if lock is not None:
                lock.release()
    finally:
        if locallock is not None:
            locallock.release()

    self.ui.debug("checking for updated bookmarks\n")
    rb = remote.listkeys('bookmarks')
    for k in rb.keys():
        if k in unfi._bookmarks:
            nr, nl = rb[k], hex(self._bookmarks[k])
            if nr in unfi:
                cr = unfi[nr]
                cl = unfi[nl]
                if bookmarks.validdest(unfi, cr, cl):
                    r = remote.pushkey('bookmarks', k, nr, nl)
                    if r:
                        self.ui.status(_("updating bookmark %s\n") % k)
                    else:
                        self.ui.warn(_('updating bookmark %s'
                                       ' failed!\n') % k)

    return ret
def changegroupinfo(self, nodes, source):
    """Report how many changesets will be transferred and, when
    debugging, list them individually."""
    ui = self.ui
    if ui.verbose or source == 'bundle':
        ui.status(_("%d changesets found\n") % len(nodes))
    if ui.debugflag:
        ui.debug("list of changesets:\n")
        for node in nodes:
            ui.debug("%s\n" % hex(node))
def changegroupsubset(self, bases, heads, source):
    """Build a changegroup of all nodes that are descendants of any of
    the bases and ancestors of any of the heads.

    Returns an object whose read() yields successive changegroup
    chunks.  Working out exactly which filenodes and manifest nodes a
    complete changeset needs — and, in reverse, which changeset a
    given filenode or manifestnode belongs to — is the non-trivial
    part delegated to the bundler.
    """
    cl = self.changelog
    bases = bases or [nullid]
    # TODO: remove call to nodesbetween.
    csets, bases, heads = cl.nodesbetween(bases, heads)
    parents = []
    for n in bases:
        for p in cl.parents(n):
            if p != nullid:
                parents.append(p)
    out = discovery.outgoing(cl, parents, heads)
    return self._changegroupsubset(out, changegroup.bundle10(self), source)
def getlocalbundle(self, source, outgoing, bundlecaps=None):
    """Build a bundle from a precomputed discovery.outgoing.

    Like getbundle(), but reuses the sets already computed in
    *outgoing*; only implemented for local repos.  Returns None when
    nothing is missing.
    """
    if not outgoing.missing:
        # nothing to bundle
        return None
    return self._changegroupsubset(outgoing,
                                   changegroup.bundle10(self, bundlecaps),
                                   source)
def getbundle(self, source, heads=None, common=None, bundlecaps=None):
    """Return the changegroup covering ancestors of *heads* minus
    ancestors of *common*.

    heads defaults to the local heads; an empty/None common defaults
    to [nullid].  Nodes in common may be unknown locally (a quirk of
    the discovery protocol), so unknown ones are dropped first.
    """
    cl = self.changelog
    if not common:
        known = [nullid]
    else:
        # discovery may report nodes we don't actually have; drop them
        known = [n for n in common if cl.hasnode(n)]
    out = discovery.outgoing(cl, known, heads or cl.heads())
    return self.getlocalbundle(source, out, bundlecaps=bundlecaps)
@unfilteredmethod
def _changegroupsubset(self, outgoing, bundler, source,
                       fastpath=False):
    """Generate the changegroup described by *outgoing* with *bundler*
    and return it wrapped as an unbundle10 stream."""
    heads = outgoing.missingheads
    heads.sort()
    # Take the linkrev fast path when explicitly requested, or when all
    # unfiltered heads were asked for (every linkrev is sent anyway).
    uselinkrevfast = fastpath or (
        self.filtername is None and heads == sorted(self.heads()))

    self.hook('preoutgoing', throw=True, source=source)
    self.changegroupinfo(outgoing.missing, source)
    stream = bundler.generate(outgoing.common, outgoing.missing,
                              uselinkrevfast, source)
    return changegroup.unbundle10(util.chunkbuffer(stream), 'UN')
def changegroup(self, basenodes, source):
    """Return a changegroup from basenodes up to the current heads.

    Delegates to changegroupsubset() to avoid a race (issue1320).
    """
    return self.changegroupsubset(basenodes, self.heads(), source)
@unfilteredmethod
def addchangegroup(self, source, srctype, url, emptyok=False):
    """Add the changegroup returned by source.read() to this repo.
    srctype is a string like 'push', 'pull', or 'unbundle'. url is
    the URL of the repo where this changegroup is coming from.

    Return an integer summarizing the change to this repo:
    - nothing changed or no source: 0
    - more heads than before: 1+added heads (2..n)
    - fewer heads than before: -1-removed heads (-2..-n)
    - number of heads stays the same: 1
    """
    def csmap(x):
        # map an incoming changeset node to its future local rev
        self.ui.debug("add changeset %s\n" % short(x))
        return len(cl)

    def revmap(x):
        return cl.rev(x)

    if not source:
        return 0

    self.hook('prechangegroup', throw=True, source=srctype, url=url)

    changesets = files = revisions = 0
    efiles = set()

    # write changelog data to temp files so concurrent readers will not see
    # inconsistent view
    cl = self.changelog
    cl.delayupdate()
    oldheads = cl.heads()

    tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
    try:
        trp = weakref.proxy(tr)
        # pull off the changeset group
        self.ui.status(_("adding changesets\n"))
        clstart = len(cl)
        class prog(object):
            # progress callback shared by the changeset/manifest/file
            # phases; 'step' and 'total' are rebound per phase
            step = _('changesets')
            count = 1
            ui = self.ui
            total = None
            def __call__(self):
                self.ui.progress(self.step, self.count, unit=_('chunks'),
                                 total=self.total)
                self.count += 1
        pr = prog()
        source.callback = pr

        source.changelogheader()
        srccontent = cl.addgroup(source, csmap, trp)
        if not (srccontent or emptyok):
            raise util.Abort(_("received changelog group is empty"))
        clend = len(cl)
        changesets = clend - clstart
        for c in xrange(clstart, clend):
            efiles.update(self[c].files())
        efiles = len(efiles)
        self.ui.progress(_('changesets'), None)

        # pull off the manifest group
        self.ui.status(_("adding manifests\n"))
        pr.step = _('manifests')
        pr.count = 1
        pr.total = changesets # manifests <= changesets
        # no need to check for empty manifest group here:
        # if the result of the merge of 1 and 2 is the same in 3 and 4,
        # no new manifest will be created and the manifest group will
        # be empty during the pull
        source.manifestheader()
        self.manifest.addgroup(source, revmap, trp)
        self.ui.progress(_('manifests'), None)

        needfiles = {}
        if self.ui.configbool('server', 'validate', default=False):
            # validate incoming csets have their manifests
            for cset in xrange(clstart, clend):
                mfest = self.changelog.read(self.changelog.node(cset))[0]
                mfest = self.manifest.readdelta(mfest)
                # store file nodes we must see
                for f, n in mfest.iteritems():
                    needfiles.setdefault(f, set()).add(n)

        # process the files
        self.ui.status(_("adding file changes\n"))
        pr.step = _('files')
        pr.count = 1
        pr.total = efiles
        source.callback = None

        newrevs, newfiles = self.addchangegroupfiles(source, revmap, trp,
                                                     pr, needfiles)
        revisions += newrevs
        files += newfiles

        # compute head-count delta; closed heads don't count as added
        dh = 0
        if oldheads:
            heads = cl.heads()
            dh = len(heads) - len(oldheads)
            for h in heads:
                if h not in oldheads and self[h].closesbranch():
                    dh -= 1
        htext = ""
        if dh:
            htext = _(" (%+d heads)") % dh

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, htext))
        self.invalidatevolatilesets()

        if changesets > 0:
            p = lambda: cl.writepending() and self.root or ""
            self.hook('pretxnchangegroup', throw=True,
                      node=hex(cl.node(clstart)), source=srctype,
                      url=url, pending=p)

        added = [cl.node(r) for r in xrange(clstart, clend)]
        publishing = self.ui.configbool('phases', 'publish', True)
        if srctype == 'push':
            # Old server can not push the boundary themself.
            # New server won't push the boundary if changeset already
            # existed locally as secrete
            #
            # We should not use added here but the list of all change in
            # the bundle
            if publishing:
                phases.advanceboundary(self, phases.public, srccontent)
            else:
                phases.advanceboundary(self, phases.draft, srccontent)
                phases.retractboundary(self, phases.draft, added)
        elif srctype != 'strip':
            # publishing only alter behavior during push
            #
            # strip should not touch boundary at all
            phases.retractboundary(self, phases.draft, added)

        # make changelog see real files again
        cl.finalize(trp)
        tr.close()

        if changesets > 0:
            if srctype != 'strip':
                # During strip, branchcache is invalid but coming call to
                # `destroyed` will repair it.
                # In other case we can safely update cache on disk.
                branchmap.updatecache(self.filtered('served'))
            def runhooks():
                # forcefully update the on-disk branch cache
                self.ui.debug("updating the branch cache\n")
                self.hook("changegroup", node=hex(cl.node(clstart)),
                          source=srctype, url=url)

                for n in added:
                    self.hook("incoming", node=hex(n), source=srctype,
                              url=url)

                newheads = [h for h in self.heads() if h not in oldheads]
                self.ui.log("incoming",
                            "%s incoming changes - new heads: %s\n",
                            len(added),
                            ', '.join([hex(c[:6]) for c in newheads]))
            self._afterlock(runhooks)

    finally:
        tr.release()
    # never return 0 here:
    if dh < 0:
        return dh - 1
    else:
        return dh + 1
def addchangegroupfiles(self, source, revmap, trp, pr, needfiles):
    """Apply the per-file revlog groups from *source*.

    needfiles maps filename -> set of filenode ids we expect to
    receive (may be empty when server-side validation is off); the
    sets are pruned as nodes arrive and any leftover is verified
    against the local revlogs at the end.  Returns the pair
    (revisions added, files touched).
    """
    revisions = 0
    files = 0
    while True:
        chunkdata = source.filelogheader()
        if not chunkdata:
            # end of the file groups
            break
        f = chunkdata["filename"]
        self.ui.debug("adding %s revisions\n" % f)
        pr()
        fl = self.file(f)
        o = len(fl)
        if not fl.addgroup(source, revmap, trp):
            raise util.Abort(_("received file revlog group is empty"))
        revisions += len(fl) - o
        files += 1
        if f in needfiles:
            needs = needfiles[f]
            for new in xrange(o, len(fl)):
                n = fl.node(new)
                if n in needs:
                    needs.remove(n)
                else:
                    # got a filenode no manifest asked for
                    raise util.Abort(
                        _("received spurious file revlog entry"))
            if not needs:
                del needfiles[f]
    self.ui.progress(_('files'), None)

    # anything still in needfiles must already exist locally
    for f, needs in needfiles.iteritems():
        fl = self.file(f)
        for n in needs:
            try:
                fl.rev(n)
            except error.LookupError:
                raise util.Abort(
                    _('missing file data for %s:%s - run hg verify') %
                    (f, hex(n)))

    return revisions, files
def stream_in(self, remote, requirements):
    """Perform a streaming clone: copy raw store files from *remote*.

    Wire format: one status-code line (0 ok, 1 forbidden, 2 remote
    lock failed), then a "<total_files> <total_bytes>" line, then per
    file a "<name>\\0<size>" header followed by <size> bytes of data.
    Returns len(self.heads()) + 1.
    """
    lock = self.lock()
    try:
        # Save remote branchmap. We will use it later
        # to speed up branchcache creation
        rbranchmap = None
        if remote.capable("branchmap"):
            rbranchmap = remote.branchmap()

        fp = remote.stream_out()
        l = fp.readline()
        try:
            resp = int(l)
        except ValueError:
            raise error.ResponseError(
                _('unexpected response from remote server:'), l)
        if resp == 1:
            raise util.Abort(_('operation forbidden by server'))
        elif resp == 2:
            raise util.Abort(_('locking the remote repository failed'))
        elif resp != 0:
            raise util.Abort(_('the server sent an unknown error code'))
        self.ui.status(_('streaming all changes\n'))
        l = fp.readline()
        try:
            total_files, total_bytes = map(int, l.split(' ', 1))
        except (ValueError, TypeError):
            raise error.ResponseError(
                _('unexpected response from remote server:'), l)
        self.ui.status(_('%d files to transfer, %s of data\n') %
                       (total_files, util.bytecount(total_bytes)))
        handled_bytes = 0
        self.ui.progress(_('clone'), 0, total=total_bytes)
        start = time.time()
        for i in xrange(total_files):
            # XXX doesn't support '\n' or '\r' in filenames
            l = fp.readline()
            try:
                name, size = l.split('\0', 1)
                size = int(size)
            except (ValueError, TypeError):
                raise error.ResponseError(
                    _('unexpected response from remote server:'), l)
            if self.ui.debugflag:
                self.ui.debug('adding %s (%s)\n' %
                              (name, util.bytecount(size)))
            # for backwards compat, name was partially encoded
            ofp = self.sopener(store.decodedir(name), 'w')
            for chunk in util.filechunkiter(fp, limit=size):
                handled_bytes += len(chunk)
                self.ui.progress(_('clone'), handled_bytes,
                                 total=total_bytes)
                ofp.write(chunk)
            ofp.close()
        elapsed = time.time() - start
        if elapsed <= 0:
            # guard against a zero/negative duration in the rate below
            elapsed = 0.001
        self.ui.progress(_('clone'), None)
        self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                       (util.bytecount(total_bytes), elapsed,
                        util.bytecount(total_bytes / elapsed)))

        # new requirements = old non-format requirements +
        #                    new format-related
        # requirements from the streamed-in repository
        requirements.update(set(self.requirements) - self.supportedformats)
        self._applyrequirements(requirements)
        self._writerequirements()

        if rbranchmap:
            rbheads = []
            for bheads in rbranchmap.itervalues():
                rbheads.extend(bheads)

            if rbheads:
                rtiprev = max((int(self.changelog.rev(node))
                               for node in rbheads))
                cache = branchmap.branchcache(rbranchmap,
                                              self[rtiprev].node(),
                                              rtiprev)
                # Try to stick it as low as possible
                # filter above served are unlikely to be fetch from a clone
                for candidate in ('base', 'immutable', 'served'):
                    rview = self.filtered(candidate)
                    if cache.validfor(rview):
                        self._branchcaches[candidate] = cache
                        cache.write(rview)
                        break
        self.invalidate()
        return len(self.heads()) + 1
    finally:
        lock.release()
def clone(self, remote, heads=None, stream=False):
    '''clone remote repository

    keyword arguments:
    heads: list of revs to clone (forces use of pull)
    stream: use streaming clone if possible'''
    # The previous signature used a mutable default (heads=[]), which
    # is shared across calls; use None and normalize here instead.
    # Behavior is unchanged: both defaults are falsy and 'heads' is
    # never mutated.
    if heads is None:
        heads = []

    # now, all clients that can request uncompressed clones can
    # read repo formats supported by all servers that can serve
    # them.

    # if revlog format changes, client will have to check version
    # and format flags on "stream" capability, and use
    # uncompressed only if compatible.

    if not stream:
        # if the server explicitly prefers to stream (for fast LANs)
        stream = remote.capable('stream-preferred')

    if stream and not heads:
        # 'stream' means remote revlog format is revlogv1 only
        if remote.capable('stream'):
            return self.stream_in(remote, set(('revlogv1',)))
        # otherwise, 'streamreqs' contains the remote revlog format
        streamreqs = remote.capable('streamreqs')
        if streamreqs:
            streamreqs = set(streamreqs.split(','))
            # if we support it, stream in and adjust our requirements
            if not streamreqs - self.supportedformats:
                return self.stream_in(remote, streamreqs)
    return self.pull(remote, heads)
def pushkey(self, namespace, key, old, new):
    """Update *key* in *namespace* from *old* to *new*.

    Fires the 'prepushkey' hook (which may veto via throw=True)
    before the update and the 'pushkey' notification hook after,
    then returns the pushkey result.
    """
    self.hook('prepushkey', throw=True, namespace=namespace, key=key,
              old=old, new=new)
    self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
    result = pushkey.push(self, namespace, key, old, new)
    self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
              ret=result)
    return result
def listkeys(self, namespace):
    """Return the pushkey mapping for *namespace*, firing the
    'prelistkeys' (vetoing) and 'listkeys' (notification) hooks
    around the lookup."""
    self.hook('prelistkeys', throw=True, namespace=namespace)
    self.ui.debug('listing keys for "%s"\n' % namespace)
    mapping = pushkey.list(self, namespace)
    self.hook('listkeys', namespace=namespace, values=mapping)
    return mapping
def debugwireargs(self, one, two, three=None, four=None, five=None):
'''used to test argument passing over the wire'''
return "%s %s %s %s %s" % (one, two, three, four, five)
    def savecommitmessage(self, text):
        """Persist *text* to .hg/last-message.txt and return the written
        file's path relative to the repository root."""
        fp = self.opener('last-message.txt', 'wb')
        try:
            fp.write(text)
        finally:
            # always release the file handle, even if the write fails
            fp.close()
        # strip "<root>/" from the absolute name to get a repo-relative path
        return self.pathto(fp.name[len(self.root) + 1:])
# used to avoid circular references so destructors work
def aftertrans(files):
    """Return a callback performing the queued (vfs, src, dest) renames.

    The list is snapshotted as tuples so later mutation of *files* has no
    effect; a missing source (e.g. a journal file that was never created)
    is silently skipped.
    """
    pending = [tuple(entry) for entry in files]
    def runrenames():
        for vfs, src, dest in pending:
            try:
                vfs.rename(src, dest)
            except OSError:
                # journal file does not yet exist; nothing to rename
                pass
    return runrenames
def undoname(fn):
    """Map a journal file path to its matching undo file path.

    Only the first 'journal' occurrence in the basename is rewritten; the
    basename is required (asserted) to start with 'journal'.
    """
    directory, basename = os.path.split(fn)
    assert basename.startswith('journal')
    undo_base = basename.replace('journal', 'undo', 1)
    return os.path.join(directory, undo_base)
def instance(ui, path, create):
    """Repository factory used by the scheme dispatcher: open (or create,
    when *create* is true) the local repository living at *path*."""
    return localrepository(ui, util.urllocalpath(path), create)
def islocal(path):
    # a localrepo is, by definition, always local
    return True | vmg/hg-stable | mercurial/localrepo.py | Python | gpl-2.0 | 96365 |
| vmg/hg-stable | mercurial/localrepo.py | Python | gpl-2.0 | 96,365 |
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 25 13:08:19 2015
@author: jgimenez
"""
from PyQt4 import QtGui, QtCore
from initialConditions_ui import Ui_initialConditionsUI
import os
from utils import *
from PyFoam.RunDictionary.BoundaryDict import BoundaryDict
from PyFoam.RunDictionary.ParsedParameterFile import ParsedParameterFile
from utils import types
# PyQt4 compatibility shims: QString/UnicodeUTF8 exist only under API v1;
# under API v2 (or newer PyQt) fall back to plain-string no-ops.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        # API v2: strings are already unicode, pass through unchanged
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        # newer PyQt: translate() takes no encoding argument
        return QtGui.QApplication.translate(context, text, disambig)
# field names for which an initial-conditions tab is built
unknowns = ['U','p','p_rgh','alpha','k','epsilon','omega','nut','nuTilda']
class initialConditionsUI(QtGui.QScrollArea, Ui_initialConditionsUI):
    """Scroll-area widget wired to the Designer-generated
    initial-conditions form."""
    # NOTE(review): the default ``f=QtCore.Qt.WindowFlags()`` is evaluated
    # once at definition time; it is never used in the body, so harmless.
    def __init__(self, parent=None, f=QtCore.Qt.WindowFlags()):
        QtGui.QScrollArea.__init__(self, parent)
        self.setupUi(self)
class initialConditionsWidget(initialConditionsUI):
    """Tab widget for editing the ``internalField`` (initial) values of an
    OpenFOAM case and writing them back to the current time directory."""
    def __init__(self,folder):
        # folder: path of the OpenFOAM case to edit
        self.currentFolder = folder
        initialConditionsUI.__init__(self)
        # latest time directory, the field files it contains, and its time value
        [self.timedir,self.fields,currtime] = currentFields(self.currentFolder)
        # apply button stays disabled until every enabled editor holds a value
        self.pushButton.setEnabled(False)
        self.addTabs()
    def addTabs(self,ipatch=None):
        """Rebuild one tab per known field: a uniform/nonuniform selector
        plus one (scalar) or three (vector) numeric editors.
        ``ipatch`` is currently unused."""
        # tear down the editors of any previously built tabs
        for itab in range(self.tabWidget.count()):
            layout = self.tabWidget.widget(itab).findChildren(QtGui.QVBoxLayout)[0]
            self.clearLayout(layout,0)
        self.tabWidget.clear()
        for ifield in self.fields:
            # only fields listed in the module-level ``unknowns`` get a tab
            if ifield not in unknowns:
                continue
            widget = QtGui.QWidget()
            layout = QtGui.QVBoxLayout(widget)
            layout2 = QtGui.QHBoxLayout()
            cb = QtGui.QComboBox()
            cb.addItems(['uniform','nonuniform'])
            layout2.addWidget(cb)
            if types[ifield]=='scalar':
                # scalar field: a single numeric line edit
                ledit = QtGui.QLineEdit()
                ledit.setValidator(QtGui.QDoubleValidator())
                QtCore.QObject.connect(ledit, QtCore.SIGNAL(_fromUtf8("textEdited(QString)")), self.checkData)
                layout2.addWidget(ledit)
            else:
                # vector field: one line edit per component
                for j in range(3):
                    ledit = QtGui.QLineEdit()
                    ledit.setValidator(QtGui.QDoubleValidator())
                    layout2.addWidget(ledit)
                    QtCore.QObject.connect(ledit, QtCore.SIGNAL(_fromUtf8("textEdited(QString)")), self.checkData)
            layout.addLayout(layout2)
            if ifield=='U':
                # velocity may instead be initialized from a potential-flow run
                qbutton = QtGui.QCheckBox()
                qbutton.setText('Initialize from potential flow')
                layout.addWidget(qbutton)
                QtCore.QObject.connect(qbutton, QtCore.SIGNAL(_fromUtf8("stateChanged(int)")), self.onPotentialFlow)
            spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
            layout.addItem(spacerItem)
            self.tabWidget.addTab(widget, ifield)
            self.tabWidget.setTabText(self.tabWidget.count(),ifield)
    def onPotentialFlow(self):
        """Toggle the U value editors when the 'initialize from potential
        flow' checkbox changes state."""
        for itab in range(self.tabWidget.count()):
            ifield = self.tabWidget.tabText(itab)
            if ifield=='U':
                print ifield  # NOTE(review): leftover debug output
                layout = self.tabWidget.widget(itab).findChildren(QtGui.QVBoxLayout)[0]
                cb = self.tabWidget.widget(itab).findChildren(QtGui.QCheckBox)[0]
                layout2 = layout.itemAt(0).layout()
                for i in range(layout2.count()):
                    if isinstance(layout2.itemAt(i), QtGui.QWidgetItem):
                        layout2.itemAt(i).widget().setEnabled(not cb.isChecked())
    def clearLayout(self, layout, dejar):
        """Recursively delete the items of *layout*, keeping the first
        *dejar* entries ('dejar' is Spanish for 'keep')."""
        for i in reversed(range(layout.count())):
            if i>= dejar:
                item = layout.itemAt(i)
                if isinstance(item, QtGui.QWidgetItem):
                    item.widget().close()
                    item.widget().deleteLater()
                    # or
                    # item.widget().setParent(None)
                elif isinstance(item, QtGui.QSpacerItem):
                    None
                    # no need to do extra stuff
                else:
                    # nested layout: clear it completely
                    self.clearLayout(item.layout(),0)
                # remove the item from layout
                layout.removeItem(item)
    def setConditions(self):
        """Write the edited values into the ``internalField`` entry of each
        field file in the current time directory."""
        runPotentialFlow = 0
        for itab in range(self.tabWidget.count()):
            ifield = self.tabWidget.tabText(itab)
            layout = self.tabWidget.widget(itab).findChildren(QtGui.QVBoxLayout)[0]
            filename = '%s/%s'%(self.timedir,ifield)
            parsedData = ParsedParameterFile(filename,createZipped=False)
            layout2 = layout.itemAt(0).layout()
            if layout2.count()==2:
                # scalar tab: combo box + one editor
                parsedData['internalField'] = '%s %s'%(layout2.itemAt(0).widget().currentText(),layout2.itemAt(1).widget().text())
            else:
                if ifield == 'U' and self.tabWidget.widget(itab).findChildren(QtGui.QCheckBox)[0].isChecked():
                    # potential-flow initialization requested: write zero U for now
                    runPotentialFlow = 1
                    parsedData['internalField'] = '%s (%s %s %s)'%('uniform',0,0,0)
                else:
                    # vector tab: combo box + three component editors
                    parsedData['internalField'] = '%s (%s %s %s)'%(layout2.itemAt(0).widget().currentText(),layout2.itemAt(1).widget().text(),layout2.itemAt(2).widget().text(),layout2.itemAt(3).widget().text())
            parsedData.writeFile()
        self.pushButton.setEnabled(False)
        if runPotentialFlow:
            # message is Spanish: "must be simulated with potentialFoam, to do!!"
            QtGui.QMessageBox.about(self, "ERROR", 'Debe simularse con potentialFoam, hacer!!')
            return
    def checkData(self):
        """Enable the apply button only when every enabled editor has text."""
        ready = True
        for itab in range(self.tabWidget.count()):
            edits = self.tabWidget.widget(itab).findChildren(QtGui.QLineEdit)
            for E in edits:
                if E.isEnabled():
                    if not E.text():
                        ready = False
        if ready:
            self.pushButton.setEnabled(True)
        else:
            self.pushButton.setEnabled(False) | jmarcelogimenez/petroFoam | initialConditions.py | Python | gpl-2.0 | 6385 |
# -*- coding: utf-8 -*-
#
from rest_framework import serializers
from django.utils.translation import ugettext_lazy as _
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from perms.models import AssetPermission, Action
__all__ = [
'AssetPermissionSerializer',
'ActionsField',
]
class ActionsField(serializers.MultipleChoiceField):
    """Serializer field mapping the stored Action value to its choice keys."""

    def __init__(self, *args, **kwargs):
        # the choice set is fixed by the Action model; override any caller value
        kwargs['choices'] = Action.CHOICES
        super().__init__(*args, **kwargs)

    def to_representation(self, value):
        # stored value -> list of choice keys
        return Action.value_to_choices(value)

    def to_internal_value(self, data):
        # list of choice keys -> stored value; None passes through untouched
        return data if data is None else Action.choices_to_value(data)
class ActionsDisplayField(ActionsField):
    """Like ActionsField, but renders human-readable action labels."""

    def to_representation(self, value):
        label_by_key = dict(Action.CHOICES)
        return [label_by_key.get(key)
                for key in super().to_representation(value)]
class AssetPermissionSerializer(BulkOrgResourceModelSerializer):
    """Bulk org-scoped serializer for AssetPermission objects."""
    # NOTE(review): the field name ``is_valid`` shadows DRF's
    # ``Serializer.is_valid()`` method name; it works as a declared field
    # here but is worth confirming against callers.
    actions = ActionsField(required=False, allow_null=True)
    is_valid = serializers.BooleanField(read_only=True)
    is_expired = serializers.BooleanField(read_only=True)
    class Meta:
        model = AssetPermission
        # field groups reused to build ``fields``
        mini_fields = ['id', 'name']
        small_fields = mini_fields + [
            'is_active', 'is_expired', 'is_valid', 'actions',
            'created_by', 'date_created', 'date_expired',
            'date_start', 'comment'
        ]
        m2m_fields = [
            'users', 'user_groups', 'assets', 'nodes', 'system_users',
            'users_amount', 'user_groups_amount', 'assets_amount',
            'nodes_amount', 'system_users_amount',
        ]
        fields = small_fields + m2m_fields
        read_only_fields = ['created_by', 'date_created']
        extra_kwargs = {
            'is_expired': {'label': _('Is expired')},
            'is_valid': {'label': _('Is valid')},
            'actions': {'label': _('Actions')},
            'users_amount': {'label': _('Users amount')},
            'user_groups_amount': {'label': _('User groups amount')},
            'assets_amount': {'label': _('Assets amount')},
            'nodes_amount': {'label': _('Nodes amount')},
            'system_users_amount': {'label': _('System users amount')},
        }
    @classmethod
    def setup_eager_loading(cls, queryset):
        """ Perform necessary eager loading of data. """
        # prefetch every m2m relation the serializer renders to avoid N+1 queries
        queryset = queryset.prefetch_related('users', 'user_groups', 'assets', 'nodes', 'system_users')
        return queryset
| skyoo/jumpserver | apps/perms/serializers/asset/permission.py | Python | gpl-2.0 | 2,536 |
#coding=utf-8
#介紹:合併字典方式二
import pickle
import os
hashdataS = {}
hashdataP = {}
def updata(self, hashdic):
    """Load a pickled dict from the file named *self* and merge it into
    *hashdic* in place (entries from the file overwrite existing keys).

    Parameter names kept for API compatibility: *self* is a file path
    (not an instance), *hashdic* is the dict to update.
    """
    # BUG FIX: the original called ``cPickle.load`` although only the
    # ``pickle`` module is imported, raising NameError at runtime.
    # ``with`` additionally guarantees the file handle is closed.
    with open(self, 'rb') as dic:
        newdata = pickle.load(dic)
    hashdic.update(newdata)
def main(dict1,dict2,hashname,new):
    """Merge the pickled dicts stored in files *dict1* and *dict2* into
    *hashname* (entries from *dict2* win) and dump the result to *new*.

    NOTE(review): ``cPickle`` is never imported (only ``pickle`` is); the
    dump line raises NameError, which is NOT caught by the IOError
    handler below.  ``output.close()`` inside the ``with`` block is also
    redundant.
    """
    try:
        updata(dict1,hashname)	# load
        print 'befor: %s' % len(hashname)
        updata(dict2,hashname)	# overwrite
        print len(hashname)
        with open(new, 'wb') as output:
            cPickle.dump(hashname, output)
            output.close()
    except IOError:
        # missing input files are silently ignored
        pass
# Rotate the current dictionaries to *.old, then rebuild DictS.K / DictP.K
# by merging each .old file with the corresponding one-shot .kmy input.
os.rename('DictS.K','DictS.old')
os.rename('DictP.K','DictP.old')
main('DictS.old','oneDictS.kmy',hashdataS,'DictS.K')
main('DictP.old','oneDictP.kmy',hashdataP,'DictP.K')
| sveiow/doitpy | AutoAnswer/oldversion/kemuyimoni/字典加工/合并字典方式二.py | Python | gpl-2.0 | 758 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def initial_data(apps, schema_editor):
    """Seed the Force Fields branch of the tech tree: one TechCategory per
    research tier (with its research cost) and three Tech rows per tier.

    Uses the historical models via ``apps.get_model`` so the migration
    stays valid as the live models evolve.
    """
    TC = apps.get_model('tech', 'TechCategory')
    T = apps.get_model('tech', 'Tech')
    tc = TC(name='Advanced Magnetism', cost=250, category='FF')
    tc.save()
    T(name='Class I Shield', categ=tc, desc="""Protects ships from physical and energy attacks. Absorbs up to 5 times the ship's size in damage before failing, with each attack reduced by 1 points of damage. Regenerates 30% per combat turn.""").save()
    T(name='Mass Driver', categ=tc, desc="""A rail gun firing a hyper-velocity projectile. Inflicts 6 points of damage. Damage is not reduced by range. Modifications: autofire, armor piercing, heavy mount, point defense.""").save()
    T(name='ECM Jammer', categ=tc, desc="""Creates a magnetic pulse jamming both weapon tracking systems and missile guidance units, adding +70% to the ship's missile evasion.""").save()
    tc = TC(name='Gravitic Fields', cost=650, category='FF')
    tc.save()
    T(name='Anti-Grav Harness', categ=tc, desc="""Allows ground troops and armor to fly, increasing their mobility and defense. Adds +10 to their ground combat rating.""").save()
    T(name='Inertial Stabilizer', categ=tc, desc="""Creates a partial warp field that can operate in normal space. It vastly improves the ship's mobility and makes it considerably harder to target, adding +50 to ship's beam defense and halving movement cost for turning.""").save()
    T(name='Gyro Destabilizer', categ=tc, desc="""Creates a sudden rift in a ship's subspace field. Weakens the inertial controls of the ship, spinning it in random directions, inflicting 1-4 points of structural damage times the ship's size class. Range is 15 squares.""").save()
    tc = TC(name='Magneto Gravitics', cost=900, category='FF')
    tc.save()
    T(name='Class III Shield', categ=tc, desc="""Protects ships from physical and energy attacks. Absorbs up to 15 times the ship's size in damage before failing, with each attack reduced by 3 points of damage. Regenerates 30% per combat turn.""").save()
    T(name='Planetary Radiation Shield', categ=tc, desc="""Reduces solar and cosmic bombardment so lifeforms can comfortably move about the surface. Radiated climates become Barren. Reduces damage against a planet by 5 points.""").save()
    T(name='Warp Dissipator', categ=tc, desc="""Creates a large radius field about a ship that prevents any ship from enter into hyperspace. Enemy ships cannon retreat while the dissipator is functioning.""").save()
    tc = TC(name='Electromagnetic Refraction', cost=1500, category='FF')
    tc.save()
    T(name='Stealth Field', categ=tc, desc="""Reduces the gravitic emissions from FTL drives. Ships equipped with stealth fields can not be detected at any range on the main screen.""").save()
    T(name='Personal Shield', categ=tc, desc="""Deflects physical and energy attacks, increasing the combat rating of militia, troops and armor by +20.""").save()
    T(name='Stealth Suit', categ=tc, desc="""Renders a person virtually invisible, blending the wearer into the background. Adds +10 to all spy rolls.""").save()
    tc = TC(name='Warp Fields', cost=2000, category='FF')
    tc.save()
    T(name='Pulsar', categ=tc, desc="""When fired, causes violent vibrations in ships, missiles and fighters adjacent to the equipped ships. Inflicts 2-24 points of damage per size class of the ship damaged.""").save()
    T(name='Warp Field Interdicter', categ=tc, desc="""Creates a warp destabilizing field around the star system in which it is built. Generates a field 2 parsecs in radius about the system that slows all enemy movement to 1 parsec per turn.""").save()
    T(name='Lightning Field', categ=tc, desc="""Surrounds a ship in a powerful energized field capable of overloading the targeting system of any missile, torpedo or fighter. Has a 50% chance of destroying these weapons before they damage the ship.""").save()
    tc = TC(name='Subspace Fields', cost=2750, category='FF')
    tc.save()
    T(name='Class V Shield', categ=tc, desc="""Protects ships from physical and energy attacks. Absorbs up to 25 times the ship's size in damage before failing, with each attack reduced by 5 points of damage. Regenerates 30% per combat turn.""").save()
    T(name='Multi-Wave ECM Jammer', categ=tc, desc="""Jamming frequencies across the entire spectrum this device adds +100 to the ships missile evasion rating.""").save()
    T(name='Gauss Cannon', categ=tc, desc="""An extremely powerful linear accelerator which inflicts 18 points of damage regardless of range. Modifications: autofire, armor piercing, heavy mount.""").save()
    tc = TC(name='Distortion Fields', cost=3500, category='FF')
    tc.save()
    T(name='Cloaking Device', categ=tc, desc="""Completely hides a ship from long range scans. In combat, it adds +80 to the ship's beam defense and missiles have a 50% chance to miss as long as the ship does not attack. Must wait one turn without firing to recloak.""").save()
    T(name='Stasis Field', categ=tc, desc="""Places target in suspended animation. It cannot fire, recharge weapons, cloak or be attacked. Field has a 3-square range and will hold the target ship until the stasis field is turned off or destroyed.""").save()
    T(name='Hard Shields', categ=tc, desc="""Reduces the damage of any attack by an additional 3 points. Allows operation of all shields in a nebula and prevents the use of enemy tranporters even after shields have been dropped. Immune to shield-piercing weapons.""").save()
    tc = TC(name='Quantum Fields', cost=4500, category='FF')
    tc.save()
    T(name='Class VII Shield', categ=tc, desc="""Protects ships from physical and energy attacks. Absorbs up to 35 times the ship's size in damage before failing, with each attack reduced by 7 points of damage. Regenerates 30% per combat turn.""").save()
    T(name='Planetary Flux Shield', categ=tc, desc="""Seals planet in an energy field. Converts Radiated climates into Barren. Reduces damage against the planet 10 points. It replaces any planetary radiation shield already built.""").save()
    T(name='Wide Area Jammer', categ=tc, desc="""Adds +130 to a ship's missile evasion rating, adds +70 to the missile evasion of other ships in the fleet. The fleet's bonus is not cumulative with any missile evasion bonus given by other jammers. Only the best bonus will apply.""").save()
    tc = TC(name='Transwarp Fields', cost=7500, category='FF')
    tc.save()
    T(name='Displacement Device', categ=tc, desc="""All weapons targeted against an equipped ship have a 30% chance of missing completely, regardless of any other considerations.""").save()
    T(name='Subspace Teleporter', categ=tc, desc="""Allows a ship to execute a hyperspace jump of up to 20 squares. The jump does not change the ship's direction; it must rotate noramlly to change facing.""").save()
    T(name='Inertial Nullifier', categ=tc, desc="""Creates a warp field that can operate in normal space. Increases ship mobility and makes it harder to target. Adds +100 to the ship's beam defense, and allows it to change direction without movement cost.""").save()
    tc = TC(name='Temporal Fields', cost=15000, category='FF')
    tc.save()
    T(name='Class X Shield', categ=tc, desc="""Protects ships from physical and energy attacks. Absorbs up to 50 times the ship's size in damage before failing, with each attack reduced by 10 points of damage. Regenerates 30% per combat turn.""").save()
    T(name='Planetary Barrier Shield', categ=tc, desc="""Seals a planet in an energy field. Converts Radiated climates to Barren climates. Reduces damage against a planet by 20 points. Ground troops and biological weapons cannot pass.""").save()
    T(name='Phasing Cloak', categ=tc, desc="""Allows a ship to temporarily enter another dimension instead of just visually disappearing. While cloaked, the ship cannot be detected or attacked. Can only function for 10 turns, after which it will act like an ordinary cloaking device.""").save()
class Migration(migrations.Migration):
    """Data migration seeding the Force Fields technology tree."""
    dependencies = [
        ('tech', '0009_physics_techs'),
    ]
    operations = [
        # no reverse_code is supplied, so this migration is not reversible
        migrations.RunPython(initial_data),
    ]
# EOF
| dwagon/pymoo | moo/tech/migrations/0010_forcefield_techs.py | Python | gpl-2.0 | 8,262 |
# HTTP proxy for serving static resources and forwarding requests to Synthese.
# @file proxy.py
# @author Sylvain Pasche
#
# This file belongs to the SYNTHESE project (public transportation specialized software)
# Copyright (C) 2002 Hugues Romain - RCSmobility <contact@rcsmobility.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import httplib
import logging
import os
import threading
import time
import urllib
import urlparse
from wsgiref import simple_server
import static
from paste.proxy import parse_headers
from paste.proxy import Proxy
import werkzeug.wsgi
from synthesepy import utils
log = logging.getLogger(__name__)
# The Synthese daemon expects a x-forwarded-host header when getting a proxied
# request. Paste proxy doesn't provide it, so we monkey patch the __call__
# method here. The modification from Paste are shown in comments.
def paste_proxy_patched_call(self, environ, start_response):
    """Patched ``paste.proxy.Proxy.__call__``.

    Identical to upstream paste except for the marked synthese
    modification: it forwards the original Host header as
    ``x-forwarded-host``, which the Synthese daemon expects.

    NOTE(review): ``httpexceptions`` on the "Disallowed" branch is never
    imported in this file; that branch would raise NameError if
    ``allowed_request_methods`` were ever configured.
    """
    if (self.allowed_request_methods and
        environ['REQUEST_METHOD'].lower() not in self.allowed_request_methods):
        return httpexceptions.HTTPBadRequest("Disallowed")(environ, start_response)

    if self.scheme == 'http':
        ConnClass = httplib.HTTPConnection
    elif self.scheme == 'https':
        ConnClass = httplib.HTTPSConnection
    else:
        raise ValueError(
            "Unknown scheme for %r: %r" % (self.address, self.scheme))
    conn = ConnClass(self.host)
    headers = {}
    # copy HTTP_* request headers, dropping host and suppressed ones
    for key, value in environ.items():
        if key.startswith('HTTP_'):
            key = key[5:].lower().replace('_', '-')
            if key == 'host' or key in self.suppress_http_headers:
                continue
            headers[key] = value
    headers['host'] = self.host
    if 'REMOTE_ADDR' in environ:
        headers['x-forwarded-for'] = environ['REMOTE_ADDR']
    # synthese modification
    if 'HTTP_HOST' in environ:
        headers['x-forwarded-host'] = environ['HTTP_HOST']
    # end of synthese modification
    if environ.get('CONTENT_TYPE'):
        headers['content-type'] = environ['CONTENT_TYPE']
    if environ.get('CONTENT_LENGTH'):
        if environ['CONTENT_LENGTH'] == '-1':
            # This is a special case, where the content length is basically undetermined
            body = environ['wsgi.input'].read(-1)
            headers['content-length'] = str(len(body))
        else:
            headers['content-length'] = environ['CONTENT_LENGTH']
            length = int(environ['CONTENT_LENGTH'])
            body = environ['wsgi.input'].read(length)
    else:
        body = ''

    # rebuild the upstream request path (optionally re-rooted at self.path)
    path_info = urllib.quote(environ['PATH_INFO'])
    if self.path:
        request_path = path_info
        if request_path and request_path[0] == '/':
            request_path = request_path[1:]
        path = urlparse.urljoin(self.path, request_path)
    else:
        path = path_info
    if environ.get('QUERY_STRING'):
        path += '?' + environ['QUERY_STRING']

    conn.request(environ['REQUEST_METHOD'],
                 path,
                 body, headers)
    res = conn.getresponse()
    headers_out = parse_headers(res.msg)

    status = '%s %s' % (res.status, res.reason)
    start_response(status, headers_out)
    # @@: Default?
    length = res.getheader('content-length')
    if length is not None:
        body = res.read(int(length))
    else:
        body = res.read()
    conn.close()
    return [body]

# install the patched method on paste's Proxy class
Proxy.__call__ = paste_proxy_patched_call
class WSGIProxy(object):
    """
    Runs a HTTP server to serve static files. Requests for the Synthese daemon
    are proxied to its configured port.
    """
    SYNTHESE_SUFFIXES = ['/synthese', '/synthese3', '/admin']
    ADMIN_URL = '/admin/synthese?fonction=admin&mt=17732923532771328&tt=17732923532771329&pt=17732923532771330'

    def __init__(self, env, project):
        self.env = env
        # everything not handled locally is forwarded to the daemon port
        self.proxy_app = Proxy('http://localhost:%s/' % env.c.port)
        # import here to avoid circular dependencies.
        from synthesepy import web
        self.web_app = web.get_application(project=project)
        # (url_prefix, static-file app) pairs, tried in order
        self.static_apps = []
        for base, path in env.c.static_paths:
            self.static_apps.append((base, static.Cling(path)))

    def _redirect(self, environ, start_response, url):
        """Issue a 302 redirect to *url* (made absolute if needed)."""
        if not url.startswith('http://'):
            url = 'http://' + environ['HTTP_HOST'] + url
        start_response('302 Found', [
            ('Location', url),
            ('Content-type', 'text/plain')])
        return '302 Found'

    def add_utf8_header(self, start_response):
        """Wrap *start_response* to force a UTF-8 charset on text/html."""
        def start_response_wrapper(status, headers):
            headers_dict = dict(headers)
            if headers_dict['Content-Type'] == 'text/html':
                headers_dict['Content-Type'] = 'text/html; charset=UTF-8'
            return start_response(status, headers_dict.items())
        return start_response_wrapper

    def _handle_static_files(self, environ, start_response):
        """Serve the request from a static directory, or fall back to the
        daemon's Smart URL page service when no file matches."""
        path_info = environ.get('PATH_INFO', '')
        for base, app in self.static_apps:
            if not path_info[1:].startswith(base):
                continue
            # strip the matched prefix before handing off to the static app
            path_info = '/' + path_info[1 + len(base):]
            environ['PATH_INFO'] = path_info
            path = app.root + path_info
            if (os.path.isfile(path) or
                (path_info.endswith('/') and os.path.isfile(
                    os.path.join(path, 'index.html')))):
                return app(environ, start_response)
        log.debug('Path %r not found in any static directories, forwarding '
            'to Synthese for Smart URL', path_info)
        smart_url_path = path_info[1:]
        # Warning: this removes duplicate GET parameters.
        qs = dict(urlparse.parse_qsl(environ['QUERY_STRING']))
        qs.update(dict(
            SERVICE='page',
            si=self.env.c.site_id,
            smart_url='/' + smart_url_path,
        ))
        environ['PATH_INFO'] = self.SYNTHESE_SUFFIXES[0]
        environ['QUERY_STRING'] = urllib.urlencode(qs)
        return self.proxy_app(environ, self.add_utf8_header(start_response))

    def __call__(self, environ, start_response):
        """WSGI entry point: dispatch to web app, admin redirect, daemon
        proxy or static file handling, in that order."""
        path_info = environ['PATH_INFO']
        # Web app
        WEB_APP_PREFIX = '/w/'
        if path_info.startswith(WEB_APP_PREFIX):
            werkzeug.wsgi.pop_path_info(environ)
            return self.web_app(environ, start_response)
        # Admin redirect helpers.
        if path_info in ('/admin', '/admin/'):
            return self._redirect(environ, start_response, self.ADMIN_URL)
        if path_info.endswith(
            tuple(self.SYNTHESE_SUFFIXES + self.env.c.synthese_suffixes)):
            return self.proxy_app(environ, self.add_utf8_header(start_response))
        return self._handle_static_files(environ, start_response)
# Paste httpd is threaded, which should provide better performance.
USE_PASTE_HTTPD = True
wsgi_httpd = None
def start(env, project):
    """Create the WSGI proxy httpd and serve it on a background thread.

    Stores the server in the module-level ``wsgi_httpd`` so ``stop()``
    can reach it.  Returns None.

    NOTE(review): the wsgiref branch calls ``WSGIProxy(env)`` without the
    required ``project`` argument — it would raise TypeError if
    USE_PASTE_HTTPD were False.
    """
    global wsgi_httpd
    if USE_PASTE_HTTPD:
        import paste.httpserver
        # silence paste's noisy thread-pool logger
        paste_log = logging.getLogger('paste.httpserver.ThreadPool')
        paste_log.setLevel(logging.WARNING)
        wsgi_httpd = paste.httpserver.serve(
            WSGIProxy(env, project), '0.0.0.0', env.c.wsgi_proxy_port, start_loop=False)
    else:
        wsgi_httpd = simple_server.make_server(
            '', env.c.wsgi_proxy_port, WSGIProxy(env))
    log.info('WSGI proxy serving on http://localhost:%s' %
        env.c.wsgi_proxy_port)
    threading.Thread(target=wsgi_httpd.serve_forever).start()
def stop():
    """Shut down the running WSGI proxy httpd.

    The shutdown call differs per backend: paste's httpd hangs if
    shutdown() is used, and wsgiref's simple_server raises from
    server_close(), so each server gets the call it tolerates.
    """
    global wsgi_httpd
    closer = wsgi_httpd.server_close if USE_PASTE_HTTPD else wsgi_httpd.shutdown
    closer()
def serve_forever(env, project):
    """Run the proxy in the foreground until interrupted with ctrl-c.

    Any process already listening on the configured port is killed first.
    """
    utils.kill_listening_processes(env.c.wsgi_proxy_port)
    if utils.can_connect(env.c.wsgi_proxy_port):
        # NOTE(review): the comma builds an Exception with a (msg, port)
        # tuple; '%' formatting was almost certainly intended here.
        raise Exception(
            'Error, something is already listening on port %s',
            env.c.wsgi_proxy_port)
    # NOTE(review): start() returns None, so ``proxy`` is always None
    proxy = start(env, project)
    log.info('Proxy running, press ctrl-c to stop')
    try:
        # idle loop; the actual serving happens on start()'s thread
        while True:
            time.sleep(10)
    except KeyboardInterrupt:
        print '^C'
    log.info('Stopping proxy')
    stop()
| Open-Transport/synthese | legacy/tools/synthesepy/proxy.py | Python | gpl-2.0 | 9,018 |
import webbrowser
import platform
def get_version():
    """Return the running interpreter's version as "major.minor".

    The original truncated the dotted version string to its first three
    characters, which mangles two-digit minor versions (e.g. "3.10.4"
    became "3.1").  Joining the major and minor components from
    ``platform.python_version_tuple()`` is exact for any version.
    """
    major, minor, _micro = platform.python_version_tuple()
    return "{}.{}".format(major, minor)
def open_doc(url):
    """Open *url* in the user's default web browser."""
    webbrowser.open(url)
def open_library():
    """Open the ``re`` library reference matching the running Python."""
    doc_url = "http://docs.python.org/{}/library/re.html".format(get_version())
    open_doc(doc_url)
def open_guide():
    """Open the regex HOWTO matching the running Python version."""
    guide_url = "http://docs.python.org/{}/howto/regex.html".format(get_version())
    open_doc(guide_url)
| bittercode/pyrrhic-ree | modules/onlineDocs.py | Python | gpl-2.0 | 530 |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
from bs4 import BeautifulSoup
def parse_html(html):
    """Parse a court-docket HTML page into a list of dicts, one per
    "movimento" (docket entry), mapping row labels to row values.

    NOTE(review): ``BeautifulSoup(html)`` picks an implicit parser;
    passing one explicitly (e.g. "html.parser") avoids bs4's warning and
    parser-dependent results.
    """
    def strip_elem(e):
        # stringify so whitespace-only NavigableStrings test falsy in filter()
        return str(e).strip()

    def non_empty_children(elem):
        if hasattr(elem, "children"):
            return list(filter(strip_elem, elem.children))
        else:
            # leaf nodes (strings) have no children
            return None

    def parse_movimento(movimento):
        def parse_tr(tr):
            # a data row is exactly (label cell, value cell)
            tds = non_empty_children(tr)
            if tds and len(tds) == 2:
                conteudos = map(lambda f: f.text.strip(), tds)
                return tuple(conteudos)
            else:
                return ()

        def items_do_movimento(movimento):
            # yield siblings until the next 'tipoMovimento' header row
            while movimento.next_sibling:
                if hasattr(movimento, "name"):
                    yield movimento
                sibling = movimento.next_sibling
                if (hasattr(sibling, "attrs") and
                    "tipoMovimento" in sibling.attrs.get("class", [])):
                    break
                movimento = movimento.next_sibling

        # drop the empty () tuples before building the dict
        mov_tuples = filter(None, map(parse_tr, items_do_movimento(movimento)))
        return dict(mov_tuples)

    soup = BeautifulSoup(html)
    movimentos = soup.table.findAll(attrs={"class": "tipoMovimento"})
    return [parse_movimento(m) for m in movimentos]
| lucastx/tjrj-processos | tjrj/scraping.py | Python | gpl-2.0 | 1,302 |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
from miasm2.expression.expression import ExprId
from miasm2.core.cpu import gen_reg, gen_regs
# Program counter, fetch counter and the HI/LO multiply/divide registers.
gen_reg('PC', globals())
gen_reg('PC_FETCH', globals())
gen_reg('R_LO', globals())
gen_reg('R_HI', globals())
exception_flags = ExprId('exception_flags', 32)
# NOTE(review): unlike the other *_init ids, these two are created without
# an explicit size argument — confirm this is intended.
PC_init = ExprId("PC_init")
PC_FETCH_init = ExprId("PC_FETCH_init")
# The 32 general-purpose register names, presumably in MIPS o32 ABI order.
regs32_str = ["ZERO", 'AT', 'V0', 'V1'] +\
    ['A%d'%i for i in xrange(4)] +\
    ['T%d'%i for i in xrange(8)] +\
    ['S%d'%i for i in xrange(8)] +\
    ['T%d'%i for i in xrange(8, 10)] +\
    ['K0', 'K1'] +\
    ['GP', 'SP', 'FP', 'RA']
regs32_expr = [ExprId(x, 32) for x in regs32_str]
# Floating point registers (64-bit) and FP condition codes.
regs_flt_str = ['F%d'%i for i in xrange(0x20)]
regs_fcc_str = ['FCC%d'%i for i in xrange(8)]
# NOTE(review): R_LO/R_HI were already bound by gen_reg above and are
# rebound here with explicit 32-bit ExprIds.
R_LO = ExprId('R_LO', 32)
R_HI = ExprId('R_HI', 32)
R_LO_init = ExprId('R_LO_init', 32)
R_HI_init = ExprId('R_HI_init', 32)
# Coprocessor 0 registers: generic CPR0_<n> names with the known,
# architecturally-defined slots renamed.
cpr0_str = ["CPR0_%d"%x for x in xrange(0x100)]
cpr0_str[0] = "INDEX"
cpr0_str[16] = "ENTRYLO0"
cpr0_str[24] = "ENTRYLO1"
cpr0_str[40] = "PAGEMASK"
cpr0_str[72] = "COUNT"
cpr0_str[80] = "ENTRYHI"
cpr0_str[104] = "CAUSE"
cpr0_str[112] = "EPC"
cpr0_str[128] = "CONFIG"
cpr0_str[152] = "WATCHHI"
regs_cpr0_expr, regs_cpr0_init, regs_cpr0_info = gen_regs(cpr0_str, globals())
gpregs_expr, gpregs_init, gpregs = gen_regs(regs32_str, globals())
regs_flt_expr, regs_flt_init, fltregs = gen_regs(regs_flt_str, globals(), sz=64)
regs_fcc_expr, regs_fcc_init, fccregs = gen_regs(regs_fcc_str, globals())
# Flat views of every register id and its *_init counterpart, plus a
# mapping from each register to its initial-value expression.
all_regs_ids = [PC, PC_FETCH, R_LO, R_HI] + gpregs_expr + regs_flt_expr + \
    regs_fcc_expr + regs_cpr0_expr
all_regs_ids_byname = dict([(x.name, x) for x in all_regs_ids])
all_regs_ids_init = [PC_init, PC_FETCH_init, R_LO_init, R_HI_init] + \
    gpregs_init + regs_flt_init + regs_fcc_init + regs_cpr0_init
all_regs_ids_no_alias = all_regs_ids[:]
regs_init = {}
for i, r in enumerate(all_regs_ids):
    regs_init[r] = all_regs_ids_init[i]
| rom1sqr/miasm | miasm2/arch/mips32/regs.py | Python | gpl-2.0 | 1,927 |
from datetime import datetime
from unittest import TestCase
from application.backend.models.person import gender, create_person_from_dict
from application.backend.tests.test_helpers import create_person
class PersonTest(TestCase):
    """Unit tests for the Person model's JSON round-trip and equality.

    NOTE(review): uses the Python-2-only ``unicode`` builtin.
    """
    def test_should_jsonify_itself(self):
        date_of_birth = datetime.now()
        person = create_person(
            surname='Nekesa', first_name='Patricia',
            date_of_birth=date_of_birth, gender=gender.FEMALE
        )
        expected_json = {
            u"surname": u"Nekesa",
            u"first_name": u"Patricia",
            u"date_of_birth": unicode(date_of_birth),
            u"gender": unicode(gender.FEMALE),
            # unsaved person: id serializes as the string "None"
            u"id": unicode(None)
        }
        person_json = person.jsonify()
        self.assertEqual(person_json, expected_json)
    def test_should_construct_self_from_dict(self):
        dictionary = {"surname": "Nekesa", "first_name": "Patricia", "date_of_birth": "2012-02-02", "gender": "female"}
        expected_person = create_person(
            surname=u'Nekesa', first_name=u'Patricia',
            date_of_birth=u'2012-02-02', gender=gender.FEMALE
        )
        person = create_person_from_dict(dictionary)
        self.assertEqual(person, expected_person)
    def test_should_equate_two_people_if_ids_are_the_same(self):
        # both persons are unsaved, so presumably both ids are None and
        # Person.__eq__ compares ids — TODO confirm against the model
        person_1 = create_person()
        person_2 = create_person()
        self.assertEqual(person_1, person_2) | wanderanimrod/family-tree | application/backend/tests/models/person_test.py | Python | gpl-2.0 | 1433 |
import datetime
import hashlib
import random
import re
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db import transaction
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
# Custom-user-model support (Django >= 1.5); rebinds the User imported
# above when get_user_model() is available.
try:
    from django.contrib.auth import get_user_model
    User = get_user_model()
except ImportError:
    from django.contrib.auth.models import User
# Timezone-aware "now" when django.utils.timezone exists, naive otherwise.
try:
    from django.utils.timezone import now as datetime_now
except ImportError:
    datetime_now = datetime.datetime.now
# Activation keys are 40-char SHA1 hex digests.
SHA1_RE = re.compile('^[a-f0-9]{40}$')
class RegistrationManager(models.Manager):
"""
Custom manager for the ``RegistrationProfile`` model.
The methods defined here provide shortcuts for account creation
and activation (including generation and emailing of activation
keys), and for cleaning out expired inactive accounts.
"""
def activate_user(self, activation_key):
"""
Validate an activation key and activate the corresponding
``User`` if valid.
If the key is valid and has not expired, return the ``User``
after activating.
If the key is not valid or has expired, return ``False``.
If the key is valid but the ``User`` is already active,
return ``False``.
To prevent reactivation of an account which has been
deactivated by site administrators, the activation key is
reset to the string constant ``RegistrationProfile.ACTIVATED``
after successful activation.
"""
# Make sure the key we're trying conforms to the pattern of a
# SHA1 hash; if it doesn't, no point trying to look it up in
# the database.
if SHA1_RE.search(activation_key):
try:
profile = self.get(activation_key=activation_key)
except self.model.DoesNotExist:
return False
if not profile.activation_key_expired():
user = profile.user
user.is_active = True
user.save()
profile.activation_key = self.model.ACTIVATED
profile.save()
return user
return False
    def create_inactive_user(self, username, email, password,
                             site, send_email=False):
        """
        Create a new, inactive ``User``, generate a
        ``RegistrationProfile`` and return the new ``User``.

        Pass ``send_email=True`` to email the activation key to the user
        via the profile's ``send_activation_email`` (note: unlike stock
        django-registration, the default here is ``False``).
        """
        new_user = User.objects.create_user(username, email, password)
        # the account stays inactive until the activation key is used
        new_user.is_active = False
        new_user.save()

        registration_profile = self.create_profile(new_user)

        if send_email:
            registration_profile.send_activation_email(site)

        return new_user
    # wrap so user + profile creation commit (or roll back) atomically
    create_inactive_user = transaction.commit_on_success(create_inactive_user)
def create_profile(self, user):
"""
Create a ``RegistrationProfile`` for a given
``User``, and return the ``RegistrationProfile``.
The activation key for the ``RegistrationProfile`` will be a
SHA1 hash, generated from a combination of the ``User``'s
username and a random salt.
"""
salt = hashlib.sha1(str(random.random())).hexdigest()[:5]
username = user.username
if isinstance(username, unicode):
username = username.encode('utf-8')
activation_key = hashlib.sha1(salt+username).hexdigest()
return self.create(user=user,
activation_key=activation_key)
def delete_expired_users(self):
"""
Remove expired instances of ``RegistrationProfile`` and their
associated ``User``s.
Accounts to be deleted are identified by searching for
instances of ``RegistrationProfile`` with expired activation
keys, and then checking to see if their associated ``User``
instances have the field ``is_active`` set to ``False``; any
``User`` who is both inactive and has an expired activation
key will be deleted.
It is recommended that this method be executed regularly as
part of your routine site maintenance; this application
provides a custom management command which will call this
method, accessible as ``manage.py cleanupregistration``.
Regularly clearing out accounts which have never been
activated serves two useful purposes:
1. It alleviates the ocasional need to reset a
``RegistrationProfile`` and/or re-send an activation email
when a user does not receive or does not act upon the
initial activation email; since the account will be
deleted, the user will be able to simply re-register and
receive a new activation key.
2. It prevents the possibility of a malicious user registering
one or more accounts and never activating them (thus
denying the use of those usernames to anyone else); since
those accounts will be deleted, the usernames will become
available for use again.
If you have a troublesome ``User`` and wish to disable their
account while keeping it in the database, simply delete the
associated ``RegistrationProfile``; an inactive ``User`` which
does not have an associated ``RegistrationProfile`` will not
be deleted.
"""
for profile in self.all():
try:
if profile.activation_key_expired():
user = profile.user
if not user.is_active:
user.delete()
profile.delete()
except User.DoesNotExist:
profile.delete()
class RegistrationProfile(models.Model):
    """
    A simple profile which stores an activation key for use during
    user account registration.
    Generally, you will not want to interact directly with instances
    of this model; the provided manager includes methods
    for creating and activating new accounts, as well as for cleaning
    out accounts which have never been activated.
    While it is possible to use this model as the value of the
    ``AUTH_PROFILE_MODULE`` setting, it's not recommended that you do
    so. This model's sole purpose is to store data temporarily during
    account registration and activation.
    """
    # Sentinel written into activation_key once the account is activated,
    # so the key can never be replayed.
    ACTIVATED = u"ALREADY_ACTIVATED"
    user = models.ForeignKey(User, unique=True, verbose_name=_('user'))
    activation_key = models.CharField(_('activation key'), max_length=40)
    objects = RegistrationManager()
    class Meta:
        verbose_name = _('registration profile')
        verbose_name_plural = _('registration profiles')
    def __unicode__(self):
        return u"Registration information for %s" % self.user
    def activation_key_expired(self):
        """
        Determine whether this ``RegistrationProfile``'s activation
        key has expired, returning a boolean -- ``True`` if the key
        has expired.
        Key expiration is determined by a two-step process:
        1. If the user has already activated, the key will have been
           reset to the string constant ``ACTIVATED``. Re-activating
           is not permitted, and so this method returns ``True`` in
           this case.
        2. Otherwise, the date the user signed up is incremented by
           the number of days specified in the setting
           ``ACCOUNT_ACTIVATION_DAYS`` (which should be the number of
           days after signup during which a user is allowed to
           activate their account); if the result is less than or
           equal to the current date, the key has expired and this
           method returns ``True``.
        """
        expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)
        return self.activation_key == self.ACTIVATED or \
               (self.user.date_joined + expiration_date <= datetime_now())
    # Render as a boolean icon in the Django admin changelist.
    activation_key_expired.boolean = True
    def send_activation_email(self, site):
        """
        Send an activation email to the user associated with this
        ``RegistrationProfile``.
        The activation email will make use of two templates:
        ``registration/activation_email_subject.txt``
            This template will be used for the subject line of the
            email. Because it is used as the subject line of an email,
            this template's output **must** be only a single line of
            text; output longer than one line will be forcibly joined
            into only a single line.
        ``registration/activation_email.txt``
            This template will be used for the body of the email.
        These templates will each receive the following context
        variables:
        ``activation_key``
            The activation key for the new account.
        ``expiration_days``
            The number of days remaining during which the account may
            be activated.
        ``site``
            An object representing the site on which the user
            registered; depending on whether ``django.contrib.sites``
            is installed, this may be an instance of either
            ``django.contrib.sites.models.Site`` (if the sites
            application is installed) or
            ``django.contrib.sites.models.RequestSite`` (if
            not). Consult the documentation for the Django sites
            framework for details regarding these objects' interfaces.
        """
        ctx_dict = {'activation_key': self.activation_key,
                    'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS,
                    'site': site}
        subject = render_to_string('registration/activation_email_subject.txt',
                                   ctx_dict)
        # Email subject *must not* contain newlines
        subject = ''.join(subject.splitlines())
        message = render_to_string('registration/activation_email.txt',
                                   ctx_dict)
        self.user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
| meletakis/collato | esn/registration/models.py | Python | gpl-2.0 | 10,562 |
import unittest
from antelope_catalog.data_sources.local import TEST_ROOT
from antelope_catalog import LcCatalog
from lcatools.interfaces import IndexRequired
# Module-level fixture shared by every test below: a catalog rooted at the
# test directory with the remote AntelopeV1 resource registered in memory
# only (store=False).  NOTE(review): resolving this resource hits the live
# antelope-lca.net API, so the whole module requires network access.
cat = LcCatalog(TEST_ROOT)
ref = 'calrecycle.antelope'
cat.new_resource(ref, 'http://www.antelope-lca.net/uo-lca/api/', 'AntelopeV1Client',
                 store=False, interfaces=['index', 'inventory', 'quantity'], quiet=True)
ar = cat.get_archive(ref)
class AntelopeV1Client(unittest.TestCase):
    # NOTE(review): these are integration tests -- they pin exact values
    # served by the remote dataset, so they will fail if upstream changes.
    def test_stages(self):
        # The 'stages' endpoint enumerates all fragment stages.
        self.assertEqual(len(ar.get_endpoint('stages')), 87)
    def test_stagename(self):
        # Stage ids resolve to their human-readable names.
        inv = ar.make_interface('inventory')
        self.assertEqual(inv.get_stage_name('42'), 'Natural Gas')
        self.assertEqual(inv.get_stage_name('47'), 'Natural Gas Supply')
        self.assertEqual(inv.get_stage_name('81'), 'WWTP')
    def test_impactcategory(self):
        # A valid index maps to a category name; an invalid one raises.
        self.assertEqual(ar._get_impact_category(6), 'Cancer human health effects')
        with self.assertRaises(ValueError):
            ar._get_impact_category(5)
    def test_nonimpl(self):
        # terminate() needs an index capability the v1 resource lacks.
        with self.assertRaises(IndexRequired):
            next(cat.query(ref).terminate('flows/87'))
    def test_traversal(self):
        ffs = cat.query(ref).get('fragments/47').traverse()
        self.assertEqual(len(ffs), 14)
        self.assertSetEqual({-0.5, -0.01163, -0.0102, 0.0, 0.5}, set(round(x.node_weight, 5) for x in ffs))
    def test_lcia(self):
        lcia = cat.query(ref).get('fragments/19').fragment_lcia('lciamethods/4')
        self.assertSetEqual(set(x.external_ref for x in lcia.component_entities()),
                            {'Crude Oil', 'Electricity', 'Natural Gas', 'Refinery'})
        self.assertSetEqual(set(round(x.cumulative_result, 10) for x in lcia.components()),
                            {0.0004522897, 0.0000733389, 0.0000419222, 0.0001582613})
        self.assertAlmostEqual(lcia.total(), 0.0007258121306, places=12)
# Allow running this module directly: ``python test_antelope_v1_client.py``.
if __name__ == '__main__':
    unittest.main()
import sys
import numpy as np
import pylab
import matplotlib.pyplot as plt
import scipy.integrate
import scipy.optimize
from collections import namedtuple
import geo
import astro_help as ah
import disk_sub as disk
# Physical constants and conversions, CGS units throughout.
RADIAN=57.29598                  # degrees per radian
C=2.997925e10                    # speed of light [cm/s]
MSOL=1.979e33                    # solar mass [g]
G=6.670e-8                       # gravitational constant [cgs]
YR=3.1556925e7                   # seconds per year
EPSILON=1e-6                     # small-number tolerance
PI=3.1416                        # NOTE(review): low-precision pi, not math.pi
STEFAN_BOLTZMANN=5.669e-5        # Stefan-Boltzmann constant [erg cm^-2 s^-1 K^-4]
def tdisk(m, mdot, r):
    """Reference (inner-edge) disk temperature for a standard thin disk.

    m is the central mass, mdot the accretion rate, r the inner radius,
    all in CGS; returns the characteristic temperature in Kelvin.
    """
    # T^4 for a Shakura-Sunyaev disk evaluated at the inner radius; the
    # expression is kept in the exact same order as before so the
    # floating-point result is bit-identical.
    tref4 = 3. * G / (8. * PI * STEFAN_BOLTZMANN) * m * mdot / (r * r * r)
    tref = pow(tref4, 0.25)
    return tref
def teff(t, x):
    """Local effective temperature of a thin disk annulus.

    t is the reference temperature from tdisk(); x = r/rmin is the radius
    in units of the inner radius.  Returns t * [(1 - x^-1/2) / x^3]^1/4.
    """
    radial_factor = (1.e0 - (x ** -0.5e0)) / (x * x * x)
    return t * (radial_factor ** 0.25e0)
def spec_disk(f1, f2, m, mdot, rmin, rmax):
    """Disk spectrum on a linear frequency grid between f1 and f2 [Hz].

    The disk is cut into 99 logarithmically spaced annuli between rmin
    and rmax; each annulus radiates as a blackbody at its local effective
    temperature and adds planck_nu * area * 2*pi to every frequency bin.

    Returns (freq, spec): the frequency grid and the accumulated
    monochromatic luminosity on it.
    """
    tref=tdisk(m, mdot, rmin)
    # BUG FIX: linspace requires an integer sample count; (f2/f1)*100 is a
    # float (an error on modern numpy), so truncate explicitly as the old
    # numpy implicitly did.
    nfreq=int((f2/f1)*100)
    freq=np.linspace(f1,f2,nfreq)
    # BUG FIX: np.empty returns uninitialized memory, so the accumulation
    # below started from garbage.  Start from zeros.
    spec=np.zeros(nfreq)
    dfreq=freq[1]-freq[0]
    # Annulus edges, log-spaced in radius.
    rtemp=np.logspace(np.log10(rmin),np.log10(rmax),num=100)
    rdisk=[]
    for j in range(len(rtemp)-1):
        rdisk.append((rtemp[j]+rtemp[j+1])/2.0)
        r=rdisk[j]/rmin
        area=PI*(rtemp[j+1]*rtemp[j+1]-rtemp[j]*rtemp[j])
        t=(disk.teff(tref,r))
        for i in range(len(freq)):
            spec[i]=spec[i]+(ah.planck_nu(t,freq[i])*area*PI*2)
    return (freq,spec)
def spec_disk1(f1, f2, m, mdot, rmin, rmax):
    """Disk spectrum on a 1000-point logarithmic frequency grid.

    Same annulus decomposition as spec_disk(), but each bin is
    additionally multiplied by its bin width (freq[i+1]-freq[i]) and
    evaluated at the bin midpoint, i.e. the result is per-bin power
    rather than a monochromatic density.

    Returns (freq, spec).
    """
    tref=tdisk(m, mdot, rmin)
    nfreq=1000
    freq=np.logspace(np.log10(f1),np.log10(f2),nfreq)
    # BUG FIX: np.empty returns uninitialized memory, so the accumulation
    # below started from garbage.  Start from zeros.
    spec=np.zeros(nfreq)
    dfreq=freq[1]-freq[0]
    rtemp=np.logspace(np.log10(rmin),np.log10(rmax),num=100)
    rdisk=[]
    for j in range(len(rtemp)-1):
        rdisk.append((rtemp[j]+rtemp[j+1])/2.0)
        r=rdisk[j]/rmin
        area=PI*(rtemp[j+1]*rtemp[j+1]-rtemp[j]*rtemp[j])
        t=(disk.teff(tref,r))
        for i in range(len(freq)-1):
            spec[i]=spec[i]+(ah.planck_nu(t,(freq[i+1]+freq[i])/2.0)*area*PI*2*(freq[i+1]-freq[i]))
    return (freq,spec)
def lnu_disk(f, m, mdot, rmin, rmax):
    """Monochromatic disk luminosity at a single frequency f [Hz].

    Sums planck_nu(T(r), f) * ring area * 2*pi over 99 log-spaced annuli
    between rmin and rmax.
    """
    tref = tdisk(m, mdot, rmin)
    edges = np.logspace(np.log10(rmin), np.log10(rmax), num=100)
    lnu = 0.0
    for inner, outer in zip(edges[:-1], edges[1:]):
        midpoint = (inner + outer) / 2.0
        scaled_r = midpoint / rmin
        ring_area = PI * (outer * outer - inner * inner)
        t = disk.teff(tref, scaled_r)
        lnu = lnu + ah.planck_nu(t, f) * ring_area * PI * 2.0
    return lnu
def llamb_disk(lamb, m, mdot, rmin, rmax):
    """Monochromatic disk luminosity at a single wavelength lamb.

    Wavelength-space counterpart of lnu_disk(): sums
    planck_lamb(T(r), lamb) * ring area * 2*pi over 99 log-spaced annuli
    between rmin and rmax.
    """
    tref = tdisk(m, mdot, rmin)
    edges = np.logspace(np.log10(rmin), np.log10(rmax), num=100)
    llamb = 0.0
    for inner, outer in zip(edges[:-1], edges[1:]):
        midpoint = (inner + outer) / 2.0
        scaled_r = midpoint / rmin
        ring_area = PI * (outer * outer - inner * inner)
        t = disk.teff(tref, scaled_r)
        llamb = llamb + ah.planck_lamb(t, lamb) * ring_area * PI * 2.0
    return llamb
def spec_disk2 (f1,f2,m,mdot,rmin,rmax):
    """Disk spectrum on a hand-built per-decade frequency grid.

    The grid covers whole decades bracketing [f1, f2]: each decade is
    split into 90 linear steps (10 per unit times 9 units), and only
    points inside [f1, f2] are kept.  Annulus decomposition matches
    spec_disk().  Returns (freq, spec) where freq is a plain list.
    """
    tref=tdisk(m, mdot, rmin)
    nfreq=10
    # Round the requested band out to whole decades.
    f1a=10**float(int(np.log10(f1)))
    f2a=10**float(int(np.log10(f2))+1)
    nrange=int(np.log10((f2a/f1a)))
    freq=[]
    dfreq=[]
    ftemp=f1a
    df=f1a/nfreq
    for i in range(nrange):
        # 9 units per decade, nfreq steps per unit; the step size grows
        # tenfold with each decade.
        for j in range(nfreq*9):
            ftemp=ftemp+df
            if ftemp > f2:
                break
            if ftemp >= f1:
                freq.append(ftemp)
        df=df*10.0
    #print freq[0],freq[len(freq)-1]
    spec=np.zeros(len(freq))
    rtemp=np.logspace(np.log10(rmin),np.log10(rmax),num=100)
    rdisk=[]
    for j in range(len(rtemp)-1):
        rdisk.append((rtemp[j]+rtemp[j+1])/2.0)
        r=rdisk[j]/rmin
        area=PI*(rtemp[j+1]*rtemp[j+1]-rtemp[j]*rtemp[j])
        t=(disk.teff(tref,r))
        # NOTE(review): the last frequency bin is never filled
        # (range(len(freq)-1)) -- confirm whether that is intentional.
        for i in range(len(freq)-1):
            spec[i]=spec[i]+(ah.planck_nu(t,freq[i])*area*PI*2)
    return (freq,spec)
| jhmatthews/cobra | source/disk_sub.py | Python | gpl-2.0 | 3,164 |
from time import time
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_delete
from django.dispatch import receiver
import os
def upload_path(instance, filename):
    """Build a per-user upload path: uploads/user_<owner id>/<ts>_<name>.

    The current time (with '.' replaced by '_') is prepended to the file
    name so repeated uploads of the same file never collide.
    """
    stamp = str(time()).replace('.', '_')
    return 'uploads/user_{0}/{1}_{2}'.format(instance.owner.id, stamp, filename)
class UserProfile(models.Model):
    # One-to-one extension of the auth User.  photo is free-form text --
    # presumably a URL or encoded blob; TODO confirm against callers.
    user = models.OneToOneField(User)
    photo = models.TextField()
class Images(models.Model):
    # An uploaded image owned by a single user; files land under the
    # per-user path produced by upload_path().
    owner = models.ForeignKey(User)
    image = models.ImageField(upload_to=upload_path)
    image_file_name = models.CharField(max_length=100, null=True)
    date_created = models.DateTimeField(
        auto_now_add=True, verbose_name='created')
# Function to delete from the file storage
@receiver(post_delete, sender=Images)
def delete_from_file_system(sender, instance, **kwargs):
    """Remove the image file and its '<name>filtered<ext>' sibling when an
    Images row is deleted, so orphaned files do not accumulate on disk."""
    original = instance.image.path
    root, extension = os.path.splitext(original)
    filtered = root + "filtered" + extension
    for candidate in (original, filtered):
        if os.path.exists(candidate):
            os.remove(candidate)
| andela-ooshodi/django-photo-application | djangophotoapp/photoapp/models.py | Python | gpl-2.0 | 1,310 |
from rest_framework import serializers
from csinterop.models import SharingProposal, Folder, User
class SharingProposalSerializer(serializers.ModelSerializer):
    """Serializer for SharingProposal objects (DRF 2.x style).

    The read side exposes the proposal key as ``share_id`` plus flattened
    owner/folder fields; the write side rebuilds the nested owner and
    folder objects from the raw request payload in ``restore_object``.
    """
    share_id = serializers.RelatedField(source='key')
    permission = serializers.CharField(source='get_permission', read_only=True)
    folder_name = serializers.RelatedField(source='folder.name')
    owner_name = serializers.RelatedField(source='owner.name')
    owner_email = serializers.RelatedField(source='owner.email')
    protocol_version = serializers.CharField(required=False)

    def restore_object(self, attrs, instance=None):
        """
        Given a dictionary of deserialized field values, either update
        an existing model instance, or create a new model instance.
        """
        if instance is not None:
            return instance
        proposal = SharingProposal(**attrs)
        proposal.key = self.context['request'].DATA['share_id']
        owner = User()
        owner.name = self.context['request'].DATA['owner_name']
        owner.email = self.context['request'].DATA['owner_email']
        proposal.owner = owner
        folder = Folder()
        folder.name = self.context['request'].DATA['folder_name']
        proposal.folder = folder
        # BUG FIX: the original used ``is 'read-write'``, which compares
        # object identity, not value; since .lower() returns a fresh
        # string the test was always False and write access could never
        # be granted.  Compare with == instead.
        write_access = self.context['request'].DATA['permission'].lower() == 'read-write'
        proposal.write_access = write_access
        proposal.status = 'PENDING'
        return proposal

    class Meta:
        model = SharingProposal
        fields = (
            'share_id', 'recipient', 'resource_url', 'owner_name', 'owner_email', 'folder_name', 'permission',
            'callback', 'protocol_version',
            'status', 'created_at')
from __future__ import generators
import ConfigParser
import copy
import email.Message
import email.Parser
import email.Utils
import errno
import hmac
import inspect
import md5
import os
import popen2
import random
import re
import select
import sha
import shutil
import socket
import sys
import tempfile
import time
import urllib2
import isconf
from isconf.Errno import iserrno
from isconf.Globals import *
from isconf.fbp822 import fbp822
from isconf.Kernel import kernel
(START,IHAVE,SENDME) = range(3)
# XXX the following were migrated from 4.1.7 for now -- really need to
# be FBP components, at least in terms of logging
class Cache:
    """a combined cache manager and UDP mesh -- XXX needs to be split
    >>> pid = os.fork()
    >>> if not pid:
    ...     time.sleep(999)
    ...     sys.exit(0)
    >>> os.environ["HOSTNAME"] = "testhost"
    >>> os.environ["IS_HOME"] = "/tmp/var/is"
    >>> cache = Cache(54321,54322)
    >>> assert cache
    >>> os.kill(pid,9)
    """
    # Construct the cache/mesh node: record ports, build the on-disk layout
    # under $IS_HOME/fs, and pick a per-process "temporary uid" used to
    # recognize (and drop) our own broadcast packets.
    def __init__(self,udpport,httpport,timeout=2):
        # XXX kludge -- what we really need is a dict which
        # shows the "mirror list" of all known locations for
        # files, rather than self.req
        self.req = {}
        self.udpport = udpport
        self.httpport = httpport
        self.timeout = float(timeout)
        self.lastSend = 0
        self.sock = None
        self.fetched = {}
        self.nets = self.readnets()
        self.sendq = []
        # temporary uid -- uniquely identifies host in non-persistent
        # packets. If we want something permanent we should store it
        # somewhere under private.
        self.tuid = "%s@%s" % (random.random(),
                os.environ['HOSTNAME'])
        class Path: pass
        self.p = Path()
        home = os.environ['IS_HOME']
        # XXX redundant with definitions in ISFS.py -- use a common lib?
        self.p.cache = os.path.join(home,"fs/cache")
        self.p.private = os.path.join(home,"fs/private")
        self.p.announce = "%s/.announce" % (self.p.private)
        self.p.pull = "%s/.pull" % (self.p.private)
        for d in (self.p.cache,self.p.private):
            if not os.path.isdir(d):
                os.makedirs(d,0700)
    # Read the optional $IS_NETS file: one "<scheme> <addr>" pair per line.
    def readnets(self):
        # read network list
        nets = {'udp': [], 'tcp': []}
        netsfn = os.environ.get('IS_NETS',None)
        debug("netsfn", netsfn)
        if netsfn and os.path.exists(netsfn):
            netsfd = open(netsfn,'r')
            for line in netsfd:
                (scheme,addr) = line.strip().split()
                nets[scheme].append(addr)
        debug("nets", str(nets))
        return nets
    # Broadcast an HMAC-signed 'ihave' announcement for a cached file,
    # advertising our HTTP port so peers can fetch it.
    def ihaveTx(self,path):
        path = path.lstrip('/')
        fullpath = os.path.join(self.p.cache,path)
        mtime = 0
        if not os.path.exists(fullpath):
            warn("file gone: %s" % fullpath)
            return
        mtime = getmtime_int(fullpath)
        reply = FBP.msg('ihave',tuid=self.tuid,
                file=path,mtime=mtime,port=self.httpport,scheme='http')
        HMAC.msgset(reply)
        self.bcast(str(reply))
    # Queue a message for every configured UDP address (plus broadcast
    # unless $IS_NOBROADCAST is set); sender() drains the queue.
    def bcast(self,msg):
        # XXX only udp supported so far
        debug("bcast")
        # NOTE(review): addrs aliases self.nets['udp'], so the append below
        # grows that shared list by one '<broadcast>' entry on every call --
        # looks unintended; confirm before relying on self.nets contents.
        addrs = self.nets['udp']
        if not os.environ.get('IS_NOBROADCAST',None):
            addrs.append('<broadcast>')
        for addr in addrs:
            if len(self.sendq) > 20:
                debug("sendq overflow")
                return
            self.sendq.append((msg,addr,self.udpport))
    # Cooperative task: drain sendq over the UDP socket, re-queueing and
    # backing off on send failure.
    def sender(self):
        while True:
            yield None
            yield kernel.sigsleep, 1
            while len(self.sendq):
                msg,addr,udpport = self.sendq.pop(0)
                try:
                    debug("sendto", addr, msg)
                    self.sock.sendto(msg,0,(addr,udpport))
                except:
                    info("sendto failed: %s" % addr)
                    self.sendq.append((msg,addr,udpport))
                    yield kernel.sigsleep, 1
            yield kernel.sigsleep, self.timeout/5.0
    # Handle a peer's 'ihave': if the remote copy is newer, fetch it over
    # HTTP (with a fresh random challenge); if ours is newer, announce back.
    def ihaveRx(self,msg,ip):
        yield None
        scheme = msg['scheme']
        port = msg['port']
        path = msg['file']
        mtime = msg.head.mtime
        # XXX is python's pseudo-random good enough here?
        #
        # probably, but for other cases, use 'gpg --gen-random 2 16'
        # to generate 128 bits of random data from entropy
        #
        challenge = str(random.random())
        url = "%s://%s:%s/%s?challenge=%s" % (scheme,ip,port,path,challenge)
        path = path.lstrip('/')
        # simple check to ignore foreign domains
        # XXX probably want to make this a list of domains
        domain = os.environ['IS_DOMAIN']
        if not path.startswith(domain + '/'):
            debug("foreign domain, ignoring: %s" % path)
            return
        fullpath = os.path.join(self.p.cache,path)
        mymtime = 0
        debug("checking",url)
        if os.path.exists(fullpath):
            mymtime = getmtime_int(fullpath)
        if mtime > mymtime:
            debug("remote is newer:",url)
            if self.req.has_key(path):
                self.req[path]['state'] = SENDME
            yield kernel.wait(self.wget(path,url,challenge))
        elif mtime < mymtime:
            debug("remote is older:",url)
            self.ihaveTx(path)
        else:
            debug("remote and local times are the same:",path,mtime,mymtime)
    # Cooperative task: watch the .pull file written by ISFS, turn each
    # listed path into a 'whohas' request, and loop until every request is
    # answered or expires; touching .pull signals completion to ISFS.
    def puller(self):
        tmp = "%s.tmp" % self.p.pull
        while True:
            timeout= self.timeout
            yield None
            # get list of files
            if not os.path.exists(self.p.pull):
                # hmm. we must have died while pulling
                if os.path.exists(tmp):
                    old = open(tmp,'r').read()
                    open(self.p.pull,'a').write(old)
                open(self.p.pull,'a')
            os.rename(self.p.pull,tmp)
            # files = open(tmp,'r').read().strip().split("\n")
            data = open(tmp,'r').read()
            if not len(data):
                open(self.p.pull,'a')
                yield kernel.sigsleep, 1
                continue
            files = data.strip().split("\n")
            # create requests
            for path in files:
                path = path.lstrip('/')
                fullpath = os.path.join(self.p.cache,path)
                mtime = 0
                if os.path.exists(fullpath):
                    mtime = getmtime_int(fullpath)
                req = FBP.msg('whohas',file=path,newer=mtime,tuid=self.tuid)
                HMAC.msgset(req)
                self.req.setdefault(path,{})
                self.req[path]['msg'] = req
                self.req[path]['expires'] = time.time() + timeout
                self.req[path]['state'] = START
            while True:
                # send requests
                yield None
                debug("calling resend")
                self.resend()
                yield kernel.sigsleep, 1
                # see if they've all been filled or timed out
                # debug(str(self.req))
                if not self.req:
                    # okay, all done -- touch the file so ISFS knows
                    open(self.p.pull,'a')
                    break
    # Rebroadcast outstanding 'whohas' requests (rate-limited to one pass
    # per half second); expire requests whose fetch never started.
    def resend(self):
        """(re)send outstanding requests"""
        if time.time() < self.lastSend + .5:
            return
        self.lastSend = time.time()
        paths = self.req.keys()
        for path in paths:
            debug("resend", self.req[path]['expires'], path, self.req[path])
            if self.req[path]['state'] > START:
                # file is being fetched
                debug("resend fetching")
                pass
            elif time.time() > self.req[path]['expires']:
                # fetch never started
                debug("timeout",path)
                del self.req[path]
                continue
            req = self.req[path]['msg']
            debug("calling bcast")
            self.bcast(str(req))
    # Announce every path queued in the .announce file, then clear it.
    def flush(self):
        if not os.path.exists(self.p.announce):
            return
        tmp = "%s.tmp" % self.p.announce
        os.rename(self.p.announce,tmp)
        files = open(tmp,'r').read().strip().split("\n")
        for path in files:
            self.ihaveTx(path)
    # Cooperative task: HTTP-fetch one file from a peer into the cache.
    # Verifies the peer's x-hmac response to our challenge, checks
    # last-modified/content-length, downloads to a hidden temp file and
    # renames into place, then re-announces the file.
    def wget(self,path,url,challenge):
        """
        # >>> port=random.randrange(50000,60000)
        # >>> class fakesock:
        # ...     def sendto(self,msg,foo,bar):
        # ...         print "sendto called"
        # >>> srcdir="/tmp/var/is/fs/cache/"
        # >>> pridir="/tmp/var/isdst/fs/private/"
        # >>> if not os.path.exists(srcdir):
        # ...     os.makedirs(srcdir)
        # >>> if not os.path.exists(pridir):
        # ...     os.makedirs(pridir)
        # >>> open(srcdir + "foo", 'w').write("lakfdsjl")
        # >>> open(pridir + ".pull",'w').write("foo\\n")
        # >>> h = kernel.spawn(httpServer(port=port,dir=srcdir))
        # >>> kernel.run(steps=1000)
        # >>> os.environ["HOSTNAME"] = "testhost"
        # >>> os.environ["IS_HOME"] = "/tmp/var/isdst"
        # >>> shutil.rmtree("/tmp/var/isdst",ignore_errors=True)
        # >>> cache = Cache(54321,port)
        # >>> assert cache
        # >>> cache.sock = fakesock()
        # >>> url = "http://localhost:%d/foo" % port
        # >>> w = kernel.spawn(cache.wget("foo",url,"abc"))
        # >>> kernel.run(steps=1000)
        # >>> open("/tmp/var/isdst/fs/cache/foo",'r').read()
        """
        yield None
        # XXX kludge to keep from beating up HTTP servers
        if self.fetched.get(url,0) > time.time() - 5:
            debug("toosoon",path,url)
            if self.req.has_key(path):
                del self.req[path]
            return
        self.fetched[path] = time.time()
        info("fetching", url)
        path = path.lstrip('/')
        fullpath = os.path.join(self.p.cache,path)
        (dir,file) = os.path.split(fullpath)
        # XXX security checks on pathname
        mtime = 0
        if os.path.exists(fullpath):
            mtime = getmtime_int(fullpath)
        if not os.path.exists(dir):
            os.makedirs(dir,0700)
        try:
            u = urllib2.urlopen(url)
        except:
            debug("HTTP failed opening %s" % url)
            return
        uinfo = u.info()
        response = uinfo.get('x-hmac')
        if not HMAC.ck(challenge,response):
            debug("HMAC failed, abort fetching: %s" % url)
            return
        mod = uinfo.get('last-modified')
        size = uinfo.get('content-length')
        mod_secs = email.Utils.mktime_tz(email.Utils.parsedate_tz(mod))
        if mod_secs <= mtime:
            warn("not newer:",url,mod,mod_secs,mtime)
            if self.req.has_key(path):
                del self.req[path]
            return
        debug(url,size,mod)
        tmp = os.path.join(dir,".%s.tmp" % file)
        # XXX set umask somewhere early
        # XXX use the following algorithm everywhere else as a more
        # secure way of creating files that aren't world readable
        # -- also see os.mkstemp()
        if os.path.exists(tmp): os.unlink(tmp)
        open(tmp,'w')
        os.chmod(tmp,0600)
        open(tmp,'w') # what does this second open do?
        tmpfd = open(tmp,'a')
        while True:
            # XXX move timeout to here
            yield kernel.sigbusy
            try:
                (r,w,e) = select.select([u],[],[u],0)
                if e:
                    # XXX not sure if we should break or raise here
                    break
                if not r:
                    continue
            except:
                # python 2.4 throws a "no fileno attribute" exception if
                # the entire page content has already arrived
                pass
            try:
                rxd = u.read(8192)
            except:
                break
            if len(rxd) == 0:
                break
            # XXX show progress
            tmpfd.write(rxd)
        tmpfd.close()
        actual_size = os.stat(tmp).st_size
        if size is None:
            warn("""
            The host at %s is running an older version of
            ISconf; that older version does not send content-length
            headers, so we can't check the length of files it sends
            us; we might store a corrupt file as a result. You should
            upgrade that host to a more recent ISconf version soon.
            """)
        else:
            size = int(size)
            if size != actual_size:
                debug("size mismatch: wanted %d got %d, abort fetching: %s" %
                        (size, actual_size, url))
                return
        meta = (mod_secs,mod_secs)
        os.rename(tmp,fullpath)
        os.utime(fullpath,meta)
        if self.req.has_key(path):
            del self.req[path]
        self.ihaveTx(path)
    # Main cooperative task: spawn puller/sender, bind the UDP socket, then
    # loop receiving 'whohas'/'ihave' packets, dropping our own and any
    # that fail HMAC verification, and dispatching the rest.
    def run(self):
        from SocketServer import UDPServer
        from isconf.fbp822 import fbp822, Error822
        kernel.spawn(self.puller())
        kernel.spawn(self.sender())
        # XXX most of the following should be broken out into a receiver() task
        dir = self.p.cache
        udpport = self.udpport
        debug("UDP server serving %s on port %d" % (dir,udpport))
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sock = sock
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, True)
        sock.setblocking(0)
        sock.bind(('',udpport))
        # laddr = sock.getsockname()
        # localip = os.environ['HOSTNAME']
        while True:
            yield None
            self.flush()
            yield None
            try:
                data,addr = sock.recvfrom(8192)
                # XXX check against addrs or nets
                debug("from %s: %s" % (addr,data))
                factory = fbp822()
                msg = factory.parse(data)
                type = msg.type().strip()
                if msg.head.tuid == self.tuid:
                    # debug("one of ours -- ignore",str(msg))
                    continue
                if not HMAC.msgck(msg):
                    debug("HMAC failed, dropping: %s" % msg)
                    continue
                if type == 'whohas':
                    path = msg['file']
                    path = path.lstrip('/')
                    fullpath = os.path.join(dir,path)
                    fullpath = os.path.normpath(fullpath)
                    newer = int(msg.get('newer',None))
                    # security checks
                    bad=0
                    if fullpath != os.path.normpath(fullpath):
                        bad += 1
                    if dir != os.path.commonprefix(
                            (dir,os.path.abspath(fullpath))):
                        print dir,os.path.commonprefix(
                                (dir,os.path.abspath(fullpath)))
                        bad += 2
                    if bad:
                        warn("unsafe request %d from %s: %s" % (
                            bad,addr,fullpath))
                        continue
                    if not os.path.isfile(fullpath):
                        debug("ignoring whohas from %s: not found: %s" % (addr,fullpath))
                        continue
                    if newer is not None and newer >= getmtime_int(
                            fullpath):
                        debug("ignoring whohas from %s: not newer: %s" % (addr,fullpath))
                        continue
                    # url = "http://%s:%d/%s" % (localip,httpport,path)
                    self.ihaveTx(path)
                    continue
                if type == 'ihave':
                    debug("gotihave:",str(msg))
                    ip = addr[0]
                    yield kernel.wait(self.ihaveRx(msg,ip))
                    continue
                warn("unsupported message type from %s: %s" % (addr,type))
            except socket.error:
                yield kernel.sigsleep, 1
                continue
            except Exception, e:
                warn("%s from %s: %s" % (e,addr,str(msg)))
                continue
# Cooperative task: serve the cache directory over HTTP so peers can fetch
# files announced via the UDP mesh.  Non-blocking accept loop; each request
# is handled in a daemon thread (ThreadingMixIn).
def httpServer(port,dir):
    from BaseHTTPServer import HTTPServer
    from isconf.HTTPServer import SimpleHTTPRequestHandler
    from SocketServer import ThreadingMixIn
    """
    # >>> port=random.randrange(50000,60000)
    # >>> srcdir="/tmp/var/is/fs/cache/"
    # >>> if not os.path.exists(srcdir):
    # >>>     os.makedirs(srcdir)
    # >>> open(srcdir + "foo",'w').write("lakfdsjl")
    # >>> pid = os.fork()
    # >>> if not pid:
    # >>>     kernel.run(httpServer(port=port,dir=srcdir))
    # >>> time.sleep(1)
    # >>> u = urllib2.urlopen("http://localhost:%d/foo" % port)
    # >>> k = u.info().keys()
    # >>> k.sort()
    # >>> k
    # ['content-length', 'content-type', 'date', 'last-modified', 'server']
    # >>> u.read()
    # 'lakfdsjl'
    # >>> os.kill(pid,9)
    """
    # Note: Switched from ForkingMixIn to ThreadingMixIn around
    # 4.2.8.206 in order to remove nasty race condition between the
    # waitpid() calls generated by the popen2 library in
    # ISFS.updateExec and by the SocketServer.ForkingMixIn. The HTTP
    # server was sometimes reaping exec processes and stealing the
    # exit status... ForkingMixIn is *not* thread-safe or
    # microtask-safe, because it calls waitpid(0, ...) rather than
    # using the child pid list it already has. Argh.
    def logger(*args):
        # Route request logging to a file instead of stderr.
        msg = str(args)
        open("/tmp/isconf.http.log",'a').write(msg+"\n")
    SimpleHTTPRequestHandler.log_message = logger
    if not os.path.isdir(dir):
        os.makedirs(dir,0700)
    os.chdir(dir)
    class ThreadingServer(ThreadingMixIn,HTTPServer): pass
    serveraddr = ('',port)
    svr = ThreadingServer(serveraddr,SimpleHTTPRequestHandler)
    svr.daemon_threads = True
    svr.socket.setblocking(0)
    debug("HTTP server serving %s on port %d" % (dir,port))
    while True:
        yield None
        try:
            request, client_address = svr.get_request()
        except socket.error:
            # Non-blocking socket: EAGAIN means "no pending connection".
            yield kernel.sigsleep, .1
            # includes EAGAIN
            continue
        except Exception, e:
            debug("get_request exception:", str(e))
            yield kernel.sigsleep, 1
            continue
        # XXX filter request -- e.g. do we need directory listings?
        try:
            # process_request does the fork... For now we're going to
            # say that it's okay that the Kernel and other tasks fork
            # with it; since process_request does not yield, nothing
            # else will run in the child before it exits.
            os.chdir(dir)
            svr.process_request(request, client_address)
        except:
            svr.handle_error(request, client_address)
            svr.close_request(request)
class Hmac:
    '''HMAC key management
    >>> HMAC = Hmac(ckfreq=1)
    >>> keyfile = "/tmp/hmac_keys-test-case-data"
    >>> factory = fbp822()
    >>> msg = factory.mkmsg('red apple')
    >>> os.environ['IS_HMAC_KEYS'] = ""
    >>> msg.hmacset('foo')
    '8ca8301bb1a077358ce8c3e9a601d83a2643f33d'
    >>> HMAC.msgck(msg)
    True
    >>> os.environ['IS_HMAC_KEYS'] = keyfile
    >>> open(keyfile,'w').write("\\n\\n")
    >>> time.sleep(2)
    >>> msg.hmacset('foo')
    '8ca8301bb1a077358ce8c3e9a601d83a2643f33d'
    >>> HMAC.msgck(msg)
    True
    >>> open(keyfile,'w').write("someauthenticationkey\\nanotherkey\\n")
    >>> time.sleep(2)
    >>> HMAC.msgset(msg)
    '0abf42fd374fc75cdc4bd0284f4c9ec48f9e0569'
    >>> HMAC.msgck(msg)
    True
    >>> msg.hmacset('foo')
    '8ca8301bb1a077358ce8c3e9a601d83a2643f33d'
    >>> HMAC.msgck(msg)
    False
    >>> msg.hmacset('anotherkey')
    '51116aaa8bc9de5078850b9347aa95ada066b259'
    >>> HMAC.msgck(msg)
    True
    >>> msg.hmacset('someauthenticationkey')
    '0abf42fd374fc75cdc4bd0284f4c9ec48f9e0569'
    >>> HMAC.msgck(msg)
    True
    >>> res = HMAC.response('foo')
    >>> res
    '525a59615b881ab282ca60b2ab31e82aec7e31db'
    >>> HMAC.ck('foo',res)
    True
    >>> HMAC.ck('foo','afds')
    False
    >>> HMAC.ck('bar',res)
    False
    >>> open(keyfile,'a').write("+ANY+\\n")
    >>> time.sleep(2)
    >>> HMAC.msgset(msg)
    '0abf42fd374fc75cdc4bd0284f4c9ec48f9e0569'
    >>> HMAC.msgck(msg)
    True
    >>> msg.hmacset('foo')
    '8ca8301bb1a077358ce8c3e9a601d83a2643f33d'
    >>> HMAC.msgck(msg)
    True
    >>> HMAC.ck('foo','afds')
    True
    '''
    # ckfreq: minimum seconds between re-checks of the key file's mtime.
    def __init__(self,ckfreq=10):
        self.expires = 0
        self.mtime = 0
        self.ckfreq = ckfreq
        self.reset()
    # Clear loaded keys and the '+ANY+' (accept-anything) flag.
    def reset(self):
        self._keys = []
        self.any = False
    # Load keys from $IS_HMAC_KEYS if the file changed since last check.
    # Blank lines and '#' comments are skipped; the literal line '+ANY+'
    # disables verification entirely.  Returns the current key list
    # (empty list means "no keying configured -- accept everything").
    def reload(self):
        path = os.environ.get('IS_HMAC_KEYS',None)
        if not path:
            return []
        if time.time() > self.expires \
                and os.path.exists(path) \
                and self.mtime < getmtime_int(path):
            self.expires = time.time() + self.ckfreq
            debug("reloading",path)
            self.mtime = getmtime_int(path)
            self.reset()
            for line in open(path,'r').readlines():
                line = line.strip()
                if line.startswith('#'):
                    continue
                if not len(line):
                    continue
                if line == '+ANY+':
                    self.any = True
                    continue
                self._keys.append(line)
        # debug('XXX keys',self._keys)
        return self._keys
    # True if the message verifies against any configured key (or if no
    # keys are configured, or '+ANY+' is set).
    def msgck(self,msg):
        keys = self.reload()
        if not len(keys):
            return True
        if self.any:
            return True
        for key in keys:
            if msg.hmacok(key):
                return True
        return False
    # Sign a message with the first (primary) key, if any are configured.
    def msgset(self,msg):
        keys = self.reload()
        if not len(keys):
            return
        key = keys[0]
        return msg.hmacset(key)
    # Verify a challenge/response pair against every configured key.
    def ck(self,challenge,response):
        debug('ck(): challenge',challenge)
        debug('ck(): response',response)
        keys = self.reload()
        if not len(keys):
            return True
        if self.any:
            return True
        for key in keys:
            h = hmac.new(key,msg=challenge,digestmod=sha)
            digest = h.hexdigest()
            if digest == response:
                debug('ck: response ok')
                debug('XXX ck(): key',key)
                return True
        debug('ck: bad response')
        return False
    # Produce our HMAC-SHA1 response to a peer's challenge, using the
    # primary key; None when no keys are configured.
    def response(self,challenge):
        keys = self.reload()
        if not len(keys):
            return
        key = keys[0]
        h = hmac.new(key,msg=challenge,digestmod=sha)
        response = h.hexdigest()
        debug('response(): challenge',challenge)
        debug('response(): response',response)
        return response
HMAC = Hmac()
| stevegt/isconf4 | lib/python/isconf/Cache.py | Python | gpl-2.0 | 23,037 |
#!/usr/bin/env python
# Creates and saves a JSON file to update the D3.js graphs
import MySQLdb
import MySQLdb.cursors
import json
import Reference as r
import logging
def CreateSentimentIndex(NegativeWords, PositiveWords, TotalWords):
    '''Return (positive - negative) / total as a float sentiment score.

    Returns None when TotalWords is 0 (no words counted), matching the
    historical behaviour -- previously this was an implicit fall-through;
    it is now spelled out so callers can see the None case.
    '''
    if TotalWords != 0:
        return (PositiveWords - NegativeWords) / float(TotalWords)
    return None
def CreateJsonData(QueryResults):
    '''Creates a list of dictionaries containing the dates and sentiment indexes'''
    return [
        {"date": row['DateTime'].strftime('%Y-%m-%d %H:%M:%S'),
         "index": CreateSentimentIndex(row['Negative'], row['Positive'],
                                       row['TotalWords'])}
        for row in QueryResults
    ]
def OutputJsonFile(InputDictionary):
    '''Serialize InputDictionary to JSON and save it for the D3.js graphs.

    Writes to json/twittermetrics_sentiment.js (relative to the working
    directory) and returns True.

    BUG FIX: the original opened the file and wrote with ``print >>``
    but never closed it, so the data could sit unflushed; a context
    manager now guarantees close/flush.
    '''
    JsonOutput = json.dumps(InputDictionary)
    OutputFileName = 'json/twittermetrics_sentiment.js'
    with open(OutputFileName, 'w') as FileOutput:
        # print >> appended a trailing newline; keep that behaviour.
        FileOutput.write(JsonOutput + '\n')
    return True
def CreateJsonFile():
    '''Extracts data from the database and saves a JSON file to the server.

    Connects to MySQL using the credentials in Reference (r), selects the
    word counts and timestamps from the keyword-results table, converts
    them to (date, sentiment-index) records and writes them out via
    OutputJsonFile.  Returns OutputJsonFile's result (True on success).
    '''
    FN_NAME = "CreateJsonFile"
    dbDict = MySQLdb.connect(
        host=r.DB_HOST,
        user=r.DB_USER,
        passwd=r.DB_PASSWORD,
        db=r.DB_NAME,
        cursorclass=MySQLdb.cursors.DictCursor
    )
    try:
        curDict = dbDict.cursor()
        Query = "SELECT " + r.KR_FIELD_TOTALWORDS + ", " + r.KR_FIELD_POSITIVE + ", " + r.KR_FIELD_NEGATIVE + ", " + r.KR_FIELD_DATETIME + " FROM " + r.DB_TABLE_KEYWORDSRESULTS + ";"
        logging.debug(FN_NAME, Query)
        curDict.execute(Query)
        QueryResults = curDict.fetchall()
        Output = CreateJsonData(QueryResults)
        ProcessResult = OutputJsonFile(Output)
        logging.info('%s - JSON file created and saved to server with result %s', FN_NAME, ProcessResult)
    finally:
        # BUG FIX: the original said ``dbDict.close`` (no parentheses),
        # which evaluated the bound method and discarded it -- the
        # connection was never actually closed.  Close it, and do so in
        # a finally block so errors don't leak the connection.
        dbDict.close()
    return ProcessResult
| AdamDynamic/TwitterMetrics | CreateJson.py | Python | gpl-2.0 | 2,191 |