code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
|---|---|---|---|---|---|
import os
import glob
import time
import threading
import readMaxim
import subprocess
#import Adafruit_DHT
#import RPi.GPIO as pigpio
#import pigpio
#import DHT22
class TaskPrintHum(threading.Thread):
def __init__(self, taskid = 0, mData = readMaxim.MaximData()):
threading.Thread.__init__(self)
self.taskid = taskid
self._stopevent = threading.Event( )
self.mData = mData
def run(self):
print "thread capteur no", self.taskid, "is readry!"
while not self._stopevent.isSet():
timestamp = time.time()
try:
task = subprocess.Popen(['sudo','python','/home/pi/pygame/AdafruitDHT.py','2302','17'],stdout=subprocess.PIPE)
t,h = task.stdout.readline().split(' ')
temperature = float(t)
humidity = float(h)
except:
humidity = 0
temperature = 0
if ( humidity == 0 ) and (temperature == 0):
print "Pas de donnees"
else:
print 'Time={0:d} Temp={1:0.1f}*C Humidity={2:0.1f}%'.format((int(time.time())),temperature, humidity)
self.mData.setTempHum(temperature, humidity)
# Try to grab a sensor reading. Use the read_retry method which will retry up
# to 15 times to get a sensor reading (waiting 2 seconds between each retry).
# humidity, temperature = Adafruit_DHT.read_retry(Adafruit_DHT.AM2302, 17)
# Note that sometimes you won't get a reading and
# the results will be null (because Linux can't
# guarantee the timing of calls to read the sensor).
# If this happens try again!
# if humidity is not None and temperature is not None:
# print 'Temp={0:0.1f}*C Humidity={1:0.1f}%'.format(temperature, humidity)
# self.mData.setTempHum(temperature, humidity)
# else:
# print 'Failed to get reading. Try again!'
#wait at least 3 seconds to avoid sensor hang
#timewaited = time.time() - timestamp
#if timewaited < 3:
# self._stopevent.wait(3 - timewaited)
#wait for 30 seconds before new read, we don't need so much updates on the hygrometry
self._stopevent.wait(30)
def stop(self):
print "stopping thread no", self.taskid
self._stopevent.set( )
|
r0bin-fr/pirok
|
multithreadHum.py
|
Python
|
gpl-2.0
| 2,098
|
# #
# Copyright 2014-2020 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
Unit tests for easyconfig/format/version.py
@author: Stijn De Weirdt (Ghent University)
"""
import copy
import sys
from test.framework.utilities import EnhancedTestCase, TestLoaderFiltered
from unittest import TextTestRunner
from easybuild.framework.easyconfig.format.version import VersionOperator, ToolchainVersionOperator
from easybuild.framework.easyconfig.format.version import OrderedVersionOperators
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.toolchain.utilities import search_toolchain
class EasyConfigVersion(EnhancedTestCase):
    """Unit tests for format.version module."""

    def test_parser_regex(self):
        """Test the version parser"""
        vop = VersionOperator()
        # version tests: all of "<operator> <version>" forms below must parse
        self.assertTrue(vop.regex.search('< 4'))
        self.assertTrue(vop.regex.search('>= 20131016'))
        self.assertTrue(vop.regex.search('<= 1.2.3'))
        self.assertTrue(vop.regex.search('> 2.4'))
        self.assertTrue(vop.regex.search('== 1.2b'))
        self.assertTrue(vop.regex.search('< 2.0dev'))
        self.assertTrue(vop.regex.search('1.2.3'))  # operator is optional, '==' is default
        self.assertFalse(vop.regex.search('>='))  # version is mandatory (even if DEFAULT_UNDEFINED_VERSION exists)
        self.assertFalse(vop.regex.search('%s1.2.3' % vop.SEPARATOR))  # no separator usage w/o something to separate
        self.assertFalse(vop.regex.search('1.2.3%s' % vop.SEPARATOR))  # no separator usage w/o something to separate
        self.assertFalse(vop.regex.search('>%s2.4' % vop.SEPARATOR * 2))  # double space as separator is not allowed
        self.assertFalse(vop.regex.search('>%s 2.4' % vop.SEPARATOR))  # double separator is not allowed
        self.assertTrue(vop.regex.search('>%sa2.4' % vop.SEPARATOR))  # version starts/ends with *any* word character
        self.assertTrue(vop.regex.search('>%s2.4_' % vop.SEPARATOR))  # version starts/ends with *any* word character
        self.assertTrue(vop.regex.search('>%sG2.4_' % vop.SEPARATOR))  # version starts/ends with *any* word character

    def test_boolean(self):
        """Test boolean test"""
        # a successfully parsed VersionOperator is truthy
        self.assertTrue(VersionOperator('>= 123'))
        self.assertTrue(VersionOperator('123'))
        # an operator without a version must raise, not parse
        error_msg = "Failed to parse '<=' as a version operator string"
        self.assertErrorRegex(EasyBuildError, error_msg, VersionOperator, '<=')

    def test_vop_test(self):
        """Test version checker"""
        # no explicit operator: falls back to the default (undefined) operator
        vop = VersionOperator('1.2.3')
        self.assertTrue(vop.operator == vop.DEFAULT_UNDEFINED_OPERATOR)
        vop = VersionOperator('>= 1.2.3')
        self.assertTrue(vop.test('1.2.3'))  # 1.2.3 >= 1.2.3: True
        self.assertFalse(vop.test('1.2.2'))  # 1.2.2 >= 1.2.3 : False
        self.assertTrue(vop.test('1.2.4'))  # 1.2.4 >= 1.2.3 : True
        vop = VersionOperator('< 1.2.3')
        self.assertFalse(vop.test('1.2.3'))  # 1.2.3 < 1.2.3: False
        self.assertTrue(vop.test('1.2.2'))  # 1.2.2 < 1.2.3 : True
        self.assertFalse(vop.test('1.2.4'))  # 1.2.4 < 1.2.3 : False
        self.assertFalse(vop.test('2a'))  # 2a < 1.2.3 : False
        self.assertTrue(vop.test('1.1a'))  # 1.1a < 1.2.3 : True
        self.assertFalse(vop.test('1.2.3dev'))  # 1.2.3dev < 1.2.3 : False (beware!)
        # disabled this check, since it results in a TypeError in Python 3
        # (due to https://bugs.python.org/issue14894),
        # which gets ignored in VersionOperator.test by always returning True
        # fixing this is non-trivial, and considered not worth the effort right now
        # since it is only required for the infamouns "easyconfigs format v2"
        # self.assertFalse(vop.self('1a'))  # 1a < 1.2.3 : False (beware!)

    def test_versop_overlap_conflict(self):
        """Test overlap/conflicts"""
        # each entry: (left expr, right expr, (overlaps?, conflicts?))
        overlap_conflict = [
            ('> 3', '> 3', (True, False)),  # equal, and thus overlap. no conflict
            ('> 3', '< 2', (False, False)),  # no overlap
            ('> 3', '== 3', (False, False)),  # no overlap
            ('< 3', '> 2', (True, True)),  # overlap, and conflict (region between 2 and 3 is ambiguous)
            ('>= 3', '== 3', (True, True)),  # overlap, and conflict (boundary 3 is ambigous)
            ('> 3', '>= 3', (True, False)),  # overlap, no conflict ('> 3' is more strict then '>= 3')
            # suffix
            ('> 2', '> 1', (True, False)),  # suffix both equal (both None), ordering like above
            ('> 2 suffix:-x1', '> 1 suffix:-x1', (True, False)),  # suffix both equal (both -x1), ordering like above
            ('> 2 suffix:-x1', '> 1 suffix:-x2', (True, True)),  # suffix not equal, conflict (and overlap)
            ('> 2 suffix:-x1', '< 1 suffix:-x2', (False, True)),  # suffix not equal, conflict (and no overlap)
            ('> 2 suffix:-x1', '< 1 suffix:-x1', (False, False)),  # suffix equal, no conflict (and no overlap)
        ]
        for l, r, res in overlap_conflict:
            vl = VersionOperator(l)
            vr = VersionOperator(r)
            self.assertEqual(vl.test_overlap_and_conflict(vr), res)

    def test_versop_gt(self):
        """Test strict greater then ordering"""
        # each (l, r) pair must satisfy VersionOperator(l) > VersionOperator(r)
        left_gt_right = [
            ('> 2', '> 1'),  # True, order by strictness equals order by boundaries for gt/ge
            ('< 8', '< 10'),  # True, order by strictness equals inversed order by boundaries for lt/le
            ('== 4', '> 3'),  # equality is more strict then inequality, but this order by boundaries
            ('> 3', '== 2'),  # there is no overlap, so just order the intervals according their boundaries
            ('== 1', '> 1'),  # no overlap, same boundaries, order by operator
            ('== 1', '< 1'),  # no overlap, same boundaries, order by operator
            ('> 1', '>= 1'),  # no overlap, same boundaries, order by operator (order by strictness)
            ('< 1', '<= 1'),  # no overlap, same boundaries, order by operator (order by strictness)
            ('> 1', '< 1'),  # no overlap, same boundaries, order by operator (quite arbitrary in this case)
            # suffix
            ('> 2 suffix:-x1', '> 1 suffix:-x1'),  # equal suffixes, regular ordering
        ]
        for l, r in left_gt_right:
            self.assertTrue(VersionOperator(l) > VersionOperator(r), "%s gt %s" % (l, r))

    def test_ordered_versop_expressions(self):
        """Given set of ranges, order them according to version/operator (most recent/specific first)"""
        # simple version ordering, all different versions
        ovop = OrderedVersionOperators()
        versop_exprs = [
            '> 3.0.0',
            '>= 2.5.0',
            '> 2.0.0',
            '== 1.0.0',
        ]
        # add version expressions out of order intentionally
        ovop.add(versop_exprs[1])
        ovop.add(versop_exprs[-1])
        ovop.add(versop_exprs[0])
        ovop.add(versop_exprs[2])
        # verify whether order is what we expect it to be
        self.assertEqual(ovop.versops, [VersionOperator(x) for x in versop_exprs])
        # more complex version ordering, identical/overlapping vesions
        ovop = OrderedVersionOperators()
        versop_exprs = [
            '== 1.0.0',
            '> 1.0.0',
            '< 1.0.0',
        ]
        # add version expressions out of order intentionally
        ovop.add(versop_exprs[-1])
        ovop.add(versop_exprs[1])
        ovop.add(versop_exprs[0])
        # verify whether order is what we expect it to be
        self.assertEqual(ovop.versops, [VersionOperator(x) for x in versop_exprs])

    def test_parser_toolchain_regex(self):
        """Test the ToolchainVersionOperator parser"""
        top = ToolchainVersionOperator()
        _, tcs = search_toolchain('')
        tc_names = [x.NAME for x in tcs]
        for tc in tc_names:  # test all known toolchain names
            # test version expressions with optional version operator
            ok_tests = [
                ("%s >= 1.2.3" % tc, None),  # only dict repr for == operator
                ("%s == 1.2.3" % tc, {'name': tc, 'version': '1.2.3'}),
                (tc, None),  # only toolchain name, no dict repr (default operator is >=, version is 0.0.0)
            ]
            for txt, as_dict in ok_tests:
                self.assertTrue(top.regex.search(txt), "%s matches toolchain section marker regex" % txt)
                tcversop = ToolchainVersionOperator(txt)
                self.assertTrue(tcversop)
                self.assertEqual(tcversop.as_dict(), as_dict)
            # only accept known toolchain names
            fail_tests = [
                "x%s >= 1.2.3" % tc,
                "%sx >= 1.2.3" % tc,
                "foo",
                ">= 1.2.3",
            ]
            for txt in fail_tests:
                self.assertFalse(top.regex.search(txt), "%s doesn't match toolchain section marker regex" % txt)
                tcv = ToolchainVersionOperator(txt)
                # a failed parse leaves the operator in an "empty" state
                self.assertEqual(tcv.tc_name, None)
                self.assertEqual(tcv.tcversop_str, None)

    def test_toolchain_versop_test(self):
        """Test the ToolchainVersionOperator test"""
        _, tcs = search_toolchain('')
        tc_names = [x.NAME for x in tcs]
        for tc in tc_names:  # test all known toolchain names
            # test version expressions with optional version operator
            tests = [
                ("%s >= 1.2.3" % tc, (
                    (tc, '1.2.3', True),  # version ok, name ok
                    (tc, '1.2.4', True),  # version ok, name ok
                    (tc, '1.2.2', False),  # version not ok, name ok
                    ('x' + tc, '1.2.3', False),  # version ok, name not ok
                    ('x' + tc, '1.2.2', False),  # version not ok, name not ok
                )),
            ]
            for txt, subtests in tests:
                tcversop = ToolchainVersionOperator(txt)
                for name, version, res in subtests:
                    self.assertEqual(tcversop.test(name, version), res)

    def test_ordered_versop_add_data(self):
        """Test the add and data handling"""
        ovop = OrderedVersionOperators()
        tests = [
            ('> 1', '5'),
            ('> 2', {'x': 3}),
        ]
        for versop_txt, data in tests:
            versop = VersionOperator(versop_txt)
            ovop.add(versop)
            # no data was added, this is a new entry, mapper is initialised with None
            self.assertEqual(ovop.get_data(versop), None)
            ovop.add(versop, data)
            # test data
            self.assertEqual(ovop.get_data(versop), data)
        # new data for same versops
        tests = [
            ('> 1', '6'),
            ('> 2', {'x': 4}),
        ]
        for versop_txt, data in tests:
            versop = VersionOperator(versop_txt)
            ovop.add(versop, data)
            # test updated data
            self.assertEqual(ovop.get_data(versop), data)
        # 'update' a value
        # the data for '> 1' has no .update()
        extra_data = {'y': 4}
        tests = [
            ('> 2', extra_data),
        ]
        for versop_txt, data in tests:
            versop = VersionOperator(versop_txt)
            prevdata = copy.deepcopy(ovop.get_data(versop))
            prevdata.update(extra_data)
            ovop.add(versop, data, update=True)
            # test updated data
            self.assertEqual(ovop.get_data(versop), prevdata)
        # use update=True on new element
        versop = VersionOperator('> 10000')
        new_data = {'new': 5}
        ovop.add(versop, new_data, update=True)
        # test updated data
        self.assertEqual(ovop.get_data(versop), new_data)

    def test_hashing(self):
        """Test hashing of VersionOperator and ToolchainVersionOperator instances."""
        test_cases = [
            VersionOperator('1.2.3'),
            VersionOperator('> 1.2.3'),
            ToolchainVersionOperator('foo'),
            ToolchainVersionOperator('foo > 1.2.3'),
        ]
        for test_case in test_cases:
            # instances must be hashable (and hash truthy, i.e. non-zero)
            self.assertTrue(hash(test_case))
def suite():
    """Build the test suite for this module, filtered by any CLI arguments."""
    loader = TestLoaderFiltered()
    return loader.loadTestsFromTestCase(EasyConfigVersion, sys.argv[1:])
if __name__ == '__main__':
    # Run the suite and report the number of failures as the exit status.
    test_result = TextTestRunner(verbosity=1).run(suite())
    sys.exit(len(test_result.failures))
|
pescobar/easybuild-framework
|
test/framework/easyconfigversion.py
|
Python
|
gpl-2.0
| 13,440
|
from key_mapping import *
from pdcresource import *
from pdcglobal import *
import pygame
class Item(object):
    """Base class for every in-game item.

    Holds identity/display state (names, tile images), combat-related fields
    (flags, damage type, av/dv effect lists) and equipment stats (AP, HP, TSP,
    ENC, ...).  Tile sheets are shared class-wide and loaded lazily.

    Fixes over the original: ``None`` comparisons use ``is``/``is not``
    (identity, the Pythonic form), the no-op ``self = None`` statement in
    ``clear_surfaces`` is removed (rebinding a local name has no effect on the
    instance), and dead commented-out code is dropped.
    """

    # Shared tile sheets for equipped / dropped rendering; populated lazily
    # by check_tiles() and reset by clear_surfaces().
    eq_tiles = None
    dd_tiles = None
    # Back-reference to the game object; assumed to be set on the class by the
    # game bootstrap before any Item is created (TODO confirm against caller).
    game = None

    def __init__(self, add):
        """Register the item with the game (add: passed through to
        game.add_item) and initialise all fields to neutral defaults."""
        self.game.add_item(self, add)
        self.cur_surf = None
        self.eq_img = None        # index into the equipped tile sheet
        self.eq_img_c = None      # cached equipped surface
        self.dd_img = None        # index into the dropped tile sheet
        self.dd_img_c = None      # cached dropped surface
        self.blit_pos = None
        self.equipped = False
        self.picked_up = False
        self.y = 0
        self.x = 0
        self.name = 'empty'
        self.full_name = 'empty'
        self.flags = IF_MELEE | IF_FIRES_DART | IF_RANGED
        self.type = I_VOID
        self.av_fx = []           # attack-value effects
        self.dv_fx = []           # defense-value effects
        self.special = False
        self.amount = 0
        self.damage_type = D_GENERIC
        self.infotext = ''
        self.text = ''
        self.player_symbol = None
        self.skills = []
        self.locations = 0
        self.AP = 0
        self.HP = 0
        self.TSP = 0
        self.ENC = 0
        self.H2 = False

    def get_ps(self):
        """Return the player symbol, allocating one lazily on first request."""
        if self.player_symbol is None:
            self.player_symbol = self.game.get_symbol()
        return self.player_symbol

    def used(self):
        """Consume one charge; return False exactly when the count hits zero."""
        self.amount -= 1
        if self.amount == 0:
            return False
        return True

    def get_name(self):
        """Return the display name: full name when identified, with the
        remaining amount appended when positive."""
        if self.flags & IF_IDENTIFIED:
            name = self.full_name
        else:
            name = self.name
        if self.amount > 0:
            name += ' (%s)' % (self.amount)
        return name

    def read(self, item, obj):
        """Default read action; subclasses override for readable items."""
        self.game.shout('Nothing interesting')

    def drink(self, item, obj):
        """Default drink action; subclasses override for potions."""
        self.game.shout('Tastes like water')

    def info(self):
        """Return a list of info strings for the inspect screen.

        Unidentified items only reveal their info text plus a marker."""
        l = []
        if not self.flags & IF_IDENTIFIED:
            if len(self.infotext) > 0:
                l.append(self.infotext)
            l.append('not identified')
            return l
        if len(self.infotext) > 0:
            l.append(self.infotext)
        if self.AP > 0 and self.HP > 0:
            l.append('AP/HP: %i/%i' % (self.AP, self.HP))
        elif self.AP > 0:
            l.append('AP: %i' % (self.AP))
        l.append('ENC: %i' % (self.ENC))
        for fx in self.av_fx:
            l.append(fx.weaponinfotext)
        for fx in self.dv_fx:
            l.append(fx.weaponinfotext)
        return l

    def set_pos(self, pos):
        """Move the item to (x, y) and notify the game's item index."""
        self.game.update_item_pos(self, pos)
        self.x = pos[0]
        self.y = pos[1]

    def pos(self):
        """Return the item's current (x, y) position."""
        return self.x, self.y

    def clear_surfaces(self):
        """Drop all cached surfaces (instance and shared tile sheets) so they
        are reloaded on next access — e.g. after a display mode change."""
        self.eq_img_c = None
        self.dd_img_c = None
        Item.dd_tiles = None
        Item.eq_tiles = None
        self.cur_surf = None
        # NOTE: the original ended with "self = None", which only rebinds the
        # local name and has no effect; it was removed.

    def check_tiles(self):
        """Lazily load the shared tile sheets on first use."""
        if Item.eq_tiles is None:
            Item.eq_tiles = Res('dc-pl.png', TILESIZE)
        if Item.dd_tiles is None:
            Item.dd_tiles = Res('dc-item.png', TILESIZE)

    def get_eq_img(self):
        """Return (and cache) the equipped-state surface; a 1x1 transparent
        surface is used when the item has no equipped image."""
        self.check_tiles()
        if self.eq_img_c is None:
            if self.eq_img is None:
                self.eq_img_c = pygame.Surface((1, 1), pygame.SRCALPHA, 32)
            else:
                self.eq_img_c = self.eq_tiles.get_subs(self.eq_img)
        return self.eq_img_c

    def get_dd_img(self):
        """Return (and cache) the dropped-state surface; a 1x1 transparent
        surface is used when the item has no dropped image."""
        self.check_tiles()
        if self.dd_img_c is None:
            if self.dd_img is None:
                self.dd_img_c = pygame.Surface((1, 1), pygame.SRCALPHA, 32)
            else:
                self.dd_img_c = self.dd_tiles.get(self.dd_img)
        return self.dd_img_c
class Corpse(Item):
    """Edible corpse item left behind by a dead creature (its owner)."""

    def __init__(self, owner):
        """owner: the creature whose death produced this corpse; its name and
        position are copied onto the corpse."""
        Item.__init__(self, True)
        self.type = I_CORPSE
        self.dd_img = 208
        self.flags = IF_EATABLE
        self.name = '%s corpse' % (owner.name)
        self.full_name = self.name
        self.set_pos(owner.pos())
        # Corpses are always identified.  The original toggled the flag (^=)
        # behind a "not already set" guard, which is just a convoluted way of
        # setting it unconditionally; |= says the same thing directly.
        self.flags |= IF_IDENTIFIED
|
cycladesnz/chambersAndCreatures
|
src/item/item.py
|
Python
|
gpl-2.0
| 4,441
|
# coding: utf-8
#
# Copyright 2009 Prima Tech Informatica LTDA.
#
# Licensed under the Environ License, Version 1.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.cmspublica.com.br/licenses/ENVIRON-LICENSE-1.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from time import time, strftime, strptime
from datetime import datetime, timedelta
from urllib import unquote
from publica.admin.file import File
from publica.admin.error import UserError
from publica.core.portal import Portal
from publica.utils.json import encode, decode
from publica.utils.decorators import serialize, dbconnectionapp,\
Permission
import base64
class Adm(object):
    """Admin backend for the events ("eventos") application: CRUD for event
    content, categories and highlight ("destaque") records, plus the hooks
    that publish/export the content through the portal.

    NOTE(review): this module is Python 2 (``except ValueError, e``,
    ``iterator.next()``); keep it that way unless the whole app is ported.
    Dates arrive from forms as dd/mm/YYYY [HH:MM] strings and are stored as
    YYYY-mm-dd HH:MM.
    """

    def _getDadosApp(self):
        """Return the app configuration dict ("dados") for this site/schema."""
        portal = Portal(id_site=self.id_site,
                        request=self.request)
        return portal._getApp(env_site=self.id_site,
                              schema=self.schema)["dados"]

    @dbconnectionapp
    def _getEventos(self, id_post):
        """Return the first row for the given event id (None when absent)."""
        for i in self.execSql("select_todos_eventos",
                              id_post=int(id_post)):
            return i

    @dbconnectionapp
    def _getIds(self):
        """Return the ids of the events scheduled for publication.

        Uses a five-minute grace window in the past so a slightly late cron
        run still picks up due events."""
        date = (datetime.now() - timedelta(minutes=5)).timetuple()
        publicado_em = strftime("%Y-%m-%d %H:%M", date)
        return self.execSql("select_ids_agendados",
                            publicado_em=publicado_em)

    @dbconnectionapp
    @serialize
    @Permission("PERM APP")
    def addConteudo(self, id_site, id_treeapp, id_aplicativo,
                    titulo, preco_entrada, consumacao_minima, local, site,
                    categoria, credito_imagem, data_inicio, usuario, email_user,
                    imagemint, publicado_em,
                    data_fim=None, telefones=None, email=None,
                    expira_em=None, publicado=None,
                    hora_inicio=None, hora_fim=None,
                    titulo_destaque=None, descricao_destaque=None,
                    imagem_destaque=None, peso_destaque=None,
                    relacionamento=[], tags="", permissao=None,
                    exportar_xml=None, exportar_json=None,
                    exportar=None):
        """Insert a new event: validates/normalises dates, stores the event
        row, its categories and highlight record, registers the content with
        the portal and optionally triggers export (html/xml/json).

        Returns a (Portuguese) status string shown to the admin user.
        NOTE(review): ``relacionamento=[]`` is a mutable default argument —
        safe only as long as it is never mutated in here; confirm.
        """
        dadosapp = self._getDadosApp()
        publicado = True if publicado else False
        tags = tags if tags else None
        dt = publicado_em
        # publication date is mandatory and must parse; abort with UserError otherwise
        try:
            p = strptime(publicado_em, "%d/%m/%Y %H:%M")
            dt2 = strftime("%d/%m/%Y %H:%M", p)
            publicado_em = strftime("%Y-%m-%d %H:%M", p)
        except ValueError:
            dt2 = None
            raise UserError(("Ocorreu um erro: "
                             "Data de publicaçåo "
                             "inválida (%s)") % publicado_em)
        try:
            p = strptime(data_inicio, "%d/%m/%Y")
            data_inicio = strftime("%Y-%m-%d %H:%M", p)
        except ValueError:
            return "data em formato incorreto"
        # NOTE(review): data_fim defaults to None, and strptime(None, ...)
        # raises TypeError which is NOT caught here — confirm callers always
        # send a value.
        try:
            p = strptime(data_fim, "%d/%m/%Y")
            data_fim = strftime("%Y-%m-%d %H:%M", p)
        except ValueError:
            return "data em formato incorreto"
        # expiry date is optional: unparsable (or missing) means "never expires"
        try:
            p = strptime(expira_em, "%d/%m/%Y %H:%M")
            expira_em = strftime("%Y-%m-%d %H:%M", p)
        except ValueError:
            expira_em = None
        # insert the content row
        portal = Portal(id_site=self.id_site, request=self.request)
        id_conteudo = self.execSql("select_nextval_evento").next()["id"]
        id_imagem = self._addFile(arquivo=imagemint,
                                  id_conteudo=id_conteudo,
                                  schema=self.schema,
                                  dt=dt)
        self.execSqlBatch("insert_evento",
                          id_conteudo=int(id_conteudo),
                          titulo=titulo,
                          imagemint=id_imagem,
                          telefones=telefones,
                          email=email,
                          data_inicio=data_inicio,
                          data_fim=data_fim,
                          hora_inicio=hora_inicio,
                          hora_fim=hora_fim,
                          publicado_em=publicado_em,
                          expira_em=expira_em,
                          credito_imagem=credito_imagem,
                          publicado=publicado,
                          preco_entrada=preco_entrada,
                          consumacao_minima=consumacao_minima,
                          local=local,
                          site=site,
                          usuario=usuario,
                          email_user=email_user)
        # a single category may arrive as a scalar; normalise to a list
        if type(categoria) is not list:
            categoria =[categoria]
        for i in range(len(categoria)):
            self.execSqlBatch("insert_categoria_evento",
                              id_categoria=int(categoria[i]),
                              id_conteudo=int(id_conteudo))
        # inserting the highlight ("destaque") record
        if not imagem_destaque:
            imagem_destaque = None
        try:
            peso_destaque = int(peso_destaque)
        except:
            peso_destaque = 0
        dados_destaque = []
        id_destaque = self.execSql("select_nextval_destaque").next()["id"]
        if imagem_destaque:
            imagem_destaque = self._addFile(arquivo=imagem_destaque,
                                            id_conteudo=id_conteudo,
                                            schema=self.schema,
                                            dt=dt)
        self.execSqlBatch("insert_destaque",
                          id_destaque=int(id_destaque),
                          id_conteudo=int(id_conteudo),
                          titulo=titulo,
                          descricao=descricao_destaque,
                          img=imagem_destaque,
                          peso=int(peso_destaque))
        self.execSqlCommit()
        # portal-side actions: register the new content with the portal tree
        dados = self._setDados(id_conteudo=id_conteudo)
        self._addContentPortal(env_site=self.id_site,
                               id_pk=id_conteudo,
                               schema=self.schema,
                               meta_type=self.meta_type,
                               id_aplicativo=id_aplicativo,
                               id_treeapp=id_treeapp,
                               peso=peso_destaque,
                               titulo=titulo,
                               publicado=publicado,
                               publicado_em=publicado_em,
                               expira_em=expira_em,
                               titulo_destaque=titulo_destaque,
                               descricao_destaque=descricao_destaque,
                               imagem_destaque=imagem_destaque,
                               tags=tags,
                               permissao=permissao,
                               relacionamento=relacionamento,
                               dados=dados)
        # any '1' export flag triggers immediate publication/export
        if (exportar_xml=='1') or (exportar_json=='1') or (exportar=='1'):
            self._addLog("Novo conteudo cadastrado e publicado '%s'" % titulo)
            self._exportContent(id_aplicativo=id_aplicativo,
                                id_conteudo=id_conteudo,
                                schema=self.schema,
                                id_treeapp=id_treeapp,
                                html=exportar,
                                xml=exportar_xml,
                                json=exportar_json,
                                dados=dados,
                                subitems=None,
                                add=1)
            return ("Conteudo cadastrado com sucesso! "
                    "Publicação iniciada.")
        self._addLog("Novo conteudo cadastrada '%s'" % titulo)
        return "Conteudo cadastrado com sucesso."

    @dbconnectionapp
    def getCategorias(self):
        """Return all categories (iterable of DB rows)."""
        return self.execSql("select_categorias")

    @dbconnectionapp
    def getCatjson(self):
        """Return all categories serialized as a JSON string."""
        return encode([i for i in self.execSql("select_categorias")])

    @dbconnectionapp
    @serialize
    @Permission("PERM APP")
    def addcategoria(self, nome):
        """Insert a new category with the given name.

        >>> self.addCategoria(nome=\"name\")
        """
        id_categoria = self.execSql("select_nextval_categoria").next()["id"]
        self.execSqlu("insert_categoria",
                      id_categoria=int(id_categoria),
                      nome_categoria=nome)
        return {"ok":"Categoria adicionada com sucesso!", "id_categoria":id_categoria}

    @dbconnectionapp
    @serialize
    @Permission("PERM APP")
    def editcategoria(self, nome, id_categoria):
        """Rename an existing category."""
        self.execSqlBatch("update_categoria",
                          nome_categoria= nome,
                          id_categoria=int(id_categoria))
        self.execSqlCommit()
        return {"ok":"categoria editada com sucesso!"}

    @dbconnectionapp
    @serialize
    @Permission("PERM APP")
    def delCategoria(self, id_categoria):
        """Delete a category by id."""
        self.execSqlBatch("delete_categoria",
                          id_categoria=int(id_categoria))
        self.execSqlCommit()
        return {"ok":"ok"}

    @dbconnectionapp
    @Permission("PERM APP")
    def _getConteudo(self, id_conteudo):
        """Load one event (first matching row) for the edit form.

        Converts stored dates back to dd/mm/YYYY, attaches the highlight and
        category data, and — when a temporary image upload is pending —
        materialises it into real files and updates the DB accordingly.
        """
        dadosapp = self._getDadosApp()
        for i in self.execSql("select_evento",
                              id_conteudo=int(id_conteudo)):
            # DB returns "YYYY-mm-dd HH:MM:SS"; the form expects dd/mm/YYYY
            p = strptime(i["data_inicio"],"%Y-%m-%d %H:%M:%S")
            i["data_inicio"] = strftime("%d/%m/%Y", p)
            o = strptime(i["data_fim"],"%Y-%m-%d %H:%M:%S")
            i["data_fim"] = strftime("%d/%m/%Y", o)
            i["categorias"] =[]
            for j in self.execSql("select_destaque", id_conteudo=id_conteudo):
                i["destaque"] = {"id_destaque": j["id_destaque"],
                                 "titulo_destaque": j["titulo"],
                                 "descricao_destaque": j["descricao"],
                                 "imagem_destaque": j["img"],
                                 "peso_destaque": j["peso"]}
            for k in self.execSql("select_categoria_evento", id_conteudo=int(id_conteudo)):
                categoria = {"id_categoria":k["id_categoria"],
                             "nome_categoria":k["nome_categoria"]}
                i["categorias"].append(categoria)
            # NOTE(review): if no "destaque" row exists, i["destaque"] below
            # raises KeyError — confirm every event always has a highlight.
            for t in self.execSql("select_temp_imagem",
                                  id_destaque=int(i["destaque"]["id_destaque"])):
                t["imagem_temp"] = {"id_imagem": t["id_imagem"] if t["id_imagem"] else None,
                                    "tempimg": base64.decodestring(t["imagembin"]) if t["imagembin"] else None,
                                    "type": t["tipoarq"] if t["tipoarq"] else None,
                                    "filename": t["nomearq"] if t["nomearq"] else None}

                class FakeReader(object):
                    # Minimal file-like wrapper exposing read() over an
                    # in-memory string.
                    def __init__(self, source):
                        self.source = source

                    def read(self):
                        return self.source

                class FakeFile(object):
                    """Builds an object with the attributes of a file uploaded via wsgi."""
                    def __init__(self, source, mimetype, filename):
                        self.type = mimetype
                        self.filename = filename
                        self.file = FakeReader(source)

                if (t["imagem_temp"]["tempimg"]):
                    # Re-upload the decoded temp image as two files (normal +
                    # internal size), store them and clear the temp record.
                    arq = File(request=self.request, id_site=self.id_site)
                    filesource = t["imagem_temp"]["tempimg"]
                    filenome = t["imagem_temp"]["filename"]
                    filetype = t["imagem_temp"]["type"]
                    filenomeint = "i" + t["imagem_temp"]["filename"]
                    fakefile = FakeFile(filesource, filetype, filenome)
                    fakefileint = FakeFile(filesource, filetype, filenome)
                    arquivo = arq.addFileTemp(arquivo=fakefile)
                    arquivoint = arq.addFileTemp(arquivo=fakefileint)
                    cont = self._addFile(arquivo=decode(arquivo)["id"],
                                         id_conteudo=id_conteudo,
                                         schema=self.schema,
                                         dt=i["publicado_em"],
                                         transform={"metodo":dadosapp['redimensionamento'],
                                                    "dimenx":dadosapp['dimenx'],
                                                    "dimeny":dadosapp['dimeny']})
                    cont2 = self._addFile(arquivo=decode(arquivoint)["id"],
                                          id_conteudo=id_conteudo,
                                          schema=self.schema,
                                          dt=i["publicado_em"],
                                          transform={"metodo":dadosapp['redimensionamentog'],
                                                     "dimenx":dadosapp['dimenxg'],
                                                     "dimeny":dadosapp['dimenyg']})
                    i["destaque"]["imagem_destaque"] = cont
                    i["imagemint"] = cont2
                    self.execSqlBatch("update_imagem",
                                      id_conteudo=int(id_conteudo),
                                      imagemint=cont2)
                    self.execSqlBatch("update_destaque",
                                      id_destaque=i["destaque"]["id_destaque"],
                                      img=cont)
                    self.execSqlBatch("delete_tempimg",
                                      id_destaque=i["destaque"]["id_destaque"])
                    self.execSqlCommit()
            return i

    @dbconnectionapp
    @serialize
    @Permission("PERM APP")
    def editConteudo(self, id_conteudo, id_site, id_treeapp, id_aplicativo,
                     titulo, preco_entrada,consumacao_minima,local,site,
                     categoria, credito_imagem, data_inicio,
                     usuario, email_user, imagemint,
                     publicado_em, data_fim = None,telefones=None, email=None,
                     expira_em=None, publicado=None, hora_inicio=None,
                     titulo_destaque=None, descricao_destaque=None,
                     imagem_destaque=None, peso_destaque=None,
                     relacionamento=[], tags="", permissao=None,
                     exportar_xml=None, exportar_json=None,
                     exportar=None, hora_fim=None):
        """Update an existing event: same date handling as addConteudo, then
        rewrites category/highlight rows, updates the event row, re-registers
        the content with the portal and optionally re-exports it.

        Returns a (Portuguese) status string shown to the admin user.
        """
        publicado = True if publicado else False
        tags = tags if tags else None
        portal = Portal(id_site=self.id_site, request=self.request)
        dt = publicado_em
        try:
            p = strptime(publicado_em, "%d/%m/%Y %H:%M")
            dt2 = strftime("%d/%m/%Y %H:%M", p)
            publicado_em = strftime("%Y-%m-%d %H:%M", p)
        except ValueError, e:
            dt2 = None
            raise UserError(("Ocorreu um erro: "
                             "Data de publicaçåo "
                             "inválida (%s)") % publicado_em)
        try:
            p = strptime(data_inicio, "%d/%m/%Y")
            data_inicio = strftime("%Y-%m-%d %H:%M", p)
        except ValueError, e:
            return "data em formato incorreto"
        # NOTE(review): as in addConteudo, data_fim/expira_em being None would
        # raise TypeError (not caught) in strptime — confirm callers.
        try:
            p = strptime(data_fim, "%d/%m/%Y")
            data_fim = strftime("%Y-%m-%d %H:%M", p)
        except ValueError, e:
            return "data em formato incorreto"
        try:
            p = strptime(expira_em, "%d/%m/%Y %H:%M")
            expira_em = strftime("%Y-%m-%d %H:%M", p)
        except ValueError, e:
            expira_em = None
        id_imagem = portal.addArquivo(arquivo=imagemint,
                                      id_conteudo=id_conteudo,
                                      schema=self.schema,
                                      dt=dt)
        # delete the highlight/category rows for this content before re-insert
        self.execSqlBatch("delete_destaque",
                          id_conteudo=int(id_conteudo))
        self.execSqlBatch("delete_categoria_evento",
                          id_conteudo=int(id_conteudo))
        # a single category may arrive as a scalar; normalise to a list
        if type(categoria) is not list:
            categoria =[categoria]
        for i in range(len(categoria)):
            self.execSqlBatch("insert_categoria_evento",
                              id_categoria=int(categoria[i]),
                              id_conteudo=int(id_conteudo))
        if imagem_destaque:
            if type(imagem_destaque) is str:
                # a string either references a temp upload ("tmp" in the id)
                # that still needs storing, or an already-stored file id
                try:
                    imagem_destaque.index("tmp")
                    arquivo = imagem_destaque
                    cont = self._addFile(arquivo = arquivo,
                                         id_conteudo=id_conteudo,
                                         schema=self.schema,
                                         dt=dt2)
                except:
                    cont = imagem_destaque
            else:
                # a freshly uploaded file object: stage it, then store it
                arq = File(request={}, id_site=self.id_site)
                arquivo = arq.addFileTemp(arquivo=imagem_destaque)
                cont = self._addFile(arquivo=decode(arquivo)["id"],
                                     id_conteudo=id_conteudo,
                                     schema=self.schema,
                                     dt=dt2)
        else:
            cont = None
        self.execSqlBatch("update_evento",
                          id_conteudo=int(id_conteudo),
                          titulo=titulo,
                          imagemint=id_imagem,
                          telefones = telefones,
                          email = email,
                          data_inicio = data_inicio,
                          data_fim = data_fim,
                          hora_inicio = hora_inicio,
                          hora_fim = hora_fim,
                          publicado_em=publicado_em,
                          expira_em=expira_em,
                          credito_imagem = credito_imagem,
                          publicado=publicado,
                          preco_entrada = preco_entrada,
                          consumacao_minima = consumacao_minima,
                          local = local,
                          site = site,
                          usuario = usuario,
                          email_user = email_user)
        # inserting the highlight ("destaque") record
        dados_destaque = []
        id_destaque = self.execSql("select_nextval_destaque").next()["id"]
        self.execSqlBatch("insert_destaque",
                          id_destaque=int(id_destaque),
                          id_conteudo=int(id_conteudo),
                          titulo=titulo,
                          descricao=descricao_destaque,
                          img=cont,
                          peso=int(peso_destaque))
        # NOTE(review): this re-stores imagem_destaque AFTER insert_destaque
        # already used `cont`, and the result is only passed to the portal —
        # confirm this double handling is intended.
        if titulo_destaque or imagem_destaque or descricao_destaque:
            imagem_destaque = self._addFile(arquivo=imagem_destaque,
                                            id_conteudo=id_conteudo,
                                            schema=self.schema,
                                            dt=dt)
        if not imagem_destaque:
            imagem_destaque = None
        try:
            peso_destaque = int(peso_destaque)
        except:
            peso_destaque = 0
        self.execSqlCommit()
        # portal-side actions: refresh the portal's view of this content
        dados = self._setDados(id_conteudo=id_conteudo)
        self._editContentPortal(env_site=self.id_site,
                                id_pk=id_conteudo,
                                id_aplicativo=int(id_aplicativo),
                                schema=self.schema,
                                id_treeapp=id_treeapp,
                                peso=peso_destaque,
                                titulo=titulo,
                                publicado=publicado,
                                publicado_em=publicado_em,
                                expira_em=expira_em,
                                titulo_destaque=titulo_destaque,
                                descricao_destaque=descricao_destaque,
                                imagem_destaque=cont,
                                permissao=permissao,
                                tags=tags,
                                relacionamento=relacionamento,
                                dados=dados)
        # any '1' export flag triggers immediate publication/export
        if (exportar_xml=='1') or (exportar_json=='1') or (exportar=='1'):
            self._addLog("Conteudo '%s' editado e publicado" % titulo)
            self._exportContent(id_aplicativo=id_aplicativo,
                                id_conteudo=id_conteudo,
                                schema=self.schema,
                                id_treeapp=id_treeapp,
                                html=exportar,
                                xml=exportar_xml,
                                json=exportar_json,
                                dados=dados,
                                subitems=None,
                                add=1)
            return ("Conteudo editado com sucesso! "
                    "Publicação iniciada.")
        self._addLog("Conteudo editado '%s'" % titulo)
        return "Conteudo editado com sucesso."
|
intip/da-apps
|
applications/da_eventos/adm.py
|
Python
|
gpl-2.0
| 22,106
|
from fabric.api import *
import time
import re
import logging
import utils.log
# Module-level logger, wrapped in the project's adapter; the call sites below
# pass (message, host, cluster), so the adapter presumably folds host/cluster
# context into each record — confirm against utils.log.CustomLogAdapter.
l = logging.getLogger()
l = utils.log.CustomLogAdapter(l, None)
@task(default=True)
def check(input_params, cluster):
    """ Check the uptime for a machine and compare it against a pre-defined value
    input_params parameter is a string, with the following fields:
    uptime[unit]:[sleep_before_returning]
    Example:
    1d|2h
    1w
    60m
    60s
    If no unit is used, _minute_ is used! (seconds in terms of uptime doesn't make much sense)
    If the parameter 'sleep_before_returning' is defined, when the uptime is less than the desired,
    the check will sleep this time before returning - this can be used to do less frequent checking,
    which results in less frequent ssh connections being made.
    Default is not to sleep before returning.
    """
    # split up the input_params, and make sense of it
    # groups: (value, unit?, sleep?, sleep_unit?), e.g. "1d:30m" ->
    # ("1", "d", "30", "m")
    m = re.search("^([0-9.]+)([wdhms])?(?::([0-9.]+)?([wdhms])?)?$", input_params)
    if not m:
        raise AttributeError("The given input_params '%s' doesn't match the requirements!" % input_params)
    value, unit, sleep, sleep_unit = m.groups()
    try:
        value = float(value)
    except Exception as e:
        raise ValueError("Can't convert value to float: %s" % str(e))
    # missing units default to minutes, for both uptime and sleep duration
    if unit is None:
        unit = "m"
    sleep_sec = None
    if not sleep is None and sleep_unit is None:
        sleep_unit = "m"
    # convert value to seconds
    uptime_req_sec = convert_human_readable_duration_to_seconds(value, unit)
    if not sleep is None:
        sleep_sec = convert_human_readable_duration_to_seconds(sleep, sleep_unit)
    return check_uptime(uptime_req_sec, unit, cluster, sleep_sec)
def check_uptime(uptime_req_sec, unit, cluster, sleep_sec=None):
    """ Compare the actual uptime (in seconds) against a pre-defined value (in seconds)

    Returns True when /proc/uptime reports at least 'uptime_req_sec' seconds
    of uptime, False otherwise. When the check fails and 'sleep_sec' is set,
    sleeps that long before returning (throttles re-checking).
    """
    cmd = "cat /proc/uptime"
    try:
        result = run(cmd)
    except Exception as e:
        # BUG FIX: the original format referenced an undefined name 'command',
        # so this handler raised NameError instead of the intended RuntimeError.
        raise RuntimeError("%s: failed running '%s': %s" % (env.command, cmd, str(e)))
    if not result.succeeded:
        return False
    else:
        # first whitespace-separated field of /proc/uptime is uptime in seconds
        uptime_real = float(result.split(" ")[0])
        uptime_human = convert_seconds_to_human_readable_duration(uptime_real, unit)
        if uptime_real > uptime_req_sec:
            # include the unit, consistent with the info-level message below
            l.debug("Uptime is '%.2f%s'" % (uptime_human, unit), env.host_string, cluster)
            return True
        else:
            l.info("Uptime is '%.2f%s'" % (uptime_human, unit), env.host_string, cluster)
            if sleep_sec is not None:
                l.info("Sleeping '%d' seconds before returning" % int(sleep_sec), env.host_string, cluster)
                time.sleep(sleep_sec)
            return False
def convert_human_readable_duration_to_seconds(value, unit):
    """ convert 5m to 300, 1h to 3600 """
    # seconds per unit, keyed by the single-letter unit code
    _MULTIPLIERS = {
        "s": 1,
        "m": 60,
        "h": 60 * 60,
        "d": 60 * 60 * 24,
        "w": 60 * 60 * 24 * 7,
    }
    seconds = float(value)
    try:
        return seconds * _MULTIPLIERS[unit]
    except KeyError:
        raise ValueError("Unknown unit '%s' was used" % unit)
def convert_seconds_to_human_readable_duration(time_seconds, unit):
    """ convert (300, m) to 5, (240, h) to 4 """
    # division steps per unit; applied sequentially so the float result is
    # bit-for-bit the same as the chained /60/60/... form
    _STEPS = {
        "s": (),
        "m": (60,),
        "h": (60, 60),
        "d": (60, 60, 24),
        "w": (60, 60, 24, 7),
    }
    if unit not in _STEPS:
        raise ValueError("Unknown unit '%s' was used" % unit)
    result = float(time_seconds)
    for divisor in _STEPS[unit]:
        result = result / divisor
    return float(result)
|
bdeak/taskmgr
|
fabfile/checks/uptime.py
|
Python
|
gpl-2.0
| 3,836
|
#
# Copyright 2009-2011 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import os
import logging
import threading
import uuid
from contextlib import contextmanager
import volume
from sdc import sdCache
import sd
import misc
import fileUtils
from config import config
import storage_exception as se
import task
from threadLocal import vars
import resourceFactories
import resourceManager as rm
# Singleton resource manager used below for image-namespace locking.
rmanager = rm.ResourceManager.getInstance()
# Disk type
UNKNOWN_DISK_TYPE = 0
SYSTEM_DISK_TYPE = 1
DATA_DISK_TYPE = 2
SHARED_DISK_TYPE = 3
SWAP_DISK_TYPE = 4
TEMP_DISK_TYPE = 5
# Human-readable names for the disk type codes above.
DISK_TYPES = {UNKNOWN_DISK_TYPE:'UNKNOWN', SYSTEM_DISK_TYPE:'SYSTEM',
                DATA_DISK_TYPE:'DATA', SHARED_DISK_TYPE:'SHARED',
                SWAP_DISK_TYPE:'SWAP', TEMP_DISK_TYPE:'TEMP'}
# Image Operations
UNKNOWN_OP = 0
COPY_OP = 1
MOVE_OP = 2
OP_TYPES = {UNKNOWN_OP:'UNKNOWN', COPY_OP:'COPY', MOVE_OP:'MOVE'}
# Prefix used to rename images/volumes out of the way prior to removal.
REMOVED_IMAGE_PREFIX = "_remove_me_"
# Length of the random suffix appended during pre-delete renaming.
RENAME_RANDOM_STRING_LEN = 8
class Image:
""" Actually represents a whole virtual disk.
Consist from chain of volumes.
"""
log = logging.getLogger('Storage.Image')
_fakeTemplateLock = threading.Lock()
    @classmethod
    def createImageRollback(cls, taskObj, imageDir):
        """
        Remove empty image folder

        'taskObj'  - recovery task object (unused here)
        'imageDir' - image directory that was created by create()
        Only removes the directory when it is empty; a non-empty directory
        means volumes already exist, so data is left untouched.
        """
        cls.log.info("createImageRollback: imageDir=%s" % (imageDir))
        if os.path.exists(imageDir):
            if not len(os.listdir(imageDir)):
                fileUtils.cleanupdir(imageDir)
            else:
                # Refuse to destroy anything that already contains files
                cls.log.error("createImageRollback: Cannot remove dirty image folder %s" % (imageDir))
    def __init__(self, repoPath):
        # Root path of the storage repository this image lives under.
        self.repoPath = repoPath
        # Configured repository location ('irs' section of the vdsm config).
        self.storage_repository = config.get('irs', 'repository')
    def create(self, sdUUID, imgUUID):
        """Create placeholder for image's volumes
        'sdUUID' - storage domain UUID
        'imgUUID' - image UUID
        Returns the path of the (possibly just created) image directory.
        """
        imageDir = os.path.join(self.repoPath, sdUUID, sd.DOMAIN_IMAGES, imgUUID)
        if not os.path.isdir(imageDir):
            self.log.info("Create placeholder %s for image's volumes",
                          imageDir)
            taskName = "create image rollback: " + imgUUID
            # Push the rollback before mkdir so an interrupted operation
            # still removes the empty directory (see createImageRollback).
            vars.task.pushRecovery(task.Recovery(taskName, "image", "Image", "createImageRollback",
                                                 [imageDir]))
            os.mkdir(imageDir)
        return imageDir
def getImageDir(self, sdUUID, imgUUID):
"""
Return image directory
"""
return os.path.join(self.repoPath, sdUUID, sd.DOMAIN_IMAGES, imgUUID)
    def preDeleteHandler(self, sdUUID, imgUUID):
        """
        Pre-delete handler for images on backup domain

        Returns the parent (template) volume parameters when a 'fake'
        replacement template should be created after deletion, else None.
        """
        # We should handle 2 opposite scenarios:
        # 1. Remove template's image: Create 'fake' template instead of deleted one
        # 2. Remove regular image: Remove parent-'fake' template if nobody need it already
        try:
            pvol = self.getTemplate(sdUUID=sdUUID, imgUUID=imgUUID)
            # 1. If we required to delete template's image that have VMs
            # based on it, we should create similar 'fake' template instead
            if pvol:
                pvolParams = pvol.getVolumeParams()
                # Find out real imgUUID of parent volume
                pimg = pvolParams['imgUUID']
                # Check whether deleted image is a template itself
                if imgUUID == pimg:
                    imglist = pvol.findImagesByVolume()
                    if len(imglist) > 1:
                        return pvolParams
            # 2. If we required to delete regurar (non-template) image, we should also
            # check its template (if exists) and in case that template is 'fake'
            # and no VMs based on it, remove it too.
            if pvol and pvol.isFake():
                # At this point 'pvol' is a fake template and we should find out all its children
                chList = pvol.getAllChildrenList(self.repoPath, sdUUID, pimg, pvol.volUUID)
                # If 'pvol' has more than one child don't touch it, else remove it
                if len(chList) <= 1:
                    # Delete 'fake' parent image before deletion required image
                    # will avoid situation which new image based on this 'fake' parent
                    # can be created.
                    self._delete(sdUUID=sdUUID, imgUUID=pimg, postZero=False, force=True)
        except se.StorageException:
            # Best effort: log and fall through to returning None.
            self.log.warning("Image %s in domain %s had problem during deletion process", imgUUID, sdUUID, exc_info=True)
        return None
    def delete(self, sdUUID, imgUUID, postZero, force):
        """
        Delete whole image

        'postZero' - zero out data before removal
        'force'    - skip validation and delete unconditionally
        A recovery is pushed first so an interrupted delete is retried
        (rolled forward) by deleteRecover().
        """
        name = "delete image %s retry" % imgUUID
        # Recovery arguments must be strings; the booleans are stringified
        # here and parsed back in deleteRecover() via misc.parseBool().
        vars.task.pushRecovery(task.Recovery(name, "image", "Image", "deleteRecover",
                                             [self.repoPath, sdUUID, imgUUID, str(postZero), str(force)]))
        try:
            self._delete(sdUUID, imgUUID, postZero, force)
        except se.StorageException:
            self.log.error("Unexpected error", exc_info=True)
            raise
        except Exception, e:
            # Wrap any non-storage failure in a storage-level exception
            self.log.error("Unexpected error", exc_info=True)
            raise se.ImageDeleteError("%s: %s" % (imgUUID, str(e)))
    @classmethod
    def deleteRecover(cls, taskObj, repoPath, sdUUID, imgUUID, postZero, force):
        """
        Delete image rollforward

        Re-runs the deletion from a task recovery; 'postZero' and 'force'
        arrive as strings and are parsed back into booleans.
        """
        Image(repoPath)._delete(sdUUID, imgUUID, misc.parseBool(postZero), misc.parseBool(force))
    def validateDelete(self, sdUUID, imgUUID):
        """
        Validate image before deleting

        Raises CannotDeleteSharedVolume when a shared volume of this image
        is still used by other (legal) images. Returns the list of volume
        objects belonging to the image.
        """
        # Get the list of the volumes
        volclass = sdCache.produce(sdUUID).getVolumeClass()
        uuidlist = volclass.getImageVolumes(self.repoPath, sdUUID, imgUUID)
        volumes = [volclass(self.repoPath, sdUUID, imgUUID, volUUID) for volUUID in uuidlist]
        for vol in volumes:
            try:
                if vol.isShared():
                    images = vol.findImagesByVolume(legal=True)
                    if len(images) > 1:
                        msg = "Cannot delete image %s due to shared volume %s" % (imgUUID, vol.volUUID)
                        raise se.CannotDeleteSharedVolume(msg)
            except se.MetaDataKeyNotFoundError, e:
                # In case of metadata key error, we have corrupted
                # volume (One of metadata corruptions may be
                # previous volume deletion failure).
                # So, there is no reasons to avoid its deletion
                self.log.warn("Volume %s metadata error (%s)", vol.volUUID, str(e))
        return volumes
    def _delete(self, sdUUID, imgUUID, postZero, force):
        """Delete Image folder with all volumes
        'sdUUID' - storage domain UUID
        'imgUUID' - image UUID
        'postZero' - zero out volume data before removal
        'force' - make it brutal
        Returns True once the image directory has been renamed out of the
        way (or found missing) and cleanup has been attempted.
        """
        # Get the list of the volumes
        volclass = sdCache.produce(sdUUID).getVolumeClass()
        uuidlist = volclass.getImageVolumes(self.repoPath, sdUUID, imgUUID)
        # If we are not 'force'd to remove check that there will be no issues
        if not force:
            volumes = self.validateDelete(sdUUID, imgUUID)
        else:
            volumes = [volclass(self.repoPath, sdUUID, imgUUID, volUUID) for volUUID in uuidlist]
        # If we got here than go ahead and remove all of them without mercy
        if volumes:
            try:
                # deleteMultipleVolumes lives in the module that defines the
                # domain's volume class, hence the dynamic import.
                mod = __import__(volclass.__module__,
                                 fromlist=['deleteMultipleVolumes'])
                mod.deleteMultipleVolumes(sdUUID, volumes, postZero)
            except (se.CannotRemoveLogicalVolume, se.VolumeAccessError):
                #Any volume deletion failed, but we don't really care at this point
                self.log.warn("Problems during image %s deletion. Continue...",
                              imgUUID, exc_info=True)
        # Now clean the image directory
        removedImage = imageDir = self.getImageDir(sdUUID, imgUUID)
        # If image directory doesn't exist we are done
        if not os.path.exists(imageDir):
            return True
        # Otherwise move it out of the way if it hasn't been moved yet
        if not imgUUID.startswith(REMOVED_IMAGE_PREFIX):
            removedImage = os.path.join(os.path.dirname(imageDir),
                                        REMOVED_IMAGE_PREFIX + os.path.basename(imageDir))
            os.rename(imageDir, removedImage)
        # Cleanup (hard|soft) links and other state files,
        # i.e. remove everything left including directory itself
        #
        # N.B. The cleanup can fail, but it doesn't bother us at all
        # since the image directory is removed and this image will not show up
        # in the image list anymore. If will be cleaned up at some later time
        # by one of the hosts running vdsm.
        #
        # Inquiring mind can notice that it might happen even on HSM (the
        # image removal itself only performed by SPM), but that is OK - we
        # are not touching any live data. We are removing garbage, that is not
        # used anyway.
        fileUtils.cleanupdir(removedImage)
        return True
    def preDeleteRename(self, sdUUID, imgUUID):
        """
        Rename the image directory and all its volumes out of the way before
        deletion, prefixing them with REMOVED_IMAGE_PREFIX plus a random
        string. Returns the new image UUID (or the original one when the
        image directory no longer exists).
        """
        # Get the list of the volumes
        volclass = sdCache.produce(sdUUID).getVolumeClass()
        uuidlist = volclass.getImageVolumes(self.repoPath, sdUUID, imgUUID)
        imageDir = self.getImageDir(sdUUID, imgUUID)
        # If image directory doesn't exist we are done
        if not os.path.exists(imageDir):
            return imgUUID
        # Create random string
        randomStr = misc.randomStr(RENAME_RANDOM_STRING_LEN)
        renameFormat = lambda uuid: "%s%s_%s" % (REMOVED_IMAGE_PREFIX, randomStr, uuid)
        # Otherwise move it out of the way
        newImgUUID = renameFormat(imgUUID)
        self.log.info("Rename image %s -> %s", imgUUID, newImgUUID)
        if not imgUUID.startswith(REMOVED_IMAGE_PREFIX):
            removedImage = os.path.join(os.path.dirname(imageDir), newImgUUID)
            os.rename(imageDir, removedImage)
        else:
            self.log.warning("Image %s in domain %s already renamed", imgUUID, sdUUID)
        volumes = [volclass(self.repoPath, sdUUID, newImgUUID, volUUID) for volUUID in uuidlist]
        for vol in volumes:
            if not vol.volUUID.startswith(REMOVED_IMAGE_PREFIX):
                vol.rename(renameFormat(vol.volUUID), recovery=False)
            else:
                self.log.warning("Volume %s of image %s already renamed", vol.volUUID, imgUUID)
            # We change image UUID in metadata
            # (and IU_ LV tag for block volumes) of all volumes in image
            vol.setImage(newImgUUID)
        return newImgUUID
def __chainSizeCalc(self, sdUUID, imgUUID, volUUID, size):
"""
Compute an estimate of the whole chain size
using the sum of the actual size of the chain's volumes
"""
chain = self.getChain(sdUUID, imgUUID, volUUID)
newsize = 0
template = chain[0].getParentVolume()
if template:
newsize = template.getVolumeSize()
for vol in chain:
newsize += vol.getVolumeSize()
if newsize > size:
newsize = size
newsize = int(newsize * 1.1) # allocate %10 more for cow metadata
return newsize
@classmethod
def subChainSizeCalc(cls, sdUUID, imgUUID, chain, size):
"""
Compute an estimate of the subchain size
using the sum of the actual size of the subchain's volumes
"""
newsize = 0
for volUUID in chain:
vol = sdCache.produce(sdUUID).produceVolume(imgUUID=imgUUID, volUUID=volUUID)
newsize += vol.getVolumeSize()
if newsize > size:
newsize = size
newsize = int(newsize * 1.1) # allocate %10 more for cow metadata
return newsize
    def getChain(self, sdUUID, imgUUID, volUUID=None):
        """
        Return the chain of volumes of image as a sorted list
        (not including a shared base (template) if any)

        'volUUID' - optional leaf UUID; when given, the chain ending at this
                    specific leaf is selected.
        Raises ImageDoesNotExistInSD when the image has no volumes and
        ImageIsNotLegalChain when no (matching) leaf is found.
        """
        chain = []
        # Find all volumes of image
        volclass = sdCache.produce(sdUUID).getVolumeClass()
        uuidlist = volclass.getImageVolumes(self.repoPath, sdUUID, imgUUID)
        if not uuidlist:
            raise se.ImageDoesNotExistInSD(imgUUID, sdUUID)
        srcVol = volclass(self.repoPath, sdUUID, imgUUID, uuidlist[0])
        # For template image include only one volume (template itself)
        if len(uuidlist) == 1 and srcVol.isShared():
            return [srcVol]
        # find the leaf
        for vol in uuidlist:
            srcVol = volclass(self.repoPath, sdUUID, imgUUID, vol)
            if srcVol.isLeaf():
                if not volUUID or volUUID == srcVol.volUUID:
                    break
            srcVol = None
        if not srcVol:
            self.log.error("There is no leaf in the image %s", imgUUID)
            raise se.ImageIsNotLegalChain(imgUUID)
        # Build up the sorted (parent->child) chain by walking from the
        # leaf towards the base until a shared (template) or blank parent.
        while not srcVol.isShared():
            chain.insert(0, srcVol)
            if srcVol.getParent() == volume.BLANK_UUID:
                break
            srcVol = srcVol.getParentVolume()
        self.log.info("sdUUID=%s imgUUID=%s chain=%s ", sdUUID, imgUUID, str(chain))
        return chain
def getTemplate(self, sdUUID, imgUUID):
"""
Return template of the image
"""
tmpl = None
# Find all volumes of image (excluding template)
chain = self.getChain(sdUUID, imgUUID)
# check if the chain is build above a template, or it is a standalone
pvol = chain[0].getParentVolume()
if pvol:
tmpl = pvol
elif chain[0].isShared():
tmpl = chain[0]
return tmpl
    def validate(self, srcSdUUID, dstSdUUID, imgUUID, op=MOVE_OP):
        """
        Validate template on destination domain

        Refuses to move a template image that still has dependent VMs
        (except on a backup domain) and verifies that the chain's base
        template, if any, exists on 'dstSdUUID'.
        """
        # Find all volumes of source image
        chain = self.getChain(srcSdUUID, imgUUID)
        leafVol = chain[-1]
        srcDom = sdCache.produce(srcSdUUID)
        # Avoid move template's image if there is a VM based on it (except 'Backup' domain)
        if op == MOVE_OP and leafVol.isShared() and not srcDom.isBackup():
            chList = leafVol.getAllChildrenList(self.repoPath, srcSdUUID, imgUUID, leafVol.volUUID)
            if chList:
                raise se.MoveTemplateImageError(imgUUID)
        # check if the chain is build above a template, or it is a standalone
        pvol = chain[0].getParentVolume()
        if pvol: # this is a shared template based chain
            if not pvol.isShared():
                raise se.ImageIsNotLegalChain("Base image parent vol %s is not shared" % pvol.volUUID)
            pimg = pvol.getImage() # pimg == template image
            try:
                volclass = sdCache.produce(dstSdUUID).getVolumeClass()
                # Validate that the destination template exists and accessible
                volclass(self.repoPath, dstSdUUID, pimg, pvol.volUUID)
            except se.StorageException, e:
                self.log.error("Unexpected error", exc_info=True)
                raise se.CouldNotValideTemplateOnTargetDomain("Template %s Destination domain %s: %s" % (pimg, dstSdUUID, str(e)))
    def __templateRelink(self, destDom, imgUUID, volUUID):
        """
        Relink all hardlinks of the template 'volUUID' in all VMs based on it

        For every child image, the stale hardlink of the template is deleted
        and then re-shared from the template's own image directory.
        Only applies to NFS domains.
        """
        # Avoid relink templates for non-NFS domains
        if destDom.getStorageType() not in [ sd.NFS_DOMAIN ]:
            self.log.debug("Doesn't relink templates non-NFS domain %s", destDom.sdUUID)
            return
        vol = destDom.produceVolume(imgUUID=imgUUID, volUUID=volUUID)
        # Relink templates only
        if not vol.isShared():
            self.log.debug("Doesn't relink regular volume %s of image %s", volUUID, imgUUID)
            return
        chList = vol.getAllChildrenList(self.repoPath, destDom.sdUUID, imgUUID, volUUID)
        for ch in chList:
            # Remove hardlink of this template
            v = destDom.produceVolume(imgUUID=ch['imgUUID'], volUUID=volUUID)
            v.delete(postZero=False, force=True)
            # Now we should re-link deleted hardlink, if exists
            newVol = destDom.produceVolume(imgUUID=imgUUID, volUUID=volUUID)
            imageDir = self.getImageDir(destDom.sdUUID, ch['imgUUID'])
            newVol.share(imageDir)
    def createFakeTemplate(self, sdUUID, volParams):
        """
        Create fake template (relevant for Backup domain only)

        'volParams' - parameters of the original template volume. The fake
        volume is created sparse/COW, marked FAKE and shared. A class-wide
        lock prevents concurrent double creation. Errors are only logged.
        """
        with self._fakeTemplateLock:
            try:
                destDom = sdCache.produce(sdUUID)
                volclass = destDom.getVolumeClass()
                # Validate that the destination template exists and accessible
                volclass(self.repoPath, sdUUID, volParams['imgUUID'], volParams['volUUID'])
            except (se.VolumeDoesNotExist, se.ImagePathError):
                try:
                    # Create fake parent volume
                    destDom.createVolume(imgUUID=volParams['imgUUID'], size=volParams['size'],
                                         volFormat=volume.COW_FORMAT, preallocate=volume.SPARSE_VOL,
                                         diskType=volParams['disktype'], volUUID=volParams['volUUID'], desc="Fake volume",
                                         srcImgUUID=volume.BLANK_UUID, srcVolUUID=volume.BLANK_UUID)
                    vol = destDom.produceVolume(imgUUID=volParams['imgUUID'], volUUID=volParams['volUUID'])
                    # Mark fake volume as "FAKE"
                    vol.setLegality(volume.FAKE_VOL)
                    # Mark fake volume as shared
                    vol.setShared()
                    # Now we should re-link all hardlinks of this template in all VMs based on it
                    self.__templateRelink(destDom, volParams['imgUUID'], volParams['volUUID'])
                    self.log.debug("Succeeded to create fake image %s in domain %s", volParams['imgUUID'], destDom.sdUUID)
                except Exception:
                    self.log.error("Failure to create fake image %s in domain %s", volParams['imgUUID'],
                                   destDom.sdUUID, exc_info=True)
def isLegal(self, sdUUID, imgUUID):
"""
Check correctness of the whole chain (excluding template)
"""
try:
legal = True
volclass = sdCache.produce(sdUUID).getVolumeClass()
vollist = volclass.getImageVolumes(self.repoPath, sdUUID, imgUUID)
self.log.info("image %s in domain %s has vollist %s", imgUUID, sdUUID, str(vollist))
for v in vollist:
vol = volclass(self.repoPath, sdUUID, imgUUID, v)
if not vol.isLegal() or vol.isFake():
legal = False
break
except:
legal = False
return legal
def __cleanupMove(self, srcVol, dstVol):
"""
Cleanup environments after move operation
"""
try:
if srcVol:
srcVol.teardown(sdUUID=srcVol.sdUUID, volUUID=srcVol.volUUID)
if dstVol:
dstVol.teardown(sdUUID=dstVol.sdUUID, volUUID=dstVol.volUUID)
except Exception:
self.log.error("Unexpected error", exc_info=True)
    def _createTargetImage(self, destDom, srcSdUUID, imgUUID):
        """
        Create all destination volumes needed to copy/move an image.

        Each volume is created with a minimal temporary size, extended to
        the source's actual size, then its metadata size is restored.
        Returns {'srcChain': [...], 'dstChain': [...]} with both chains
        sorted parent->child.
        """
        # Before actual data copying we need perform several operation
        # such as: create all volumes, create fake template if needed, ...
        try:
            # Find all volumes of source image
            srcChain = self.getChain(srcSdUUID, imgUUID)
        except se.StorageException:
            self.log.error("Unexpected error", exc_info=True)
            raise
        except Exception, e:
            self.log.error("Unexpected error", exc_info=True)
            raise se.SourceImageActionError(imgUUID, srcSdUUID, str(e))
        fakeTemplate = False
        pimg = volume.BLANK_UUID # standalone chain
        # check if the chain is build above a template, or it is a standalone
        pvol = srcChain[0].getParentVolume()
        if pvol:
            # find out parent volume parameters
            volParams = pvol.getVolumeParams()
            pimg = volParams['imgUUID'] # pimg == template image
            if destDom.isBackup():
                # FIXME: This workaround help as copy VM to the backup domain without its template
                # We will create fake template for future VM creation and mark it as FAKE volume
                # This situation is relevant for backup domain only
                fakeTemplate = True
        @contextmanager
        def justLogIt(img):
            # No-op stand-in for the template lock when the chain has no template
            self.log.debug("You don't really need lock parent of image %s", img)
            yield
        dstImageResourcesNamespace = sd.getNamespace(destDom.sdUUID, resourceFactories.IMAGE_NAMESPACE)
        # In destination domain we need to lock image's template if exists
        with rmanager.acquireResource(dstImageResourcesNamespace, pimg, rm.LockType.shared) \
                if pimg != volume.BLANK_UUID else justLogIt(imgUUID):
            if fakeTemplate:
                self.createFakeTemplate(destDom.sdUUID, volParams)
            dstChain = []
            for srcVol in srcChain:
                # Create the dst volume
                try:
                    # find out src volume parameters
                    volParams = srcVol.getVolumeParams(bs=1)
                    # To avoid 'prezeroing' preallocated volume on NFS domain,
                    # we create the target volume with minimal size and after that w'll change
                    # its metadata back to the original size.
                    tmpSize = 20480 # in sectors (10M)
                    destDom.createVolume(imgUUID=imgUUID, size=tmpSize,
                                         volFormat=volParams['volFormat'], preallocate=volParams['prealloc'],
                                         diskType=volParams['disktype'], volUUID=srcVol.volUUID, desc=volParams['descr'],
                                         srcImgUUID=pimg, srcVolUUID=volParams['parent'])
                    dstVol = destDom.produceVolume(imgUUID=imgUUID, volUUID=srcVol.volUUID)
                    # Extend volume (for LV only) size to the actual size
                    dstVol.extend((volParams['apparentsize'] + 511) / 512)
                    # Change destination volume metadata back to the original size.
                    dstVol.setSize(volParams['size'])
                    dstChain.append(dstVol)
                except se.StorageException:
                    self.log.error("Unexpected error", exc_info=True)
                    raise
                except Exception, e:
                    self.log.error("Unexpected error", exc_info=True)
                    raise se.DestImageActionError(imgUUID, destDom.sdUUID, str(e))
                # only base may have a different parent image
                pimg = imgUUID
        return {'srcChain':srcChain, 'dstChain':dstChain}
    def _interImagesCopy(self, destDom, srcSdUUID, imgUUID, chains):
        """
        Copy the data of every source volume to its matching destination
        volume via dd (abortable through vars.task.aborting). Both leaf
        chains are prepared before copying and always torn down afterwards.
        """
        srcLeafVol = chains['srcChain'][-1]
        dstLeafVol = chains['dstChain'][-1]
        try:
            # Prepare the whole chains before the copy
            srcLeafVol.prepare(rw=False)
            dstLeafVol.prepare(rw=True, chainrw=True, setrw=True)
        except Exception:
            self.log.error("Unexpected error", exc_info=True)
            # teardown volumes
            self.__cleanupMove(srcLeafVol, dstLeafVol)
            raise
        try:
            for srcVol in chains['srcChain']:
                # Do the actual copy
                try:
                    dstVol = destDom.produceVolume(imgUUID=imgUUID, volUUID=srcVol.volUUID)
                    srcSize = srcVol.getVolumeSize(bs=1)
                    misc.ddWatchCopy(srcVol.getVolumePath(), dstVol.getVolumePath(), vars.task.aborting, size=srcSize)
                except se.ActionStopped:
                    # user-initiated abort: propagate unchanged
                    raise
                except se.StorageException:
                    self.log.error("Unexpected error", exc_info=True)
                    raise
                except Exception:
                    self.log.error("Copy image error: image=%s, src domain=%s, dst domain=%s", imgUUID, srcSdUUID,
                                   destDom.sdUUID, exc_info=True)
                    raise se.CopyImageError()
        finally:
            # teardown volumes
            self.__cleanupMove(srcLeafVol, dstLeafVol)
    def _finalizeDestinationImage(self, destDom, imgUUID, chains, force):
        """
        Propagate legality flags to the destination volumes after the copy:
        shared source volumes become shared, internal ones internal.
        Done after the copy because a shared volume could not be prepared RW.
        ('force' is not used inside this method.)
        """
        for srcVol in chains['srcChain']:
            try:
                dstVol = destDom.produceVolume(imgUUID=imgUUID, volUUID=srcVol.volUUID)
                # In case of copying template, we should set the destination volume
                # as SHARED (after copy because otherwise prepare as RW would fail)
                if srcVol.isShared():
                    dstVol.setShared()
                elif srcVol.isInternal():
                    dstVol.setInternal()
            except se.StorageException:
                self.log.error("Unexpected error", exc_info=True)
                raise
            except Exception, e:
                self.log.error("Unexpected error", exc_info=True)
                raise se.DestImageActionError(imgUUID, destDom.sdUUID, str(e))
    def move(self, srcSdUUID, dstSdUUID, imgUUID, vmUUID, op, postZero, force):
        """
        Move/Copy image between storage domains within same storage pool

        'op'    - MOVE_OP or COPY_OP; MOVE_OP deletes the source afterwards.
        'force' - overwrite an existing (or illegal/fake) destination image.
        Returns True on success.
        """
        self.log.info("srcSdUUID=%s dstSdUUID=%s "\
                      "imgUUID=%s vmUUID=%s op=%s force=%s postZero=%s",
                      srcSdUUID, dstSdUUID, imgUUID, vmUUID, OP_TYPES[op], str(force), str(postZero))
        destDom = sdCache.produce(dstSdUUID)
        # If image already exists check whether it illegal/fake, overwrite it
        if not self.isLegal(destDom.sdUUID, imgUUID):
            force = True
        # We must first remove the previous instance of image (if exists)
        # in destination domain, if we got the overwrite command
        if force:
            self.log.info("delete image %s on domain %s before overwriting", imgUUID, destDom.sdUUID)
            self.delete(destDom.sdUUID, imgUUID, postZero, force=True)
        chains = self._createTargetImage(destDom, srcSdUUID, imgUUID)
        self._interImagesCopy(destDom, srcSdUUID, imgUUID, chains)
        self._finalizeDestinationImage(destDom, imgUUID, chains, force)
        if force:
            leafVol = chains['dstChain'][-1]
            # Now we should re-link all deleted hardlinks, if exists
            self.__templateRelink(destDom, imgUUID, leafVol.volUUID)
        # At this point we successfully finished the 'copy' part of the operation
        # and we can clear all recoveries.
        vars.task.clearRecoveries()
        # If it's 'move' operation, we should delete src image after copying
        if op == MOVE_OP:
            self.delete(srcSdUUID, imgUUID, postZero, force=True)
        self.log.info("%s task on image %s was successfully finished", OP_TYPES[op], imgUUID)
        return True
def __cleanupMultimove(self, sdUUID, imgList, postZero=False):
"""
Cleanup environments after multiple-move operation
"""
for imgUUID in imgList:
try:
self.delete(sdUUID, imgUUID, postZero, force=True)
except Exception:
pass
def multiMove(self, srcSdUUID, dstSdUUID, imgDict, vmUUID, force):
"""
Move multiple images between storage domains within same storage pool
"""
self.log.info("srcSdUUID=%s dstSdUUID=%s imgDict=%s vmUUID=%s force=%s",
srcSdUUID, dstSdUUID, str(imgDict), vmUUID, str(force))
cleanup_candidates = []
# First, copy all images to the destination domain
for (imgUUID, postZero) in imgDict.iteritems():
self.log.info("srcSdUUID=%s dstSdUUID=%s imgUUID=%s postZero=%s",
srcSdUUID, dstSdUUID, imgUUID, postZero)
try:
self.move(srcSdUUID, dstSdUUID, imgUUID, vmUUID, COPY_OP, postZero, force)
except se.StorageException:
self.__cleanupMultimove(sdUUID=dstSdUUID, imgList=cleanup_candidates, postZero=postZero)
raise
except Exception, e:
self.__cleanupMultimove(sdUUID=dstSdUUID, imgList=cleanup_candidates, postZero=postZero)
self.log.error(e, exec_info=True)
raise se.CopyImageError("image=%s, src domain=%s, dst domain=%s: msg %s" % (imgUUID, srcSdUUID, dstSdUUID, str(e)))
cleanup_candidates.append(imgUUID)
# Remove images from source domain only after successfull copying of all images to the destination domain
for (imgUUID, postZero) in imgDict.iteritems():
try:
self.delete(srcSdUUID, imgUUID, postZero, force=True)
except Exception:
pass
def __cleanupCopy(self, srcVol, dstVol):
"""
Cleanup environments after copy operation
"""
try:
if srcVol:
srcVol.teardown(sdUUID=srcVol.sdUUID, volUUID=srcVol.volUUID)
if dstVol:
dstVol.teardown(sdUUID=dstVol.sdUUID, volUUID=dstVol.volUUID)
except Exception:
self.log.error("Unexpected error", exc_info=True)
def validateVolumeChain(self, sdUUID, imgUUID):
"""
Check correctness of the whole chain (including template if exists)
"""
if not self.isLegal(sdUUID, imgUUID):
raise se.ImageIsNotLegalChain(imgUUID)
chain = self.getChain(sdUUID, imgUUID)
# check if the chain is build above a template, or it is a standalone
pvol = chain[0].getParentVolume()
if pvol:
if not pvol.isLegal() or pvol.isFake():
raise se.ImageIsNotLegalChain(imgUUID)
    def copy(self, sdUUID, vmUUID, srcImgUUID, srcVolUUID, dstImgUUID, dstVolUUID,
             descr, dstSdUUID, volType, volFormat, preallocate, postZero, force):
        """
        Create new template/volume from VM.
        Do it by collapse and copy the whole chain (baseVolUUID->srcVolUUID)

        Returns dstVolUUID on success; raises CopyImageError (or the original
        StorageException) on failure. Source and destination volumes are
        always torn down in the finally clause.
        """
        self.log.info("sdUUID=%s vmUUID=%s "\
                      "srcImgUUID=%s srcVolUUID=%s dstImgUUID=%s dstVolUUID=%s dstSdUUID=%s volType=%s"\
                      " volFormat=%s preallocate=%s force=%s postZero=%s", sdUUID, vmUUID, srcImgUUID, srcVolUUID,
                      dstImgUUID, dstVolUUID, dstSdUUID, volType, volume.type2name(volFormat),
                      volume.type2name(preallocate), str(force), str(postZero))
        try:
            srcVol = dstVol = None
            # Find out dest sdUUID
            if dstSdUUID == sd.BLANK_UUID:
                dstSdUUID = sdUUID
            volclass = sdCache.produce(sdUUID).getVolumeClass()
            destDom = sdCache.produce(dstSdUUID)
            # find src volume
            try:
                srcVol = volclass(self.repoPath, sdUUID, srcImgUUID, srcVolUUID)
            except se.StorageException:
                raise
            except Exception, e:
                self.log.error(e, exc_info=True)
                raise se.SourceImageActionError(srcImgUUID, sdUUID, str(e))
            # Create dst volume
            try:
                # find out src volume parameters
                volParams = srcVol.getVolumeParams()
                if volParams['parent'] and volParams['parent'] != volume.BLANK_UUID:
                    # Volume has parent and therefore is a part of a chain
                    # in that case we can not know what is the exact size of
                    # the space target file (chain ==> cow ==> sparse).
                    # Therefore compute an estimate of the target volume size
                    # using the sum of the actual size of the chain's volumes
                    if volParams['volFormat'] != volume.COW_FORMAT or volParams['prealloc'] != volume.SPARSE_VOL:
                        raise se.IncorrectFormat(self)
                    volParams['apparentsize'] = self.__chainSizeCalc(sdUUID, srcImgUUID,
                                                                     srcVolUUID, volParams['size'])
                # Find out dest volume parameters
                if preallocate in [volume.PREALLOCATED_VOL, volume.SPARSE_VOL]:
                    volParams['prealloc'] = preallocate
                if volFormat in [volume.COW_FORMAT, volume.RAW_FORMAT]:
                    dstVolFormat = volFormat
                else:
                    dstVolFormat = volParams['volFormat']
                self.log.info("copy source %s:%s:%s vol size %s destination %s:%s:%s apparentsize %s" % (
                              sdUUID, srcImgUUID, srcVolUUID, volParams['size'], dstSdUUID, dstImgUUID,
                              dstVolUUID, volParams['apparentsize']))
                # If image already exists check whether it illegal/fake, overwrite it
                if not self.isLegal(dstSdUUID, dstImgUUID):
                    force = True
                # We must first remove the previous instance of image (if exists)
                # in destination domain, if we got the overwrite command
                if force:
                    self.log.info("delete image %s on domain %s before overwriting", dstImgUUID, dstSdUUID)
                    self.delete(dstSdUUID, dstImgUUID, postZero, force=True)
                # To avoid 'prezeroing' preallocated volume on NFS domain,
                # we create the target volume with minimal size and after that w'll change
                # its metadata back to the original size.
                tmpSize = 20480 # in sectors (10M)
                destDom.createVolume(imgUUID=dstImgUUID, size=tmpSize,
                                     volFormat=dstVolFormat, preallocate=volParams['prealloc'],
                                     diskType=volParams['disktype'], volUUID=dstVolUUID, desc=descr,
                                     srcImgUUID=volume.BLANK_UUID, srcVolUUID=volume.BLANK_UUID)
                dstVol = sdCache.produce(dstSdUUID).produceVolume(imgUUID=dstImgUUID, volUUID=dstVolUUID)
                # For convert to 'raw' we need use the virtual disk size instead of apparent size
                if dstVolFormat == volume.RAW_FORMAT:
                    newsize = volParams['size']
                else:
                    newsize = volParams['apparentsize']
                dstVol.extend(newsize)
                dstPath = dstVol.getVolumePath()
                # Change destination volume metadata back to the original size.
                dstVol.setSize(volParams['size'])
            except se.StorageException, e:
                self.log.error("Unexpected error", exc_info=True)
                raise
            except Exception, e:
                self.log.error("Unexpected error", exc_info=True)
                raise se.CopyImageError("Destination volume %s error: %s" % (dstVolUUID, str(e)))
            try:
                # Start the actual copy image procedure
                srcVol.prepare(rw=False)
                dstVol.prepare(rw=True, setrw=True)
                try:
                    # qemu-img based conversion, abortable via vars.task.aborting
                    (rc, out, err) = volume.qemuConvert(volParams['path'], dstPath,
                                                        volParams['volFormat'], dstVolFormat, vars.task.aborting,
                                                        size=srcVol.getVolumeSize(bs=1), dstvolType=dstVol.getType())
                    if rc:
                        raise se.StorageException("rc: %s, err: %s" % (rc, err))
                except se.ActionStopped, e:
                    # user-initiated abort: propagate unchanged
                    raise e
                except se.StorageException, e:
                    raise se.CopyImageError(str(e))
                # Mark volume as SHARED
                if volType == volume.SHARED_VOL:
                    dstVol.setShared()
                if force:
                    # Now we should re-link all deleted hardlinks, if exists
                    self.__templateRelink(destDom, dstImgUUID, dstVolUUID)
            except se.StorageException, e:
                self.log.error("Unexpected error", exc_info=True)
                raise
            except Exception, e:
                self.log.error("Unexpected error", exc_info=True)
                raise se.CopyImageError("src image=%s, dst image=%s: msg=%s" % (srcImgUUID, dstImgUUID, str(e)))
            self.log.info("Finished copying %s:%s -> %s:%s", sdUUID, srcVolUUID, dstSdUUID, dstVolUUID)
            #TODO: handle return status
            return dstVolUUID
        finally:
            self.__cleanupCopy(srcVol=srcVol, dstVol=dstVol)
    def getSubChain(self, sdUUID, imgUUID, startUUID, endUUID):
        """
        Check if startUUID..endUUID is a valid simple link list (and not a tree).

        Returns the list of volume UUIDs from 'startUUID' down to 'endUUID';
        raises ImageIsNotLegalChain when any volume on the way has a number
        of children different from one.
        """
        chain = [startUUID]
        volclass = sdCache.produce(sdUUID).getVolumeClass()
        volUUID = startUUID
        try:
            while volUUID != endUUID:
                vol = volclass(self.repoPath, sdUUID, imgUUID, volUUID)
                ch = vol.getChildrenList()
                # If a volume has more than 1 child, it is a tree.
                if len(ch) != 1:
                    raise se.ImageIsNotLegalChain("%s:%s..%s" % (imgUUID, startUUID, endUUID))
                volUUID = ch[0]
                chain.append(volUUID)
            return chain
        except se.StorageException:
            raise
        except Exception, e:
            self.log.error("Unexpected error", exc_info=True)
            raise se.ImageIsNotLegalChain("%s" % (str(e)))
    @classmethod
    def markIllegalVolumeRollback(cls, taskObj, sdUUID, imgUUID, volUUID, legality):
        """
        Mark illegal volume rollback

        Restores the saved 'legality' value of a volume whose legality was
        changed by markIllegalSubChain(). Failures are logged only.
        """
        try:
            cls.log.info("markIllegalVolumeRollback: sdUUID=%s img=%s vol=%s "\
                         "legality=%s" % (sdUUID, imgUUID, volUUID, legality))
            vol = sdCache.produce(sdUUID).produceVolume(imgUUID=imgUUID, volUUID=volUUID)
            vol.setLegality(legality)
        except Exception:
            cls.log.error("Failure in mark illegal volume rollback: sdUUID=%s img=%s vol=%s "\
                          "legality=%s" % (sdUUID, imgUUID, volUUID, legality), exc_info=True)
def markIllegalSubChain(self, sdUUID, imgUUID, chain):
    """
    Mark all volumes in the sub-chain as illegal.

    Walks from the successor (chain[-1]) up to the ancestor (chain[0]),
    setting each volume's legality to ILLEGAL_VOL.  Before each change a
    rollback recovery restoring the previous legality is pushed, so an
    aborted task can undo the marking.
    """
    if not chain:
        raise se.InvalidParameterException("chain", str(chain))
    volclass = sdCache.produce(sdUUID).getVolumeClass()
    ancestor = chain[0]
    successor = chain[-1]
    tmpVol = volclass(self.repoPath, sdUUID, imgUUID, successor)
    # Stop condition: once we reach the ancestor's parent, the whole
    # subchain (including the ancestor) has been processed.
    dstParent = volclass(self.repoPath, sdUUID, imgUUID, ancestor).getParent()
    # Mark all volumes as illegal
    while tmpVol and dstParent != tmpVol.volUUID:
        name = "Mark illegal volume: " + tmpVol.volUUID
        vars.task.pushRecovery(task.Recovery(name, "image", "Image", "markIllegalVolumeRollback",
                               [sdUUID, imgUUID, tmpVol.volUUID, tmpVol.getLegality()]))
        # Fetch the parent before flipping legality so the walk continues
        # regardless of this volume's new state.
        vol = tmpVol.getParentVolume()
        tmpVol.setLegality(volume.ILLEGAL_VOL)
        tmpVol = vol
def __teardownSubChain(self, sdUUID, imgUUID, chain):
    """
    Teardown all volumes in the sub-chain.

    The subchain ('ancestor' -> ... -> 'successor') volumes were merged
    (rebased) into the successor and detached from the chain, so they
    could not be torn down as part of the normal chain teardown; release
    their resources here before they are deleted.  Failures are logged
    and the walk continues with the next volume.
    """
    if not chain:
        raise se.InvalidParameterException("chain", str(chain))
    volclass = sdCache.produce(sdUUID).getVolumeClass()
    ancestor = chain[0]
    successor = chain[-1]
    srcVol = volclass(self.repoPath, sdUUID, imgUUID, successor)
    dstParent = volclass(self.repoPath, sdUUID, imgUUID, ancestor).getParent()
    while srcVol and dstParent != srcVol.volUUID:
        # BUGFIX: the original advanced srcVol only after a successful
        # teardown, so a teardown failure (swallowed by the except) made
        # the loop retry the same volume forever.  Always advance.
        nextVol = None
        try:
            self.log.info("Teardown volume %s from image %s", srcVol.volUUID, imgUUID)
            nextVol = srcVol.getParentVolume()
            srcVol.teardown(sdUUID=srcVol.sdUUID, volUUID=srcVol.volUUID, justme=True)
        except Exception:
            # Log at error level (consistent with removeSubChain); teardown
            # remains best effort.
            self.log.error("Failure to teardown volume %s in subchain %s -> %s", srcVol.volUUID,
                           ancestor, successor, exc_info=True)
        srcVol = nextVol
def removeSubChain(self, sdUUID, imgUUID, chain, postZero):
    """
    Remove all volumes in the sub-chain.

    Walks from the successor (chain[-1]) up to the ancestor (chain[0]),
    deleting each volume and dropping it from 'chain'.  Failures are
    logged and the walk continues with the next volume.

    :param postZero: passed through to volume delete (zero data on delete)
    """
    if not chain:
        raise se.InvalidParameterException("chain", str(chain))
    volclass = sdCache.produce(sdUUID).getVolumeClass()
    ancestor = chain[0]
    successor = chain[-1]
    srcVol = volclass(self.repoPath, sdUUID, imgUUID, successor)
    dstParent = volclass(self.repoPath, sdUUID, imgUUID, ancestor).getParent()
    while srcVol and dstParent != srcVol.volUUID:
        # BUGFIX: the original advanced srcVol only after a successful
        # delete; a failure (swallowed by the except) retried the same
        # volume forever, re-raising on the already-done chain.remove().
        # Always advance to the parent.
        nextVol = None
        try:
            self.log.info("Remove volume %s from image %s", srcVol.volUUID, imgUUID)
            nextVol = srcVol.getParentVolume()
            chain.remove(srcVol.volUUID)
            srcVol.delete(postZero=postZero, force=True)
        except Exception:
            self.log.error("Failure to remove volume %s in subchain %s -> %s", srcVol.volUUID,
                           ancestor, successor, exc_info=True)
        srcVol = nextVol
def _internalVolumeMerge(self, sdUUID, srcVolParams, volParams, newSize, chain):
    """
    Merge internal volume (the ancestor is not the base of the chain).

    Rebases the successor directly onto the ancestor's parent, collapsing
    the whole subchain into the successor.

    :param srcVolParams: volume parameters of the successor volume
    :param volParams: volume parameters of the ancestor's parent (the new
                      backing volume)
    :param newSize: accumulated subchain size; the successor is extended
                    to this size before the rebase
    :param chain: volume UUIDs from ancestor to successor
    :return: the chain with the successor removed (volumes left for erase)
    """
    srcVol = sdCache.produce(sdUUID).produceVolume(imgUUID=srcVolParams['imgUUID'], volUUID=srcVolParams['volUUID'])
    # Extend successor volume to new accumulated subchain size
    srcVol.extend(newSize)
    srcVol.prepare(rw=True, chainrw=True, setrw=True)
    try:
        # Backing path is relative to the image directory.
        backingVolPath = os.path.join('..', srcVolParams['imgUUID'], volParams['volUUID'])
        srcVol.rebase(volParams['volUUID'], backingVolPath, volParams['volFormat'], unsafe=False, rollback=True)
    finally:
        srcVol.teardown(sdUUID=srcVol.sdUUID, volUUID=srcVol.volUUID)
    # Prepare chain for future erase: the successor survives the merge.
    chain.remove(srcVolParams['volUUID'])
    # Release the resources of the merged-away volumes.
    self.__teardownSubChain(sdUUID, srcVolParams['imgUUID'], chain)
    return chain
def _baseCowVolumeMerge(self, sdUUID, srcVolParams, volParams, newSize, chain):
    """
    Merge snapshot with base COW volume.

    :param srcVolParams: volume parameters of the successor volume
    :param volParams: volume parameters of the ancestor (base) volume
    :param newSize: accumulated subchain size the successor is extended to
    :param chain: volume UUIDs from ancestor to successor
    :return: the chain with the successor removed (volumes left for erase)
    """
    # FIXME!!! In this case we need a workaround to rebase the successor
    # and transform it to be a base volume (without pointing to any backing volume).
    # Actually this case should be handled by 'qemu-img rebase' (RFE to kvm).
    # At this point we can achieve this result by a 4 step procedure:
    # Step 1: create temporary empty volume similar to ancestor volume
    # Step 2: Rebase (safely) successor volume on top of this temporary volume
    # Step 3: Rebase (unsafely) successor volume on top of "" (empty string)
    # Step 4: Delete temporary volume
    srcVol = sdCache.produce(sdUUID).produceVolume(imgUUID=srcVolParams['imgUUID'], volUUID=srcVolParams['volUUID'])
    # Extend successor volume to new accumulated subchain size
    srcVol.extend(newSize)
    # Step 1: Create temporary volume with destination volume's parent parameters
    newUUID = str(uuid.uuid4())
    sdCache.produce(sdUUID).createVolume(imgUUID=srcVolParams['imgUUID'],
        size=volParams['size'], volFormat=volParams['volFormat'],
        preallocate=volume.SPARSE_VOL, diskType=volParams['disktype'],
        volUUID=newUUID, desc="New base volume",
        srcImgUUID=volume.BLANK_UUID, srcVolUUID=volume.BLANK_UUID)
    tmpVol = sdCache.produce(sdUUID).produceVolume(imgUUID=srcVolParams['imgUUID'], volUUID=newUUID)
    tmpVol.prepare(rw=True, justme=True, setrw=True)
    # We should prepare/teardown volume for every single rebase.
    # The reason is recheckIfLeaf at the end of the rebase, that changes
    # volume permissions to RO for internal volumes.
    srcVol.prepare(rw=True, chainrw=True, setrw=True)
    try:
        # Step 2: Rebase successor on top of tmpVol
        # qemu-img rebase -b tmpBackingFile -F backingFormat -f srcFormat src
        backingVolPath = os.path.join('..', srcVolParams['imgUUID'], newUUID)
        srcVol.rebase(newUUID, backingVolPath, volParams['volFormat'], unsafe=False, rollback=True)
    finally:
        srcVol.teardown(sdUUID=srcVol.sdUUID, volUUID=srcVol.volUUID)
    srcVol.prepare(rw=True, chainrw=True, setrw=True)
    try:
        # Step 3: Remove pointer to backing file from the successor by 'unsafe' rebase
        # qemu-img rebase -u -b "" -F backingFormat -f srcFormat src
        srcVol.rebase(volume.BLANK_UUID, "", volParams['volFormat'], unsafe=True, rollback=False)
    finally:
        srcVol.teardown(sdUUID=srcVol.sdUUID, volUUID=srcVol.volUUID)
    # Step 4: Delete temporary volume
    tmpVol.teardown(sdUUID=tmpVol.sdUUID, volUUID=tmpVol.volUUID, justme=True)
    tmpVol.delete(postZero=False, force=True)
    # Prepare chain for future erase: the successor survives the merge.
    chain.remove(srcVolParams['volUUID'])
    self.__teardownSubChain(sdUUID, srcVolParams['imgUUID'], chain)
    return chain
def _baseRawVolumeMerge(self, sdUUID, srcVolParams, volParams, chain):
    """
    Merge snapshot with base RAW volume.

    :param srcVolParams: volume parameters of the successor volume
    :param volParams: volume parameters of the ancestor (base RAW) volume
    :param chain: volume UUIDs from ancestor to successor
    :return: the chain prepared for erase (the successor is replaced by the
             temporary UUID the old successor file was renamed to)
    """
    # In this case we need to convert the ancestor->successor subchain to a
    # new volume and rebase successor's children (if they exist) on top of it.
    # At this point we can achieve this result by a 4 step procedure:
    # Step 1: Create temporary empty volume similar to ancestor volume
    # Step 2: Convert successor to new temporary volume
    # Step 3: Rename temporary volume as successor
    # Step 4: Unsafely rebase successor's children on top of temporary volume
    srcVol = sdCache.produce(sdUUID).produceVolume(imgUUID=srcVolParams['imgUUID'], volUUID=srcVolParams['volUUID'])
    srcVol.prepare(rw=True, chainrw=True, setrw=True)
    # Find out successor's children list
    chList = srcVol.getChildrenList()
    # Step 1: Create temporary volume with destination volume's parent parameters
    newUUID = str(uuid.uuid4())
    sdCache.produce(sdUUID).createVolume(imgUUID=srcVolParams['imgUUID'],
        size=volParams['size'], volFormat=volParams['volFormat'],
        preallocate=volParams['prealloc'], diskType=volParams['disktype'],
        volUUID=newUUID, desc=srcVolParams['descr'],
        srcImgUUID=volume.BLANK_UUID, srcVolUUID=volume.BLANK_UUID)
    newVol = sdCache.produce(sdUUID).produceVolume(imgUUID=srcVolParams['imgUUID'], volUUID=newUUID)
    newVol.prepare(rw=True, justme=True, setrw=True)
    # Step 2: Convert successor to new volume
    # qemu-img convert -f qcow2 successor -O raw newUUID
    (rc, out, err) = volume.qemuConvert(srcVolParams['path'], newVol.getVolumePath(),
        srcVolParams['volFormat'], volParams['volFormat'], vars.task.aborting,
        size=volParams['apparentsize'], dstvolType=newVol.getType())
    if rc:
        raise se.MergeSnapshotsError(newUUID)
    newVol.teardown(sdUUID=newVol.sdUUID, volUUID=newVol.volUUID)
    srcVol.teardown(sdUUID=srcVol.sdUUID, volUUID=srcVol.volUUID)
    if chList:
        # The converted volume has children, so it is internal (not a leaf).
        newVol.setInternal()
    # Step 3: Rename successor as tmpUUID and new volume as successor
    tmpUUID = str(uuid.uuid4())
    srcVol.rename(tmpUUID)
    newVol.rename(srcVolParams['volUUID'])
    # Step 4: Rebase children 'unsafely' on top of new volume
    # qemu-img rebase -u -b tmpBackingFile -F backingFormat -f srcFormat src
    for v in chList:
        ch = sdCache.produce(sdUUID).produceVolume(imgUUID=srcVolParams['imgUUID'], volUUID=v)
        ch.prepare(rw=True, chainrw=True, setrw=True, force=True)
        try:
            backingVolPath = os.path.join('..', srcVolParams['imgUUID'], srcVolParams['volUUID'])
            ch.rebase(srcVolParams['volUUID'], backingVolPath, volParams['volFormat'], unsafe=True, rollback=True)
        finally:
            ch.teardown(sdUUID=ch.sdUUID, volUUID=ch.volUUID)
        ch.recheckIfLeaf()
    # Prepare chain for future erase: drop the (new) successor and add the
    # renamed old successor so it is deleted with the subchain.
    chain.remove(srcVolParams['volUUID'])
    chain.append(tmpUUID)
    return chain
def merge(self, sdUUID, vmUUID, imgUUID, ancestor, successor, postZero):
    """Merge source volume to the destination volume.

    'successor' - source volume UUID (the volume that survives the merge)
    'ancestor' - destination volume UUID (merged away together with
                 everything between it and the successor)
    'postZero' - whether removed volumes are zeroed on delete
    """
    self.log.info("sdUUID=%s vmUUID=%s"\
                  " imgUUID=%s ancestor=%s successor=%s postZero=%s",
                  sdUUID, vmUUID, imgUUID,
                  ancestor, successor, str(postZero))
    chain = []
    srcVol = dstVol = None
    volclass = sdCache.produce(sdUUID).getVolumeClass()
    try:
        srcVol = volclass(self.repoPath, sdUUID, imgUUID, successor)
        srcVolParams = srcVol.getVolumeParams()
        dstVol = volclass(self.repoPath, sdUUID, imgUUID, ancestor)
        # Shared (template) volumes must never be merged away.
        if dstVol.isShared():
            raise se.MergeSnapshotsError(ancestor)
        dstParent = dstVol.getParentVolume()
        # volParams describe the volume the successor is merged onto:
        # the ancestor's parent when one exists, else the ancestor itself.
        if dstParent:
            volParams = dstParent.getVolumeParams()
        else:
            volParams = dstVol.getVolumeParams()
        chain = self.getSubChain(sdUUID, imgUUID, ancestor, successor)
        # Calculate size of subchain ancestor -> successor
        newSize = self.subChainSizeCalc(sdUUID, srcVolParams['imgUUID'], chain, volParams['size'])
    except se.StorageException, e:
        self.log.error("Unexpected error", exc_info=True)
        raise
    except Exception, e:
        self.log.error(e, exc_info=True)
        raise se.SourceImageActionError(imgUUID, sdUUID, str(e))
    try:
        # Start the actual merge image procedure
        if dstParent:
            # The ancestor isn't a base volume of the chain.
            self.log.info("Internal volume merge: src = %s dst = %s", srcVol.getVolumePath(), dstVol.getVolumePath())
            chain = self._internalVolumeMerge(sdUUID, srcVolParams, volParams, newSize, chain)
        else:
            # The ancestor is actually a base volume of the chain.
            # We have 2 cases here:
            # Case 1: ancestor is a COW volume (use 'rebase' workaround)
            # Case 2: ancestor is a RAW volume (use 'convert + rebase')
            if volParams['volFormat'] == volume.RAW_FORMAT:
                self.log.info("merge with convert: src = %s dst = %s", srcVol.getVolumePath(), dstVol.getVolumePath())
                chain = self._baseRawVolumeMerge(sdUUID, srcVolParams, volParams, chain)
            else:
                self.log.info("4 steps merge: src = %s dst = %s", srcVol.getVolumePath(), dstVol.getVolumePath())
                chain = self._baseCowVolumeMerge(sdUUID, srcVolParams, volParams, newSize, chain)
        # mark all snapshots from 'ancestor' to 'successor' as illegal
        self.markIllegalSubChain(sdUUID, imgUUID, chain)
        # This is unrecoverable point, clear all recoveries
        vars.task.clearRecoveries()
        try:
            # remove all snapshots from 'ancestor' to 'successor'
            self.removeSubChain(sdUUID, imgUUID, chain, postZero)
        except Exception, e:
            # Removal is best effort: the merge itself already succeeded.
            self.log.error("Failure to remove subchain %s -> %s in image %s", ancestor,
                           successor, imgUUID, exc_info=True)
        chain = [successor]
    except se.ActionStopped, e:
        raise e
    except se.StorageException, e:
        self.log.error("Unexpected error", exc_info=True)
        raise
    except Exception, e:
        self.log.error(e, exc_info=True)
        raise se.SourceImageActionError(imgUUID, sdUUID, str(e))
    self.log.info("Merge src=%s with dst=%s was successfully finished.", srcVol.getVolumePath(), dstVol.getVolumePath())
def check(self, sdUUID, imgUUID):
    """
    Validate image.

    Tries to prepare (and tear down) every volume of the image; any
    volume that fails is reported in 'badvols'.

    :return: dict with 'imagestatus' (0 if OK, otherwise an error code),
             'message' and 'badvols' ({volUUID: error string}).
    """
    badvols = {}
    imagestatus = 0
    message = "Image OK"
    try:
        # Find all volumes of source image
        volclass = sdCache.produce(sdUUID).getVolumeClass()
        vollist = volclass.getImageVolumes(self.repoPath, sdUUID, imgUUID)
        vol = None
        for volUUID in vollist:
            try:
                vol = volclass(self.repoPath, sdUUID, imgUUID, volUUID)
                # Leaf volumes must be writable; internal ones read-only.
                if vol.isLeaf():
                    vol.prepare(rw=True, setrw=True)
                else:
                    vol.prepare(rw=False, setrw=True)
                vol.teardown(sdUUID=vol.sdUUID, volUUID=vol.volUUID)
                # Mark this volume as fully handled so the cleanup below
                # does not tear it down a second time.
                vol = None
            except Exception, e:
                self.log.error("Unexpected error", exc_info=True)
                message = "Image has bad volumes"
                imagestatus = se.ImageIsNotLegalChain.code
                badvols[volUUID] = str(e)
            # NOTE(review): cleanup of a volume left prepared by a failure;
            # placement inside the loop reconstructed from context — confirm
            # against upstream vdsm image.py.
            if vol:
                vol.teardown(sdUUID=vol.sdUUID, volUUID=vol.volUUID)
    except se.StorageException, e:
        imagestatus = e.code
        message = str(e)
    except Exception, e:
        self.log.error("Unexpected error", exc_info=True)
        imagestatus = se.ImageIsNotLegalChain.code
        message = str(e)
    return dict(imagestatus=imagestatus, message=message, badvols=badvols)
|
openSUSE/vdsm
|
vdsm/storage/image.py
|
Python
|
gpl-2.0
| 55,653
|
## system-config-printer
## Copyright (C) 2008 Red Hat, Inc.
## Copyright (C) 2008 Tim Waugh <twaugh@redhat.com>
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# Minimal distutils packaging for the cupshelpers helper library.
from distutils.core import setup
setup(name='cupshelpers',
      version='1.0',
      description='Helper functions and classes for using CUPS',
      maintainer='Tim Waugh',
      maintainer_email='twaugh@redhat.com',
      packages=['cupshelpers'])
|
poolooloo/emind-cloud-printer
|
setup.py
|
Python
|
gpl-2.0
| 1,083
|
from Credit import Credit
from fit.ColumnFixture import ColumnFixture
class CalculateCredit(ColumnFixture):
    """FIT column fixture exposing the Credit business rules.

    The framework fills the input columns (months, reliable, balance)
    per table row and reads the computed columns via allow_credit() /
    credit_limit().
    """
    # Input columns, populated by the FIT framework.
    months = 0
    reliable = False
    balance = 0.0
    # Shared domain object implementing the credit rules.
    credit = Credit()

    def allow_credit(self):
        # Computed column: whether credit may be granted for this row.
        return self.credit.allowsCredit(self.months, self.reliable, self.balance)

    def credit_limit(self):
        # Computed column: maximum credit for this row.
        return self.credit.limit(self.months, self.reliable, self.balance)
|
epronk/pyfit2
|
examples/CalculateCredit.py
|
Python
|
gpl-2.0
| 406
|
from builder.btools import RegisterCustomTest
from builder.btools import AddConfigKey
from builder.bconfig import getAutoconfPrefix
from builder.bconfig import filterOut
from builder.bconfig import Version
def CheckAxtor(ctx, write_config_h=False, add_to_compiler_env=False,
               min_version=None, max_version=None):
    """SCons configure check for the Axtor backends (OpenCL, GLSL).

    Requires the 'llvm' package to be available; links and runs a small
    probe program against the Axtor libraries and, on success, declares
    the 'axtor' package on the environment.  Returns 1 on success, 0
    otherwise.

    NOTE(review): min_version/max_version and add_to_compiler_env are
    accepted but currently unused (version handling is commented out).
    """
    ctx.Message('Checking for Axtor Backends (OpenCL, GLSL) ... ')
    confprefix = getAutoconfPrefix(ctx.env)
    key = confprefix +'HAVE_AXTOR'
    # if min_version is not None:
    #     min_version = Version(min_version)
    # if max_version is not None:
    #     max_version = Version(max_version)
    # LLVM is required for axtor
    if not ctx.env.GetPackage('llvm'):
        ctx.Message('LLVM not detected')
        if write_config_h:
            AddConfigKey(ctx, key, 0)
        ctx.Result(0)
        return 0
    # Temporarily pull LLVM's build variables in; restored below.
    savedVars = ctx.env.RequirePackage('llvm')
    axtorCoreLibs = ctx.env.Split("""
    axtorMetainfo
    axtorWriter
    axtorIntrinsics
    axtorGenericC
    axtorInterface
    axtorConsole
    axtorPass
    axtorParsers
    axtorSolvers
    axtorCNS
    axtorAST
    axtorUtil
    """)
    axtorBackendLibs = ['Axtor_OCL','Axtor_GLSL']
    axtorLibs = axtorBackendLibs + axtorCoreLibs
    ctx.env.Prepend(LIBS = axtorLibs)
    # Probe: instantiate both backends to verify headers and link.
    ret, outputStr = ctx.TryRun("""
    #include <axtor_ocl/OCLBackend.h>
    #include <axtor_glsl/GLSLBackend.h>
    int main(int argc, char** argv)
    {
    axtor::OCLBackend oclBackend;
    axtor::GLSLBackend glslBackend;
    printf("%d",1);
    return 0;
    }
    """, extension='.cpp')
    ctx.env.RestoreVars(savedVars)
    if ret:
        ctx.env.DeclarePackage('axtor',
                               vars={'LIBS' : axtorLibs},
                               dependencies='llvm',
                               trigger_libs=['Axtor', 'Axtor'],
                               trigger_frameworks=['Axtor', 'Axtor'])
    # if ctx.env.GetPackage('llvm_shared'):
    #     ctx.env.DeclarePackage('axtor_shared',
    #                            vars={'LIBS' : axtorSharedLibs},
    #                            trigger_libs=['Axtor_shared'],
    #                            trigger_frameworks=['Axtor_shared'])
    # define
    # if ret:
    #     ctx.env.DeclarePackage('axtor',
    #                            vars={'LIBS' : axtorLibs,
    #                                  'CPPDEFINES' : key},
    #                            dependencies='llvm',
    #                            trigger_libs=['axtor', 'Axtor'],
    #                            trigger_frameworks=['Axtor'])
    ctx.Result(ret)
    return ret

# Register the check so SCons configure contexts can call CheckAxtor.
RegisterCustomTest('CheckAxtor', CheckAxtor)
|
paeschli/scons-builder
|
modules/axtor_check.py
|
Python
|
gpl-2.0
| 2,706
|
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
#
# Copyright (C) 2004 Robert Kaye
# Copyright (C) 2006-2009, 2011-2014, 2017 Lukáš Lalinský
# Copyright (C) 2008 Gary van der Merwe
# Copyright (C) 2008 amckinle
# Copyright (C) 2008-2010, 2014-2015, 2018-2022 Philipp Wolfer
# Copyright (C) 2009 Carlin Mangar
# Copyright (C) 2010 Andrew Barnert
# Copyright (C) 2011-2014 Michael Wiencek
# Copyright (C) 2011-2014, 2017-2019 Wieland Hoffmann
# Copyright (C) 2012 Chad Wilson
# Copyright (C) 2013 Calvin Walton
# Copyright (C) 2013 Ionuț Ciocîrlan
# Copyright (C) 2013 brainz34
# Copyright (C) 2013-2014, 2017 Sophist-UK
# Copyright (C) 2013-2015, 2017-2022 Laurent Monin
# Copyright (C) 2016 Rahul Raturi
# Copyright (C) 2016 Simon Legner
# Copyright (C) 2016 Suhas
# Copyright (C) 2016-2018 Sambhav Kothari
# Copyright (C) 2017-2018 Vishal Choudhary
# Copyright (C) 2018 Bob Swift
# Copyright (C) 2018 virusMac
# Copyright (C) 2019 Joel Lintunen
# Copyright (C) 2020 Julius Michaelis
# Copyright (C) 2020-2021 Gabriel Ferreira
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import argparse
from concurrent.futures import ThreadPoolExecutor
from functools import partial
import itertools
import logging
import os.path
import platform
import re
import shutil
import signal
import sys
from PyQt5 import (
QtCore,
QtGui,
QtWidgets,
)
from picard import (
PICARD_APP_ID,
PICARD_APP_NAME,
PICARD_DESKTOP_NAME,
PICARD_FANCY_VERSION_STR,
PICARD_ORG_NAME,
acoustid,
log,
)
from picard.acoustid.manager import AcoustIDManager
from picard.album import (
Album,
NatAlbum,
run_album_post_removal_processors,
)
from picard.browser.browser import BrowserIntegration
from picard.browser.filelookup import FileLookup
from picard.cluster import (
Cluster,
ClusterList,
UnclusteredFiles,
)
from picard.collection import load_user_collections
from picard.config import (
get_config,
setup_config,
)
from picard.config_upgrade import upgrade_config
from picard.const import (
USER_DIR,
USER_PLUGIN_DIR,
)
from picard.const.sys import (
IS_FROZEN,
IS_HAIKU,
IS_WIN,
)
from picard.dataobj import DataObject
from picard.disc import (
Disc,
eaclog,
whipperlog,
)
from picard.file import File
from picard.formats import open_ as open_file
from picard.i18n import setup_gettext
from picard.pluginmanager import PluginManager
from picard.releasegroup import ReleaseGroup
from picard.track import (
NonAlbumTrack,
Track,
)
from picard.util import (
check_io_encoding,
decode_filename,
encode_filename,
is_hidden,
iter_files_from_objects,
mbid_validate,
normpath,
process_events_iter,
system_supports_long_paths,
thread,
versions,
webbrowser2,
)
from picard.util.checkupdate import UpdateCheckManager
from picard.webservice import WebService
from picard.webservice.api_helpers import (
AcoustIdAPIHelper,
MBAPIHelper,
)
import picard.resources # noqa: F401 # pylint: disable=unused-import
from picard.ui import theme
from picard.ui.mainwindow import MainWindow
from picard.ui.searchdialog.album import AlbumSearchDialog
from picard.ui.searchdialog.artist import ArtistSearchDialog
from picard.ui.searchdialog.track import TrackSearchDialog
# A "fix" for https://bugs.python.org/issue1438480
def _patched_shutil_copystat(src, dst, *, follow_symlinks=True):
    """Like shutil.copystat, but swallow OSError (e.g. filesystems that
    do not support copying extended attributes)."""
    try:
        _orig_shutil_copystat(src, dst, follow_symlinks=follow_symlinks)
    except OSError:
        pass

# Monkeypatch shutil so every copy in Picard uses the tolerant version.
_orig_shutil_copystat = shutil.copystat
shutil.copystat = _patched_shutil_copystat
def plugin_dirs():
    """Yield the directories Picard loads plugins from.

    First the bundled ``plugins`` directory next to the application code
    (or next to the frozen executable), then the per-user plugin
    directory, which is created on demand.
    """
    if IS_FROZEN:
        toppath = sys.argv[0]
    else:
        toppath = os.path.abspath(__file__)
    topdir = os.path.dirname(toppath)
    plugin_dir = os.path.join(topdir, "plugins")
    yield plugin_dir
    # exist_ok avoids the TOCTOU race of the original exists()+makedirs()
    # pair when another process creates the directory in between.
    os.makedirs(USER_PLUGIN_DIR, exist_ok=True)
    yield USER_PLUGIN_DIR
class Tagger(QtWidgets.QApplication):
    """The main Picard application object (QApplication subclass)."""

    # Signals notifying the UI of model changes.
    tagger_stats_changed = QtCore.pyqtSignal()
    listen_port_changed = QtCore.pyqtSignal(int)
    cluster_added = QtCore.pyqtSignal(Cluster)
    cluster_removed = QtCore.pyqtSignal(Cluster)
    album_added = QtCore.pyqtSignal(Album)
    album_removed = QtCore.pyqtSignal(Album)

    # Singleton instance, set in __init__.
    __instance = None

    # Class-level defaults for instance flags.
    _debug = False
    _no_restore = False
def __init__(self, picard_args, unparsed_args, localedir, autoupdate):
    """Create the application object.

    :param picard_args: parsed command-line namespace (config file,
        debug/no_restore/no_plugins/no_player flags, FILE list)
    :param unparsed_args: remaining command-line arguments (unused here)
    :param localedir: directory containing gettext translations
    :param autoupdate: whether the update check manager should run
    """
    super().__init__(sys.argv)
    self.__class__.__instance = self
    setup_config(self, picard_args.config_file)
    config = get_config()
    self.setStyle(OverrideStyle())
    theme.setup(self)
    self._cmdline_files = picard_args.FILE
    self.autoupdate_enabled = autoupdate
    self._no_restore = picard_args.no_restore
    self._no_plugins = picard_args.no_plugins
    self.set_log_level(config.setting['log_verbosity'])
    if picard_args.debug or "PICARD_DEBUG" in os.environ:
        self.set_log_level(logging.DEBUG)
    # Default thread pool
    self.thread_pool = ThreadPoolExecutor()
    # Provide a separate thread pool for operations that should not be
    # delayed by longer background processing tasks, e.g. because the user
    # expects instant feedback instead of waiting for a long list of
    # operations to finish.
    self.priority_thread_pool = ThreadPoolExecutor(max_workers=1)
    # Use a separate thread pool for file saving, with a thread count of 1,
    # to avoid race conditions in File._save_and_rename.
    self.save_thread_pool = ThreadPoolExecutor(max_workers=1)
    if not IS_WIN:
        # Set up signal handling
        # It's not possible to call all available functions from signal
        # handlers, therefore we need to set up a QSocketNotifier to listen
        # on a socket. Sending data through a socket can be done in a
        # signal handler, so we use the socket to notify the application of
        # the signal.
        # This code is adopted from
        # https://qt-project.org/doc/qt-4.8/unix-signals.html
        # To not make the socket module a requirement for the Windows
        # installer, import it here and not globally
        import socket
        self.signalfd = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM, 0)
        self.signalnotifier = QtCore.QSocketNotifier(self.signalfd[1].fileno(),
                                                     QtCore.QSocketNotifier.Type.Read, self)
        self.signalnotifier.activated.connect(self.sighandler)
        signal.signal(signal.SIGHUP, self.signal)
        signal.signal(signal.SIGINT, self.signal)
        signal.signal(signal.SIGTERM, self.signal)
    # Setup logging
    log.debug("Starting Picard from %r", os.path.abspath(__file__))
    log.debug("Platform: %s %s %s", platform.platform(),
              platform.python_implementation(), platform.python_version())
    log.debug("Versions: %s", versions.as_string())
    log.debug("Configuration file path: %r", config.fileName())
    log.debug("User directory: %r", os.path.abspath(USER_DIR))
    log.debug("System long path support: %r", system_supports_long_paths())
    # for compatibility with pre-1.3 plugins
    QtCore.QObject.tagger = self
    QtCore.QObject.config = config
    QtCore.QObject.log = log
    check_io_encoding()
    # Must be before config upgrade because upgrade dialogs need to be
    # translated
    setup_gettext(localedir, config.setting["ui_language"], log.debug)
    upgrade_config(config)
    self.webservice = WebService()
    self.mb_api = MBAPIHelper(self.webservice)
    load_user_collections()
    # Initialize fingerprinting
    acoustid_api = AcoustIdAPIHelper(self.webservice)
    self._acoustid = acoustid.AcoustIDClient(acoustid_api)
    self._acoustid.init()
    self.acoustidmanager = AcoustIDManager(acoustid_api)
    self.enable_menu_icons(config.setting['show_menu_icons'])
    # Load plugins
    self.pluginmanager = PluginManager()
    if not self._no_plugins:
        for plugin_dir in plugin_dirs():
            self.pluginmanager.load_plugins_from_directory(plugin_dir)
    self.browser_integration = BrowserIntegration()
    self.browser_integration.listen_port_changed.connect(self.listen_port_changed)
    # Model state: loaded files, clusters, albums and lookup tables.
    self._pending_files_count = 0
    self.files = {}
    self.clusters = ClusterList()
    self.albums = {}
    self.release_groups = {}
    self.mbid_redirects = {}
    self.unclustered_files = UnclusteredFiles()
    self.nats = None
    self.window = MainWindow(disable_player=picard_args.no_player)
    self.exit_cleanup = []
    self.stopping = False
    # Load release version information
    if self.autoupdate_enabled:
        self.updatecheckmanager = UpdateCheckManager(parent=self.window)
def enable_menu_icons(self, enabled):
    """Show or hide icons in menus application-wide."""
    self.setAttribute(QtCore.Qt.ApplicationAttribute.AA_DontShowIconsInMenus, not enabled)
def register_cleanup(self, func):
    """Register `func` to be called during application shutdown."""
    self.exit_cleanup.append(func)
def run_cleanup(self):
    """Invoke all registered exit-cleanup callbacks in registration order."""
    for callback in self.exit_cleanup:
        callback()
def set_log_level(self, level):
    """Set the global log level and track whether debug mode is active."""
    self._debug = (level == logging.DEBUG)
    log.set_level(level)
def _mb_login_dialog(self, parent):
    """Prompt the user for the MusicBrainz OAuth authorization code.

    Returns the entered code, or None if the dialog was cancelled.
    """
    if not parent:
        parent = self.window
    dialog = QtWidgets.QInputDialog(parent)
    dialog.setWindowModality(QtCore.Qt.WindowModality.WindowModal)
    dialog.setWindowTitle(_("MusicBrainz Account"))
    dialog.setLabelText(_("Authorization code:"))
    status = dialog.exec_()
    if status == QtWidgets.QDialog.DialogCode.Accepted:
        return dialog.textValue()
    else:
        return None
def mb_login(self, callback, parent=None):
    """Start the MusicBrainz OAuth login flow.

    Opens the authorization URL in a browser, asks the user for the
    resulting code and exchanges it for tokens.  `callback` eventually
    receives (successful, error_msg); it gets (False, None) immediately
    if the user cancels the dialog.
    """
    scopes = "profile tag rating collection submit_isrc submit_barcode"
    authorization_url = self.webservice.oauth_manager.get_authorization_url(scopes)
    webbrowser2.open(authorization_url)
    authorization_code = self._mb_login_dialog(parent)
    if authorization_code is not None:
        self.webservice.oauth_manager.exchange_authorization_code(
            authorization_code, scopes,
            partial(self.on_mb_authorization_finished, callback))
    else:
        callback(False, None)
def on_mb_authorization_finished(self, callback, successful=False, error_msg=None):
    """After the OAuth code exchange: fetch the username on success,
    otherwise report the failure to `callback`."""
    if not successful:
        callback(False, error_msg)
        return
    self.webservice.oauth_manager.fetch_username(
        partial(self.on_mb_login_finished, callback))
def on_mb_login_finished(self, callback, successful, error_msg):
    """Complete the MusicBrainz login flow.

    Reloads the user's collections on success, then forwards the result
    to ``callback(successful, error_msg)``.

    NOTE(review): the original carried a spurious ``@classmethod``
    decorator while naming its first parameter ``self``; every call site
    binds it through an instance (see on_mb_authorization_finished), so
    the decorator has been dropped.
    """
    if successful:
        load_user_collections()
    callback(successful, error_msg)
def mb_logout(self):
    """Revoke the stored MusicBrainz OAuth tokens and refresh collections."""
    self.webservice.oauth_manager.revoke_tokens()
    load_user_collections()
def move_files_to_album(self, files, albumid=None, album=None):
    """Move `files` to tracks on album `albumid`.

    Either `album` or `albumid` must be given; when no album object is
    passed it is loaded on demand from `albumid`.
    """
    if album is None:
        album = self.load_album(albumid)
    album.match_files(files)
def move_file_to_album(self, file, albumid):
    """Move `file` to a track on album `albumid`."""
    self.move_files_to_album([file], albumid)
def move_file_to_track(self, file, albumid, recordingid):
    """Move `file` to recording `recordingid` on album `albumid`."""
    album = self.load_album(albumid)
    # Remember the wanted recording so matching picks that exact track
    # once the album has loaded.
    file.match_recordingid = recordingid
    album.match_files([file])
def create_nats(self):
    """Return the singleton "non-album tracks" album, creating it on
    first use and announcing it to the UI."""
    if self.nats is None:
        self.nats = NatAlbum()
        self.albums["NATS"] = self.nats
        self.album_added.emit(self.nats)
        self.nats.item.setExpanded(True)
    return self.nats
def move_file_to_nat(self, file, recordingid, node=None):
    """Move `file` to the non-album track for `recordingid`."""
    self.create_nats()
    # Park the file in the NAT album's unmatched files until the
    # non-album track itself has been loaded.
    file.move(self.nats.unmatched_files)
    nat = self.load_nat(recordingid, node=node)
    nat.run_when_loaded(partial(file.move, nat))
    if nat.loaded:
        self.nats.update()
def exit(self):
    """Shut Picard down: stop services and run cleanup handlers.

    Idempotent — a second call returns immediately.
    """
    if self.stopping:
        return
    self.stopping = True
    log.debug("Picard stopping")
    self._acoustid.done()
    self.thread_pool.shutdown()
    self.save_thread_pool.shutdown()
    self.priority_thread_pool.shutdown()
    self.browser_integration.stop()
    self.webservice.stop()
    self.run_cleanup()
    # Flush any events queued by the teardown above.
    QtCore.QCoreApplication.processEvents()
def _run_init(self):
    """Deferred startup hook: queue any files given on the command line."""
    if self._cmdline_files:
        paths = list(map(decode_filename, self._cmdline_files))
        self.add_paths(paths)
        del self._cmdline_files
def run(self):
    """Show the main window and enter the Qt event loop.

    Command-line files are loaded via a 0 ms single-shot timer so the UI
    appears first.  Returns the event loop's exit code.
    """
    self.update_browser_integration()
    self.window.show()
    QtCore.QTimer.singleShot(0, self._run_init)
    res = self.exec_()
    self.exit()
    return res
def update_browser_integration(self):
    """Start or stop the local browser-integration server per config."""
    enabled = get_config().setting["browser_integration"]
    action = self.browser_integration.start if enabled else self.browser_integration.stop
    action()
def event(self, event):
    """Qt event hook: run calls proxied to the main thread and handle
    FileOpen events (files opened via the OS with the application)."""
    if isinstance(event, thread.ProxyToMainEvent):
        event.run()
    elif event.type() == QtCore.QEvent.Type.FileOpen:
        file = event.file()
        self.add_paths([file])
        if IS_HAIKU:
            self.bring_tagger_front()
        # We should just return True here, except that seems to
        # cause the event's sender to get a -9874 error, so
        # apparently there's some magic inside QFileOpenEvent...
        return 1
    return super().event(event)
def _file_loaded(self, file, target=None, remove_file=False, unmatched_files=None):
    """Callback invoked once a file's metadata has been read.

    Decides where the file goes: to a track / album / non-album track if
    it carries valid MusicBrainz IDs (unless ignore_file_mbids is set),
    otherwise to `target`; files that end up nowhere are collected into
    `unmatched_files` and may be analyzed and/or clustered.
    """
    config = get_config()
    self._pending_files_count -= 1
    # All pending loads finished — re-enable UI sorting.
    if self._pending_files_count == 0:
        self.window.set_sorting(True)
    if remove_file:
        file.remove()
        return
    if file is None:
        return
    if file.has_error():
        self.unclustered_files.add_file(file)
        return
    file_moved = False
    if not config.setting["ignore_file_mbids"]:
        recordingid = file.metadata.getall('musicbrainz_recordingid')
        recordingid = recordingid[0] if recordingid else ''
        is_valid_recordingid = mbid_validate(recordingid)
        albumid = file.metadata.getall('musicbrainz_albumid')
        albumid = albumid[0] if albumid else ''
        is_valid_albumid = mbid_validate(albumid)
        if is_valid_albumid and is_valid_recordingid:
            log.debug("%r has release (%s) and recording (%s) MBIDs, moving to track...",
                      file, albumid, recordingid)
            self.move_file_to_track(file, albumid, recordingid)
            file_moved = True
        elif is_valid_albumid:
            log.debug("%r has only release MBID (%s), moving to album...",
                      file, albumid)
            self.move_file_to_album(file, albumid)
            file_moved = True
        elif is_valid_recordingid:
            log.debug("%r has only recording MBID (%s), moving to non-album track...",
                      file, recordingid)
            self.move_file_to_nat(file, recordingid)
            file_moved = True
    if not file_moved:
        target = self.move_file(file, target)
        if target and target != self.unclustered_files:
            file_moved = True
    if not file_moved and unmatched_files is not None:
        unmatched_files.append(file)
    # fallback on analyze if nothing else worked
    if not file_moved and config.setting['analyze_new_files'] and file.can_analyze():
        log.debug("Trying to analyze %r ...", file)
        self.analyze([file])
    # Auto cluster newly added files if they are not explicitly moved elsewhere
    if self._pending_files_count == 0 and unmatched_files and config.setting["cluster_new_files"]:
        self.cluster(unmatched_files)
def move_file(self, file, target):
    """Moves a file to target, if possible

    Returns the actual target the files has been moved to or None
    """
    if isinstance(target, Album):
        self.move_files_to_album([file], album=target)
    else:
        # Dropping onto a file means dropping onto its container.
        if isinstance(target, File) and target.parent:
            target = target.parent
        if not file.move(target):
            # Ensure a file always has a parent so it shows up in UI
            if not file.parent:
                target = self.unclustered_files
                file.move(target)
            # Unsupported target, do not move the file
            else:
                target = None
    return target
def move_files(self, files, target, move_to_multi_tracks=True):
    """Move several files to `target`, dispatching on its type
    (Cluster, Track, File, Album or ClusterList).

    With `move_to_multi_tracks`, consecutive files dropped on a track
    are spread over the album's following tracks.
    """
    if target is None:
        log.debug("Aborting move since target is invalid")
        return
    # Suspend sorting while many items move around.
    self.window.set_sorting(False)
    if isinstance(target, Cluster):
        for file in process_events_iter(files):
            file.move(target)
    elif isinstance(target, Track):
        album = target.album
        for file in process_events_iter(files):
            file.move(target)
            if move_to_multi_tracks:  # Assign next file to following track
                target = album.get_next_track(target) or album.unmatched_files
    elif isinstance(target, File):
        for file in process_events_iter(files):
            file.move(target.parent)
    elif isinstance(target, Album):
        self.move_files_to_album(files, album=target)
    elif isinstance(target, ClusterList):
        self.cluster(files)
    self.window.set_sorting(True)
def add_files(self, filenames, target=None):
    """Add files to the tagger.

    Args:
        filenames: iterable of file paths to add
        target: optional object (album, track, cluster, ...) the files
            should be moved to once they finished loading
    """
    ignoreregex = None
    config = get_config()
    pattern = config.setting['ignore_regex']
    if pattern:
        try:
            ignoreregex = re.compile(pattern)
        except re.error as e:
            log.error("Failed evaluating regular expression for ignore_regex: %s", e)
    ignore_hidden = config.setting["ignore_hidden_files"]
    new_files = []
    for filename in filenames:
        filename = normpath(filename)
        if ignore_hidden and is_hidden(filename):
            # FIX: use lazy %-style logging arguments, consistent with the
            # other log calls in this file, instead of eager "%" formatting.
            log.debug("File ignored (hidden): %r", filename)
            continue
        # Ignore .smbdelete* files which Apple iOS SMB creates by renaming
        # a file when it cannot delete it
        if os.path.basename(filename).startswith(".smbdelete"):
            log.debug("File ignored (.smbdelete): %r", filename)
            continue
        if ignoreregex is not None and ignoreregex.search(filename):
            log.info("File ignored (matching %r): %r", pattern, filename)
            continue
        if filename not in self.files:
            file = open_file(filename)
            if file:
                self.files[filename] = file
                new_files.append(file)
            QtCore.QCoreApplication.processEvents()
    if new_files:
        log.debug("Adding files %r", new_files)
        new_files.sort(key=lambda x: x.filename)
        self.window.set_sorting(False)
        self._pending_files_count += len(new_files)
        unmatched_files = []
        for i, file in enumerate(new_files):
            file.load(partial(self._file_loaded, target=target, unmatched_files=unmatched_files))
            # Calling processEvents helps processing the _file_loaded
            # callbacks in between, which keeps the UI more responsive.
            # Avoid calling it too often to not slow down the loading too much.
            # Using an uneven number to have the unclustered file counter
            # not look stuck in certain digits.
            if i % 17 == 0:
                QtCore.QCoreApplication.processEvents()
@staticmethod
def _scan_paths_recursive(paths, recursive, ignore_hidden):
    """Generator yielding the file paths found below *paths*.

    Directories are expanded (recursively when *recursive* is true);
    hidden entries are skipped when *ignore_hidden* is true. Unreadable
    paths are logged and skipped.
    """
    local_paths = list(paths)
    while local_paths:
        current_path = normpath(local_paths.pop(0))
        try:
            if os.path.isdir(current_path):
                for entry in os.scandir(current_path):
                    if ignore_hidden and is_hidden(entry.path):
                        continue
                    if recursive and entry.is_dir():
                        local_paths.append(entry.path)
                    else:
                        yield entry.path
            else:
                yield current_path
        except OSError as err:
            log.warning(err)

def add_paths(self, paths, target=None):
    # Add files and/or directories, honoring the user's recursion and
    # hidden-file settings.
    config = get_config()
    files = self._scan_paths_recursive(paths,
                                       config.setting['recursively_add_files'],
                                       config.setting["ignore_hidden_files"])
    self.add_files(files, target=target)
def get_file_lookup(self):
    """Return a FileLookup object."""
    config = get_config()
    return FileLookup(self, config.setting["server_host"],
                      config.setting["server_port"],
                      self.browser_integration.port)

def search(self, text, search_type, adv=False, mbid_matched_callback=None, force_browser=False):
    """Search on the MusicBrainz website.

    Args:
        text: the search query (may also be an MBID or URL)
        search_type: one of 'track', 'album' or 'artist'
        adv: whether to use the advanced query syntax
        mbid_matched_callback: invoked when *text* turned out to be an MBID
        force_browser: open the website even if built-in search is enabled
    """
    search_types = {
        'track': {
            'entity': 'recording',
            'dialog': TrackSearchDialog
        },
        'album': {
            'entity': 'release',
            'dialog': AlbumSearchDialog
        },
        'artist': {
            'entity': 'artist',
            'dialog': ArtistSearchDialog
        },
    }
    if search_type not in search_types:
        return
    search = search_types[search_type]
    lookup = self.get_file_lookup()
    config = get_config()
    if config.setting["builtin_search"] and not force_browser:
        # Try a direct MBID lookup first; fall back to the search dialog.
        if not lookup.mbid_lookup(text, search['entity'],
                                  mbid_matched_callback=mbid_matched_callback):
            dialog = search['dialog'](self.window)
            dialog.search(text)
            dialog.exec_()
    else:
        lookup.search_entity(search['entity'], text, adv, mbid_matched_callback=mbid_matched_callback)

def collection_lookup(self):
    """Lookup the users collections on the MusicBrainz website."""
    lookup = self.get_file_lookup()
    config = get_config()
    lookup.collection_lookup(config.persist["oauth_username"])
def browser_lookup(self, item):
    """Lookup the object's metadata on the MusicBrainz website."""
    lookup = self.get_file_lookup()
    metadata = item.metadata
    # Only lookup via MB IDs if matched to a DataObject; otherwise ignore and use metadata details
    if isinstance(item, DataObject):
        itemid = item.id
        if isinstance(item, Track):
            lookup.recording_lookup(itemid)
        elif isinstance(item, Album):
            lookup.album_lookup(itemid)
    else:
        # Unmatched item: fall back to a tag-based website lookup.
        lookup.tag_lookup(
            metadata["albumartist"] if item.is_album_like() else metadata["artist"],
            metadata["album"],
            metadata["title"],
            metadata["tracknumber"],
            '' if item.is_album_like() else str(metadata.length),
            item.filename if isinstance(item, File) else '')

def get_files_from_objects(self, objects, save=False):
    """Return list of unique files from list of albums, clusters, tracks or files.

    Note: Consider using picard.util.iter_files_from_objects instead, which returns an iterator.
    """
    return list(iter_files_from_objects(objects, save=save))

def save(self, objects):
    """Save the specified objects."""
    for file in iter_files_from_objects(objects, save=True):
        file.save()
def load_mbid(self, type, mbid):
    """Load an entity by MBID and bring the main window to the front."""
    self.bring_tagger_front()
    if type == 'album':
        self.load_album(mbid)
    elif type == 'nat':
        self.load_nat(mbid)
    else:
        log.warning('Unknown type to load: %s', type)

def load_album(self, album_id, discid=None):
    """Load the album *album_id*, reusing an already loaded instance."""
    # Resolve redirected (merged/renamed) release MBIDs first.
    album_id = self.mbid_redirects.get(album_id, album_id)
    album = self.albums.get(album_id)
    if album:
        log.debug("Album %s already loaded.", album_id)
        album.add_discid(discid)
        return album
    album = Album(album_id, discid=discid)
    self.albums[album_id] = album
    self.album_added.emit(album)
    album.load()
    return album

def load_nat(self, nat_id, node=None):
    """Load a non-album track, creating the special NAT album if needed."""
    self.create_nats()
    nat = self.get_nat_by_id(nat_id)
    if nat:
        log.debug("NAT %s already loaded.", nat_id)
        return nat
    nat = NonAlbumTrack(nat_id)
    self.nats.tracks.append(nat)
    self.nats.update(True)
    if node:
        # Reuse already fetched recording data instead of reloading.
        nat._parse_recording(node)
    else:
        nat.load()
    return nat

def get_nat_by_id(self, nat_id):
    # Return the loaded non-album track with the given MBID, or None.
    if self.nats is not None:
        for nat in self.nats.tracks:
            if nat.id == nat_id:
                return nat

def get_release_group_by_id(self, rg_id):
    # Return the cached ReleaseGroup for rg_id, creating it on first use.
    return self.release_groups.setdefault(rg_id, ReleaseGroup(rg_id))
def remove_files(self, files, from_parent=True):
    """Remove files from the tagger.

    Cancels pending lookups and fingerprint analysis for each file
    before dropping it from the internal registry.
    """
    for file in files:
        if file.filename in self.files:
            file.clear_lookup_task()
            self._acoustid.stop_analyze(file)
            del self.files[file.filename]
            file.remove(from_parent)
    self.tagger_stats_changed.emit()

def remove_album(self, album):
    """Remove the specified album."""
    log.debug("Removing %r", album)
    if album.id not in self.albums:
        return
    album.stop_loading()
    self.remove_files(list(album.iterfiles()))
    del self.albums[album.id]
    if album.release_group:
        album.release_group.remove_album(album.id)
    # The NAT pseudo-album may be removed through this path as well.
    if album == self.nats:
        self.nats = None
    self.album_removed.emit(album)
    run_album_post_removal_processors(album)
    self.tagger_stats_changed.emit()

def remove_nat(self, track):
    """Remove the specified non-album track."""
    log.debug("Removing %r", track)
    self.remove_files(list(track.iterfiles()))
    if not self.nats:
        return
    self.nats.tracks.remove(track)
    if not self.nats.tracks:
        # Drop the NAT pseudo-album once its last track is gone.
        self.remove_album(self.nats)
    else:
        self.nats.update(True)

def remove_cluster(self, cluster):
    """Remove the specified cluster."""
    if not cluster.special:
        log.debug("Removing %r", cluster)
        files = list(cluster.files)
        cluster.files = []
        cluster.clear_lookup_task()
        self.remove_files(files, from_parent=False)
        self.clusters.remove(cluster)
        self.cluster_removed.emit(cluster)

def remove(self, objects):
    """Remove the specified objects (files, tracks, albums, clusters)."""
    files = []
    with self.window.ignore_selection_changes:
        for obj in objects:
            if isinstance(obj, File):
                files.append(obj)
            elif isinstance(obj, NonAlbumTrack):
                self.remove_nat(obj)
            elif isinstance(obj, Track):
                files.extend(obj.files)
            elif isinstance(obj, Album):
                self.window.set_statusbar_message(
                    N_("Removing album %(id)s: %(artist)s - %(album)s"),
                    {
                        'id': obj.id,
                        'artist': obj.metadata['albumartist'],
                        'album': obj.metadata['album']
                    }
                )
                self.remove_album(obj)
            elif isinstance(obj, UnclusteredFiles):
                files.extend(list(obj.files))
            elif isinstance(obj, Cluster):
                self.remove_cluster(obj)
        if files:
            self.remove_files(files)
def _lookup_disc(self, disc, result=None, error=None):
    # Callback for the disc-read task: restore the cursor, then either
    # report the read error or continue with the MusicBrainz lookup.
    self.restore_cursor()
    if error is not None:
        QtWidgets.QMessageBox.critical(self.window, _("CD Lookup Error"),
                                       _("Error while reading CD:\n\n%s") % error)
    else:
        disc.lookup()

def lookup_cd(self, action):
    """Reads CD from the selected drive and tries to lookup the DiscID on MusicBrainz."""
    config = get_config()
    if isinstance(action, QtWidgets.QAction):
        data = action.data()
        if data == 'logfile:eac':
            return self.lookup_discid_from_logfile()
        else:
            device = data
    elif config.setting["cd_lookup_device"] != '':
        device = config.setting["cd_lookup_device"].split(",", 1)[0]
    else:
        # rely on python-discid auto detection
        device = None
    disc = Disc()
    self.set_wait_cursor()
    thread.run_task(
        partial(disc.read, encode_filename(device)),
        partial(self._lookup_disc, disc),
        traceback=self._debug)

def lookup_discid_from_logfile(self):
    """Ask for a ripper log file and look up the disc ID parsed from it."""
    file_chooser = QtWidgets.QFileDialog(self.window)
    file_chooser.setNameFilters([
        _("EAC / XLD / Whipper log files") + " (*.log)",
        _("All files") + " (*)",
    ])
    if file_chooser.exec_():
        files = file_chooser.selectedFiles()
        disc = Disc()
        self.set_wait_cursor()
        thread.run_task(
            partial(self._parse_disc_ripping_log, disc, files[0]),
            partial(self._lookup_disc, disc),
            traceback=self._debug)

def _parse_disc_ripping_log(self, disc, path):
    # Try the EAC/XLD parser first, then the Whipper parser; re-raise if
    # both fail so the task callback reports the error to the user.
    try:
        log.debug('Trying to parse "%s" as EAC / XLD log...', path)
        toc = eaclog.toc_from_file(path)
    except Exception:
        try:
            log.debug('Trying to parse "%s" as Whipper log...', path)
            toc = whipperlog.toc_from_file(path)
        except Exception:
            log.warning('Failed parsing ripping log "%s"', path, exc_info=True)
            raise
    disc.put(toc)
@property
def use_acoustid(self):
    # True when AcoustID is the configured fingerprinting system.
    config = get_config()
    return config.setting["fingerprinting_system"] == "acoustid"

def analyze(self, objs):
    """Analyze the file(s)."""
    if not self.use_acoustid:
        return
    for file in iter_files_from_objects(objs):
        if file.can_analyze():
            file.set_pending()
            self._acoustid.analyze(file, partial(file._lookup_finished, File.LOOKUP_ACOUSTID))

def generate_fingerprints(self, objs):
    """Generate the fingerprints without matching the files."""
    if not self.use_acoustid:
        return

    def finished(file, result):
        # Only clear the pending marker; no lookup is performed.
        file.clear_pending()

    for file in iter_files_from_objects(objs):
        file.set_pending()
        self._acoustid.fingerprint(file, partial(finished, file))

# =======================================================================
#  Metadata-based lookups
# =======================================================================

def autotag(self, objects):
    # Trigger a metadata-based lookup for every object supporting it.
    for obj in objects:
        if obj.can_autotag():
            obj.lookup_metadata()

# =======================================================================
#  Clusters
# =======================================================================
def cluster(self, objs, callback=None):
    """Group files with similar metadata to 'clusters'."""
    log.debug("Clustering %r", objs)
    files = iter_files_from_objects(objs)
    try:
        file = next(files)
    except StopIteration:
        # Nothing selected: cluster all currently unclustered files.
        files = self.unclustered_files.files
    else:
        # Put the consumed first element back in front of the iterator.
        files = itertools.chain([file], files)
    thread.run_task(
        partial(self._do_clustering, list(files)),
        partial(self._clustering_finished, callback))

def _do_clustering(self, files):
    # The clustering algorithm should completely run in the thread,
    # hence do not return the iterator.
    return list(Cluster.cluster(files))

def _clustering_finished(self, callback, result=None, error=None):
    # Main-thread callback applying the clusters computed in _do_clustering.
    if error:
        log.error('Error while clustering: %r', error)
        return
    with self.window.ignore_selection_changes:
        self.window.set_sorting(False)
        for file_cluster in process_events_iter(result):
            files = set(file_cluster.files)
            if len(files) > 1:
                cluster = self.load_cluster(file_cluster.title, file_cluster.artist)
            else:
                # Single files stay in the special "unclustered" bucket.
                cluster = self.unclustered_files
            cluster.add_files(files)
        self.window.set_sorting(True)
    if callback:
        callback()

def load_cluster(self, name, artist):
    # Return an existing cluster matching (album, albumartist) or create one.
    for cluster in self.clusters:
        cm = cluster.metadata
        if name == cm["album"] and artist == cm["albumartist"]:
            return cluster
    cluster = Cluster(name, artist)
    self.clusters.append(cluster)
    self.cluster_added.emit(cluster)
    return cluster
# =======================================================================
#  Utils
# =======================================================================

def set_wait_cursor(self):
    """Sets the waiting cursor."""
    super().setOverrideCursor(
        QtGui.QCursor(QtCore.Qt.CursorShape.WaitCursor))

def restore_cursor(self):
    """Restores the cursor set by ``set_wait_cursor``."""
    super().restoreOverrideCursor()

def refresh(self, objs):
    # Reload the given objects from the server with priority.
    for obj in objs:
        if obj.can_refresh():
            obj.load(priority=True, refresh=True)

def bring_tagger_front(self):
    # Un-minimize, raise and activate the main window.
    self.window.setWindowState(self.window.windowState() & ~QtCore.Qt.WindowState.WindowMinimized | QtCore.Qt.WindowState.WindowActive)
    self.window.raise_()
    self.window.activateWindow()

@classmethod
def instance(cls):
    # Singleton accessor for the running application instance.
    return cls.__instance

def signal(self, signum, frame):
    log.debug("signal %i received", signum)
    # Send a notification about a received signal from the signal handler
    # to Qt.
    self.signalfd[0].sendall(b"a")

def sighandler(self):
    # Runs on the Qt side when the signal socket becomes readable;
    # shuts the application down.
    self.signalnotifier.setEnabled(False)
    self.exit()
    self.quit()
    self.signalnotifier.setEnabled(True)
def version():
    """Print the short version string."""
    print("%s %s %s" % (PICARD_ORG_NAME, PICARD_APP_NAME, PICARD_FANCY_VERSION_STR))


def longversion():
    """Print the full version information."""
    print(versions.as_string())


def process_picard_args():
    """Parse command-line arguments.

    Returns:
        (picard_args, unparsed_args) as produced by
        argparse's ``parse_known_args()``.
    """
    parser = argparse.ArgumentParser(
        epilog="If one of the filenames begins with a hyphen, use -- to separate the options from the filenames."
    )
    # Qt default arguments. Parse them so Picard does not interpret the
    # arguments as file names to load.
    parser.add_argument("-style", nargs=1, help=argparse.SUPPRESS)
    parser.add_argument("-stylesheet", nargs=1, help=argparse.SUPPRESS)
    # Same for default X arguments
    parser.add_argument("-display", nargs=1, help=argparse.SUPPRESS)
    # Picard specific arguments
    parser.add_argument("-c", "--config-file", action='store',
                        default=None,
                        help="location of the configuration file")
    parser.add_argument("-d", "--debug", action='store_true',
                        help="enable debug-level logging")
    parser.add_argument("-M", "--no-player", action='store_true',
                        help="disable built-in media player")
    parser.add_argument("-N", "--no-restore", action='store_true',
                        help="do not restore positions and/or sizes")
    parser.add_argument("-P", "--no-plugins", action='store_true',
                        help="do not load any plugins")
    parser.add_argument("--no-crash-dialog", action='store_true',
                        help="disable the crash dialog")
    parser.add_argument('-v', '--version', action='store_true',
                        help="display version information and exit")
    parser.add_argument("-V", "--long-version", action='store_true',
                        help="display long version information and exit")
    parser.add_argument('FILE', nargs='*')
    picard_args, unparsed_args = parser.parse_known_args()
    return picard_args, unparsed_args
class OverrideStyle(QtWidgets.QProxyStyle):
    """Override the default style to fix some platform specific issues"""

    def styleHint(self, hint, option, widget, returnData):
        # This is disabled on macOS, but prevents collapsing tree view items easily with
        # left arrow key. Enable this consistently on all platforms.
        # See https://tickets.metabrainz.org/browse/PICARD-2417
        # and https://bugreports.qt.io/browse/QTBUG-100305
        if hint == QtWidgets.QStyle.StyleHint.SH_ItemView_ArrowKeysNavigateIntoChildren:
            return True
        return super().styleHint(hint, option, widget, returnData)
def main(localedir=None, autoupdate=True):
    """Application entry point: configure Qt, parse args, run the Tagger."""
    # Some libs (ie. Phonon) require those to be set
    QtWidgets.QApplication.setApplicationName(PICARD_APP_NAME)
    QtWidgets.QApplication.setOrganizationName(PICARD_ORG_NAME)
    QtWidgets.QApplication.setDesktopFileName(PICARD_DESKTOP_NAME)

    # Allow High DPI Support
    QtWidgets.QApplication.setAttribute(QtCore.Qt.ApplicationAttribute.AA_UseHighDpiPixmaps)
    QtWidgets.QApplication.setAttribute(QtCore.Qt.ApplicationAttribute.AA_EnableHighDpiScaling)
    # HighDpiScaleFactorRoundingPolicy is available since Qt 5.14. This is
    # required to support fractional scaling on Windows properly.
    # It causes issues without scaling on Linux, see https://tickets.metabrainz.org/browse/PICARD-1948
    if IS_WIN and hasattr(QtGui.QGuiApplication, 'setHighDpiScaleFactorRoundingPolicy'):
        QtGui.QGuiApplication.setHighDpiScaleFactorRoundingPolicy(
            QtCore.Qt.HighDpiScaleFactorRoundingPolicy.PassThrough)

    # Enable mnemonics on all platforms, even macOS
    QtGui.qt_set_sequence_auto_mnemonic(True)

    # Restore default SIGINT handling so Ctrl+C terminates the app.
    signal.signal(signal.SIGINT, signal.SIG_DFL)

    picard_args, unparsed_args = process_picard_args()
    if picard_args.version:
        return version()
    if picard_args.long_version:
        return longversion()

    # Register on the D-Bus session bus where available (Linux desktops).
    try:
        from PyQt5.QtDBus import QDBusConnection
        dbus = QDBusConnection.sessionBus()
        dbus.registerService(PICARD_APP_ID)
    except ImportError:
        pass

    tagger = Tagger(picard_args, unparsed_args, localedir, autoupdate)

    # Initialize Qt default translations
    translator = QtCore.QTranslator()
    locale = QtCore.QLocale()
    translation_path = QtCore.QLibraryInfo.location(QtCore.QLibraryInfo.LibraryLocation.TranslationsPath)
    log.debug("Looking for Qt locale %s in %s", locale.name(), translation_path)
    if translator.load(locale, "qtbase_", directory=translation_path):
        tagger.installTranslator(translator)
    else:
        log.debug('Qt locale %s not available', locale.name())

    tagger.startTimer(1000)
    sys.exit(tagger.run())
|
metabrainz/picard
|
picard/tagger.py
|
Python
|
gpl-2.0
| 41,007
|
from numba import cuda, float32
def make_loop(cfun, model, n_svar):
    "Construct CUDA device function for integration loop."
    @cuda.jit
    def loop(n_step, W, X, G):
        # TODO only for 1D grid/block dims
        # Global thread index: each thread handles one column t of X/G.
        t = cuda.blockIdx.x * cuda.blockDim.x + cuda.threadIdx.x
        ti = cuda.threadIdx.x
        # Shared-memory scratch for this thread's state variables.
        # NOTE(review): the second dimension hard-codes a block size of 64
        # threads — confirm against the kernel launch configuration.
        x = cuda.shared.array((n_svar, 64), float32)
        g = G[t]
        for j in range(n_step):
            for i in range(W.shape[0]):
                # Stage state in shared memory, apply model + coupling,
                # then write the updated state back to global memory.
                x[:, ti] = X[i, :, t]
                model(x, g * cfun(W, X, i, t))
                X[i, :, t] = x[:, ti]
    # TODO hack
    # Expose the state-variable count on the compiled kernel for callers.
    loop.n_svar = n_svar
    return loop
|
stuart-knock/tvb-library
|
tvb/simulator/_numba/loops.py
|
Python
|
gpl-2.0
| 636
|
from __future__ import unicode_literals
#from django.db import models
from abstract_component.models import Component
from exercises.models import GradedExercise
class Practicer(Component):
    """
    Model for practicer component.
    """

    # Directory (relative path) where practicer behavior modules live.
    BEHAVIORS_PATH = 'practice/practicer-behaviors/'

    @classmethod
    def get_behaviors_path(cls):
        # Component API hook: where to discover behavior implementations.
        return cls.BEHAVIORS_PATH

    def next_exercise(self, unused_graded_exercises, accumulated_feedback,
            feedbacked_exercises):
        """
        Returns new exercise.

        Args:
            unused_graded_exercises: collection of exercises and their grades
                which were not already used
            accumulated_feedback: feedback from previous exercises to help us
                decide which exercise is best for the user
            feedbacked_exercises: collection of feedbacked finished exercises
        Returns:
            new exercise || None if there is no exercise left
        """
        if not unused_graded_exercises:
            return None
        # Delegate the selection strategy to the configured behavior.
        behavior = self.get_behavior()
        graded_exercise = behavior.next_exercise(unused_graded_exercises,
                accumulated_feedback, feedbacked_exercises)
        assert isinstance(graded_exercise, GradedExercise)
        #exercise = graded_exercise.exercise
        return graded_exercise

    def __str__(self):
        # NOTE(review): relies on the Python 2 `unicode` builtin; this
        # breaks under Python 3.
        return unicode(self).encode('utf-8')

    def __unicode__(self):
        return '<Practicer {name}; parameters={parameters}>'.format(
            name=self.behavior_name,
            parameters=self.parameters)
|
effa/smartoo
|
practice/models.py
|
Python
|
gpl-2.0
| 1,593
|
class deferedrun(object):
    """Continuation descriptor returned by a task stage.

    Tells the task queue which callable constitutes the next stage of the
    task (*runnable*) and which requirements must be fulfilled before it
    may run (*requirements*).
    """

    def __init__(self, runnable, requirements):
        self.runnable = runnable          # callable for the next stage
        self.requirements = requirements  # prerequisites for that stage
class Task(object):
    """Base class for queueable tasks."""

    def preFlight(self):
        """First stage of the task.

        The default implementation simply defers to :meth:`run`, requiring
        everything reported by :meth:`require`.
        """
        return deferedrun(self.run, self.require())

    def require(self):
        """Return the tasks required to run this task.

        All needed tasks should be instantiated, i.e. return objects, not
        classes. The data produced by each required task is piped to
        :meth:`run` in the order returned here. Default: no requirements.
        """
        pass

    def run(self, *require):
        """Execute the task once all requirements are fulfilled.

        Called with the requirements' results unpacked in the same order
        as :meth:`require` returned its dependencies; ``*require`` is a
        placeholder for this. May return anything intended for a
        triggering task.
        """
        raise NotImplementedError("A Task must implement the run method")
class SubjectTask(Task):
    """A task that runs "on" a subject and is tied to that subject.

    The subject uses the :attr:`trigger` member to add this task to the
    task queue automatically, so a SubjectTask can declare *when* it
    should run while the subject and task queue keep control over the
    actual launch.
    """

    # Class-level defaults, overridden by concrete subject tasks.
    trigger = None
    demand = None
    supply = None

    def __init__(self, subject):
        self.subject = subject
topfs2/heimdall
|
src/heimdall/tasks.py
|
Python
|
gpl-2.0
| 1,865
|
'''
Created on 20.03.2014

@author: Charun
'''

# RNA codon table: maps each triplet to its one-letter amino acid code;
# the three stop codons map to "STOP".
codons = {"UUU": "F", "UUC": "F", "UUA": "L", "UUG": "L", "CUU": "L", "CUC": "L",
          "CUA": "L", "CUG": "L", "AUU": "I", "AUC": "I", "AUA": "I", "AUG": "M",
          "GUU": "V", "GUC": "V", "GUA": "V", "GUG": "V", "UCU": "S", "UCC": "S",
          "UCA": "S", "UCG": "S", "CCU": "P", "CCC": "P", "CCA": "P", "CCG": "P",
          "ACU": "T", "ACC": "T", "ACA": "T", "ACG": "T", "GCU": "A", "GCC": "A",
          "GCA": "A", "GCG": "A", "UAU": "Y", "UAC": "Y", "UAA": "STOP", "UAG": "STOP",
          "CAU": "H", "CAC": "H", "CAA": "Q", "CAG": "Q", "AAU": "N", "AAC": "N",
          "AAA": "K", "AAG": "K", "GAU": "D", "GAC": "D", "GAA": "E", "GAG": "E",
          "UGU": "C", "UGC": "C", "UGA": "STOP", "UGG": "W", "CGU": "R", "CGC": "R",
          "CGA": "R", "CGG": "R", "AGU": "S", "AGC": "S", "AGA": "R", "AGG": "R",
          "GGU": "G", "GGC": "G", "GGA": "G", "GGG": "G"}


def translate(rna):
    """Translate an RNA sequence into a protein string.

    Codons are read left to right from position 0; translation stops at
    the first stop codon (UAA/UAG/UGA) or when fewer than three bases
    remain.
    """
    protein = []
    for i in range(0, len(rna) - 2, 3):
        amino = codons[rna[i:i + 3]]
        if amino == "STOP":
            break
        protein.append(amino)
    return "".join(protein)


if __name__ == "__main__":
    # Strip surrounding whitespace so a trailing newline in the input file
    # cannot produce an invalid (KeyError-raising) final codon.
    with open("input.txt", "r") as textfile:
        RNAseq = textfile.read().strip()
    with open("output.txt", "w") as textfile:
        textfile.write(translate(RNAseq))
|
David-J-R/Rosalind-Solutions
|
src/bioinformatics_stronghold/prot/prot.py
|
Python
|
gpl-2.0
| 1,420
|
import pandas as pd
import numpy as np
from distance_matrix_generator import *

# Dataset: Linux distribution derivation tree (distro -> parent)
df_distro = pd.read_csv('./data/distro_parent.csv', skipinitialspace=True)

# Dataset: geographic distances, normalized to [0, 1]
df_geo = np.genfromtxt("./data/geo_distance_norm.csv", delimiter=",",dtype=str)
# Builds a complete symmetric graph by computing all pairwise distances
# between the records of the dataset df containing the Linux machines.
# Saved in the "Source Target Type Weight" form used by gephi.
# df - dataset containing the machines
# out_path - file the graph is appended to
# index - dataset index from which to start computing edges
def graph_distance(df,out_path,index=0):
    values = df.values
    f = open(out_path,"a")
    # Upper-triangular pair enumeration: each unordered pair once.
    for i_a in range(index,len(values)-1):
        for i_b in range(i_a+1,len(values)):
            ris = machine_distance((values[i_a],values[i_b]))
            f.write("%s %s Undirected %s\n" % (i_a,i_b,ris))
            #time.sleep(0.01)
    f.close()
# Computes the distance between two machines.
# pair - tuple containing the two machines as attribute lists
def machine_distance(pair):
    """Sum of per-attribute distances between two machine records.

    Each record is an indexable sequence whose positions 1..7 hold:
    num_cores, kernel, class, cpu, country, architecture, distro.
    """
    a = pair[0]
    b = pair[1]
    numCores_dist = abs(a[1] - b[1])
    kernel_dist = kernel_distance(a[2], b[2])
    class_dist = class_distance(a[3], b[3])
    cpu_dist = cpu_distance(a[4], b[4])
    country_dist = country_distance(a[5], b[5])
    architecture_dist = architecture_distance(a[6], b[6])
    distro_dist = distribution_dist(a[7], b[7])
    # BUG FIX: cpu_dist was computed but omitted from the returned sum, so
    # the CPU attribute never contributed to the machine distance.
    return (numCores_dist + kernel_dist + class_dist + cpu_dist
            + country_dist + architecture_dist + distro_dist)
# Retrieves the precomputed distance between two locations from file data.
def country_distance(a,b,df=df_geo):
    """Normalized geographic distance between locations *a* and *b*.

    Scans the rows of *df* (default: the module-level geo-distance table)
    for the first row containing both names and returns its third column
    as a float; falls through to None when no row matches.
    """
    matching = (row for row in df if a in row and b in row)
    for row in matching:
        return float(row[2])
# Distance between two strings a and b representing two CPUs.
def cpu_distance(a,b):
    # 1 - string similarity in "matcher" mode (helper from
    # distance_matrix_generator, star-imported above).
    return 1 - similarity_string(a,b,"matcher")

# Distance between two strings a and b representing two architectures.
def architecture_distance(a,b):
    return 1 - similarity_string(a,b,"matcher")
# Distance between two computer classes a and b, using categories that
# group similar classes together.
def class_distance(a,b):
    """0 for identical classes, 0.5 when both fall in one category, else 1."""
    # NOTE: "smarphone" reproduces the dataset's spelling on purpose.
    categories = (
        {"desktop", "games", "laptop", "netbook", "notebook", "personal",
         "workstation"},
        {"embedded", "raspberry pi", "smarphone", "tablet"},
        {"server", "server/workstation", "workstation"},
    )
    if a == b:
        return 0
    if any(a in group and b in group for group in categories):
        return 0.5
    return 1
# Distance between two distributions, using a tree of Linux derivations.
def distribution_dist(a,b,df=df_distro):
    """Distance between distros a and b based on derivation ancestry.

    0 if identical; 0.3 if one is the other's parent or they share a
    parent; 0.6 if they share a grandparent; 1 otherwise.
    """
    if a==b:
        return 0
    else:
        parent_a = df[df["distro"] == a]["parent"].iloc[0]
        parent_b = df[df["distro"] == b]["parent"].iloc[0]
        if a == parent_b or b == parent_a or parent_a == parent_b:
            return 0.3
        else:
            # Climb one more level to the grandparents.
            parent_a = df[df["distro"] == parent_a]["parent"].iloc[0]
            parent_b = df[df["distro"] == parent_b]["parent"].iloc[0]
            if parent_a == parent_b:
                return 0.6
            else:
                return 1
# Returns the distance between two kernels by turning them into integers
# and computing the difference.
def kernel_distance(a,b):
    """Normalized distance between kernel version strings a and b.

    Versions are flattened to integers ("3.2.0" -> 320, "+" dropped) and
    min-max normalized before taking the absolute difference.
    """
    a_int_norm = norm_kernel(int("".join(str(a).replace("+","").split("."))))
    b_int_norm = norm_kernel(int("".join(str(b).replace("+","").split("."))))
    if (a.find("+") != -1) != (b.find("+") != -1):
        # Exactly one of the two kernels carries a "+" (patched) marker.
        # NOTE(review): norm_kernel(1) is negative with the current bounds
        # (min_kernel=32), so this actually *increases* the distance
        # slightly — confirm that is the intent.
        return abs(a_int_norm - b_int_norm) - norm_kernel(1)
    else:
        return abs(a_int_norm - b_int_norm)
# Min-max normalization of the integer that represents a kernel version,
# using the highest and lowest values present in the dataset.
# k - kernel integer to normalize
def norm_kernel(k):
    """Normalize *k* against the dataset's kernel bounds (32 .. 34110)."""
    max_kernel = 34110
    min_kernel = 32
    span = max_kernel - min_kernel
    return (k - min_kernel) / span
# Returns two integers representing the largest and smallest kernel
# (as integers) within the dataset.
# kernels - list of kernel version strings
def min_max_kernel(kernels):
    """Return (max, min) of the kernels converted to flat integers.

    A kernel such as "4.10+" is flattened by dropping "+" and "." so it
    becomes 410.
    """
    as_int = []
    for k in kernels:
        as_int.append(int(str(k).replace("+", "").replace(".", "")))
    return max(as_int), min(as_int)
|
pigna90/ars-project
|
attributes_distance.py
|
Python
|
gpl-2.0
| 3,996
|
#!/usr/bin/python
"""
This is a unittest for kvm_qtree library.
@author: Lukas Doktor <ldoktor@redhat.com>
@copyright: 2012 Red Hat, Inc.
"""
__author__ = """Lukas Doktor (ldoktor@redhat.com)"""
import unittest
import sys
import os
import common
from autotest.client.shared.test_utils import mock
# Locate the tests' parent directory (two levels above this module) so the
# virttest package can be imported without installation.
test_dir = os.path.abspath(os.path.dirname(os.path.dirname(sys.modules[__name__].__file__)))
print test_dir
sys.path.append(test_dir)

from virttest import kvm_qtree

# Shortcut for the qtree indentation width used when composing dummy output.
OFFSET_PER_LEVEL = kvm_qtree.OFFSET_PER_LEVEL
# Dummy classes and functions
class ParamsDict(dict):
    """ params like dictionary """

    def objects(self, item):
        # Split a space-separated value into a list; falls through to None
        # when the key is missing or its value is empty.
        value = self.get(item)
        if value:
            return value.split(' ')

    def object_params(self, obj):
        # Copy of the dict where every '<param>_<obj>' entry also overrides
        # the plain '<param>' entry for the given object.
        ret = self.copy()
        suffix = '_%s' % obj
        for (param, value) in self.iteritems():
            if param.endswith(suffix):
                ret[param[:-len(suffix)]] = value
        return ret
def combine(first, second, offset):
    """ Add string line-by-line with offset*OFFSET_PER_LEVEL """
    indent = ' ' * OFFSET_PER_LEVEL * offset
    result = first[:]
    for line in second.splitlines():
        result += '\n' + indent + line
    return result
# Dummy variables

# Minimal `info qtree` header with only the system bus.
qtree_header = """bus: main-system-bus
type System
"""

# IDE controller with one ide-hd disk attached (drive ide0-hd0).
dev_ide_disk = """dev: piix3-ide, id ""
bus-prop: addr = 01.1
bus-prop: romfile = <null>
bus-prop: rombar = 1
bus-prop: multifunction = off
bus-prop: command_serr_enable = on
class IDE controller, addr 00:01.1, pci id 8086:7010 (sub 1af4:1100)
bar 4: i/o at 0xc2a0 [0xc2af]
bus: ide.0
type IDE
dev: ide-hd, id ""
dev-prop: drive = ide0-hd0
dev-prop: logical_block_size = 512
dev-prop: physical_block_size = 512
dev-prop: min_io_size = 0
dev-prop: opt_io_size = 0
dev-prop: bootindex = -1
dev-prop: discard_granularity = 0
dev-prop: ver = "1.0.50"
dev-prop: serial = "QM00001"
bus-prop: unit = 0"""

# USB controller with a hub, a tablet and a usb-storage device whose SCSI
# bus carries one scsi-disk (drive usb2.6).
dev_usb_disk = """dev: ich9-usb-uhci1, id "usb1"
dev-prop: masterbus = <null>
dev-prop: firstport = 0
bus-prop: addr = 04.0
bus-prop: romfile = <null>
bus-prop: rombar = 1
bus-prop: multifunction = off
bus-prop: command_serr_enable = on
class USB controller, addr 00:04.0, pci id 8086:2934 (sub 1af4:1100)
bar 4: i/o at 0xc280 [0xc29f]
bus: usb1.0
type USB
dev: usb-hub, id ""
bus-prop: port = <null>
addr 0.3, port 2, speed 12, name QEMU USB Hub, attached
dev: usb-tablet, id "usb-tablet1"
bus-prop: port = <null>
addr 0.4, port 2.1, speed 12, name QEMU USB Tablet, attached
dev: usb-storage, id ""
dev-prop: drive = <null>
dev-prop: logical_block_size = 512
dev-prop: physical_block_size = 512
dev-prop: min_io_size = 0
dev-prop: opt_io_size = 0
dev-prop: bootindex = -1
dev-prop: discard_granularity = 0
dev-prop: serial = <null>
dev-prop: removable = off
bus-prop: port = <null>
addr 0.2, port 1, speed 12, name QEMU USB MSD, attached
bus: scsi.0
type SCSI
dev: scsi-disk, id ""
dev-prop: drive = usb2.6
dev-prop: logical_block_size = 512
dev-prop: physical_block_size = 512
dev-prop: min_io_size = 0
dev-prop: opt_io_size = 0
dev-prop: bootindex = -1
dev-prop: discard_granularity = 0
dev-prop: ver = "1.0.50"
dev-prop: serial = <null>
dev-prop: removable = off
bus-prop: channel = 0
bus-prop: scsi-id = 0
bus-prop: lun = 0"""

# Non-disk device with irq/mmio lines, used to exercise the parser's
# handling of entries that are not disks.
dev_dummy_mmio = """dev: fw_cfg, id ""
dev-prop: ctl_iobase = 0x510
dev-prop: data_iobase = 0x511
irq 0
mmio ffffffffffffffff/0000000000000002
mmio ffffffffffffffff/0000000000000001"""

# Dummy `info block` output matching the two disks above.
info_block = ('ide0-hd0: removable=0 io-status=ok file=/tmp/vl.UWzrkU backing_'
              'file=/dummy/directory/f16-64.qcow2 ro=1 drv=qcow2 encrypted=0 '
              'bps=0 bps_rd=0 bps_wr=0 iops=0 iops_rd=0 iops_wr=0\n')
info_block += ('usb2.6: removable=0 io-status=ok file=/tmp/stg4.qcow2 ro=0 '
               'drv=qcow2 encrypted=0 bps=0 bps_rd=0 bps_wr=0 iops=0 iops_rd=0'
               ' iops_wr=0')

# Dummy guest-side /proc/scsi/scsi content with one QEMU disk.
guest_proc_scsi = """Attached devices:
Host: scsi4 Channel: 00 Id: 00 Lun: 00
Vendor: QEMU Model: QEMU HARDDISK Rev: 1.0.
Type: Direct-Access ANSI SCSI revision: 05"""

# Test parameters describing the two images configured above.
params = ParamsDict({'images': 'image1 stg4',
                     'drive_format': 'ide',
                     'drive_format_stg4': 'usb2',
                     'drive_index_image1': '0',
                     'drive_index_stg4': '6',
                     'image_format': 'qcow2',
                     'image_name': '/dummy/directory/f16-64',
                     'image_name_stg4': 'stg4',
                     'image_size': '10G',
                     'image_size_stg4': '1M',
                     'image_snapshot': 'yes',
                     'image_snapshot_stg4': 'no',
                     'image_readonly_image1': 'yes'})
class QtreeContainerTest(unittest.TestCase):
    """ QtreeContainer tests """

    def test_qtree(self):
        """ Correct workflow """
        # Expected node classes, in parsing order, for the combined dummy
        # qtree assembled below.
        reference_nodes = [kvm_qtree.QtreeDisk, kvm_qtree.QtreeBus,
                           kvm_qtree.QtreeDev, kvm_qtree.QtreeDev,
                           kvm_qtree.QtreeDev, kvm_qtree.QtreeDisk,
                           kvm_qtree.QtreeBus, kvm_qtree.QtreeDev,
                           kvm_qtree.QtreeBus, kvm_qtree.QtreeDev,
                           kvm_qtree.QtreeDev, kvm_qtree.QtreeBus]
        info = qtree_header
        info = combine(info, dev_ide_disk, 1)
        info = combine(info, dev_usb_disk, 1)
        info = combine(info, dev_dummy_mmio, 1)
        info += "\n"
        qtree = kvm_qtree.QtreeContainer()
        qtree.parse_info_qtree(info)
        nodes = qtree.get_nodes()
        self.assertEqual(len(nodes), len(reference_nodes), ("Number of parsed "
                         "nodes is not equal to the number of qtree nodes. "
                         "%s != %s" % (len(nodes), len(reference_nodes))))
        for i in xrange(len(nodes)):
            # BUG FIX: the failure message used type(reference_nodes) —
            # always the list type — instead of the offending node's type.
            self.assertTrue(isinstance(nodes[i], reference_nodes[i]),
                            ("Node %d should be class %s but is %s instead" %
                             (i, reference_nodes[i], type(nodes[i]))))
        tree = qtree.get_qtree()
        self.assertTrue(isinstance(tree.str_qtree(), str),
                        "qtree.str_qtree() returns nonstring output.")
        self.assertTrue(isinstance(str(tree), str),
                        "str(qtree) returns nonstring output.")

    def test_bad_qtree(self):
        """ Incorrect qtree """
        qtree = kvm_qtree.QtreeContainer()
        info = combine(qtree_header, "Very_bad_line", 1)
        self.assertRaises(ValueError, qtree.parse_info_qtree, info)
class QtreeDiskContainerTest(unittest.TestCase):
    """ QtreeDiskContainer tests """

    def setUp(self):
        # Get rid of logging errors
        def dumm(*args, **kvargs):
            pass
        self.god = mock.mock_god(ut=self)
        self.god.stub_with(kvm_qtree.logging, 'error', dumm)
        # Build the dummy qtree shared by both tests in this class.
        info = qtree_header
        info = combine(info, dev_ide_disk, 1)
        info = combine(info, dev_usb_disk, 1)
        info = combine(info, dev_dummy_mmio, 1)
        info += "\n"
        self.no_disks = 2
        self.qtree = kvm_qtree.QtreeContainer()
        self.qtree.parse_info_qtree(info)
        self.disks = kvm_qtree.QtreeDisksContainer(self.qtree.get_nodes())

    def tearDown(self):
        self.god.unstub_all()

    def test_check_params(self):
        """ Correct workflow """
        # All checks should report zero mismatches for consistent data.
        disks = self.disks
        self.assertEqual(len(self.disks.disks), self.no_disks)
        self.assertEqual(disks.parse_info_block(info_block), (0, 0))
        self.assertEqual(disks.generate_params(), 0)
        self.assertEqual(disks.check_disk_params(params, '/tmp'), 0)
        self.assertEqual(disks.check_guests_proc_scsi(guest_proc_scsi),
                         (0, 0, 1, 0))
        # Check the full disk output (including params)
        for disk in disks.disks:
            self.assertTrue(isinstance(str(disk), str),
                            "str(disk) returns nonstring output.")

    def test_check_params_bad(self):
        """ Whole workflow with bad data """
        disks = self.disks
        # missing disk in info block
        _info_block = info_block.split('\n')[1]
        # snapshot in info qtree but not in params
        _info_block = _info_block.replace('file=/tmp/stg4.qcow2',
                                          'file=none.qcow2 backing_file=/tmp/stg4.qcow2')
        # additional disk in info block
        _info_block += '\nmissing_bad_disk1:\n \n'
        # additional disk in params
        _params = ParamsDict(params)
        _params['images'] += ' bad_disk2'
        # Missing disk in proc_scsi
        _guest_proc_scsi = guest_proc_scsi.replace('Channel: 00',
                                                   'Channel: 01')
        # Ignored disk in proc_scsi
        _guest_proc_scsi += """
Host: scsi1 Channel: 00 Id: 00 Lun: 00
Vendor: ATA Model: QEMU HARDDISK Rev: 1.0.
Type: Direct-Access ANSI SCSI revision: 05"""
        # Each injected inconsistency shows up in the corresponding counter.
        self.assertEqual(disks.parse_info_block(_info_block), (1, 1))
        self.assertEqual(disks.generate_params(), 1)
        self.assertEqual(disks.check_disk_params(_params, '/tmp'), 4)
        self.assertEqual(disks.check_guests_proc_scsi(_guest_proc_scsi),
                         (1, 1, 1, 1))
class KvmQtreeClassTest(unittest.TestCase):
    """ Additional tests for kvm_qtree classes """
    def test_qtree_bus_bus(self):
        """ Bus' child can't be Bus() """
        test = kvm_qtree.QtreeBus()
        self.assertRaises(kvm_qtree.IncompatibleTypeError,
                          test.add_child, kvm_qtree.QtreeBus())
    def test_qtree_dev_dev(self):
        """ Dev's child can't be Dev() """
        test = kvm_qtree.QtreeDev()
        self.assertRaises(kvm_qtree.IncompatibleTypeError,
                          test.add_child, kvm_qtree.QtreeDev())
    def test_qtree_disk_missing_filename(self):
        """ info_block must contain info about file or backing_file """
        test = kvm_qtree.QtreeDisk()
        test.set_qtree({'something': 'something'})
        test.set_block_prop('prop', 'value')
        # Neither 'file' nor 'backing_file' was set above, so params
        # generation must fail.
        self.assertRaises(ValueError, test.generate_params)
if __name__ == "__main__":
""" Run unittest """
unittest.main()
|
ldoktor/virt-test
|
virttest/kvm_qtree_unittest.py
|
Python
|
gpl-2.0
| 10,535
|
import sys
from skimage.feature import hog
from skimage import exposure
from PIL import Image
import numpy
from tempfile import TemporaryFile
import random
# Patch window (rows, cols) extracted around each sampled point; odd sizes
# keep the centre pixel in the middle.
PATCH_SIZE = (55, 55)
# Full image dimensions (rows, cols) -- assumed fixed for this dataset;
# TODO confirm against the actual input images.
IMAGE_SIZE = (1024,1224)
# Number of marked (lesion) and unmarked sample points expected in the
# pre-computed sampling file.
POPULATION_M = 3000
POPULATION_U = 5000
# Per-image channel indices; channel files are named '<image>-<i>.png'.
CHANNELS = (0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
def extract_patch(ims, x, y):
    """Collect the pixel values of a PATCH_SIZE window centred on (x, y)
    from every channel image in *ims*, flattened into one list.

    Returns an empty list when the window would cross the image border.
    """
    half_x = PATCH_SIZE[0] / 2
    half_y = PATCH_SIZE[1] / 2
    # Reject centres whose window would fall outside IMAGE_SIZE.
    out_of_bounds = (x <= half_x or y <= half_y or
                     x >= IMAGE_SIZE[0] - half_x or
                     y >= IMAGE_SIZE[1] - half_y)
    if out_of_bounds:
        return []
    patch = []
    for channel in CHANNELS:
        image = ims[channel]
        for row in xrange(x - half_x, x + half_x + 1):
            for col in xrange(y - half_y, y + half_y + 1):
                patch.append(image[row][col])
    return patch
if __name__=='__main__':
    # Usage: patches.py <image base name> <mask file>
    if len(sys.argv)!=3:
        print 'usage: patches.py image_name mask_name'
        quit(-1)
    im_name = sys.argv[1]
    mask_name = sys.argv[2]
    # Pre-computed sampling coordinates, e.g. 'mask_8000_sample.npy'
    sample_name = mask_name[:-4]+'_'+str(POPULATION_M+POPULATION_U)+'_sample.npy'
    # Output files for feature matrix X and label vector Y
    np_file = open(im_name+'_X.npy', 'w+')
    np_file_y = open(im_name+'_Y.npy', 'w+')
    mask = Image.open(mask_name)
    mask = numpy.array(mask)
    sampling = numpy.load(sample_name)
    X = []
    Y = []
    marked = []
    around = []
    # Optimizing by only opening image files once
    ims = []
    for i in CHANNELS:
        channel = im_name+'-'+str(i)+'.png' # change to '.png' maybe
        im = Image.open(channel)
        # Replace the raw channel by its HOG visualisation image, rescaled
        # to [0, 1] intensities.
        fd, im = hog(im, visualise=True)
        im = exposure.rescale_intensity(im, in_range=(0, 1))
        #im = numpy.array(im, dtype='float32')
        ims.append(im)
    # extract patches from sampled points
    print 'extracting patches from - '+im_name+' ...'
    print len(sampling)
    for i,s in enumerate(sampling):
        extracted = extract_patch(ims, s[0], s[1])
        # Sampling points were generated to avoid the border, so an empty
        # patch means the sample file is inconsistent -- abort.
        if extracted==[]:
            print 'sampling error at ' + str(s) + '\nExiting...'
            quit(-1)
        X.append(extracted)
        # Label 1 where the mask is background (0); presumably mask==0 marks
        # lesion pixels in this dataset -- TODO confirm the convention.
        if mask[s[0]][s[1]] == 0:
            Y.append(1)
        else:
            Y.append(0)
    print 'saving patches ...'
    X = numpy.array(X)
    Y = numpy.array(Y)
    print X.shape
    numpy.save(np_file, X)
    numpy.save(np_file_y, Y)
|
saahil/MSSegmentation
|
utils/hog.py
|
Python
|
gpl-2.0
| 2,359
|
import sys
# Set default encoding to UTF-8
reload(sys)
# noinspection PyUnresolvedReferences
sys.setdefaultencoding('utf-8')
import time
import json
import httplib
import traceback
import click
from flask import Flask, request, render_template, url_for, redirect, g
from flask.ext.cache import Cache
from flask.ext.github import GitHub
from flask.ext.login import LoginManager, current_user
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.assets import Environment, Bundle
from werkzeug.routing import BaseConverter
from werkzeug.exceptions import HTTPException
from sqlalchemy.ext.declarative import declarative_base
from .modules.search.models import Search
from .lib.util import to_canonical, remove_ext, mkdir_safe, gravatar_url, to_dict
from .lib.hook import HookModelMeta, HookMixin
from .lib.util import is_su, in_virtualenv
from .version import __version__
class Application(Flask):
    """Flask subclass adding WSGI environ normalisation, module
    auto-discovery and JSON-friendly response conversion."""
    def __call__(self, environ, start_response):
        """WSGI entry point; normalises the environ before dispatching."""
        path_info = environ.get('PATH_INFO')
        # Strip one trailing slash so /page/ and /page map to the same rule.
        if path_info and len(path_info) > 1 and path_info.endswith('/'):
            environ['PATH_INFO'] = path_info[:-1]
        # Honour reverse-proxy headers: X-Scheme fixes url_scheme behind
        # TLS termination, X-Real-IP restores the client address.
        scheme = environ.get('HTTP_X_SCHEME')
        if scheme:
            environ['wsgi.url_scheme'] = scheme
        real_ip = environ.get('HTTP_X_REAL_IP')
        if real_ip:
            environ['REMOTE_ADDR'] = real_ip
        return super(Application, self).__call__(environ, start_response)
    def discover(self):
        """Import every module named in config['MODULES'] and wire up its
        init hook, views blueprint, click commands and request hooks."""
        import_name = 'realms.modules'
        fromlist = (
            'assets',
            'commands',
            'models',
            'views',
            'hooks'
        )
        start_time = time.time()
        # Import the package first so relative module imports resolve.
        __import__(import_name, fromlist=fromlist)
        for module_name in self.config['MODULES']:
            sources = __import__('%s.%s' % (import_name, module_name), fromlist=fromlist)
            if hasattr(sources, 'init'):
                sources.init(self)
            # Blueprint
            if hasattr(sources, 'views'):
                self.register_blueprint(sources.views.blueprint, url_prefix=self.config['RELATIVE_PATH'])
            # Click
            if hasattr(sources, 'commands'):
                cli.add_command(sources.commands.cli, name=module_name)
            # Hooks
            if hasattr(sources, 'hooks'):
                if hasattr(sources.hooks, 'before_request'):
                    self.before_request(sources.hooks.before_request)
                if hasattr(sources.hooks, 'before_first_request'):
                    self.before_first_request(sources.hooks.before_first_request)
        # print >> sys.stderr, ' * Ready in %.2fms' % (1000.0 * (time.time() - start_time))
    def make_response(self, rv):
        """Extend Flask's response conversion: None becomes 204 No Content,
        and bare list/dict view returns are serialised as JSON."""
        if rv is None:
            rv = '', httplib.NO_CONTENT
        elif not isinstance(rv, tuple):
            rv = rv,
        rv = list(rv)
        if isinstance(rv[0], (list, dict)):
            rv[0] = self.response_class(json.dumps(rv[0]), mimetype='application/json')
        return super(Application, self).make_response(tuple(rv))
class Assets(Environment):
    """webassets Environment whose register() supplies per-extension
    default filters and versioned output paths."""
    default_filters = {'js': 'rjsmin', 'css': 'cleancss'}
    default_output = {'js': 'assets/%(version)s.js', 'css': 'assets/%(version)s.css'}
    def register(self, name, *args, **kwargs):
        """Register a bundle; filters/output default from the extension of
        the first listed file unless given explicitly."""
        extension = args[0].rsplit('.', 1)[-1]
        bundle = Bundle(*args,
                        filters=kwargs.get('filters', self.default_filters[extension]),
                        output=kwargs.get('output', self.default_output[extension]))
        return super(Assets, self).register(name, bundle)
class RegexConverter(BaseConverter):
    """ Enables Regex matching on endpoints
    """
    def __init__(self, url_map, *items):
        super(RegexConverter, self).__init__(url_map)
        # First converter argument in the route rule is the raw pattern,
        # e.g. <regex("[a-z]+"):name>.
        self.regex = items[0]
def redirect_url(referrer=None):
    """Pick a post-action redirect target: the ?next= query parameter if
    present, otherwise the given (or current request's) referrer, falling
    back to the index page."""
    if not referrer:
        referrer = request.referrer
    return request.args.get('next') or referrer or url_for('index')
def error_handler(e):
    """Build an error response for both HTML and JSON/XHR clients.

    HTTPExceptions keep their own status code; any other exception is
    reported as a 500.  Tracebacks are only exposed to admin users.  If
    rendering the error page itself raises an HTTPException, recurse on
    that new exception.
    """
    try:
        if isinstance(e, HTTPException):
            status_code = e.code
            # Pass the description through only when it differs from the
            # class default text (i.e. it was customised by the raiser).
            message = e.description if e.description != type(e).description else None
            tb = None
        else:
            status_code = httplib.INTERNAL_SERVER_ERROR
            message = None
            tb = traceback.format_exc() if current_user.admin else None
        # JSON body for AJAX / API clients, rendered template otherwise.
        if request.is_xhr or request.accept_mimetypes.best in ['application/json', 'text/javascript']:
            response = {
                'message': message,
                'traceback': tb
            }
        else:
            response = render_template('errors/error.html',
                                       title=httplib.responses[status_code],
                                       status_code=status_code,
                                       message=message,
                                       traceback=tb)
    except HTTPException as e2:
        return error_handler(e2)
    return response, status_code
def create_app(config=None):
    """Application factory: build and fully wire a realms Application.

    Initialises all extensions, installs error handlers for every 4xx/5xx
    status, registers template helpers, discovers feature modules and
    creates the database schema.  NOTE(review): the *config* parameter is
    currently unused -- configuration always comes from 'realms.config'.
    """
    app = Application(__name__)
    app.config.from_object('realms.config')
    app.url_map.converters['regex'] = RegexConverter
    app.url_map.strict_slashes = False
    login_manager.init_app(app)
    db.init_app(app)
    cache.init_app(app)
    assets.init_app(app)
    search.init_app(app)
    github.init_app(app)
    # Replace the model base so models get the hook machinery mixed in.
    db.Model = declarative_base(metaclass=HookModelMeta, cls=HookMixin)
    for status_code in httplib.responses:
        if status_code >= 400:
            app.register_error_handler(status_code, error_handler)
    @app.before_request
    def init_g():
        # Per-request asset lists templates can append to.
        g.assets = dict(css=['main.css'], js=['main.js'])
    @app.template_filter('datetime')
    def _jinja2_filter_datetime(ts):
        # Unix timestamp -> local human-readable string for templates.
        return time.strftime('%b %d, %Y %I:%M %p', time.localtime(ts))
    @app.errorhandler(404)
    def page_not_found(e):
        return render_template('errors/404.html'), 404
    if app.config['RELATIVE_PATH']:
        # When mounted under a prefix, redirect the bare root to the wiki.
        @app.route("/")
        def root():
            return redirect(url_for(app.config['ROOT_ENDPOINT']))
    app.discover()
    # This will be removed at some point
    with app.app_context():
        db.metadata.create_all(db.get_engine(app))
    return app
# Init plugins here if possible
# Module-level extension singletons; bound to the app in create_app().
login_manager = LoginManager()
db = SQLAlchemy()
cache = Cache()
assets = Assets()
search = Search()
github = GitHub()
# Site-wide asset bundles (filters/output defaulted by Assets.register).
assets.register('main.js',
                'vendor/jquery/dist/jquery.js',
                'vendor/components-bootstrap/js/bootstrap.js',
                'vendor/handlebars/handlebars.js',
                'vendor/js-yaml/dist/js-yaml.js',
                'vendor/marked/lib/marked.js',
                'js/html-sanitizer-minified.js',  # don't minify?
                'vendor/highlightjs/highlight.pack.js',
                'vendor/parsleyjs/dist/parsley.js',
                'vendor/datatables/media/js/jquery.dataTables.js',
                'vendor/datatables-plugins/integration/bootstrap/3/dataTables.bootstrap.js',
                'js/hbs-helpers.js',
                'js/mdr.js',
                'js/main.js')
assets.register('main.css',
                'vendor/bootswatch-dist/css/bootstrap.css',
                'vendor/components-font-awesome/css/font-awesome.css',
                'vendor/highlightjs/styles/github.css',
                'vendor/datatables-plugins/integration/bootstrap/3/dataTables.bootstrap.css',
                'css/style.css')
from functools import update_wrapper
def with_appcontext(f):
    """Wraps a callback so that it's guaranteed to be executed with the
    script's application context. If callbacks are registered directly
    to the ``app.cli`` object then they are wrapped with this function
    by default unless it's disabled.

    NOTE: each invocation builds a fresh app via create_app(), so wrapped
    commands pay full application start-up cost.
    """
    @click.pass_context
    def decorator(__ctx, *args, **kwargs):
        with create_app().app_context():
            return __ctx.invoke(f, *args, **kwargs)
    return update_wrapper(decorator, f)
class AppGroup(click.Group):
    """This works similar to a regular click :class:`~click.Group` but it
    changes the behavior of the :meth:`command` decorator so that it
    automatically wraps the functions in :func:`with_appcontext`.
    Not to be confused with :class:`FlaskGroup`.
    """
    def command(self, *args, **kwargs):
        """This works exactly like the method of the same name on a regular
        :class:`click.Group` but it wraps callbacks in :func:`with_appcontext`
        unless it's disabled by passing ``with_appcontext=False``.
        """
        wrap_for_ctx = kwargs.pop('with_appcontext', True)
        def decorator(f):
            # Wrap first so click sees (and documents) the wrapped callback.
            if wrap_for_ctx:
                f = with_appcontext(f)
            return click.Group.command(self, *args, **kwargs)(f)
        return decorator
    def group(self, *args, **kwargs):
        """This works exactly like the method of the same name on a regular
        :class:`click.Group` but it defaults the group class to
        :class:`AppGroup`.
        """
        kwargs.setdefault('cls', AppGroup)
        return click.Group.group(self, *args, **kwargs)
# Root command group; Application.discover() attaches each module's
# commands under `cli`.
flask_cli = AppGroup()
@flask_cli.group()
def cli():
    # Intentionally empty: exists only as a parent for module subcommands.
    # (No docstring on purpose -- click would surface it as help text.)
    pass
|
doordash/realms-wiki
|
realms/__init__.py
|
Python
|
gpl-2.0
| 9,122
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 19 13:13:13 2015
@author: Acer
"""
import sys
# sys.path.append('C:\Users\Acer\Documents\GitHub\CD3Waterbalance\Modelcreator')
from Global_counters import Global_counters
from XML_Creator import XML_Creator
from To_XML_Creator_modified_Simulator import TheHoleLot
from Global_meaning_list import Global_meaning_list
'''
CREATING THE XML
Supplyvec and Attributevecs explanation in the XML-Creator.md on Github in the doc folder
'''
#for fixing problem
#supplyvec=[[[[[[1,0],[0,0,1],1],[[0,0],[0,0,1],1],0],[[[1,1],[0,1,1],1],[[0,1],[1,1,1],1],1],1],[[[[0,1],[0,0,0],1],[[1,1],[0,0,0],1],0],[[[0,0],[1,0,0],1],[[1,0],[0,1,0],1],1],0]],
# [[[[[1,0],[0,0,0],1],[[0,0],[0,0,0],1],0],[[[0,1],[0,0,0],1],[[1,1],[0,0,0],1],0],0],[[[[0,1],[0,0,0],1],[[0,0],[0,0,0],1],0],[[[1,1],[0,0,0],1],[[1,0],[0,0,0],1],0],0]]]
#
#Catchattrvec=[[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
# [1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
#[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
#[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
#[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
# [1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
#[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
#[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
#[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
# [1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
#[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
#[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
#[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
# [1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
#[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
#[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without']]
#Demandmodelattrvec = [[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
# [[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"]]
def create_cluster(buildings):
    """Build the supply-vector entry for one cluster of *buildings* houses.

    Returns [building_flags, connection_higher, copies] where every
    building flag is 1 (greywater connected; 0 would mean without).
    """
    building_in_cluster = [1] * buildings
    # [contributes, uses, uses stormwater] towards the next level up.
    connection_higher = [0, 0, 1]
    copies = 1
    return [building_in_cluster, connection_higher, copies]
def reset_global_counter():
    """Reset every Global_counters class attribute: scalar counters back
    to zero, bookkeeping lists to fresh empty lists."""
    scalar_counters = (
        'number_of_buildings', 'number_of_connections',
        'number_of_greywatertanks', 'number_of_raintanks',
        'number_of_collectors', 'number_of_distributors',
        'number_of_stormwaterpipes', 'number_of_sewers',
        'number_of_potablwaterreservoirs', 'number_of_catchments',
        'number_of_demandmodels', 'number_of_stormwaterreservoirs',
        'number_of_filereaders', 'number_of_patternimplementers',
        'number_of_fileouts', 'number_of_gardenwateringmodules',
    )
    for attr in scalar_counters:
        setattr(Global_counters, attr, 0)
    list_attributes = (
        'number_of_collectors_ports_list',
        'number_of_distributors_ports_list',
        'numbers_names_of_fileouts_list',
    )
    for attr in list_attributes:
        setattr(Global_counters, attr, [])
def create_catchments(buildings):
    """Return (catchment_attr, demand_vec) for *buildings* houses.

    One identical attribute row and one demand-model row per building,
    plus a single extra (larger, 10000 m^2) catchment appended at the end
    with no matching demand entry.
    """
    catchment_attr = [[1, 1.9, 800, 0.4, 0.2, 0.4, 0.6, 0.21, 1.5, 0.4, 0.5,
                       400, 500, 700, 0.04, 0.05, 0.06, 'without']
                      for _ in range(buildings)]
    demand_vec = [[[5, 6, 5], [5], "Simple_Model"]
                  for _ in range(buildings)]
    # Shared trailing catchment appended once, regardless of building count.
    catchment_attr.append([1, 1.8, 10000, 0, 0.5, 0.5, 0.6, 0.21, 1.5, 0.4,
                           0.5, 380, 510, 710, 0.04, 0.05, 0.06, 'without'])
    return catchment_attr, demand_vec
# supplyvec= [[[[[[1, 1], [0, 0, 1], 1], 0], 1]]] #[[[[[[1,0],[0,0,1],1],0],1]]]
# Catchattrvec=[[1, 1.9, 800, 0.4, 0.2, 0.4, 0.6, 0.21, 1.5, 0.4, 0.5, 400, 500, 700, 0.04, 0.05, 0.06, 'without'], [1, 1.9, 800, 0.4, 0.2, 0.4, 0.6, 0.21, 1.5, 0.4, 0.5, 400, 500, 700, 0.04, 0.05, 0.06, 'without'], [1, 1.8, 10000, 0, 0.5, 0.5, 0.6, 0.21, 1.5, 0.4, 0.5, 380, 510, 710, 0.04, 0.05, 0.06, 'without']] #[[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without']]
# Demandmodelattrvec =[[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"]]
#for i in range(len(Demandmodelattrvec)):
# Demandmodelattrvec[i][-1]="Stochastic_Model"
#print Demandmodelattrvec
def XML(supplyvec, Catchattrvec, Demandmodelattrvec):
    """Build the CityDrain3 model XML from the given supply/catchment/demand
    vectors and save it as Garden.xml.

    Relies on XML_Creator's WriteConnections() updating Global_counters,
    whose values then size the tank/reservoir attribute vectors below.
    Paths are hard-coded for one developer machine.
    """
    #creating Connectionlist
    CreateXML = XML_Creator()
    #supplyvec = [[[[[[1],[1,1,1],1],1],1]]]
    CreateXML.WriteConnections(supplyvec)
    #creating Nodelist
    #Catchattrvec=[[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without']]
    # One default attribute row per counted component of each kind.
    Greywaterattrvec = [[0.5,5]]*(Global_counters.number_of_greywatertanks)
    Stormwaterresattrvec = [[0.5,5]]*(Global_counters.number_of_stormwaterreservoirs)
    Rainwaterattrvec = [[5]]*(Global_counters.number_of_raintanks)
    #Demandmodelattrvec = [[[1000],[0], "Simple_Model"]]
    Gardenwaterattrvec = [[7,2,22,[18,6],"Smart_Watering"]]*Global_counters.number_of_gardenwateringmodules
    # [start, end, timestep seconds, path to node module definitions]
    Simulationsetupvec = ["2000-Jan-01 00:00:00", "2001-Jan-01 00:00:00", "86400", "/Users/christianurich/Documents/CD3Waterbalance/Module/cd3waterbalancemodules.py"]
    # [rain file, evapotranspiration file, temperature?, base value?] --
    # semantics of the last two entries not visible here; see XML_Creator.
    Needtohaveinputsvec = ["/Users/christianurich/Documents/CD3Waterbalance/simulationwithpatterns/inputfiles/rain.ixx", "/Users/christianurich/Documents/CD3Waterbalance/simulationwithpatterns/inputfiles/evapo.ixx", "13", "20.5"]
    CreateXML.WriteNodes(Catchattrvec, Greywaterattrvec, Stormwaterresattrvec, Rainwaterattrvec, Demandmodelattrvec, Simulationsetupvec, Needtohaveinputsvec,Gardenwaterattrvec)
    #printing the Connectionlist to insert Fileouts
    # CreateXML.PrintConnections()
    #insert Fileouts()
    # Fileout_Connection_Name_List = [[724, 'EXAMPLE.txt']]
    # CreateXML.Additional_Fileouts(Fileout_Connection_Name_List)
    # save the xml file
    CreateXML.SaveXML('/Users/christianurich/Documents/CD3Waterbalance/simulationwithpatterns/outputfiles/Garden.xml')
    # for i in range(len(Global_meaning_list.collectors)):
    #     print Global_meaning_list.collectors[i]
    return
'''
RUNNING AND CHECKING THE XML
'''
def Simulator(Catchattrvec):
    """Run the generated Garden.xml model through CityDrain3 and verify
    its mass balance.

    @param Catchattrvec: catchment attribute vector the XML was built
        with; needed by Fractioncalculator for the area fractions.
    Paths are hard-coded for one developer machine.
    """
    # FIX: the local used to be named 'Simulator' as well, shadowing this
    # function's own name inside its body.
    sim = TheHoleLot()
    # Clear previous outputs, run cd3 on the generated model.
    sim.Deleter('/Users/christianurich/Documents/CD3Waterbalance/simulationwithpatterns/outputfiles')
    sim.runcd3('/Users/christianurich/Documents/CityDrain3/build/cd3', '-m /Users/christianurich/Documents/CD3Waterbalance/simulationwithpatterns/outputfiles/Garden.xml -v 1')
    sim.Fractioncalculator(Catchattrvec)
    sim.getoutputdata('/Users/christianurich/Documents/CD3Waterbalance/simulationwithpatterns/outputfiles')
    sim.getinputdata('/Users/christianurich/Documents/CD3Waterbalance/simulationwithpatterns/inputfiles')
    # Compare storages + inputs against all outflow files.
    sim.Balance(['Greywatertanklevels', 'Rainwatertanklevels', 'Stormwaterreservoirlevels', 'Gardenwateringstorage'], ['Evapo_Model', 'Rain_Model'], ['Actual_Infiltration', 'Potable_Water_Demand', 'Sewer', 'Stormwaterdrain'])
    #sim.Plotter([20,10],[0,365], [0,100], ['Greywatertanklevels', 'Rainwatertanklevels', 'Stormwaterreservoirlevels'])
    return
if __name__ == "__main__":
print
#[[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [0, 0, 1], 1], "greywater"],"stormwater"]
#[[[[[ building_in_cluster ,[0,0,1],1],0],1]]]
# print [ [ [ [ create_cluster(i), 0],1] ] ]
# print create_catchments(i)[0]
buildings = int(sys.argv[1])
XML([ [ [ [ create_cluster(buildings), 0],1] ] ], create_catchments(buildings)[0], create_catchments(buildings)[1])
Simulator( create_catchments(buildings)[0])
# supplyvec= [[[[[[1,0],[0,0,1],1],0],1]]]
# Catchattrvec=[[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without']]
# Demandmodelattrvec =[[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"]]
##Input description for Simulator!!!!!!
'''
Deleter - method deletes all .txt - files in the City Drain output folder
Input: Deleter( - path to City Drain output folder - )
Example: Deleter('C:\Users\Acer\Documents\GitHub\CD3Waterbalance\simulationwithpatterns\outputfiles')
runcd3 - method runs City Drain
Input: runcd3( - path to CityDrain.exe (cd3.exe), path to XML - file that contains model - )
Example: runcd3('C:\Program Files (x86)\CityDrain3\\bin\cd3.exe', C:\Users\Acer\Documents\GitHub\CD3Waterbalance\simulationwithpatterns\outputfiles\Test.xml)
Attention \b in the program path has to be written \\b
Fractioncalculator - method calculates the total area of all Catchments and the average perv, imperv to stormwaterdrain and imperv to storage area
Input: Fractioncalculator( - the catchmentattributevec that was used for setting up the Test.xml - )
Example: Fractioncalculator([[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06],[1,1.8,10000,0,1,0,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06]])
getoutputdata - method imports all data from the output .txt - files created by City Drain
Input: getoutputdata( - path to City Drain output folder - )
Example: getoutputdata('C:\Users\Acer\Documents\GitHub\CD3Waterbalance\simulationwithpatterns\outputfiles')
getinputdata - method imports rain and evapotranspiration .ixx - files used for the City Drain simulation
Input: getinputdata( - path to City Drain input folder - )
Example: getinputdata('C:\Users\Acer\Documents\GitHub\CD3Waterbalance\simulationwithpatterns\inputfiles')
Balance - method checks the models mass balance by comparing input and output data
Input: Balance( - list of all storage output file names, list of filereader/pattern implementer output file names, list of other necessary output file names - )
Example: Balance(0.4, 1.5, ['Greywatertanklevels', 'Rainwatertanklevels', 'Stormwaterreservoirlevels'], ['Evapo_Model', 'Rain_Model'], ['Actual_Infiltration', 'Potable_Water_Demand', 'Sewer', 'Stormwaterdrain'])
Plotter - method plots any time series (file) wanted
Input: Plotter( - size (height and width), range of x to plot (in days), range of y to plot (in m^3), list of file names to plot - )
Example: Plotter([12,10],[0,365], [0,1], ['Rain_Model', 'Stormwaterdrain', 'Evapo_Model', 'effective_rain','Indoor_Demand','Outdoor_Demand'])
Attention!!
The Methods getoutputdata, Balance and Plotter contain file names of the rain and evapotranspiration inputfiles, the rain and evapotr. files given out by the file reader/ pattern implementer and other file names.
Those do have to be adapted to the file names given to the corresponding files! See the methods code for closer description!
'''
|
iut-ibk/DynaMind-ToolBox
|
DynaMind-Performance-Assessment/3rdparty/CD3Waterbalance/Modelcreator/Interface_cu.py
|
Python
|
gpl-2.0
| 15,389
|
__author__ = 'sjp1'
# Unit-conversion constants.
# NOTE(review): 1609 is the rounded statute mile (exact: 1609.344 m);
# fine for this back-of-envelope calculation.
METRES_IN_A_MILE = 1609
SECONDS_IN_AN_HOUR = 3600
CONVERT_TO_KILO = 1e-3
CONVERT_TO_MEGA = 1e-6
# --- Aircraft kinetic properties ---
mass = 13782  # kg
print('Mass: %i kg' % mass)
speed_in_mph = 350  # mph
print('Speed: %i mph' % speed_in_mph)
speed = speed_in_mph * METRES_IN_A_MILE / SECONDS_IN_AN_HOUR
print('Velocity: %.1f m/s' % speed)
# Kinetic energy E = 1/2 m v^2, reported in MJ.
energy = 0.5 * mass * speed**2
print('Energy: %.2f MJ' % (CONVERT_TO_MEGA*energy))
momentum = mass * speed
print('Momentum: %.2f MN s' % (CONVERT_TO_MEGA*momentum))
# --- Cannon recoil calculation ---
bullet_velocity = 1070  # m/s
print('Muzzle velocity: %i m/s' % bullet_velocity)
bullet_mass = 0.395  # kg
print('Projectile mass: %.3f kg' % (bullet_mass))
rate_of_fire = 3900 / 60  # rps
print('Rate of fire: %.2f rounds/s' % rate_of_fire)
time_of_fire = 2  # s
print('Time of fire: %i s' % time_of_fire)
momentum_per_bullet = bullet_mass * bullet_velocity
print('Momentum per projectile: %.2f N s' % (momentum_per_bullet))
momentum_per_burst = momentum_per_bullet * rate_of_fire * time_of_fire
print('Momentum per burst: %.2f kN s' % (CONVERT_TO_KILO*momentum_per_burst))
# Recoil: conservation of momentum gives the aircraft's velocity change.
velocity_change = momentum_per_burst / mass
print('Change in velocity: %.2f m/s' % (velocity_change))
print('Change in velocity: %.2f mph' % (velocity_change*SECONDS_IN_AN_HOUR/METRES_IN_A_MILE))
|
JediStuart/PythonFirst
|
src/A10.py
|
Python
|
gpl-2.0
| 1,257
|
from func import logger
from func.utils import is_public_valid_method
class BaseFactModule(object):
    """
    The base fact module which is clever
    enough to register the facts it is kind
    of FuncModule but registers modules with
    different convention and style .. Look
    into other modules to get the idea ...
    """
    version = "0.0.0"
    description = "Base module of all facts"
    def __init__(self):
        self.__init_log()
    def __init_log(self):
        # Shared func logger; exposed as self.logger for subclasses.
        log = logger.Logger()
        self.logger = log.logger
    def register_facts(self,fact_callers,module_name,abort_on_conflict=False):
        # a dictionary to catch the conflicts
        """
        Register every public fact method of this module into fact_callers,
        keyed both as '<module_name>.<method>' and, when the method carries
        a 'tag' attribute, under that global tag.

        Be careful not to override that method in your classes!
        @param fact_callers: dict of fact name/tag -> bound method (mutated).
        @param module_name: namespace prefix for this module's methods.
        @param abort_on_conflict : Fact methods use a system called global tagging
                                   So it may be easy to get conflicts (having 2 facts)
                                   with the same name, so when that is True it will
                                   tell the user that something is wrong. There is no
                                   need to worry: the unittests will catch it if
                                   something bad happens ...
        @return: when abort_on_conflict is True, dict tag -> list of
                 conflicting method names; otherwise None.
        """
        conflicts = {}
        for attr in dir(self):
            if self.__is_public_valid_method(attr):
                fact_method = getattr(self, attr)
                fact_callers["%s.%s"%(module_name,attr)] = fact_method
                if hasattr(fact_method,"tag"):
                    method_tag = getattr(fact_method,"tag")
                    if fact_callers.has_key(method_tag):
                        # Tag already taken: log, and optionally record both
                        # the new and previous owners of the tag.
                        self.logger.info("Facts has registered the tag : %s before, it was overriden"%method_tag)
                        if abort_on_conflict:
                            if not conflicts.has_key(method_tag):
                                conflicts[method_tag] = []
                            conflicts[method_tag].append(getattr(fact_method,"__name__","default"))
                            if getattr(fact_callers[method_tag],"__name__","default") not in conflicts[method_tag]:
                                conflicts[method_tag].append(getattr(fact_callers[method_tag],"__name__","default"))
                    # Last registration wins the tag slot.
                    fact_callers[method_tag] = fact_method
        #if there is conflict show it
        if abort_on_conflict:
            return conflicts
    def __is_public_valid_method(self,attr):
        # register_facts itself must never be exported as a fact.
        return is_public_valid_method(self, attr, blacklist=['register_facts'])
|
makkalot/func
|
func/minion/facts/modules/fact_module.py
|
Python
|
gpl-2.0
| 2,557
|
import argparse

parser = argparse.ArgumentParser(description='Run simulation for nora w 3d layers')
# FIX: positionals previously declared defaults that could never apply
# (defaults on required positionals are ignored) and 'n' had no type=,
# so NN ended up being a *string*.  nargs='?' makes both optional so the
# defaults work, and type=int makes NN numeric; existing invocations
# ('script 4 3000') keep working unchanged.
parser.add_argument('t', metavar='threads', type=int, nargs='?',
                    default=1,
                    help='number of nest threads')
parser.add_argument('n', metavar='nn', type=int, nargs='?',
                    default=3000,
                    help='desired number of neurons')
args = parser.parse_args()
# Quality of graphics
dpi_n = 120
number_of_threads = args.t
# Number of neurons
NN = args.n
# T - simulation time | dt - simulation pause step
T = 1000.
dt = 10.
# Neurons number for spike detector
N_detect = 100
# Neurons number for multimeter
N_volt = 3
# Generator delay
pg_delay = 10.
# Synapse weights (inhibitory weights mirror their excitatory counterparts)
w_Glu = 3.
w_GABA = -w_Glu * 2
w_ACh = 8.
w_NA_ex = 13.
w_NA_in = -w_NA_ex
w_DA_ex = 13.
w_DA_in = -w_DA_ex
w_SERO_ex = 13.
w_SERO_in = -w_SERO_ex
# Minimal number of neurons
NN_minimal = 10
# Additional settings
serotonin_flag = True           # serotonin modulation flag
noradrenaline_flag = True       # noradrenaline modulation flag
dopamine_flag = True            # dopamine modulation flag
generator_flag = True
create_images = True
MaxSynapses = 4000              # max synapses
BOUND = 0.2                     # outer bound of rectangular 3d layer
R = .25                         # radius of connectivity sphere of a neuron
|
research-team/NEUCOGAR
|
NEST/cube/integration/excitement/simulation_params.py
|
Python
|
gpl-2.0
| 1,252
|
from flask import render_template
from . import auth
from .models import User
@auth.route('/')
def index():
    """Render the landing page.

    Fetches every User row and prints it to stdout -- debug output only;
    the query result is not passed to the template.
    """
    users = User.query.all()
    print('users:')
    print(users)
    return render_template('index.html')
|
ZhangBohan/python_web_start_kit
|
app/user/views.py
|
Python
|
gpl-2.0
| 217
|
"""
Running small pieces of cobbler sync when certain actions are taken,
such that we don't need a time consuming sync when adding new
systems if nothing has changed for systems that have already
been created.
Copyright 2006-2009, Red Hat, Inc
Michael DeHaan <mdehaan@redhat.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
import os
import os.path
import utils
import traceback
import clogger
class BootLiteSync:
    """
    Handles conversion of internal state to the tftpboot tree layout.

    Each method performs the minimal slice of a full cobbler sync needed
    after a single object (distro/profile/system/image) is added or
    removed, avoiding a time-consuming full sync.
    """

    def __init__(self,config,verbose=False,logger=None):
        """
        Constructor: caches the object collections from config and builds
        the sync helper used to (re)generate the tftpboot tree.
        """
        self.verbose = verbose
        self.config = config
        self.distros = config.distros()
        self.profiles = config.profiles()
        self.systems = config.systems()
        self.images = config.images()
        self.settings = config.settings()
        self.repos = config.repos()
        if logger is None:
            logger = clogger.Logger()
        self.logger = logger
        self.sync = config.api.get_sync(verbose,logger=self.logger)
        self.sync.make_tftpboot()

    def add_single_distro(self, name):
        """
        Sync on-disk state for a newly added distro, then cascade to its
        child profiles and rebuild the PXE menu.
        """
        # get the distro record
        distro = self.distros.find(name=name)
        if distro is None:
            return
        # copy image files to images/$name in webdir & tftpboot:
        self.sync.pxegen.copy_single_distro_files(distro)
        # generate any templates listed in the distro
        self.sync.pxegen.write_templates(distro)
        # cascade sync
        kids = distro.get_children()
        for k in kids:
            self.add_single_profile(k.name, rebuild_menu=False)
        self.sync.pxegen.make_pxe_menu()

    def add_single_image(self, name):
        """
        Sync on-disk state for a newly added image, then cascade to its
        child systems and rebuild the PXE menu.
        """
        image = self.images.find(name=name)
        # Bugfix/robustness: mirror add_single_distro's guard.  Previously
        # an unknown name passed None into copy_single_image_files().
        if image is None:
            return
        self.sync.pxegen.copy_single_image_files(image)
        kids = image.get_children()
        for k in kids:
            self.add_single_system(k.name)
        self.sync.pxegen.make_pxe_menu()

    def remove_single_distro(self, name):
        """
        Delete the on-disk artifacts (webdir + tftpboot) of a removed distro.
        """
        bootloc = utils.tftpboot_location()
        # delete contents of images/$name directory in webdir
        utils.rmtree(os.path.join(self.settings.webdir, "images", name))
        # delete contents of images/$name in tftpboot
        utils.rmtree(os.path.join(bootloc, "images", name))
        # delete potential symlink to tree in webdir/links
        utils.rmfile(os.path.join(self.settings.webdir, "links", name))

    def remove_single_image(self, name):
        """
        Delete the on-disk artifacts of a removed image.
        """
        bootloc = utils.tftpboot_location()
        utils.rmfile(os.path.join(bootloc, "images2", name))

    def add_single_profile(self, name, rebuild_menu=True):
        """
        Sync on-disk state for an added/edited profile and cascade to its
        children (sub-profiles and systems).  Returns True on completion,
        None when the profile no longer exists.
        """
        # get the profile object:
        profile = self.profiles.find(name=name)
        if profile is None:
            # most likely a subprofile's kid has been
            # removed already, though the object tree has
            # not been reloaded ... and this is just noise.
            return
        # rebuild the yum configuration files for any attached repos
        # generate any templates listed in the distro
        self.sync.pxegen.write_templates(profile)
        # cascade sync
        kids = profile.get_children()
        for k in kids:
            if k.COLLECTION_TYPE == "profile":
                self.add_single_profile(k.name, rebuild_menu=False)
            else:
                self.add_single_system(k.name)
        if rebuild_menu:
            self.sync.pxegen.make_pxe_menu()
        return True

    def remove_single_profile(self, name):
        """
        Delete the on-disk artifacts of a removed profile.
        """
        # delete profiles/$name file in webdir
        utils.rmfile(os.path.join(self.settings.webdir, "profiles", name))
        # delete contents on kickstarts/$name directory in webdir
        utils.rmtree(os.path.join(self.settings.webdir, "kickstarts", name))

    def update_system_netboot_status(self,name):
        """
        Regenerate the PXE files and templates for one system, e.g. after
        its netboot flag changed.  Dies if the system cannot be found.
        """
        system = self.systems.find(name=name)
        if system is None:
            utils.die(self.logger,"error in system lookup for %s" % name)
        self.sync.pxegen.write_all_system_files(system)
        # generate any templates listed in the system
        self.sync.pxegen.write_templates(system)

    def add_single_system(self, name):
        """
        Sync on-disk state (DHCP/DNS snippets, PXE files, templates) for a
        newly added or edited system.
        """
        # get the system object:
        system = self.systems.find(name=name)
        if system is None:
            return
        # rebuild system_list file in webdir
        if self.settings.manage_dhcp:
            self.sync.dhcp.regen_ethers()
        if self.settings.manage_dns:
            self.sync.dns.regen_hosts()
        # write the PXE files for the system
        self.sync.pxegen.write_all_system_files(system)
        # generate any templates listed in the distro
        self.sync.pxegen.write_templates(system)

    def remove_single_system(self, name):
        """
        Delete the generated PXE config files of a removed system.
        """
        bootloc = utils.tftpboot_location()
        system_record = self.systems.find(name=name)
        # Robustness: without the record there is nothing we can match on
        # disk (the old code raised AttributeError on the next line).
        if system_record is None:
            return
        itanic = False
        profile = self.profiles.find(name=system_record.profile)
        if profile is not None:
            distro = self.distros.find(name=profile.distro)
            # Bugfix: the old code compared the distro *object* against the
            # arch strings, so itanic could never become True; compare the
            # distro's arch field instead (ia64 uses a flat file layout,
            # not pxelinux.cfg/).
            if distro is not None and distro.arch in [ "ia64", "IA64"]:
                itanic = True
        # Renamed loop variable: it used to shadow the `name` parameter.
        for (interface_name,interface) in system_record.interfaces.iteritems():
            filename = utils.get_config_filename(system_record,interface=interface_name)
            if not itanic:
                utils.rmfile(os.path.join(bootloc, "pxelinux.cfg", filename))
            else:
                utils.rmfile(os.path.join(bootloc, filename))
|
ssalevan/cobbler
|
cobbler/action_litesync.py
|
Python
|
gpl-2.0
| 6,275
|
#!/usr/bin/env python3
# Endlessly print "automorphic" numbers: n whose square ends in the
# decimal digits of n itself (1, 5, 6, 25, 76, ...).
n = 1
while True:
    if str(n * n).endswith(str(n)):
        print(n)
    n += 1
|
Foggalong/scraps
|
files/maths/jack.py
|
Python
|
gpl-2.0
| 106
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# $Id: rasterio.py 32166 2015-12-13 19:29:52Z goatbar $
#
# Project: GDAL/OGR Test Suite
# Purpose: Test default implementation of GDALRasterBand::IRasterIO
# Author: Even Rouault <even dot rouault at mines dash paris dot org>
#
###############################################################################
# Copyright (c) 2008-2013, Even Rouault <even dot rouault at mines-paris dot org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import sys
sys.path.append( '../pymod' )
import gdaltest
from osgeo import gdal
###############################################################################
# Test writing a 1x1 buffer to a 10x6 raster and read it back
def rasterio_1():
    """Test writing a 1x1 buffer to a 10x6 raster and reading it back."""
    data = 'A'.encode('ascii')

    drv = gdal.GetDriverByName('GTiff')
    ds = drv.Create('tmp/rasterio1.tif', 10, 6, 1)

    # Fill with 65 ('A') to capture the reference checksum, then clear
    # and write a single 'A' pixel stretched over the whole raster.
    ds.GetRasterBand(1).Fill(65)
    checksum = ds.GetRasterBand(1).Checksum()
    ds.GetRasterBand(1).Fill(0)

    ds.WriteRaster(0, 0, ds.RasterXSize, ds.RasterYSize, data, buf_type = gdal.GDT_Byte, buf_xsize=1, buf_ysize=1)
    if checksum != ds.GetRasterBand(1).Checksum():
        gdaltest.post_reason( 'Didnt get expected checksum ')
        return 'fail'

    # Reading the full raster back into a 1x1 buffer must return 'A'.
    data2 = ds.ReadRaster(0, 0, ds.RasterXSize, ds.RasterYSize, 1, 1)
    if data2 != data:
        gdaltest.post_reason( 'Didnt get expected buffer ')
        return 'fail'

    ds = None
    drv.Delete('tmp/rasterio1.tif')
    return 'success'
###############################################################################
# Test writing a 5x4 buffer to a 10x6 raster and read it back
def rasterio_2():
    """Test writing a 5x4 buffer to a 10x6 raster and reading it back."""
    data = 'AAAAAAAAAAAAAAAAAAAA'.encode('ascii')

    drv = gdal.GetDriverByName('GTiff')
    ds = drv.Create('tmp/rasterio2.tif', 10, 6, 1)

    # Fill with 65 ('A') to capture the reference checksum, then clear
    # and write a 5x4 buffer of 'A's stretched over the whole raster.
    ds.GetRasterBand(1).Fill(65)
    checksum = ds.GetRasterBand(1).Checksum()
    ds.GetRasterBand(1).Fill(0)

    ds.WriteRaster(0, 0, ds.RasterXSize, ds.RasterYSize, data, buf_type = gdal.GDT_Byte, buf_xsize=5, buf_ysize=4)
    if checksum != ds.GetRasterBand(1).Checksum():
        gdaltest.post_reason( 'Didnt get expected checksum ')
        return 'fail'

    # Reading the full raster back into a 5x4 buffer must match the input.
    data2 = ds.ReadRaster(0, 0, ds.RasterXSize, ds.RasterYSize, 5, 4)
    if data2 != data:
        gdaltest.post_reason( 'Didnt get expected buffer ')
        return 'fail'

    ds = None
    drv.Delete('tmp/rasterio2.tif')
    return 'success'
###############################################################################
# Test extensive read & writes into a non tiled raster
def rasterio_3():
    """Test extensive reads & writes into a non tiled raster.

    Sweeps every window origin/size over a 10x6 raster and, for each,
    every buffer size from 1x1 to 5x4, checking that what was written
    is read back unchanged.
    """
    # data[x][y] holds (x+1)*(y+1) 'A' bytes, one per buffer pixel.
    data = [['' for i in range(4)] for i in range(5)]
    for xsize in range(5):
        for ysize in range(4):
            for m in range((xsize + 1) * (ysize + 1)):
                data[xsize][ysize] = data[xsize][ysize] + 'A'
            data[xsize][ysize] = data[xsize][ysize].encode('ascii')

    drv = gdal.GetDriverByName('GTiff')
    ds = drv.Create('tmp/rasterio3.tif', 10, 6, 1)

    i = 0
    while i < ds.RasterXSize:
        j = 0
        while j < ds.RasterYSize:
            k = 0
            while k < ds.RasterXSize - i:
                l = 0
                while l < ds.RasterYSize - j:
                    for xsize in range(5):
                        for ysize in range(4):
                            ds.GetRasterBand(1).Fill(0)
                            ds.WriteRaster(i, j, k + 1, l + 1, data[xsize][ysize],
                                           buf_type = gdal.GDT_Byte,
                                           buf_xsize=xsize + 1, buf_ysize=ysize + 1)
                            data2 = ds.ReadRaster(i, j, k + 1, l + 1, xsize + 1, ysize + 1, gdal.GDT_Byte)
                            if data2 != data[xsize][ysize]:
                                gdaltest.post_reason( 'Didnt get expected buffer ')
                                return 'fail'
                    l = l + 1
                k = k + 1
            j = j + 1
        i = i + 1

    ds = None
    drv.Delete('tmp/rasterio3.tif')
    return 'success'
###############################################################################
# Test extensive read & writes into a tiled raster
def rasterio_4():
    """Test extensive reads & writes into a tiled raster.

    Same exhaustive window/buffer sweep idea as rasterio_3, but against
    a tiled GTiff (16x16 blocks); window origins advance in coarse steps
    until near the tile boundary (>= 15), then one pixel at a time.
    """
    # data[s] holds s+1 'A' bytes; index (xsize+1)*(ysize+1)-1 picks the
    # buffer matching a given buffer width/height.
    data = [ '' for i in range(5 * 4)]
    for size in range(5 * 4):
        for k in range(size+1):
            data[size] = data[size] + 'A'
        data[size] = data[size].encode('ascii')

    drv = gdal.GetDriverByName('GTiff')
    ds = drv.Create('tmp/rasterio4.tif', 20, 20, 1, options = [ 'TILED=YES', 'BLOCKXSIZE=16', 'BLOCKYSIZE=16' ])

    i = 0
    while i < ds.RasterXSize:
        j = 0
        while j < ds.RasterYSize:
            k = 0
            while k < ds.RasterXSize - i:
                l = 0
                while l < ds.RasterYSize - j:
                    for xsize in range(5):
                        for ysize in range(4):
                            ds.GetRasterBand(1).Fill(0)
                            ds.WriteRaster(i, j, k + 1, l + 1, data[(xsize + 1) * (ysize + 1) - 1],
                                           buf_type = gdal.GDT_Byte,
                                           buf_xsize=xsize + 1, buf_ysize=ysize + 1)
                            data2 = ds.ReadRaster(i, j, k + 1, l + 1, xsize + 1, ysize + 1, gdal.GDT_Byte)
                            if data2 != data[(xsize + 1) * (ysize + 1) - 1]:
                                gdaltest.post_reason( 'Didnt get expected buffer ')
                                print(i,j,k,l,xsize,ysize)
                                print(data2)
                                print(data[(xsize + 1) * (ysize + 1) - 1])
                                return 'fail'
                    l = l + 1
                k = k + 1
            # coarse steps of 3 until near the 16-pixel tile edge
            if j >= 15:
                j = j + 1
            else:
                j = j + 3
        if i >= 15:
            i = i + 1
        else:
            i = i + 3

    ds = None
    drv.Delete('tmp/rasterio4.tif')
    return 'success'
###############################################################################
# Test error cases of ReadRaster()
def rasterio_5():
    """Test error cases of ReadRaster()."""
    ds = gdal.Open('data/byte.tif')

    # Huge negative window sizes: calls must not crash.
    for obj in [ds, ds.GetRasterBand(1)]:
        obj.ReadRaster(0,0,-2000000000,1,1,1)
        obj.ReadRaster(0,0,1,-2000000000,1,1)

    # Out-of-range band numbers in band_list must return None with a
    # specific error message.
    for band_number in [-1,0,2]:
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        res = ds.ReadRaster(0,0,1,1,band_list=[band_number])
        gdal.PopErrorHandler()
        error_msg = gdal.GetLastErrorMsg()
        if res is not None:
            gdaltest.post_reason('expected None')
            return 'fail'
        if error_msg.find('this band does not exist on dataset') == -1:
            gdaltest.post_reason('did not get expected error msg')
            print(error_msg)
            return 'fail'

    # Repeating the same valid band in band_list is legal.
    res = ds.ReadRaster(0,0,1,1,band_list=[1,1])
    if res is None:
        gdaltest.post_reason('expected non None')
        return 'fail'

    for obj in [ds, ds.GetRasterBand(1)]:
        # Window extending past the raster extent.
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        res = obj.ReadRaster(0,0,21,21)
        gdal.PopErrorHandler()
        error_msg = gdal.GetLastErrorMsg()
        if res is not None:
            gdaltest.post_reason('expected None')
            return 'fail'
        if error_msg.find('Access window out of range in RasterIO()') == -1:
            gdaltest.post_reason('did not get expected error msg (1)')
            print(error_msg)
            return 'fail'

        # This should only fail on a 32bit build
        try:
            maxsize = sys.maxint
        except:
            maxsize = sys.maxsize

        # On win64, maxsize == 2147483647 and ReadRaster()
        # fails because of out of memory condition, not
        # because of integer overflow. I'm not sure on how
        # to detect win64 better.
        if maxsize == 2147483647 and sys.platform != 'win32':
            gdal.ErrorReset()
            gdal.PushErrorHandler('CPLQuietErrorHandler')
            res = obj.ReadRaster(0,0,1,1,1000000,1000000)
            gdal.PopErrorHandler()
            error_msg = gdal.GetLastErrorMsg()
            if res is not None:
                gdaltest.post_reason('expected None')
                return 'fail'
            if error_msg.find('Integer overflow') == -1:
                gdaltest.post_reason('did not get expected error msg (2)')
                print(error_msg)
                return 'fail'

        # Zero-sized buffer.
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        res = obj.ReadRaster(0,0,0,1)
        gdal.PopErrorHandler()
        error_msg = gdal.GetLastErrorMsg()
        if res is not None:
            gdaltest.post_reason('expected None')
            return 'fail'
        if error_msg.find('Illegal values for buffer size') == -1:
            gdaltest.post_reason('did not get expected error msg (3)')
            print(error_msg)
            return 'fail'

    ds = None
    return 'success'
###############################################################################
# Test error cases of WriteRaster()
def rasterio_6():
    """Test error cases of WriteRaster()."""
    ds = gdal.GetDriverByName('MEM').Create('', 2, 2)

    for obj in [ds, ds.GetRasterBand(1)]:
        # Passing None as the source buffer must raise.
        try:
            obj.WriteRaster(0,0,2,2,None)
            gdaltest.post_reason('expected exception')
            return 'fail'
        except:
            pass

        # Buffer smaller than the target window.
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        obj.WriteRaster(0,0,2,2,' ')
        gdal.PopErrorHandler()
        error_msg = gdal.GetLastErrorMsg()
        if error_msg.find('Buffer too small') == -1:
            gdaltest.post_reason('did not get expected error msg (1)')
            print(error_msg)
            return 'fail'

        # Window starting outside the raster.
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        obj.WriteRaster(-1,0,1,1,' ')
        gdal.PopErrorHandler()
        error_msg = gdal.GetLastErrorMsg()
        if error_msg.find('Access window out of range in RasterIO()') == -1:
            gdaltest.post_reason('did not get expected error msg (2)')
            print(error_msg)
            return 'fail'

        # Zero-sized window.
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        obj.WriteRaster(0,0,0,1,' ')
        gdal.PopErrorHandler()
        error_msg = gdal.GetLastErrorMsg()
        if error_msg.find('Illegal values for buffer size') == -1:
            gdaltest.post_reason('did not get expected error msg (3)')
            print(error_msg)
            return 'fail'

    ds = None
    return 'success'
###############################################################################
# Test that default window reading works via ReadRaster()
def rasterio_7():
    """Test that default window reading works via ReadRaster()."""
    ds = gdal.Open('data/byte.tif')

    # Band-level ReadRaster() with no arguments reads the whole band
    # (20x20 bytes = 400).
    data = ds.GetRasterBand(1).ReadRaster()
    l = len(data)
    if l != 400:
        gdaltest.post_reason('did not read expected band data via ReadRaster()')
        return 'fail'

    # Dataset-level ReadRaster() with no arguments reads the whole dataset.
    data = ds.ReadRaster()
    l = len(data)
    if l != 400:
        gdaltest.post_reason('did not read expected dataset data via ReadRaster()')
        return 'fail'

    return 'success'
###############################################################################
# Test callback of ReadRaster()
def rasterio_8_progress_callback(pct, message, user_data):
    """Progress callback expecting reports in +0.05 increments.

    user_data is a two-item list [last_pct, ok_flag]; ok_flag is cleared
    when a report deviates from last_pct + 0.05.
    """
    expected = user_data[0] + 0.05
    if abs(pct - expected) > 1e-5:
        print('Expected %f, got %f' % (expected, pct))
        user_data[1] = False
    user_data[0] = pct
    return 1 # 1 to continue, 0 to stop
def rasterio_8_progress_interrupt_callback(pct, message, user_data):
    """Record pct into user_data[0]; abort once progress reaches 50%."""
    user_data[0] = pct
    # returning 0 interrupts the running operation
    return 0 if pct >= 0.5 else 1
def rasterio_8_progress_callback_2(pct, message, user_data):
    """Abort (return 0) if progress ever goes backwards, else record it."""
    last = user_data[0]
    if pct < last:
        print('Got %f, last pct was %f' % (pct, last))
        return 0
    user_data[0] = pct
    return 1 # 1 to continue, 0 to stop
def rasterio_8():
    """Test progress callback behaviour (and interruption) of ReadRaster()."""
    ds = gdal.Open('data/byte.tif')

    # Progress not implemented yet
    if gdal.GetConfigOption('GTIFF_DIRECT_IO') == 'YES' or \
       gdal.GetConfigOption('GTIFF_VIRTUAL_MEM_IO') == 'YES':
        return 'skip'

    # Test RasterBand.ReadRaster
    tab = [ 0, True ]
    data = ds.GetRasterBand(1).ReadRaster(resample_alg = gdal.GRIORA_NearestNeighbour,
                                          callback = rasterio_8_progress_callback,
                                          callback_data = tab)
    l = len(data)
    if l != 400:
        gdaltest.post_reason('did not read expected band data via ReadRaster()')
        return 'fail'
    if abs(tab[0] - 1) > 1e-5 or not tab[1]:
        gdaltest.post_reason('failure')
        return 'fail'

    # Test interruption
    tab = [ 0 ]
    data = ds.GetRasterBand(1).ReadRaster(resample_alg = gdal.GRIORA_NearestNeighbour,
                                          callback = rasterio_8_progress_interrupt_callback,
                                          callback_data = tab)
    if data is not None:
        gdaltest.post_reason('failure')
        return 'fail'
    if tab[0] < 0.50:
        gdaltest.post_reason('failure')
        return 'fail'

    # Test RasterBand.ReadRaster with type change
    tab = [ 0, True ]
    data = ds.GetRasterBand(1).ReadRaster(buf_type = gdal.GDT_Int16,
                                          callback = rasterio_8_progress_callback,
                                          callback_data = tab)
    if data is None:
        gdaltest.post_reason('did not read expected band data via ReadRaster()')
        return 'fail'
    if abs(tab[0] - 1) > 1e-5 or not tab[1]:
        gdaltest.post_reason('failure')
        return 'fail'

    # Same with interruption
    tab = [ 0 ]
    data = ds.GetRasterBand(1).ReadRaster(buf_type = gdal.GDT_Int16,
                                          callback = rasterio_8_progress_interrupt_callback,
                                          callback_data = tab)
    if data is not None or tab[0] < 0.50:
        gdaltest.post_reason('failure')
        return 'fail'

    # Test RasterBand.ReadRaster with resampling
    tab = [ 0, True ]
    data = ds.GetRasterBand(1).ReadRaster(buf_xsize = 40,
                                          callback = rasterio_8_progress_callback,
                                          callback_data = tab)
    if data is None:
        gdaltest.post_reason('did not read expected band data via ReadRaster()')
        return 'fail'
    if abs(tab[0] - 1) > 1e-5 or not tab[1]:
        gdaltest.post_reason('failure')
        return 'fail'

    # Same with interruption
    tab = [ 0 ]
    data = ds.GetRasterBand(1).ReadRaster(buf_xsize = 40,
                                          callback = rasterio_8_progress_interrupt_callback,
                                          callback_data = tab)
    if data is not None or tab[0] < 0.50:
        gdaltest.post_reason('failure')
        return 'fail'

    # Test Dataset.ReadRaster
    tab = [ 0, True ]
    data = ds.ReadRaster(resample_alg = gdal.GRIORA_NearestNeighbour,
                         callback = rasterio_8_progress_callback,
                         callback_data = tab)
    l = len(data)
    if l != 400:
        gdaltest.post_reason('did not read expected dataset data via ReadRaster()')
        return 'fail'
    if abs(tab[0] - 1) > 1e-5 or not tab[1]:
        gdaltest.post_reason('failure')
        return 'fail'

    ds = None

    # Test Dataset.ReadRaster on a multi band file, with INTERLEAVE=BAND
    ds = gdal.Open('data/rgbsmall.tif')
    last_pct = [ 0 ]
    data = ds.ReadRaster(resample_alg = gdal.GRIORA_NearestNeighbour,
                         callback = rasterio_8_progress_callback_2,
                         callback_data = last_pct)
    if data is None or abs(last_pct[0] - 1.0) > 1e-5:
        gdaltest.post_reason('failure')
        return 'fail'

    # Same with interruption
    tab = [ 0 ]
    data = ds.ReadRaster(callback = rasterio_8_progress_interrupt_callback,
                         callback_data = tab)
    if data is not None or tab[0] < 0.50:
        gdaltest.post_reason('failure')
        return 'fail'

    ds = None

    # Test Dataset.ReadRaster on a multi band file, with INTERLEAVE=PIXEL
    ds = gdal.Open('data/rgbsmall_cmyk.tif')
    last_pct = [ 0 ]
    data = ds.ReadRaster(resample_alg = gdal.GRIORA_NearestNeighbour,
                         callback = rasterio_8_progress_callback_2,
                         callback_data = last_pct)
    if data is None or abs(last_pct[0] - 1.0) > 1e-5:
        gdaltest.post_reason('failure')
        return 'fail'

    # Same with interruption
    tab = [ 0 ]
    data = ds.ReadRaster(callback = rasterio_8_progress_interrupt_callback,
                         callback_data = tab)
    if data is not None or tab[0] < 0.50:
        gdaltest.post_reason('failure')
        return 'fail'

    return 'success'
###############################################################################
# Test resampling algorithm of ReadRaster()
def rasterio_9_progress_callback(pct, message, user_data):
    """Monotonicity-checking callback with an optional stop threshold.

    user_data is [last_pct, stop_at]; returns 0 (abort) when progress
    goes backwards, or when stop_at is set and reached; 1 otherwise.
    """
    last, stop_at = user_data[0], user_data[1]
    if pct < last:
        print('Got %f, last pct was %f' % (pct, last))
        return 0
    user_data[0] = pct
    if stop_at is not None and pct >= stop_at:
        return 0
    return 1 # 1 to continue, 0 to stop
def rasterio_9_checksum(data, buf_xsize, buf_ysize, data_type = gdal.GDT_Byte):
    # Helper: compute the GDAL checksum of a raw buffer by loading it
    # into a throwaway single-band in-memory dataset.
    ds = gdal.GetDriverByName('MEM').Create('', buf_xsize, buf_ysize, 1)
    ds.GetRasterBand(1).WriteRaster(0,0,buf_xsize,buf_ysize,data, buf_type = data_type)
    cs = ds.GetRasterBand(1).Checksum()
    return cs
def rasterio_9():
    """Test resampling algorithms of ReadRaster() against known checksums.

    Each expected checksum was obtained with the gdal_translate command
    quoted next to it.
    """
    ds = gdal.Open('data/byte.tif')

    # Test RasterBand.ReadRaster, with Bilinear
    tab = [ 0, None ]
    data = ds.GetRasterBand(1).ReadRaster(buf_type = gdal.GDT_Int16,
                                          buf_xsize = 10,
                                          buf_ysize = 10,
                                          resample_alg = gdal.GRIORA_Bilinear,
                                          callback = rasterio_9_progress_callback,
                                          callback_data = tab)
    if data is None:
        gdaltest.post_reason('failure')
        return 'fail'
    cs = rasterio_9_checksum(data, 10, 10, data_type = gdal.GDT_Int16)
    if cs != 1211: # checksum of gdal_translate data/byte.tif out.tif -outsize 10 10 -r BILINEAR
        gdaltest.post_reason('failure')
        print(cs)
        return 'fail'
    if abs(tab[0] - 1.0) > 1e-5:
        gdaltest.post_reason('failure')
        return 'fail'

    # Test RasterBand.ReadRaster, with Lanczos
    tab = [ 0, None ]
    data = ds.GetRasterBand(1).ReadRaster(buf_xsize = 10,
                                          buf_ysize = 10,
                                          resample_alg = gdal.GRIORA_Lanczos,
                                          callback = rasterio_9_progress_callback,
                                          callback_data = tab)
    if data is None:
        gdaltest.post_reason('failure')
        return 'fail'
    cs = rasterio_9_checksum(data, 10, 10)
    if cs != 1154: # checksum of gdal_translate data/byte.tif out.tif -outsize 10 10 -r LANCZOS
        gdaltest.post_reason('failure')
        print(cs)
        return 'fail'
    if abs(tab[0] - 1.0) > 1e-5:
        gdaltest.post_reason('failure')
        return 'fail'

    # Test RasterBand.ReadRaster, with Bilinear and UInt16 data type
    src_ds_uint16 = gdal.Open('data/uint16.tif')
    tab = [ 0, None ]
    data = src_ds_uint16.GetRasterBand(1).ReadRaster(buf_type = gdal.GDT_UInt16,
                                                     buf_xsize = 10,
                                                     buf_ysize = 10,
                                                     resample_alg = gdal.GRIORA_Bilinear,
                                                     callback = rasterio_9_progress_callback,
                                                     callback_data = tab)
    if data is None:
        gdaltest.post_reason('failure')
        return 'fail'
    cs = rasterio_9_checksum(data, 10, 10, data_type = gdal.GDT_UInt16)
    if cs != 1211: # checksum of gdal_translate data/byte.tif out.tif -outsize 10 10 -r BILINEAR
        gdaltest.post_reason('failure')
        print(cs)
        return 'fail'
    if abs(tab[0] - 1.0) > 1e-5:
        gdaltest.post_reason('failure')
        return 'fail'

    # Test RasterBand.ReadRaster, with Bilinear on Complex, thus using warp API
    tab = [ 0, None ]
    complex_ds = gdal.GetDriverByName('MEM').Create('', 20, 20, 1, gdal.GDT_CInt16)
    complex_ds.GetRasterBand(1).WriteRaster(0,0,20,20, ds.GetRasterBand(1).ReadRaster(), buf_type = gdal.GDT_Byte)
    data = complex_ds.GetRasterBand(1).ReadRaster(buf_xsize = 10,
                                                  buf_ysize = 10,
                                                  resample_alg = gdal.GRIORA_Bilinear,
                                                  callback = rasterio_9_progress_callback,
                                                  callback_data = tab)
    if data is None:
        gdaltest.post_reason('failure')
        return 'fail'
    cs = rasterio_9_checksum(data, 10, 10, data_type = gdal.GDT_CInt16)
    if cs != 1211: # checksum of gdal_translate data/byte.tif out.tif -outsize 10 10 -r BILINEAR
        gdaltest.post_reason('failure')
        print(cs)
        return 'fail'
    if abs(tab[0] - 1.0) > 1e-5:
        gdaltest.post_reason('failure')
        return 'fail'

    # Test interruption
    tab = [ 0, 0.5 ]
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    data = ds.GetRasterBand(1).ReadRaster(buf_xsize = 10,
                                          buf_ysize = 10,
                                          resample_alg = gdal.GRIORA_Bilinear,
                                          callback = rasterio_9_progress_callback,
                                          callback_data = tab)
    gdal.PopErrorHandler()
    if data is not None:
        gdaltest.post_reason('failure')
        return 'fail'
    if tab[0] < 0.50:
        gdaltest.post_reason('failure')
        return 'fail'

    # Test RasterBand.ReadRaster, with Gauss, and downsampling
    tab = [ 0, None ]
    data = ds.GetRasterBand(1).ReadRaster(buf_xsize = 10,
                                          buf_ysize = 10,
                                          resample_alg = gdal.GRIORA_Gauss,
                                          callback = rasterio_9_progress_callback,
                                          callback_data = tab)
    if data is None:
        gdaltest.post_reason('failure')
        return 'fail'
    cs = rasterio_9_checksum(data, 10, 10)
    if cs != 1089: # checksum of gdal_translate data/byte.tif out.tif -outsize 10 10 -r GAUSS
        gdaltest.post_reason('failure')
        print(cs)
        return 'fail'
    if abs(tab[0] - 1.0) > 1e-5:
        gdaltest.post_reason('failure')
        return 'fail'

    # Test RasterBand.ReadRaster, with Cubic, and downsampling
    tab = [ 0, None ]
    data = ds.GetRasterBand(1).ReadRaster(buf_xsize = 10,
                                          buf_ysize = 10,
                                          resample_alg = gdal.GRIORA_Cubic,
                                          callback = rasterio_9_progress_callback,
                                          callback_data = tab)
    if data is None:
        gdaltest.post_reason('failure')
        return 'fail'
    cs = rasterio_9_checksum(data, 10, 10)
    if cs != 1059: # checksum of gdal_translate data/byte.tif out.tif -outsize 10 10 -r CUBIC
        gdaltest.post_reason('failure')
        print(cs)
        return 'fail'
    if abs(tab[0] - 1.0) > 1e-5:
        gdaltest.post_reason('failure')
        return 'fail'

    # Test RasterBand.ReadRaster, with Cubic and supersampling
    tab = [ 0, None ]
    data = ds.GetRasterBand(1).ReadRaster(buf_xsize = 40,
                                          buf_ysize = 40,
                                          resample_alg = gdal.GRIORA_Cubic,
                                          callback = rasterio_9_progress_callback,
                                          callback_data = tab)
    if data is None:
        gdaltest.post_reason('failure')
        return 'fail'
    cs = rasterio_9_checksum(data, 40, 40)
    if cs != 19556: # checksum of gdal_translate data/byte.tif out.tif -outsize 40 40 -r CUBIC
        gdaltest.post_reason('failure')
        print(cs)
        return 'fail'
    if abs(tab[0] - 1.0) > 1e-5:
        gdaltest.post_reason('failure')
        return 'fail'

    # Test Dataset.ReadRaster, with Cubic and supersampling
    tab = [ 0, None ]
    data = ds.ReadRaster(buf_xsize = 40,
                         buf_ysize = 40,
                         resample_alg = gdal.GRIORA_CubicSpline,
                         callback = rasterio_9_progress_callback,
                         callback_data = tab)
    if data is None:
        gdaltest.post_reason('failure')
        return 'fail'
    cs = rasterio_9_checksum(data, 40, 40)
    if cs != 19041: # checksum of gdal_translate data/byte.tif out.tif -outsize 40 40 -r CUBICSPLINE
        gdaltest.post_reason('failure')
        print(cs)
        return 'fail'
    if abs(tab[0] - 1.0) > 1e-5:
        gdaltest.post_reason('failure')
        return 'fail'

    # Test Dataset.ReadRaster on a multi band file, with INTERLEAVE=PIXEL
    ds = gdal.Open('data/rgbsmall_cmyk.tif')
    tab = [ 0, None ]
    data = ds.ReadRaster(buf_xsize = 25,
                         buf_ysize = 25,
                         resample_alg = gdal.GRIORA_Cubic,
                         callback = rasterio_9_progress_callback,
                         callback_data = tab)
    if data is None:
        gdaltest.post_reason('failure')
        return 'fail'
    # check the first two bands independently from the interleaved buffer
    cs = rasterio_9_checksum(data[0:25*25], 25, 25)
    if cs != 5975: # checksum of gdal_translate data/rgbsmall_cmyk.tif out.tif -outsize 25 25 -r CUBIC
        gdaltest.post_reason('failure')
        print(cs)
        return 'fail'
    cs = rasterio_9_checksum(data[25*25:2*25*25], 25, 25)
    if cs != 6248: # checksum of gdal_translate data/rgbsmall_cmyk.tif out.tif -outsize 25 25 -r CUBIC
        gdaltest.post_reason('failure')
        print(cs)
        return 'fail'
    if abs(tab[0] - 1.0) > 1e-5:
        gdaltest.post_reason('failure')
        return 'fail'
    ds = None

    # Test Band.ReadRaster on a RGBA with parts fully opaque, and fully transparent and with huge upscaling
    ds = gdal.Open('data/stefan_full_rgba.png')
    tab = [ 0, None ]
    data = ds.GetRasterBand(1).ReadRaster(buf_xsize = 162 * 16,
                                          buf_ysize = 150 * 16,
                                          resample_alg = gdal.GRIORA_Cubic,
                                          callback = rasterio_9_progress_callback,
                                          callback_data = tab)
    if data is None:
        gdaltest.post_reason('failure')
        return 'fail'
    cs = rasterio_9_checksum(data, 162 * 16, 150 * 16)
    if cs != 30836: # checksum of gdal_translate data/stefan_full_rgba.png out.tif -outsize 1600% 1600% -r CUBIC
        gdaltest.post_reason('failure')
        print(cs)
        return 'fail'
    if abs(tab[0] - 1.0) > 1e-5:
        gdaltest.post_reason('failure')
        return 'fail'

    return 'success'
###############################################################################
# Test error when getting a block
def rasterio_10():
    """Test error propagation when getting a block fails (truncated file)."""
    ds = gdal.Open('data/byte_truncated.tif')

    # Plain read must report the error as None.
    gdal.PushErrorHandler()
    data = ds.GetRasterBand(1).ReadRaster()
    gdal.PopErrorHandler()
    if data is not None:
        gdaltest.post_reason('failure')
        return 'fail'

    # Change buffer type
    gdal.PushErrorHandler()
    data = ds.GetRasterBand(1).ReadRaster(buf_type = gdal.GDT_Int16)
    gdal.PopErrorHandler()
    if data is not None:
        gdaltest.post_reason('failure')
        return 'fail'

    # Resampling case
    gdal.PushErrorHandler()
    data = ds.GetRasterBand(1).ReadRaster(buf_xsize = 10,
                                          buf_ysize = 10)
    gdal.PopErrorHandler()
    if data is not None:
        gdaltest.post_reason('failure')
        return 'fail'

    return 'success'
###############################################################################
# Test cubic resampling and nbits
def rasterio_11():
    """Test cubic resampling overshoot clamping driven by the NBITS item."""
    # numpy bindings are optional; skip when unavailable
    try:
        from osgeo import gdalnumeric
        gdalnumeric.zeros
        import numpy
    except:
        return 'skip'

    mem_ds = gdal.GetDriverByName('MEM').Create('', 4, 3)
    mem_ds.GetRasterBand(1).WriteArray(numpy.array([[80,125,125,80],[80,125,125,80],[80,125,125,80]]))

    # A bit dummy
    mem_ds.GetRasterBand(1).SetMetadataItem('NBITS', '8', 'IMAGE_STRUCTURE')
    ar = mem_ds.GetRasterBand(1).ReadAsArray(0,0,4,3,8,3, resample_alg = gdal.GRIORA_Cubic)
    if ar.max() != 129:
        gdaltest.post_reason('failure')
        print(ar.max())
        return 'fail'

    # NBITS=7
    mem_ds.GetRasterBand(1).SetMetadataItem('NBITS', '7', 'IMAGE_STRUCTURE')
    ar = mem_ds.GetRasterBand(1).ReadAsArray(0,0,4,3,8,3, resample_alg = gdal.GRIORA_Cubic)
    # Would overshoot to 129 if NBITS was ignored
    if ar.max() != 127:
        gdaltest.post_reason('failure')
        print(ar.max())
        return 'fail'

    return 'success'
###############################################################################
# Test cubic resampling on dataset RasterIO with an alpha channel
def rasterio_12_progress_callback(pct, message, user_data):
    """Abort (return 0) when progress regresses; else record pct and go on."""
    previous = user_data[0]
    if pct >= previous:
        user_data[0] = pct
        return 1
    print('Got %f, last pct was %f' % (pct, previous))
    return 0
def rasterio_12():
    """Test cubic resampling on dataset RasterIO with an alpha channel."""
    # numpy bindings are optional; skip when unavailable
    try:
        from osgeo import gdalnumeric
        gdalnumeric.zeros
        import numpy
    except:
        return 'skip'

    # 3 gray bands + 1 alpha band, all with the same single bright pixel
    mem_ds = gdal.GetDriverByName('MEM').Create('', 4, 3, 4)
    for i in range(3):
        mem_ds.GetRasterBand(i+1).SetColorInterpretation(gdal.GCI_GrayIndex)
    mem_ds.GetRasterBand(4).SetColorInterpretation(gdal.GCI_AlphaBand)
    for i in range(4):
        mem_ds.GetRasterBand(i+1).WriteArray(numpy.array([[0,0,0,0],[0,255,0,0],[0,0,0,0]]))

    tab = [ 0 ]
    ar_ds = mem_ds.ReadAsArray(0,0,4,3,buf_xsize = 8, buf_ysize = 3, resample_alg = gdal.GRIORA_Cubic, \
                               callback = rasterio_12_progress_callback, \
                               callback_data = tab)
    if tab[0] != 1.0:
        gdaltest.post_reason('failure')
        print(tab)
        return 'fail'

    # Re-reading without a callback must give the same result.
    ar_ds2 = mem_ds.ReadAsArray(0,0,4,3,buf_xsize = 8, buf_ysize = 3, resample_alg = gdal.GRIORA_Cubic)
    if not numpy.array_equal(ar_ds, ar_ds2):
        gdaltest.post_reason('failure')
        print(ar_ds)
        print(ar_ds2)
        return 'fail'

    ar_bands = [mem_ds.GetRasterBand(i+1).ReadAsArray(0,0,4,3,buf_xsize = 8, buf_ysize = 3, resample_alg = gdal.GRIORA_Cubic) for i in range(4) ]

    # Results of band or dataset RasterIO should be the same
    for i in range(4):
        if not numpy.array_equal(ar_ds[i],ar_bands[i]):
            gdaltest.post_reason('failure')
            print(ar_ds)
            print(ar_bands[i])
            return 'fail'

    # First, second and third band should have identical content
    if not numpy.array_equal(ar_ds[0],ar_ds[1]):
        gdaltest.post_reason('failure')
        print(ar_ds[0])
        print(ar_ds[1])
        return 'fail'

    # Alpha band should be different
    if numpy.array_equal(ar_ds[0],ar_ds[3]):
        gdaltest.post_reason('failure')
        print(ar_ds[0])
        print(ar_ds[3])
        return 'fail'

    return 'success'
# Registry of test functions run by the gdaltest framework, in order.
gdaltest_list = [
    rasterio_1,
    rasterio_2,
    rasterio_3,
    rasterio_4,
    rasterio_5,
    rasterio_6,
    rasterio_7,
    rasterio_8,
    rasterio_9,
    rasterio_10,
    rasterio_11,
    rasterio_12
    ]
if __name__ == '__main__':
    # Standalone invocation: run all registered tests and print a summary.
    gdaltest.setup_run( 'rasterio' )
    gdaltest.run_tests( gdaltest_list )
    gdaltest.summarize()
|
nextgis-extra/tests
|
lib_gdal/gcore/rasterio.py
|
Python
|
gpl-2.0
| 33,068
|
# This file is part of Merlin/Arthur.
# Merlin/Arthur is the Copyright (C)2009,2010 of Elliot Rosemarine.
# Individual portions may be copyright by individual contributors, and
# are included in this collective work with permission of the copyright
# owners.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from sqlalchemy.sql import asc, desc
from Core.db import session
from Core.maps import Alliance
from Arthur.context import menu, render
from Arthur.loadable import loadable, load
@menu("Rankings", "Alliances")
@load
class alliances(loadable):
    """Alliance rankings page: 50 alliances per page, sortable by rank,
    ratio, member count and the various growth columns."""

    def execute(self, request, user, page="1", sort="score"):
        """Render one page of the alliance rankings.

        page -- 1-based page number (arrives as a string from the URL)
        sort -- sort key; unknown keys fall back to "score"
        """
        page = int(page)
        offset = (page - 1)*50
        # Map each sort key to the ORDER BY terms it implies: rank columns
        # ascend (rank 1 first), raw ratio/growth columns descend.
        order = {"score" : (asc(Alliance.score_rank),),
                 "size" : (asc(Alliance.size_rank),),
                 "ratio" : (desc(Alliance.ratio),),
                 "avg_score" : (asc(Alliance.score_avg_rank),),
                 "avg_size" : (asc(Alliance.size_avg_rank),),
                 "members" : (asc(Alliance.members_rank),),
                 "score_growth" : (desc(Alliance.score_growth),),
                 "size_growth" : (desc(Alliance.size_growth),),
                 "avg_score_growth" : (desc(Alliance.score_avg_growth),),
                 "avg_size_growth" : (desc(Alliance.size_avg_growth),),
                 "score_growth_pc" : (desc(Alliance.score_growth_pc),),
                 "size_growth_pc" : (desc(Alliance.size_growth_pc),),
                 "avg_score_growth_pc" : (desc(Alliance.score_avg_growth_pc),),
                 "avg_size_growth_pc" : (desc(Alliance.size_avg_growth_pc),),
                 }
        # Guard against hand-edited URLs; membership test directly on the
        # dict (no .keys() needed).
        if sort not in order:
            sort = "score"
        # Membership is now guaranteed, so index directly; use a distinct
        # name instead of rebinding `order` to a different type.
        ordering = order[sort]
        Q = session.query(Alliance)
        Q = Q.filter(Alliance.active == True)
        count = Q.count()
        # Page count rounded up (Python 2 integer division).
        pages = count/50 + int(count%50 > 0)
        pages = range(1, 1+pages)
        for o in ordering:
            Q = Q.order_by(o)
        Q = Q.limit(50).offset(offset)
        return render("alliances.tpl", request, alliances=Q.all(), offset=offset, pages=pages, page=page, sort=sort)
|
ellonweb/merlin
|
Arthur/views/alliance/alliances.py
|
Python
|
gpl-2.0
| 2,810
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2008-2011 Zuza Software Foundation
#
# This file is part of Virtaal.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import gobject
import gtk
from gtk import gdk
from translate.storage.placeables import StringElem
from virtaal.common import GObjectWrapper
from basecontroller import BaseController
class UndoController(BaseController):
    """Contains "undo" logic."""
    __gtype_name__ = 'UndoController'
    # INITIALIZERS #
    def __init__(self, main_controller):
        """Constructor.
            @type main_controller: virtaal.controllers.MainController"""
        GObjectWrapper.__init__(self)
        self.main_controller = main_controller
        # Publish ourselves on the main controller so other components can
        # reach the undo machinery.
        self.main_controller.undo_controller = self
        self.unit_controller = self.main_controller.store_controller.unit_controller
        # When False, all @if_enabled handlers below become no-ops.
        self.enabled = True
        # Imported here rather than at module level -- presumably to avoid a
        # circular import; TODO confirm.
        from virtaal.models.undomodel import UndoModel
        self.model = UndoModel(self)
        self._setup_key_bindings()
        self._connect_undo_signals()
    def _connect_undo_signals(self):
        # First connect to the unit controller
        self.unit_controller.connect('unit-delete-text', self._on_unit_delete_text)
        self.unit_controller.connect('unit-insert-text', self._on_unit_insert_text)
        self.main_controller.store_controller.connect('store-closed', self._on_store_loaded_closed)
        self.main_controller.store_controller.connect('store-loaded', self._on_store_loaded_closed)
        # Hook the Edit->Undo menu item up to the accelerator and handler.
        mainview = self.main_controller.view
        mainview.gui.get_widget('menu_edit').set_accel_group(self.accel_group)
        self.mnu_undo = mainview.gui.get_widget('mnu_undo')
        self.mnu_undo.set_accel_path('<Virtaal>/Edit/Undo')
        self.mnu_undo.connect('activate', self._on_undo_activated)
    def _setup_key_bindings(self):
        """Setup Gtk+ key bindings (accelerators).
        This method *may* need to be moved into a view object, but if it is,
        it will be the only functionality in such a class. Therefore, it
        is done here. At least for now."""
        gtk.accel_map_add_entry("<Virtaal>/Edit/Undo", gtk.keysyms.z, gdk.CONTROL_MASK)
        self.accel_group = gtk.AccelGroup()
        # The following line was commented out, because it caused a double undo when pressing
        # Ctrl+Z, but only one if done through the menu item. This way it all works as expected.
        #self.accel_group.connect_by_path("<Virtaal>/Edit/Undo", self._on_undo_activated)
        mainview = self.main_controller.view # FIXME: Is this acceptable?
        mainview.add_accel_group(self.accel_group)
    # DECORATORS #
    def if_enabled(method):
        # Guard decorator: the wrapped handler only runs while self.enabled
        # is True (otherwise it silently returns None).
        def enabled_method(self, *args, **kwargs):
            if self.enabled:
                return method(self, *args, **kwargs)
        return enabled_method
    # METHODS #
    def disable(self):
        # Suspend undo recording/handling (see if_enabled).
        self.enabled = False
    def enable(self):
        # Resume undo recording/handling.
        self.enabled = True
    def push_current_text(self, textbox):
        """Save the current text in the given (target) text box on the undo stack."""
        # Snapshot the current element so later edits don't mutate it.
        current_text = textbox.elem.copy()
        unitview = self.unit_controller.view
        curpos = textbox.get_cursor_position()
        targetn = unitview.targets.index(textbox)
        def undo_set_text(unit):
            # Restore the snapshot's children into the live element.
            textbox.elem.sub = current_text.sub
        self.model.push({
            'action': undo_set_text,
            'cursorpos': curpos,
            'desc': 'Set target %d text to %s' % (targetn, repr(current_text)),
            'targetn': targetn,
            'unit': unitview.unit
        })
    def remove_blank_undo(self):
        """Removes items from the top of the undo stack with no C{value} or
        C{action} values. The "top of the stack" is one of the top 2 items.
        This is a convenience method that can be used by any code that
        directly sets unit values."""
        if not self.model.undo_stack:
            return
        head = self.model.head()
        # NOTE(review): the trailing "or True" makes this condition (and the
        # one below) unconditionally true, so the head item is always popped
        # and the "blank only" check promised by the docstring never
        # applies -- confirm whether this is intentional.
        if 'action' in head and not head['action'] or True:
            self.model.pop(permanent=True)
            return
        item = self.model.peek(offset=-1)
        if 'action' in item and not item['action'] or True:
            self.model.index -= 1
            self.model.undo_stack.remove(item)
    def record_stop(self):
        # Delegate undo-grouping control to the model.
        self.model.record_stop()
    def record_start(self):
        self.model.record_start()
    def _disable_unit_signals(self):
        """Disable all signals emitted by the unit view.
        This should always be followed, as soon as possible, by
        C{self._enable_unit_signals()}."""
        self.unit_controller.view.disable_signals()
    def _enable_unit_signals(self):
        """Enable all signals emitted by the unit view.
        This should always follow, as soon as possible, after a call to
        C{self._disable_unit_signals()}."""
        self.unit_controller.view.enable_signals()
    def _perform_undo(self, undo_info):
        # Make sure the unit the undo applies to is the selected one.
        self._select_unit(undo_info['unit'])
        #if 'desc' in undo_info:
        #    logging.debug('Description: %s' % (undo_info['desc']))
        # Run the undo action with view signals muted, so the edit does not
        # itself get recorded as a new undoable event.
        self._disable_unit_signals()
        undo_info['action'](undo_info['unit'])
        self._enable_unit_signals()
        textbox = self.unit_controller.view.targets[undo_info['targetn']]
        def refresh():
            textbox.refresh_cursor_pos = undo_info['cursorpos']
            textbox.refresh()
        # Defer the visual refresh until the GTK main loop is idle.
        gobject.idle_add(refresh)
    def _select_unit(self, unit):
        """Select the given unit in the store view.
        This is to select the unit where the undo-action took place.
        @type unit: translate.storage.base.TranslationUnit
        @param unit: The unit to select in the store view."""
        self.main_controller.select_unit(unit, force=True)
    # EVENT HANDLERS #
    def _on_store_loaded_closed(self, storecontroller):
        # Undo is only available while a store is open; the stack is cleared
        # on both load and close.
        if storecontroller.store is not None:
            self.mnu_undo.set_sensitive(True)
        else:
            self.mnu_undo.set_sensitive(False)
        self.model.clear()
    @if_enabled
    def _on_undo_activated(self, *args):
        undo_info = self.model.pop()
        if not undo_info:
            return
        # A list means a recorded group of edits: undo them newest-first.
        if isinstance(undo_info, list):
            for ui in reversed(undo_info):
                self._perform_undo(ui)
        else:
            self._perform_undo(undo_info)
    @if_enabled
    def _on_unit_delete_text(self, unit_controller, unit, deleted, parent, offset, cursor_pos, elem, target_num):
        # Push an undo action that re-inserts the deleted text.
        def undo_action(unit):
            #logging.debug('(undo) %s.insert(%d, "%s")' % (repr(elem), offset, deleted))
            if parent is None:
                # The whole element was replaced: restore its children.
                elem.sub = deleted.sub
                return
            if isinstance(deleted, StringElem):
                elem.insert(offset, deleted)
            elem.prune()
        desc = 'offset=%d, deleted="%s", parent=%s, cursor_pos=%d, elem=%s' % (offset, repr(deleted), repr(parent), cursor_pos, repr(elem))
        self.model.push({
            'action': undo_action,
            'cursorpos': cursor_pos,
            'desc': desc,
            'targetn': target_num,
            'unit': unit,
        })
    @if_enabled
    def _on_unit_insert_text(self, unit_controller, unit, ins_text, offset, elem, target_num):
        #logging.debug('_on_unit_insert_text(ins_text="%r", offset=%d, elem=%s, target_n=%d)' % (ins_text, offset, repr(elem), target_num))
        # Push an undo action that deletes the just-inserted text again.
        len_ins_text = len(ins_text) # remember, since ins_text might change
        def undo_action(unit):
            if isinstance(ins_text, StringElem) and hasattr(ins_text, 'gui_info') and ins_text.gui_info.widgets:
                # Only for elements with representation widgets
                elem.delete_elem(ins_text)
            else:
                tree_offset = elem.gui_info.gui_to_tree_index(offset)
                #logging.debug('(undo) %s.delete_range(%d, %d)' % (repr(elem), tree_offset, tree_offset+len_ins_text))
                elem.delete_range(tree_offset, tree_offset+len_ins_text)
            elem.prune()
        desc = 'ins_text="%s", offset=%d, elem=%s' % (ins_text, offset, repr(elem))
        self.model.push({
            'action': undo_action,
            'desc': desc,
            'unit': unit,
            'targetn': target_num,
            'cursorpos': offset
        })
|
elric/virtaal-debian-snapshots
|
virtaal/controllers/undocontroller.py
|
Python
|
gpl-2.0
| 9,007
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
from __future__ import unicode_literals, division, absolute_import, print_function
from .compatibility_utils import text_type
from . import unipath
from .unipath import pathof
# Global debug switch for the unpack tools.
DUMP = False
""" Set to True to dump all possible information. """
import os
import re
# note: re requites the pattern to be the exact same type as the data to be searched in python3
# but u"" is not allowed for the pattern itself only b""
import zipfile
import binascii
from .mobi_utils import mangle_fonts
# Module-specific exception type; presumably raised on unpack failures
# elsewhere in the package (no raise site is visible in this file).
class unpackException(Exception):
    pass
class ZipInfo(zipfile.ZipInfo):
    """zipfile.ZipInfo variant accepting ``compress_type`` as a
    constructor keyword.

    The stock ZipInfo only exposes ``compress_type`` as an attribute;
    this subclass lets callers set it at construction time (used below
    to store the epub ``mimetype`` entry uncompressed).
    """
    def __init__(self, *args, **kwargs):
        # Pop our extra keyword before delegating, defaulting to None.
        # BUGFIX: the original only bound the local inside the 'if', so
        # constructing without compress_type raised an unbound-local
        # NameError on the assignment below.
        compress_type = kwargs.pop('compress_type', None)
        super(ZipInfo, self).__init__(*args, **kwargs)
        if compress_type is not None:
            self.compress_type = compress_type
class fileNames:
    """Creates and tracks the output directory layout for an unpacked
    book, and assembles the final epub archive from the mobi8 tree."""

    def __init__(self, infile, outdir):
        self.infile = infile
        self.outdir = outdir
        if not unipath.exists(self.outdir):
            unipath.mkdir(self.outdir)
        # Old-style mobi7 content and its images live in their own subtree.
        self.mobi7dir = os.path.join(self.outdir,'mobi7')
        if not unipath.exists(self.mobi7dir):
            unipath.mkdir(self.mobi7dir)
        self.imgdir = os.path.join(self.mobi7dir, 'Images')
        if not unipath.exists(self.imgdir):
            unipath.mkdir(self.imgdir)
        # High-resolution images are kept separately.
        self.hdimgdir = os.path.join(self.outdir,'HDImages')
        if not unipath.exists(self.hdimgdir):
            unipath.mkdir(self.hdimgdir)
        # Base path (no extension) used to derive output file names.
        self.outbase = os.path.join(self.outdir, os.path.splitext(os.path.split(infile)[1])[0])

    def getInputFileBasename(self):
        """Return the input file's name without directory or extension."""
        return os.path.splitext(os.path.basename(self.infile))[0]

    def makeK8Struct(self):
        """Create the mobi8/META-INF/OEBPS directory skeleton for epub output."""
        self.k8dir = os.path.join(self.outdir,'mobi8')
        if not unipath.exists(self.k8dir):
            unipath.mkdir(self.k8dir)
        self.k8metainf = os.path.join(self.k8dir,'META-INF')
        if not unipath.exists(self.k8metainf):
            unipath.mkdir(self.k8metainf)
        self.k8oebps = os.path.join(self.k8dir,'OEBPS')
        if not unipath.exists(self.k8oebps):
            unipath.mkdir(self.k8oebps)
        self.k8images = os.path.join(self.k8oebps,'Images')
        if not unipath.exists(self.k8images):
            unipath.mkdir(self.k8images)
        self.k8fonts = os.path.join(self.k8oebps,'Fonts')
        if not unipath.exists(self.k8fonts):
            unipath.mkdir(self.k8fonts)
        self.k8styles = os.path.join(self.k8oebps,'Styles')
        if not unipath.exists(self.k8styles):
            unipath.mkdir(self.k8styles)
        self.k8text = os.path.join(self.k8oebps,'Text')
        if not unipath.exists(self.k8text):
            unipath.mkdir(self.k8text)

    # recursive zip creation support routine
    def zipUpDir(self, myzip, tdir, localname):
        """Recursively add directory *localname* (relative to *tdir*) to the
        open ZipFile *myzip*, preserving relative archive paths."""
        currentdir = tdir
        if localname != "":
            currentdir = os.path.join(currentdir,localname)
        # Renamed loop variables so the builtins 'list'/'file' are not shadowed.
        for entry in unipath.listdir(currentdir):
            localfilePath = os.path.join(localname, entry)
            realfilePath = os.path.join(currentdir, entry)
            if unipath.isfile(realfilePath):
                myzip.write(pathof(realfilePath), pathof(localfilePath), zipfile.ZIP_DEFLATED)
            elif unipath.isdir(realfilePath):
                self.zipUpDir(myzip, tdir, localfilePath)

    def makeEPUB(self, usedmap, obfuscate_data, uid):
        """Assemble the final .epub from the mobi8 tree.

        usedmap maps image/font file names to 'used' markers;
        obfuscate_data lists font names needing Adobe obfuscation;
        uid is the epub's unique id (basis of the obfuscation key).
        """
        bname = os.path.join(self.k8dir, self.getInputFileBasename() + '.epub')
        # Create an encryption key for Adobe font obfuscation
        # based on the epub's uid
        if isinstance(uid,text_type):
            uid = uid.encode('ascii')
        if obfuscate_data:
            key = re.sub(br'[^a-fA-F0-9]', b'', uid)
            key = binascii.unhexlify((key + key)[:32])
        # copy over all images and fonts that are actually used in the ebook
        # and remove all font files from mobi7 since not supported
        imgnames = unipath.listdir(self.imgdir)
        for name in imgnames:
            if usedmap.get(name,'not used') == 'used':
                filein = os.path.join(self.imgdir,name)
                # Fonts (and failed font extractions) go to Fonts/, the rest
                # to Images/.
                if name.endswith(".ttf") or name.endswith(".otf") or name.endswith(".failed"):
                    fileout = os.path.join(self.k8fonts,name)
                else:
                    fileout = os.path.join(self.k8images,name)
                with open(pathof(filein),'rb') as f:
                    data = f.read()
                if obfuscate_data:
                    if name in obfuscate_data:
                        data = mangle_fonts(key, data)
                # BUGFIX: write through a context manager so the output file
                # handle is closed promptly (the original leaked it until GC).
                with open(pathof(fileout),'wb') as f:
                    f.write(data)
                if name.endswith(".ttf") or name.endswith(".otf"):
                    os.remove(pathof(filein))
        # opf file name hard coded to "content.opf"
        container = '<?xml version="1.0" encoding="UTF-8"?>\n'
        container += '<container version="1.0" xmlns="urn:oasis:names:tc:opendocument:xmlns:container">\n'
        container += ' <rootfiles>\n'
        container += '<rootfile full-path="OEBPS/content.opf" media-type="application/oebps-package+xml"/>'
        container += ' </rootfiles>\n</container>\n'
        fileout = os.path.join(self.k8metainf,'container.xml')
        with open(pathof(fileout),'wb') as f:
            f.write(container.encode('utf-8'))
        # Write the Adobe font-obfuscation manifest if any fonts were mangled.
        if obfuscate_data:
            encryption = '<encryption xmlns="urn:oasis:names:tc:opendocument:xmlns:container" \
xmlns:enc="http://www.w3.org/2001/04/xmlenc#" xmlns:deenc="http://ns.adobe.com/digitaleditions/enc">\n'
            for font in obfuscate_data:
                encryption += ' <enc:EncryptedData>\n'
                encryption += ' <enc:EncryptionMethod Algorithm="http://ns.adobe.com/pdf/enc#RC"/>\n'
                encryption += ' <enc:CipherData>\n'
                encryption += ' <enc:CipherReference URI="OEBPS/Fonts/' + font + '"/>\n'
                encryption += ' </enc:CipherData>\n'
                encryption += ' </enc:EncryptedData>\n'
            encryption += '</encryption>\n'
            fileout = os.path.join(self.k8metainf,'encryption.xml')
            with open(pathof(fileout),'wb') as f:
                f.write(encryption.encode('utf-8'))
        # ready to build epub
        self.outzip = zipfile.ZipFile(pathof(bname), 'w')
        # add the mimetype file uncompressed
        mimetype = b'application/epub+zip'
        fileout = os.path.join(self.k8dir,'mimetype')
        with open(pathof(fileout),'wb') as f:
            f.write(mimetype)
        nzinfo = ZipInfo('mimetype', compress_type=zipfile.ZIP_STORED)
        self.outzip.writestr(nzinfo, mimetype)
        self.zipUpDir(self.outzip,self.k8dir,'META-INF')
        self.zipUpDir(self.outzip,self.k8dir,'OEBPS')
        self.outzip.close()
|
robwebset/script.ebooks
|
resources/lib/kindleunpack/unpack_structure.py
|
Python
|
gpl-2.0
| 7,089
|
#
# Copyright 2015 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
import threading
from collections import namedtuple
# Outcome of one call made by tmap(): ``succeeded`` is a bool; ``value``
# holds either the function's return value or the raised exception.
Result = namedtuple("Result", ["succeeded", "value"])


def tmap(func, iterable):
    """Apply *func* to each item of *iterable*, one thread per item.

    Returns a list of Result tuples in input order: Result(True, retval)
    for calls that returned normally, Result(False, exception) for calls
    that raised. All worker threads are joined before returning.
    """
    items = list(iterable)
    outcomes = [None] * len(items)

    def run(index, value):
        # Record the outcome at the slot matching the input position.
        try:
            outcomes[index] = Result(True, func(value))
        except Exception as exc:
            outcomes[index] = Result(False, exc)

    workers = [
        threading.Thread(target=run, args=(index, value))
        for index, value in enumerate(items)
    ]
    for worker in workers:
        worker.daemon = True
        worker.start()
    for worker in workers:
        worker.join()
    return outcomes
|
kvaps/vdsm
|
lib/vdsm/concurrent.py
|
Python
|
gpl-2.0
| 1,455
|
# -*- coding: utf-8 -*-
# ***************************************************************************
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import pygears
# Re-export the underlying pygears package's version as this package's own.
__version__ = pygears.__version__
|
looooo/FCGear
|
freecad/gears/__init__.py
|
Python
|
gpl-2.0
| 1,556
|
"""
TODO:
- Create my own ID system
- Transparent
- Status Cancelled
- source.url (url to paper) - impossible
"""
from __future__ import print_function
import requests
import pandas as pd
from bs4 import BeautifulSoup
import httplib2
import os
import sys
import time
from apiclient import discovery
from oauth2client.service_account import ServiceAccountCredentials
# Run relative to the script's own directory so the local credential
# files (client_secret.json / service-account keyfile) resolve.
path = os.path.dirname(sys.argv[0])
if path == '':
    path = '.'
os.chdir(path)
# Target Google Calendar id (the commented-out id below is an alternate).
cal_id = 'murj9blvn8bq5sc33khkh568d8@group.calendar.google.com'
# cal_id = 'lo63qeln25u2niq0eog0v2uvs0@group.calendar.google.com'
tz = 'Canada/Eastern' # Toronto timezone
# OAuth scope and application identifiers for the Calendar API.
SCOPES = 'https://www.googleapis.com/auth/calendar'
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'EconSeminars'
def ask_yn():
    """Prompt for confirmation: 'y' or an empty answer returns, 'n' exits.

    Any other answer re-prompts. Uses an explicit loop instead of the
    original unbounded recursion, so repeated bad input cannot exhaust
    the stack. (raw_input: this script targets Python 2.)
    """
    while True:
        response = raw_input('Is this what you want? [y]/n : ')
        if (response == 'y') or (response == ''):
            return
        elif response == 'n':
            sys.exit()
def get_credentials_sa():
    """Load Google service-account credentials from the local JSON
    keyfile, scoped for Calendar API access."""
    return ServiceAccountCredentials.from_json_keyfile_name(
        'gspread-0e66a6d8d261.json', scopes=SCOPES)
def parse_seminar(seminar_html):
    """
    Parse a BeautifulSoup object representing a table (a seminar).

    Returns a dict with the keys date, time, field, presenter, title,
    location and organizer; a cancelled seminar gets its presenter
    prefixed with 'CANCELLED - '.
    """
    cells = seminar_html.find_all('td')
    out = {
        'date': cells[0].text.strip(),
        'time': cells[1].text.strip(),
        'field': cells[2].text.strip(),
        'presenter': cells[3].text.strip(),
        'title': cells[4].text.strip(),
        # The last cells are positional from the end: location, organizer.
        'location': cells[-3].text.strip(),
        # Drop the leading label word ("Organizer:") and rejoin the name.
        'organizer': ' '.join(cells[-2].text.split()[1:]),
    }
    if 'Cancelled' in out['presenter']:
        out['presenter'] = 'CANCELLED - ' + out['presenter']
    return out
def get_seminars():
    """Scrape the department seminar listing and return it as a DataFrame.

    Adds parsed 'start'/'end' timestamps plus their ISO-string versions
    ('starttime'/'endtime'), and keeps only seminars from 2016 onward.
    """
    url = 'https://www.economics.utoronto.ca/index.php/index/research/seminars'
    payload = {'dateRange': 'all', 'seriesId': 0}
    response = requests.get(url, params=payload)
    soup = BeautifulSoup(response.text, 'lxml')
    # Each seminar is rendered as a <table class="people">.
    rows = [parse_seminar(table) for table in soup.find_all('table', 'people')]
    df = pd.DataFrame(rows)
    # Normalize the en-dash to a plain hyphen so the range splits reliably.
    df['time'] = df['time'].str.replace(u'\u2013', '-')
    times = df['time'].str.split('-')
    df['start'] = pd.to_datetime(df['date'] + ' ' + times.str[0])
    df['end'] = pd.to_datetime(df['date'] + ' ' + times.str[1])
    df['starttime'] = df.start.map(lambda ts: ts.isoformat())
    df['endtime'] = df.end.map(lambda ts: ts.isoformat())
    return df[df.start.dt.year >= 2016]
def delete_event(cal, row):
    """
    Deletes an event from the calendar.
    args:
        cal - the API handler
        row - a dictionary-like object containing the key 'id'
              (the calendar event id to delete)
    """
    cal.events().delete(calendarId=cal_id, eventId=row['id']).execute()
def delete_all(cal):
    """
    Deletes every event from the calendar, page by page.
    args:
        cal - the API handler
    """
    page_token = None
    while True:
        eventsResult = cal.events().list(
            calendarId=cal_id, pageToken=page_token, maxResults=50).execute()
        events = eventsResult.get('items', [])
        for event in events:
            print(event['summary'])
            cal.events().delete(calendarId=cal_id, eventId=event['id']).execute()
            # Throttle deletions to stay under the API rate limit.
            time.sleep(0.5)
        # Follow the pagination cursor until the listing is exhausted.
        page_token = eventsResult.get('nextPageToken')
        if not page_token:
            break
def get_all_events(cal):
    """Return every event on the calendar as a list, following the
    API's pagination cursor until it is exhausted."""
    collected = []
    token = None
    while True:
        resp = cal.events().list(
            calendarId=cal_id, pageToken=token, maxResults=50).execute()
        collected.extend(resp.get('items', []))
        token = resp.get('nextPageToken')
        if not token:
            return collected
def add_event(cal, row):
    """
    Inserts an event in the calendar.
    args:
        cal - the API handler
        row - a dictionary-like object containing the key date, time, location,
              field, presenter, and title
    """
    # 'time' holds a "start-end" range; rebuild full datetimes with the date.
    pieces = row['time'].split('-')
    start_iso = pd.to_datetime(row['date'] + ' ' + pieces[0]).isoformat()
    end_iso = pd.to_datetime(row['date'] + ' ' + pieces[1]).isoformat()
    body = {
        'summary': row['presenter'] + ' - ' + row['field'],
        'location': row['location'],
        'description': row['title'],
        'start': {'dateTime': start_iso, 'timeZone': tz},
        'end': {'dateTime': end_iso, 'timeZone': tz},
        # Transparent events do not block time on attendees' calendars.
        'transparency': 'transparent',
    }
    response = cal.events().insert(calendarId=cal_id, body=body).execute()
    # Throttle insertions to stay under the API rate limit.
    time.sleep(0.5)
    return response
if __name__ == '__main__':
    # Scrape the current seminar schedule from the department site.
    df = get_seminars()
    # If there are changes, do them.
    credentials = get_credentials_sa()
    http = credentials.authorize(httplib2.Http())
    cal = discovery.build('calendar', 'v3', http=http)
    # Flatten the existing calendar events into the same columns as df.
    cal_events_pre = pd.DataFrame(get_all_events(cal))
    cal_events = pd.DataFrame()
    cal_events['title'] = cal_events_pre['description']
    cal_events['start'] = cal_events_pre.start.map(lambda x: x['dateTime'])
    cal_events['end'] = cal_events_pre.end.map(lambda x: x['dateTime'])
    # summary was built by add_event() as '<presenter> - <field>'; split it
    # back apart on the last ' - '.
    cal_events['presenter'] = cal_events_pre.summary.str.rsplit(' - ', 1).str[0]
    cal_events['field'] = cal_events_pre.summary.str.rsplit(' - ', 1).str[1]
    cal_events['location'] = cal_events_pre.location
    # Truncate to 19 chars (drop the timezone suffix) so the strings compare
    # equal to the scraped ISO timestamps.
    cal_events['starttime'] = cal_events.start.str[:19]
    cal_events['endtime'] = cal_events.end.str[:19]
    cal_events['id'] = cal_events_pre.id
    cols = ['title', 'starttime', 'endtime', 'field', 'presenter', 'location']
    # We compare the two datasets. If an entry has changed, was added, or was
    # deleted, it will be included in to_remove and/or to_add.
    diff = pd.merge(df, cal_events, on=cols, how='outer', indicator=True)
    to_remove = diff[diff['_merge'] == 'right_only']
    to_add = diff[diff['_merge'] == 'left_only']
    if (len(to_remove) != 0) or (len(to_add) != 0):
        print('Deleting {} seminars'.format(len(to_remove)))
        for key, row in to_remove.iterrows():
            delete_event(cal, row)
        print('Adding {} seminars'.format(len(to_add)))
        for key, row in to_add.iterrows():
            add_event(cal, row)
    print('Done!')
|
ma-schmidt/EconSeminars
|
econ_seminars.py
|
Python
|
gpl-2.0
| 6,548
|
#coding:utf-8
from redis import Redis as SyncRedis
from async_redis.redis_client import AsyncRedis
from async_redis.redis_encode import *
from py.common.convert import resolve_redis_url
class Redis(object):
    """Thin wrapper hiding the difference between the synchronous redis
    client and the project's AsyncRedis client."""
    def __init__(self, url, sync=True, host='localhost', port=6379, db=0, pwd=None):
        """
        :param url: redis://localhost:6379/0?pwd=xx
        """
        self.__is_sync = sync
        if url:
            # single db instance resolved from the URL
            self.__redis_ob = SyncRedis(*resolve_redis_url(url)) if sync else AsyncRedis(redis_uri=url)
        else:
            if sync:
                self.__redis_ob = SyncRedis(host, port, db, pwd)
            else:
                self.__redis_ob = AsyncRedis(redis_tuple=(host, port, db, pwd))
    def send_cmd(self, *args, **kwargs):
        """
        :param args: one redis command: cmd, key, value, ...
        :param kwargs: transaction switch, active_trans=True/False
        """
        if self.__is_sync:
            # Sync path: dispatch to the matching client method by name.
            return getattr(self.__redis_ob, args[0])(*args[1:])
        # Async path: look up the encoder function redis_<cmd> from this
        # module's globals and hand its output to the client.
        cmd = 'redis_' + args[0]
        return self.__redis_ob.invoke(globals().get(cmd)(*args[1:]), **kwargs)
    def send_multi_cmd(self, *args):
        """
        :param args: several redis commands, separated by ','.
            Each command is a tuple: (cmd, key, value, ...)
        """
        # NOTE(review): *args is always a tuple, so this isinstance check can
        # never be false; effectively only the non-empty guard applies.
        if isinstance(args, (list, tuple)):
            if not args:
                raise ValueError('multi_cmd args at least one')
            if self.__is_sync:
                # pipeline
                p = self.__redis_ob.pipeline()
                p.multi()
                for arg in args:
                    getattr(p, arg[0])(*arg[1:])
                resp = p.execute()
                # Unwrap single-command responses for convenience.
                if 1 == len(resp):
                    return resp[0]
                return resp
            # Async path: encode every command, then invoke them together.
            cmds = []
            for arg in args:
                cmd = 'redis_' + arg[0]
                func = globals().get(cmd)
                if func is None:
                    raise ValueError('AsyncRedis unsupport cmd:{0}'.format(cmd))
                _ = func(*arg[1:])
                cmds.append(_)
            return self.__redis_ob.invoke(*cmds)
|
fuwenqing0988/hanfeng
|
py/common/redis/redis_client.py
|
Python
|
gpl-2.0
| 2,099
|
# improb is a Python module for working with imprecise probabilities
# Copyright (c) 2008-2011, Matthias Troffaes
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Decision trees."""
from abc import ABCMeta, abstractmethod, abstractproperty
import cdd # NumberTypeable
import collections
import itertools
import numbers
from improb import PSpace, Event, Gamble
from improb._compat import OrderedDict
from improb.decision.opt import Opt
class Tree(collections.MutableMapping):
    """Abstract base class for decision trees.
    >>> pspace = PSpace("AB", "XY")
    >>> A = pspace.make_event("A", "XY", name="A")
    >>> B = pspace.make_event("B", "XY", name="B")
    >>> X = pspace.make_event("AB", "X", name="X")
    >>> Y = pspace.make_event("AB", "Y", name="Y")
    >>> t1 = Chance(pspace)
    >>> t1[A] = '1' # using strings for fractions
    >>> t1[B] = '2/11'
    >>> t2 = Chance(pspace)
    >>> t2[A] = '5/3'
    >>> t2[B] = '6'
    >>> t12 = Decision()
    >>> t12["d1"] = t1
    >>> t12["d2"] = t2
    >>> t3 = Chance(pspace)
    >>> t3[A] = '8'
    >>> t3[B] = '4.5'
    >>> t = Chance(pspace)
    >>> t[X] = t12
    >>> t[Y] = t3
    >>> print(t)
    O--X--#--d1--O--A--:1
    |     |     |
    |     |     B--:2/11
    |     |
    |     d2--O--A--:5/3
    |         |
    |         B--:6
    |
    Y--O--A--:8
       |
       B--:9/2
    >>> t.pspace
    PSpace([('A', 'X'), ('A', 'Y'), ('B', 'X'), ('B', 'Y')])
    >>> for gamble, normal_tree in t.get_normal_form():
    ...     print(gamble)
    ...     print('')
    ('A', 'X') : 1
    ('A', 'Y') : 8
    ('B', 'X') : 2/11
    ('B', 'Y') : 9/2
    <BLANKLINE>
    ('A', 'X') : 5/3
    ('A', 'Y') : 8
    ('B', 'X') : 6
    ('B', 'Y') : 9/2
    <BLANKLINE>
    >>> for gamble, normal_tree in t.get_normal_form():
    ...     print(normal_tree)
    ...     print('')
    O--X--#--d1--O--A--:1
    |            |
    |            B--:2/11
    |
    Y--O--A--:8
       |
       B--:9/2
    <BLANKLINE>
    O--X--#--d2--O--A--:5/3
    |            |
    |            B--:6
    |
    Y--O--A--:8
       |
       B--:9/2
    <BLANKLINE>
    """
    # Python 2 style abstract-base-class declaration.
    __metaclass__ = ABCMeta
    @abstractproperty
    def pspace(self):
        """The possibility space, or None if there are no chance nodes
        in the tree.
        """
        raise NotImplementedError
    def get_number_type(self):
        """Get the number type of the first reward node in the tree.
        :return: The number type.
        :rtype: :class:`str`
        """
        # Implicitly returns None for a tree with no children.
        for subtree in self.itervalues():
            # this just picks the first reward node
            return subtree.get_number_type()
    def check_pspace(self):
        """Check the possibility spaces.
        :raise: :exc:`~exceptions.ValueError` on mismatch
        """
        if self.pspace is None:
            # no further chance nodes, is ok!
            return
        # Every chance subtree must share this tree's possibility space.
        for tree in self.itervalues():
            if tree.pspace is not None and tree.pspace != self.pspace:
                raise ValueError('possibility space mismatch')
            tree.check_pspace()
    def __str__(self):
        """Return string representation of tree."""
        # note: special case for Event to make it fit on a single line
        children = [key.name if isinstance(key, Event) else str(key)
                    for key in self]
        subtrees = [str(subtree).split('\n')
                    for subtree in self.itervalues()]
        # Pad every branch label to the same width so subtrees line up.
        width = max(len(child) for child in children) + 2
        children = [child.ljust(width, '-') + subtree[0]
                    for child, subtree in itertools.izip(children, subtrees)]
        # All children but the last get a "|" gutter continuing the sibling
        # link; the last child's subtree is only indented.
        children = (
            ["\n".join([child]
                       + ["|" + " " * (width - 1) + line
                          for line in subtree[1:]]
                       + ["|"])
             for child, subtree
             in itertools.izip(children[:-1], subtrees[:-1])]
            +
            ["\n".join([child]
                       + [" " * width + line
                          for line in subtree[1:]])
             for child, subtree
             in itertools.izip(children[-1:], subtrees[-1:])]
            )
        return "\n".join(children)
    def get_normal_form(self):
        """Calculate all normal form decisions, and their
        corresponding gambles.
        :return: The normal form of the decision tree.
        :rtype: Yields (:class:`~improb.Gamble`, :class:`Tree`) pairs,
            where the tree is a normal form decision (i.e. a tree
            where each decision node has a single branch), and the
            gamble is the one induced by this tree.
        """
        # get_norm_back_opt without optimality operator gives exactly all
        # normal form gambles!
        return self._get_norm_back_opt()
    def get_norm_opt(self, opt=None, event=True):
        """Get the optimal normal form decisions with respect to the
        optimality operator *opt*, conditional on *event*. This method
        does not use backward induction: it simply calculates all
        normal form decisions and then applies *opt* on them.
        :param opt: The optimality operator (optional).
        :type opt: :class:`~improb.decision.opt.Opt`
        :param event: The event to condition on (optional).
        :type event: |eventtype|
        :return: Optimal normal form decisions.
        :rtype: Yields (:class:`~improb.Gamble`, :class:`Tree`) pairs,
            where the tree is a normal form decision (i.e. a tree
            where each decision node has a single branch), and the
            gamble is the one induced by this tree.
        """
        if opt is None:
            # No operator: every normal form decision is "optimal".
            for gamble, normal_tree in self._get_norm_back_opt():
                yield gamble, normal_tree
        elif not isinstance(opt, Opt):
            raise TypeError("expected a subclass of Opt")
        else:
            # Materialize all normal form decisions, then filter by *opt*.
            normal_form = list(self._get_norm_back_opt())
            opt_gambles = set(
                opt((gamble for gamble, tree in normal_form), event))
            for gamble, normal_tree in normal_form:
                if gamble in opt_gambles:
                    yield gamble, normal_tree
    def get_norm_back_opt(self, opt=None, event=True):
        """Like :meth:`get_norm_opt`, but uses normal form backward
        induction, which is more efficient.
        .. warning::
            If *opt* does not satisfy certain properties, the result
            can be different from :meth:`get_norm_opt`.
        """
        if opt is None:
            for gamble, normal_tree in self._get_norm_back_opt():
                yield gamble, normal_tree
        elif not isinstance(opt, Opt):
            raise TypeError("expected a subclass of Opt")
        else:
            # Subtrees already pruned by *opt* during backward induction;
            # apply the operator once more at the root.
            _norm_back_opt = list(self._get_norm_back_opt(opt, event))
            opt_gambles = set(
                opt((gamble for gamble, tree in _norm_back_opt), event))
            for gamble, normal_tree in _norm_back_opt:
                if gamble in opt_gambles:
                    yield gamble, normal_tree
    @abstractmethod
    def _get_norm_back_opt(self, opt=None, event=True):
        """Like :meth:`get_norm_back_opt` but without applying *opt*
        at the root of the tree in the final stage.
        All other normal form methods (:meth:`get_normal_form`,
        :meth:`get_norm_opt`, and :meth:`get_norm_back_opt`) are
        defined in terms of this method, so subclasses only need to
        implement this one as far as normal form calculations are
        concerned.
        """
        raise NotImplementedError
    @abstractmethod
    def __add__(self, value):
        """Add a value to all final reward nodes.
        :param value: The value to add.
        :type value: |numbertype|
        """
        raise NotImplementedError
    @abstractmethod
    def __sub__(self, value):
        """Subtract a value from all final reward nodes.
        :param value: The value to subtract.
        :type value: |numbertype|
        """
        raise NotImplementedError
class Reward(Tree, cdd.NumberTypeable):
    """A reward node.
    :param reward: The reward.
    :type reward: |numbertype|
    :param number_type: The number type (optional). If omitted,
        :func:`~cdd.get_number_type_from_value` is used.
    :type number_type: :class:`str`
    >>> t = Reward(5)
    >>> print(t.pspace)
    None
    >>> print(t)
    :5.0
    >>> list(t.get_normal_form())
    [(5.0, Reward(5.0, number_type='float'))]
    """
    def __init__(self, reward, number_type=None):
        if not isinstance(reward, (numbers.Real, str)):
            raise TypeError('specify a numeric reward')
        if number_type is None:
            number_type = cdd.get_number_type_from_value(reward)
        cdd.NumberTypeable.__init__(self, number_type)
        # Normalize the raw value into the chosen number representation.
        self.reward = self.make_number(reward)
    @property
    def pspace(self):
        # A leaf has no chance nodes below it.
        return None
    def get_number_type(self):
        return self.number_type
    def _get_norm_back_opt(self, opt=None, event=True):
        # A reward node's only normal form is itself, as a constant gamble.
        yield self.reward, self
    # A Reward behaves as an empty mapping: it has no children at all.
    def __contains__(self, key):
        return False
    def __iter__(self):
        return iter([]) # empty iterator
    def __len__(self):
        return 0
    def __getitem__(self, key):
        raise ValueError('reward node has no children')
    def __setitem__(self, key, value):
        raise ValueError('reward node has no children')
    def __delitem__(self, key):
        raise ValueError('reward node has no children')
    def __str__(self):
        return ":" + self.number_str(self.reward)
    def __repr__(self):
        return "Reward({0}, number_type='{1}')".format(
            self.number_repr(self.reward),
            self.number_type)
    def __add__(self, value):
        # Shifting a leaf just shifts its reward.
        return Reward(self.reward + self.make_number(value),
                      number_type=self.number_type)
    def __sub__(self, value):
        return Reward(self.reward - self.make_number(value),
                      number_type=self.number_type)
class Decision(Tree):
    """A decision tree rooted at a decision node.

    :param data: Mapping from decisions (i.e. strings, but any
        immutable object would work) to trees (optional).
    :type data: :class:`collections.Mapping`

    >>> t = Decision({"d1": 5,
    ...               "d2": 6})
    >>> print(t.pspace)
    None
    >>> print(t) # dict can change ordering
    #--d2--:6.0
    |
    d1--:5.0
    >>> for gamble, normal_tree in sorted(t.get_normal_form()):
    ...     print(gamble)
    5.0
    6.0
    >>> for gamble, normal_tree in sorted(t.get_normal_form()):
    ...     print(normal_tree)
    #--d1--:5.0
    #--d2--:6.0
    """
    def __init__(self, data=None):
        self._data = OrderedDict()
        if data is None:
            return
        if not isinstance(data, collections.Mapping):
            raise TypeError('specify a mapping')
        for key, value in data.iteritems():
            self[key] = value

    @property
    def pspace(self):
        # delegate to the first subtree that knows its possibility space
        for subtree in self.itervalues():
            if subtree.pspace is not None:
                return subtree.pspace
        # no chance node anywhere below
        return None

    def _get_norm_back_opt(self, opt=None, event=True):
        # one normal form per (choice, subtree normal form) combination
        for choice, subtree in self.iteritems():
            for gamble, normal_subtree in subtree.get_norm_back_opt(opt, event):
                yield gamble, Decision(data={choice: normal_subtree})

    def __contains__(self, key):
        return key in self._data

    def __iter__(self):
        return iter(self._data)

    def __len__(self):
        return len(self._data)

    def __getitem__(self, key):
        return self._data[key]

    def __setitem__(self, key, value):
        # bare numbers are wrapped in a Reward leaf
        if isinstance(value, (numbers.Real, str)):
            value = Reward(value) # number type assumed to be float
        if not isinstance(value, Tree):
            raise TypeError('expected Tree')
        self._data[key] = value

    def __delitem__(self, key):
        del self._data[key]

    def __str__(self):
        return "#--" + "\n ".join(Tree.__str__(self).split("\n"))

    def __repr__(self):
        items = ", ".join(
            "{0}: {1}".format(repr(key), repr(value))
            for key, value in self.iteritems())
        return "Decision({" + items + "})"

    def __add__(self, value):
        # new tree with *value* added to every subtree
        return Decision(
            OrderedDict((choice, subtree + value)
                        for choice, subtree in self.iteritems()))

    def __sub__(self, value):
        # new tree with *value* subtracted from every subtree
        return Decision(
            OrderedDict((choice, subtree - value)
                        for choice, subtree in self.iteritems()))
class Chance(Tree):
    """A decision tree rooted at a chance node.

    :param pspace: The possibility space.
    :type pspace: |pspacetype|
    :param data: Mapping from events to trees (optional).
    :type data: :class:`collections.Mapping`

    >>> t = Chance(pspace=(0, 1), data={(0,): 5, (1,): 6})
    >>> t.pspace
    PSpace(2)
    >>> t.get_number_type()
    'float'
    >>> print(t)
    O--(0)--:5.0
    |
    (1)--:6.0
    >>> list(gamble for gamble, normal_tree in t.get_normal_form())
    [Gamble(pspace=PSpace(2), mapping={0: 5.0, 1: 6.0})]
    """
    def __init__(self, pspace, data=None):
        self._data = OrderedDict()
        self._pspace = PSpace.make(pspace)
        # extract data
        if isinstance(data, collections.Mapping):
            for key, value in data.iteritems():
                self[key] = value
        elif data is not None:
            raise TypeError('data must be a mapping')

    def check_pspace(self):
        """Events of the chance nodes must form the possibility space.

        >>> t = Chance(pspace='ab', data={'a': 5, 'ab': 6})
        >>> t.check_pspace() # doctest: +ELLIPSIS
        Traceback (most recent call last):
        ...
        ValueError: ...
        >>> t = Chance(pspace='ab', data={'a': 5})
        >>> t.check_pspace() # doctest: +ELLIPSIS
        Traceback (most recent call last):
        ...
        ValueError: ...
        >>> t = Chance(pspace='ab', data={'a': 5, 'b': 6})
        >>> t.check_pspace()
        """
        # check that there are no pairwise intersections
        union = self.pspace.make_event(False)
        for child_event in self:
            if union & child_event:
                raise ValueError('events must not intersect')
            union |= child_event
        # check the union
        if union != self.pspace.make_event(True):
            raise ValueError('union of events must be possibility space')
        # check the rest of the tree
        Tree.check_pspace(self)

    @property
    def pspace(self):
        return self._pspace

    def _get_norm_back_opt(self, opt=None, event=True):
        # Yield (gamble, normal_tree) pairs, one for every combination
        # of normal forms of the children.
        number_type = self.get_number_type()
        # note: this implementation depends on the fact that
        # iterating self.itervalues() and
        # self.iterkeys() correspond to each other
        all_normal_forms = itertools.product(
            *[tuple(subtree.get_norm_back_opt(opt, event))
              for subtree in self.itervalues()])
        for normal_forms in all_normal_forms:
            data = {}
            tree = OrderedDict()
            # loop variable is named "ev", not "event", so it does not
            # shadow the *event* parameter of this method
            for ev, (gamble, normal_subtree) in itertools.izip(
                    self.iterkeys(), normal_forms):
                for omega in ev:
                    if isinstance(gamble, numbers.Real):
                        # a leaf: constant payoff on the whole event
                        data[omega] = gamble
                    elif isinstance(gamble, Gamble):
                        data[omega] = gamble[omega]
                    else:
                        raise RuntimeError(
                            "expected int, long, float, or Gamble")
                tree[ev] = normal_subtree
            yield (Gamble(pspace=self.pspace,
                          data=data,
                          number_type=number_type),
                   Chance(pspace=self.pspace,
                          data=tree))

    def __contains__(self, key):
        # keys are normalized to events of the possibility space
        return self.pspace.make_event(key) in self._data

    def __iter__(self):
        return iter(self._data)

    def __len__(self):
        return len(self._data)

    def __getitem__(self, key):
        return self._data[self.pspace.make_event(key)]

    def __setitem__(self, key, value):
        if isinstance(value, (numbers.Real, str)):
            value = Reward(value) # number type assumed to be float
        if not isinstance(value, Tree):
            raise TypeError('expected Tree')
        self._data[self.pspace.make_event(key)] = value

    def __delitem__(self, key):
        del self._data[self.pspace.make_event(key)]

    def __str__(self):
        return "O--" + "\n ".join(Tree.__str__(self).split("\n"))

    def __repr__(self):
        return (
            "Chance("
            + "pspace={0}".format(repr(self.pspace))
            + ", data={"
            + ", ".join("{0}: {1}".format(repr(key), repr(value))
                        for key, value in self.iteritems())
            + "})"
            )

    def __add__(self, value):
        # new tree with *value* added to every subtree
        return Chance(
            self.pspace,
            OrderedDict((ev, subtree + value)
                        for ev, subtree in self.iteritems()))

    def __sub__(self, value):
        # new tree with *value* subtracted from every subtree
        return Chance(
            self.pspace,
            OrderedDict((ev, subtree - value)
                        for ev, subtree in self.iteritems()))
|
mcmtroffaes/improb
|
improb/decision/tree.py
|
Python
|
gpl-2.0
| 18,245
|
from enigma import getPrevAsciiCode
from Tools.NumericalTextInput import NumericalTextInput
from Tools.Directories import resolveFilename, SCOPE_CONFIG, fileExists
from Components.Harddisk import harddiskmanager
from copy import copy as copy_copy
from os import path as os_path
from time import localtime, strftime
# ConfigElement, the base class of all ConfigElements.
# it stores:
# value the current value, usefully encoded.
# usually a property which retrieves _value,
# and maybe does some reformatting
# _value the value as it's going to be saved in the configfile,
# though still in non-string form.
# this is the object which is actually worked on.
# default the initial value. If _value is equal to default,
# it will not be stored in the config file
# saved_value is a text representation of _value, stored in the config file
#
# and has (at least) the following methods:
# save() stores _value into saved_value,
# (or stores 'None' if it should not be stored)
# load() loads _value from saved_value, or loads
# the default if saved_value is 'None' (default)
# or invalid.
#
class ConfigElement(object):
    """Base class of all config elements.

    Stores:
      value        -- the current value, usefully encoded; usually a
                      property which retrieves _value and maybe reformats
      _value       -- the value as it is going to be saved in the config
                      file (still in non-string form); the object that is
                      actually worked on
      default      -- the initial value; if _value equals default, it is
                      not stored in the config file
      saved_value  -- text representation of _value as stored in the
                      config file (None when nothing is stored)

    Key methods:
      save() -- stores _value into saved_value (or None when it should
                not be stored)
      load() -- loads _value from saved_value, or the default when
                saved_value is None
    """
    def __init__(self):
        self.saved_value = None
        self.save_forced = False
        self.last_value = None
        self.save_disabled = False
        # both notifier lists are created lazily (see getNotifiers /
        # getNotifiersFinal) so unused elements stay cheap
        self.__notifiers = None
        self.__notifiers_final = None
        self.enabled = True
        # when True, save()/cancel() also fire the immediate notifiers
        self.callNotifiersOnSaveAndCancel = False
    def getNotifiers(self):
        # immediate-feedback notifiers, created on first access
        if self.__notifiers is None:
            self.__notifiers = [ ]
        return self.__notifiers
    def setNotifiers(self, val):
        self.__notifiers = val
    notifiers = property(getNotifiers, setNotifiers)
    def getNotifiersFinal(self):
        # deferred notifiers, fired from changedFinal() (e.g. on deselect)
        if self.__notifiers_final is None:
            self.__notifiers_final = [ ]
        return self.__notifiers_final
    def setNotifiersFinal(self, val):
        self.__notifiers_final = val
    notifiers_final = property(getNotifiersFinal, setNotifiersFinal)
    # you need to override this to do input validation
    def setValue(self, value):
        self._value = value
        self.changed()
    def getValue(self):
        return self._value
    value = property(getValue, setValue)
    # you need to override this if self.value is not a string
    def fromstring(self, value):
        return value
    # you can overide this for fancy default handling
    def load(self):
        sv = self.saved_value
        if sv is None:
            self.value = self.default
        else:
            self.value = self.fromstring(sv)
    def tostring(self, value):
        return str(value)
    # you need to override this if str(self.value) doesn't work
    def save(self):
        # values equal to the default are not stored (unless save_forced)
        if self.save_disabled or (self.value == self.default and not self.save_forced):
            self.saved_value = None
        else:
            self.saved_value = self.tostring(self.value)
        if self.callNotifiersOnSaveAndCancel:
            self.changed()
    def cancel(self):
        # revert to the last saved state
        self.load()
        if self.callNotifiersOnSaveAndCancel:
            self.changed()
    def isChanged(self):
        # True when the current value differs from what is on disk
        sv = self.saved_value
        if sv is None and self.value == self.default:
            return False
        return self.tostring(self.value) != sv
    def changed(self):
        # fire immediate-feedback notifiers
        if self.__notifiers:
            for x in self.notifiers:
                x(self)
    def changedFinal(self):
        # fire deferred notifiers
        if self.__notifiers_final:
            for x in self.notifiers_final:
                x(self)
    def addNotifier(self, notifier, initial_call = True, immediate_feedback = True):
        assert callable(notifier), "notifiers must be callable"
        if immediate_feedback:
            self.notifiers.append(notifier)
        else:
            self.notifiers_final.append(notifier)
        # CHECKME:
        # do we want to call the notifier
        # - at all when adding it? (yes, though optional)
        # - when the default is active? (yes)
        # - when no value *yet* has been set,
        #   because no config has ever been read (currently yes)
        #   (though that's not so easy to detect.
        #    the entry could just be new.)
        if initial_call:
            notifier(self)
    def disableSave(self):
        self.save_disabled = True
    def __call__(self, selected):
        # the GUI renders the element through getMulti (subclass-provided)
        return self.getMulti(selected)
    def onSelect(self, session):
        pass
    def onDeselect(self, session):
        # fire deferred notifiers once when leaving a changed element
        if not self.last_value == self.value:
            self.changedFinal()
            self.last_value = self.value
# key codes handed to ConfigElement.handleKey by the GUI
KEY_LEFT = 0
KEY_RIGHT = 1
KEY_OK = 2
KEY_DELETE = 3
KEY_BACKSPACE = 4
KEY_HOME = 5
KEY_END = 6
KEY_TOGGLEOW = 7   # toggle overwrite/insert mode
KEY_ASCII = 8
KEY_TIMEOUT = 9
# the digit keys 0..9 occupy the contiguous range 12..21
KEY_NUMBERS = range(12, 12+10)
KEY_0 = 12
KEY_9 = 12+9

def getKeyNumber(key):
    """Return the digit (0..9) encoded by a KEY_0..KEY_9 key code.

    Raises ValueError for any other key code.  (An explicit raise is
    used instead of assert, which silently disappears under python -O.)
    """
    if key not in KEY_NUMBERS:
        raise ValueError("not a number key: %r" % (key,))
    return key - KEY_0
class choicesList(object): # XXX: we might want a better name for this
    """Normalized view on a choices container.

    choices is either a list whose entries are plain ids or
    (id, description) tuples, or a dict mapping id -> description.
    The empty container is presented as the single choice "".
    """
    LIST_TYPE_LIST = 1
    LIST_TYPE_DICT = 2
    def __init__(self, choices, type = None):
        self.choices = choices
        if type is None:
            # auto-detect the container flavour
            if isinstance(choices, list):
                self.type = choicesList.LIST_TYPE_LIST
            elif isinstance(choices, dict):
                self.type = choicesList.LIST_TYPE_DICT
            else:
                assert False, "choices must be dict or list!"
        else:
            self.type = type
    def __list__(self):
        if self.type == choicesList.LIST_TYPE_LIST:
            # BUGFIX: the old "not isinstance(x, tuple) and x or x[0]"
            # idiom evaluated x[0] for falsy non-tuple ids ("" -> IndexError,
            # 0 -> TypeError); a conditional expression has no such trap
            ret = [x[0] if isinstance(x, tuple) else x for x in self.choices]
        else:
            ret = self.choices.keys()
        return ret or [""]
    def __iter__(self):
        if self.type == choicesList.LIST_TYPE_LIST:
            # same falsy-id fix as in __list__
            ret = [x[0] if isinstance(x, tuple) else x for x in self.choices]
        else:
            ret = self.choices
        return iter(ret or [""])
    def __len__(self):
        # an empty container still presents one (empty) choice
        return len(self.choices) or 1
    def __getitem__(self, index):
        if self.type == choicesList.LIST_TYPE_LIST:
            ret = self.choices[index]
            if isinstance(ret, tuple):
                ret = ret[0]
            return ret
        return self.choices.keys()[index]
    def index(self, value):
        try:
            return self.__list__().index(value)
        except (ValueError, IndexError):
            # occurs e.g. when default is not in list
            return 0
    def __setitem__(self, index, value):
        # replace the id at *index*, keeping its description if any
        if self.type == choicesList.LIST_TYPE_LIST:
            orig = self.choices[index]
            if isinstance(orig, tuple):
                self.choices[index] = (value, orig[1])
            else:
                self.choices[index] = value
        else:
            key = self.choices.keys()[index]
            orig = self.choices[key]
            del self.choices[key]
            self.choices[value] = orig
    def default(self):
        # first id, or "" when there are no choices
        choices = self.choices
        if not choices:
            return ""
        if self.type is choicesList.LIST_TYPE_LIST:
            default = choices[0]
            if isinstance(default, tuple):
                default = default[0]
        else:
            default = choices.keys()[0]
        return default

class descriptionList(choicesList): # XXX: we might want a better name for this
    """Same container as choicesList, but presenting the descriptions
    instead of the ids."""
    def __list__(self):
        if self.type == choicesList.LIST_TYPE_LIST:
            # same falsy-entry fix as in choicesList.__list__
            ret = [x[1] if isinstance(x, tuple) else x for x in self.choices]
        else:
            ret = self.choices.values()
        return ret or [""]
    def __iter__(self):
        return iter(self.__list__())
    def __getitem__(self, index):
        # look up the description of id *index*
        if self.type == choicesList.LIST_TYPE_LIST:
            for x in self.choices:
                if isinstance(x, tuple):
                    if x[0] == index:
                        return str(x[1])
                elif x == index:
                    return str(x)
            return str(index) # Fallback!
        else:
            return str(self.choices.get(index, ""))
    def __setitem__(self, index, value):
        # set the description of id *index*
        if self.type == choicesList.LIST_TYPE_LIST:
            i = self.index(index)
            orig = self.choices[i]
            if isinstance(orig, tuple):
                self.choices[i] = (orig[0], value)
            else:
                self.choices[i] = value
        else:
            self.choices[index] = value
#
# ConfigSelection is a "one of.."-type.
# it has the "choices", usually a list, which contains
# (id, desc)-tuples (or just only the ids, in case the id
# will be used as description)
#
# all ids MUST be plain strings.
#
class ConfigSelection(ConfigElement):
    """A "one of ..."-type element.

    choices usually is a list of (id, description) tuples (or only ids,
    in which case the id doubles as description).  All ids MUST be
    plain strings.
    """
    def __init__(self, choices, default = None):
        ConfigElement.__init__(self)
        self.choices = choicesList(choices)
        if default is None:
            default = self.choices.default()
        # cached description of the current value; None means "stale"
        self._descr = None
        self.default = self._value = self.last_value = default
    def setChoices(self, choices, default = None):
        # replace the choice list; fall back to default when the current
        # value is no longer available
        self.choices = choicesList(choices)
        if default is None:
            default = self.choices.default()
        self.default = default
        if self.value not in self.choices:
            self.value = default
    def setValue(self, value):
        # invalid values silently fall back to the default
        if value in self.choices:
            self._value = value
        else:
            self._value = self.default
        self._descr = None  # invalidate cached description
        self.changed()
    def tostring(self, val):
        return val
    def getValue(self):
        return self._value
    def setCurrentText(self, text):
        # rename the currently selected choice (id and description) to *text*
        i = self.choices.index(self.value)
        self.choices[i] = text
        self._descr = self.description[text] = text
        self._value = text
    value = property(getValue, setValue)
    def getIndex(self):
        return self.choices.index(self.value)
    index = property(getIndex)
    # GUI
    def handleKey(self, key):
        # LEFT/RIGHT cycle through the choices, HOME/END jump to the ends
        nchoices = len(self.choices)
        if nchoices > 1:
            i = self.choices.index(self.value)
            if key == KEY_LEFT:
                self.value = self.choices[(i + nchoices - 1) % nchoices]
            elif key == KEY_RIGHT:
                self.value = self.choices[(i + 1) % nchoices]
            elif key == KEY_HOME:
                self.value = self.choices[0]
            elif key == KEY_END:
                self.value = self.choices[nchoices - 1]
    def selectNext(self):
        # advance to the next choice, wrapping around
        nchoices = len(self.choices)
        i = self.choices.index(self.value)
        self.value = self.choices[(i + 1) % nchoices]
    def getText(self):
        if self._descr is not None:
            return self._descr
        descr = self._descr = self.description[self.value]
        if descr:
            return _(descr)  # translate non-empty descriptions
        return descr
    def getMulti(self, selected):
        if self._descr is not None:
            descr = self._descr
        else:
            descr = self._descr = self.description[self.value]
        if descr:
            return ("text", _(descr))
        return ("text", descr)
    # HTML
    def getHTML(self, id):
        # render the choices as a radio-button group
        res = ""
        for v in self.choices:
            descr = self.description[v]
            if self.value == v:
                checked = 'checked="checked" '
            else:
                checked = ''
            res += '<input type="radio" name="' + id + '" ' + checked + 'value="' + v + '">' + descr + "</input></br>\n"
        return res;
    def unsafeAssign(self, value):
        # setValue does check if value is in choices. This is safe enough.
        self.value = value
    description = property(lambda self: descriptionList(self.choices.choices, self.choices.type))
# a binary decision.
#
# several customized versions exist for different
# descriptions.
#
boolean_descriptions = {False: _("false"), True: _("true")}
class ConfigBoolean(ConfigElement):
    """A binary decision.

    Several customized subclasses exist that differ only in their
    description texts (yes/no, on/off, enable/disable).
    """
    def __init__(self, default = False, descriptions = boolean_descriptions):
        ConfigElement.__init__(self)
        self.descriptions = descriptions
        self.value = self.last_value = self.default = default
    def handleKey(self, key):
        # HOME/END force a value, LEFT/RIGHT toggle
        if key == KEY_HOME:
            self.value = False
        elif key == KEY_END:
            self.value = True
        elif key in (KEY_LEFT, KEY_RIGHT):
            self.value = not self.value
    def getText(self):
        descr = self.descriptions[self.value]
        return _(descr) if descr else descr
    def getMulti(self, selected):
        descr = self.descriptions[self.value]
        return ("text", _(descr) if descr else descr)
    def tostring(self, value):
        # stored as the literal words, not 0/1
        return "true" if value else "false"
    def fromstring(self, val):
        return val == "true"
    def getHTML(self, id):
        checked = ' checked="checked"' if self.value else ''
        return '<input type="checkbox" name="' + id + '" value="1" ' + checked + " />"
    # this is FLAWED. and must be fixed.
    def unsafeAssign(self, value):
        self.value = (value == "1")
    def onDeselect(self, session):
        # fire deferred notifiers once when leaving a changed element
        if self.last_value != self.value:
            self.changedFinal()
        self.last_value = self.value
# customized boolean flavours: identical behaviour, different wording
yes_no_descriptions = {False: _("no"), True: _("yes")}
class ConfigYesNo(ConfigBoolean):
    # boolean rendered as "yes"/"no"
    def __init__(self, default = False):
        ConfigBoolean.__init__(self, default = default, descriptions = yes_no_descriptions)
on_off_descriptions = {False: _("off"), True: _("on")}
class ConfigOnOff(ConfigBoolean):
    # boolean rendered as "on"/"off"
    def __init__(self, default = False):
        ConfigBoolean.__init__(self, default = default, descriptions = on_off_descriptions)
enable_disable_descriptions = {False: _("disable"), True: _("enable")}
class ConfigEnableDisable(ConfigBoolean):
    # boolean rendered as "enable"/"disable"
    def __init__(self, default = False):
        ConfigBoolean.__init__(self, default = default, descriptions = enable_disable_descriptions)
class ConfigDateTime(ConfigElement):
    """A point in time, stored as a unix timestamp (int).

    It is rendered through strftime(formatstring) and stepped by
    *increment* seconds (one day by default).
    """
    def __init__(self, default, formatstring, increment = 86400):
        ConfigElement.__init__(self)
        self.increment = increment
        self.formatstring = formatstring
        self.value = self.last_value = self.default = int(default)
    def handleKey(self, key):
        # LEFT/RIGHT step by one increment, HOME/END reset to the default
        if key == KEY_LEFT:
            self.value -= self.increment
        elif key == KEY_RIGHT:
            self.value += self.increment
        elif key in (KEY_HOME, KEY_END):
            self.value = self.default
    def getText(self):
        return strftime(self.formatstring, localtime(self.value))
    def getMulti(self, selected):
        return ("text", strftime(self.formatstring, localtime(self.value)))
    def fromstring(self, val):
        return int(val)
# *THE* mighty config element class
#
# allows you to store/edit a sequence of values.
# can be used for IP-addresses, dates, plain integers, ...
# several helper exist to ease this up a bit.
#
class ConfigSequence(ConfigElement):
    """A sequence of bounded integers, edited digit by digit.

    Used for IP addresses, clock times, plain integers, PINs, ...

    :param seperator: string rendered between the blocks (e.g. "." for IPs)
    :param limits: [(min, max), ...] -- one pair per block
    :param default: list with one initial value per block
    :param censor_char: when non-empty, every digit is rendered as this
        single character (used for PINs)
    """
    def __init__(self, seperator, limits, default, censor_char = ""):
        ConfigElement.__init__(self)
        assert isinstance(limits, list) and len(limits[0]) == 2, "limits must be [(min, max),...]-tuple-list"
        assert censor_char == "" or len(censor_char) == 1, "censor char must be a single char (or \"\")"
        #assert isinstance(default, list), "default must be a list"
        #assert isinstance(default[0], int), "list must contain numbers"
        #assert len(default) == len(limits), "length must match"
        self.marked_pos = 0              # cursor position, counted in digits
        self.seperator = seperator
        self.limits = limits
        self.censor_char = censor_char
        self.last_value = self.default = default
        self.value = copy_copy(default)  # edit a copy; keep default intact
        self.endNotifier = None          # callbacks fired when the cursor passes the end
    def validate(self):
        # clamp every block into its (min, max) range and keep the
        # cursor within the editable digits
        max_pos = 0
        num = 0
        for i in self._value:
            max_pos += len(str(self.limits[num][1]))
            if self._value[num] < self.limits[num][0]:
                self._value[num] = self.limits[num][0]
            if self._value[num] > self.limits[num][1]:
                self._value[num] = self.limits[num][1]
            num += 1
        if self.marked_pos >= max_pos:
            # cursor ran past the last digit: notify listeners, then clamp
            if self.endNotifier:
                for x in self.endNotifier:
                    x(self)
            self.marked_pos = max_pos - 1
        if self.marked_pos < 0:
            self.marked_pos = 0
    def validatePos(self):
        # clamp only the cursor, leaving the values untouched
        if self.marked_pos < 0:
            self.marked_pos = 0
        total_len = sum([len(str(x[1])) for x in self.limits])
        if self.marked_pos >= total_len:
            self.marked_pos = total_len - 1
    def addEndNotifier(self, notifier):
        # *notifier* is called (with self) when the cursor passes the end
        if self.endNotifier is None:
            self.endNotifier = []
        self.endNotifier.append(notifier)
    def handleKey(self, key):
        if key == KEY_LEFT:
            self.marked_pos -= 1
            self.validatePos()
        elif key == KEY_RIGHT:
            self.marked_pos += 1
            self.validatePos()
        elif key == KEY_HOME:
            self.marked_pos = 0
            self.validatePos()
        elif key == KEY_END:
            # jump behind the last digit
            max_pos = 0
            num = 0
            for i in self._value:
                max_pos += len(str(self.limits[num][1]))
                num += 1
            self.marked_pos = max_pos - 1
            self.validatePos()
        elif key in KEY_NUMBERS or key == KEY_ASCII:
            if key == KEY_ASCII:
                code = getPrevAsciiCode()
                if code < 48 or code > 57:
                    return  # not an ASCII digit
                number = code - 48
            else:
                number = getKeyNumber(key)
            block_len = [len(str(x[1])) for x in self.limits]
            total_len = sum(block_len)
            pos = 0
            blocknumber = 0
            block_len_total = [0, ]
            # find the block the cursor currently sits in
            for x in block_len:
                pos += block_len[blocknumber]
                block_len_total.append(pos)
                if pos - 1 >= self.marked_pos:
                    pass
                else:
                    blocknumber += 1
            # length of numberblock
            number_len = len(str(self.limits[blocknumber][1]))
            # position in the block
            posinblock = self.marked_pos - block_len_total[blocknumber]
            oldvalue = self._value[blocknumber]
            # replace the decimal digit at posinblock by *number*
            olddec = oldvalue % 10 ** (number_len - posinblock) - (oldvalue % 10 ** (number_len - posinblock - 1))
            newvalue = oldvalue - olddec + (10 ** (number_len - posinblock - 1) * number)
            self._value[blocknumber] = newvalue
            self.marked_pos += 1
            self.validate()
            self.changed()
    def genText(self):
        # build the display string and translate marked_pos (a digit
        # index) into a cursor position inside the rendered string
        value = ""
        mPos = self.marked_pos
        num = 0;
        for i in self._value:
            if value: #fixme no heading separator possible
                value += self.seperator
                if mPos >= len(value) - 1:
                    mPos += 1  # cursor skips over the separator
            if self.censor_char == "":
                value += ("%0" + str(len(str(self.limits[num][1]))) + "d") % i
            else:
                value += (self.censor_char * len(str(self.limits[num][1])))
            num += 1
        return (value, mPos)
    def getText(self):
        (value, mPos) = self.genText()
        return value
    def getMulti(self, selected):
        (value, mPos) = self.genText()
        # only mark cursor when we are selected
        # (this code is heavily ink optimized!)
        if self.enabled:
            return ("mtext"[1-selected:], value, [mPos])
        else:
            return ("text", value)
    def tostring(self, val):
        return self.seperator.join([self.saveSingle(x) for x in val])
    def saveSingle(self, v):
        # hook for subclasses that need custom per-block formatting
        return str(v)
    def fromstring(self, value):
        return [int(x) for x in value.split(self.seperator)]
    def onDeselect(self, session):
        if self.last_value != self._value:
            self.changedFinal()
            self.last_value = copy_copy(self._value)
ip_limits = [(0,255),(0,255),(0,255),(0,255)]
class ConfigIP(ConfigSequence):
    """An IPv4 address; unlike the base class, the cursor selects whole
    octets rather than single digits.

    auto_jump: when typing a digit would overflow the current octet,
    move to the next octet and apply the digit there.
    """
    def __init__(self, default, auto_jump = False):
        ConfigSequence.__init__(self, seperator = ".", limits = ip_limits, default = default)
        self.block_len = [len(str(x[1])) for x in self.limits]
        self.marked_block = 0     # index of the currently selected octet
        self.overwrite = True     # next digit replaces the octet value
        self.auto_jump = auto_jump
    def handleKey(self, key):
        if key == KEY_LEFT:
            if self.marked_block > 0:
                self.marked_block -= 1
            self.overwrite = True
        elif key == KEY_RIGHT:
            if self.marked_block < len(self.limits)-1:
                self.marked_block += 1
            self.overwrite = True
        elif key == KEY_HOME:
            self.marked_block = 0
            self.overwrite = True
        elif key == KEY_END:
            self.marked_block = len(self.limits)-1
            self.overwrite = True
        elif key in KEY_NUMBERS or key == KEY_ASCII:
            if key == KEY_ASCII:
                code = getPrevAsciiCode()
                if code < 48 or code > 57:
                    return  # not an ASCII digit
                number = code - 48
            else:
                number = getKeyNumber(key)
            oldvalue = self._value[self.marked_block]
            if self.overwrite:
                # first digit after selecting a block replaces its value
                self._value[self.marked_block] = number
                self.overwrite = False
            else:
                # further digits are appended decimally
                oldvalue *= 10
                newvalue = oldvalue + number
                if self.auto_jump and newvalue > self.limits[self.marked_block][1] and self.marked_block < len(self.limits)-1:
                    # overflow: re-dispatch the digit to the next octet
                    self.handleKey(KEY_RIGHT)
                    self.handleKey(key)
                    return
                else:
                    self._value[self.marked_block] = newvalue
            if len(str(self._value[self.marked_block])) >= self.block_len[self.marked_block]:
                # octet is full: advance automatically
                self.handleKey(KEY_RIGHT)
            self.validate()
            self.changed()
    def genText(self):
        # render without zero padding; mark the whole selected octet
        value = ""
        block_strlen = []
        for i in self._value:
            block_strlen.append(len(str(i)))
            if value:
                value += self.seperator
            value += str(i)
        leftPos = sum(block_strlen[:(self.marked_block)])+self.marked_block
        rightPos = sum(block_strlen[:(self.marked_block+1)])+self.marked_block
        mBlock = range(leftPos, rightPos)
        return (value, mBlock)
    def getMulti(self, selected):
        (value, mBlock) = self.genText()
        if self.enabled:
            return ("mtext"[1-selected:], value, mBlock)
        else:
            return ("text", value)
    def getHTML(self, id):
        # we definitely don't want leading zeros
        return '.'.join(["%d" % d for d in self.value])
mac_limits = [(1,255),(1,255),(1,255),(1,255),(1,255),(1,255)]
class ConfigMAC(ConfigSequence):
    # a MAC address: six colon-separated blocks
    # NOTE(review): lower limit is 1 (not 0), so "00" octets cannot be
    # entered -- confirm this is intended
    def __init__(self, default):
        ConfigSequence.__init__(self, seperator = ":", limits = mac_limits, default = default)
class ConfigPosition(ConfigSequence):
    # a screen position/geometry: four comma-separated components whose
    # upper bounds are taken from *args* (presumably x, y, width, height
    # -- confirm against callers)
    def __init__(self, default, args):
        ConfigSequence.__init__(self, seperator = ",", limits = [(0,args[0]),(0,args[1]),(0,args[2]),(0,args[3])], default = default)
clock_limits = [(0,23),(0,59)]
class ConfigClock(ConfigSequence):
    """A time of day as an [hour, minute] pair, initialized from a unix
    timestamp (local time)."""
    def __init__(self, default):
        t = localtime(default)
        ConfigSequence.__init__(self, seperator = ":", limits = clock_limits, default = [t.tm_hour, t.tm_min])
    def increment(self):
        # step one minute forward, wrapping 23:59 -> 0:00
        minutes_of_day = (self._value[0] * 60 + self._value[1] + 1) % (24 * 60)
        self._value[0] = minutes_of_day // 60
        self._value[1] = minutes_of_day % 60
        # Trigger change
        self.changed()
    def decrement(self):
        # step one minute back, wrapping 0:00 -> 23:59
        minutes_of_day = (self._value[0] * 60 + self._value[1] - 1) % (24 * 60)
        self._value[0] = minutes_of_day // 60
        self._value[1] = minutes_of_day % 60
        # Trigger change
        self.changed()
integer_limits = (0, 9999999999)
class ConfigInteger(ConfigSequence):
    # a plain integer, implemented as a single-block sequence
    def __init__(self, default, limits = integer_limits):
        ConfigSequence.__init__(self, seperator = ":", limits = [limits], default = default)
    # you need to override this to do input validation
    def setValue(self, value):
        # wrap the bare int in the one-element list the sequence machinery expects
        self._value = [value]
        self.changed()
    def getValue(self):
        # unwrap: callers see the bare int
        return self._value[0]
    value = property(getValue, setValue)
    def fromstring(self, value):
        return int(value)
    def tostring(self, value):
        return str(value)
class ConfigPIN(ConfigInteger):
    # NOTE(review): the parameter "len" shadows the builtin; it is kept
    # because callers may pass it by keyword (renaming would break them)
    def __init__(self, default, len = 4, censor = ""):
        assert isinstance(default, int), "ConfigPIN default must be an integer"
        # bypass ConfigInteger.__init__ so censor_char can be supplied
        ConfigSequence.__init__(self, seperator = ":", limits = [(0, (10**len)-1)], censor_char = censor, default = default)
        self.len = len
    def getLength(self):
        # number of PIN digits
        return self.len
class ConfigFloat(ConfigSequence):
    # a fixed-point number stored as [integer part, fractional part]
    def __init__(self, default, limits):
        ConfigSequence.__init__(self, seperator = ".", limits = limits, default = default)
    def getFloat(self):
        # fractional part is scaled by its upper limit + 1
        return float(self.value[1] / float(self.limits[1][1] + 1) + self.value[0])
    float = property(getFloat)
# an editable text...
class ConfigText(ConfigElement, NumericalTextInput):
def __init__(self, default = "", fixed_size = True, visible_width = False):
ConfigElement.__init__(self)
NumericalTextInput.__init__(self, nextFunc = self.nextFunc, handleTimeout = False)
self.marked_pos = 0
self.allmarked = (default != "")
self.fixed_size = fixed_size
self.visible_width = visible_width
self.offset = 0
self.overwrite = fixed_size
self.help_window = None
self.value = self.last_value = self.default = default
def validateMarker(self):
textlen = len(self.text)
if self.fixed_size:
if self.marked_pos > textlen-1:
self.marked_pos = textlen-1
else:
if self.marked_pos > textlen:
self.marked_pos = textlen
if self.marked_pos < 0:
self.marked_pos = 0
if self.visible_width:
if self.marked_pos < self.offset:
self.offset = self.marked_pos
if self.marked_pos >= self.offset + self.visible_width:
if self.marked_pos == textlen:
self.offset = self.marked_pos - self.visible_width
else:
self.offset = self.marked_pos - self.visible_width + 1
if self.offset > 0 and self.offset + self.visible_width > textlen:
self.offset = max(0, len - self.visible_width)
def insertChar(self, ch, pos, owr):
if owr or self.overwrite:
self.text = self.text[0:pos] + ch + self.text[pos + 1:]
elif self.fixed_size:
self.text = self.text[0:pos] + ch + self.text[pos:-1]
else:
self.text = self.text[0:pos] + ch + self.text[pos:]
def deleteChar(self, pos):
if not self.fixed_size:
self.text = self.text[0:pos] + self.text[pos + 1:]
elif self.overwrite:
self.text = self.text[0:pos] + " " + self.text[pos + 1:]
else:
self.text = self.text[0:pos] + self.text[pos + 1:] + " "
def deleteAllChars(self):
if self.fixed_size:
self.text = " " * len(self.text)
else:
self.text = ""
self.marked_pos = 0
def handleKey(self, key):
# this will no change anything on the value itself
# so we can handle it here in gui element
if key == KEY_DELETE:
self.timeout()
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
else:
self.deleteChar(self.marked_pos)
if self.fixed_size and self.overwrite:
self.marked_pos += 1
elif key == KEY_BACKSPACE:
self.timeout()
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
elif self.marked_pos > 0:
self.deleteChar(self.marked_pos-1)
if not self.fixed_size and self.offset > 0:
self.offset -= 1
self.marked_pos -= 1
elif key == KEY_LEFT:
self.timeout()
if self.allmarked:
self.marked_pos = len(self.text)
self.allmarked = False
else:
self.marked_pos -= 1
elif key == KEY_RIGHT:
self.timeout()
if self.allmarked:
self.marked_pos = 0
self.allmarked = False
else:
self.marked_pos += 1
elif key == KEY_HOME:
self.timeout()
self.allmarked = False
self.marked_pos = 0
elif key == KEY_END:
self.timeout()
self.allmarked = False
self.marked_pos = len(self.text)
elif key == KEY_TOGGLEOW:
self.timeout()
self.overwrite = not self.overwrite
elif key == KEY_ASCII:
self.timeout()
newChar = unichr(getPrevAsciiCode())
if not self.useableChars or newChar in self.useableChars:
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
self.insertChar(newChar, self.marked_pos, False)
self.marked_pos += 1
elif key in KEY_NUMBERS:
owr = self.lastKey == getKeyNumber(key)
newChar = self.getKey(getKeyNumber(key))
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
self.insertChar(newChar, self.marked_pos, owr)
elif key == KEY_TIMEOUT:
self.timeout()
if self.help_window:
self.help_window.update(self)
return
if self.help_window:
self.help_window.update(self)
self.validateMarker()
self.changed()
def nextFunc(self):
self.marked_pos += 1
self.validateMarker()
self.changed()
def getValue(self):
try:
return self.text.encode("utf-8")
except UnicodeDecodeError:
print "Broken UTF8!"
return self.text
def setValue(self, val):
try:
self.text = val.decode("utf-8")
except UnicodeDecodeError:
self.text = val.decode("utf-8", "ignore")
print "Broken UTF8!"
value = property(getValue, setValue)
_value = property(getValue, setValue)
def getText(self):
return self.text.encode("utf-8")
def getMulti(self, selected):
if self.visible_width:
if self.allmarked:
mark = range(0, min(self.visible_width, len(self.text)))
else:
mark = [self.marked_pos-self.offset]
return ("mtext"[1-selected:], self.text[self.offset:self.offset+self.visible_width].encode("utf-8")+" ", mark)
else:
if self.allmarked:
mark = range(0, len(self.text))
else:
mark = [self.marked_pos]
return ("mtext"[1-selected:], self.text.encode("utf-8")+" ", mark)
def onSelect(self, session):
self.allmarked = (self.value != "")
if session is not None:
from Screens.NumericalTextInputHelpDialog import NumericalTextInputHelpDialog
self.help_window = session.instantiateDialog(NumericalTextInputHelpDialog, self)
self.help_window.setAnimationMode(0)
self.help_window.show()
def onDeselect(self, session):
self.marked_pos = 0
self.offset = 0
if self.help_window:
session.deleteDialog(self.help_window)
self.help_window = None
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value
def getHTML(self, id):
return '<input type="text" name="' + id + '" value="' + self.value + '" /><br>\n'
def unsafeAssign(self, value):
self.value = str(value)
class ConfigPassword(ConfigText):
    """A ConfigText whose rendered text is masked with *censor* while
    the element is not being edited."""
    def __init__(self, default = "", fixed_size = False, visible_width = False, censor = "*"):
        ConfigText.__init__(self, default = default, fixed_size = fixed_size, visible_width = visible_width)
        self.censor_char = censor
        self.hidden = True
    def getMulti(self, selected):
        mtext, text, mark = ConfigText.getMulti(self, selected)
        if self.hidden:
            # mask every rendered character
            text = self.censor_char * len(text)
        return (mtext, text, mark)
    def onSelect(self, session):
        # reveal the text while editing
        ConfigText.onSelect(self, session)
        self.hidden = False
    def onDeselect(self, session):
        # mask it again afterwards
        ConfigText.onDeselect(self, session)
        self.hidden = True
# lets the user select between [min, min+stepwidth, min+(stepwidth*2)..., maxval] with maxval <= max depending
# on the stepwidth
# min, max, stepwidth, default are int values
# wraparound: pressing RIGHT key at max value brings you to min value and vice versa if set to True
class ConfigSelectionNumber(ConfigSelection):
    """A ConfigSelection over the integers [min, max] in steps of
    stepwidth, exposed to callers as int values.

    Parameters min/max intentionally keep the original (builtin-shadowing)
    names, as they are part of the public keyword interface.
    """
    def __init__(self, min, max, stepwidth, default = None, wraparound = False):
        self.wraparound = wraparound
        if default is None:
            default = min
        choices = []
        current = min
        while current <= max:
            choices.append(str(current))
            current += stepwidth
        ConfigSelection.__init__(self, choices, str(default))
    def getValue(self):
        # Choices are stored as strings; expose the selection as an int.
        return int(ConfigSelection.getValue(self))
    def setValue(self, val):
        ConfigSelection.setValue(self, str(val))
    def handleKey(self, key):
        # Without wraparound, swallow LEFT on the first choice and RIGHT on
        # the last one instead of cycling around.
        if not self.wraparound and key in (KEY_LEFT, KEY_RIGHT):
            pos = self.choices.index(self.value)
            if key == KEY_RIGHT and pos + 1 == len(self.choices):
                return
            if key == KEY_LEFT and pos == 0:
                return
        ConfigSelection.handleKey(self, key)
class ConfigNumber(ConfigText):
    """Text-based editor for a plain (non-negative) integer value."""
    def __init__(self, default = 0):
        ConfigText.__init__(self, str(default), fixed_size = False)
    def getValue(self):
        return int(self.text)
    def setValue(self, val):
        self.text = str(val)
    value = property(getValue, setValue)
    _value = property(getValue, setValue)
    def isChanged(self):
        # Compare the string form, since saved_value is always a string.
        sv = self.saved_value
        strv = self.tostring(self.value)
        if sv is None and strv == self.default:
            return False
        return strv != sv
    def conform(self):
        # Strip leading zeros while keeping the cursor on the same digit,
        # measured from the right-hand end of the text.
        pos = len(self.text) - self.marked_pos
        self.text = self.text.lstrip("0")
        if self.text == "":
            self.text = "0"
        if pos > len(self.text):
            self.marked_pos = 0
        else:
            self.marked_pos = len(self.text) - pos
    def handleKey(self, key):
        if key in KEY_NUMBERS or key == KEY_ASCII:
            if key == KEY_ASCII:
                ascii = getPrevAsciiCode()
                if not (48 <= ascii <= 57):
                    # Ignore non-digit ASCII input.
                    return
            else:
                ascii = getKeyNumber(key) + 48
            newChar = unichr(ascii)
            if self.allmarked:
                self.deleteAllChars()
                self.allmarked = False
            self.insertChar(newChar, self.marked_pos, False)
            self.marked_pos += 1
        else:
            # Cursor movement, delete etc. are inherited from ConfigText.
            ConfigText.handleKey(self, key)
        self.conform()
    def onSelect(self, session):
        # NOTE(review): self.value is an int here, so (self.value != "")
        # is always True -- verify the "mark all" intent.
        self.allmarked = (self.value != "")
    def onDeselect(self, session):
        self.marked_pos = 0
        self.offset = 0
        if not self.last_value == self.value:
            self.changedFinal()
            self.last_value = self.value
class ConfigSearchText(ConfigText):
    """ConfigText variant that re-initialises its NumericalTextInput base
    in search mode (no key timeout handling)."""
    def __init__(self, default = "", fixed_size = False, visible_width = False):
        ConfigText.__init__(self, default = default, fixed_size = fixed_size, visible_width = visible_width)
        NumericalTextInput.__init__(self, nextFunc = self.nextFunc, handleTimeout = False, search = True)
class ConfigDirectory(ConfigText):
    """Text entry holding a directory path.  An empty text is exposed as
    None and rendered as a placeholder label; the path itself is chosen
    through an external browser, not typed."""
    def __init__(self, default="", visible_width=60):
        ConfigText.__init__(self, default, fixed_size = True, visible_width = visible_width)
    def handleKey(self, key):
        # Selection happens through a location browser, not by typing.
        pass
    def getValue(self):
        if self.text == "":
            return None
        else:
            return ConfigText.getValue(self)
    def setValue(self, val):
        if val == None:
            val = ""
        ConfigText.setValue(self, val)
    def getMulti(self, selected):
        if self.text == "":
            # No path set yet: show the translated placeholder, nothing marked.
            return ("mtext"[1-selected:], _("List of storage devices"), range(0))
        else:
            return ConfigText.getMulti(self, selected)
    def onSelect(self, session):
        self.allmarked = (self.value != "")
# a slider.
class ConfigSlider(ConfigElement):
    """An integer value adjusted in fixed increments between limits[0]
    and limits[1], rendered as a slider widget."""
    def __init__(self, default = 0, increment = 1, limits = (0, 100)):
        ConfigElement.__init__(self)
        self.value = self.last_value = self.default = default
        self.min = limits[0]
        self.max = limits[1]
        self.increment = increment
    def checkValues(self):
        # Clamp the current value into [min, max].
        if self.value < self.min:
            self.value = self.min
        if self.value > self.max:
            self.value = self.max
    def handleKey(self, key):
        new_value = self.value
        if key == KEY_LEFT:
            new_value -= self.increment
        elif key == KEY_RIGHT:
            new_value += self.increment
        elif key == KEY_HOME:
            new_value = self.min
        elif key == KEY_END:
            new_value = self.max
        else:
            # Any other key is ignored.
            return
        self.value = new_value
        self.checkValues()
    def getText(self):
        return "%d / %d" % (self.value, self.max)
    def getMulti(self, selected):
        self.checkValues()
        return ("slider", self.value, self.max)
    def fromstring(self, value):
        # Saved values are stored as strings.
        return int(value)
# a satlist. in fact, it's a ConfigSelection.
class ConfigSatlist(ConfigSelection):
    """A ConfigSelection over a satellite list; each entry of *list* is an
    (orbital position, description, flags) triple, and the stored value is
    the orbital position as a string."""
    def __init__(self, list, default = None):
        if default is not None:
            default = str(default)
        choices = []
        for orbpos, desc, flags in list:
            choices.append((str(orbpos), desc))
        ConfigSelection.__init__(self, choices = choices, default = default)
    def getOrbitalPosition(self):
        # Empty value means "no satellite selected".
        current = self.value
        if current == "":
            return None
        return int(current)
    orbital_position = property(getOrbitalPosition)
class ConfigSet(ConfigElement):
    """A multi-select over a fixed list of choices; the value is the
    (sorted) list of currently enabled choices.

    Bug fix: the constructor previously used a mutable default argument
    (``default = []``) that was then sorted and stored as ``self.default``,
    so every ConfigSet created without an explicit default shared -- and
    mutated -- the very same list object.  A fresh list is now created
    per instance; explicit defaults behave exactly as before.
    """
    def __init__(self, choices, default = None):
        ConfigElement.__init__(self)
        if isinstance(choices, list):
            choices.sort()
            self.choices = choicesList(choices, choicesList.LIST_TYPE_LIST)
        else:
            assert False, "ConfigSet choices must be a list!"
        if default is None:
            default = []
        # pos == -1 means "no choice highlighted".
        self.pos = -1
        default.sort()
        self.last_value = self.default = default
        self.value = default[:]
    def toggleChoice(self, choice):
        # Add/remove a single choice in the current value, keeping it sorted.
        value = self.value
        if choice in value:
            value.remove(choice)
        else:
            value.append(choice)
            value.sort()
        self.changed()
    def handleKey(self, key):
        # Digits/DELETE/BACKSPACE toggle the highlighted choice; LEFT/RIGHT
        # move the highlight (wrapping through -1 = none); HOME/END clear it.
        if key in KEY_NUMBERS + [KEY_DELETE, KEY_BACKSPACE]:
            if self.pos != -1:
                self.toggleChoice(self.choices[self.pos])
        elif key == KEY_LEFT:
            if self.pos < 0:
                self.pos = len(self.choices)-1
            else:
                self.pos -= 1
        elif key == KEY_RIGHT:
            if self.pos >= len(self.choices)-1:
                self.pos = -1
            else:
                self.pos += 1
        elif key in (KEY_HOME, KEY_END):
            self.pos = -1
    def genString(self, lst):
        # Space-separated human-readable descriptions for a list of choices.
        res = ""
        for x in lst:
            res += self.description[x]+" "
        return res
    def getText(self):
        return self.genString(self.value)
    def getMulti(self, selected):
        # When selected, highlight the choice under the cursor; a choice
        # that is not part of the value is shown in parentheses.
        if not selected or self.pos == -1:
            return ("text", self.genString(self.value))
        else:
            tmp = self.value[:]
            ch = self.choices[self.pos]
            mem = ch in self.value
            if not mem:
                tmp.append(ch)
                tmp.sort()
            ind = tmp.index(ch)
            val1 = self.genString(tmp[:ind])
            val2 = " "+self.genString(tmp[ind+1:])
            if mem:
                chstr = " "+self.description[ch]+" "
            else:
                chstr = "("+self.description[ch]+")"
            len_val1 = len(val1)
            return ("mtext", val1+chstr+val2, range(len_val1, len_val1 + len(chstr)))
    def onDeselect(self, session):
        self.pos = -1
        if not self.last_value == self.value:
            self.changedFinal()
            self.last_value = self.value[:]
    def tostring(self, value):
        return str(value)
    def fromstring(self, val):
        # NOTE(review): eval() of the saved string -- only safe as long as
        # the settings file is trusted local input.
        return eval(val)
    description = property(lambda self: descriptionList(self.choices.choices, choicesList.LIST_TYPE_LIST))
class ConfigLocations(ConfigElement):
    """A list of filesystem locations (e.g. movie directories).

    Each entry of self.locations is a 4-item list
    [path, mountpoint, mount_available, part_of_value]; the public value
    is the list of paths whose entries are currently flagged in use.

    Bug fix: isChanged() referenced an undefined name ``val`` and raised
    NameError whenever it was called; it now tests the saved value ``sv``
    as clearly intended (compare ConfigElement-style isChanged elsewhere
    in this file).
    """
    def __init__(self, default = [], visible_width = False):
        ConfigElement.__init__(self)
        self.visible_width = visible_width
        # pos == -1 means "no entry highlighted".
        self.pos = -1
        self.default = default
        self.locations = []
        self.mountpoints = []
        self.value = default[:]
    def setValue(self, value):
        # Merge the requested path list into self.locations, dropping
        # entries no longer wanted and adding new ones.
        locations = self.locations
        loc = [x[0] for x in locations if x[3]]
        add = [x for x in value if not x in loc]
        diff = add + [x for x in loc if not x in value]
        locations = [x for x in locations if not x[0] in diff] + [[x, self.getMountpoint(x), True, True] for x in add]
        locations.sort(key = lambda x: x[0])
        self.locations = locations
        self.changed()
    def getValue(self):
        # Refresh mount state, then report the paths that are available.
        self.checkChangedMountpoints()
        locations = self.locations
        for x in locations:
            x[3] = x[2]
        return [x[0] for x in locations if x[3]]
    value = property(getValue, setValue)
    def tostring(self, value):
        return str(value)
    def fromstring(self, val):
        # NOTE(review): eval() of the saved string -- only safe for a
        # trusted settings file.
        return eval(val)
    def load(self):
        sv = self.saved_value
        if sv is None:
            tmp = self.default
        else:
            tmp = self.fromstring(sv)
        locations = [[x, None, False, False] for x in tmp]
        self.refreshMountpoints()
        for x in locations:
            if fileExists(x[0]):
                x[1] = self.getMountpoint(x[0])
                x[2] = True
        self.locations = locations
    def save(self):
        locations = self.locations
        if self.save_disabled or not locations:
            self.saved_value = None
        else:
            self.saved_value = self.tostring([x[0] for x in locations])
    def isChanged(self):
        sv = self.saved_value
        locations = self.locations
        # Fixed: this previously read "if val is None ..." with 'val'
        # undefined, raising NameError at runtime.
        if sv is None and not locations:
            return False
        return self.tostring([x[0] for x in locations]) != sv
    def addedMount(self, mp):
        # A mountpoint appeared: re-enable entries living on it.
        for x in self.locations:
            if x[1] == mp:
                x[2] = True
            elif x[1] == None and fileExists(x[0]):
                x[1] = self.getMountpoint(x[0])
                x[2] = True
    def removedMount(self, mp):
        # A mountpoint vanished: flag its entries unavailable.
        for x in self.locations:
            if x[1] == mp:
                x[2] = False
    def refreshMountpoints(self):
        # Longest mountpoints first so getMountpoint() matches the most
        # specific mount.
        self.mountpoints = [p.mountpoint for p in harddiskmanager.getMountedPartitions() if p.mountpoint != "/"]
        self.mountpoints.sort(key = lambda x: -len(x))
    def checkChangedMountpoints(self):
        oldmounts = self.mountpoints
        self.refreshMountpoints()
        newmounts = self.mountpoints
        if oldmounts == newmounts:
            return
        for x in oldmounts:
            if not x in newmounts:
                self.removedMount(x)
        for x in newmounts:
            if not x in oldmounts:
                self.addedMount(x)
    def getMountpoint(self, file):
        file = os_path.realpath(file)+"/"
        for m in self.mountpoints:
            if file.startswith(m):
                return m
        return None
    def handleKey(self, key):
        # LEFT/RIGHT cycle the highlight through the entries, passing
        # through -1 ("none"); HOME/END clear the highlight.
        if key == KEY_LEFT:
            self.pos -= 1
            if self.pos < -1:
                self.pos = len(self.value)-1
        elif key == KEY_RIGHT:
            self.pos += 1
            if self.pos >= len(self.value):
                self.pos = -1
        elif key in (KEY_HOME, KEY_END):
            self.pos = -1
    def getText(self):
        return " ".join(self.value)
    def getMulti(self, selected):
        if not selected:
            valstr = " ".join(self.value)
            if self.visible_width and len(valstr) > self.visible_width:
                return ("text", valstr[0:self.visible_width])
            else:
                return ("text", valstr)
        else:
            # Build the full string while recording the character span of
            # the highlighted entry, then scroll that span into view.
            i = 0
            valstr = ""
            ind1 = 0
            ind2 = 0
            for val in self.value:
                if i == self.pos:
                    ind1 = len(valstr)
                valstr += str(val)+" "
                if i == self.pos:
                    ind2 = len(valstr)
                i += 1
            if self.visible_width and len(valstr) > self.visible_width:
                if ind1+1 < self.visible_width/2:
                    off = 0
                else:
                    off = min(ind1+1-self.visible_width/2, len(valstr)-self.visible_width)
                return ("mtext", valstr[off:off+self.visible_width], range(ind1-off,ind2-off))
            else:
                return ("mtext", valstr, range(ind1,ind2))
    def onDeselect(self, session):
        self.pos = -1
# nothing.
class ConfigNothing(ConfigSelection):
    # A placeholder element with a single empty choice; used for entries
    # that only show a label or trigger an action.
    def __init__(self):
        ConfigSelection.__init__(self, choices = [("","")])
# until here, 'saved_value' always had to be a *string*.
# now, in ConfigSubsection, and only there, saved_value
# is a dict, essentially forming a tree.
#
# config.foo.bar=True
# config.foobar=False
#
# turns into:
# config.saved_value == {"foo": {"bar": "True"}, "foobar": "False"}
#
class ConfigSubsectionContent(object):
    # Bare attribute container used by ConfigSubsection, which overrides
    # __setattr__ and therefore keeps its bookkeeping on this helper object.
    pass
# we store a backup of the loaded configuration
# data in self.stored_values, to be able to deploy
# them when a new config element will be added,
# so non-default values are instantly available
# A list, for example:
# config.dipswitches = ConfigSubList()
# config.dipswitches.append(ConfigYesNo())
# config.dipswitches.append(ConfigYesNo())
# config.dipswitches.append(ConfigYesNo())
class ConfigSubList(list, object):
    """A list of config elements whose saved values are (de)serialised as
    a dict keyed by the stringified list index.  Stored values for indices
    that do not exist yet are kept and deployed when the element is
    appended."""
    def __init__(self):
        list.__init__(self)
        self.stored_values = {}
    def save(self):
        for element in self:
            element.save()
    def load(self):
        for element in self:
            element.load()
    def getSavedValue(self):
        result = {}
        for index, element in enumerate(self):
            saved = element.saved_value
            if saved is not None:
                result[str(index)] = saved
        return result
    def setSavedValue(self, values):
        self.stored_values = dict(values)
        for key, saved in self.stored_values.items():
            # Only deploy values whose index already exists; the rest wait
            # for append().
            if int(key) < len(self):
                self[int(key)].saved_value = saved
    saved_value = property(getSavedValue, setSavedValue)
    def append(self, item):
        index_key = str(len(self))
        list.append(self, item)
        if index_key in self.stored_values:
            # A value was loaded for this slot before the element existed.
            item.saved_value = self.stored_values[index_key]
            item.load()
    def dict(self):
        return dict((str(index), value) for index, value in enumerate(self))
# same as ConfigSubList, just as a dictionary.
# care must be taken that the 'key' has a proper
# str() method, because it will be used in the config
# file.
class ConfigSubDict(dict, object):
    """Same idea as ConfigSubList, but keyed by arbitrary dict keys; the
    key must have a stable str() since that is what ends up in the config
    file.  Stored values for keys that do not exist yet are kept and
    deployed when the item is assigned."""
    def __init__(self):
        dict.__init__(self)
        self.stored_values = {}
    def save(self):
        for element in self.values():
            element.save()
    def load(self):
        for element in self.values():
            element.load()
    def getSavedValue(self):
        result = {}
        for key, element in self.items():
            saved = element.saved_value
            if saved is not None:
                result[str(key)] = saved
        return result
    def setSavedValue(self, values):
        self.stored_values = dict(values)
        for key, element in self.items():
            if str(key) in self.stored_values:
                element.saved_value = self.stored_values[str(key)]
    saved_value = property(getSavedValue, setSavedValue)
    def __setitem__(self, key, item):
        dict.__setitem__(self, key, item)
        if str(key) in self.stored_values:
            # A value was loaded for this key before the element existed.
            item.saved_value = self.stored_values[str(key)]
            item.load()
    def dict(self):
        return self
# Like the classes above, just with a more "native"
# syntax.
#
# some evil stuff must be done to allow instant
# loading of added elements. this is why this class
# is so complex.
#
# we need the 'content' because we overwrite
# __setattr__.
# If you don't understand this, try adding
# __setattr__ to a usual exisiting class and you will.
class ConfigSubsection(object):
    """A named tree node of config elements, accessed via plain attribute
    syntax (config.foo.bar = ConfigYesNo()).  Overrides __setattr__, so
    all real state lives on self.content."""
    def __init__(self):
        # Must go through __dict__ because __setattr__ is overridden below.
        self.__dict__["content"] = ConfigSubsectionContent()
        self.content.items = { }
        self.content.stored_values = { }
    def __setattr__(self, name, value):
        # Registering an element; the special name "saved_value" routes to
        # setSavedValue() instead.
        if name == "saved_value":
            return self.setSavedValue(value)
        assert isinstance(value, (ConfigSubsection, ConfigElement, ConfigSubList, ConfigSubDict)), "ConfigSubsections can only store ConfigSubsections, ConfigSubLists, ConfigSubDicts or ConfigElements"
        content = self.content
        content.items[name] = value
        x = content.stored_values.get(name, None)
        if x is not None:
            # A value for this name was loaded before the element existed:
            # deploy it now so non-default values are instantly available.
            value.saved_value = x
            value.load()
    def __getattr__(self, name):
        return self.content.items[name]
    def getSavedValue(self):
        # Merge live element values into the stored tree, dropping entries
        # whose element no longer reports a saved value.
        res = self.content.stored_values
        for (key, val) in self.content.items.items():
            sv = val.saved_value
            if sv is not None:
                res[key] = sv
            elif key in res:
                del res[key]
        return res
    def setSavedValue(self, values):
        values = dict(values)
        self.content.stored_values = values
        for (key, val) in self.content.items.items():
            value = values.get(key, None)
            if value is not None:
                val.saved_value = value
    saved_value = property(getSavedValue, setSavedValue)
    def save(self):
        for x in self.content.items.values():
            x.save()
    def load(self):
        for x in self.content.items.values():
            x.load()
    def dict(self):
        return self.content.items
# the root config object, which also can "pickle" (=serialize)
# down the whole config tree.
#
# we try to keep non-existing config entries, to apply them whenever
# a new config entry is added to a subsection
# also, non-existing config entries will be saved, so they won't be
# lost when a config entry disappears.
class Config(ConfigSubsection):
    """Root of the configuration tree; (de)serialises the whole tree to a
    flat "config.a.b=value" text format."""
    def __init__(self):
        ConfigSubsection.__init__(self)
    def pickle_this(self, prefix, topickle, result):
        # Recursively flatten the saved-value tree into "name=value" lines,
        # accumulating fragments in 'result' (joined once by pickle()).
        for (key, val) in topickle.items():
            name = '.'.join((prefix, key))
            if isinstance(val, dict):
                self.pickle_this(name, val, result)
            elif isinstance(val, tuple):
                result += [name, '=', val[0], '\n']
            else:
                result += [name, '=', val, '\n']
    def pickle(self):
        result = []
        self.pickle_this("config", self.saved_value, result)
        return ''.join(result)
    def unpickle(self, lines, base_file=True):
        # Parse "config.a.b=value" lines back into a nested dict tree.
        tree = { }
        configbase = tree.setdefault("config", {})
        for l in lines:
            # Skip blank lines and comments.
            if not l or l[0] == '#':
                continue
            result = l.split('=', 1)
            if len(result) != 2:
                continue
            (name, val) = result
            val = val.strip()
            names = name.split('.')
            base = configbase
            for n in names[1:-1]:
                base = base.setdefault(n, {})
            base[names[-1]] = val
            if not base_file: # not the initial config file..
                #update config.x.y.value when exist
                # NOTE(review): eval() of a dotted name read from the file
                # -- only safe for trusted settings files.
                try:
                    configEntry = eval(name)
                    if configEntry is not None:
                        configEntry.value = val
                except (SyntaxError, KeyError):
                    pass
        # we inherit from ConfigSubsection, so ...
        #object.__setattr__(self, "saved_value", tree["config"])
        if "config" in tree:
            self.setSavedValue(tree["config"])
    def saveToFile(self, filename):
        # Atomic write: dump to "<file>.writing", fsync, then rename over
        # the old settings file so a crash never leaves a half-written file.
        text = self.pickle()
        try:
            import os
            f = open(filename + ".writing", "w")
            f.write(text)
            f.flush()
            os.fsync(f.fileno())
            f.close()
            os.rename(filename + ".writing", filename)
        except IOError:
            print "Config: Couldn't write %s" % filename
    def loadFromFile(self, filename, base_file=True):
        self.unpickle(open(filename, "r"), base_file)
# The global configuration tree; other modules attach subsections to it.
config = Config()
config.misc = ConfigSubsection()
class ConfigFile:
    """Loads/saves the global 'config' tree from the settings file and
    resolves dotted keys like "config.a.b" to their string values."""
    CONFIG_FILE = resolveFilename(SCOPE_CONFIG, "settings")
    def load(self):
        try:
            config.loadFromFile(self.CONFIG_FILE, True)
        except IOError, e:
            print "unable to load config (%s), assuming defaults..." % str(e)
    def save(self):
        # config.save()
        config.saveToFile(self.CONFIG_FILE)
    def __resolveValue(self, pickles, cmap):
        # Recursively walk the config tree along the remaining name parts.
        key = pickles[0]
        if cmap.has_key(key):
            if len(pickles) > 1:
                return self.__resolveValue(pickles[1:], cmap[key].dict())
            else:
                return str(cmap[key].value)
        return None
    def getResolvedKey(self, key):
        # Resolve a full dotted key; returns "" (and logs) on failure.
        names = key.split('.')
        if len(names) > 1:
            if names[0] == "config":
                ret=self.__resolveValue(names[1:], config.content.items)
                if ret and len(ret):
                    return ret
        print "getResolvedKey", key, "failed !! (Typo??)"
        return ""
def NoSave(element):
    # Mark a config element as not-to-be-persisted and return it, for
    # inline use: config.x = NoSave(ConfigYesNo()).
    element.disableSave()
    return element
# Singleton settings-file handler, loaded immediately at import time.
configfile = ConfigFile()
configfile.load()
def getConfigListEntry(*args):
    # Build a (description, config_element[, ...]) tuple for ConfigList
    # screens; simply validates and passes the arguments through.
    assert len(args) > 1, "getConfigListEntry needs a minimum of two arguments (descr, configElement)"
    return args
def updateConfigElement(element, newelement):
    # Carry the current value over from an old element to its replacement.
    newelement.value = element.value
    return newelement
#def _(x):
# return x
#
#config.bla = ConfigSubsection()
#config.bla.test = ConfigYesNo()
#config.nim = ConfigSubList()
#config.nim.append(ConfigSubsection())
#config.nim[0].bla = ConfigYesNo()
#config.nim.append(ConfigSubsection())
#config.nim[1].bla = ConfigYesNo()
#config.nim[1].blub = ConfigYesNo()
#config.arg = ConfigSubDict()
#config.arg["Hello"] = ConfigYesNo()
#
#config.arg["Hello"].handleKey(KEY_RIGHT)
#config.arg["Hello"].handleKey(KEY_RIGHT)
#
##config.saved_value
#
##configfile.save()
#config.save()
#print config.pickle()
# Each of the four blocks of an HDMI-CEC physical address is limited to 0-15.
cec_limits = [(0,15),(0,15),(0,15),(0,15)]
class ConfigCECAddress(ConfigSequence):
    """Editor for a 4-block HDMI-CEC physical address (a.b.c.d)."""
    def __init__(self, default, auto_jump = False):
        ConfigSequence.__init__(self, seperator = ".", limits = cec_limits, default = default)
        # Maximum digit count per block, derived from each upper limit.
        self.block_len = [len(str(x[1])) for x in self.limits]
        self.marked_block = 0
        # True: the next digit replaces the block; False: it is appended.
        self.overwrite = True
        self.auto_jump = auto_jump
    def handleKey(self, key):
        if key == KEY_LEFT:
            if self.marked_block > 0:
                self.marked_block -= 1
            self.overwrite = True
        elif key == KEY_RIGHT:
            if self.marked_block < len(self.limits)-1:
                self.marked_block += 1
            self.overwrite = True
        elif key == KEY_HOME:
            self.marked_block = 0
            self.overwrite = True
        elif key == KEY_END:
            self.marked_block = len(self.limits)-1
            self.overwrite = True
        elif key in KEY_NUMBERS or key == KEY_ASCII:
            if key == KEY_ASCII:
                code = getPrevAsciiCode()
                if code < 48 or code > 57:
                    # Ignore non-digit ASCII input.
                    return
                number = code - 48
            else:
                number = getKeyNumber(key)
            oldvalue = self._value[self.marked_block]
            if self.overwrite:
                self._value[self.marked_block] = number
                self.overwrite = False
            else:
                oldvalue *= 10
                newvalue = oldvalue + number
                if self.auto_jump and newvalue > self.limits[self.marked_block][1] and self.marked_block < len(self.limits)-1:
                    # Digit does not fit this block: advance and retry it
                    # in the next block.
                    self.handleKey(KEY_RIGHT)
                    self.handleKey(key)
                    return
                else:
                    self._value[self.marked_block] = newvalue
            if len(str(self._value[self.marked_block])) >= self.block_len[self.marked_block]:
                # Block is full; advance automatically.
                self.handleKey(KEY_RIGHT)
            self.validate()
            self.changed()
    def genText(self):
        # Build the display string and the character span of the marked block.
        value = ""
        block_strlen = []
        for i in self._value:
            block_strlen.append(len(str(i)))
            if value:
                value += self.seperator
            value += str(i)
        leftPos = sum(block_strlen[:(self.marked_block)])+self.marked_block
        rightPos = sum(block_strlen[:(self.marked_block+1)])+self.marked_block
        mBlock = range(leftPos, rightPos)
        return (value, mBlock)
    def getMulti(self, selected):
        (value, mBlock) = self.genText()
        if self.enabled:
            return ("mtext"[1-selected:], value, mBlock)
        else:
            return ("text", value)
    def getHTML(self, id):
        # we definitely don't want leading zeros
        return '.'.join(["%d" % d for d in self.value])
|
sodo13/openpli-gls
|
lib/python/Components/config.py
|
Python
|
gpl-2.0
| 49,107
|
#!/usr/bin/env python
import os,sys
import glob,re
import imaplib,email,getpass,argparse
import datetime,dateutil.parser,time
import email.header
# Script configuration: CLI options, credentials, and on-disk cache setup.
script_name = os.path.splitext(__file__)[0]
parser = argparse.ArgumentParser(description='unsub-me.py')
parser.add_argument('-s', '--server', help='IMAP Server eg. imap.gmail.com' ,required=True)
parser.add_argument('-p', '--port', help='IMAP Server port eg. 993' ,required=True)
parser.add_argument('-u', '--user', help='Usename' ,required=True)
parser.add_argument('-f', '--folder', help='Folder to search.',default="inbox")
parser.add_argument('-c', '--cache-dir', help='Local directory to cache emails',default="." + script_name)
parser.add_argument('-d', '--debug', help='Switch on debugging')
parser.add_argument('-n', '--ndays', help='Number of days to grab from mailbox',default=31)
args = parser.parse_args()
imap_cachedir = args.cache_dir
user = args.user
check_days = args.ndays
server = args.server
port = args.port
folder = args.folder
# Accumulators filled while scanning the mailbox.
emails = []
unsub_urls = []
unsub_doms = []
password = getpass.getpass("Please enter the password for " + server + ":\n")
# A link is considered an unsubscribe link if it contains any of these words.
suspected_words=['unsubscribe']
if not os.path.exists(imap_cachedir):
    os.mkdir(imap_cachedir)
# decode headers
def decode_header(t):
    # Decode a possibly RFC 2047-encoded header into a UTF-8 byte string,
    # falling back to ISO-8859-1 when no charset is declared.
    # Note: intentionally shadows email.header.decode_header in this module.
    dh = email.header.decode_header(t)
    default_charset = 'ISO-8859-1'
    r = ''.join([ unicode(t[0], t[1] or default_charset) for t in dh ])
    return r.encode('utf-8')
# extract meaningful parts of e-mails
def email_to_string(msg):
    """Concatenate the decoded text/* payloads of *msg*, newline-separated;
    non-text parts (images, attachments, ...) are skipped."""
    texts = [part.get_payload(decode=True)
             for part in msg.walk()
             if part.get_content_maintype() == 'text']
    return "\n".join(texts)
# Connect, search the folder for recent messages and fetch each one,
# using an on-disk cache ('+' printed on cache hit, '.' on fetch).
try:
    print "Looking for unsubscribe links from the past %s days." % check_days
    mail = imaplib.IMAP4_SSL(server,port)
    mail.login(user.strip(), password.strip())
    mail.list()
    # Read-only select so nothing gets marked as seen.
    mail.select(folder,readonly=True )
    date = (datetime.date.today() - datetime.timedelta(int(check_days))).strftime("%d-%b-%Y")
    typ, data = mail.uid('search', None, '(SENTSINCE {date})'.format(date=date))
    email_uids = data[0].split()
    print "Found %s emails. Fetching:" % len(email_uids)
    for emailid in email_uids:
        email_cachefile = imap_cachedir+"/o_"+emailid
        email_text = "Unfetched"
        if os.path.isfile(email_cachefile) and os.stat(email_cachefile).st_size > 0:
            with open(email_cachefile, 'r') as cachefile:
                email_text = cachefile.read()
            sys.stdout.write("+")
        else:
            typ, data = mail.uid('fetch', emailid, '(RFC822)')
            sys.stdout.write(".")
            email_text = data[0][1]
            # Write-then-rename keeps the cache entry atomic.
            with open(email_cachefile + '.tmp', 'w') as cachefile:
                cachefile.write(email_text)
            os.rename(email_cachefile + '.tmp', email_cachefile)
        sys.stdout.flush()
        emails.append(email.message_from_string(email_text))
    mail.close()
    mail.logout()
    print " All fetched."
    emails.reverse()
except EOFError:
    # NOTE(review): only EOFError is handled; imaplib/socket errors will
    # still propagate as a traceback -- confirm whether that is intended.
    print "Error while fetching emails"
    sys.exit(1)
# Scan each message body for href targets that look like unsubscribe
# links, then print the de-duplicated URL list.
for message in emails:
    message_decoded_from = decode_header(message['From'])
    message_decoded_subject = decode_header(message['Subject'])
    message_body_lower = email_to_string(message).lower()
    message_subject_lower = message_decoded_subject.lower()
    urls = re.findall(r'href=[\'"]?([^\'" >]+)', message_body_lower)
    for url in urls:
        if any([word in url for word in suspected_words]):
            unsub_urls.append(url)
            # Also record just the host part of the URL.
            unsub_doms.append(url.split("//")[-1].split("/")[0])
#print("\n".join( x for x in set(unsub_doms) ) )
print("\n".join( x for x in set(unsub_urls) ) )
#print([[x,unsub_doms.count(x)] for x in set(unsub_doms)])
|
nsivyer/unsub-me
|
unsub-me.py
|
Python
|
gpl-2.0
| 3,791
|
import datetime
from django.db import models
from django.db.models import Q
from django import forms
from django.utils.translation import ugettext_lazy as _
from timezones.fields import TimeZoneField
from pyconde import south_rules
CONFERENCE_CACHE = {}
class Conference(models.Model):
    """
    the full conference for a specific year, e.g. US PyCon 2012.
    """
    title = models.CharField(_("title"), max_length=100)
    # when the conference runs
    start_date = models.DateField(_("start date"), null=True, blank=True)
    end_date = models.DateField(_("end date"), null=True, blank=True)
    # timezone the conference is in
    timezone = TimeZoneField(_("timezone"), blank=True)
    # Review window; reviews_active (when not None) overrides the window.
    reviews_start_date = models.DateTimeField(null=True, blank=True)
    reviews_end_date = models.DateTimeField(null=True, blank=True)
    reviews_active = models.NullBooleanField()
    def __unicode__(self):
        return self.title
    def save(self, *args, **kwargs):
        # Invalidate the module-level cache so the next current_conference()
        # call sees fresh data.
        super(Conference, self).save(*args, **kwargs)
        if self.id in CONFERENCE_CACHE:
            del CONFERENCE_CACHE[self.id]
    def delete(self):
        pk = self.pk
        super(Conference, self).delete()
        try:
            del CONFERENCE_CACHE[pk]
        except KeyError:
            pass
    def get_reviews_active(self):
        # Explicit flag wins; otherwise fall back to the date window.
        if self.reviews_active is not None:
            return self.reviews_active
        # NOTE(review): uses naive local now() here, while
        # SessionKind.accepts_proposals uses utcnow() -- confirm which
        # timebase the stored datetimes use.
        now = datetime.datetime.now()
        if self.reviews_start_date and self.reviews_end_date:
            return self.reviews_start_date <= now <= self.reviews_end_date
        return False
    class Meta(object):
        verbose_name = _("conference")
        verbose_name_plural = _("conferences")
class CurrentConferenceManager(models.Manager):
    """
    A simple filter that filters instances of the current class by the
    foreign key "conference" being the current conference.
    """
    # NOTE(review): get_query_set is the pre-Django-1.6 manager API.
    def get_query_set(self):
        return super(CurrentConferenceManager, self).get_query_set().filter(conference=current_conference())
class Section(models.Model):
    """
    a section of the conference such as "Tutorials", "Workshops",
    "Talks", "Expo", "Sprints", that may have its own review and
    scheduling process.
    """
    conference = models.ForeignKey(Conference, verbose_name=_("conference"),
        related_name='sections')
    name = models.CharField(_("name"), max_length=100)
    # when the section runs
    start_date = models.DateField(_("start date"), null=True, blank=True)
    end_date = models.DateField(_("end date"), null=True, blank=True)
    slug = models.SlugField(_("slug"), null=True, blank=True)
    order = models.IntegerField(_("order"), default=0)
    description = models.TextField(_("description"), blank=True, null=True)
    # Default manager plus a manager scoped to the current conference.
    objects = models.Manager()
    current_objects = CurrentConferenceManager()
    def __unicode__(self):
        return self.name
    class Meta(object):
        verbose_name = _("section")
        verbose_name_plural = _("sections")
class AudienceLevel(models.Model):
    """
    Sessions, presentations and so on have all a particular target audience.
    Within this target audience you usually have certain levels of experience
    with the topic.
    Most of the there are 3 levels:
    * Novice
    * Intermediate
    * Experienced
    That said, there are sometimes talks that go beyond this by being for
    instance targeted at only people with "Core Contributor" expierence.
    To make custom styling of these levels a bit more flexible, the audience
    level also comes with a slug field for use as CSS-class, while the level
    property is used to sort the audience level.
    """
    conference = models.ForeignKey(Conference, verbose_name=_("conference"))
    name = models.CharField(_("name"), max_length=100)
    slug = models.SlugField(_("slug"))
    level = models.IntegerField(_("level"), blank=True, null=True)
    # Default manager plus a manager scoped to the current conference.
    objects = models.Manager()
    current_objects = CurrentConferenceManager()
    class Meta(object):
        verbose_name = _("audience level")
        verbose_name_plural = _("audience levels")
    def __unicode__(self):
        return self.name
class SessionDuration(models.Model):
    """
    A conference has usually two kinds of session slot durations. One for
    short talks and one for longer talks. The actual time span varies. Some
    conferences have 20 minutes and 50 minutes respectively, some 15 and 30
    minutes for each session.
    """
    conference = models.ForeignKey(Conference, verbose_name=_("conference"))
    label = models.CharField(_("label"), max_length=100)
    slug = models.SlugField(_("slug"))
    minutes = models.IntegerField(_("minutes"))
    # Default manager plus a manager scoped to the current conference.
    objects = models.Manager()
    current_objects = CurrentConferenceManager()
    class Meta(object):
        verbose_name = _("session duration")
        verbose_name_plural = _("session durations")
    def __unicode__(self):
        return u"%s (%d min.)" % (self.label, self.minutes)
class ActiveSessionKindManager(CurrentConferenceManager):
    """Current-conference manager for SessionKind with an extra filter for
    kinds that are currently open for submissions."""
    def filter_open_kinds(self):
        # Open = explicitly not closed, or undetermined (closed=None) with
        # utcnow inside the window start_date < now <= end_date.
        now = datetime.datetime.utcnow()
        return self.get_query_set().filter(
            Q(closed=False)
            | Q(Q(closed=None) & Q(start_date__lt=now) & Q(end_date__gte=now))
        )
class SessionKind(models.Model):
    """A kind of session (e.g. talk, tutorial) with an optional submission
    window; the 'closed' flag, when set, overrides the window."""
    conference = models.ForeignKey(Conference, verbose_name=_("conference"))
    name = models.CharField(_("name"), max_length=50)
    slug = models.SlugField(_("slug"))
    closed = models.NullBooleanField()
    start_date = models.DateTimeField(blank=True, null=True)
    end_date = models.DateTimeField(blank=True, null=True)
    # TODO: available_durations = models.ManyToManyField('SessionDuration', blank=True, null=True)
    # TODO: available_tracks = models.ManyToManyField('Track', blank=True, null=True)
    objects = models.Manager()
    current_objects = ActiveSessionKindManager()
    class Meta(object):
        verbose_name = _("session kind")
        verbose_name_plural = _("session kinds")
    def __unicode__(self):
        return self.name
    def clean(self):
        """
        A SessionKind can either have neither start nor end date or both.
        """
        super(SessionKind, self).clean()
        if self.closed is None:
            if self.start_date is None or self.end_date is None:
                raise forms.ValidationError(_("You have to specify a start and end date if you leave the 'closed' status undetermined"))
            if self.start_date >= self.end_date:
                raise forms.ValidationError(_("The end date has to be after the start date"))
    def accepts_proposals(self):
        # No new proposals once reviewing is active or the conference has
        # already started.
        if self.conference.get_reviews_active():
            return False
        # NOTE(review): naive utcnow() compared against stored datetimes --
        # confirm the project stores UTC.
        now = datetime.datetime.utcnow()
        if self.conference.start_date is not None:
            if self.conference.start_date < now.date():
                return False
        if self.closed is None:
            return self.start_date <= now <= self.end_date
        return not self.closed
class Track(models.Model):
    """A thematic track of the conference, ordered and optionally hidden."""
    conference = models.ForeignKey(Conference, verbose_name=_("conference"))
    name = models.CharField(_("name"), max_length=100)
    slug = models.SlugField(_("slug"))
    description = models.TextField(_("description"), blank=True, null=True)
    visible = models.BooleanField(_("visible"), default=True)
    order = models.IntegerField(_("order"), default=0)
    # Default manager plus a manager scoped to the current conference.
    objects = models.Manager()
    current_objects = CurrentConferenceManager()
    class Meta(object):
        verbose_name = _("track")
        verbose_name_plural = _("tracks")
        ordering = ['order']
    def __unicode__(self):
        return self.name
class Location(models.Model):
    """
    A location represents a place associated with some part of the conference
    like a session room or a foyer.
    """
    conference = models.ForeignKey(Conference,
        verbose_name=_("conference"))
    name = models.CharField(_("name"), max_length=100)
    slug = models.SlugField(_("slug"))
    order = models.IntegerField(_("order"), default=0)
    used_for_sessions = models.BooleanField(_("used for sessions"),
        default=True)
    objects = models.Manager()
    # NOTE(review): other models in this module name this manager
    # 'current_objects'; here it is 'current_conference' (which also
    # shadows the module-level function of the same name when accessed on
    # the class) -- confirm before renaming, as callers may rely on it.
    current_conference = CurrentConferenceManager()
    def __unicode__(self):
        return self.name
    class Meta(object):
        verbose_name = _("location")
        verbose_name_plural = _("locations")
        ordering = ['order']
def current_conference():
    # Resolve settings.CONFERENCE_ID to a Conference instance, caching the
    # result in the module-level CONFERENCE_CACHE; returns None when the
    # configured conference does not exist.
    from django.conf import settings
    try:
        conf_id = settings.CONFERENCE_ID
    except AttributeError:
        from django.core.exceptions import ImproperlyConfigured
        raise ImproperlyConfigured("You must set the CONFERENCE_ID setting.")
    try:
        current_conf = CONFERENCE_CACHE[conf_id]
    except KeyError:
        try:
            current_conf = Conference.objects.get(pk=conf_id)
        except Conference.DoesNotExist:
            return None
        CONFERENCE_CACHE[conf_id] = current_conf
    return current_conf
|
viswimmer1/PythonGenerator
|
data/python_files/31214140/models.py
|
Python
|
gpl-2.0
| 9,042
|
# -*- coding: utf-8 -*-
#
# Copyright 2017 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""This module represents the Polish language.
.. seealso:: https://en.wikipedia.org/wiki/Polish_language
"""
from translate.lang import common
class pl(common.Common):
    """This class represents Polish."""

    # Characters allowed as accelerator keys: ASCII letters and digits
    # plus the Polish diacritic letters in both cases.
    validaccel = (
        "abcdefghijklmnopqrstuvwxyz"
        "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
        "1234567890"
        "ąćęłńóśźż"
        "ĄĆĘŁŃÓŚŹŻ"
    )
|
unho/translate
|
translate/lang/pl.py
|
Python
|
gpl-2.0
| 1,153
|
# -*- coding: utf-8 -*-
#
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
#
# PAWTRAIN
#
# Pet underground railroad
#
#
# Authors: Baron L. Chandler, baron@venturecranial.com
# -----------------------------------------------------------------------
# COPYRIGHT ©2014 Venture Cranial, LLC
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
#
from django.contrib import admin
from django.contrib.contenttypes import generic
from leaflet.admin import LeafletGeoAdmin
from web.models import (Image, Location, Organization, Pet, Transport,
TransportSegment, UserProfile, )
class UserProfileAdmin(LeafletGeoAdmin):
    """
    The admin interface for UserProfiles.
    """
    # NOTE(review): LeafletGeoAdmin presumably renders the profile's
    # geometry fields on a Leaflet map widget — confirm against django-leaflet.
admin.site.register(UserProfile, UserProfileAdmin)
class PetImageInline(generic.GenericTabularInline):
    # Inline editor for Image rows generically related to a Pet; the raw
    # dimension/size/source fields are maintained elsewhere, so hide them.
    model = Image
    exclude = ('width', 'height', 'size', 'source_url', )
class PetAdmin(admin.ModelAdmin):
    """
    The admin interface for Pets.
    """
    # Images are edited through the inline, not the raw m2m field.
    inlines = [PetImageInline,]
    exclude = ('images', )
admin.site.register(Pet, PetAdmin)
class TransportSegmentInline(admin.TabularInline):
    # Segments edited inline on their parent Transport; fk_name pins the
    # FK used for the inline relation.
    model = TransportSegment
    fk_name = 'transport'
class TransportAdmin(admin.ModelAdmin):
    """
    The admin interface for Transports.
    """
    inlines = [
        TransportSegmentInline,
    ]
    # tracking_number is system-assigned, so never editable here.
    readonly_fields = ('tracking_number', )
    # Form layout: main details, then status timestamps, then audit fields.
    fieldsets = (
        (None, {
            'fields': ('tracking_number', 'shipper', 'receiver', 'notes',)
        }),
        ('Status', {
            'fields': ('status', 'started_on', 'finished_on')
        }),
        ('History', {
            'fields' : ('created_by', 'last_updated_by', )
        }),
    )
admin.site.register(Transport, TransportAdmin)
class TransportSegmentAdmin(LeafletGeoAdmin):
    """
    The admin interface for TransportSegments.
    """
    # NOTE(review): map-based admin via django-leaflet — confirm.
admin.site.register(TransportSegment, TransportSegmentAdmin)
class OrganizationAdmin(admin.ModelAdmin):
    """
    The admin interface for Organizations.
    """
    # Plain ModelAdmin with default behaviour.
admin.site.register(Organization, OrganizationAdmin)
class LocationAdmin(LeafletGeoAdmin):
    """
    The admin interface for Locations.
    """
    # NOTE(review): map-based admin via django-leaflet — confirm.
admin.site.register(Location, LocationAdmin)
|
VentureCranial/PawTrain
|
app/web/admin.py
|
Python
|
gpl-2.0
| 2,237
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
# pylint: disable=C0301
"""ErrorLib web interface."""
__revision__ = "$Id$"
__lastupdated__ = "$Date$"
from invenio.config import CFG_SITE_URL
from invenio.webpage import page
from invenio.errorlib import send_error_report_to_admin
from invenio.webinterface_handler import wash_urlargd, WebInterfaceDirectory
from invenio.urlutils import redirect_to_url
from invenio.messages import gettext_set_language
class WebInterfaceErrorPages(WebInterfaceDirectory):
    """Defines the set of /error pages."""
    # Exported URLs: /error (index) and /error/send.
    _exports = ['', 'send']
    def index(self, req, form):
        """Index page."""
        # /error alone is not meaningful; bounce to the report form.
        redirect_to_url(req, '%s/error/send' % CFG_SITE_URL)
    def send(self, req, form):
        """
        Confirmation page of error report sent the admin
        parameters are the same as used for the error box. See webstyle_templates.tmpl_error_box
        """
        # Every parameter defaults to "NA" so a missing one is detectable.
        argd = wash_urlargd(form, {'header': (str, "NA"),
                                   'url': (str, "NA"),
                                   'time': (str, "NA"),
                                   'browser': (str, "NA"),
                                   'client': (str, "NA"),
                                   'error': (str, "NA"),
                                   'sys_error': (str, "NA"),
                                   'traceback': (str, "NA"),
                                   'referer': (str, "NA"),
                                   })
        # NOTE(review): 'ln' is not declared above — presumably injected by
        # wash_urlargd's defaults; confirm against webinterface_handler.
        _ = gettext_set_language(argd['ln'])
        # client, url, time and error are mandatory; refuse incomplete reports.
        if argd['client'] == "NA":
            return page(title=_("Sorry"),
                        body=_("Cannot send error request, %s parameter missing.") % 'client',
                        lastupdated=__lastupdated__,
                        req=req)
        elif argd['url'] == "NA":
            return page(title=_("Sorry"),
                        body=_("Cannot send error request, %s parameter missing.") % 'url',
                        lastupdated=__lastupdated__,
                        req=req)
        elif argd['time'] == "NA":
            return page(title=_("Sorry"),
                        body=_("Cannot send error request, %s parameter missing.") % 'time',
                        lastupdated=__lastupdated__,
                        req=req)
        elif argd['error'] == "NA":
            return page(title=_("Sorry"),
                        body=_("Cannot send error request, %s parameter missing.") % 'error',
                        lastupdated=__lastupdated__,
                        req=req)
        else:
            # Forward the full report to the site admin, then thank the user.
            send_error_report_to_admin(argd['header'],
                                       argd['url'],
                                       argd['time'],
                                       argd['browser'],
                                       argd['client'],
                                       argd['error'],
                                       argd['sys_error'],
                                       argd['traceback'])
            out = """
<p><span class="exampleleader">%(title)s</span>
<p>%(message)s
<p>%(back)s
""" % \
            {'title' : _("The error report has been sent."),
             'message' : _("Many thanks for helping us to improve the service."),
             'back' : argd['referer']!="NA" and "<a href=\"%s\">back</a>" % (argd['referer'],) or \
             _("Use the back button of your browser to return to the previous page.")
            }
            return page(title=_("Thank you!"),
                        body=out,
                        lastupdated=__lastupdated__,
                        req=req)
|
pombredanne/invenio
|
modules/miscutil/lib/errorlib_webinterface.py
|
Python
|
gpl-2.0
| 4,487
|
##############################################
# File Name: module4.py
# Version: 1.0
# Team No.: 22
# Team Name:
# Date: 4 Nov 15
##############################################
import RPi.GPIO as GPIO
import time
print 'Press CTRL+C to exit the program'
GPIO.setmode(GPIO.BOARD)
GPIO.setup(11, GPIO.OUT)
GPIO.setup(13, GPIO.OUT)
GPIO.setup(15, GPIO.OUT)
RUNNING = True
try:
while RUNNING:
print 'Turn on red light\n'
GPIO.output(11, True)
time.sleep(2)
GPIO.output(11, False)
print 'Turn on orange light\n'
GPIO.output(13, True)
time.sleep(2)
GPIO.output(13, False)
print 'Turn on green light\n'
GPIO.output(15, True)
time.sleep(2)
GPIO.output(15, False)
# If CTRL+C is pressed the main loop is broken
except KeyboardInterrupt:
RUNNING = False
print "\nQuitting program."
# Actions under 'finally' will always be called, regardless of
# what stopped the program (be it an error or an interrupt)
finally:
# Stop and cleanup to finish cleanly so the pins
# are available to be used again
GPIO.cleanup()
print "\nProgram ended"
|
konini-school/pibot22
|
module4.py
|
Python
|
gpl-2.0
| 1,123
|
#!/usr/bin/env python
"""
Extend elements of a table w/ given string to match the longest line of the table
Copyright:
smooth_table Extend elements of a table w/ given string to match the longest line of the table
Copyright (C) 2016 William Brazelton
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from __future__ import print_function
import argparse
import os
from StringIO import StringIO
import sys
def max_line(file_handle, delimiter):
    """Return the greatest number of delimited fields on any line.

    The handle is rewound to the start of the file before returning so
    the caller can iterate it again.
    """
    widest = 0
    for row in file_handle:
        widest = max(widest, len(row.split(delimiter)))
    file_handle.seek(0)
    return widest
def smooth_table(in_handle, out_handle, modifier, delimiter):
    """Pad each row with *modifier* fields up to the widest row's width.

    Reads delimited rows from in_handle and writes the padded rows,
    terminated with os.linesep, to out_handle.
    """
    width = max_line(in_handle, delimiter)
    for raw in in_handle:
        fields = raw.strip().split(delimiter)
        # Extend short rows; rows already at full width get no padding.
        fields += [modifier] * (width - len(fields))
        out_handle.write(delimiter.join(fields) + os.linesep)
def main():
    """Parse command-line arguments and smooth the requested table."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('--in_table', '-i',
                        default=sys.stdin,
                        help='Table to modify [Default: STDIN]')
    parser.add_argument('--out_table', '-o',
                        default=sys.stdout,
                        help='File to write modified table to '
                             '[Default: STDOUT]')
    parser.add_argument('--modifier', '-m',
                        default='UNDEFINED',
                        help='String to place in added table elements '
                             '[Default: UNDEFINED]')
    parser.add_argument('--delimiter', '-d',
                        default='\t',
                        help='Character delimiting table [Default: \\t]')
    args = parser.parse_args()
    try:
        with open(args.in_table, 'rU') as in_handle, \
                open(args.out_table, 'w') as out_handle:
            smooth_table(in_handle, out_handle, args.modifier, args.delimiter)
    except TypeError:
        # The defaults are already-open streams, not file names: open()
        # raised TypeError.  Buffer STDIN so max_line() can rewind it.
        args.in_table = StringIO(sys.stdin.read())
        smooth_table(args.in_table, args.out_table, args.modifier,
                     args.delimiter)
if __name__ == '__main__':
    # Script entry point: run and exit with success status.
    main()
    sys.exit(0)
|
Brazelton-Lab/lab_scripts
|
smooth_table.py
|
Python
|
gpl-2.0
| 3,141
|
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
#
# Copyright (C) 2008, 2018-2021 Philipp Wolfer
# Copyright (C) 2011, 2013 Michael Wiencek
# Copyright (C) 2013, 2018, 2020-2021 Laurent Monin
# Copyright (C) 2016-2017 Sambhav Kothari
# Copyright (C) 2018 Vishal Choudhary
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from PyQt5 import (
QtCore,
QtGui,
QtWidgets,
)
from picard.config import get_config
class RatingWidget(QtWidgets.QWidget):
    # Clickable row of star icons for rating a track; clicking the star
    # matching the current rating again clears the rating to 0.
    def __init__(self, parent, track):
        super().__init__(parent)
        self._track = track
        config = get_config()
        # "rating_steps" counts values 0..N, so N stars are drawn.
        self._maximum = config.setting["rating_steps"] - 1
        try:
            self._rating = int(track.metadata["~rating"] or 0)
        except ValueError:
            # Stored rating was not numeric: fall back to unrated.
            self._rating = 0
        self._highlight = 0
        self._star_pixmap = QtGui.QPixmap(":/images/star.png")
        self._star_gray_pixmap = QtGui.QPixmap(":/images/star-gray.png")
        self._star_size = 16
        self._star_spacing = 2
        # Horizontal dead zone before the first star; clicks there do nothing.
        self._offset = 16
        self._width = self._maximum * (self._star_size + self._star_spacing) + self._offset
        self._height = self._star_size + 6
        self.setMaximumSize(self._width, self._height)
        self.setMinimumSize(self._width, self._height)
        self.setSizePolicy(QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed))
        # Required so mouseMoveEvent fires without a button held down.
        self.setMouseTracking(True)
    def sizeHint(self):
        return QtCore.QSize(self._width, self._height)
    def _setHighlight(self, highlight):
        # Repaint only when the hover highlight actually changes.
        assert 0 <= highlight <= self._maximum
        if highlight != self._highlight:
            self._highlight = highlight
            self.update()
    def mousePressEvent(self, event):
        if event.button() == QtCore.Qt.LeftButton:
            x = event.x()
            if x < self._offset:
                return
            rating = self._getRatingFromPosition(x)
            if self._rating == rating:
                # Clicking the active star count toggles the rating off.
                rating = 0
            self._rating = rating
            self._update_track()
            self.update()
            event.accept()
    def mouseMoveEvent(self, event):
        self._setHighlight(self._getRatingFromPosition(event.x()))
        event.accept()
    def leaveEvent(self, event):
        # Clear the hover highlight when the pointer leaves the widget.
        self._setHighlight(0)
        event.accept()
    def _getRatingFromPosition(self, position):
        # Map an x pixel coordinate to a star index in 1..maximum.
        rating = int((position - self._offset) / (self._star_size + self._star_spacing)) + 1
        if rating > self._maximum:
            rating = self._maximum
        return rating
    def _update_track(self):
        # Write the new rating to the track and all of its files, and
        # optionally submit it to MusicBrainz when the setting is enabled.
        track = self._track
        rating = str(self._rating)
        track.metadata["~rating"] = rating
        for file in track.files:
            file.metadata["~rating"] = rating
        config = get_config()
        if config.setting["submit_ratings"]:
            ratings = {("recording", track.id): self._rating}
            # NOTE(review): self.tagger is not set in this class —
            # presumably provided by Picard's QObject integration; confirm.
            self.tagger.mb_api.submit_ratings(ratings, None)
    def paintEvent(self, event=None):
        painter = QtGui.QPainter(self)
        offset = self._offset
        for i in range(1, self._maximum + 1):
            # Filled star when covered by the rating or the hover highlight.
            if i <= self._rating or i <= self._highlight:
                pixmap = self._star_pixmap
            else:
                pixmap = self._star_gray_pixmap
            painter.drawPixmap(offset, 3, pixmap)
            offset += self._star_size + self._star_spacing
|
zas/picard
|
picard/ui/ratingwidget.py
|
Python
|
gpl-2.0
| 4,085
|
"""autogenerated by genpy from color_filter/Rect2D_.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class Rect2D_(genpy.Message):
  # Generated ROS message class for color_filter/Rect2D_: an axis-aligned
  # rectangle of four int32 fields.  File header says "Do not edit".
  _md5sum = "4425f1067abc7ec2e487d28194eccff4"
  _type = "color_filter/Rect2D_"
  _has_header = False #flag to mark the presence of a Header object
  _full_text = """int32 x
int32 y
int32 width
int32 height
"""
  __slots__ = ['x','y','width','height']
  _slot_types = ['int32','int32','int32','int32']
  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes.  You cannot mix in-order arguments and keyword arguments.
    The available fields are:
       x,y,width,height
    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(Rect2D_, self).__init__(*args, **kwds)
      #message fields cannot be None, assign default values for those that are
      if self.x is None:
        self.x = 0
      if self.y is None:
        self.y = 0
      if self.width is None:
        self.width = 0
      if self.height is None:
        self.height = 0
    else:
      # No arguments given: all fields default to zero.
      self.x = 0
      self.y = 0
      self.width = 0
      self.height = 0
  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types
  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      _x = self
      # All four int32 fields are packed in one little-endian write.
      buff.write(_struct_4i.pack(_x.x, _x.y, _x.width, _x.height))
    except struct.error as se: self._check_types(se)
    except TypeError as te: self._check_types(te)
  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    try:
      end = 0
      _x = self
      start = end
      end += 16
      (_x.x, _x.y, _x.width, _x.height,) = _struct_4i.unpack(str[start:end])
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      _x = self
      buff.write(_struct_4i.pack(_x.x, _x.y, _x.width, _x.height))
    except struct.error as se: self._check_types(se)
    except TypeError as te: self._check_types(te)
  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    try:
      end = 0
      _x = self
      start = end
      end += 16
      (_x.x, _x.y, _x.width, _x.height,) = _struct_4i.unpack(str[start:end])
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
# Pre-compiled struct formats: "<4i" is four little-endian int32s.
_struct_I = genpy.struct_I
_struct_4i = struct.Struct("<4i")
|
grandcat/robotics_g7
|
color_filter/src/color_filter/msg/_Rect2D_.py
|
Python
|
gpl-2.0
| 3,294
|
#!/bin/env python
#urls = [(httpsonly?,'http_host regex pattern', 'path_info regex pattern', 'module.classname', {
# 'request-method':(auth role),
# }),]
import re
from lib.test import Test
# Route table: (https-only flag, host pattern, path pattern, handler, role map).
urls = [
    (0, re.compile('.*', re.I), re.compile('^/test.*'), Test, {}),
]
# Self-test fixtures: each pattern paired with sample URLs it must match.
path = {
    '/test/':{'pattern':re.compile('^/test/.*'),
              'urls':[
                  '/test/1234', '/test/abcd/',
                  '/test?a=1&b=2', '/test?s=abcd',
              ]},
    '/register/validate':{'pattern':re.compile('^/register/validate'),
                          'urls':[
                              '/register/validate?email=crown.hg@gmail.com',
                          ]},
}
def test(path: dict):
    """Verify that every sample URL in *path* matches its pattern.

    Each value holds a 'pattern' (plain string or compiled regex) and a
    list of sample 'urls'.  Matches are echoed to stdout; the first
    non-matching URL raises AssertionError.
    """
    for entry in path.values():
        pattern = entry['pattern']
        for sample in entry['urls']:
            if isinstance(pattern, str):
                print("{}\t{}".format(pattern, sample))
                matched = re.match(pattern, sample)
            else:
                print("{}\t{}".format(pattern.pattern, sample))
                matched = pattern.match(sample)
            assert matched
if __name__ == '__main__':
    # Run the pattern self-test when executed directly.
    test(path)
|
huanguan1978/web3
|
web3/tmpl/url.py
|
Python
|
gpl-2.0
| 1,104
|
# -*- coding: utf-8 -*-
#
from rest_framework import permissions
class IsSwagger(permissions.BasePermission):
    # Grants access only when the view is a schema-generation fake view
    # (the 'swagger_fake_view' attribute is set by the schema tooling).
    def has_permission(self, request, view):
        return getattr(view, 'swagger_fake_view', False)
class IsApplicant(permissions.BasePermission):
    # Allows access only to the user who opened the ticket on the view.
    def has_permission(self, request, view):
        return request.user == view.ticket.applicant
class IsAssignee(permissions.BasePermission):
    # Allows access only to an assignee of the ticket on the view.
    def has_permission(self, request, view):
        return view.ticket.has_assignee(request.user)
|
skyoo/jumpserver
|
apps/tickets/permissions/comment.py
|
Python
|
gpl-2.0
| 508
|
# import the main window object (mw) from ankiqt
from aqt import mw
# import the "show info" tool from utils.py
from aqt.utils import showInfo, tooltip, openFolder, isWin
# import all of the Qt GUI library
from aqt.qt import *
import aqt.editor, aqt.modelchooser, aqt.deckchooser
from anki.importing import TextImporter
import sys, os, traceback
import subprocess
import dictionary
from xslt import *
# We're going to add a menu item below. First we want to create a function to
# be called when the menu item is activated.
class DefinitionList(QListWidget):
    # Result list for dictionary hits: Return adds the selected definition
    # and refocuses the query box; any other printable key (< 0xFF) jumps
    # straight back to the query box and replays the key press there.
    def __init__(self, dico):
        QListWidget.__init__(self)
        self.dico = dico
    def keyPressEvent(self, event):
        if type(event) == QKeyEvent:
            if event.key() == Qt.Key_Return:
                self.dico.addDefinition()
                self.dico.gotoQuery()
            elif event.key() < 0xFF:
                self.dico.gotoQuery(event)
            else:
                # Navigation keys etc. keep the default list behaviour.
                super(QListWidget, self).keyPressEvent(event)
        else:
            super(QListWidget, self).keyPressEvent(event)
class DicoWidget(QWidget):
    # Main dictionary panel: query box + result list + rendered definition
    # view, with Anki model/deck choosers for exporting looked-up entries.
    def __init__(self,mw):
        ## GUI initialisation
        super(DicoWidget, self).__init__()
        box = QGridLayout(self)
        QTextCodec.setCodecForTr(QTextCodec.codecForName("UTF-8"))
        QTextCodec.setCodecForCStrings(QTextCodec.codecForName("UTF-8"))
        QTextCodec.setCodecForLocale(QTextCodec.codecForName("UTF-8"))
        # initialisation
        self.mw = mw
        self.path = self.addonsFolder()
        self.initRenderCard()
        # Query widget (line edit plus an LCD counter of added cards)
        self.countLabel = QLCDNumber()
        self.countLabel.setFrameStyle(QFrame.NoFrame)
        self.countLabel.display(0)
        self.tQuery = QLineEdit()
        self.tQuery.returnPressed.connect(self.query)
        box.addWidget(self.tQuery,1,0,1,1)
        box.addWidget(self.countLabel,1,1,1,1)
        # Result list
        self.listResult = DefinitionList(self)
        self.listResult.itemSelectionChanged.connect(self.definitionView)
        box.addWidget(self.listResult,2,0,4,1)
        # Definition panel; DocumentUrl makes relative image paths resolve
        # inside the add-on folder.
        self.tDefinition = QTextEdit()
        self.tDefinition.document().setMetaInformation(QTextDocument.DocumentUrl, self.path + "/")
        self.tDefinition.setReadOnly(True)
        box.addWidget(self.tDefinition,2,1,4,4)
        # self.bAdd = QPushButton('Add')
        # self.bAdd.clicked.connect(self.addDefinition)
        # box.addWidget(self.bAdd,6,0)
        # self.bSync = QPushButton('Synchronize')
        # self.bSync.clicked.connect(self.synchronize)
        # box.addWidget(self.bSync,6,1)
        # Model Chooser
        self.modelArea = QWidget()
        box.addWidget(self.modelArea,6,1)
        self.modelChooser = aqt.modelchooser.ModelChooser(self.mw, self.modelArea)
        # Deck Chooser
        self.deckArea = QWidget()
        box.addWidget(self.deckArea,6,0)
        self.deckChooser = aqt.deckchooser.DeckChooser(self.mw, self.deckArea)
        # Widget
        self.setGeometry(300, 300, 400, 400)
        self.show()
        # init file
        self.initTempFile()
        ## Dictionary
        self.dico = dictionary.Dictionary(self.path)
        self.initSuggestion()
    def initTempFile(self):
        ## Temp file for new words: buffer of tab-separated card lines
        ## that synchronize() later feeds to TextImporter.
        self.filename = "new_words.tmp"
        self.file = open(self.filename,"w")
        self.nbreAdded = 0
    def setList(self,results):
        # Fill the result list; the full record travels with each item
        # via Qt.UserRole so getSelection() can recover it later.
        self.listResult.clear()
        for l in results:
            (f1,f2) = self.getFields(l)
            item = QListWidgetItem(f1+": "+f2)
            item.setIcon(self.getIconFromLanguage(l["lang_from"]))
            self.listResult.addItem(item)
            item.setData(Qt.UserRole, l)
        self.listResult.setFocus()
    def getIconFromLanguage(self,lg):
        # Flag icon shipped with the add-on for the source-language code.
        return QIcon(self.path + "/flags/"+lg+".png")
    def query(self):
        # Look up the typed word; focus the list on hits, else the query box.
        request = self.tQuery.text()
        results = self.dico.query(request.encode("UTF-8"))
        self.setList(results)
        if len(results):
            self.gotoList()
        else:
            self.gotoQuery()
    def initSuggestion(self):
        # Auto-completion over every unique headword in the dictionary.
        self.listWords = self.dico.getAllWords()
        self.listWords = list(set(self.listWords))
        self.listWords.sort()
        self.completer = QCompleter(self.listWords,self.tQuery)
        self.tQuery.setCompleter(self.completer)
    def getSelection(self):
        # Record stored under UserRole on the current item by setList().
        item = self.listResult.currentItem()
        data = item.data(Qt.UserRole)
        return data
    def getFields(self,data):
        # Build the two display fields: headword (plus grammar note when
        # present) and the comma-joined list of translations.
        field1 = data["k"]
        if 'gr' in data.keys():
            field1 = field1 +" ("+data["gr"]+")"
        field2 = ""
        if len(data["dtrn"]):
            for dtrn in data["dtrn"][:-1]:
                field2 = field2 + dtrn + ", "
            field2 = field2 + data["dtrn"][-1]
        return (field1,field2)
    def definitionView(self):
        # Render the selected record with the on-screen "view" stylesheet.
        selectedCard = self.getSelection()
        card = self.renderView(selectedCard)
        self.tDefinition.setHtml(card.encode("UTF-16LE"))
    def addDefinition(self):
        # Render the selected record as an import line, append it to the
        # temp file and bump the LCD counter.
        selectedCard = self.getSelection()
        card = self.renderCard(selectedCard)
        self.file.write(card.encode("UTF-8")+ "\n")
        self.nbreAdded = self.nbreAdded + 1
        self.countLabel.display(str(self.nbreAdded))
        tooltip(_(card + " added"), period=1000)
    def gotoQuery(self,event=None):
        # Focus the query box; replay the triggering key press if given.
        self.tQuery.setFocus()
        self.tQuery.selectAll()
        if event!=None:
            self.tQuery.keyPressEvent(event)
    def gotoList(self):
        self.listResult.setFocus()
        self.listResult.setCurrentRow(0)
    def synchronize(self):
        # Import the buffered cards into the chosen deck with TextImporter,
        # then start a fresh temp file.
        if self.nbreAdded==0:
            tooltip(_("Nothing to synchronize"), period=1000)
            return
        self.file.close()
        # select deck
        did = self.deckChooser.selectedId()
        mw.col.decks.select(did)
        # import into the collection
        ti = TextImporter(mw.col, self.filename)
        ti.delimiter = '\t'
        ti.initMapping()
        if did != ti.model['did']:
            ti.model['did'] = did
            mw.col.models.save(ti.model)
        ti.run()
        self.initTempFile()
        tooltip(_("Synchronized"), period=1000)
        self.countLabel.display(str(self.nbreAdded))
        self.mw.reset()
    def initRenderCard(self):
        # Two XSLT sheets: one for on-screen preview, one for card export.
        self.xsltView = XSLT(self.path,self.path + '/config/styleView.xsl')
        self.xsltCard = XSLT(self.path,self.path + '/config/styleCard.xsl')
    def renderCard(self,data):
        result = self.xsltCard.apply(data["xml"])
        return result
    def renderView(self,data):
        result = self.xsltView.apply(data["xml"])
        return result
    def close(self):
        # Flush pending cards into Anki and release the chooser widgets.
        self.synchronize()
        self.file.close()
        self.modelChooser.cleanup()
        self.deckChooser.cleanup()
    def closeEvent(self,event):
        self.close()
    def addonsFolder(self):
        # Anki add-on directory; bytes-encoded for Windows filesystem APIs.
        dir = self.mw.pm.addonFolder()
        if isWin:
            dir = dir.encode(sys.getfilesystemencoding())
        return dir
def startDictionary():
    """Open the dictionary panel, parked on mw so Qt keeps it alive."""
    widget = DicoWidget(mw)
    mw.myWidget = widget
# create the "Dictionnaire" menu item
action = QAction("Dictionnaire", mw)
# open the dictionary panel when triggered (old-style PyQt4 signal API)
mw.connect(action, SIGNAL("triggered()"), startDictionary)
action.setShortcut(QKeySequence("Ctrl+T"))
# and add it to the tools menu
mw.form.menuTools.addAction(action)
|
roussePaul/Dictionary-addon-for-Anki
|
addonTest.py
|
Python
|
gpl-2.0
| 6,424
|
from Products.ATContentTypes.interface.news import IATNewsItem
class IInitiative(IATNewsItem):
    """
    Marker interface for Initiative content items (specialised
    ATContentTypes news items); carries no methods of its own.
    """
|
uwosh/uwosh.initiatives
|
uwosh/initiatives/content/interfaces.py
|
Python
|
gpl-2.0
| 134
|
#
# Copyright (C) 2014-
# Sean Poyser (seanpoyser@gmail.com)
#
# This Program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This Program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with XBMC; see the file COPYING. If not, write to
# the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# http://www.gnu.org/copyleft/gpl.html
#
import xbmc
import xbmcgui
import xbmcaddon
import os
# Identifiers for the custom context-menu actions; plugin-contributed
# entries start at _EXTRABASE, 1000 ids per plugin.
_STD_MENU = 0
_ADDTOFAVES = 100
_SF_SETTINGS = 200
_SETTINGS = 250
_LAUNCH_SF = 300
_SEARCH = 400
_SEARCHDEF = 500
_RECOMMEND = 600
_DOWNLOAD = 700
_PLAYLIST = 800
_COPYIMAGES = 900
_SHOWIMAGE = 1000
_QUICKLAUNCH = 1100
_EXTRABASE = 10000
import utils
ADDON = utils.ADDON
ADDONID = utils.ADDONID
ROOT = utils.ROOT
GETTEXT = utils.GETTEXT
# Per-setting toggles controlling which entries the menu offers.
MENU_ADDTOFAVES = ADDON.getSetting('MENU_ADDTOFAVES') == 'true'
MENU_DEF_ISEARCH = ADDON.getSetting('MENU_DEF_ISEARCH') == 'true'
MENU_ISEARCH = ADDON.getSetting('MENU_ISEARCH') == 'true'
MENU_IRECOMMEND = ADDON.getSetting('MENU_IRECOMMEND') == 'true'
MENU_COPY_PROPS = ADDON.getSetting('MENU_COPY_PROPS') == 'true'
MENU_VIEW_IMAGES = ADDON.getSetting('MENU_VIEW_IMAGES') == 'true'
MENU_SF_SETTINGS = ADDON.getSetting('MENU_SF_SETTINGS') == 'true'
MENU_ADDON_SETTINGS = ADDON.getSetting('MENU_ADDON_SETTINGS') == 'true'
MENU_STD_MENU = ADDON.getSetting('MENU_STD_MENU') == 'true'
MENU_EDITFAVE = ADDON.getSetting('MENU_EDITFAVE') == 'true'
MENU_PLUGINS = ADDON.getSetting('MENU_PLUGINS') == 'true'
MENU_QUICKLAUNCH = ADDON.getSetting('MENU_QUICKLAUNCH') == 'true'
MENU_DOWNLOADS = ADDON.getSetting('MENU_DOWNLOADS') == 'true'
def getText(title, text=''):
    """Show the on-screen keyboard pre-filled with *text*.

    Returns the stripped entry, or None when the dialog is cancelled or
    the entry is empty.  (Fix: compare against None with `is`, not `==`.)
    """
    if text is None:
        # Callers sometimes pass None explicitly; treat it as empty.
        text = ''
    kb = xbmc.Keyboard(text.strip(), title)
    kb.doModal()
    if not kb.isConfirmed():
        return None
    text = kb.getText().strip()
    if len(text) < 1:
        return None
    return text
def getDefaultSearch():
    """Return the label of the default search favourite, or '' if unset."""
    import search
    result = search.getDefaultSearch()
    return result[0] if result else ''
def activateWindow(window):
    # Close any open dialogs first, otherwise ActivateWindow may be ignored.
    xbmc.executebuiltin('Dialog.Close(all, true)')
    xbmc.executebuiltin('ActivateWindow(%s)' % window)
def doStandard(useScript=True):
    """Open Kodi's built-in context menu for the current window.

    From the home window this only applies while the favourites dialog is
    open; during video playback the video playlist is shown instead.
    When useScript is True the menu is opened via an AlarmClock-launched
    helper script to dodge an XBMC animation lock-up.
    """
    window = xbmcgui.getCurrentWindowId()
    if window == 10000: #home
        # BUG FIX: '<>' is the removed Python-2-only inequality operator;
        # use '!=' (identical semantics, valid in both Python 2 and 3).
        if xbmc.getCondVisibility('Window.IsActive(favourites)') != 1:
            return
    if window == 12005: #video playing
        return activateWindow('videoplaylist')
    if useScript:
        #open menu via script to prevent animation locking up (due to bug in XBMC)
        path = utils.HOME
        script = os.path.join(path, 'standardMenu.py')
        cmd = 'AlarmClock(%s,RunScript(%s),%d,True)' % ('menu', script, 0)
        xbmc.executebuiltin(cmd)
    else:
        xbmc.executebuiltin('Action(ContextMenu)')
def copyFave(name, thumb, cmd):
    # Ask the user for a destination Super Favourites folder and copy the
    # [name, thumb, cmd] triple there; returns False when cancelled.
    import favourite
    text = GETTEXT(30019)
    folder = utils.GetFolder(text)
    if not folder:
        return False
    file = os.path.join(folder, utils.FILENAME)
    if MENU_EDITFAVE:
        # Optionally let the user rename the favourite before saving.
        name = getText(GETTEXT(30021), name)
        if not name:
            return False
    fave = [name, thumb, cmd]
    return favourite.copyFave(file, fave)
def activateCommand(cmd):
    """Replay an ActivateWindow(...) command for a plugin listing.

    Splits the command into the window part and the plugin path, switches
    windows only when necessary, then refreshes the container.
    """
    parts = cmd.split(',', 1)
    activate = parts[0] + ',return)'
    plugin = parts[1][:-1]
    # Only switch windows when the target differs from the current one.
    current = str(xbmcgui.getCurrentWindowId())
    if current not in activate:
        xbmc.executebuiltin(activate)
    xbmc.executebuiltin('Container.Update(%s)' % plugin)
def getDescription():
    """Return the first non-empty descriptive InfoLabel of the focused item.

    Falls back through plot, add-on description/summary, artist/album
    descriptions, artist and comment; returns '' when all are empty.
    """
    candidates = (
        'ListItem.Plot',
        'ListItem.Property(Addon.Description)',
        'ListItem.Property(Addon.Summary)',
        'ListItem.Property(Artist_Description)',
        'ListItem.Property(Album_Description)',
        'ListItem.Artist',
        'ListItem.Comment',
    )
    for label in candidates:
        value = xbmc.getInfoLabel(label)
        if value:
            return value
    return ''
def getPlugins():
    """Import every *.py plugin module from the add-on's Plugins folder.

    Returns the list of imported modules, or [] when the user has
    disabled plugin menu entries.
    """
    if not MENU_PLUGINS:
        return []
    import os
    import sys  # BUG FIX: sys.path is used below but sys was never imported
    path = xbmc.translatePath(os.path.join(ROOT, 'Plugins'))
    # Make the Plugins folder importable before calling __import__.
    sys.path.insert(0, path)
    plugin = []
    import sfile
    files = sfile.glob(path)
    for name in files:
        name = name.rsplit(os.sep, 1)[1]
        if name.rsplit('.', 1)[-1] == 'py':
            plugin.append(name.rsplit('.', 1)[0])
    plugins = map(__import__, plugin)
    return plugins
def addPlugins(menu, plugins, params, base):
    """Append (label, id) pairs supplied by plugin modules to *menu*.

    Each plugin exposing both add() and process() contributes its items
    inside its own 1000-wide id band starting at *base*; plugins whose
    add() raises, or that lack the hooks, are skipped (but still consume
    a band, keeping ids stable).
    """
    for index, plugin in enumerate(plugins):
        band = base + 1000 * index
        if not (hasattr(plugin, 'add') and hasattr(plugin, 'process')):
            continue
        try:
            items = plugin.add(params)
        except:
            continue
        if not items:
            continue
        if not isinstance(items, list):
            items = [items]
        for position, item in enumerate(items):
            menu.append((item, band + position))
def quickLaunch():
    # Prompt for a favourite (stored under the SF_QL skin strings) and run
    # it: window-activation commands are executed, media paths are played.
    import chooser
    if not chooser.GetFave('SF_QL'):
        return False
    path = xbmc.getInfoLabel('Skin.String(SF_QL.Path)')
    if len(path) == 0 or path == 'noop':
        return
    if path.lower().startswith('activatewindow') and ',' in path: #i.e. NOT like ActivateWindow(filemanager)
        xbmc.executebuiltin(path)
        return
    import player
    player.playCommand(path)
def getExt(url):
    """Return the first known video extension found in *url*.

    The search is case-insensitive and matches anywhere in the URL;
    '.avi' is returned when nothing matches.
    """
    lowered = url.lower()
    for candidate in ('.mp4', '.avi', '.mpg', '.flv', '.mkv', '.m4v', '.mov'):
        if candidate in lowered:
            return candidate
    return '.avi'
def getDownloadTitle(url):
    # Build a filesystem-safe download filename from the playing video's
    # title, appending SxxEyy when the season/episode InfoLabels are
    # numeric, plus the extension sniffed from the stream URL.
    import re
    title = xbmc.getInfoLabel('VideoPlayer.Title')
    try:
        season = int(xbmc.getInfoLabel('VideoPlayer.Season'))
        title += ' S%02d' % season
    except:
        pass
    try:
        episode = int(xbmc.getInfoLabel('VideoPlayer.Episode'))
        title += 'E%02d' % episode
    except:
        pass
    # Strip characters that are illegal in Windows filenames.
    title = re.sub('[:\\/*?\<>|"]+', '', title)
    title = title.strip()
    title += getExt(url)
    return title
def whitelisted():
    """Return True when the focused item's add-on is on the user's whitelist.

    The add-on id is recovered from the item's plugin:// path and resolved
    to its install folder name; any lookup failure (no focused item,
    unknown add-on) simply counts as not whitelisted.
    """
    #folder = xbmc.getInfoLabel('Container.FolderPath')
    #if not folder.startswith('addons'):
    #    return False
    filename = xbmc.getInfoLabel('ListItem.FilenameAndPath')
    try:
        addon = filename.split('://', 1)[-1].split('/', 1)[0]
        addon = xbmcaddon.Addon(addon).getAddonInfo('path')
        addon = addon.rsplit(os.path.sep, 1)[-1]
        return addon in ADDON.getSetting('WHITELIST')
    except Exception:
        # BUG FIX: was a bare 'except:', which also swallowed SystemExit
        # and KeyboardInterrupt; the best-effort fallback is preserved.
        pass
    return False
def doMenu(mode):
utils.log('**** Context Menu Information ****')
window = xbmcgui.getCurrentWindowId()
DEBUG = ADDON.getSetting('DEBUG') == 'true'
if DEBUG:
utils.DialogOK('Current Window ID %d' % window)
utils.log('Capture window\t: %d' % window)
#active = [0, 1, 2, 3, 25, 40, 500, 501, 502, 601, 2005]
#if window-10000 not in active:
# doStandard(useScript=False)
# return
if window > 12999:
doStandard(useScript=False)
return
# to prevent master profile setting being used in other profiles
if mode == 0 and ADDON.getSetting('CONTEXT') != 'true':
doStandard(useScript=False)
return
folder = xbmc.getInfoLabel('Container.FolderPath')
path = xbmc.getInfoLabel('ListItem.FolderPath')
#ignore if in Super Favourites
if (ADDONID in folder) or (ADDONID in path):
doStandard(useScript=False)
return
if mode == 0 and whitelisted():
doStandard(useScript=False)
return
choice = 0
label = xbmc.getInfoLabel('ListItem.Label')
filename = xbmc.getInfoLabel('ListItem.FilenameAndPath')
thumb = xbmc.getInfoLabel('ListItem.Thumb')
icon = xbmc.getInfoLabel('ListItem.ActualIcon')
#thumb = xbmc.getInfoLabel('ListItem.Art(thumb)')
playable = xbmc.getInfoLabel('ListItem.Property(IsPlayable)').lower() == 'true'
fanart = xbmc.getInfoLabel('ListItem.Property(Fanart_Image)')
fanart = xbmc.getInfoLabel('ListItem.Art(fanart)')
isFolder = xbmc.getCondVisibility('ListItem.IsFolder') == 1
hasVideo = xbmc.getCondVisibility('Player.HasVideo') == 1
desc = getDescription()
if not thumb:
thumb = icon
try: file = xbmc.Player().getPlayingFile()
except: file = None
isStream = False
#if hasattr(xbmc.Player(), 'isInternetStream'):
# isStream = xbmc.Player().isInternetStream()
#elif file:
if file:
isStream = file.startswith('http')
if window == 10003: #filemanager
control = 0
if xbmc.getCondVisibility('Control.HasFocus(20)') == 1:
control = 20
elif xbmc.getCondVisibility('Control.HasFocus(21)') == 1:
control = 21
if control == 0:
return doStandard(useScript=False)
label = xbmc.getInfoLabel('Container(%d).ListItem.Label' % control)
root = xbmc.getInfoLabel('Container(%d).ListItem.Path' % control)
path = root + label
isFolder = True
thumb = 'DefaultFolder.png'
#if not path.endswith(os.sep):
# path += os.sep
if isFolder:
path = path.replace('\\', '\\\\')
filename = filename.replace('\\', '\\\\')
params = {}
params['label'] = label
params['folder'] = folder
params['path'] = path
params['filename'] = filename
params['thumb'] = thumb
params['icon'] = icon
params['fanart'] = fanart
params['window'] = window
params['isplayable'] = playable
params['isfolder'] = isFolder
params['file'] = file
params['isstream'] = isStream
params['description'] = desc
params['hasVideo'] = hasVideo
for key in params:
utils.log('%s\t\t: %s' % (key, params[key]))
menu = []
localAddon = None
if MENU_QUICKLAUNCH:
menu.append((GETTEXT(30219), _QUICKLAUNCH))
plugins = []
try:
plugins = getPlugins()
addPlugins(menu, plugins, params, _EXTRABASE)
except Exception, e:
utils.log('Error adding plugins : %s' % str(e))
if len(path) > 0:
if MENU_ADDTOFAVES:
menu.append((GETTEXT(30047), _ADDTOFAVES))
if MENU_ADDON_SETTINGS:
localAddon = utils.findAddon(path)
if localAddon:
name = utils.getSettingsLabel(localAddon)
menu.append((name, _SETTINGS))
if MENU_DEF_ISEARCH:
default = getDefaultSearch()
if len(default) > 0:
menu.append((GETTEXT(30098) % default, _SEARCHDEF))
if MENU_ISEARCH: menu.append((GETTEXT(30054), _SEARCH))
if MENU_IRECOMMEND: menu.append((GETTEXT(30088), _RECOMMEND))
if MENU_COPY_PROPS:
if len(thumb) > 0 or len(fanart) > 0:
menu.append((GETTEXT(30209), _COPYIMAGES))
if MENU_VIEW_IMAGES: menu.append((GETTEXT(30216), _SHOWIMAGE))
else:
if len(description) > 0: menu.append((GETTEXT(30209), _COPYIMAGES))
if MENU_SF_SETTINGS:
menu.append((GETTEXT(30049), _SF_SETTINGS))
stdMenu = False
if MENU_STD_MENU:
if (len(path) > 0) or (window == 10034): #10034 is profile dialog
stdMenu = True
menu.append((GETTEXT(30048), _STD_MENU))
else:
if hasVideo:
menu.append((xbmc.getLocalizedString(31040), _PLAYLIST)) #Now Playing
if MENU_DOWNLOADS and isStream:
menu.append((GETTEXT(30241), _DOWNLOAD))
if len(menu) == 0 or (len(menu) == 1 and stdMenu):
doStandard(useScript=False)
return
xbmcgui.Window(10000).setProperty('SF_MENU_VISIBLE', 'true')
dialog = ADDON.getSetting('CONTEXT_STYLE') == '1'
import menus
if dialog:
choice = menus.selectMenu(utils.TITLE, menu)
else:
choice = menus.showMenu(ADDONID, menu)
utils.log('selection\t\t: %s' % choice)
if choice >= _EXTRABASE:
module = (choice - _EXTRABASE) / 1000
option = (choice - _EXTRABASE) % 1000
utils.log('plugin\t\t: %s' % module)
utils.log('option\t\t: %s' % option)
try:
plugins[module].process(option, params)
except Exception, e:
utils.log('Error processing plugin: %s' % str(e))
if choice == _QUICKLAUNCH:
try: quickLaunch()
except: pass
if choice == _STD_MENU:
doStandard(useScript=True)
if choice == _PLAYLIST:
activateWindow('videoplaylist')
if choice == _DOWNLOAD:
utils.log('download url: %s' % file)
dst = os.path.join(ADDON.getSetting('DOWNLOAD_FOLDER'), getDownloadTitle(file))
if utils.DialogYesNo(GETTEXT(30243), GETTEXT(30244)):
xbmc.Player().stop()
import download
download.download(file, dst, 'Super Favourites')
if choice == _SF_SETTINGS:
utils.ADDON.openSettings()
if choice == _SETTINGS:
xbmcaddon.Addon(localAddon).openSettings()
if choice == _ADDTOFAVES:
import favourite
if path.lower().startswith('addons://user/'):
path = path.replace('addons://user/', 'plugin://')
isFolder = True
window = 10025
if isFolder:
cmd = 'ActivateWindow(%d,"%s' % (window, path)
elif path.lower().startswith('script'):
#if path[-1] == '/':
# path = path[:-1]
cmd = 'RunScript("%s' % path.replace('script://', '')
elif path.lower().startswith('videodb') and len(filename) > 0:
cmd = 'PlayMedia("%s' % filename
#elif path.lower().startswith('musicdb') and len(filename) > 0:
# cmd = 'PlayMedia("%s")' % filename
elif path.lower().startswith('androidapp'):
cmd = 'StartAndroidActivity("%s")' % path.replace('androidapp://sources/apps/', '', 1)
else:
cmd = 'PlayMedia("%s")' % path
cmd = favourite.updateSFOption(cmd, 'winID', window)
cmd = favourite.addFanart(cmd, fanart)
cmd = favourite.updateSFOption(cmd, 'desc', desc)
if isFolder:
cmd = cmd.replace('")', '",return)')
copyFave(label, thumb, cmd)
if choice == _LAUNCH_SF:
utils.LaunchSF()
if choice in [_SEARCH, _SEARCHDEF, _RECOMMEND]:
if utils.ADDON.getSetting('STRIPNUMBERS') == 'true':
label = utils.Clean(label)
thumb = thumb if len(thumb) > 0 else 'null'
fanart = fanart if len(fanart) > 0 else 'null'
#declared in default.py
_SUPERSEARCH = 0
_SUPERSEARCHDEF = 10
_RECOMMEND_KEY = 2700
valid = [10001, 10002, 10025, 10502]
if window not in valid:
window = 10025 #video window
import urllib
if choice == _RECOMMEND:
mode = _RECOMMEND_KEY
else:
mode = _SUPERSEARCH if (choice == _SEARCH) else _SUPERSEARCHDEF
cmd = 'ActivateWindow(%d,"plugin://%s/?mode=%d&keyword=%s&image=%s&fanart=%s")' % (window, ADDONID, mode, urllib.quote_plus(label), urllib.quote_plus(thumb), urllib.quote_plus(fanart))
activateCommand(cmd)
if choice == _COPYIMAGES:
if not fanart:
fanart = thumb
xbmcgui.Window(10000).setProperty('SF_THUMB', thumb)
xbmcgui.Window(10000).setProperty('SF_FANART', fanart)
xbmcgui.Window(10000).setProperty('SF_DESCRIPTION', desc)
if choice == _SHOWIMAGE:
if not fanart:
fanart = thumb
import viewer
viewer.show(fanart, thumb, ADDONID)
def menu(mode):
    """Show the Super Favourites context menu unless one is already open.

    mode -- 0 when launched directly, 1 when launched via the standard
            context menu (set by main()).
    """
    # Re-entrancy guard: bail out if the SF menu is already on screen
    # (the flag is set on window 10000 while the menu is visible).
    if xbmcgui.Window(10000).getProperty('SF_MENU_VISIBLE') == 'true':
        return
    # One-time first-run prompt offering to open the addon settings.
    if ADDON.getSetting('MENU_MSG') == 'true':
        ADDON.setSetting('MENU_MSG', 'false')
        if utils.DialogYesNo(GETTEXT(35015), GETTEXT(35016), GETTEXT(35017)):
            utils.openSettings(ADDONID, 2.6)
        return
    # Close any currently open dialog before drawing our own menu.
    xbmc.executebuiltin('Dialog.Close(all, true)')
    doMenu(mode)
def main():
    """Top-level dispatcher: fall back to the standard context menu when
    the favourites window is active, otherwise show the SF menu."""
    if xbmc.getCondVisibility('Window.IsActive(favourites)') == 1:
        return doStandard(useScript=False)
    mode = 0
    # An empty argv[0] indicates launch via the standard context menu
    # -- presumably how Kodi invokes context scripts; TODO confirm.
    if len(sys.argv) > 0 and sys.argv[0] == '':
        mode = 1  # launched via std context menu
    try:
        menu(mode)
    except Exception, e:
        # Log rather than crash Kodi's UI thread.
        utils.log('Exception in capture.py %s' % str(e))
# Script body: run the menu, give Kodi a moment to settle, then clear
# the re-entrancy flag so the menu can be opened again later.
main()
xbmc.sleep(1000)
xbmcgui.Window(10000).clearProperty('SF_MENU_VISIBLE')
|
EdLogan18/logan-repository
|
plugin.program.super.favourites/capture.py
|
Python
|
gpl-2.0
| 17,511
|
# pygsear
# Copyright (C) 2003 Lee Harr
#
#
# This file is part of pygsear.
#
# pygsear is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# pygsear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygsear; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import time
from pygsear.Network import reactor
import twisted.internet.app
from twisted.cred.authorizer import DefaultAuthorizer
from twisted.spread import pb
from twisted.internet import defer
from twisted.python import log
class PygameServerError(pb.Error):
    """Raised when something goes wrong inside the PygameServer."""
class PygameServer:
    """Coordinates the connected player games.

    Once two player games have been handed over (takePlayerGame), the
    server asks each of them to create a traveler and then keeps pumping
    'move' calls to every game.
    """

    def __init__(self):
        self.players = []       # Player perspectives registered via set_server()
        self.playerGames = {}   # playerName -> remote game reference
        # BUG FIX: numPlayers was never initialised, so the first call to
        # addPlayer() raised AttributeError.
        self.numPlayers = 0

    def addPlayer(self):
        self.numPlayers += 1

    def gameIsOver(self):
        # Game never ends in this demo.
        return 0

    def takePlayerGame(self, playerGame, playerName):
        """Store a player's remote game; start once both players are in."""
        log.msg('receiving traveler from ' + str(playerName))
        self.playerGames[playerName] = playerGame
        if len(self.playerGames) == 2:
            self.makeTravelers()

    def makeTravelers(self):
        """Ask every connected game to create its traveler sprite."""
        log.msg('making travelers')
        for playerGame in self.playerGames.values():
            log.msg('make traveler for %s' % playerGame)
            d = playerGame.callRemote('makeTraveler')
            d.addCallback(self.moveTravelers)

    def dropPlayerGame(self, playerName):
        """Forget a disconnected player's game."""
        log.msg('dropping game %s' % playerName)
        del self.playerGames[playerName]

    def moveTravelers(self, dum=None):
        """Tell every game to move, then reschedule itself.

        NOTE(review): Deferred has no set_timeout() in modern Twisted
        (the old API was setTimeout); confirm against the Twisted
        version this project targets.
        """
        for playerGame in self.playerGames.values():
            playerGame.callRemote('move')
        d = defer.Deferred()
        d.addCallback(self.moveTravelers)
        d.set_timeout(0, self.moveTravelers)
class Player(pb.Perspective):
    """Server-side perspective for one connected player."""

    def __init__(self, playerName, identityName="Nobody"):
        pb.Perspective.__init__(self, playerName, identityName)
        self.playerName = playerName

    def set_server(self, server):
        # Register this player with the shared PygameServer instance.
        self.server = server
        server.players.append(self)

    def attached(self, clientref, identity):
        # Called by PB when the client connects.
        print "player '%s' joining game" % identity.name
        self.identity = identity
        #self.server.addPlayer()
        return self

    def detached(self, clientref, identity):
        # Client disconnected: drop its game from the server.
        self.server.dropPlayerGame(self.playerName)

    def perspective_takePlayerGame(self, game):
        # Remote-callable: client hands over its game object.
        log.msg('takePlayerGame called')
        return self.server.takePlayerGame(game, self.playerName)

    def perspective_makeTraveler(self):
        # NOTE(review): PygameServer defines makeTravelers() (plural) but
        # no makeTraveler(); this call looks like it would raise
        # AttributeError -- confirm against the rest of the project.
        log.msg('makeTraveler called')
        return self.server.makeTraveler()

    #def perspective_moveTraveler(self):
    #    log.msg('moveTraveler called')
    #    return self.game.moveTraveler(self.playerName)
class PygameService(pb.Service):
    """PB service that hands out Player perspectives, all backed by one
    shared PygameServer."""

    def __init__(self, serviceName, serviceParent=None, authorizer=None,
                 application=None):
        pb.Service.__init__(self, serviceName, serviceParent, authorizer,
                            application)
        log.msg('making game')
        # Single game server shared by every perspective of this service.
        self.server = PygameServer()

    def get_perspectiveRequest(self, name):
        # Called by PB when a client requests the perspective "name";
        # returns an already-fired Deferred with a fresh Player.
        log.msg('trying to get perspective for: ' + str(name))
        log.msg('service'+str(self))
        player = Player(name)
        player.set_server(self.server)
        log.msg('player'+str(player))
        return defer.succeed(player)
def setup_players(auth, players):
    """Register each (name, password) pair as an identity with a key for
    the "pygameservice" service."""
    for name, password in players:
        identity = auth.createIdentity(name)
        identity.set_password(password)
        identity.addKeyByString("pygameservice", name)
        auth.addIdentity(identity)
def main():
    """Wire up the PB application: authorizer, service, demo player
    accounts, and the listening broker on TCP port 8800."""
    log.msg('******STARTING******')
    app = twisted.internet.app.Application("pygame_server")
    auth = DefaultAuthorizer(app)
    service = PygameService("pygameservice", app, auth)
    # Hard-coded demo accounts: (name, password) pairs.
    players = [["alice", "sekrit"],
               ["bob", "b0b"],
               ["charlie", "chuck"],
               ["david", "password"],
               ]
    setup_players(auth, players)
    # start the application
    app.listenTCP(8800, pb.BrokerFactory(pb.AuthRoot(auth)))
    app.run()

if __name__ == '__main__':
    main()
|
davesteele/pygsear-debian
|
examples/twisted-pygame/pygame_server.py
|
Python
|
gpl-2.0
| 4,614
|
#!/usr/bin/python
# vim: set fileencoding=utf-8 :
"""Queries the databases for songs without albums recorded, and queries
Musicbrainz for the album. This is done as a separate process, as the
Musicbrainz API limits how fast you can grab data from it."""
from __future__ import print_function
import boto
import time
import datetime
import yaml
import sys
from musicbrainz2.webservice import Query, TrackFilter, WebServiceError, \
AuthenticationError, ConnectionError, RequestError, ResponseError, \
ResourceNotFoundError
from musicbrainz2.model import Release
SONGS_PER_DOMAIN = 100
def main():
    """Looks for songs without an album attribute and fills them in
    via Musicbrainz.  Keeps polling each station until it reports no
    more songs needing albums, then drops it from the rotation."""
    print(datetime.datetime.now())
    try:
        stations_yaml = open('stations.yaml')
    except IOError:
        print("Failed to load station list", file=sys.stderr)
        sys.exit(-1)
    # NOTE(review): yaml.load() without an explicit Loader can run
    # arbitrary constructors; prefer yaml.safe_load() unless
    # stations.yaml is fully trusted.
    stations = yaml.load(stations_yaml)
    stations_yaml.close()  # fix: don't leak the file handle
    sdb = boto.connect_sdb()
    while True:
        # BUG FIX: iterate a snapshot of the keys -- the dict is mutated
        # (del) inside the loop.  Python 2's .keys() happened to return a
        # copy; on Python 3 the original code raises "dictionary changed
        # size during iteration".
        for station_id in list(stations.keys()):
            print("Checking ", station_id)
            if not add_album_attribute(station_id, sdb):
                del stations[station_id]
        if not len(stations):
            break
    print(datetime.datetime.now())
def add_album_attribute(station, sdb):
    """Adds Album attribute for plays where it is not already set.

    Arguments:
    station -- Station ID
    sdb -- SimpleDB connection

    Returns True when the station still had plays missing an album
    (i.e. the caller should keep polling it), False otherwise.
    """
    count = 0
    domain = sdb.get_domain("%s-whatson" % station)
    query = 'select count(*) from `%s-whatson` where `Album` is null' \
        % (station)
    result_set = domain.select(query, max_items=1)
    for item in result_set:
        print(item['Count'], "songs needing albums from", station)
        # BUG FIX: SimpleDB returns attribute values as strings, so the
        # final `count != 0` used to compare '0' != 0 -- always True --
        # and exhausted stations were never dropped by the caller.
        count = int(item['Count'])
    # Get a batch of items that lack an Album attribute, look up each
    # one's album, then update every play of the same title/artist.
    query = 'select * from `%s-whatson` where `Album` is null limit %d' \
        % (station, SONGS_PER_DOMAIN)
    result_set = domain.select(query, max_items=SONGS_PER_DOMAIN)
    for item in result_set:
        album = find_album_name(item)
        print("Album for ", item, "is", album)
        if album == "":
            continue
        # Escape double quotes for the SimpleDB select expression.
        artist = item['Artist'].replace('"', '""')
        title = item['Title'].replace('"', '""')
        # NOTE(review): this selects rows where Album is NOT null and
        # then writes Album onto them; the surrounding comments suggest
        # `is null` was intended -- confirm before changing.
        song_query = 'select * from `%s-whatson` where `Title` = "%s" '\
            'and `Artist` = "%s" and `Album` is not NULL' \
            % (station, title, artist)
        song_rs = domain.select(song_query)
        for song in song_rs:
            song_item = domain.get_item(song.name)
            song_item['Album'] = album
            song_item.save()
    return count != 0
def find_album_name( track_details ):
    """Find album name via Musicbrainz API

    Arguments:
    track_details -- dict containing 'Title' and 'Artist'

    Returns album name, empty string if unsuccessful.
    """
    album = ""
    # Musicbrainz limits API calls to one per second from a specific IP
    time.sleep(1.1)
    query = Query()
    # Loop through at most 9 times as the webservice is occasionally busy.
    # Results will not be None if successful.
    i = 1
    results = None
    while (results == None) and (i < 10):
        try:
            tracks = TrackFilter(title=track_details['Title'],
                                 artistName=track_details['Artist'])
            results = query.getTracks(tracks)
        except (AuthenticationError,
                ConnectionError,
                RequestError,
                ResponseError,
                ResourceNotFoundError,
                WebServiceError) as error:
            # Back off progressively (10s, 20s, ...) before retrying.
            print('Error:', error, 'waiting', i*10, 'seconds')
            time.sleep(i*10)
            i += 1
            results = None
    if (results != None) and (len(results) != 0):
        album = find_preferred_album( results )
    return album
def find_preferred_album(results):
    """Find the most likely to be reasonable name of the album.

    Arguments:
    results -- result of a Musicbrainz query

    Returns an album string.  Empty if unsuccessful.
    """
    # TODO: FIXME: Musicbrainz results are not returning a date
    # associated with a release.  Would prefer to use the oldest
    # album if possible.
    found_release = None
    release_type = None
    for result in results:
        track = result.track
        # Preference order: album, then single, then live, then anything.
        for release in track.releases:
            if Release.TYPE_ALBUM in release.getTypes():
                found_release = release
                release_type = Release.TYPE_ALBUM
                break
            elif Release.TYPE_SINGLE in release.getTypes():
                if release_type != Release.TYPE_ALBUM:
                    found_release = release
                    release_type = Release.TYPE_SINGLE
            elif Release.TYPE_LIVE in release.getTypes():
                if release_type != Release.TYPE_ALBUM and \
                   release_type != Release.TYPE_SINGLE:
                    found_release = release
                    release_type = Release.TYPE_LIVE
            else:
                if release_type != Release.TYPE_ALBUM and \
                   release_type != Release.TYPE_SINGLE and \
                   release_type != Release.TYPE_LIVE:
                    found_release = release
        if release_type == Release.TYPE_ALBUM:
            # Nothing beats an album match; stop searching.
            break
    if found_release is None:
        return ""
    # BUG FIX: the original returned `release.title` -- the *last* release
    # examined by the loops -- instead of the release actually selected as
    # preferred.  Return the title of the chosen release.
    return found_release.title
# Run the album-filling loop when executed as a script.
if __name__ == "__main__":
    main()
|
alexjh/whatson
|
script-archive/whats_on_find_album.py
|
Python
|
gpl-2.0
| 5,876
|
# -*- coding: utf-8 -*-
import datetime
import fauxfactory
import pytest
from cfme.rest import dialog as _dialog
from cfme.rest import services as _services
from cfme.rest import service_catalogs as _service_catalogs
from cfme.rest import service_templates as _service_templates
from utils.providers import setup_a_provider as _setup_a_provider
from utils.wait import wait_for
from utils import error, version
pytestmark = [pytest.mark.tier(2)]
class TestServiceRESTAPI(object):
    """REST API coverage for the /api/services collection."""

    @pytest.fixture(scope="module")
    def a_provider(self):
        # One infra provider shared by every test in this module.
        return _setup_a_provider("infra")

    @pytest.mark.usefixtures("logged_in")
    @pytest.fixture(scope="function")
    def dialog(self):
        return _dialog()

    @pytest.fixture(scope="function")
    def service_catalogs(self, request, rest_api):
        return _service_catalogs(request, rest_api)

    @pytest.mark.usefixtures("logged_in")
    @pytest.fixture(scope="function")
    def services(self, request, rest_api, a_provider, dialog, service_catalogs):
        return _services(request, rest_api, a_provider, dialog, service_catalogs)

    def test_edit_service(self, rest_api, services):
        """Tests editing a service.

        Prerequisities:
            * An appliance with ``/api`` available.

        Steps:
            * POST /api/services (method ``edit``) with the ``name``
            * Check if the service with ``new_name`` exists

        Metadata:
            test_flag: rest
        """
        ser = services[0]
        new_name = fauxfactory.gen_alphanumeric()
        ser.action.edit(name=new_name)
        wait_for(
            lambda: rest_api.collections.services.find_by(name=new_name),
            num_sec=180,
            delay=10,
        )

    def test_edit_multiple_services(self, rest_api, services):
        """Tests editing multiple services at a time.

        Prerequisities:
            * An appliance with ``/api`` available.

        Steps:
            * POST /api/services (method ``edit``) with the list of dictionaries used to edit
            * Check if the services with ``new_name`` each exists

        Metadata:
            test_flag: rest
        """
        new_names = []
        services_data_edited = []
        for ser in services:
            new_name = fauxfactory.gen_alphanumeric()
            new_names.append(new_name)
            services_data_edited.append({
                "href": ser.href,
                "name": new_name,
            })
        rest_api.collections.services.action.edit(*services_data_edited)
        for new_name in new_names:
            wait_for(
                # BUG FIX: this test edits *services*, but the original
                # polled rest_api.collections.service_templates and so
                # verified the wrong collection.
                lambda: rest_api.collections.services.find_by(name=new_name),
                num_sec=180,
                delay=10,
            )

    def test_delete_service(self, rest_api, services):
        """Deleting a service twice must 404 the second time."""
        # NOTE(review): picks the first service from the live collection
        # rather than from the `services` fixture -- confirm intentional.
        service = rest_api.collections.services[0]
        service.action.delete()
        with error.expected("ActiveRecord::RecordNotFound"):
            service.action.delete()

    def test_delete_services(self, rest_api, services):
        """Bulk-deleting services twice must 404 the second time."""
        rest_api.collections.services.action.delete(*services)
        with error.expected("ActiveRecord::RecordNotFound"):
            rest_api.collections.services.action.delete(*services)

    def test_retire_service_now(self, rest_api, services):
        """Test retiring a service

        Prerequisities:
            * An appliance with ``/api`` available.

        Steps:
            * Retrieve list of entities using GET /api/services , pick the first one
            * POST /api/service/<id> (method ``retire``)

        Metadata:
            test_flag: rest
        """
        assert "retire" in rest_api.collections.services.action.all
        retire_service = services[0]
        retire_service.action.retire()
        wait_for(
            lambda: not rest_api.collections.services.find_by(name=retire_service.name),
            num_sec=600,
            delay=10,
        )

    def test_retire_service_future(self, rest_api, services):
        """Test retiring a service

        Prerequisities:
            * An appliance with ``/api`` available.

        Steps:
            * Retrieve list of entities using GET /api/services , pick the first one
            * POST /api/service/<id> (method ``retire``) with the ``retire_date``

        Metadata:
            test_flag: rest
        """
        assert "retire" in rest_api.collections.services.action.all
        retire_service = services[0]
        date = (datetime.datetime.now() + datetime.timedelta(days=5)).strftime('%m/%d/%y')
        future = {
            "date": date,
            "warn": "4",
        }
        date_before = retire_service.updated_at
        retire_service.action.retire(future)

        def _finished():
            # The record's updated_at is bumped once retirement has been
            # scheduled server-side.
            retire_service.reload()
            return retire_service.updated_at > date_before

        wait_for(_finished, num_sec=600, delay=5, message="REST automation_request finishes")

    @pytest.mark.uncollectif(lambda: version.current_version() < '5.5')
    def test_set_service_owner(self, rest_api, services):
        """Assign an owner to a single service and verify it sticks."""
        if "set_ownership" not in rest_api.collections.services.action.all:
            pytest.skip("Set owner action for service is not implemented in this version")
        service = services[0]
        user = rest_api.collections.users.get(userid='admin')
        data = {
            "owner": {"href": user.href}
        }
        service.action.set_ownership(data)
        service.reload()
        assert hasattr(service, "evm_owner")
        assert service.evm_owner.userid == user.userid

    @pytest.mark.uncollectif(lambda: version.current_version() < '5.5')
    def test_set_services_owner(self, rest_api, services):
        """Assign an owner to several services in one bulk call and verify."""
        if "set_ownership" not in rest_api.collections.services.action.all:
            pytest.skip("Set owner action for service is not implemented in this version")
        data = []
        user = rest_api.collections.users.get(userid='admin')
        for service in services:
            tmp_data = {
                "href": service.href,
                "owner": {"href": user.href}
            }
            data.append(tmp_data)
        rest_api.collections.services.action.set_ownership(*data)
        for service in services:
            service.reload()
            assert hasattr(service, "evm_owner")
            assert service.evm_owner.userid == user.userid
class TestServiceTemplateRESTAPI(object):
    """REST API coverage for the /api/service_templates collection."""

    @pytest.mark.usefixtures("logged_in")
    @pytest.fixture(scope='function')
    def service_templates(self, request, rest_api, dialog):
        return _service_templates(request, rest_api, dialog)

    @pytest.mark.usefixtures("logged_in")
    @pytest.fixture(scope="function")
    def dialog(self):
        return _dialog()

    @pytest.fixture(scope="function")
    def service_catalogs(self, request, rest_api):
        return _service_catalogs(request, rest_api)

    def test_edit_service_template(self, rest_api, service_templates):
        """Tests editing a service template.

        Prerequisities:
            * An appliance with ``/api`` available.

        Steps:
            * POST /api/service_templates (method ``edit``) with the ``name``
            * Check if the service_template with ``new_name`` exists

        Metadata:
            test_flag: rest
        """
        scl = rest_api.collections.service_templates[0]
        new_name = fauxfactory.gen_alphanumeric()
        scl.action.edit(name=new_name)
        wait_for(
            # BUG FIX: the original polled service_catalogs, but it is a
            # service *template* that was renamed.
            lambda: rest_api.collections.service_templates.find_by(name=new_name),
            num_sec=180,
            delay=10,
        )

    def test_delete_service_templates(self, rest_api, service_templates):
        """Bulk-deleting service templates twice must 404 the second time."""
        rest_api.collections.service_templates.action.delete(*service_templates)
        with error.expected("ActiveRecord::RecordNotFound"):
            rest_api.collections.service_templates.action.delete(*service_templates)

    def test_delete_service_template(self, rest_api, service_templates):
        """Deleting a service template twice must 404 the second time."""
        s_tpl = rest_api.collections.service_templates[0]
        s_tpl.action.delete()
        with error.expected("ActiveRecord::RecordNotFound"):
            s_tpl.action.delete()

    @pytest.mark.uncollectif(lambda: version.current_version() < '5.5')
    def test_assign_unassign_service_template_to_service_catalog(self, rest_api, service_catalogs,
            service_templates):
        """Tests assigning and unassigning the service templates to service catalog.

        Prerequisities:
            * An appliance with ``/api`` available.

        Steps:
            * POST /api/service_catalogs/<id>/service_templates (method ``assign``)
                with the list of dictionaries service templates list
            * Check if the service_templates were assigned to the service catalog
            * POST /api/service_catalogs/<id>/service_templates (method ``unassign``)
                with the list of dictionaries service templates list
            * Check if the service_templates were unassigned to the service catalog

        Metadata:
            test_flag: rest
        """
        scl = service_catalogs[0]
        stpl = service_templates[0]
        scl.service_templates.action.assign(stpl)
        scl.reload()
        assert stpl.id in [st.id for st in scl.service_templates.all]
        scl.service_templates.action.unassign(stpl)
        scl.reload()
        assert stpl.id not in [st.id for st in scl.service_templates.all]

    def test_edit_multiple_service_templates(self, rest_api, service_templates):
        """Tests editing multiple service templates at a time.

        Prerequisities:
            * An appliance with ``/api`` available.

        Steps:
            * POST /api/service_templates (method ``edit``)
                with the list of dictionaries used to edit
            * Check if the service_templates with ``new_name`` each exists

        Metadata:
            test_flag: rest
        """
        new_names = []
        service_tpls_data_edited = []
        for tpl in service_templates:
            new_name = fauxfactory.gen_alphanumeric()
            new_names.append(new_name)
            service_tpls_data_edited.append({
                "href": tpl.href,
                "name": new_name,
            })
        rest_api.collections.service_templates.action.edit(*service_tpls_data_edited)
        for new_name in new_names:
            wait_for(
                lambda: rest_api.collections.service_templates.find_by(name=new_name),
                num_sec=180,
                delay=10,
            )
|
lehinevych/cfme_tests
|
cfme/tests/services/test_rest_services.py
|
Python
|
gpl-2.0
| 10,705
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright (C) 2005, 2006 Francisco José Rodríguez Bogado, #
# (pacoqueen@users.sourceforge.net) #
# #
# This file is part of F.P.-INN . #
# #
# F.P.-INN is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 2 of the License, or #
# (at your option) any later version. #
# #
# F.P.-INN is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with F.P.-INN ; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA #
###############################################################################
import sys
import sqlobject, gtk
from utils import combo_set_from_db, \
str_fecha, \
str_fechahoralarga, \
str_hora, \
float2str, \
rellenar_lista
import datetime
DEBUG = False
NOMBRES_COLUMNAS_PORCENTAJE = ("iva", "descuento", "comision")
def setter_entry(objeto, col, w):
    """
    Show the value of attribute "col" of the object "objeto" in the
    entry widget "w", formatted according to the column's type.
    """
    valor = getattr(objeto, col.name)
    if isinstance(col, sqlobject.col.SODateCol):
        valor = str_fecha(valor)
    elif isinstance(col, sqlobject.col.SODateTimeCol):
        valor = str_fechahoralarga(valor)
    elif isinstance(col, sqlobject.col.SOTimeCol):
        valor = str_hora(valor)
    elif isinstance(col, sqlobject.col.SOFloatCol):
        # XXX
        # HACK: Workaround.  Percentage values (discounts and VAT) must
        # be displayed with the "%" symbol, but the only way to tell
        # those columns apart is by looking at their name.
        if col.name in NOMBRES_COLUMNAS_PORCENTAJE:
            valor = "%s %%" % float2str(valor * 100,
                                        precision = 5,
                                        autodec = True)
        # XXX
        else:
            valor = float2str(valor, autodec = False)
            # With autodec=True a round number > 1000 is written as
            # 1.000, which the getter then parses as a float.
    if not isinstance(valor, str):
        valor = str(valor)
    w.set_text(valor)
def setter_spinbutton(objeto, col, w):
    """Show the value of attribute ``col`` of ``objeto`` in the
    spinbutton ``w``."""
    w.set_value(getattr(objeto, col.name))
def setter_textview(objeto, col, w):
    """
    Show the value of attribute "col" of the object "objeto" in the
    textview widget "w", formatted according to the column's type.
    """
    valor = getattr(objeto, col.name)
    if isinstance(col, sqlobject.col.SODateCol):
        valor = str_fecha(valor)
    elif isinstance(col, sqlobject.col.SODateTimeCol):
        valor = str_fechahoralarga(valor)
    elif isinstance(col, sqlobject.col.SOTimeCol):
        valor = str_hora(valor)
    elif isinstance(col, sqlobject.col.SOFloatCol):
        # XXX
        # HACK: Workaround.  Percentage values (discounts and VAT) must
        # be displayed with the "%" symbol, but the only way to tell
        # those columns apart is by looking at their name.
        if col.name in NOMBRES_COLUMNAS_PORCENTAJE:
            valor = "%s %%" % float2str(valor * 100,
                                        precision = 5,
                                        autodec = True)
        # XXX
        else:
            valor = float2str(valor, autodec = False)
            # With autodec=True a round number > 1000 is written as
            # 1.000, which the getter then parses as a float.
    if not isinstance(valor, str):
        valor = str(valor)
    buf = w.get_buffer()
    buf.set_text(valor)
def setter_comboboxentry(objeto, col, w):
    """Show the value of attribute ``col`` of ``objeto`` in the child
    entry of the comboboxentry ``w``."""
    # TODO: inspect the SOCol type and convert the value when it is a
    # date, a float, etc.
    combo_set_from_db(w, getattr(objeto, col.name))
def setter_combobox(objeto, col, w):
    """Select, in the combobox ``w``, the row matching the value of
    attribute ``col`` of ``objeto``."""
    # TODO: inspect the SOCol type and convert the value when it is a
    # date, a float, etc.
    combo_set_from_db(w, getattr(objeto, col.name))
def setter_checkbutton(objeto, col, w):
    """
    Show the value of attribute "col" of the object as True/False in
    the received checkbutton.
    """
    valor = getattr(objeto, col.name)
    w.set_inconsistent(False)
    try:
        w.set_active(valor)
    except (TypeError, ValueError), msg:
        # Value could not be coerced to a boolean state: log to stderr
        # and mark the widget as inconsistent instead of crashing.
        sys.stderr.write('adapter.py::setter_checkbutton: "%s" no es un valor correcto para "%s". Excepción: %s'
                         % (valor, w, msg))
        w.set_inconsistent(True)
def get_raw_value(w):
    """Return the raw value held by widget ``w``: its text for entries
    and textviews, a bool for toggle buttons, or the first model column
    of the active row for comboboxes.

    Raises TypeError for unsupported widget types.
    """
    if isinstance(w, gtk.Entry):
        res = w.get_text()
    elif isinstance(w, gtk.TextView):
        buf = w.get_buffer()
        res = buf.get_text(*buf.get_bounds())
    elif isinstance(w, (gtk.CheckButton, gtk.ToggleButton)):
        res = w.get_active()
    elif isinstance(w, (gtk.ComboBoxEntry, gtk.ComboBox)):
        pos = w.get_active()
        if pos > -1:
            # First model column of the selected row.
            res = w.get_model()[pos][0]
        else:
            # No row selected.
            res = ""
    else:
        raise TypeError, "adapter.py::get_raw_value: Widget %s no soportado."%w
    return res
def generic_getter(func_tratamiento, w):
    """Read the widget's raw value and return it processed by the given
    treatment function ``func_tratamiento``."""
    return func_tratamiento(get_raw_value(w))
def convertir_a_entero(valor):
    """
    Convert a value to an integer.

    * Floats are truncated.
    * If the value is neither text nor a number (notably None), 0 is
      returned.
    * If it is text, the first run of digits it contains is used.
    * An empty (or all-whitespace) string also yields 0.
    * Otherwise TypeError is raised.  (The original docstring said
      ValueError, but the code has always raised TypeError; the type is
      kept for backward compatibility with existing callers.)
    """
    try:
        res = int(valor)
    except ValueError:
        if isinstance(valor, str) and valor.strip() == "":
            return 0
        import re
        reint = re.compile("[0-9]+")
        try:
            # BUG FIX: re.findall returns strings, so the function used
            # to return e.g. "42" instead of 42 for mixed text input.
            res = int(reint.findall(valor)[0])
        except IndexError:
            raise TypeError
    except TypeError:
        res = 0
    return res
def convertir_a_flotante(valor, vdefecto = 0.0, col = None):
    """
    Return the default value "vdefecto" on failure.  Before giving up,
    the value is tried first as a plain float and then as a float with a
    percent sign.  The percentage interpretation is forced when ``col``
    is not None and its name is one of the percentage columns.
    """
    from utils import _float, parse_porcentaje
    try:
        if col != None and col.name in NOMBRES_COLUMNAS_PORCENTAJE:
            # Force the percentage branch below for percentage columns.
            raise Exception, "Forzando comprobación de porcentaje."
        return _float(valor)
    except:
        try:
            # 5 decimal places are plenty for a percentage; it is also
            # what the setters above use when displaying values.
            return round(parse_porcentaje(valor, fraccion = True), 5)
        except:
            return vdefecto
def convertir_a_booleano(valor, vdefecto = False):
    """Return bool(valor); on failure return ``vdefecto``."""
    try:
        return bool(valor)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        return vdefecto
def convertir_a_fechahora(valor, vdefecto = None):
    """Parse ``valor`` as a date+time; on failure return ``vdefecto``,
    which defaults to today's date *at call time*.

    BUG FIX: the original default ``datetime.date.today()`` was
    evaluated once at import, so a long-running process kept returning a
    stale date.  Note: callers that explicitly passed ``None`` now get
    today's date instead of None.
    """
    from utils import parse_fechahora
    if vdefecto is None:
        vdefecto = datetime.date.today()
    try:
        return parse_fechahora(valor)
    except Exception:
        return vdefecto
def convertir_a_fecha(valor, vdefecto = None):
    """Parse ``valor`` as a date; on failure return ``vdefecto``, which
    defaults to today's date *at call time*.

    BUG FIX: the original default ``datetime.date.today()`` was
    evaluated once at import, so a long-running process kept returning a
    stale date.  Note: callers that explicitly passed ``None`` now get
    today's date instead of None.
    """
    from utils import parse_fecha
    if vdefecto is None:
        vdefecto = datetime.date.today()
    try:
        return parse_fecha(valor)
    except Exception:
        return vdefecto
def convertir_a_hora(valor, vdefecto = None):
    """Parse ``valor`` as a time of day; on failure return ``vdefecto``,
    which defaults to today's *date* at call time (kept for backward
    compatibility, although a time value might be expected -- confirm).

    BUG FIX: the original default ``datetime.date.today()`` was
    evaluated once at import, so a long-running process kept returning a
    stale date.  Note: callers that explicitly passed ``None`` now get
    today's date instead of None.
    """
    from utils import parse_hora
    if vdefecto is None:
        vdefecto = datetime.date.today()
    try:
        return parse_hora(valor)
    except Exception:
        return vdefecto
class Adaptador:
    """
    Glue code for the MVC.
    Builds and maintains a dictionary of adapters between the model
    (SQLObject columns) and the view (pygtk widgets).  Each column
    knows how it must be shown on screen and with which widget, as
    well as how to read its value back from the view and store it
    in the model.
    """
    # Widget classes accepted for each broad kind of column; the FIRST
    # entry of every tuple is the widget created by default.
    TUPLATEXTO = (gtk.Entry, gtk.TextView, gtk.ComboBoxEntry, gtk.ComboBox)
    TUPLABOOL = (gtk.CheckButton, gtk.ToggleButton)
    TUPLANUM = tuple(list(TUPLATEXTO) + [gtk.SpinButton])
    TUPLAFECHA = TUPLATEXTO
    TUPLACOMBO = (gtk.ComboBoxEntry, gtk.ComboBox)
    # SQLObject column class -> tuple of widget classes that can show it.
    TIPOS = {sqlobject.col.SOStringCol: TUPLATEXTO,
             sqlobject.col.SOUnicodeCol: TUPLATEXTO,
             sqlobject.col.SOIntCol: TUPLANUM,
             sqlobject.col.SOTinyIntCol: TUPLANUM,
             sqlobject.col.SOSmallIntCol: TUPLANUM,
             sqlobject.col.SOMediumIntCol: TUPLANUM,
             sqlobject.col.SOBigIntCol: TUPLANUM,
             sqlobject.col.SOBoolCol: TUPLABOOL,
             sqlobject.col.SOFloatCol: TUPLANUM,
             sqlobject.col.SOEnumCol: (gtk.ComboBox, ),
             sqlobject.col.SODateTimeCol: TUPLATEXTO,
             sqlobject.col.SODateCol: TUPLATEXTO,
             # In the future, replace with a pack (gtk.Entry + gtk.Button
             # to pick a date).
             sqlobject.col.SOTimeCol: TUPLATEXTO,
             sqlobject.col.SOTimestampCol: TUPLATEXTO,
             sqlobject.col.SODecimalCol: TUPLANUM,
             sqlobject.col.SOForeignKey: TUPLACOMBO}
    def __init__(self):
        # Private map: column object -> adapter dict (see build_adapter).
        self.__adaptadores = {}
    def adaptar(self, col, widget = None):
        """
        Creates or updates the adapter registered for the received column.
        If widget != None, binds the column to that widget.
        """
        self.__adaptadores[col] = self.build_adapter(col, widget)
    def get_adaptadores(self):
        """
        Returns the dictionary of adapters.
        """
        return self.__adaptadores
    def build_adapter(self, col, widget = None):
        """
        Creates and returns a dictionary with four elements:
        - A pygtk widget used to display the column's
          attribute on screen.
        - A function that dumps the column's value
          into the widget.
        - A function that translates and stores the widget's
          value into the column.
        - A function that compares the value in the model
          with the one in the view.
        """
        res = {'widget': self._inferir_widget(col, widget)}
        res['mostrar'] = self._inferir_setter(col, res['widget'])
        res['leer'] = self._inferir_getter(col, res['widget'])
        res['comparar'] = self._inferir_comparator(col, res['leer'])
        return res
    def _inferir_comparator(self, col, func_get_value):
        """
        Returns a function that, when called with an object, yields
        True or False depending on whether the "col" attribute of the
        object equals the current content of the widget.
        """
        return lambda o: getattr(o, col.name) == func_get_value()
    def _inferir_getter(self, col, w):
        """
        Returns a function that reads the widget's
        value converted to the appropriate type
        for the column col.
        """
        if isinstance(col, (sqlobject.col.SOStringCol,
                            sqlobject.col.SOUnicodeCol)):
            f = lambda v: v
        elif isinstance(col, (sqlobject.col.SOIntCol,
                              sqlobject.col.SOTinyIntCol,
                              sqlobject.col.SOSmallIntCol,
                              sqlobject.col.SOMediumIntCol,
                              sqlobject.col.SOBigIntCol,
                              sqlobject.col.SODecimalCol)):
            f = lambda v: convertir_a_entero(v)
        elif isinstance(col, sqlobject.col.SOBoolCol):
            f = lambda v: convertir_a_booleano(v)
        elif isinstance(col, sqlobject.col.SOFloatCol):
            f = lambda v: convertir_a_flotante(v, col = col)
        elif isinstance(col, sqlobject.col.SOEnumCol):
            f = lambda v: v # Not really sure what to do in this case; it
                            # is probably the index of the element in the
                            # combobox/enum, and therefore an integer.
        elif isinstance(col, (sqlobject.col.SODateTimeCol,
                              sqlobject.col.SOTimestampCol)):
            f = lambda v: convertir_a_fechahora(v)
        elif isinstance(col, sqlobject.col.SODateCol):
            f = lambda v: convertir_a_fecha(v)
        elif isinstance(col, sqlobject.col.SOTimeCol):
            f = lambda v: convertir_a_hora(v)
        elif isinstance(col, sqlobject.col.SOForeignKey):
            # Foreign keys travel as the id of the referenced row;
            # anything that cannot be parsed as an int becomes None.
            def get_id_or_none(v):
                if not v:
                    res = None
                try:
                    res = int(v)
                except:
                    res = None
                return res
            f = get_id_or_none
        else:
            f = lambda v: v
        return lambda : generic_getter(f, w)
    def _inferir_setter(self, col, w):
        """
        Returns the function that displays the
        column's value in the widget w.
        """
        if DEBUG:
            print w, w.name
        if isinstance(w, gtk.SpinButton):
            func = lambda o: setter_spinbutton(o, col, w)
        elif isinstance(w, gtk.Entry):
            func = lambda o: setter_entry(o, col, w)
        elif isinstance(w, gtk.TextView):
            func = lambda o: setter_textview(o, col, w)
        # NOTE: ComboBoxEntry is a ComboBox subclass, so it must be
        # tested before the plain ComboBox branch below.
        elif isinstance(w, gtk.ComboBoxEntry):
            func = lambda o: setter_comboboxentry(o, col, w)
        elif isinstance(w, gtk.ComboBox):
            func = lambda o: setter_combobox(o, col, w)
        elif isinstance(w, (gtk.CheckButton, gtk.ToggleButton)):
            func = lambda o: setter_checkbutton(o, col, w)
        else:
            txterr = 'adapter.py::_inferir_setter: Widget "%s" no soportado' % w
            sys.stderr.write(txterr)
            raise TypeError, txterr
        return func
    def _inferir_widget(self, col, widget = None):
        """
        If widget != None, checks that its type is valid for the
        column and uses it.  If it is not valid, or widget is None,
        creates and returns a suitable widget, dropping the received
        one if needed and giving the new one the same name.
        """
        ws_soportados = None
        for tipo in Adaptador.TIPOS:
            if isinstance(col, tipo):
                ws_soportados = self.TIPOS[tipo]
                break
        if not ws_soportados:
            raise TypeError, "adapter::_inferir_widget -> Tipo no soportado: %s" % (col)
        if widget == None:
            widget = self._crear_widget(ws_soportados[0], col)
        if DEBUG:
            print widget.name, type(widget), ws_soportados
        if type(widget) not in ws_soportados:
            widget = self._reemplazar_widget(widget, ws_soportados[0], col)
        if isinstance(widget, gtk.ComboBox):
            # ComboBoxEntry derives from ComboBox, so it enters here too.
            if widget.get_model() == None:
                self._construir_model_a(widget, col)
        return widget
    def _construir_model_a(self, widget, col):
        """
        Infers the data source for the widget from the column name and
        attaches a simple model to the widget, which must be a
        ComboBox[Entry].
        """
        import pclases
        # Column "clienteID" -> pclases class "Cliente", for instance.
        nomclase = col.name.replace("ID", "")
        nomclase = nomclase[0].upper() + nomclase[1:]
        clase = getattr(pclases, nomclase)
        primera_col = clase.sqlmeta.columnList[0].name
        filas = [(r.id, getattr(r, primera_col))
                 for r in clase.select(orderBy = primera_col)]
        rellenar_lista(widget, filas)
    def _crear_widget(self, tipo, col, nombre = None):
        """
        Creates and returns a widget of the received type, named
        after the received column.
        """
        if not nombre:
            nombre = col.name
        w = tipo()
        w.set_property("name", nombre)
        if (isinstance(w, self.TUPLACOMBO)
            and isinstance(col, sqlobject.SOForeignKey)):
            # Pre-populate the combo with (id, text) rows taken from the
            # first column of the referenced table.
            import pclases
            tablajena = getattr(pclases, col.foreignKey)
            texto = tablajena.sqlmeta.columnList[0]
            ops = []
            for i in tablajena.select(orderBy = texto.name):
                id = i.id
                txt = getattr(i, texto.name)
                ops.append((id, txt))
            rellenar_lista(w, ops)
        return w
    def _reemplazar_widget(self, w, tipo, col):
        """
        Creates a widget and "replaces" the received one with the
        new one, giving it the same name and returning it.
        """
        nombre = w.name
        del(w)
        w = self._crear_widget(tipo, col, nombre)
        return w
def adaptar_clase(clase_pclases, widgets = {}):
    """
    Adapt a complete pclases class.
    If `widgets` is not empty it must be a dictionary whose keys are
    either column names of the class or the column objects themselves,
    each mapping to the widget the column will be adapted to.
    Returns the Adaptador object holding the per-column adapters.
    """
    adaptador = Adaptador()
    for nombre_col, col in clase_pclases.sqlmeta.columns.items():
        # The column object takes precedence over its name as a key.
        if col in widgets:
            widget_elegido = widgets[col]
        elif nombre_col in widgets:
            widget_elegido = widgets[nombre_col]
        else:
            widget_elegido = None
        adaptador.adaptar(col, widget_elegido)
    return adaptador
if __name__ == "__main__":
    # Smoke test: adapt the first column of Cliente, push a value into the
    # generated widget, read it back and store it on a real record, then
    # check that model and view compare equal.  Name mangling
    # (_Adaptador__adaptadores) is used to reach the private adapter dict
    # from outside the class.
    import pclases
    c = pclases.Cliente.select()[0]
    a = Adaptador()
    a.adaptar(pclases.Cliente.sqlmeta.columnList[0])
    a._Adaptador__adaptadores[a._Adaptador__adaptadores.keys()[0]]['widget'].set_text("Tal")
    a._Adaptador__adaptadores[a._Adaptador__adaptadores.keys()[0]]['comparar'](c)
    c.nombre = a._Adaptador__adaptadores[a._Adaptador__adaptadores.keys()[0]]['leer']()
    assert a._Adaptador__adaptadores[a._Adaptador__adaptadores.keys()[0]]['comparar'](c)
|
pacoqueen/upy
|
framework/adapter.py
|
Python
|
gpl-2.0
| 19,257
|
from django.conf import settings
from django.conf.urls import include, url
from django.views.generic.base import RedirectView
from . import views
urlpatterns = [
    #url(r'^$', views.study_list),
    url(r'^$', views.patient_list),
    url(r'^patient/new/$', views.view_patient, name='new_patient'),
    url(r'^patient/(?P<patient_pk>[0-9]+)/$', views.view_patient, name='view_patient'),
    url(r'^episode/(?P<episode_pk>[0-9]+)/$', views.view_episode, name='view_episode'),
    # BUG FIX: this route was also named 'view_patient', duplicating the
    # patient-detail route above and making reverse('view_patient')
    # ambiguous; renamed to match its view.
    url(r'^patient/(?P<patient_pk>[0-9]+)/new_episode/$', views.new_episode, name='new_episode'),
    url(r'^analizar/$', views.analysis, name="analysis"),
    # BUG FIX: the dot was unescaped, so e.g. "dataXjson" also matched.
    url(r'^data\.json$', views.get_data, name="datajson"),
    url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}),
]
|
NavarraBiomed/seguimientoPacientes
|
ictus/urls.py
|
Python
|
gpl-2.0
| 787
|
"""
Django settings for program_info project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and
# load it from the environment before any real deployment.
SECRET_KEY = 'jkoho6(wg_ehqx@e0)9=c&rr5t3)k%$u=u#3r+om_l6)gu9snc'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# Empty list: only local development hosts are served while DEBUG is on.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'sql_show',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'program_info.urls'
WSGI_APPLICATION = 'program_info.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
|
huran2014/huran.github.io
|
program_learning/python/django/program_info/program_info/settings.py
|
Python
|
gpl-2.0
| 2,006
|
""" hmacHash.py
Implementation of Request for Comments: 2104
HMAC: Keyed-Hashing for Message Authentication
HMAC is a mechanism for message authentication
using cryptographic hash functions. HMAC can be used with any
iterative cryptographic hash function, e.g., MD5, SHA-1, in
combination with a secret shared key. The cryptographic strength of
HMAC depends on the properties of the underlying hash function.
This implementation of HMAC uses a generic cryptographic 'hashFunction'
(self.H). Hash functions must conform to the cryptopy.hash method
conventions and are not directly compatible with the Python sha1 or md5 algorithms.
[IETF] RFC 2104 "HMAC: Keyed-Hashing for Message Authentication"
>>>key = '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
>>>keyedHashAlg = HMAC(SHA1, key)
>>>result = keyedHashAlg(data)
"""
from cryptopy.hash.hash import Hash
class HMAC(Hash):
    """ To compute HMAC over the data `text' we perform
            H(K XOR opad, H(K XOR ipad, text))

        Fixes over the original implementation:
        - ``raise 'no key defined'`` used string exceptions, which raise
          TypeError instead on Python >= 2.6; a proper ValueError is
          raised now.
        - keys longer than the hash block size are zero-padded after
          being hashed, as RFC 2104 section 2 requires (the old code
          skipped the padding, producing non-interoperable MACs for
          long keys).
    """
    def __init__(self, hashFunction, key = None):
        """ Initialize HMAC with a cryptopy hash class and optionally a key. """
        # should check for right type of function
        self.H = hashFunction()       # a new instance for the inner hash
        self.H_outer = hashFunction() # separate outer context to allow
                                      # intermediate digests
        self.B = self.H.raw_block_size  # in bytes; note - hash block size is
                                        # typically 1 and raw_block_size much
                                        # larger (64 bytes for SHA1 and MD5)
        self.name = 'HMAC_'+self.H.name
        self.blocksize = 1 # single octets can be hashed by padding to raw block size
        self.raw_block_size = self.H.raw_block_size
        self.digest_size = self.H.digest_size
        if key != None:
            self.setKey(key)
        else:
            self.keyed = None
    def setKey(self, key):
        """ setKey(key) ... key is a binary string.
            Derives and caches K xor ipad / K xor opad. """
        if len(key) > self.B:
            # RFC 2104: a key longer than the block size is hashed first...
            key = self.H(key)
        # ...and then (like any short key) zero-padded up to the block size.
        # A warning for suspiciously short keys would be nice, but it would
        # break existing tests.
        key = key + (self.B - len(key)) * chr(0)
        self.k_xor_ipad = ''.join([chr(ord(bchar) ^ 0x36) for bchar in key])
        self.k_xor_opad = ''.join([chr(ord(bchar) ^ 0x5C) for bchar in key])
        self.keyed = 1
        self.reset()
    def reset(self):
        """ Restart the inner hash, pre-feeding it K xor ipad. """
        if self.keyed == None:
            raise ValueError('no key defined')
        self.H.reset()
        self.H.update(self.k_xor_ipad) # start inner hash with key xored with ipad
    # outer hash is always computed as one full pass (no updates)
    def update(self, data):
        """ Feed message data into the inner hash. """
        if self.keyed == None:
            raise ValueError('no key defined')
        self.H.update(data)
    def digest(self):
        """ Return H(K xor opad || inner digest). """
        if self.keyed == None:
            raise ValueError('no key defined')
        return self.H_outer(self.k_xor_opad+self.H.digest())
from cryptopy.hash.sha1Hash import SHA1
class HMAC_SHA1(HMAC):
    """ Predefined HMAC built on SHA1 """
    def __init__(self, key = None):
        """ Optionally initialize with a binary-string key. """
        HMAC.__init__(self,SHA1,key)
from cryptopy.hash.md5Hash import MD5
class HMAC_MD5(HMAC):
    """ Predefined HMAC built on MD5 """
    # (docstring previously said SHA1 - a copy-paste slip)
    def __init__(self, key = None):
        """ Optionally initialize with a binary-string key. """
        HMAC.__init__(self,MD5,key)
|
repotvsupertuga/tvsupertuga.repository
|
script.module.cryptolib/lib/cryptopy/keyedHash/hmacHash.py
|
Python
|
gpl-2.0
| 3,597
|
#
# gPrime - A web-based genealogy program
#
# Copyright (C) 2011 Michiel D. Nauta
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
""" Unittest that tests the code involved in merging """
import unittest
from .. import (Person, Surname, Name, NameType, Family, FamilyRelType,
Event, EventType, Source, Place, PlaceName, Citation, Date,
Repository, RepositoryType, Media, Note, NoteType,
StyledText, StyledTextTag, StyledTextTagType, Tag,
ChildRef, ChildRefType, Attribute, MediaRef, AttributeType,
Url, UrlType, Address, EventRef, EventRoleType, RepoRef,
FamilyRelType, LdsOrd, MediaRef, PersonRef, PlaceType,
SrcAttribute, SrcAttributeType)
from ..privacybase import PrivacyBase
from ..urlbase import UrlBase
from ..addressbase import AddressBase
from ..attrbase import AttributeBase
from ..ldsordbase import LdsOrdBase
from ..mediabase import MediaBase
from ..notebase import NoteBase
from ..citationbase import CitationBase
from ..surnamebase import SurnameBase
from ..tagbase import TagBase
from ..const import IDENTICAL, EQUAL, DIFFERENT
class PrivacyBaseTest:
    """Mixin: merging a private twin must leave the result private."""
    def test_privacy_merge(self):
        # Sanity check: the fixtures start out as identical copies.
        self.assertEqual(self.phoenix.to_struct(), self.titanic.to_struct())
        for obj in (self.titanic, self.ref_obj):
            obj.set_privacy(True)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
class NoteBaseTest:
    """Mixin: a note handle on the merge source must survive the merge."""
    def test_note_merge(self):
        handle = '123456'
        for obj in (self.titanic, self.ref_obj):
            obj.add_note(handle)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
class CitationBaseTest:
    """Mixin: a citation handle on the merge source must survive the merge."""
    def test_citation_merge(self):
        cit = Citation()
        cit.set_reference_handle('123456')
        cit.set_page('p.10')
        for obj in (self.titanic, self.ref_obj):
            obj.add_citation(cit.handle)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
class MediaBaseTest:
    """Mixin: a media reference on the merge source must survive the merge."""
    def test_media_merge(self):
        ref = MediaRef()
        ref.set_reference_handle('123456')
        for obj in (self.titanic, self.ref_obj):
            obj.add_media_reference(ref)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
class AttrBaseTest:
    """Mixin: an attribute on the merge source must survive the merge."""
    def test_attribute_merge(self):
        attribute = Attribute()
        attribute.set_type(AttributeType.AGE)
        attribute.set_value(10)
        for obj in (self.titanic, self.ref_obj):
            obj.add_attribute(attribute)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
class UrlBaseTest:
    """Mixin: a url on the merge source must survive the merge."""
    def test_url_merge(self):
        link = Url()
        link.set_path('http://example.com')
        for obj in (self.titanic, self.ref_obj):
            obj.add_url(link)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
#===========================================================
class PrivacyCheck(unittest.TestCase):
    """Truth table for merging the privacy flag: private always wins."""
    def test_privacy(self):
        phoenix = PrivacyBase()
        titanic = PrivacyBase()
        # (target, source, expected-after-merge)
        for first, second, expected in ((False, False, False),
                                        (True, False, True),
                                        (False, True, True),
                                        (True, True, True)):
            phoenix.set_privacy(first)
            titanic.set_privacy(second)
            phoenix._merge_privacy(titanic)
            self.assertEqual(phoenix.get_privacy(), expected)
class UrlCheck(unittest.TestCase, PrivacyBaseTest):
    """Equivalence and merge behaviour of Url objects."""
    def setUp(self):
        # phoenix: merge target; titanic: merge source; ref_obj: expected
        # outcome.  titanic/ref_obj start as copies of phoenix.
        self.phoenix = Url()
        self.phoenix.set_path('http://example1.com')
        self.phoenix.set_description('hello world')
        self.phoenix.set_type(UrlType.WEB_HOME)
        self.titanic = Url(self.phoenix)
        self.ref_obj = Url(self.phoenix)
    def test_path_equivalence(self):
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), IDENTICAL)
        self.titanic.set_path('http://example2.com')
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), DIFFERENT)
    def test_type_equivalence(self):
        self.titanic.set_type(UrlType.UNKNOWN)
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), DIFFERENT)
    def test_desc_equivalence(self):
        self.titanic.set_description('goodby')
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), DIFFERENT)
    def test_privacy_equivalence(self):
        # A differing privacy flag alone makes urls EQUAL, not DIFFERENT.
        self.titanic.set_privacy(True)
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), EQUAL)
    def test_merge_path(self):
        # Merging a url with a different path must not change the path.
        self.titanic.set_path('example2.com')
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.is_equal(self.ref_obj), True)
class UrlBaseCheck(unittest.TestCase):
    """Merging url lists: IDENTICAL urls collapse, EQUAL ones merge,
    DIFFERENT ones are appended."""
    def setUp(self):
        self.phoenix = UrlBase()
        self.titanic = UrlBase()
        url = Url()
        url.set_path('example.com')
        self.phoenix.add_url(url)
    def test_identical(self):
        ref_url_list = UrlBase(self.phoenix)
        url = Url()
        url.set_path('example.com')
        self.titanic.add_url(url)
        self.phoenix._merge_url_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), ref_url_list.to_struct())
    def test_equal(self):
        # EQUAL (same path, extra privacy): flags are merged in place.
        ref_url_list = UrlBase(self.phoenix)
        ref_url = ref_url_list.get_url_list()[0]
        ref_url.set_privacy(True)
        url = Url()
        url.set_path('example.com')
        url.set_privacy(True)
        self.titanic.add_url(url)
        self.phoenix._merge_url_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), ref_url_list.to_struct())
    def test_different(self):
        # DIFFERENT path: the source url is appended to the list.
        ref_url_list = UrlBase(self.phoenix)
        url = Url()
        url.set_path('other.com')
        ref_url_list.add_url(url)
        self.titanic.add_url(url)
        self.phoenix._merge_url_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), ref_url_list.to_struct())
class AddressCheck(unittest.TestCase, PrivacyBaseTest, NoteBaseTest,
                   CitationBaseTest):
    """Equivalence and merge behaviour of Address objects."""
    def setUp(self):
        # phoenix: merge target; titanic: source; ref_obj: expected result.
        self.phoenix = Address()
        self.phoenix.set_city('Amsterdam')
        self.titanic = Address(self.phoenix)
        self.ref_obj = Address(self.phoenix)
    def test_location_equivalence(self):
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), IDENTICAL)
        self.titanic.set_city('Rotterdam')
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), DIFFERENT)
    def test_date_equivalence(self):
        date = Date()
        date.set_yr_mon_day(1999,12,5)
        self.titanic.set_date_object(date)
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), DIFFERENT)
    def test_privacy_equivalence(self):
        # A differing privacy flag alone makes addresses EQUAL, not DIFFERENT.
        self.titanic.set_privacy(True)
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), EQUAL)
    def test_location_merge(self):
        # Merging a DIFFERENT city must leave the original address untouched.
        self.titanic.set_city('Rotterdam')
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.is_equal(self.ref_obj), True)
class AddressBaseCheck(unittest.TestCase):
    """Merging address lists: IDENTICAL entries collapse, EQUAL ones
    merge, DIFFERENT ones are appended."""
    def setUp(self):
        self.phoenix = AddressBase()
        self.titanic = AddressBase()
        self.ref_list = AddressBase()
        address = Address()
        address.set_city('Amsterdam')
        self.phoenix.add_address(address)
    def test_identical(self):
        address = Address()
        address.set_city('Amsterdam')
        self.ref_list.add_address(address)
        self.titanic.add_address(address)
        self.phoenix._merge_address_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())
    def test_equal(self):
        # EQUAL (same city plus an extra note): merged in place.
        note_handle = '123456'
        address = Address()
        address.set_city('Amsterdam')
        address.add_note(note_handle)
        self.titanic.add_address(address)
        self.ref_list.add_address(address)
        self.phoenix._merge_address_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())
    def test_different(self):
        # DIFFERENT address: appended after the existing one.
        address = Address()
        address.set_country('Netherlands')
        self.titanic.add_address(address)
        self.ref_list = AddressBase(self.phoenix)
        self.ref_list.add_address(address)
        self.phoenix._merge_address_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())
class AttributeCheck(unittest.TestCase, PrivacyBaseTest, NoteBaseTest,
                     CitationBaseTest):
    """Equivalence and merge behaviour of Attribute objects."""
    def setUp(self):
        # phoenix: merge target; titanic: source; ref_obj: expected result.
        self.phoenix = Attribute()
        self.phoenix.set_type(AttributeType.AGE)
        self.phoenix.set_value(10)
        self.titanic = Attribute(self.phoenix)
        self.ref_obj = Attribute(self.phoenix)
    def test_type_equivalence(self):
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), IDENTICAL)
        self.titanic.set_type(AttributeType.MOTHER_AGE)
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), DIFFERENT)
    def test_value_equivalence(self):
        self.titanic.set_value(12)
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), DIFFERENT)
    def test_privacy_equivalence(self):
        # A differing privacy flag alone makes attributes EQUAL, not DIFFERENT.
        self.titanic.set_privacy(True)
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), EQUAL)
    def test_value_merge(self):
        # Merging a DIFFERENT value must leave the original value untouched.
        self.titanic.set_value(12)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.is_equal(self.ref_obj), True)
class AttributeBaseCheck(unittest.TestCase):
    """Merging attribute lists: IDENTICAL entries collapse, EQUAL ones
    merge, DIFFERENT ones are appended."""
    def setUp(self):
        self.phoenix = AttributeBase()
        self.titanic = AttributeBase()
        self.ref_list = AttributeBase()
        attr = Attribute()
        attr.set_type(AttributeType.AGE)
        attr.set_value(10)
        self.phoenix.add_attribute(attr)
    def test_identical(self):
        attr = Attribute()
        attr.set_type(AttributeType.AGE)
        attr.set_value(10)
        self.ref_list.add_attribute(attr)
        self.titanic.add_attribute(attr)
        self.phoenix._merge_attribute_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())
    def test_equal(self):
        # EQUAL (same type/value plus an extra note): merged in place.
        note_handle = '123456'
        attr = Attribute()
        attr.set_type(AttributeType.AGE)
        attr.set_value(10)
        attr.add_note(note_handle)
        self.titanic.add_attribute(attr)
        self.ref_list.add_attribute(attr)
        self.phoenix._merge_attribute_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())
    def test_different(self):
        # DIFFERENT value: appended after the existing attribute.
        attr = Attribute()
        attr.set_type(AttributeType.AGE)
        attr.set_value(12)
        self.titanic.add_attribute(attr)
        self.ref_list = AttributeBase(self.phoenix)
        self.ref_list.add_attribute(attr)
        self.phoenix._merge_attribute_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())
class ChildRefCheck(unittest.TestCase, PrivacyBaseTest, NoteBaseTest,
                    CitationBaseTest):
    """Equivalence and merge behaviour of ChildRef objects."""
    def setUp(self):
        # Three identical refs: phoenix (target), titanic (source),
        # ref_obj (expected result).
        self.phoenix = ChildRef()
        self.phoenix.set_reference_handle('123456')
        self.phoenix.set_father_relation(ChildRefType.UNKNOWN)
        self.phoenix.set_mother_relation(ChildRefType.UNKNOWN)
        self.titanic = ChildRef()
        self.titanic.set_reference_handle('123456')
        self.titanic.set_father_relation(ChildRefType.UNKNOWN)
        self.titanic.set_mother_relation(ChildRefType.UNKNOWN)
        self.ref_obj = ChildRef()
        self.ref_obj.set_reference_handle('123456')
        self.ref_obj.set_father_relation(ChildRefType.UNKNOWN)
        self.ref_obj.set_mother_relation(ChildRefType.UNKNOWN)
    def test_handle_equivalence(self):
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), IDENTICAL)
        self.titanic.set_reference_handle('654321')
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), DIFFERENT)
    def test_privacy_equivalence(self):
        # A differing privacy flag alone makes refs EQUAL, not DIFFERENT.
        self.titanic.set_privacy(True)
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), EQUAL)
    def test_mrel_merge(self):
        # A known mother relation overrides UNKNOWN during merge.
        self.titanic.set_mother_relation(ChildRefType.BIRTH)
        self.ref_obj.set_mother_relation(ChildRefType.BIRTH)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.is_equal(self.ref_obj), True)
    def test_frel_merge(self):
        # A known father relation overrides UNKNOWN during merge.
        self.titanic.set_father_relation(ChildRefType.ADOPTED)
        self.ref_obj.set_father_relation(ChildRefType.ADOPTED)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.is_equal(self.ref_obj), True)
class EventCheck(unittest.TestCase, PrivacyBaseTest, NoteBaseTest,
                 CitationBaseTest, MediaBaseTest, AttrBaseTest):
    """Runs the shared merge mixins against Event objects."""
    def setUp(self):
        self.phoenix = Event()
        self.phoenix.set_description("hello world")
        # Copies are made via a to_struct/from_struct round trip.
        self.titanic = Event.from_struct(self.phoenix.to_struct())
        self.ref_obj = Event.from_struct(self.phoenix.to_struct())
class EventRefCheck(unittest.TestCase, PrivacyBaseTest, NoteBaseTest,
                    AttrBaseTest):
    """Equivalence, merge and citation-replacement behaviour of EventRef."""
    def setUp(self):
        self.phoenix = EventRef()
        self.phoenix.set_reference_handle('123456')
        self.titanic = EventRef(self.phoenix)
        self.ref_obj = EventRef(self.phoenix)
    def test_handle_equivalence(self):
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), IDENTICAL)
        self.titanic.set_reference_handle('654321')
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), DIFFERENT)
    def test_role_equivalence(self):
        self.titanic.set_role(EventRoleType.WITNESS)
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), DIFFERENT)
    def test_privacy_equivalence(self):
        # A differing privacy flag alone makes refs EQUAL, not DIFFERENT.
        self.titanic.set_privacy(True)
        self.assertEqual(self.phoenix.is_equivalent(self.titanic), EQUAL)
    def test_replace(self):
        """replace_citation_references must rewrite the old citation
        handle inside nested attributes, leaving other handles alone."""
        attr1 = Attribute()
        attr1.set_type(AttributeType.AGE)
        attr1.set_value(10)
        citation1 = Citation()
        citation1.set_reference_handle('123456')
        citation1.set_page('p.10')
        citation2 = Citation()
        citation2.set_reference_handle('234567')
        citation2.set_page('p.20')
        attr1.add_citation(citation1.handle)
        attr1.add_citation(citation2.handle)
        # attr2 mirrors attr1, but with '234567' already replaced by '654321'.
        attr2 = Attribute()
        attr2.set_type(AttributeType.AGE)
        attr2.set_value(10)
        citation3 = Citation()
        citation3.set_reference_handle('123456')
        citation3.set_page('p.10')
        citation4 = Citation()
        citation4.set_reference_handle('654321')
        citation4.set_page('p.20')
        attr2.add_citation(citation3.handle)
        attr2.add_citation(citation4.handle)
        self.phoenix.add_attribute(attr1)
        self.ref_obj.add_attribute(attr2)
        self.phoenix.replace_citation_references('234567','654321')
        self.assertTrue(self.phoenix.is_equal(self.ref_obj))
class FamilyCheck(unittest.TestCase, PrivacyBaseTest, NoteBaseTest,
CitationBaseTest, MediaBaseTest, AttrBaseTest):
def setUp(self):
self.phoenix = Family()
self.phoenix.set_father_handle('123456')
self.phoenix.set_mother_handle('654321')
self.phoenix.set_relationship(FamilyRelType.MARRIED)
self.titanic = Family()
self.titanic.set_father_handle('123456')
self.titanic.set_mother_handle('654321')
self.titanic.set_relationship(FamilyRelType.MARRIED)
self.ref_obj = Family()
self.ref_obj.set_father_handle('123456')
self.ref_obj.set_mother_handle('654321')
self.ref_obj.set_relationship(FamilyRelType.MARRIED)
def test_relation_merge(self):
self.phoenix.set_relationship(FamilyRelType.UNKNOWN)
self.titanic.set_relationship(FamilyRelType.UNMARRIED)
self.ref_obj.set_relationship(FamilyRelType.UNMARRIED)
self.phoenix.merge(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_eventref_merge(self):
evtref = EventRef()
evtref.set_role(EventRoleType.WITNESS)
self.titanic.add_event_ref(evtref)
self.ref_obj.add_event_ref(evtref)
self.phoenix.merge(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_ldsord_merge(self):
ldsord = LdsOrd()
ldsord.set_temple('London')
self.titanic.add_lds_ord(ldsord)
self.ref_obj.add_lds_ord(ldsord)
self.phoenix.merge(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_childref_merge(self):
childref = ChildRef()
childref.set_reference_handle('123456')
self.titanic.add_child_ref(childref)
self.ref_obj.add_child_ref(childref)
self.phoenix.merge(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_mergechildref_identical(self):
childref1 = ChildRef()
childref1.set_reference_handle('123456')
childref2 = ChildRef()
childref2.set_reference_handle('123456')
childref3 = ChildRef()
childref3.set_reference_handle('123456')
self.phoenix.add_child_ref(childref1)
self.titanic.add_child_ref(childref2)
self.ref_obj.add_child_ref(childref3)
self.phoenix._merge_child_ref_list(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_mergechildref_equal(self):
childref1 = ChildRef()
childref1.set_reference_handle('123456')
childref2 = ChildRef()
childref2.set_reference_handle('123456')
childref2.add_note('N1')
childref3 = ChildRef()
childref3.set_reference_handle('123456')
childref3.add_note('N1')
self.phoenix.add_child_ref(childref1)
self.titanic.add_child_ref(childref2)
self.ref_obj.add_child_ref(childref3)
self.phoenix._merge_child_ref_list(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_mergechildref_different(self):
childref1 = ChildRef()
childref1.set_reference_handle('123456')
childref2 = ChildRef()
childref2.set_reference_handle('654321')
childref3 = ChildRef()
childref3.set_reference_handle('123456')
childref4 = ChildRef()
childref4.set_reference_handle('654321')
self.phoenix.add_child_ref(childref1)
self.titanic.add_child_ref(childref2)
self.ref_obj.add_child_ref(childref3)
self.ref_obj.add_child_ref(childref4)
self.phoenix._merge_child_ref_list(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_replace_childref_absent(self):
childref1 = ChildRef()
childref1.set_reference_handle('234567')
childref2 = ChildRef()
childref2.set_reference_handle('345678')
childref3 = ChildRef()
childref3.set_reference_handle('765432')
childref4 = ChildRef()
childref4.set_reference_handle('345678')
self.phoenix.add_child_ref(childref1)
self.phoenix.add_child_ref(childref2)
self.ref_obj.add_child_ref(childref3)
self.ref_obj.add_child_ref(childref4)
self.phoenix.replace_handle_reference('Person', '234567', '765432')
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_replace_childref_identical(self):
childref1 = ChildRef()
childref1.set_reference_handle('234567')
childref2 = ChildRef()
childref2.set_reference_handle('765432')
childref3 = ChildRef()
childref3.set_reference_handle('765432')
self.phoenix.add_child_ref(childref1)
self.phoenix.add_child_ref(childref2)
self.ref_obj.add_child_ref(childref3)
self.phoenix.replace_handle_reference('Person', '234567', '765432')
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_replace_childref_equal(self):
childref1 = ChildRef()
childref1.set_reference_handle('234567')
childref1.set_privacy(True)
childref2 = ChildRef()
childref2.set_reference_handle('765432')
childref3 = ChildRef()
childref3.set_reference_handle('765432')
childref3.set_privacy(True)
self.phoenix.add_child_ref(childref1)
self.phoenix.add_child_ref(childref2)
self.ref_obj.add_child_ref(childref3)
self.phoenix.replace_handle_reference('Person', '234567', '765432')
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_replace_childref_different(self):
# impossible, is_equivalent is only DIFFERENT if handles differ.
childref1 = ChildRef()
childref1.set_reference_handle('234567')
childref1.set_mother_relation('Adopted')
childref2 = ChildRef()
childref2.set_reference_handle('765432')
childref3 = ChildRef()
childref3.set_reference_handle('765432')
self.phoenix.add_child_ref(childref1)
self.phoenix.add_child_ref(childref2)
self.ref_obj.add_child_ref(childref3)
self.phoenix.replace_handle_reference('Person', '234567', '765432')
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_mergeeventref_identical(self):
eventref1 = EventRef()
eventref1.set_role(EventRoleType.WITNESS)
eventref2 = EventRef()
eventref2.set_role(EventRoleType.WITNESS)
eventref3 = EventRef()
eventref3.set_role(EventRoleType.WITNESS)
self.phoenix.add_event_ref(eventref1)
self.titanic.add_event_ref(eventref2)
self.ref_obj.add_event_ref(eventref3)
self.phoenix._merge_event_ref_list(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_mergeeventref_equal(self):
eventref1 = EventRef()
eventref1.set_role(EventRoleType.WITNESS)
eventref2 = EventRef()
eventref2.set_role(EventRoleType.WITNESS)
eventref2.add_note('N1')
eventref3 = EventRef()
eventref3.set_role(EventRoleType.WITNESS)
eventref3.add_note('N1')
self.phoenix.add_event_ref(eventref1)
self.titanic.add_event_ref(eventref2)
self.ref_obj.add_event_ref(eventref3)
self.phoenix._merge_event_ref_list(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_mergeeventref_different(self):
eventref1 = EventRef()
eventref1.set_role(EventRoleType.WITNESS)
eventref2 = EventRef()
eventref2.set_role(EventRoleType.CLERGY)
eventref3 = EventRef()
eventref3.set_role(EventRoleType.WITNESS)
eventref4 = EventRef()
eventref4.set_role(EventRoleType.CLERGY)
self.phoenix.add_event_ref(eventref1)
self.titanic.add_event_ref(eventref2)
self.ref_obj.add_event_ref(eventref3)
self.ref_obj.add_event_ref(eventref4)
self.phoenix._merge_event_ref_list(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_replace_event_absent(self):
eventref1 = EventRef()
eventref1.set_reference_handle('123456')
eventref2 = EventRef()
eventref2.set_reference_handle('234567')
eventref3 = EventRef()
eventref3.set_reference_handle('654321')
eventref4 = EventRef()
eventref4.set_reference_handle('234567')
self.phoenix.add_event_ref(eventref1)
self.phoenix.add_event_ref(eventref2)
self.ref_obj.add_event_ref(eventref3)
self.ref_obj.add_event_ref(eventref4)
self.phoenix.replace_handle_reference('Event', '123456', '654321')
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_replace_event_identical(self):
eventref1 = EventRef()
eventref1.set_reference_handle('123456')
eventref2 = EventRef()
eventref2.set_reference_handle('654321')
eventref3 = EventRef()
eventref3.set_reference_handle('654321')
self.phoenix.add_event_ref(eventref1)
self.phoenix.add_event_ref(eventref2)
self.ref_obj.add_event_ref(eventref3)
self.phoenix.replace_handle_reference('Event', '123456', '654321')
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_replace_event_equal(self):
eventref1 = EventRef()
eventref1.set_reference_handle('123456')
eventref1.set_privacy(True)
eventref2 = EventRef()
eventref2.set_reference_handle('654321')
eventref3 = EventRef()
eventref3.set_reference_handle('654321')
eventref3.set_privacy(True)
self.phoenix.add_event_ref(eventref1)
self.phoenix.add_event_ref(eventref2)
self.ref_obj.add_event_ref(eventref3)
self.phoenix.replace_handle_reference('Event', '123456', '654321')
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_replace_event_different(self):
eventref1 = EventRef()
eventref1.set_reference_handle('123456')
eventref1.set_role(EventRoleType.WITNESS)
eventref2 = EventRef()
eventref2.set_reference_handle('654321')
eventref3 = EventRef()
eventref3.set_reference_handle('654321')
eventref3.set_role(EventRoleType.WITNESS)
eventref4 = EventRef()
eventref4.set_reference_handle('654321')
self.phoenix.add_event_ref(eventref1)
self.phoenix.add_event_ref(eventref2)
self.ref_obj.add_event_ref(eventref3)
self.ref_obj.add_event_ref(eventref4)
self.phoenix.replace_handle_reference('Event', '123456', '654321')
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_replace_event_order_first(self):
eventref1 = EventRef()
eventref1.set_reference_handle('123456')
eventref2 = EventRef()
eventref2.set_reference_handle('234567')
eventref3 = EventRef()
eventref3.set_reference_handle('654321')
eventref4 = EventRef()
eventref4.set_reference_handle('123456')
eventref5 = EventRef()
eventref5.set_reference_handle('234567')
self.phoenix.add_event_ref(eventref1)
self.phoenix.add_event_ref(eventref2)
self.phoenix.add_event_ref(eventref3)
self.ref_obj.add_event_ref(eventref4)
self.ref_obj.add_event_ref(eventref5)
self.phoenix.replace_handle_reference('Event', '654321', '123456')
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_replace_event_order_last(self):
eventref1 = EventRef()
eventref1.set_reference_handle('123456')
eventref2 = EventRef()
eventref2.set_reference_handle('234567')
eventref3 = EventRef()
eventref3.set_reference_handle('654321')
eventref4 = EventRef()
eventref4.set_reference_handle('234567')
eventref5 = EventRef()
eventref5.set_reference_handle('654321')
self.phoenix.add_event_ref(eventref1)
self.phoenix.add_event_ref(eventref2)
self.phoenix.add_event_ref(eventref3)
self.ref_obj.add_event_ref(eventref4)
self.ref_obj.add_event_ref(eventref5)
self.phoenix.replace_handle_reference('Event', '123456', '654321')
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
class LdsordCheck(unittest.TestCase, PrivacyBaseTest, NoteBaseTest,
                  CitationBaseTest):
    """LdsOrd.is_equivalent: any substantive attribute difference yields
    DIFFERENT, while a privacy-only difference yields EQUAL."""

    def setUp(self):
        # phoenix is the reference ordinance; titanic starts as its copy.
        self.phoenix = LdsOrd()
        self.phoenix.set_temple('London, England')
        self.titanic = LdsOrd(self.phoenix)
        self.ref_obj = LdsOrd(self.phoenix)

    def _equivalence(self):
        # Shorthand for the comparison every test below performs.
        return self.phoenix.is_equivalent(self.titanic)

    def test_type_equivalence(self):
        self.assertEqual(self._equivalence(), IDENTICAL)
        self.titanic.set_type(LdsOrd.CONFIRMATION)
        self.assertEqual(self._equivalence(), DIFFERENT)

    def test_date_equivalence(self):
        when = Date()
        when.set_yr_mon_day(1999, 12, 5)
        self.titanic.set_date_object(when)
        self.assertEqual(self._equivalence(), DIFFERENT)

    def test_temple_equivalence(self):
        self.titanic.set_temple('Baton Rouge, Louisiana')
        self.assertEqual(self._equivalence(), DIFFERENT)

    def test_status_equivalence(self):
        self.titanic.set_status(LdsOrd.STATUS_CLEARED)
        self.assertEqual(self._equivalence(), DIFFERENT)

    def test_famc_equivalence(self):
        self.titanic.set_family_handle('F1')
        self.assertEqual(self._equivalence(), DIFFERENT)

    def test_privacy_equivalence(self):
        self.titanic.set_privacy(True)
        self.assertEqual(self._equivalence(), EQUAL)
class LdsordBaseCheck(unittest.TestCase):
    """Merging LDS-ordinance lists: identical/equal entries collapse into
    one, different entries are appended."""

    @staticmethod
    def _london_ord():
        # The ordinance shared by phoenix and the merge candidates.
        ordinance = LdsOrd()
        ordinance.set_temple('London, England')
        return ordinance

    def setUp(self):
        self.phoenix = LdsOrdBase()
        self.titanic = LdsOrdBase()
        self.ref_list = LdsOrdBase()
        self.phoenix.add_lds_ord(self._london_ord())

    def test_identical(self):
        ordinance = self._london_ord()
        self.titanic.add_lds_ord(ordinance)
        self.ref_list.add_lds_ord(ordinance)
        self.phoenix._merge_lds_ord_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())

    def test_equal(self):
        ordinance = self._london_ord()
        ordinance.set_privacy(True)
        self.titanic.add_lds_ord(ordinance)
        self.ref_list.add_lds_ord(ordinance)
        self.phoenix._merge_lds_ord_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())

    def test_different(self):
        ordinance = LdsOrd()
        ordinance.set_temple('Baton Rouge, Louisiana')
        self.titanic.add_lds_ord(ordinance)
        # Expected: phoenix's original entry followed by the new one.
        self.ref_list = LdsOrdBase(self.phoenix)
        self.ref_list.add_lds_ord(ordinance)
        self.phoenix._merge_lds_ord_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())
class MediaBaseCheck(unittest.TestCase):
    """Merging and handle-replacement of MediaRef lists on MediaBase.

    Comparison is via to_struct(), so the order in which references are
    added to the expected ``ref_list`` is significant.
    """

    @staticmethod
    def _ref(handle, rect='10 10 90 90', private=False):
        # Build a MediaRef with the given handle/rectangle/privacy.
        ref = MediaRef()
        ref.set_reference_handle(handle)
        ref.set_rectangle(rect)
        if private:
            ref.set_privacy(True)
        return ref

    def setUp(self):
        self.phoenix = MediaBase()
        self.titanic = MediaBase()
        self.ref_list = MediaBase()
        self.phoenix.add_media_reference(self._ref('123456'))

    def test_merge_identical(self):
        ref = self._ref('123456')
        self.titanic.add_media_reference(ref)
        self.ref_list.add_media_reference(ref)
        self.phoenix._merge_media_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())

    def test_merge_equal(self):
        ref = self._ref('123456', private=True)
        self.titanic.add_media_reference(ref)
        self.ref_list.add_media_reference(ref)
        self.phoenix._merge_media_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())

    def test_merge_different(self):
        # Same handle but different rectangle: both refs are kept.
        self.titanic.add_media_reference(self._ref('123456', '20 10 90 90'))
        self.ref_list.add_media_reference(self._ref('123456'))
        self.ref_list.add_media_reference(self._ref('123456', '20 10 90 90'))
        self.phoenix._merge_media_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())

    def test_replace_absent(self):
        self.ref_list.add_media_reference(self._ref('654321'))
        self.phoenix.replace_media_references('123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())

    def test_replace_identical(self):
        # The rewritten ref becomes identical to an existing one: merged.
        self.phoenix.add_media_reference(self._ref('654321'))
        self.ref_list.add_media_reference(self._ref('654321'))
        self.phoenix.replace_media_references('123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())

    def test_replace_equal(self):
        self.phoenix.add_media_reference(self._ref('654321', private=True))
        self.ref_list.add_media_reference(self._ref('654321', private=True))
        self.phoenix.replace_media_references('123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())

    def test_replace_different(self):
        # Rectangles differ, so both refs survive the replacement.
        self.phoenix.add_media_reference(self._ref('654321', '20 20 90 90'))
        self.ref_list.add_media_reference(self._ref('654321'))
        self.ref_list.add_media_reference(self._ref('654321', '20 20 90 90'))
        self.phoenix.replace_media_references('123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())

    def test_replace_order_first(self):
        # Merging into an earlier entry keeps the earlier slot.
        self.phoenix.add_media_reference(self._ref('234567'))
        self.phoenix.add_media_reference(self._ref('654321'))
        self.ref_list.add_media_reference(self._ref('123456'))
        self.ref_list.add_media_reference(self._ref('234567'))
        self.phoenix.replace_media_references('654321', '123456')
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())

    def test_replace_order_last(self):
        # Merging into a later entry keeps the later slot.
        self.phoenix.add_media_reference(self._ref('234567'))
        self.phoenix.add_media_reference(self._ref('654321'))
        self.ref_list.add_media_reference(self._ref('234567'))
        self.ref_list.add_media_reference(self._ref('654321'))
        self.phoenix.replace_media_references('123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())
class MediaCheck(unittest.TestCase, PrivacyBaseTest, AttrBaseTest,
                 NoteBaseTest, CitationBaseTest):
    """Run the shared base-test mixins against Media objects."""

    def setUp(self):
        self.phoenix = Media()
        self.phoenix.set_path('example.png')
        # Round-trip through the struct form to obtain independent copies.
        snapshot = self.phoenix.to_struct
        self.titanic = Media.from_struct(snapshot())
        self.ref_obj = Media.from_struct(snapshot())
class MediaRefCheck(unittest.TestCase, PrivacyBaseTest, AttrBaseTest,
                    CitationBaseTest, NoteBaseTest):
    """MediaRef.is_equivalent: handle and rectangle distinguish refs;
    privacy alone does not."""

    def setUp(self):
        self.phoenix = MediaRef()
        self.phoenix.set_rectangle("10 10 90 90")
        self.titanic = MediaRef(self.phoenix)
        self.ref_obj = MediaRef(self.phoenix)

    def _equivalence(self):
        return self.phoenix.is_equivalent(self.titanic)

    def test_ref_equivalence(self):
        self.assertEqual(self._equivalence(), IDENTICAL)
        self.titanic.set_reference_handle('123456')
        self.assertEqual(self._equivalence(), DIFFERENT)

    def test_rect_equivalence(self):
        self.titanic.set_rectangle("20 20 80 80")
        self.assertEqual(self._equivalence(), DIFFERENT)

    def test_privacy_equivalence(self):
        self.titanic.set_privacy(True)
        self.assertEqual(self._equivalence(), EQUAL)
class NameCheck(unittest.TestCase, PrivacyBaseTest, NoteBaseTest,
                CitationBaseTest):
    """Name.is_equivalent: first name, date and surnames distinguish;
    privacy alone does not."""

    def setUp(self):
        self.phoenix = Name()
        self.phoenix.set_first_name('Willem')
        oranje = Surname()
        oranje.set_surname("Oranje")
        self.phoenix.add_surname(oranje)
        self.titanic = Name(self.phoenix)
        self.ref_obj = Name(self.phoenix)

    def _equivalence(self):
        return self.phoenix.is_equivalent(self.titanic)

    def test_datalist_equivalence(self):
        self.assertEqual(self._equivalence(), IDENTICAL)
        self.titanic.set_first_name('Maurits')
        self.assertEqual(self._equivalence(), DIFFERENT)

    def test_date_equivalence(self):
        when = Date()
        when.set_yr_mon_day(1999, 12, 5)
        self.titanic.set_date_object(when)
        self.assertEqual(self._equivalence(), DIFFERENT)

    def test_surname_equivalence(self):
        nassau = Surname()
        nassau.set_surname("Nassau")
        self.titanic.add_surname(nassau)
        self.assertEqual(self._equivalence(), DIFFERENT)

    def test_privacy_equivalence(self):
        self.titanic.set_privacy(True)
        self.assertEqual(self._equivalence(), EQUAL)
class NoteCheck(unittest.TestCase, PrivacyBaseTest):
    """Run the privacy-merge mixin tests against Note objects."""

    def setUp(self):
        text = "hello world"
        self.phoenix = Note(text)
        self.titanic = Note(text)
        self.ref_obj = Note(text)
class NoteBaseCheck(unittest.TestCase):
    """Merging and replacing note-handle lists on NoteBase."""

    def setUp(self):
        self.phoenix = NoteBase()
        self.titanic = NoteBase()
        note = Note("hello world")
        note.set_handle('123456')
        self.phoenix.add_note(note.get_handle())

    def test_identical(self):
        expected = NoteBase(self.phoenix)
        self.titanic.add_note(self.phoenix.get_note_list()[0])
        self.phoenix._merge_note_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), expected.to_struct())

    def test_different(self):
        expected = NoteBase(self.phoenix)
        # NOTE(review): this note never gets a handle set, so the default
        # handle is what gets added/merged — presumably intentional.
        other = Note("note other")
        self.titanic.add_note(other.get_handle())
        expected.add_note(other.get_handle())
        self.phoenix._merge_note_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), expected.to_struct())

    def test_replace_nonew(self):
        other = Note("note other")
        other.set_handle('654321')
        expected = NoteBase()
        expected.add_note(other.get_handle())
        self.phoenix.replace_note_references('123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), expected.to_struct())

    def test_replace_newpresent(self):
        replacement = Note("note other")
        replacement.set_handle('654321')
        extra = Note("yet another note")
        extra.set_handle('234567')
        self.phoenix.add_note(extra.get_handle())
        self.phoenix.add_note(replacement.get_handle())
        expected = NoteBase()
        expected.add_note(extra.get_handle())
        expected.add_note(replacement.get_handle())
        self.phoenix.replace_note_references('123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), expected.to_struct())

    def todo_test_replace_child(self):
        # Disabled (todo_ prefix): replacement of empty handles.
        expected = NoteBase()
        note = Note("")
        note.set_handle('123456')
        expected.add_note(note.get_handle())
        self.phoenix.replace_note_references('', '')
        self.assertEqual(self.phoenix.to_struct(), expected.to_struct())
class PersonCheck(unittest.TestCase, PrivacyBaseTest, MediaBaseTest,
                  AttrBaseTest, NoteBaseTest, CitationBaseTest):
    """Merge and handle-replacement tests for Person objects.

    Fixture convention of this suite: ``phoenix`` is the object that
    survives a merge, ``titanic`` is merged into it, and ``ref_obj``
    holds the expected result.  Equality is checked by comparing
    ``to_struct()`` output, so list ordering inside the fixtures is
    significant.
    """
    def setUp(self):
        # All three fixtures start with the same primary name 'Adam'.
        self.phoenix = Person()
        name = Name()
        name.set_first_name('Adam')
        self.phoenix.set_primary_name(name)
        self.titanic = Person()
        self.titanic.set_primary_name(name)
        self.ref_obj = Person()
        self.ref_obj.set_primary_name(name)
    # --- _replace_handle_reference for event refs ---------------------
    def test_replace_eventhandle_nonew(self):
        # No ref with the new handle exists yet: plain handle rewrite.
        evtref = EventRef()
        evtref.set_reference_handle('123456')
        evtref2 = EventRef()
        evtref2.set_reference_handle('654321')
        self.phoenix.add_event_ref(evtref)
        self.ref_obj.add_event_ref(evtref2)
        self.phoenix._replace_handle_reference('Event', '123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_replace_eventhandle_identical(self):
        # Rewritten ref becomes identical to an existing one: merged away.
        evtref = EventRef()
        evtref.set_reference_handle('123456')
        evtref2 = EventRef()
        evtref2.set_reference_handle('234567')
        evtref3 = EventRef()
        evtref3.set_reference_handle('654321')
        self.phoenix.add_event_ref(evtref)
        self.phoenix.add_event_ref(evtref2)
        self.phoenix.add_event_ref(evtref3)
        self.ref_obj.add_event_ref(evtref2)
        self.ref_obj.add_event_ref(evtref3)
        self.phoenix._replace_handle_reference('Event', '123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_replace_eventhandle_equal(self):
        # EQUAL refs (privacy differs) also merge after the rewrite.
        evtref = EventRef()
        evtref.set_reference_handle('123456')
        evtref2 = EventRef()
        evtref2.set_reference_handle('234567')
        evtref3 = EventRef()
        evtref3.set_reference_handle('654321')
        evtref3.set_privacy(True)
        self.phoenix.add_event_ref(evtref)
        self.phoenix.add_event_ref(evtref2)
        self.phoenix.add_event_ref(evtref3)
        self.ref_obj.add_event_ref(evtref2)
        self.ref_obj.add_event_ref(evtref3)
        self.phoenix._replace_handle_reference('Event', '123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_replace_eventhandle_different(self):
        # Note the expected order: the rewritten ref keeps its position.
        evtref = EventRef()
        evtref.set_reference_handle('123456')
        evtref2 = EventRef()
        evtref2.set_reference_handle('234567')
        evtref3 = EventRef()
        evtref3.set_reference_handle('654321')
        self.phoenix.add_event_ref(evtref)
        self.phoenix.add_event_ref(evtref2)
        self.ref_obj.add_event_ref(evtref3)
        self.ref_obj.add_event_ref(evtref2)
        self.phoenix._replace_handle_reference('Event', '123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    # --- birth/death ref index maintenance on replacement -------------
    def test_replace_birth_lower(self):
        # When a ref before the birth ref is merged away, the index drops.
        evtref = EventRef()
        evtref.set_reference_handle('123456')
        evtref2 = EventRef()
        evtref2.set_reference_handle('654321')
        self.phoenix.add_event_ref(evtref)
        self.phoenix.add_event_ref(evtref2)
        self.phoenix.birth_ref_index = 2
        self.ref_obj.add_event_ref(evtref2)
        self.ref_obj.birth_ref_index = 1
        self.phoenix._replace_handle_reference('Event', '123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_replace_birth_minusone(self):
        # When the birth ref itself disappears, the index resets to -1.
        evtref = EventRef()
        evtref.set_reference_handle('654321')
        evtref2 = EventRef()
        evtref2.set_reference_handle('123456')
        self.phoenix.add_event_ref(evtref)
        self.phoenix.add_event_ref(evtref2)
        self.phoenix.birth_ref_index = 1
        self.ref_obj.add_event_ref(evtref2)
        self.ref_obj.birth_ref_index = -1
        self.phoenix._replace_handle_reference('Event', '123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_replace_death_lower(self):
        evtref = EventRef()
        evtref.set_reference_handle('123456')
        evtref2 = EventRef()
        evtref2.set_reference_handle('654321')
        self.phoenix.add_event_ref(evtref)
        self.phoenix.add_event_ref(evtref2)
        self.phoenix.death_ref_index = 2
        self.ref_obj.add_event_ref(evtref2)
        self.ref_obj.death_ref_index = 1
        self.phoenix._replace_handle_reference('Event', '123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_replace_death_minusone(self):
        evtref = EventRef()
        evtref.set_reference_handle('654321')
        evtref2 = EventRef()
        evtref2.set_reference_handle('123456')
        self.phoenix.add_event_ref(evtref)
        self.phoenix.add_event_ref(evtref2)
        self.phoenix.death_ref_index = 1
        self.ref_obj.add_event_ref(evtref2)
        self.ref_obj.death_ref_index = -1
        self.phoenix._replace_handle_reference('Event', '123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    # --- _replace_handle_reference for person refs --------------------
    def test_replace_personhandle_nonew(self):
        personref = PersonRef()
        personref.set_reference_handle('123456')
        self.phoenix.add_person_ref(personref)
        personref2 = PersonRef()
        personref2.set_reference_handle('654321')
        self.ref_obj.add_person_ref(personref2)
        self.phoenix._replace_handle_reference('Person', '123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_replace_personhandle_identical(self):
        personref = PersonRef()
        personref.set_reference_handle('123456')
        personref2 = PersonRef()
        personref2.set_reference_handle('234567')
        personref3 = PersonRef()
        personref3.set_reference_handle('654321')
        self.phoenix.add_person_ref(personref)
        self.phoenix.add_person_ref(personref2)
        self.phoenix.add_person_ref(personref3)
        self.ref_obj.add_person_ref(personref2)
        self.ref_obj.add_person_ref(personref3)
        self.phoenix._replace_handle_reference('Person', '123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_replace_personhandle_equal(self):
        # The privacy flag from the replaced ref survives the merge.
        personref = PersonRef()
        personref.set_reference_handle('123456')
        personref.set_privacy(True)
        personref2 = PersonRef()
        personref2.set_reference_handle('234567')
        personref3 = PersonRef()
        personref3.set_reference_handle('654321')
        personref4 = PersonRef()
        personref4.set_reference_handle('654321')
        personref4.set_privacy(True)
        self.phoenix.add_person_ref(personref)
        self.phoenix.add_person_ref(personref2)
        self.phoenix.add_person_ref(personref3)
        self.ref_obj.add_person_ref(personref2)
        self.ref_obj.add_person_ref(personref4)
        self.phoenix._replace_handle_reference('Person', '123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_replace_personhandle_different(self):
        personref = PersonRef()
        personref.set_reference_handle('123456')
        personref2 = PersonRef()
        personref2.set_reference_handle('234567')
        personref3 = PersonRef()
        personref3.set_reference_handle('654321')
        self.phoenix.add_person_ref(personref)
        self.phoenix.add_person_ref(personref2)
        self.ref_obj.add_person_ref(personref3)
        self.ref_obj.add_person_ref(personref2)
        self.phoenix._replace_handle_reference('Person', '123456', '654321')
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    # --- Person.merge: each sublist carried over ----------------------
    def test_merge_person_primaryname(self):
        # A conflicting primary name ends up as an alternate name.
        name = Name()
        name.set_first_name('Abel')
        self.titanic.set_primary_name(name)
        self.ref_obj.add_alternate_name(name)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_merge_person_altname(self):
        name = Name()
        name.set_first_name('Abel')
        self.titanic.add_alternate_name(name)
        self.ref_obj.add_alternate_name(name)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_merge_person_eventref(self):
        evtref = EventRef()
        evtref.set_reference_handle('123456')
        self.titanic.add_event_ref(evtref)
        self.ref_obj.add_event_ref(evtref)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_merge_person_ldsord(self):
        ldsord = LdsOrd()
        ldsord.set_type(LdsOrd.BAPTISM)
        self.titanic.add_lds_ord(ldsord)
        self.ref_obj.add_lds_ord(ldsord)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_merge_person_address(self):
        address = Address()
        address.set_city('The Hague')
        self.titanic.add_address(address)
        self.ref_obj.add_address(address)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_merge_person_personref(self):
        personref = PersonRef()
        personref.set_reference_handle('123456')
        self.titanic.add_person_ref(personref)
        self.ref_obj.add_person_ref(personref)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    # Placeholders (todo_ prefix keeps them out of the test run).
    def todo_test_merge_person_aschild(self):
        pass
    def todo_test_merge_person_asparent(self):
        pass
    # --- _merge_alternate_names ---------------------------------------
    def test_altname_identical(self):
        name = Name()
        name.set_first_name('Abel')
        name2 = Name()
        name2.set_first_name('Abel')
        self.phoenix.add_alternate_name(name)
        self.titanic.add_alternate_name(name2)
        self.ref_obj.add_alternate_name(name)
        self.phoenix._merge_alternate_names(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_altname_equal(self):
        name = Name()
        name.set_first_name('Abel')
        name2 = Name()
        name2.set_first_name('Abel')
        name2.set_privacy(True)
        self.phoenix.add_alternate_name(name)
        self.titanic.add_alternate_name(name2)
        self.ref_obj.add_alternate_name(name2)
        self.phoenix._merge_alternate_names(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_altname_different(self):
        name = Name()
        name.set_first_name('Abel')
        name2 = Name()
        name2.set_first_name('Cain')
        self.phoenix.add_alternate_name(name)
        self.titanic.add_alternate_name(name2)
        self.ref_obj.add_alternate_name(name)
        self.ref_obj.add_alternate_name(name2)
        self.phoenix._merge_alternate_names(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    # --- _merge_event_ref_list ----------------------------------------
    def test_eventrefs_identical(self):
        evtref = EventRef()
        evtref.set_reference_handle('123456')
        evtref2 = EventRef()
        evtref2.set_reference_handle('123456')
        self.phoenix.add_event_ref(evtref)
        self.titanic.add_event_ref(evtref2)
        self.ref_obj.add_event_ref(evtref)
        self.phoenix._merge_event_ref_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_eventrefs_equal(self):
        evtref = EventRef()
        evtref.set_reference_handle('123456')
        evtref2 = EventRef()
        evtref2.set_reference_handle('123456')
        evtref2.set_privacy(True)
        self.phoenix.add_event_ref(evtref)
        self.titanic.add_event_ref(evtref2)
        self.ref_obj.add_event_ref(evtref2)
        self.phoenix._merge_event_ref_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_eventrefs_different(self):
        evtref = EventRef()
        evtref.set_reference_handle('123456')
        evtref2 = EventRef()
        evtref2.set_reference_handle('234567')
        self.phoenix.add_event_ref(evtref)
        self.titanic.add_event_ref(evtref2)
        self.ref_obj.add_event_ref(evtref)
        self.ref_obj.add_event_ref(evtref2)
        self.phoenix._merge_event_ref_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_eventrefs_birthref(self):
        # Birth ref index is re-pointed to the appended ref's new slot.
        evtref = EventRef()
        evtref.set_reference_handle('123456')
        evtref2 = EventRef()
        evtref2.set_reference_handle('234567')
        evtref3 = EventRef()
        evtref3.set_reference_handle('123456')
        self.phoenix.add_event_ref(evtref2)
        self.titanic.add_event_ref(evtref)
        self.titanic.birth_ref_index = 0
        self.ref_obj.add_event_ref(evtref2)
        self.ref_obj.add_event_ref(evtref3)
        self.ref_obj.birth_ref_index = 1
        self.phoenix._merge_event_ref_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_eventrefs_deathref(self):
        evtref = EventRef()
        evtref.set_reference_handle('123456')
        evtref2 = EventRef()
        evtref2.set_reference_handle('234567')
        evtref3 = EventRef()
        evtref3.set_reference_handle('123456')
        self.phoenix.add_event_ref(evtref2)
        self.titanic.add_event_ref(evtref)
        self.titanic.death_ref_index = 0
        self.ref_obj.add_event_ref(evtref2)
        self.ref_obj.add_event_ref(evtref3)
        self.ref_obj.death_ref_index = 1
        self.phoenix._merge_event_ref_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    # --- _merge_person_ref_list ---------------------------------------
    def test_merge_personrefs_identical(self):
        personref = PersonRef()
        personref.set_reference_handle('123456')
        self.phoenix.add_person_ref(personref)
        self.titanic.add_person_ref(personref)
        self.ref_obj.add_person_ref(personref)
        self.phoenix._merge_person_ref_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_merge_personrefs_equal(self):
        personref = PersonRef()
        personref.set_reference_handle('123456')
        personref2 = PersonRef()
        personref2.set_reference_handle('123456')
        personref2.set_privacy(True)
        self.phoenix.add_person_ref(personref)
        self.titanic.add_person_ref(personref2)
        self.ref_obj.add_person_ref(personref2)
        self.phoenix._merge_person_ref_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_merge_personrefs_different(self):
        personref = PersonRef()
        personref.set_reference_handle('123456')
        personref2 = PersonRef()
        personref2.set_reference_handle('234567')
        self.phoenix.add_person_ref(personref)
        self.titanic.add_person_ref(personref2)
        self.ref_obj.add_person_ref(personref)
        self.ref_obj.add_person_ref(personref2)
        self.phoenix._merge_person_ref_list(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
class PlaceCheck(unittest.TestCase, PrivacyBaseTest, MediaBaseTest,
                 UrlBaseTest, NoteBaseTest, CitationBaseTest):
    """Merge tests for Place, focusing on how primary and alternative
    names combine.

    The assertions compare ``to_struct()`` output, so the order in which
    alternative names are added to ``ref_obj`` must match the merge
    result exactly (see the inline notes below).
    """
    def setUp(self):
        self.phoenix = Place()
        self.phoenix.set_title('Place 1')
        # __init__ copy has bad side effects, don't use it
        # self.titanic = Place(self.phoenix)
        self.titanic = Place()
        self.titanic.set_title('Place 1')
        # __init__ copy has bad side effects, don't use it
        # self.ref_obj = Place(self.phoenix)
        self.ref_obj = Place()
        self.ref_obj.set_title('Place 1')
        # Reusable PlaceName fixtures for the merge scenarios below.
        self.amsterdam = PlaceName()
        self.amsterdam.set_value('Amsterdam')
        self.rotterdam = PlaceName()
        self.rotterdam.set_value('Rotterdam')
        self.utrecht = PlaceName()
        self.utrecht.set_value('Utrecht')
        self.leiden = PlaceName()
        self.leiden.set_value('Leiden')
    def test_merge_primary_identical(self):
        # Same primary name on both sides: nothing new is added.
        self.phoenix.set_name(self.amsterdam)
        self.phoenix.set_type(PlaceType.CITY)
        self.titanic.set_title('Place 2')
        self.titanic.set_name(self.amsterdam)
        self.titanic.set_type(PlaceType.CITY)
        self.ref_obj.set_name(self.amsterdam)
        self.ref_obj.set_type(PlaceType.CITY)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_merge_primary_different(self):
        # The other primary name becomes an alternative name.
        self.phoenix.set_name(self.amsterdam)
        self.phoenix.set_type(PlaceType.CITY)
        self.titanic.set_title('Place 2')
        self.titanic.set_name(self.rotterdam)
        self.titanic.set_type(PlaceType.CITY)
        self.ref_obj.set_name(self.amsterdam)
        self.ref_obj.set_type(PlaceType.CITY)
        self.ref_obj.add_alternative_name(self.rotterdam)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_merge_both_different(self):
        # Both primary and alternative names differ on the two sides.
        self.phoenix.set_name(self.amsterdam)
        self.phoenix.set_type(PlaceType.CITY)
        self.phoenix.add_alternative_name(self.utrecht)
        self.titanic.set_title('Place 2')
        self.titanic.set_name(self.rotterdam)
        self.titanic.set_type(PlaceType.CITY)
        self.titanic.add_alternative_name(self.leiden)
        self.ref_obj.set_name(self.amsterdam)
        self.ref_obj.set_type(PlaceType.CITY)
        # Base name shouldn't be in alt_names list
        # self.ref_obj.add_alternative_name(self.amsterdam)
        # alt_names must be in correct order for test to pass
        self.ref_obj.add_alternative_name(self.utrecht)
        self.ref_obj.add_alternative_name(self.rotterdam)
        self.ref_obj.add_alternative_name(self.leiden)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_merge_alternative_identical(self):
        # Identical alternative names are not duplicated.
        self.phoenix.set_name(self.amsterdam)
        self.phoenix.set_type(PlaceType.CITY)
        self.phoenix.add_alternative_name(self.rotterdam)
        self.titanic.set_title('Place 2')
        self.titanic.set_name(self.amsterdam)
        self.titanic.set_type(PlaceType.CITY)
        self.titanic.add_alternative_name(self.rotterdam)
        self.ref_obj.set_name(self.amsterdam)
        self.ref_obj.set_type(PlaceType.CITY)
        self.ref_obj.add_alternative_name(self.rotterdam)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_merge_alternative_different(self):
        # A new alternative name from the other place is appended.
        self.phoenix.set_name(self.amsterdam)
        self.phoenix.set_type(PlaceType.CITY)
        self.phoenix.add_alternative_name(self.rotterdam)
        self.titanic.set_title('Place 2')
        self.titanic.set_name(self.amsterdam)
        self.titanic.set_type(PlaceType.CITY)
        self.titanic.add_alternative_name(self.utrecht)
        self.ref_obj.set_name(self.amsterdam)
        self.ref_obj.set_type(PlaceType.CITY)
        self.ref_obj.add_alternative_name(self.rotterdam)
        self.ref_obj.add_alternative_name(self.utrecht)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_merge_prialt_identical(self):
        # Other's primary name already present as an alternative: no change.
        self.phoenix.set_name(self.amsterdam)
        self.phoenix.set_type(PlaceType.CITY)
        self.phoenix.add_alternative_name(self.rotterdam)
        self.titanic.set_title('Place 2')
        self.titanic.set_name(self.rotterdam)
        self.titanic.set_type(PlaceType.CITY)
        self.ref_obj.set_name(self.amsterdam)
        self.ref_obj.set_type(PlaceType.CITY)
        self.ref_obj.add_alternative_name(self.rotterdam)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_merge_prialt2(self):
        # Other's alt name equals our primary name: not duplicated either.
        self.phoenix.set_name(self.amsterdam)
        self.phoenix.set_type(PlaceType.CITY)
        self.phoenix.add_alternative_name(self.rotterdam)
        self.titanic.set_title('Place 2')
        self.titanic.set_name(self.rotterdam)
        self.titanic.set_type(PlaceType.CITY)
        self.titanic.add_alternative_name(self.amsterdam)
        self.ref_obj.set_name(self.amsterdam)
        self.ref_obj.set_type(PlaceType.CITY)
        self.ref_obj.add_alternative_name(self.rotterdam)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
    def test_merge_empty(self):
        # Empty names on the other side are dropped by the merge.
        self.phoenix.set_name(self.amsterdam)
        self.phoenix.set_type(PlaceType.CITY)
        self.phoenix.add_alternative_name(self.rotterdam)
        self.titanic.set_title('Place 2')
        # titanic gets empty name
        self.titanic.set_type(PlaceType.CITY)
        self.titanic.add_alternative_name(self.utrecht)
        self.titanic.add_alternative_name(PlaceName()) # empty alt_name
        self.ref_obj.set_name(self.amsterdam)
        self.ref_obj.set_type(PlaceType.CITY)
        self.ref_obj.add_alternative_name(self.rotterdam)
        self.ref_obj.add_alternative_name(self.utrecht)
        self.phoenix.merge(self.titanic)
        self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
class RepoCheck(unittest.TestCase, PrivacyBaseTest, NoteBaseTest, UrlBaseTest):
def setUp(self):
self.phoenix = Repository()
self.phoenix.set_name('Repo 1')
self.phoenix.set_type(RepositoryType.LIBRARY)
self.titanic = Repository()
self.titanic.set_name('Repo 1')
self.titanic.set_type(RepositoryType.LIBRARY)
self.ref_obj = Repository()
self.ref_obj.set_name('Repo 1')
self.ref_obj.set_type(RepositoryType.LIBRARY)
def test_address(self):
address = Address()
address.set_city('Amsterdam')
self.titanic.add_address(address)
self.ref_obj.add_address(address)
self.phoenix.merge(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_replace(self):
address = Address()
address.set_city('Utrecht')
citation = Citation()
citation.set_reference_handle('123456')
address.add_citation(citation.handle)
self.phoenix.add_address(address)
address2 = Address()
address2.set_city('Utrecht')
citation2 = Citation()
citation2.set_reference_handle('654321')
address2.add_citation(citation2.handle)
self.ref_obj.add_address(address2)
self.phoenix.replace_citation_references('123456','654321')
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
class RepoRefCheck(unittest.TestCase, PrivacyBaseTest, NoteBaseTest):
def setUp(self):
self.phoenix = RepoRef()
self.phoenix.set_reference_handle('123456')
self.titanic = RepoRef(self.phoenix)
self.ref_obj = RepoRef(self.phoenix)
def test_handle_equivalence(self):
self.assertEqual(self.phoenix.is_equivalent(self.titanic), IDENTICAL)
self.titanic.set_reference_handle('654321')
self.assertEqual(self.phoenix.is_equivalent(self.titanic), DIFFERENT)
def test_callnr_equivalence(self):
self.titanic.set_call_number('10')
self.assertEqual(self.phoenix.is_equivalent(self.titanic), DIFFERENT)
def test_privacy_equivalence(self):
self.titanic.set_privacy(True)
self.assertEqual(self.phoenix.is_equivalent(self.titanic), EQUAL)
class SourceCheck(unittest.TestCase, PrivacyBaseTest, NoteBaseTest,
MediaBaseTest):
def setUp(self):
self.phoenix = Source()
self.phoenix.set_title("Source 1")
self.titanic = Source()
self.titanic.set_title("Source 1")
self.ref_obj = Source()
self.ref_obj.set_title("Source 1")
def todo_test_replace(self):
pass
def test_merge_datamap(self):
attr1 = SrcAttribute()
attr1.set_type('A')
attr1.set_value('a')
attr2 = SrcAttribute()
attr2.set_type('B')
attr2.set_value('b')
attr3 = SrcAttribute()
attr3.set_type('B')
attr3.set_value('bb')
attr4 = SrcAttribute()
attr4.set_type('C')
attr4.set_value('c')
self.phoenix.set_attribute_list([attr1, attr2])
self.titanic.set_attribute_list([attr3, attr4])
self.ref_obj.set_attribute_list([attr1, attr2, attr3, attr4])
self.phoenix.merge(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_merge_reporef(self):
reporef = RepoRef()
reporef.set_reference_handle('123456')
self.titanic.add_repo_reference(reporef)
self.ref_obj.add_repo_reference(reporef)
self.phoenix.merge(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_merge_reporef_identical(self):
reporef = RepoRef()
reporef.set_reference_handle('123456')
self.phoenix.add_repo_reference(reporef)
self.titanic.add_repo_reference(reporef)
self.ref_obj.add_repo_reference(reporef)
self.phoenix._merge_reporef_list(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_merge_reporef_equal(self):
reporef = RepoRef()
reporef.set_reference_handle('123456')
reporef2 = RepoRef()
reporef2.set_reference_handle('123456')
reporef2.set_privacy(True)
self.phoenix.add_repo_reference(reporef)
self.titanic.add_repo_reference(reporef2)
self.ref_obj.add_repo_reference(reporef2)
self.phoenix._merge_reporef_list(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_merge_reporef_different(self):
reporef = RepoRef()
reporef.set_reference_handle('123456')
reporef2 = RepoRef()
reporef2.set_reference_handle('234567')
self.phoenix.add_repo_reference(reporef)
self.titanic.add_repo_reference(reporef2)
self.ref_obj.add_repo_reference(reporef)
self.ref_obj.add_repo_reference(reporef2)
self.phoenix._merge_reporef_list(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_replace_reporef_nonew(self):
reporef = RepoRef()
reporef.set_reference_handle('123456')
reporef2 = RepoRef()
reporef2.set_reference_handle('654321')
self.phoenix.add_repo_reference(reporef)
self.ref_obj.add_repo_reference(reporef2)
self.phoenix.replace_repo_references('123456','654321')
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_replace_reporef_identical(self):
reporef = RepoRef()
reporef.set_reference_handle('123456')
reporef2 = RepoRef()
reporef2.set_reference_handle('234567')
reporef3 = RepoRef()
reporef3.set_reference_handle('654321')
self.phoenix.add_repo_reference(reporef)
self.phoenix.add_repo_reference(reporef2)
self.phoenix.add_repo_reference(reporef3)
self.ref_obj.add_repo_reference(reporef2)
self.ref_obj.add_repo_reference(reporef3)
self.phoenix.replace_repo_references('123456','654321')
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_replace_reporef_equal(self):
reporef = RepoRef()
reporef.set_reference_handle('123456')
reporef2 = RepoRef()
reporef2.set_reference_handle('234567')
reporef3 = RepoRef()
reporef3.set_reference_handle('654321')
reporef3.set_privacy(True)
self.phoenix.add_repo_reference(reporef)
self.phoenix.add_repo_reference(reporef2)
self.phoenix.add_repo_reference(reporef3)
self.ref_obj.add_repo_reference(reporef2)
self.ref_obj.add_repo_reference(reporef3)
self.phoenix.replace_repo_references('123456','654321')
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_replace_reporef_different(self):
reporef = RepoRef()
reporef.set_reference_handle('123456')
reporef2 = RepoRef()
reporef2.set_reference_handle('234567')
reporef3 = RepoRef()
reporef3.set_reference_handle('654321')
reporef3.set_call_number('100')
reporef4 = RepoRef()
reporef4.set_reference_handle('654321')
self.phoenix.add_repo_reference(reporef)
self.phoenix.add_repo_reference(reporef2)
self.phoenix.add_repo_reference(reporef3)
self.ref_obj.add_repo_reference(reporef4)
self.ref_obj.add_repo_reference(reporef2)
self.ref_obj.add_repo_reference(reporef3)
self.phoenix.replace_repo_references('123456','654321')
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
class CitationBaseCheck(unittest.TestCase):
def setUp(self):
self.phoenix = CitationBase()
citation = Citation()
citation.set_reference_handle('123456')
self.phoenix.add_citation(citation.handle)
self.titanic = CitationBase()
self.obj_list = CitationBase()
def test_replace_nonew(self):
citation = Citation()
citation.set_reference_handle('654321')
self.obj_list.add_citation(citation.handle)
self.phoenix.replace_citation_references('123456','654321')
self.assertEqual(self.phoenix.to_struct(), self.obj_list.to_struct())
def test_replace_newpresent(self):
citation = Citation()
citation.set_reference_handle('654321')
citation.set_page('p.10')
citation2 = Citation()
citation2.set_reference_handle('234567')
self.phoenix.add_citation(citation.handle)
self.phoenix.add_citation(citation2.handle)
self.obj_list.add_citation(citation2.handle)
self.obj_list.add_citation(citation.handle)
self.phoenix.replace_citation_references('123456','654321')
self.assertEqual(self.phoenix.to_struct(), self.obj_list.to_struct())
def todo_test_replace_child(self):
pass
def test_merge_identical(self):
citation = Citation()
citation.set_reference_handle('123456')
self.titanic.add_citation(citation.handle)
self.obj_list.add_citation(citation.handle)
self.phoenix._merge_citation_list(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.obj_list.to_struct())
def test_merge_different(self):
citation = Citation()
citation.set_reference_handle('234567')
citation2 = Citation()
citation2.set_reference_handle('123456')
self.titanic.add_citation(citation.handle)
self.obj_list.add_citation(citation2.handle)
self.obj_list.add_citation(citation.handle)
self.phoenix._merge_citation_list(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.obj_list.to_struct())
class CitationCheck(unittest.TestCase, PrivacyBaseTest, MediaBaseTest,
NoteBaseTest):
def setUp(self):
self.phoenix = Citation()
self.phoenix.set_reference_handle('123456')
self.phoenix.set_page('p.10')
self.titanic = Citation()
self.titanic.set_reference_handle('123456')
self.titanic.set_page('p.10')
self.ref_obj = Citation()
self.ref_obj.set_reference_handle('123456')
self.ref_obj.set_page('p.10')
def test_merge_confidence(self):
known_values = ( (0, 0, 0), (0, 1, 0), (0, 2, 0), (0, 3, 0), (0, 4, 0),
(1, 0, 0), (1, 1, 1), (1, 2, 1), (1, 3, 1), (1, 4, 4),
(2, 0, 0), (2, 1, 1), (2, 2, 2), (2, 3, 3), (2, 4, 4),
(3, 0, 0), (3, 1, 1), (3, 2, 3), (3, 3, 3), (3, 4, 4),
(4, 0, 0), (4, 1, 4), (4, 2, 4), (4, 3, 4), (4, 4, 4))
for val1, val2, val_merge in known_values:
self.phoenix.set_confidence_level(val1)
self.titanic.set_confidence_level(val2)
self.ref_obj.set_confidence_level(val_merge)
self.phoenix.merge(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
def test_merge_datamap(self):
attr1 = SrcAttribute()
attr1.set_type('A')
attr1.set_value('a')
attr2 = SrcAttribute()
attr2.set_type('B')
attr2.set_value('b')
attr3 = SrcAttribute()
attr3.set_type('B')
attr3.set_value('bb')
attr4 = SrcAttribute()
attr4.set_type('C')
attr4.set_value('c')
self.phoenix.set_attribute_list([attr1, attr2])
self.titanic.set_attribute_list([attr3, attr4])
self.ref_obj.set_attribute_list([attr1, attr2, attr3, attr4])
self.phoenix.merge(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_obj.to_struct())
class SurnameCheck(unittest.TestCase):
def setUp(self):
self.phoenix = Surname()
self.phoenix.set_prefix('van')
self.titanic = Surname(self.phoenix)
def test_datalist_equivalence(self):
self.assertEqual(self.phoenix.is_equivalent(self.titanic), IDENTICAL)
self.titanic.set_prefix('von')
self.assertEqual(self.phoenix.is_equivalent(self.titanic), DIFFERENT)
def test_primary_equivalence(self):
self.titanic.set_primary(False)
self.assertEqual(self.phoenix.is_equivalent(self.titanic), DIFFERENT)
# A Surname can never be EQUAL to another Surname.
# There is no merge method to check.
class SurnameBaseCheck(unittest.TestCase):
def setUp(self):
self.phoenix = SurnameBase()
surname = Surname()
surname.set_surname("Oranje")
self.phoenix.add_surname(surname)
self.titanic = SurnameBase()
self.ref_list = SurnameBase()
def test_identical(self):
surname = Surname()
surname.set_surname("Oranje")
self.ref_list.add_surname(surname)
self.titanic.add_surname(surname)
self.phoenix._merge_surname_list(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())
def test_different(self):
surname = Surname()
surname.set_surname("Biesterfelt")
self.titanic.add_surname(surname)
self.ref_list = SurnameBase(self.phoenix)
self.ref_list.add_surname(surname)
self.phoenix._merge_surname_list(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())
class TagBaseCheck(unittest.TestCase):
def setUp(self):
self.phoenix = TagBase()
tag_handle = '123456'
self.phoenix.add_tag(tag_handle)
self.titanic = TagBase()
def test_identical(self):
self.ref_list = TagBase(self.phoenix)
self.titanic.add_tag(self.phoenix.get_tag_list()[0])
self.phoenix._merge_tag_list(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())
def test_different(self):
self.titanic.set_tag_list([])
tag_handle = '654321'
self.titanic.add_tag(tag_handle)
self.ref_list = TagBase(self.phoenix)
self.ref_list.add_tag(tag_handle)
self.phoenix._merge_tag_list(self.titanic)
self.assertEqual(self.phoenix.to_struct(), self.ref_list.to_struct())
if __name__ == "__main__":
unittest.main()
|
sam-m888/gprime
|
gprime/lib/test/merge_test.py
|
Python
|
gpl-2.0
| 77,654
|
import abjad
metadata = abjad.OrderedDict([("title", "Blue Score"), ("year", 2017)])
|
Abjad/ide
|
scores/blue_score/blue_score/__metadata__.py
|
Python
|
gpl-2.0
| 86
|
#!/usr/bin/env python
import os, sys
import glob, re
import subprocess as sp
class TestBase:
supported_lang = {
'C': { 'cc': 'gcc', 'flags': 'CFLAGS', 'ext': '.c' },
'C++': { 'cc': 'g++', 'flags': 'CXXFLAGS', 'ext': '.cpp' },
}
TEST_SUCCESS = 0
TEST_UNSUPP_LANG = -1
TEST_BUILD_FAIL = -2
TEST_ABNORMAL_EXIT = -3
TEST_TIME_OUT = -4
TEST_DIFF_RESULT = -5
TEST_NONZERO_RETURN = -6
TEST_SKIP = -7
TEST_SUCCESS_FIXED = -8
objdir = 'objdir' in os.environ and os.environ['objdir'] or '..'
ftrace = objdir + '/uftrace --no-pager -L' + objdir
default_cflags = ['-fno-inline', '-fno-builtin', '-fno-omit-frame-pointer']
def __init__(self, name, result, lang='C', cflags='', ldflags='', sort='task'):
self.name = name
self.result = result
self.cflags = cflags
self.ldflags = ldflags
self.lang = lang
self.sort_method = sort
def set_debug(self, dbg):
self.debug = dbg
def pr_debug(self, msg):
if self.debug:
print(msg)
def build_it(self, build_cmd):
try:
p = sp.Popen(build_cmd.split(), stderr=sp.PIPE)
if p.wait() != 0:
self.pr_debug(p.communicate()[1].decode(errors='ignore'))
return TestBase.TEST_BUILD_FAIL
return TestBase.TEST_SUCCESS
except OSError as e:
self.pr_debug(e.strerror)
return TestBase.TEST_BUILD_FAIL
except:
return TestBase.TEST_BUILD_FAIL
def build(self, name, cflags='', ldflags=''):
if self.lang not in TestBase.supported_lang:
pr_debug("%s: unsupported language: %s" % (name, self.lang))
return TestBase.TEST_UNSUPP_LANG
lang = TestBase.supported_lang[self.lang]
prog = 't-' + name
src = 's-' + name + lang['ext']
build_cflags = ' '.join(TestBase.default_cflags + [self.cflags, cflags, \
os.getenv(lang['flags'], '')])
build_ldflags = ' '.join([self.ldflags, ldflags, \
os.getenv('LDFLAGS', '')])
build_cmd = '%s -o %s %s %s %s' % \
(lang['cc'], prog, build_cflags, src, build_ldflags)
self.pr_debug("build command: %s" % build_cmd)
return self.build_it(build_cmd)
def build_libabc(self, cflags='', ldflags=''):
lang = TestBase.supported_lang['C']
build_cflags = ' '.join(TestBase.default_cflags + [self.cflags, cflags, \
os.getenv(lang['flags'], '')])
build_ldflags = ' '.join([self.ldflags, ldflags, \
os.getenv('LDFLAGS', '')])
lib_cflags = build_cflags + ' -shared -fPIC'
# build libabc_test_lib.so library
build_cmd = '%s -o libabc_test_lib.so %s s-lib.c %s' % \
(lang['cc'], lib_cflags, build_ldflags)
self.pr_debug("build command for library: %s" % build_cmd)
return self.build_it(build_cmd)
def build_libfoo(self, name, cflags='', ldflags=''):
prog = 't-' + name
lang = TestBase.supported_lang['C++']
build_cflags = ' '.join(TestBase.default_cflags + [self.cflags, cflags, \
os.getenv(lang['flags'], '')])
build_ldflags = ' '.join([self.ldflags, ldflags, \
os.getenv('LDFLAGS', '')])
lib_cflags = build_cflags + ' -shared -fPIC'
# build lib{foo}.so library
build_cmd = '%s -o lib%s.so %s s-lib%s%s %s' % \
(lang['cc'], name, lib_cflags, name, lang['ext'], build_ldflags)
self.pr_debug("build command for library: %s" % build_cmd)
return self.build_it(build_cmd)
def build_libmain(self, exename, srcname, libs, cflags='', ldflags=''):
if self.lang not in TestBase.supported_lang:
self.pr_debug("%s: unsupported language: %s" % (self.name, self.lang))
return TestBase.TEST_UNSUPP_LANG
lang = TestBase.supported_lang[self.lang]
prog = 't-' + exename
build_cflags = ' '.join(TestBase.default_cflags +
[self.cflags, cflags, os.getenv(lang['flags'], '')])
build_ldflags = ' '.join([self.ldflags, ldflags, os.getenv('LDFLAGS', '')])
exe_ldflags = build_ldflags + ' -Wl,-rpath,$ORIGIN -L. '
for lib in libs:
exe_ldflags += ' -l' + lib[3:-3]
build_cmd = '%s -o %s %s %s %s' % (lang['cc'], prog, build_cflags, srcname, exe_ldflags)
self.pr_debug("build command for executable: %s" % build_cmd)
return self.build_it(build_cmd)
def runcmd(self):
""" This function returns (shell) command that runs the test.
A test case can extend this to setup a complex configuration. """
return '%s %s' % (TestBase.ftrace, 't-' + self.name)
def task_sort(self, output, ignore_children=False):
""" This function post-processes output of the test to be compared .
It ignores blank and comment (#) lines and remaining functions. """
pids = {}
order = 1
before_main = True
for ln in output.split('\n'):
if ln.find(' | main()') > 0:
before_main = False
if before_main:
continue
# ignore result of remaining functions which follows a blank line
if ln.strip() == '':
break;
pid_patt = re.compile('[^[]*\[ *(\d+)\] |')
m = pid_patt.match(ln)
try:
pid = int(m.group(1))
except:
continue
func = ln.split('|', 1)[-1]
if pid not in pids:
pids[pid] = { 'order': order }
pids[pid]['result'] = []
order += 1
pids[pid]['result'].append(func)
result = ''
pid_list = sorted(list(pids), key=lambda p: pids[p]['order'])
try:
if ignore_children:
result += '\n'.join(pids[pid_list[0]]['result'])
else:
for p in pid_list:
result += '\n'.join(pids[p]['result']) + '\n'
result = result.strip()
except:
pass # this leads to a failure with 'NG'
return result
def simple_sort(self, output, ignored):
""" This function post-processes output of the test to be compared .
It ignores blank and comment (#) lines and remaining functions. """
result = []
for ln in output.split('\n'):
# ignore blank lines and comments
if ln.strip() == '' or ln.startswith('#'):
continue
func = ln.split('|', 1)[-1]
result.append(func)
return '\n'.join(result)
def report_sort(self, output, ignored):
""" This function post-processes output of the test to be compared .
It ignores blank and comment (#) lines and remaining functions. """
result = []
for ln in output.split('\n'):
if ln.strip() == '':
continue
line = ln.split()
if line[0] == 'Total':
continue
if line[0].startswith('='):
continue
# A report line consists of following data
# [0] [1] [2] [3] [4] [5]
# total_time unit self_time unit called function
if line[5].startswith('__'):
continue
result.append('%s %s' % (line[4], line[5]))
return '\n'.join(result)
def graph_sort(self, output, ignored):
""" This function post-processes output of the test to be compared.
It ignores blank and comment (#) lines and header lines. """
result = []
mode = 0
for ln in output.split('\n'):
if ln.strip() == '' or ln.startswith('#'):
continue
# A graph result consists of backtrace and calling functions
if ln.startswith('=============== BACKTRACE ==============='):
mode = 1
continue
if ln.startswith('========== FUNCTION CALL GRAPH =========='):
mode = 2
continue
if mode == 1:
if ln.startswith(' backtrace #'):
result.append(ln.split(',')[0]) # remove time part
if ln.startswith(' ['):
result.append(ln.split('(')[0]) # remove '(addr)' part
if mode == 2:
result.append(ln.split(':')[1]) # remove time part
return '\n'.join(result)
def dump_sort(self, output, ignored):
""" This function post-processes output of the test to be compared .
It ignores blank and comment (#) lines and remaining functions. """
import re
# A (raw) dump result consists of following data
# <timestamp> <tid>: [<type>] <func>(<addr>) depth: <N>
mode = 1
patt = re.compile(r'[^[]*(?P<type>\[(entry|exit )\]) (?P<func>[_a-z0-9]*)\([0-9a-f]+\) (?P<depth>.*)')
result = []
for ln in output.split('\n'):
if ln.startswith('uftrace'):
result.append(ln)
else:
m = patt.match(ln)
if m is None:
continue
# ignore __monstartup and __cxa_atexit
if m.group('func').startswith('__'):
continue
result.append(patt.sub(r'\g<type> \g<depth> \g<func>', ln))
return '\n'.join(result)
def chrome_sort(self, output, ignored):
""" This function post-processes output of the test to be compared .
It ignores blank and comment (#) lines and remaining functions. """
import json
# A chrome dump results consists of following JSON object:
# {"ts": <timestamp>, "ph": <type>, "pid": <number>, "name": <func>}
result = []
try:
o = json.loads(output)
except:
return ''
for ln in o['traceEvents']:
if ln['name'].startswith('__'):
continue
result.append("%s %s" % (ln['ph'], ln['name']))
return '\n'.join(result)
def sort(self, output, ignore_children=False):
if not hasattr(TestBase, self.sort_method + '_sort'):
print('cannot find the sort function: %s' % self.sort_method)
return '' # this leads to a failure with 'NG'
func = TestBase.__dict__[self.sort_method + '_sort']
if callable(func):
return func(self, output, ignore_children)
else:
return '' # this leads to a failure with 'NG'
def pre(self):
"""This function is called before running a testcase"""
return TestBase.TEST_SUCCESS
def post(self, result):
"""This function is called after running a testcase"""
return result
def fixup(self, cflags, result):
"""This function is called when result is different to expected.
But if we know some known difference on some optimization level,
apply it and re-test with the modified result."""
return result
def run(self, name, cflags, diff):
ret = TestBase.TEST_SUCCESS
test_cmd = self.runcmd()
self.pr_debug("test command: %s" % test_cmd)
p = sp.Popen(test_cmd, shell=True, stdout=sp.PIPE, stderr=sp.PIPE)
timed_out = False
def timeout(sig, frame):
timed_out = True
try:
p.kill()
except:
pass
import signal
signal.signal(signal.SIGALRM, timeout)
result_expect = self.sort(self.result)
signal.alarm(5)
result_origin = p.communicate()[0].decode(errors='ignore')
result_tested = self.sort(result_origin) # for python3
signal.alarm(0)
ret = p.wait()
if ret < 0:
if timed_out:
return TestBase.TEST_TIME_OUT
else:
return TestBase.TEST_ABNORMAL_EXIT
if ret > 0:
if ret == 2:
return TestBase.TEST_ABNORMAL_EXIT
return TestBase.TEST_NONZERO_RETURN
self.pr_debug("=========== %s ===========\n%s" % ("original", result_origin))
self.pr_debug("=========== %s ===========\n%s" % (" result ", result_tested))
self.pr_debug("=========== %s ===========\n%s" % ("expected", result_expect))
if result_expect.strip() == '':
return TestBase.TEST_DIFF_RESULT
if result_expect != result_tested:
result_expect = self.sort(self.fixup(cflags, self.result))
ret = TestBase.TEST_SUCCESS_FIXED
if result_expect != result_tested:
if diff:
f = open('expect', 'w')
f.write(result_expect + '\n')
f.close()
f = open('result', 'w')
f.write(result_tested + '\n')
f.close()
p = sp.Popen(['diff', '-U1', 'expect', 'result'], stdout=sp.PIPE)
print("%s: diff result of %s" % (name, cflags))
print(p.communicate()[0].decode(errors='ignore'))
os.remove('expect')
os.remove('result')
return TestBase.TEST_DIFF_RESULT
return ret
RED = '\033[1;31m'
GREEN = '\033[1;32m'
YELLOW = '\033[1;33m'
NORMAL = '\033[0m'
colored_result = {
TestBase.TEST_SUCCESS: GREEN + 'OK' + NORMAL,
TestBase.TEST_UNSUPP_LANG: YELLOW + 'LA' + NORMAL,
TestBase.TEST_BUILD_FAIL: YELLOW + 'BI' + NORMAL,
TestBase.TEST_ABNORMAL_EXIT: RED + 'SG' + NORMAL,
TestBase.TEST_TIME_OUT: RED + 'TM' + NORMAL,
TestBase.TEST_DIFF_RESULT: RED + 'NG' + NORMAL,
TestBase.TEST_NONZERO_RETURN: RED + 'NZ' + NORMAL,
TestBase.TEST_SKIP: YELLOW + 'SK' + NORMAL,
TestBase.TEST_SUCCESS_FIXED: YELLOW + 'OK' + NORMAL,
}
text_result = {
TestBase.TEST_SUCCESS: 'OK',
TestBase.TEST_UNSUPP_LANG: 'LA',
TestBase.TEST_BUILD_FAIL: 'BI',
TestBase.TEST_ABNORMAL_EXIT: 'SG',
TestBase.TEST_TIME_OUT: 'TM',
TestBase.TEST_DIFF_RESULT: 'NG',
TestBase.TEST_NONZERO_RETURN: 'NZ',
TestBase.TEST_SKIP: 'SK',
TestBase.TEST_SUCCESS_FIXED: 'OK',
}
result_string = {
TestBase.TEST_SUCCESS: 'Test succeeded',
TestBase.TEST_UNSUPP_LANG: 'Unsupported Language',
TestBase.TEST_BUILD_FAIL: 'Build failed',
TestBase.TEST_ABNORMAL_EXIT: 'Abnormal exit by signal',
TestBase.TEST_TIME_OUT: 'Test ran too long',
TestBase.TEST_DIFF_RESULT: 'Different test result',
TestBase.TEST_NONZERO_RETURN: 'Non-zero return value',
TestBase.TEST_SKIP: 'Skipped',
TestBase.TEST_SUCCESS_FIXED: 'Test succeeded (with some fixup)',
}
def run_single_case(case, flags, opts, diff, dbg):
result = []
# for python3
_locals = {}
exec("import %s; tc = %s.TestCase()" % (case, case), globals(), _locals)
tc = _locals['tc']
tc.set_debug(dbg)
for flag in flags:
for opt in opts:
cflags = ' '.join(["-" + flag, "-" + opt])
ret = tc.build(tc.name, cflags)
if ret == TestBase.TEST_SUCCESS:
ret = tc.pre()
if ret == TestBase.TEST_SUCCESS:
ret = tc.run(case, cflags, diff)
ret = tc.post(ret)
result.append(ret)
return result
def print_test_result(case, result, color):
if sys.stdout.isatty() and color:
result_list = [colored_result[r] for r in result]
else:
result_list = [text_result[r] for r in result]
output = case[1:4]
output += ' %-20s' % case[5:] + ': ' + ' '.join(result_list) + '\n'
sys.stdout.write(output)
def parse_argument():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--profile-flags", dest='flags',
default="pg finstrument-functions",
help="comma separated list of compiler profiling flags")
parser.add_argument("-O", "--optimize-levels", dest='opts', default="0123s",
help="compiler optimization levels")
parser.add_argument("case", nargs='?', default="all",
help="test case: 'all' or test number or (partial) name")
parser.add_argument("-p", "--profile-pg", dest='pg_flag', action='store_true',
help="profiling with -pg option")
parser.add_argument("-i", "--instrument-functions", dest='if_flag', action='store_true',
help="profiling with -finstrument-functions option")
parser.add_argument("-d", "--diff", dest='diff', action='store_true',
help="show diff result if not matched")
parser.add_argument("-v", "--verbose", dest='debug', action='store_true',
help="show internal command and result for debugging")
parser.add_argument("-n", "--no-color", dest='color', action='store_false',
help="suppress color in the output")
return parser.parse_args()
if __name__ == "__main__":
arg = parse_argument()
if arg.case == 'all':
testcases = glob.glob('t???_*.py')
else:
try:
testcases = glob.glob('t*' + arg.case + '*.py')
finally:
if len(testcases) == 0:
print("cannot find testcase for : %s" % arg.case)
sys.exit(0)
opts = ' '.join(sorted(['O'+o for o in arg.opts]))
optslen = len(opts);
header1 = '%-24s ' % 'Test case'
header2 = '-' * 24 + ':'
empty = ' '
if arg.pg_flag:
flags = ['pg']
elif arg.if_flag:
flags = ['finstrument-functions']
else:
flags = arg.flags.split()
for flag in flags:
# align with optimization flags
header1 += ' ' + flag[:optslen] + empty[len(flag):optslen]
header2 += ' ' + opts
print(header1)
print(header2)
total = 0
res = []
res.append(TestBase.TEST_SUCCESS)
res.append(TestBase.TEST_SUCCESS_FIXED)
res.append(TestBase.TEST_DIFF_RESULT)
res.append(TestBase.TEST_NONZERO_RETURN)
res.append(TestBase.TEST_ABNORMAL_EXIT)
res.append(TestBase.TEST_TIME_OUT)
res.append(TestBase.TEST_BUILD_FAIL)
res.append(TestBase.TEST_UNSUPP_LANG)
res.append(TestBase.TEST_SKIP)
stats = dict.fromkeys(res, 0)
for tc in sorted(testcases):
name = tc[:-3] # remove '.py'
result = run_single_case(name, flags, opts.split(), arg.diff, arg.debug)
print_test_result(name, result, arg.color)
for r in result:
stats[r] += 1
total += 1
success = stats[TestBase.TEST_SUCCESS] + stats[TestBase.TEST_SUCCESS_FIXED]
percent = 100.0 * success / total
print("")
print("runtime test stats")
print("====================")
print("total %5d Tests executed (success: %.2f%%)" % (total, percent))
for r in res:
if sys.stdout.isatty() and arg.color:
result = colored_result[r]
else:
result = text_result[r]
print(" %s: %5d %s" % (result, stats[r], result_string[r]))
|
andrewjss/uftrace
|
tests/runtest.py
|
Python
|
gpl-2.0
| 19,539
|
# -*- coding: utf-8 -*-
# Copyright 2014-2021 Mike Fährmann
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
"""Extractors for https://www.pixiv.net/"""
from .common import Extractor, Message
from .. import text, util, exception
from ..cache import cache
from datetime import datetime, timedelta
import itertools
import hashlib
import time
class PixivExtractor(Extractor):
"""Base class for pixiv extractors"""
category = "pixiv"
directory_fmt = ("{category}", "{user[id]} {user[account]}")
filename_fmt = "{id}_p{num}.{extension}"
archive_fmt = "{id}{suffix}.{extension}"
cookiedomain = None
def __init__(self, match):
Extractor.__init__(self, match)
self.api = PixivAppAPI(self)
self.load_ugoira = self.config("ugoira", True)
self.max_posts = self.config("max-posts", 0)
def items(self):
tags = self.config("tags", "japanese")
if tags == "original":
transform_tags = None
elif tags == "translated":
def transform_tags(work):
work["tags"] = list(set(
tag["translated_name"] or tag["name"]
for tag in work["tags"]))
else:
def transform_tags(work):
work["tags"] = [tag["name"] for tag in work["tags"]]
ratings = {0: "General", 1: "R-18", 2: "R-18G"}
metadata = self.metadata()
works = self.works()
if self.max_posts:
works = itertools.islice(works, self.max_posts)
for work in works:
if not work["user"]["id"]:
continue
meta_single_page = work["meta_single_page"]
meta_pages = work["meta_pages"]
del work["meta_single_page"]
del work["image_urls"]
del work["meta_pages"]
if transform_tags:
transform_tags(work)
work["num"] = 0
work["date"] = text.parse_datetime(work["create_date"])
work["rating"] = ratings.get(work["x_restrict"])
work["suffix"] = ""
work.update(metadata)
yield Message.Directory, work
if work["type"] == "ugoira":
if not self.load_ugoira:
continue
ugoira = self.api.ugoira_metadata(work["id"])
url = ugoira["zip_urls"]["medium"].replace(
"_ugoira600x600", "_ugoira1920x1080")
work["frames"] = ugoira["frames"]
work["_http_adjust_extension"] = False
yield Message.Url, url, text.nameext_from_url(url, work)
elif work["page_count"] == 1:
url = meta_single_page["original_image_url"]
yield Message.Url, url, text.nameext_from_url(url, work)
else:
for work["num"], img in enumerate(meta_pages):
url = img["image_urls"]["original"]
work["suffix"] = "_p{:02}".format(work["num"])
yield Message.Url, url, text.nameext_from_url(url, work)
def works(self):
"""Return an iterable containing all relevant 'work'-objects"""
def metadata(self):
"""Collect metadata for extractor-job"""
return {}
class PixivUserExtractor(PixivExtractor):
    """Extractor for works of a pixiv-user"""
    subcategory = "user"
    # alternation order: modern /users/<id> URLs (optionally with a tag),
    # legacy member(_illust).php?id=..., then short /u/, /user/ and
    # mypage.php#id= forms
    pattern = (r"(?:https?://)?(?:www\.|touch\.)?pixiv\.net/(?:"
               r"(?:en/)?users/(\d+)(?:/(?:artworks|illustrations|manga)"
               r"(?:/([^/?#]+))?)?/?(?:$|[?#])"
               r"|member(?:_illust)?\.php\?id=(\d+)(?:&([^#]+))?"
               r"|(?:u(?:ser)?/|(?:mypage\.php)?#id=)(\d+))")
    test = (
        ("https://www.pixiv.net/en/users/173530/artworks", {
            "url": "852c31ad83b6840bacbce824d85f2a997889efb7",
        }),
        # illusts with specific tag
        (("https://www.pixiv.net/en/users/173530/artworks"
          "/%E6%89%8B%E3%81%B6%E3%82%8D"), {
            "url": "25b1cd81153a8ff82eec440dd9f20a4a22079658",
        }),
        (("https://www.pixiv.net/member_illust.php?id=173530"
          "&tag=%E6%89%8B%E3%81%B6%E3%82%8D"), {
            "url": "25b1cd81153a8ff82eec440dd9f20a4a22079658",
        }),
        # avatar (#595, 623)
        ("https://www.pixiv.net/en/users/173530", {
            "options": (("avatar", True),),
            "content": "4e57544480cc2036ea9608103e8f024fa737fe66",
            "range": "1",
        }),
        # deleted account
        ("http://www.pixiv.net/member_illust.php?id=173531", {
            "options": (("metadata", True),),
            "exception": exception.NotFoundError,
        }),
        ("https://www.pixiv.net/en/users/173530"),
        ("https://www.pixiv.net/en/users/173530/manga"),
        ("https://www.pixiv.net/en/users/173530/illustrations"),
        ("https://www.pixiv.net/member_illust.php?id=173530"),
        ("https://www.pixiv.net/u/173530"),
        ("https://www.pixiv.net/user/173530"),
        ("https://www.pixiv.net/mypage.php#id=173530"),
        ("https://www.pixiv.net/#id=173530"),
        ("https://touch.pixiv.net/member_illust.php?id=173530"),
    )

    def __init__(self, match):
        PixivExtractor.__init__(self, match)
        # (u1, t1): /users/<id>[/.../<tag>] layout,
        # (u2, t2): member_illust.php?id=<id>&<query> layout,
        # u3: short forms without a tag
        u1, t1, u2, t2, u3 = match.groups()
        if t1:
            t1 = text.unquote(t1)
        elif t2:
            t2 = text.parse_query(t2).get("tag")
        self.user_id = u1 or u2 or u3
        self.tag = t1 or t2

    def metadata(self):
        """Optionally fetch user details (also validates account existence)"""
        if self.config("metadata"):
            return {"user": self.api.user_detail(self.user_id)}
        return {}

    def works(self):
        works = self.api.user_illusts(self.user_id)

        if self.tag:
            # client-side, case-insensitive tag filter
            tag = self.tag.lower()
            works = (
                work for work in works
                if tag in [t["name"].lower() for t in work["tags"]]
            )

        if self.config("avatar"):
            user = self.api.user_detail(self.user_id)
            # drop the "_170." size suffix to get the full-size avatar
            url = user["profile_image_urls"]["medium"].replace("_170.", ".")
            # synthetic 'work' dict so the avatar flows through the normal
            # download pipeline like any other single-page work
            avatar = {
                "create_date"     : None,
                "height"          : 0,
                "id"              : "avatar",
                "image_urls"      : None,
                "meta_pages"      : (),
                "meta_single_page": {"original_image_url": url},
                "page_count"      : 1,
                "sanity_level"    : 0,
                "tags"            : (),
                "title"           : "avatar",
                "type"            : "avatar",
                "user"            : user,
                "width"           : 0,
                "x_restrict"      : 0,
            }
            works = itertools.chain((avatar,), works)

        return works
class PixivMeExtractor(PixivExtractor):
    """Extractor for pixiv.me URLs"""
    subcategory = "me"
    pattern = r"(?:https?://)?pixiv\.me/([^/?#]+)"
    test = (
        ("https://pixiv.me/del_shannon", {
            "url": "29c295ce75150177e6b0a09089a949804c708fbf",
        }),
        ("https://pixiv.me/del_shanno", {
            "exception": exception.NotFoundError,
        }),
    )

    def __init__(self, match):
        PixivExtractor.__init__(self, match)
        self.account = match.group(1)

    def items(self):
        """Resolve the pixiv.me short URL and enqueue the target profile"""
        url = "https://pixiv.me/" + self.account
        data = {"_extractor": PixivUserExtractor}
        # HEAD request without following the redirect; the 'Location'
        # header carries the real pixiv.net profile URL
        response = self.request(
            url, method="HEAD", allow_redirects=False, notfound="user")
        yield Message.Queue, response.headers["Location"], data
class PixivWorkExtractor(PixivExtractor):
    """Extractor for a single pixiv work/illustration"""
    subcategory = "work"
    # matches artwork pages as well as direct image-server URLs; only the
    # numeric illust ID is captured (group 1 or group 2)
    pattern = (r"(?:https?://)?(?:(?:www\.|touch\.)?pixiv\.net"
               r"/(?:(?:en/)?artworks/"
               r"|member_illust\.php\?(?:[^&]+&)*illust_id=)(\d+)"
               r"|(?:i(?:\d+\.pixiv|\.pximg)\.net"
               r"/(?:(?:.*/)?img-[^/]+/img/\d{4}(?:/\d\d){5}|img\d+/img/[^/]+)"
               r"|img\d*\.pixiv\.net/img/[^/]+|(?:www\.)?pixiv\.net/i)/(\d+))")
    test = (
        ("https://www.pixiv.net/artworks/966412", {
            "url": "90c1715b07b0d1aad300bce256a0bc71f42540ba",
            "content": "69a8edfb717400d1c2e146ab2b30d2c235440c5a",
        }),
        (("http://www.pixiv.net/member_illust.php"
          "?mode=medium&illust_id=966411"), {
            "exception": exception.NotFoundError,
        }),
        # ugoira
        (("https://www.pixiv.net/member_illust.php"
          "?mode=medium&illust_id=66806629"), {
            "url": "7267695a985c4db8759bebcf8d21dbdd2d2317ef",
            "keywords": {"frames": list},
        }),
        # related works (#1237)
        ("https://www.pixiv.net/artworks/966412", {
            "options": (("related", True),),
            "range": "1-10",
            "count": ">= 10",
        }),
        ("https://www.pixiv.net/en/artworks/966412"),
        ("http://www.pixiv.net/member_illust.php?mode=medium&illust_id=96641"),
        ("http://i1.pixiv.net/c/600x600/img-master"
         "/img/2008/06/13/00/29/13/966412_p0_master1200.jpg"),
        ("https://i.pximg.net/img-original"
         "/img/2017/04/25/07/33/29/62568267_p0.png"),
        ("https://www.pixiv.net/i/966412"),
        ("http://img.pixiv.net/img/soundcross/42626136.jpg"),
        ("http://i2.pixiv.net/img76/img/snailrin/42672235.jpg"),
    )

    def __init__(self, match):
        PixivExtractor.__init__(self, match)
        self.illust_id = match.group(1) or match.group(2)

    def works(self):
        works = (self.api.illust_detail(self.illust_id),)
        if self.config("related", False):
            # optionally also yield works pixiv lists as related
            related = self.api.illust_related(self.illust_id)
            works = itertools.chain(works, related)
        return works
class PixivFavoriteExtractor(PixivExtractor):
    """Extractor for all favorites/bookmarks of a pixiv-user"""
    subcategory = "favorite"
    directory_fmt = ("{category}", "bookmarks",
                     "{user_bookmark[id]} {user_bookmark[account]}")
    archive_fmt = "f_{user_bookmark[id]}_{id}{num}.{extension}"
    # group 1: user id, group 2: 'bookmarks/artworks' or 'following',
    # group 3: optional tag, group 4: query string of bookmark.php
    pattern = (r"(?:https?://)?(?:www\.|touch\.)?pixiv\.net/(?:(?:en/)?"
               r"users/(\d+)/(bookmarks/artworks|following)(?:/([^/?#]+))?"
               r"|bookmark\.php)(?:\?([^#]*))?")
    test = (
        ("https://www.pixiv.net/en/users/173530/bookmarks/artworks", {
            "url": "e717eb511500f2fa3497aaee796a468ecf685cc4",
        }),
        ("https://www.pixiv.net/bookmark.php?id=173530", {
            "url": "e717eb511500f2fa3497aaee796a468ecf685cc4",
        }),
        # bookmarks with specific tag
        (("https://www.pixiv.net/en/users/3137110"
          "/bookmarks/artworks/%E3%81%AF%E3%82%93%E3%82%82%E3%82%93"), {
            "url": "379b28275f786d946e01f721e54afe346c148a8c",
        }),
        # bookmarks with specific tag (legacy url)
        (("https://www.pixiv.net/bookmark.php?id=3137110"
          "&tag=%E3%81%AF%E3%82%93%E3%82%82%E3%82%93&p=1"), {
            "url": "379b28275f786d946e01f721e54afe346c148a8c",
        }),
        # own bookmarks
        ("https://www.pixiv.net/bookmark.php", {
            "url": "90c1715b07b0d1aad300bce256a0bc71f42540ba",
        }),
        # own bookmarks with tag (#596)
        ("https://www.pixiv.net/bookmark.php?tag=foobar", {
            "count": 0,
        }),
        # followed users (#515)
        ("https://www.pixiv.net/en/users/173530/following", {
            "pattern": PixivUserExtractor.pattern,
            "count": ">= 12",
        }),
        # followed users (legacy url) (#515)
        ("https://www.pixiv.net/bookmark.php?id=173530&type=user", {
            "pattern": PixivUserExtractor.pattern,
            "count": ">= 12",
        }),
        # touch URLs
        ("https://touch.pixiv.net/bookmark.php?id=173530"),
        ("https://touch.pixiv.net/bookmark.php"),
    )

    def __init__(self, match):
        uid, kind, self.tag, query = match.groups()
        query = text.parse_query(query)

        if not uid:
            uid = query.get("id")
            if not uid:
                # no user ID at all -> the logged-in user's own bookmarks
                self.subcategory = "bookmark"

        if kind == "following" or query.get("type") == "user":
            # list followed users instead of bookmarked works
            self.subcategory = "following"
            self.items = self._items_following

        PixivExtractor.__init__(self, match)
        self.query = query
        self.user_id = uid

    def works(self):
        tag = None
        if "tag" in self.query:
            tag = text.unquote(self.query["tag"])
        elif self.tag:
            tag = text.unquote(self.tag)

        restrict = "public"
        if self.query.get("rest") == "hide":
            restrict = "private"

        return self.api.user_bookmarks_illust(self.user_id, tag, restrict)

    def metadata(self):
        if self.user_id:
            user = self.api.user_detail(self.user_id)
        else:
            # own bookmarks: resolve the authenticated user via login
            self.api.login()
            user = self.api.user
            self.user_id = user["id"]
        return {"user_bookmark": user}

    def _items_following(self):
        """Alternate items() implementation: enqueue followed user profiles"""
        restrict = "public"
        if self.query.get("rest") == "hide":
            restrict = "private"

        for preview in self.api.user_following(self.user_id, restrict):
            user = preview["user"]
            user["_extractor"] = PixivUserExtractor
            url = "https://www.pixiv.net/users/{}".format(user["id"])
            yield Message.Queue, url, user
class PixivRankingExtractor(PixivExtractor):
    """Extractor for pixiv ranking pages"""
    subcategory = "ranking"
    archive_fmt = "r_{ranking[mode]}_{ranking[date]}_{id}{num}.{extension}"
    directory_fmt = ("{category}", "rankings",
                     "{ranking[mode]}", "{ranking[date]}")
    pattern = (r"(?:https?://)?(?:www\.|touch\.)?pixiv\.net"
               r"/ranking\.php(?:\?([^#]*))?")
    test = (
        ("https://www.pixiv.net/ranking.php?mode=daily&date=20170818"),
        ("https://www.pixiv.net/ranking.php"),
        ("https://touch.pixiv.net/ranking.php"),
    )

    def __init__(self, match):
        PixivExtractor.__init__(self, match)
        self.query = match.group(1)
        # filled in by metadata() before works() is called
        self.mode = self.date = None

    def works(self):
        return self.api.illust_ranking(self.mode, self.date)

    def metadata(self):
        query = text.parse_query(self.query)

        mode = query.get("mode", "daily").lower()
        # map website mode names to their App-API equivalents
        mode_map = {
            "daily": "day",
            "daily_r18": "day_r18",
            "weekly": "week",
            "weekly_r18": "week_r18",
            "monthly": "month",
            "male": "day_male",
            "male_r18": "day_male_r18",
            "female": "day_female",
            "female_r18": "day_female_r18",
            "original": "week_original",
            "rookie": "week_rookie",
            "r18g": "week_r18g",
        }
        if mode not in mode_map:
            self.log.warning("invalid mode '%s'", mode)
            mode = "daily"
        self.mode = mode_map[mode]

        date = query.get("date")
        if date:
            # convert YYYYMMDD -> YYYY-MM-DD; reject anything else
            if len(date) == 8 and date.isdecimal():
                date = "{}-{}-{}".format(date[0:4], date[4:6], date[6:8])
            else:
                self.log.warning("invalid date '%s'", date)
                date = None
        if not date:
            # default: yesterday (today's ranking is not final yet)
            date = (datetime.utcnow() - timedelta(days=1)).strftime("%Y-%m-%d")
        self.date = date

        return {"ranking": {
            "mode": mode,
            "date": self.date,
        }}
class PixivSearchExtractor(PixivExtractor):
    """Extractor for pixiv search results"""
    subcategory = "search"
    archive_fmt = "s_{search[word]}_{id}{num}.{extension}"
    directory_fmt = ("{category}", "search", "{search[word]}")
    pattern = (r"(?:https?://)?(?:www\.|touch\.)?pixiv\.net"
               r"/(?:(?:en/)?tags/([^/?#]+)(?:/[^/?#]+)?/?"
               r"|search\.php)(?:\?([^#]+))?")
    test = (
        ("https://www.pixiv.net/en/tags/Original", {
            "range": "1-10",
            "count": 10,
        }),
        ("https://www.pixiv.net/en/tags/foo/artworks?order=date&s_mode=s_tag"),
        ("https://www.pixiv.net/search.php?s_mode=s_tag&word=Original"),
        ("https://touch.pixiv.net/search.php?word=Original"),
    )

    def __init__(self, match):
        PixivExtractor.__init__(self, match)
        self.word, self.query = match.groups()
        # filled in by metadata() before works() is called
        self.sort = self.target = None

    def works(self):
        return self.api.search_illust(
            self.word, self.sort, self.target,
            date_start=self.date_start, date_end=self.date_end)

    def metadata(self):
        query = text.parse_query(self.query)

        # search term: /tags/<word> path or 'word' query parameter
        if self.word:
            self.word = text.unquote(self.word)
        else:
            if "word" not in query:
                raise exception.StopExtraction("Missing search term")
            self.word = query["word"]

        sort = query.get("order", "date_d")
        sort_map = {
            "date": "date_asc",
            "date_d": "date_desc",
        }
        if sort not in sort_map:
            self.log.warning("invalid sort order '%s'", sort)
            sort = "date_d"
        self.sort = sort_map[sort]

        target = query.get("s_mode", "s_tag")
        target_map = {
            "s_tag": "partial_match_for_tags",
            "s_tag_full": "exact_match_for_tags",
            "s_tc": "title_and_caption",
        }
        if target not in target_map:
            self.log.warning("invalid search target '%s'", target)
            target = "s_tag"
        self.target = target_map[target]

        self.date_start = query.get("scd")
        self.date_end = query.get("ecd")

        return {"search": {
            "word": self.word,
            "sort": self.sort,
            "target": self.target,
            "date_start": self.date_start,
            "date_end": self.date_end,
        }}
class PixivFollowExtractor(PixivExtractor):
    """Extractor for new illustrations from your followed artists"""
    subcategory = "follow"
    archive_fmt = "F_{user_follow[id]}_{id}{num}.{extension}"
    directory_fmt = ("{category}", "following")
    pattern = (r"(?:https?://)?(?:www\.|touch\.)?pixiv\.net"
               r"/bookmark_new_illust\.php")
    test = (
        ("https://www.pixiv.net/bookmark_new_illust.php"),
        ("https://touch.pixiv.net/bookmark_new_illust.php"),
    )

    def works(self):
        return self.api.illust_follow()

    def metadata(self):
        # requires authentication; exposes the logged-in user's info
        self.api.login()
        return {"user_follow": self.api.user}
class PixivPixivisionExtractor(PixivExtractor):
    """Extractor for illustrations from a pixivision article"""
    subcategory = "pixivision"
    directory_fmt = ("{category}", "pixivision",
                     "{pixivision_id} {pixivision_title}")
    archive_fmt = "V{pixivision_id}_{id}{suffix}.{extension}"
    pattern = r"(?:https?://)?(?:www\.)?pixivision\.net/(?:en/)?a/(\d+)"
    test = (
        ("https://www.pixivision.net/en/a/2791"),
        ("https://pixivision.net/a/2791", {
            "count": 7,
            "keyword": {
                "pixivision_id": "2791",
                "pixivision_title": "What's your favorite music? Editor’s "
                                    "picks featuring: “CD Covers”!",
            },
        }),
    )

    def __init__(self, match):
        PixivExtractor.__init__(self, match)
        self.pixivision_id = match.group(1)

    def works(self):
        # scrape artwork links from the article HTML fetched in metadata()
        # and look each one up through the API; duplicates are dropped
        return (
            self.api.illust_detail(illust_id)
            for illust_id in util.unique_sequence(text.extract_iter(
                self.page, '<a href="https://www.pixiv.net/en/artworks/', '"'))
        )

    def metadata(self):
        url = "https://www.pixivision.net/en/a/" + self.pixivision_id
        headers = {"User-Agent": "Mozilla/5.0"}
        # metadata() runs before works(), so the page fetched here is
        # available for link extraction above
        self.page = self.request(url, headers=headers).text

        title = text.extract(self.page, '<title>', '<')[0]
        return {
            "pixivision_id" : self.pixivision_id,
            "pixivision_title": text.unescape(title),
        }
class PixivSketchExtractor(Extractor):
    """Extractor for user pages on sketch.pixiv.net"""
    category = "pixiv"
    subcategory = "sketch"
    directory_fmt = ("{category}", "sketch", "{user[unique_name]}")
    filename_fmt = "{post_id} {id}.{extension}"
    archive_fmt = "S{user[id]}_{id}"
    root = "https://sketch.pixiv.net"
    cookiedomain = ".pixiv.net"
    pattern = r"(?:https?://)?sketch\.pixiv\.net/@([^/?#]+)"
    test = ("https://sketch.pixiv.net/@nicoby", {
        "pattern": r"https://img\-sketch\.pixiv\.net/uploads/medium"
                   r"/file/\d+/\d+\.(jpg|png)",
        "count": ">= 35",
    })

    def __init__(self, match):
        Extractor.__init__(self, match)
        self.username = match.group(1)

    def items(self):
        # the image server checks the Referer header on downloads
        headers = {"Referer": "{}/@{}".format(self.root, self.username)}

        for post in self.posts():
            media = post["media"]
            post["post_id"] = post["id"]
            post["date"] = text.parse_datetime(
                post["created_at"], "%Y-%m-%dT%H:%M:%S.%f%z")
            util.delete_items(post, ("id", "media", "_links"))

            yield Message.Directory, post
            post["_http_headers"] = headers

            # one URL message per attached photo; 'id' is reused for
            # the photo id while 'post_id' keeps the post's id
            for photo in media:
                original = photo["photo"]["original"]
                post["id"] = photo["id"]
                post["width"] = original["width"]
                post["height"] = original["height"]

                url = original["url"]
                text.nameext_from_url(url, post)
                yield Message.Url, url, post

    def posts(self):
        """Yield all public wall posts of the user, following pagination"""
        url = "{}/api/walls/@{}/posts/public.json".format(
            self.root, self.username)
        headers = {
            "Accept": "application/vnd.sketch-v4+json",
            "X-Requested-With": "{}/@{}".format(self.root, self.username),
            "Referer": self.root + "/",
        }

        while True:
            data = self.request(url, headers=headers).json()
            yield from data["data"]["items"]

            next_url = data["_links"].get("next")
            if not next_url:
                return
            url = self.root + next_url["href"]
class PixivAppAPI():
    """Minimal interface for the Pixiv App API for mobile devices

    For a more complete implementation or documentation, see
    - https://github.com/upbit/pixivpy
    - https://gist.github.com/ZipFile/3ba99b47162c23f8aea5d5942bb557b1
    """
    # default OAuth client credentials of the official mobile app
    CLIENT_ID = "MOBrBDS8blbauoSck0ZfDbtuzpyT"
    CLIENT_SECRET = "lsACyCD94FhDUtGTXi3QzcFE2uU1hqtDaKeqrdwj"
    HASH_SECRET = ("28c1fdd170a5204386cb1313c7077b34"
                   "f83e4aaf4aa829ce78c231e05b0bae2c")

    def __init__(self, extractor):
        self.extractor = extractor
        self.log = extractor.log
        self.username = extractor._get_auth_info()[0]
        self.user = None

        # impersonate the official iOS app
        extractor.session.headers.update({
            "App-OS"        : "ios",
            "App-OS-Version": "13.1.2",
            "App-Version"   : "7.7.6",
            "User-Agent"    : "PixivIOSApp/7.7.6 (iOS 13.1.2; iPhone11,8)",
            "Referer"       : "https://app-api.pixiv.net/",
        })

        self.client_id = extractor.config(
            "client-id", self.CLIENT_ID)
        self.client_secret = extractor.config(
            "client-secret", self.CLIENT_SECRET)

        token = extractor.config("refresh-token")
        if token is None or token == "cache":
            # fall back to the token cached by 'gallery-dl oauth:pixiv'
            token = _refresh_token_cache(self.username)
        self.refresh_token = token

    def login(self):
        """Login and gain an access token"""
        self.user, auth = self._login_impl(self.username)
        self.extractor.session.headers["Authorization"] = auth

    @cache(maxage=3600, keyarg=1)
    def _login_impl(self, username):
        """Exchange the refresh token for a (cached) access token"""
        if not self.refresh_token:
            raise exception.AuthenticationError(
                "'refresh-token' required.\n"
                "Run `gallery-dl oauth:pixiv` to get one.")

        self.log.info("Refreshing access token")
        url = "https://oauth.secure.pixiv.net/auth/token"
        data = {
            "client_id"     : self.client_id,
            "client_secret" : self.client_secret,
            "grant_type"    : "refresh_token",
            "refresh_token" : self.refresh_token,
            "get_secure_url": "1",
        }
        # the OAuth endpoint verifies an MD5 of timestamp + shared secret
        time = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S+00:00")
        headers = {
            "X-Client-Time": time,
            "X-Client-Hash": hashlib.md5(
                (time + self.HASH_SECRET).encode()).hexdigest(),
        }

        response = self.extractor.request(
            url, method="POST", headers=headers, data=data, fatal=False)
        if response.status_code >= 400:
            self.log.debug(response.text)
            raise exception.AuthenticationError("Invalid refresh token")

        data = response.json()["response"]
        return data["user"], "Bearer " + data["access_token"]

    def illust_detail(self, illust_id):
        params = {"illust_id": illust_id}
        return self._call("v1/illust/detail", params)["illust"]

    def illust_follow(self, restrict="all"):
        params = {"restrict": restrict}
        return self._pagination("v2/illust/follow", params)

    def illust_ranking(self, mode="day", date=None):
        params = {"mode": mode, "date": date}
        return self._pagination("v1/illust/ranking", params)

    def illust_related(self, illust_id):
        params = {"illust_id": illust_id}
        return self._pagination("v2/illust/related", params)

    def search_illust(self, word, sort=None, target=None, duration=None,
                      date_start=None, date_end=None):
        params = {"word": word, "search_target": target,
                  "sort": sort, "duration": duration,
                  "start_date": date_start, "end_date": date_end}
        return self._pagination("v1/search/illust", params)

    def user_bookmarks_illust(self, user_id, tag=None, restrict="public"):
        params = {"user_id": user_id, "tag": tag, "restrict": restrict}
        return self._pagination("v1/user/bookmarks/illust", params)

    def user_detail(self, user_id):
        params = {"user_id": user_id}
        return self._call("v1/user/detail", params)["user"]

    def user_following(self, user_id, restrict="public"):
        params = {"user_id": user_id, "restrict": restrict}
        return self._pagination("v1/user/following", params, "user_previews")

    def user_illusts(self, user_id):
        params = {"user_id": user_id}
        return self._pagination("v1/user/illusts", params)

    def ugoira_metadata(self, illust_id):
        params = {"illust_id": illust_id}
        return self._call("v1/ugoira/metadata", params)["ugoira_metadata"]

    def _call(self, endpoint, params=None):
        """Perform one authenticated API request and return its JSON data

        Retries once after a two-minute wait when rate-limited; raises
        NotFoundError on 404 and StopExtraction on other API errors.
        """
        url = "https://app-api.pixiv.net/" + endpoint

        self.login()
        response = self.extractor.request(url, params=params, fatal=False)
        data = response.json()

        if "error" in data:
            if response.status_code == 404:
                raise exception.NotFoundError()

            error = data["error"]
            if "rate limit" in (error.get("message") or "").lower():
                self.log.info("Waiting two minutes for API rate limit reset.")
                time.sleep(120)
                return self._call(endpoint, params)

            raise exception.StopExtraction("API request failed: %s", error)

        return data

    def _pagination(self, endpoint, params, key="illusts"):
        """Yield items from all result pages, following 'next_url' links"""
        while True:
            data = self._call(endpoint, params)
            yield from data[key]

            if not data["next_url"]:
                return
            query = data["next_url"].rpartition("?")[2]
            params = text.parse_query(query)
# Long-lived per-username cache slot for refresh tokens; the default
# None is returned until 'gallery-dl oauth:pixiv' stores a real token.
@cache(maxage=10*365*24*3600, keyarg=0)
def _refresh_token_cache(username):
    return None
|
mikf/gallery-dl
|
gallery_dl/extractor/pixiv.py
|
Python
|
gpl-2.0
| 28,123
|
import os
from ninja_utils.factory import Downloadable
from ninja_utils.utils import download_txt_url
from .. import SETTINGS
class SilvaMapping(Downloadable):
    """Downloadable that fetches the SILVA taxonomy dump files."""

    def __init__(self, _silva_taxdmp_urls=SETTINGS.settings['silva_taxdmp_urls'], _silva_taxdmp_dir=SETTINGS.settings['silva_taxdmp_dir']):
        # NOTE(review): default arguments are evaluated at import time,
        # so SETTINGS is read once when this module loads — confirm intended.
        super().__init__(_silva_taxdmp_dir)
        self.urls = _silva_taxdmp_urls

    def download(self):
        """Download every configured URL into self.path, named after the URL's last path segment."""
        for url in self.urls:
            file_name = url.split('/')[-1]
            download_txt_url(os.path.join(self.path, file_name), url)
def main():
    """Run the SILVA taxonomy download with default settings."""
    SilvaMapping().run()


if __name__ == '__main__':
    main()
|
knights-lab/NINJA-DOJO
|
dojo/downloaders/download_silva2ncbi_taxonomy.py
|
Python
|
gpl-2.0
| 633
|
#!/usr/bin/python
# coding: utf-8
import csv
import os
import re
import subprocess
import sys
# Generate pdf file
def generate_pdf(filename, directory):
    """Export <filename>.svg to <directory>/<filename>.pdf via inkscape,
    then delete the intermediate svg file.

    filename  -- certificate basename (no extension)
    directory -- output directory for the pdf
    """
    print("  generating pdf certificate")
    # Pass the arguments as a list (no shell) so names containing spaces
    # or shell metacharacters cannot break or inject into the command;
    # the original os.system() string interpolation was injectable.
    subprocess.call(['inkscape', filename + '.svg',
                     '--export-pdf=' + os.path.join(directory, filename + '.pdf')])
    print("  removing svg file")
    # os.remove instead of shelling out to 'rm'
    os.remove(filename + '.svg')
# Generate svg file
def generate_svg(svg_name, svg_file, filename, name):
    """Write <filename>.svg: svg_file with ___NAME___ replaced by name.

    svg_name is kept for interface compatibility but is no longer used:
    writing the substituted content directly makes the previous `cp` step
    redundant and fixes two bugs of the original implementation --
    opening with 'r+' could leave trailing bytes of the copied template
    when the substituted text is shorter, and re.sub() treated
    backslashes in `name` as regex escape sequences.
    """
    print("  generating " + filename + ".svg")
    with open(filename + ".svg", "w") as certificate:
        # plain string replacement: names are literal text, not regexes
        certificate.write(svg_file.replace("___NAME___", name))
# Generate the certificates
def generate_certificates(svg_name, svg_file, list_names_file, directory):
    """Create one pdf certificate per non-empty line of list_names_file.

    svg_name        -- template basename passed through to generate_svg
    svg_file        -- template content containing the ___NAME___ marker
    list_names_file -- path to a text file, one recipient name per line
    directory       -- output directory (created if missing)
    """
    if not os.path.exists(directory):
        os.makedirs(directory)

    # 'with' guarantees the names file is closed; the original leaked
    # the file handle returned by open().
    with open(list_names_file) as list_names:
        for name in list_names:
            name = name.rstrip("\n")
            if name:
                print("Generating " + name + "'s certificate:\n")
                filename = name.replace(" ", "_")
                generate_svg(svg_name, svg_file, filename, name)
                generate_pdf(filename, directory)
if __name__ == '__main__':
    # usage: g_certificates.py <svg_file> <list_of_names> <folder>
    # <svg_file> is given WITHOUT its .svg extension
    if(len(sys.argv) != 4):
        print "Usage: python g_certificates.py <svg_file> <list_of_names> <folder>"
        sys.exit()

    svg_name = sys.argv[1]         # SVG template basename
    list_names_file = sys.argv[2]  # list of names used to generate the certificates
    directory = sys.argv[3]        # directory in which to store the certificates

    svg_file = open(svg_name+".svg").read()  # read the SVG template content
    generate_certificates(svg_name, svg_file, list_names_file, directory)
|
di3goleite/certificate-generator
|
g_certificates.py
|
Python
|
gpl-2.0
| 1,763
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
from __future__ import absolute_import
import errno
import os
import time
import traceback
from contextlib import contextmanager
from bindings import revisionstore
from edenscm.mercurial import encoding, error, progress, util, vfs
from edenscm.mercurial.i18n import _
from edenscm.mercurial.node import nullid, short
from ..extutil import flock
from . import constants, datapack, historypack, shallowutil
class RepackAlreadyRunning(error.Abort):
    """Raised when the repack lock is already held by another process."""
    pass
def domaintenancerepack(repo):
    """Perform a background repack if necessary."""
    # maintenance always runs incrementally to keep the cost low
    backgroundrepack(repo, incremental=True)
def backgroundrepack(repo, incremental=True):
    """Spawn a detached 'hg repack' process for *repo*.

    incremental -- pass --incremental so only what is needed is repacked.
    """
    cmd = [util.hgexecutable(), "-R", repo.origroot, "repack"]
    msg = _("(running background repack)\n")
    if incremental:
        cmd.append("--incremental")
        msg = _("(running background incremental repack)\n")

    if not repo.ui.quiet:
        repo.ui.write_err(msg)

    # fire-and-forget: the repack continues after this process exits
    util.spawndetached(cmd)
def _runrustrepack(ui, packpath, stores, incremental, shared):
    """Invoke the Rust repack implementation on *packpath*.

    A missing pack directory is a no-op.  Failures are logged; only
    errors other than the partial-success message are re-raised.
    """
    if not os.path.isdir(packpath):
        return

    try:
        revisionstore.repack(packpath, stores, not incremental, shared, ui._rcfg._rcfg)
    except Exception as e:
        ui.log("repack_failure", msg=str(e), traceback=traceback.format_exc())
        # a repack that succeeded "with errors" still produced usable
        # packs, so swallow that case instead of aborting the caller
        if "Repack successful but with errors" not in str(e):
            raise
def runrepacklegacy(ui, packpath, incremental, shared):
    """Repack *packpath* without explicit store objects (legacy path)."""
    _runrustrepack(ui, packpath, None, incremental, shared)
def _getstores(repo, category):
    """Return the (content/data, metadata/history) store pair for *category*."""
    if category == constants.FILEPACK_CATEGORY:
        return (repo.fileslog.contentstore, repo.fileslog.metadatastore)
    elif category == constants.TREEPACK_CATEGORY:
        return (repo.manifestlog.datastore, repo.manifestlog.historystore)
    raise error.ProgrammingError("invalid pack category")
def _shareddatastoresrepack(repo, incremental, category):
    """Clean up and repack the shared cache packs for *category*,
    honoring the 'remotefilelog.cachelimit' size limit."""
    packpath = shallowutil.getcachepackpath(repo, category)
    limit = repo.ui.configbytes("remotefilelog", "cachelimit", "10GB")
    _cleanuppacks(repo.ui, packpath, limit)

    _runrustrepack(repo.ui, packpath, _getstores(repo, category), incremental, True)
def _localdatarepack(repo, incremental, category):
    """Repack the repo-local packs for *category* when the
    'remotefilelog.localdatarepack' config knob is enabled.

    Local packs have no size limit (limit=0)."""
    if repo.ui.configbool("remotefilelog", "localdatarepack"):
        packpath = shallowutil.getlocalpackpath(repo.svfs.vfs.base, category)
        _cleanuppacks(repo.ui, packpath, 0)

        _runrustrepack(
            repo.ui, packpath, _getstores(repo, category), incremental, False
        )
def fulllocaldatarepack(repo, stores):
    """Run a full (non-incremental) repack of the local file packs using
    the explicitly supplied *stores*, if local repacking is enabled."""
    if repo.ui.configbool("remotefilelog", "localdatarepack"):
        packpath = shallowutil.getlocalpackpath(
            repo.svfs.vfs.base, constants.FILEPACK_CATEGORY
        )
        _cleanuppacks(repo.ui, packpath, 0)

        _runrustrepack(repo.ui, packpath, stores, False, False)
def _manifestrepack(repo, incremental):
    """Repack the tree-manifest packs (shared and local stores)."""
    if repo.ui.configbool("treemanifest", "server"):
        # This code path is no longer used. Will be deleted soon.
        pass
    elif util.safehasattr(repo.manifestlog, "datastore"):
        if repo.ui.configbool("treemanifest", "useruststore"):
            # Rust store: same helpers as the file packs
            # Shared
            _shareddatastoresrepack(repo, incremental, constants.TREEPACK_CATEGORY)
            # Local
            _localdatarepack(repo, incremental, constants.TREEPACK_CATEGORY)
        else:
            # legacy store: repack shared and local pack paths separately
            localdata, shareddata = _getmanifeststores(repo)
            lpackpath, ldstores, lhstores = localdata
            spackpath, sdstores, shstores = shareddata

            def _domanifestrepack(packpath, dstores, hstores, shared):
                # only the shared cache is size-limited
                limit = (
                    repo.ui.configbytes("remotefilelog", "manifestlimit", "2GB")
                    if shared
                    else 0
                )
                _cleanuppacks(repo.ui, packpath, limit)
                runrepacklegacy(repo.ui, packpath, incremental, shared)

            # Repack the shared manifest store
            _domanifestrepack(spackpath, sdstores, shstores, True)

            # Repack the local manifest store
            _domanifestrepack(lpackpath, ldstores, lhstores, False)
def _dorepack(repo, incremental):
    """Repack the shared, local and manifest stores of *repo*.

    Holds the "repacklock" flock so that only one repack per repo runs
    at a time; raises RepackAlreadyRunning when it is already held.
    """
    # Set the umask BEFORE entering the try block so that `mask` is
    # guaranteed to be bound when the finally clause restores it; in the
    # original the assignment lived inside the try, so a failure before
    # it would have raised UnboundLocalError from the finally clause.
    mask = os.umask(0o002)
    try:
        with flock(
            repacklockvfs(repo).join("repacklock"),
            _("repacking %s") % repo.origroot,
            timeout=0,
        ):
            repo.hook("prerepack")

            _shareddatastoresrepack(repo, incremental, constants.FILEPACK_CATEGORY)
            _localdatarepack(repo, incremental, constants.FILEPACK_CATEGORY)
            _manifestrepack(repo, incremental)
    except error.LockHeld:
        raise RepackAlreadyRunning(
            _("skipping repack - another repack " "is already running")
        )
    finally:
        os.umask(mask)
def fullrepack(repo):
    """Run a complete, non-incremental repack of *repo*."""
    _dorepack(repo, False)
def incrementalrepack(repo):
    """This repacks the repo by looking at the distribution of pack files in the
    repo and performing the most minimal repack to keep the repo in good shape.
    """
    _dorepack(repo, True)
def _getmanifeststores(repo):
    """Return ((local packpath, data stores, history stores),
               (shared packpath, data stores, history stores))
    for the legacy tree-manifest store layout."""
    shareddatastores = repo.manifestlog.shareddatastores
    localdatastores = repo.manifestlog.localdatastores
    sharedhistorystores = repo.manifestlog.sharedhistorystores
    localhistorystores = repo.manifestlog.localhistorystores

    sharedpackpath = shallowutil.getcachepackpath(repo, constants.TREEPACK_CATEGORY)
    localpackpath = shallowutil.getlocalpackpath(
        repo.svfs.vfs.base, constants.TREEPACK_CATEGORY
    )

    return (
        (localpackpath, localdatastores, localhistorystores),
        (sharedpackpath, shareddatastores, sharedhistorystores),
    )
def _cleanuptemppacks(ui, packpath):
    """In some situations, temporary pack files are left around unnecessarily
    using disk space. We've even seen cases where some users had 170GB+ worth
    of these. Let's remove these.
    """
    extensions = [
        datapack.PACKSUFFIX,
        datapack.INDEXSUFFIX,
        historypack.PACKSUFFIX,
        historypack.INDEXSUFFIX,
    ]

    def _shouldhold(f):
        """Newish files shouldn't be removed as they could be used by another
        running command.
        """
        if os.path.isdir(f) or os.path.basename(f) == "repacklock":
            return True

        try:
            stat = os.lstat(f)
        except OSError:
            # If we can't access the file, it's either being removed, or we
            # don't have access to it, either way there is nothing we can do
            # about it, ignore them.
            return True
        # keep files accessed within the last 24 hours
        return time.gmtime(stat.st_atime + 24 * 3600) > time.gmtime()

    with progress.spinner(ui, _("cleaning old temporary files")):
        try:
            for f in os.listdir(packpath):
                f = os.path.join(packpath, f)
                if _shouldhold(f):
                    continue

                # anything that is NOT a finished pack/index file is
                # considered temporary residue and removed best-effort
                __, ext = os.path.splitext(f)

                if ext not in extensions:
                    try:
                        util.unlink(f)
                    except Exception:
                        pass
        except OSError as ex:
            # a missing pack directory simply means nothing to clean
            if ex.errno != errno.ENOENT:
                raise
def _cleanupoldpacks(ui, packpath, limit):
    """Enforce a size limit on the cache. Packfiles will be removed oldest
    first, with the assumption that old packfiles contain less useful data
    than new ones.
    """
    with progress.spinner(ui, _("cleaning old packs")):

        def _mtime(f):
            stat = util.lstat(f)
            return stat.st_mtime

        def _listpackfiles(path):
            packs = []
            try:
                for f in os.listdir(path):
                    _, ext = os.path.splitext(f)
                    if ext.endswith("pack"):
                        packs.append(os.path.join(packpath, f))
            except OSError as ex:
                if ex.errno != errno.ENOENT:
                    raise

            return packs

        # newest first, so files are popped oldest-first from the end
        files = sorted(_listpackfiles(packpath), key=_mtime, reverse=True)

        cachesize = 0
        for f in files:
            stat = os.lstat(f)
            cachesize += stat.st_size

        while cachesize > limit:
            f = files.pop()
            stat = util.lstat(f)

            # Don't remove files that are newer than 10 minutes. This will
            # avoid a race condition where mercurial downloads files from the
            # network and expect these to be present on disk. If the 'limit' is
            # properly set, we should have removed enough files that this
            # condition won't matter.
            if time.gmtime(stat.st_mtime + 10 * 60) > time.gmtime():
                return

            # remove the matching index file first, then the pack itself
            root, ext = os.path.splitext(f)
            try:
                if ext == datapack.PACKSUFFIX:
                    util.unlink(root + datapack.INDEXSUFFIX)
                else:
                    util.unlink(root + historypack.INDEXSUFFIX)
            except OSError as ex:
                if ex.errno != errno.ENOENT:
                    raise

            try:
                util.unlink(f)
            except OSError as ex:
                if ex.errno != errno.ENOENT:
                    raise

            cachesize -= stat.st_size
def _cleanuppacks(ui, packpath, limit):
    """Remove stale temporary pack files and, when cleaning is enabled
    and a non-zero *limit* is given, trim old packs to the size limit."""
    _cleanuptemppacks(ui, packpath)
    if ui.configbool("remotefilelog", "cleanoldpacks") and limit != 0:
        _cleanupoldpacks(ui, packpath, limit)
def repacklockvfs(repo):
    """Return the vfs in which the "repacklock" file should be taken."""
    if util.safehasattr(repo, "name"):
        # Lock in the shared cache so repacks across multiple copies of the same
        # repo are coordinated.
        sharedcachepath = shallowutil.getcachepackpath(
            repo, constants.FILEPACK_CATEGORY
        )
        return vfs.vfs(sharedcachepath)
    else:
        return repo.svfs
|
facebookexperimental/eden
|
eden/scm/edenscm/hgext/remotefilelog/repack.py
|
Python
|
gpl-2.0
| 9,960
|
# coding=utf-8
from __future__ import division
from scipy.optimize.minpack import curve_fit
from scipy.ndimage.interpolation import zoom
import numpy as np
def fit2dArrayToFn(arr, fn, mask=None, down_scale_factor=None,
                   output_shape=None, guess=None,
                   outgrid=None):
    """Fit the 2d array *arr* to the 2d function *fn*, using only the
    values selected by *mask*.

    * [down_scale_factor] map to speed up fitting procedure, set value
      smaller than 1
    * [output_shape] shape of the output array
    * [guess] must be scaled using [down_scale_factor]
    * [outgrid] optional (yy, xx) grid to evaluate the fitted fn on

    Returns:
        Fitted map, fitting params (scaled), parameter std-errors
    """
    if mask is None:
        # no mask given -> every pixel participates in the fit
        mask = np.ones(shape=arr.shape, dtype=bool)

    if down_scale_factor is None:
        # auto-choose: shrink large point sets to keep curve_fit fast
        down_scale_factor = 0.3 if mask.sum() > 1000 else 1

    if down_scale_factor == 1:
        sampled = arr
    else:
        # shrink both the data and the mask before fitting
        sampled = zoom(arr, down_scale_factor)
        mask = zoom(mask, down_scale_factor, output=bool)

    # coordinates and values of the masked (valid) points only
    rows, cols = np.where(mask)
    values = sampled[mask]

    # least-squares fit of fn((rows, cols)) -> values
    params, cov = curve_fit(fn, (rows, cols), values, p0=guess)

    # one-sigma parameter uncertainties from the covariance diagonal
    param_err = np.sqrt(np.diag(cov))

    if outgrid is not None:
        yy, xx = outgrid
        rebuilt = fn((yy, xx), *params)
    else:
        if output_shape is None:
            output_shape = arr.shape
        # rescale the (possibly down-scaled) fit coordinates onto the
        # requested output grid
        scale_r = sampled.shape[0] / output_shape[0]
        scale_c = sampled.shape[1] / output_shape[1]
        rebuilt = np.fromfunction(
            lambda r, c: fn((r * scale_r, c * scale_c), *params),
            output_shape)

    return rebuilt, params, param_err
if __name__ == '__main__':
    import sys
    import pylab as plt

    # demo: recover the known parameters (a, b) from a noisy image
    a, b = 10, 5
    f = 0.3  # down scale factor
    shape = (100, 200)
    # initial guess must be given in down-scaled coordinates
    guess = (a * f, b * f)

    def fn(xxx_todo_changeme, a, b):
        (x, y) = xxx_todo_changeme
        return np.sin(x / a) + np.cos(y / b)

    fn2 = lambda x, y: fn((x, y), a, b)

    # build noisy image:
    img = np.fromfunction(fn2, shape)
    img += np.random.rand(*shape)

    # fit equation using noise image:
    fit, parameters, perr = fit2dArrayToFn(
        img, fn, guess=guess, down_scale_factor=f)

    # skip plotting when called with 'no_window' (e.g. in test runs)
    if 'no_window' not in sys.argv:
        plt.figure('original')
        plt.imshow(img)

        plt.figure('fit')
        plt.imshow(fit)

        plt.show()
|
radjkarl/fancyTools
|
fancytools/fit/fit2dArrayToFn.py
|
Python
|
gpl-3.0
| 2,493
|
# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE.
# Copyright (C) NIWA & British Crown (Met Office) & Contributors.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Record version control information to the workflow log directory on
installation.
If the workflow source directory is a supported repository/working copy
(git or svn), information about the working copy will be saved in
``<run-dir>/log/version/vcs.conf``.
An example of this information for a git repo:
.. code-block:: cylc
version control system = "git"
repository version = "2.8.0-dirty"
commit = "e5dc6573dd70cabd8f973d1535c17c29c026d553"
working copy root path = "~/cylc-src/my-workflow-git"
status = \"\"\"
M flow.cylc
\"\"\"
And for an svn working copy:
.. code-block:: cylc
version control system = "svn"
working copy root path = "~/cylc-src/my-workflow-svn"
url = "file:///home/my-workflow-svn/trunk"
repository uuid = "219f5687-8eb8-44b1-beb6-e8220fa964d3"
revision = "14"
status = \"\"\"
M flow.cylc
\"\"\"
Any uncommitted changes will also be saved as a diff in
``<run-dir>/log/version/uncommitted.diff``. (Note that git does not include
untracked files in the diff.)
"""
from collections import OrderedDict
from pathlib import Path
from subprocess import Popen, DEVNULL, PIPE
from typing import Dict, Iterable, List, Optional, TYPE_CHECKING, Union
from cylc.flow import LOG
from cylc.flow.exceptions import CylcError
from cylc.flow.workflow_files import WorkflowFiles
if TYPE_CHECKING:
from optparse import Values
# Identifiers for the supported version control systems.
SVN = 'svn'
GIT = 'git'
# Arguments (minus the VCS executable itself) used to interrogate each VCS
# for basic repository information.
INFO_COMMANDS: Dict[str, List[str]] = {
    SVN: ['info', '--non-interactive'],
    GIT: ['describe', '--always', '--dirty']
}
# git ['show', '--quiet', '--format=short'],
# Arguments used to get the short working-copy status.
STATUS_COMMANDS: Dict[str, List[str]] = {
    SVN: ['status', '--non-interactive'],
    GIT: ['status', '--short']
}
# Arguments used to get the diff of uncommitted changes.
DIFF_COMMANDS: Dict[str, List[str]] = {
    SVN: ['diff', '--internal-diff', '--non-interactive'],
    GIT: ['diff', 'HEAD']
    # ['diff', '--no-index', '/dev/null', '{0}'] # untracked files
}
GIT_REV_PARSE_COMMAND: List[str] = ['rev-parse', 'HEAD']
# Lower-cased stderr prefixes indicating the path is not a repo of that VCS.
NOT_REPO_ERRS: Dict[str, List[str]] = {
    SVN: ['svn: e155007:',
          'svn: warning: w155007:'],
    GIT: ['fatal: not a git repository',
          'warning: not a git repository']
}
# Lower-cased stderr prefixes indicating the repo has no base commit.
NO_BASE_ERRS: Dict[str, List[str]] = {
    SVN: [],  # Not possible for svn working copy to have no base commit?
    GIT: ['fatal: bad revision \'head\'',
          'fatal: ambiguous argument \'head\': unknown revision']
}
# The `svn info` fields that get recorded in vcs.conf.
SVN_INFO_KEYS: List[str] = [
    'revision', 'url', 'working copy root path', 'repository uuid'
]
# Location of version control info under the workflow run directory.
LOG_VERSION_DIR = Path(WorkflowFiles.LOG_DIR, 'version')
class VCSNotInstalledError(CylcError):
    """Raised when a VCS command cannot be run because the VCS executable
    is not installed.

    Args:
        vcs: The version control system command.
        exc: The exception raised when attempting to run the command.
    """

    def __init__(self, vcs: str, exc: Exception) -> None:
        self.vcs = vcs
        self.exc = exc

    def __str__(self) -> str:
        return "{0} does not appear to be installed ({1})".format(
            self.vcs, self.exc)
class VCSMissingBaseError(CylcError):
    """Raised when a repository has no base commit to compare against.

    Args:
        vcs: The version control system command.
        repo_path: The path to the working copy.
    """

    def __init__(self, vcs: str, repo_path: Union[Path, str]) -> None:
        self.vcs = vcs
        self.path = repo_path

    def __str__(self) -> str:
        return "{0} repository at {1} is missing a base commit".format(
            self.vcs, self.path)
def get_vc_info(path: Union[Path, str]) -> Optional['OrderedDict[str, str]']:
    """Return the version control information for a repository, given its path.

    Tries each supported VCS in turn. Returns None if the path is not a
    supported repository/working copy, or if no supported VCS is installed.
    """
    info = OrderedDict()
    missing_base = False
    for vcs, args in INFO_COMMANDS.items():
        try:
            out = _run_cmd(vcs, args, cwd=path)
        except VCSNotInstalledError as exc:
            # This VCS is not installed; try the next one.
            LOG.debug(exc)
            continue
        except VCSMissingBaseError as exc:
            # Repo exists but has no commits yet; record what we can.
            missing_base = True
            LOG.debug(exc)
        except OSError as exc:
            if not any(
                exc.strerror.lower().startswith(err)
                for err in NOT_REPO_ERRS[vcs]
            ):
                # Unexpected error running the VCS command - propagate.
                raise exc
            else:
                LOG.debug(f"Source dir {path} is not a {vcs} repository")
                continue
        info['version control system'] = vcs
        if vcs == SVN:
            info.update(_parse_svn_info(out))
        elif vcs == GIT:
            if not missing_base:
                # `git describe` output plus the full HEAD hash.
                info['repository version'] = out.splitlines()[0]
                info['commit'] = _get_git_commit(path)
            info['working copy root path'] = str(path)
        info['status'] = get_status(vcs, path)
        LOG.debug(f"{vcs} repository detected")
        return info
    return None
def _run_cmd(vcs: str, args: Iterable[str], cwd: Union[Path, str]) -> str:
    """Run a VCS command in the given directory and return its stdout.

    Args:
        vcs: The version control system command (e.g. 'git').
        args: The args to pass to the version control command.
        cwd: Directory to run the command in.

    Raises:
        VCSNotInstalledError: If the VCS executable cannot be found.
        VCSMissingBaseError: If the repo has no base commit.
        OSError: with stderr if non-zero return code for any other reason.
    """
    cmd = [vcs, *args]
    try:
        proc = Popen(  # nosec
            cmd,
            cwd=cwd,
            stdin=DEVNULL,
            stdout=PIPE,
            stderr=PIPE,
            text=True,
        )
        # commands are defined in constants at top of module
    except FileNotFoundError as exc:
        # This will only be raised if the VCS command is not installed,
        # otherwise Popen() will succeed with a non-zero return code
        raise VCSNotInstalledError(vcs, exc)
    # BUG FIX: communicate() must run before checking the return code.
    # Calling wait() first (as before) can deadlock if the child fills
    # an OS pipe buffer on stdout/stderr before exiting.
    out, err = proc.communicate()
    ret_code = proc.returncode
    if ret_code:
        if any(err.lower().startswith(msg)
               for msg in NO_BASE_ERRS[vcs]):
            # No base commit in repo
            raise VCSMissingBaseError(vcs, cwd)
        raise OSError(ret_code, err)
    return out
def write_vc_info(
    info: 'OrderedDict[str, str]', run_dir: Union[Path, str]
) -> None:
    """Write version control info to the workflow's vcs log dir.

    Args:
        info: The vcs info.
        run_dir: The workflow run directory.

    Raises:
        ValueError: if there is no info to write.
    """
    if not info:
        raise ValueError("Nothing to write")
    info_file = Path(run_dir, LOG_VERSION_DIR, 'vcs.conf')
    info_file.parent.mkdir(exist_ok=True)
    lines = []
    for key, value in info.items():
        if key == 'status':
            # Multi-line status output goes in a triple-quoted block.
            lines.append(f"{key} = \"\"\"\n{value}\n\"\"\"\n")
        else:
            lines.append(f"{key} = \"{value}\"\n")
    with open(info_file, 'w') as f:
        f.writelines(lines)
def _get_git_commit(path: Union[Path, str]) -> str:
    """Return the hash of the HEAD of the repo at path."""
    args = GIT_REV_PARSE_COMMAND
    # `git rev-parse HEAD` prints the hash on the first line.
    return _run_cmd(GIT, args, cwd=path).splitlines()[0]
def get_status(vcs: str, path: Union[Path, str]) -> str:
    """Return the short status of a repo, with trailing newlines stripped.

    Args:
        vcs: The version control system.
        path: The path to the repository.
    """
    args = STATUS_COMMANDS[vcs]
    return _run_cmd(vcs, args, cwd=path).rstrip('\n')
def _parse_svn_info(info_text: str) -> 'OrderedDict[str, str]':
    """Return OrderedDict of certain info parsed from svn info raw output."""
    ret: 'OrderedDict[str, str]' = OrderedDict()
    for line in info_text.splitlines():
        if not line:
            continue
        # Split "Key: value" on the first colon only.
        key, value = (part.strip() for part in line.split(':', 1))
        key = key.lower()
        if key in SVN_INFO_KEYS:
            ret[key] = value
    return ret
def get_diff(vcs: str, path: Union[Path, str]) -> Optional[str]:
    """Return the diff of uncommitted changes for a repository.

    Args:
        vcs: The version control system.
        path: The path to the repo.

    Returns None if the VCS is not installed or the repo has no base commit.
    """
    # BUG FIX: copy the command template. The previous code did
    # ``args_ = DIFF_COMMANDS[vcs]`` and then appended to it, permanently
    # mutating the module-level constant on every call (each call added
    # another path argument to the shared list).
    args_ = [*DIFF_COMMANDS[vcs]]
    if Path(path).is_absolute():
        args_.append(str(path))
    else:
        args_.append(str(Path.cwd() / path))
    try:
        diff = _run_cmd(vcs, args_, cwd=path)
    except (VCSNotInstalledError, VCSMissingBaseError):
        return None
    header = (
        "# Auto-generated diff of uncommitted changes in the Cylc "
        "workflow repository:\n"
        f"# {path}")
    return f"{header}\n{diff}"
def write_diff(diff: str, run_dir: Union[Path, str]) -> None:
    """Write a diff to the workflow's vcs log dir.

    Args:
        diff: The diff text.
        run_dir: The workflow run directory.
    """
    diff_file = Path(run_dir, LOG_VERSION_DIR, 'uncommitted.diff')
    diff_file.parent.mkdir(exist_ok=True)
    diff_file.write_text(diff)
# Entry point:
def main(
    srcdir: Union[Path, str], opts: 'Values', rundir: Union[Path, str]
) -> bool:
    """Entry point for this plugin. Write version control info and any
    uncommitted diff to the workflow log dir.

    Args:
        srcdir: Workflow source dir for cylc install.
        opts: CLI options (requirement for post_install entry point, but
            not used here)
        rundir: Workflow run dir.

    Return True if source dir is a supported repo, else False.
    """
    vc_info = get_vc_info(srcdir)
    if vc_info is None:
        # Not a supported repository (or no supported VCS installed).
        return False
    vcs = vc_info['version control system']
    # diff is None if the VCS is missing or the repo has no base commit.
    diff = get_diff(vcs, srcdir)
    write_vc_info(vc_info, rundir)
    if diff is not None:
        write_diff(diff, rundir)
    return True
|
cylc/cylc
|
cylc/flow/install_plugins/log_vc_info.py
|
Python
|
gpl-3.0
| 10,110
|
from __future__ import division
#import sys
from random import choice, randint
import numpy as np
#from scipy import stats
#from scipy.stats import gaussian_kde
#from scipy.optimize import fsolve
import math
def distance(p0, p1):
    """ take two (x, y) tuples as parameters
    http://stackoverflow.com/questions/5407969/distance-formula-between-two-points-in-a-list"""
    dx = p0[0] - p1[0]
    dy = p0[1] - p1[1]
    return math.sqrt(dx**2 + dy**2)
def nearest_neighbor(xlist1, xlist2, ylist1, ylist2, q=1):
    """ xlist1 and ylist1 are assumed to be the lists of individual organisms
    xlist2 and ylist2 are assumed to be the lists of whatever the individual
    organisms are being measured with respect to their distance to

    Returns the mean, over up to 10 randomly chosen reference organisms,
    of the distance to the nearest of (up to) the first 10 target points.
    """
    n = len(xlist1)
    r = len(xlist2)
    # Cap the amount of work: at most 10 target points, 10 reference points.
    r = min([10, r])
    refpoints = min([10, n])
    DistList = []
    for ref in range(refpoints):
        i = randint(0, n-1)  # random reference organism
        x1 = xlist1[i]
        y1 = ylist1[i]
        # BUG FIX: the running minimum was seeded with the magic number
        # 10000, silently capping any nearest-neighbor distance above it.
        MinDist = float('inf')
        for j in range(r):
            x2 = xlist2[j]
            y2 = ylist2[j]
            dist = distance((x1, y1), (x2, y2))
            if dist < MinDist:
                MinDist = dist
        DistList.append(MinDist)
    return np.mean(DistList)
def avg_dist(xlist1, xlist2, ylist1, ylist2, q=1):
    """ xlist1 and ylist1 are assumed to be the lists of individual organisms
    xlist2 and ylist2 are assumed to be the lists of whatever the individual
    organisms are being measured with respect to their distance to

    Returns the mean distance over q * min(100, len(xlist1)) randomly
    drawn (organism, target) pairs.
    """
    nmax = len(xlist1)
    rmax = len(xlist2)
    refpoints = min([100, nmax])
    dist = []
    for _ in range(refpoints):
        for _ in range(q):
            i1 = choice(range(nmax))
            p1 = (xlist1[i1], ylist1[i1])
            i2 = choice(range(rmax))
            p2 = (xlist2[i2], ylist2[i2])
            dist.append(distance(p1, p2))
    return np.mean(dist)
"""
def nearest_neighbor_dist(xlist1, xlist2, ylist1, ylist2, q=1):
nmax = len(xlist1)
refpoints = min([100, nmax])
for n in range(refpoints):
for j in range(q):
i = choice(range(nmax))
x1 = xlist1[i]
x2 = xlist2[i]
y1 = ylist1[i]
y2 = ylist2[i]
return
"""
|
LennonLab/ActiveSoil
|
Modeling/tools/spatial/spatial.py
|
Python
|
gpl-3.0
| 2,281
|
class Point:
    """A point in 3-D space supporting arithmetic and in-place mutation."""

    def __init__(self, x, y, z):
        """Initialise the point from its three coordinates."""
        self.x = x
        self.y = y
        self.z = z

    def distance(self, other=None):
        """Return the distance to *other* (to the origin by default)."""
        if other is None:
            other = Point(0, 0, 0)
        dx = self.x - other.x
        dy = self.y - other.y
        dz = self.z - other.z
        return (dx**2 + dy**2 + dz**2) ** (1 / 2)

    def __add__(self, other):
        """Componentwise addition; returns a new Point."""
        return Point(self.x + other.x, self.y + other.y, self.z + other.z)

    def __sub__(self, other):
        """Componentwise subtraction; returns a new Point."""
        return Point(self.x - other.x, self.y - other.y, self.z - other.z)

    def __mul__(self, scalaire):
        """Multiplication by a scalar; returns a new Point."""
        return Point(self.x * scalaire, self.y * scalaire, self.z * scalaire)

    def __iadd__(self, other):
        """In-place addition; mutates and returns self."""
        self.x += other.x
        self.y += other.y
        self.z += other.z
        return self

    def __isub__(self, other):
        """In-place subtraction; mutates and returns self."""
        self.x -= other.x
        self.y -= other.y
        self.z -= other.z
        return self

    def __imul__(self, scalaire):
        """In-place multiplication by a scalar; mutates and returns self."""
        self.x *= scalaire
        self.y *= scalaire
        self.z *= scalaire
        return self

    def __str__(self):
        """Human-readable representation of the point."""
        return "Point ({0}, {1}, {2})".format(self.x, self.y, self.z)
# Demonstrate that the in-place operators mutate the existing instance:
# id(p) is unchanged across "p *= 42".
print("Mise en évidence de l'optimisation")
p = Point(1, 2, 3)
print(id(p))
p *= 42
print(id(p))
print(p)
|
UnderXirox/Python-3_Des-fichiers-complementaires
|
Guide/24_Classes/24__04__mutabilité.py
|
Python
|
gpl-3.0
| 1,881
|
from dinosaurus_lib.config import *
# Module-wide sequence generators producing unique ids for ResName / ResIp
# instances (GetSequence comes from dinosaurus_lib.config).
get_name_id=GetSequence()
get_ip_id=GetSequence()
class ResName(object):
    """A DNS name taking part in resolutions.

    Names hash by their unique sequential id, compare equal
    case-insensitively, and order domain-component-wise from the TLD
    inwards (so "mail.example.com" < "www.example.com").
    """
    record_type="XXX"
    def __init__(self,name):
        self.name=name
        self.id=get_name_id()  # unique sequential id
        self.res={}  # key -> list of (view_list, record) pairs
    def __hash__(self):
        return hash(self.id)
    def __str__(self):
        return "["+str(self.id)+"] "+self.name+" ("+self.record_type+")"
    def __eq__(self,other): return self.name.lower()==other.name.lower()
    def __lt__(self,other):
        # Compare reversed dotted components: TLD first, then downwards.
        s_t=self.name.lower().split(".")
        o_t=other.name.lower().split(".")
        s_t.reverse()
        o_t.reverse()
        s_L=len(s_t)
        o_L=len(o_t)
        L=min(s_L,o_L)
        for n in range(0,L):
            if s_t[n]<o_t[n]: return True
            if s_t[n]>o_t[n]: return False
        # All shared components equal: the shorter name sorts first.
        return s_L<o_L
    def __ne__(self,other): return not self.__eq__(other)
    def __le__(self,other):
        if self.__eq__(other): return True
        return self.__lt__(other)
    def __gt__(self,other): return other.__lt__(self)
    def __ge__(self,other):
        if self.__eq__(other): return True
        return self.__gt__(other)
    def add_res(self,key,record,view_list):
        # IDIOM FIX: dict.has_key() was removed in Python 3; the ``in``
        # operator is equivalent and works on both Python 2 and 3.
        if key not in self.res:
            self.res[key]=[]
        self.res[key].append( (view_list,record) )
class ResAName(ResName):
    """An A-record name; linked back to CNAMEs and to resolved IPs."""
    record_type = "A"
    def __init__(self,name):
        ResName.__init__(self,name)
        self.res_ip_list=[]     # (view_list, ResIp) pairs
        self.res_cname_list=[]  # (view_list, ResCName) back-links
class ResCName(ResName):
    """A CNAME record name, linked to its target names and IPs."""
    record_type = "CNAME"
    def __init__(self,name):
        ResName.__init__(self,name)
        self.res_ip_list=[]    # (view_list, ResIp) pairs
        self.res_name_list=[]  # (view_list, ResName) pairs
    def add_res_name(self,res_name,view_list):
        # Link this CNAME to the target name and back-link the name to us.
        self.res_name_list.append( (view_list,res_name) )
        res_name.res_cname_list.append( (view_list,self) )
    def add_res_ip(self,res_ip,view_list):
        # Record an IP this CNAME resolves to (no back-link).
        self.res_ip_list.append( (view_list,res_ip) )
class ResPTRName(ResName):
    """A PTR (reverse lookup) record name, linked to its IPs."""
    record_type = "PTR"
    def __init__(self,name):
        ResName.__init__(self,name)
        self.res_ip_list=[]  # (view_list, ResIp) pairs
    def add_res_ip(self,res_ip,view_list):
        self.res_ip_list.append( (view_list,res_ip) )
class ResIp(object):
    """An IP address taking part in resolutions.

    IPs hash by their unique sequential id, compare equal by address
    string, and order via ip_cmp() (presumably imported via
    ``from dinosaurus_lib.config import *`` -- TODO confirm). The large
    commented-out block below is the hand-rolled ordering it replaced.
    NOTE: this class uses Python-2-only syntax (print statements,
    dict.has_key()).
    """
    def __init__(self,ip):
        # Reverse-lookup zone names are normalised to the bare address.
        self.ip=ip.replace(".in-addr.arpa.","")
        self.res={}            # key -> list of (view_list, record) pairs
        self.res_name_list=[]  # (view_list, ResName) back-links
        self.id=get_ip_id()    # unique sequential id
    def __hash__(self):
        return hash(self.id)
    def __str__(self): return "["+str(self.id)+"] "+self.ip
    def __eq__(self,other): return self.ip==other.ip
    def __lt__(self,other):
        # Delegate ordering to the ip_cmp() helper.
        return ip_cmp(self.ip,other.ip) < 0
#        if other.ip=="::1": return False
#        if self.ip=="::1":
#            return True
#        try:
#            s_t=map(int,self.ip.split(".")[:4])
#        except ValueError, e:
#            return True
#        try:
#            o_t=map(int,other.ip.split(".")[:4])
#        except ValueError, e:
#            return False
#        if (s_t[0] in [ 127,0,10 ]) and (o_t[0] not in [ 127,0,10 ]):
#            return True
#        if (s_t[0] not in [ 127,0,10 ]) and (o_t[0] in [ 127,0,10 ]):
#            return False
#        if (s_t[0]==172) and (s_t[1] in range(16,32)):
#            if (o_t[0]!=172): return True
#            if (o_t[1] not in range(16,32)): return True
#        if (o_t[0]==172) and (o_t[1] in range(16,32)):
#            if (s_t[0]!=172): return False
#            if (s_t[1] not in range(16,32)): return False
#        if (s_t[0]==192) and (s_t[1]==168):
#            if (o_t[0]!=192): return True
#            if (o_t[1]!=168): return True
#        if (o_t[0]==192) and (o_t[1]==168):
#            if (s_t[0]!=192): return False
#            if (s_t[1]!=168): return False
#        if (s_t[0]==169) and (s_t[1]==254):
#            if (o_t[0]!=169): return True
#            if (o_t[1]!=254): return True
#        if (o_t[0]==169) and (o_t[1]==254):
#            if (s_t[0]!=169): return False
#            if (s_t[1]!=254): return False
#        for n in range(0,4):
#            if s_t[n]<o_t[n]: return True
#            if s_t[n]>o_t[n]: return False
#        return False
    def __ne__(self,other): return not self.__eq__(other)
    def __le__(self,other):
        if self.__eq__(other): return True
        return self.__lt__(other)
    def __gt__(self,other): return other.__lt__(self)
    def __ge__(self,other):
        if self.__eq__(other): return True
        return self.__gt__(other)
    def print_res_name_list(self):
        # Debug dump of the linked names and the views they appear in.
        for vlist,rec in self.res_name_list:
            print rec
            for v in vlist:
                print "  ",v
    def add_res(self,name,record,view_list):
        # NOTE(review): dict.has_key() is Python 2 only.
        if not self.res.has_key(name):
            self.res[name]=[]
        self.res[name].append( (view_list,record) )
    def add_res_name(self,res_name,view_list):
        # Link IP -> name and back-link name -> IP under the same views.
        self.res_name_list.append( (view_list,res_name) )
        res_name.res_ip_list.append( (view_list,self) )
|
chiara-paci/dinosaurus
|
dinosaurus_lib/resolutions.py
|
Python
|
gpl-3.0
| 4,987
|
# Copyright (C) 2009-2013 Roman Zimbelmann <hut@hut.pm>
# This software is distributed under the terms of the GNU GPL version 3.
# TODO: Add an optional "!" to all commands and set a flag if it's there
import os
import ranger
import re
from collections import deque
from ranger.api import *
from ranger.core.shared import FileManagerAware
from ranger.ext.lazy_property import lazy_property
# Matches "name=value" setting lines: group 1 = option name, group 2 = value.
_SETTINGS_RE = re.compile(r'^\s*([^\s]+?)=(.*)$')
DELETE_WARNING = 'delete seriously? '  # COMPAT
def alias(*_): pass  # COMPAT: kept so old configs calling alias() don't break
class CommandContainer(object):
    """Registry mapping command names to Command classes.

    Supports aliases (one name expanding to a full command line) and
    bulk-loading commands from modules or arbitrary objects.
    """
    def __init__(self):
        self.commands = {}

    def __getitem__(self, key):
        return self.commands[key]

    def alias(self, name, full_command):
        """Register *name* as an alias expanding to *full_command*."""
        try:
            cmd = type(name, (AliasCommand, ), dict())
            cmd._based_function = name
            cmd._function_name = name
            cmd._object_name = name
            cmd._line = full_command
            self.commands[name] = cmd
        except Exception:
            # BUG FIX: was a bare "except:", which also swallowed
            # SystemExit and KeyboardInterrupt.
            pass

    def load_commands_from_module(self, module):
        """Register every Command subclass defined in *module*."""
        for var in vars(module).values():
            try:
                if issubclass(var, Command) and var != Command \
                        and var != FunctionCommand:
                    self.commands[var.get_name()] = var
            except TypeError:
                # issubclass() raises TypeError for non-class values.
                pass

    def load_commands_from_object(self, obj, filtr):
        """Wrap the callables of *obj* whose names appear in *filtr*."""
        for attribute_name in dir(obj):
            if attribute_name[0] == '_' or attribute_name not in filtr:
                continue
            attribute = getattr(obj, attribute_name)
            if hasattr(attribute, '__call__'):
                cmd = type(attribute_name, (FunctionCommand, ), dict())
                cmd._based_function = attribute
                cmd._function_name = attribute.__name__
                cmd._object_name = obj.__class__.__name__
                self.commands[attribute_name] = cmd

    def get_command(self, name, abbrev=True):
        """Look up a command, optionally matching unique abbreviations.

        With abbrev=True, raises KeyError if nothing matches and
        ValueError ("Ambiguous command") if several commands match with
        no exact hit; with abbrev=False, returns None if not found.
        """
        if abbrev:
            lst = [cls for cmd, cls in self.commands.items()
                   if cls.allow_abbrev and cmd.startswith(name)
                   or cmd == name]
            if len(lst) == 0:
                raise KeyError
            if len(lst) == 1:
                return lst[0]
            # Prefer an exact match over its abbreviation-extensions.
            if self.commands[name] in lst:
                return self.commands[name]
            raise ValueError("Ambiguous command")
        else:
            try:
                return self.commands[name]
            except KeyError:
                return None

    def command_generator(self, start):
        """Return sorted completions: command names starting with *start*."""
        return sorted(cmd + ' ' for cmd in self.commands if cmd.startswith(start))
class Command(FileManagerAware):
    """Abstract command class.

    An instance wraps one typed command line. Subclasses override
    execute()/tab()/quick()/cancel(); helpers such as arg(), rest() and
    start() slice the line in word-aware ways.
    """
    name = None
    allow_abbrev = True
    resolve_macros = True
    escape_macros_for_shell = False
    quantifier = None
    _shifted = 0
    _setting_line = None
    def __init__(self, line, quantifier=None):
        self.line = line
        self.args = line.split()
        self.quantifier = quantifier
        self.quickly_executed = False
        try:
            # Everything up to and including the last space of the line.
            self.firstpart = line[:line.rindex(' ') + 1]
        except ValueError:
            self.firstpart = ''
    @classmethod
    def get_name(self):
        # Prefer a `name` set on this class itself (not an inherited one);
        # otherwise fall back to the class name.
        classdict = self.__mro__[0].__dict__
        if 'name' in classdict and classdict['name']:
            return self.name
        else:
            return self.__name__
    def execute(self):
        """Override this"""
    def tab(self):
        """Override this"""
    def quick(self):
        """Override this"""
    def cancel(self):
        """Override this"""
    # Easy ways to get information
    def arg(self, n):
        """Returns the nth space separated word"""
        try:
            return self.args[n]
        except IndexError:
            return ""
    def rest(self, n):
        """Returns everything from and after arg(n)"""
        got_space = True
        word_count = 0
        for i in range(len(self.line)):
            if self.line[i] == " ":
                if not got_space:
                    got_space = True
                    word_count += 1
            elif got_space:
                got_space = False
                # _shifted accounts for words removed by shift().
                if word_count == n + self._shifted:
                    return self.line[i:]
        return ""
    def start(self, n):
        """Returns everything until (inclusively) arg(n)"""
        return ' '.join(self.args[:n]) + " " # XXX
    def shift(self):
        # Drop the first word; invalidates the cached setting-line parse.
        del self.args[0]
        self._setting_line = None
        self._shifted += 1
    def tabinsert(self, word):
        # Replace the word currently being completed with *word*.
        return ''.join([self._tabinsert_left, word, self._tabinsert_right])
    def parse_setting_line(self):
        # Cached parse of a ":set option=value" style line into
        # [name, value, name_complete].
        if self._setting_line is not None:
            return self._setting_line
        match = _SETTINGS_RE.match(self.rest(1))
        if match:
            self.firstpart += match.group(1) + '='
            result = [match.group(1), match.group(2), True]
        else:
            result = [self.arg(1), self.rest(2), ' ' in self.rest(1)]
        self._setting_line = result
        return result
    def parse_flags(self):
        """Finds and returns flags in the command

        >>> Command("").parse_flags()
        ('', '')
        >>> Command("foo").parse_flags()
        ('', '')
        >>> Command("shell test").parse_flags()
        ('', 'test')
        >>> Command("shell -t ls -l").parse_flags()
        ('t', 'ls -l')
        >>> Command("shell -f -- -q test").parse_flags()
        ('f', '-q test')
        >>> Command("shell -foo -bar rest of the command").parse_flags()
        ('foobar', 'rest of the command')
        """
        flags = ""
        args = self.line.split()
        rest = ""
        if len(args) > 0:
            rest = self.line[len(args[0]):].lstrip()
            for arg in args[1:]:
                if arg == "--":
                    # Explicit end of flags.
                    rest = rest[2:].lstrip()
                    break
                elif len(arg) > 1 and arg[0] == "-":
                    rest = rest[len(arg):].lstrip()
                    flags += arg[1:]
                else:
                    break
        return flags, rest
    # XXX: Lazy properties? Not so smart? self.line can change after all!
    @lazy_property
    def _tabinsert_left(self):
        try:
            # Line content up to the start of the word at self.pos.
            return self.line[:self.line[0:self.pos].rindex(' ') + 1]
        except ValueError:
            return ''
    @lazy_property
    def _tabinsert_right(self):
        return self.line[self.pos:]
    # COMPAT: this is still used in old commands.py configs
    def _tab_only_directories(self):
        from os.path import dirname, basename, expanduser, join
        cwd = self.fm.thisdir.path
        rel_dest = self.rest(1)
        # expand the tilde into the user directory
        if rel_dest.startswith('~'):
            rel_dest = expanduser(rel_dest)
        # define some shortcuts
        abs_dest = join(cwd, rel_dest)
        abs_dirname = dirname(abs_dest)
        rel_basename = basename(rel_dest)
        rel_dirname = dirname(rel_dest)
        try:
            # are we at the end of a directory?
            if rel_dest.endswith('/') or rel_dest == '':
                _, dirnames, _ = next(os.walk(abs_dest))
            # are we in the middle of the filename?
            else:
                _, dirnames, _ = next(os.walk(abs_dirname))
                dirnames = [dn for dn in dirnames \
                            if dn.startswith(rel_basename)]
        except (OSError, StopIteration):
            # os.walk found nothing
            pass
        else:
            dirnames.sort()
            # no results, return None
            if len(dirnames) == 0:
                return
            # one result. since it must be a directory, append a slash.
            if len(dirnames) == 1:
                return self.start(1) + join(rel_dirname, dirnames[0]) + '/'
            # more than one result. append no slash, so the user can
            # manually type in the slash to advance into that directory
            return (self.start(1) + join(rel_dirname, dirname)
                    for dirname in dirnames)
    def _tab_directory_content(self):
        from os.path import dirname, basename, expanduser, join
        cwd = self.fm.thisdir.path
        rel_dest = self.rest(1)
        # expand the tilde into the user directory
        if rel_dest.startswith('~'):
            rel_dest = expanduser(rel_dest)
        # define some shortcuts
        abs_dest = join(cwd, rel_dest)
        abs_dirname = dirname(abs_dest)
        rel_basename = basename(rel_dest)
        rel_dirname = dirname(rel_dest)
        try:
            directory = self.fm.get_directory(abs_dest)
            # are we at the end of a directory?
            if rel_dest.endswith('/') or rel_dest == '':
                if directory.content_loaded:
                    # Take the order from the directory object
                    names = [f.basename for f in directory.files]
                    if self.fm.thisfile.basename in names:
                        # Rotate so completion starts at the selected file.
                        i = names.index(self.fm.thisfile.basename)
                        names = names[i:] + names[:i]
                else:
                    # Fall back to old method with "os.walk"
                    _, dirnames, filenames = next(os.walk(abs_dest))
                    names = dirnames + filenames
                    names.sort()
            # are we in the middle of the filename?
            else:
                if directory.content_loaded:
                    # Take the order from the directory object
                    names = [f.basename for f in directory.files \
                             if f.basename.startswith(rel_basename)]
                    if self.fm.thisfile.basename in names:
                        i = names.index(self.fm.thisfile.basename)
                        names = names[i:] + names[:i]
                else:
                    # Fall back to old method with "os.walk"
                    _, dirnames, filenames = next(os.walk(abs_dirname))
                    names = [name for name in (dirnames + filenames) \
                             if name.startswith(rel_basename)]
                    names.sort()
        except (OSError, StopIteration):
            # os.walk found nothing
            pass
        else:
            # no results, return None
            if len(names) == 0:
                return
            # one result. append a slash if it's a directory
            if len(names) == 1:
                path = join(rel_dirname, names[0])
                slash = '/' if os.path.isdir(path) else ''
                return self.start(1) + path + slash
            # more than one result. append no slash, so the user can
            # manually type in the slash to advance into that directory
            return (self.start(1) + join(rel_dirname, name) for name in names)
    def _tab_through_executables(self):
        from ranger.ext.get_executables import get_executables
        programs = [program for program in get_executables() if \
                    program.startswith(self.rest(1))]
        if not programs:
            return
        if len(programs) == 1:
            return self.start(1) + programs[0]
        programs.sort()
        return (self.start(1) + program for program in programs)
class FunctionCommand(Command):
    """A Command wrapping a plain callable.

    Command-line tokens are parsed into positional and keyword arguments:
    each "key=value" token becomes a keyword argument, everything else is
    positional; values are coerced to int, bool ('True'/'False') or float
    where possible.
    """
    _based_function = None
    _object_name = ""
    _function_name = "unknown"
    def execute(self):
        if not self._based_function:
            return
        if len(self.args) == 1:
            # No arguments given: try passing the quantifier as "narg".
            try:
                return self._based_function(**{'narg':self.quantifier})
            except TypeError:
                return self._based_function()
        args, keywords = list(), dict()
        for arg in self.args[1:]:
            equal_sign = arg.find("=")
            # BUG FIX: was "equal_sign is -1" -- identity comparison with
            # an int only works by accident of CPython's small-int caching.
            value = arg if (equal_sign == -1) else arg[equal_sign + 1:]
            try:
                value = int(value)
            except ValueError:  # narrowed from a bare "except:"
                if value in ('True', 'False'):
                    value = (value == 'True')
                else:
                    try:
                        value = float(value)
                    except ValueError:  # narrowed from a bare "except:"
                        pass
            if equal_sign == -1:
                args.append(value)
            else:
                keywords[arg[:equal_sign]] = value
        if self.quantifier is not None:
            keywords['narg'] = self.quantifier
        try:
            if self.quantifier is None:
                return self._based_function(*args, **keywords)
            else:
                # Retry without "narg" if the function doesn't accept it.
                try:
                    return self._based_function(*args, **keywords)
                except TypeError:
                    del keywords['narg']
                    return self._based_function(*args, **keywords)
        except TypeError:
            if ranger.arg.debug:
                raise
            else:
                self.fm.notify("Bad arguments for %s.%s: %s, %s" %
                        (self._object_name, self._function_name,
                            repr(args), repr(keywords)), bad=True)
class AliasCommand(Command):
    """A Command that expands to another command line when run.

    Every public operation is delegated to a freshly-built instance of
    the aliased command.
    """
    _based_function = None
    _object_name = ""
    _function_name = "unknown"
    _line = ""
    def execute(self):
        return self._make_cmd().execute()
    def quick(self):
        return self._make_cmd().quick()
    def tab(self):
        return self._make_cmd().tab()
    def cancel(self):
        return self._make_cmd().cancel()
    def _make_cmd(self):
        # Build the target command from the stored alias line plus any
        # extra arguments typed after the alias, forwarding our settings.
        cmd_class = self.fm.commands.get_command(self._line.split()[0])
        cmd = cmd_class(self._line + ' ' + self.rest(1))
        for attr in ('quickly_executed', 'quantifier',
                     'escape_macros_for_shell', 'resolve_macros',
                     'allow_abbrev'):
            setattr(cmd, attr, getattr(self, attr))
        return cmd
if __name__ == '__main__':
    # Run the embedded doctests (e.g. Command.parse_flags) when this
    # module is executed directly.
    import doctest
    doctest.testmod()
|
mathstuf/ranger
|
ranger/api/commands.py
|
Python
|
gpl-3.0
| 13,965
|
# -*- coding: utf-8 -*-
# !/usr/bin/python
################################### PART0 DESCRIPTION #################################
# Filename: test_decorator.py
# Description:
#
# Author: Shuai Yuan
# E-mail: ysh329@sina.com
# Create: 2016-02-14 23:15:46
# Last:
__author__ = 'yuens'
################################### PART1 IMPORT ######################################
################################### PART2 CLASS && FUNCTION ###########################
def log_of_function(undecorated_function):
    """Decorator: print a log line on entry and exit of the wrapped function.

    BUG FIX: the wrapper previously took ``(args, **kw)``, i.e. a single
    positional parameter named "args", so any decorated function called
    with zero or more than one positional argument raised TypeError.
    It now takes ``(*args, **kw)`` and forwards any call signature.
    """
    def record_start_and_end_log(*args, **kw):
        print("Function {0} start.".format(undecorated_function.__name__))
        n = undecorated_function(*args, **kw)
        print("Function {0} end.".format(undecorated_function.__name__))
        return n
    return record_start_and_end_log
@log_of_function
def fun_a(n):
    """Example target for the decorator: print a message, echo *n* back."""
    print("This is {0} from function a.".format(n))
    return n
################################### PART3 CLASS TEST ##################################
# Demonstration (Python 2 print statement below).
n = fun_a(11)
print n
# Expected output:
# Function fun_a start.
# This is 11 from function a.
# Function fun_a end.
# 11
|
ysh329/learn-python-characteristic
|
decorator/test_decorator.py
|
Python
|
gpl-3.0
| 1,109
|
from math import log as _log
from sympify import _sympify
from cache import cacheit
from core import C
from singleton import S
from expr import Expr
from sympy.core.function import (_coeff_isneg, expand_complex,
expand_multinomial, expand_mul)
from sympy.core.logic import fuzzy_bool
from sympy.core.compatibility import as_int
from sympy.mpmath.libmp import sqrtrem as mpmath_sqrtrem
from sympy.utilities.iterables import sift
def integer_nthroot(y, n):
    """
    Return a tuple containing x = floor(y**(1/n))
    and a boolean indicating whether the result is exact (that is,
    whether x**n == y).

    >>> from sympy import integer_nthroot
    >>> integer_nthroot(16,2)
    (4, True)
    >>> integer_nthroot(26,2)
    (5, False)
    """
    y, n = int(y), int(n)
    if y < 0:
        raise ValueError("y must be nonnegative")
    if n < 1:
        raise ValueError("n must be positive")
    # Trivial cases: 0 and 1 are their own roots; first root is y itself.
    if y in (0, 1) or n == 1:
        return y, True
    if n == 2:
        x, rem = mpmath_sqrtrem(y)
        return int(x), not rem
    if n > y:
        # y >= 2 here, so the root is strictly between 1 and 2.
        return 1, False
    # Initial estimate for Newton's method, guarding against float overflow.
    try:
        guess = int(y**(1./n) + 0.5)
    except OverflowError:
        exp = _log(y, 2)/n
        if exp > 53:
            # Keep the float mantissa in range, then shift back up.
            shift = int(exp - 53)
            guess = int(2.0**(exp - shift) + 1) << shift
        else:
            guess = int(2.0**exp)
    if guess > 2**50:
        # Beyond float precision: refine with integer Newton iteration
        # until consecutive estimates are within 1 of each other.
        xprev, x = -1, guess
        while abs(x - xprev) >= 2:
            t = x**(n - 1)
            xprev, x = x, ((n - 1)*x + y//t)//n
    else:
        x = guess
    # Correct the estimate in unit steps to the exact floor.
    t = x**n
    while t < y:
        x += 1
        t = x**n
    while t > y:
        x -= 1
        t = x**n
    return x, t == y
class Pow(Expr):
    """Symbolic exponentiation: Pow(b, e) represents b**e."""
    is_Pow = True
    # Only is_commutative is stored per instance (assigned in __new__).
    __slots__ = ['is_commutative']
    @cacheit
    def __new__(cls, b, e, evaluate=True):
        """Construct b**e, applying automatic simplifications if evaluate."""
        from sympy.functions.elementary.exponential import exp_polar
        # don't optimize "if e==0; return 1" here; it's better to handle that
        # in the calling routine so this doesn't get called
        b = _sympify(b)
        e = _sympify(e)
        if evaluate:
            if e is S.Zero:
                return S.One
            elif e is S.One:
                return b
            elif S.NaN in (b, e):
                if b is S.One:  # already handled e == 0 above
                    return S.One
                return S.NaN
            else:
                # recognize base as E
                if not e.is_Atom and b is not S.Exp1 and b.func is not exp_polar:
                    from sympy import numer, denom, log, sign, im, factor_terms
                    c, ex = factor_terms(e, sign=False).as_coeff_Mul()
                    den = denom(ex)
                    if den.func is log and den.args[0] == b:
                        # b**(c/log(b) * x)  ->  E**(c*x)
                        return S.Exp1**(c*numer(ex))
                    elif den.is_Add:
                        s = sign(im(b))
                        if s.is_Number and s and den == \
                            log(-factor_terms(b, sign=False)) + s*S.ImaginaryUnit*S.Pi:
                            return S.Exp1**(c*numer(ex))
                # Let the base try to evaluate the power itself.
                obj = b._eval_power(e)
                if obj is not None:
                    return obj
        obj = Expr.__new__(cls, b, e)
        obj.is_commutative = (b.is_commutative and e.is_commutative)
        return obj
    @property
    def base(self):
        # b in b**e
        return self._args[0]
    @property
    def exp(self):
        # e in b**e
        return self._args[1]
    @classmethod
    def class_key(cls):
        # NOTE(review): appears to be the key used for canonical ordering of
        # expression types -- confirm against Basic.class_key.
        return 3, 2, cls.__name__
    def _eval_power(self, other):
        """Try to evaluate (b**e)**other; return None if unsure it is valid."""
        from sympy.functions.elementary.exponential import log
        b, e = self.as_base_exp()
        b_nneg = b.is_nonnegative
        if b.is_real and not b_nneg and e.is_even:
            # Even power of a real base equals the same power of |b|.
            b = abs(b)
            b_nneg = True
        # Special case for when b is nan. See pull req 1714 for details
        if b is S.NaN:
            smallarg = (abs(e) <= S.Zero)
        else:
            smallarg = (abs(e) <= abs(S.Pi/log(b)))
        if (other.is_Rational and other.q == 2 and
            e.is_real is False and smallarg is False):
            return -Pow(b, e*other)
        # Exponents may only be combined in these known-safe cases.
        if (other.is_integer or
            e.is_real and (b_nneg or abs(e) < 1) or
            e.is_real is False and smallarg is True or
            b.is_polar):
            return Pow(b, e*other)
    def _eval_is_even(self):
        # Three-valued logic: True/False/None (None = unknown).
        if self.exp.is_integer and self.exp.is_positive:
            return self.base.is_even
    def _eval_is_positive(self):
        # Three-valued logic: falls through to None when unknown.
        if self.base.is_positive:
            if self.exp.is_real:
                return True
        elif self.base.is_negative:
            if self.exp.is_even:
                return True
            if self.exp.is_odd:
                return False
        elif self.base.is_nonpositive:
            if self.exp.is_odd:
                return False
    def _eval_is_negative(self):
        # Three-valued logic: falls through to None when unknown.
        if self.base.is_negative:
            if self.exp.is_odd:
                return True
            if self.exp.is_even:
                return False
        elif self.base.is_positive:
            if self.exp.is_real:
                return False
        elif self.base.is_nonnegative:
            if self.exp.is_real:
                return False
        elif self.base.is_nonpositive:
            if self.exp.is_even:
                return False
        elif self.base.is_real:
            if self.exp.is_even:
                return False
    def _eval_is_integer(self):
        b, e = self.args
        c1 = b.is_integer
        c2 = e.is_integer
        if c1 is None or c2 is None:
            return None
        if not c1 and e.is_nonnegative:  # rat**nonneg
            return False
        if c1 and c2:  # int**int
            if e.is_nonnegative or e.is_positive:
                return True
            if self.exp.is_negative:
                return False
        if c1 and e.is_negative and e.is_bounded:  # int**neg
            return False
        if b.is_Number and e.is_Number:
            # int**nonneg or rat**?
            # Both numeric: evaluate and inspect the result directly.
            check = Pow(*self.args)
            return check.is_Integer
    def _eval_is_real(self):
        # Three-valued logic: returns None when realness is undecidable.
        real_b = self.base.is_real
        if real_b is None:
            return
        real_e = self.exp.is_real
        if real_e is None:
            return
        if real_b and real_e:
            if self.base.is_positive:
                return True
            else:   # negative or zero (or positive)
                if self.exp.is_integer:
                    return True
                elif self.base.is_negative:
                    if self.exp.is_Rational:
                        # Non-integer rational power of a negative base.
                        return False
        im_b = self.base.is_imaginary
        im_e = self.exp.is_imaginary
        if im_b:
            if self.exp.is_integer:
                # Even powers of imaginaries are real, odd powers imaginary.
                if self.exp.is_even:
                    return True
                elif self.exp.is_odd:
                    return False
            elif (self.exp in [S.ImaginaryUnit, -S.ImaginaryUnit] and
                  self.base in [S.ImaginaryUnit, -S.ImaginaryUnit]):
                return True
            elif self.exp.is_Add:
                # Split off an integer part of the exponent and recurse.
                c, a = self.exp.as_coeff_Add()
                if c and c.is_Integer:
                    return C.Mul(
                        self.base**c, self.base**a, evaluate=False).is_real
        if real_b and im_e:
            if self.base is S.NegativeOne:
                return True
            c = self.exp.coeff(S.ImaginaryUnit)
            if c:
                # real**(I*c) is real iff c*log(base)/pi is an integer.
                ok = (c*C.log(self.base)/S.Pi).is_Integer
                if ok is not None:
                    return ok
    def _eval_is_odd(self):
        # Three-valued logic: True/False/None (None = unknown).
        if self.exp.is_integer:
            if self.exp.is_positive:
                return self.base.is_odd
            elif self.exp.is_nonnegative and self.base.is_odd:
                # odd**0 == 1 is odd too.
                return True
    def _eval_is_bounded(self):
        # Three-valued logic: returns None when boundedness is undecidable.
        if self.exp.is_negative:
            if self.base.is_infinitesimal:
                # infinitesimal**negative blows up.
                return False
            if self.base.is_unbounded:
                # unbounded**negative tends to zero.
                return True
        c1 = self.base.is_bounded
        if c1 is None:
            return
        c2 = self.exp.is_bounded
        if c2 is None:
            return
        if c1 and c2:
            if self.exp.is_nonnegative or self.base.is_nonzero:
                return True
    def _eval_is_polar(self):
        # A power is polar iff its base is polar.
        return self.base.is_polar
    def _eval_subs(self, old, new):
        """Substitute ``old`` with ``new`` in this power.

        Handles two structured cases before the generic machinery:
        matching powers of the same base, e.g.
        (x**(6*y)).subs(x**(3*y), z) -> z**2, and matching an exp()
        pattern against a positive-base power with real exponent, e.g.
        (2**x).subs(exp(x*log(2)), z) -> z.
        """
        if old.func is self.func and self.base == old.base:
            # separate the numeric coefficient from the symbolic part of
            # both exponents; if the symbolic parts agree, the coefficient
            # ratio is the power `new` must be raised to
            coeff1, terms1 = self.exp.as_independent(C.Symbol, as_Add=False)
            coeff2, terms2 = old.exp.as_independent(C.Symbol, as_Add=False)
            if terms1 == terms2:
                pow = coeff1/coeff2
                ok = False  # True if int(pow) == pow OR self.base.is_positive
                try:
                    pow = as_int(pow)
                    ok = True
                except ValueError:
                    # non-integer ratio is only safe for a positive base
                    ok = self.base.is_positive
                if ok:
                    # issue 2081
                    return Pow(new, pow)  # (x**(6*y)).subs(x**(3*y),z)->z**2
        if old.func is C.exp and self.exp.is_real and self.base.is_positive:
            coeff1, terms1 = old.args[0].as_independent(C.Symbol, as_Add=False)
            # we can only do this when the base is positive AND the exponent
            # is real
            coeff2, terms2 = (self.exp*C.log(self.base)).as_independent(
                C.Symbol, as_Add=False)
            if terms1 == terms2:
                pow = coeff1/coeff2
                if pow == int(pow) or self.base.is_positive:
                    return Pow(new, pow)  # (2**x).subs(exp(x*log(2)), z) -> z
def as_base_exp(self):
"""Return base and exp of self.
If base is 1/Integer, then return Integer, -exp. If this extra
processing is not needed, the base and exp properties will
give the raw arguments
Examples
========
>>> from sympy import Pow, S
>>> p = Pow(S.Half, 2, evaluate=False)
>>> p.as_base_exp()
(2, -2)
>>> p.args
(1/2, 2)
"""
b, e = self.args
if b.is_Rational and b.p == 1:
return Integer(b.q), -e
return b, e
def _eval_adjoint(self):
from sympy.functions.elementary.complexes import adjoint
i, p = self.exp.is_integer, self.base.is_positive
if i:
return adjoint(self.base)**self.exp
if p:
return self.base**adjoint(self.exp)
if i is False and p is False:
expanded = expand_complex(self)
if expanded != self:
return adjoint(expanded)
def _eval_conjugate(self):
from sympy.functions.elementary.complexes import conjugate as c
i, p = self.exp.is_integer, self.base.is_positive
if i:
return c(self.base)**self.exp
if p:
return self.base**c(self.exp)
if i is False and p is False:
expanded = expand_complex(self)
if expanded != self:
return c(expanded)
def _eval_transpose(self):
from sympy.functions.elementary.complexes import transpose
i, p = self.exp.is_integer, self.base.is_complex
if p:
return self.base**self.exp
if i:
return transpose(self.base)**self.exp
if i is False and p is False:
expanded = expand_complex(self)
if expanded != self:
return transpose(expanded)
def _eval_expand_power_exp(self, **hints):
"""a**(n+m) -> a**n*a**m"""
b = self.base
e = self.exp
if e.is_Add and e.is_commutative:
expr = []
for x in e.args:
expr.append(Pow(self.base, x))
return Mul(*expr)
return Pow(b, e)
    def _eval_expand_power_base(self, **hints):
        """(a*b)**n -> a**n * b**n

        Separation is only done automatically for factors that are known
        safe (nonnegative, polar, or powers of I); with ``force=True`` (or
        an integer exponent) every commutative factor is separated.
        """
        force = hints.get('force', False)
        b = self.base
        e = self.exp
        if not b.is_Mul:
            return self
        # split the base into commutative and non-commutative factors
        cargs, nc = b.args_cnc(split_1=False)
        # expand each term - this is top-level-only
        # expansion but we have to watch out for things
        # that don't have an _eval_expand method
        if nc:
            nc = [i._eval_expand_power_base(**hints)
                if hasattr(i, '_eval_expand_power_base') else i
                for i in nc]
            if e.is_Integer:
                # integer powers of a non-commutative product are written
                # out by repetition
                if e.is_positive:
                    rv = Mul(*nc*e)
                else:
                    rv = 1/Mul(*nc*-e)
                if cargs:
                    rv *= Mul(*cargs)**e
                return rv
            if not cargs:
                return Pow(Mul(*nc), e, evaluate=False)
            nc = [Mul(*nc)]
        # sift the commutative bases
        def pred(x):
            # classification key: I itself, polar (True), nonnegative
            # (fuzzy True/False), or None (unknown)
            if x is S.ImaginaryUnit:
                return S.ImaginaryUnit
            polar = x.is_polar
            if polar:
                return True
            if polar is None:
                return fuzzy_bool(x.is_nonnegative)
        sifted = sift(cargs, pred)
        nonneg = sifted[True]
        other = sifted[None]
        neg = sifted[False]
        imag = sifted[S.ImaginaryUnit]
        if imag:
            # reduce I**len(imag) modulo 4, folding the residual sign
            # into the negative/nonnegative buckets
            I = S.ImaginaryUnit
            i = len(imag) % 4
            if i == 0:
                pass
            elif i == 1:
                other.append(I)
            elif i == 2:
                if neg:
                    nonn = -neg.pop()
                    if nonn is not S.One:
                        nonneg.append(nonn)
                else:
                    neg.append(S.NegativeOne)
            else:
                if neg:
                    nonn = -neg.pop()
                    if nonn is not S.One:
                        nonneg.append(nonn)
                else:
                    neg.append(S.NegativeOne)
                other.append(I)
            del imag
        # bring out the bases that can be separated from the base
        if force or e.is_integer:
            # treat all commutatives the same and put nc in other
            cargs = nonneg + neg + other
            other = nc
        else:
            # this is just like what is happening automatically, except
            # that now we are doing it for an arbitrary exponent for which
            # no automatic expansion is done
            assert not e.is_Integer
            # handle negatives by making them all positive and putting
            # the residual -1 in other
            if len(neg) > 1:
                o = S.One
                if not other and neg[0].is_Number:
                    o *= neg.pop(0)
                if len(neg) % 2:
                    o = -o
                for n in neg:
                    nonneg.append(-n)
                if o is not S.One:
                    other.append(o)
            elif neg and other:
                if neg[0].is_Number and neg[0] is not S.NegativeOne:
                    other.append(S.NegativeOne)
                    nonneg.append(-neg[0])
                else:
                    other.extend(neg)
            else:
                other.extend(neg)
            del neg
            cargs = nonneg
            other += nc
        # assemble: separable factors each get the exponent, the rest stay
        # together under one unevaluated power
        rv = S.One
        if cargs:
            rv *= Mul(*[Pow(b, e, evaluate=False) for b in cargs])
        if other:
            rv *= Pow(Mul(*other), e, evaluate=False)
        return rv
    def _eval_expand_multinomial(self, **hints):
        """(a+b+..) ** n -> a**n + n*a**(n-1)*b + .., n is nonzero integer"""
        base, exp = self.args
        result = self
        if exp.is_Rational and exp.p > 0 and base.is_Add:
            if not exp.is_Integer:
                # split a positive rational exponent into integer part n
                # and a leftover "radical" part, expand base**n and
                # distribute the radical over the terms
                n = Integer(exp.p // exp.q)
                if not n:
                    return result
                else:
                    radical, result = Pow(base, exp - n), []
                    expanded_base_n = Pow(base, n)
                    if expanded_base_n.is_Pow:
                        expanded_base_n = \
                            expanded_base_n._eval_expand_multinomial()
                    for term in Add.make_args(expanded_base_n):
                        result.append(term*radical)
                    return Add(*result)
            n = int(exp)
            if base.is_commutative:
                order_terms, other_terms = [], []
                for b in base.args:
                    if b.is_Order:
                        order_terms.append(b)
                    else:
                        other_terms.append(b)
                if order_terms:
                    # (f(x) + O(x^n))^m -> f(x)^m + m*f(x)^{m-1} *O(x^n)
                    f = Add(*other_terms)
                    o = Add(*order_terms)
                    if n == 2:
                        return expand_multinomial(f**n, deep=False) + n*f*o
                    else:
                        g = expand_multinomial(f**(n - 1), deep=False)
                        return expand_mul(f*g, deep=False) + n*g*o
                if base.is_number:
                    # Efficiently expand expressions of the form (a + b*I)**n
                    # where 'a' and 'b' are real numbers and 'n' is integer.
                    a, b = base.as_real_imag()
                    if a.is_Rational and b.is_Rational:
                        # clear denominators so the binary-powering loop
                        # below runs on plain machine integers; k restores
                        # the common denominator at the end
                        if not a.is_Integer:
                            if not b.is_Integer:
                                k = Pow(a.q * b.q, n)
                                a, b = a.p*b.q, a.q*b.p
                            else:
                                k = Pow(a.q, n)
                                a, b = a.p, a.q*b
                        elif not b.is_Integer:
                            k = Pow(b.q, n)
                            a, b = a*b.q, b.p
                        else:
                            k = 1
                        # binary exponentiation of the Gaussian integer
                        # (a + b*I); (c, d) accumulates the result
                        a, b, c, d = int(a), int(b), 1, 0
                        while n:
                            if n & 1:
                                c, d = a*c - b*d, b*c + a*d
                                n -= 1
                            a, b = a*a - b*b, 2*a*b
                            n //= 2
                        I = S.ImaginaryUnit
                        if k == 1:
                            return c + I*d
                        else:
                            return Integer(c)/k + I*d/k
                p = other_terms
                # (x+y)**3 -> x**3 + 3*x**2*y + 3*x*y**2 + y**3
                # in this particular example:
                # p = [x,y]; n = 3
                # so now it's easy to get the correct result -- we get the
                # coefficients first:
                from sympy import multinomial_coefficients
                from sympy.polys.polyutils import basic_from_dict
                expansion_dict = multinomial_coefficients(len(p), n)
                # in our example: {(3, 0): 1, (1, 2): 3, (0, 3): 1, (2, 1): 3}
                # and now construct the expression.
                return basic_from_dict(expansion_dict, *p)
            else:
                # non-commutative base: multiply out term-by-term
                if n == 2:
                    return Add(*[f*g for f in base.args for g in base.args])
                else:
                    multi = (base**(n - 1))._eval_expand_multinomial()
                    if multi.is_Add:
                        return Add(*[f*g for f in base.args
                            for g in multi.args])
                    else:
                        # XXX can this ever happen if base was an Add?
                        return Add(*[f*multi for f in base.args])
        elif (exp.is_Rational and exp.p < 0 and base.is_Add and
                abs(exp.p) > exp.q):
            # negative rational exponent: expand the reciprocal instead
            return 1 / Pow(base, -exp)._eval_expand_multinomial()
        elif exp.is_Add and base.is_Number:
            # n**(a + b) -> n**a * n**b, where n, a, b are Numbers
            coeff, tail = S.One, S.Zero
            for term in exp.args:
                if term.is_Number:
                    coeff *= Pow(base, term)
                else:
                    tail += term
            return coeff * Pow(base, tail)
        else:
            return result
    def as_real_imag(self, deep=True, **hints):
        """Return (real, imaginary) parts of self = base**exp.

        Integer exponents are expanded via a polynomial in the base's
        real/imaginary parts; rational exponents use the polar form
        (returning only the principal root); anything else defers to
        C.re/C.im on the (possibly expanded) expression.
        """
        from sympy.polys.polytools import poly
        if self.exp.is_Integer:
            exp = self.exp
            re, im = self.base.as_real_imag(deep=deep)
            if not im:
                return self, S.Zero
            a, b = symbols('a b', cls=Dummy)
            if exp >= 0:
                if re.is_Number and im.is_Number:
                    # We can be more efficient in this case
                    expr = expand_multinomial(self.base**exp)
                    return expr.as_real_imag()
                expr = poly(
                    (a + b)**exp)  # a = re, b = im; expr = (a + b*I)**exp
            else:
                # invert first: 1/(re + im*I) = (re - im*I)/|base|**2
                mag = re**2 + im**2
                re, im = re/mag, -im/mag
                if re.is_Number and im.is_Number:
                    # We can be more efficient in this case
                    expr = expand_multinomial((re + im*S.ImaginaryUnit)**-exp)
                    return expr.as_real_imag()
                expr = poly((a + b)**-exp)
            # Terms with even b powers will be real
            r = [i for i in expr.terms() if not i[0][1] % 2]
            re_part = Add(*[cc*a**aa*b**bb for (aa, bb), cc in r])
            # Terms with odd b powers will be imaginary
            r = [i for i in expr.terms() if i[0][1] % 4 == 1]
            im_part1 = Add(*[cc*a**aa*b**bb for (aa, bb), cc in r])
            r = [i for i in expr.terms() if i[0][1] % 4 == 3]
            im_part3 = Add(*[cc*a**aa*b**bb for (aa, bb), cc in r])
            # substitute back; the sign of b encodes the power of I modulo 4
            return (re_part.subs({a: re, b: S.ImaginaryUnit*im}),
                im_part1.subs({a: re, b: im}) + im_part3.subs({a: re, b: -im}))
        elif self.exp.is_Rational:
            # NOTE: This is not totally correct since for x**(p/q) with
            #       x being imaginary there are actually q roots, but
            #       only a single one is returned from here.
            re, im = self.base.as_real_imag(deep=deep)
            r = Pow(Pow(re, 2) + Pow(im, 2), S.Half)
            t = C.atan2(im, re)
            rp, tp = Pow(r, self.exp), t*self.exp
            return (rp*C.cos(tp), rp*C.sin(tp))
        else:
            if deep:
                hints['complex'] = False
                expanded = self.expand(deep, **hints)
                if hints.get('ignore') == expanded:
                    return None
                else:
                    return (C.re(expanded), C.im(expanded))
            else:
                return (C.re(self), C.im(self))
def _eval_derivative(self, s):
dbase = self.base.diff(s)
dexp = self.exp.diff(s)
return self * (dexp * C.log(self.base) + dbase * self.exp/self.base)
    def _eval_evalf(self, prec):
        """Numerically evaluate base**exp to ``prec`` bits.

        Negative powers of a numeric, non-real base are rewritten via the
        conjugate so that the division is carried out with a real
        denominator before re-powering.
        """
        base, exp = self.as_base_exp()
        base = base._evalf(prec)
        if not exp.is_Integer:
            exp = exp._evalf(prec)
        if exp < 0 and base.is_number and base.is_real is False:
            # base**-|exp| -> (conj(base)/|base|**2)**|exp|
            base = base.conjugate() / (base * base.conjugate())._evalf(prec)
            exp = -exp
            return Pow(base, exp).expand()
        return Pow(base, exp)
def _eval_is_polynomial(self, syms):
if self.exp.has(*syms):
return False
if self.base.has(*syms):
return self.base._eval_is_polynomial(syms) and \
self.exp.is_Integer and \
self.exp >= 0
else:
return True
    def _eval_is_rational(self):
        """Rationality of base**exp; None when undecidable."""
        p = self.func(*self.as_base_exp())  # in case it's unevaluated
        if not p.is_Pow:
            # it simplified away from a power: defer to the result
            return p.is_rational
        b, e = p.as_base_exp()
        if e.is_Rational and b.is_Rational:
            # we didn't check that e is not an Integer
            # because Rational**Integer autosimplifies
            return False
        if e.is_integer:
            return b.is_rational
def _eval_is_rational_function(self, syms):
if self.exp.has(*syms):
return False
if self.base.has(*syms):
return self.base._eval_is_rational_function(syms) and \
self.exp.is_Integer
else:
return True
def _eval_is_algebraic_expr(self, syms):
if self.exp.has(*syms):
return False
if self.base.has(*syms):
return self.base._eval_is_algebraic_expr(syms) and \
self.exp.is_Rational
else:
return True
    def as_numer_denom(self):
        """Return (numerator, denominator) of self as a pair of Pows.

        Non-commutative powers are returned whole as (self, 1).
        """
        if not self.is_commutative:
            return self, S.One
        base, exp = self.as_base_exp()
        n, d = base.as_numer_denom()
        # this should be the same as ExpBase.as_numer_denom wrt
        # exponent handling
        neg_exp = exp.is_negative
        if not neg_exp and not (-exp).is_negative:
            # sign is not known from assumptions; fall back to a
            # syntactic check of the leading coefficient
            neg_exp = _coeff_isneg(exp)
        int_exp = exp.is_integer
        # the denominator cannot be separated from the numerator if
        # its sign is unknown unless the exponent is an integer, e.g.
        # sqrt(a/b) != sqrt(a)/sqrt(b) when a=1 and b=-1. But if the
        # denominator is negative the numerator and denominator can
        # be negated and the denominator (now positive) separated.
        if not (d.is_real or int_exp):
            n = base
            d = S.One
        dnonpos = d.is_nonpositive
        if dnonpos:
            n, d = -n, -d
        elif dnonpos is None and not int_exp:
            n = base
            d = S.One
        if neg_exp:
            # a negative exponent flips the fraction
            n, d = d, n
            exp = -exp
        return Pow(n, exp), Pow(d, exp)
def matches(self, expr, repl_dict={}, old=False):
expr = _sympify(expr)
# special case, pattern = 1 and expr.exp can match to 0
if expr is S.One:
d = repl_dict.copy()
d = self.exp.matches(S.Zero, d)
if d is not None:
return d
b, e = expr.as_base_exp()
# special case number
sb, se = self.as_base_exp()
if sb.is_Symbol and se.is_Integer and expr:
if e.is_rational:
return sb.matches(b**(e/se), repl_dict)
return sb.matches(expr**(1/se), repl_dict)
d = repl_dict.copy()
d = self.base.matches(b, d)
if d is None:
return None
d = self.exp.xreplace(d).matches(e, d)
if d is None:
return Expr.matches(self, expr, repl_dict)
return d
    def _eval_nseries(self, x, n, logx):
        # NOTE! This function is an important part of the gruntz algorithm
        #       for computing limits. It has to return a generalized power
        #       series with coefficients in C(log, log(x)). In more detail:
        # It has to return an expression
        #     c_0*x**e_0 + c_1*x**e_1 + ... (finitely many terms)
        # where e_i are numbers (not necessarily integers) and c_i are
        # expressions involving only numbers, the log function, and log(x).
        from sympy import powsimp, collect, exp, log, O, ceiling
        b, e = self.args
        if e.is_Integer:
            if e > 0:
                # positive integer powers are easy to expand, e.g.:
                # sin(x)**4 = (x-x**3/3+...)**4 = ...
                return expand_multinomial(Pow(b._eval_nseries(x, n=n,
                    logx=logx), e), deep=False)
            elif e is S.NegativeOne:
                # this is also easy to expand using the formula:
                # 1/(1 + x) = 1 - x + x**2 - x**3 ...
                # so we need to rewrite base to the form "1+x"
                b = b._eval_nseries(x, n=n, logx=logx)
                prefactor = b.as_leading_term(x)
                # express "rest" as: rest = 1 + k*x**l + ... + O(x**n)
                rest = expand_mul((b - prefactor)/prefactor)
                if rest == 0:
                    # if prefactor == w**4 + x**2*w**4 + 2*x*w**4, we need to
                    # factor the w**4 out using collect:
                    return 1/collect(prefactor, x)
                if rest.is_Order:
                    return 1/prefactor + rest/prefactor
                n2 = rest.getn()
                if n2 is not None:
                    n = n2
                # remove the O - powering this is slow
                if logx is not None:
                    rest = rest.removeO()
                k, l = rest.leadterm(x)
                if l.is_Rational and l > 0:
                    pass
                elif l.is_number and l > 0:
                    l = l.evalf()
                else:
                    raise NotImplementedError()
                # geometric-series expansion: each term is the previous
                # one multiplied by (-rest)
                terms = [1/prefactor]
                for m in xrange(1, ceiling(n/l)):
                    new_term = terms[-1]*(-rest)
                    if new_term.is_Pow:
                        new_term = new_term._eval_expand_multinomial(
                            deep=False)
                    else:
                        new_term = expand_mul(new_term, deep=False)
                    terms.append(new_term)
                # Append O(...), we know the order.
                if n2 is None or logx is not None:
                    terms.append(O(x**n))
                return powsimp(Add(*terms), deep=True, combine='exp')
            else:
                # negative powers are rewritten to the cases above, for
                # example:
                # sin(x)**(-4) = 1/( sin(x)**4) = ...
                # and expand the denominator:
                denominator = (b**(-e))._eval_nseries(x, n=n, logx=logx)
                if 1/denominator == self:
                    return self
                # now we have a type 1/f(x), that we know how to expand
                return (1/denominator)._eval_nseries(x, n=n, logx=logx)
        if e.has(Symbol):
            # symbolic exponent: rewrite b**e as exp(e*log(b))
            return exp(e*log(b))._eval_nseries(x, n=n, logx=logx)
        # see if the base is as simple as possible
        bx = b
        while bx.is_Pow and bx.exp.is_Rational:
            bx = bx.base
        if bx == x:
            return self
        # work for b(x)**e where e is not an Integer and does not contain x
        # and hopefully has no other symbols
        def e2int(e):
            """return the integer value (if possible) of e and a
            flag indicating whether it is bounded or not."""
            n = e.limit(x, 0)
            unbounded = n.is_unbounded
            if not unbounded:
                # XXX was int or floor intended? int used to behave like floor
                # so int(-Rational(1, 2)) returned -1 rather than int's 0
                try:
                    n = int(n)
                except TypeError:
                    #well, the n is something more complicated (like 1+log(2))
                    try:
                        n = int(n.evalf()) + 1  # XXX why is 1 being added?
                    except TypeError:
                        pass  # hope that base allows this to be resolved
            n = _sympify(n)
            return n, unbounded
        order = O(x**n, x)
        ei, unbounded = e2int(e)
        b0 = b.limit(x, 0)
        if unbounded and (b0 is S.One or b0.has(Symbol)):
            # XXX what order
            if b0 is S.One:
                # 1**oo: the limit depends on which side 1 is approached from
                resid = (b - 1)
                if resid.is_positive:
                    return S.Infinity
                elif resid.is_negative:
                    return S.Zero
                raise ValueError('cannot determine sign of %s' % resid)
            return b0**ei
        if (b0 is S.Zero or b0.is_unbounded):
            if unbounded is not False:
                return b0**e  # XXX what order
            if not ei.is_number:  # if not, how will we proceed?
                raise ValueError(
                    'expecting numerical exponent but got %s' % ei)
            nuse = n - ei
            if e.is_real and e.is_positive:
                lt = b.as_leading_term(x)
                # Try to correct nuse (= m) guess from:
                # (lt + rest + O(x**m))**e =
                # lt**e*(1 + rest/lt + O(x**m)/lt)**e =
                # lt**e + ... + O(x**m)*lt**(e - 1) = ... + O(x**n)
                try:
                    cf = C.Order(lt, x).getn()
                    nuse = ceiling(n - cf*(e - 1))
                except NotImplementedError:
                    pass
            bs = b._eval_nseries(x, n=nuse, logx=logx)
            terms = bs.removeO()
            if terms.is_Add:
                bs = terms
                lt = terms.as_leading_term(x)
                # bs -> lt + rest -> lt*(1 + (bs/lt - 1))
                return ((Pow(lt, e) * Pow((bs/lt).expand(), e).nseries(
                    x, n=nuse, logx=logx)).expand() + order)
            if bs.is_Add:
                from sympy import O
                # So, bs + O() == terms
                c = Dummy('c')
                res = []
                for arg in bs.args:
                    if arg.is_Order:
                        arg = c*arg.expr
                    res.append(arg)
                bs = Add(*res)
                rv = (bs**e).series(x).subs(c, O(1))
                rv += order
                return rv
            rv = bs**e
            if terms != bs:
                rv += order
            return rv
        # either b0 is bounded but neither 1 nor 0 or e is unbounded
        # b -> b0 + (b-b0) -> b0 * (1 + (b/b0-1))
        o2 = order*(b0**-e)
        z = (b/b0 - 1)
        o = O(z, x)
        #r = self._compute_oseries3(z, o2, self.taylor_term)
        if o is S.Zero or o2 is S.Zero:
            unbounded = True
        else:
            if o.expr.is_number:
                e2 = log(o2.expr*x)/log(x)
            else:
                e2 = log(o2.expr)/log(o.expr)
            n, unbounded = e2int(e2)
        if unbounded:
            # requested accuracy gives infinite series,
            # order is probably non-polynomial e.g. O(exp(-1/x), x).
            r = 1 + z
        else:
            # Taylor-expand (1 + z)**e term by term up to the needed order
            l = []
            g = None
            for i in xrange(n + 2):
                g = self.taylor_term(i, z, g)
                g = g.nseries(x, n=n, logx=logx)
                l.append(g)
            r = Add(*l)
        return r*b0**e + order
def _eval_as_leading_term(self, x):
if not self.exp.has(x):
return Pow(self.base.as_leading_term(x), self.exp)
return C.exp(self.exp * C.log(self.base)).as_leading_term(x)
@cacheit
def taylor_term(self, n, x, *previous_terms): # of (1+x)**e
if n < 0:
return S.Zero
x = _sympify(x)
return C.binomial(self.exp, n) * Pow(x, n)
def _sage_(self):
return self.args[0]._sage_()**self.args[1]._sage_()
    def as_content_primitive(self, radical=False):
        """Return the tuple (R, self/R) where R is the positive Rational
        extracted from self.

        Examples
        ========

        >>> from sympy import sqrt
        >>> sqrt(4 + 4*sqrt(2)).as_content_primitive()
        (2, sqrt(1 + sqrt(2)))
        >>> sqrt(3 + 3*sqrt(2)).as_content_primitive()
        (1, sqrt(3)*sqrt(1 + sqrt(2)))

        >>> from sympy import expand_power_base, powsimp, Mul
        >>> from sympy.abc import x, y

        >>> ((2*x + 2)**2).as_content_primitive()
        (4, (x + 1)**2)

        >>> (4**((1 + y)/2)).as_content_primitive()
        (2, 4**(y/2))
        >>> (3**((1 + y)/2)).as_content_primitive()
        (1, 3**((y + 1)/2))
        >>> (3**((5 + y)/2)).as_content_primitive()
        (9, 3**((y + 1)/2))

        >>> eq = 3**(2 + 2*x)
        >>> powsimp(eq) == eq
        True
        >>> eq.as_content_primitive()
        (9, 3**(2*x))
        >>> powsimp(Mul(*_))
        3**(2*x + 2)

        >>> eq = (2 + 2*x)**y
        >>> s = expand_power_base(eq); s.is_Mul, s
        (False, (2*x + 2)**y)
        >>> eq.as_content_primitive()
        (1, (2*(x + 1))**y)
        >>> s = expand_power_base(_[1]); s.is_Mul, s
        (True, 2**y*(x + 1)**y)

        See docstring of Expr.as_content_primitive for more examples.
        """
        b, e = self.as_base_exp()
        b = _keep_coeff(*b.as_content_primitive(radical=radical))
        ce, pe = e.as_content_primitive(radical=radical)
        if b.is_Rational:
            # rational base: extract the integer part of the exponent's
            # rational coefficient so b**(that part) becomes the content
            #e
            #= ce*pe
            #= ce*(h + t)
            #= ce*h + ce*t
            #=> self
            #= b**(ce*h)*b**(ce*t)
            #= b**(cehp/cehq)*b**(ce*t)
            #= b**(iceh+r/cehq)*b**(ce*t)
            #= b**(iceh)*b**(r/cehq)*b**(ce*t)
            #= b**(iceh)*b**(ce*t + r/cehq)
            h, t = pe.as_coeff_Add()
            if h.is_Rational:
                ceh = ce*h
                c = Pow(b, ceh)
                r = S.Zero
                if not c.is_Rational:
                    # fractional exponent: keep only the integer power as
                    # content, fold the remainder r/cehq back into self/R
                    iceh, r = divmod(ceh.p, ceh.q)
                    c = Pow(b, iceh)
                return c, Pow(b, _keep_coeff(ce, t + r/ce/ceh.q))
        e = _keep_coeff(ce, pe)
        # b**e = (h*t)**e = h**e*t**e = c*m*t**e
        if e.is_Rational and b.is_Mul:
            h, t = b.as_content_primitive(radical=radical)  # h is positive
            c, m = Pow(h, e).as_coeff_Mul()  # so c is positive
            m, me = m.as_base_exp()
            if m is S.One or me == e:  # probably always true
                # return the following, not return c, m*Pow(t, e)
                # which would change Pow into Mul; we let sympy
                # decide what to do by using the unevaluated Mul, e.g
                # should it stay as sqrt(2 + 2*sqrt(5)) or become
                # sqrt(2)*sqrt(1 + sqrt(5))
                return c, Pow(_keep_coeff(m, t), e)
        return S.One, Pow(b, e)
    def is_constant(self, *wrt, **flags):
        """Constancy of b**e with respect to ``wrt``.

        Constant when both base and exponent are constant; when the base
        varies, the power is constant only if the exponent is
        identically zero.
        """
        if flags.get('simplify', True):
            self = self.simplify()
        b, e = self.as_base_exp()
        bz = b.equals(0)
        if bz:  # recalculate with assumptions in case it's unevaluated
            new = b**e
            if new != self:
                return new.is_constant()
        econ = e.is_constant(*wrt)
        bcon = b.is_constant(*wrt)
        if bcon:
            if econ:
                return True
            # constant base, varying exponent: only 0**e could still be
            # non-constant territory
            bz = b.equals(0)
            if bz is False:
                return False
        elif bcon is None:
            return None
        # base varies: constant iff the exponent is identically 0
        return e.equals(0)
from add import Add
from numbers import Integer
from mul import Mul, _keep_coeff
from symbol import Symbol, Dummy, symbols
|
lidavidm/mathics-heroku
|
venv/lib/python2.7/site-packages/sympy/core/power.py
|
Python
|
gpl-3.0
| 38,801
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add an integer ``role`` field to the ``owner`` model.

    The choice labels are UTF-8 byte strings:
    1 -> b'\\xe6\\xb5\\x8b\\xe8\\xaf\\x95' ("测试" / test),
    2 -> b'\\xe5\\xbc\\x80\\xe5\\x8f\\x91' ("开发" / development).
    The default role is 1.
    """

    dependencies = [
        ('interfapp', '0005_auto_20170322_1104'),
    ]

    operations = [
        migrations.AddField(
            model_name='owner',
            name='role',
            # labels are stored as UTF-8-encoded bytes (b'...' literals)
            field=models.IntegerField(default=1, choices=[(1, b'\xe6\xb5\x8b\xe8\xaf\x95'), (2, b'\xe5\xbc\x80\xe5\x8f\x91')]),
        ),
    ]
|
EmerJuny/djcasemg
|
interfmg/interfapp/migrations/0006_owner_role.py
|
Python
|
gpl-3.0
| 476
|
import datetime
import unittest
from sqlalchemy.exc import IntegrityError
from src.models.author import AuthorModel
from src import engine
from src.models import metadata, BookModel, TopicModel
class CreateBookTests(unittest.TestCase):
    """Persistence tests for BookModel: topic requirement, authors,
    and the ``create`` factory method."""

    @classmethod
    def setUpClass(cls):
        # fresh schema for the whole suite
        metadata.create_all(engine)
        # NOTE(review): this topic is instantiated but never explicitly
        # saved; it appears to be persisted via the relationship when
        # test_save_book assigns it -- confirm against the model cascade.
        cls.topic = TopicModel(name='software engineering')

    def test_save_book(self):
        book = BookModel()
        book.title = 'object-oriented analysis and design with applications ' \
                     'third edition'
        book.publication_date = 2007
        book.read_date = datetime.date.today()
        book.topic = self.topic
        self.assertIsNone(book.save())

    def test_save_book_without_topic(self):
        book = BookModel(
            title='clean code',
            publication_date=2008,
            read_date=datetime.date.today()
        )
        # a book without a topic must violate the topic constraint;
        # assertRaises replaces the manual try/except + self.fail idiom
        # and additionally fails loudly if a *different* exception leaks
        with self.assertRaises(IntegrityError):
            book.save()

    def test_save_book_with_authors(self):
        book = BookModel(
            title='refactoring: improving the design of existing code',
            publication_date=1999,
            read_date=datetime.date.today(),
            topic_id=1
        )
        author = AuthorModel(
            first_name='martin',
            last_name='fowler'
        )
        book.authors.append(author)
        self.assertIsNone(book.save())

    def test_create_book(self):
        topic = TopicModel(name='tdd')
        topic.save()
        book = BookModel.create(
            title='test-driven development: by example',
            publication_date=2002,
            read_date=datetime.date.today(),
            topic_id=topic.id
        )
        self.assertIsNotNone(book.id)
        self.assertIsInstance(book, BookModel)

    @classmethod
    def tearDownClass(cls):
        TopicModel.session.close_all()
        metadata.drop_all(engine)
|
pity7736/books_read
|
tests/models/test_create_book.py
|
Python
|
gpl-3.0
| 1,969
|
import serial
import byte_formatting
# Status-register field names (bit order matches `bitfield_format` below).
# NOTE(review): this list is immediately shadowed by the joystick
# `messages` assignment a few lines down and is therefore dead data --
# confirm whether register decoding is still wanted before removing it.
messages = ["RX Empty Flag",
            "RX Full Flag",
            "Reserved",
            "TX Empty Flag",
            "TX Full Flag",
            "TX Reuse",
            "Reserved",
            "TX fifo Full",
            "RX ready PipeNum",
            "Maximal Retransmits reached",
            "TX Data Sent",
            "RX Data Ready",
            "Register Bank",
            ]
# Width in bits of each status field above, one sublist per field.
bitfield_format = [[1], [1], [2], [1], [1], [1], [1],
                   [1], [3], [1], [1], [1], [1]]

# Joystick channel labels and their 16-bit-per-axis layout; apparently
# intended for byte_formatting.format_int_to_list -- confirm.
messages = ['J0', 'J1', 'J2', 'J3']
size = 16
joystick_bitfieldformat = [[size],[size],[size],[size]]
def main():
    """Continuously echo raw lines arriving on the serial port."""
    port = serial.Serial('/dev/ttyACM0', 115200)
    # readline() blocks until a newline-terminated frame arrives
    while True:
        frame = port.readline()
        print(frame)


if __name__ == "__main__":
    main()
|
JackTheEngineer/Drone
|
scripts/uart.py
|
Python
|
gpl-3.0
| 1,018
|
from random import randint
def quick_sort(input):
    """Return a sorted copy of ``input`` using random-pivot quicksort.

    Time: average O(n log n), worst O(n^2).
    """
    if len(input) <= 1:
        return input
    pivot = input[randint(0, len(input) - 1)]
    # three-way partition around the pivot
    below = [item for item in input if item < pivot]
    same = [item for item in input if item == pivot]
    above = [item for item in input if item > pivot]
    return quick_sort(below) + same + quick_sort(above)
## in-place quick sort
def quick_sort_inplace(a, low=0, high=None):
    """Sort list ``a`` in place over the inclusive range [low, high]."""
    if high is None:
        high = len(a) - 1
    # guard clause: 0- or 1-element ranges are already sorted
    if low >= high:
        return
    pivot_idx = partition(a, low, high)
    # recurse on both sides of the now-settled pivot
    quick_sort_inplace(a, low, pivot_idx - 1)
    quick_sort_inplace(a, pivot_idx + 1, high)
def partition(a, low, high):
    """Lomuto partition: move ``a[high]`` to its final sorted position
    within [low, high] and return that index."""
    pivot = a[high]
    boundary = low - 1  # last index known to hold a value <= pivot
    for j in range(low, high):
        if a[j] <= pivot:
            boundary += 1
            a[boundary], a[j] = a[j], a[boundary]
    # drop the pivot just past the <=-pivot region
    a[boundary + 1], a[high] = a[high], a[boundary + 1]
    return boundary + 1
def create_array(size=20, max_val=500):
    """Return a list of ``size`` random integers drawn from [0, max_val]."""
    return [randint(0, max_val) for _ in range(size)]
if __name__ == "__main__":
    # quick demo of both implementations
    a = create_array()
    print(a)
    print(quick_sort(a))

    b = create_array()
    print("Unsorted: ", b)
    quick_sort_inplace(b)
    print("Sorted: ", b)

    # benchmark both functions across growing input sizes
    from time import time
    times = {'new': [], 'inplace': []}
    sizes = [10, 100, 1000, 10000, 100000]
    samples = 3
    for s in sizes:
        # BUG FIX: the arrays must be created with the benchmarked size
        # `s`; the original always used the default size of 20, so the
        # reported numbers never varied with n.
        total = 0.0
        for _ in range(samples):
            arr = create_array(s)
            t0 = time()
            arr = quick_sort(arr)
            t1 = time()
            total += (t1 - t0)
        times['new'].append(float(total / samples))

        # BUG FIX: reset the accumulator; the original carried the total
        # from the previous loop into the in-place timings.
        total = 0.0
        for _ in range(samples):
            arr = create_array(s)
            t0 = time()
            quick_sort_inplace(arr)
            t1 = time()
            total += (t1 - t0)
        times['inplace'].append(float(total / samples))

    # (typo fix in the report header: "Quicsort" -> "Quicksort")
    print("\nn\tQuicksort\tQuicksort - inplace")
    print("_" * 40)
    for i, s in enumerate(sizes):
        print("%d\t%0.5f \t%0.5f" % (s, times['new'][i], times['inplace'][i]))
    print("\n")
|
NeerajM999/recap-python
|
LearnPython/sorting/quick_sort.py
|
Python
|
gpl-3.0
| 2,178
|
"""CLI tests for Repository setup.
:Requirement: Repository
:CaseAutomation: Automated
:CaseLevel: Component
:CaseComponent: Repositories
:Assignee: chiggins
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import pytest
from broker import VMBroker
from robottelo.cli.factory import make_lifecycle_environment
from robottelo.cli.factory import make_org
from robottelo.config import settings
from robottelo.constants import CONTAINER_REGISTRY_HUB
from robottelo.constants import CONTAINER_UPSTREAM_NAME
from robottelo.constants import DISTRO_RHEL6
from robottelo.constants import DISTROS_SUPPORTED
from robottelo.constants import FAKE_0_CUSTOM_PACKAGE
from robottelo.hosts import ContentHost
from robottelo.products import DockerRepository
from robottelo.products import PuppetRepository
from robottelo.products import RepositoryCollection
from robottelo.products import SatelliteToolsRepository
from robottelo.products import YumRepository
@pytest.fixture
def org():
    """Create a fresh organization for each test."""
    return make_org()
@pytest.fixture
def lce(org):
    """Create a lifecycle environment inside the ``org`` fixture's org."""
    return make_lifecycle_environment({'organization-id': org['id']})
@pytest.mark.tier4
@pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url')
@pytest.mark.parametrize('cdn', [True, False], ids=['cdn', 'no_cdn'])
@pytest.mark.parametrize('distro', DISTROS_SUPPORTED)
def test_vm_install_package(org, lce, distro, cdn, default_sat):
    """Install a package with all supported distros and cdn / non-cdn variants

    :id: b2a6065a-69f6-4805-a28b-eaaa812e0f4b

    :parametrized: yes

    :expectedresults: Package is installed
    """
    if distro == DISTRO_RHEL6:
        pytest.skip(f'{DISTRO_RHEL6!s} skipped until ELS subscriptions are in manifest.')
    # assemble the repository set exercised by this scenario: tools repo
    # (cdn or custom), a yum repo, a docker repo, and a puppet module repo
    repos_collection = RepositoryCollection(
        distro=distro,
        repositories=[
            SatelliteToolsRepository(cdn=cdn, distro=distro),
            YumRepository(url=settings.repos.yum_0.url),
            DockerRepository(url=CONTAINER_REGISTRY_HUB, upstream_name=CONTAINER_UPSTREAM_NAME),
            PuppetRepository(
                url=settings.repos.custom_puppet.url,
                modules=[dict(name='generic_1', author='robottelo')],
            ),
        ],
    )
    # Create repos, content view, and activation key.
    repos_collection.setup_content(org['id'], lce['id'], upload_manifest=True)
    with VMBroker(nick=distro, host_classes={'host': ContentHost}) as host:
        # install katello-agent
        repos_collection.setup_virtual_machine(
            host, default_sat, enable_custom_repos=True, install_katello_agent=False
        )
        # install a package from custom repo
        result = host.execute(f'yum -y install {FAKE_0_CUSTOM_PACKAGE}')
        # a zero exit status means yum resolved and installed the package
        assert result.status == 0
|
lpramuk/robottelo
|
tests/foreman/cli/test_vm_install_products_package.py
|
Python
|
gpl-3.0
| 2,788
|
#!/usr/bin/env python
import sys
import paperwork.backend.config as config
import paperwork.backend.docsearch as docsearch
import paperwork.backend.util as util
def main():
    """Walk every document in the paperwork work dir, gathering word
    statistics and label-prediction accuracy, then print a summary.
    """
    pconfig = config.PaperworkConfig()
    pconfig.read()
    print("Opening docs (%s)" % pconfig.settings['workdir'].value)
    print("====================")
    dsearch = docsearch.DocSearch(pconfig.settings['workdir'].value)

    # corpus-wide counters
    nb_words = 0
    nb_docs = (len(dsearch.docs))
    nb_pages = 0
    max_pages = 0
    total_word_len = 0
    max_word_len = 0
    words = set()
    total_nb_unique_words = 0
    total_nb_unique_words_per_doc = 0

    print("")
    print("Analysis")
    print("========")
    all_labels = set([l.name for l in dsearch.label_list])
    label_keys = ['global', 'positive', 'negative']  # for the order
    total_label_accuracy = {
        'global': 0,
        'positive': 0,
        'negative': 0,
    }
    total_labels = {
        'global': 0,
        'positive': 0,
        'negative': 0,
    }

    for doc in dsearch.docs:
        sys.stdout.write(str(doc) + ": ")
        sys.stdout.flush()
        doc_words = set()
        if doc.nb_pages > max_pages:
            max_pages = doc.nb_pages

        ### Keyword stats
        for page in doc.pages:
            sys.stdout.write("%d " % (page.page_nb + 1))
            sys.stdout.flush()
            nb_pages += 1
            for line in page.text:
                for word in util.split_words(line):
                    # ignore words too short to be useful
                    if (len(word) < 4):
                        continue
                    if not word in words:
                        words.add(word)
                        total_nb_unique_words += 1
                    if not word in doc_words:
                        doc_words.add(word)
                        total_nb_unique_words_per_doc += 1
                    nb_words += 1
                    total_word_len += len(word)
                    if max_word_len < len(word):
                        max_word_len = len(word)

        ### Label predictions stats
        doc_labels = set([l.name for l in doc.labels])
        predicated_labels = set(dsearch.predict_label_list(doc))
        accurate = {
            'global': 0,
            'negative': 0,
            'positive': 0,
        }
        nb_labels = {
            'global': len(all_labels),
            'positive': len(doc_labels),
            'negative': len(all_labels) - len(doc_labels),
        }
        for key in label_keys:
            total_labels[key] += nb_labels[key]
        for label in all_labels:
            # a prediction is accurate when its presence agrees with the
            # document's actual labels (XNOR of the two memberships)
            if not ((label in doc_labels) ^ (label in predicated_labels)):
                accurate['global'] += 1
                total_label_accuracy['global'] += 1
                if label in doc_labels:
                    accurate['positive'] += 1
                    total_label_accuracy['positive'] += 1
                else:
                    accurate['negative'] += 1
                    total_label_accuracy['negative'] += 1
        for key in label_keys:
            total = nb_labels[key]
            if total == 0:
                # avoid division by zero when a doc has no labels of this kind
                continue
            # BUG FIX: the original assigned `value = accurate[key]` twice
            # and never used it; keep a single assignment and use it
            value = accurate[key]
            sys.stdout.write("\n\t- label prediction accuracy (%s): %d%%"
                             % (key, (100 * value / total)))
        sys.stdout.write("\n")

    print("")
    print("Statistics")
    print("==========")
    print("Total number of documents: %d" % nb_docs)
    print("Total number of pages: %d" % nb_pages)
    print("Total number of words: %d" % nb_words)
    print("Total words len: %d" % total_word_len)
    print("Total number of unique words: %d" % total_nb_unique_words)
    print("===")
    print("Maximum number of pages in one document: %d" % max_pages)
    print("Maximum word length: %d" % max_word_len)
    print("Average word length: %f" % (float(total_word_len) / float(nb_words)))
    print("Average number of words per page: %f"
          % (float(nb_words) / float(nb_pages)))
    print("Average number of words per document: %f"
          % (float(nb_words) / float(nb_docs)))
    print("Average number of pages per document: %f"
          % (float(nb_pages) / float(nb_docs)))
    print("Average number of unique words per document: %f"
          % (float(total_nb_unique_words_per_doc) / float(nb_docs)))
    for key in label_keys:
        total = total_labels[key]
        value = total_label_accuracy[key]
        if total == 0:
            # ROBUSTNESS FIX: an empty corpus used to crash here with a
            # ZeroDivisionError; skip the line instead
            continue
        print("Average accuracy of label prediction (%s): %d%%"
              % (key, (100 * value / total)))


if __name__ == "__main__":
    main()
|
mjourdan/paperwork
|
scripts/stats.py
|
Python
|
gpl-3.0
| 4,660
|
__author__ = 'kaef'
import defaults as cfg
from Clicker import Clicker
from ScreenCapture import ScreenCapture
def main():
    """Drive the page-capture loop: configure the clicker and screen
    grabber from ``defaults`` config, then capture each page in turn."""
    import time
    # give the user time to focus the target window before clicking
    time.sleep(4)

    clicker = Clicker()
    clicker.set_blank_page_pos(cfg.blank_page_pos_x, cfg.blank_page_pos_y)
    clicker.set_next_button_pos(cfg.next_button_pos_x, cfg.next_button_pos_y)
    clicker.set_size_button_pos(cfg.size_button_pos_x, cfg.size_button_pos_y)
    clicker.set_vertical_resize_button_pos(
        cfg.vertical_resize_button_pos_x, cfg.vertical_resize_button_pos_y)

    grabber = ScreenCapture(cfg.folder)
    grabber.set_dim(cfg.img_x, cfg.img_y, cfg.img_xx, cfg.img_yy)

    # capture one screenshot per page id, then advance to the next page
    for page_id in range(cfg.start, cfg.finish):
        grabber.capture_with_id(page_id)
        clicker.switch_page()


if __name__ == "__main__":
    main()
|
bkolada/ibuk_scraper
|
Tool/main.py
|
Python
|
gpl-3.0
| 717
|
#coding: utf8
from server import db
from sqlalchemy import DateTime
import datetime
import json
class serializableModel(db.Model):
    """Abstract SQLAlchemy base model adding dict-serialization helpers."""

    __abstract__ = True

    def as_dict(self, recursif=False):
        """Return the row as a plain dict of column values.

        DateTime columns are stringified (when non-empty) so the result is
        JSON-friendly. With ``recursif=True``, relationships are serialized
        too via :meth:`as_dict_withrelationships`.
        """
        if recursif:
            return self.as_dict_withrelationships()
        result = {}
        for column in self.__table__.columns:
            value = getattr(self, column.name)
            if isinstance(column.type, db.DateTime) and value:
                result[column.name] = str(value)
            else:
                result[column.name] = value
        return result

    def as_dict_withrelationships(self):
        """Serialize the columns plus every mapped relationship (one level deep)."""
        data = self.as_dict()
        for rel_name in self.__mapper__.relationships.keys():
            related = getattr(self, rel_name)
            if self.__mapper__.relationships[rel_name].uselist:
                data[rel_name] = [item.as_dict() for item in related]
            else:
                data[rel_name] = related.as_dict()
        return data
|
PnCevennes/SaisieChasse
|
modules/utils/genericmodels.py
|
Python
|
gpl-3.0
| 906
|
#! /usr/bin/python
#
# Copyright (C) 2003-2021 ABINIT group
#
# Written by Gabriel Antonius in python (compatible v2.7).
# This is free software, and you are welcome to redistribute it
# under certain conditions (GNU General Public License,
# see ~abinit/COPYING or http://www.gnu.org/copyleft/gpl.txt).
#
# ABINIT is a project of the Universite Catholique de Louvain,
# Corning Inc. and other collaborators, see ~abinit/doc/developers/contributors.txt.
# Please read ~abinit/doc/biblio/generated_files/bib_acknow.html for suggested
# acknowledgments of the ABINIT effort.
#
# For more information, see https://www.abinit.org .
"""
This script can be run interactively,
but it is recommended to import it as a module:
>>> from merge_ddb_nc import merge_ddb_nc
>>> merge_ddb_nc(out_fname, fnames)
"""
from __future__ import print_function
import numpy as np
import netCDF4 as nc
__version__ = '1.0.0'
def merge_ddb_nc(out_fname, fnames):
    """
    Merge a list of DDB.nc files containing different elements of the same qpoint.

    Arguments
    ---------
    out_fname: Name for the merged file (will overwrite any existing file).
    fnames: List of DDB.nc files. The list is not modified.

    Raises
    ------
    Exception: if fnames is empty, or if the files hold different q-points.
    """
    if not fnames:
        raise Exception('Empty list of files given for merge')

    # The first file seeds the output; the remaining ones are merged into it.
    # Indexing instead of pop(0) so the caller's list is left untouched.
    fname0 = fnames[0]

    with nc.Dataset(out_fname, 'w') as dsout:

        with nc.Dataset(fname0, 'r') as dsin:
            nc_copy(dsin, dsout)
            q0 = dsin.variables[u'q_point_reduced_coord'][...]

        for fname in fnames[1:]:
            with nc.Dataset(fname, 'r') as dsin:

                # Check that the qpoints are the same
                q = dsin.variables[u'q_point_reduced_coord'][...]
                if not all(np.isclose(q0, q)):
                    raise Exception('Cannot merge DDB.nc at different q-points.')

                # Merge dynamical matrix: copy only the elements flagged in
                # the mask of the incoming file. (The old out_dynmat locals
                # that read from dsin were unused and have been removed.)
                dynmat = dsin.variables[u'second_derivative_of_energy'][...]
                dynmat_mask = dsin.variables[u'second_derivative_of_energy_mask'][...]
                ni, nj, nk, nl = dynmat_mask.shape
                for i in range(ni):
                    for j in range(nj):
                        for k in range(nk):
                            for l in range(nl):
                                if dynmat_mask[i, j, k, l]:
                                    dsout.variables[u'second_derivative_of_energy'][i, j, k, l, :] = (
                                        dynmat[i, j, k, l, :])
                                    dsout.variables[u'second_derivative_of_energy_mask'][i, j, k, l] = (
                                        dynmat_mask[i, j, k, l])

                # Born effective charge tensor, merged the same mask-driven way.
                BECT = dsin.variables[u'born_effective_charge_tensor'][...]
                BECT_mask = dsin.variables[u'born_effective_charge_tensor_mask'][...]
                ni, nj, nk = BECT_mask.shape
                for i in range(ni):
                    for j in range(nj):
                        for k in range(nk):
                            if BECT_mask[i, j, k]:
                                dsout.variables[u'born_effective_charge_tensor'][i, j, k] = (
                                    BECT[i, j, k])
                                dsout.variables[u'born_effective_charge_tensor_mask'][i, j, k] = (
                                    BECT_mask[i, j, k])
def nc_copy(dsin, dsout):
    """
    Copy all dimensions and variables of one nc.Dataset instance into another.

    Arguments
    ---------
    dsin: Source dataset (read only).
    dsout: Destination dataset (modified in place).
    """
    # Copy dimensions.
    # dict.items() instead of the Python-2-only iteritems(), so this helper
    # also works under Python 3 (the file already imports print_function).
    for dname, dim in dsin.dimensions.items():
        dsout.createDimension(dname, len(dim))

    # Copy variables: declaration (name, dtype, dimensions) and full content.
    for vname, varin in dsin.variables.items():
        outVar = dsout.createVariable(vname, varin.datatype, varin.dimensions)
        outVar[...] = varin[...]
def interactive_merge_ddb_nc():
    """Get inputs from the user and run merge_ddb_nc."""
    program_name = 'merge_ddb_nc'
    description = """Merge several DDB.nc files, belonging to the same q-point."""

    # raw_input only exists on Python 2; fall back to input() on Python 3
    # so the interactive mode works on both interpreters.
    try:
        read_line = raw_input
    except NameError:
        read_line = input

    def get_user(s):
        # Show the prompt on its own line and strip any trailing '#' comment
        # from the user's answer.
        return read_line(s.rstrip() + '\n').split('#')[0]

    print(program_name)
    print(len(program_name) * '-')
    print(description + '\n')

    ui = get_user('Enter a name for the output file in which to merge (will overwrite any existing file):')
    out_fname = str(ui)

    ui = get_user('Enter the number of files to merge:')
    nfiles = int(ui)

    fnames = list()
    for i in range(nfiles):
        ui = get_user('Enter the name of file {}:'.format(i + 1))
        fname = str(ui)
        fnames.append(fname)

    # Main execution
    print('Executing...')
    merge_ddb_nc(out_fname, fnames)
    print('All done.')
# =========================================================================== #
# Run interactive program
# =========================================================================== #
if __name__ == '__main__':
interactive_merge_ddb_nc()
|
abinit/abinit
|
scripts/post_processing/merge_ddb_nc.py
|
Python
|
gpl-3.0
| 4,948
|
# This file is part of RiakKit.
#
# RiakKit is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RiakKit is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with RiakKit. If not, see <http://www.gnu.org/licenses/>.
"""This is the top level riakkit module. It provides a shortcut to the package
contents in a convinient fashion.
It imports everything from under commons.properties as well as
commons.exceptions It also import SimpleDocument, BaseDocument, and Document.
This also sets up EmDocument"""
from riakkit.simple import SimpleDocument, BaseDocument
EmDocument = BaseDocument
from riakkit.document import Document, getClassGivenBucketName
from riakkit.commons.properties import *
from riakkit.commons.exceptions import *
# PEP 386 versioning: (major, minor, micro) plus an optional suffix tag.
VERSION = (0, 6, 0, 'a')
# '0.6.0' from the numeric part, with the suffix tag appended directly
# (no separator), e.g. '0.6.0a'.
__version__ = ('.'.join(map(str, VERSION[:3])) + '.'.join(VERSION[3:]))
__author__ = "Shuhao Wu"
__url__ = "https://github.com/shuhaowu/riakkit"
|
shuhaowu/riakkit
|
riakkit/__init__.py
|
Python
|
gpl-3.0
| 1,382
|
"""Test class for Remote Execution Management UI
@Requirement: Remoteexecution
@CaseAutomation: Automated
@CaseLevel: Acceptance
@CaseComponent: UI
@TestType: Functional
@CaseImportance: High
@Upstream: No
"""
from datetime import datetime, timedelta
from nailgun import entities
from robottelo.constants import OS_TEMPLATE_DATA_FILE, DISTRO_RHEL7
from robottelo.datafactory import (
gen_string,
generate_strings_list,
invalid_values_list,
)
from robottelo.decorators import stubbed, tier1, tier2, tier3
from robottelo.helpers import add_remote_execution_ssh_key, get_data_file
from robottelo.test import UITestCase
from robottelo.ui.factory import make_job_template, set_context
from robottelo.ui.locators import common_locators, locators
from robottelo.ui.session import Session
from robottelo.vm import VirtualMachine
OS_TEMPLATE_DATA_FILE = get_data_file(OS_TEMPLATE_DATA_FILE)
class JobsTemplateTestCase(UITestCase):
    """Test class for jobs template feature.

    NOTE(review): ``self.jobtemplate``, ``self.hosts`` and ``self.browser``
    appear to be provided by the ``UITestCase`` base class — confirm against
    robottelo's test framework.
    """

    @classmethod
    def setUpClass(cls):
        """Create an organization and host which can be re-used in tests."""
        super(JobsTemplateTestCase, cls).setUpClass()
        cls.organization = entities.Organization().create()
        cls.host = entities.Host(organization=cls.organization).create()
        # Switch the host OS family to Red Hat before running the tests.
        entities.OperatingSystem(
            id=cls.host.operatingsystem.id, family='Redhat').update(['family'])

    @tier1
    def test_positive_create_simple_job_template(self):
        """Create a simple Job Template

        @id: 7cb1e5b0-5420-47c5-bb43-e2c58bed7a9d

        @Steps:

        1. Navigate to Hosts -> Job Templates
        2. Enter a valid name
        3. Populate the template code
        4. Navigate to the job tab
        5. Enter a job name
        6. Click submit

        @Assert: The job template was successfully created
        """
        with Session(self.browser) as session:
            for name in generate_strings_list():
                with self.subTest(name):
                    make_job_template(
                        session,
                        name=name,
                        template_type='input',
                        template_content=gen_string('alphanumeric', 500),
                    )
                    self.assertIsNotNone(self.jobtemplate.search(name))

    @tier1
    def test_positive_template_upload(self):
        """Use a template file to populate the job template

        @id: 976cf310-b2af-41bd-845a-f08baa2e8490

        @Setup: Create or use a pre-made job template file

        @Steps:

        1. Create a new job template.
        2. Enter a valid name
        3. Click the upload button to upload a template from the file
        4. Select the file with the desired template

        @Assert: Verify the template correctly imported the file's contents
        """
        with Session(self.browser) as session:
            for name in generate_strings_list():
                with self.subTest(name):
                    make_job_template(
                        session,
                        name=name,
                        template_type='file',
                        template_content=OS_TEMPLATE_DATA_FILE,
                    )
                    self.assertIsNotNone(self.jobtemplate.search(name))

    @tier1
    def test_positive_create_job_template_input(self):
        """Create a Job Template using input

        @id: dbaf5aa9-101d-47dc-bdf8-d5b4d1a52396

        @Steps:

        1. Navigate to Hosts -> Job Templates
        2. Enter a name
        3. Navigate to the job tab
        4. Enter a job name
        5. Click the +Add Input button
        6. Add an appropriate name
        7. Choose an input type
        8. Populate the template code and reference the newly created input
        9. Click submit

        @Assert: The job template was successfully saved with new input added
        """
        name = gen_string('alpha')
        var_name = gen_string('alpha')
        with Session(self.browser) as session:
            make_job_template(
                session,
                name=name,
                template_type='input',
                template_content=gen_string('alphanumeric', 20),
            )
            self.assertIsNotNone(self.jobtemplate.search(name))
            self.jobtemplate.add_input(name, var_name)
            # Reference the new input from the template body (ERB syntax).
            self.jobtemplate.update(
                name,
                template_type='input',
                template_content='<%= input("{0}") %>'.format(var_name)
            )

    @tier1
    def test_negative_create_job_template(self):
        """Create Job Template with invalid name

        @id: 79342781-1369-4d1f-a512-ca1a809d98fb

        @Steps:

        1. Navigate to Hosts -> Job Templates
        2. Enter an invalid name
        3. Click submit

        @Assert: Job Template with invalid name cannot be created and error is
        raised
        """
        with Session(self.browser) as session:
            for name in invalid_values_list('ui'):
                with self.subTest(name):
                    make_job_template(
                        session,
                        name=name,
                        template_type='input',
                        template_content=gen_string('alphanumeric', 20),
                    )
                    self.assertIsNotNone(self.jobtemplate.wait_until_element(
                        common_locators['name_haserror']))

    @tier1
    def test_negative_create_job_template_with_same_name(self):
        """Create Job Template with duplicate name

        @id: 2c193758-dc34-4701-863c-f2823851223a

        @Steps:

        1. Create a new job template.
        2. Enter a name that has already been used
        3. Click submit

        @Assert: The name duplication is caught and error is raised
        """
        name = gen_string('alpha')
        with Session(self.browser) as session:
            make_job_template(
                session,
                name=name,
                template_type='input',
                template_content=gen_string('alphanumeric', 20),
            )
            self.assertIsNotNone(self.jobtemplate.search(name))
            # Second creation with the identical name must be rejected.
            make_job_template(
                session,
                name=name,
                template_type='input',
                template_content=gen_string('alphanumeric', 20),
            )
            self.assertIsNotNone(self.jobtemplate.wait_until_element(
                common_locators['name_haserror']))

    @tier1
    def test_positive_delete_job_template(self):
        """Delete a job template

        @id: b25e4fb9-ad75-407d-b15f-76df381c4f9c

        @Setup: Create a valid job template.

        @Steps:

        1. Click the dropdown next to the Job Template's Run button
        2. Select Delete from the list
        3. Confirm the deletion

        @Assert: The Job Template has been deleted
        """
        name = gen_string('alpha')
        with Session(self.browser) as session:
            make_job_template(
                session,
                name=name,
                template_type='input',
                template_content=gen_string('alphanumeric', 20),
            )
            self.jobtemplate.delete(name)

    @tier1
    def test_positive_clone_job_template(self):
        """Clone a Job Template

        @id: a1ec5d1d-907f-4d18-93d3-adb1134d9cca

        @Setup: Create a valid job template.

        @Steps:

        1. Navigate to Hosts -> Job Templates
        2. Click the clone button next to a job template
        3. Change the name
        4. Click submit

        @Assert: Verify all job template contents were successfully copied
        """
        name = gen_string('alpha')
        clone_name = gen_string('alpha')
        with Session(self.browser) as session:
            make_job_template(
                session,
                name=name,
                template_type='input',
                template_content=gen_string('alphanumeric', 20),
            )
            self.assertIsNotNone(self.jobtemplate.search(name))
            self.jobtemplate.clone(name, clone_name)
            self.assertIsNotNone(self.jobtemplate.search(clone_name))

    @tier1
    def test_positive_view_diff(self):
        """View diff within template editor

        @id: 4b8fff93-4862-4119-bb97-aadc50fc817d

        @Setup: Create a valid job template.

        @Steps:

        1. Open the job template created during setup
        2. Modify the template's code
        3. Click the Diff button

        @Assert: Verify that the new changes are displayed in the window
        """
        name = gen_string('alpha')
        old_template = gen_string('alpha')
        new_template = gen_string('alphanumeric')
        with Session(self.browser) as session:
            make_job_template(
                session,
                name=name,
                template_type='input',
                template_content=old_template,
            )
            self.jobtemplate.click(self.jobtemplate.search(name))
            self.jobtemplate.assign_value(
                locators['job.template_input'], new_template)
            self.jobtemplate.click(common_locators['ace.diff'])
            template_text = self.jobtemplate.wait_until_element(
                locators['job.template_input']).text
            # The diff view prefixes removed lines with '-' and added with '+'.
            self.assertIn('-' + old_template, template_text)
            self.assertIn('+' + new_template, template_text)

    @tier1
    def test_positive_preview_verify(self):
        """Use preview within the job template editor to verify template

        @id: 4b4939f3-c056-4716-8071-e8fa00233e3e

        @Steps:

        1. Create a new job template.
        2. Add input controls under jobs
        3. Reference those input controls in the template text
        4. Select "preview" within the template viewer

        @Assert: Verify no errors are thrown
        """
        name = gen_string('alpha')
        var_name = gen_string('alpha')
        with Session(self.browser) as session:
            make_job_template(
                session,
                name=name,
                template_type='input',
                template_content=gen_string('alpha'),
                org=self.organization.name,
            )
            self.jobtemplate.add_input(name, var_name)
            self.jobtemplate.update(
                name,
                template_type='input',
                template_content='<%= input("{0}") %>'.format(var_name)
            )
            self.jobtemplate.click(self.jobtemplate.search(name))
            self.jobtemplate.click(common_locators['ace.preview'])
            # Preview renders inputs as $USER_INPUT[<name>] placeholders.
            self.assertEqual(
                u'$USER_INPUT[{0}]'.format(var_name),
                self.jobtemplate.wait_until_element(
                    locators['job.template_input']).text
            )

    @tier1
    def test_negative_preview_verify(self):
        """Use a template file to populate the job template

        @id: 8c0d132c-b500-44b5-a549-d32c7636a712

        @Steps:

        1. Create a new job template
        2. Add input controls under jobs
        3. Incorrectly reference those input controls in the template text
        4. And/or reference non-existent input controls in the template text
        5. Select "preview" within the template viewer

        @Assert: Verify appropriate errors are thrown
        """
        name = gen_string('alpha')
        with Session(self.browser) as session:
            make_job_template(
                session,
                name=name,
                template_type='input',
                template_content=gen_string('alpha'),
                org=self.organization.name,
            )
            self.jobtemplate.add_input(name, gen_string('alpha'))
            # Reference an input name that was never added, so preview fails.
            self.jobtemplate.update(
                name,
                template_type='input',
                template_content='<%= input("{0}") %>'.format(
                    gen_string('alphanumeric'))
            )
            self.jobtemplate.click(self.jobtemplate.search(name))
            self.jobtemplate.click(common_locators['ace.preview'])
            self.assertIsNotNone(self.jobtemplate.wait_until_element(
                common_locators['alert.error']))
class RemoteExecutionTestCase(UITestCase):
    """Test class for remote execution feature.

    NOTE(review): tests spin up real VirtualMachine content hosts and drive
    the UI through ``self.hosts`` / ``self.job`` (presumably supplied by
    ``UITestCase``) — confirm against robottelo's test framework.
    """

    @classmethod
    def setUpClass(cls):
        """Create an organization which can be re-used in tests."""
        super(RemoteExecutionTestCase, cls).setUpClass()
        cls.organization = entities.Organization().create()

    @tier2
    def test_positive_run_default_job_template(self):
        """Run a job template against a single host

        @id: 7f0cdd1a-c87c-4324-ae9c-dbc30abad217

        @Setup: Use pre-defined job template.

        @Steps:

        1. Navigate to an individual host and click Run Job
        2. Select the job and appropriate template
        3. Run the job

        @Assert: Verify the job was successfully ran against the host

        @CaseLevel: Integration
        """
        with VirtualMachine(distro=DISTRO_RHEL7) as client:
            # Register the VM as a content host and allow SSH-based rex.
            client.install_katello_ca()
            client.register_contenthost(self.organization.label, lce='Library')
            add_remote_execution_ssh_key(client.ip_addr)
            with Session(self.browser) as session:
                set_context(session, org=self.organization.name)
                self.hosts.click(self.hosts.search(client.hostname))
                status = self.job.run(
                    job_category='Commands',
                    job_template='Run Command - SSH Default',
                    options_list=[{'name': 'command', 'value': 'ls'}]
                )
                self.assertTrue(status)

    @tier3
    def test_positive_run_custom_job_template(self):
        """Run a job template against a single host

        @id: 89b75feb-afff-44f2-a2bd-2ffe74b63ec7

        @Setup: Create a working job template.

        @Steps:

        1. Navigate to an individual host and click Run Job
        2. Select the job and appropriate template
        3. Run the job

        @Assert: Verify the job was successfully ran against the host

        @CaseLevel: System
        """
        jobs_template_name = gen_string('alpha')
        with VirtualMachine(distro=DISTRO_RHEL7) as client:
            client.install_katello_ca()
            client.register_contenthost(self.organization.label, lce='Library')
            add_remote_execution_ssh_key(client.ip_addr)
            with Session(self.browser) as session:
                set_context(session, org=self.organization.name)
                # Template body just echoes the user-provided command input.
                make_job_template(
                    session,
                    name=jobs_template_name,
                    template_type='input',
                    template_content='<%= input("command") %>',
                    provider_type='SSH',
                )
                self.assertIsNotNone(
                    self.jobtemplate.search(jobs_template_name))
                self.jobtemplate.add_input(
                    jobs_template_name, 'command', required=True)
                self.hosts.click(self.hosts.search(client.hostname))
                status = self.job.run(
                    job_category='Miscellaneous',
                    job_template=jobs_template_name,
                    options_list=[{'name': 'command', 'value': 'ls'}]
                )
                self.assertTrue(status)

    @tier3
    def test_positive_run_job_template_multiple_hosts(self):
        """Run a job template against multiple hosts

        @id: 7f1981cb-afcc-49b7-a565-7fef9aa8ddde

        @Setup: Create a working job template.

        @Steps:

        1. Navigate to the hosts page and select at least two hosts
        2. Click the "Select Action"
        3. Select the job and appropriate template
        4. Run the job

        @Assert: Verify the job was successfully ran against the hosts

        @CaseLevel: System
        """
        with VirtualMachine(distro=DISTRO_RHEL7) as client:
            with VirtualMachine(distro=DISTRO_RHEL7) as client2:
                for vm in client, client2:
                    vm.install_katello_ca()
                    vm.register_contenthost(
                        self.organization.label, lce='Library')
                    add_remote_execution_ssh_key(vm.ip_addr)
                with Session(self.browser) as session:
                    set_context(session, org=self.organization.name)
                    self.hosts.navigate_to_entity()
                    self.hosts.update_host_bulkactions(
                        [client.hostname, client2.hostname],
                        action='Run Job',
                        parameters_list=[{'command': 'ls'}],
                    )
                    # Wait up to 240s for the invocation to report success.
                    strategy, value = locators['job_invocation.status']
                    self.job.wait_until_element(
                        (strategy, value % 'succeeded'), 240)

    @tier3
    def test_positive_run_scheduled_job_template(self):
        """Schedule a job to be ran against a host

        @id: 35c8b68e-1ac5-4c33-ad62-a939b87f76fb

        @Setup: Use pre-defined job template.

        @Steps:

        1. Navigate to an individual host and click Run Job
        2. Select the job and appropriate template
        3. Select "Schedule Future Job"
        4. Enter a desired time for the job to run
        5. Click submit

        @Assert:

        1. Verify the job was not immediately ran
        2. Verify the job was successfully ran after the designated time

        @CaseLevel: System
        """
        with VirtualMachine(distro=DISTRO_RHEL7) as client:
            client.install_katello_ca()
            client.register_contenthost(self.organization.label, lce='Library')
            add_remote_execution_ssh_key(client.ip_addr)
            with Session(self.browser) as session:
                set_context(session, org=self.organization.name)
                self.hosts.click(self.hosts.search(client.hostname))
                # Schedule the run 90 seconds in the future.
                plan_time = (datetime.now() + timedelta(seconds=90)).strftime(
                    "%Y-%m-%d %H:%M")
                status = self.job.run(
                    job_category='Commands',
                    job_template='Run Command - SSH Default',
                    options_list=[{'name': 'command', 'value': 'ls'}],
                    schedule='future',
                    schedule_options=[
                        {'name': 'start_at', 'value': plan_time}],
                    result='queued'
                )
                self.assertTrue(status)
                # The job must first leave the 'queued' state, then succeed.
                strategy, value = locators['job_invocation.status']
                self.job.wait_until_element_is_not_visible(
                    (strategy, value % 'queued'), 95)
                self.job.wait_until_element(
                    (strategy, value % 'succeeded'), 30)

    @stubbed()
    @tier3
    def test_positive_run_job_against_provisioned_rhel6_host(self):
        """Run a job against a single provisioned RHEL 6 host

        @id: 7cc94029-69a0-43e0-8ce5-fdf802d0addc

        @Setup:

        1. Provision a RHEL 6 host.
        2. Create a working job template.

        @Steps:

        1. Navigate to the provisioned host and click Run Job
        2. Select the created job and appropriate template
        3. Click submit

        @Assert: Verify the job was successfully ran on the provisioned host

        @caseautomation: notautomated

        @CaseLevel: System
        """

    @stubbed()
    @tier3
    def test_positive_run_job_against_provisioned_rhel7_host(self):
        """Run a job against a single provisioned RHEL 7 host

        @id: e911edfb-abcf-4ea2-940d-44f3e4de1954

        @Setup:

        1. Provision a RHEL 7 host.
        2. Create a working job template.

        @Steps:

        1. Navigate to the provisioned host and click Run Job
        2. Select the created job and appropriate template
        3. Click submit

        @Assert: Verify the job was successfully ran on the provisioned host

        @caseautomation: notautomated

        @CaseLevel: System
        """

    @stubbed()
    @tier3
    def test_positive_run_job_against_multiple_provisioned_hosts(self):
        """Run a job against multiple provisioned hosts

        @id: 7637f724-924f-478d-88d8-25f500335236

        @Setup:

        1. Provision at least two hosts (RHEL6/7 preferred).
        2. Create a working job template.

        @Steps:

        1. Navigate to the hosts page and select all provisioned hosts
        2. Click Select Action -> Run Job
        3. Select the created job and appropriate template
        4. Click submit

        @Assert: Verify the job was successfully ran on the provisioned hosts

        @caseautomation: notautomated

        @CaseLevel: System
        """
|
sthirugn/robottelo
|
tests/foreman/ui/test_remoteexecution.py
|
Python
|
gpl-3.0
| 20,834
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import sys
import os
from GLXCurses.Utils import clamp_to_zero
from GLXCurses.Utils import clamp
from GLXCurses.Utils import resize_text
from GLXCurses.Utils import glxc_type
from GLXCurses.Utils import new_id
from GLXCurses.Utils import is_valid_id
from GLXCurses.Utils import merge_dicts
from GLXCurses import Window
# Unittest
class TestUtils(unittest.TestCase):
    """Unit tests for the helper functions in GLXCurses.Utils."""

    def test_glxc_type(self):
        """Test Utils.glxc_type()"""
        self.assertTrue(glxc_type(Window()))
        self.assertFalse(glxc_type(int()))
        self.assertFalse(glxc_type())

    def test_clamp_to_zero(self):
        """Test Utils.clamp_to_zero()"""
        self.assertEqual(0, clamp_to_zero(None))
        self.assertEqual(0, clamp_to_zero(-42))
        self.assertEqual(0, clamp_to_zero(0))
        self.assertEqual(42, clamp_to_zero(42))
        self.assertRaises(TypeError, clamp_to_zero, float(42.42))

    def test_clamp(self):
        """Test Utils.clamp()"""
        self.assertEqual(42, clamp(value=1, smallest=42, largest=100))
        self.assertEqual(42.0, clamp(value=1.0, smallest=42, largest=100))
        self.assertEqual(42, clamp(value=100, smallest=0, largest=42))
        self.assertEqual(42.0, clamp(value=100.0, smallest=0, largest=42))
        self.assertRaises(TypeError, clamp, value=str(''), smallest=0, largest=42)
        self.assertRaises(TypeError, clamp, value=100, smallest=str(''), largest=42)
        self.assertRaises(TypeError, clamp, value=100, smallest=0, largest=str(''))

    def test_resize_text(self):
        """Test Utils.resize_text()"""
        # Fixed copy-pasted docstring: this method exercises resize_text(),
        # not clamp_to_zero().
        text = "123456789"
        width = 10
        self.assertEqual(text, resize_text(text, width, '~'))
        width = 9
        self.assertEqual(text, resize_text(text, width, '~'))
        width = 8
        self.assertEqual('123~789', resize_text(text, width, '~'))
        width = 7
        self.assertEqual('123~789', resize_text(text, width, '~'))
        width = 6
        self.assertEqual('12~89', resize_text(text, width, '~'))
        width = 5
        self.assertEqual('12~89', resize_text(text, width, '~'))
        width = 4
        self.assertEqual('1~9', resize_text(text, width, '~'))
        width = 3
        self.assertEqual('1~9', resize_text(text, width, '~'))
        width = 2
        self.assertEqual('19', resize_text(text, width, '~'))
        width = 1
        self.assertEqual('1', resize_text(text, width, '~'))
        width = 0
        self.assertEqual('', resize_text(text, width, '~'))
        width = -1
        self.assertEqual('', resize_text(text, width, '~'))
        # Test Error
        self.assertRaises(TypeError, resize_text, text=text, max_width=width, separator=int(42))
        self.assertRaises(TypeError, resize_text, text=text, max_width='coucou', separator='~')
        self.assertRaises(TypeError, resize_text, text=int(42), max_width=width, separator='~')

    def test_id_generator(self):
        """Test Utils.id_generator()"""
        id_1 = new_id()
        self.assertTrue(is_valid_id(id_1))
        self.assertEqual(len(id_1), 8)
        # max_iteration = 10000000 - Take 99.114s on Intel(R) Core(TM) i7-2860QM CPU @ 2.50GHz
        # max_iteration = 1000000 - Take 9.920s on Intel(R) Core(TM) i7-2860QM CPU @ 2.50GHz
        # max_iteration = 100000 - Take 0.998s on Intel(R) Core(TM) i7-2860QM CPU @ 2.50GHz
        # max_iteration = 10000 - Take 0.108s on Intel(R) Core(TM) i7-2860QM CPU @ 2.50GHz
        max_iteration = 10000
        for _ in range(1, max_iteration):
            id_2 = new_id()
            self.assertEqual(len(id_2), 8)
            self.assertNotEqual(id_1, id_2)

    def test_is_valid_id(self):
        """Test Utils.is_valid_id()"""
        id_1 = new_id()
        self.assertTrue(is_valid_id(id_1))
        self.assertFalse(is_valid_id(42))

    def test_merge_dicts(self):
        """Test Utils.merge_dicts()"""
        # Renamed from the anonymous 'test_' so failures are identifiable;
        # unittest still discovers it via the 'test' prefix.
        result_wanted1 = {
            'a': 4,
            'b': 2
        }
        value1 = {
            'a': 4
        }
        value2 = {
            'b': 2
        }
        self.assertEqual(result_wanted1, merge_dicts(value1, value2))
        # On key collisions the right-hand dict wins.
        value1 = {
            'a': 0,
            'b': 0
        }
        value2 = {
            'a': 4,
            'b': 2
        }
        self.assertEqual(value2, merge_dicts(value1, value2))
|
Tuuux/galaxie-curses
|
test/test_Utils.py
|
Python
|
gpl-3.0
| 4,386
|
#! /usr/bin/env python
# TuxTruck Playlist
# Time-stamp: "2008-05-27 16:23:58 jantman"
# $Id: TuxTruck_Playlist.py,v 1.7 2008-05-27 20:23:34 jantman Exp $
#
# Copyright 2008 Jason Antman. Licensed under GNU GPLv3 or latest version (at author's discretion).
# Jason Antman - jason@jasonantman.com - http://www.jasonantman.com
# Project web site at http://www.jasonantman.com/tuxtruck/
# NOTE: This depends on elementtree from <http://effbot.org/zone/element-index.htm>
import elementtree.ElementTree as ET
import os
import re
class TuxTruck_Playlist():
    """
    This handles *all* playlist-related functions. Specifically, this loads playlists and provides methods parse and write the playlist XML files.
    """

    current_file = ""
    playlistROOT = None  # this will be the elementtree root node
    playlistTREE = None  # this will be the full elementtree tree
    playlist_root_path = ""  # as found in settings/audio
    songsROOT = None  # this is the elementtree root for the songs entries

    def __init__(self, parent, playlistroot):
        # TODO: here, we should really load the last playlist, or a default one.
        self.parent = parent
        self.playlist_root_path = playlistroot

    def ReadPlaylist(self, file_path):
        """
        This function reads in a playlist and parses it. After calling this, you can get the individual entries. file_path is the *absolute* path to the playlist file.
        """
        #print "Reading playlist from "+file_path # DEBUG
        self.current_file = file_path  # TODO: but only if reading it works
        self.playlistTREE = ET.parse(file_path)
        self.playlistROOT = self.playlistTREE.getroot()
        self.songsROOT = self.playlistROOT.find("songs")

    def IsInPlaylist(self, filepath):
        """
        Returns TRUE if the playlist located at playlistFile contains a (song) entry containing exactly filepath as its "filepath" field content. Otherwise returns false.
        """
        # TODO: create a GetSongByName() function
        songsIter = self.songsROOT.getiterator()
        for entry in songsIter:
            if entry.findtext('filepath') == filepath:
                return True
        return False

    def WriteCurrentPlaylist(self):
        """
        This function WRITES the current playlist changes to the playlist file. It's mainly used when building a new playlist, to write the complete tree, or when updating the rank of a file. Path is absolute.
        """
        #print "Writing playlist to "+self.current_file # debug
        # wrap it in an ElementTree instance, and save as XML
        tree = ET.ElementTree(self.playlistROOT)
        tree.write(self.current_file)

    def GetEntryByPos(self, pos):
        """
        This gets an entry by position number in the playlist. Takes the integer position as an argument, returns a 3-tuple (pos (int), filepath (string), displayName (string)). File path is relative to MP3_ROOT.
        """
        # TODO: not implemented yet.

    def GetNextEntry(self):
        """
        This function returns the next entry in the playlist. It returns a 3-tuple (pos (int), filepath (string), displayName (string)). filepath is relative to MP3_ROOT.
        """
        # TODO: not implemented yet.

    def AddEntry(self, filepath, displayName, title, artist, album, genre):
        """
        This function adds an entry to the current playlist. All entry information is specified as arguments - please see the playlist documentation for an explanation of the fields. Any fields that do not have an appropriate value should be sent as an empty string - "". WriteCurrentPlaylist() must be called to write out the playlist to the file.
        """
        #print "Adding entry with displayName "+displayName+" and filepath="+filepath+" to "+self.current_file # DEBUG
        entry = ET.SubElement(self.songsROOT, "playlist_entry")
        filepathElem = ET.SubElement(entry, "filepath")
        filepathElem.text = filepath
        displayNameElem = ET.SubElement(entry, "displayName")
        displayNameElem.text = displayName
        # Optional metadata fields are only written when non-empty.
        if title != "":
            titleElem = ET.SubElement(entry, "title")
            titleElem.text = title
        if artist != "":
            artistElem = ET.SubElement(entry, "artist")
            artistElem.text = artist
        if album != "":
            albumElem = ET.SubElement(entry, "album")
            albumElem.text = album
        if genre != "":
            genreElem = ET.SubElement(entry, "genre")
            genreElem.text = genre

    def CreateBlankPlaylist(self, filepath, name, type):
        """
        This function creates a skeleton of a blank playlist, ready for adding entries to (for use when building playlists from disk files). Entries are added with AddEntry(). When finished, it is written to disk with WriteCurrentPlaylist(). Arguments are filepath, name and type as seen in the playlist documentation. Path is absolute.
        """
        # TODO: also include playlist name, type, etc.
        self.current_file = filepath  # where we'll save to
        self.playlistROOT = ET.Element("TuxTruck_Playlist")  # create the root node
        typeElem = ET.SubElement(self.playlistROOT, "type")
        typeElem.text = type
        nameElem = ET.SubElement(self.playlistROOT, "name")
        nameElem.text = name
        self.songsROOT = ET.SubElement(self.playlistROOT, "songs")
        #print "CreateBlankPlaylist name="+name+" type="+type+" path="+filepath # DEBUG

    def ReadOrCreatePlaylist(self, filepath, name, type):
        """
        This function is used by playlist builders. If the specified path exists, it reads it in. If not, it creates it. Filepath is an absolute path.
        """
        #print "ReadOrCreatePlaylist name="+name+" type="+type+" path="+filepath # DEBUG
        if os.path.exists(filepath):
            self.ReadPlaylist(filepath)
        else:
            self.CreateBlankPlaylist(filepath, name, type)

    def ChangeRank(self, rank):
        """
        This function changes the integer rank of the current file. Rank should be an int, either -1, 0, or 1.
        """
        # TODO: implement the rank change itself.
        # Bug fix: was a bare WriteCurrentPlaylist() call (NameError at
        # runtime); it is an instance method and needs self.
        self.WriteCurrentPlaylist()  # write out the changes immediately.

    def MakePlaylistFilename(self, s):
        """
        This function formats a string (such as an artist name or album name) as a playlist file. It converts it to a lower case string containing only the characters [a-z],[0-9],"-","_", and ".". It also appends the ".ttpl" extension. A single, double, or back quote is simply removed, everything else invalid is replaced with a dash, and spaces are replaced with underscores.
        """
        s = self.MakeValidFilename(s)  # call MakeValidFilename to handle most of the replacements
        s = s.lower()  # convert to lower case
        s = s + ".ttpl"  # append ".ttpl"
        return s

    def MakeValidFilename(self, s):
        """
        This functions converts a string to a valid filename. All it does is make sure the string only contains the characters [a-z],[0-9],"-","_", and ".". A single, double, or back quote is simply removed, everything else invalid is replaced with a dash, and spaces are replaced with underscores.
        """
        # Bug fix: guard the leading-dot loop so an empty (or all-dot)
        # string no longer raises IndexError.
        while s and s[0] == ".":
            # remove leading dots
            s = s[1:]
        s = re.sub(" +", "_", s)  # replace space with _
        s = s.replace("'", "")  # replace ' with nothing
        s = s.replace("\"", "")  # replace " with nothing
        s = s.replace("`", "")  # replace ` with nothing
        # Bug fix: the documented "replace everything else with -" step was
        # only a comment; actually replace any remaining invalid character.
        s = re.sub(r"[^A-Za-z0-9_.-]", "-", s)
        return s
|
jantman/TuxTruck-wxPython
|
TuxTruck_Playlist.py
|
Python
|
gpl-3.0
| 7,587
|
########################################################################
# File : CREAMComputingElement.py
# Author : A.T.
########################################################################
""" CREAM Computing Element
"""
__RCSID__ = "$Id$"
import os
import re
import tempfile
import stat
from types import StringTypes
from DIRAC import S_OK, S_ERROR
from DIRAC.Resources.Computing.ComputingElement import ComputingElement
from DIRAC.Core.Utilities.Grid import executeGridCommand
from DIRAC.Core.Utilities.File import makeGuid
CE_NAME = 'CREAM'
MANDATORY_PARAMETERS = [ 'Queue' ]
class CREAMComputingElement( ComputingElement ):
  """ComputingElement implementation for CREAM CEs.

  Drives the glite-ce-* command line tools (submit, status, cancel,
  proxy-renew) through executeGridCommand, which returns the DIRAC
  S_OK/S_ERROR structure with Value = ( status, stdout, stderr ).
  """
  #############################################################################
  def __init__( self, ceUniqueID ):
    """ Standard constructor.
    """
    ComputingElement.__init__( self, ceUniqueID )
    self.ceType = CE_NAME
    self.submittedJobs = 0
    self.mandatoryParameters = MANDATORY_PARAMETERS
    self.pilotProxy = ''
    self.queue = ''
    self.outputURL = 'gsiftp://localhost'  # placeholder until _reset()/CE config provides one
    self.gridEnv = ''
    self.proxyRenewal = 0  # counter used to throttle delegation renewal in getJobStatus
  #############################################################################
  def _addCEConfigDefaults( self ):
    """Method to make sure all necessary Configuration Parameters are defined
    """
    # First assure that any global parameters are loaded
    ComputingElement._addCEConfigDefaults( self )
  def __writeJDL( self, executableFile ):
    """ Create the JDL for submission

    Writes a temporary CREAM_*.jdl file in the CE working directory and
    returns ( jdlFilePath, diracStamp ).  The random part of the temp file
    name doubles as the "DIRAC stamp" used to name the pilot output files.
    """
    workingDirectory = self.ceParameters['WorkingDirectory']
    fd, name = tempfile.mkstemp( suffix = '.jdl', prefix = 'CREAM_', dir = workingDirectory )
    diracStamp = os.path.basename( name ).replace( '.jdl', '' ).replace( 'CREAM_', '' )
    jdlFile = os.fdopen( fd, 'w' )
    jdl = """
[
  JobType = "Normal";
  Executable = "%(executable)s";
  StdOutput="%(diracStamp)s.out";
  StdError="%(diracStamp)s.err";
  InputSandbox={"%(executableFile)s"};
  OutputSandbox={"%(diracStamp)s.out", "%(diracStamp)s.err"};
  OutputSandboxBaseDestUri="%(outputURL)s";
]
    """ % {
            'executableFile':executableFile,
            'executable':os.path.basename( executableFile ),
            'outputURL':self.outputURL,
            'diracStamp':diracStamp
          }
    jdlFile.write( jdl )
    jdlFile.close()
    return name, diracStamp
  def _reset( self ):
    # Re-read queue, output URL and grid environment from the CE parameters
    self.queue = self.ceParameters['Queue']
    self.outputURL = self.ceParameters.get( 'OutputURL', 'gsiftp://localhost' )
    self.gridEnv = self.ceParameters['GridEnv']
  #############################################################################
  def submitJob( self, executableFile, proxy, numberOfJobs = 1 ):
    """ Method to submit job

    Returns S_OK( [pilotReference, ...] ) with a 'PilotStampDict' entry
    mapping each reference to its DIRAC stamp, or S_ERROR on failure.
    """
    self.log.verbose( "Executable file path: %s" % executableFile )
    if not os.access( executableFile, 5 ):
      # 5 == R_OK|X_OK: make the pilot script readable and executable for all
      os.chmod( executableFile, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH )
    batchIDList = []
    stampDict = {}
    if numberOfJobs == 1:
      # Single job: submit with automatic proxy delegation (-a)
      jdlName, diracStamp = self.__writeJDL( executableFile )
      cmd = ['glite-ce-job-submit', '-n', '-a', '-N', '-r',
             '%s/%s' % ( self.ceName, self.queue ),
             '%s' % jdlName ]
      result = executeGridCommand( self.proxy, cmd, self.gridEnv )
      os.unlink( jdlName )
      if result['OK']:
        if result['Value'][0]:
          # We have got a non-zero status code
          return S_ERROR( 'Pilot submission failed with error: %s ' % result['Value'][2].strip() )
        pilotJobReference = result['Value'][1].strip()
        if not pilotJobReference:
          return S_ERROR( 'No pilot reference returned from the glite job submission command' )
        if not pilotJobReference.startswith( 'https' ):
          return S_ERROR( 'Invalid pilot reference %s' % pilotJobReference )
        batchIDList.append( pilotJobReference )
        stampDict[pilotJobReference] = diracStamp
    else:
      # Bulk submission: delegate the proxy once, then submit each job
      # reusing that delegation (-D) instead of delegating every time.
      delegationID = makeGuid()
      cmd = [ 'glite-ce-delegate-proxy', '-e', '%s' % self.ceName, '%s' % delegationID ]
      result = executeGridCommand( self.proxy, cmd, self.gridEnv )
      if not result['OK']:
        self.log.error( 'Failed to delegate proxy', result['Message'] )
        return result
      for _i in range( numberOfJobs ):
        jdlName, diracStamp = self.__writeJDL( executableFile )
        cmd = ['glite-ce-job-submit', '-n', '-N', '-r',
               '%s/%s' % ( self.ceName, self.queue ),
               '-D', '%s' % delegationID, '%s' % jdlName ]
        result = executeGridCommand( self.proxy, cmd, self.gridEnv )
        os.unlink( jdlName )
        if not result['OK']:
          break  # stop on the first failure; keep the references gathered so far
        if result['Value'][0] != 0:
          break
        pilotJobReference = result['Value'][1].strip()
        if pilotJobReference and pilotJobReference.startswith( 'https' ):
          batchIDList.append( pilotJobReference )
          stampDict[pilotJobReference] = diracStamp
        else:
          break
    if batchIDList:
      result = S_OK( batchIDList )
      result['PilotStampDict'] = stampDict
    else:
      result = S_ERROR( 'No pilot references obtained from the glite job submission' )
    return result
  def killJob( self, jobIDList ):
    """ Kill the specified jobs

    Accepts a single job reference string or a list of references.
    """
    jobList = list( jobIDList )
    if type( jobIDList ) in StringTypes:
      jobList = [ jobIDList ]
    cmd = ['glite-ce-job-cancel', '-n', '-N'] + jobList
    result = executeGridCommand( self.proxy, cmd, self.gridEnv )
    if not result['OK']:
      return result
    if result['Value'][0] != 0:
      # NOTE(review): result['Value'][0] is the integer status code, so
      # result['Value'][0][1] would raise TypeError when this branch is
      # reached -- probably result['Value'][2] (stderr) was intended; confirm.
      return S_ERROR( 'Failed kill job: %s' % result['Value'][0][1] )
    return S_OK()
  #############################################################################
  def getCEStatus( self, jobIDList = None ):
    """ Method to return information on running and pending jobs.
    :param list jobIDList: list of job IDs to be considered
    """
    # Query only the "active" CREAM states; finished jobs are not counted.
    statusList = ['REGISTERED', 'PENDING', 'IDLE', 'RUNNING', 'REALLY-RUNNING']
    cmd = ['glite-ce-job-status', '-n', '-a', '-e',
           '%s' % self.ceName, '-s',
           '%s' % ':'.join( statusList ) ]
    result = executeGridCommand( self.proxy, cmd, self.gridEnv )
    resultDict = {}
    if not result['OK']:
      return result
    if result['Value'][0]:
      # Non-zero status: try to extract a meaningful error message
      if result['Value'][0] == 11:
        return S_ERROR( 'Segmentation fault while calling glite-ce-job-status' )
      elif result['Value'][2]:
        return S_ERROR( result['Value'][2] )
      elif "Authorization error" in result['Value'][1]:
        return S_ERROR( "Authorization error" )
      elif "FaultString" in result['Value'][1]:
        # SOAP fault embedded in stdout: pull out the fault string/detail
        res = re.search( 'FaultString=\[([\w\s]+)\]', result['Value'][1] )
        fault = ''
        if res:
          fault = res.group( 1 )
        detail = ''
        res = re.search( 'FaultDetail=\[([\w\s]+)\]', result['Value'][1] )
        if res:
          detail = res.group( 1 )
        return S_ERROR( "Error: %s:%s" % (fault,detail) )
      else:
        return S_ERROR( 'Error while interrogating CE status' )
    if result['Value'][1]:
      resultDict = self.__parseJobStatus( result['Value'][1] )
    running = 0
    waiting = 0
    statusDict = {}
    for ref, status in resultDict.items():
      # Optionally restrict the accounting to the caller-supplied job list
      if jobIDList is not None and not ref in jobIDList:
        continue
      if status == 'Scheduled':
        waiting += 1
      if status == 'Running':
        running += 1
      statusDict[ref] = status
    result = S_OK()
    result['RunningJobs'] = running
    result['WaitingJobs'] = waiting
    result['SubmittedJobs'] = 0
    result['JobStatusDict'] = statusDict
    return result
  def getJobStatus( self, jobIDList ):
    """ Get the status information for the given list of jobs

    Also opportunistically refreshes the proxy delegations known to the CE
    before querying the individual job statuses.
    """
    if self.proxyRenewal % 60 == 0:
      # Refresh proxy delegations so long-running pilots keep a valid proxy.
      # NOTE(review): the counter is only incremented inside this branch, so
      # after the first call it sticks at 1 and the renewal never triggers
      # again -- confirm whether the increment should be unconditional.
      self.proxyRenewal += 1
      statusList = ['REGISTERED', 'PENDING', 'IDLE', 'RUNNING', 'REALLY-RUNNING']
      cmd = ['glite-ce-job-status', '-L', '2', '--all', '-e',
             '%s' % self.ceName, '-s',
             '%s' % ':'.join( statusList ) ]
      result = executeGridCommand( self.proxy, cmd, self.gridEnv )
      if result['OK']:
        # Collect the distinct delegation IDs mentioned in the long listing
        delegationIDs = []
        for line in result['Value'][1].split( '\n' ):
          if line.find( 'Deleg Proxy ID' ) != -1:
            delegationID = line.split()[-1].replace( '[', '' ).replace( ']', '' )
            if delegationID not in delegationIDs:
              delegationIDs.append( delegationID )
        if delegationIDs:
          cmd = ['glite-ce-proxy-renew', '-e', self.ceName ]
          cmd.extend( delegationIDs )
          self.log.info( 'Refreshing proxy for:', ' '.join( delegationIDs ) )
          result = executeGridCommand( self.proxy, cmd, self.gridEnv )
          if result['OK']:
            status, output, error = result['Value']
            if status:
              self.log.error( "Failed to renew proxy delegation",
                              'Output:\n' + output + '\nError:\n' + error )
    # Write the job references to a temporary id file consumed via -i
    workingDirectory = self.ceParameters['WorkingDirectory']
    fd, idFileName = tempfile.mkstemp( suffix = '.ids', prefix = 'CREAM_', dir = workingDirectory )
    idFile = os.fdopen( fd, 'w' )
    idFile.write( '##CREAMJOBS##' )
    for id_ in jobIDList:
      # Strip the DIRAC stamp suffix (ref:::stamp) if present
      if ":::" in id_:
        ref, _stamp = id_.split( ':::' )
      else:
        ref = id_
      idFile.write( '\n' + ref )
    idFile.close()
    cmd = ['glite-ce-job-status', '-n', '-i', '%s' % idFileName ]
    result = executeGridCommand( self.proxy, cmd, self.gridEnv )
    os.unlink( idFileName )
    resultDict = {}
    if not result['OK']:
      self.log.error( 'Failed to get job status', result['Message'] )
      return result
    if result['Value'][0]:
      if result['Value'][2]:
        return S_ERROR( result['Value'][2] )
      else:
        return S_ERROR( 'Error while interrogating job statuses' )
    if result['Value'][1]:
      resultDict = self.__parseJobStatus( result['Value'][1] )
    if not resultDict:
      return S_ERROR( 'No job statuses returned' )
    # If CE does not know about a job, set the status to Unknown
    for job in jobIDList:
      if not resultDict.has_key( job ):
        resultDict[job] = 'Unknown'
    return S_OK( resultDict )
  def __parseJobStatus( self, output ):
    """ Parse the output of the glite-ce-job-status

    Returns a dict mapping each job reference to a DIRAC status string.
    """
    resultDict = {}
    ref = ''
    for line in output.split( '\n' ):
      if not line:
        continue
      # Each job entry starts with a JobID=[...] line; the following
      # Status=[...] line is attributed to the most recent reference.
      match = re.search( 'JobID=\[(.*)\]', line )
      if match and len( match.groups() ) == 1:
        ref = match.group( 1 )
      match = re.search( 'Status.*\[(.*)\]', line )
      if match and len( match.groups() ) == 1:
        creamStatus = match.group( 1 )
        # Map CREAM states onto DIRAC pilot states
        if creamStatus in ['DONE-OK']:
          resultDict[ref] = 'Done'
        elif creamStatus in ['DONE-FAILED']:
          resultDict[ref] = 'Failed'
        elif creamStatus in ['REGISTERED', 'PENDING', 'IDLE']:
          resultDict[ref] = 'Scheduled'
        elif creamStatus in ['ABORTED']:
          resultDict[ref] = 'Aborted'
        elif creamStatus in ['CANCELLED']:
          resultDict[ref] = 'Killed'
        elif creamStatus in ['RUNNING', 'REALLY-RUNNING']:
          resultDict[ref] = 'Running'
        elif creamStatus == 'N/A':
          resultDict[ref] = 'Unknown'
        else:
          # Unrecognized state: pass it through, capitalized
          resultDict[ref] = creamStatus.capitalize()
    return resultDict
  def getJobOutput( self, jobID, localDir = None ):
    """ Get the specified job standard output and error files. If the localDir is provided,
        the output is returned as file in this directory. Otherwise, the output is returned
        as strings.
    """
    # The jobID may carry the DIRAC stamp as ref:::stamp; the stamp names
    # the remote .out/.err files (see __writeJDL).
    if jobID.find( ':::' ) != -1:
      pilotRef, stamp = jobID.split( ':::' )
    else:
      pilotRef = jobID
      stamp = ''
    if not stamp:
      return S_ERROR( 'Pilot stamp not defined for %s' % pilotRef )
    outURL = self.ceParameters.get( 'OutputURL', 'gsiftp://localhost' )
    if outURL == 'gsiftp://localhost':
      # No explicit output URL configured: ask the CE where the sandbox is
      result = self.__resolveOutputURL( pilotRef )
      if not result['OK']:
        return result
      outURL = result['Value']
    outputURL = os.path.join( outURL, '%s.out' % stamp )
    errorURL = os.path.join( outURL, '%s.err' % stamp )
    workingDirectory = self.ceParameters['WorkingDirectory']
    outFileName = os.path.join( workingDirectory, os.path.basename( outputURL ) )
    errFileName = os.path.join( workingDirectory, os.path.basename( errorURL ) )
    cmd = ['globus-url-copy', '%s' % outputURL, 'file://%s' % outFileName ]
    result = executeGridCommand( self.proxy, cmd, self.gridEnv )
    output = ''
    if result['OK']:
      if not result['Value'][0]:
        # Copy succeeded: slurp the local file and remove it
        outFile = open( outFileName, 'r' )
        output = outFile.read()
        outFile.close()
        os.unlink( outFileName )
      elif result['Value'][0] == 1 and "No such file or directory" in result['Value'][2]:
        output = "Standard Output is not available on the CREAM service"
        if os.path.exists( outFileName ):
          os.unlink( outFileName )
      else:
        error = '\n'.join( result['Value'][1:] )
        return S_ERROR( error )
    else:
      return S_ERROR( 'Failed to retrieve output for %s' % jobID )
    # NOTE(review): unlike the stdout copy above, the destination here lacks
    # the 'file://' scheme -- confirm whether globus-url-copy accepts a bare
    # local path or whether this is an oversight.
    cmd = ['globus-url-copy', '%s' % errorURL, '%s' % errFileName ]
    result = executeGridCommand( self.proxy, cmd, self.gridEnv )
    error = ''
    if result['OK']:
      if not result['Value'][0]:
        errFile = open( errFileName, 'r' )
        error = errFile.read()
        errFile.close()
        os.unlink( errFileName )
      elif result['Value'][0] == 1 and "No such file or directory" in result['Value'][2]:
        error = "Standard Error is not available on the CREAM service"
        if os.path.exists( errFileName ):
          os.unlink( errFileName )
    else:
      return S_ERROR( 'Failed to retrieve error for %s' % jobID )
    return S_OK( ( output, error ) )
  def __resolveOutputURL( self, pilotRef ):
    """ Resolve the URL of the pilot output files

    Parses the OSB (output sandbox) line of the long job status listing.
    NOTE(review): when executeGridCommand itself fails (result['OK'] false)
    this method falls off the end and returns None instead of an S_ERROR --
    confirm callers tolerate that.
    """
    # NOTE(review): '| grep -i osb' is passed as a single argv element; a
    # shell pipe only works if executeGridCommand runs through a shell.
    cmd = [ 'glite-ce-job-status', '-L', '2', '%s' % pilotRef,
            '| grep -i osb' ]
    result = executeGridCommand( self.proxy, cmd, self.gridEnv )
    url = ''
    if result['OK']:
      if not result['Value'][0]:
        output = result['Value'][1]
        for line in output.split( '\n' ):
          line = line.strip()
          if line.find( 'OSB' ) != -1:
            match = re.search( '\[(.*)\]', line )
            if match:
              url = match.group( 1 )
        if url:
          return S_OK( url )
        else:
          return S_ERROR( 'output URL not found for %s' % pilotRef )
      else:
        return S_ERROR( 'Failed to retrieve long status for %s' % pilotRef )
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#
|
vmendez/DIRAC
|
Resources/Computing/CREAMComputingElement.py
|
Python
|
gpl-3.0
| 14,828
|
# Copyright (C) 2010-2018 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from .script_interface import ScriptObjectRegistry, ScriptInterfaceHelper, script_interface_register
import espressomd.code_info
# The LB boundary machinery only exists in the core when one of the
# LB_BOUNDARIES features was compiled in, so the script interface classes
# are only defined (and registered) in that case.
if any(i in espressomd.code_info.features() for i in ["LB_BOUNDARIES", "LB_BOUNDARIES_GPU"]):
    @script_interface_register
    class LBBoundaries(ScriptObjectRegistry):
        """
        Creates a set of lattice Boltzmann boundaries.
        """
        _so_name = "LBBoundaries::LBBoundaries"
        def add(self, *args, **kwargs):
            """
            Adds a boundary to the set.
            Either a valid boundary is an argument,
            or a valid set of parameters to create a boundary.

            Returns the :obj:`LBBoundary` that was added.
            """
            if len(args) == 1:
                # A pre-built boundary object was passed in
                if isinstance(args[0], LBBoundary):
                    lbboundary = args[0]
                else:
                    raise TypeError(
                        "Either a LBBoundary object or key-value pairs for the parameters of a LBBoundary object need to be passed.")
            else:
                # Construct the boundary from the given keyword parameters
                lbboundary = LBBoundary(**kwargs)
            self.call_method("add", object=lbboundary)
            return lbboundary
        def remove(self, lbboundary):
            """
            Removes a boundary from the set.
            Parameters
            ----------
            lbboundary : :obj:`LBBoundary`
                The boundary to be removed from the set.
            """
            self.call_method("remove", object=lbboundary)
        def clear(self):
            """
            Removes all boundaries.
            """
            self.call_method("clear")
        def size(self):
            """Return the number of boundaries in the set."""
            return self.call_method("size")
        def empty(self):
            """Return whether the set contains no boundaries."""
            return self.call_method("empty")
    @script_interface_register
    class LBBoundary(ScriptInterfaceHelper):
        """
        Creates a LB boundary.
        """
        _so_name = "LBBoundaries::LBBoundary"
        # expose the core-side get_force() method on the Python object
        _so_bind_methods = ("get_force",)
|
mkuron/espresso
|
src/python/espressomd/lbboundaries.py
|
Python
|
gpl-3.0
| 2,672
|
#!/usr/bin/env python
import matplotlib
matplotlib.use('Agg')
import pylab
import matplotlib.mlab as mlab
import matplotlib.pyplot as plt
import numpy
def rescale(arr):
    """Shift *arr* in place so that its smallest element becomes 0.

    Mutates the sequence and returns None.  An empty sequence is left
    unchanged.  (Replaces the previous manual scan that used a 1e20
    sentinel and would have mis-handled values above 1e20.)
    """
    if not len(arr):
        return  # nothing to rescale
    lowest = min(arr)
    for i in range(len(arr)):
        arr[i] = (arr[i] - lowest)  # / something
def flatten(arr):
    """Baseline subtraction over rows of (x, y) data.

    Currently a deliberate no-op (subtracts 0 from each row's y column),
    kept so the call sites stay in place; the commented line shows the
    intended Lennard-Jones correction.
    """
    for elem in arr:
        elem[1] -= 0
        #elem[1] -= lj(elem[0])
#############################
# Plot the free energy / PMF curve from WHAM output against a reference
# Lennard-Jones curve and save the figure to pmf.png.
kt = 8.617e-4  # NOTE(review): presumably k_B*T in eV -- confirm units/temperature
#f = lambda x : 1 * kt * numpy.log(4 * numpy.pi * x**2)
f = lambda x : 0  # entropic/Jacobian correction, currently disabled (always 0)
sigma = 1
epsilon = 10 * kt
trunc = [0, 1]  # rows to drop from each end of the (commented-out) datasets
# truncated-and-shifted Lennard-Jones reference curve
lj = lambda x : 4 * epsilon * ((x / sigma)**(-12) - (x / sigma)**(-6)) + epsilon
x = numpy.arange(1, 3, 0.01)
y = map(lj, x)  # NOTE: Python 2 map (returns a list); wrap in list() for Python 3
plt.plot(x, y, color='black')
#################
## Trapezoids
#data = numpy.loadtxt("A_trap.txt")[(2*trunc[0]):(-2*trunc[1])]
#flatten(data)
#corrected = data[:,1] + map(f, data[:,0])
#rescale(corrected)
## line up tails
##corrected = map(lambda x : x + lj(data[-1,0]) - corrected[-1], corrected)
#plt.plot(data[:,0], corrected, 'b+')
#
#
#################
## Simpson's rule
#data = numpy.loadtxt("A_simps.txt")[(trunc[0]):(-trunc[1])]
#flatten(data)
#corrected = data[:,1] + map(f, data[:,0])
#rescale(corrected)
## line up tails
##corrected = map(lambda x : x + lj(data[-1,0]) - corrected[-1], corrected)
#plt.plot(data[:,0], corrected, 'rx')
# WHAM result: drop 8 rows at each edge, apply the (no-op) corrections,
# shift the minimum to zero and plot in green
data = numpy.loadtxt("wham_free.txt")[8:-8]
flatten(data)
corrected = data[:,1] + map(f, data[:,0])
rescale(corrected)
plt.plot(data[:,0], corrected, 'g-')
#x = numpy.arange(-1.5, 1.5, 0.05)
#y = x**2
#plt.plot(x, y)
plt.xlabel('x')
plt.ylabel('A')
plt.grid(True)
plotfname = "pmf.png"
pylab.savefig(plotfname)
|
afrachioni/umbrella
|
tools/plot_pmf.py
|
Python
|
gpl-3.0
| 1,619
|
import getopt, sys
import numpy as np
import cv2
import main
def testimg(sysargv):
    """Load an image named on the command line, run OCR on it, and print
    the two best character guesses with their angles and confidences.

    Args:
        sysargv: argv-style list; sysargv[1] is the image path and the
            optional sysargv[2] (0 or 1) toggles image display.
    """
    #====================================================================
    # optional debug aid: show randomly generated junk polygons when the
    # generator module happens to be importable; any failure is ignored
    try:
        import RandomJunkPolygonGenerator
        for iii in range(14):
            cv2.imshow(str(iii), RandomJunkPolygonGenerator.returnImage())
        cv2.waitKey(0)
    except Exception:
        pass  # the generator is an optional extra; continue without it
    # filename of image to load is a command line argument
    if len(sysargv) <= 1:
        print("usage: {image-file} {optional:show-images?}")
        sys.exit()  # was quit(), which is unavailable when site is not loaded
    filename_of_image = str(sysargv[1])
    # NOTE(review): showImages is parsed but never used below -- presumably
    # consumed by doOCR in an earlier version; confirm before removing.
    showImages = True
    if len(sysargv) > 2:
        if int(sysargv[2]) == 0:
            showImages = False
    #====================================================================
    # load the image
    loaded_image_mat = cv2.imread(filename_of_image)
    # check if it failed to load (cv2.imread yields an empty result on failure)
    if np.size(loaded_image_mat) <= 1:
        print("ERROR: COULD NOT OPEN IMAGE FILE: " + filename_of_image)
        return
    #====================================================================
    # segmentation + OCR
    (c1char, c1conf, c1angle, c2char, c2conf, c2angle) = main.doOCR(loaded_image_mat, 0)
    #====================================================================
    # display image and results
    print("top result: char == \'"+c1char+"\' at angle "+str(c1angle)+" with confidence "+str(c1conf))
    print("second-best result: char == \'"+c2char+"\' at angle "+str(c2angle)+" with confidence "+str(c2conf))
# Script entry point: run testimg() with the command line arguments.
# This guard must stay at the end so all definitions above exist first.
if __name__ == "__main__":
	testimg(sys.argv)
|
UCSD-AUVSI/Heimdall
|
Recognition/OCR/PythonOCR/ConvolutionalNeuralNet/Python/test.py
|
Python
|
gpl-3.0
| 1,507
|
# 'append', 'count', 'extend', 'index', 'insert', 'pop', 'remove', 'reverse', 'sort'
class vector(list):
    """Minimal numeric vector with element-wise helpers.

    Note: although this class inherits from list, the components are kept
    in the ``v`` attribute and the underlying list stays empty -- this
    quirk is preserved for backward compatibility with existing callers.
    """
    def __init__(self, *args):
        # Store the components; deliberately does NOT populate the list base.
        self.v = list(args)
    def get(self):
        """Return the list of components."""
        return self.v
    def add(self, v2):
        """Return the element-wise sum with *v2* as a plain list.

        *v2* must have at least as many elements as this vector; a shorter
        sequence raises IndexError (original behavior preserved).
        """
        return [self.v[i] + v2[i] for i in range(len(self.v))]
    def skalar(self, x):
        """Return this vector scaled by the scalar *x* as a plain list."""
        return [element * x for element in self.v]
    def checksum(self):
        """Return the sum of all components (0 for an empty vector)."""
        return sum(self.v)
|
dodonator/area51
|
array/array.py
|
Python
|
gpl-3.0
| 582
|
# HTTM: A transformation library for RAW and Electron Flux TESS Images
# Copyright (C) 2016, 2017 John Doty and Matthew Wampler-Doty of Noqsi Aerospace, Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
``httm.system.command_line``
============================
This module contains utilities for parsing settings, such as parameter and flags, from the command line.
"""
# TODO: Documentation
def add_arguments_from_settings(argument_parser, setting_dictionary):
    """Register one command line argument per entry of *setting_dictionary*.

    Each entry maps a setting key to a dict with ``default`` and
    ``documentation``.  Key ``some_option`` becomes flag ``--some-option``.
    Boolean defaults also get a ``--no-some-option`` counterpart; sequence
    defaults become multi-value arguments typed after their first element;
    everything else is parsed with the type of its default.  All parser
    defaults are ``None`` so callers can detect explicitly-given flags.
    """
    # TODO: Documentation
    for setting_name in setting_dictionary:
        flag = '--' + setting_name.replace('_', '-')  # type: str
        default_value = setting_dictionary[setting_name]['default']
        # strip markup characters so --help output reads cleanly
        help_text = setting_dictionary[setting_name]['documentation'] \
            .replace('`', '') \
            .replace('*', '')  # type: str
        if isinstance(default_value, bool):
            # boolean settings: a --no-... off switch plus the on switch
            argument_parser.add_argument(
                '--no-' + setting_name.replace('_', '-'),
                default=None,
                dest=setting_name,
                action='store_false')
            argument_parser.add_argument(
                flag,
                default=None,
                dest=setting_name,
                action='store_true',
                help='{}. Default: Set to {}'.format(
                    help_text.rstrip('.'), str(default_value).upper()))
        elif hasattr(default_value, '__iter__') and not isinstance(default_value, str):
            # sequence settings: accept one or more values of the element type
            argument_parser.add_argument(
                flag,
                default=None,
                dest=setting_name,
                action='store',
                nargs='+',
                type=type(default_value[0]),
                help='{}. Default: {}'.format(help_text.rstrip('.'), default_value))
        else:
            # scalar settings: parse with the type of the default value
            argument_parser.add_argument(
                flag,
                default=None,
                dest=setting_name,
                action='store',
                type=type(default_value),
                help='{}. Default: {}'.format(help_text.rstrip('.'), default_value))
|
TESScience/httm
|
httm/system/command_line/__init__.py
|
Python
|
gpl-3.0
| 2,753
|
"""Manages hotkeys."""
from global_hotkeys import GlobalHotkeys as ghk
from data import Dir
import subprocess
import time
def terminal(wm):
    """Launch a Cygwin mintty terminal and insert its window into the layout.

    Args:
        wm (WindowManager): manager that will adopt the new window.

    NOTE(review): subprocess.run blocks until the child exits -- presumably
    mintty detaches immediately; confirm Popen is not needed here.
    """
    subprocess.run("C:\\cygwin64\\bin\\mintty.exe")
    time.sleep(.2)  # give the new window time to appear before inserting it
    wm.insert()
class HookManager():
    """
    Defines and registers all hotkeys.
    Attributes:
        win_manager (WindowManager): The window manager.
    """
    def __init__(self, win_manager):
        """
        Initializes the hook manager given a WindowManager

        Registers every global hotkey (Win+Shift as the base modifier,
        plus Alt for the "inverse" variants) and then starts listening --
        note that ghk.listen() is called at the end of construction.
        Args:
            win_manager (WindowManager): Description
        """
        self.win_manager = win_manager
        MOD = ghk.MOD_WIN | ghk.MOD_SHIFT      # base modifier combo
        ALTMOD = MOD | ghk.MOD_ALT             # base combo + Alt for inverse actions
        # TODO error when using VK_UP/DOWN?
        # focus / window movement within the layout
        ghk.register(ghk.VK_PRIOR, MOD, win_manager.change_focus, depth=1)
        ghk.register(ghk.VK_NEXT, MOD, win_manager.change_focus, depth=0)
        ghk.register(ghk.VK_PRIOR, ALTMOD, win_manager.swap_window, depth=1)
        ghk.register(ghk.VK_NEXT, ALTMOD, win_manager.swap_window, depth=0)
        # layout tuning: gaps and split ratio (plain = grow, Alt = shrink)
        ghk.register(ghk.VK_G, MOD, win_manager.change_gaps, delta=2)
        ghk.register(ghk.VK_G, ALTMOD, win_manager.change_gaps, delta=-2)
        ghk.register(ghk.VK_R, MOD, win_manager.change_ratio, delta=.1)
        ghk.register(ghk.VK_R, ALTMOD, win_manager.change_ratio, delta=-.1)
        ghk.register(ghk.VK_F, MOD, win_manager.set_insertion)
        ghk.register(ghk.VK_S, MOD, win_manager.change_split)
        ghk.register(ghk.VK_D, MOD, win_manager.debug)
        ghk.register(ghk.VK_Q, ghk.MOD_ALT, win_manager.close_window)
        ghk.register(ghk.VK_M, ALTMOD, win_manager.bring_to_top)
        # Alt+Shift+Win+Enter spawns a terminal and tiles it
        ghk.register(ghk.VK_RETURN, ALTMOD, terminal, wm=win_manager)
        ghk.register(ghk.VK_E, MOD, win_manager.exit)
        # +/- manage which windows participate in tiling
        ghk.register(ghk.VK_OEM_PLUS, MOD, win_manager.insert)
        ghk.register(ghk.VK_OEM_PLUS, ALTMOD, win_manager.insert_all)
        ghk.register(ghk.VK_OEM_MINUS, MOD, win_manager.remove)
        ghk.register(ghk.VK_OEM_MINUS, ALTMOD, win_manager.remove_all)
        ghk.listen()
|
howardjohn/pyty
|
src/hook_manager.py
|
Python
|
gpl-3.0
| 2,062
|
#!/usr/bin/env python
"""Regression test: temperature continuity at the ice-bed interface
(polythermal case).

Runs pismv/pisms at two time-step limits and checks that the mismatch
between the basal ice temperature and the top bedrock temperature
decreases as the time step is refined (expected O(dt) convergence).
"""
from sys import exit, argv, stderr
from os import system
from numpy import squeeze, abs, diff
try:
    from netCDF4 import Dataset as NC
except ImportError:
    # fixed: was 'sys.exit(1)', a NameError since only 'exit' is imported
    print("netCDF4 is not installed!")
    exit(1)
pism_path = argv[1]
mpiexec = argv[2]
stderr.write("Testing: temperature continuity at ice-bed interface (polythermal case).\n")
# Generate the input geometry with the verification test F
cmd = "%s %s/pismv -test F -y 10 -verbose 1 -o bar-temp-continuity.nc" % (mpiexec, pism_path)
stderr.write(cmd + '\n')
e = system(cmd)
if e != 0:
    exit(1)
deltas = []
dts = [200, 100]  # maximum time steps (years); halving should shrink the mismatch
for dt in dts:
    # Run the enthalpy model regridding the thickness from the test-F output
    cmd = "%s %s/pisms -eisII B -y 5000 -Mx 16 -My 16 -Mz 21 -Lbz 1000 -Mbz 11 -energy enthalpy -regrid_file bar-temp-continuity.nc -regrid_vars thk -verbose 1 -max_dt %f -o foo-temp-continuity.nc -o_size big" % (mpiexec, pism_path, dt)
    stderr.write(cmd + '\n')
    e = system(cmd)
    if e != 0:
        exit(1)
    # Extract the basal ice temperature (z index 0) ...
    e = system("ncks -O -v temp -d z,0 foo-temp-continuity.nc temp-temp-continuity.nc")
    if e != 0:
        exit(1)
    # ... and the topmost bedrock temperature (zb index 10)
    e = system("ncks -O -v litho_temp -d zb,10 foo-temp-continuity.nc litho_temp-temp-continuity.nc")
    if e != 0:
        exit(1)
    nc1 = NC("temp-temp-continuity.nc")
    nc2 = NC("litho_temp-temp-continuity.nc")
    temp = squeeze(nc1.variables['temp'][:])
    litho_temp = squeeze(nc2.variables['litho_temp'][:])
    deltas.append(abs(temp - litho_temp).max())
# these deltas are observed to decrease O(dt^1) approximately, which is expected from theory
for (dt, delta) in zip(dts, deltas):
    stderr.write("dt = %f, delta = %f\n" % (dt, delta))
# the only test is whether they decrease; no rate measured
if any(diff(deltas) > 0):
    exit(1)
system("rm foo-temp-continuity.nc foo-temp-continuity.nc~ bar-temp-continuity.nc temp-temp-continuity.nc litho_temp-temp-continuity.nc")
exit(0)
|
talbrecht/pism_pik06
|
test/regression/temp_continuity.py
|
Python
|
gpl-3.0
| 1,796
|
import datetime
from django.contrib.auth.models import User
from django.test import TestCase
# Create your tests here.
from django.utils import timezone
from pip.wheel import root_is_purelib
from gantt.models import Project, Task, Resource, Dependency, Assignment
def initModel():
    """Populate the test database with one user/resource pair and one
    60-day project that the resource works on."""
    u = User.objects.create_user("pippo")
    r = Resource(user=u)
    r.save()
    p = Project()
    p.name = "Gant Testing"
    p.start_time = timezone.now()
    p.end_time = p.start_time + datetime.timedelta(days=60)
    p.save()  # save before adding the m2m relation below
    r.work_on.add(p)
def clearModel():
    """Remove every Project and Resource created for the tests."""
    for model in (Project, Resource):
        model.objects.all().delete()
class TaskTestCase(TestCase):
    """Exercises Task tree manipulation (add/remove tasks, dependencies,
    assignments) and the cascade behavior of deleting a project or its
    root task."""
    def setUp(self):
        initModel()
        self.project = Project.objects.all()[0] #Only one project must be in the db for this TestCase
        self.resource = Resource.objects.all()[0]
    def tearDown(self):
        clearModel()
    def _createTasksArgs(self, number=0):
        # Build a kwargs dict for Task creation; 'number' offsets every
        # field so successive tasks do not collide with each other.
        args = dict()
        args['name']="write init model"+str(number)
        args['code']="inimod"+str(number)
        args['description'] = "descr"+str(number)
        args['row_index']=1+number
        args['start_time']=timezone.now() + datetime.timedelta(days=number)
        args['end_time']=timezone.now() + datetime.timedelta(days=10+number)
        return args
    def test_addTask(self):
        """A task added under the root appears in its grafted set and in
        the project's task set (root + new task = 2)."""
        root_task = self.project.root_task
        t= root_task.addTask(**self._createTasksArgs())
        self.assertEqual(t, root_task.grafted_task_set.first())
        self.assertEqual(self.project.task_set.count(), 2)
    def test_removeTask(self):
        """Removing a task deletes it from the database entirely."""
        root_task = self.project.root_task
        t= root_task.addTask(**self._createTasksArgs())
        root_task.removeTask(t.id)
        # the lookup must fail: reaching the assertTrue(False) line means
        # the task was not actually deleted
        try:
            Task.objects.get(id=t.id)
            self.assertTrue(False)
        except Task.DoesNotExist:
            self.assertTrue(True)
        self.assertEqual(self.project.task_set.count(), 1)
    def test_addDependency(self):
        """t2.addDependency(t1.id) links t2 -> t1 exactly once."""
        root_task = self.project.root_task
        t1= root_task.addTask(**self._createTasksArgs())
        root_task = self.project.root_task
        t2= root_task.addTask(**self._createTasksArgs(10))
        t2.addDependency(t1.id)
        self.assertEqual(t2.depends_on.count(), 1)
        self.assertEqual(t2.depends_on.first(), t1)
    def test_removeDependency(self):
        """Removing a dependency unlinks the tasks but deletes neither."""
        root_task = self.project.root_task
        t1= root_task.addTask(**self._createTasksArgs())
        root_task = self.project.root_task
        t2= root_task.addTask(**self._createTasksArgs(10))
        t2.addDependency(t1.id)
        t2.removeDependency(t1.id)
        self.assertEqual(t2.depends_on.count(), 0)
        self.assertEqual(self.project.task_set.get(pk=t1.id),t1)
    def test_addAssignment(self):
        """Assigning a resource to a task records exactly one assignment."""
        root_task = self.project.root_task
        root_task.addAssignment(self.resource)
        self.assertEqual(root_task.assigned_to.count(), 1)
    def test_removeAssignment(self):
        """Removing an assignment leaves the task with no resources."""
        root_task = self.project.root_task
        root_task.addAssignment(self.resource)
        root_task.removeAssignment(self.resource.id)
        self.assertEqual(root_task.assigned_to.count(), 0)
    def test_delete_project(self):
        """Deleting the project cascades to tasks, dependencies and
        assignments."""
        root_task = self.project.root_task
        t1= root_task.addTask(**self._createTasksArgs())
        root_task = self.project.root_task
        t2= root_task.addTask(**self._createTasksArgs(10))
        t2.addDependency(t1.id)
        root_task = self.project.root_task
        root_task.addAssignment(self.resource)
        self.project.delete()
        self.assertEqual(Task.objects.count(),0)
        self.assertEqual(Dependency.objects.count(),0)
        self.assertEqual(Assignment.objects.count(),0)
    def test_delete_root_task(self):
        """Deleting the root task alone also cascades to all descendant
        tasks, dependencies and assignments."""
        root_task = self.project.root_task
        t1= root_task.addTask(**self._createTasksArgs())
        root_task = self.project.root_task
        t2= root_task.addTask(**self._createTasksArgs(10))
        t2.addDependency(t1.id)
        root_task = self.project.root_task
        root_task.addAssignment(self.resource)
        root_task.delete()
        self.assertEqual(Task.objects.count(),0)
        self.assertEqual(Dependency.objects.count(),0)
        self.assertEqual(Assignment.objects.count(),0)
|
MeMpy/Eriteam
|
gantt/tests.py
|
Python
|
gpl-3.0
| 4,307
|
from __future__ import unicode_literals
from django.db import models
class Isolate(models.Model):
    """Sequencing QC and typing summary for a single isolate."""
    # identification and typing
    isolate = models.CharField(max_length=128)  # isolate identifier
    species = models.CharField(max_length=128)
    scheme = models.CharField(max_length=128)  # typing scheme name (presumably MLST -- confirm)
    st = models.IntegerField(null=True)  # sequence type; null when none could be assigned
    # sequencing/assembly metrics
    reads = models.IntegerField()
    sequencing_yield = models.BigIntegerField()  # total bases sequenced
    genome_size = models.BigIntegerField()
    contigs = models.IntegerField()
    # read quality/length statistics
    avgqual = models.FloatField() # DecimalField?
    minlen = models.IntegerField()
    avglen = models.IntegerField()
    modelen = models.IntegerField()
    maxlen = models.IntegerField()
    geecee = models.FloatField() # DecimalField?  (GC content)
    #added_by = models.CharField(max_length=128)
    #added_at = models.DateTimeField()
|
MDU-PHL/kelpie
|
website/kelpie/models.py
|
Python
|
gpl-3.0
| 773
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (C) 2008 Pâris Quentin
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import codecs
import os
import subprocess
import wx
import wx.adv
import wx.html
import wx.lib.agw.hyperlink
from wx.lib.ClickableHtmlWindow import PyClickableHtmlWindow
import lib.Variables as Variables
import lib.playonlinux as playonlinux
from install.DescriptionFetcher import DescriptionFetcher
from install.MiniatureWindow import MiniatureWindow
from ui.PlayOnLinuxWindow import PlayOnLinuxWindow
class InstallWindow(PlayOnLinuxWindow):
    def addCat(self, name, icon, iid):
        """Add one category (icon button + title link) to the header bar.

        Args:
            name: displayed category name, also the key into the
                cats_icons/cats_links dicts.
            icon: path to the category icon image.
            iid: zero-based category index; also offsets the widget ids
                (2000+iid for the button, 3000+iid for the link), both
                bound to AddApps.
        """
        espace = 80  # horizontal slot width per category, in pixels
        if (os.environ["POL_OS"] == "Mac"):
            offset = 10  # icons need a larger top offset on Mac to line up
        else:
            offset = 2
        self.cats_icons[name] = wx.BitmapButton(self.installWindowHeader, 2000 + iid, wx.Bitmap(icon), (0, 0),
                                                style=wx.NO_BORDER)
        self.cats_links[name] = wx.lib.agw.hyperlink.HyperLinkCtrl(self.installWindowHeader, 3000 + iid, name, pos=(0, 52))
        mataille = self.cats_links[name].GetSize()[0]   # link text width
        mataille2 = self.cats_icons[name].GetSize()[0]  # icon width
        # center the icon (and, approximately, the label) within its slot
        image_pos = (espace - mataille2) / 2 + espace * iid;
        self.cats_links[name].SetPosition((espace * iid + (espace - mataille / 1.3) / 2, 47))
        self.cats_icons[name].SetPosition((image_pos, offset))
        self.Bind(wx.lib.agw.hyperlink.EVT_HYPERLINK_LEFT, self.AddApps, id=3000 + iid)
        self.Bind(wx.EVT_BUTTON, self.AddApps, id=2000 + iid)
        # style the link as plain black text that never opens a browser
        self.cats_links[name].SetColours(wx.Colour(0, 0, 0), wx.Colour(0, 0, 0), wx.Colour(0, 0, 0))
        self.cats_links[name].AutoBrowse(False)
        self.cats_links[name].UpdateLink(True)
        self.cats_links[name].SetUnderlines(False, False, False)
        self.cats_links[name].SetFont(self.fontText)
        self.cats_links[name].SetBackgroundColour((255, 255, 255))
def setWaitState(self, isWaiting):
if isWaiting:
self.installWindowCategoryContentPanel.Hide()
self.panelWait.Show()
self.manualInstall.Raise()
self.animation_wait.Play()
self.installButton.Hide()
self.updateButton.Hide()
else:
self.installWindowCategoryContentPanel.Show()
self.manualInstall.Raise()
self.panelWait.Hide()
self.animation_wait.Stop()
self.installButton.Show()
self.updateButton.Show()
self.Layout()
def _createHeader(self):
self.installWindowHeader = wx.Panel(self, -1, size=(802, 69))
self.installWindowHeader.SetBackgroundColour((255, 255, 255))
self.windowSizer.Add(self.installWindowHeader, 0, wx.EXPAND)
self.addCat(_("Accessories"),
Variables.playonlinux_env + "/resources/images/install/32/applications-accessories.png", 0)
self.addCat(_("Development"),
Variables.playonlinux_env + "/resources/images/install/32/applications-development.png", 1)
self.addCat(_("Education"), Variables.playonlinux_env + "/resources/images/install/32/applications-science.png",
2)
self.addCat(_("Games"), Variables.playonlinux_env + "/resources/images/install/32/applications-games.png", 3)
self.addCat(_("Graphics"), Variables.playonlinux_env + "/resources/images/install/32/applications-graphics.png",
4)
self.addCat(_("Internet"), Variables.playonlinux_env + "/resources/images/install/32/applications-internet.png",
5)
self.addCat(_("Multimedia"),
Variables.playonlinux_env + "/resources/images/install/32/applications-multimedia.png", 6)
self.addCat(_("Office"), Variables.playonlinux_env + "/resources/images/install/32/applications-office.png", 7)
self.addCat(_("Other"), Variables.playonlinux_env + "/resources/images/install/32/applications-other.png", 8)
self.addCat(_("Patches"), Variables.playonlinux_env + "/resources/images/install/32/view-refresh.png", 9)
def _createBody(self):
self.installWindowBodyPanel = wx.Panel(self, -1)
self.windowSizer.Add(self.installWindowBodyPanel, 1, wx.EXPAND)
self.installWindowBodySizer = wx.BoxSizer(wx.VERTICAL)
self.installWindowBodyPanel.SetSizer(self.installWindowBodySizer)
def _createWaitPanel(self):
self.panelWait = wx.Panel(self.installWindowBodyPanel, -1)
self.installWindowBodySizer.Add(self.panelWait, 1, wx.EXPAND)
self.panelWait.Hide()
## FIXME: Remove those magic numbers
self.animation_wait = wx.adv.AnimationCtrl(self.panelWait, -1,
pos=((800 - 128) / 2, (550 - 128) / 2 - 71))
self.animation_wait.LoadFile(Variables.playonlinux_env + "/resources/images/install/wait.gif")
self.percentageText = wx.StaticText(self.panelWait, -1, "", ((800 - 30) / 2, (550 - 128) / 2 + 128 + 10 - 71),
wx.DefaultSize)
self.percentageText.SetFont(self.fontTitle)
self.timer = wx.Timer(self, 1)
self.Bind(wx.EVT_TIMER, self.TimerAction, self.timer)
self.timer.Start(200)
def _createInstallWindowCategoryContentPanel(self):
self.installWindowCategoryContentPanel = wx.Panel(self.installWindowBodyPanel, -1)
self.installWindowBodySizer.Add(self.installWindowCategoryContentPanel, 1, wx.EXPAND)
self.installWindowCategoryContentSizer = wx.BoxSizer(wx.VERTICAL)
self.installWindowCategoryContentPanel.SetSizer(self.installWindowCategoryContentSizer)
def _createFilterPanel(self):
self.filterPanel = wx.Panel(self.installWindowCategoryContentPanel, -1)
self.installWindowCategoryContentSizer.AddSpacer(10)
self.installWindowCategoryContentSizer.Add(self.filterPanel, 0, wx.EXPAND)
self.installWindowCategoryContentSizer.AddSpacer(10)
filterSizer = wx.BoxSizer(wx.HORIZONTAL)
self.filterPanel.SetSizer(filterSizer)
self.searchbox = wx.SearchCtrl(self.filterPanel, 110,)
filtersCaption = wx.StaticText(self.filterPanel, -1, _("Include:"))
self.testingChk = wx.CheckBox(self.filterPanel, 401)
self.testingChk.SetValue(True)
testingCaption = wx.StaticText(self.filterPanel, -1, _("Testing"))
self.nocdChk = wx.CheckBox(self.filterPanel, 402)
nocdCaption = wx.StaticText(self.filterPanel, -1, _("No-cd needed"))
self.commercialChk = wx.CheckBox(self.filterPanel, 403)
self.commercialChk.SetValue(True)
commercialCaption = wx.StaticText(self.filterPanel, -1, _("Commercial"))
self.panelStars = wx.Panel(self.filterPanel, -1)
filterSizer.AddSpacer(10)
filterSizer.Add(self.searchbox, 4, wx.EXPAND)
filterSizer.AddSpacer(10)
filterSizer.Add(filtersCaption, 0, wx.EXPAND)
filterSizer.AddSpacer(10)
filterSizer.Add(self.testingChk, 0, wx.EXPAND)
filterSizer.Add(testingCaption, 0, wx.EXPAND)
filterSizer.AddSpacer(10)
filterSizer.Add(self.nocdChk, 0, wx.EXPAND)
filterSizer.Add(nocdCaption, 0, wx.EXPAND)
filterSizer.AddSpacer(10)
filterSizer.Add(self.commercialChk, 0, wx.EXPAND)
filterSizer.Add(commercialCaption, 0, wx.EXPAND)
filterSizer.AddSpacer(20)
filterSizer.Add(self.panelStars, 2, wx.EXPAND)
filterSizer.AddSpacer(10)
def _createAppNavigation(self):
self.appNavigationPanel = wx.Panel(self.installWindowCategoryContentPanel, -1)
self.installWindowCategoryContentSizer.Add(self.appNavigationPanel, 10, wx.EXPAND)
self.appNavigationSizer = wx.BoxSizer(wx.HORIZONTAL)
#
self.appNavigationPanel.SetSizer(self.appNavigationSizer)
self.imagesapps = wx.ImageList(22, 22)
self.appsList = wx.TreeCtrl(self.appNavigationPanel, 106,
style=wx.TR_HIDE_ROOT | wx.TR_FULL_ROW_HIGHLIGHT | Variables.widget_borders)
self.appsList.SetImageList(self.imagesapps)
#self.appsList.SetSpacing(0)
self.appNavigationSizer.AddSpacer(10)
self.appNavigationSizer.Add(self.appsList, 4, wx.EXPAND, 5)
self.appNavigationSizer.AddSpacer(10)
def _createAppDescription(self):
appDescriptionPanel = wx.Panel(self.appNavigationPanel, -1)
self.appNavigationSizer.Add(appDescriptionPanel, 1, wx.EXPAND)
self.appNavigationSizer.AddSpacer(10)
appDescriptionSizer = wx.BoxSizer(wx.VERTICAL)
self.descriptionContentHtmlBox = PyClickableHtmlWindow(appDescriptionPanel, 107, style=Variables.widget_borders)
appDescriptionSizer.Add(self.descriptionContentHtmlBox, 1, wx.EXPAND)
self.descriptionLoaderPanel = wx.Panel(appDescriptionPanel, -1, style=Variables.widget_borders)
self.descriptionLoaderPanel.SetBackgroundColour((255, 255, 255))
self.animation = wx.adv.AnimationCtrl(self.descriptionLoaderPanel, -1, pos=(90, 100))
self.animation.LoadFile(Variables.playonlinux_env + "/resources/images/install/wait_mini.gif")
self.animation.Hide()
self.descriptionLoaderPanel.Hide()
self.image = wx.StaticBitmap(appDescriptionPanel, 108,
wx.Bitmap(Variables.playonlinux_env + "/resources/images/pol_min.png"))
self.image.Bind(wx.EVT_LEFT_DOWN, self.sizeUpScreen)
appDescriptionSizer.Add(self.descriptionLoaderPanel, 1, wx.EXPAND)
appDescriptionSizer.AddSpacer(10)
appDescriptionSizer.Add(self.image, 0)
appDescriptionPanel.SetSizer(appDescriptionSizer)
def _createButtons(self):
buttonsPanel = wx.Panel(self.installWindowBodyPanel, -1)
buttonsSizer = wx.BoxSizer(wx.HORIZONTAL)
buttonsPanel.SetSizer(buttonsSizer)
self.installWindowBodySizer.AddSpacer(10)
self.installWindowBodySizer.Add(buttonsPanel, 0, wx.EXPAND)
self.installWindowBodySizer.AddSpacer(10)
self.cancelButton = wx.Button(buttonsPanel, wx.ID_CLOSE, _("Cancel"))
self.installButton = wx.Button(buttonsPanel, wx.ID_APPLY, _("Install"))
self.updateButton = wx.Button(buttonsPanel, wx.ID_REFRESH, _("Refresh"))
self.manualInstall = wx.lib.agw.hyperlink.HyperLinkCtrl(buttonsPanel, 111, _("Install a non-listed program"))
self.manualInstall.SetColours(wx.Colour(0, 0, 0), wx.Colour(0, 0, 0), wx.Colour(0, 0, 0))
self.manualInstall.AutoBrowse(False)
self.manualInstall.UpdateLink(True)
buttonsSizer.AddSpacer(10)
buttonsSizer.Add(self.manualInstall, 0)
buttonsSizer.AddStretchSpacer()
buttonsSizer.Add(self.updateButton, 0)
buttonsSizer.AddSpacer(1)
buttonsSizer.Add(self.cancelButton, 0)
buttonsSizer.AddSpacer(1)
buttonsSizer.Add(self.installButton, 0)
buttonsSizer.AddSpacer(10)
def __init__(self, parent, id, title):
PlayOnLinuxWindow.__init__(self, parent, -1, title, size=(850, 550))
self.cats_icons = {}
self.cats_links = {}
self.descriptionFetcher = DescriptionFetcher()
## Window
self.windowSizer = wx.BoxSizer(wx.VERTICAL)
self._createHeader()
self._createBody()
self.SetSizer(self.windowSizer)
self._createInstallWindowCategoryContentPanel()
self._createWaitPanel()
# Filter panel
self._createFilterPanel()
# Apps Navigation
self._createAppNavigation()
self._createAppDescription()
## Buttons
self._createButtons()
self.live = 0
self.openMin = False
self.images_cat = wx.ImageList(22, 22)
self.installButton.Enable(False)
# wx.EVT_TREE_SEL_CHANGED(self, 105, self.AddApps)
self.Bind(wx.EVT_TREE_SEL_CHANGED, self.AppsDetails, id=106)
self.Bind(wx.EVT_BUTTON, self.closeapp, id=wx.ID_CLOSE)
self.Bind(wx.EVT_BUTTON, self.installapp, id=wx.ID_APPLY)
self.Bind(wx.EVT_BUTTON, self.UpdatePol, id=wx.ID_REFRESH)
self.Bind(wx.EVT_CLOSE, self.closeapp)
self.Bind(wx.EVT_TREE_ITEM_ACTIVATED, self.installapp, id=106)
self.Bind(wx.EVT_TEXT, self.search, id=110)
self.Bind(wx.lib.agw.hyperlink.EVT_HYPERLINK_LEFT, self.manual, id=111)
self.Bind(wx.EVT_CHECKBOX, self.CheckBoxReload, id=401)
self.Bind(wx.EVT_CHECKBOX, self.CheckBoxReload, id=402)
self.Bind(wx.EVT_CHECKBOX, self.CheckBoxReload, id=403)
def TimerAction(self, event):
try:
self.lasthtml_content
except AttributeError:
self.lasthtml_content = ""
if self.lasthtml_content != self.descriptionFetcher.htmlContent:
self.SetImg(self.descriptionFetcher.miniature)
self.descriptionFetcher.miniature = self.descriptionFetcher.miniature_defaut
self.lasthtml_content = self.descriptionFetcher.htmlContent
if self.descriptionFetcher.htmlContent == "###WAIT###":
self.animation.Show()
self.animation.Play()
self.descriptionLoaderPanel.Show()
self.descriptionContentHtmlBox.Hide()
self.Refresh()
else:
self.animation.Stop()
self.descriptionContentHtmlBox.Show()
self.animation.Hide()
self.descriptionLoaderPanel.Hide()
self.Refresh()
self.descriptionContentHtmlBox.SetPage(self.descriptionFetcher.htmlContent)
self.Layout()
self.showStars(self.descriptionFetcher.stars)
if self.openMin == True:
if self.descriptionFetcher.med_miniature != None:
self.miniatureWindow = MiniatureWindow(None, -1,
self.appsList.GetItemText(self.appsList.GetSelection()),
self.descriptionFetcher.med_miniature)
self.miniatureWindow.Show()
self.miniatureWindow.Center(wx.BOTH)
self.openMin = False
def closeapp(self, event):
self.descriptionFetcher.thread_running = False
self.Destroy()
def manual(self, event):
self.live = 1
self.installapp(event)
def showStars(self, stars):
starWidth = 20
self.panelStars.DestroyChildren()
for i in range(int(stars)):
wx.StaticBitmap(self.panelStars, -1,
wx.Bitmap(Variables.playonlinux_env + "/etc/star.png"),
(i * starWidth, 0), wx.DefaultSize)
def UpdatePol(self, event):
self.DelApps()
self.Parent.updater.check()
playonlinux.SetSettings("LAST_TIMESTAMP", "0")
def installapp(self, event):
if (self.live == 1):
InstallApplication = "ExecLiveInstall"
else:
InstallApplication = self.appsList.GetItemText(self.appsList.GetSelection())
if os.path.exists(Variables.playonlinux_rep + "/configurations/listes/search"):
content = codecs.open(Variables.playonlinux_rep + "/configurations/listes/search", "r",
"utf-8").read().split("\n")
found = False
for line in content:
split = line.split("~")
if (split[0] == InstallApplication):
found = True
break;
if (found == True):
if (len(split) <= 1):
self.UpdatePol(self)
else:
if (split[1] == "1"):
wx.MessageBox(_(
"This program is currently in testing.\n\nIt might not work as expected. Your feedback, positive or negative, is specially important to improve this installer."),
_("Please read this"))
if (split[2] == "1"):
wx.MessageBox(_(
"This program contains a protection against copy (DRM) incompatible with emulation.\nThe only workaround is to use a \"no-cd\" patch, but since those can also be used for piracy purposes we won't give any support on this matter."),
_("Please read this"))
subprocess.Popen(
["bash", Variables.playonlinux_env + "/bash/install", InstallApplication])
self.Destroy()
return
def search(self, event):
self.apps = codecs.open(Variables.playonlinux_rep + "/configurations/listes/search", 'r', "utf-8")
self.apps = self.apps.readlines()
self.j = 0
while (self.j < len(self.apps)):
self.apps[self.j] = self.apps[self.j].replace("\n", "")
self.j += 1
self.j = 0
self.k = 0
self.user_search = self.searchbox.GetValue()
self.search_result = []
while (self.j < len(self.apps)):
if (self.user_search.lower() in self.apps[self.j].lower()):
self.search_result.append(self.apps[self.j])
self.k = self.k + 1
self.j = self.j + 1
if (len(self.user_search) < 2 or "~" in self.user_search):
self.search_result = []
self.user_search_cut = self.user_search.split(":")
if (len(self.user_search_cut) > 1):
if (self.user_search_cut[0] == "get" and self.user_search_cut[1].isdigit()):
self.search_result.append(self.user_search)
if (self.user_search != ""):
self.WriteApps(self.search_result)
else:
self.DelApps()
def EraseDetails(self):
self.descriptionContentHtmlBox.SetValue("")
def AppsDetails(self, event):
self.installButton.Enable(True)
self.application = self.appsList.GetItemText(self.appsList.GetSelection())
self.descriptionFetcher.download(self.application)
def sizeUpScreen(self, event):
self.openMin = True
def WriteApps(self, array):
self.imagesapps.RemoveAll()
self.DelApps()
self.root_apps = self.appsList.AddRoot("")
self.i = 0
array.sort(key=str.upper)
for app in array:
app_array = app.split("~")
appname = app_array[0]
try:
free = int(app_array[3])
testing = int(app_array[1])
nocd = int(app_array[2])
except IndexError:
free = 0
testing = 0
nocd = 0
show = True
if nocd == 1 and self.nocdChk.IsChecked() == 0:
show = False
if free == 0 and self.commercialChk.IsChecked() == 0:
show = False
if testing == 1 and self.testingChk.IsChecked() == 0:
show = False
if (show == True):
self.icon_look_for = Variables.playonlinux_rep + "/configurations/icones/" + appname
if (os.path.exists(self.icon_look_for)):
try:
bitmap = wx.Image(self.icon_look_for)
bitmap.Rescale(22, 22, wx.IMAGE_QUALITY_HIGH)
bitmap = bitmap.ConvertToBitmap()
self.imagesapps.Add(bitmap)
except:
pass
else:
self.imagesapps.Add(wx.Bitmap(Variables.playonlinux_env + "/etc/playonlinux22.png"))
itemId = self.appsList.AppendItem(self.root_apps, appname, self.i)
if testing == 1:
# (255,255,214) is web site color for beta, but it's not very visible next to plain white,
# and red is the color of danger
self.appsList.SetItemBackgroundColour(itemId, (255, 214, 214))
self.i = self.i + 1
def DelApps(self):
self.appsList.DeleteAllItems()
def SetImg(self, image):
self.image.SetBitmap(wx.Bitmap(image))
self.Refresh()
def ResetImg(self):
self.SetImg(Variables.playonlinux_env + "/resources/images/pol_min.png")
def CheckBoxReload(self, event):
chk_id = event.GetId()
if (chk_id == 401):
if (self.testingChk.IsChecked() == 1):
wx.MessageBox(_(
"By enabling this option, you can install programs that employ digital rights management (DRM) copy protection that are not supported by {0} and might need to be bypassed.\n\nThis feature should not be construed as implicit or implied condoning of piracy and as such, we will not offer any support for issues resulting from using this option.").format(
os.environ["APPLICATION_TITLE"]), _("Attention!"))
if (chk_id == 402):
if (self.nocdChk.IsChecked() == 1):
wx.MessageBox(_(
"By enabling this, you will have access to installers for programs that contain protections against copy (DRM) incompatible with emulation.\nThe only workaround is to use \"no-cd\" patches, but since those can also be used for piracy purposes we won't give any support on this matter."),
_("Please read this"))
if (self.searchbox.GetValue() == ""):
self.AddApps(self, noevent=True)
else:
self.search(self)
def AddApps(self, event, noevent=False):
self.searchbox.SetValue("")
if (noevent == False):
if (event.GetId() >= 3000):
self.cat_selected = event.GetId() - 3000
else:
self.cat_selected = event.GetId() - 2000
self.current_cat = self.cat_selected
else:
try:
self.cat_selected = self.current_cat
except:
return 0
if (self.cat_selected == 8):
self.apps = codecs.open(Variables.playonlinux_rep + "/configurations/listes/0", 'r', "utf-8")
if (self.cat_selected == 3):
self.apps = codecs.open(Variables.playonlinux_rep + "/configurations/listes/1", 'r', "utf-8")
if (self.cat_selected == 0):
self.apps = codecs.open(Variables.playonlinux_rep + "/configurations/listes/2", 'r', "utf-8")
if (self.cat_selected == 7):
self.apps = codecs.open(Variables.playonlinux_rep + "/configurations/listes/3", 'r', "utf-8")
if (self.cat_selected == 5):
self.apps = codecs.open(Variables.playonlinux_rep + "/configurations/listes/4", 'r', "utf-8")
if (self.cat_selected == 6):
self.apps = codecs.open(Variables.playonlinux_rep + "/configurations/listes/5", 'r', "utf-8")
if (self.cat_selected == 4):
self.apps = codecs.open(Variables.playonlinux_rep + "/configurations/listes/6", 'r', "utf-8")
if (self.cat_selected == 1):
self.apps = codecs.open(Variables.playonlinux_rep + "/configurations/listes/7", 'r', "utf-8")
if (self.cat_selected == 2):
self.apps = codecs.open(Variables.playonlinux_rep + "/configurations/listes/8", 'r', "utf-8")
if (self.cat_selected == 9):
self.apps = codecs.open(Variables.playonlinux_rep + "/configurations/listes/9", 'r', "utf-8")
if (self.cat_selected != -1):
self.apps = self.apps.readlines()
self.j = 0
while (self.j < len(self.apps)):
self.apps[self.j] = self.apps[self.j].replace("\n", "")
self.j += 1
self.WriteApps(self.apps)
def Destroy(self):
self.timer.Stop()
super().Destroy()
|
PlayOnLinux/POL-POM-4
|
python/install/InstallWindow.py
|
Python
|
gpl-3.0
| 24,437
|
from entity_reader import EntityReader
from dataset_importer.utils import HandleDatasetImportException
class TXTReader(EntityReader):
    """Entity reader that produces one feature dict per ``.txt`` file
    found (recursively, via the base class helpers) under a directory."""

    @staticmethod
    def get_features(**kwargs):
        """Yield a feature dict for each .txt file in ``kwargs['directory']``.

        Each dict combines the base-class meta features with the file's
        full text under 'text' and the file path under '_texta_id'.
        Per-file failures are delegated to HandleDatasetImportException
        so one bad file does not abort the whole import.
        """
        root = kwargs['directory']
        for path in TXTReader.get_file_list(root, 'txt'):
            try:
                document = TXTReader.get_meta_features(file_path=path)
                with open(path, 'r', encoding='utf8') as handle:
                    document['text'] = handle.read()
                document['_texta_id'] = path
                yield document
            except Exception as exc:
                HandleDatasetImportException(kwargs, exc, file_path=path)

    @staticmethod
    def count_total_documents(**kwargs):
        """Return how many .txt documents exist under ``kwargs['directory']``."""
        return TXTReader.count_documents(root_directory=kwargs['directory'], extension='txt')
|
texta-tk/texta
|
dataset_importer/document_reader/readers/entity/txt_reader.py
|
Python
|
gpl-3.0
| 771
|
from app import app
from app.api import apiv10
from app.form import GetLinkForm
from app.models import Urls
from app.utils import number_of_generated_short_url
from app.utils import short_url_generator
from app.utils import url_checker
from flask import abort
from flask import flash
from flask import jsonify
from flask import redirect
from flask import render_template
from flask import request
from flask import url_for
@app.route('/', methods=['GET', 'POST'])
def home():
    """Render the home page and handle URL-shortening submissions.

    GET simply shows the empty form. POST validates the submitted long
    URL, verifies it is alive via url_checker, and re-renders the page
    with either the generated short URL or a flashed error message.
    """
    number = number_of_generated_short_url()
    if request.method != 'POST':
        return render_template('index.html', form=GetLinkForm(), number=number)
    form = GetLinkForm(request.form)
    if not form.validate():
        flash("Please, paste a valid link to shorten it.")
        return render_template('index.html', form=form, number=number)
    long_url = form.long_url.data
    if url_checker(long_url) is True:
        short_url = short_url_generator(long_url)
        return render_template('index.html', form=form,
                               short_url=short_url, number=number)
    flash("The URL seems to be dead at this moment.")
    return render_template('index.html', form=form, number=number)
@app.route('/<string:short_url>')
def redirect_to_main_url(short_url):
    """Redirect an incoming short URL to its stored long URL.

    Unknown short URLs produce a 404 error page.
    """
    match = Urls.query.filter_by(short_url=short_url).first()
    if match is None:
        abort(404)
    return redirect(match.long_url)
@app.route('/api/<string:version>/', methods=['POST'])
def api(version):
    """Handle JSON API requests for shortening a URL.

    Expects a JSON body containing 'long_url'. Returns the JSON produced
    by the version's handler (apiv10), or aborts with 403 when the API
    version is unsupported or the payload is missing/malformed.
    """
    # BUG FIX: the original condition was
    #   version == 'v1.0' and not request.json or not 'title' in request.json
    # which groups as `(A and not B) or C` by operator precedence and
    # tested a 'title' key this app never uses — so it processed exactly
    # the requests *lacking* a proper JSON body (then crashed on
    # .get('long_url') of None) and rejected well-formed ones.
    # silent=True yields None instead of raising on a bad/absent body.
    payload = request.get_json(silent=True)
    if version == 'v1.0' and payload and 'long_url' in payload:
        info = apiv10(payload['long_url'])
        return jsonify(info)
    abort(403)
@app.errorhandler(404)
def page_not_found(error):
    """Render the custom 404 page (with the short-URL counter) for any
    404 raised anywhere in the app."""
    return render_template('404.html',
                           number=number_of_generated_short_url()), 404
|
ugcoder/Py-URL-Shortener
|
app/views.py
|
Python
|
gpl-3.0
| 2,826
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Send an OSC "/media/next" message to a player listening on local port 1234."""
import liblo
import sys

# send all messages to port 1234 on the local machine
try:
    target = liblo.Address(1234)
except liblo.AddressError as err:
    # BUG FIX: `except liblo.AddressError, err` is Python-2-only syntax
    # (SyntaxError on Python 3); the `as` form works on 2.6+ and 3.x.
    sys.exit(err)

# tell the player to advance; the trailing 1 is sent as an int argument
# (presumably a step count — TODO confirm against the receiving player)
liblo.send(target, "/media/next", 1)
|
yomguy/tools
|
osc/osc_player_next.py
|
Python
|
gpl-3.0
| 314
|
# Copyright 2011 Pyela Project
#
# This file is part of Pyela.
#
# Pyela is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyela is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Pyela. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import Gtk
class NetworkingErrorAlert(Gtk.Dialog):
    """A GTK dialog notifying the user of a networking error.

    The dialog has three buttons: Reconnect (default), login with another
    account, and quit.

    Constructor parameters:
        p_msg: primary text, displayed in bold with a larger font.
        s_msg: optional secondary text; should provide a more in-depth
            description of the problem and suggested action, and may also
            include more info about the error.

    Both strings are parsed as pango markup, so pass them along accordingly.
    """
    def __init__(self, parent, p_msg, s_msg = ""):
        super(NetworkingErrorAlert, self).__init__("Networking error",
                parent,
                Gtk.DialogFlags.MODAL|Gtk.DialogFlags.DESTROY_WITH_PARENT,
                ("_Login as other user", Gtk.ResponseType.REJECT,
                Gtk.STOCK_QUIT, Gtk.ResponseType.CANCEL,
                "_Reconnect", Gtk.ResponseType.ACCEPT)
                )
        self.set_default_response(Gtk.ResponseType.ACCEPT)
        self.set_position(Gtk.WindowPosition.CENTER_ON_PARENT)
        self.set_border_width(6) #GNOME HIG
        self.set_resizable(False) #GNOME HIG
        # Error icon on the left, message on the right (GNOME HIG alert layout)
        img = Gtk.Image()
        img.set_from_stock(Gtk.STOCK_DIALOG_ERROR, Gtk.IconSize.DIALOG)
        img.set_alignment(0, 0) #GNOME HIG
        label_str = '<span weight="bold" size="larger">%s</span>\n' % p_msg
        # IDIOM FIX: the original tested `s_msg != None and len(s_msg) > 0`;
        # a plain truthiness test is equivalent for None/"" and avoids the
        # non-idiomatic equality comparison with None.
        if s_msg:
            label_str += '\n%s\n' % s_msg
        label = Gtk.Label(label_str)
        label.set_use_markup(True)
        label.set_alignment(0, 0) #GNOME HIG
        label.set_line_wrap(True) #GNOME HIG
        label.set_selectable(True) #GNOME HIG
        hbox = Gtk.HBox()
        hbox.set_spacing(12) #GNOME HIG
        hbox.set_border_width(6) #GNOME HIG
        hbox.pack_start(img, False, False, 0)
        hbox.pack_start(label, True, True, 0)
        hbox.show_all()
        vbox = self.get_content_area()
        vbox.set_spacing(12) #GNOME HIG
        vbox.pack_start(hbox, False, False, 0)
        vbox.show_all()
|
atc-/pyela
|
chat/gui/networking_error.py
|
Python
|
gpl-3.0
| 2,558
|
from . import GeneralBranch, Branch
from anaconda_updates.settings import GlobalSettings
class Rhel6Branch(GeneralBranch):
    """Branch configuration for building anaconda updates images on RHEL6."""

    def __init__(self):
        super().__init__(
            branch_type=Branch.rhel6,
            cmd_args=["-rh6", "--rhel6"],
            help="working on RHEL6",
            version_script_params=["-rh6", "-p"],
            img_name="rhel6_updates.img",
        )
        # Features disabled for this branch: RHEL6 uses neither blivet
        # nor pykickstart, and auto-fetching is turned off.
        for flag in ("use_blivet", "use_pykickstart", "auto_fetch"):
            setattr(GlobalSettings, flag, False)
|
jkonecny12/anaconda_updates
|
anaconda_updates/releases/rhel6.py
|
Python
|
gpl-3.0
| 556
|