code stringlengths 2 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int32 2 1.05M |
|---|---|---|---|---|---|
import unittest
import json
import sys
import os.path
import websocket
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
from app.constants import Message
class TestServer(unittest.TestCase):
    """
    This is not a unit test.
    Simple websocket client to test basic functionality of a running
    game server at ws://127.0.0.1:8000/ws.

    The server pushes a card message as soon as a client connects, so
    most tests swallow one recv() before reading the reply they assert
    on.  All diagnostics use print() calls (the file previously mixed
    Python 2 print statements with one print() call) so the module also
    parses under Python 3.
    """

    def setUp(self):
        # Fresh connection per test; closed again in tearDown().
        self.ws = websocket.create_connection("ws://127.0.0.1:8000/ws")

    def testOnOpenServerSendsCard(self):
        # On connect the server immediately pushes a card to the client.
        res = self.ws.recv()
        print("\nCard on connection {0}".format(res))
        self.assertIn("cardReceived", res)

    def testServerSendsCard(self):
        print("\nSending {0} ServerSendsCard".format(Message.GetCard))
        self.ws.send(json.dumps({'type': Message.GetCard}))
        self.ws.recv()  # discard the card pushed on connect
        res = self.ws.recv()
        print(res)
        self.assertIn("Stormtrooper", res)

    def testServerSendsListRooms(self):
        print("\nSending {0} ListOfRooms".format(Message.GetListOfRoom))
        self.ws.send(json.dumps({'type': Message.GetListOfRoom}))
        self.ws.recv()  # discard the card pushed on connect
        res = self.ws.recv()
        print(res)
        self.assertIn(str(Message.ListOfRooms), res)

    def testServerCreatesAndSendsRoom(self):
        print("\nSending {0} CreateRoom".format(Message.CreateRoom))
        self.ws.send(json.dumps({'type': Message.CreateRoom, 'name': 'My Favorite Room'}))
        self.ws.recv()  # discard the card pushed on connect
        res = self.ws.recv()
        print(res)
        self.assertIn("id", res)

    def testErrorWhenUserCreatesSecondRoom(self):
        print("\n **************************---***************")
        print("\nSending {0} Create 2 room. Room 1".format(Message.CreateRoom))
        self.ws.send(json.dumps({'type': Message.CreateRoom, 'name': 'First room, Ok'}))
        self.ws.recv()  # discard the card pushed on connect
        res = self.ws.recv()
        print(res)
        print("create second Room")
        # A client may only own one room at a time; the second request
        # must be answered with an error message.
        self.ws.send(json.dumps({'type': Message.CreateRoom, 'name': 'SecondRoom room, WRONG'}))
        res2 = self.ws.recv()
        print(res2)
        self.assertIn(str(Message.Error), res2)

    def test_destroy_room(self):
        print("\n **************************---***************")
        self.ws.recv()  # discard the card pushed on connect
        print("\nSending {0} CreateRoom".format(Message.CreateRoom))
        self.ws.send(json.dumps({'type': Message.CreateRoom, 'name': 'This room will be destroyed'}))
        print("Recv: {0}".format(self.ws.recv()))
        # get list of rooms
        self.ws.send(json.dumps({'type': Message.GetListOfRoom}))
        print("list of rooms {0}".format(self.ws.recv()))
        print("Sending {0} DestroyRoom".format(Message.DestroyRoom))
        self.ws.send(json.dumps({'type': Message.DestroyRoom}))
        res = self.ws.recv()
        print("\nReceive: {0}".format(res))
        # get list of rooms again to observe the deletion
        self.ws.send(json.dumps({'type': Message.GetListOfRoom}))
        print("list of rooms {0}".format(self.ws.recv()))
        self.assertIn(str(Message.SUCCESS), res)

    # def test_client_can_send_chat_messages(self):
    #     print("\n chat *****************************************")
    #     self.ws.recv()
    #     self.ws.send(json.dumps({'type':Message.ListOfRooms}))
    #     res = self.ws.recv()
    #     roomId = json.loads(res)['id']
    #     self.ws.send(json.dumps({'type':Message.ConnectToRoom, 'id':roomId}))
    #     _= self.ws.recv()
    #
    #     self.assertIn("id", res)
    #

    def tearDown(self):
        self.ws.close()
# Run the client tests when executed directly (requires a live server).
if __name__ == "__main__":
    unittest.main()
## begin license ##
#
# "Meresco Harvester" consists of two subsystems, namely an OAI-harvester and
# a web-control panel.
# "Meresco Harvester" is originally called "Sahara" and was developed for
# SURFnet by:
# Seek You Too B.V. (CQ2) http://www.cq2.nl
#
# Copyright (C) 2006-2007 SURFnet B.V. http://www.surfnet.nl
# Copyright (C) 2007-2008 SURF Foundation. http://www.surf.nl
# Copyright (C) 2007-2009, 2011 Seek You Too (CQ2) http://www.cq2.nl
# Copyright (C) 2007-2009 Stichting Kennisnet Ict op school. http://www.kennisnetictopschool.nl
# Copyright (C) 2009 Tilburg University http://www.uvt.nl
# Copyright (C) 2011, 2020-2021 Stichting Kennisnet https://www.kennisnet.nl
# Copyright (C) 2020-2021 Data Archiving and Network Services https://dans.knaw.nl
# Copyright (C) 2020-2021 SURF https://www.surf.nl
# Copyright (C) 2020-2021 Seecr (Seek You Too B.V.) https://seecr.nl
# Copyright (C) 2020-2021 The Netherlands Institute for Sound and Vision https://beeldengeluid.nl
#
# This file is part of "Meresco Harvester"
#
# "Meresco Harvester" is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# "Meresco Harvester" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Meresco Harvester"; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
## end license ##
import unittest
import datetime, tempfile, os, shutil
from meresco.harvester.throughputanalyser import parseToTime, ThroughputAnalyser, ThroughputReport
class ThroughputAnalyserTest(unittest.TestCase):
    """Tests for ThroughputAnalyser / ThroughputReport.

    Works against per-repository event log files (<name>.events) placed
    in a scratch directory created in setUp and removed in tearDown.
    """

    def setUp(self):
        # Collects the repository names passed to the mocked
        # _analyseRepository (see mockAnalyseRepository below).
        self.mockAnalyseRepository_arguments = []
        self.testdir = os.path.join(tempfile.gettempdir(), 'throughputanalysertest')
        # Create the scratch directory only if it does not exist yet.
        not os.path.isdir(self.testdir) and os.makedirs(self.testdir)

    def tearDown(self):
        shutil.rmtree(self.testdir)

    def testParseToTime(self):
        # parseToTime parses "YYYY-MM-DD HH:MM:SS.mmm" timestamps; the
        # millisecond field becomes microseconds (123 -> 123000).
        timeString = "1999-12-03 12:34:35.123"
        date = parseToTime(timeString)
        self.assertEqual((1999,12,3,12,34,35,123000), (date.year,date.month,date.day, date.hour, date.minute, date.second, date.microsecond))
        date = parseToTime("2006-08-04 10:40:50.644")
        self.assertEqual((2006,8,4,10,40,50,644000), (date.year,date.month,date.day, date.hour, date.minute, date.second, date.microsecond))

    def testParseToTimeDiff(self):
        # Parsed values support datetime subtraction (timedelta).
        date1 = parseToTime("1999-12-03 12:34:35.123")
        date2 = parseToTime("1999-12-03 12:34:36.423")
        delta = date2 - date1
        self.assertEqual(1.3, delta.seconds + delta.microseconds/1000000.0)

    def testAnalyse(self):
        # With _analyseRepository mocked to return (500, 1000.0) per
        # repository, two repositories sum to (1000, 2000.0).
        t = ThroughputAnalyser(eventpath = self.testdir)
        t._analyseRepository = self.mockAnalyseRepository
        report = t.analyse(['repo1','repo2'], '2006-08-31')
        self.assertEqual(1000, report.records)
        self.assertEqual(2000.0, report.seconds)
        self.assertEqual(['repo1', 'repo2'], self.mockAnalyseRepository_arguments)

    def testAnalyseNothing(self):
        # No repositories: empty report; rates are rendered as '-'.
        t = ThroughputAnalyser(eventpath = self.testdir)
        t._analyseRepository = self.mockAnalyseRepository
        report = t.analyse([], '2006-08-31')
        self.assertEqual(0, report.records)
        self.assertEqual(0.0, report.seconds)
        self.assertEqual('-' , report.recordsPerSecond())
        self.assertEqual('-' , report.recordsPerDay())

    def testAnalyseRepository(self):
        # Write an events file by hand.  Only harvests dated on/after
        # the analysis date (2006-08-31) should count: 3 harvests of
        # 200 records, taking 15.5 + 25.5 + 35.5 = 76.5 seconds.
        r = open(os.path.join(self.testdir, 'repo1.events'), 'w')
        try:
            r.write("""
[2006-08-30 00:00:15.500] ENDHARVEST [repo1]
[2006-08-30 01:00:00.000] STARTHARVEST [repo1] Uploader connected ...
[2006-08-30 01:00:10.000] SUCCES [repo1] Harvested/Uploaded/Deleted/Total: 200/200/0/1000, ResumptionToken: r1
[2006-08-30 01:00:15.500] ENDHARVEST [repo1]
[2006-08-31 01:00:00.000] STARTHARVEST [repo1] Uploader connected ...
[2006-08-31 01:00:10.000] SUCCES [repo1] Harvested/Uploaded/Deleted/Total: 200/200/0/1200, ResumptionToken: r1
[2006-08-31 01:00:15.500] ENDHARVEST [repo1]
[2006-08-31 02:00:00.000] STARTHARVEST [repo1] Uploader connected ...
[2006-08-31 02:00:10.000] SUCCES [repo1] Harvested/Uploaded/Deleted/Total: 200/200/0/1400, ResumptionToken: r2
[2006-08-31 02:00:25.500] ENDHARVEST [repo1]
[2006-08-31 03:00:00.000] STARTHARVEST [repo1] Uploader connected ...
[2006-08-31 03:00:10.000] SUCCES [repo1] Harvested/Uploaded/Deleted/Total: 200/200/0/1600, ResumptionToken: r3
[2006-08-31 03:00:35.500] ENDHARVEST [repo1]
""")
        finally:
            r.close()
        t = ThroughputAnalyser(eventpath = self.testdir)
        records, seconds = t._analyseRepository('repo1', '2006-08-31')
        self.assertEqual(600, records)
        self.assertEqual(76.5, seconds)

    def testAnalyseNonExistingRepository(self):
        # Missing events file yields an empty result, not an error.
        t = ThroughputAnalyser(eventpath = self.testdir)
        records, seconds = t._analyseRepository('repository', '2006-08-31')
        self.assertEqual(0, records)
        self.assertEqual(0.0, seconds)

    def testReportOnEmptyEventsFile(self):
        t = ThroughputAnalyser(eventpath = self.testdir)
        records, seconds = t._analyseRepository('repo1', '2006-08-31')
        self.assertEqual(0, records)
        self.assertEqual(0, seconds)

    def testReport(self):
        # 100000 records in 10000 seconds: 10/s, 864000/day, 02:46:40.
        report = ThroughputReport()
        report.add(100000,10000.0)
        self.assertEqual('10.00', report.recordsPerSecond())
        self.assertEqual('864000', report.recordsPerDay())
        self.assertEqual("02:46:40", report.hmsString())

    #Mock self shunt
    def mockAnalyseRepository(self, repositoryName, dateSince):
        # Stands in for ThroughputAnalyser._analyseRepository; records
        # the call and returns a fixed (records, seconds) pair.
        self.mockAnalyseRepository_arguments.append(repositoryName)
        return 500, 1000.0
| seecr/meresco-harvester | test/throughputanalysertest.py | Python | gpl-2.0 | 6,185 |
from cfme.utils.version import get_stream
from collections import namedtuple
from contextlib import contextmanager
from cfme.test_framework.sprout.client import SproutClient
from cfme.utils.conf import cfme_data, credentials
from cfme.utils.log import logger
import pytest
from wait_for import wait_for
from cfme.test_framework.sprout.client import SproutException
from fixtures.appliance import temp_appliances
# A console command paired with how many seconds to wait for it to finish.
TimedCommand = namedtuple('TimedCommand', ['command', 'timeout'])
@pytest.yield_fixture(scope="function")
def dedicated_db_appliance(app_creds, appliance):
    """Yield a fresh appliance configured as a dedicated database.

    Console key sequence: 'ap' launch appliance_console, '' clear info
    screen, '5/8' setup db, '1' Creates v2_key, '1' selects internal db,
    'y' continue, '1' use partition, 'y' create dedicated db, 'pwd'
    db password, 'pwd' confirm db password + wait 360 secs and '' finish.
    """
    # Dedicated DB setup is only supported on 5.7+ builds.
    if appliance.version > '5.7':
        with temp_appliances(count=1, preconfigured=False) as apps:
            pwd = app_creds['password']
            # The menu option differs between 5.8+ ('5') and older ('8').
            opt = '5' if apps[0].version >= "5.8" else '8'
            command_set = ('ap', '', opt, '1', '1', 'y', '1', 'y', pwd, TimedCommand(pwd, 360), '')
            apps[0].appliance_console.run_commands(command_set)
            # Block until the appliance reports the dedicated DB active.
            wait_for(lambda: apps[0].db.is_dedicated_active)
            yield apps[0]
    else:
        raise Exception("Can't setup dedicated db on appliance below 5.7 builds")
""" The Following fixtures are for provisioning one preconfigured or unconfigured appliance for
testing from an FQDN provider unless there are no provisions available"""
@contextmanager
def fqdn_appliance(appliance, preconfigured):
    """Provision one appliance from Sprout and yield it.

    Tries each provider that is both available in Sprout and listed in
    cfme_data['fqdn_providers'], using the same version/stream as
    *appliance*.  Raises SproutException when no provider can provision.

    The appliance's ssh client is closed and the Sprout pool destroyed
    on exit -- now guaranteed via try/finally, so cleanup also happens
    when the consuming test raises (previously the code after ``yield``
    was skipped in that case, leaking the pool).
    """
    sp = SproutClient.from_config()
    available_providers = set(sp.call_method('available_providers'))
    required_providers = set(cfme_data['fqdn_providers'])
    usable_providers = available_providers & required_providers
    version = appliance.version.vstring
    stream = get_stream(appliance.version)
    for provider in usable_providers:
        try:
            apps, pool_id = sp.provision_appliances(
                count=1, preconfigured=preconfigured, version=version, stream=stream,
                provider=provider
            )
            break
        except Exception as e:
            logger.warning("Couldn't provision appliance with following error:")
            logger.warning("%s", e)  # lazy formatting; only rendered if emitted
            continue
    else:
        logger.error("Couldn't provision an appliance at all")
        raise SproutException('No provision available')
    try:
        yield apps[0]
    finally:
        # Always release resources, even if the using test fails.
        apps[0].ssh_client.close()
        sp.destroy_pool(pool_id)
@pytest.yield_fixture()
def unconfigured_appliance(appliance):
    # Sprout-provisioned appliance that has not been configured yet.
    with fqdn_appliance(appliance, preconfigured=False) as app:
        yield app
@pytest.yield_fixture()
def configured_appliance(appliance):
    # Sprout-provisioned appliance that comes fully preconfigured.
    with fqdn_appliance(appliance, preconfigured=True) as app:
        yield app
@pytest.yield_fixture()
def ipa_crud(configured_appliance, ipa_creds):
    """Yield a configured appliance set up for external IPA auth."""
    configured_appliance.appliance_console_cli.configure_ipa(ipa_creds['ipaserver'],
        ipa_creds['username'], ipa_creds['password'], ipa_creds['domain'], ipa_creds['realm'])
    yield(configured_appliance)
@pytest.fixture()
def app_creds():
    """Database and ssh credentials for the appliance under test."""
    db_creds = credentials['database']
    ssh_creds = credentials['ssh']
    return dict(
        username=db_creds['username'],
        password=db_creds['password'],
        sshlogin=ssh_creds['username'],
        sshpass=ssh_creds['password'],
    )
@pytest.fixture(scope="module")
def app_creds_modscope():
    """Module-scoped variant of app_creds (same credential mapping)."""
    db_creds = credentials['database']
    ssh_creds = credentials['ssh']
    return dict(
        username=db_creds['username'],
        password=db_creds['password'],
        sshlogin=ssh_creds['username'],
        sshpass=ssh_creds['password'],
    )
@pytest.fixture()
def ipa_creds():
    """IPA server connection details from cfme_data plus credentials."""
    auth_data = cfme_data['auth_modes']['ext_ipa']
    # "host.example.com" -> hostname "host", domain "example.com".
    hostname, domain = auth_data['ipaserver'].split('.', 1)
    creds = credentials[auth_data['credentials']]
    return {
        'hostname': hostname,
        'domain': domain,
        'realm': auth_data['iparealm'],
        'ipaserver': auth_data['ipaserver'],
        'username': creds['principal'],
        'password': creds['password'],
    }
| jkandasa/integration_tests | cfme/fixtures/cli.py | Python | gpl-2.0 | 4,212 |
#!/usr/bin/env python
# encoding:utf-8
"""
@software: PyCharm
@file: video_db.py
@time: 2016/8/4 16:56
"""
import sqlite3
class Create_DB():
    """Thin wrapper around a local SQLite database file (video.db)."""

    def __init__(self):
        # Open (or create) video.db in the working directory and keep a
        # cursor handy for the helper methods below.
        self.conn = sqlite3.connect('video.db')
        self.cn = self.conn.cursor()

    def create_table(self, table):
        """Execute *table*, a complete CREATE TABLE statement."""
        self.cn.execute(table)

    def insert_db(self):
        """Insert rows -- not implemented yet."""
        pass

    def select_db(self):
        """Query rows -- not implemented yet."""
        pass
if __name__ == '__main__':
    # No command-line behaviour yet; module is meant to be imported.
    pass
| bjweiqm/Sele | school/pachong/video_db.py | Python | gpl-2.0 | 534 |
import collections
def moduleman_plugin(*args):
    """Class decorator marking a class as a moduleman plugin.

    Usable both bare (``@moduleman_plugin``) and with required method
    names (``@moduleman_plugin("method_a", "method_b")``).  In the
    latter form each named method must exist on the decorated class,
    otherwise an Exception is raised.  The decorated class gets a
    ``__PLUGIN_MODULEMAN_MARK`` attribute so the module manager can
    recognise it.
    """
    method_args = []

    def inner_decorator(cls):
        for method in method_args:
            if method not in dir(cls):
                raise Exception("Required method %s not implemented" % method)
        cls.__PLUGIN_MODULEMAN_MARK = "Plugin mark"
        return cls

    # Bare use: args[0] is the class itself (classes are callable).
    # collections.Callable was removed in Python 3.10; the builtin
    # callable() is the portable replacement.
    if not callable(args[0]):
        method_args += args
        return inner_decorator
    return inner_decorator(args[0])
| 0ps/wfuzz | src/wfuzz/externals/moduleman/plugin.py | Python | gpl-2.0 | 482 |
import time

# Project Euler 40: product of the digits d(1), d(10), ..., d(1000000)
# of Champernowne's constant 0.123456789101112...

start_time = time.time()

# Concatenate the decimal expansions of 1..199999 -- that is well over
# 1,000,001 digits.  "".join avoids the O(n^2) cost of the previous
# repeated string concatenation in a loop.
output = "".join(map(str, range(1, 200000)))

# 0-based indexing: d(10^k) is output[10^k - 1].
print(int(output[9]) * int(output[99]) *
      int(output[999]) * int(output[9999]) *
      int(output[99999]) * int(output[999999]))
print("--- %s seconds ---" % (time.time() - start_time))
#!/usr/bin/python
# -*- coding: utf-8 -*-
import re, os, time
# function: read and parse sensor data file
# function: read and parse sensor data file
def read_sensor(path):
    """Read a DS18x20 1-wire `w1_slave` file and return the temperature.

    Returns the temperature in degrees Celsius as a string rounded to
    one decimal place (e.g. "22.6"), or "U" (unknown) when the file
    cannot be read, the CRC line does not say YES, or the content does
    not match the expected format.
    """
    value = "U"
    try:
        # 'with' guarantees the file is closed even if a read fails
        # (the old code leaked the handle on a mid-read exception).
        with open(path, "r") as f:
            line = f.readline()
            # First line must end in "YES": the CRC check passed.
            if re.match(r"([0-9a-f]{2} ){9}: crc=[0-9a-f]{2} YES", line):
                line = f.readline()
                m = re.match(r"([0-9a-f]{2} ){9}t=([+-]?[0-9]+)", line)
                if m:
                    # Raw reading is in millidegrees Celsius.
                    value = str(round(float(m.group(2)) / 1000.0, 1))
    except IOError as e:
        # 'except X as e' works on Python 2.6+ and 3 (the old
        # 'except (IOError), e' form is Python-2-only syntax).
        print("%s Error reading %s : %s" % (time.strftime("%x %X"), path, e))
    return value
# define pathes to 1-wire sensor data
# NOTE: a one-element tuple needs a trailing comma -- without it
# `pathes` was silently just a plain string, not a tuple.
pathes = (
    "/sys/bus/w1/devices/28-0314640daeff/w1_slave",
)

# read sensor data
#for path in pathes:
#   path = "/sys/bus/w1/devices/28-0314640daeff/w1_slave"
#   print(read_sensor(path))
#   time.sleep(30)

# Poll the sensor forever; print the temperature whenever it changes.
flag = 1
temp = 0
temp2 = 0
while (flag):
    temp2 = temp
    temp = read_sensor("/sys/bus/w1/devices/28-0314640daeff/w1_slave")
    if temp2 != temp:
        print(temp)
    time.sleep(11)
| o-unity/lanio | old/lsrv/bin/getTemp.py | Python | gpl-2.0 | 977 |
from allauth.account.signals import email_confirmed, email_changed, email_added, email_removed, user_signed_up, user_logged_in
from django.contrib.auth.models import User, Group, Permission
from django.db.models import Q
from django.dispatch import receiver
"""intercept signals from allauth"""
@receiver(email_confirmed)
def email_confirmed_(sender, email_address, **kwargs):
    """Handle a user manually confirming an email address.

    Once the *primary* address is confirmed, the owning user is added
    to the 'AllowedCommentary' group.
    """
    if not email_address.primary:
        return
    user = User.objects.get(email=email_address.email)
    commentary_group = Group.objects.get(name='AllowedCommentary')
    user.groups.add(commentary_group)
@receiver(user_signed_up)
def user_signed_up_(sender, request, user, **kwargs):
    """Handle a new signup.

    Social signups whose provider reports a verified email are granted
    commentary rights immediately (no manual confirmation happens for
    them).  Non-social signups are left to the email_confirmed handler.
    """
    social_login = kwargs.get('sociallogin', None)
    if not social_login:
        return
    social_account = social_login.account
    if not social_account:
        return
    extra = social_account.extra_data
    if 'verified_email' in extra and extra['verified_email']:
        user.groups.add(Group.objects.get(name='AllowedCommentary'))
| scotartt/commentarius | decommentariis/decommentariis/signals.py | Python | gpl-2.0 | 1,210 |
# -*- coding: utf-8 -*-
import xbmc,urllib

# Parser add-on archives to import into the p2p-streams plugin.
all_modules = [ 'https://github.com/bazingasheldon/tv/blob/master/raspberry/addon.tar.gz?raw=true']

for parser in all_modules:
    # mode=405 tells p2p-streams to fetch and install the given parser.
    xbmc.executebuiltin('XBMC.RunPlugin("plugin://plugin.video.p2p-streams/?mode=405&name=p2p&url=' + urllib.quote(parser) + '")')
    xbmc.sleep(1000)  # give each import a moment to start

xbmc.executebuiltin("Notification(%s,%s,%i,%s)" % ('P2P-Streams', "All parsers imported",1,''))
#!/usr/bin/env python
#
# special_tests.py: testing special and reserved file handling
#
# Subversion is a tool for revision control.
# See http://subversion.tigris.org for more information.
#
# ====================================================================
# Copyright (c) 2000-2007 CollabNet. All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://subversion.tigris.org/license-1.html.
# If newer versions of this license are posted there, you may use a
# newer version instead, at your option.
#
######################################################################
# General modules
import sys, os, re
# Our testing module
import svntest
from svntest.main import server_has_mergeinfo
# (abbreviation)
# Short aliases for the svntest helpers used throughout this file.
Skip = svntest.testcase.Skip
SkipUnless = svntest.testcase.SkipUnless
XFail = svntest.testcase.XFail
Item = svntest.wc.StateItem
######################################################################
# Tests
#
# Each test must return on success or raise on failure.
#----------------------------------------------------------------------
def general_symlink(sbox):
  "general symlink handling"
  # End-to-end check of symlink versioning: add+commit a link, see it in
  # 'svn diff', lose it updating to r1, get it back updating to r2, then
  # retarget the link and commit that as a modification.

  sbox.build()
  wc_dir = sbox.wc_dir

  # First try to just commit a symlink
  newfile_path = os.path.join(wc_dir, 'newfile')
  linktarget_path = os.path.join(wc_dir, 'linktarget')
  svntest.main.file_append(linktarget_path, 'this is just a link target')
  os.symlink('linktarget', newfile_path)
  svntest.main.run_svn(None, 'add', newfile_path, linktarget_path)

  expected_output = svntest.wc.State(wc_dir, {
    'newfile' : Item(verb='Adding'),
    'linktarget' : Item(verb='Adding'),
    })

  # Run a diff and verify that we get the correct output
  exit_code, stdout_lines, stderr_lines = svntest.main.run_svn(1, 'diff',
                                                               wc_dir)
  # Symlinks are versioned as 'link TARGET'; the diff must add that line.
  regex = '^\+link linktarget'
  for line in stdout_lines:
    if re.match(regex, line):
      break
  else:
    raise svntest.Failure

  # Commit and make sure everything is good
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'newfile' : Item(status=' ', wc_rev=2),
    'linktarget' : Item(status=' ', wc_rev=2),
    })
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status, None,
                                        wc_dir)

  ## Now we should update to the previous version, verify that no
  ## symlink is present, then update back to HEAD and see if the symlink
  ## is regenerated properly.
  svntest.actions.run_and_verify_svn(None, None, [],
                                     'up', '-r', '1', wc_dir)

  # Is the symlink gone?
  if os.path.isfile(newfile_path) or os.path.islink(newfile_path):
    raise svntest.Failure

  svntest.actions.run_and_verify_svn(None, None, [],
                                     'up', '-r', '2', wc_dir)

  # Is the symlink back?
  new_target = os.readlink(newfile_path)
  if new_target != 'linktarget':
    raise svntest.Failure

  ## Now change the target of the symlink, verify that it is shown as
  ## modified and that a commit succeeds.
  os.remove(newfile_path)
  os.symlink('A', newfile_path)

  was_cwd = os.getcwd()
  os.chdir(wc_dir)
  svntest.actions.run_and_verify_svn(None, [ "M newfile\n" ], [], 'st')
  os.chdir(was_cwd)

  expected_output = svntest.wc.State(wc_dir, {
    'newfile' : Item(verb='Sending'),
    })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
  expected_status.add({
    'newfile' : Item(status=' ', wc_rev=3),
    'linktarget' : Item(status=' ', wc_rev=2),
    })
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status, None, wc_dir)
def replace_file_with_symlink(sbox):
  "replace a normal file with a special file"
  # Replacing a versioned plain file with a symlink on disk is an
  # obstruction: status shows '~' and the commit must fail.

  sbox.build()
  wc_dir = sbox.wc_dir

  # First replace a normal file with a symlink and make sure we get an
  # error
  iota_path = os.path.join(wc_dir, 'iota')
  os.remove(iota_path)
  os.symlink('A', iota_path)

  # Does status show the obstruction?
  was_cwd = os.getcwd()
  os.chdir(wc_dir)
  svntest.actions.run_and_verify_svn(None, [ "~ iota\n" ], [], 'st')

  # And does a commit fail?
  os.chdir(was_cwd)
  exit_code, stdout_lines, stderr_lines = svntest.main.run_svn(1, 'ci', '-m',
                                                               'log msg',
                                                               wc_dir)
  regex = 'svn: Commit failed'
  for line in stderr_lines:
    if re.match(regex, line):
      break
  else:
    raise svntest.Failure
def import_export_symlink(sbox):
  "import and export a symlink"
  # 'svn import' a symlink, update to get it back, then 'svn export'
  # from both the working copy and the repository URL, checking the
  # link is recreated each time.

  sbox.build()
  wc_dir = sbox.wc_dir

  # create a new symlink to import
  new_path = os.path.join(wc_dir, 'new_file')
  os.symlink('linktarget', new_path)

  # import this symlink into the repository
  url = sbox.repo_url + "/dirA/dirB/new_link"
  exit_code, output, errput = svntest.actions.run_and_verify_svn(
    'Import a symlink', None, [], 'import',
    '-m', 'log msg', new_path, url)

  regex = "(Committed|Imported) revision [0-9]+."
  for line in output:
    if re.match(regex, line):
      break
  else:
    raise svntest.Failure

  # remove the unversioned link
  os.remove(new_path)

  # run update and verify that the symlink is put back into place
  svntest.actions.run_and_verify_svn(None, None, [],
                                     'up', wc_dir)

  # Is the symlink back?
  link_path = wc_dir + "/dirA/dirB/new_link"
  new_target = os.readlink(link_path)
  if new_target != 'linktarget':
    raise svntest.Failure

  ## Now we will try exporting from both the working copy and the
  ## repository directly, verifying that the symlink is created in
  ## both cases.
  for export_src, dest_dir in [(sbox.wc_dir, 'export-wc'),
                               (sbox.repo_url, 'export-url')]:
    export_target = sbox.add_wc_path(dest_dir)
    svntest.actions.run_and_verify_svn(None, None, [],
                                       'export', export_src, export_target)

    # is the link at the correct place?
    link_path = os.path.join(export_target, "dirA/dirB/new_link")
    new_target = os.readlink(link_path)
    if new_target != 'linktarget':
      raise svntest.Failure
#----------------------------------------------------------------------
# Regression test for issue 1986
def copy_tree_with_symlink(sbox):
  "'svn cp dir1 dir2' which contains a symlink"
  # Copying a directory that contains a versioned symlink must carry
  # the link along as a normal copied member.

  sbox.build()
  wc_dir = sbox.wc_dir

  # Create a versioned symlink within directory 'A/D/H'.
  newfile_path = os.path.join(wc_dir, 'A', 'D', 'H', 'newfile')
  linktarget_path = os.path.join(wc_dir, 'A', 'D', 'H', 'linktarget')
  svntest.main.file_append(linktarget_path, 'this is just a link target')
  os.symlink('linktarget', newfile_path)
  svntest.main.run_svn(None, 'add', newfile_path, linktarget_path)

  expected_output = svntest.wc.State(wc_dir, {
    'A/D/H/newfile' : Item(verb='Adding'),
    'A/D/H/linktarget' : Item(verb='Adding'),
    })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'A/D/H/newfile' : Item(status=' ', wc_rev=2),
    'A/D/H/linktarget' : Item(status=' ', wc_rev=2),
    })
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status, None, wc_dir)

  # Copy H to H2
  H_path = os.path.join(wc_dir, 'A', 'D', 'H')
  H2_path = os.path.join(wc_dir, 'A', 'D', 'H2')
  svntest.actions.run_and_verify_svn(None, None, [], 'cp', H_path, H2_path)

  # 'svn status' should show just "A/D/H2 A +". Nothing broken.
  expected_status.add({
    'A/D/H2' : Item(status='A ', copied='+', wc_rev='-'),
    'A/D/H2/chi' : Item(status=' ', copied='+', wc_rev='-'),
    'A/D/H2/omega' : Item(status=' ', copied='+', wc_rev='-'),
    'A/D/H2/psi' : Item(status=' ', copied='+', wc_rev='-'),
    'A/D/H2/linktarget' : Item(status=' ', copied='+', wc_rev='-'),
    'A/D/H2/newfile' : Item(status=' ', copied='+', wc_rev='-'),
    })
  svntest.actions.run_and_verify_status(wc_dir, expected_status)
def replace_symlink_with_file(sbox):
  "replace a special file with a non-special file"
  # The reverse of replace_file_with_symlink: a committed symlink
  # replaced on disk by a plain file is an obstruction that cannot be
  # committed.

  sbox.build()
  wc_dir = sbox.wc_dir

  # Create a new special file and commit it.
  newfile_path = os.path.join(wc_dir, 'newfile')
  linktarget_path = os.path.join(wc_dir, 'linktarget')
  svntest.main.file_append(linktarget_path, 'this is just a link target')
  os.symlink('linktarget', newfile_path)
  svntest.main.run_svn(None, 'add', newfile_path, linktarget_path)

  expected_output = svntest.wc.State(wc_dir, {
    'newfile' : Item(verb='Adding'),
    'linktarget' : Item(verb='Adding'),
    })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'newfile' : Item(status=' ', wc_rev=2),
    'linktarget' : Item(status=' ', wc_rev=2),
    })
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status, None, wc_dir)

  # Now replace the symlink with a normal file and try to commit, we
  # should get an error
  os.remove(newfile_path);
  svntest.main.file_append(newfile_path, "text of actual file");

  # Does status show the obstruction?
  was_cwd = os.getcwd()
  os.chdir(wc_dir)
  svntest.actions.run_and_verify_svn(None, [ "~ newfile\n" ], [], 'st')

  # And does a commit fail?
  os.chdir(was_cwd)
  exit_code, stdout_lines, stderr_lines = svntest.main.run_svn(1, 'ci', '-m',
                                                               'log msg',
                                                               wc_dir)
  regex = 'svn: Commit failed'
  for line in stderr_lines:
    if re.match(regex, line):
      break
  else:
    raise svntest.Failure
def remove_symlink(sbox):
  "remove a symlink"
  # Commit a symlink, then 'svn rm' it and commit the deletion.

  sbox.build()
  wc_dir = sbox.wc_dir

  # Commit a symlink
  newfile_path = os.path.join(wc_dir, 'newfile')
  linktarget_path = os.path.join(wc_dir, 'linktarget')
  svntest.main.file_append(linktarget_path, 'this is just a link target')
  os.symlink('linktarget', newfile_path)
  svntest.main.run_svn(None, 'add', newfile_path, linktarget_path)

  expected_output = svntest.wc.State(wc_dir, {
    'newfile' : Item(verb='Adding'),
    'linktarget' : Item(verb='Adding'),
    })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'newfile' : Item(status=' ', wc_rev=2),
    'linktarget' : Item(status=' ', wc_rev=2),
    })
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status, None, wc_dir)

  # Now remove it
  svntest.actions.run_and_verify_svn(None, None, [], 'rm', newfile_path)

  # Commit and verify that it worked
  expected_output = svntest.wc.State(wc_dir, {
    'newfile' : Item(verb='Deleting'),
    })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'linktarget' : Item(status=' ', wc_rev=2),
    })
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status, None, wc_dir)
def merge_symlink_into_file(sbox):
  "merge symlink into file"
  # On a branch (copy of A/D) replace the file gamma with a symlink,
  # then merge that change back onto A/D; revert, and merge + commit
  # again (regression for issue 2530).

  sbox.build()
  wc_dir = sbox.wc_dir
  d_url = sbox.repo_url + '/A/D'
  dprime_url = sbox.repo_url + '/A/Dprime'
  gamma_path = os.path.join(wc_dir, 'A', 'D', 'gamma')
  gamma_prime_path = os.path.join(wc_dir, 'A', 'Dprime', 'gamma')

  # create a copy of the D directory to play with
  svntest.main.run_svn(None,
                       'copy', d_url, dprime_url, '-m', 'copy')
  svntest.main.run_svn(None,
                       'update', sbox.wc_dir)

  # remove A/Dprime/gamma
  svntest.main.run_svn(None, 'delete', gamma_prime_path)
  expected_output = svntest.wc.State(wc_dir, {
    'A/Dprime/gamma' : Item(verb='Deleting'),
    })
  svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, None,
                                        wc_dir)

  # Commit a symlink in its place
  linktarget_path = os.path.join(wc_dir, 'linktarget')
  svntest.main.file_append(linktarget_path, 'this is just a link target')
  os.symlink('linktarget', gamma_prime_path)
  svntest.main.run_svn(None, 'add', gamma_prime_path)

  expected_output = svntest.wc.State(wc_dir, {
    'A/Dprime/gamma' : Item(verb='Adding'),
    })
  svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, None,
                                        wc_dir)

  # merge the creation of the symlink into the original directory
  svntest.main.run_svn(None,
                       'merge', '-r', '2:4', dprime_url,
                       os.path.join(wc_dir, 'A', 'D'))

  # now revert, and we'll get a strange error
  svntest.main.run_svn(None, 'revert', '-R', wc_dir)

  # assuming we got past the revert because someone fixed that bug, lets
  # try the merge and a commit, since that apparently used to throw us for
  # a loop, see issue 2530
  svntest.main.run_svn(None,
                       'merge', '-r', '2:4', dprime_url,
                       os.path.join(wc_dir, 'A', 'D'))

  expected_output = svntest.wc.State(wc_dir, {
    'A/D' : Item(verb='Sending'),
    'A/D/gamma' : Item(verb='Replacing'),
    })
  svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, None,
                                        wc_dir)
def merge_file_into_symlink(sbox):
  "merge file into symlink"
  # Inverse of merge_symlink_into_file: change the original file in A/D
  # and merge that text change onto the branch where gamma has become a
  # symlink.

  sbox.build()
  wc_dir = sbox.wc_dir
  d_url = sbox.repo_url + '/A/D'
  dprime_url = sbox.repo_url + '/A/Dprime'
  gamma_path = os.path.join(wc_dir, 'A', 'D', 'gamma')
  gamma_prime_path = os.path.join(wc_dir, 'A', 'Dprime', 'gamma')

  # create a copy of the D directory to play with
  svntest.main.run_svn(None,
                       'copy', d_url, dprime_url, '-m', 'copy')
  svntest.main.run_svn(None,
                       'update', sbox.wc_dir)

  # remove A/Dprime/gamma
  svntest.main.run_svn(None, 'delete', gamma_prime_path)
  expected_output = svntest.wc.State(wc_dir, {
    'A/Dprime/gamma' : Item(verb='Deleting'),
    })
  svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, None,
                                        wc_dir)

  # Commit a symlink in its place
  linktarget_path = os.path.join(wc_dir, 'linktarget')
  svntest.main.file_append(linktarget_path, 'this is just a link target')
  os.symlink('linktarget', gamma_prime_path)
  svntest.main.run_svn(None, 'add', gamma_prime_path)

  expected_output = svntest.wc.State(wc_dir, {
    'A/Dprime/gamma' : Item(verb='Adding'),
    })
  svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, None,
                                        wc_dir)

  # Modify the original file so there is a text change to merge.
  svntest.main.file_write(gamma_path, 'changed file', 'w+')

  expected_output = svntest.wc.State(wc_dir, {
    'A/D/gamma' : Item(verb='Sending'),
    })
  svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, None,
                                        wc_dir)

  # ok, now merge the change to the file into the symlink we created, this
  # gives us a weird error
  svntest.main.run_svn(None,
                       'merge', '-r', '4:5', d_url,
                       os.path.join(wc_dir, 'A', 'Dprime'))
# Issue 2701: Tests to see repository with symlinks can be checked out on all
# platforms.
def checkout_repo_with_symlinks(sbox):
  "checkout a repository containing symlinks"
  # Load a dump that already contains symlinks and verify checkout
  # succeeds even on platforms (Windows) without real symlink support.

  svntest.actions.load_repo(sbox, os.path.join(os.path.dirname(sys.argv[0]),
                                               'special_tests_data',
                                               'symlink.dump'))

  expected_output = svntest.wc.State(sbox.wc_dir, {
    'from': Item(status='A '),
    'to': Item(status='A '),
    })

  # On Windows the link is materialized as a plain file with the link
  # representation as its contents.
  if svntest.main.is_os_windows():
    expected_link_contents = 'link to'
  else:
    expected_link_contents = ''

  expected_wc = svntest.wc.State('', {
    'from' : Item(contents=expected_link_contents),
    'to' : Item(contents=''),
    })
  svntest.actions.run_and_verify_checkout(sbox.repo_url,
                                          sbox.wc_dir,
                                          expected_output,
                                          expected_wc)
# Issue 2716: 'svn diff' against a symlink to a directory within the wc
def diff_symlink_to_dir(sbox):
  "diff a symlink to a directory"
  # Adding a symlink that points at a directory must diff as the special
  # file itself ('link <target>' plus the svn:special property), not as
  # the directory contents.

  sbox.build(read_only = True)
  os.chdir(sbox.wc_dir)

  # Create a symlink to A/D/.
  d_path = os.path.join('A', 'D')
  link_path = 'link'
  os.symlink(d_path, link_path)

  # Add the symlink.
  svntest.main.run_svn(None, 'add', link_path)

  # Now diff the wc itself and check the results.
  expected_output = [
    "Index: link\n",
    "===================================================================\n",
    "--- link\t(revision 0)\n",
    "+++ link\t(revision 0)\n",
    "@@ -0,0 +1 @@\n",
    "+link " + d_path + "\n",
    "\ No newline at end of file\n",
    "\n",
    "Property changes on: link\n",
    "___________________________________________________________________\n",
    "Added: svn:special\n",
    " + *\n",
    "\n" ]
  svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff',
                                     '.')

  # We should get the same output if we the diff the symlink itself.
  svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff',
                                     link_path)
# Issue 2692 (part of): Check that the client can check out a repository
# that contains an unknown special file type.
def checkout_repo_with_unknown_special_type(sbox):
  "checkout repository with unknown special file type"

  # Issue 2692 (part of): the client must not choke on a special file of a
  # type it does not recognize; it is checked out as an ordinary file.
  dump = os.path.join(os.path.dirname(sys.argv[0]),
                      'special_tests_data',
                      'bad-special-type.dump')
  svntest.actions.load_repo(sbox, dump)

  expected_output = svntest.wc.State(sbox.wc_dir, {
    'special': Item(status='A '),
    })
  expected_wc = svntest.wc.State('', {
    'special' : Item(contents='gimble wabe'),
    })

  svntest.actions.run_and_verify_checkout(sbox.repo_url,
                                          sbox.wc_dir,
                                          expected_output,
                                          expected_wc)
def replace_symlink_with_dir(sbox):
  "replace a special file with a directory"

  # Load a repository that already contains the symlinks 'from' and 'to'.
  svntest.actions.load_repo(sbox, os.path.join(os.path.dirname(sys.argv[0]),
                                               'special_tests_data',
                                               'symlink.dump'))

  wc_dir = sbox.wc_dir
  from_path = os.path.join(wc_dir, 'from')

  # Now replace the symlink with a directory and try to commit, we
  # should get an error
  # (idiom fix: dropped the stray C-style ';' statement terminators)
  os.remove(from_path)
  os.mkdir(from_path)

  # Does status show the obstruction?
  was_cwd = os.getcwd()
  os.chdir(wc_dir)
  svntest.actions.run_and_verify_svn(None, [ "~ from\n" ], [], 'st')

  # The commit shouldn't do anything.
  # I'd expect a failed commit here, but replacing a file locally with a
  # directory seems to make svn think the file is unchanged.
  os.chdir(was_cwd)
  exit_code, stdout_lines, stderr_lines = svntest.main.run_svn(1, 'ci', '-m',
                                                               'log msg',
                                                               wc_dir)
  if not (stdout_lines == [] or stderr_lines == []):
    raise svntest.Failure
# test for issue #1808: svn up deletes local symlink that obstructs
# versioned file
def update_obstructing_symlink(sbox):
  "symlink obstructs incoming delete"

  sbox.build()
  wc_dir = sbox.wc_dir
  mu_path = os.path.join(wc_dir, 'A', 'mu')
  mu_url = sbox.repo_url + '/A/mu'
  iota_path = os.path.join(wc_dir, 'iota')

  # Replace the scheduled-for-delete A/mu with an unversioned symlink.
  svntest.main.run_svn(None, 'rm', mu_path)
  os.symlink(iota_path, mu_path)

  # Delete A/mu directly in the repository ...
  svntest.main.run_svn(None, 'rm', mu_url,
                       '-m', 'log msg')
  # ... then update: the incoming delete must not remove the local link.
  svntest.main.run_svn(None,
                       'up', wc_dir)

  # check that the symlink is still there
  if os.readlink(mu_path) != iota_path:
    raise svntest.Failure
def warn_on_reserved_name(sbox):
  "warn when attempt operation on a reserved name"

  sbox.build()
  wc_dir = sbox.wc_dir

  # The admin area may be called .svn or _svn; use whichever this
  # working copy actually has.
  reserved_path = None
  for admin_name in (".svn", "_svn"):
    candidate = os.path.join(wc_dir, admin_name)
    if os.path.exists(candidate):
      reserved_path = candidate
      break

  if reserved_path is None:
    # We don't know how to test this, but have no reason to believe
    # it would fail. (TODO: any way to return 'Skip', though?)
    return

  svntest.actions.run_and_verify_svn(
    "Locking a file with a reserved name failed to result in an error",
    None,
    ".*Skipping argument: '.+' ends in a reserved name.*",
    'lock', reserved_path)
# on users@: http://mid.gmane.org/1292856447.8650.24.camel@nimble.325Bayport
def unrelated_changed_special_status(sbox):
  "commit foo while bar changed special status"

  sbox.build()
  wc_dir = sbox.wc_dir
  os.chdir(os.path.join(sbox.wc_dir, 'A/D/H'))

  # Bug fix: close the file handle instead of leaking it; relying on GC
  # finalization to flush the buffered write is not portable.
  f = open('chi', 'a')
  f.write('random local mod')
  f.close()

  # Replace psi with a symlink to the versioned omega.
  os.unlink('psi')
  os.symlink('omega', 'psi') # omega is versioned!

  # Commit only the changelist containing chi; psi's changed special
  # status must not break that commit.
  svntest.main.run_svn(None, 'changelist', 'chi cl', 'chi')
  svntest.actions.run_and_verify_svn(None, None, [], 'commit',
                                     '--changelist', 'chi cl',
                                     '-m', 'psi changed special status')
########################################################################
# Run the tests

# list all tests here, starting with None:
# Symlink-specific tests are restricted to POSIX; diff_symlink_to_dir is
# a known failure (XFail, issue 2716).
test_list = [ None,
              SkipUnless(general_symlink, svntest.main.is_posix_os),
              SkipUnless(replace_file_with_symlink, svntest.main.is_posix_os),
              SkipUnless(import_export_symlink, svntest.main.is_posix_os),
              SkipUnless(copy_tree_with_symlink, svntest.main.is_posix_os),
              SkipUnless(replace_symlink_with_file, svntest.main.is_posix_os),
              SkipUnless(remove_symlink, svntest.main.is_posix_os),
              SkipUnless(SkipUnless(merge_symlink_into_file,
                                    svntest.main.is_posix_os),
                         server_has_mergeinfo),
              SkipUnless(merge_file_into_symlink, svntest.main.is_posix_os),
              checkout_repo_with_symlinks,
              XFail(SkipUnless(diff_symlink_to_dir, svntest.main.is_posix_os)),
              checkout_repo_with_unknown_special_type,
              replace_symlink_with_dir,
              SkipUnless(update_obstructing_symlink, svntest.main.is_posix_os),
              warn_on_reserved_name,
              SkipUnless(unrelated_changed_special_status,
                         svntest.main.is_posix_os),
              ]

if __name__ == '__main__':
  svntest.main.run_tests(test_list)
  # NOTREACHED

### End of file.
| bdmod/extreme-subversion | BinarySourcce/subversion-1.6.17/subversion/tests/cmdline/special_tests.py | Python | gpl-2.0 | 22,957 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2011-2012 Red Hat, Inc.
#
# Authors:
# Thomas Woerner <twoerner@redhat.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import time
from firewall.core.base import *
from firewall.core.logger import log
from firewall.functions import portStr, checkIPnMask, checkIP6nMask, \
checkProtocol, enable_ip_forwarding, check_single_address
from firewall.core.rich import *
from firewall.errors import *
from firewall.core.ipXtables import ip4tables_available_tables,\
ip6tables_available_tables, OUR_CHAINS
# ip versions whose kernel provides the "mangle"/"nat" tables.  A missing
# nat table also disables mangle for that ip version, because the zone
# chain sets below need both for port forwarding to work.
mangle = []
nat = []
for _ipv, _tables in (("ipv4", ip4tables_available_tables),
                      ("ipv6", ip6tables_available_tables)):
    if "mangle" in _tables:
        mangle.append(_ipv)
    if "nat" in _tables:
        nat.append(_ipv)
    elif _ipv in mangle:
        mangle.remove(_ipv)
# Per-table hook chains into which zone chains are plugged, mapped to the
# ip versions they apply to.  The nat/mangle lists were filtered above to
# the ip versions whose kernel actually provides those tables.
ZONE_CHAINS = {
    "filter": {
        "INPUT": [ "ipv4", "ipv6" ],
        "FORWARD_IN": [ "ipv4", "ipv6" ],
        "FORWARD_OUT": [ "ipv4", "ipv6" ],
    },
    "nat": {
        "PREROUTING": nat,
        "POSTROUTING": nat,
    },
    "mangle": {
        "PREROUTING": mangle,
    },
}
# iptables option used to match the interface in each hook chain:
# "-i" (in-interface) on the inbound hooks, "-o" (out-interface) on the
# outbound ones.
INTERFACE_ZONE_OPTS = {
    "PREROUTING": "-i",
    "POSTROUTING": "-o",
    "INPUT": "-i",
    "FORWARD_IN": "-i",
    "FORWARD_OUT": "-o",
    "OUTPUT": "-o",
}
class FirewallZone(object):
def __init__(self, fw):
    # back-reference to the central Firewall object
    self._fw = fw
    # zone -> table -> list of hook chains created for that zone
    self._chains = { }
    # zone name -> zone object (runtime .settings attached in add_zone)
    self._zones = { }

def __repr__(self):
    return '%s(%r, %r)' % (self.__class__, self._chains, self._zones)

def cleanup(self):
    # Drop all in-memory state; does not touch kernel rules.
    self._chains.clear()
    self._zones.clear()
# zones
def get_zones(self):
    """Return all known zone names, sorted."""
    return sorted(self._zones.keys())

def get_zone_of_interface(self, interface):
    """Return the zone *interface* is bound to, or None."""
    interface_id = self.__interface_id(interface)
    for zone in self._zones:
        if interface_id in self._zones[zone].settings["interfaces"]:
            # an interface can only be part of one zone
            return zone
    return None

def get_zone_of_source(self, source):
    """Return the zone *source* is bound to, or None."""
    source_id = self.__source_id(source)
    for zone in self._zones:
        if source_id in self._zones[zone].settings["sources"]:
            # a source_id can only be part of one zone
            return zone
    return None

def get_zone(self, zone):
    """Return the zone object; check_zone raises for unknown names."""
    z = self._fw.check_zone(zone)
    return self._zones[z]
def _error2warning(self, f, name, *args):
# transform errors into warnings
try:
f(name, *args)
except FirewallError as error:
msg = str(error)
log.warning("%s: %s" % (name, msg))
def add_zone(self, obj):
obj.settings = { x : {} for x in [ "interfaces", "sources",
"services", "ports",
"masquerade", "forward_ports",
"icmp_blocks", "rules",
"protocols" ] }
self._zones[obj.name] = obj
def remove_zone(self, zone):
obj = self._zones[zone]
if obj.applied:
self.unapply_zone_settings(zone)
obj.settings.clear()
del self._zones[zone]
def apply_zones(self):
    """Apply every zone's permanent configuration to the kernel.

    Per-item failures are downgraded to warnings via _error2warning so a
    broken item does not abort the whole zone.  A zone only counts as
    applied once at least one interface or source is bound to it.
    """
    for zone in self.get_zones():
        obj = self._zones[zone]
        applied = False
        # load zone in case of missing services, icmptypes etc.
        for args in obj.icmp_blocks:
            self._error2warning(self.add_icmp_block, obj.name, args)
        for args in obj.forward_ports:
            self._error2warning(self.add_forward_port, obj.name, *args)
        for args in obj.services:
            self._error2warning(self.add_service, obj.name, args)
        for args in obj.ports:
            self._error2warning(self.add_port, obj.name, *args)
        for args in obj.protocols:
            self._error2warning(self.add_protocol, obj.name, args)
        if obj.masquerade:
            self._error2warning(self.add_masquerade, obj.name)
        for args in obj.rules:
            self._error2warning(self.add_rule, obj.name, args)
        for args in obj.interfaces:
            self._error2warning(self.add_interface, obj.name, args)
            applied = True
        for args in obj.sources:
            self._error2warning(self.add_source, obj.name, args)
            applied = True
        obj.applied = applied
# dynamic chain handling
def __chain(self, zone, create, table, chain):
    """Create or remove the zone's chain set for one table/hook chain.

    The set is the base chain plus its _log/_deny/_allow sub-chains;
    self._chains tracks what already exists so repeated calls are no-ops.
    """
    if create:
        # already created earlier?
        if zone in self._chains and \
           table in self._chains[zone] and \
           chain in self._chains[zone][table]:
            return
    else:
        # nothing to remove?
        if zone not in self._chains or \
           table not in self._chains[zone] or \
           chain not in self._chains[zone][table]:
            return

    chains = [ ]
    rules = [ ]
    zones = [ DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS[chain],
                                         zone=zone) ]

    # TODO: simplify for one zone only
    for _zone in zones:
        ipvs = []
        if self._fw.is_table_available("ipv4", table):
            ipvs.append("ipv4")
        if self._fw.is_table_available("ipv6", table):
            ipvs.append("ipv6")

        for ipv in ipvs:
            OUR_CHAINS[table].update(set([_zone,
                                          "%s_log" % _zone,
                                          "%s_deny" % _zone,
                                          "%s_allow" % _zone]))
            chains.append((ipv, [ _zone, "-t", table ]))
            chains.append((ipv, [ "%s_log" % (_zone), "-t", table ]))
            chains.append((ipv, [ "%s_deny" % (_zone), "-t", table ]))
            chains.append((ipv, [ "%s_allow" % (_zone), "-t", table ]))

            # fixed evaluation order: log, then deny, then allow
            rules.append((ipv, [ _zone, 1, "-t", table,
                                 "-j", "%s_log" % (_zone) ]))
            rules.append((ipv, [ _zone, 2, "-t", table,
                                 "-j", "%s_deny" % (_zone) ]))
            rules.append((ipv, [ _zone, 3, "-t", table,
                                 "-j", "%s_allow" % (_zone) ]))

            # Handle trust, block and drop zones:
            # Add an additional rule with the zone target (accept, reject
            # or drop) to the base _zone only in the filter table.
            # Otherwise it is not be possible to have a zone with drop
            # target, that is allowing traffic that is locally initiated
            # or that adds additional rules. (RHBZ#1055190)
            target = self._zones[zone].target
            if table == "filter" and \
               target in [ "ACCEPT", "REJECT", "%%REJECT%%", "DROP" ] and \
               chain in [ "INPUT", "FORWARD_IN", "FORWARD_OUT", "OUTPUT" ]:
                rules.append((ipv, [ _zone, 4, "-t", table, "-j", target ]))

    if create:
        # handle chains first
        ret = self._fw.handle_chains(chains, create)
        if ret:
            (cleanup_chains, msg) = ret
            log.debug2(msg)
            self._fw.handle_chains(cleanup_chains, not create)
            raise FirewallError(COMMAND_FAILED, msg)

        # handle rules
        ret = self._fw.handle_rules(rules, create, insert=True)
        if ret:
            # also cleanup chains
            self._fw.handle_chains(chains, not create)
            (cleanup_rules, msg) = ret
            self._fw.handle_rules(cleanup_rules, not create)
            raise FirewallError(COMMAND_FAILED, msg)
    else:
        # reverse rule order for cleanup
        rules.reverse()
        # cleanup rules first
        ret = self._fw.handle_rules(rules, create, insert=True)
        if ret:
            (cleanup_rules, msg) = ret
            self._fw.handle_rules(cleanup_rules, not create)
            raise FirewallError(COMMAND_FAILED, msg)

        # cleanup chains
        ret = self._fw.handle_chains(chains, create)
        if ret:
            # also create rules
            (cleanup_rules, msg) = ret
            self._fw.handle_rules(cleanup_rules, not create)
            (cleanup_chains, msg) = ret
            self._fw.handle_chains(cleanup_chains, not create)
            raise FirewallError(COMMAND_FAILED, msg)

    # bookkeeping of what exists per zone/table
    if create:
        self._chains.setdefault(zone, { }).setdefault(table, [ ]).append(chain)
    else:
        self._chains[zone][table].remove(chain)
        if len(self._chains[zone][table]) == 0:
            del self._chains[zone][table]
        if len(self._chains[zone]) == 0:
            del self._chains[zone]
def add_chain(self, zone, table, chain):
    """Ensure the zone's chain set exists for table/chain."""
    self.__chain(zone, True, table, chain)

def remove_chain(self, zone, table, chain):
    # TODO: add config setting to remove chains optionally if
    # table,chain is not used for zone anymore
    # self.__chain(zone, False, table, chain)
    # Intentionally a no-op for now (see the TODO above).
    pass
# settings
# generate settings record with sender, timeout, mark
def __gen_settings(self, timeout, sender, mark=None):
ret = {
"date": time.time(),
"sender": sender,
"timeout": timeout,
}
if mark:
ret["mark"] = mark
return ret
def get_settings(self, zone):
    """Return the runtime settings dict of *zone*."""
    return self.get_zone(zone).settings
def set_settings(self, zone, settings):
    """Restore a saved runtime-settings snapshot into *zone*.

    Items already active are skipped; for items that are re-added the
    saved date/sender/timeout record is put back.  Errors are logged,
    not raised.
    """
    _obj = self.get_zone(zone)

    try:
        for key in settings:
            for args in settings[key]:
                if args in _obj.settings[key]:
                    # do not add things, that are already active in the
                    # zone configuration, also do not restore date,
                    # sender and timeout
                    continue
                if key == "icmp_blocks":
                    self.add_icmp_block(zone, args)
                elif key == "forward_ports":
                    self.add_forward_port(zone, *args)
                elif key == "services":
                    self.add_service(zone, args)
                elif key == "ports":
                    self.add_port(zone, *args)
                elif key == "protocols":
                    # Bug fix: a protocols entry is a single string; the
                    # previous "*args" splatted it into characters.  Pass
                    # it whole, matching apply_zones()/__zone_settings().
                    self.add_protocol(zone, args)
                elif key == "masquerade":
                    self.add_masquerade(zone)
                elif key == "rules":
                    self.add_rule(zone, Rich_Rule(rule_str=args))
                elif key == "interfaces":
                    self.change_zone_of_interface(zone, args)
                elif key == "sources":
                    # NOTE(review): sources keys elsewhere are
                    # (ipv, addr) tuples -- confirm callers pass plain
                    # address strings here.
                    self.change_zone_of_source(zone, args)
                else:
                    log.error("Zone '%s': Unknown setting '%s:%s', "
                              "unable to restore.", zone, key, args)
                # restore old date, sender and timeout
                if args in _obj.settings[key]:
                    _obj.settings[key][args] = settings[key][args]
    except FirewallError as msg:
        log.error(msg)
def __zone_settings(self, enable, zone):
    """Apply (enable=True) or revert all runtime settings of *zone* to
    the kernel.  No-op when the zone is already in the requested state;
    per-item errors are logged and skipped."""
    obj = self.get_zone(zone)
    if (enable and obj.applied) or (not enable and not obj.applied):
        return
    settings = self.get_settings(zone)
    for key in settings:
        for args in settings[key]:
            try:
                if key == "icmp_blocks":
                    self.__icmp_block(enable, zone, args)
                elif key == "forward_ports":
                    # reuse the mark allocated when the forward was added
                    mark = obj.settings["forward_ports"][args]["mark"]
                    self.__forward_port(enable, zone, *args, mark_id=mark)
                elif key == "services":
                    self.__service(enable, zone, args)
                elif key == "ports":
                    self.__port(enable, zone, *args)
                elif key == "protocols":
                    self.__protocol(enable, zone, args)
                elif key == "masquerade":
                    self.__masquerade(enable, zone)
                elif key == "rules":
                    # __rule may allocate a mark (forward-port rules)
                    mark = self.__rule(enable, zone,
                                       Rich_Rule(rule_str=args), None)
                    obj.settings["rules"][args]["mark"] = mark
                elif key == "interfaces":
                    self.__interface(enable, zone, args)
                elif key == "sources":
                    self.__source(enable, zone, *args)
                else:
                    log.error("Zone '%s': Unknown setting '%s:%s', "
                              "unable to apply", zone, key, args)
            except FirewallError as msg:
                log.error(msg)
    obj.applied = enable
def apply_zone_settings(self, zone):
    """Apply all runtime settings of *zone* to the kernel."""
    self.__zone_settings(True, zone)

def unapply_zone_settings(self, zone):
    """Revert all runtime settings of *zone* from the kernel."""
    self.__zone_settings(False, zone)

def unapply_zone_settings_if_unused(self, zone):
    # Only unapply once nothing (interface or source) is bound anymore.
    obj = self._zones[zone]
    if len(obj.interfaces) == 0 and len(obj.sources) == 0:
        self.unapply_zone_settings(zone)
def get_config_with_settings(self, zone):
    """
    :return: exported config updated with runtime settings
    """
    # The first five fields of export_config() are kept as-is; the
    # list-valued fields that follow are replaced with the runtime state.
    # NOTE(review): assumes the field order of Zone.export_config() --
    # confirm against that method if it changes.
    config = self.get_zone(zone).export_config()
    config = config[:5] + (self.list_services(zone),
                           self.list_ports(zone),
                           self.list_icmp_blocks(zone),
                           self.query_masquerade(zone),
                           self.list_forward_ports(zone),
                           self.list_interfaces(zone),
                           self.list_sources(zone),
                           self.list_rules(zone))
    return config
# handle chains, modules and rules for a zone
def handle_cmr(self, zone, chains, modules, rules, enable):
cleanup_chains = None
cleanup_modules = None
cleanup_rules = None
# handle chains
if enable:
for (table, chain) in chains:
self.add_chain(zone, table, chain)
# handle modules
module_return = self._fw.handle_modules(modules, enable)
if module_return is None:
# handle rules
rules_return = self._fw.handle_rules2(rules, enable)
if rules_return is not None:
(cleanup_rules, msg) = rules_return
cleanup_chains = chains
cleanup_modules = modules
else:
# error loading modules
(cleanup_modules, msg) = module_return
# error case:
if cleanup_chains is not None or cleanup_modules is not None or \
cleanup_rules is not None:
# cleanup chains
for (table, chain) in cleanup_chains:
if enable:
self.remove_chain(zone, table, chain)
else:
self.add_chain(zone, table, chain)
# cleanup modules
if cleanup_modules is not None:
self._fw.handle_modules(cleanup_modules, not enable)
# cleanup rules
if cleanup_rules is not None:
self._fw.handle_rules2(cleanup_rules, not enable)
# cleanup chains last
if not enable:
for (table, chain) in chains:
self.remove_chain(zone, table, chain)
# report error case
if cleanup_chains is not None or cleanup_modules is not None or \
cleanup_rules is not None:
log.error(msg)
return msg
return None
# INTERFACES
def check_interface(self, interface):
    """Validate the interface name via the central Firewall object."""
    self._fw.check_interface(interface)

def __interface_id(self, interface):
    """Return the settings key for *interface* (the validated name)."""
    self.check_interface(interface)
    return interface
def __interface(self, enable, zone, interface, append=False):
    """Insert or remove the rules that divert traffic on *interface*
    into the zone's chains.  With append=True the rule goes to the end
    of the _ZONES chains (used for the '+' catch-all interface)."""
    rules = [ ]
    for table in ZONE_CHAINS:
        for chain in ZONE_CHAINS[table]:
            # create needed chains if not done already
            if enable:
                self.add_chain(zone, table, chain)

            for ipv in ZONE_CHAINS[table][chain]:
                # handle all zones in the same way here, now
                # trust and block zone targets are handled now in __chain
                opt = INTERFACE_ZONE_OPTS[chain]
                target = DEFAULT_ZONE_TARGET.format(
                    chain=SHORTCUTS[chain], zone=zone)
                # -g (goto) for default-target zones, -j (jump) otherwise
                if self._zones[zone].target == DEFAULT_ZONE_TARGET:
                    action = "-g"
                else:
                    action = "-j"
                rule = [ "%s_ZONES" % chain, "-t", table,
                         opt, interface, action, target ]
                if enable and not append:
                    # insert at position 1 so explicit interfaces match
                    # before any appended catch-all entry
                    rule.insert(1, "1")
                rules.append((ipv, rule))

    # handle rules
    ret = self._fw.handle_rules(rules, enable, not append)
    if ret:
        (cleanup_rules, msg) = ret
        self._fw.handle_rules(cleanup_rules, not enable)
        log.debug2(msg)
        raise FirewallError(COMMAND_FAILED, msg)

    # if not enable:
    #     for table in ZONE_CHAINS:
    #         for chain in ZONE_CHAINS[table]:
    #             self.remove_chain(zone, table, chain)
def add_interface(self, zone, interface, sender=None):
    """Bind *interface* to *zone*, applying the zone first if needed.

    Raises FirewallError when the interface is already bound anywhere.
    Returns the real zone name.
    """
    self._fw.check_panic()
    _zone = self._fw.check_zone(zone)
    _obj = self._zones[_zone]

    if not _obj.applied:
        self.apply_zone_settings(zone)

    interface_id = self.__interface_id(interface)
    if interface_id in _obj.settings["interfaces"]:
        raise FirewallError(ZONE_ALREADY_SET,
                            "'%s' already bound to '%s'" % (interface_id, zone))
    if self.get_zone_of_interface(interface) is not None:
        raise FirewallError(ZONE_CONFLICT,
                            "'%s' already bound to a zone" % interface)

    log.debug1("Setting zone of interface '%s' to '%s'" % (interface, _zone))
    self.__interface(True, _zone, interface)

    _obj.settings["interfaces"][interface_id] = \
        self.__gen_settings(0, sender)
    # add information whether we add to default or specific zone
    _obj.settings["interfaces"][interface_id]["__default__"] = (not zone or zone == "")
    return _zone
def change_zone_of_interface(self, zone, interface, sender=None):
self._fw.check_panic()
_old_zone = self.get_zone_of_interface(interface)
_new_zone = self._fw.check_zone(zone)
if _new_zone == _old_zone:
return _old_zone
if _old_zone is not None:
self.remove_interface(_old_zone, interface)
return self.add_interface(zone, interface, sender)
def change_default_zone(self, old_zone, new_zone):
    """Rewire the '+' catch-all interface binding from old_zone to
    new_zone (append=True keeps it behind explicit interface rules)."""
    self._fw.check_panic()

    self.apply_zone_settings(new_zone)
    self.__interface(True, new_zone, "+", True)
    if old_zone is not None and old_zone != "":
        self.__interface(False, old_zone, "+", True)
def remove_interface(self, zone, interface):
    """Unbind *interface*; zone == "" means "from whatever zone".

    Raises UNKNOWN_INTERFACE when unbound, ZONE_CONFLICT when bound to
    a different zone than requested.  Returns the zone removed from.
    """
    self._fw.check_panic()
    zoi = self.get_zone_of_interface(interface)
    if zoi is None:
        raise FirewallError(UNKNOWN_INTERFACE,
                            "'%s' is not in any zone" % interface)
    _zone = zoi if zone == "" else self._fw.check_zone(zone)
    if zoi != _zone:
        raise FirewallError(ZONE_CONFLICT,
                            "remove_interface(%s, %s): zoi='%s'" % \
                            (zone, interface, zoi))

    _obj = self._zones[_zone]
    interface_id = self.__interface_id(interface)
    # only touch the kernel when the zone is actually applied
    if _obj.applied:
        self.__interface(False, _zone, interface)

    if interface_id in _obj.settings["interfaces"]:
        del _obj.settings["interfaces"][interface_id]

    # self.unapply_zone_settings_if_unused(_zone)
    return _zone
def query_interface(self, zone, interface):
    """Return True if *interface* is bound to *zone*."""
    return self.__interface_id(interface) in self.get_settings(zone)["interfaces"]

def list_interfaces(self, zone):
    """Return the sorted interface names bound to *zone*."""
    return sorted(self.get_settings(zone)["interfaces"].keys())
# SOURCES
def check_source(self, source):
    """Return "ipv4" or "ipv6" for *source*; raise INVALID_ADDR otherwise."""
    if checkIPnMask(source):
        return "ipv4"
    elif checkIP6nMask(source):
        return "ipv6"
    else:
        raise FirewallError(INVALID_ADDR, source)

def __source_id(self, source):
    """Return the settings key for *source*: an (ip version, addr) tuple."""
    ipv = self.check_source(source)
    return (ipv, source)
def __source(self, enable, zone, ipv, source):
    """Insert or remove the rules diverting traffic from source address
    *source* into the zone's chains (or straight to an action target)."""
    rules = [ ]
    for table in ZONE_CHAINS:
        for chain in ZONE_CHAINS[table]:
            # create needed chains if not done already
            if enable:
                self.add_chain(zone, table, chain)

            # handle trust and block zone directly, accept or reject
            # others will be placed into the proper zone chains
            opt = INTERFACE_ZONE_OPTS[chain]

            # transform INTERFACE_ZONE_OPTS for source address
            if opt == "-i":
                opt = "-s"
            if opt == "-o":
                opt = "-d"

            target = self._zones[zone].target.format(
                chain=SHORTCUTS[chain], zone=zone)
            if target in [ "REJECT", "%%REJECT%%" ] and \
               chain not in [ "INPUT", "FORWARD", "OUTPUT" ]:
                # REJECT is only valid in the INPUT, FORWARD and
                # OUTPUT chains, and user-defined chains which are
                # only called from those chains
                # NOTE(review): ZONE_CHAINS only contains FORWARD_IN /
                # FORWARD_OUT, never plain "FORWARD", so this skips the
                # forward hooks for REJECT targets -- confirm intended.
                continue
            if target == "DROP" and table == "nat":
                # DROP is not supported in nat table
                continue

            # append rule
            if self._zones[zone].target == DEFAULT_ZONE_TARGET:
                action = "-g"
            else:
                action = "-j"
            rule = [ "%s_ZONES_SOURCE" % chain, "-t", table,
                     opt, source, action, target ]
            rules.append((ipv, rule))

    # handle rules
    ret = self._fw.handle_rules(rules, enable)
    if ret:
        (cleanup_rules, msg) = ret
        self._fw.handle_rules(cleanup_rules, not enable)
        log.debug2(msg)
        raise FirewallError(COMMAND_FAILED, msg)

    # if not enable:
    #     for table in ZONE_CHAINS:
    #         for chain in ZONE_CHAINS[table]:
    #             self.remove_chain(zone, table, chain)
def add_source(self, zone, source, sender=None):
    """Bind source address/network *source* to *zone*.

    Applies the zone first if needed; raises when the source is already
    bound.  Returns the real zone name.
    """
    self._fw.check_panic()
    _zone = self._fw.check_zone(zone)
    _obj = self._zones[_zone]

    if not _obj.applied:
        self.apply_zone_settings(zone)

    source_id = self.__source_id(source)
    if source_id in _obj.settings["sources"]:
        raise FirewallError(ZONE_ALREADY_SET,
                            "'%s' already bound to '%s'" % (source_id, _zone))
    if self.get_zone_of_source(source) is not None:
        raise FirewallError(ZONE_CONFLICT,
                            "'%s' already bound to a zone" % source_id)

    self.__source(True, _zone, source_id[0], source_id[1])

    _obj.settings["sources"][source_id] = \
        self.__gen_settings(0, sender)
    # add information whether we add to default or specific zone
    _obj.settings["sources"][source_id]["__default__"] = (not zone or zone == "")
    return _zone
def change_zone_of_source(self, zone, source, sender=None):
    """Move *source* into *zone*; returns the resulting zone name."""
    self._fw.check_panic()
    old_zone = self.get_zone_of_source(source)
    new_zone = self._fw.check_zone(zone)
    if new_zone == old_zone:
        # already there, nothing to do
        return old_zone
    if old_zone is not None:
        self.remove_source(old_zone, source)
    return self.add_source(zone, source, sender)
def remove_source(self, zone, source):
    """Unbind *source*; zone == "" means "from whatever zone".

    Raises UNKNOWN_SOURCE when unbound, ZONE_CONFLICT when bound to a
    different zone than requested.  Returns the zone removed from.
    """
    self._fw.check_panic()
    zos = self.get_zone_of_source(source)
    if zos is None:
        raise FirewallError(UNKNOWN_SOURCE,
                            "'%s' is not in any zone" % source)
    _zone = zos if zone == "" else self._fw.check_zone(zone)
    if zos != _zone:
        raise FirewallError(ZONE_CONFLICT,
                            "remove_source(%s, %s): zos='%s'" % \
                            (zone, source, zos))

    _obj = self._zones[_zone]
    source_id = self.__source_id(source)
    # only touch the kernel when the zone is actually applied
    if _obj.applied:
        self.__source(False, _zone, source_id[0], source_id[1])

    if source_id in _obj.settings["sources"]:
        del _obj.settings["sources"][source_id]

    # self.unapply_zone_settings_if_unused(_zone)
    return _zone
def query_source(self, zone, source):
    """Return True if *source* is bound to *zone*."""
    return self.__source_id(source) in self.get_settings(zone)["sources"]

def list_sources(self, zone):
    """Return the source addresses bound to *zone* (ip version dropped)."""
    return [ k[1] for k in self.get_settings(zone)["sources"].keys() ]
# RICH LANGUAGE
def check_rule(self, rule):
rule.check()
def __rule_id(self, rule):
self.check_rule(rule)
return (str(rule))
def __rule_source(self, source, command):
if source:
if source.invert:
command.append("!")
command += [ "-s", source.addr ]
def __rule_destination(self, destination, command):
if destination:
if destination.invert:
command.append("!")
command += [ "-d", destination.addr ]
def __rule_limit(self, limit):
if limit:
return [ "-m", "limit", "--limit", limit.value ]
return [ ]
def __rule_log(self, ipv, table, target, rule, command, rules):
    """Append a LOG rule for the rich rule into <target>_log."""
    if not rule.log:
        return
    chain = "%s_log" % target
    _command = command[:]
    _command += [ "-j", "LOG" ]
    if rule.log.prefix:
        _command += [ "--log-prefix", '%s' % rule.log.prefix ]
    if rule.log.level:
        _command += [ "--log-level", '%s' % rule.log.level ]
    _command += self.__rule_limit(rule.log.limit)
    rules.append((ipv, table, chain, _command))

def __rule_audit(self, ipv, table, target, rule, command, rules):
    """Append an AUDIT rule whose --type mirrors the rule's action."""
    if not rule.audit:
        return
    chain = "%s_log" % target
    _command = command[:]
    if type(rule.action) == Rich_Accept:
        _type = "accept"
    elif type(rule.action) == Rich_Reject:
        _type = "reject"
    elif type(rule.action) == Rich_Drop:
        _type = "drop"
    else:
        _type = "unknown"
    _command += [ "-j", "AUDIT", "--type", _type ]
    _command += self.__rule_limit(rule.audit.limit)
    rules.append((ipv, table, chain, _command))
def __rule_action(self, ipv, table, target, rule, command, rules):
    """Append the final action rule: ACCEPT into <target>_allow,
    REJECT/DROP into <target>_deny."""
    if not rule.action:
        return
    _command = command[:]
    if type(rule.action) == Rich_Accept:
        chain = "%s_allow" % target
        _command += [ "-j", "ACCEPT" ]
    elif type(rule.action) == Rich_Reject:
        chain = "%s_deny" % target
        _command += [ "-j", "REJECT" ]
        if rule.action.type:
            _command += [ "--reject-with", rule.action.type ]
    elif type(rule.action) == Rich_Drop:
        chain = "%s_deny" % target
        _command += [ "-j", "DROP" ]
    else:
        raise FirewallError(INVALID_RULE,
                            "Unknown action %s" % type(rule.action))
    _command += self.__rule_limit(rule.action.limit)
    rules.append((ipv, table, chain, _command))
def __rule(self, enable, zone, rule, mark_id):
    """Apply (enable=True) or revert one rich rule in *zone*.

    Collects chains, kernel modules and rules per ip version, then
    commits them transactionally via handle_cmr().  Returns the packet
    mark id (forward-port rules allocate one), else mark_id unchanged.
    """
    chains = [ ]
    modules = [ ]
    rules = [ ]

    # restrict to the rule's family if it has one
    if rule.family is not None:
        ipvs = [ rule.family ]
    else:
        ipvs = [ "ipv4", "ipv6" ]

    for ipv in ipvs:

        # SERVICE
        if type(rule.element) == Rich_Service:
            svc = self._fw.service.get_service(rule.element.name)

            if len(svc.destination) > 0:
                if ipv not in svc.destination:
                    # destination is set, only use if it contains ipv
                    raise FirewallError(INVALID_RULE,
                                        "Service %s is not usable with %s" %
                                        (rule.element.name, ipv))
                if svc.destination[ipv] != "" and rule.destination:
                    # we can not use two destinations at the same time
                    raise FirewallError(INVALID_RULE,
                                        "Destination conflict with service.")

            table = "filter"
            chains.append([table, "INPUT" ])
            if type(rule.action) == Rich_Accept:
                # only load modules for accept action
                modules += svc.modules
            target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["INPUT"],
                                                zone=zone)

            # create rules
            for (port,proto) in svc.ports:
                table = "filter"
                command = [ ]
                self.__rule_source(rule.source, command)
                self.__rule_destination(rule.destination, command)
                command += [ "-p", proto ]
                if port:
                    command += [ "--dport", "%s" % portStr(port) ]
                if ipv in svc.destination and svc.destination[ipv] != "":
                    command += [ "-d", svc.destination[ipv] ]
                command += [ "-m", "conntrack", "--ctstate", "NEW" ]
                self.__rule_log(ipv, table, target, rule, command, rules)
                self.__rule_audit(ipv, table, target, rule, command, rules)
                self.__rule_action(ipv, table, target, rule, command, rules)

        # PORT
        elif type(rule.element) == Rich_Port:
            port = rule.element.port
            protocol = rule.element.protocol
            self.check_port(port, protocol)

            table = "filter"
            chains.append([ table, "INPUT" ])
            target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["INPUT"],
                                                zone=zone)
            command = [ ]
            self.__rule_source(rule.source, command)
            self.__rule_destination(rule.destination, command)
            command += [ "-m", protocol, "-p", protocol,
                         "--dport", portStr(port),
                         "-m", "conntrack", "--ctstate", "NEW" ]
            self.__rule_log(ipv, table, target, rule, command, rules)
            self.__rule_audit(ipv, table, target, rule, command, rules)
            self.__rule_action(ipv, table, target, rule, command, rules)

        # PROTOCOL
        elif type(rule.element) == Rich_Protocol:
            protocol = rule.element.value
            self.check_protocol(protocol)

            table = "filter"
            chains.append([ table, "INPUT" ])
            target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["INPUT"],
                                                zone=zone)
            command = [ ]
            self.__rule_source(rule.source, command)
            self.__rule_destination(rule.destination, command)
            command += [ "-p", protocol,
                         "-m", "conntrack", "--ctstate", "NEW" ]
            self.__rule_log(ipv, table, target, rule, command, rules)
            self.__rule_audit(ipv, table, target, rule, command, rules)
            self.__rule_action(ipv, table, target, rule, command, rules)

        # MASQUERADE
        elif type(rule.element) == Rich_Masquerade:
            if enable:
                enable_ip_forwarding(ipv)

            chains.append([ "nat", "POSTROUTING" ])
            chains.append([ "filter", "FORWARD_OUT" ])

            # POSTROUTING
            target = DEFAULT_ZONE_TARGET.format(
                chain=SHORTCUTS["POSTROUTING"], zone=zone)
            command = [ ]
            self.__rule_source(rule.source, command)
            self.__rule_destination(rule.destination, command)
            command += [ "-j", "MASQUERADE" ]
            rules.append((ipv, "nat", "%s_allow" % target, command))

            # FORWARD_OUT
            target = DEFAULT_ZONE_TARGET.format(
                chain=SHORTCUTS["FORWARD_OUT"], zone=zone)
            command = [ ]
            # reverse source/destination !
            self.__rule_source(rule.destination, command)
            self.__rule_destination(rule.source, command)
            command += [ "-m", "conntrack", "--ctstate", "NEW",
                         "-j", "ACCEPT" ]
            rules.append((ipv, "filter", "%s_allow" % target, command))

        # FORWARD PORT
        elif type(rule.element) == Rich_ForwardPort:
            if enable:
                enable_ip_forwarding(ipv)
            port = rule.element.port
            protocol = rule.element.protocol
            toport = rule.element.to_port
            toaddr = rule.element.to_address
            self.check_forward_port(ipv, port, protocol, toport, toaddr)

            # allocate a fresh packet mark to tie the mangle/nat/filter
            # rules of this forward together
            if enable:
                mark_id = self._fw.new_mark()

            # local forwards are accepted in INPUT, remote in FORWARD_IN
            filter_chain = "INPUT" if not toaddr else "FORWARD_IN"

            chains.append([ "mangle", "PREROUTING" ])
            chains.append([ "nat", "PREROUTING" ])
            chains.append([ "filter", filter_chain ])

            mark_str = "0x%x" % mark_id
            port_str = portStr(port)

            to = ""
            if toaddr:
                to += toaddr
            if toport and toport != "":
                to += ":%s" % portStr(toport, "-")

            mark = [ "-m", "mark", "--mark", mark_str ]

            target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["PREROUTING"],
                                                zone=zone)
            command = [ ]
            self.__rule_source(rule.source, command)
            self.__rule_destination(rule.destination, command)
            command += [ "-p", protocol, "--dport", port_str,
                         "-j", "MARK", "--set-mark", mark_str ]
            rules.append((ipv, "mangle", "%s_allow" % target, command))

            # local and remote
            command = [ "-p", protocol ] + mark + \
                [ "-j", "DNAT", "--to-destination", to ]
            rules.append((ipv, "nat", "%s_allow" % target, command))

            target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS[filter_chain],
                                                zone=zone)
            command = [ "-m", "conntrack", "--ctstate", "NEW" ] + \
                mark + [ "-j", "ACCEPT" ]
            rules.append((ipv, "filter", "%s_allow" % target, command))

            # on removal, give the mark back to the pool
            if not enable:
                self._fw.del_mark(mark_id)
                mark_id = None

        # ICMP BLOCK
        elif type(rule.element) == Rich_IcmpBlock:
            ict = self._fw.icmptype.get_icmptype(rule.element.name)

            if rule.action and type(rule.action) == Rich_Accept:
                # icmp block might have reject or drop action, but not accept
                raise FirewallError(INVALID_RULE,
                                    "IcmpBlock not usable with accept action")
            if ict.destination and ipv not in ict.destination:
                raise FirewallError(INVALID_RULE,
                                    "IcmpBlock %s not usable with %s" %
                                    (rule.element.name, ipv))

            table = "filter"
            chains.append([ table, "INPUT" ])
            chains.append([ table, "FORWARD_IN" ])

            if ipv == "ipv4":
                proto = [ "-p", "icmp" ]
                match = [ "-m", "icmp", "--icmp-type", rule.element.name ]
            else:
                proto = [ "-p", "ipv6-icmp" ]
                match = [ "-m", "icmp6", "--icmpv6-type", rule.element.name ]

            # INPUT
            target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["INPUT"],
                                                zone=zone)
            command = [ ]
            self.__rule_source(rule.source, command)
            self.__rule_destination(rule.destination, command)
            command += proto + match
            self.__rule_log(ipv, table, target, rule, command, rules)
            self.__rule_audit(ipv, table, target, rule, command, rules)
            if rule.action:
                self.__rule_action(ipv, table, target, rule, command, rules)
            else:
                # default: reject the icmp type
                command += [ "-j", "%%REJECT%%" ]
                rules.append((ipv, table, "%s_deny" % target, command))

            # FORWARD_IN
            target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["FORWARD_IN"],
                                                zone=zone)
            command = [ ]
            self.__rule_source(rule.source, command)
            self.__rule_destination(rule.destination, command)
            command += proto + match
            self.__rule_log(ipv, table, target, rule, command, rules)
            self.__rule_audit(ipv, table, target, rule, command, rules)
            if rule.action:
                self.__rule_action(ipv, table, target, rule, command, rules)
            else:
                command += [ "-j", "%%REJECT%%" ]
                rules.append((ipv, table, "%s_deny" % target, command))

        elif rule.element is None:
            # source action: rule with only a source and an action
            table = "filter"
            chains.append([ table, "INPUT" ])
            target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["INPUT"],
                                                zone=zone)
            command = [ ]
            self.__rule_source(rule.source, command)
            self.__rule_log(ipv, table, target, rule, command, rules)
            self.__rule_audit(ipv, table, target, rule, command, rules)
            self.__rule_action(ipv, table, target, rule, command, rules)

        # EVERYTHING ELSE
        else:
            raise FirewallError(INVALID_RULE, "Unknown element %s" %
                                type(rule.element))

    msg = self.handle_cmr(zone, chains, modules, rules, enable)
    if msg is not None:
        raise FirewallError(COMMAND_FAILED, msg)

    return mark_id
def add_rule(self, zone, rule, timeout=0, sender=None):
    """Enable a rich rule in *zone* and record it in the zone settings.

    Returns the canonical zone name.  Raises FirewallError
    (ALREADY_ENABLED) when the rule is already configured.
    """
    checked_zone = self._fw.check_zone(zone)
    self._fw.check_timeout(timeout)
    self._fw.check_panic()
    obj = self._zones[checked_zone]
    rule_id = self.__rule_id(rule)
    if rule_id in obj.settings["rules"]:
        raise FirewallError(ALREADY_ENABLED,
                            "'%s' already in '%s'" % (rule, checked_zone))
    # Only touch the live firewall if the zone is currently applied.
    mark = self.__rule(True, checked_zone, rule, None) if obj.applied else None
    obj.settings["rules"][rule_id] = self.__gen_settings(timeout, sender,
                                                         mark=mark)
    return checked_zone
def remove_rule(self, zone, rule):
    """Disable a rich rule in *zone* and drop it from the zone settings.

    Returns the canonical zone name.  Raises FirewallError
    (NOT_ENABLED) when the rule is not configured.
    """
    checked_zone = self._fw.check_zone(zone)
    self._fw.check_panic()
    obj = self._zones[checked_zone]
    rule_id = self.__rule_id(rule)
    if rule_id not in obj.settings["rules"]:
        raise FirewallError(NOT_ENABLED,
                            "'%s' not in '%s'" % (rule, checked_zone))
    # A mark is only present if the rule needed one when it was applied.
    mark = obj.settings["rules"][rule_id].get("mark")
    if obj.applied:
        self.__rule(False, checked_zone, rule, mark)
    obj.settings["rules"].pop(rule_id, None)
    return checked_zone
def query_rule(self, zone, rule):
    """Return True if the rich rule is configured for *zone*."""
    configured_rules = self.get_settings(zone)["rules"]
    return self.__rule_id(rule) in configured_rules
def list_rules(self, zone):
    """Return the rich-rule ids configured for *zone* as a list."""
    return [rule_id for rule_id in self.get_settings(zone)["rules"]]
# SERVICES
def check_service(self, service):
    # Delegate service-name validation to the firewall core; invalid
    # names are rejected there (presumably with a FirewallError).
    self._fw.check_service(service)
def __service_id(self, service):
    # After validation the service name itself is the settings key.
    self.check_service(service)
    return service
def __service(self, enable, zone, service):
    """Apply (enable=True) or remove (enable=False) the iptables rules
    and kernel modules for *service* in *zone*.

    On failure, everything that was applied is rolled back and
    FirewallError(COMMAND_FAILED) is raised.
    """
    svc = self._fw.service.get_service(service)
    if enable:
        self.add_chain(zone, "filter", "INPUT")
    rules = [ ]
    for ipv in [ "ipv4", "ipv6" ]:
        if len(svc.destination) > 0 and ipv not in svc.destination:
            # destination is set, only use if it contains ipv
            continue
        # one ACCEPT rule per (port, protocol) pair of the service
        for (port,proto) in svc.ports:
            target = DEFAULT_ZONE_TARGET.format(
                chain=SHORTCUTS["INPUT"], zone=zone)
            rule = [ "%s_allow" % (target), "-t", "filter", "-p", proto ]
            if port:
                rule += [ "--dport", "%s" % portStr(port) ]
            if ipv in svc.destination and svc.destination[ipv] != "":
                rule += [ "-d", svc.destination[ipv] ]
            rule += [ "-m", "conntrack", "--ctstate", "NEW" ]
            rule += [ "-j", "ACCEPT" ]
            rules.append((ipv, rule))
        # one ACCEPT rule per plain IP protocol of the service
        for protocol in svc.protocols:
            target = DEFAULT_ZONE_TARGET.format(
                chain=SHORTCUTS["INPUT"], zone=zone)
            rules.append((ipv, [ "%s_allow" % (target),
                                 "-t", "filter", "-p", protocol,
                                 "-m", "conntrack", "--ctstate", "NEW",
                                 "-j", "ACCEPT" ]))
    cleanup_rules = None
    cleanup_modules = None
    msg = None
    # handle rules
    ret = self._fw.handle_rules(rules, enable)
    if ret is None: # no error, handle modules
        mod_ret = self._fw.handle_modules(svc.modules, enable)
        if mod_ret is not None: # error loading modules
            (cleanup_modules, msg) = mod_ret
            # rules were already applied, so they must be reverted too
            cleanup_rules = rules
    else: # ret is not None
        (cleanup_rules, msg) = ret
    if cleanup_rules is not None or cleanup_modules is not None:
        # roll back whatever part succeeded, then report the failure
        if cleanup_rules:
            self._fw.handle_rules(cleanup_rules, not enable)
        if cleanup_modules:
            self._fw.handle_modules(cleanup_modules, not enable)
        raise FirewallError(COMMAND_FAILED, msg)
    if not enable:
        self.remove_chain(zone, "filter", "INPUT")
def add_service(self, zone, service, timeout=0, sender=None):
    """Enable *service* in *zone*; returns the canonical zone name."""
    checked_zone = self._fw.check_zone(zone)
    self._fw.check_timeout(timeout)
    self._fw.check_panic()
    obj = self._zones[checked_zone]
    service_id = self.__service_id(service)
    if service_id in obj.settings["services"]:
        raise FirewallError(ALREADY_ENABLED,
                            "'%s' already in '%s'" % (service_id,
                                                      checked_zone))
    if obj.applied:
        self.__service(True, checked_zone, service)
    obj.settings["services"][service_id] = self.__gen_settings(timeout,
                                                               sender)
    return checked_zone
def remove_service(self, zone, service):
    """Disable *service* in *zone*; returns the canonical zone name."""
    checked_zone = self._fw.check_zone(zone)
    self._fw.check_panic()
    obj = self._zones[checked_zone]
    service_id = self.__service_id(service)
    if service_id not in obj.settings["services"]:
        raise FirewallError(NOT_ENABLED,
                            "'%s' not in '%s'" % (service, checked_zone))
    if obj.applied:
        self.__service(False, checked_zone, service)
    obj.settings["services"].pop(service_id, None)
    return checked_zone
def query_service(self, zone, service):
    """Return True if *service* is enabled in *zone*."""
    enabled_services = self.get_settings(zone)["services"]
    return self.__service_id(service) in enabled_services
def list_services(self, zone):
    """Return the services enabled in *zone*, sorted by name."""
    return sorted(name for name in self.get_settings(zone)["services"])
# PORTS
def check_port(self, port, protocol):
    # Validate the port (single value or range) and that the protocol
    # is tcp or udp; the firewall core rejects invalid values.
    self._fw.check_port(port)
    self._fw.check_tcpudp(protocol)
def __port_id(self, port, protocol):
    # The normalized ("start-end", protocol) tuple is the settings key.
    self.check_port(port, protocol)
    return (portStr(port, "-"), protocol)
def __port(self, enable, zone, port, protocol):
    """Apply (enable=True) or remove (enable=False) the ACCEPT rules for
    *port*/*protocol* in *zone* for both IPv4 and IPv6."""
    if enable:
        self.add_chain(zone, "filter", "INPUT")
    rules = [ ]
    for ipv in [ "ipv4", "ipv6" ]:
        target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["INPUT"],
                                            zone=zone)
        # "-m" loads the tcp/udp match extension so "--dport" is usable.
        rules.append((ipv, [ "%s_allow" % (target),
                             "-t", "filter",
                             "-m", protocol, "-p", protocol,
                             "--dport", portStr(port),
                             "-m", "conntrack", "--ctstate", "NEW",
                             "-j", "ACCEPT" ]))
    # handle rules
    ret = self._fw.handle_rules(rules, enable)
    if ret:
        # failure: revert the partially applied rules and report
        (cleanup_rules, msg) = ret
        self._fw.handle_rules(cleanup_rules, not enable)
        raise FirewallError(COMMAND_FAILED, msg)
    if not enable:
        self.remove_chain(zone, "filter", "INPUT")
def add_port(self, zone, port, protocol, timeout=0, sender=None):
    """Open *port*/*protocol* in *zone*; returns the canonical zone name."""
    checked_zone = self._fw.check_zone(zone)
    self._fw.check_timeout(timeout)
    self._fw.check_panic()
    obj = self._zones[checked_zone]
    port_id = self.__port_id(port, protocol)
    if port_id in obj.settings["ports"]:
        raise FirewallError(ALREADY_ENABLED,
                            "'%s:%s' already in '%s'" % (port, protocol,
                                                         checked_zone))
    if obj.applied:
        self.__port(True, checked_zone, port, protocol)
    obj.settings["ports"][port_id] = self.__gen_settings(timeout, sender)
    return checked_zone
def remove_port(self, zone, port, protocol):
    """Close *port*/*protocol* in *zone*; returns the canonical zone name."""
    checked_zone = self._fw.check_zone(zone)
    self._fw.check_panic()
    obj = self._zones[checked_zone]
    port_id = self.__port_id(port, protocol)
    if port_id not in obj.settings["ports"]:
        raise FirewallError(NOT_ENABLED,
                            "'%s:%s' not in '%s'" % (port, protocol,
                                                     checked_zone))
    if obj.applied:
        self.__port(False, checked_zone, port, protocol)
    obj.settings["ports"].pop(port_id, None)
    return checked_zone
def query_port(self, zone, port, protocol):
    """Return True if *port*/*protocol* is open in *zone*."""
    open_ports = self.get_settings(zone)["ports"]
    return self.__port_id(port, protocol) in open_ports
def list_ports(self, zone):
    """Return the (port, protocol) ids open in *zone* as a list."""
    return [port_id for port_id in self.get_settings(zone)["ports"]]
# PROTOCOLS
def check_protocol(self, protocol):
    # Reject protocol names that fail the syntactic checkProtocol() test.
    if not checkProtocol(protocol):
        raise FirewallError(INVALID_PROTOCOL, protocol)
def __protocol_id(self, protocol):
    # After validation the protocol name itself is the settings key.
    self.check_protocol(protocol)
    return protocol
def __protocol(self, enable, zone, protocol):
    """Apply (enable=True) or remove (enable=False) the ACCEPT rules for
    an IP *protocol* in *zone* for both IPv4 and IPv6."""
    if enable:
        self.add_chain(zone, "filter", "INPUT")
    rules = [ ]
    for ipv in [ "ipv4", "ipv6" ]:
        target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["INPUT"],
                                            zone=zone)
        rules.append((ipv, [ "%s_allow" % (target),
                             "-t", "filter", "-p", protocol,
                             "-m", "conntrack", "--ctstate", "NEW",
                             "-j", "ACCEPT" ]))
    # handle rules
    ret = self._fw.handle_rules(rules, enable)
    if ret:
        # failure: revert the partially applied rules and report
        (cleanup_rules, msg) = ret
        self._fw.handle_rules(cleanup_rules, not enable)
        raise FirewallError(COMMAND_FAILED, msg)
    if not enable:
        self.remove_chain(zone, "filter", "INPUT")
def add_protocol(self, zone, protocol, timeout=0, sender=None):
    """Allow *protocol* in *zone*; returns the canonical zone name."""
    checked_zone = self._fw.check_zone(zone)
    self._fw.check_timeout(timeout)
    self._fw.check_panic()
    obj = self._zones[checked_zone]
    protocol_id = self.__protocol_id(protocol)
    if protocol_id in obj.settings["protocols"]:
        raise FirewallError(ALREADY_ENABLED,
                            "'%s' already in '%s'" % (protocol,
                                                      checked_zone))
    if obj.applied:
        self.__protocol(True, checked_zone, protocol)
    obj.settings["protocols"][protocol_id] = self.__gen_settings(timeout,
                                                                 sender)
    return checked_zone
def remove_protocol(self, zone, protocol):
    """Disallow *protocol* in *zone*; returns the canonical zone name."""
    checked_zone = self._fw.check_zone(zone)
    self._fw.check_panic()
    obj = self._zones[checked_zone]
    protocol_id = self.__protocol_id(protocol)
    if protocol_id not in obj.settings["protocols"]:
        raise FirewallError(NOT_ENABLED,
                            "'%s' not in '%s'" % (protocol, checked_zone))
    if obj.applied:
        self.__protocol(False, checked_zone, protocol)
    obj.settings["protocols"].pop(protocol_id, None)
    return checked_zone
def query_protocol(self, zone, protocol):
    """Return True if *protocol* is allowed in *zone*."""
    allowed = self.get_settings(zone)["protocols"]
    return self.__protocol_id(protocol) in allowed
def list_protocols(self, zone):
    """Return the protocols allowed in *zone* as a list."""
    return [proto for proto in self.get_settings(zone)["protocols"]]
# MASQUERADE
def __masquerade_id(self):
    # Masquerading has no parameters; a constant key marks it enabled.
    return True
def __masquerade(self, enable, zone):
    """Apply (enable=True) or remove (enable=False) IPv4 masquerading
    for *zone*; enabling also turns on kernel IP forwarding."""
    if enable:
        self.add_chain(zone, "nat", "POSTROUTING")
        self.add_chain(zone, "filter", "FORWARD_OUT")
        enable_ip_forwarding("ipv4")
    rules = [ ]
    for ipv in [ "ipv4" ]: # IPv4 only!
        target = DEFAULT_ZONE_TARGET.format(
            chain=SHORTCUTS["POSTROUTING"], zone=zone)
        # masquerade everything that did not arrive via loopback
        rules.append((ipv, [ "%s_allow" % (target), "!", "-i", "lo",
                             "-t", "nat", "-j", "MASQUERADE" ]))
        # FORWARD_OUT
        target = DEFAULT_ZONE_TARGET.format(
            chain=SHORTCUTS["FORWARD_OUT"], zone=zone)
        rules.append((ipv, [ "%s_allow" % (target),
                             "-t", "filter", "-j", "ACCEPT" ]))
    # handle rules
    ret = self._fw.handle_rules(rules, enable)
    if ret:
        # failure: revert the partially applied rules and report
        (cleanup_rules, msg) = ret
        self._fw.handle_rules(cleanup_rules, not enable)
        raise FirewallError(COMMAND_FAILED, msg)
    if not enable:
        self.remove_chain(zone, "nat", "POSTROUTING")
        self.remove_chain(zone, "filter", "FORWARD_OUT")
def add_masquerade(self, zone, timeout=0, sender=None):
    """Enable masquerading in *zone*; returns the canonical zone name."""
    checked_zone = self._fw.check_zone(zone)
    self._fw.check_timeout(timeout)
    self._fw.check_panic()
    obj = self._zones[checked_zone]
    masquerade_id = self.__masquerade_id()
    if masquerade_id in obj.settings["masquerade"]:
        raise FirewallError(ALREADY_ENABLED,
                            "masquerade already enabled in '%s'" %
                            checked_zone)
    if obj.applied:
        self.__masquerade(True, checked_zone)
    obj.settings["masquerade"][masquerade_id] = self.__gen_settings(
        timeout, sender)
    return checked_zone
def remove_masquerade(self, zone):
    """Disable masquerading in *zone*; returns the canonical zone name."""
    checked_zone = self._fw.check_zone(zone)
    self._fw.check_panic()
    obj = self._zones[checked_zone]
    masquerade_id = self.__masquerade_id()
    if masquerade_id not in obj.settings["masquerade"]:
        raise FirewallError(NOT_ENABLED,
                            "masquerade not enabled in '%s'" % checked_zone)
    if obj.applied:
        self.__masquerade(False, checked_zone)
    obj.settings["masquerade"].pop(masquerade_id, None)
    return checked_zone
def query_masquerade(self, zone):
    """Return True if masquerading is enabled in *zone*."""
    masq_settings = self.get_settings(zone)["masquerade"]
    return self.__masquerade_id() in masq_settings
# PORT FORWARDING
def check_forward_port(self, ipv, port, protocol, toport=None, toaddr=None):
    """Validate a port-forwarding specification.

    Raises FirewallError for a bad port/protocol, a malformed target
    address, or when neither a target port nor address is given.
    """
    self._fw.check_port(port)
    self._fw.check_tcpudp(protocol)
    if toport:
        self._fw.check_port(toport)
    if toaddr and not check_single_address(ipv, toaddr):
        raise FirewallError(INVALID_ADDR, toaddr)
    if not (toport or toaddr):
        raise FirewallError(INVALID_FORWARD,
                            "port-forwarding is missing to-port AND to-addr")
def __forward_port_id(self, port, protocol, toport=None, toaddr=None):
    # Validation is done for IPv4 because port forwarding is IPv4-only
    # here (see __forward_port).  The id normalizes the ports and
    # stringifies the address (None becomes "None").
    self.check_forward_port("ipv4", port, protocol, toport, toaddr)
    return (portStr(port, "-"), protocol,
            portStr(toport, "-"), str(toaddr))
def __forward_port(self, enable, zone, port, protocol, toport=None,
                   toaddr=None, mark_id=None):
    """Apply (enable=True) or remove (enable=False) IPv4 DNAT forwarding
    rules for *zone*.

    Packets are tagged with firewall mark *mark_id* in mangle/PREROUTING,
    rewritten in nat/PREROUTING and accepted in the filter table.
    NOTE(review): the "0x%x" format requires mark_id to be an int, so
    callers must always pass one despite the None default.
    """
    mark_str = "0x%x" % mark_id
    port_str = portStr(port)
    to = ""
    if toaddr:
        to += toaddr
    # purely local forwards stay on INPUT; remote targets are forwarded
    filter_chain = "INPUT" if not toaddr else "FORWARD_IN"
    if toport and toport != "":
        to += ":%s" % portStr(toport, "-")
    mark = [ "-m", "mark", "--mark", mark_str ]
    if enable:
        self.add_chain(zone, "mangle", "PREROUTING")
        self.add_chain(zone, "nat", "PREROUTING")
        self.add_chain(zone, "filter", filter_chain)
        enable_ip_forwarding("ipv4")
    rules = [ ]
    for ipv in [ "ipv4" ]: # IPv4 only!
        target = DEFAULT_ZONE_TARGET.format(
            chain=SHORTCUTS["PREROUTING"], zone=zone)
        # tag matching packets so the nat/filter rules can identify them
        rules.append((ipv, [ "%s_allow" % (target),
                             "-t", "mangle",
                             "-p", protocol, "--dport", port_str,
                             "-j", "MARK", "--set-mark", mark_str ]))
        # local and remote
        rules.append((ipv, [ "%s_allow" % (target),
                             "-t", "nat",
                             "-p", protocol ] + mark + \
                      [ "-j", "DNAT", "--to-destination", to ]))
        target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS[filter_chain],
                                            zone=zone)
        rules.append((ipv, [ "%s_allow" % (target),
                             "-t", "filter",
                             "-m", "conntrack", "--ctstate", "NEW" ] + \
                      mark + [ "-j", "ACCEPT" ]))
    # handle rules
    ret = self._fw.handle_rules(rules, enable)
    if ret:
        # failure: revert the partially applied rules and report
        (cleanup_rules, msg) = ret
        self._fw.handle_rules(cleanup_rules, not enable)
        if enable:
            # give the reserved mark back on a failed enable
            self._fw.del_mark(mark_id)
        raise FirewallError(COMMAND_FAILED, msg)
    if not enable:
        self.remove_chain(zone, "mangle", "PREROUTING")
        self.remove_chain(zone, "nat", "PREROUTING")
        self.remove_chain(zone, "filter", filter_chain)
def add_forward_port(self, zone, port, protocol, toport=None,
                     toaddr=None, timeout=0, sender=None):
    """Enable a port forwarding in *zone*; returns the canonical zone name."""
    checked_zone = self._fw.check_zone(zone)
    self._fw.check_timeout(timeout)
    self._fw.check_panic()
    obj = self._zones[checked_zone]
    forward_id = self.__forward_port_id(port, protocol, toport, toaddr)
    if forward_id in obj.settings["forward_ports"]:
        raise FirewallError(ALREADY_ENABLED,
                            "'%s:%s:%s:%s' already in '%s'" % \
                            (port, protocol, toport, toaddr, checked_zone))
    # Reserve a firewall mark; it tags this forwarding's packets.
    mark = self._fw.new_mark()
    if obj.applied:
        self.__forward_port(True, checked_zone, port, protocol, toport,
                            toaddr, mark_id=mark)
    obj.settings["forward_ports"][forward_id] = self.__gen_settings(
        timeout, sender, mark=mark)
    return checked_zone
def remove_forward_port(self, zone, port, protocol, toport=None,
                        toaddr=None):
    """Disable a port forwarding in *zone* and release its firewall mark.

    Returns the canonical zone name.  Raises FirewallError
    (NOT_ENABLED) when the forwarding is not configured.
    """
    _zone = self._fw.check_zone(zone)
    self._fw.check_panic()
    _obj = self._zones[_zone]
    forward_id = self.__forward_port_id(port, protocol, toport, toaddr)
    # idiomatic "not in" instead of the original "not forward_id in"
    if forward_id not in _obj.settings["forward_ports"]:
        raise FirewallError(NOT_ENABLED,
                            "'%s:%s:%s:%s' not in '%s'" % \
                            (port, protocol, toport, toaddr, _zone))
    mark = _obj.settings["forward_ports"][forward_id]["mark"]
    if _obj.applied:
        self.__forward_port(False, _zone, port, protocol, toport, toaddr,
                            mark_id=mark)
    if forward_id in _obj.settings["forward_ports"]:
        del _obj.settings["forward_ports"][forward_id]
    # release the packet mark reserved by add_forward_port()
    self._fw.del_mark(mark)
    return _zone
def query_forward_port(self, zone, port, protocol, toport=None,
                       toaddr=None):
    """Return True if the given forwarding is enabled in *zone*."""
    configured = self.get_settings(zone)["forward_ports"]
    fwd_id = self.__forward_port_id(port, protocol, toport, toaddr)
    return fwd_id in configured
def list_forward_ports(self, zone):
    """Return the forward-port ids configured for *zone* as a list."""
    return [fwd_id for fwd_id in self.get_settings(zone)["forward_ports"]]
# ICMP BLOCK
def check_icmp_block(self, icmp):
    # Validate the ICMP type name via the firewall core.
    self._fw.check_icmptype(icmp)
def __icmp_block_id(self, icmp):
    # After validation the ICMP type name itself is the settings key.
    self.check_icmp_block(icmp)
    return icmp
def __icmp_block(self, enable, zone, icmp):
    """Apply (enable=True) or remove (enable=False) REJECT rules for
    ICMP type *icmp* in *zone*, in both INPUT and FORWARD_IN chains."""
    ict = self._fw.icmptype.get_icmptype(icmp)
    if enable:
        self.add_chain(zone, "filter", "INPUT")
        self.add_chain(zone, "filter", "FORWARD_IN")
    rules = [ ]
    for ipv in [ "ipv4", "ipv6" ]:
        if ict.destination and ipv not in ict.destination:
            # the ICMP type is restricted to certain IP versions
            continue
        if ipv == "ipv4":
            proto = [ "-p", "icmp" ]
            match = [ "-m", "icmp", "--icmp-type", icmp ]
        else:
            proto = [ "-p", "ipv6-icmp" ]
            match = [ "-m", "icmp6", "--icmpv6-type", icmp ]
        target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["INPUT"],
                                            zone=zone)
        rules.append((ipv, [ "%s_deny" % (target),
                             "-t", "filter", ] + proto + \
                      match + [ "-j", "%%REJECT%%" ]))
        target = DEFAULT_ZONE_TARGET.format(
            chain=SHORTCUTS["FORWARD_IN"], zone=zone)
        rules.append((ipv, [ "%s_deny" % (target),
                             "-t", "filter", ] + proto + \
                      match + [ "-j", "%%REJECT%%" ]))
    # handle rules
    ret = self._fw.handle_rules(rules, enable)
    if ret:
        # failure: revert the partially applied rules and report
        (cleanup_rules, msg) = ret
        self._fw.handle_rules(cleanup_rules, not enable)
        raise FirewallError(COMMAND_FAILED, msg)
    if not enable:
        self.remove_chain(zone, "filter", "INPUT")
        self.remove_chain(zone, "filter", "FORWARD_IN")
def add_icmp_block(self, zone, icmp, timeout=0, sender=None):
    """Block ICMP type *icmp* in *zone*; returns the canonical zone name."""
    checked_zone = self._fw.check_zone(zone)
    self._fw.check_timeout(timeout)
    self._fw.check_panic()
    obj = self._zones[checked_zone]
    icmp_id = self.__icmp_block_id(icmp)
    if icmp_id in obj.settings["icmp_blocks"]:
        raise FirewallError(ALREADY_ENABLED,
                            "'%s' already in '%s'" % (icmp, checked_zone))
    if obj.applied:
        self.__icmp_block(True, checked_zone, icmp)
    obj.settings["icmp_blocks"][icmp_id] = self.__gen_settings(timeout,
                                                               sender)
    return checked_zone
def remove_icmp_block(self, zone, icmp):
    """Unblock ICMP type *icmp* in *zone*; returns the canonical zone name."""
    checked_zone = self._fw.check_zone(zone)
    self._fw.check_panic()
    obj = self._zones[checked_zone]
    icmp_id = self.__icmp_block_id(icmp)
    if icmp_id not in obj.settings["icmp_blocks"]:
        raise FirewallError(NOT_ENABLED,
                            "'%s' not in '%s'" % (icmp, checked_zone))
    if obj.applied:
        self.__icmp_block(False, checked_zone, icmp)
    obj.settings["icmp_blocks"].pop(icmp_id, None)
    return checked_zone
def query_icmp_block(self, zone, icmp):
    """Return True if ICMP type *icmp* is blocked in *zone*."""
    blocked = self.get_settings(zone)["icmp_blocks"]
    return self.__icmp_block_id(icmp) in blocked
def list_icmp_blocks(self, zone):
    """Return the blocked ICMP types of *zone*, sorted by name."""
    return sorted(icmp for icmp in self.get_settings(zone)["icmp_blocks"])
| ignatenkobrain/firewalld | src/firewall/core/fw_zone.py | Python | gpl-2.0 | 62,523 |
'''
Created on 29.07.2013
@author: mhoyer
'''
from mysqldb import MysqlDB
from local_system import LocalSystem
from remote_system import RemoteSystem
from entities import Application
import logging
import util
class Actionmanager():
    """Drives replication and backup of the configured applications.

    Reads the global and per-application configuration from *config*
    and delegates the actual work to the MySQL, local-system and
    remote-system helpers.
    """
    def __init__(self, config):
        # Helpers are constructed once per manager instance.
        self.logger = logging.getLogger(__name__)
        self.config = config.get_config_list()
        self.app_config = config.get_applications_list()
        self.db = MysqlDB(config)
        self.system = LocalSystem(config)
        self.remotesystem = RemoteSystem(config)
    def replicate_all(self):
        """Replicate every application that has a slave node configured."""
        for element in self.app_config:
            app = Application(element)
            if app.slave_node:
                self.logger.info("replicating %s" % app.name)
                self.replicate(app)
        self.logger.info("Replication completed successfully")
    def replicate_single(self, app_name):
        """Replicate the application named *app_name*.

        Raises Exception when the name is unknown or the application has
        no slave node.  NOTE(review): if several entries share a name,
        the last match wins -- confirm names are unique.
        """
        app = None
        # iterate over app config and load app object if there is a matching name
        for item in self.app_config:
            if item["name"] == app_name:
                app = Application(item)
        if app:
            if app.slave_node:
                self.replicate(app)
            else:
                self.logger.warning("Application has no slave node configured")
                raise Exception("Configuration Error")
        else:
            self.logger.error("No application configured with name: " + app_name)
            raise Exception("Configuration Error")
        self.logger.info("Replication completed successfully")
    def backup_all(self):
        """Back up every configured application."""
        for element in self.app_config:
            app = Application(element)
            self.logger.info("saving %s" % app.name)
            self.backup(app)
        self.logger.info("Backup completed successfully")
    def backup_single(self, app_name):
        """Back up the application named *app_name*; raises Exception on
        unknown names."""
        app = None
        # iterate over app config and load app object if there is a matching name
        for item in self.app_config:
            if item["name"] == app_name:
                app = Application(item)
        if app:
            self.backup(app)
        else:
            self.logger.error("No application configured with name: " + app_name)
            raise Exception("Configuration Error")
        self.logger.info("Backup completed successfully")
    def replicate(self, app):
        """Push *app*'s packages, databases, files and folders to its
        slave node, reload its services and test availability.

        Returns the availability result when *app* has a URL configured,
        otherwise None.  Wraps any failure in a generic Exception.
        """
        try:
            # prepare replicator temp folder for the target node
            self.system.prepare_application_dirs()
            self.remotesystem.prepare_application_dirs(app.slave_node)
            if app.packages:
                self.logger.debug("ensuring packages installed: %s" % ', '.join(app.packages))
                self.remotesystem.install(app.slave_node, app.packages)
            if app.databases:
                for database in app.databases:
                    self.logger.debug("replicating database: %s" % database)
                    self.db.replicate_database(database, app.slave_node)
            if app.files:
                for afile in app.files:
                    self.logger.debug("replicating file: %s" % afile)
                    self.remotesystem.transfer_single_file(app.slave_node, afile, afile)
            if app.folders:
                for afolder in app.folders:
                    self.logger.debug("replicating folder: %s" % afolder)
                    self.remotesystem.transfer_folder(app.slave_node, afolder, afolder)
            # reload needed services
            if app.needed_services:
                for service in app.needed_services:
                    self.logger.debug("reloading service %s on %s" % (service,app.slave_node))
                    self.remotesystem.reload_service(app.slave_node, service)
            # NOTE(review): second prepare_application_dirs call --
            # presumably re-cleans the remote temp dirs after transfer;
            # confirm it is intentional.
            self.remotesystem.prepare_application_dirs(app.slave_node)
            # test availability
            if app.url:
                return self.remotesystem.test_availability(app.slave_node, 80, app.url)
        except Exception as e:
            self.logger.error("Stopping after error: " + str(e))
            raise Exception("Error replicating " + app.name)
    def backup(self, app):
        """Dump *app*'s databases, copy its files/folders and the package
        list into a temp dir, then store a compressed archive in the
        backup path and remove the temp dir."""
        # define path
        app_temp_path = util.path_append([self.system.temp_path,app.name])
        db_temp_path = util.path_append([app_temp_path,"databases"])
        file_temp_path = util.path_append([app_temp_path,"files"])
        # clear and prepare temp directories
        self.system.prepare_application_dirs()
        self.system.clear_folder(app_temp_path)
        self.system.clear_folder(db_temp_path)
        self.system.clear_folder(file_temp_path)
        try:
            # backup all components of the application
            if app.databases:
                for database in app.databases:
                    self.logger.debug("saving database: %s" % database)
                    self.db.dump_database(database, util.path_append([db_temp_path ,database + ".sql"]))
            if app.files:
                for afile in app.files:
                    self.logger.debug("saving file: %s" % afile)
                    # recreate the file's parent directory under the temp tree
                    self.system.mkdir(util.get_folder_from_path( util.path_append([file_temp_path, afile]) ), True)
                    self.system.cp(afile, util.path_append([file_temp_path, afile]), False)
            if app.folders:
                for folder in app.folders:
                    self.logger.debug("saving folder: %s" % folder)
                    self.system.mkdir(util.path_append([file_temp_path, folder]), True)
                    self.system.cp(folder, util.path_append([file_temp_path, folder]), True)
            # write package list
            self.system.write_package_list(util.path_append([app_temp_path, "package_list.txt"]))
            # save compressed backup of application data
            # NOTE(review): ".tar.gz" is passed as a separate path element;
            # if path_append joins with a separator this produces
            # ".../<name>_<timestamp>/.tar.gz" -- verify the intended name.
            backup_file = util.path_append([self.system.backup_path, app.name + "_" + util.get_timestamp(), ".tar.gz"])
            self.logger.debug("Saving compressed backup to: %s" % backup_file)
            self.system.compress(app_temp_path, backup_file)
            self.system.rm(app_temp_path, True)
        except Exception as e:
            self.logger.error("Stopping after error: " + str(e))
            raise Exception("Error saving " + app.name)
def LetterCount(str):
    """Return the first word of *str* holding the highest count of a
    repeated letter, or -1 when no word repeats any character.

    Fixes: the Python-2-only dict.iterkeys() call is replaced with a
    portable max() over the counts, and empty words (e.g. from doubled
    spaces) no longer crash max() on an empty dict.  The parameter name
    ``str`` (shadowing the builtin) is kept to preserve the signature.
    """
    best_word = ""
    best_count = 0
    for word in str.split(" "):
        counts = {}
        for ch in word:
            counts[ch] = counts.get(ch, 0) + 1
        if counts:  # guard: empty words would make max() raise
            top = max(counts.values())
            # strict ">" keeps the earliest word on ties
            if top > 1 and top > best_count:
                best_count = top
                best_word = word
    return best_word if best_count > 1 else -1
# Parenthesized single-argument print works under Python 2 and Python 3.
print(LetterCount("Hello apple pie"))
print(LetterCount("No words"))
| DevCouch/coderbyte_python | medium/letter_count.py | Python | gpl-2.0 | 614 |
from PerfectMatchingData import *
from Face import *
from Vertex import *
from Graph import *
from VertexList import *
from Output import *
from KekuleanMethods import *
from Checkers import *
from RequiredEdgeMethods import *
from Tkinter import *
from AppInformation import *
from random import randint
import time
import os
import shutil
import multiprocessing as mp
import threading
# Cross-thread cancellation flags toggled elsewhere in the program
# (module-global state shared with the GUI thread).
Break = False
BreakLoop = False
# These methods are the main drivers of the program; some of their helper
# methods are also present here.
# Numeric tuning values loaded from settings.txt by getSettings(),
# keyed by 0-based line number.
settings = {}
#function that reads in the graph returns a 2D string list of the graph
def getInput(fileName):
    """Read a face-graph file and return it as a flat list of Face
    objects, one per space-separated integer, row by row."""
    faceGraph = []
    with open(fileName, 'r') as inputFile:
        for rowIndex, line in enumerate(inputFile):
            entries = line.replace('\n', '').split(" ")
            for entry in entries:
                faceGraph.append(Face(int(entry), rowIndex))
    return faceGraph
def getSettings():
    """Load the numeric values from settings.txt into the module-global
    ``settings`` dict, keyed by 0-based line number.

    Fixes: removes the unused minW/maxW/minH/maxH locals and uses a
    ``with`` block so the file is closed even when float() raises.
    """
    with open("settings.txt", 'r') as inputFile:
        for lineNumber, line in enumerate(inputFile):
            settings[lineNumber] = float(line.replace('\n', ''))
def resetGraph(root, appInfo, submitGraph, graphNumberEntry, view):
    """Tear down the structure-viewing widgets (root/appInfo unused)."""
    for widget in (submitGraph, view, graphNumberEntry):
        widget.destroy()
def analyzeGraph(root,appInfo):
    """Read graph.txt, compute its Kekule structures on a daemon worker
    thread, and let the user pick Fries or Clars ranking via the UI."""
    root.geometry("600x400")
    selection = StringVar()
    choiceEntry = Entry(root, textvariable = selection)
    choice = selection.get()
    def callback(root,appInfo,choice,selection,choiceEntry,fileName = "graph.txt"):
        # Worker-thread body: build the graph, find matchings, then poll
        # the entry widget until a valid ranking choice is typed.
        loading = Label(root, text="Analyzing graph data, this may take a few minutes.")
        loading.pack()
        fileName = fileName
        faceGraph = getInput(fileName)
        #check for connectedness
        connected = isConnected(faceGraphToInts(faceGraph))
        if connected == True:
            vertexGraph = makeVertexGraph(faceGraph)
            superGraph = Graph(faceGraph, vertexGraph)
            structures = assignMatching(superGraph)
            _findRequiredEdges(structures)
            loading.destroy()
            choiceEntry.pack()
            typeSelection = Label(root, text="Would you like to view the graphs ranked by Fries or Clars?")
            typeSelection.pack()
            submit = Button(root, text ="Submit", command = lambda: userInputStructures(root,appInfo,structures,choice,submit,typeSelection,choiceEntry))
            submit.pack(side = BOTTOM)
            # NOTE(review): busy-wait polling loop.  `exit` is never set
            # True, so the outer loop can only spin; `againSelection` is
            # referenced in the inner loop even when it was never created
            # (flag False) -- confirm this flow is intended.
            while True:
                choice = selection.get()
                flag = False
                exit = False
                if choice != 'fries' and choice != 'clars' and choice != "":
                    againSelection = Label(root, text="That file does not exist, please try again.")
                    againSelection.pack()
                    print "again"
                    flag = True
                while choice != 'fries' and choice != 'clars':
                    submit.update_idletasks()
                    choiceEntry.update_idletasks()
                    typeSelection.update_idletasks()
                    againSelection.update_idletasks()
                    choice = selection.get()
                if exit == True:
                    againSelection.destroy()
                    break
                submit.update_idletasks()
                choiceEntry.update_idletasks()
                typeSelection.update_idletasks()
    # Run the analysis off the UI thread; daemon so app exit kills it.
    t = threading.Thread(target = lambda: callback(root,appInfo,choice,selection,choiceEntry))
    t.setDaemon(True)
    appInfo.setThreads(t)
    t.start()
def userInputStructures(root,appInfo,structures,choice,submit,typeSelection,choiceEntry):
    """Swap the ranking-choice widgets for the structure-viewing widgets
    and wire up the display and quit buttons."""
    structureNumber = IntVar()
    submit.destroy()
    typeSelection.destroy()
    choiceEntry.destroy()
    def displayClarFries(structureNumber,structures,choice):
        # Sort by the chosen comparison and show the requested structure
        # (highest-ranked first after reverse()).
        # NOTE(review): sort() runs before Graph.comparison is changed,
        # so the ordering may reflect the previous comparison -- confirm.
        structures.sort()
        if choice == 'clars':
            Graph.comparison = 'clars'
        elif choice == 'fries':
            Graph.comparison = 'fries'
        structures.reverse()
        structures[structureNumber].displayGraph()
    view = Label(root, text="There are " + str(len(structures)) + " distince Kekule structures avaiable. Which would you like to view?")
    view.pack()
    graphNumberEntry = Entry(root, textvariable = structureNumber)
    graphNumberEntry.pack()
    # NOTE(review): the entry value is read once here, before the user
    # can type, so the button always shows structure 0 -- confirm.
    number = structureNumber.get()
    submitGraph = Button(root, text ="Submit Structure", command = lambda: displayClarFries(number,structures,choice))
    submitGraph.pack(side = BOTTOM)
    def deleteB(button):
        # destroy a single widget (used for the quit button itself)
        button.destroy()
    reset = Button(root, text ="Quit", command = lambda: resetB(root,appInfo,submitGraph,graphNumberEntry,view))
    reset.pack(side = BOTTOM)
    def resetB(root,appInfo,submitGraph,graphNumberEntry,view):
        # remove the quit button, then the structure-viewing widgets
        deleteB(reset)
        resetGraph(root,appInfo,submitGraph,graphNumberEntry,view)
#A user-entered number of graphs are generated and tested for Kekulean-ness and written to their proper text files
def randomIntoFiles():
    """Generate a user-chosen number of random connected graphs and write
    each to Kekuleans.txt or NotKekulean.txt depending on the test."""
    kekuleanFile = open("Kekuleans.txt", "w")
    notKekuleanFile = open("NotKekulean.txt", "w")
    numK = 0
    numNotK = 0
    trials = int(raw_input("How many graphs would you like to create? "))
    print "\n" #just to provide some visual space
    t1 = time.time()
    for i in range(trials):
        faceGraph = createRandomConnectedGraph()
        vGraph = makeVertexGraph(faceGraph)
        randGraph = Graph(faceGraph, vGraph)
        if isKekulean(randGraph) == True:
            numK += 1
            kekuleanFile.write("Graph #" + str(numK) + "\n")
            kekuleanFile.write(randGraph.simpleToString() + '\n')
        else:
            numNotK += 1
            notKekuleanFile.write("Graph #" + str(numNotK) + "\n")
            notKekuleanFile.write(randGraph.simpleToString() + '\n')
        #print randGraph
        #print "\n"
    t2 = time.time()
    # summary of the run and elapsed wall-clock time
    print "\n" + str(numK) + " Kekulean graph(s) were found.\n" + str(numNotK) + " non-Kekulean graph(s) were found."
    print "Time elapsed (in seconds): " + str(t2 - t1) + "\n"
    kekuleanFile.close()
    notKekuleanFile.close()
#creates a random Kekulean graph ands does stuff with it and saves it to an png
def createRandomKekulean():
    """Generate a random Kekulean graph, save its structures as PNGs
    ranked by Fries and by Clars, then display them interactively."""
    #creates a face graphs
    # NOTE(review): randomFaces is computed but never used;
    # _createRandomKekulean builds its own graph -- confirm needed.
    randomFaces = createRandomGraph()
    randomGraph = _createRandomKekulean()
    print "There are", len(randomGraph.getVertexGraph()), "vertices"
    graphs = assignMatching(randomGraph)
    graphs.sort()
    if len(graphs) > 0:
        #save graphs as PNG file
        savePNG(graphs, "graphs - Fries.png")
        Graph.comparison = 'clars'
        graphs.sort()
        savePNG(graphs, "graphs - Clars.png")
        while True:
            choice = raw_input("Would you like to view the graphs ranked by Fries or Clars? (or quit?) ")
            # re-prompt until one of the three accepted answers is given
            while choice.lower() != 'fries' and choice.lower() != 'clars' and choice.lower() != 'quit':
                choice = raw_input("Would you like to view the graphs ranked by Fries or Clars? (or quit?) ")
            if choice.lower() == 'clars':
                Graph.comparison = 'clars'
            elif choice.lower() == 'fries':
                Graph.comparison = 'fries'
            else:
                break
            # best-ranked structures first
            graphs.sort()
            graphs.reverse()
            print "There are", len(graphs), "Kekulean structures"
            displayGraphs(graphs)
    else:
        print "error - Graph is Kekulean but has no perfect matching - see error.txt for graph"
        errorFile = open("error.txt", "w")
        errorFile.write(randomGraph.simpleToString() + '\n')
#Creates a random planar graph, which may not be connected
def createRandomGraph():
    """Build a random (possibly disconnected) planar face graph within
    the size limits from ``settings``, retrying until it touches both
    the x- and y-axis."""
    while True:
        candidate = []
        for rowIndex in range(randint(settings[2], settings[3])):
            rowLength = randint(settings[0], settings[1])
            newRow = getRow(rowLength, rowIndex)
            # getRow may randomly drop every face; insist on a non-empty row
            while len(newRow) == 0:
                newRow = getRow(rowLength, rowIndex)
            candidate.extend(newRow)
        if checkAlignment(candidate):
            return candidate
def checkAlignment(graph):
    """Return True when the face graph touches both axes: some face has
    x == 0 (y-axis) and some face has y == 0 (x-axis)."""
    touches_y_axis = any(face.getX() == 0 for face in graph)
    if not touches_y_axis:
        return False
    touches_x_axis = any(face.getY() == 0 for face in graph)
    if not touches_x_axis:
        return False
    return True
def createRandomConnectedGraph():
    """Keep generating random face graphs until a connected one appears."""
    candidate = createRandomGraph()
    while not isConnected(faceGraphToInts(candidate)):
        candidate = createRandomGraph()
    return candidate
#generates a row for the the createRandomGraph method
def getRow(rl, rowNum):
    """Randomly populate one row: each of the *rl* columns receives a
    Face unless it is dropped with probability settings[4]."""
    return [Face(col, rowNum) for col in range(rl)
            if randint(0, 100) > settings[4] * 100]
def _createRandomKekulean():
    """Generate random connected graphs until one passes the Kekulean
    (perfect matching) test, and return it as a Graph."""
    #creates a face graphs
    randomFaces = createRandomGraph()
    while isConnected(faceGraphToInts(randomFaces)) == False:
        randomFaces = createRandomGraph()
    vertexGraph = makeVertexGraph(randomFaces)
    randomGraph = Graph(randomFaces, vertexGraph)
    while isKekulean(randomGraph) == False:
        #print "making K"
        randomFaces = createRandomGraph()
        while isConnected(faceGraphToInts(randomFaces)) == False:
            randomFaces = createRandomGraph()
        vertexGraph = makeVertexGraph(randomFaces)
        randomGraph = Graph(randomFaces, vertexGraph)
    # NOTE(review): the while loop above only exits when isKekulean()
    # holds, so the else/recursion branch below appears unreachable.
    if isKekulean(randomGraph):
        return randomGraph
    else:
        return _createRandomKekulean()
def createManyKekuleans():
    """Generate Kekulean graphs in parallel (one pool task per trial)
    and display all of their perfect-matching structures."""
    graphs = [] #list of kekulean graphs
    graphList = [] #list of the Kekulean graphs with their matchings, and Fries/Clars Faces
    trials = int(raw_input("How many graphs would you like to create? "))
    # one worker per CPU; each task builds an independent Kekulean graph
    pool = mp.Pool(mp.cpu_count())
    results = [pool.apply_async(_createRandomKekulean) for x in range(trials)]
    graphs = [r.get() for r in results]
    for g in graphs:
        graphList.extend(assignMatching(g))
    graphList.sort()
    if len(graphList) > 0:
        print "There are", len(graphList), "Kekulean structures"
        displayGraphs(graphList)
def testKekuleanThms():
    """Randomly search, for a user-chosen number of hours, for graphs on
    which the Nelson peak/valley test and the perfect-matching test
    disagree; log any counterexamples to conflict.txt."""
    conflictFile = open("conflict.txt", "w")
    interval = float(raw_input("How many hours would you like to run the program?"))
    timeLimit = 3600 * interval
    print "limit:", timeLimit
    t1 = time.time()
    t2 = time.time()
    counter = 0
    while t2 - t1 < timeLimit:
        print "graph #" + str(counter)
        #creates a face graphs
        randomFaces = createRandomGraph()
        vertexGraph = []
        # keep sampling until the graph is connected, has an even number
        # of vertices and passes the peak/valley count check
        while len(vertexGraph) % 2 != 0 or len(vertexGraph) == 0 or countPeaksAndValleys(randomFaces) == False or isConnected(faceGraphToInts(randomFaces)) == False:
            randomFaces = createRandomGraph()
            vertexGraph = makeVertexGraph(randomFaces)
        randomGraph = Graph(randomFaces, vertexGraph)
        nelsonThm = isOldKekulean(randomGraph)
        perfectMatchingThm = isKekulean(randomGraph)
        if nelsonThm != perfectMatchingThm:
            # the two theorems disagree -- record the counterexample
            conflictFile.write("Perfect matching: " + str(perfectMatchingThm) + " Nelson Thm: " + str(nelsonThm) + "\n")
            conflictFile.write(randomGraph.simpleToString())
            conflictFile.write("\n")
        t2 = time.time()
        counter += 1
    conflictFile.close()
#takes a row and returns a the number of vertical edges in that row
def getRowEdgeCount(row):
    """Count the vertical edges of one row of face x-coordinates.

    Every face contributes one edge; a face contributes a second one
    when the next face is not directly adjacent (or when it is the last
    face of the row).  Fixes: the original used a bare ``except:`` to
    detect the end of the row, which would also have masked unrelated
    errors; an explicit bounds check replaces it.
    """
    edgeCount = 0
    for i in range(len(row)):
        edgeCount += 1  # left edge of this face
        nxt = row[i + 1] if i + 1 < len(row) else None
        if nxt is None or row[i] + 1 != nxt:
            edgeCount += 1  # right edge closes a run of adjacent faces
    return edgeCount
def getMinRows(g):
    """Return {row_index: row} for every row of *g* whose vertical edge
    count equals the minimum over all rows.

    Fixes: replaces the Python-2-only ``sys.maxint`` sentinel with
    ``float("inf")`` and the manual index counter with enumerate().
    """
    minRows = {}
    minEdges = float("inf")
    for index, r in enumerate(g):
        edgeCount = getRowEdgeCount(r)
        if edgeCount < minEdges:
            # new strict minimum: restart the result set
            minEdges = edgeCount
            minRows = {index: r}
        elif edgeCount == minEdges:
            minRows[index] = r
    return minRows
#counts up the number of peaks above each row and stores those values in a list at indexes that correspond to the the row of the graph
def getPeaksAboveRows(g):
    """Count the peaks above each row of the int face graph ``g`` and return
    the per-row totals as a list indexed by row.

    Peaks accumulate downward (each row inherits the count from the row
    above); a valley cancels one peak for the row below it.
    """
    peaksAboveRow = [0]*(len(g))
    for r in range(len(g)):
        #print "r: " + str(r)
        row = g[r]
        if r > 0:
            # carry the running total down from the previous row
            peaksAboveRow[r] += peaksAboveRow[r-1]
        for col in range(len(row)):
            face = row[col]
            # searchRow(face, True, ...) tests for a peak at this face
            if searchRow(face, True, g, r) == True:
                peaksAboveRow[r] += 1
                #print "Peak at: " + str(r) + ", " + str(col)
            # searchRow(face, False, ...) tests for a valley; it offsets a peak
            # in the row below (skipped on the last row)
            if searchRow(face, False, g, r) == True and r < len(g)-1:
                peaksAboveRow[r+1] -= 1
                #print "Valley at: " + str(r) + ", " + str(col)
        peaksAboveRow[r] = abs(peaksAboveRow[r])
    return peaksAboveRow
#Theorem I devoloped
def NelsonThm(peaks, g):
    """Author's theorem: the graph is Kekulean only if, for every row tied for
    the minimum vertical-edge count, the number of peaks above that row does
    not exceed the row's edge count. Returns a boolean."""
    for rowIndex, row in getMinRows(g).items():
        if peaks[rowIndex] > getRowEdgeCount(row):
            # a single violating minimum row disproves the property
            return False
    return True
#ckesks of a graph is Kekulean and returns a boolean
def isOldKekulean(graph):
    """Check whether ``graph`` is Kekulean using the Nelson theorem test
    (as opposed to the perfect-matching test in isKekulean). Returns a bool."""
    rows = faceGraphToInts(graph.getFaceGraph())
    #print getPeaksAboveRows(rows)
    return NelsonThm(getPeaksAboveRows(rows), rows)
def getUpperBounds(graph):
    """Print per-row and total upper bounds for ``graph``, derived from the
    imbalance of white vs black vertices accumulated down the rows.

    Vertices with even y are counted as white, odd y as black (per the
    program's colouring convention visible below).
    """
    #faceGraph = getInput(filename)
    #vertexGraph = makeVertexGraph(faceGraph)
    #graph = Graph(faceGraph, vertexGraph)
    kekulean = isKekulean(graph)
    if kekulean == True:
        rowCount = [0] * graph.getNumberOfRows()
        whiteCount = [0] * graph.getNumberOfRows()
        blackCount = [0] * graph.getNumberOfRows()
        print "len:", len(whiteCount)
        for v in graph.getVertexGraph():
            #even y numbers mean the vertex is marked white on the graph
            if v.getY() % 2 == 0:
                # NOTE: integer floor division under Python 2
                index = v.getY() / 2
                if index < len(whiteCount):
                    whiteCount[index] += 1
            #The else implies that the vertex's y is odd, and thus the verex is marked black
            else:
                index = (v.getY() - 1) / 2
                if index < len(blackCount):
                    blackCount[index] += 1
        print "Upper Bonds of the graph per row:"
        for index in range(len(rowCount)):
            # cumulative white/black imbalance through this row
            count = abs(sum(whiteCount[0:index+1]) - sum(blackCount[0:index+1]))
            print count
            rowCount[index] = count
        totalUpperBonds = sum(rowCount)
        print "Upper bond of the graph:", totalUpperBonds
    else:
        print "The graph is not Kekulean"
def testConjectureSameFaces(root,interval):
    """Tkinter-driven search (for ``interval`` hours) for a counterexample to
    the Clars-number conjecture among random Kekulean graphs with the same
    number of faces and vertices; conflicts are drawn via drawConflictsCC."""
    global Break
    Break = False
    quit = Button(root, text ="Quit", command = BreakModule)
    quit.pack(side = LEFT)
    scrollbar = Scrollbar(root)
    scrollbar.pack(side = RIGHT, fill = Y)
    text = Text(root,yscrollcommand = scrollbar.set)
    text.pack()
    scrollbar.config(command = text.yview)
    graphList = []
    graphNumber = 0
    counter = 0
    timeLimit = 3600 * interval
    t1 = time.time()
    t2 = time.time()
    while t2 - t1 < timeLimit:
        # the Quit button sets the module-level Break flag via BreakModule
        if Break == True:
            Break = False
            quit.destroy()
            break
        text.insert(CURRENT, "Graph " + str(graphNumber) + "\n")
        #creates a face graphs
        randomFaces = createRandomGraph()
        vertexGraph = []
        #Finds connected graph
        while len(vertexGraph) % 2 != 0 or len(vertexGraph) == 0 or countPeaksAndValleys(randomFaces) == False or isConnected(faceGraphToInts(randomFaces)) == False:
            randomFaces = createRandomGraph()
            vertexGraph = makeVertexGraph(randomFaces)
        randomGraph = Graph(randomFaces, vertexGraph)
        perfectMatchingThm = isKekulean(randomGraph)
        if perfectMatchingThm == True:
            structures = assignMatching(randomGraph)
            #must be 'fries' or 'clars'
            Graph.comparison = 'clars'
            structures.sort()
            h = structures[-1]  # best structure under the 'clars' ordering
            h.setNumStructures(len(structures))
            h.setFaces(getNumFaces(faceGraphToInts(randomFaces)))
            #h.setString(structures[0].simpleToString())
            #is the data right?
            #print "Verts:", h.getNumVertices()
            #print "Structures:", h.getNumStructures()
            #print "Clar:", h.getFriesNumber()
            for g in graphList:
                # compare only against earlier graphs with equal face and vertex counts
                if(h.getFaces() == g.getFaces()):
                    if h.getNumVertices() == g.getNumVertices() :#and h.getNumVertices() <= 26:
                        if h.getNumStructures() < g.getNumStructures():
                            #first part
                            if h.getClarsNumber() > g.getClarsNumber():
                                print 'Conjecture is false:'
                                drawConflictsCC(g, h)
            #only adds graphs to list if it under some number of vertices
            graphList.append(h)
        t2 = time.time()
        counter += 1
        graphNumber += 1
        text.update_idletasks()
        quit.update_idletasks()
        scrollbar.update_idletasks()
    text.destroy()
    scrollbar.destroy()
    quit.destroy()
#second part
def testConjectureSameFacesKKFF(root, interval):
    """Same search loop as testConjectureSameFaces, but ranks structures by
    Fries number and reports conflicts via drawConflictsKKFF."""
    global Break
    Break = False
    quit = Button(root, text ="Quit", command = BreakModule)
    quit.pack(side = LEFT)
    scrollbar = Scrollbar(root)
    scrollbar.pack(side = RIGHT, fill = Y)
    text = Text(root,yscrollcommand = scrollbar.set)
    text.pack()
    scrollbar.config(command = text.yview)
    graphList = []
    graphNumber = 0
    counter = 0
    timeLimit = 3600 * interval
    t1 = time.time()
    t2 = time.time()
    while t2 - t1 < timeLimit:
        if Break == True:
            Break = False
            quit.destroy()
            break
        text.insert(CURRENT, "Graph " + str(graphNumber) + "\n")
        #creates a face graphs
        randomFaces = createRandomGraph()
        vertexGraph = []
        #Finds connected graph
        while len(vertexGraph) % 2 != 0 or len(vertexGraph) == 0 or countPeaksAndValleys(randomFaces) == False or isConnected(faceGraphToInts(randomFaces)) == False:
            randomFaces = createRandomGraph()
            vertexGraph = makeVertexGraph(randomFaces)
        randomGraph = Graph(randomFaces, vertexGraph)
        perfectMatchingThm = isKekulean(randomGraph)
        if perfectMatchingThm == True:
            structures = assignMatching(randomGraph)
            #must be 'fries' or 'clars'
            Graph.comparison = 'fries'
            structures.sort()
            h = structures[-1]  # best structure under the 'fries' ordering
            h.setNumStructures(len(structures))
            h.setFaces(getNumFaces(faceGraphToInts(randomFaces)))
            clarNumberStructure = []
            friesNumberStructure = []
            for g in graphList:
                if(h.getFaces() == g.getFaces()):
                    if h.getNumVertices() == g.getNumVertices() :#and h.getNumVertices() <= 26:
                        if h.getNumStructures() < g.getNumStructures():
                            if h.getFriesNumber() > g.getFriesNumber():
                                drawConflictsKKFF(g, h)
            #only adds graphs to list if it under some number of vertices
            graphList.append(h)
        t2 = time.time()
        counter += 1
        graphNumber += 1
        text.update_idletasks()
        quit.update_idletasks()
        scrollbar.update_idletasks()
    text.destroy()
    scrollbar.destroy()
    quit.destroy()
def testConjectureSameFacesFFCC(root, interval):
    """Search for graphs where the Clars and Fries orderings disagree
    (bigger Clar but smaller Fries); conflicting pairs are rendered to the
    FFCCConjectureConflicts folder together with an info.txt summary."""
    clarNumberStructures = []
    friesNumberStructures = []
    graphs = []
    graphList = []
    temp = 0  # suffix used to keep conflict output folders unique
    graphNumber = 0
    counter = 0
    global Break
    Break = False
    quit = Button(root, text ="Quit", command = BreakModule)
    quit.pack(side = LEFT)
    scrollbar = Scrollbar(root)
    scrollbar.pack(side = RIGHT, fill = Y)
    text = Text(root,yscrollcommand = scrollbar.set)
    text.pack()
    scrollbar.config(command = text.yview)
    timeLimit = 3600 * interval
    t1 = time.time()
    t2 = time.time()
    while t2 - t1 < timeLimit:
        if Break == True:
            Break = False
            quit.destroy()
            break
        text.insert(CURRENT, "Graph " + str(graphNumber) + "\n")
        #creates a face graphs
        randomFaces = createRandomGraph()
        vertexGraph = []
        #Finds connected graph
        while len(vertexGraph) % 2 != 0 or len(vertexGraph) == 0 or countPeaksAndValleys(randomFaces) == False or isConnected(faceGraphToInts(randomFaces)) == False:
            randomFaces = createRandomGraph()
            vertexGraph = makeVertexGraph(randomFaces)
        randomGraph = Graph(randomFaces, vertexGraph)
        perfectMatchingThm = isKekulean(randomGraph)
        if perfectMatchingThm == True:
            structures = assignMatching(randomGraph)
            randomGraph.setMaxClarManual(setMaxClar(randomGraph))
            randomGraph.setMaxFriesManual(setMaxFries(randomGraph))
            h = structures[-1]
            graphs.append(randomGraph)
            h.setMaxClarManual(setMaxClar(randomGraph))
            h.setMaxFriesManual(setMaxFries(randomGraph))
            h.setNumStructures(len(structures))
            h.setFaces(getNumFaces(faceGraphToInts(randomFaces)))
            graphCount = 0  # index into ``graphs`` parallel to graphList
            graphNumber += 1
            for g in graphList:
                if(g.getFaces() == h.getFaces()):
                    if g.getNumVertices() == h.getNumVertices():
                        if g.getNumStructures() < h.getNumStructures():
                            # conflict: fewer structures but larger Clar AND smaller Fries
                            if g.getMaxClar() > h.getMaxClar():
                                if g.getMaxFries() < h.getMaxFries():
                                    print 'Conjecture is false:\n'
                                    saveClarFaceFFCC(graphs[graphCount],randomGraph,temp)
                                    saveFriesFaceFFCC(graphs[graphCount],randomGraph,temp)
                                    folderName = "FFCCConjectureConflicts"
                                    fileName = folderName + "/" + str(randomGraph.getNumVertices()) + "_" + str(temp)+ "/info" + ".txt"
                                    f = open(fileName,'w')
                                    f.write("C1: " + str(g.getMaxClar()) + " C2: " + str(h.getMaxClar()) + " F1: " + str(g.getMaxFries()) + " F2: " + str(h.getMaxFries()) + "\n")
                                    f.write(str(faceGraphToInts(g.getFaceGraph())) + "\n")
                                    f.write(str(faceGraphToInts(h.getFaceGraph())) + "\n")
                                    f.close()
                                    temp += 1
                graphCount += 1
            #only adds graphs to list if it under some number of vertices
            graphList.append(h)
        t2 = time.time()
        counter += 1
def setMaxFries(graph):
    """Rebuild ``graph`` from its face graph and return the best Fries number
    over all of its Kekule structures."""
    faces = graph.getFaceGraph()
    rebuilt = Graph(faces, makeVertexGraph(faces))
    structures = assignMatching(rebuilt)
    Graph.comparison = 'fries'
    structures.sort()
    best = structures[-1]
    return best.getFriesNumber()
def setMaxClar(graph):
    """Rebuild ``graph`` from its face graph and return the best Clars number
    over all of its Kekule structures."""
    faces = graph.getFaceGraph()
    rebuilt = Graph(faces, makeVertexGraph(faces))
    structures = assignMatching(rebuilt)
    Graph.comparison = 'clars'
    structures.sort()
    best = structures[-1]
    return best.getClarsNumber()
def saveClarFaceFFCC(graph1,graph2,count):
    """Render the best Clar structure of each graph to PNGs under
    FFCCConjectureConflicts/<nVertices>_<count>/clar{1,2}.png."""
    g1 = graph1.getFaceGraph()
    g2 = graph2.getFaceGraph()
    v1 = makeVertexGraph(g1)
    v2 = makeVertexGraph(g2)
    G1 = Graph(g1,v1)
    G2 = Graph(g2,v2)
    structures1 = assignMatching(G1)
    structures2 = assignMatching(G2)
    Graph.comparison = 'clars'
    structures1.sort()
    structures2.sort()
    h1 = structures1[-1]  # best structure of each graph under 'clars'
    h2 = structures2[-1]
    if not os.path.exists("FFCCConjectureConflicts"):
        os.mkdir("FFCCConjectureConflicts")
    folderName = "FFCCConjectureConflicts/" + str(G1.getNumVertices()) + "_" + str(count)
    #setup folder
    if not os.path.exists(folderName):
        os.mkdir(folderName)
    #print "adding"
    fileName1 = folderName + "/clar1" + ".png"
    fileName2 = folderName + "/clar2" + ".png"
    #print fileName1
    saveSinglePNG(h1,fileName1)
    saveSinglePNG(h2,fileName2)
def saveFriesFaceFFCC(graph1,graph2,count):
    """Render the best Fries structure of each graph to PNGs under
    FFCCConjectureConflicts/<nVertices>_<count>/fries{1,2}.png.
    Mirrors saveClarFaceFFCC but ranks by Fries number."""
    g1 = graph1.getFaceGraph()
    g2 = graph2.getFaceGraph()
    v1 = makeVertexGraph(g1)
    v2 = makeVertexGraph(g2)
    G1 = Graph(g1,v1)
    G2 = Graph(g2,v2)
    structures1 = assignMatching(G1)
    structures2 = assignMatching(G2)
    Graph.comparison = 'fries'
    structures1.sort()
    structures2.sort()
    h1 = structures1[-1]  # best structure of each graph under 'fries'
    h2 = structures2[-1]
    if not os.path.exists("FFCCConjectureConflicts"):
        os.mkdir("FFCCConjectureConflicts")
    folderName = "FFCCConjectureConflicts/" + str(G1.getNumVertices()) + "_" + str(count)
    #setup folder
    if not os.path.exists(folderName):
        os.mkdir(folderName)
    #print "adding"
    fileName1 = folderName + "/fries1" + ".png"
    fileName2 = folderName + "/fries2" + ".png"
    #print fileName1
    saveSinglePNG(h1,fileName1)
    saveSinglePNG(h2,fileName2)
def testConjectureDifferentFaces(hours=0):
    """Run the conjecture search without requiring equal face counts, logging
    all three conflict variants (Clars, Fries, Clars-vs-Fries) to results.txt
    and drawing them with the matching drawConflicts* helper.

    NOTE(review): ``results`` is never closed; output relies on interpreter
    shutdown flushing the file.
    """
    graphList = []
    results = open("results.txt", "w")
    results.write("The program actually run!")
    if hours == 0:
        interval = float(raw_input("How many hours would you like to run the program? "))
    else:
        interval = hours
    timeLimit = 3600 * interval
    print "limit:", timeLimit
    t1 = time.time()
    t2 = time.time()
    counter = 0
    while t2 - t1 < timeLimit:
        print "graph #" + str(counter)
        #creates a face graphs
        randomFaces = createRandomGraph()
        vertexGraph = []
        #Finds connected graph
        while len(vertexGraph) % 2 != 0 or len(vertexGraph) == 0 or countPeaksAndValleys(randomFaces) == False or isConnected(faceGraphToInts(randomFaces)) == False:
            randomFaces = createRandomGraph()
            vertexGraph = makeVertexGraph(randomFaces)
        randomGraph = Graph(randomFaces, vertexGraph)
        perfectMatchingThm = isKekulean(randomGraph)
        if perfectMatchingThm == True:
            structures = assignMatching(randomGraph)
            for f in randomGraph.getFaceGraph():
                pairs = randomGraph.getBondedVertices(f)
                print str(pairs)
            #must be 'fries' or 'clars'
            Graph.comparison = 'clars'
            structures.sort()
            h = structures[-1]
            h.setNumStructures(len(structures))
            #h.setString(structures[0].simpleToString())
            #is the data right?
            #print "Verts:", h.getNumVertices()
            #print "Structures:", h.getNumStructures()
            #print "Clar:", h.getFriesNumber()
            for g in graphList:
                if h.getNumVertices() == g.getNumVertices() :#and h.getNumVertices() <= 26:
                    if h.getNumStructures() < g.getNumStructures():
                        #first part: Clars-number conflict
                        if h.getClarsNumber() > g.getClarsNumber():
                            print 'Conjecture is false:'
                            results.write('\ngraph H: Clars: ' + str(h.getClarsNumber()) + " Number of Structures: " + str(h.getNumStructures()) + " Number of vertices: " + str(h.getNumVertices()) + "\n")
                            results.write(str(h))
                            results.write('\ngraph G: Clars: ' + str(g.getClarsNumber()) + " Number of Structures: " + str(g.getNumStructures()) + " Number of vertices: " + str(g.getNumVertices()) + "\n")
                            results.write(str(g))
                            results.write("\n\n")
                            drawConflictsCC(g, h)
                        #second part: Fries-number conflict
                        if h.getFriesNumber() > g.getFriesNumber():
                            print 'Conjecture is false:'
                            results.write('\ngraph H: Fries: ' + str(h.getFriesNumber()) + " Number of Structures: " + str(h.getNumStructures()) + " Number of vertices: " + str(h.getNumVertices()) + "\n")
                            results.write(str(h))
                            results.write('\ngraph G: Fries: ' + str(g.getFriesNumber()) + " Number of Structures: " + str(g.getNumStructures()) + " Number of vertices: " + str(g.getNumVertices()) + "\n")
                            results.write(str(g))
                            results.write("\n\n")
                            drawConflictsKKFF(g, h)
                        #third part: Clars up but Fries down
                        if h.getClarsNumber() > g.getClarsNumber():
                            if h.getFriesNumber() < g.getFriesNumber():
                                print 'Conjecture is false:'
                                results.write('\ngraph H: Clars: ' + str(h.getClarsNumber()) + "graph H: Fries: " + str(h.getFriesNumber()) + " Number of Structures: " + str(h.getNumStructures()) + " Number of vertices: " + str(h.getNumVertices()) + "\n")
                                results.write(str(h))
                                results.write('\ngraph G: Clars: ' + str(g.getClarsNumber()) + "graph G: Fries: " + str(g.getFriesNumber()) +" Number of Structures: " + str(g.getNumStructures()) + " Number of vertices: " + str(g.getNumVertices()) + "\n")
                                results.write(str(g))
                                results.write("\n\n")
                                drawConflictsFFCC(g, h)
            #only adds graphs to list if it under some number of vertices
            graphList.append(h)
        t2 = time.time()
        counter += 1
def findHighestClars(graphs):
    """Return the largest Clars number found in ``graphs`` (0 when the
    sequence is empty or every value is non-positive)."""
    # seed with 0 so the empty case matches the original accumulator start
    return max([0] + [g.getClarsNumber() for g in graphs])
def _findRequiredEdges(graphs):
    """Mark the endpoints of every edge required across all structures in
    ``graphs`` and report whether any required edge exists."""
    masterSet = getRequiredSet(graphs)
    if not masterSet:
        return False
    for v1, v2 in masterSet:
        v1.required = True
        v2.required = True
    return True
def findRequiredEdges(hours=0):
    """Generate random Kekulean graphs for ``hours`` (prompting when 0) and
    archive any graph that has required edges to requiredEdges/ as text plus
    a PNG of its first structure."""
    if not os.path.exists("requiredEdges"):
        os.mkdir("requiredEdges")
    edgeFile = open("requiredEdges/RequiredEdges.txt", "w")
    graphNumber = 0
    rqNum = 0  # running index of graphs found to have required edges
    flag = False
    if hours == 0:
        interval = float(raw_input("How many hours would you like to run the program? "))
    else:
        interval = hours
    timeLimit = 3600 * interval
    print "limit:", timeLimit
    t1 = time.time()
    t2 = time.time()
    while t2 - t1 < timeLimit:
        print "graph", graphNumber
        flag = False
        graph = _createRandomKekulean()
        graphs = assignMatching(graph)
        for f in graph.getFaceGraph():
            pairs = graph.getBondedVertices(f)
            print str(pairs)
        flag = _findRequiredEdges(graphs)
        if flag == True:
            print "Found graph with required edges"
            edgeFile.write("Graph: " + str(rqNum) + "\n")
            edgeFile.write(graph.simpleToString())
            edgeFile.write("\n\n")
            #save PNG's
            fileName = "requiredEdges/Graph" + str(rqNum) + ".png"
            saveSinglePNG(graphs[0], fileName)
            rqNum += 1
        graphNumber += 1
        t2 = time.time()
def BreakModule():
    """Raise the module-level Break flag; the GUI search loops poll it to stop."""
    global Break
    Break = True
def BreakLoop():
    # NOTE(review): the global flag shares this function's own name, so the
    # assignment below rebinds the name 'BreakLoop' to True and the function
    # cannot be called a second time. Confirm intent; a distinctly named
    # flag (as with Break/BreakModule above) is likely what was meant.
    global BreakLoop
    BreakLoop = True
def combineGraphs(root,interval):
    """For ``interval`` hours, generate Kekulean graphs, collect those whose
    required edges touch the boundary, and try to splice complementary pairs
    into a larger "super graph"; successful combinations are written to
    CombinedTemps/ and rendered into CombinedGraphs/.

    NOTE(review): edge tuples appear to be (vertex, vertex, side-label) —
    confirm against getExternalEdges/getComplements.
    """
    global Break
    Break = False
    quit = Button(root, text ="Quit", command = BreakModule)
    quit.pack(side = LEFT)
    graphNumber = 0
    superGraphNumber = 0
    deletedCount = 0
    scrollbar = Scrollbar(root)
    scrollbar.pack(side = RIGHT,fill = Y)
    text = Text(root,yscrollcommand = scrollbar.set)
    text.pack()
    scrollbar.config(command=text.yview)
    storedGraphs = {}  # graph -> its external (boundary) required edges
    timeLimit = 3600 * interval
    t1 = time.time()
    t2 = time.time()
    while t2 - t1 < timeLimit:
        text.insert(CURRENT,"graph: " + str(graphNumber) + "\n")
        if Break == True:
            Break = False
            quit.destroy()
            break
        flag = False
        #new stuff
        randomFaces = createRandomGraph()
        vertexGraph = []
        #Finds connected graph
        while len(vertexGraph) % 2 != 0 or len(vertexGraph) == 0 or countPeaksAndValleys(randomFaces) == False or isConnected(faceGraphToInts(randomFaces)) == False:
            randomFaces = createRandomGraph()
            vertexGraph = makeVertexGraph(randomFaces)
        randomGraph = Graph(randomFaces, vertexGraph)
        perfectMatchingThm = isKekulean(randomGraph)
        if perfectMatchingThm == True:
            structures = assignMatching(randomGraph)
            #end new stuff
            Graph.comparison = 'clars'
            structures.sort()
            randomGraph.maxClars = structures[-1].getClarsNumber()
            req_edges = getRequiredSet(structures)
            externalEdges = getExternalEdges(req_edges)
            if len(externalEdges) > 0:
                #add graph and edges to list
                storedGraphs[randomGraph] = externalEdges
                for g, edges in storedGraphs.items():
                    complements = getComplements(externalEdges, edges)
                    for edge, compEdge in complements:
                        # the shared face of each edge's two endpoints
                        faceA = (edge[0].getFaces() & edge[1].getFaces()).pop()
                        faceB = (compEdge[0].getFaces() & compEdge[1].getFaces()).pop()
                        x = faceA.getX() - faceB.getX()
                        y = faceA.getY() - faceB.getY()
                        # shift g so its complementary edge lines up with randomGraph's
                        if edge[2] == "TOP_RIGHT" and compEdge[2] == "BOTTOM_LEFT":
                            newGraph = offsetFaces(g, x, y + 1);
                        elif edge[2] == "RIGHT" and compEdge[2] == "LEFT":
                            newGraph = offsetFaces(g, x + 1, y);
                        elif edge[2] == "TOP_LEFT" and compEdge[2] == "BOTTOM_RIGHT":
                            newGraph = offsetFaces(g, x + 1, y + 1);
                        elif edge[2] == "BOTTOM_LEFT" and compEdge[2] == "TOP_RIGHT":
                            newGraph = offsetFaces(g, x, y - 1);
                        elif edge[2] == "LEFT" and compEdge[2] == "RIGHT":
                            newGraph = offsetFaces(g, x - 1, y);
                        elif edge[2] == "BOTTOM_RIGHT" and compEdge[2] == "TOP_LEFT":
                            newGraph = offsetFaces(g, x - 1, y - 1);
                        overlap = checkFaceOverlap(randomGraph, newGraph)
                        #print overlap
                        if overlap is False:
                            faceGraph = combineFaces(randomGraph, newGraph)
                            faceGraph = adjustForNegatives(faceGraph)
                            vertexGraph = makeVertexGraph(faceGraph)
                            superGraph = Graph(faceGraph, vertexGraph)
                            structures = assignMatching(superGraph)
                            _findRequiredEdges(structures)
                            #start new stuff
                            if len(structures) > 0:
                                #setup folder
                                folderName = "CombinedTemps"
                                if not os.path.exists(folderName):
                                    os.mkdir(folderName)
                                fileName = folderName + "/superGraph.txt"
                                f = open(folderName + "/superGraph" + str(superGraphNumber) + ".txt" ,'w')
                                f.write(str(superGraph) + '\n')
                                f.close()
                                Graph.comparison = 'clars'
                                structures.sort()
                                if not os.path.exists("CombinedGraphs"):
                                    os.mkdir("CombinedGraphs")
                                folderNameCG = "CombinedGraphs/superGraph" + str(superGraphNumber)
                                #setup folder
                                if not os.path.exists(folderNameCG):
                                    os.mkdir(folderNameCG)
                                superName = folderNameCG + "/superGraph" + str(superGraphNumber) + ".png"
                                saveSinglePNG(structures[0], superName)
                                addCombinationsPNG(randomGraph, newGraph,superGraph, superGraphNumber, deletedCount)
                                superGraphNumber += 1
        graphNumber += 1
        t2 = time.time()
        quit.update_idletasks()
    quit.destroy()
def resetCombinedGraphs(root,appInfo,submitGraph,graphNumberEntry,view):
    """Tear down the structure-viewer widgets so the combined-graph screen
    can be rebuilt. ``root`` and ``appInfo`` are accepted for interface
    symmetry with the other reset helpers."""
    for widget in (submitGraph, view, graphNumberEntry):
        widget.destroy()
def analyzeCombinedGraphsSetup(root,appInfo,path = "CombinedTemps",extension = ".txt"):
    """Ask the user which saved superGraph file (in ``path``) to analyze,
    then busy-wait on the Tk IntVar until a non-zero choice is entered.

    NOTE(review): the while loop spins at full speed calling
    update_idletasks(); it never terminates if the user enters 0.
    """
    runningApps = []
    root.geometry("600x400")
    graphNumber = IntVar()
    entry = Entry(root, textvariable = graphNumber)
    entry.pack()
    runningApps.append(entry)
    if not os.path.exists(path):
        os.mkdir(path)
    num_files = len([f for f in os.listdir(path)
        if os.path.isfile(os.path.join(path, f))])
    num_files -= 1  # convert count to the highest zero-based index
    #for i in range(0,num_files):
    #oldFilename = path + "/superGraph" + str(k+1) + extension
    #os.rename(oldFilename, path + "/superGraph" + str(i) + extension)
    label = Label(root, text="There are " + str(num_files) + " files in the directory. Which wuold you like to look at?")
    label.pack()
    runningApps.append(label)
    i = 0
    submit = Button(root, text ="Submit", command = lambda: checkAnalyze(root,appInfo,num_files,quit,entry,label,i,graphNumber,submit,runningApps))
    submit.pack(side = BOTTOM)
    while i == 0:
        i = graphNumber.get()
        submit.update_idletasks()
        entry.update_idletasks()
        label.update_idletasks()
def checkAnalyze(root,appInfo,num_files,quit,entry,label,i,graphNumber,submit,runningApps):
    """Validate the file index ``i`` chosen in analyzeCombinedGraphsSetup and
    hand off to analyzeCombinedGraphs, re-prompting while out of range."""
    submit.destroy()
    again = Label(root, text="That file does not exist, please try again.")
    submit = Button(root, text ="Submit", command = lambda: analyzeCombinedGraphs(root,appInfo,i,runningApps,submit,again,label,entry))
    submit.pack(side = BOTTOM)
    if i < -1 or i > num_files:
        # out of range: show the error label and fall into the retry loop
        again.pack()
    else:
        analyzeCombinedGraphs(root,appInfo,i,runningApps,submit,again,label,entry)
    while (i < -1 or i > num_files):
        submit.update_idletasks()
        entry.update_idletasks()
        label.update_idletasks()
        again.update_idletasks()
        i = graphNumber.get()
def analyzeCombinedGraphs(root,appInfo,i,runningApps,submit,again,label,entry):
    """Load superGraph ``i`` from CombinedTemps on a daemon worker thread,
    compute its Kekule structures, then prompt for a 'fries' or 'clars'
    ranking before handing off to userInputStructures."""
    submit.destroy()
    again.destroy()
    label.destroy()
    entry.destroy()
    selection = StringVar()
    choiceEntry = Entry(root, textvariable = selection)
    choice = selection.get()
    def callback(root,appInfo,i,choice,selection,choiceEntry,extension = ".txt",path = "CombinedTemps"):
        # worker-thread body: parse the saved graph and drive the choice prompt
        loading = Label(root, text="Analyzing graph data, this may take a few minutes.")
        loading.pack()
        fileName = "/superGraph" + str(i) + extension
        faceGraph = getInput(path + "/superGraph" + str(i) + extension)
        #check for connectedness
        connected = isConnected(faceGraphToInts(faceGraph))
        if connected == True:
            vertexGraph = makeVertexGraph(faceGraph)
            superGraph = Graph(faceGraph, vertexGraph)
            structures = assignMatching(superGraph)
            _findRequiredEdges(structures)
            loading.destroy()
            choiceEntry.pack()
            typeSelection = Label(root, text="Would you like to view the graphs ranked by Fries or Clars?")
            typeSelection.pack()
            submit = Button(root, text ="Submit", command = lambda: userInputStructures(root,appInfo,structures,choice,submit,typeSelection,choiceEntry))
            submit.pack(side = BOTTOM)
            # poll the entry until a valid ranking keyword is typed
            while True:
                choice = selection.get()
                flag = False
                exit = False
                if choice != 'fries' and choice != 'clars' and choice != "":
                    againSelection = Label(root, text="That file does not exist, please try again.")
                    againSelection.pack()
                    print "again"
                    flag = True
                    while choice != 'fries' and choice != 'clars':
                        submit.update_idletasks()
                        choiceEntry.update_idletasks()
                        typeSelection.update_idletasks()
                        againSelection.update_idletasks()
                        choice = selection.get()
                    if exit == True:
                        againSelection.destroy()
                        break
                submit.update_idletasks()
                choiceEntry.update_idletasks()
                typeSelection.update_idletasks()
    t = threading.Thread(target = lambda: callback(root,appInfo,i,choice,selection,choiceEntry))
    t.setDaemon(True)  # don't block interpreter exit on this worker
    appInfo.setThreads(t)
    t.start()
def userInputStructures(root,appInfo,structures,choice,submit,typeSelection,choiceEntry):
    """Let the user pick one of the computed Kekule structures by index and
    display it ranked by the previously chosen comparison ('clars'/'fries')."""
    structureNumber = IntVar()
    submit.destroy()
    typeSelection.destroy()
    choiceEntry.destroy()
    def displayCombinedClarFries(structureNumber,structures,choice):
        # sort ascending under the chosen comparison, then reverse so index 0
        # is the best structure before displaying the requested one
        structures.sort()
        if choice == 'clars':
            Graph.comparison = 'clars'
        elif choice == 'fries':
            Graph.comparison = 'fries'
        structures.reverse()
        structures[structureNumber].displayGraph()
    view = Label(root, text="There are " + str(len(structures)) + " distince Kekule structures avaiable. Which would you like to view?")
    view.pack()
    graphNumberEntry = Entry(root, textvariable = structureNumber)
    graphNumberEntry.pack()
    # NOTE(review): ``number`` is read once here, before the user types —
    # the submit callback always shows the structure at the initial value.
    number = structureNumber.get()
    submitGraph = Button(root, text ="Submit Structure", command = lambda: displayCombinedClarFries(number,structures,choice))
    submitGraph.pack(side = BOTTOM)
    def deleteB(button):
        button.destroy()
    reset = Button(root, text ="Quit", command = lambda: resetB(root,appInfo,submitGraph,graphNumberEntry,view))
    reset.pack(side = BOTTOM)
    def resetB(root,appInfo,submitGraph,graphNumberEntry,view):
        # remove the quit button itself, then the viewer widgets
        deleteB(reset)
        resetCombinedGraphs(root,appInfo,submitGraph,graphNumberEntry,view)
def addCombinationsPNG(graph,newGraph,superGraph,superGraphNumber,deletedCount):
    """Render the two source graphs of a combination. If both are Kekulean
    the PNGs join the superGraph's CombinedGraphs folder; otherwise the whole
    combination is moved into CombinedDeleted and its CombinedGraphs folder
    is removed.

    NOTE(review): the trailing superGraphNumber/deletedCount adjustments only
    rebind local names — the caller's counters are unaffected.
    """
    new1 = graph.getFaceGraph()
    new2 = newGraph.getFaceGraph()
    vertexG1 = makeVertexGraph(new1)
    vertexG2 = makeVertexGraph(new2)
    g1 = Graph(new1,vertexG1)
    g2 = Graph(new2,vertexG2)
    firstStructures = assignMatching(g1)
    secondStructures = assignMatching(g2)
    _findRequiredEdges(firstStructures)
    _findRequiredEdges(secondStructures)
    Graph.comparison = 'clars'
    firstStructures.sort()
    secondStructures.sort()
    if(isKekulean(g2) == True and isKekulean(g1) == True):
        folderNameCG = "CombinedGraphs/superGraph" + str(superGraphNumber)
        firstName = folderNameCG + "/Graph" + str(1) + ".png"
        secondName = folderNameCG + "/Graph" + str(2) + ".png"
        saveSinglePNG(firstStructures[0], firstName)
        saveSinglePNG(secondStructures[0], secondName)
    else:
        # at least one constituent is not Kekulean: archive under CombinedDeleted
        directoryName = "CombinedDeleted"
        if not os.path.exists(directoryName):
            os.mkdir(directoryName)
        folderName = "CombinedDeleted/superGraph" + str(superGraphNumber) + "_" + str(deletedCount)
        if not os.path.exists(folderName):
            os.mkdir(folderName)
        f = superGraph.getFaceGraph()
        v3 = makeVertexGraph(f)
        g3 = Graph(f,v3)
        superGraphStructure = assignMatching(g3)
        fileName = folderName + "/superDeleted" + str(superGraphNumber) + ".png"
        firstName = folderName + "/Graph" + str(1) + ".png"
        secondName = folderName + "/Graph" + str(2) + ".png"
        saveSinglePNG(superGraphStructure[0], fileName)
        saveSinglePNG(firstStructures[0], firstName)
        saveSinglePNG(secondStructures[0], secondName)
        shutil.rmtree("CombinedGraphs/superGraph" + str(superGraphNumber))
        superGraphNumber -= 1
        deletedCount += 1
def removeCombinedDuplicates(path = "CombinedTemps",extension = ".txt"):
    """Deduplicate saved superGraph files by comparing their int face graphs
    element-by-element; duplicates are deleted from CombinedGraphs and
    CombinedTemps and later files are renamed down to fill the gap.

    NOTE(review): ``num_files -= 7`` is an unexplained magic adjustment —
    presumably skipping non-graph files in the directory; confirm.
    """
    num_files = len([f for f in os.listdir(path)
        if os.path.isfile(os.path.join(path, f))])
    print num_files
    num_files -= 7
    print num_files
    masterFaceGraph = []
    for i in range(0,num_files):
        filename = "/superGraph" + str(i) + extension
        faceGraph = getInput(path + "/superGraph" + str(i) + extension)
        masterFaceGraph.append(faceGraphToInts(faceGraph))
    for f in range(0, len(masterFaceGraph)):
        for k in range(f+1, len(masterFaceGraph)):
            # flag stays True only if every row of f and k matches exactly
            flag = True
            for h in range(0,len(masterFaceGraph[f])):
                a = masterFaceGraph[f][h]
                b = masterFaceGraph[k][h]
                if len(a) != len(b):
                    flag = False
                    break
                for t in range(0,len(masterFaceGraph[f][h])):
                    c = a[t]
                    d = b[t]
                    if c != d:
                        flag = False
                        break
                if flag == False:
                    break
            if (flag == True):
                # k is a duplicate of f: drop it and shift later files down
                masterFaceGraph.remove(masterFaceGraph[k])
                shutil.rmtree("CombinedGraphs/superGraph" + str(k))
                os.remove("CombinedTemps/superGraph" + str(k) + extension)
                for i in range(k+1,num_files):
                    path1 = "CombinedGraphs"
                    path2 = "CombinedTemps"
                    oldFilename1 = path1 + "/superGraph" + str(i)
                    oldFilename2 = path2 + "/superGraph" + str(i) + extension
                    os.rename(oldFilename1 + "/superGraph" + str(i) + ".png", oldFilename1 + "/superGraph" + str(i-1) + ".png")
                    os.rename(oldFilename1, path1 + "/superGraph" + str(i-1))
                    os.rename(oldFilename2, path2 + "/superGraph" + str(i-1) + extension)
                num_files -= 1
# encoding: utf-8
# module gtk._gtk
# from /usr/lib/python2.7/dist-packages/gtk-2.0/gtk/_gtk.so
# by generator 1.135
# no doc
# imports
import atk as __atk
import gio as __gio
import gobject as __gobject
import gobject._gobject as __gobject__gobject
class IconTheme(__gobject__gobject.GObject):
    """
    Object GtkIconTheme
    Signals from GtkIconTheme:
      changed ()
    Signals from GObject:
      notify (GParam)
    """
    # All methods below are introspection-generated stubs; the real
    # implementations live in the gtk._gtk C extension module.
    def append_search_path(self, *args, **kwargs): # real signature unknown
        pass
    def choose_icon(self, *args, **kwargs): # real signature unknown
        pass
    @classmethod
    def do_changed(cls, *args, **kwargs): # real signature unknown
        pass
    def get_example_icon_name(self, *args, **kwargs): # real signature unknown
        pass
    def get_icon_sizes(self, *args, **kwargs): # real signature unknown
        pass
    def get_search_path(self, *args, **kwargs): # real signature unknown
        pass
    def has_icon(self, *args, **kwargs): # real signature unknown
        pass
    def list_contexts(self, *args, **kwargs): # real signature unknown
        pass
    def list_icons(self, *args, **kwargs): # real signature unknown
        pass
    def load_icon(self, *args, **kwargs): # real signature unknown
        pass
    def lookup_by_gicon(self, *args, **kwargs): # real signature unknown
        pass
    def lookup_icon(self, *args, **kwargs): # real signature unknown
        pass
    def prepend_search_path(self, *args, **kwargs): # real signature unknown
        pass
    def rescan_if_needed(self, *args, **kwargs): # real signature unknown
        pass
    def set_custom_theme(self, *args, **kwargs): # real signature unknown
        pass
    def set_screen(self, *args, **kwargs): # real signature unknown
        pass
    def set_search_path(self, *args, **kwargs): # real signature unknown
        pass
    def __init__(self, *args, **kwargs): # real signature unknown
        pass
    __gtype__ = None # (!) real value is ''
"""
Django settings for infoneige project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '4dhoa-&xuxj594s#ooy=z@gquzl199=-t8k-i2@348qjs-87_t'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'api',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'infoneige.urls'
WSGI_APPLICATION = 'infoneige.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
# NOTE(review): the dummy backend raises on any database access — this
# project apparently runs without a database; confirm before adding models.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.dummy',
    }
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
REST_FRAMEWORK = {
    'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.DjangoFilterBackend',)
}
#!/usr/bin/python -u
#
# Copyright (c) 2008--2010 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
# key=value formatted "config file" mapping script
#
# NOT TO BE USED DIRECTLY
# This is called by a script generated by the rhn-bootstrap utility.
#
# Specifically engineered with the RHN Update Agent configuration files
# in mind though it is relatively generic in nature.
#
# Author: Todd Warner <taw@redhat.com>
#
# $Id$
"""
Client configuration mapping script that writes to an RHN Update Agent-type
config file(s)
I.e., maps a file with RHN Update Agent-like key=value pairs e.g.,
serverURL=https://test-satellite.example.redhat.com/XMLRPC
noSSLServerURL=http://test-satellite.example.redhat.com/XMLRPC
enableProxy=0
sslCACert=/usr/share/rhn/RHN-ORG-TRUSTED-SSL-CERT
(NOTE - older RHN Satellite's and Proxy's used:
sslCACert=/usr/share/rhn/RHNS-CORP-CA-CERT)
And maps that to the client's configuration files.
-------------
To map new settings to a file that uses the format key=value, where
key[comment]=value is a comment line you do this (e.g., mapping
key=value pairs to /etc/sysconfig/rhn/up2date):
1. edit a file (e.g., 'client-config-overrides.txt'), inputing new key=value pairs
to replace in config file (e.g., /etc/sysconfig/rhn/up2date).
Specifically:
serverURL=https://test-satellite.example.redhat.com/XMLRPC
noSSLServerURL=http://test-satellite.example.redhat.com/XMLRPC
2. ./client_config_update.py /etc/sysconfig/rhn/up2date client-config-overrides.txt
That's all there is to it.
If you are running an older RHN Update Agent, the rhn_register file can be
mapped as well:
./client_config_update.py /etc/sysconfig/rhn/rhn_register client-config-overrides.txt
"""
import os
import sys
import string
import tempfile
DEFAULT_CLIENT_CONFIG_OVERRIDES = 'client-config-overrides.txt'
RHN_REGISTER = "/etc/sysconfig/rhn/rhn_register"
UP2DATE = "/etc/sysconfig/rhn/up2date"
def _parseConfigLine(line):
"""parse a line from a config file. Format can be either "key=value\n"
or "whatever text\n"
return either:
(key, value)
or
None
The '\n' is always stripped from the value.
"""
kv = string.split(line, '=')
if len(kv) < 2:
# not a setting
return None
if len(kv) > 2:
# '=' is part of the value, need to rejoin it.
kv = kv[0], string.join(kv[1:], '=')
if string.find(kv[0], '[comment]') > 0:
# comment; not a setting
return None
# it's a setting, trim the '\n' and return the (key, value) pair.
kv[0] = string.strip(kv[0])
if kv[1][-1] == '\n':
kv[1] = kv[1][:-1]
return tuple(kv)
def readConfigFile(configFile):
    "read in config file, return dictionary of key/value pairs"
    d = {}
    # close the handle even on error (the original leaked it)
    fin = open(configFile, 'rb')
    try:
        for line in fin.readlines():
            kv = _parseConfigLine(line)
            if kv:
                d[kv[0]] = kv[1]
    finally:
        fin.close()
    return d
def dumpConfigFile(configFile):
    "print out dictionary of key/value pairs from configFile"
    from pprint import pprint
    pprint(readConfigFile(configFile))
def mapNewSettings(configFile, dnew):
    """rewrite configFile, mapping in the key=value settings from dict dnew

    Non-setting lines and settings not present in dnew pass through
    unaltered.  configFile is only rewritten when a value actually changed.
    """
    # scratch copy of the rewritten file; TemporaryFile is unnamed, so the
    # original's absolute-path prefix= was pointless and has been dropped
    fo = tempfile.TemporaryFile(mode='r+b')
    fin = open(configFile, 'rb')
    changedYN = 0
    # write to temp file
    for line in fin.readlines():
        kv = _parseConfigLine(line)
        if not kv:
            # not a setting, write the unaltered line
            fo.write(line)
        elif kv[0] in dnew:
            # it's a mapped setting: rewrite only if the value differs
            if dnew[kv[0]] != kv[1]:
                fo.write('%s=%s\n' % (kv[0], dnew[kv[0]]))
                changedYN = 1
            else:
                fo.write(line)
        else:
            # it's a setting but not being mapped
            fo.write(line)
    fin.close()
    if changedYN:
        # write from temp file to configFile (and close both handles --
        # the original leaked fout and fo)
        fout = open(configFile, 'wb')
        fo.seek(0)
        fout.write(fo.read())
        fout.close()
        sys.stdout.write('* %s written\n' % configFile)
    fo.close()
def parseCommandline():
    """parse/process the commandline
    Commandline is dead simple for easiest portability.

    Returns (configFilename, newMappings) as absolute paths; exits for
    --help/--usage; raises TypeError/IOError on bad arguments.
    """
    # USAGE & HELP!
    if '--usage' in sys.argv or '-h' in sys.argv or '--help' in sys.argv:
        print """\
usage: python %s CONFIG_FILENAME NEW_MAPPINGS [options]
arguments:
  CONFIG_FILENAME config file to alter
  NEW_MAPPINGS file containing new settings that map onto the
               config file
options:
  -h, --help show this help message and exit
  --usage show brief usage summary
examples:
  python %s %s %s
  python %s %s %s
""" % (sys.argv[0],
       sys.argv[0], RHN_REGISTER, DEFAULT_CLIENT_CONFIG_OVERRIDES,
       sys.argv[0], UP2DATE, DEFAULT_CLIENT_CONFIG_OVERRIDES)
        sys.exit(0)

    # exactly two positional arguments are required
    if len(sys.argv) != 3:
        msg = "ERROR: exactly two arguments are required, see --help"
        raise TypeError(msg)

    configFilename = os.path.abspath(sys.argv[1])
    newMappings = os.path.abspath(sys.argv[2])

    # both files must already exist
    if not os.path.exists(configFilename):
        msg = ("ERROR: filename to alter (1st argument), does not exist:\n"
               " %s"
               % configFilename)
        raise IOError(msg)
    if not os.path.exists(newMappings):
        msg = ("ERROR: filename that contains the mappings (2nd argument), "
               "does not exist:\n"
               " %s" % newMappings)
        raise IOError(msg)
    return configFilename, newMappings
def main():
    "parse commandline, process config file key=value mappings"
    configFilename, newMappings = parseCommandline()
    #dumpConfigFile(configFilename)
    #mapNewSettings('test-up2date', readConfigFile(DEFAULT_CLIENT_CONFIG_OVERRIDES))
    mapNewSettings(configFilename, readConfigFile(newMappings))

# main() returns None, so the process exit status is 0 on success
if __name__ == '__main__':
    sys.exit(main() or 0)
| colloquium/spacewalk | spacewalk/certs-tools/client_config_update.py | Python | gpl-2.0 | 6,500 |
import os
import sqlite3
from time import sleep
from threading import get_ident
class FifoMemoryQueue(object):
    """In-memory FIFO queue of bytes items (list-backed; oldest at index 0)."""

    def __init__(self):
        self._db = []

    def push(self, item):
        """Append *item* to the tail; only bytes are accepted."""
        if not isinstance(item, bytes):
            raise TypeError('Unsupported type: {}'.format(type(item).__name__))
        self._db.append(item)

    def pop(self):
        """Remove and return the oldest item (FIFO order).

        BUGFIX: the original popped from the tail (LIFO behaviour in a FIFO
        queue) and discarded the popped item instead of returning it.
        """
        return self._db.pop(0)

    def pull(self, batch_size=10):
        """Return (without removing) up to *batch_size* oldest items."""
        return self._db[:batch_size]

    def close(self):
        """Discard all queued items."""
        self._db = []

    def __len__(self):
        return len(self._db)
class FifoDiskQueue(object):
    """SQLite-backed FIFO queue of bytes items.

    Items are stored verbatim as BLOBs.  One sqlite3 connection is kept per
    thread, since connections must not be shared across threads.
    """

    _create = (
        'CREATE TABLE IF NOT EXISTS fifoqueue '
        '('
        ' id INTEGER PRIMARY KEY AUTOINCREMENT,'
        ' item BLOB'
        ')'
    )
    _size = 'SELECT COUNT(*) FROM fifoqueue'
    # BUGFIX: _iterate and _peek referenced a nonexistent "queue" table
    _iterate = 'SELECT id, item FROM fifoqueue'
    _push = 'INSERT INTO fifoqueue (item) VALUES (?)'
    _write_lock = 'BEGIN IMMEDIATE'
    _pull = 'SELECT id, item FROM fifoqueue ORDER BY id LIMIT 1'
    _del = 'DELETE FROM fifoqueue WHERE id = ?'
    _peek = (
        'SELECT item FROM fifoqueue '
        'ORDER BY id LIMIT 1'
    )

    def __init__(self, path):
        self.path = os.path.abspath(path)
        self._connection_cache = {}
        with self._get_conn() as conn:
            conn.execute(self._create)

    def __len__(self):
        with self._get_conn() as conn:
            l = next(conn.execute(self._size))[0]
        return l

    def __iter__(self):
        # BUGFIX: the original passed each row through an undefined loads();
        # items are pushed as raw bytes, so yield the raw bytes back.
        with self._get_conn() as conn:
            for id, obj_buffer in conn.execute(self._iterate):
                yield obj_buffer

    def _get_conn(self):
        # one connection per thread, created lazily and cached
        id = get_ident()
        if id not in self._connection_cache:
            self._connection_cache[id] = sqlite3.Connection(self.path,
                                                            timeout=60)
            self._connection_cache[id].text_factory = bytes
        return self._connection_cache[id]

    def push(self, obj):
        """Append obj to the queue; str input is encoded as ASCII bytes."""
        if not isinstance(obj, bytes):
            obj_buffer = bytes(obj, 'ascii')
        else:
            obj_buffer = obj
        with self._get_conn() as conn:
            conn.execute(self._push, (obj_buffer,))

    def pull(self, sleep_wait=True):
        """Generator: yield the oldest item, then delete it when resumed.

        Polls with a growing backoff until an item is available; when
        sleep_wait is False an empty queue yields nothing instead.
        """
        keep_pooling = True
        wait = 0.1
        max_wait = 2
        tries = 0
        with self._get_conn() as conn:
            while keep_pooling:
                conn.execute(self._write_lock)
                cursor = conn.execute(self._pull)
                try:
                    id, obj_buffer = next(cursor)
                    # the row is deleted only after the consumer resumes us
                    yield obj_buffer
                    conn.execute(self._del, (id,))
                    keep_pooling = False
                except StopIteration:
                    conn.commit() # unlock the database
                    if not sleep_wait:
                        keep_pooling = False
                        continue
                    tries += 1
                    sleep(wait)
                    wait = min(max_wait, tries/10 + wait)

    def peek(self):
        """Return the oldest item without removing it, or None when empty."""
        # BUGFIX: the original used the long-removed cursor.next() and an
        # undefined loads(); return the stored bytes directly.
        with self._get_conn() as conn:
            cursor = conn.execute(self._peek)
            try:
                return next(cursor)[0]
            except StopIteration:
                return None

    def close(self):
        """Close every cached per-thread connection."""
        for k in list(self._connection_cache.keys()):
            self._connection_cache[k].close()
        self._connection_cache = {}
class LifoDiskQueue(FifoDiskQueue):
    """SQLite-backed LIFO (stack) variant of FifoDiskQueue."""

    _create = (
        'CREATE TABLE IF NOT EXISTS lifoqueue '
        '(id INTEGER PRIMARY KEY AUTOINCREMENT, item BLOB)'
    )
    # BUGFIX: the parent's pull() consumes via the _pull attribute; the
    # original named this query _pop, so pull() silently kept using the
    # inherited fifoqueue statement against a table that does not exist
    # in this database.
    _pull = 'SELECT id, item FROM lifoqueue ORDER BY id DESC LIMIT 1'
    _size = 'SELECT COUNT(*) FROM lifoqueue'
    _push = 'INSERT INTO lifoqueue (item) VALUES (?)'
    _del = 'DELETE FROM lifoqueue WHERE id = ?'
    # override iteration/peek statements as well, so they hit this table
    _iterate = 'SELECT id, item FROM lifoqueue'
    _peek = 'SELECT item FROM lifoqueue ORDER BY id DESC LIMIT 1'
| elaeon/sensors | queuelib.py | Python | gpl-2.0 | 3,810 |
# -*- coding: utf8 -*-
class Mv:
    """IRC "mv" command module: replies with a canned taunt chosen from the
    caller's nick (no argument) or from the first argument."""

    def command(self):
        # register the "mv" command and its handler with the bot core
        self.config = {
            "command": {
                "mv": {
                    "function": self.mvScreams,
                    "usage": "mv <user>",
                    "help": "Le clavier y colle!"
                }
            }}
        return self.config

    def mvScreams(self, Morphux, infos):
        print(infos)
        args = infos['args']
        nick = infos['nick']
        if not args:
            # no argument: the reply depends on who asked
            nick_replies = {
                "valouche": "Ta mere la chauve",
                "Ne02ptzero": "TU VAS LA CHIER TA CHIASSE?",
            }
            Morphux.sendMessage(nick_replies.get(nick, "SARACE BOULBA"), nick)
            return
        arg_replies = {
            "allow": "ALLOW?",
            "thunes": "Money equals power",
            "theodule": "THEODUUULE",
            "gg": "Le beau jeu, le beau geste, la lucidité !",
        }
        target = args[0]
        if target in arg_replies:
            Morphux.sendMessage(arg_replies[target], nick)
        elif Morphux.userExists(target) == 0:
            # unknown user: scold the requester's target
            Morphux.sendMessage("Respecte toi " + target + "!", nick)
| Morphux/IRC-Bot | modules/mv/mv.py | Python | gpl-2.0 | 1,360 |
from collections import OrderedDict
class SerializableModel(object):
    """Mixin providing dict conversion of an SQLAlchemy-mapped object."""

    def _asdict(self):
        """Return an OrderedDict of column-name -> value, in mapper order."""
        column_names = self.__mapper__.c.keys()
        return OrderedDict((name, getattr(self, name)) for name in column_names)
| taras1k/finance | server/utils.py | Python | gpl-2.0 | 237 |
# -*- coding: utf-8 -*-
from copy import deepcopy
from cfme.utils import conf
from cfme.utils.pretty import Pretty
from cfme.utils.update import Updateable
class FromConfigMixin(object):
    @staticmethod
    def rename_properties(creds):
        """Normalize credential dicts so both spellings share one name:
        password -> secret, username -> principal.

        Args:
            creds: dict

        Returns: a new, updated dict (the input is left untouched)
        """
        renamed = deepcopy(creds)
        for old_name, new_name in (('password', 'secret'), ('username', 'principal')):
            if old_name in renamed:
                renamed[new_name] = renamed.pop(old_name)
        return renamed

    @classmethod
    def from_config(cls, key):
        """Construct a credential object from credentials.eyaml entry *key*.

        Args:
            key: credential key

        Returns: credential object
        """
        return cls(**cls.rename_properties(conf.credentials[key]))

    @classmethod
    def from_plaintext(cls, creds):
        """Construct a credential object from a plaintext dict.

        Args:
            creds: dict

        Returns: credential object
        """
        return cls(**cls.rename_properties(creds))
class Credential(Pretty, Updateable, FromConfigMixin):
    """
    A class to fill in credentials

    Args:
        principal: user name
        secret: password
        verify_secret: password
        domain: concatenated with principal if defined
    """
    pretty_attrs = ['principal', 'secret']

    def __init__(self, principal, secret, verify_secret=None, domain=None,
                 tenant_id=None, subscription_id=None, **ignore):
        # **ignore swallows any extra keys coming from credential yaml entries
        self.principal = principal
        self.secret = secret
        self.verify_secret = verify_secret
        self.domain = domain
        self.tenant_id = tenant_id
        self.subscription_id = subscription_id

    def __getattribute__(self, attr):
        # Attribute interception:
        #  - verify_secret falls back to secret when it was not provided
        #  - principal is returned as DOMAIN\principal when a domain is set
        # object.__getattribute__ reads the raw stored values to avoid
        # recursing back into this method.
        if attr == 'verify_secret':
            if object.__getattribute__(self, 'verify_secret') is None:
                return object.__getattribute__(self, 'secret')
            else:
                return object.__getattribute__(self, 'verify_secret')

        elif attr == 'principal':
            domain = object.__getattribute__(self, 'domain')
            principal = object.__getattribute__(self, 'principal')
            return r'{}\{}'.format(domain, principal) if domain else principal
        else:
            return super(Credential, self).__getattribute__(attr)

    @property
    def view_value_mapping(self):
        """
        used for filling forms like add/edit provider form
        Returns: dict
        """
        return {
            'username': self.principal,
            'password': self.secret,
            'confirm_password': None
        }

    def __eq__(self, other):
        # None-safe; verify_secret takes part via the fallback above
        if other is None:
            return False
        return self.principal == other.principal and self.secret == other.secret and \
            self.verify_secret == other.verify_secret

    def __ne__(self, other):
        return not self.__eq__(other)
class EventsCredential(Credential):
    """Marker subclass of Credential; adds no behavior of its own."""
    pass

class CANDUCredential(Credential):
    """Marker subclass of Credential; adds no behavior of its own."""
    pass

class AzureCredential(Credential):
    """Marker subclass of Credential; adds no behavior of its own."""
    pass
class SSHCredential(Credential):
    """Credential whose secret is an SSH private key rather than a password."""

    @property
    def view_value_mapping(self):
        """Mapping used to fill add/edit provider forms.

        Returns: dict
        """
        return dict(
            username=self.principal,
            private_key=self.secret,
        )
class TokenCredential(Pretty, Updateable, FromConfigMixin):
    """
    A class to fill in credentials

    Args:
        token: identification token
        verify_token: token once more
    """
    pretty_attrs = ['token']

    def __init__(self, token, verify_token=None, **kwargs):
        self.token = token
        self.verify_token = verify_token
        # any extra keyword arguments become plain attributes
        for name, value in kwargs.items():
            setattr(self, name, value)

    def __getattribute__(self, attr):
        # Attribute interception: verify_token falls back to token when it
        # was not supplied.  object.__getattribute__ reads the raw stored
        # value to avoid recursing back into this method.
        if attr == 'verify_token':
            if object.__getattribute__(self, 'verify_token') is not None:
                return object.__getattribute__(self, 'verify_token')
            else:
                return object.__getattribute__(self, 'token')
        else:
            return super(TokenCredential, self).__getattribute__(attr)

    def __eq__(self, other):
        # NOTE(review): unlike Credential.__eq__, there is no None guard
        # here -- comparing against None raises AttributeError
        return self.token == other.token and self.verify_token == other.verify_token

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def view_value_mapping(self):
        """
        used for filling forms like add/edit provider form
        Returns: dict
        """
        return {
            'token': self.token,
            'verify_token': None
        }
class ServiceAccountCredential(Pretty, Updateable):
    """
    A class to fill in credentials

    Args:
        service_account: service account string
    """
    pretty_attrs = ['service_account']

    def __init__(self, service_account):
        # BUGFIX: the original built the super() proxy but never called
        # __init__ on it, silently skipping base-class initialization
        super(ServiceAccountCredential, self).__init__()
        self.service_account = service_account

    @property
    def view_value_mapping(self):
        """
        used for filling forms like add/edit provider form
        Returns: dict
        """
        return {
            'service_account': self.service_account
        }

    def __eq__(self, other):
        # None guard added for consistency with Credential.__eq__
        if other is None:
            return False
        return self.service_account == other.service_account

    def __ne__(self, other):
        return not self.__eq__(other)

    @classmethod
    def from_config(cls, key):
        """Build the JSON-ish service account blob from credentials yaml."""
        # TODO: refactor this. consider json.dumps
        creds = deepcopy(conf.credentials[key])
        service_data = creds['service_account']
        service_account = '''
  "type": "{type}",
  "project_id": "{project}",
  "private_key_id": "{private_key_id}",
  "private_key": "{private_key}",
  "client_email": "{email}",
  "client_id": "{client}",
  "auth_uri": "{auth}",
  "token_uri": "{token}",
  "auth_provider_x509_cert_url": "{auth_provider}",
  "client_x509_cert_url": "{cert_url}"
  '''.format(
            type=service_data.get('type'),
            project=service_data.get('project_id'),
            private_key_id=service_data.get('private_key_id'),
            private_key=service_data.get('private_key').replace('\n', '\\n'),
            email=service_data.get('client_email'),
            client=service_data.get('client_id'),
            auth=service_data.get('auth_uri'),
            token=service_data.get('token_uri'),
            auth_provider=service_data.get('auth_provider_x509_cert_url'),
            cert_url=service_data.get('client_x509_cert_url'))
        service_account = '{' + service_account + '}'
        return cls(service_account=service_account)
| anurag03/integration_tests | cfme/base/credential.py | Python | gpl-2.0 | 7,024 |
#!/usr/bin/env python
##################################################
## DEPENDENCIES
import sys
import os
import os.path
try:
import builtins as builtin
except ImportError:
import __builtin__ as builtin
from os.path import getmtime, exists
import time
import types
from Cheetah.Version import MinCompatibleVersion as RequiredCheetahVersion
from Cheetah.Version import MinCompatibleVersionTuple as RequiredCheetahVersionTuple
from Cheetah.Template import Template
from Cheetah.DummyTransaction import *
from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList
from Cheetah.CacheRegion import CacheRegion
import Cheetah.Filters as Filters
import Cheetah.ErrorCatchers as ErrorCatchers
##################################################
## MODULE CONSTANTS
VFFSL=valueFromFrameOrSearchList
VFSL=valueFromSearchList
VFN=valueForName
currentTime=time.time
__CHEETAH_version__ = '2.4.4'
__CHEETAH_versionTuple__ = (2, 4, 4, 'development', 0)
__CHEETAH_genTime__ = 1364979192.582168
__CHEETAH_genTimestamp__ = 'Wed Apr 3 17:53:12 2013'
__CHEETAH_src__ = '/home/fermi/Work/Model/tmsingle/openpli3.0/build-tmsingle/tmp/work/mips32el-oe-linux/enigma2-plugin-extensions-openwebif-0.1+git1+279a2577c3bc6defebd4bf9e61a046dcf7f37c01-r0.72/git/plugin/controllers/views/web/sleeptimer.tmpl'
__CHEETAH_srcLastModified__ = 'Wed Apr 3 17:10:17 2013'
__CHEETAH_docstring__ = 'Autogenerated by Cheetah: The Python-Powered Template Engine'
if __CHEETAH_versionTuple__ < RequiredCheetahVersionTuple:
raise AssertionError(
'This template was compiled with Cheetah version'
' %s. Templates compiled before version %s must be recompiled.'%(
__CHEETAH_version__, RequiredCheetahVersion))
##################################################
## CLASSES
class sleeptimer(Template):
    # NOTE: this class was auto-generated by Cheetah from the
    # web/sleeptimer.tmpl template -- do not hand-edit; regenerate instead.

    ##################################################
    ## CHEETAH GENERATED METHODS

    def __init__(self, *args, **KWs):
        super(sleeptimer, self).__init__(*args, **KWs)
        if not self._CHEETAH__instanceInitialized:
            # forward only the Cheetah-recognized keyword arguments
            cheetahKWArgs = {}
            allowedKWs = 'searchList namespaces filter filtersLib errorCatcher'.split()
            for k,v in KWs.items():
                if k in allowedKWs: cheetahKWArgs[k] = v
            self._initCheetahInstance(**cheetahKWArgs)

    def respond(self, trans=None):
        ## CHEETAH: main method generated for this template
        # renders the e2sleeptimer XML document into the transaction buffer
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter

        ########################################
        ## START - generated method body

        _orig_filter_51193055 = _filter
        filterName = u'WebSafe'
        if self._CHEETAH__filters.has_key("WebSafe"):
            _filter = self._CHEETAH__currentFilter = self._CHEETAH__filters[filterName]
        else:
            _filter = self._CHEETAH__currentFilter = \
                self._CHEETAH__filters[filterName] = getattr(self._CHEETAH__filtersLib, filterName)(self).filter
        write(u'''<?xml version="1.0" encoding="UTF-8"?>
<e2sleeptimer>
\t<e2enabled>''')
        _v = VFFSL(SL,"enabled",True) # u'$enabled' on line 4, col 13
        if _v is not None: write(_filter(_v, rawExpr=u'$enabled')) # from line 4, col 13.
        write(u'''</e2enabled>
\t<e2minutes>''')
        _v = VFFSL(SL,"minutes",True) # u'$minutes' on line 5, col 13
        if _v is not None: write(_filter(_v, rawExpr=u'$minutes')) # from line 5, col 13.
        write(u'''</e2minutes>
\t<e2action>''')
        _v = VFFSL(SL,"action",True) # u'$action' on line 6, col 12
        if _v is not None: write(_filter(_v, rawExpr=u'$action')) # from line 6, col 12.
        write(u'''</e2action>
\t<e2text>''')
        _v = VFFSL(SL,"message",True) # u'$message' on line 7, col 10
        if _v is not None: write(_filter(_v, rawExpr=u'$message')) # from line 7, col 10.
        write(u'''</e2text>
</e2sleeptimer>
''')

        _filter = self._CHEETAH__currentFilter = _orig_filter_51193055

        ########################################
        ## END - generated method body

        return _dummyTrans and trans.response().getvalue() or ""

    ##################################################
    ## CHEETAH GENERATED ATTRIBUTES

    _CHEETAH__instanceInitialized = False
    _CHEETAH_version = __CHEETAH_version__
    _CHEETAH_versionTuple = __CHEETAH_versionTuple__
    _CHEETAH_genTime = __CHEETAH_genTime__
    _CHEETAH_genTimestamp = __CHEETAH_genTimestamp__
    _CHEETAH_src = __CHEETAH_src__
    _CHEETAH_srcLastModified = __CHEETAH_srcLastModified__
    _mainCheetahMethod_for_sleeptimer= 'respond'
## END CLASS DEFINITION

# one-time plumbing injection: Cheetah adds its runtime support methods to
# the generated class the first time this module is imported
if not hasattr(sleeptimer, '_initCheetahAttributes'):
    templateAPIClass = getattr(sleeptimer, '_CHEETAH_templateClass', Template)
    templateAPIClass._addCheetahPlumbingCodeToClass(sleeptimer)

# CHEETAH was developed by Tavis Rudd and Mike Orr
# with code, advice and input from many other volunteers.
# For more information visit http://www.CheetahTemplate.org/

##################################################
## if run from command line:
if __name__ == '__main__':
    from Cheetah.TemplateCmdLineIface import CmdLineIface
    CmdLineIface(templateObj=sleeptimer()).run()
| pli3/Openwebif | plugin/controllers/views/web/sleeptimer.py | Python | gpl-2.0 | 5,627 |
import httplib
import shlex
from datetime import datetime
#from pytz import timezone
import csv
import sys
import math
from time import time
from multiprocessing import Process
#finland = timezone('Europe/Helsinki')
server_dns = "121.78.237.160" #"keti3.oktree.com"
port = 8080
def run(ids):
    """Worker entry point: dump every sensor in *ids* over one HTTP connection."""
    conn = httplib.HTTPConnection(server_dns, port)
    for sensor_id in ids:
        read_data(conn, sensor_id)
def read_data(conn,sensor):
    # Query the server's ascii endpoint for one sensor over the global
    # start_time/end_time window and write "timestamp,value" rows to
    # <output_folder><sensor>.csv.
    request = "/dashboard/query/?start="+start_time+"&end="+end_time+"&m=avg:"+sensor+"&ascii"
    print "http://121.78.237.160:8080/" + request
    conn.request("GET", request)
    response = conn.getresponse()
    data = response.read()
    #print data
    lines = data.split("\n")
    ofile= open(output_folder+sensor+".csv","w")
    try:
        for line in lines:
            parts = shlex.split(line)
            if len(parts) > 1:
                # parts[1] is a unix timestamp (fed to fromtimestamp);
                # parts[2] is presumably the sensor reading -- TODO confirm
                # against the server's ascii row format
                #print datetime.fromtimestamp(int(parts[1]),tz=finland).strftime('%Y-%m-%d %H:%M:%S'),parts[2]
                ofile.write("%s,%s\n"%(datetime.fromtimestamp(int(parts[1])).strftime('%Y-%m-%d %H:%M:%S'),parts[2]))
    except KeyboardInterrupt:
        exit()
    except:
        # NOTE(review): bare except silently drops any malformed row (and
        # any other error) without logging
        pass
    ofile.close()
def read_ids(filename):
    """Read sensor ids from a CSV file.

    Only rows with more than one column contribute; the id is the first
    cell with surrounding whitespace stripped.
    """
    with open(filename, "r") as handle:
        return [row[0].strip() for row in csv.reader(handle) if len(row) > 1]
program_start_time = time()

# hard-coded single sensor; originally the id list came from a csv argument
sensor_ids = ["GW2.HA50_MV10_KH_A"]#read_ids(sys.argv[1])
output_folder = 'csv/' #sys.argv[2] #data_day/
index = 0 # in case the script failed a certain sensor id, restart from that point sensor_ids.index("VAK1.SPR_PALO_H")
#print sensor_ids

# query window handed to read_data() for every sensor
start_time = "2013/11/01-00:00:00"
end_time = "2013/11/07-23:59:00"

n = 6 #no of threads
factor = int(len(sensor_ids)/n)
print factor,len(sensor_ids)

# split sensor_ids into n chunks, one worker process per chunk;
# the last chunk absorbs the remainder rows
all_processes = []
for i in range(n):
    if i == n-1:
        ids = sensor_ids[i*factor:]
    else:
        ids = sensor_ids[i*factor:(i+1)*factor]
    p = Process(target=run, args=(ids,))
    all_processes.append(p)
    p.start()

for p in all_processes:
    p.join()

print len(all_processes)
print "Run time (s): %f"%(time()-program_start_time)
#Multi processing: Run time (s): 6190.377293
#Serial processing: Run time (s): 25835.677658
| TinyOS-Camp/DDEA-DEV | [Python]Collection/query_and_dump_data.py | Python | gpl-2.0 | 2,400 |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
import sys
## Uncomment to run this script from an in-tree build (or adjust to the
## build directory) without installing the bindings.
#sys.path.append ('.')
#sys.path.append ('.libs')
import Hamlib
def StartUp():
    """Simple script to test the Hamlib.py module with Python2."""
    # Exercises the dummy rig backend end-to-end: config get/set, frequency,
    # mode, levels, channel info, morse, and the static geo helpers.
    print "%s: Python %s; %s\n" \
        % (sys.argv[0], sys.version.split()[0], Hamlib.cvar.hamlib_version)

    Hamlib.rig_set_debug(Hamlib.RIG_DEBUG_NONE)

    # Init RIG_MODEL_DUMMY
    my_rig = Hamlib.Rig(Hamlib.RIG_MODEL_DUMMY)
    my_rig.set_conf("rig_pathname", "/dev/Rig")
    my_rig.set_conf("retry", "5")
    my_rig.open()

    # 1073741944 is token value for "itu_region"
    # but using get_conf is much more convenient
    region = my_rig.get_conf(1073741944)
    rpath = my_rig.get_conf("rig_pathname")
    retry = my_rig.get_conf("retry")
    print "status(str):\t\t", Hamlib.rigerror(my_rig.error_status)
    print "get_conf:\t\tpath = %s, retry = %s, ITU region = %s" \
        % (rpath, retry, region)

    # frequency and mode round-trips against the dummy backend
    my_rig.set_freq(Hamlib.RIG_VFO_B, 5700000000)
    my_rig.set_vfo(Hamlib.RIG_VFO_B)
    print "freq:\t\t\t", my_rig.get_freq()
    my_rig.set_freq(Hamlib.RIG_VFO_A, 145550000)
    (mode, width) = my_rig.get_mode()
    print "mode:\t\t\t", Hamlib.rig_strrmode(mode), "\nbandwidth:\t\t", width
    my_rig.set_mode(Hamlib.RIG_MODE_CW)
    (mode, width) = my_rig.get_mode()
    print "mode:\t\t\t", Hamlib.rig_strrmode(mode), "\nbandwidth:\t\t", width

    print "ITU_region:\t\t", my_rig.state.itu_region
    print "Backend copyright:\t", my_rig.caps.copyright
    print "Model:\t\t\t", my_rig.caps.model_name
    print "Manufacturer:\t\t", my_rig.caps.mfg_name
    print "Backend version:\t", my_rig.caps.version
    print "Backend status:\t\t", Hamlib.rig_strstatus(my_rig.caps.status)
    print "Rig info:\t\t", my_rig.get_info()

    # levels can be addressed by name or by RIG_LEVEL_* constant
    my_rig.set_level("VOX", 1)
    print "VOX level:\t\t", my_rig.get_level_i("VOX")
    my_rig.set_level(Hamlib.RIG_LEVEL_VOX, 5)
    print "VOX level:\t\t", my_rig.get_level_i(Hamlib.RIG_LEVEL_VOX)
    af = 12.34
    print "Setting AF to %0.2f...." % (af)
    my_rig.set_level("AF", af)
    print "status:\t\t\t%s - %s" % (my_rig.error_status,
                                    Hamlib.rigerror(my_rig.error_status))
    print "AF level:\t\t%0.2f" % my_rig.get_level_f(Hamlib.RIG_LEVEL_AF)
    print "strength:\t\t", my_rig.get_level_i(Hamlib.RIG_LEVEL_STRENGTH)
    print "status:\t\t\t", my_rig.error_status
    print "status(str):\t\t", Hamlib.rigerror(my_rig.error_status)

    chan = Hamlib.channel(Hamlib.RIG_VFO_B)
    my_rig.get_channel(chan)
    print "get_channel status:\t", my_rig.error_status

    print "VFO:\t\t\t", Hamlib.rig_strvfo(chan.vfo), ", ", chan.freq
    print "Attenuators:\t\t", my_rig.caps.attenuator

    print "\nSending Morse, '73'"
    my_rig.send_morse(Hamlib.RIG_VFO_A, "73")

    my_rig.close ()

    # static helpers: locator <-> lon/lat, distance/azimuth, deg/min/sec
    print "\nSome static functions:"
    err, lon1, lat1 = Hamlib.locator2longlat("IN98XC")
    err, lon2, lat2 = Hamlib.locator2longlat("DM33DX")
    err, loc1 = Hamlib.longlat2locator(lon1, lat1, 3)
    err, loc2 = Hamlib.longlat2locator(lon2, lat2, 3)
    print "Loc1:\t\tIN98XC -> %9.4f, %9.4f -> %s" % (lon1, lat1, loc1)
    print "Loc2:\t\tDM33DX -> %9.4f, %9.4f -> %s" % (lon2, lat2, loc2)

    err, dist, az = Hamlib.qrb(lon1, lat1, lon2, lat2)
    longpath = Hamlib.distance_long_path(dist)
    print "Distance:\t%.3f km, azimuth %.2f, long path:\t%.3f km" \
        % (dist, az, longpath)

    # dec2dms expects values from 180 to -180
    # sw is 1 when deg is negative (west or south) as 0 cannot be signed
    err, deg1, mins1, sec1, sw1 = Hamlib.dec2dms(lon1)
    err, deg2, mins2, sec2, sw2 = Hamlib.dec2dms(lat1)
    lon3 = Hamlib.dms2dec(deg1, mins1, sec1, sw1)
    lat3 = Hamlib.dms2dec(deg2, mins2, sec2, sw2)
    print 'Longitude:\t%4.4f, %4d° %2d\' %2d" %1s\trecoded: %9.4f' \
        % (lon1, deg1, mins1, sec1, ('W' if sw1 else 'E'), lon3)
    print 'Latitude:\t%4.4f, %4d° %2d\' %2d" %1s\trecoded: %9.4f' \
        % (lat1, deg2, mins2, sec2, ('S' if sw2 else 'N'), lat3)

if __name__ == '__main__':
    StartUp()
| airween/hamlib | bindings/pytest.py | Python | gpl-2.0 | 4,198 |
# This software may be freely redistributed under the terms of the GNU
# general public license.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import os
import sys
import vserver
import logger
import personmanager
import slicetagmanager
sys.path.append("/usr/share/NodeManager/")
import tools
import bwlimit
class VServerManager():
    """Manage PlanetLab slices as Linux-VServer guests on this node."""

    def __startSlice__(self, slice):
        # boot the vserver guest backing the slice
        logger.log("slicemanager: %s: starting" % slice)
        q = vserver.VServer(slice)
        q.start()
        logger.log("slicemanager: %s: started" % slice)
        return True

    def __stopSlice__(self, slice):
        # halt the vserver guest backing the slice
        logger.log("slicemanager: %s: stoping" % slice)
        q = vserver.VServer(slice)
        q.stop()
        logger.log("slicemanager: %s: stoped" % slice)
        return True

    def __createSlice__(self, slice):
        # Sanity check
        try:
            vserver_instance = vserver.VServer(slice)
        except vserver.NoSuchVServer:
            pass
        else:
            logger.log("slicemanager: %s: Slice already exists" % slice)
            return False

        # FIXME: band-aid for MyPLC 4.x as it has no GetSliceFamily API call
        vref = "planetlab-f8-i386"

        # check the template exists -- there's probably a better way..
        if not os.path.isdir("/vservers/.vref/%s" % vref):
            logger.log ("slicemanager: %s: ERROR Could not create sliver - vreference image %s not found" % (slice, vref))
            return False

        # guess arch
        try:
            (x,y,arch) = vref.split("-")
            # mh, this of course applies when "vref" is e.g. "netflow"
            # and that"s not quite right
        except:
            arch="i386"

        def personality (arch):
            # 64-bit reference images need the linux64 personality
            personality = "linux32"
            if arch.find("64") >= 0:
                personality = "linux64"
            return personality

        logger.log("slicemanager: %s: creating" % slice)
        logger.log_call(["/bin/bash","-x","/usr/sbin/vuseradd", "-t", vref, slice])
        logger.log("slicemanager: %s: created" % slice)

        # export slicename to the slice in /etc/slicename
        file("/vservers/%s/etc/slicename" % slice, "w").write(slice)
        file("/vservers/%s/etc/slicefamily" % slice, "w").write(vref)

        # set personality: only if needed (if arch"s differ)
        if tools.root_context_arch() != arch:
            file("/etc/vservers/%s/personality" % slice, "w").write(personality(arch)+"\n")
            logger.log("slicemanager: %s: set personality to %s" % (slice, personality(arch)))

        return True

    def AddSliceToNode(self, slice, tags, keys):
        # create the guest, provision accounts/keys and slice tags, apply
        # bandwidth limits, then start it
        if self.__createSlice__(slice) == True:
            p = personmanager.PersonManager()
            p.AddPersonToSlice(slice, keys)
            s = slicetagmanager.SliceTagManager()
            for tag in tags:
                s.AddSliceTag(slice, tag["tagname"], tag["value"])
            bwlimit.set(bwlimit.get_xid(slice))
            self.__startSlice__(slice)
        else:
            return False
        return True

    def DeleteSliceFromNode(self, slice):
        # remove the guest via vuserdel
        # NOTE(review): the slice is not stopped first -- confirm vuserdel
        # handles a running guest
        logger.log_call(["/bin/bash", "-x", "/usr/sbin/vuserdel", slice])
        return True
| caglar10ur/func | func/minion/modules/nm/vservermanager.py | Python | gpl-2.0 | 3,354 |
# -*- coding: utf-8 -*-
""" ImageViewer.py: PyQt image viewer widget for a QPixmap in a QGraphicsView scene with mouse zooming and panning. """
import os.path
from PyQt5.QtCore import Qt, QRectF, pyqtSignal, QT_VERSION_STR
from PyQt5.QtGui import QImage, QPixmap, QPainterPath, QWheelEvent
from PyQt5.QtWidgets import QGraphicsView, QGraphicsScene, QFileDialog
__author__ = "Marcel Goldschen-Ohm <marcel.goldschen@gmail.com>"
__version__ = '0.9.0'
class ImageViewerQt(QGraphicsView):
""" PyQt image viewer widget for a QPixmap in a QGraphicsView scene with mouse zooming and panning.
Displays a QImage or QPixmap (QImage is internally converted to a QPixmap).
To display any other image format, you must first convert it to a QImage or QPixmap.
Some useful image format conversion utilities:
qimage2ndarray: NumPy ndarray <==> QImage (https://github.com/hmeine/qimage2ndarray)
ImageQt: PIL Image <==> QImage (https://github.com/python-pillow/Pillow/blob/master/PIL/ImageQt.py)
Mouse interaction:
Left mouse button drag: Pan image.
Right mouse button drag: Zoom box.
Right mouse button doubleclick: Zoom to show entire image.
"""
# Mouse button signals emit image scene (x, y) coordinates.
# !!! For image (row, column) matrix indexing, row = y and column = x.
leftMouseButtonPressed = pyqtSignal(float, float)
rightMouseButtonPressed = pyqtSignal(float, float)
leftMouseButtonReleased = pyqtSignal(float, float)
rightMouseButtonReleased = pyqtSignal(float, float)
leftMouseButtonDoubleClicked = pyqtSignal(float, float)
rightMouseButtonDoubleClicked = pyqtSignal(float, float)
def __init__(self):
    """Create an empty viewer: a QGraphicsScene with no pixmap, default
    aspect-ratio/scrollbar policies, and pan/zoom enabled."""
    QGraphicsView.__init__(self)

    # Image is displayed as a QPixmap in a QGraphicsScene attached to this QGraphicsView.
    self.scene = QGraphicsScene()
    self.setScene(self.scene)

    # Store a local handle to the scene's current image pixmap.
    self._pixmapHandle = None

    # Image aspect ratio mode.
    # !!! ONLY applies to full image. Aspect ratio is always ignored when zooming.
    #   Qt.IgnoreAspectRatio: Scale image to fit viewport.
    #   Qt.KeepAspectRatio: Scale image to fit inside viewport, preserving aspect ratio.
    #   Qt.KeepAspectRatioByExpanding: Scale image to fill the viewport, preserving aspect ratio.
    self.aspectRatioMode = Qt.KeepAspectRatio

    # Scroll bar behaviour.
    #   Qt.ScrollBarAlwaysOff: Never shows a scroll bar.
    #   Qt.ScrollBarAlwaysOn: Always shows a scroll bar.
    #   Qt.ScrollBarAsNeeded: Shows a scroll bar only when zoomed.
    self.setHorizontalScrollBarPolicy(Qt.ScrollBarAsNeeded)
    self.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)

    # Stack of QRectF zoom boxes in scene coordinates.
    self.zoomStack = []

    # Flags for enabling/disabling mouse interaction.
    self.canZoom = True
    self.canPan = True
def hasImage(self):
    """Report whether an image pixmap is currently present in the scene."""
    has_pixmap = self._pixmapHandle is not None
    return has_pixmap
def clearImage(self):
    """Remove the current image pixmap from the scene, if one exists."""
    if self._pixmapHandle is None:
        return
    self.scene.removeItem(self._pixmapHandle)
    self._pixmapHandle = None
def pixmap(self):
    """Return the scene's current image as a QPixmap, or None if no image is set.

    :rtype: QPixmap | None
    """
    handle = self._pixmapHandle
    return handle.pixmap() if handle is not None else None
def image(self):
    """Return the scene's current image as a QImage, or None if no image is set.

    :rtype: QImage | None
    """
    handle = self._pixmapHandle
    if handle is None:
        return None
    return handle.pixmap().toImage()
def setImage(self, image):
    """ Set the scene's current image pixmap to the input QImage or QPixmap.
    Raises a RuntimeError if the input image has type other than QImage or QPixmap.

    Subclasses of QImage/QPixmap are accepted too: the original used exact
    type() comparisons, which rejected perfectly valid subclass instances.

    :type image: QImage | QPixmap
    """
    if image is None:
        pixmap = QPixmap()
    elif isinstance(image, QPixmap):
        pixmap = image
    elif isinstance(image, QImage):
        pixmap = QPixmap.fromImage(image)
    else:
        raise RuntimeError("ImageViewer.setImage: Argument must be a QImage or QPixmap.")
    if self.hasImage():
        self._pixmapHandle.setPixmap(pixmap)
    else:
        self._pixmapHandle = self.scene.addPixmap(pixmap)
    self.setSceneRect(QRectF(pixmap.rect()))  # Set scene size to image size.
    # a new image invalidates any zoom history
    self.zoomStack = []
    self.updateViewer()
def loadImageFromFile(self, fileName=""):
    """ Load an image from file.
    Without any arguments, loadImageFromFile() will popup a file dialog to choose the image file.
    With a fileName argument, loadImageFromFile(fileName) will attempt to load the specified image file directly.
    """
    if len(fileName) == 0:
        # No name given: ask the user.  The Qt5 binding's getOpenFileName
        # returns a (fileName, selectedFilter) tuple, hence the unpack.
        # NOTE(review): Qt versions other than 4/5 leave fileName empty here.
        if QT_VERSION_STR[0] == '4':
            fileName = QFileDialog.getOpenFileName(self, "Open image file.")
        elif QT_VERSION_STR[0] == '5':
            fileName, dummy = QFileDialog.getOpenFileName(self, "Open image file.")
    # silently ignore a cancelled dialog or a non-existent path
    if len(fileName) and os.path.isfile(fileName):
        image = QImage(fileName)
        self.setImage(image)
def updateViewer(self):
""" Show current zoom (if showing entire image, apply current aspect ratio mode).
"""
if not self.hasImage():
return
if len(self.zoomStack) and self.sceneRect().contains(self.zoomStack[-1]):
#self.fitInView(self.zoomStack[-1], Qt.IgnoreAspectRatio) # Show zoomed rect (ignore aspect ratio).
self.fitInView(self.zoomStack[-1], self.aspectRatioMode) # Show zoomed rect (ignore aspect ratio).
else:
self.zoomStack = [] # Clear the zoom stack (in case we got here because of an invalid zoom).
self.fitInView(self.sceneRect(), self.aspectRatioMode) # Show entire image (use current aspect ratio mode).
    def resizeEvent(self, event):
        """ Maintain current zoom on resize.
        """
        # Re-fit the view so the visible region tracks the new widget size.
        self.updateViewer()
def mousePressEvent(self, event):
""" Start mouse pan or zoom mode.
"""
scenePos = self.mapToScene(event.pos())
if event.button() == Qt.LeftButton:
if self.canPan:
self.setDragMode(QGraphicsView.ScrollHandDrag)
self.leftMouseButtonPressed.emit(scenePos.x(), scenePos.y())
elif event.button() == Qt.RightButton:
if self.canZoom:
self.setDragMode(QGraphicsView.RubberBandDrag)
self.rightMouseButtonPressed.emit(scenePos.x(), scenePos.y())
QGraphicsView.mousePressEvent(self, event)
    def mouseReleaseEvent(self, event):
        """ Stop mouse pan or zoom mode (apply zoom if valid).
        """
        QGraphicsView.mouseReleaseEvent(self, event)
        scenePos = self.mapToScene(event.pos())
        if event.button() == Qt.LeftButton:
            self.setDragMode(QGraphicsView.NoDrag)
            self.leftMouseButtonReleased.emit(scenePos.x(), scenePos.y())
        elif event.button() == Qt.RightButton:
            if self.canZoom:
                #viewBBox = self.zoomStack[-1] if len(self.zoomStack) else self.sceneRect()
                viewBBox = self.sceneRect()
                # Clip the rubber-band selection to the scene bounds.
                selectionBBox = self.scene.selectionArea().boundingRect().intersected(viewBBox)
                self.scene.setSelectionArea(QPainterPath())  # Clear current selection area.
                # Only zoom when the selection is non-degenerate and actually
                # smaller than the full view.
                if selectionBBox.isValid() and (selectionBBox != viewBBox):
                    self.zoomStack.append(selectionBBox)
                    self.updateViewer()
            self.setDragMode(QGraphicsView.NoDrag)
            self.rightMouseButtonReleased.emit(scenePos.x(), scenePos.y())
def mouseDoubleClickEvent(self, event):
""" Show entire image.
"""
scenePos = self.mapToScene(event.pos())
if event.button() == Qt.LeftButton:
self.leftMouseButtonDoubleClicked.emit(scenePos.x(), scenePos.y())
elif event.button() == Qt.RightButton:
if self.canZoom:
self.fitZoom()
self.rightMouseButtonDoubleClicked.emit(scenePos.x(), scenePos.y())
QGraphicsView.mouseDoubleClickEvent(self, event)
def fitZoom(self):
# Clear zoom stack.
self.zoomStack = []
self.updateViewer()
def zoom( self, zoomIn, pos=None ):
if pos is None:
pos = self.sceneRect().center().toPoint()
rect = QRectF( self.mapToScene(self.viewport().geometry()).boundingRect() )
if zoomIn:
W = rect.width()/2
H = rect.height()/2
else:
W = rect.width()*2
H = rect.height()*2
rect.setWidth( W )
rect.setHeight( H )
rect.moveCenter( self.mapToScene( pos ) )
rect = rect.intersected( self.sceneRect() )
self.zoomStack.append(rect)
self.updateViewer()
def wheelEvent(self, event):
if not self.canZoom:
return
self.zoom( event.angleDelta().y()>0, event.pos() )
| gis-support/DIVI-QGIS-Plugin | widgets/ImageViewerQt.py | Python | gpl-2.0 | 9,390 |
## This file is part of conftron.
##
## Copyright (C) 2011 Matt Peddie <peddie@jobyenergy.com>
##
## This program is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## This program is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
## 02110-1301, USA.
import genconfig, baseio
from settings_templates import *
class LCMSettingField(baseio.TagInheritance):
    """One field of an LCM setting, validated against its declared bounds.

    Attributes come from the XML attribute hash given to __init__ plus
    anything inherited from the parent setting via TagInheritance
    (has_key/__getitem__ access is presumably provided there -- confirm
    in baseio).
    """
    # Tags every field must carry (enforced through _musthave below).
    required_tags = ['default', 'step', 'min', 'max']
    def __init__(self, hsh, parent):
        self.__dict__.update(hsh)
        self._inherit(parent)
        # 'absmax' is shorthand for a symmetric [-absmax, absmax] range.
        if self.has_key('absmax'):
            self.min = -float(self.absmax)
            self.max = float(self.absmax)
        self.parent = parent
        self.parentname = parent.name
        self._musthave(parent, parse_settings_noval)
        self.classname = parent.classname
        # Accumulate validation failures on the parent; a nonzero total
        # aborts code generation later.
        parent.die += self._filter()
    def field_setting(self):
        """Render this field through the settings-field code template."""
        return lcm_settings_field_template_mm % self
    def _filter(self):
        # Returns the number of validation errors found for this field.
        die = 0
        die += self._are_defaults_sane()
        return die
    def _are_defaults_sane(self):
        ## Default values outside the range given by the bounds
        ## don't make sense either.
        die = 0
        if (float(self['min']) > float(self['default'])
            or float(self['max']) < float(self['default'])):
            print parse_settings_badval % {"sp":'default',
                                           "f":self['name'],
                                           "s":self.parent['name'],
                                           "max":self['max'],
                                           "min":self['min'],
                                           "val":self['default']}
            die += 1
        # A step wider than the whole [min, max] range can never be applied.
        if float(self['step']) > (float(self['max']) - float(self['min'])):
            print parse_settings_badval % {"sp":'default',
                                           "f":self['name'],
                                           "s":self.parent['name'],
                                           "max":self['max'],
                                           "min":self['min'],
                                           "val":self['step']}
            die += 1
        return die
class LCMSetting(baseio.CHeader, baseio.LCMFile, baseio.CCode, baseio.TagInheritance, baseio.IncludePasting):
    """One settable LCM struct: parses its <member> fields and emits C code."""
    def __init__(self, s, parent):
        self.__dict__.update(s.attrib)
        self.classname = parent.name
        self._inherit(parent)
        self.lcm_folder = genconfig.lcm_folder
        self.die = 0  # incremented by fields that fail validation
        self.make_fields(s.getchildren())
        self.field_settings = "\n".join([f.field_setting() for f in self.fields])
    def make_fields(self, fields):
        # Flatten any <include> elements before building field objects.
        flattened = self.insert_includes(fields, ['member'])
        self.check_includes(flattened, ['member'])
        self.fields = [LCMSettingField(dict(f.attrib, **{'varname':self.varname}), self) for f in flattened]
    def to_settings_file(self):
        """Write the per-setting C implementation (init + setter functions)."""
        basename = "%(classname)s_%(type)s_%(varname)s" % self
        filename = genconfig.settings_folder + "/" + basename
        def sf(cf):
            cf.write("#include <lcm/lcm.h>\n" % self)
            cf.write("#include <math.h>\n" % self)
            cf.write("#include <%(classname)s_settings.h>\n" % self)
            # A custom LCM channel selects a different init template.
            if self.has_key('channel'):
                cf.write(lcm_settings_init_custom_chan_template % self)
            else:
                cf.write(lcm_settings_init_template % self)
            cf.write(lcm_settings_func_template % self)
        self.to_h(filename, sf)
    def to_settings_nop(self):
        """Write no-op stubs used when this setting is compiled out."""
        filename = genconfig.stubs_folder + "/%(classname)s_%(type)s_%(varname)s_setting_stub" % self
        def stub_f(cf):
            cf.write("#include <lcm_settings_auto.h>\n\n")
            cf.write(lcm_settings_init_nop_template % self)
            cf.write(lcm_settings_set_nop_template % self)
        self.to_c_no_h(filename, stub_f)
    def to_settings_prototype(self, cf):
        """Emit this setting's C function prototype into an open file."""
        cf.write(lcm_settings_prototype % self)
class Settings(baseio.CHeader,
               baseio.LCMFile,
               baseio.CCode,
               baseio.TagInheritance,
               baseio.Searchable,
               baseio.IncludePasting):
    """A named group of LCM settings parsed from XML; drives C code generation."""
    def __init__(self, name, children, class_structs, path, filename):
        self.name = name
        self.path = path
        self.file = filename
        self.classname = name
        self._filter_settings(children)
        self.class_struct_includes = self._class_struct_includes(class_structs)
    def merge(self, other):
        """Fold another Settings object's tags and settings into this one."""
        for k, v in other.__dict__.iteritems():
            if not k in genconfig.reserved_tag_names:
                try:
                    # Is it a method?
                    getattr(getattr(self, k), "__call__")
                except AttributeError:
                    # Nope.
                    self.__dict__[k] = other.__dict__[k]
        self.settings.extend(other.settings)
        return self
    def search(self, searchname):
        return self._search(self.settings, searchname)
    def codegen(self):
        """Emit every generated artifact for this settings class."""
        self.init_calls = "\n".join([lcm_settings_init_call_template % s for s in self.settings])
        self.null_calls = "\n".join([lcm_settings_init_null_template % s for s in self.settings])
        self.to_settings_h()
        self.settings_nops()
    def init_call(self):
        return " %(classname)s_settings_init(provider); \\\n" % self
    def check_call(self):
        return " %(classname)s_settings_check(); \\\n" % self
    def _filter_settings(self, structs):
        # Expand <include> elements, then build LCMSetting objects; any
        # accumulated validation errors abort code generation.
        die = 0
        flattened = self.insert_includes(structs, ['struct'])
        self.check_includes(flattened, ['struct'])
        outstructs = [LCMSetting(s, self) for s in flattened]
        die = sum([s.die for s in outstructs])
        if die:
            print "Lots of settings errors detected; cannot continue code generation."
            # NOTE(review): 'sys' is not in this module's visible imports --
            # confirm sys.exit resolves at runtime.
            sys.exit(1)
        self.settings = outstructs
    def settings_functions(self):
        for s in self.settings:
            s.to_settings_file()
    def settings_prototypes(self, cf):
        cf.write("/* Prototypes for all the functions defined in settings/ folder */\n")
        for s in self.settings:
            cf.write(lcm_settings_prototype % s)
            cf.write(lcm_settings_init_prototype % s)
    def settings_nops(self):
        for s in self.settings:
            s.to_settings_nop()
    def _class_struct_includes(self, structs):
        # Build the #include block for the struct headers this class uses.
        out = []
        formatstr = "#include \"%(lcm_folder)s/%(classname)s_%(type)s.h\""
        if (structs):
            out = [formatstr % s for s in structs]
        else:
            ## Orphaned settings module; include only types we know
            ## about
            out = [formatstr % s for s in self.settings]
        return "\n".join(out)
    def settings_includes(self, cf):
        cf.write(self.class_struct_includes)
    def to_settings_periodic(self):
        pass
    def to_settings_c(self):
        pass
    def to_settings_h(self):
        """Write the settings header: prototypes plus init/check macros."""
        self.settings_functions()
        def settings_f(cf):
            cf.write("#include \"%(classname)s_types.h\"\n\n" % self)
            cf.write("#include \"%(classname)s_telemetry.h\"\n\n" % self)
            cf.write("#ifdef __cplusplus\n")
            cf.write("extern \"C\"{\n")
            cf.write("#endif\n\n")
            self.settings_prototypes(cf)
            cf.write("\n#ifdef __cplusplus\n")
            cf.write("}\n")
            cf.write("#endif\n")
            # Make initialization macro
            cf.write(lcm_settings_init_class_template % self)
            cf.write(lcm_check_call_template % self);
        self.to_h(self.name + "_settings", settings_f)
| peddie/conftron | settings.py | Python | gpl-2.0 | 8,250 |
import sys
import os
def convbytes(bstr):
    """Decode little-endian bytes and return the integer as a string."""
    return str(buildint(bstr))
def buildint(bvals):
    """Interpret *bvals* as an unsigned little-endian integer (0 for b'')."""
    return int.from_bytes(bvals, 'little')
def buildstr(bvals):
    """Decode UTF-8 bytes to str, printing the raw bytes as a debug trace."""
    print(str(bvals))
    return bvals.decode("utf-8")
def getOutFile(infilename, outfiledir):
    """Build the output path: <outfiledir>parsed_<basename of infilename>.

    Only '/' is treated as a path separator, matching the original rfind
    behaviour.
    """
    basename = infilename.rpartition("/")[2]
    return outfiledir + "parsed_" + basename
infiledir = sys.argv[1]
outfiledir = sys.argv[2]
# Function names that mark the start of a new state record in the stream.
statenames = ["main", "doencode", "cksum", "compare_files", "treat_file", "make_ofname"]
# Binary record layout (ints are 4-byte little-endian, length-prefixed):
#   [len][state name] then [paircount] followed by repeated
#   (name, type, data) triples, each length-prefixed.
# NOTE(review): the three nested while loops all share 'bytes_read' as
# their loop condition, so an inner read of b'' (EOF) terminates every
# level -- preserved as-is.
for infilename in os.listdir(infiledir):
    outfilename = getOutFile(infilename, outfiledir)
    infilename = infiledir + infilename
    with open(infilename, 'rb') as infile:
        with open(outfilename, 'w') as ofile:
            bytes_read = infile.read(4)
            while bytes_read:
                ival = buildint(bytes_read)
                print ("read state name size int:"+str(ival))
                bytes_read = infile.read(ival)
                sval = buildstr(bytes_read)
                print("sval:"+sval)
                print("len sval:"+str(len(sval)))
                while bytes_read:
                    print ("read bytes:"+sval)
                    # Each state record starts a DROPSTATE line in the CSV.
                    ofile.write("DROPSTATE:"+sval)
                    ofile.write("\n")
                    bytes_read = infile.read(4)
                    ival = buildint(bytes_read)
                    print("read paircount:"+str(ival))
                    while bytes_read:
                        bytes_read = infile.read(4)
                        lival = buildint(bytes_read)
                        print("read size of name:"+str(lival))
                        bytes_read = infile.read(lival)
                        sval = buildstr(bytes_read)
                        print("read name:"+sval)
                        # A known state name here means a new record begins.
                        if(sval.startswith(tuple(statenames))):
                            print("BREAK!")
                            break
                        ofile.write(sval)
                        ofile.write(",")
                        bytes_read = infile.read(4)
                        lival = buildint(bytes_read)
                        print("read size of type:"+str(lival))
                        bytes_read = infile.read(lival)
                        sval = buildstr(bytes_read)
                        print("read type:"+sval)
                        ofile.write(sval)
                        ofile.write(",")
                        bytes_read = infile.read(4)
                        lival = buildint(bytes_read)
                        print("read size of data:"+str(lival))
                        bytes_read = infile.read(lival)
                        print("read data:"+str(bytes_read))
                        ofile.write(convbytes(bytes_read))
                        ofile.write("\n")
| blpete16/ProgPoint | parsetwo.py | Python | gpl-2.0 | 2,963 |
def merge(a, b):
    """Merge two sorted lists into one sorted list.

    Runs in O(len(a) + len(b)) and leaves the inputs unmodified. (The
    original popped from the fronts of the lists, which was O(n) per pop
    -- quadratic overall -- and destroyed the callers' lists.)

    for example:
    merge([2,3], [1,4])
    --> [1,2,3,4]
    """
    merged = []
    i = j = 0
    # Walk both lists with indices instead of list.pop(0).
    while i < len(a) and j < len(b):
        if a[i] < b[j]:
            merged.append(a[i])
            i += 1
        else:
            merged.append(b[j])
            j += 1
    # At most one of these appends anything: the unconsumed tail.
    merged.extend(a[i:])
    merged.extend(b[j:])
    return merged
def merge_sort(one_list):
    """Sort a list with recursive merge sort and return the sorted list.

    Fix: the original base case was ``len == 1``, so an empty list recursed
    forever (slicing [] keeps producing []) and raised RecursionError.
    """
    # divide: zero or one element is already sorted.
    if len(one_list) <= 1:
        return one_list
    middle = len(one_list) // 2
    left = merge_sort(one_list[:middle])
    right = merge_sort(one_list[middle:])
    # conquer
    return merge(left, right)
| humw/algorithms_in_python | merge_sort/merge_sort.py | Python | gpl-2.0 | 667 |
# Author: Martin Oehler <oehler@knopper.net> 2013
# License: GPL V2
from django.forms import ModelForm
from django.forms import Form
from django.forms import ModelChoiceField
from django.forms.widgets import RadioSelect
from django.forms.widgets import CheckboxSelectMultiple
from django.forms.widgets import TextInput
from django.forms.widgets import Textarea
from django.forms.widgets import DateInput
from django.contrib.admin import widgets
from linboweb.linboserver.models import partition
from linboweb.linboserver.models import partitionSelection
from linboweb.linboserver.models import os
from linboweb.linboserver.models import vm
from linboweb.linboserver.models import client
from linboweb.linboserver.models import clientGroup
from linboweb.linboserver.models import pxelinuxcfg
class partitionForm(ModelForm):
    """Form for creating/editing a ``partition`` model instance."""
    # NOTE(review): no Meta.fields/exclude -- relies on old Django's
    # implicit "all fields" behaviour.
    class Meta:
        model = partition
class partitionSelectionForm(ModelForm):
    """Form for creating/editing a ``partitionSelection`` model instance."""
    class Meta:
        model = partitionSelection
class osForm(ModelForm):
    """Form for an ``os`` instance; the partition selection is a required
    choice drawn from all existing partitionSelection rows."""
    partitionselection = ModelChoiceField(queryset=partitionSelection.objects.all())
    class Meta:
        model = os
class vmForm(ModelForm):
    """Form for creating/editing a ``vm`` model instance."""
    class Meta:
        model = vm
class clientForm(ModelForm):
    """Form for a ``client``; the PXE configuration is chosen from the
    existing pxelinuxcfg rows."""
    pxelinuxconfiguration = ModelChoiceField(queryset=pxelinuxcfg.objects.all())
    class Meta:
        model = client
class clientGroupForm(ModelForm):
    """Form for creating/editing a ``clientGroup`` model instance."""
    class Meta:
        model = clientGroup
class pxelinuxcfgForm(ModelForm):
    """Form for a ``pxelinuxcfg``; the configuration text is edited in a
    large textarea."""
    class Meta:
        model = pxelinuxcfg
        widgets = {
            'configuration': Textarea(attrs={'cols': 80, 'rows': 40}),
        }
| MartinOehler/LINBO-ServerGUI | linboweb/linboserver/forms.py | Python | gpl-2.0 | 1,587 |
# -*- coding: utf-8 -*-
import fauxfactory
import pytest
from cfme.configure.configuration import Category, Tag
from cfme.rest.gen_data import a_provider as _a_provider
from cfme.rest.gen_data import categories as _categories
from cfme.rest.gen_data import dialog as _dialog
from cfme.rest.gen_data import services as _services
from cfme.rest.gen_data import service_catalogs as _service_catalogs
from cfme.rest.gen_data import service_templates as _service_templates
from cfme.rest.gen_data import tenants as _tenants
from cfme.rest.gen_data import tags as _tags
from cfme.rest.gen_data import vm as _vm
from utils.update import update
from utils.wait import wait_for
from utils import error
@pytest.yield_fixture
def category():
    """Create a throwaway tag category; remove it after the test."""
    cat = Category(name=fauxfactory.gen_alphanumeric(8).lower(),
                   description=fauxfactory.gen_alphanumeric(32),
                   display_name=fauxfactory.gen_alphanumeric(32))
    cat.create()
    yield cat
    cat.delete()
@pytest.mark.tier(2)
def test_tag_crud(category):
    """Create a tag in the given category, rename it, then delete it."""
    new_tag = Tag(name=fauxfactory.gen_alphanumeric(8).lower(),
                  display_name=fauxfactory.gen_alphanumeric(32),
                  category=category)
    new_tag.create()
    with update(new_tag):
        new_tag.display_name = fauxfactory.gen_alphanumeric(32)
    new_tag.delete(cancel=False)
class TestTagsViaREST(object):
    """Tag CRUD and assignment scenarios exercised through the REST API."""
    @pytest.fixture(scope="function")
    def categories(self, request, rest_api, num=3):
        return _categories(request, rest_api, num)
    @pytest.fixture(scope="function")
    def tags(self, request, rest_api, categories):
        return _tags(request, rest_api, categories)
    @pytest.fixture(scope="module")
    def categories_mod(self, request, rest_api_modscope, num=3):
        return _categories(request, rest_api_modscope, num)
    @pytest.fixture(scope="module")
    def tags_mod(self, request, rest_api_modscope, categories_mod):
        return _tags(request, rest_api_modscope, categories_mod)
    @pytest.fixture(scope="module")
    def service_catalogs(self, request, rest_api_modscope):
        return _service_catalogs(request, rest_api_modscope)
    @pytest.fixture(scope="module")
    def tenants(self, request, rest_api_modscope):
        return _tenants(request, rest_api_modscope, num=1)
    @pytest.fixture(scope="module")
    def a_provider(self):
        return _a_provider()
    @pytest.fixture(scope="module")
    def dialog(self):
        return _dialog()
    @pytest.fixture(scope="module")
    def services(self, request, rest_api_modscope, a_provider, dialog, service_catalogs):
        # Best-effort: a failed service setup yields None so dependent
        # tests can skip rather than error in the fixture.
        try:
            return _services(request, rest_api_modscope, a_provider, dialog, service_catalogs)
        except:
            pass
    @pytest.fixture(scope="module")
    def service_templates(self, request, rest_api_modscope, dialog):
        return _service_templates(request, rest_api_modscope, dialog)
    @pytest.fixture(scope="module")
    def vm(self, request, a_provider, rest_api_modscope):
        return _vm(request, a_provider, rest_api_modscope)
    @pytest.mark.tier(2)
    def test_edit_tags(self, rest_api, tags):
        """Tests tags editing from collection.
        Metadata:
            test_flag: rest
        """
        new_names = []
        tags_data_edited = []
        for tag in tags:
            new_name = "test_tag_{}".format(fauxfactory.gen_alphanumeric().lower())
            new_names.append(new_name)
            tag.reload()
            tags_data_edited.append({
                "href": tag.href,
                "name": new_name,
            })
        # One bulk edit action renames every tag at once.
        rest_api.collections.tags.action.edit(*tags_data_edited)
        assert rest_api.response.status_code == 200
        for new_name in new_names:
            wait_for(
                lambda: rest_api.collections.tags.find_by(name=new_name),
                num_sec=180,
                delay=10,
            )
    @pytest.mark.tier(2)
    def test_edit_tag(self, rest_api, tags):
        """Tests tag editing from detail.
        Metadata:
            test_flag: rest
        """
        tag = rest_api.collections.tags.get(name=tags[0].name)
        new_name = 'test_tag_{}'.format(fauxfactory.gen_alphanumeric())
        tag.action.edit(name=new_name)
        assert rest_api.response.status_code == 200
        wait_for(
            lambda: rest_api.collections.tags.find_by(name=new_name),
            num_sec=180,
            delay=10,
        )
    @pytest.mark.tier(3)
    @pytest.mark.parametrize("method", ["post", "delete"], ids=["POST", "DELETE"])
    def test_delete_tags_from_detail(self, rest_api, tags, method):
        """Tests deleting tags from detail.
        Metadata:
            test_flag: rest
        """
        # DELETE returns 204 No Content; POST-style delete returns 200.
        status = 204 if method == "delete" else 200
        for tag in tags:
            tag.action.delete(force_method=method)
            assert rest_api.response.status_code == status
            # A second delete of the same tag must 404.
            with error.expected("ActiveRecord::RecordNotFound"):
                tag.action.delete(force_method=method)
            assert rest_api.response.status_code == 404
    @pytest.mark.tier(3)
    def test_delete_tags_from_collection(self, rest_api, tags):
        """Tests deleting tags from collection.
        Metadata:
            test_flag: rest
        """
        rest_api.collections.tags.action.delete(*tags)
        assert rest_api.response.status_code == 200
        with error.expected("ActiveRecord::RecordNotFound"):
            rest_api.collections.tags.action.delete(*tags)
        assert rest_api.response.status_code == 404
    @pytest.mark.tier(3)
    def test_create_tag_with_wrong_arguments(self, rest_api):
        """Tests creating tags with missing category "id", "href" or "name".
        Metadata:
            test_flag: rest
        """
        data = {
            "name": "test_tag_{}".format(fauxfactory.gen_alphanumeric().lower()),
            "description": "test_tag_{}".format(fauxfactory.gen_alphanumeric().lower())
        }
        with error.expected("BadRequestError: Category id, href or name needs to be specified"):
            rest_api.collections.tags.action.create(data)
        assert rest_api.response.status_code == 400
    @pytest.mark.tier(3)
    @pytest.mark.parametrize(
        "collection_name", ["clusters", "hosts", "data_stores", "providers", "resource_pools",
                            "services", "service_templates", "tenants", "vms"])
    def test_assign_and_unassign_tag(self, rest_api, tags_mod, a_provider, services,
                                     service_templates, tenants, vm, collection_name):
        """Tests assigning and unassigning tags.
        Metadata:
            test_flag: rest
        """
        collection = getattr(rest_api.collections, collection_name)
        collection.reload()
        if len(collection.all) == 0:
            pytest.skip("No available entity in {} to assign tag".format(collection_name))
        entity = collection[-1]
        tag = tags_mod[0]
        entity.tags.action.assign(tag)
        assert rest_api.response.status_code == 200
        entity.reload()
        assert tag.id in [t.id for t in entity.tags.all]
        entity.tags.action.unassign(tag)
        assert rest_api.response.status_code == 200
        entity.reload()
        assert tag.id not in [t.id for t in entity.tags.all]
| rananda/cfme_tests | cfme/tests/configure/test_tag.py | Python | gpl-2.0 | 7,263 |
# -*- coding: utf-8 -*-
"""
Implement a PGN reader/writer.
See http://www.chessclub.com/help/PGN-spec
"""
__author__ = 'Robert Ancell <bob27@users.sourceforge.net>'
__license__ = 'GNU General Public License Version 2'
__copyright__ = 'Copyright 2005-2006 Robert Ancell'
import re
"""
; Example PGN file
[Event "F/S Return Match"]
[Site "Belgrade, Serbia JUG"]
[Date "1992.11.04"]
[Round "29"]
[White "Fischer, Robert J."]
[Black "Spassky, Boris V."]
[Result "1/2-1/2"]
1. e4 e5 2. Nf3 Nc6 3. Bb5 a6 4. Ba4 Nf6 5. O-O Be7 6. Re1 b5 7. Bb3 d6 8. c3
O-O 9. h3 Nb8 10. d4 Nbd7 11. c4 c6 12. cxb5 axb5 13. Nc3 Bb7 14. Bg5 b4 15.
Nb1 h6 16. Bh4 c5 17. dxe5 Nxe4 18. Bxe7 Qxe7 19. exd6 Qf6 20. Nbd2 Nxd6 21.
Nc4 Nxc4 22. Bxc4 Nb6 23. Ne5 Rae8 24. Bxf7+ Rxf7 25. Nxf7 Rxe1+ 26. Qxe1 Kxf7
27. Qe3 Qg5 28. Qxg5 hxg5 29. b3 Ke6 30. a3 Kd6 31. axb4 cxb4 32. Ra5 Nd5 33.
f3 Bc8 34. Kf2 Bf5 35. Ra7 g6 36. Ra6+ Kc5 37. Ke1 Nf4 38. g3 Nxh3 39. Kd2 Kb5
40. Rd6 Kc5 41. Ra6 Nf2 42. g4 Bd3 43. Re6 1/2-1/2
"""
# The four legal game-result strings; the mapping is used as a set for
# fast "is this token a result?" lookups during parsing.
RESULT_INCOMPLETE = '*'
RESULT_WHITE_WIN = '1-0'
RESULT_BLACK_WIN = '0-1'
RESULT_DRAW = '1/2-1/2'
results = {RESULT_INCOMPLETE: RESULT_INCOMPLETE,
           RESULT_WHITE_WIN: RESULT_WHITE_WIN,
           RESULT_BLACK_WIN: RESULT_BLACK_WIN,
           RESULT_DRAW: RESULT_DRAW}
"""The required tags in a PGN file (the seven tag roster, STR)"""
TAG_EVENT = 'Event'
TAG_SITE = 'Site'
TAG_DATE = 'Date'
TAG_ROUND = 'Round'
TAG_WHITE = 'White'
TAG_BLACK = 'Black'
TAG_RESULT = 'Result'
"""Optional tags"""
TAG_TIME = 'Time'
TAG_FEN = 'FEN'
TAG_WHITE_TYPE = 'WhiteType'
TAG_WHITE_ELO = 'WhiteElo'
TAG_BLACK_TYPE = 'BlackType'
TAG_BLACK_ELO = 'BlackElo'
TAG_TIME_CONTROL = 'TimeControl'
TAG_TERMINATION = 'Termination'
# Values for the WhiteType and BlackType tag
PLAYER_HUMAN = 'human'
PLAYER_AI = 'program'
# Values for the Termination tag
TERMINATE_ABANDONED = 'abandoned'
TERMINATE_ADJUDICATION = 'adjudication'
TERMINATE_DEATH = 'death'
TERMINATE_EMERGENCY = 'emergency'
TERMINATE_NORMAL = 'normal'
TERMINATE_RULES_INFRACTION = 'rules infraction'
TERMINATE_TIME_FORFEIT = 'time forfeit'
TERMINATE_UNTERMINATED = 'unterminated'
# Comments are bounded by ';' to '\n' or '{' to '}'
# Lines starting with '%' are ignored and are used as an extension mechanism
# Strings are bounded by '"' and '"' and quotes inside the strings are escaped with '\"'
# Token types produced by PGNParser's tokenizer.
TOKEN_LINE_COMMENT = 'Line comment'
TOKEN_COMMENT = 'Comment'
TOKEN_ESCAPED = 'Escaped data'
TOKEN_PERIOD = 'Period'
TOKEN_TAG_START = 'Tag start'
TOKEN_TAG_END = 'Tag end'
TOKEN_STRING = 'String'
TOKEN_SYMBOL = 'Symbol'
TOKEN_RAV_START = 'RAV start'
TOKEN_RAV_END = 'RAV end'
TOKEN_XML = 'XML'
TOKEN_NAG = 'NAG'
class Error(Exception):
    """Exception raised for PGN parsing and validation problems."""
class PGNParser:
    """Streaming tokenizer/parser turning PGN text lines into PGNGame objects.

    Feed lines with parseLine(); finished games accumulate in self.games.
    Call complete() after the last line to flush a trailing game.
    """
    # Parser states (tag parsing is split into name/value/end sub-states).
    STATE_IDLE = 'IDLE'
    STATE_TAG_NAME = 'TAG_NAME'
    STATE_TAG_VALUE = 'TAG_VALUE'
    STATE_TAG_END = 'TAG_END'
    STATE_MOVETEXT = 'MOVETEXT'
    STATE_RAV = 'RAV'
    STATE_XML = 'XML'
    def __init__(self, maxGames = -1):
        expressions = ['\%.*', # Escaped data
                       ';.*', # Line comment
                       '\{', # Comment start
                       '\".*\"', # String
                       '[a-zA-Z0-9\*\_\+\#\=\:\-\/]+', # Symbol, '/' Not in spec but required from game draw and incomplete
                       '\[', # Tag start
                       '\]', # Tag end
                       '\$[0-9]{1,3}', # NAG
                       '\(', # RAV start
                       '\)', # RAV end
                       '\<.*\>', # XML
                       '[.]+'] # Period(s)
        self.regexp = re.compile('|'.join(expressions))
        # Map from a token's first character to its token type.
        self.tokens = {';': TOKEN_LINE_COMMENT,
                       '{': TOKEN_COMMENT,
                       '[': TOKEN_TAG_START,
                       ']': TOKEN_TAG_END,
                       '"': TOKEN_STRING,
                       '.': TOKEN_PERIOD,
                       '$': TOKEN_NAG,
                       '(': TOKEN_RAV_START,
                       ')': TOKEN_RAV_END,
                       '<': TOKEN_XML,
                       '%': TOKEN_ESCAPED}
        for c in '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ*':
            self.tokens[c] = TOKEN_SYMBOL
        self.games = []
        self.maxGames = maxGames
        self.comment = None
        self.state = self.STATE_IDLE
        self.game = PGNGame() # Game being assembled
        self.tagName = None # The tag being assembled
        self.tagValue = None
        self.prevTokenIsMoveNumber = False
        self.currentMoveNumber = 0
        self.ravDepth = 0 # The Recursive Annotation Variation (RAV) stack
    def _parseTokenMovetext(self, tokenType, data):
        """Handle one token while in the MOVETEXT state.

        Tokens inside a Recursive Annotation Variation (ravDepth > 0) are
        skipped except for tracking the RAV nesting itself.
        """
        if tokenType is TOKEN_SYMBOL:
            # Ignore tokens inside RAV
            if self.ravDepth != 0:
                return
            # See if this is a game terminate
            if results.has_key(data):
                self.games.append(self.game)
                self.game = PGNGame()
                self.prevTokenIsMoveNumber = False
                self.currentMoveNumber = 0
                self.ravDepth = 0
                self.state = self.STATE_IDLE
            # Otherwise it is a move number or a move
            else:
                try:
                    moveNumber = int(data)
                except ValueError:
                    move = PGNMove()
                    move.number = self.currentMoveNumber
                    move.move = data
                    self.game.addMove(move)
                    self.currentMoveNumber += 1
                else:
                    self.prevTokenIsMoveNumber = True
                    # Python 2 integer division: two half-moves per number.
                    expected = (self.currentMoveNumber / 2) + 1
                    if moveNumber != expected:
                        raise Error('Expected move number %i, got %i' % (expected, moveNumber))
        elif tokenType is TOKEN_NAG:
            # Ignore tokens inside RAV
            if self.ravDepth != 0:
                return
            move = self.game.getMove(self.currentMoveNumber)
            move.nag = data
        elif tokenType is TOKEN_PERIOD:
            # Ignore tokens inside RAV
            if self.ravDepth != 0:
                return
            if self.prevTokenIsMoveNumber is False:
                raise Error('Unexpected period')
        elif tokenType is TOKEN_RAV_START:
            self.ravDepth += 1
            # FIXME: Check for RAV errors
            return
        elif tokenType is TOKEN_RAV_END:
            self.ravDepth -= 1
            # FIXME: Check for RAV errors
            return
        else:
            raise Error('Unknown token %s in movetext' % (str(tokenType)))
    def parseToken(self, tokenType, data):
        """Dispatch one token through the parser's state machine."""
        # Ignore all comments at any time
        if tokenType is TOKEN_LINE_COMMENT or tokenType is TOKEN_COMMENT:
            if self.currentMoveNumber > 0:
                move = self.game.getMove(self.currentMoveNumber)
                move.comment = data[1:-1]
            return
        if self.state is self.STATE_MOVETEXT:
            self._parseTokenMovetext(tokenType, data)
        elif self.state is self.STATE_IDLE:
            if tokenType is TOKEN_TAG_START:
                self.state = self.STATE_TAG_NAME
                return
            elif tokenType is TOKEN_SYMBOL:
                # First movetext symbol: leave IDLE and re-dispatch it.
                self.whiteMove = None
                self.prevTokenIsMoveNumber = False
                self.ravDepth = 0
                self.state = self.STATE_MOVETEXT
                self._parseTokenMovetext(tokenType, data)
            elif tokenType is TOKEN_ESCAPED:
                pass
            else:
                raise Error('Unexpected token %s' % (str(tokenType)))
        if self.state is self.STATE_TAG_NAME:
            if tokenType is TOKEN_SYMBOL:
                self.tagName = data
                self.state = self.STATE_TAG_VALUE
            else:
                raise Error('Got a %s token, expecting a %s token' % (repr(tokenType), repr(TOKEN_SYMBOL)))
        elif self.state is self.STATE_TAG_VALUE:
            if tokenType is TOKEN_STRING:
                # Strip the surrounding quotes.
                self.tagValue = data[1:-1]
                self.state = self.STATE_TAG_END
            else:
                raise Error('Got a %s token, expecting a %s token' % (repr(tokenType), repr(TOKEN_STRING)))
        elif self.state is self.STATE_TAG_END:
            if tokenType is TOKEN_TAG_END:
                self.game.setTag(self.tagName, self.tagValue)
                self.state = self.STATE_IDLE
            else:
                raise Error('Got a %s token, expecting a %s token' % (repr(tokenType), repr(TOKEN_TAG_END)))
    def parseLine(self, line):
        """Parse a line from a PGN file.
        Return an array of tokens extracted from the line.
        """
        while len(line) > 0:
            # A multi-line '{...}' comment may still be open from a
            # previous line; keep accumulating until the closing '}'.
            if self.comment is not None:
                end = line.find('}')
                if end < 0:
                    self.comment += line
                    return True
                else:
                    comment = self.comment + line[:end]
                    self.comment = None
                    self.parseToken(TOKEN_COMMENT, comment)
                    line = line[end+1:]
                    continue
            for match in self.regexp.finditer(line):
                text = line[match.start():match.end()]
                if text == '{':
                    # Comment opens here; restart scanning after it.
                    line = line[match.end():]
                    self.comment = ''
                    break
                else:
                    try:
                        tokenType = self.tokens[text[0]]
                    except KeyError:
                        raise Error("Unknown token %s" % repr(text))
                    self.parseToken(tokenType, text)
            if self.comment is None:
                return True
    def complete(self):
        """Flush a trailing, unterminated game after the last input line."""
        if len(self.game.moves) > 0:
            self.games.append(self.game)
class PGNMove:
    """A single half-move: SAN text plus optional comment and NAG."""
    # SAN move text, e.g. 'Nf3'
    move = ''
    # Annotator comment attached to this move (without the braces)
    comment = ''
    # Numeric Annotation Glyph, e.g. '$1'
    nag = ''
class PGNGame:
    """A single PGN game: the tag pair section plus the movetext.

    Tags live in 'tagsByName'; the seven tag roster (STR) is always present
    and is written first when the game is serialised.
    """
    # The seven tag roster in the required order (REFERENCE)
    _strTags = [TAG_EVENT, TAG_SITE, TAG_DATE, TAG_ROUND, TAG_WHITE, TAG_BLACK, TAG_RESULT]
    def __init__(self):
        # Set the default STR tags
        self.tagsByName = {}
        self.setTag(TAG_EVENT, '?')
        self.setTag(TAG_SITE, '?')
        self.setTag(TAG_DATE, '????.??.??')
        self.setTag(TAG_ROUND, '?')
        self.setTag(TAG_WHITE, '?')
        self.setTag(TAG_BLACK, '?')
        self.setTag(TAG_RESULT, '*')
        self.moves = []
    def getLines(self):
        """Serialise the game to a list of PGN lines (tags, blank, movetext)."""
        lines = []
        # Get the names of the non STR tags
        otherTags = list(set(self.tagsByName).difference(self._strTags))
        # Write seven tag roster and the additional tags
        for name in self._strTags + otherTags:
            value = self.tagsByName[name]
            lines.append('['+ name + ' ' + self._makePGNString(value) + ']')
        lines.append('')
        # Insert numbers in-between moves
        tokens = []
        moveNumber = 0
        for m in self.moves:
            if moveNumber % 2 == 0:
                # '//' keeps integer semantics on both Python 2 and 3.
                tokens.append('%i.' % (moveNumber // 2 + 1))
            moveNumber += 1
            tokens.append(m.move)
            if m.nag != '':
                tokens.append(m.nag)
            if m.comment != '':
                tokens.append('{' + m.comment + '}')
        # Add result token to the end
        tokens.append(self.tagsByName[TAG_RESULT])
        # Print moves keeping the line length to less than 256 characters (PGN requirement)
        line = ''
        for t in tokens:
            if line == '':
                x = t
            else:
                x = ' ' + t
            if len(line) + len(x) >= 80: #>= 256:
                lines.append(line)
                line = t
            else:
                line += x
        lines.append(line)
        return lines
    def setTag(self, name, value):
        """Set a PGN tag.
        'name' is the name of the tag to set (string).
        'value' is the value to set the tag to (string) or None to delete the tag.
        Tag names cannot contain whitespace.
        Deleting a tag that does not exist has no effect.
        Deleting a STR tag or setting one to an invalid value will raise an Error exception.
        """
        if self._isValidTagName(name) is False:
            raise Error('%s is an invalid tag name' % str(name))
        # If no value delete
        if value is None:
            # If is a STR tag throw an exception.
            # (Bug fix: the original called has_key() on the _strTags list,
            # which is a dict-only method and raised AttributeError.)
            if name in self._strTags:
                raise Error('%s is a PGN STR tag and cannot be deleted' % name)
            # Delete the tag from the tag dictionary.
            # (Bug fix: the original popped from _strTags instead.)
            try:
                self.tagsByName.pop(name)
            except KeyError:
                pass
        # Otherwise set the tag to the new value
        else:
            # FIXME: Validate if it is a STR tag
            self.tagsByName[name] = value
    def getTag(self, name, default = None):
        """Get a PGN tag.
        'name' is the name of the tag to get (string).
        'default' is the default value to return if this valid is missing (user-defined).
        Return the value of the tag (string) or the default if the tag does not exist.
        """
        try:
            return self.tagsByName[name]
        except KeyError:
            return default
    def addMove(self, move):
        """Append a PGNMove to the movetext."""
        self.moves.append(move)
    def getMove(self, moveNumber):
        """Return the move for 1-based half-move number 'moveNumber'."""
        return self.moves[moveNumber - 1]
    def getMoves(self):
        return self.moves
    def __str__(self):
        # .items() works on both Python 2 and 3 (the original used
        # the Python 2-only iteritems()).
        string = ''
        for tag, value in self.tagsByName.items():
            string += '%s = %s\n' % (tag, value)
        string += '\n'
        number = 1
        moves = self.moves
        while len(moves) >= 2:
            string += '%3i. %s %s\n' % (number, moves[0].move, moves[1].move)
            number += 1
            moves = moves[2:]
        if len(moves) > 0:
            string += '%3i. %s\n' % (number, moves[0].move)
        return string
    # Private methods
    def _makePGNString(self, string):
        """Make a PGN string.
        'string' is the string to convert to a PGN string (string).
        All characters are valid and quotes are escaped with '\"'.
        Return the string surrounded with quotes. e.g. 'Mike "Dog" Smith' -> '"Mike \"Dog\" Smith"'
        """
        # Bug fix: str.replace returns a new string; the original discarded
        # the result, so quotes were never actually escaped.
        pgnString = string.replace('"', '\\"')
        return '"' + pgnString + '"'
    def _isValidTagName(self, name):
        """Valid a PGN tag name.
        'name' is the tag name to validate (string).
        Tags can only contain the characters, a-Z A-Z and _.
        Return True if this is a valid tag name otherwise return False.
        """
        if name is None or len(name) == 0:
            return False
        validCharacters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_'
        for c in name:
            if validCharacters.find(c) < 0:
                return False
        return True
class PGN:
    """Reader/writer for a whole PGN file: an ordered collection of PGNGame.

    NOTE: uses Python 2-only constructs ('file()' builtin,
    'except Error, e' syntax).
    """
    # List of PGNGame objects; populated in __init__.
    __games = None
    def __init__(self, fileName = None, maxGames = None):
        """Create a PGN reader/writer.
        'fileName' is the file to load the PGN from or None to generate an empty PGN file.
        'maxGames' is the maximum number of games to load from the file or None
        to load the whole file. (int, Only applicable if a filename is supplied).
        """
        self.__games = []
        if fileName is not None:
            self.__load(fileName, maxGames)
    def addGame(self):
        """Add a new game to the PGN file.
        Returns the PGNGame instance to modify"""
        game = PGNGame()
        self.__games.append(game)
        return game
    def getGame(self, index):
        """Get a game from the PGN file.
        'index' is the game index to get (integer, 0-N).
        Return this PGN game or raise an IndexError if no game with this index.
        """
        return self.__games[index]
    def save(self, fileName):
        """Save the PGN file.
        'fileName' is the name of the file to save to.
        """
        f = file(fileName, 'w')
        # FIXME: Set the newline characters to the correct type?
        # Sign it from glChess
        f.write('; PGN saved game generated by glChess\n')
        f.write('; http://glchess.sourceforge.net\n')
        for game in self.__games:
            f.write('\n')
            for line in game.getLines():
                f.write(line + '\n')
        f.close()
    def __len__(self):
        return len(self.__games)
    def __getitem__(self, index):
        return self.__games[index]
    def __getslice__(self, start, end):
        # Python 2 slicing protocol; Python 3 routes slices to __getitem__.
        return self.__games[start:end]
    # Private methods
    def __load(self, fileName, maxGames = None):
        """Parse 'fileName' into self.__games via PGNParser."""
        # Convert the file into PGN tokens
        f = file(fileName, 'r')
        p = PGNParser(maxGames)
        lineNumber = 0
        try:
            for line in f.readlines():
                lineNumber += 1
                p.parseLine(line)
            p.complete()
        except Error, e:
            raise Error('Error on line %d: %s' % (lineNumber, e.message))
        # Must be at least one game in the PGN file
        self.__games = p.games
        if len(self.__games) == 0:
            raise Error('Empty PGN file')
        # Tidy up
        f.close()
if __name__ == '__main__':
import time
def test(fileName, maxGames = None):
s = time.time()
p = PGN(fileName, maxGames)
print time.time() - s
number = 1
games = p[:]
#for game in games:
# print 'Game ' + str(number)
# print game
# print
# number += 1
#test('example.pgn')
#test('rav.pgn')
#test('wolga-benko.pgn', 3)
#test('wolga-benko.pgn')
#test('yahoo_chess.pgn')
#p = PGN('example.pgn')
#p.save('out.pgn')
| guillaumebel/nibbles-clutter | glchess/src/lib/chess/pgn.py | Python | gpl-2.0 | 18,947 |
"""
Utility classes and functions to handle Virtual Machine creation using qemu.
:copyright: 2008-2009 Red Hat Inc.
"""
import time
import os
import logging
import fcntl
import re
import commands
from autotest.client.shared import error
from autotest.client import utils
import utils_misc
import virt_vm
import test_setup
import storage
import qemu_monitor
import aexpect
import qemu_virtio_port
import remote
import data_dir
import utils_net
import qemu_devices
class QemuSegFaultError(virt_vm.VMError):
    """Raised when the qemu process crashes (core dump seen in its output)."""

    def __init__(self, crash_message):
        virt_vm.VMError.__init__(self, crash_message)
        self.crash_message = crash_message

    def __str__(self):
        return "Qemu crashed: %s" % self.crash_message
class VMMigrateProtoUnsupportedError(virt_vm.VMMigrateProtoUnknownError):
    """
    When QEMU tells us it doesn't know about a given migration protocol.

    This usually happens when we're testing older QEMU. It makes sense to
    skip the test in this situation.
    """

    def __init__(self, protocol, output):
        self.protocol = protocol
        self.output = output

    def __str__(self):
        message = ("QEMU reports it doesn't know migration protocol '%s'. "
                   "QEMU output: %s")
        return message % (self.protocol, self.output)
class KVMInternalError(virt_vm.VMError):
    """Raised when 'KVM internal error.' appears in the qemu process output."""
    pass
class ImageUnbootableError(virt_vm.VMError):
    """Raised when a VM fails to boot from its disk image."""

    def __init__(self, name):
        virt_vm.VMError.__init__(self, name)
        self.name = name

    def __str__(self):
        message = ("VM '%s' can't bootup from image,"
                   " check your boot disk image file." % self.name)
        return message
class VM(virt_vm.BaseVM):
"""
This class handles all basic VM operations.
"""
MIGRATION_PROTOS = ['rdma', 'x-rdma', 'tcp', 'unix', 'exec', 'fd']
# By default we inherit all timeouts from the base VM class except...
CLOSE_SESSION_TIMEOUT = 30
# Because we've seen qemu taking longer than 5 seconds to initialize
# itself completely, including creating the monitor sockets files
# which are used on create(), this timeout is considerably larger
# than the one on the base vm class
CREATE_TIMEOUT = 20
def __init__(self, name, params, root_dir, address_cache, state=None):
    """
    Initialize the object and set a few attributes.

    :param name: The name of the object
    :param params: A dict containing VM params
            (see method make_qemu_command for a full description)
    :param root_dir: Base directory for relative filenames
    :param address_cache: A dict that maps MAC addresses to IP addresses
    :param state: If provided, use this as self.__dict__
    """
    if state:
        # Adopt a previous VM's entire state wholesale (used by clone())
        self.__dict__ = state
    else:
        # Fresh VM: initialize all runtime bookkeeping attributes
        self.process = None
        self.serial_console = None
        self.redirs = {}
        self.spice_options = {}
        self.vnc_port = 5900
        self.monitors = []
        self.virtio_ports = []      # virtio_console / virtio_serialport
        self.pci_assignable = None
        self.uuid = None
        self.vcpu_threads = []
        self.vhost_threads = []
        self.devices = None
        self.name = name
        self.params = params
        self.root_dir = root_dir
        self.address_cache = address_cache
        self.index_in_use = {}
        # This usb_dev_dict member stores usb controller and device info,
        # It's dict, each key is an id of usb controller,
        # and key's value is a list, contains usb devices' ids which
        # attach to this controller.
        # A filled usb_dev_dict may look like:
        # { "usb1" : ["stg1", "stg2", "stg3", "stg4", "stg5", "stg6"],
        #   "usb2" : ["stg7", "stg8"],
        #   ...
        # }
        # This structure can used in usb hotplug/unplug test.
        self.usb_dev_dict = {}
        self.logs = {}
        self.logsessions = {}
    self.driver_type = 'qemu'
    self.params['driver_type_' + self.name] = self.driver_type
    # virtnet init depends on vm_type/driver_type being set w/in params
    super(VM, self).__init__(name, params)
    # un-overwrite instance attribute, virtnet db lookups depend on this
    if state:
        self.instance = state['instance']
    self.qemu_command = ''
    self.start_time = 0.0
def verify_alive(self):
    """
    Make sure the VM is alive and that the main monitor is responsive.

    :raise VMDeadError: If the VM is dead
    :raise: Various monitor exceptions if the monitor is unresponsive
    """
    # Run the qemu-output/crash checks before the generic liveness check;
    # each may raise its own, more specific, exception type.
    self.verify_disk_image_bootable()
    self.verify_userspace_crash()
    self.verify_kernel_crash()
    self.verify_illegal_instruction()
    self.verify_kvm_internal_error()
    try:
        virt_vm.BaseVM.verify_alive(self)
        if self.monitor:
            self.monitor.verify_responsive()
    except virt_vm.VMDeadError:
        # Re-raise with the qemu process' exit status and output attached
        raise virt_vm.VMDeadError(self.process.get_status(),
                                  self.process.get_output())
def is_alive(self):
    """
    Return True if the VM is alive and its monitor is responsive.
    """
    if self.is_dead():
        return False
    # Either no monitor is configured, or it must answer
    return not self.monitor or self.monitor.is_responsive()
def is_dead(self):
    """
    Return True if the qemu process is dead.
    """
    if not self.process:
        return True
    return not self.process.is_alive()
def is_paused(self):
    """
    Return True if the qemu process is paused ('stop'ed)
    """
    if self.is_dead():
        return False
    try:
        self.verify_status("paused")
    except virt_vm.VMStatusError:
        return False
    return True
def verify_status(self, status):
    """
    Check VM status

    :param status: Optional VM status, 'running' or 'paused'
    :raise VMStatusError: If the VM status is not same as parameter
    """
    status_matches = self.monitor.verify_status(status)
    if status_matches:
        return
    raise virt_vm.VMStatusError('Unexpected VM status: "%s"' %
                                self.monitor.get_status())
def verify_userspace_crash(self):
    """
    Verify if the userspace component (qemu) crashed.

    Raises QemuSegFaultError with the first '(core dumped)' line found
    in the qemu process output.
    """
    output = self.process.get_output()
    if "(core dumped)" not in output:
        return
    for line in output.splitlines():
        if "(core dumped)" in line:
            raise QemuSegFaultError(line)
def verify_kvm_internal_error(self):
    """
    Verify KVM internal error.

    Raises KVMInternalError with the qemu output starting at the
    'KVM internal error.' marker.
    """
    output = self.process.get_output()
    marker = "KVM internal error."
    if marker in output:
        raise KVMInternalError(output[output.find(marker):])
def verify_disk_image_bootable(self):
    """
    Raise ImageUnbootableError if the seabios log matches the
    'image_unbootable_pattern' param.  Only active when the
    'image_verify_bootable' param is 'yes'.
    """
    if self.params.get("image_verify_bootable") == "yes":
        pattern = self.params.get("image_unbootable_pattern")
        if not pattern:
            raise virt_vm.VMConfigMissingError(self.name,
                                               "image_unbootable_pattern")
        try:
            # KeyError below means no 'seabios' log session was set up,
            # in which case there is nothing to check.
            seabios_log = self.logsessions['seabios'].get_output()
            if re.search(pattern, seabios_log, re.S):
                logging.error("Can't boot guest from image.")
                # Set 'shutdown_command' to None to force autotest
                # shuts down guest with monitor.
                self.params["shutdown_command"] = None
                raise ImageUnbootableError(self.name)
        except KeyError:
            pass
def clone(self, name=None, params=None, root_dir=None, address_cache=None,
          copy_state=False):
    """
    Return a clone of the VM object with optionally modified parameters.

    The clone is initially not alive and needs to be started using create().
    Any parameters not passed to this function are copied from the source
    VM.

    :param name: Optional new VM name
    :param params: Optional new VM creation parameters
    :param root_dir: Optional new base directory for relative filenames
    :param address_cache: A dict that maps MAC addresses to IP addresses
    :param copy_state: If True, copy the original VM's state to the clone.
            Mainly useful for make_qemu_command().
    """
    # Fall back to this VM's own settings for anything not overridden
    if name is None:
        name = self.name
    if params is None:
        params = self.params.copy()
    if root_dir is None:
        root_dir = self.root_dir
    if address_cache is None:
        address_cache = self.address_cache
    state = self.__dict__.copy() if copy_state else None
    return VM(name, params, root_dir, address_cache, state)
def get_serial_console_filename(self, name=None):
    """
    Return the serial console filename.

    :param name: The serial port name.
    """
    if not name:
        return "/tmp/serial-%s" % self.instance
    return "/tmp/serial-%s-%s" % (name, self.instance)
def get_serial_console_filenames(self):
    """
    Return a list of all serial console filenames
    (as specified in the VM's params).
    """
    filenames = []
    for serial_name in self.params.objects("isa_serials"):
        filenames.append(self.get_serial_console_filename(serial_name))
    return filenames
def make_create_command(self, name=None, params=None, root_dir=None):
"""
Generate a qemu command line. All parameters are optional. If a
parameter is not supplied, the corresponding value stored in the
class attributes is used.
:param name: The name of the object
:param params: A dict containing VM params
:param root_dir: Base directory for relative filenames
:note: The params dict should contain:
mem -- memory size in MBs
cdrom -- ISO filename to use with the qemu -cdrom parameter
extra_params -- a string to append to the qemu command
shell_port -- port of the remote shell daemon on the guest
(SSH, Telnet or the home-made Remote Shell Server)
shell_client -- client program to use for connecting to the
remote shell daemon on the guest (ssh, telnet or nc)
x11_display -- if specified, the DISPLAY environment variable
will be be set to this value for the qemu process (useful for
SDL rendering)
images -- a list of image object names, separated by spaces
nics -- a list of NIC object names, separated by spaces
For each image in images:
drive_format -- string to pass as 'if' parameter for this
image (e.g. ide, scsi)
image_snapshot -- if yes, pass 'snapshot=on' to qemu for
this image
image_boot -- if yes, pass 'boot=on' to qemu for this image
In addition, all parameters required by get_image_filename.
For each NIC in nics:
nic_model -- string to pass as 'model' parameter for this
NIC (e.g. e1000)
"""
# Helper function for command line option wrappers
def _add_option(option, value, option_type=None, first=False):
"""
Add option to qemu parameters.
"""
if first:
fmt = " %s=%s"
else:
fmt = ",%s=%s"
if option_type is bool:
# Decode value for bool parameter (supports True, False, None)
if value in ['yes', 'on', True]:
return fmt % (option, "on")
elif value in ['no', 'off', False]:
return fmt % (option, "off")
elif value and isinstance(value, bool):
return fmt % (option, "on")
elif value and isinstance(value, str):
# "EMPTY_STRING" and "NULL_STRING" is used for testing illegal
# foramt of option.
# "EMPTY_STRING": set option as a empty string "".
# "NO_EQUAL_STRING": set option as a option string only,
# even without "=".
# (In most case, qemu-kvm should recognize it as "<null>")
if value == "NO_EQUAL_STRING":
return ",%s" % option
if value == "EMPTY_STRING":
value = '""'
return fmt % (option, str(value))
return ""
# Wrappers for all supported qemu command line parameters.
# This is meant to allow support for multiple qemu versions.
# Each of these functions receives the output of 'qemu -help'
# as a parameter, and should add the requested command line
# option accordingly.
def add_name(devices, name):
return " -name '%s'" % name
def process_sandbox(devices, action):
if action == "add":
if devices.has_option("sandbox"):
return " -sandbox on "
elif action == "rem":
if devices.has_option("sandbox"):
return " -sandbox off "
def add_human_monitor(devices, monitor_name, filename):
if not devices.has_option("chardev"):
return " -monitor unix:'%s',server,nowait" % filename
monitor_id = "hmp_id_%s" % monitor_name
cmd = " -chardev socket"
cmd += _add_option("id", monitor_id)
cmd += _add_option("path", filename)
cmd += _add_option("server", "NO_EQUAL_STRING")
cmd += _add_option("nowait", "NO_EQUAL_STRING")
cmd += " -mon chardev=%s" % monitor_id
cmd += _add_option("mode", "readline")
return cmd
def add_qmp_monitor(devices, monitor_name, filename):
if not devices.has_option("qmp"):
logging.warn("Fallback to human monitor since qmp is"
" unsupported")
return add_human_monitor(devices, monitor_name, filename)
if not devices.has_option("chardev"):
return " -qmp unix:'%s',server,nowait" % filename
monitor_id = "qmp_id_%s" % monitor_name
cmd = " -chardev socket"
cmd += _add_option("id", monitor_id)
cmd += _add_option("path", filename)
cmd += _add_option("server", "NO_EQUAL_STRING")
cmd += _add_option("nowait", "NO_EQUAL_STRING")
cmd += " -mon chardev=%s" % monitor_id
cmd += _add_option("mode", "control")
return cmd
def add_serial(devices, name, filename):
if not devices.has_option("chardev"):
return " -serial unix:'%s',server,nowait" % filename
serial_id = "serial_id_%s" % name
cmd = " -chardev socket"
cmd += _add_option("id", serial_id)
cmd += _add_option("path", filename)
cmd += _add_option("server", "NO_EQUAL_STRING")
cmd += _add_option("nowait", "NO_EQUAL_STRING")
cmd += " -device isa-serial"
cmd += _add_option("chardev", serial_id)
return cmd
def add_virtio_port(devices, name, bus, filename, porttype, chardev,
name_prefix=None, index=None, extra_params=""):
"""
Appends virtio_serialport or virtio_console device to cmdline.
:param help: qemu -h output
:param name: Name of the port
:param bus: Which virtio-serial-pci device use
:param filename: Path to chardev filename
:param porttype: Type of the port (*serialport, console)
:param chardev: Which chardev to use (*socket, spicevmc)
:param name_prefix: Custom name prefix (port index is appended)
:param index: Index of the current virtio_port
:param extra_params: Space sepparated chardev params
"""
cmd = ''
# host chardev
if chardev == "spicevmc": # SPICE
cmd += " -chardev spicevmc,id=dev%s,name=%s" % (name, name)
else: # SOCKET
cmd = (" -chardev socket,id=dev%s,path=%s,server,nowait"
% (name, filename))
# virtport device
if porttype in ("console", "virtio_console"):
cmd += " -device virtconsole"
else:
cmd += " -device virtserialport"
if name_prefix: # used by spiceagent (com.redhat.spice.*)
port_name = "%s%d" % (name_prefix, index)
else:
port_name = name
cmd += ",chardev=dev%s,name=%s,id=%s" % (name, port_name, name)
cmd += _add_option("bus", bus)
# Space sepparated chardev params
_params = ""
for parm in extra_params.split():
_params += ',' + parm
cmd += _params
return cmd
def add_log_seabios(devices):
if not devices.has_device("isa-debugcon"):
return ""
default_id = "seabioslog_id_%s" % self.instance
filename = "/tmp/seabios-%s" % self.instance
self.logs["seabios"] = filename
cmd = " -chardev socket"
cmd += _add_option("id", default_id)
cmd += _add_option("path", filename)
cmd += _add_option("server", "NO_EQUAL_STRING")
cmd += _add_option("nowait", "NO_EQUAL_STRING")
cmd += " -device isa-debugcon"
cmd += _add_option("chardev", default_id)
cmd += _add_option("iobase", "0x402")
return cmd
def add_log_anaconda(devices):
chardev_id = "anacondalog_chardev_%s" % self.instance
vioser_id = "anacondalog_vioser_%s" % self.instance
filename = "/tmp/anaconda-%s" % self.instance
self.logs["anaconda"] = filename
dev = qemu_devices.QCustomDevice('chardev', backend='backend')
dev.set_param('backend', 'socket')
dev.set_param('id', chardev_id)
dev.set_param("path", filename)
dev.set_param("server", 'NO_EQUAL_STRING')
dev.set_param("nowait", 'NO_EQUAL_STRING')
devices.insert(dev)
dev = QDevice('virtio-serial-pci', parent_bus={'type': 'pci'})
dev.set_param("id", vioser_id)
devices.insert(dev)
dev = QDevice('virtserialport')
dev.set_param("bus", "%s.0" % vioser_id)
dev.set_param("chardev", chardev_id)
dev.set_param("name", "org.fedoraproject.anaconda.log.0")
devices.insert(dev)
def add_mem(devices, mem):
return " -m %s" % mem
def add_smp(devices):
smp_str = " -smp %d" % self.cpuinfo.smp
smp_pattern = "smp n\[,maxcpus=cpus\].*"
if devices.has_option(smp_pattern):
smp_str += ",maxcpus=%d" % self.cpuinfo.maxcpus
smp_str += ",cores=%d" % self.cpuinfo.cores
smp_str += ",threads=%d" % self.cpuinfo.threads
smp_str += ",sockets=%d" % self.cpuinfo.sockets
return smp_str
def add_nic(devices, vlan, model=None, mac=None, device_id=None,
netdev_id=None, nic_extra_params=None, pci_addr=None,
bootindex=None, queues=1, vectors=None):
if model == 'none':
return
if devices.has_option("device"):
if not model:
model = "rtl8139"
elif model == "virtio":
model = "virtio-net-pci"
dev = QDevice(model)
dev.set_param('mac', mac)
# only pci domain=0,bus=0,function=0 is supported for now.
#
# libvirt gains the pci_slot, free_pci_addr here,
# value by parsing the xml file, i.e. counting all the
# pci devices and store the number.
if model != 'spapr-vlan':
dev.parent_bus = {'type': 'pci'}
dev.set_param('addr', pci_addr)
if nic_extra_params:
nic_extra_params = (_.split('=', 1) for _ in
nic_extra_params.split(',') if _)
for key, val in nic_extra_params:
dev.set_param(key, val)
dev.set_param("bootindex", bootindex)
else:
dev = qemu_devices.QCustomDevice('net', backend='type')
dev.set_param('type', 'nic')
dev.set_param('model', model)
dev.set_param('macaddr', mac, 'NEED_QUOTE')
dev.set_param('id', device_id, 'NEED_QUOTE')
if "virtio" in model:
if int(queues) > 1:
dev.set_param('mq', 'on')
if vectors:
dev.set_param('vectors', vectors)
if devices.has_option("netdev"):
dev.set_param('netdev', netdev_id)
else:
dev.set_param('vlan', vlan)
devices.insert(dev)
def add_net(devices, vlan, nettype, ifname=None, tftp=None,
bootfile=None, hostfwd=[], netdev_id=None,
netdev_extra_params=None, tapfds=None, script=None,
downscript=None, vhost=None, queues=None, vhostfds=None):
mode = None
if nettype in ['bridge', 'network', 'macvtap']:
mode = 'tap'
elif nettype == 'user':
mode = 'user'
else:
logging.warning("Unknown/unsupported nettype %s" % nettype)
return ''
if devices.has_option("netdev"):
cmd = " -netdev %s,id=%s" % (mode, netdev_id)
if vhost:
cmd += ",%s" % vhost
if vhostfds:
if (int(queues) > 1 and
'vhostfds=' in devices.get_help_text()):
cmd += ",vhostfds=%s" % vhostfds
else:
txt = ""
if int(queues) > 1:
txt = "qemu do not support vhost multiqueue,"
txt += " Fall back to single queue."
if 'vhostfd=' in devices.get_help_text():
cmd += ",vhostfd=%s" % vhostfds.split(":")[0]
else:
txt += " qemu do not support vhostfd."
if txt:
logging.warn(txt)
if netdev_extra_params:
cmd += "%s" % netdev_extra_params
else:
cmd = " -net %s,vlan=%d" % (mode, vlan)
if mode == "tap" and tapfds:
if (int(queues)) > 1 and ',fds=' in devices.get_help_text():
cmd += ",fds=%s" % tapfds
else:
cmd += ",fd=%s" % tapfds
elif mode == "user":
if tftp and "[,tftp=" in devices.get_help_text():
cmd += ",tftp='%s'" % tftp
if bootfile and "[,bootfile=" in devices.get_help_text():
cmd += ",bootfile='%s'" % bootfile
if "[,hostfwd=" in devices.get_help_text():
for host_port, guest_port in hostfwd:
cmd += ",hostfwd=tcp::%s-:%s" % (host_port, guest_port)
else:
if ifname:
cmd += ",ifname='%s'" % ifname
if script:
cmd += ",script='%s'" % script
cmd += ",downscript='%s'" % (downscript or "no")
return cmd
def add_floppy(devices, filename, index):
cmd_list = [" -fda '%s'", " -fdb '%s'"]
return cmd_list[index] % filename
def add_tftp(devices, filename):
# If the new syntax is supported, don't add -tftp
if "[,tftp=" in devices.get_help_text():
return ""
else:
return " -tftp '%s'" % filename
def add_bootp(devices, filename):
# If the new syntax is supported, don't add -bootp
if "[,bootfile=" in devices.get_help_text():
return ""
else:
return " -bootp '%s'" % filename
def add_tcp_redir(devices, host_port, guest_port):
# If the new syntax is supported, don't add -redir
if "[,hostfwd=" in devices.get_help_text():
return ""
else:
return " -redir tcp:%s::%s" % (host_port, guest_port)
def add_vnc(devices, vnc_port, vnc_password='no', extra_params=None):
vnc_cmd = " -vnc :%d" % (vnc_port - 5900)
if vnc_password == "yes":
vnc_cmd += ",password"
if extra_params:
vnc_cmd += ",%s" % extra_params
return vnc_cmd
def add_sdl(devices):
if devices.has_option("sdl"):
return " -sdl"
else:
return ""
def add_nographic(devices):
return " -nographic"
def add_uuid(devices, uuid):
return " -uuid '%s'" % uuid
def add_pcidevice(devices, host, params, device_driver="pci-assign"):
if device_driver == "pci-assign":
if (devices.has_device("pci-assign") or
devices.has_device("kvm-pci-assign")):
dev = QDevice(device_driver, parent_bus={'type': 'pci'})
else:
dev = qemu_devices.QCustomDevice('pcidevice',
parent_bus={'type': 'pci'})
else:
if devices.has_device(device_driver):
dev = QDevice(device_driver, parent_bus={'type': 'pci'})
else:
dev = qemu_devices.QCustomDevice('pcidevice',
parent_bus={'type': 'pci'})
help_cmd = "%s -device pci-assign,\\? 2>&1" % qemu_binary
pcidevice_help = utils.system_output(help_cmd)
dev.set_param('host', host)
dev.set_param('id', 'id_%s' % host.replace(":", "."))
fail_param = []
for param in params.get("pci-assign_params", "").split():
value = params.get(param)
if value:
if bool(re.search(param, pcidevice_help, re.M)):
dev.set_param(param, value)
else:
fail_param.append(param)
if fail_param:
msg = ("parameter %s is not support in device pci-assign."
" It only support following parameter:\n %s" %
(param, pcidevice_help))
logging.warn(msg)
devices.insert(dev)
def add_spice_rhel5(devices, spice_params, port_range=(3100, 3199)):
"""
processes spice parameters on rhel5 host.
:param spice_options - dict with spice keys/values
:param port_range - tuple with port range, default: (3000, 3199)
"""
if devices.has_option("spice"):
cmd = " -spice"
else:
return ""
spice_help = ""
if devices.has_option("spice-help"):
spice_help = commands.getoutput("%s -device \\?" % qemu_binary)
s_port = str(utils_misc.find_free_port(*port_range))
self.spice_options['spice_port'] = s_port
cmd += " port=%s" % s_port
for param in spice_params.split():
value = params.get(param)
if value:
if bool(re.search(param, spice_help, re.M)):
cmd += ",%s=%s" % (param, value)
else:
msg = ("parameter %s is not supported in spice. It "
"only supports the following parameters:\n %s"
% (param, spice_help))
logging.warn(msg)
else:
cmd += ",%s" % param
if devices.has_option("qxl"):
qxl_dev_nr = params.get("qxl_dev_nr", 1)
cmd += " -qxl %s" % qxl_dev_nr
return cmd
def add_spice(port_range=(3000, 3199),
tls_port_range=(3200, 3399)):
"""
processes spice parameters
:param port_range - tuple with port range, default: (3000, 3199)
:param tls_port_range - tuple with tls port range,
default: (3200, 3399)
"""
spice_opts = [] # will be used for ",".join()
tmp = None
def optget(opt):
"""a helper function"""
return self.spice_options.get(opt)
def set_yes_no_value(key, yes_value=None, no_value=None):
"""just a helper function"""
tmp = optget(key)
if tmp == "no" and no_value:
spice_opts.append(no_value)
elif tmp == "yes" and yes_value:
spice_opts.append(yes_value)
def set_value(opt_string, key, fallback=None):
"""just a helper function"""
tmp = optget(key)
if tmp:
spice_opts.append(opt_string % tmp)
elif fallback:
spice_opts.append(fallback)
s_port = str(utils_misc.find_free_port(*port_range))
if optget("spice_port") == "generate":
if not self.is_alive():
self.spice_options['spice_port'] = s_port
spice_opts.append("port=%s" % s_port)
self.spice_port = s_port
else:
self.spice_options['spice_port'] = self.spice_port
spice_opts.append("port=%s" % self.spice_port)
else:
set_value("port=%s", "spice_port")
set_value("password=%s", "spice_password", "disable-ticketing")
if optget("listening_addr") == "ipv4":
host_ip = utils_net.get_host_ip_address(self.params)
self.spice_options['listening_addr'] = "ipv4"
spice_opts.append("addr=%s" % host_ip)
#set_value("addr=%s", "listening_addr", )
elif optget("listening_addr") == "ipv6":
host_ip = utils_net.get_host_ip_address(self.params)
host_ip_ipv6 = utils_misc.convert_ipv4_to_ipv6(host_ip)
self.spice_options['listening_addr'] = "ipv6"
spice_opts.append("addr=%s" % host_ip_ipv6)
set_yes_no_value(
"disable_copy_paste", yes_value="disable-copy-paste")
set_value("addr=%s", "spice_addr")
if optget("spice_ssl") == "yes":
# SSL only part
t_port = str(utils_misc.find_free_port(*tls_port_range))
if optget("spice_tls_port") == "generate":
if not self.is_alive():
self.spice_options['spice_tls_port'] = t_port
spice_opts.append("tls-port=%s" % t_port)
self.spice_tls_port = t_port
else:
self.spice_options[
'spice_tls_port'] = self.spice_tls_port
spice_opts.append("tls-port=%s" % self.spice_tls_port)
else:
set_value("tls-port=%s", "spice_tls_port")
prefix = optget("spice_x509_prefix")
if ((prefix is None or not os.path.exists(prefix)) and
(optget("spice_gen_x509") == "yes")):
# Generate spice_x509_* is not always necessary,
# Regenerate them will make your existing VM
# not longer accessiable via encrypted spice.
c_subj = optget("spice_x509_cacert_subj")
s_subj = optget("spice_x509_server_subj")
# If CN is not specified, add IP of host
if s_subj[-3:] == "CN=":
s_subj += utils_net.get_host_ip_address(self.params)
passwd = optget("spice_x509_key_password")
secure = optget("spice_x509_secure")
utils_misc.create_x509_dir(prefix, c_subj, s_subj, passwd,
secure)
tmp = optget("spice_x509_dir")
if tmp == "yes":
spice_opts.append("x509-dir=%s" % (prefix))
elif tmp == "no":
cacert = optget("spice_x509_cacert_file")
server_key = optget("spice_x509_key_file")
server_cert = optget("spice_x509_cert_file")
keyfile_str = ("x509-key-file=%s,x509-cacert-file=%s,"
"x509-cert-file=%s" %
(os.path.join(prefix, server_key),
os.path.join(prefix, cacert),
os.path.join(prefix, server_cert)))
spice_opts.append(keyfile_str)
set_yes_no_value("spice_x509_secure",
yes_value="x509-key-password=%s" %
(optget("spice_x509_key_password")))
tmp = optget("spice_secure_channels")
if tmp:
for item in tmp.split(","):
spice_opts.append("tls-channel=%s" % (item.strip()))
# Less common options
set_value("seamless-migration=%s", "spice_seamless_migration")
set_value("image-compression=%s", "spice_image_compression")
set_value("jpeg-wan-compression=%s", "spice_jpeg_wan_compression")
set_value("zlib-glz-wan-compression=%s",
"spice_zlib_glz_wan_compression")
set_value("streaming-video=%s", "spice_streaming_video")
set_value("agent-mouse=%s", "spice_agent_mouse")
set_value("playback-compression=%s", "spice_playback_compression")
set_yes_no_value("spice_ipv4", yes_value="ipv4")
set_yes_no_value("spice_ipv6", yes_value="ipv6")
return " -spice %s" % (",".join(spice_opts))
def add_qxl(qxl_nr, qxl_memory=None):
"""
adds extra qxl devices + sets memory to -vga qxl and extra qxls
:param qxl_nr total number of qxl devices
:param qxl_memory sets memory to individual devices
"""
qxl_str = ""
vram_help = ""
if qxl_memory:
vram_help = "vram_size=%d" % qxl_memory
qxl_str += " -global qxl-vga.%s" % (vram_help)
for index in range(1, qxl_nr):
qxl_str += " -device qxl,id=video%d,%s"\
% (index, vram_help)
return qxl_str
def add_vga(vga):
return " -vga %s" % vga
def add_kernel(devices, filename):
return " -kernel '%s'" % filename
def add_initrd(devices, filename):
return " -initrd '%s'" % filename
def add_rtc(devices):
# Pay attention that rtc-td-hack is for early version
# if "rtc " in help:
if devices.has_option("rtc"):
cmd = " -rtc base=%s" % params.get("rtc_base", "utc")
cmd += _add_option("clock", params.get("rtc_clock", "host"))
cmd += _add_option("driftfix", params.get("rtc_drift", "none"))
return cmd
elif devices.has_option("rtc-td-hack"):
return " -rtc-td-hack"
else:
return ""
def add_kernel_cmdline(devices, cmdline):
return " -append '%s'" % cmdline
def add_testdev(devices, filename=None):
if devices.has_device("testdev"):
return (" -chardev file,id=testlog,path=%s"
" -device testdev,chardev=testlog" % filename)
elif devices.has_device("pc-testdev"):
return " -device pc-testdev"
else:
return ""
def add_isa_debug_exit(devices, iobase=0xf4, iosize=0x04):
if devices.has_device("isa-debug-exit"):
return (" -device isa-debug-exit,iobase=%s,iosize=%s" %
(iobase, iosize))
else:
return ""
def add_no_hpet(devices):
if devices.has_option("no-hpet"):
return " -no-hpet"
else:
return ""
def add_cpu_flags(devices, cpu_model, flags=None, vendor_id=None,
family=None):
if devices.has_option('cpu'):
cmd = " -cpu '%s'" % cpu_model
if vendor_id:
cmd += ",vendor=\"%s\"" % vendor_id
if flags:
if not flags.startswith(","):
cmd += ","
cmd += "%s" % flags
if family is not None:
cmd += ",family=%s" % family
return cmd
else:
return ""
def add_boot(devices, boot_order, boot_once, boot_menu):
cmd = " -boot"
pattern = "boot \[order=drives\]\[,once=drives\]\[,menu=on\|off\]"
if devices.has_option("boot \[a\|c\|d\|n\]"):
cmd += " %s" % boot_once
elif devices.has_option(pattern):
cmd += (" order=%s,once=%s,menu=%s" %
(boot_order, boot_once, boot_menu))
else:
cmd = ""
return cmd
def get_index(index):
while self.index_in_use.get(str(index)):
index += 1
return index
def add_sga(devices):
if not devices.has_option("device"):
return ""
return " -device sga"
def add_watchdog(devices, device_type=None, action="reset"):
watchdog_cmd = ""
if devices.has_option("watchdog"):
if device_type:
watchdog_cmd += " -watchdog %s" % device_type
watchdog_cmd += " -watchdog-action %s" % action
return watchdog_cmd
def add_option_rom(devices, opt_rom):
if not devices.has_option("option-rom"):
return ""
return " -option-rom %s" % opt_rom
def add_smartcard(devices, sc_chardev, sc_id):
sc_cmd = " -device usb-ccid,id=ccid0"
sc_cmd += " -chardev " + sc_chardev
sc_cmd += ",id=" + sc_id + ",name=smartcard"
sc_cmd += " -device ccid-card-passthru,chardev=" + sc_id
return sc_cmd
def add_numa_node(devices, mem=None, cpus=None, nodeid=None):
    """
    Build a '-numa node' option for the guest command line.

    :param devices: qemu device container, queried via has_option().
    :param mem: memory size assigned to the node, if any.
    :param cpus: cpu list/range string assigned to the node, if any.
    :param nodeid: explicit numa node id, if any.
    :return: command-line fragment, or "" when -numa is unsupported.
    """
    if not devices.has_option("numa"):
        return ""
    cmd = " -numa node"
    # append only the sub-options the caller actually supplied
    for key, value in (("mem", mem), ("cpus", cpus), ("nodeid", nodeid)):
        if value is not None:
            cmd += ",%s=%s" % (key, value)
    return cmd
# End of command line option wrappers
# If nothing changed and devices exists, return imediatelly
if (name is None and params is None and root_dir is None
and self.devices is not None):
return self.devices
if name is None:
name = self.name
if params is None:
params = self.params
if root_dir is None:
root_dir = self.root_dir
have_ahci = False
have_virtio_scsi = False
virtio_scsi_pcis = []
# init value by default.
# PCI addr 0,1,2 are taken by PCI/ISA/IDE bridge and the GPU.
self.pci_addr_list = [0, 1, 2]
# Clone this VM using the new params
vm = self.clone(name, params, root_dir, copy_state=True)
# global counters
ide_bus = 0
ide_unit = 0
vdisk = 0
scsi_disk = 0
global_image_bootindex = 0
if params.get("kernel"):
global_image_bootindex = 1
qemu_binary = utils_misc.get_qemu_binary(params)
self.qemu_binary = qemu_binary
support_cpu_model = commands.getoutput("%s -cpu \\?" % qemu_binary)
index_global = 0
# init the dict index_in_use
for key in params.keys():
if 'drive_index' in key:
self.index_in_use[params.get(key)] = True
cmd = ""
# Enable the use of glibc's malloc_perturb feature
if params.get("malloc_perturb", "no") == "yes":
cmd += "MALLOC_PERTURB_=1 "
# Set the X11 display parameter if requested
if params.get("x11_display"):
cmd += "DISPLAY=%s " % params.get("x11_display")
if params.get("qemu_audio_drv"):
cmd += "QEMU_AUDIO_DRV=%s " % params.get("qemu_audio_drv")
# Add command prefix for qemu-kvm. like taskset, valgrind and so on
if params.get("qemu_command_prefix"):
qemu_command_prefix = params.get("qemu_command_prefix")
cmd += "%s " % qemu_command_prefix
# Add numa memory cmd to pin guest memory to numa node
if params.get("numa_node"):
numa_node = int(params.get("numa_node"))
if numa_node < 0:
p = utils_misc.NumaNode(numa_node)
n = int(utils_misc.get_node_count()) + numa_node
cmd += "numactl -m %s " % n
else:
n = numa_node - 1
cmd += "numactl -m %s " % n
# Start constructing devices representation
devices = qemu_devices.DevContainer(qemu_binary, self.name,
params.get('strict_mode'),
params.get(
'workaround_qemu_qmp_crash'),
params.get('allow_hotplugged_vm'))
StrDev = qemu_devices.QStringDevice
QDevice = qemu_devices.QDevice
devices.insert(StrDev('PREFIX', cmdline=cmd))
# Add the qemu binary
devices.insert(StrDev('qemu', cmdline=qemu_binary))
devices.insert(StrDev('-S', cmdline="-S"))
# Add the VM's name
devices.insert(StrDev('vmname', cmdline=add_name(devices, name)))
if params.get("qemu_sandbox", "on") == "on":
devices.insert(StrDev('sandbox', cmdline=process_sandbox(devices, "add")))
elif params.get("sandbox", "off") == "off":
devices.insert(StrDev('qemu_sandbox', cmdline=process_sandbox(devices, "rem")))
devs = devices.machine_by_params(params)
for dev in devs:
devices.insert(dev)
# no automagic devices please
defaults = params.get("defaults", "no")
if devices.has_option("nodefaults") and defaults != "yes":
devices.insert(StrDev('nodefaults', cmdline=" -nodefaults"))
vga = params.get("vga")
if vga:
if vga != 'none':
devices.insert(StrDev('VGA-%s' % vga, {'addr': 2},
cmdline=add_vga(vga),
parent_bus={'type': 'pci'}))
else:
devices.insert(StrDev('VGA-none', cmdline=add_vga(vga)))
if vga == "qxl":
qxl_dev_memory = int(params.get("qxl_dev_memory", 0))
qxl_dev_nr = int(params.get("qxl_dev_nr", 1))
devices.insert(StrDev('qxl',
cmdline=add_qxl(qxl_dev_nr, qxl_dev_memory)))
elif params.get('defaults', 'no') != 'no': # by default add cirrus
devices.insert(StrDev('VGA-cirrus', {'addr': 2},
cmdline=add_vga(vga),
parent_bus={'type': 'pci'}))
# When old scsi fmt is used, new device with lowest pci_addr is created
devices.hook_fill_scsi_hbas(params)
# -soundhw addresses are always the lowest after scsi
soundhw = params.get("soundcards")
if soundhw:
if not devices.has_option('device') or soundhw == "all":
for sndcard in ('AC97', 'ES1370', 'intel-hda'):
# Add all dummy PCI devices and the actuall command below
devices.insert(StrDev("SND-%s" % sndcard,
parent_bus={'type': 'pci'}))
devices.insert(StrDev('SoundHW',
cmdline="-soundhw %s" % soundhw))
else:
# TODO: Use QDevices for this and set the addresses properly
for sound_device in soundhw.split(","):
if "hda" in sound_device:
devices.insert(QDevice('intel-hda',
parent_bus={'type': 'pci'}))
devices.insert(QDevice('hda-duplex'))
elif sound_device in ["es1370", "ac97"]:
devices.insert(QDevice(sound_device.upper(),
parent_bus={'type': 'pci'}))
else:
devices.insert(QDevice(sound_device,
parent_bus={'type': 'pci'}))
# Add monitors
for monitor_name in params.objects("monitors"):
monitor_params = params.object_params(monitor_name)
monitor_filename = qemu_monitor.get_monitor_filename(vm,
monitor_name)
if monitor_params.get("monitor_type") == "qmp":
cmd = add_qmp_monitor(devices, monitor_name,
monitor_filename)
devices.insert(StrDev('QMP-%s' % monitor_name, cmdline=cmd))
else:
cmd = add_human_monitor(devices, monitor_name,
monitor_filename)
devices.insert(StrDev('HMP-%s' % monitor_name, cmdline=cmd))
# Add serial console redirection
for serial in params.objects("isa_serials"):
serial_filename = vm.get_serial_console_filename(serial)
cmd = add_serial(devices, serial, serial_filename)
devices.insert(StrDev('SER-%s' % serial, cmdline=cmd))
# Add virtio_serial ports
no_virtio_serial_pcis = 0
no_virtio_ports = 0
virtio_port_spread = int(params.get('virtio_port_spread', 2))
for port_name in params.objects("virtio_ports"):
port_params = params.object_params(port_name)
bus = params.get('virtio_port_bus', False)
if bus is not False: # Manually set bus
bus = int(bus)
elif not virtio_port_spread:
# bus not specified, let qemu decide
pass
elif not no_virtio_ports % virtio_port_spread:
# Add new vio-pci every n-th port. (Spread ports)
bus = no_virtio_serial_pcis
else: # Port not overriden, use last vio-pci
bus = no_virtio_serial_pcis - 1
if bus < 0: # First bus
bus = 0
# Add virtio_serial_pcis
for i in range(no_virtio_serial_pcis, bus + 1):
dev = QDevice('virtio-serial-pci', parent_bus={'type': 'pci'})
dev.set_param('id', 'virtio_serial_pci%d' % i)
devices.insert(dev)
no_virtio_serial_pcis += 1
if bus is not False:
bus = "virtio_serial_pci%d.0" % bus
# Add actual ports
cmd = add_virtio_port(devices, port_name, bus,
self.get_virtio_port_filename(port_name),
port_params.get('virtio_port_type'),
port_params.get('virtio_port_chardev'),
port_params.get('virtio_port_name_prefix'),
no_virtio_ports,
port_params.get('virtio_port_params', ''))
devices.insert(StrDev('VIO-%s' % port_name, cmdline=cmd))
no_virtio_ports += 1
# Add logging
devices.insert(StrDev('isa-log', cmdline=add_log_seabios(devices)))
if params.get("anaconda_log", "no") == "yes":
add_log_anaconda(devices)
# Add USB controllers
usbs = params.objects("usbs")
if not devices.has_option("device"):
usbs = ("oldusb",) # Old qemu, add only one controller '-usb'
for usb_name in usbs:
usb_params = params.object_params(usb_name)
for dev in devices.usbc_by_params(usb_name, usb_params):
devices.insert(dev)
# Add images (harddrives)
for image_name in params.objects("images"):
# FIXME: Use qemu_devices for handling indexes
image_params = params.object_params(image_name)
if image_params.get("boot_drive") == "no":
continue
if params.get("index_enable") == "yes":
drive_index = image_params.get("drive_index")
if drive_index:
index = drive_index
else:
index_global = get_index(index_global)
index = str(index_global)
index_global += 1
else:
index = None
image_bootindex = None
image_boot = image_params.get("image_boot")
if not re.search("boot=on\|off", devices.get_help_text(),
re.MULTILINE):
if image_boot in ['yes', 'on', True]:
image_bootindex = str(global_image_bootindex)
global_image_bootindex += 1
image_boot = "unused"
image_bootindex = image_params.get('bootindex',
image_bootindex)
else:
if image_boot in ['yes', 'on', True]:
if global_image_bootindex > 0:
image_boot = False
global_image_bootindex += 1
image_params = params.object_params(image_name)
if image_params.get("boot_drive") == "no":
continue
devs = devices.images_define_by_params(image_name, image_params,
'disk', index, image_boot,
image_bootindex)
for _ in devs:
devices.insert(_)
# Networking
redirs = []
for redir_name in params.objects("redirs"):
redir_params = params.object_params(redir_name)
guest_port = int(redir_params.get("guest_port"))
host_port = vm.redirs.get(guest_port)
redirs += [(host_port, guest_port)]
iov = 0
for nic in vm.virtnet:
nic_params = params.object_params(nic.nic_name)
if nic_params.get('pci_assignable') == "no":
script = nic_params.get("nic_script")
downscript = nic_params.get("nic_downscript")
vhost = nic_params.get("vhost")
script_dir = data_dir.get_data_dir()
if script:
script = utils_misc.get_path(script_dir, script)
if downscript:
downscript = utils_misc.get_path(script_dir, downscript)
# setup nic parameters as needed
# add_netdev if netdev_id not set
nic = vm.add_nic(**dict(nic))
# gather set values or None if unset
vlan = int(nic.get('vlan'))
netdev_id = nic.get('netdev_id')
device_id = nic.get('device_id')
mac = nic.get('mac')
nic_model = nic.get("nic_model")
nic_extra = nic.get("nic_extra_params")
bootindex = nic_params.get("bootindex")
netdev_extra = nic.get("netdev_extra_params")
bootp = nic.get("bootp")
if nic.get("tftp"):
tftp = utils_misc.get_path(root_dir, nic.get("tftp"))
else:
tftp = None
nettype = nic.get("nettype", "bridge")
# don't force conversion add_nic()/add_net() optional parameter
if nic.has_key('tapfds'):
tapfds = nic.tapfds
else:
tapfds = None
if nic.has_key('vhostfds'):
vhostfds = nic.vhostfds
else:
vhostfds = None
ifname = nic.get('ifname')
queues = nic.get("queues", 1)
# specify the number of MSI-X vectors that the card should have;
# this option currently only affects virtio cards
if nic_params.get("enable_msix_vectors") == "yes":
if nic.has_key("vectors"):
vectors = nic.vectors
else:
vectors = 2 * int(queues) + 1
else:
vectors = None
# Handle the '-net nic' part
add_nic(devices, vlan, nic_model, mac,
device_id, netdev_id, nic_extra,
nic_params.get("nic_pci_addr"),
bootindex, queues, vectors)
# Handle the '-net tap' or '-net user' or '-netdev' part
cmd = add_net(devices, vlan, nettype, ifname, tftp,
bootp, redirs, netdev_id, netdev_extra,
tapfds, script, downscript, vhost, queues,
vhostfds)
# TODO: Is every NIC a PCI device?
devices.insert(StrDev("NET-%s" % nettype, cmdline=cmd))
else:
device_driver = nic_params.get("device_driver", "pci-assign")
pci_id = vm.pa_pci_ids[iov]
add_pcidevice(devices, pci_id, params=nic_params,
device_driver=device_driver)
iov += 1
mem = params.get("mem")
if mem:
devices.insert(StrDev('mem', cmdline=add_mem(devices, mem)))
smp = int(params.get("smp", 0))
vcpu_maxcpus = int(params.get("vcpu_maxcpus", 0))
vcpu_sockets = int(params.get("vcpu_sockets", 0))
vcpu_cores = int(params.get("vcpu_cores", 0))
vcpu_threads = int(params.get("vcpu_threads", 0))
# Force CPU threads to 2 when smp > 8.
if smp > 8 and vcpu_threads <= 1:
vcpu_threads = 2
# Some versions of windows don't support more than 2 sockets of cpu,
# here is a workaround to make all windows use only 2 sockets.
if (vcpu_sockets and vcpu_sockets > 2
and params.get("os_type") == 'windows'):
vcpu_sockets = 2
if smp == 0 or vcpu_sockets == 0:
vcpu_cores = vcpu_cores or 1
vcpu_threads = vcpu_threads or 1
if smp and vcpu_sockets == 0:
vcpu_sockets = int(smp / (vcpu_cores * vcpu_threads)) or 1
else:
vcpu_sockets = vcpu_sockets or 1
if smp == 0:
smp = vcpu_cores * vcpu_threads * vcpu_sockets
else:
if vcpu_cores == 0:
vcpu_threads = vcpu_threads or 1
vcpu_cores = int(smp / (vcpu_sockets * vcpu_threads)) or 1
else:
vcpu_threads = int(smp / (vcpu_cores * vcpu_sockets)) or 1
self.cpuinfo.smp = smp
self.cpuinfo.maxcpus = vcpu_maxcpus or smp
self.cpuinfo.cores = vcpu_cores
self.cpuinfo.threads = vcpu_threads
self.cpuinfo.sockets = vcpu_sockets
devices.insert(StrDev('smp', cmdline=add_smp(devices)))
numa_total_cpus = 0
numa_total_mem = 0
for numa_node in params.objects("guest_numa_nodes"):
numa_params = params.object_params(numa_node)
numa_mem = numa_params.get("numa_mem")
numa_cpus = numa_params.get("numa_cpus")
numa_nodeid = numa_params.get("numa_nodeid")
if numa_mem is not None:
numa_total_mem += int(numa_mem)
if numa_cpus is not None:
numa_total_cpus += len(utils_misc.cpu_str_to_list(numa_cpus))
devices.insert(StrDev('numa', cmdline=add_numa_node(devices)))
if params.get("numa_consistency_check_cpu_mem", "no") == "yes":
if (numa_total_cpus > int(smp) or numa_total_mem > int(mem)
or len(params.objects("guest_numa_nodes")) > int(smp)):
logging.debug("-numa need %s vcpu and %s memory. It is not "
"matched the -smp and -mem. The vcpu number "
"from -smp is %s, and memory size from -mem is"
" %s" % (numa_total_cpus, numa_total_mem, smp,
mem))
raise virt_vm.VMDeviceError("The numa node cfg can not fit"
" smp and memory cfg.")
cpu_model = params.get("cpu_model")
use_default_cpu_model = True
if cpu_model:
use_default_cpu_model = False
for model in re.split(",", cpu_model):
model = model.strip()
if not model in support_cpu_model:
continue
cpu_model = model
break
else:
cpu_model = model
logging.error("Non existing CPU model %s will be passed "
"to qemu (wrong config or negative test)", model)
if use_default_cpu_model:
cpu_model = params.get("default_cpu_model")
if cpu_model:
vendor = params.get("cpu_model_vendor")
flags = params.get("cpu_model_flags")
family = params.get("cpu_family")
self.cpuinfo.model = cpu_model
self.cpuinfo.vendor = vendor
self.cpuinfo.flags = flags
self.cpuinfo.family = family
cmd = add_cpu_flags(devices, cpu_model, flags, vendor, family)
devices.insert(StrDev('cpu', cmdline=cmd))
# Add cdroms
for cdrom in params.objects("cdroms"):
image_params = params.object_params(cdrom)
# FIXME: Use qemu_devices for handling indexes
if image_params.get("boot_drive") == "no":
continue
if params.get("index_enable") == "yes":
drive_index = image_params.get("drive_index")
if drive_index:
index = drive_index
else:
index_global = get_index(index_global)
index = str(index_global)
index_global += 1
else:
index = None
image_bootindex = None
image_boot = image_params.get("image_boot")
if not re.search("boot=on\|off", devices.get_help_text(),
re.MULTILINE):
if image_boot in ['yes', 'on', True]:
image_bootindex = str(global_image_bootindex)
global_image_bootindex += 1
image_boot = "unused"
image_bootindex = image_params.get(
'bootindex', image_bootindex)
else:
if image_boot in ['yes', 'on', True]:
if global_image_bootindex > 0:
image_boot = False
global_image_bootindex += 1
iso = image_params.get("cdrom")
if iso or image_params.get("cdrom_without_file") == "yes":
devs = devices.cdroms_define_by_params(cdrom, image_params,
'cdrom', index,
image_boot,
image_bootindex)
for _ in devs:
devices.insert(_)
# We may want to add {floppy_otps} parameter for -fda, -fdb
# {fat:floppy:}/path/. However vvfat is not usually recommended.
for floppy_name in params.objects('floppies'):
image_params = params.object_params(floppy_name)
# TODO: Unify image, cdrom, floppy params
image_params['drive_format'] = 'floppy'
image_params[
'image_readonly'] = image_params.get("floppy_readonly",
"no")
# Use the absolute patch with floppies (pure *.vfd)
image_params['image_raw_device'] = 'yes'
image_params['image_name'] = utils_misc.get_path(
data_dir.get_data_dir(),
image_params["floppy_name"])
image_params['image_format'] = None
devs = devices.images_define_by_params(floppy_name, image_params,
media='')
for _ in devs:
devices.insert(_)
# Add usb devices
for usb_dev in params.objects("usb_devices"):
usb_dev_params = params.object_params(usb_dev)
devices.insert(devices.usb_by_params(usb_dev, usb_dev_params))
tftp = params.get("tftp")
if tftp:
tftp = utils_misc.get_path(data_dir.get_data_dir(), tftp)
devices.insert(StrDev('tftp', cmdline=add_tftp(devices, tftp)))
bootp = params.get("bootp")
if bootp:
devices.insert(StrDev('bootp',
cmdline=add_bootp(devices, bootp)))
kernel = params.get("kernel")
if kernel:
kernel = utils_misc.get_path(data_dir.get_data_dir(), kernel)
devices.insert(StrDev('kernel',
cmdline=add_kernel(devices, kernel)))
kernel_params = params.get("kernel_params")
if kernel_params:
cmd = add_kernel_cmdline(devices, kernel_params)
devices.insert(StrDev('kernel-params', cmdline=cmd))
initrd = params.get("initrd")
if initrd:
initrd = utils_misc.get_path(data_dir.get_data_dir(), initrd)
devices.insert(StrDev('initrd',
cmdline=add_initrd(devices, initrd)))
for host_port, guest_port in redirs:
cmd = add_tcp_redir(devices, host_port, guest_port)
devices.insert(StrDev('tcp-redir', cmdline=cmd))
cmd = ""
if params.get("display") == "vnc":
vnc_extra_params = params.get("vnc_extra_params")
vnc_password = params.get("vnc_password", "no")
cmd += add_vnc(devices, self.vnc_port, vnc_password,
vnc_extra_params)
elif params.get("display") == "sdl":
cmd += add_sdl(devices)
elif params.get("display") == "nographic":
cmd += add_nographic(devices)
elif params.get("display") == "spice":
if params.get("rhel5_spice"):
spice_params = params.get("spice_params")
cmd += add_spice_rhel5(devices, spice_params)
else:
spice_keys = (
"spice_port", "spice_password", "spice_addr", "spice_ssl",
"spice_tls_port", "spice_tls_ciphers", "spice_gen_x509",
"spice_x509_dir", "spice_x509_prefix",
"spice_x509_key_file", "spice_x509_cacert_file",
"spice_x509_key_password", "spice_x509_secure",
"spice_x509_cacert_subj", "spice_x509_server_subj",
"spice_secure_channels", "spice_image_compression",
"spice_jpeg_wan_compression",
"spice_zlib_glz_wan_compression", "spice_streaming_video",
"spice_agent_mouse", "spice_playback_compression",
"spice_ipv4", "spice_ipv6", "spice_x509_cert_file",
"disable_copy_paste", "spice_seamless_migration",
"listening_addr"
)
for skey in spice_keys:
value = params.get(skey, None)
if value:
self.spice_options[skey] = value
cmd += add_spice()
if cmd:
devices.insert(StrDev('display', cmdline=cmd))
if params.get("uuid") == "random":
cmd = add_uuid(devices, vm.uuid)
devices.insert(StrDev('uuid', cmdline=cmd))
elif params.get("uuid"):
cmd = add_uuid(devices, params.get("uuid"))
devices.insert(StrDev('uuid', cmdline=cmd))
if params.get("testdev") == "yes":
cmd = add_testdev(devices, vm.get_testlog_filename())
devices.insert(StrDev('testdev', cmdline=cmd))
if params.get("isa_debugexit") == "yes":
iobase = params.get("isa_debugexit_iobase")
iosize = params.get("isa_debugexit_iosize")
cmd = add_isa_debug_exit(devices, iobase, iosize)
devices.insert(StrDev('isa_debugexit', cmdline=cmd))
if params.get("disable_hpet") == "yes":
devices.insert(StrDev('nohpet', cmdline=add_no_hpet(devices)))
devices.insert(StrDev('rtc', cmdline=add_rtc(devices)))
if devices.has_option("boot"):
boot_order = params.get("boot_order", "cdn")
boot_once = params.get("boot_once", "c")
boot_menu = params.get("boot_menu", "off")
cmd = add_boot(devices, boot_order, boot_once, boot_menu)
devices.insert(StrDev('bootmenu', cmdline=cmd))
p9_export_dir = params.get("9p_export_dir")
if p9_export_dir:
cmd = " -fsdev"
p9_fs_driver = params.get("9p_fs_driver")
if p9_fs_driver == "handle":
cmd += " handle,id=local1,path=" + p9_export_dir
elif p9_fs_driver == "proxy":
cmd += " proxy,id=local1,socket="
else:
p9_fs_driver = "local"
cmd += " local,id=local1,path=" + p9_export_dir
# security model is needed only for local fs driver
if p9_fs_driver == "local":
p9_security_model = params.get("9p_security_model")
if not p9_security_model:
p9_security_model = "none"
cmd += ",security_model=" + p9_security_model
elif p9_fs_driver == "proxy":
p9_socket_name = params.get("9p_socket_name")
if not p9_socket_name:
raise virt_vm.VMImageMissingError("Socket name not "
"defined")
cmd += p9_socket_name
p9_immediate_writeout = params.get("9p_immediate_writeout")
if p9_immediate_writeout == "yes":
cmd += ",writeout=immediate"
p9_readonly = params.get("9p_readonly")
if p9_readonly == "yes":
cmd += ",readonly"
devices.insert(StrDev('fsdev', cmdline=cmd))
dev = QDevice('virtio-9p-pci', parent_bus={'type': 'pci'})
dev.set_param('fsdev', 'local1')
dev.set_param('mount_tag', 'autotest_tag')
devices.insert(dev)
extra_params = params.get("extra_params")
if extra_params:
devices.insert(StrDev('extra', cmdline=extra_params))
bios_path = params.get("bios_path")
if bios_path:
devices.insert(StrDev('bios', cmdline="-bios %s" % bios_path))
disable_kvm_option = ""
if (devices.has_option("no-kvm")):
disable_kvm_option = "-no-kvm"
enable_kvm_option = ""
if (devices.has_option("enable-kvm")):
enable_kvm_option = "-enable-kvm"
if (params.get("disable_kvm", "no") == "yes"):
params["enable_kvm"] = "no"
if (params.get("enable_kvm", "yes") == "no"):
devices.insert(StrDev('nokvm', cmdline=disable_kvm_option))
logging.debug("qemu will run in TCG mode")
else:
devices.insert(StrDev('kvm', cmdline=enable_kvm_option))
logging.debug("qemu will run in KVM mode")
self.no_shutdown = (devices.has_option("no-shutdown") and
params.get("disable_shutdown", "no") == "yes")
if self.no_shutdown:
devices.insert(StrDev('noshutdown', cmdline="-no-shutdown"))
user_runas = params.get("user_runas")
if devices.has_option("runas") and user_runas:
devices.insert(StrDev('runas', cmdline="-runas %s" % user_runas))
if params.get("enable_sga") == "yes":
devices.insert(StrDev('sga', cmdline=add_sga(devices)))
if params.get("smartcard", "no") == "yes":
sc_chardev = params.get("smartcard_chardev")
sc_id = params.get("smartcard_id")
devices.insert(StrDev('smartcard',
cmdline=add_smartcard(devices, sc_chardev, sc_id)))
if params.get("enable_watchdog", "no") == "yes":
cmd = add_watchdog(devices,
params.get("watchdog_device_type", None),
params.get("watchdog_action", "reset"))
devices.insert(StrDev('watchdog', cmdline=cmd))
option_roms = params.get("option_roms")
if option_roms:
cmd = ""
for opt_rom in option_roms.split():
cmd += add_option_rom(help, opt_rom)
if cmd:
devices.insert(StrDev('ROM', cmdline=cmd))
return devices
def _nic_tap_add_helper(self, nic):
    """
    Create the host-side tap/macvtap backing for *nic* and bring it up.

    For macvtap nics the interface is created via utils_net; otherwise a
    tap device is opened (fds stored on nic.tapfds), optionally enslaved
    to the configured bridge, and the ifname is brought up.
    """
    if nic.nettype == 'macvtap':
        logging.info("Adding macvtap ifname: %s", nic.ifname)
        utils_net.add_nic_macvtap(nic)
        return
    nic.tapfds = utils_net.open_tap("/dev/net/tun", nic.ifname,
                                    queues=nic.queues, vnet_hdr=True)
    logging.debug("Adding VM %s NIC ifname %s to bridge %s",
                  self.name, nic.ifname, nic.netdst)
    if nic.nettype == 'bridge':
        utils_net.add_to_bridge(nic.ifname, nic.netdst)
    utils_net.bring_up_ifname(nic.ifname)
def _nic_tap_remove_helper(self, nic):
try:
if nic.nettype == 'macvtap':
logging.info("Remove macvtap ifname %s", nic.ifname)
tap = utils_net.Macvtap(nic.ifname)
tap.delete()
else:
logging.debug("Removing VM %s NIC ifname %s from bridge %s",
self.name, nic.ifname, nic.netdst)
if nic.tapfds:
for i in nic.tapfds.split(':'):
os.close(int(i))
if nic.vhostfds:
for i in nic.tapfds.split(':'):
os.close(int(i))
except TypeError:
pass
@error.context_aware
def create(self, name=None, params=None, root_dir=None,
timeout=CREATE_TIMEOUT, migration_mode=None,
migration_exec_cmd=None, migration_fd=None,
mac_source=None):
"""
Start the VM by running a qemu command.
All parameters are optional. If name, params or root_dir are not
supplied, the respective values stored as class attributes are used.
:param name: The name of the object
:param params: A dict containing VM params
:param root_dir: Base directory for relative filenames
:param migration_mode: If supplied, start VM for incoming migration
using this protocol (either 'rdma', 'x-rdma', 'rdma', 'tcp', 'unix' or 'exec')
:param migration_exec_cmd: Command to embed in '-incoming "exec: ..."'
(e.g. 'gzip -c -d filename') if migration_mode is 'exec'
default to listening on a random TCP port
:param migration_fd: Open descriptor from machine should migrate.
:param mac_source: A VM object from which to copy MAC addresses. If not
specified, new addresses will be generated.
:raise VMCreateError: If qemu terminates unexpectedly
:raise VMKVMInitError: If KVM initialization fails
:raise VMHugePageError: If hugepage initialization fails
:raise VMImageMissingError: If a CD image is missing
:raise VMHashMismatchError: If a CD image hash has doesn't match the
expected hash
:raise VMBadPATypeError: If an unsupported PCI assignment type is
requested
:raise VMPAError: If no PCI assignable devices could be assigned
:raise TAPCreationError: If fail to create tap fd
:raise BRAddIfError: If fail to add a tap to a bridge
:raise TAPBringUpError: If fail to bring up a tap
:raise PrivateBridgeError: If fail to bring the private bridge
"""
error.context("creating '%s'" % self.name)
self.destroy(free_mac_addresses=False)
if name is not None:
self.name = name
self.devices = None # Representation changed
if params is not None:
self.params = params
self.devices = None # Representation changed
if root_dir is not None:
self.root_dir = root_dir
self.devices = None # Representation changed
name = self.name
params = self.params
root_dir = self.root_dir
# Verify the md5sum of the ISO images
for cdrom in params.objects("cdroms"):
cdrom_params = params.object_params(cdrom)
iso = cdrom_params.get("cdrom")
if iso:
iso = utils_misc.get_path(data_dir.get_data_dir(), iso)
if not os.path.exists(iso):
raise virt_vm.VMImageMissingError(iso)
compare = False
if cdrom_params.get("md5sum_1m"):
logging.debug("Comparing expected MD5 sum with MD5 sum of "
"first MB of ISO file...")
actual_hash = utils.hash_file(iso, 1048576, method="md5")
expected_hash = cdrom_params.get("md5sum_1m")
compare = True
elif cdrom_params.get("md5sum"):
logging.debug("Comparing expected MD5 sum with MD5 sum of "
"ISO file...")
actual_hash = utils.hash_file(iso, method="md5")
expected_hash = cdrom_params.get("md5sum")
compare = True
elif cdrom_params.get("sha1sum"):
logging.debug("Comparing expected SHA1 sum with SHA1 sum "
"of ISO file...")
actual_hash = utils.hash_file(iso, method="sha1")
expected_hash = cdrom_params.get("sha1sum")
compare = True
if compare:
if actual_hash == expected_hash:
logging.debug("Hashes match")
else:
raise virt_vm.VMHashMismatchError(actual_hash,
expected_hash)
# Make sure the following code is not executed by more than one thread
# at the same time
lockfile = open("/tmp/kvm-autotest-vm-create.lock", "w+")
fcntl.lockf(lockfile, fcntl.LOCK_EX)
try:
# Handle port redirections
redir_names = params.objects("redirs")
host_ports = utils_misc.find_free_ports(
5000, 6000, len(redir_names))
self.redirs = {}
for i in range(len(redir_names)):
redir_params = params.object_params(redir_names[i])
guest_port = int(redir_params.get("guest_port"))
self.redirs[guest_port] = host_ports[i]
# Generate basic parameter values for all NICs and create TAP fd
for nic in self.virtnet:
nic_params = params.object_params(nic.nic_name)
pa_type = nic_params.get("pci_assignable")
if pa_type and pa_type != "no":
device_driver = nic_params.get("device_driver",
"pci-assign")
if "mac" not in nic:
self.virtnet.generate_mac_address(nic["nic_name"])
mac = nic["mac"]
if self.pci_assignable is None:
self.pci_assignable = test_setup.PciAssignable(
driver=params.get("driver"),
driver_option=params.get("driver_option"),
host_set_flag=params.get("host_setup_flag"),
kvm_params=params.get("kvm_default"),
vf_filter_re=params.get("vf_filter_re"),
pf_filter_re=params.get("pf_filter_re"),
device_driver=device_driver)
# Virtual Functions (VF) assignable devices
if pa_type == "vf":
self.pci_assignable.add_device(device_type=pa_type,
mac=mac)
# Physical NIC (PF) assignable devices
elif pa_type == "pf":
self.pci_assignable.add_device(device_type=pa_type,
name=nic_params.get("device_name"))
else:
raise virt_vm.VMBadPATypeError(pa_type)
else:
# fill in key values, validate nettype
# note: make_create_command() calls vm.add_nic (i.e. on a
# copy)
if nic_params.get('netdst') == 'private':
nic.netdst = (test_setup.
PrivateBridgeConfig(nic_params).brname)
nic = self.add_nic(**dict(nic)) # implied add_netdev
if mac_source:
# Will raise exception if source doesn't
# have cooresponding nic
logging.debug("Copying mac for nic %s from VM %s"
% (nic.nic_name, mac_source.name))
nic.mac = mac_source.get_mac_address(nic.nic_name)
if nic.ifname in utils_net.get_net_if():
self.virtnet.generate_ifname(nic.nic_name)
if nic.nettype in ['bridge', 'network', 'macvtap']:
self._nic_tap_add_helper(nic)
if ((nic_params.get("vhost") == 'vhost=on') and
(nic_params.get("enable_vhostfd", "yes") == "yes")):
vhostfds = []
for i in xrange(int(nic.queues)):
vhostfds.append(str(os.open("/dev/vhost-net",
os.O_RDWR)))
nic.vhostfds = ':'.join(vhostfds)
elif nic.nettype == 'user':
logging.info("Assuming dependencies met for "
"user mode nic %s, and ready to go"
% nic.nic_name)
self.virtnet.update_db()
# Find available VNC port, if needed
if params.get("display") == "vnc":
self.vnc_port = utils_misc.find_free_port(5900, 6100)
# Find random UUID if specified 'uuid = random' in config file
if params.get("uuid") == "random":
f = open("/proc/sys/kernel/random/uuid")
self.uuid = f.read().strip()
f.close()
if self.pci_assignable is not None:
self.pa_pci_ids = self.pci_assignable.request_devs()
if self.pa_pci_ids:
logging.debug("Successfully assigned devices: %s",
self.pa_pci_ids)
else:
raise virt_vm.VMPAError(pa_type)
# Make qemu command
try:
self.devices = self.make_create_command()
logging.debug(self.devices.str_short())
logging.debug(self.devices.str_bus_short())
qemu_command = self.devices.cmdline()
except error.TestNAError:
# TestNAErrors should be kept as-is so we generate SKIP
# results instead of bogus FAIL results
raise
except Exception:
for nic in self.virtnet:
self._nic_tap_remove_helper(nic)
# TODO: log_last_traceback is being moved into autotest.
# use autotest.client.shared.base_utils when it's completed.
if 'log_last_traceback' in utils.__dict__:
utils.log_last_traceback('Fail to create qemu command:')
else:
utils_misc.log_last_traceback('Fail to create qemu'
'command:')
raise virt_vm.VMStartError(self.name, 'Error occurred while '
'executing make_create_command(). '
'Check the log for traceback.')
# Add migration parameters if required
if migration_mode in ["tcp", "rdma", "x-rdma"]:
self.migration_port = utils_misc.find_free_port(5200, 6000)
qemu_command += (" -incoming " + migration_mode +
":0:%d" % self.migration_port)
elif migration_mode == "unix":
self.migration_file = "/tmp/migration-unix-%s" % self.instance
qemu_command += " -incoming unix:%s" % self.migration_file
elif migration_mode == "exec":
if migration_exec_cmd is None:
self.migration_port = utils_misc.find_free_port(5200, 6000)
qemu_command += (' -incoming "exec:nc -l %s"' %
self.migration_port)
else:
qemu_command += (' -incoming "exec:%s"' %
migration_exec_cmd)
elif migration_mode == "fd":
qemu_command += ' -incoming "fd:%d"' % (migration_fd)
p9_fs_driver = params.get("9p_fs_driver")
if p9_fs_driver == "proxy":
proxy_helper_name = params.get("9p_proxy_binary",
"virtfs-proxy-helper")
proxy_helper_cmd = utils_misc.get_path(root_dir,
proxy_helper_name)
if not proxy_helper_cmd:
raise virt_vm.VMConfigMissingError(self.name,
"9p_proxy_binary")
p9_export_dir = params.get("9p_export_dir")
if not p9_export_dir:
raise virt_vm.VMConfigMissingError(self.name,
"9p_export_dir")
proxy_helper_cmd += " -p " + p9_export_dir
proxy_helper_cmd += " -u 0 -g 0"
p9_socket_name = params.get("9p_socket_name")
proxy_helper_cmd += " -s " + p9_socket_name
proxy_helper_cmd += " -n"
logging.info("Running Proxy Helper:\n%s", proxy_helper_cmd)
self.process = aexpect.run_bg(proxy_helper_cmd, None,
logging.info,
"[9p proxy helper]",
auto_close=False)
logging.info("Running qemu command (reformatted):\n%s",
qemu_command.replace(" -", " \\\n -"))
self.qemu_command = qemu_command
self.process = aexpect.run_bg(qemu_command, None,
logging.info, "[qemu output] ",
auto_close=False)
self.start_time = time.time()
# test doesn't need to hold tapfd's open
for nic in self.virtnet:
if nic.has_key('tapfds'): # implies bridge/tap
try:
for i in nic.tapfds.split(':'):
os.close(int(i))
# qemu process retains access via open file
# remove this attribute from virtnet because
# fd numbers are not always predictable and
# vm instance must support cloning.
del nic['tapfds']
# File descriptor is already closed
except OSError:
pass
if nic.has_key('vhostfds'):
try:
for i in nic.vhostfds.split(':'):
os.close(int(i))
del nic['vhostfds']
except OSError:
pass
# Make sure the process was started successfully
if not self.process.is_alive():
status = self.process.get_status()
output = self.process.get_output().strip()
migration_in_course = migration_mode is not None
unknown_protocol = "unknown migration protocol" in output
if migration_in_course and unknown_protocol:
e = VMMigrateProtoUnsupportedError(migration_mode, output)
else:
e = virt_vm.VMCreateError(qemu_command, status, output)
self.destroy()
raise e
# Establish monitor connections
self.monitors = []
for monitor_name in params.objects("monitors"):
monitor_params = params.object_params(monitor_name)
try:
monitor = qemu_monitor.wait_for_create_monitor(self,
monitor_name, monitor_params, timeout)
except qemu_monitor.MonitorConnectError, detail:
logging.error(detail)
self.destroy()
raise
# Add this monitor to the list
self.monitors += [monitor]
# Create isa serial ports.
self.serial_ports = []
for serial in params.objects("isa_serials"):
self.serial_ports.append(serial)
# Create virtio_ports (virtio_serialports and virtio_consoles)
i = 0
self.virtio_ports = []
for port in params.objects("virtio_ports"):
port_params = params.object_params(port)
if port_params.get('virtio_port_chardev') == "spicevmc":
filename = 'dev%s' % port
else:
filename = self.get_virtio_port_filename(port)
port_name = port_params.get('virtio_port_name_prefix', None)
if port_name: # If port_name_prefix was used
port_name = port_name + str(i)
else: # Implicit name - port
port_name = port
if port_params.get('virtio_port_type') in ("console",
"virtio_console"):
self.virtio_ports.append(
qemu_virtio_port.VirtioConsole(port, port_name,
filename))
else:
self.virtio_ports.append(
qemu_virtio_port.VirtioSerial(port, port_name,
filename))
i += 1
# Get the output so far, to see if we have any problems with
# KVM modules or with hugepage setup.
output = self.process.get_output()
if re.search("Could not initialize KVM", output, re.IGNORECASE):
e = virt_vm.VMKVMInitError(
qemu_command, self.process.get_output())
self.destroy()
raise e
if "alloc_mem_area" in output:
e = virt_vm.VMHugePageError(
qemu_command, self.process.get_output())
self.destroy()
raise e
logging.debug("VM appears to be alive with PID %s", self.get_pid())
vcpu_thread_pattern = self.params.get("vcpu_thread_pattern",
r"thread_id.?[:|=]\s*(\d+)")
self.vcpu_threads = self.get_vcpu_pids(vcpu_thread_pattern)
vhost_thread_pattern = params.get("vhost_thread_pattern",
r"\w+\s+(\d+)\s.*\[vhost-%s\]")
self.vhost_threads = self.get_vhost_threads(vhost_thread_pattern)
# Establish a session with the serial console
# Let's consider the first serial port as serial console.
# Note: requires a version of netcat that supports -U
try:
tmp_serial = self.serial_ports[0]
except IndexError:
raise virt_vm.VMConfigMissingError(name, "isa_serial")
self.serial_console = aexpect.ShellSession(
"nc -U %s" % self.get_serial_console_filename(tmp_serial),
auto_close=False,
output_func=utils_misc.log_line,
output_params=("serial-%s-%s.log" % (tmp_serial, name),),
prompt=self.params.get("shell_prompt", "[\#\$]"))
del tmp_serial
for key, value in self.logs.items():
outfile = "%s-%s.log" % (key, name)
self.logsessions[key] = aexpect.Tail(
"nc -U %s" % value,
auto_close=False,
output_func=utils_misc.log_line,
output_params=(outfile,))
self.logsessions[key].set_log_file(outfile)
if params.get("paused_after_start_vm") != "yes":
# start guest
if self.monitor.verify_status("paused"):
try:
self.monitor.cmd("cont")
except qemu_monitor.QMPCmdError, e:
if ((e.data['class'] == "MigrationExpected") and
(migration_mode is not None)):
logging.debug("Migration did not start yet...")
else:
raise e
finally:
fcntl.lockf(lockfile, fcntl.LOCK_UN)
lockfile.close()
def wait_for_status(self, status, timeout, first=0.0, step=1.0, text=None):
        """
        Block until the VM's status matches the requested one.

        :param status: Status string to wait for.
        :param timeout: Timeout in seconds.
        :param first: Time to sleep before the first attempt.
        :param step: Time to sleep between attempts in seconds.
        :param text: Text to print while waiting, for debug purposes.
        :return: True in case the status changed before the timeout,
                otherwise None.
        """
        def _status_reached():
            return self.monitor.verify_status(status)
        return utils_misc.wait_for(_status_reached, timeout, first, step, text)
def wait_until_paused(self, timeout):
        """
        Block until the VM reports the "paused" status.

        :param timeout: Timeout in seconds.
        :return: True in case the VM paused before the timeout, otherwise
                None.
        """
        return self.wait_for_status("paused", timeout)
def wait_until_dead(self, timeout, first=0.0, step=1.0):
        """
        Block until the VM process is gone.

        :param timeout: Timeout in seconds.
        :param first: Time to sleep before the first attempt.
        :param step: Time to sleep between attempts in seconds.
        :return: True if the VM died before the timeout, otherwise None.
        """
        return utils_misc.wait_for(self.is_dead, timeout, first, step)
def wait_for_shutdown(self, timeout=60):
        """
        Wait until the guest-initiated shutdown takes effect.

        With -no-shutdown, QEMU pauses instead of exiting, so in that mode
        this waits for the "paused" state; otherwise it waits for the
        process to die.

        :param timeout: Timeout in seconds.
        :return: True in case the VM was shut down, None otherwise.

        Note that the VM is not necessarily dead when this function returns
        True. If QEMU is running in -no-shutdown mode, the QEMU process
        may be still alive.
        """
        if self.no_shutdown:
            return self.wait_until_paused(timeout)
        return self.wait_until_dead(timeout, 1, 1)
def graceful_shutdown(self, timeout=60):
        """
        Try to gracefully shut down the VM.

        Sends the configured ``shutdown_command`` through a shell session
        (network login when the VM has NICs, serial console otherwise) and
        waits for the guest to go down.

        :param timeout: Time in seconds to wait for the guest to shut down.
        :return: True if VM was successfully shut down, None otherwise.

        Note that the VM is not necessarily dead when this function returns
        True. If QEMU is running in -no-shutdown mode, the QEMU process
        may be still alive.
        """
        if self.params.get("shutdown_command"):
            # Try to destroy with shell command
            logging.debug("Shutting down VM %s (shell)", self.name)
            try:
                if len(self.virtnet) > 0:
                    # Prefer a network login when the VM has NICs
                    session = self.login()
                else:
                    session = self.serial_login()
            except (virt_vm.VMInterfaceIndexError), e:
                try:
                    # Fall back to the serial console
                    # NOTE(review): even if this fallback login succeeds, the
                    # else: branch below is skipped (it only runs when the
                    # outer try raised nothing), so no shutdown command is
                    # sent -- looks unintended; confirm.
                    session = self.serial_login()
                except (remote.LoginError, virt_vm.VMError), e:
                    logging.debug(e)
            except (remote.LoginError, virt_vm.VMError), e:
                logging.debug(e)
            else:
                try:
                    # Send the shutdown command
                    session.sendline(self.params.get("shutdown_command"))
                    if self.wait_for_shutdown(timeout):
                        return True
                finally:
                    session.close()
def _cleanup(self, free_mac_addresses):
        """
        Do cleanup work after the VM process is gone.

        Steps, in order: drop monitor objects; release PCI-assigned devices;
        close the qemu/helper process, serial console and log sessions;
        delete temporary files (testlog, monitor sockets, virtio port and
        serial console files, log sockets, migration socket); optionally
        release MAC addresses; delete macvtap interfaces.

        :param free_mac_addresses: Whether to release the VM's NICs back
                to the address pool.
        """
        self.monitors = []
        if self.pci_assignable:
            self.pci_assignable.release_devs()
            self.pci_assignable = None
        if self.process:
            self.process.close()
        if self.serial_console:
            self.serial_console.close()
        if self.logsessions:
            for key in self.logsessions:
                self.logsessions[key].close()
        # Generate the tmp file which should be deleted.
        file_list = [self.get_testlog_filename()]
        file_list += qemu_monitor.get_monitor_filenames(self)
        file_list += self.get_virtio_port_filenames()
        file_list += self.get_serial_console_filenames()
        file_list += self.logs.values()
        for f in file_list:
            try:
                os.unlink(f)
            # File may never have been created, or was already removed
            except OSError:
                pass
        # migration_file only exists for "unix" mode migration targets
        if hasattr(self, "migration_file"):
            try:
                os.unlink(self.migration_file)
            except OSError:
                pass
        if free_mac_addresses:
            for nic_index in xrange(0, len(self.virtnet)):
                self.free_mac_address(nic_index)
        for nic in self.virtnet:
            if nic.nettype == 'macvtap':
                tap = utils_net.Macvtap(nic.ifname)
                tap.delete()
def destroy(self, gracefully=True, free_mac_addresses=True):
        """
        Destroy the VM.

        If gracefully is True, first attempt to shutdown the VM with a shell
        command. Then, attempt to destroy the VM via the monitor with a 'quit'
        command. If that fails, send SIGKILL to the qemu process.

        :param gracefully: If True, an attempt will be made to end the VM
                using a shell command before trying to end the qemu process
                with a 'quit' or a kill signal.
        :param free_mac_addresses: If True, the MAC addresses used by the VM
                will be freed.
        """
        try:
            # Is it already dead?
            if self.is_dead():
                return
            logging.debug("Destroying VM %s (PID %s)", self.name,
                          self.get_pid())
            kill_timeout = int(self.params.get("kill_timeout", "60"))
            if gracefully:
                # Escalation step 1: in-guest shutdown command
                self.graceful_shutdown(kill_timeout)
                if self.is_dead():
                    logging.debug("VM %s down (shell)", self.name)
                    return
                else:
                    logging.debug("VM %s failed to go down (shell)", self.name)
            if self.monitor:
                # Escalation step 2: finish process with a monitor command
                logging.debug("Ending VM %s process (monitor)", self.name)
                try:
                    self.monitor.quit()
                except qemu_monitor.MonitorError, e:
                    # Monitor unresponsive -- fall through to the kill below
                    logging.warn(e)
                else:
                    # Wait for the VM to be really dead
                    if self.wait_until_dead(5, 0.5, 0.5):
                        logging.debug("VM %s down (monitor)", self.name)
                        return
                    else:
                        logging.debug("VM %s failed to go down (monitor)",
                                      self.name)
            # Escalation step 3: the VM isn't dead yet, SIGKILL the tree
            pid = self.process.get_pid()
            logging.debug("Ending VM %s process (killing PID %s)",
                          self.name, pid)
            utils_misc.kill_process_tree(pid, 9)
            # Wait for the VM to be really dead
            if utils_misc.wait_for(self.is_dead, 5, 0.5, 0.5):
                logging.debug("VM %s down (process killed)", self.name)
                return
            # If all else fails, we've got a zombie...
            logging.error("VM %s (PID %s) is a zombie!", self.name,
                          self.process.get_pid())
        finally:
            # Cleanup runs regardless of how (or whether) the VM went down
            self._cleanup(free_mac_addresses)
@property
def monitor(self):
        """
        The VM's main monitor object.

        Selected by the ``main_monitor`` parameter; when that parameter is
        unset, the first monitor is returned. None is returned when no
        monitors exist or when ``main_monitor`` names a nonexistent monitor.
        """
        wanted = self.params.get("main_monitor")
        for candidate in self.monitors:
            if candidate.name == wanted:
                return candidate
        if not wanted and self.monitors:
            return self.monitors[0]
        return None
def get_monitors_by_type(self, mon_type):
        """
        Return all of this VM's monitors that speak the given protocol.

        :param mon_type: Desired monitor type (qmp, human).
        :return: List of matching monitor objects.
        """
        matching = []
        for mon in self.monitors:
            if mon.protocol == mon_type:
                matching.append(mon)
        return matching
def get_peer(self, netid):
        """
        Return the peer of a netdev or network device.

        Queries the monitor's "network" info and pairs up netdev/device ids
        using the ``netdev_peer_re`` regex from the VM parameters (a default
        is used with a warning when unset).

        :param netid: id of netdev or device
        :return: id of the peer device, or None when no pair matches
        """
        o = self.monitor.info("network")
        network_info = o
        if isinstance(o, dict):
            # QMP monitors wrap the text in {"return": ...}; HMP returns the
            # text directly. (Bug fix: was ``o.get["return"]``, which
            # subscripts the bound dict.get method and raises TypeError.)
            network_info = o["return"]
        netdev_peer_re = self.params.get("netdev_peer_re")
        if not netdev_peer_re:
            default_netdev_peer_re = "\s{2,}(.*?): .*?\\\s(.*?):"
            logging.warning("Missing config netdev_peer_re for VM %s, "
                            "using default %s", self.name,
                            default_netdev_peer_re)
            netdev_peer_re = default_netdev_peer_re
        pairs = re.findall(netdev_peer_re, network_info, re.S)
        # The regex yields (nic, tap) pairs; a match on either side returns
        # the other side.
        for nic, tap in pairs:
            if nic == netid:
                return tap
            if tap == netid:
                return nic
        return None
def get_ifname(self, nic_index=0):
        """
        Return the host-side ifname of the bridge/tap device for a NIC.

        :param nic_index: Index of the NIC.
        """
        nic = self.virtnet[nic_index]
        return nic.ifname
def get_pid(self):
        """
        Return the qemu process PID. If the VM is dead return None.

        :note: This works under the assumption that self.process.get_pid()
            returns the PID of the parent shell process; the qemu PID is
            looked up as that shell's first child via ps.
        """
        try:
            ps_cmd = "ps --ppid=%d -o pid=" % self.process.get_pid()
            child_pids = commands.getoutput(ps_cmd).split()
            return int(child_pids[0])
        except (TypeError, IndexError, ValueError):
            # No such process / no children / non-numeric output
            return None
def get_shell_pid(self):
        """
        Return the PID of the parent shell process.

        :note: This works under the assumption that self.process.get_pid()
            returns the PID of the parent shell process.
        """
        return self.process.get_pid()
def get_vnc_port(self):
        """
        Return the VNC port assigned to this VM (self.vnc_port).
        """
        return self.vnc_port
def get_vcpu_pids(self, vcpu_thread_pattern):
        """
        Return the list of vcpu thread PIDs.

        :param vcpu_thread_pattern: Regex capturing one PID per vcpu in the
            monitor's "cpus" info output.
        :return: The list of vcpu PIDs as integers.
        """
        cpu_info = str(self.monitor.info("cpus"))
        return [int(pid) for pid in re.findall(vcpu_thread_pattern, cpu_info)]
def get_vhost_threads(self, vhost_thread_pattern):
        """
        Return the list of vhost thread PIDs.

        :param vhost_thread_pattern: a regex to match the vhost threads;
            must contain a %s placeholder for the qemu PID.
        :type vhost_thread_pattern: string
        :return: a list of vhost threads PIDs
        :rtype: list of integer
        """
        pattern = vhost_thread_pattern % self.get_pid()
        ps_output = utils.system_output("ps aux")
        return [int(tid) for tid in re.findall(pattern, ps_output)]
def get_shared_meminfo(self):
        """
        Return the VM's shared memory usage.

        Reads the third field of /proc/<pid>/statm (shared pages) and
        converts pages to MB -- assumes 4 KB pages.

        :return: Shared memory used by VM (MB), or None if the VM is dead.
        """
        if self.is_dead():
            logging.error("Could not get shared memory info from dead VM.")
            return None
        statm_path = "/proc/%d/statm" % self.get_pid()
        shared_pages = int(open(statm_path).read().split()[2])
        # statm reports pages; translate to MB
        return shared_pages * 4.0 / 1024
def get_spice_var(self, spice_var):
        """
        Return the string value of a spice option, or None when unset.

        :param spice_var: spice related variable name ('spice_port', ...)
        """
        return self.spice_options.get(spice_var)
@error.context_aware
def hotplug_vcpu(self, cpu_id=None, plug_command=""):
        """
        Hotplug a vcpu into the running VM.

        If cpu_id is not given, the next free id (the current vcpu thread
        count) is used. If plug_command is given it is used as the monitor
        command (with the cpu id %-formatted in); otherwise the command is
        chosen from the monitor protocol: "cpu_set ... online" for human,
        "cpu-add id=..." for qmp.

        :param cpu_id: the cpu_id to hotplug, or None for the next free one.
        :param plug_command: optional monitor command template with one
                %s placeholder for the cpu id.
        :return: (True, plugged cpu id) on success; (False, error text or
                monitor output) on failure.
        :raise error.TestNAError: when the monitor does not support the
                hotplug command.
        """
        vcpu_threads_count = len(self.vcpu_threads)
        plug_cpu_id = cpu_id
        if plug_cpu_id is None:
            # Default to the lowest unused id (ids are sequential)
            plug_cpu_id = vcpu_threads_count
        if plug_command:
            vcpu_add_cmd = plug_command % plug_cpu_id
        else:
            if self.monitor.protocol == 'human':
                vcpu_add_cmd = "cpu_set %s online" % plug_cpu_id
            elif self.monitor.protocol == 'qmp':
                vcpu_add_cmd = "cpu-add id=%s" % plug_cpu_id
        try:
            # Only the command word (first token) is checked for support
            self.monitor.verify_supported_cmd(vcpu_add_cmd.split()[0])
        except qemu_monitor.MonitorNotSupportedCmdError:
            raise error.TestNAError("%s monitor not support cmd '%s'" %
                                    (self.monitor.protocol, vcpu_add_cmd))
        try:
            cmd_output = self.monitor.send_args_cmd(vcpu_add_cmd)
        except qemu_monitor.QMPCmdError, e:
            return (False, str(e))
        vcpu_thread_pattern = self.params.get("vcpu_thread_pattern",
                                              r"thread_id.?[:|=]\s*(\d+)")
        self.vcpu_threads = self.get_vcpu_pids(vcpu_thread_pattern)
        # Success is judged by the vcpu thread count growing by exactly one
        if len(self.vcpu_threads) == vcpu_threads_count + 1:
            return(True, plug_cpu_id)
        else:
            return(False, cmd_output)
@error.context_aware
def hotplug_nic(self, **params):
        """
        Hot-add a NIC: convenience wrapper for add_nic() plus activation of
        its netdev and device.

        :return: dict-like object containing nic's details
        """
        new_nic = self.add_nic(**params)
        name = new_nic["nic_name"]
        self.activate_netdev(name)
        self.activate_nic(name)
        return self.virtnet[name]
@error.context_aware
def hotunplug_nic(self, nic_index_or_name):
        """
        Hot-remove a NIC: convenience wrapper that deactivates the device
        and netdev, then deletes the nic definition.
        """
        # Resolve a possible index to the nic's name before removal
        name = self.virtnet[nic_index_or_name].nic_name
        self.deactivate_nic(name)
        self.deactivate_netdev(name)
        self.del_nic(name)
@error.context_aware
def add_netdev(self, **params):
        """
        Define (but do not activate) a netdev device for an existing nic.

        :param **params: NIC info. dict; must contain 'nic_name'.
        :return: netdev_id assigned to the nic
        :raise virt_vm.VMUnknownNetTypeError: on an unsupported nettype
        """
        nic_name = params['nic_name']
        nic = self.virtnet[nic_name]
        nic_index = self.virtnet.nic_name_index(nic_name)
        nic.set_if_none('netdev_id', utils_misc.generate_random_id())
        nic.set_if_none('ifname', self.virtnet.generate_ifname(nic_index))
        nic.set_if_none('nettype', 'bridge')
        if nic.nettype in ['bridge', 'macvtap']:  # implies tap
            # destination is required, hard-code reasonable default if unset
            # nic.set_if_none('netdst', 'virbr0')
            # tapfd allocated/set in activate because requires system resources
            nic.set_if_none('queues', '1')
            ids = [utils_misc.generate_random_id()
                   for _ in range(int(nic.queues))]
            nic.set_if_none('tapfd_ids', ids)
        elif nic.nettype == 'user':
            pass  # nothing to do
        else:  # unsupported nettype
            raise virt_vm.VMUnknownNetTypeError(self.name, nic_name,
                                                nic.nettype)
        return nic.netdev_id
@error.context_aware
def del_netdev(self, nic_index_or_name):
        """
        Remove netdev info. from nic on VM; does not deactivate.

        :param nic_index_or_name: name or index number for existing NIC
        """
        nic = self.virtnet[nic_index_or_name]
        error.context("removing netdev info from nic %s from vm %s" % (
                      nic, self.name))
        for attr in ('netdev_id', 'ifname', 'queues',
                     'tapfds', 'tapfd_ids', 'vectors'):
            if nic.has_key(attr):
                del nic[attr]
def add_nic(self, **params):
        """
        Add new or setup existing NIC, optionally creating netdev if None

        :param **params: Parameters to set
        :param nic_name: Name for existing or new device
        :param nic_model: Model name to emulate (required key in params)
        :param netdev_id: Existing qemu net device ID name, None to create new
        :param mac: Optional MAC address, None to randomly generate.
        :return: the nic object (dict-like) with defaults filled in
        """
        # returns existing or new nic object
        nic = super(VM, self).add_nic(**params)
        nic_index = self.virtnet.nic_name_index(nic.nic_name)
        nic.set_if_none('vlan', str(nic_index))
        nic.set_if_none('device_id', utils_misc.generate_random_id())
        nic.set_if_none('queues', '1')
        if not nic.has_key('netdev_id'):
            # virtnet items are lists that act like dicts
            nic.netdev_id = self.add_netdev(**dict(nic))
        nic.set_if_none('nic_model', params['nic_model'])
        nic.set_if_none('queues', params.get('queues', '1'))
        if params.get("enable_msix_vectors") == "yes":
            # 2 * queues + 1 vectors (presumably rx/tx per queue plus one
            # extra -- confirm against qemu docs)
            nic.set_if_none('vectors', 2 * int(nic.queues) + 1)
        return nic
@error.context_aware
def activate_netdev(self, nic_index_or_name):
        """
        Activate an inactive host-side networking device.

        For bridge (tap) nics this opens /dev/net/tun, hands the tap fds to
        qemu via the monitor getfd command, then issues netdev_add, brings
        the interface up and attaches it to the bridge; finally it verifies
        the netdev shows up in "info network".

        :raise:: IndexError if nic doesn't exist
        :raise:: VMUnknownNetTypeError: if nettype is unset/unsupported
        :raise:: IOError if TAP device node cannot be opened
        :raise:: VMAddNetDevError: if operation failed
        """
        tapfds = []
        nic = self.virtnet[nic_index_or_name]
        error.context("Activating netdev for %s based on %s" %
                      (self.name, nic))
        msg_sfx = ("nic %s on vm %s with attach_cmd " %
                   (self.virtnet[nic_index_or_name], self.name))
        attach_cmd = "netdev_add"
        if nic.nettype == 'bridge':  # implies tap
            error.context("Opening tap device node for %s " % nic.ifname,
                          logging.debug)
            python_tapfds = utils_net.open_tap("/dev/net/tun",
                                               nic.ifname,
                                               queues=nic.queues,
                                               vnet_hdr=False)
            for i in range(int(nic.queues)):
                error.context("Assigning tap %s to qemu by fd" %
                              nic.tapfd_ids[i], logging.info)
                # Diff qemu's open fds on /dev/net/tun before and after the
                # getfd call to discover which fd number qemu received
                lsof_cmd = "lsof -a -p %s -Ff -- /dev/net/tun" % self.get_pid()
                openfd_list = utils.system_output(lsof_cmd).splitlines()
                self.monitor.getfd(int(python_tapfds.split(':')[i]),
                                   nic.tapfd_ids[i])
                n_openfd_list = utils.system_output(lsof_cmd).splitlines()
                new_qemu_fd = list(set(n_openfd_list) - set(openfd_list))
                if not new_qemu_fd:
                    err_msg = "Can't get the tap fd in qemu process!"
                    raise virt_vm.VMAddNetDevError(err_msg)
                # lsof prefixes fd numbers with 'f'; strip it
                tapfds.append(new_qemu_fd[0].lstrip("f"))
            nic.set_if_none("tapfds", ":".join(tapfds))
            if not self.devices:
                err_msg = "Can't add nic for VM which is not running."
                raise virt_vm.VMAddNetDevError(err_msg)
            # Multiqueue needs the plural fds= form when qemu supports it
            if ((int(nic.queues)) > 1 and
                ',fds=' in self.devices.get_help_text()):
                attach_cmd += " type=tap,id=%s,fds=%s" % (nic.device_id,
                                                          nic.tapfds)
            else:
                attach_cmd += " type=tap,id=%s,fd=%s" % (nic.device_id,
                                                         nic.tapfds)
            error.context("Raising interface for " + msg_sfx + attach_cmd,
                          logging.debug)
            utils_net.bring_up_ifname(nic.ifname)
            error.context("Raising bridge for " + msg_sfx + attach_cmd,
                          logging.debug)
            # assume this will puke if netdst unset
            if not nic.netdst is None:
                utils_net.add_to_bridge(nic.ifname, nic.netdst)
        elif nic.nettype == 'macvtap':
            pass
        elif nic.nettype == 'user':
            attach_cmd += " user,id=%s" % nic.device_id
        elif nic.nettype == 'none':
            attach_cmd += " none"
        else:  # unsupported nettype
            raise virt_vm.VMUnknownNetTypeError(self.name, nic_index_or_name,
                                                nic.nettype)
        if nic.has_key('netdev_extra_params'):
            attach_cmd += nic.netdev_extra_params
        error.context("Hotplugging " + msg_sfx + attach_cmd, logging.debug)
        if self.monitor.protocol == 'qmp':
            self.monitor.send_args_cmd(attach_cmd)
        else:
            self.monitor.send_args_cmd(attach_cmd, convert=False)
        network_info = self.monitor.info("network")
        if nic.device_id not in network_info:
            # Don't leave resources dangling
            self.deactivate_netdev(nic_index_or_name)
            raise virt_vm.VMAddNetDevError(("Failed to add netdev: %s for " %
                                            nic.device_id) + msg_sfx +
                                           attach_cmd)
@error.context_aware
def activate_nic(self, nic_index_or_name):
        """
        Hot-add a previously defined NIC device via the monitor and verify
        that it appears in the qdev tree.

        :param nic_index_or_name: name or index number for existing NIC
        :raise virt_vm.VMAddNicError: when the device does not show up in
            "info qtree" after device_add.
        """
        error.context("Retrieving info for NIC %s on VM %s" % (
            nic_index_or_name, self.name))
        nic = self.virtnet[nic_index_or_name]
        cmd_parts = ["device_add"]
        if nic.has_key('nic_model'):
            cmd_parts.append(' driver=%s' % nic.nic_model)
        cmd_parts.append(",netdev=%s" % nic.device_id)
        if nic.has_key('mac'):
            cmd_parts.append(",mac=%s" % nic.mac)
        cmd_parts.append(",id=%s" % nic.nic_name)
        if nic['nic_model'] == 'virtio-net-pci':
            if int(nic['queues']) > 1:
                cmd_parts.append(",mq=on")
            if nic.has_key('vectors'):
                cmd_parts.append(",vectors=%s" % nic.vectors)
        cmd_parts.append(nic.get('nic_extra_params', ''))
        if nic.has_key('romfile'):
            cmd_parts.append(",romfile=%s" % nic.romfile)
        device_add_cmd = "".join(cmd_parts)
        error.context("Activating nic on VM %s with monitor command %s" % (
            self.name, device_add_cmd))
        if self.monitor.protocol == 'qmp':
            self.monitor.send_args_cmd(device_add_cmd)
        else:
            self.monitor.send_args_cmd(device_add_cmd, convert=False)
        error.context("Verifying nic %s shows in qtree" % nic.nic_name)
        qtree = self.monitor.info("qtree")
        if nic.nic_name not in qtree:
            logging.error(qtree)
            raise virt_vm.VMAddNicError("Device %s was not plugged into qdev"
                                        "tree" % nic.nic_name)
@error.context_aware
def deactivate_nic(self, nic_index_or_name, wait=20):
        """
        Reverses what activate_nic did: hot-remove the NIC device.

        :param nic_index_or_name: name or index number for existing NIC
        :param wait: Time test will wait for the guest to unplug the device;
            a falsy value skips the wait.
        :raise virt_vm.VMDelNicError: when the guest does not release the
            device within ``wait`` seconds.
        """
        nic = self.virtnet[nic_index_or_name]
        error.context("Removing nic %s from VM %s" % (nic_index_or_name,
                                                      self.name))
        del_cmd = "device_del id=%s" % (nic.nic_name)
        if self.monitor.protocol == 'qmp':
            self.monitor.send_args_cmd(del_cmd)
        else:
            self.monitor.send_args_cmd(del_cmd, convert=True)
        if wait:
            logging.info("waiting for the guest to finish the unplug")
            def _unplugged():
                return nic.nic_name not in self.monitor.info("qtree")
            if not utils_misc.wait_for(_unplugged, wait, 5, 1):
                raise virt_vm.VMDelNicError("Device is not unplugged by "
                                            "guest, please check whether the "
                                            "hotplug module was loaded in "
                                            "guest")
@error.context_aware
def deactivate_netdev(self, nic_index_or_name):
        """
        Reverses what activate_netdev() did: remove the netdev from qemu.

        :param nic_index_or_name: name or index number for existing NIC
        :raise virt_vm.VMDelNetDevError: when the netdev is still listed
            after netdev_del.
        """
        # FIXME: Need to down interface & remove from bridge????
        netdev_id = self.virtnet[nic_index_or_name].device_id
        error.context("removing netdev id %s from vm %s" %
                      (netdev_id, self.name))
        del_cmd = "netdev_del id=%s" % netdev_id
        if self.monitor.protocol == 'qmp':
            self.monitor.send_args_cmd(del_cmd)
        else:
            self.monitor.send_args_cmd(del_cmd, convert=True)
        if netdev_id in self.monitor.info("network"):
            raise virt_vm.VMDelNetDevError("Fail to remove netdev %s" %
                                           netdev_id)
@error.context_aware
def del_nic(self, nic_index_or_name):
        """
        Undefine nic parameters; reverses what add_nic did.

        :param nic_index_or_name: name or index number for existing NIC
        """
        super(VM, self).del_nic(nic_index_or_name)
@error.context_aware
def send_fd(self, fd, fd_name="migfd"):
        """
        Send a file descriptor over the monitor's unix socket to the VM.

        :param fd: File descriptor.
        :param fd_name: File descriptor identificator in VM.
        """
        error.context("Send fd %d like %s to VM %s" % (fd, fd_name, self.name))
        logging.debug("Send file descriptor %s to source VM.", fd_name)
        protocol = self.monitor.protocol
        if protocol == 'human':
            self.monitor.cmd("getfd %s" % (fd_name), fd=fd)
        elif protocol == 'qmp':
            self.monitor.cmd("getfd", args={'fdname': fd_name}, fd=fd)
        error.context()
def mig_finished(self):
        """
        Return True when migration is no longer active.

        For seamless spice migration the spice client must also report
        "migrated: true" before migration counts as finished.
        """
        spice_done = True
        if (self.params["display"] == "spice" and
                self.get_spice_var("spice_seamless_migration") == "on"):
            spice_info = self.monitor.info("spice")
            if isinstance(spice_info, str):
                spice_done = "migrated: true" in spice_info
            else:
                spice_done = spice_info.get("migrated") == "true"
        mig_info = self.monitor.info("migrate")
        if isinstance(mig_info, str):
            return spice_done and "status: active" not in mig_info
        return spice_done and mig_info.get("status") != "active"
def mig_succeeded(self):
        """
        Return True if migration finished with status "completed".
        """
        info = self.monitor.info("migrate")
        if isinstance(info, str):
            return "status: completed" in info
        return info.get("status") == "completed"
def mig_failed(self):
        """
        Return True if migration finished with status "failed".
        """
        info = self.monitor.info("migrate")
        if isinstance(info, str):
            return "status: failed" in info
        return info.get("status") == "failed"
def mig_cancelled(self):
        """
        Return True if the migration was cancelled.

        :raise virt_vm.VMMigrateCancelError: if migration actually completed.
        :raise virt_vm.VMMigrateFailedError: if migration failed.
        """
        if self.mig_succeeded():
            raise virt_vm.VMMigrateCancelError(
                "Migration completed successfully")
        elif self.mig_failed():
            raise virt_vm.VMMigrateFailedError("Migration failed")
        info = self.monitor.info("migrate")
        if isinstance(info, str):
            # Both spellings appear depending on the qemu version
            return ("Migration status: cancelled" in info or
                    "Migration status: canceled" in info)
        return info.get("status") in ("cancelled", "canceled")
def wait_for_migration(self, timeout):
        """
        Block until migration finishes.

        :param timeout: Time in seconds to wait.
        :raise virt_vm.VMMigrateTimeoutError: if migration does not finish
            within the timeout.
        """
        finished = utils_misc.wait_for(self.mig_finished, timeout, 2, 2,
                                       "Waiting for migration to complete")
        if not finished:
            raise virt_vm.VMMigrateTimeoutError("Timeout expired while waiting"
                                                " for migration to finish")
@error.context_aware
def migrate(self, timeout=virt_vm.BaseVM.MIGRATE_TIMEOUT, protocol="tcp",
            cancel_delay=None, offline=False, stable_check=False,
            clean=True, save_path="/tmp", dest_host="localhost",
            remote_port=None, not_wait_for_migration=False,
            fd_src=None, fd_dst=None, migration_exec_cmd_src=None,
            migration_exec_cmd_dst=None):
        """
        Migrate the VM.

        If the migration is local, the VM object's state is switched with that
        of the destination VM. Otherwise, the state is switched with that of
        a dead VM (returned by self.clone()).

        :param timeout: Time to wait for migration to complete.
        :param protocol: Migration protocol (as defined in MIGRATION_PROTOS)
        :param cancel_delay: If provided, specifies a time duration after which
                migration will be canceled. Used for testing migrate_cancel.
        :param offline: If True, pause the source VM before migration.
        :param stable_check: If True, compare the VM's state after migration to
                its state before migration and raise an exception if they
                differ.
        :param clean: If True, delete the saved state files (relevant only if
                stable_check is also True).
        :param save_path: The path for state files.
        :param dest_host: Destination host (defaults to 'localhost').
        :param remote_port: Port to use for remote migration.
        :param not_wait_for_migration: If True migration starts but does not
                wait till the end of migration.
        :param fd_src: File descriptor for migration to which the source
                VM writes data. Descriptor is closed during the migration.
        :param fd_dst: File descriptor for migration from which the
                destination VM reads data.
        :param migration_exec_cmd_src: Command to embed in '-incoming "exec: "'
                (e.g. 'exec:gzip -c > filename') if migration_mode is 'exec'
                default to listening on a random TCP port
        :param migration_exec_cmd_dst: Command to embed in '-incoming "exec: "'
                (e.g. 'gzip -c -d filename') if migration_mode is 'exec'
                default to listening on a random TCP port
        """
        if protocol not in self.MIGRATION_PROTOS:
            raise virt_vm.VMMigrateProtoUnknownError(protocol)
        error.base_context("migrating '%s'" % self.name)
        local = dest_host == "localhost"
        mig_fd_name = None
        if protocol == "fd":
            # Check if descriptors aren't None for local migration.
            if local and (fd_dst is None or fd_src is None):
                (fd_dst, fd_src) = os.pipe()
            mig_fd_name = "migfd_%d_%d" % (fd_src, time.time())
            self.send_fd(fd_src, mig_fd_name)
            os.close(fd_src)
        clone = self.clone()
        # For local gzip-exec migration the destination is created later,
        # after the source finished writing the image (see below)
        if (local and not (migration_exec_cmd_src
                           and "gzip" in migration_exec_cmd_src)):
            error.context("creating destination VM")
            if stable_check:
                # Pause the dest vm after creation
                extra_params = clone.params.get("extra_params", "") + " -S"
                clone.params["extra_params"] = extra_params
            clone.create(migration_mode=protocol, mac_source=self,
                         migration_fd=fd_dst,
                         migration_exec_cmd=migration_exec_cmd_dst)
            if fd_dst:
                os.close(fd_dst)
            error.context()
        try:
            # For seamless spice migration, tell the spice client where the
            # destination VM listens before starting the migration
            if (self.params["display"] == "spice" and local and
                not (protocol == "exec" and
                     (migration_exec_cmd_src and "gzip" in migration_exec_cmd_src))):
                host_ip = utils_net.get_host_ip_address(self.params)
                dest_port = clone.spice_options.get('spice_port', '')
                if self.params.get("spice_ssl") == "yes":
                    dest_tls_port = clone.spice_options.get("spice_tls_port",
                                                            "")
                    cert_s = clone.spice_options.get("spice_x509_server_subj",
                                                     "")
                    cert_subj = "%s" % cert_s[1:]
                    cert_subj += host_ip
                    cert_subj = "\"%s\"" % cert_subj
                else:
                    dest_tls_port = ""
                    cert_subj = ""
                logging.debug("Informing migration to spice client")
                # NOTE(review): this local name shadows the 'commands' module
                # used elsewhere in this class -- harmless here, but confusing
                commands = ["__com.redhat_spice_migrate_info",
                            "spice_migrate_info",
                            "client_migrate_info"]
                for command in commands:
                    try:
                        self.monitor.verify_supported_cmd(command)
                    except qemu_monitor.MonitorNotSupportedCmdError:
                        continue
                    # spice_migrate_info requires host_ip, dest_port
                    # client_migrate_info also requires protocol
                    cmdline = "%s hostname=%s" % (command, host_ip)
                    if command == "client_migrate_info":
                        cmdline += " ,protocol=%s" % self.params['display']
                    if dest_port:
                        cmdline += ",port=%s" % dest_port
                    if dest_tls_port:
                        cmdline += ",tls-port=%s" % dest_tls_port
                    if cert_subj:
                        cmdline += ",cert-subject=%s" % cert_subj
                    break
                self.monitor.send_args_cmd(cmdline)
            # Build the migration URI for the chosen protocol
            if protocol in ["tcp", "rdma", "x-rdma"]:
                if local:
                    uri = protocol + ":localhost:%d" % clone.migration_port
                else:
                    uri = protocol + ":%s:%d" % (dest_host, remote_port)
            elif protocol == "unix":
                uri = "unix:%s" % clone.migration_file
            elif protocol == "exec":
                if local:
                    if not migration_exec_cmd_src:
                        uri = '"exec:nc localhost %s"' % clone.migration_port
                    else:
                        uri = '"exec:%s"' % (migration_exec_cmd_src)
                else:
                    uri = '"exec:%s"' % (migration_exec_cmd_src)
            elif protocol == "fd":
                uri = "fd:%s" % mig_fd_name
            if offline is True:
                self.monitor.cmd("stop")
            logging.info("Migrating to %s", uri)
            self.monitor.migrate(uri)
            if not_wait_for_migration:
                return clone
            if cancel_delay:
                time.sleep(cancel_delay)
                self.monitor.cmd("migrate_cancel")
                if not utils_misc.wait_for(self.mig_cancelled, 60, 2, 2,
                                           "Waiting for migration "
                                           "cancellation"):
                    raise virt_vm.VMMigrateCancelError(
                        "Cannot cancel migration")
                return
            self.wait_for_migration(timeout)
            # Deferred destination creation for local gzip-exec migration
            if (local and (migration_exec_cmd_src
                           and "gzip" in migration_exec_cmd_src)):
                error.context("creating destination VM")
                if stable_check:
                    # Pause the dest vm after creation
                    extra_params = clone.params.get("extra_params", "") + " -S"
                    clone.params["extra_params"] = extra_params
                clone.create(migration_mode=protocol, mac_source=self,
                             migration_fd=fd_dst,
                             migration_exec_cmd=migration_exec_cmd_dst)
            self.verify_alive()
            # Report migration status
            if self.mig_succeeded():
                logging.info("Migration completed successfully")
            elif self.mig_failed():
                raise virt_vm.VMMigrateFailedError("Migration failed")
            else:
                raise virt_vm.VMMigrateFailedError("Migration ended with "
                                                   "unknown status")
            # Switch self <-> clone
            temp = self.clone(copy_state=True)
            self.__dict__ = clone.__dict__
            clone = temp
            # From now on, clone is the source VM that will soon be destroyed
            # and self is the destination VM that will remain alive. If this
            # is remote migration, self is a dead VM object.
            error.context("after migration")
            if local:
                time.sleep(1)
                self.verify_kernel_crash()
                self.verify_alive()
            if local and stable_check:
                try:
                    save1 = os.path.join(save_path, "src-" + clone.instance)
                    save2 = os.path.join(save_path, "dst-" + self.instance)
                    clone.save_to_file(save1)
                    self.save_to_file(save2)
                    # Fail if we see deltas
                    md5_save1 = utils.hash_file(save1)
                    md5_save2 = utils.hash_file(save2)
                    if md5_save1 != md5_save2:
                        raise virt_vm.VMMigrateStateMismatchError()
                finally:
                    if clean:
                        if os.path.isfile(save1):
                            os.remove(save1)
                        if os.path.isfile(save2):
                            os.remove(save2)
        finally:
            # If we're doing remote migration and it's completed successfully,
            # self points to a dead VM object
            if not not_wait_for_migration:
                if self.is_alive():
                    self.monitor.cmd("cont")
                clone.destroy(gracefully=False)
@error.context_aware
def reboot(self, session=None, method="shell", nic_index=0,
           timeout=virt_vm.BaseVM.REBOOT_TIMEOUT):
        """
        Reboot the VM and wait for it to come back up by trying to log in until
        timeout expires.

        :param session: A shell session object or None.
        :param method: Reboot method. Can be "shell" (send a shell reboot
                command) or "system_reset" (send a system_reset monitor
                command).
        :param nic_index: Index of NIC to access in the VM, when logging in
                after rebooting.
        :param timeout: Time to wait for login to succeed (after rebooting).
        :raise virt_vm.VMRebootError: when the guest refuses to go down, the
                RESET QMP event is not seen, or the method is unknown.
        :return: A new shell session object.
        """
        error.base_context("rebooting '%s'" % self.name, logging.info)
        error.context("before reboot")
        error.context()
        if method == "shell":
            # Reboot from inside the guest via its configured reboot_command
            session = session or self.login()
            session.sendline(self.params.get("reboot_command"))
            error.context("waiting for guest to go down", logging.info)
            # Half the timeout budget goes to the shutdown phase; the rest
            # is used by wait_for_login below
            if not utils_misc.wait_for(
                lambda:
                not session.is_responsive(
                    timeout=self.CLOSE_SESSION_TIMEOUT),
                timeout / 2, 0, 1):
                raise virt_vm.VMRebootError("Guest refuses to go down")
            session.close()
        elif method == "system_reset":
            # Clear the event list of all QMP monitors
            qmp_monitors = [m for m in self.monitors if m.protocol == "qmp"]
            for m in qmp_monitors:
                m.clear_events()
            # Send a system_reset monitor command
            self.monitor.cmd("system_reset")
            # Look for RESET QMP events
            time.sleep(1)
            for m in qmp_monitors:
                if m.get_event("RESET"):
                    logging.info("RESET QMP event received")
                else:
                    raise virt_vm.VMRebootError("RESET QMP event not received "
                                                "after system_reset "
                                                "(monitor '%s')" % m.name)
        else:
            raise virt_vm.VMRebootError("Unknown reboot method: %s" % method)
        if self.params.get("mac_changeable") == "yes":
            utils_net.update_mac_ip_address(self, self.params)
        error.context("logging in after reboot", logging.info)
        return self.wait_for_login(nic_index, timeout=timeout)
def send_key(self, keystr):
"""
Send a key event to the VM.
:param keystr: A key event string (e.g. "ctrl-alt-delete")
"""
# For compatibility with versions of QEMU that do not recognize all
# key names: replace keyname with the hex value from the dict, which
# QEMU will definitely accept
key_mapping = {"semicolon": "0x27",
"comma": "0x33",
"dot": "0x34",
"slash": "0x35"}
for key, value in key_mapping.items():
keystr = keystr.replace(key, value)
self.monitor.sendkey(keystr)
time.sleep(0.2)
    # should this really be expected from VMs of all hypervisor types?
    def screendump(self, filename, debug=True):
        """
        Request a screendump of the VM console through the monitor.
        :param filename: Path the screendump image is written to.
        :param debug: Passed through to the monitor screendump call.
        """
        try:
            if self.monitor:
                self.monitor.screendump(filename=filename, debug=debug)
        except qemu_monitor.MonitorError, e:
            # Best effort: a failed screendump should never abort the caller.
            logging.warn(e)
    def save_to_file(self, path):
        """
        Override BaseVM save_to_file method.
        Save the (paused) VM state to *path* by migrating to an
        "exec:cat>path" destination and waiting for completion.
        :param path: File the VM state is written to.
        """
        self.verify_status('paused')  # Throws exception if not
        # Set high speed 1TB/S
        self.monitor.migrate_set_speed(str(2 << 39))
        self.monitor.migrate_set_downtime(self.MIGRATE_TIMEOUT)
        logging.debug("Saving VM %s to %s" % (self.name, path))
        # Can only check status if background migration
        self.monitor.migrate("exec:cat>%s" % path, wait=False)
        utils_misc.wait_for(
            # no monitor.migrate-status method
            lambda:
            re.search("(status.*completed)",
                      str(self.monitor.info("migrate")), re.M),
            self.MIGRATE_TIMEOUT, 2, 2,
            "Waiting for save to %s to complete" % path)
        # Restore the speed and downtime to default values
        self.monitor.migrate_set_speed(str(32 << 20))
        self.monitor.migrate_set_downtime(0.03)
        # Base class defines VM must be off after a save
        self.monitor.cmd("system_reset")
        self.verify_status('paused')  # Throws exception if not
    def restore_from_file(self, path):
        """
        Override BaseVM restore_from_file method.
        Relaunch the VM in incoming-migration mode, reading back the state
        that save_to_file() wrote to *path*.
        :param path: File the VM state is read from.
        """
        self.verify_status('paused')  # Throws exception if not
        logging.debug("Restoring VM %s from %s" % (self.name, path))
        # Rely on create() in incoming migration mode to do the 'right thing'
        self.create(name=self.name, params=self.params, root_dir=self.root_dir,
                    timeout=self.MIGRATE_TIMEOUT, migration_mode="exec",
                    migration_exec_cmd="cat " + path, mac_source=self)
        self.verify_status('running')  # Throws exception if not
    def savevm(self, tag_name):
        """
        Override BaseVM savevm method.
        Take an internal VM snapshot identified by *tag_name*.
        :param tag_name: Identifier passed to the monitor's savevm command.
        """
        self.verify_status('paused')  # Throws exception if not
        logging.debug("Saving VM %s to %s" % (self.name, tag_name))
        self.monitor.send_args_cmd("savevm id=%s" % tag_name)
        # Base class defines VM must be off after a save
        self.monitor.cmd("system_reset")
        self.verify_status('paused')  # Throws exception if not
    def loadvm(self, tag_name):
        """
        Override BaseVM loadvm method.
        Revert the VM to the internal snapshot identified by *tag_name*.
        :param tag_name: Identifier passed to the monitor's loadvm command.
        """
        self.verify_status('paused')  # Throws exception if not
        logging.debug("Loading VM %s from %s" % (self.name, tag_name))
        self.monitor.send_args_cmd("loadvm id=%s" % tag_name)
        self.verify_status('paused')  # Throws exception if not
    def pause(self):
        """
        Pause the VM operation.
        """
        # The monitor 'stop' command halts guest CPU execution.
        self.monitor.cmd("stop")
    def resume(self):
        """
        Resume the VM operation in case it's stopped.
        """
        # The monitor 'cont' command resumes guest CPU execution.
        self.monitor.cmd("cont")
    def set_link(self, netdev_name, up):
        """
        Set link up/down.
        :param netdev_name: Name of the netdev/link to operate on
        :param up: Bool value, True=set up this link, False=Set down this link
        """
        self.monitor.set_link(netdev_name, up)
def get_block_old(self, blocks_info, p_dict={}):
"""
Get specified block device from monitor's info block command.
The block device is defined by parameter in p_dict.
:param p_dict: Dictionary that contains parameters and its value used
to define specified block device.
@blocks_info: the results of monitor command 'info block'
:return: Matched block device name, None when not find any device.
"""
if isinstance(blocks_info, str):
for block in blocks_info.splitlines():
match = True
for key, value in p_dict.iteritems():
if value is True:
check_str = "%s=1" % key
elif value is False:
check_str = "%s=0" % key
else:
check_str = "%s=%s" % (key, value)
if check_str not in block:
match = False
break
if match:
return block.split(":")[0]
else:
for block in blocks_info:
match = True
for key, value in p_dict.iteritems():
if isinstance(value, bool):
check_str = "u'%s': %s" % (key, value)
else:
check_str = "u'%s': u'%s'" % (key, value)
if check_str not in str(block):
match = False
break
if match:
return block['device']
return None
def process_info_block(self, blocks_info):
"""
process the info block, so that can deal with
the new and old qemu formart.
:param blocks_info: the output of qemu command
'info block'
"""
block_list = []
block_entry = []
for block in blocks_info.splitlines():
if block:
block_entry.append(block.strip())
else:
block_list.append(' '.join(block_entry))
block_entry = []
# don't forget the last one
block_list.append(' '.join(block_entry))
return block_list
def get_block(self, p_dict={}):
"""
Get specified block device from monitor's info block command.
The block device is defined by parameter in p_dict.
:param p_dict: Dictionary that contains parameters and its value used
to define specified block device.
:return: Matched block device name, None when not find any device.
"""
blocks_info = self.monitor.info("block")
block = self.get_block_old(blocks_info, p_dict)
if block:
return block
block_list = self.process_info_block(blocks_info)
for block in block_list:
for key, value in p_dict.iteritems():
# for new qemu we just deal with key = [removable,
# file,backing_file], for other types key, we should
# fixup later
logging.info("block = %s" % block)
if key == 'removable':
if value is False:
if not 'Removable device' in block:
return block.split(":")[0]
elif value is True:
if 'Removable device' in block:
return block.split(":")[0]
# file in key means both file and backing_file
if ('file' in key) and (value in block):
return block.split(":")[0]
return None
    def check_block_locked(self, value):
        """
        Check whether specified block device is locked or not.
        Return True, if device is locked, else False.
        :param value: Parameter that can specify block device.
                      Can be any possible identification of a device,
                      Such as device name/image file name/...
        :return: True if device is locked, False if device is unlocked.
        """
        assert value, "Device identification not specified"
        blocks_info = self.monitor.info("block")
        assert value in str(blocks_info), \
            "Device %s not listed in monitor's output" % value
        if isinstance(blocks_info, str):
            # Human monitor output (old "locked=1" and new "(not) locked"
            # text formats).
            lock_str = "locked=1"
            lock_str_new = "locked"
            no_lock_str = "not locked"
            for block in blocks_info.splitlines():
                if (value in block) and (lock_str in block):
                    return True
            # deal with new qemu
            block_list = self.process_info_block(blocks_info)
            for block_new in block_list:
                if (value in block_new) and ("Removable device" in block_new):
                    # "not locked" must be tested before "locked", since the
                    # former contains the latter as a substring.
                    if no_lock_str in block_new:
                        return False
                    elif lock_str_new in block_new:
                        return True
        else:
            # QMP output: a list of dicts with a boolean 'locked' field.
            for block in blocks_info:
                if value in str(block):
                    return block['locked']
        return False
def live_snapshot(self, base_file, snapshot_file,
snapshot_format="qcow2"):
"""
Take a live disk snapshot.
:param base_file: base file name
:param snapshot_file: snapshot file name
:param snapshot_format: snapshot file format
:return: File name of disk snapshot.
"""
device = self.get_block({"file": base_file})
output = self.monitor.live_snapshot(device, snapshot_file,
snapshot_format)
logging.debug(output)
device = self.get_block({"file": snapshot_file})
if device:
current_file = device
else:
current_file = None
return current_file
def block_stream(self, device, speed, base=None, correct=True):
"""
start to stream block device, aka merge snapshot;
:param device: device ID;
:param speed: limited speed, default unit B/s;
:param base: base file;
:param correct: auto correct cmd, correct by default
"""
cmd = self.params.get("block_stream_cmd", "block-stream")
return self.monitor.block_stream(device, speed, base,
cmd, correct=correct)
    def block_mirror(self, device, target, speed, sync,
                     format, mode="absolute-paths", correct=True):
        """
        Mirror block device to target file;
        :param device: device ID
        :param target: destination image file name;
        :param speed: max limited speed, default unit is B/s;
        :param sync: what parts of the disk image should be copied to the
                     destination;
        :param mode: new image open mode
        :param format: target image format
        :param correct: auto correct cmd, correct by default
        """
        # NOTE(review): the 'format' parameter shadows the builtin format();
        # it is kept as-is because it is part of the public keyword interface.
        cmd = self.params.get("block_mirror_cmd", "drive-mirror")
        return self.monitor.block_mirror(device, target, speed, sync,
                                         format, mode, cmd, correct=correct)
def block_reopen(self, device, new_image, format="qcow2", correct=True):
"""
Reopen a new image, no need to do this step in rhel7 host
:param device: device ID
:param new_image: new image filename
:param format: new image format
:param correct: auto correct cmd, correct by default
"""
cmd = self.params.get("block_reopen_cmd", "block-job-complete")
return self.monitor.block_reopen(device, new_image,
format, cmd, correct=correct)
def cancel_block_job(self, device, correct=True):
"""
cancel active job on the image_file
:param device: device ID
:param correct: auto correct cmd, correct by default
"""
cmd = self.params.get("block_job_cancel_cmd", "block-job-cancel")
return self.monitor.cancel_block_job(device, cmd, correct=correct)
def set_job_speed(self, device, speed="0", correct=True):
"""
set max speed of block job;
:param device: device ID
:param speed: max speed of block job
:param correct: auto correct cmd, correct by default
"""
cmd = self.params.get("set_block_job_speed", "block-job-set-speed")
return self.monitor.set_block_job_speed(device, speed,
cmd, correct=correct)
    def get_job_status(self, device):
        """
        get block job info;
        :param device: device ID
        :return: the monitor's block job query result for the device
        """
        return self.monitor.query_block_job(device)
# --- end of file: virttest/qemu_vm.py (repo spcui/virt-test, Python, GPL-2.0, 146,685 bytes) ---
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# Copyright (C) 2009-2010 Nicolas P. Rougier
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
'''
The atb module provides bindings for AntTweakBar which is a small and
easy-to-use C/C++ library that allows programmers to quickly add a light and
intuitive graphical user interface into graphic applications based on OpenGL,
DirectX 9 or DirectX 10 to interactively tweak their parameters on-screen.
'''
import ctypes
from raw import *
def check_error(status, error=0):
    '''
    Pass *status* through unchanged unless it equals the AntTweakBar
    error value, in which case the last AntTweakBar error is raised.
    '''
    if status != error:
        return status
    raise Exception(TwGetLastError())
def enum(name, values):
    '''
    Define a new AntTweakBar enum type.
    :param name: name of the new enum type
    :param values: dict mapping labels (str) to their integer values
    :return: the AntTweakBar type handle for the new enum
    '''
    E = (TwEnumVal*len(values))()
    # .items() instead of the py2-only .iteritems() keeps this module
    # importable on both Python 2 and Python 3.
    for i,(label,value) in enumerate(values.items()):
        E[i].Value, E[i].Label = value, label
    return check_error(TwDefineEnum(name, E, len(values)))
def init():
    '''Initialize AntTweakBar for the OpenGL graphics API (wraps TwInit).'''
    check_error(TwInit(TW_OPENGL, 0))
def _dict_to_defs(args):
'''
Converts a dictionary like {a:'b', 1:2} to the string "a=b 1=2" suitable
for passing to define method. Automatic type conversion is done as follows:
- if the value is bool result is simply the name of the string eg
{'closed':True} -> "closed"
- if the value is a tuple the items are converted to strings and joined
by spaces, eg {'size':(10, 20)} -> "size='10 20'"
'''
r = []
for k, v in args.iteritems():
if type(v) is bool: v = ""
elif type(v) is tuple: v = "='%s'" % " ".join((str(i) for i in v))
else: v = "='%s'" % str(v)
r.append(k+v)
return " ".join(r)
def draw():
    '''Draw all visible tweak bars (wraps TwDraw).'''
    check_error(TwDraw())
def terminate():
    '''Uninitialize AntTweakBar and free its resources (wraps TwTerminate).'''
    check_error(TwTerminate())
class Bar(object):
    '''
    Bar is an internal structure used to store tweak bar attributes and
    states.
    '''
    def __init__(self, name=None, **defs):
        '''
        Create a new bar.
        Arguments:
        ----------
        name : str
            Name of the new bar.
        Keyword arguments:
        ------------------
        label : str
            Changes the label of a bar, that is the title displayed on top of a
            bar. By default, the label is the name used when the bar was
            created.
        help : str
            Defines the help message associated to a bar. This message will be
            displayed inside the Help bar automatically created to help the
            user.
            You can also define a global help message. It will be displayed at
            the beginning of the Help bar. To define it, use the GLOBAL keyword
            instead of the bar name.
        color : (int,int,int)
            Changes the color of a bar to (red,green,blue).
            red, green and blue are integer values between 0 and 255 that
            define the red, green and blue color channels. See also the alpha
            and text parameters to change bar visual aspect.
        alpha : int
            Changes the bar opacity.
            Bar opacity can vary from 0 for fully transparent to 255 for fully
            opaque. See also the color and text parameters to change bar visual
            aspect.
        text : str
            Changes text color to 'dark' or 'light'.
            Depending on your application background color and on bar color and
            alpha, bar text might be more readable if it is dark or light. This
            parameter allows to switch between the two modes. See also the
            color and alpha parameters to change bar visual aspect.
        position : (int,int)
            Move a bar to a new position (x,y).
            x and y are positive integer values that represent the new position
            of the bar in pixels. (x=0, y=0) is upper-left corner of the
            application window.
        size : (int,int)
            Change the bar size to (sx,sy).
            sx and sy are positive integer values that represent the new size
            of the bar in pixels.
        valueswidth : int
            Change the width 'w' of the bar right column used to display numerical
            values.
            w is a positive integer that represents width in pixels.
        refresh : float
            Change the refresh rate 'r' of the bar.
            Values displayed by a bar are automatically updated to reflect
            changes of their associated variables. r is a real value
            corresponding to the number of seconds between two updates.
        fontsize : int
            Change the size 's' of the font used by the bars.
            s is 1 for small font, 2 for medium font, or 3 for large font. Note
            that all bars share the same font, so this change is applied to all
            bars.
        visible : bool
            Show or hide a tweak bar.
        iconified : bool
            Iconify or deiconify a tweak bar.
        iconpos : str
            Changes the place where icons of iconified bars are displayed.
            p is one of the following values:
            - 'bottomleft' or 'bl' for bottom-left corner of the window (default).
            - 'bottomright' or 'br' for bottom-right corner of the window.
            - 'topleft' or 'tl' for top-left corner of the window.
            - 'topright' or 'tr' for top-right corner of the window.
            Note that this parameter is applied to all bar icons.
        iconalign : str
            Changes the alignment of icons of iconified bars. It can be
            'vertical' (the default), or 'horizontal'.
            Note that this parameter is applied to all bar icons.
        iconmargin : (int,int)
            Add a margin (x,y) between borders of the window and icons of
            iconified bars. x and y are the number of pixels between window
            borders and icons in the x and y directions respectively.
            Note that this parameter is applied to all bar icons.
        iconifiable : bool
            Allow a bar to be iconified or not by the user.
        movable : bool
            Allow a bar to be moved or not by the user.
        resizable : bool
            Allow a bar to be resized or not by the user.
        fontresizable : bool
            Allow bar fonts to be resized or not by the user.
            Note that this parameter is applied to all bars.
        alwaystop : bool
            Set a bar to be always on top of the others.
        alwaysbottom : bool
            Set a bar to be always behind the others.
        '''
        if not name:
            # NOTE(review): every anonymous bar receives the same "Unnamed"
            # name; AntTweakBar identifies bars by name, so two anonymous
            # bars may collide -- confirm whether that is intended.
            name = "Unnamed"
        self._name = name
        self._bar = TwNewBar(name)
        if defs:
            self.define(_dict_to_defs(defs))
        # Keep references to ctypes callback wrappers, so they are not
        # garbage-collected while AntTweakBar still holds their pointers.
        self._c_callbacks = []
    # -- Attribute accessors -------------------------------------------------
    # Each property below reads/writes the corresponding AntTweakBar bar
    # parameter through TwGetParam/TwSetParam on this bar's handle.
    def _get_name(self):
        return self._name
    name = property(_get_name,
                    doc='''Name of the bar''')
    def _set_label(self, label):
        c = ctypes.c_char_p(label)
        TwSetParam(self._bar, "", "label", PARAM_CSTRING, 1, c)
    def _get_label(self):
        c = ctypes.create_string_buffer(4096)
        TwGetParam(self._bar, "", "label", PARAM_CSTRING, 4095, c)
        return c.value
    label = property(_get_label, _set_label,
                     doc='''Bar label.
    Changes the label of a bar, that is the title displayed on top of a bar.
    By default, the label is the name used when the bar was created.
    :type: str
    ''')
    def _set_alpha(self, alpha):
        c = ctypes.c_int(alpha)
        TwSetParam(self._bar, "", "alpha", PARAM_INT32, 1, ctypes.byref(c))
    def _get_alpha(self):
        c = ctypes.c_int(0)
        TwGetParam(self._bar, "", "alpha", PARAM_INT32, 1, ctypes.byref(c))
        return c.value
    alpha = property(_get_alpha, _set_alpha,
                     doc='''Bar opacity.
    Bar opacity can vary from 0 for fully transparent to 255 for fully opaque.
    See also the color and text parameters to change bar visual aspect.
    :type: int
    ''')
    def _set_color(self, color):
        c = (ctypes.c_int*3)(color[0],color[1],color[2])
        TwSetParam(self._bar, "", "color", PARAM_INT32, 3, ctypes.byref(c))
    def _get_color(self):
        c = (ctypes.c_int*3)(0,0,0)
        TwGetParam(self._bar, "", "color", PARAM_INT32, 3, ctypes.byref(c))
        return c[0], c[1], c[2]
    color = property(_get_color, _set_color,
                     doc='''Bar color.
    Red, green and blue are integer values between 0 and 255 that define the
    red, green and blue color channels. See also the alpha and text parameters
    to change bar visual aspect.
    :type: (int,int,int)
    ''')
    def _set_help(self, help):
        c = ctypes.c_char_p(help)
        TwSetParam(self._bar, "", "help", PARAM_CSTRING, 1, c)
    def _get_help(self):
        c = ctypes.create_string_buffer(4096)
        TwGetParam(self._bar, "", "help", PARAM_CSTRING, 4095, c)
        return c.value
    help = property(_get_help, _set_help,
                    doc='''Help message.
    Defines the help message associated to a bar. This message will be
    displayed inside the Help bar automatically created to help the user.
    :type: str
    ''')
    def _set_text(self, text):
        c = ctypes.c_char_p(text)
        TwSetParam(self._bar, "", "text", PARAM_CSTRING, 1, c)
    def _get_text(self):
        c = ctypes.create_string_buffer(16)
        TwGetParam(self._bar, "", "text", PARAM_CSTRING, 15, c)
        return c.value
    text = property(_get_text, _set_text,
                    doc='''Text color.
    Depending on your application background color and on bar color and alpha,
    bar text might be more readable if it is dark or light. This parameter
    allows to switch between the two modes. See also the color and alpha
    parameters to change bar visual aspect.
    :type: str
    ''')
    def _set_position(self, position):
        c = (ctypes.c_int*2)(position[0],position[1])
        TwSetParam(self._bar, "", "position", PARAM_INT32, 2, ctypes.byref(c))
    def _get_position(self):
        c = (ctypes.c_int*2)(0,0)
        TwGetParam(self._bar, "", "position", PARAM_INT32, 2, ctypes.byref(c))
        return c[0], c[1]
    position = property(_get_position, _set_position,
                        doc='''Bar position (x,y).
    x and y are positive integer values that represent the new position of the
    bar in pixels. (x=0, y=0) is upper-left corner of the application window.
    :type: (int,int)
    ''')
    def _set_size(self, size):
        c = (ctypes.c_int*2)(size[0],size[1])
        TwSetParam(self._bar, "", "size", PARAM_INT32, 2, ctypes.byref(c))
    def _get_size(self):
        c = (ctypes.c_int*2)(0,0)
        TwGetParam(self._bar, "", "size", PARAM_INT32, 2, ctypes.byref(c))
        return c[0], c[1]
    size = property(_get_size, _set_size,
                    doc='''Bar size (sx,sy).
    sx and sy are positive integer values that represent the new size of the bar
    in pixels.
    :type: (int,int)
    ''')
def _set_valuewidth(self, valuewidth):
c = ctypes.c_int(valuewidth)
TwSetParam(self._bar, "", "valuewidth", PARAM_INT32, 1, ctypes.byref(c))
def _get_valuewidth(self):
c = ctypes.c_int(0)
TwGetParam(self._bar, "", "valuewidth", PARAM_INT32, 1, ctypes.byref(c))
return c.value
valuewidth = property(_get_valuewidth, _set_valuewidth,
doc='''Value width.
Width of the bar right column used to display numerical values.
:type: int
''')
    # -- More bar parameters (same TwGetParam/TwSetParam pattern) ------------
    def _set_fontsize(self, fontsize):
        c = ctypes.c_int(fontsize)
        TwSetParam(self._bar, "", "fontsize", PARAM_INT32, 1, ctypes.byref(c))
    def _get_fontsize(self):
        c = ctypes.c_int(0)
        TwGetParam(self._bar, "", "fontsize", PARAM_INT32, 1, ctypes.byref(c))
        return c.value
    fontsize = property(_get_fontsize, _set_fontsize,
                        doc='''Font size s.
    s is 1 for small font, 2 for medium font, or 3 for large font. Note that
    all bars share the same font, so this change is applied to all bars.
    fontsize is a global parameter.
    :type: int
    ''')
    def _set_refresh(self, refresh):
        c = ctypes.c_float(refresh)
        TwSetParam(self._bar, "", "refresh", PARAM_FLOAT, 1, ctypes.byref(c))
    def _get_refresh(self):
        c = ctypes.c_float(0)
        TwGetParam(self._bar, "", "refresh", PARAM_FLOAT, 1, ctypes.byref(c))
        return c.value
    refresh = property(_get_refresh, _set_refresh,
                       doc='''Refresh rate.
    Values displayed by a bar are automatically updated to reflect changes of
    their associated variables. r is a real value corresponding to the number
    of seconds between two updates.
    :type: float
    ''')
    def _set_visible(self, visible):
        c = ctypes.c_int(visible)
        TwSetParam(self._bar, "", "visible", PARAM_INT32, 1, ctypes.byref(c))
    def _get_visible(self):
        c = ctypes.c_int(0)
        TwGetParam(self._bar, "", "visible", PARAM_INT32, 1, ctypes.byref(c))
        return c.value
    visible = property(_get_visible, _set_visible,
                       doc='''Bar visibility.
    See also show and hide method.
    :type: int
    ''')
    def _set_iconified(self, iconified):
        c = ctypes.c_int(iconified)
        TwSetParam(self._bar, "", "iconified", PARAM_INT32, 1, ctypes.byref(c))
    def _get_iconified(self):
        c = ctypes.c_int(0)
        TwGetParam(self._bar, "", "iconified", PARAM_INT32, 1, ctypes.byref(c))
        return c.value
    iconified = property(_get_iconified, _set_iconified,
                         doc='''Bar iconification.
    Iconify or deiconify the bar.
    :type: int
    ''')
    def _set_iconpos(self, iconpos):
        c = ctypes.c_char_p(iconpos)
        TwSetParam(self._bar, "", "iconpos", PARAM_CSTRING, 1, c)
    def _get_iconpos(self):
        c = ctypes.create_string_buffer(32)
        TwGetParam(self._bar, "", "iconpos", PARAM_CSTRING, 31, c)
        return c.value
    iconpos = property(_get_iconpos, _set_iconpos,
                       doc='''Bar icon position p.
    p is one of the following values:
    - 'bottomleft' or 'bl' for bottom-left corner of the window (default).
    - 'bottomright' or 'br' for bottom-right corner of the window.
    - 'topleft' or 'tl' for top-left corner of the window.
    - 'topright' or 'tr' for top-right corner of the window.
    Note that this parameter is applied to all bar icons.
    :type: str
    ''')
    def _set_iconalign(self, iconalign):
        c = ctypes.c_char_p(iconalign)
        TwSetParam(self._bar, "", "iconalign", PARAM_CSTRING, 1, c)
    def _get_iconalign(self):
        c = ctypes.create_string_buffer(32)
        TwGetParam(self._bar, "", "iconalign", PARAM_CSTRING, 31, c)
        return c.value
    iconalign = property(_get_iconalign, _set_iconalign,
                         doc='''Bar icon alignment p.
    Changes the alignment of icons of iconified bars. It can be 'vertical' (the
    default), or 'horizontal'.
    Note that this parameter is applied to all bar icons.
    :type: str
    ''')
    def _set_iconmargin(self, iconmargin):
        c = (ctypes.c_int*2)(iconmargin[0],iconmargin[1])
        TwSetParam(self._bar, "", "iconmargin", PARAM_INT32, 2, ctypes.byref(c))
    def _get_iconmargin(self):
        c = (ctypes.c_int*2)(0,0)
        TwGetParam(self._bar, "", "iconmargin", PARAM_INT32, 2, ctypes.byref(c))
        return c[0], c[1]
    iconmargin = property(_get_iconmargin, _set_iconmargin,
                          doc='''Bar icon margin (x,y).
    Add a margin between borders of the window and icons of iconified bars. x
    and y are the number of pixels between window borders and icons in the x
    and y directions respectively.
    Note that this parameter is applied to all bar icons.
    :type: (int,int)
    ''')
    def _set_iconifiable(self, iconifiable):
        c = ctypes.c_int(iconifiable)
        TwSetParam(self._bar, "", "iconifiable", PARAM_INT32, 1, ctypes.byref(c))
    def _get_iconifiable(self):
        c = ctypes.c_int(0)
        TwGetParam(self._bar, "", "iconifiable", PARAM_INT32, 1, ctypes.byref(c))
        return c.value
    iconifiable = property(_get_iconifiable, _set_iconifiable,
                           doc='''Allow a bar to be iconified or not by the user.
    :type: int
    ''')
    def _set_movable(self, movable):
        c = ctypes.c_int(movable)
        TwSetParam(self._bar, "", "movable", PARAM_INT32, 1, ctypes.byref(c))
    def _get_movable(self):
        c = ctypes.c_int(0)
        TwGetParam(self._bar, "", "movable", PARAM_INT32, 1, ctypes.byref(c))
        return c.value
    movable = property(_get_movable, _set_movable,
                       doc='''Allow a bar to be moved or not by the user.
    :type: int
    ''')
    def _set_resizable(self, resizable):
        c = ctypes.c_int(resizable)
        TwSetParam(self._bar, "", "resizable", PARAM_INT32, 1, ctypes.byref(c))
    def _get_resizable(self):
        c = ctypes.c_int(0)
        TwGetParam(self._bar, "", "resizable", PARAM_INT32, 1, ctypes.byref(c))
        return c.value
    resizable = property(_get_resizable, _set_resizable,
                         doc='''Allow a bar to be resized or not by the user.
    :type: int
    ''')
    def _set_fontresizable(self, fontresizable):
        c = ctypes.c_int(fontresizable)
        TwSetParam(self._bar, "", "fontresizable", PARAM_INT32, 1, ctypes.byref(c))
    def _get_fontresizable(self):
        c = ctypes.c_int(0)
        TwGetParam(self._bar, "", "fontresizable", PARAM_INT32, 1, ctypes.byref(c))
        return c.value
    fontresizable = property(_get_fontresizable, _set_fontresizable,
                             doc='''Allow a bar to have font resized or not by the user.
    :type: int
    ''')
    def _set_alwaystop(self, alwaystop):
        c = ctypes.c_int(alwaystop)
        TwSetParam(self._bar, "", "alwaystop", PARAM_INT32, 1, ctypes.byref(c))
    def _get_alwaystop(self):
        c = ctypes.c_int(0)
        TwGetParam(self._bar, "", "alwaystop", PARAM_INT32, 1, ctypes.byref(c))
        return c.value
    alwaystop = property(_get_alwaystop, _set_alwaystop,
                         doc='''Set a bar to be always on top of the others.
    :type: int
    ''')
def _set_alwaybottom(self, alwaybottom):
c = ctypes.c_int(alwaybottom)
TwSetParam(self._bar, "", "alwaybottom", PARAM_INT32, 1, ctypes.byref(c))
def _get_alwaybottom(self):
c = ctypes.c_int(0)
TwGetParam(self._bar, "", "alwaybottom", PARAM_INT32, 1, ctypes.byref(c))
return c.value
alwaybottom = property(_get_alwaybottom, _set_alwaybottom,
doc='''Set a bar to be always behind the others.
:type: int
''')
    def clear(self):
        '''Remove all variables, buttons and separators from the bar.'''
        check_error(TwRemoveAllVars(self._bar))
    def remove(self, name):
        '''Remove the variable named *name* from the bar.'''
        check_error(TwRemoveVar(self._bar, name))
    def update(self):
        '''Refresh the bar so it reflects current variable values.'''
        check_error(TwRefreshBar(self._bar))
    def bring_to_front(self):
        '''Draw this bar on top of all other bars.'''
        check_error(TwSetTopBar(self._bar))
    def add_var(self, name, value=None, vtype=None, readonly=False,
                getter=None, setter=None, data=None, **defs):
        '''
        Add a new variable to the tweak bar.
        Arguments:
        ----------
        name : str
            The name of the variable. It will be displayed in the tweak bar if
            no label is specified for this variable. It will also be used to
            refer to this variable in other functions, so choose a unique,
            simple and short name and avoid special characters like spaces or
            punctuation marks.
        value : ctypes
            Value of the variable
        vtype : TYPE_xxx
            Type of the variable. It must be one of the TYPE_xxx constants or an enum type.
        readonly: bool
            Makes a variable read-only or read-write. The user would be able to
            modify it or not.
        getter : func(data) or func()
            The callback function that will be called by AntTweakBar to get the
            variable's value.
        setter : func(value, data)
            The callback function that will be called to change the variable's
            value.
        data : object
            Data to be send to getter/setter functions
        Keyword arguments:
        ------------------
        label : str
            Changes the label of a variable, that is the name displayed before
            its value. By default, the label is the name used when the variable
            was added to a bar.
        help : str
            Defines the help message associated to a variable. This message will
            be displayed inside the Help bar automatically created to help the
            user.
        group : str
            Move a variable into a group. This allows you to regroup
            variables. If groupname does not exist, it is created and added to
            the bar. You can also put groups into groups, and so obtain a
            hierarchical organization.
        visible: bool
            Show or hide a variable.
        min / max: scalar
            Set maximum and minimum value of a variable. Thus, user cannot
            exceed these bounding values when (s)he edit the variable.
        step: scalar
            Set a step value for a variable. When user interactively edit the
            variable, it is incremented or decremented by this value.
        precision : scalar
            Defines the number of significant digits printed after the period
            for floating point variables. This number must be between 0 and 12,
            or -1 to disable precision and use the default formating.
            If precision is not defined and step is defined, the step number of
            significant digits is used for defining the precision.
        hexa : bool
            For integer variables only.
            Print an integer variable as hexadecimal or decimal number.
        True / False : str
            For boolean variables only.
            By default, if a boolean variable is true, it is displayed as 'ON',
            and if it is false, as 'OFF'. You can change this message with the
            true and false parameters, the new string will replace the previous
            message.
        opened : bool
            For groups only.
            Fold or unfold a group displayed in a tweak bar (as when the +/-
            button displayed in front of the group is clicked).
        '''
        # "a/b/name" style names encode the group hierarchy; the variable
        # itself is registered under the last path component.
        groups = name.split('/')
        name = groups[-1]
        # Map ctypes value types to AntTweakBar variable type constants.
        _typemap = {ctypes.c_bool: TW_TYPE_BOOL8,
                    ctypes.c_int: TW_TYPE_INT32,
                    ctypes.c_long: TW_TYPE_INT32,
                    ctypes.c_float: TW_TYPE_FLOAT,
                    ctypes.c_float * 3: TW_TYPE_COLOR3F,
                    ctypes.c_float * 4: TW_TYPE_COLOR4F,
                    ctypes.c_char * 512: TW_TYPE_CSSTRING(512)}
        _typemap_inv = dict([(v, k) for k, v in _typemap.iteritems()])
        if vtype is None and value is not None:
            vtype = _typemap.get(type(value))
        elif vtype:
            vtype = _typemap.get(vtype, vtype)
        elif vtype is None and getter is not None:
            # Infer the AntTweakBar type by probing the getter once.
            t = type(getter())
            if t == bool:
                vtype = TW_TYPE_BOOL8
            elif t == int:
                # NOTE(review): plain ints map to TW_TYPE_INT16 here, while
                # ctypes.c_int maps to TW_TYPE_INT32 above -- confirm the
                # 16-bit choice is intended.
                vtype = TW_TYPE_INT16
            elif t == long:
                # Python 2 only: 'long' does not exist on Python 3.
                vtype = TW_TYPE_INT32
            elif t == float:
                vtype = TW_TYPE_FLOAT
            else:
                raise ValueError("Cannot determin value type")
        # NOTE(review): bare c_int relies on 'from raw import *' re-exporting
        # ctypes names -- verify raw does a star-import of ctypes.
        ctype = _typemap_inv.get(vtype,c_int)
        def_str = _dict_to_defs(defs)
        if getter:
            # AntTweakBar hands us a raw pointer to fill and our opaque
            # user-data pointer; unwrap both with ctypes casts.
            def wrapped_getter(p, user_data):
                v = ctypes.cast(p, ctypes.POINTER(ctype))
                d = ctypes.cast(user_data, ctypes.py_object)
                if d.value is not None:
                    v[0] = getter(d.value)
                else:
                    v[0] = getter()
        if setter:
            def wrapped_setter(p, user_data):
                v = ctypes.cast(p, ctypes.POINTER(ctype))
                d = ctypes.cast(user_data, ctypes.py_object)
                if d.value is not None:
                    setter(v[0], d.value)
                else:
                    setter(v[0])
        # Keep the C callback wrappers referenced in self._c_callbacks so
        # they are not garbage-collected while AntTweakBar holds them.
        if (getter and readonly) or (getter and not setter):
            c_callback = GET_FUNC(wrapped_getter)
            self._c_callbacks.append(c_callback)
            r = TwAddVarCB(self._bar, name, vtype, None, c_callback,
                           ctypes.py_object(data), def_str)
        elif (getter and setter):
            c_setter = SET_FUNC(wrapped_setter)
            c_getter = GET_FUNC(wrapped_getter)
            self._c_callbacks.extend((c_setter, c_getter))
            r = TwAddVarCB(self._bar, name, vtype, c_setter, c_getter,
                           ctypes.py_object(data), def_str)
        elif readonly:
            r = TwAddVarRO(self._bar, name, vtype, ctypes.byref(value), def_str)
        else:
            r = TwAddVarRW(self._bar, name, vtype, ctypes.byref(value), def_str)
        check_error(r)
        if len(groups) > 1:
            # Nest the groups from the inside out: each path component is
            # placed inside its parent component.
            # NOTE(review): this 'name' assignment is never read afterwards --
            # it looks like dead code.
            name = self.name
            for i in range(len(groups)-1,0,-1):
                self.define("group=%s" % groups[i-1], groups[i])
    def add_button(self, name, callback, data=None, **defs):
        '''
        Add a clickable button to the bar.

        name     : label/identifier of the button inside the bar
        callback : called on click; receives ``data`` as its only
                   argument when ``data`` is not None, otherwise it is
                   called with no arguments
        data     : optional user object forwarded to ``callback``
        defs     : extra AntTweakBar definition options (key=value),
                   converted to a definition string
        '''
        def wrapped_callback(userdata):
            # Unwrap the py_object handed through the C layer and
            # dispatch to the user callback with or without data.
            d = ctypes.cast(userdata, ctypes.py_object)
            if d.value is not None:
                callback(d.value)
            else:
                callback()
        c_callback = BUTTON_FUNC(wrapped_callback)
        # Keep a reference to the ctypes callback object: without it the
        # wrapper would be garbage-collected and the C side would call
        # into freed memory.
        self._c_callbacks.append(c_callback)
        def_str = _dict_to_defs(defs)
        data_p = ctypes.py_object(data)
        check_error( TwAddButton(self._bar, name, c_callback, data_p, def_str) )
def add_separator(self, name, **defs):
''' '''
def_str = _dict_to_defs(defs)
check_error( TwAddSeparator(self._bar, name, def_str ) )
def define(self, definition='', varname=None):
'''
This function defines optional parameters for tweak bars and
variables. For instance, it allows you to change the color of a tweak
bar, to set a min and a max value for a variable, to add an help
message that inform users of the meaning of a variable, and so on...
If no varname is given, definition is applied to bar, else, it is
applied to the given variable.
'''
if varname:
arg = '%s/%s %s' % (self.name, varname,definition)
else:
arg = '%s %s' % (self.name, definition)
check_error(TwDefine(arg))
def destroy(self):
TwDeleteBar(self._bar) | Flavsditz/projects | eyeTracking/pupil/pupil_src/shared_modules/atb/__init__.py | Python | gpl-2.0 | 27,057 |
# -*- coding: utf8 -*-
#
# Created by 'myth' on 8/26/15
from tkinter import *
from algorithms import ASTAR_OPTIONS, ASTAR_HEURISTIC, GAC_DEFAULT_K, GAC_DEFAULT_CONSTRAINT
from common import *
def generate_menus(window):
    """
    Takes in the window main menu bar and registers the submenus and
    their commands
    :param window: The main application window
    """
    # Define menu labels and their commands here.
    # NOTE: the file-loading lambdas bind the loop variable through a
    # default argument (fp=board) so every menu entry keeps its own file
    # path instead of all sharing the last value of the comprehension
    # variable (classic late-binding closure pitfall).
    menus = [
        (u'File', [
            (u'Exit', window.controller.exit)
        ]),
        (u'Boards', sorted([
            (os.path.basename(board),
             lambda fp=board: window.controller.load_board(file_path=fp))
            for board in fetch_files_from_dir(rootdir='module1/boards/')
        ])),
        (u'Graphs', sorted([
            (os.path.basename(board),
             lambda fp=board: window.controller.load_graph(file_path=fp))
            for board in fetch_files_from_dir(rootdir='module2/graphs/')
        ])),
        (u'Nonograms', sorted([
            (os.path.basename(board),
             lambda fp=board: window.controller.load_nonogram(file_path=fp))
            for board in fetch_files_from_dir(rootdir='module3/nonograms/')
        ])),
        (u'Run', [
            (u'GO!', window.controller.solve),
        ]),
    ]
    # Iterate over the main menu components and their actions
    for name, actions in menus:
        menu = Menu(window.menu, tearoff=0)
        window.menu.add_cascade(label=name, menu=menu)
        # Register commands
        for label, cmd in actions:
            menu.add_command(label=label, command=cmd)
def generate_options(frame, module=1):
    """
    Generates options for
    :param frame: Stats and options frame reference
    :param module: Module number (1-3); selects which option widgets are
                   shown.  Modules 1 and 3 get an update-interval field,
                   module 2 gets K-value and constraint-formula fields.

    Each created input widget / StringVar is registered in the
    controller's ``references`` dict so other components can read the
    chosen values later.
    """
    # Clear all widgets for consistency
    for child_widget in frame.winfo_children():
        child_widget.destroy()

    mode_label = Label(frame, text='Algorithm mode:')
    mode_label.grid(row=0, padx=5, pady=5, ipadx=5, ipady=5, sticky='W')
    mode_var = StringVar(master=frame, value=ASTAR_OPTIONS[0], name='algorithm_mode')
    frame.master.controller.references['algorithm_mode'] = mode_var
    options = OptionMenu(frame, mode_var, *mode_var and ASTAR_OPTIONS)
    options.grid(row=0, column=1, sticky='E')

    heuristic_label = Label(frame, text='Heuristic:')
    heuristic_label.grid(row=1, padx=5, pady=5, ipadx=5, ipady=5, sticky='W')
    # Module 1 offers the full heuristic list; the CSP-based modules only
    # support minimum_domain_sum.
    if module == 1:
        heuristic_var = StringVar(master=frame, value=ASTAR_HEURISTIC[0], name='heuristic')
        h_options = OptionMenu(frame, heuristic_var, *ASTAR_HEURISTIC)
    else:
        heuristic_var = StringVar(master=frame, value='minimum_domain_sum', name='heuristic')
        h_options = OptionMenu(frame, heuristic_var, 'minimum_domain_sum')
    frame.master.controller.references['heuristic'] = heuristic_var
    h_options.grid(row=1, column=1, sticky='E')

    if module == 1 or module == 3:
        update_interval_label = Label(frame, text='Update interval (ms):')
        update_interval_label.grid(row=2, padx=5, pady=5, ipadx=5, ipady=5, sticky='W')
        update_interval = Entry(frame)
        update_interval.insert(0, str(GUI_UPDATE_INTERVAL))
        update_interval.grid(row=2, column=1, padx=5, pady=5, ipadx=5, ipady=5, sticky='E')
        frame.master.controller.references['update_interval'] = update_interval
    elif module == 2:
        # NOTE(review): rows 3/4 are used here while row 2 stays empty in
        # module 2 — looks intentional (layout spacing), confirm.
        k_value_label = Label(frame, text='K value:')
        k_value_label.grid(row=3, padx=5, pady=5, ipadx=5, ipady=5, sticky='W')
        k_value = Entry(frame)
        k_value.insert(0, str(GAC_DEFAULT_K))
        k_value.grid(row=3, column=1, padx=5, pady=5, ipadx=5, ipady=5, sticky='E')
        frame.master.controller.references['k_value'] = k_value
        constraint_formula_label = Label(frame, text='Constraint formula:')
        constraint_formula_label.grid(row=4, padx=5, pady=5, ipadx=5, ipady=5, sticky='W')
        constraint_formula = Entry(frame)
        constraint_formula.insert(0, GAC_DEFAULT_CONSTRAINT)
        constraint_formula.grid(row=4, column=1, padx=5, pady=5, ipadx=5, ipady=5, sticky='E')
        frame.master.controller.references['constraint_formula'] = constraint_formula
def generate_stats(frame, module=1):
    """
    Generates and fills the Statistics LabelFrame.

    Creates one StringVar-backed label per statistic, registers every
    StringVar in the controller's ``references`` dict (so the solver can
    update the displayed text later) and grids the labels vertically.

    :param frame: Statistics LabelFrame reference
    :param module: Module number; module 2 additionally shows the two
                   CSP statistics (unsatisfied constraints and vertices
                   missing assignment)
    """
    # Clear all widgets for consistency
    for child_widget in frame.winfo_children():
        child_widget.destroy()

    def _add_stat(row, ref_key, initial_text):
        # Helper: create a StringVar-backed label on the given grid row
        # and register the StringVar under ref_key for later updates.
        var = StringVar(frame)
        var.set(initial_text)
        label = Label(frame, textvariable=var)
        frame.master.controller.references[ref_key] = var
        label.grid(row=row, padx=5, pady=5, ipadx=5, ipady=5, sticky='W')

    _add_stat(0, 'path_length', 'Path length: 0')
    _add_stat(1, 'open_set_size', 'OpenSet size: 0')
    _add_stat(2, 'closed_set_size', 'ClosedSet size: 0')
    _add_stat(3, 'total_set_size', 'Total set size: 0')

    if module == 2:
        _add_stat(4, 'total_unsatisfied_constraints', 'Unsatisfied constraints: 0')
        _add_stat(5, 'total_missing_assignment', 'Vertices missing assignment: 0')
| myth/trashcan | it3105/project1/gui/widgets.py | Python | gpl-2.0 | 6,474 |
#!/usr/bin/env python
# coding: UTF-8
# TcpdumpWrapper
#
# Copyright(c) 2015-2018 Uptime Technologies, LLC.
import copy
import hashlib
import os
import re
import subprocess
import sys
from datetime import datetime, timedelta, date, time

import log
class TcpdumpPacket:
    """One captured TCP packet plus the PostgreSQL wire-protocol messages
    decoded from its payload.

    The constructor parses the IP and TCP headers out of ``bytes`` (a
    list of integer byte values produced by TcpdumpWrapper.parse_data)
    and immediately scans the TCP payload for frontend/backend protocol
    messages, collecting them in ``self.messages``.
    """
    def __init__(self, ts, src, dst, bytes, debug=None):
        # ts is a "YYYY-MM-DD HH:MM:SS.ffffff" string from the tcpdump
        # header line; converted to a datetime right away.
        self.ts = self.string2timestamp(ts)
        self.src = src
        self.dst = dst
        self.debug = debug
        self.bytes = bytes
        self.messages = []
        log.debug("+ " + ts + " " + src + " " + dst)
        # end() parses the headers/payload and fills self.messages.
        self.end()
        self.session_id = self.get_session_id()
    def string2timestamp(self, ts):
        # Convert the tcpdump -tttt timestamp string to a datetime.
        t = datetime.strptime(ts, '%Y-%m-%d %H:%M:%S.%f')
        log.debug("ts = " + str(t))
        return t
    def get_session_id(self):
        # Direction-independent session id: both endpoints are sorted
        # before hashing, so packets of either direction of the same TCP
        # connection map to the same 12-hex-digit id.
        src_dst = [self.iph_src + ":" + str(self.tcph_src),
                   self.iph_dst + ":" + str(self.tcph_dst)]
        ss = ""
        for s in sorted(src_dst):
            ss = ss + s + " "
        # NOTE(review): hashlib.md5() requires bytes on Python 3; this
        # works on Python 2 only.
        return hashlib.md5(ss).hexdigest()[0:12]
    def get_timestamp(self):
        log.debug("get_timestamp: %s" % str(self.ts))
        return self.ts
    def get_messages(self):
        # List of decoded protocol messages, each [type_char, payload...].
        return self.messages
    def parse_ip_header(self, data):
        # Decode the fixed part of the IPv4 header from raw byte values.
        # Returns the header length in bytes so the caller can skip it.
        self.iph_version = (data[0] >> 4) & 0b1111
        self.iph_header_len = data[0] & 0b1111
        self.iph_tos = data[1]
        self.iph_dgram_len = (data[2] << 8) + data[3]
        self.iph_id = (data[4] << 8) + data[5]
        # NOTE(review): per RFC 791 bytes 12-15 are the SOURCE address
        # and 16-19 the DESTINATION, but they are stored swapped here
        # (dst from 12-15, src from 16-19) — confirm whether intentional.
        self.iph_dst = "%d.%d.%d.%d" % (data[12], data[13], data[14], data[15])
        self.iph_src = "%d.%d.%d.%d" % (data[16], data[17], data[18], data[19])
        if self.debug is True:
            print("version : %d" % self.iph_version)
            print("hd len : %d (%d)" % (self.iph_header_len,
                                        self.iph_header_len * 4))
            print("tos : %d" % self.iph_tos)
            print("dgram len: %d" % self.iph_dgram_len)
            print("data len: %d" % (self.iph_dgram_len -
                                    self.iph_header_len*4))
            print("id : %d" % self.iph_id)
            print("dst : %s" % (self.iph_dst))
            print("src : %s" % (self.iph_src))
        # Header length field counts 32-bit words.
        return self.iph_header_len * 4
    def parse_tcp_header(self, data):
        # Decode ports, sequence number and data offset from the TCP
        # header; returns the header length in bytes.
        self.tcph_src = (data[0] << 8) + data[1]
        self.tcph_dst = (data[2] << 8) + data[3]
        self.tcph_seq = ((data[4] << 24) + (data[5] << 16) +
                         (data[6] << 8) + data[7])
        self.tcph_offset = (data[12] >> 4) & 0b1111
        if self.debug is True:
            print("src port : %d" % (self.tcph_src))
            print("dst port : %d" % (self.tcph_dst))
            print("seq : %d" % (self.tcph_seq))
            print("offset : %d (%d)" % (self.tcph_offset,
                                        self.tcph_offset * 4))
        # Data offset field counts 32-bit words.
        return self.tcph_offset * 4
    def end(self):
        # Parse IP + TCP headers, then walk the remaining payload and
        # decode PostgreSQL protocol messages: one type byte followed by
        # a 4-byte big-endian length (which includes the length field).
        cur = 0
        iph_len = self.parse_ip_header(self.bytes[cur:])
        cur = cur + iph_len
        tcph_len = self.parse_tcp_header(self.bytes[cur:])
        cur = cur + tcph_len
        self.payload = self.bytes[cur:]
        s = ""
        for d in self.payload:
            s = s + "%02x " % (d)
        log.debug("payload: " + s)
        # 5 bytes is the minimum message size (type + length).
        if len(self.payload) >= 5:
            pos = 0
            cont = True
            while cont:
                if len(self.payload[pos:]) < 5:
                    cont = False
                    break
                ch = self.read_char(self.payload[pos:])
                # if not(ch >= 48 and ch <= 122):
                #    break
                pos = pos + 1
                i = self.read_int32(self.payload[pos:])
                pos = pos + 4
                log.debug("sess: " + self.get_session_id() + ": " +
                          str(self.ts) + ": %c[%x] len=%d" % (ch, ch, i))
                # client to server
                if ch == ord('S'):
                    s = self.read_string(self.payload[pos:], i - 4)
                    self.messages.append([chr(ch), s])
                    log.debug(s)
                elif ch == ord('Q'):
                    s = self.read_string(self.payload[pos:], i - 4)
                    log.debug(s)
                    self.messages.append([chr(ch), s])
                elif ch == ord('P'):
                    # Parse: payload is "statement_name\0query\0...".
                    s = self.read_string(self.payload[pos:], i - 4)
                    s1 = s.split('\0')
                    log.debug("> " + s1[0] + "," + s1[1])
                    self.messages.append([chr(ch), s1[0], s1[1]])
                elif ch == ord('E'):
                    s = self.read_string(self.payload[pos:], i - 4)
                    self.messages.append([chr(ch), s])
                elif ch == ord('B'):
                    # Bind: "portal_name\0statement_name\0...".
                    s = self.read_string(self.payload[pos:], i - 4)
                    s1 = s.split('\0')
                    log.debug("> " + s1[0] + "," + s1[1])
                    self.messages.append([chr(ch), s1[0], s1[1]])
                elif ch == ord('X'):
                    # Terminate: stop scanning this payload.
                    self.messages.append([chr(ch), None])
                    cont = False
                # server to client
                elif ch == ord('T'):
                    s = self.read_string(self.payload[pos:], i - 4)
                    self.messages.append([chr(ch), s])
                    log.debug(s)
                elif ch == ord('D'):
                    s = self.read_string(self.payload[pos:], i - 4)
                    self.messages.append([chr(ch), s])
                    log.debug(s)
                elif ch == ord('C'):
                    s = self.read_string(self.payload[pos:], i - 4)
                    self.messages.append([chr(ch), s])
                    log.debug(s)
                elif ch == ord('1'):
                    self.messages.append([chr(ch), None])
                elif ch == ord('2'):
                    self.messages.append([chr(ch), None])
                elif ch == ord('n'):
                    self.messages.append([chr(ch), None])
                elif ch == ord('Z'):
                    # ReadyForQuery: end of a message batch.
                    self.messages.append([chr(ch), None])
                    cont = False
                # Advance past the message body (length includes the
                # 4 length bytes already consumed).
                pos = pos + (i - 4)
    def parse(self):
        # NOTE(review): appears to be dead/legacy code — read_char and
        # read_int32 require a data argument, so calling this raises
        # TypeError; end() is the method actually used.
        self.pos = 12
        while len(self.payload) > self.pos + 5:
            c = self.read_char()
            log.debug("%02x(%c)" % (c, c))
            l = self.read_int32()
            log.debug(l)
            self.pos = self.pos + l
    def read_char(self, data):
        # First byte of data as an int.
        ch = data[0]
        return ch
    def read_int32(self, data):
        # Big-endian 32-bit integer from the first four bytes.
        i = (data[0] << 24) + (data[1] << 16) + (data[2] << 8) + (data[3])
        return i
    def read_string(self, data, size):
        # Build a str from `size` byte values (no NUL handling).
        s = ""
        i = 0
        while i < size:
            s = s + "%c" % data[i]
            i = i + 1
        return s
class TcpdumpWrapper:
    """Run tcpdump as a subprocess and turn its hex-dump output into
    TcpdumpPacket objects.

    tcpdump is invoked with -tttt (full timestamps), -X (hex+ASCII
    payload dump) and a filter on the PostgreSQL port; get_packet()
    groups one header line plus the following indented hex lines into
    one packet.
    """
    # Class-level defaults (shared until overwritten per instance).
    pkt = None
    tcpdump = None
    process = None
    line = None
    def __init__(self, host=None, port=None, interface=None, inputfile=None,
                 debug=None):
        if debug is True:
            log.setLevel(log.DEBUG)
        self.host = host
        self.port = port
        self.iface = interface
        self.inputfile = inputfile
        self.debug = debug
        # Defaults: PostgreSQL port, capture on any interface.
        if self.port is None:
            self.port = "5432"
        if self.iface is None:
            self.iface = "any"
        self.tcpdump = "tcpdump -tttt"
        # -r: read from a capture file instead of a live interface.
        if self.inputfile is not None:
            self.tcpdump = self.tcpdump + " -r " + self.inputfile
        self.tcpdump = (self.tcpdump + " -l -i " + self.iface +
                        " -s 0 -X -p tcp port " + str(self.port))
        if self.host is not None:
            self.tcpdump = self.tcpdump + " and host " + self.host
        log.info(self.tcpdump)
        # shell=True because the command is a single string with a
        # capture-filter expression.
        self.process = subprocess.Popen([self.tcpdump],
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE,
                                        shell=True,
                                        bufsize=0)
        # header: "<date> <time> IP <src> > <dst>: Flags ..."
        self.p1 = re.compile('^([\d-]+) ([\d:\.]+) IP (.*) > (.*): Flags')
        # data: tab-indented "0x0010:  4500 0039 ..." hex dump lines
        self.p2 = re.compile('^\t+0x(.+): (.+) ')
    def get_packet(self):
        """Read tcpdump output until one full packet (header line plus
        its hex-dump lines) is collected; return a TcpdumpPacket, or
        None at EOF when nothing is pending."""
        self.data = ""
        self.hdr = None
        while True:
            # self.line may hold a lookahead line from a previous call.
            if self.line is None:
                self.line = self.readline()
            if self.line is None:
                if self.hdr is not None and self.data is not None:
                    # EOF found.
                    pkt = TcpdumpPacket(self.hdr[0], self.hdr[1],
                                        self.hdr[2],
                                        self.parse_data(self.data),
                                        self.debug)
                    self.data = ""
                    self.hdr = None
                    return pkt
                else:
                    return None
            if self.line[0] != '\t':
                # A new header line: flush the packet collected so far.
                if len(self.data) > 0:
                    pkt = TcpdumpPacket(self.hdr[0], self.hdr[1], self.hdr[2],
                                        self.parse_data(self.data), self.debug)
                    self.data = ""
                    self.hdr = None
                    return pkt
                self.hdr = self.parse_header(self.line)
                # d: ts, src, dest
                log.debug("Header: ts=" + self.hdr[0] + ", src=" +
                          self.hdr[1] + ", dest=" + self.hdr[2])
            else:
                # Hex-dump continuation line: columns 10-49 hold the hex
                # bytes (the ASCII column to the right is ignored).
                # log.debug(">" + self.line[10:50] + "<")
                self.data = self.data + self.line[10:50]
            self.line = None
    def get_last_packet(self):
        # Flush whatever header/data is still pending (call after the
        # read loop ends).
        pkt = None
        if self.hdr is not None and self.data is not None:
            pkt = TcpdumpPacket(self.hdr[0], self.hdr[1], self.hdr[2],
                                self.parse_data(self.data), self.debug)
            self.data = ""
            self.hdr = None
        return pkt
    def readline(self):
        """One line from tcpdump stdout, trailing whitespace stripped;
        None at EOF.  Exits the process when tcpdump itself failed."""
        self.process.stdout.flush()
        line = self.process.stdout.readline()
        if len(line) == 0:
            # EOF
            self.process.poll()
            if self.process.returncode != 0:
                log.error("%s" % self.process.stderr.readline())
                sys.exit(1)
            log.debug("readline: EOF")
            return None
        return line.rstrip()
    def parse_header(self, line):
        """Split a tcpdump header line into [timestamp, src, dest]; None
        when the line does not match."""
        r1 = self.p1.match(line)
        if r1 is not None:
            # Header line may look like this:
            # 2015-04-30 13:33:27.579311 IP localhost.55641 > \
            # localhost.postgres: Flags [.], ack 66, win 664, \
            # options [nop,nop,TS val 265008484 ecr 265008484], length 0
            ts = r1.group(1) + " " + r1.group(2)
            src = r1.group(3)
            dest = r1.group(4)
            return [ts, src, dest]
        return None
    def parse_data(self, line):
        """Convert a concatenated hex string ("4500 0039 ...") into a
        list of integer byte values."""
        bytes = []
        # line:
        # 4500 0039 e080 4000 4006 5c3c 7f00 0001
        offset = 0
        length = len(line)
        log.debug("! " + line)
        cur = 0
        while cur < length:
            if line[cur] == ' ':
                cur = cur + 1
                continue
            # chr to hex: two hex digits -> one byte value
            h = int(line[cur:cur+2], 16)
            bytes.append(h)
            # print(payload[cur:cur+2] + ", " + str(h))
            cur = cur + 2
        return bytes
    def parse(self, line):
        # NOTE(review): this method looks like broken/legacy code:
        #  - `copy.deepcopy` needs `import copy` at module level,
        #  - `self.packets` is never initialized anywhere,
        #  - TcpdumpPacket is called without its required `bytes` arg
        #    (self.debug ends up in the bytes parameter), and
        #  - TcpdumpPacket has no append_bytes() method.
        # get_packet() is the working code path.
        # return true when messages available.
        msg_avail = False
        log.debug("parse: " + line)
        # packet header info
        r1 = self.p1.match(line)
        if r1 is not None:
            # Header line may look like this:
            # 13:33:27.579311 IP localhost.55641 > localhost.postgres: \
            # Flags [.], ack 66, win 664, options \
            # [nop,nop,TS val 265008484 ecr 265008484], length 0
            log.debug("Header: " + line)
            ts = r1.group(1)
            src = r1.group(2)
            dest = r1.group(3)
            # close the previous packet
            if self.pkt is not None:
                self.pkt.end()
                # retreive all info/messages in the previous packet.
                self.session_id = self.pkt.get_session_id()
                self.timestamp = self.pkt.get_timestamp()
                self.messages = copy.deepcopy(self.pkt.get_messages())
                msg_avail = True
                for m in self.pkt.get_messages():
                    log.debug("ts:%s cmd:%c msg:%s" %
                              (str(self.pkt.get_timestamp()), m[0], m[1]))
            # new packet coming
            self.pkt = TcpdumpPacket(ts, src, dest, self.debug)
            self.packets.append(self.pkt)
        # packet bytes
        r2 = self.p2.match(line)
        if r2 is not None:
            log.debug("matched r2: " + line)
            if self.pkt is not None:
                log.debug("append_bytes: " + line)
                self.pkt.append_bytes(r2.group(2))
        return msg_avail
    def print_packets(self):
        print(self.packets)
| uptimejp/postgres-toolkit | postgres_toolkit/TcpdumpWrapper.py | Python | gpl-2.0 | 13,163 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from Plugins.Plugin import PluginDescriptor
from Components.ActionMap import *
from Components.Label import Label
from Components.Sources.StaticText import StaticText
from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmap, MultiContentEntryPixmapAlphaTest
from Components.Pixmap import Pixmap
from Components.AVSwitch import AVSwitch
from Components.PluginComponent import plugins
from Components.config import *
from Components.ConfigList import ConfigList, ConfigListScreen
from Components.GUIComponent import GUIComponent
from Components.Sources.List import List
from Components.MenuList import MenuList
from Components.FileList import FileList, FileEntryComponent
from Tools.Directories import SCOPE_CURRENT_SKIN, resolveFilename, fileExists
from Tools.LoadPixmap import LoadPixmap
from Tools.BoundFunction import boundFunction
from Tools.Directories import pathExists, fileExists, SCOPE_SKIN_IMAGE, resolveFilename
from enigma import RT_HALIGN_LEFT, eListboxPythonMultiContent, eServiceReference, eServiceCenter, gFont
from enigma import eListboxPythonMultiContent, eListbox, gFont, RT_HALIGN_LEFT, RT_HALIGN_RIGHT, RT_HALIGN_CENTER, loadPNG, RT_WRAP, eConsoleAppContainer, eServiceCenter, eServiceReference, getDesktop, loadPic, loadJPG, RT_VALIGN_CENTER, gPixmapPtr, ePicLoad, eTimer
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Screens.VirtualKeyBoard import VirtualKeyBoard
from twisted.web.client import getPage
from twisted.web.client import downloadPage
from twisted.web import client, error as weberror
from twisted.internet import reactor
from twisted.internet import defer
from urllib import urlencode
import sys, os, re, shutil, time
from threading import Thread
from os import listdir as os_listdir, path as os_path
from re import compile
import re
# Detect the runtime flavour: DreamOS images expose eMediaDatabase; older
# enigma2 images are identified by the hardware model in
# /proc/stb/info/model (dm7080/dm820 also run DreamOS).
try:
    from enigma import eMediaDatabase
    isDreamOS = True
except:
    try:
        file = open("/proc/stb/info/model", "r")
        dev = file.readline().strip()
        file.close()
        if dev == "dm7080":
            isDreamOS = True
        elif dev == "dm820":
            isDreamOS = True
        else:
            isDreamOS = False
    except:
        # Model file unreadable -> assume a non-DreamOS image.
        isDreamOS = False
def getCoverPath():
blockList = ['hdd','cf','usb','sdcard']
dirList = os_listdir("/media")
coverPaths = ['/usr/share/enigma2/cover/', '/data/cover/', '/media/cf/cover/', '/media/usb/cover/', '/media/sdcard/cover/', '/media/hdd/cover/']
if fileExists("/proc/mounts"):
mountsFile = open("/proc/mounts" ,"r")
for line in mountsFile:
entry = line.split()
if entry[2] in ["nfs", "nfs4", "smbfs", "cifs"]:
if entry[1].startswith("/media/"):
blockList.append(entry[1][7:])
mountsFile.close()
for dir in dirList:
if dir in blockList:
print dir, blockList
continue
if os_path.ismount("/media/%s" %(dir)) or (os_path.islink("/media/%s" %(dir)) and os_path.ismount(os_path.realpath("/media/%s" %(dir)))):
path = "/media/%s/cover/" % (dir)
coverPaths.append(path)
return coverPaths
# Plugin identity.
pname = "Find MovieList Covers"
pversion = "0.5 OpenNfr-mod"
# Persistent settings for the cover scanner.
config.movielist.cover = ConfigSubsection()
# themoviedb.org poster size (w92/w185/w500/original).
config.movielist.cover.themoviedb_coversize = ConfigSelection(default="w185", choices = ["w92", "w185", "w500", "original"])
# Follow symlinks while walking the scan path.
config.movielist.cover.followsymlink = ConfigYesNo(default = False)
# Also download a plot description (.txt next to the cover).
config.movielist.cover.getdescription = ConfigYesNo(default = False)
# Periodic background rescans, every `bgtime` hours (1..24).
config.movielist.cover.bgtimer = ConfigYesNo(default = False)
config.movielist.cover.bgtime = ConfigInteger(3, (1,24))
# "movielist": cover saved next to the recording; "opennfr": central dir.
config.movielist.cover.savestyle = ConfigSelection(default="movielist", choices = ["movielist", "opennfr"])
config.movielist.cover.coverpath = ConfigSelection(default = "/media/hdd/cover/", choices = getCoverPath())
config.movielist.cover.scanpath = ConfigText(default = "/media/hdd/movie/", fixed_size = False)
# Alternation of known video extensions, used with re.sub to strip them.
# NOTE(review): the dots are not escaped, so e.g. ".avi" also matches
# "Xavi" — confirm whether this is intended.
fileExtensionsRemove = "(.avi|.mkv|.divx|.f4v|.flv|.img|.iso|.m2ts|.m4v|.mov|.mp4|.mpeg|.mpg|.mts|.vob|.wmv)"
def cleanFile(text):
    """Normalize a release file name into a searchable title.

    Strips a trailing video extension, removes common scene/release tags
    (resolution, codec, source, language markers, ...) and finally turns
    the usual separators (dot, dash, underscore) into spaces.
    """
    tags = ['x264','720p','1080p','1080i','PAL','GERMAN','ENGLiSH','WS','DVDRiP','UNRATED','RETAIL','Web-DL','DL','LD','MiC','MD','DVDR','BDRiP','BLURAY','DTS','UNCUT','ANiME',
            'AC3MD','AC3','AC3D','TS','DVDSCR','COMPLETE','INTERNAL','DTSD','XViD','DIVX','DUBBED','LINE.DUBBED','DD51','DVDR9','DVDR5','h264','AVC',
            'WEBHDTVRiP','WEBHDRiP','WEBRiP','WEBHDTV','WebHD','HDTVRiP','HDRiP','HDTV','ITUNESHD','REPACK','SYNC']
    # Drop a trailing video file extension.
    text = re.sub(fileExtensionsRemove + "$", '', text)
    # Tags are only removed when framed by separators; the '+' marker is
    # stripped again below.
    for tag in tags:
        text = re.sub('(\_|\-|\.|\+|\s)'+tag+'(\_|\-|\.|\+|\s)','+', text, flags=re.I)
    return text.replace('.',' ').replace('-',' ').replace('_',' ').replace('+','')
class BackgroundCoverScanner(Thread):
def __init__(self, session):
assert not BackgroundCoverScanner.instance, "only one MovieDataUpdater instance is allowed!"
BackgroundCoverScanner.instance = self # set instance
self.session = session
self.scanning = False
self.bgTimerRunning = False
self.fileExtensions = [".avi",".mkv",".divx",".f4v",".flv",".img",".iso",".m2ts",".m4v",".mov",".mp4",".mpeg",".mpg",".mts",".vob",".wmv"]
Thread.__init__(self)
def startTimer(self):
if config.movielist.cover.bgtimer.value:
self.bgTimer = eTimer()
if isDreamOS:
self.bgTimer_conn = self.bgTimer.timeout.connect(self.getFileList)
else:
self.bgTimer.callback.append(self.getFileList)
self.bgTimer.start(3600000 * int(config.movielist.cover.bgtime.value))
self.bgTimerRunning = True
print "----------------------- S t a r t - T i m e r -------------------------"
def stopTimer(self):
if self.bgTimerRunning:
if not config.movielist.cover.bgtimer.value:
self.bgTimer.stop()
self.bgTimerRunning = False
print "----------------------- S t o p - T i m e r -------------------------"
def setCallbacks(self, callback_infos, callback_found, callback_notfound, callback_error, callback_menulist, callback_finished):
# self.msgCallback, self.foundCallback, self.notFoundCallback, self.errorCallback, self.listCallback, self.msgDone
self.callback_infos = callback_infos
self.callback_found = callback_found
self.callback_notfound = callback_notfound
self.callback_error = callback_error
self.callback_menulist = callback_menulist
self.callback_finished = callback_finished
def getFileList(self, background=True):
self.background = background
if not self.scanning:
print "----------------------- Cover Background Scanner -------------------------"
print "Scan Path: %s" % config.movielist.cover.scanpath.value
self.scanning = True
if config.movielist.cover.savestyle.value == "opennfr":
if not pathExists(config.movielist.cover.coverpath.value):
shutil.os.mkdir(config.movielist.cover.coverpath.value)
if not self.background:
self.callback_infos("Scanning: '%s'" % str(config.movielist.cover.scanpath.value))
data = []
symlinks_dupe = []
for root, dirs, files in os.walk(config.movielist.cover.scanpath.value, topdown=False, onerror=None, followlinks=config.movielist.cover.followsymlink.value):
if not root.endswith('/'):
root += "/"
slink = os.path.realpath(root)
if not slink in symlinks_dupe:
symlinks_dupe.append(slink)
else:
break
for file in files:
filename_org = os.path.join(root, file)
if any([file.endswith(x) for x in self.fileExtensions]):
if config.movielist.cover.savestyle.value == "opennfr":
filename = self.getMovieSaveFile(file)
if not filename is None:
filename = "%s%s.jpg" % (config.movielist.cover.coverpath.value, filename)
else:
continue
else:
filename = re.sub(fileExtensionsRemove + "$", '.jpg', filename_org)
if not fileExists(filename):
if os.path.isdir(filename_org):
url = 'http://api.themoviedb.org/3/search/movie?api_key=8789cfd3fbab7dccf1269c3d7d867aff&query=%s&language=de' % file.replace(' ','%20')
data.append(('dir', 'movie', filename, file, url, None, None))
else:
#cleanTitle = re.sub('\W.*?([0-9][0-9][0-9][0-9])', '', file)
# Remove Year
cleanTitle = re.sub('([0-9][0-9][0-9][0-9])', '', file)
# Remove fileExtensions
cleanTitle = cleanFile(cleanTitle)
if re.search('[Ss][0-9]+[Ee][0-9]+', file) is not None:
season = None
episode = None
seasonEpisode = re.findall('.*?[Ss]([0-9]+)[Ee]([0-9]+)', cleanTitle, re.S|re.I)
if seasonEpisode:
(season, episode) = seasonEpisode[0]
name2 = re.sub('[Ss][0-9]+[Ee][0-9]+.*[a-zA-Z0-9_]+','', cleanTitle, flags=re.S|re.I)
url = 'http://thetvdb.com/api/GetSeries.php?seriesname=%s&language=de' % name2.replace(' ','%20')
data.append(('file', 'serie', filename, name2, url, season, episode))
else:
url = 'http://api.themoviedb.org/3/search/movie?api_key=8789cfd3fbab7dccf1269c3d7d867aff&query=%s&language=de' % cleanTitle.replace(' ','%20')
data.append(('file', 'movie', filename, cleanTitle, url, None, None))
elif file.endswith('.ts'):
metaName = None
#cleanTitle = re.sub('^.*? - .*? - ', '', file)
#cleanTitle = re.sub('[.]ts', '', cleanTitle)
if fileExists(filename_org+".meta"):
metaName = open(filename_org+".meta",'r').readlines()[1].rstrip("\n").rstrip("\t").rstrip("\r")
if config.movielist.cover.savestyle.value == "opennfr":
filename = "%s%s.jpg" % (config.movielist.cover.coverpath.value, metaName.replace(" ","_").replace(".","_"))
else:
filename = re.sub("\.ts$", '.jpg', filename_org)
if not fileExists(filename):
if metaName is not None:
if re.search('[Ss][0-9]+[Ee][0-9]+', metaName) is not None:
#if metaName is not None:
cleanTitle = re.sub('[Ss][0-9]+[Ee][0-9]+.*[a-zA-Z0-9_]+','', metaName, flags=re.S|re.I)
cleanTitle = cleanFile(cleanTitle)
print "cleanTitle:", cleanTitle
url = 'http://thetvdb.com/api/GetSeries.php?seriesname=%s&language=de' % cleanTitle.replace(' ','%20')
data.append(('file', 'serie', filename, cleanTitle, url, None, None))
#else:
# url = 'http://thetvdb.com/api/GetSeries.php?seriesname=%s&language=de' % cleanTitle.replace(' ','%20')
# data.append(('file', 'serie', filename, cleanTitle, url, None, None))
else:
#if metaName is not None:
url = 'http://api.themoviedb.org/3/search/movie?api_key=8789cfd3fbab7dccf1269c3d7d867aff&query=%s&language=de' % metaName.replace(' ','%20')
data.append(('file', 'movie', filename, metaName, url, None, None))
#else:
# url = 'http://api.themoviedb.org/3/search/movie?api_key=8789cfd3fbab7dccf1269c3d7d867aff&query=%s&language=de' % cleanTitle.replace(' ','%20')
# data.append(('file', 'serie', filename, cleanTitle, url, None, None))
self.count = len(data)
if not self.background:
self.callback_infos("Found %s File(s)." % self.count)
if self.count != 0:
self.scanForCovers(data)
else:
if not self.background:
self.scanning = False
self.callback_infos("No Movie(s) found!")
self.callback_finished("Done")
else:
self.scanning = False
else:
print "still scanning.."
def getMovieSaveFile(self, moviename):
if re.search('[Ss][0-9]+[Ee][0-9]+', moviename) is not None:
tvseries = compile('(.*\w)[\s\.|-]+[S|s][0-9]+[E|e][0-9]+[\s\.|-].*?\.[ts|avi|mkv|divx|f4v|flv|img|iso|m2ts|m4v|mov|mp4|mpeg|mpg|mts|vob|wmv]')
tvseriesalt = compile('^[S|s][0-9]+[E|e][0-9]+[\s\.\-](.*\w)\.[ts|avi|mkv|divx|f4v|flv|img|iso|m2ts|m4v|mov|mp4|mpeg|mpg|mts|vob|wmv]')
if tvseries.match(moviename) is not None:
return tvseries.match(moviename).groups()[0].replace(" ","_").replace(".","_")
elif tvseriesalt.match(moviename) is not None:
return tvseriesalt.match(moviename).groups()[0].replace(" ","_").replace(".","_")
else:
return None
else:
movietitle = compile('(.*\w)\.[ts|avi|mkv|divx|f4v|flv|img|iso|m2ts|m4v|mov|mp4|mpeg|mpg|mts|vob|wmv]')
if movietitle.match(moviename) is not None:
return movietitle.match(moviename).groups()[0].replace(" ","_").replace(".","_")
else:
return None
def scanForCovers(self, data):
self.start_time = time.clock()
# filename', 'serie', filename, cleanTitle, url, season, episode
self.guilist = []
self.counting = 0
self.found = 0
self.notfound = 0
self.error = 0
ds = defer.DeferredSemaphore(tokens=2)
downloads = [ds.run(self.download, url).addCallback(self.parseWebpage, which, type, filename, title, url, season, episode).addErrback(self.dataErrorInfo) for which, type, filename, title, url, season, episode in data]
finished = defer.DeferredList(downloads).addErrback(self.dataErrorInfo)
def download(self, url):
return getPage(url, timeout=20, headers={'Accept': 'application/json'})
def parseWebpage(self, data, which, type, filename, title, url, season, episode):
self.counting += 1
if not self.background:
self.callback_infos("Cover(s): %s / %s - Scan: %s" % (str(self.counting), str(self.count), title))
if type == "movie":
list = []
try:
list = re.search('poster_path":"(.+?)".*?"original_title":"(.+?)"', str(data), re.S).groups(1)
except:
list = re.search('original_title":"(.+?)".*?"poster_path":"(.+?)"', str(data), re.S).groups(1)
if list:
self.guilist.append(((title, True, filename),))
purl = "http://image.tmdb.org/t/p/%s%s" % (str(config.movielist.cover.themoviedb_coversize.value), str(list[0].replace('\\','')))
downloadPage(purl, filename).addCallback(self.countFound).addErrback(self.dataErrorDownload)
else:
self.guilist.append(((title, False, filename),))
self.notfound += 1
if not self.background:
self.callback_notfound(self.notfound)
# get description
if config.movielist.cover.getdescription.value:
idx = []
idx = re.findall('"id":(.*?),', data, re.S)
if idx:
iurl = "http://api.themoviedb.org/3/movie/%s?api_key=8789cfd3fbab7dccf1269c3d7d867aff&language=de" % idx[0]
getPage(iurl, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.getInfos, id, type, filename).addErrback(self.dataError)
elif type == "serie":
list = []
list = re.findall('<seriesid>(.*?)</seriesid>', data, re.S)
if list:
self.guilist.append(((title, True, filename),))
purl = "http://www.thetvdb.com/banners/_cache/posters/%s-1.jpg" % list[0]
downloadPage(purl, filename).addCallback(self.countFound).addErrback(self.dataErrorDownload)
else:
self.notfound += 1
self.guilist.append(((title, False, filename),))
if not self.background:
self.callback_notfound(self.notfound)
# get description
if config.movielist.cover.getdescription.value:
if season and episode:
iurl = "http://www.thetvdb.com/api/2AAF0562E31BCEEC/series/%s/default/%s/%s/de.xml" % (list[0], str(int(season)), str(int(episode)))
getPage(iurl, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.getInfos, id, type, filename).addErrback(self.dataError)
else:
self.notfound += 1
if not self.background:
self.callback_notfound(self.notfound)
if not self.background:
self.callback_menulist(self.guilist)
self.checkDone()
def checkDone(self):
print self.counting, self.count
if int(self.counting) == int(str(self.count)):
elapsed_time = (time.clock() - self.start_time)
if not self.background:
self.callback_infos("Downloaded %s Cover(s) in %.1f sec." % (str(self.found), elapsed_time))
self.callback_finished("Done")
self.scanning = False
print "Found:", self.found
print "Not Found:", self.notfound
print "Errors:", self.error
print "Total: %s / %s" % (self.counting, self.count)
self.callback_finished(self.count)
def countFound(self, data):
self.found += 1
if not self.background:
self.callback_found(self.found)
if int(self.counting) == int(str(self.count)):
elapsed_time = (time.clock() - self.start_time)
self.callback_infos("Downloaded %s Cover(s) in %.1f sec." % (str(self.found), elapsed_time))
self.checkDone()
def getInfos(self, data, id, type, filename):
if type == "movie":
infos = re.findall('"genres":\[(.*?)\].*?"overview":"(.*?)"', data, re.S)
if infos:
(genres, desc) = infos[0]
self.writeTofile(decodeHtml(desc), filename)
elif type == "serie":
infos = re.findall('<Overview>(.*?)</Overview>', data, re.S)
if infos:
desc = infos[0]
self.writeTofile(decodeHtml(desc), filename)
def writeTofile(self, text, filename):
if not fileExists(filename.replace('.jpg','.txt')):
wFile = open(filename.replace('.jpg','.txt'),"w")
wFile.write(text)
wFile.close()
def dataError(self, error):
print "ERROR:", error
self.checkDone()
def dataErrorInfo(self, error):
self.error += 1
self.counting += 1
print "ERROR dataErrorInfo:", error
if not self.background:
self.callback_error(self.error)
self.checkDone()
    def dataErrorDownload(self, error):
        # Errback for the cover download itself: same bookkeeping as
        # dataErrorInfo, only the log message position differs.
        self.error += 1
        self.counting += 1
        if not self.background:
            self.callback_error(self.error)
        print "ERROR:", error
        self.checkDone()
class fmlcMenuList(GUIComponent, object):
    """Listbox for the cover scanner screen: one row per movie with a
    cover-found/cover-missing icon and the movie name.

    Rows are ``(name, coverFound, filename)`` tuples supplied via setList().
    """
    GUI_WIDGET = eListbox
    def __init__(self):
        GUIComponent.__init__(self)
        self.l = eListboxPythonMultiContent()
        self.l.setFont(0, gFont('Regular', 22))
        self.l.setItemHeight(30)
        self.l.setBuildFunc(self.buildList)
    def buildList(self, entry):
        # Render one (name, coverFound, filename) row: icon at x=10, name at x=50.
        width = self.l.getItemSize().width()  # NOTE(review): unused
        (name, coverFound, filename) = entry
        res = [ None ]
        if coverFound:
            truePath = "/usr/share/enigma2/skin_default/extensions/cover_yes.png"
            res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 10, 1, 25, 25, loadPNG(truePath)))
        else:
            falsePath = "/usr/share/enigma2/skin_default/extensions/cover_no.png"
            res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 10, 1, 25, 25, loadPNG(falsePath)))
        res.append((eListboxPythonMultiContent.TYPE_TEXT, 50, 0, 1280, 30, 0, RT_HALIGN_LEFT | RT_VALIGN_CENTER, str(name)))
        return res
    def getCurrent(self):
        # Data tuple of the selected row, or a falsy value when list is empty.
        cur = self.l.getCurrentSelection()
        return cur and cur[0]
    def postWidgetCreate(self, instance):
        instance.setContent(self.l)
        # NOTE(review): uses self.instance instead of the passed-in instance;
        # works if GUIComponent assigns self.instance before calling this,
        # but the parameter would be the consistent choice -- confirm.
        self.instance.setWrapAround(True)
    def preWidgetRemove(self, instance):
        instance.setContent(None)
    def setList(self, list):
        self.l.setList(list)
    def moveToIndex(self, idx):
        self.instance.moveSelectionTo(idx)
    def getSelectionIndex(self):
        return self.l.getCurrentSelectionIndex()
    def getSelectedIndex(self):
        # Alias of getSelectionIndex(), kept for caller compatibility.
        return self.l.getCurrentSelectionIndex()
    def selectionEnabled(self, enabled):
        if self.instance is not None:
            self.instance.setSelectionEnable(enabled)
    def pageUp(self):
        if self.instance is not None:
            self.instance.moveSelection(self.instance.pageUp)
    def pageDown(self):
        if self.instance is not None:
            self.instance.moveSelection(self.instance.pageDown)
    def up(self):
        if self.instance is not None:
            self.instance.moveSelection(self.instance.moveUp)
    def down(self):
        if self.instance is not None:
            self.instance.moveSelection(self.instance.moveDown)
class FindMovieList(Screen):
    """Cover-scanner UI: lists movies with a found/missing marker and a
    cover preview.  Green starts a scan, blue changes the scan path,
    cursor keys browse (updating the preview), red/exit closes."""
    skin = """
        <screen position="40,80" size="1200,600" title=" " >
        <widget name="info" position="10,10" size="820,30" font="Regular;24" foregroundColor="#00fff000"/>
        <widget name="path" position="10,50" size="820,30" font="Regular;24" foregroundColor="#00fff000"/>
        <widget name="found" position="850,10" size="300,22" font="Regular;20" foregroundColor="#00fff000"/>
        <widget name="notfound" position="850,40" size="300,22" font="Regular;20" foregroundColor="#00fff000"/>
        <widget name="error" position="850,70" size="300,22" font="Regular;20" foregroundColor="#00fff000"/>
        <widget name="list" position="10,90" size="800,480" scrollbarMode="showOnDemand"/>
        <widget name="cover" position="850,110" size="300,420" alphatest="blend"/>
        <widget name="key_red" position="40,570" size="250,25" halign="center" transparent="1" font="Regular;20"/>
        <widget name="key_green" position="330,570" size="250,22" halign="center" transparent="1" font="Regular;20"/>
        <widget name="key_blue" position="890,570" size="250,22" halign="center" transparent="1" font="Regular;20"/>
        <eLabel position="40,596" size="250,4" zPosition="-10" backgroundColor="#20f23d21" />
        <eLabel position="330,596" size="250,4" zPosition="-10" backgroundColor="#20389416" />
        <eLabel position="890,596" size="250,4" zPosition="-10" backgroundColor="#200064c7" />
        </screen>"""
    def __init__(self, session, service):
        # ``service`` is the movie the list was opened from (may be None).
        Screen.__init__(self, session)
        self.session = session
        self.service = service
        # Ensure the background scanner singleton exists and is armed.
        BackgroundCoverScanner(session)
        bg_func = BackgroundCoverScanner.instance
        bg_func.startTimer()
        self["actions"] = ActionMap(["OkCancelActions", "ShortcutActions", "WizardActions", "ColorActions", "SetupActions", "NumberActions", "MenuActions", "EPGSelectActions"], {
            "cancel": self.cancel,
            "green" : self.getFileList,
            "blue" : self.setScanPath,
            "left" : self.keyLeft,
            "right" : self.keyRight,
            "up" : self.keyUp,
            "down" : self.keyDown,
            "ok" : self.keyOk
        }, -1)
        #self['title']
        self.title = "%s v%s" % (pname, pversion)
        self['info'] = Label("")
        self['path'] = Label("Scan Path: %s" % config.movielist.cover.scanpath.value)
        self['found'] = Label("Download:")
        self['notfound'] = Label("Not Found:")
        self['error'] = Label("Download Error:")
        self['cover'] = Pixmap()
        self['key_red'] = Label("Exit")
        self['key_green'] = Label("Search Cover(s)")
        self['key_yellow'] = Label("")
        self['key_blue'] = Label("Set Scan Path")
        self['list'] = fmlcMenuList()
        # Route scanner events into this screen's widgets.
        self.fileScanner = BackgroundCoverScanner.instance
        self.fileScanner.setCallbacks(self.msgCallback, self.foundCallback, self.notFoundCallback, self.errorCallback, self.listCallback, self.msgDone)
        self.scanning = False
        self.first = False  # set once the first list arrived (see listCallback)
        self.onLayoutFinish.append(self._onLayoutFinish)
    def _onLayoutFinish(self):
        self['info'].setText("Press 'Green' for scanning your MovieList and search Cover(s).")
    def msgCallback(self, txt):
        # Scanner progress/status text.
        self['info'].setText(txt)
    def foundCallback(self, txt):
        self['found'].setText("Download: %s" % str(txt))
    def notFoundCallback(self, txt):
        self['notfound'].setText("Not Found: %s" % str(txt))
    def errorCallback(self, txt):
        self['error'].setText("Download Error: %s" % str(txt))
    def listCallback(self, list):
        # New result list from the scanner; show the first cover once.
        self['list'].setList(list)
        if not self.first:
            self.first = True
            self.getCover()
    def msgDone(self, txt):
        self.first = True
    def __onClose(self):
        # Detach scanner callbacks so the singleton does not touch dead widgets.
        # NOTE(review): no self.onClose.append(self.__onClose) is visible in
        # __init__ -- confirm this handler is actually registered somewhere.
        self.fileScanner.setCallbacks(None, None, None, None, None, None)
    def getFileList(self):
        # Green: reset counters and start a foreground scan.
        self['found'].setText("Download:")
        self['notfound'].setText("Not Found:")
        self['error'].setText("Download Error:")
        self.fileScanner.getFileList(False)
    def getCover(self):
        # Show the cover of the currently selected row, if any.
        check = self['list'].getCurrent()
        if check == None:
            return
        filename = self['list'].getCurrent()[2]
        self.showCover(filename)
    def showCover(self, poster_path):
        # Decode and display the image; falls back to a placeholder image.
        self.picload = ePicLoad()
        if not fileExists(poster_path):
            poster_path = "/usr/share/enigma2/skin_default/extensions/no_coverbg.png"
        if fileExists(poster_path):
            self["cover"].instance.setPixmap(gPixmapPtr())  # clear old pixmap
            scale = AVSwitch().getFramebufferScale()
            size = self["cover"].instance.size()
            self.picload.setPara((size.width(), size.height(), scale[0], scale[1], False, 1, "#00000000"))
            # DreamOS and older enigma2 have different startDecode signatures.
            if isDreamOS:
                if self.picload.startDecode(poster_path, False) == 0:
                    ptr = self.picload.getData()
                    if ptr != None:
                        self["cover"].instance.setPixmap(ptr)
                        self["cover"].show()
            else:
                if self.picload.startDecode(poster_path, 0, 0, False) == 0:
                    ptr = self.picload.getData()
                    if ptr != None:
                        self["cover"].instance.setPixmap(ptr)
                        self["cover"].show()
            del self.picload
    def keyOk(self):
        pass
    def setScanPath(self):
        # Blue: open the directory chooser; result arrives in selectedMediaFile.
        self.session.openWithCallback(self.selectedMediaFile, FindMovieListScanPath, config.movielist.cover.scanpath.value)
    def selectedMediaFile(self, res):
        # Persist the chosen scan path (res is None when cancelled).
        if res is not None:
            config.movielist.cover.scanpath.value = res
            config.movielist.cover.scanpath.save()
            configfile.save()
            self['path'].setText("Scan Path: %s" % config.movielist.cover.scanpath.value)
    def keyLeft(self):
        check = self['list'].getCurrent()
        if check == None:
            return
        self['list'].pageUp()
        self.getCover()
    def keyRight(self):
        check = self['list'].getCurrent()
        if check == None:
            return
        self['list'].pageDown()
        self.getCover()
    def keyDown(self):
        check = self['list'].getCurrent()
        if check == None:
            return
        self['list'].down()
        self.getCover()
    def keyUp(self):
        check = self['list'].getCurrent()
        if check == None:
            return
        self['list'].up()
        self.getCover()
    def cancel(self):
        self.close()
class FindMovieListScanPath(Screen):
    """Directory chooser for the cover scan path.

    Closes with the selected directory (guaranteed trailing slash) on
    green, or ``None`` on cancel/red.
    """
    skin = """
        <screen position="40,80" size="1200,600" title=" " >
        <widget name="media" position="10,10" size="540,30" valign="top" font="Regular;22" />
        <widget name="folderlist" position="10,45" zPosition="1" size="540,300" scrollbarMode="showOnDemand"/>
        <widget name="key_red" position="40,570" size="250,25" halign="center" transparent="1" font="Regular;20"/>
        <widget name="key_green" position="330,570" size="250,22" halign="center" transparent="1" font="Regular;20"/>
        <eLabel position="40,596" size="250,4" zPosition="-10" backgroundColor="#20f23d21" />
        <eLabel position="330,596" size="250,4" zPosition="-10" backgroundColor="#20389416" />
        </screen>
        """
    def __init__(self, session, initDir, plugin_path = None):
        Screen.__init__(self, session)
        # Fall back to the default movie dir if the saved path vanished.
        if not os.path.isdir(initDir):
            initDir = "/media/hdd/movie/"
        self["folderlist"] = FileList(initDir, inhibitMounts = False, inhibitDirs = False, showMountpoints = False, showFiles = False)
        self["media"] = Label()
        self["actions"] = ActionMap(["WizardActions", "DirectionActions", "ColorActions", "EPGSelectActions"],
                                    {
                                        "back": self.cancel,
                                        "left": self.left,
                                        "right": self.right,
                                        "up": self.up,
                                        "down": self.down,
                                        "ok": self.ok,
                                        "green": self.green,
                                        "red": self.cancel
                                    }, -1)
        self.title=_("Choose Download folder")
        # Older skins may not define a 'title' widget.
        try:
            self["title"] = StaticText(self.title)
        except:
            print 'self["title"] was not found in skin'
        self["key_red" ]= Label(_("Cancel"))
        self["key_green"] = Label(_("Ok"))
    def cancel(self):
        self.close(None)
    def green(self):
        # Confirm the selection, making sure it carries a trailing slash.
        directory = self["folderlist"].getSelection()[0]
        if (directory.endswith("/")):
            self.fullpath = self["folderlist"].getSelection()[0]
        else:
            self.fullpath = self["folderlist"].getSelection()[0] + "/"
        self.close(self.fullpath)
    def up(self):
        self["folderlist"].up()
        self.updateFile()
    def down(self):
        self["folderlist"].down()
        self.updateFile()
    def left(self):
        self["folderlist"].pageUp()
        self.updateFile()
    def right(self):
        self["folderlist"].pageDown()
        self.updateFile()
    def ok(self):
        # Enter the highlighted directory if possible.
        if self["folderlist"].canDescent():
            self["folderlist"].descent()
            self.updateFile()
    def updateFile(self):
        # Mirror the current selection into the headline label.
        currFolder = self["folderlist"].getSelection()[0]
        self["media"].setText(currFolder)
def decodeHtml(text):
    """Normalize scraped description text: fix mojibake (UTF-8 read as
    latin-1), decode HTML entities and literal ``\\uXXXX`` escapes, and map
    typographic punctuation to plain ASCII equivalents.  Replacements are
    order-dependent; do not reorder."""
    text = text.replace('ä','ä')
    text = text.replace('\u00e4','ä')
    text = text.replace('ä','ä')
    text = text.replace('Ä','Ä')
    text = text.replace('\u00c4','Ä')
    text = text.replace('Ä','Ä')
    text = text.replace('ö','ö')
    text = text.replace('\u00f6','ö')
    text = text.replace('ö','ö')
    text = text.replace('ö','Ö')
    text = text.replace('Ö','Ö')
    text = text.replace('\u00d6','Ö')
    text = text.replace('Ö','Ö')
    text = text.replace('ü','ü')
    text = text.replace('\u00fc','ü')
    text = text.replace('ü','ü')
    text = text.replace('Ü','Ü')
    text = text.replace('\u00dc','Ü')
    text = text.replace('Ü','Ü')
    text = text.replace('ß','ß')
    text = text.replace('\u00df','ß')
    text = text.replace('ß','ß')
    text = text.replace('&','&')
    text = text.replace('"','\"')
    text = text.replace('>','>')
    # NOTE(review): the lines below were mangled by HTML-entity decoding of
    # this source (what was probably '&#39;' etc. now renders as a bare
    # quote, leaving an unterminated ''' literal on the next line).  Restore
    # the original entity strings from upstream before shipping; nothing has
    # been altered here.
    text = text.replace(''',"'")
    text = text.replace('´','\'')
    text = text.replace('–','-')
    text = text.replace('„','"')
    text = text.replace('”','"')
    text = text.replace('“','"')
    text = text.replace('‘','\'')
    text = text.replace('’','\'')
    text = text.replace('"','"')
    text = text.replace('"','"')
    text = text.replace('&','&')
    text = text.replace(''','\'')
    text = text.replace(''','\'')
    text = text.replace(' ',' ')
    text = text.replace('\u00a0',' ')
    text = text.replace('\u00b4','\'')
    text = text.replace('\u003d','=')
    text = text.replace('\u0026','&')
    text = text.replace('®','')
    text = text.replace('á','a')
    text = text.replace('é','e')
    text = text.replace('ó','o')
    text = text.replace('–',"-")
    text = text.replace('—',"â€â€")
    text = text.replace('—','â€â€')
    text = text.replace('\u2013',"–")
    text = text.replace('‘',"'")
    text = text.replace('’',"'")
    text = text.replace('“',"'")
    text = text.replace('”','"')
    text = text.replace('„',',')
    text = text.replace('\u014d','Ã…Â')
    text = text.replace('\u016b','Å«')
    text = text.replace('\u201a','\"')
    text = text.replace('\u2018','\"')
    text = text.replace('\u201e','\"')
    text = text.replace('\u201c','\"')
    text = text.replace('\u201d','\'')
    text = text.replace('\u2019s','’')
    text = text.replace('\u00e0','à')
    text = text.replace('\u00e7','ç')
    text = text.replace('\u00e8','é')
    text = text.replace('\u00e9','é')
    text = text.replace('\u00c1','ÃÂ')
    text = text.replace('\u00c6','Æ')
    text = text.replace('\u00e1','á')
    text = text.replace('Ä','Ä')
    text = text.replace('Ö','Ö')
    text = text.replace('Ü','Ü')
    text = text.replace('ä','ä')
    text = text.replace('ö','ö')
    text = text.replace('ü','ü')
    text = text.replace('ß','ß')
    text = text.replace('é','é')
    text = text.replace('·','·')
    text = text.replace("'","'")
    text = text.replace("&","&")
    text = text.replace("û","û")
    text = text.replace("ø","ø")
    text = text.replace("!","!")
    text = text.replace("?","?")
    text = text.replace('…','...')
    text = text.replace('\u2026','...')
    text = text.replace('…','...')
    text = text.replace('‪','')
    return text
def autostart(session, **kwargs):
    """Session-start hook: create the background scanner singleton for this
    session and arm its timer."""
    # Instantiation registers/updates the class-level ``instance``.
    BackgroundCoverScanner(session)
    BackgroundCoverScanner.instance.startTimer()
def main(session, service, **kwargs):
    # Movielist-menu entry point: open the scanner UI for ``service``.
    session.open(FindMovieList, service)
def main2(session, **kwargs):
    # Plugin-menu entry point: open the scanner UI without a service.
    session.open(FindMovieList, None)
def Plugins(**kwargs):
    # Enigma2 plugin registration: movielist menu, plugin menu, and a
    # session-start hook for the background scanner.
    return [PluginDescriptor(name="Find MovieList Covers", description="Search Covers", where = PluginDescriptor.WHERE_MOVIELIST, fnc=main),
            PluginDescriptor(name="Find MovieList Covers", description="Search Covers", where = PluginDescriptor.WHERE_PLUGINMENU, fnc=main2),
            PluginDescriptor(where=[PluginDescriptor.WHERE_SESSIONSTART], fnc=autostart)
            ]
| schleichdi2/OpenNfr_E2_Gui-6.0 | lib/python/Components/SearchCovers.py | Python | gpl-2.0 | 32,601 |
from random import randint
# Classic buffer-overflow and format-string payloads used by bofinjection(),
# ordered roughly by increasing size/aggressiveness.
overflowstrings = ["A" * 255, "A" * 256, "A" * 257, "A" * 420, "A" * 511, "A" * 512, "A" * 1023, "A" * 1024, "A" * 2047, "A" * 2048, "A" * 4096, "A" * 4097, "A" * 5000, "A" * 10000, "A" * 20000, "A" * 32762, "A" * 32763, "A" * 32764, "A" * 32765, "A" * 32766, "A" * 32767, "A" * 32768, "A" * 65534, "A" * 65535, "A" * 65536, "%x" * 1024, "%n" * 1025 , "%s" * 2048, "%s%n%x%d" * 5000, "%s" * 30000, "%s" * 40000, "%.1024d", "%.2048d", "%.4096d", "%.8200d", "%99999999999s", "%99999999999d", "%99999999999x", "%99999999999n", "%99999999999s" * 1000, "%99999999999d" * 1000, "%99999999999x" * 1000, "%99999999999n" * 1000, "%08x" * 100, "%%20s" * 1000,"%%20x" * 1000,"%%20n" * 1000,"%%20d" * 1000, "%#0123456x%08x%x%s%p%n%d%o%u%c%h%l%q%j%z%Z%t%i%e%g%f%a%C%S%08x%%#0123456x%%x%%s%%p%%n%%d%%o%%u%%c%%h%%l%%q%%j%%z%%Z%%t%%i%%e%%g%%f%%a%%C%%S%%08x"]
def bitflipping(data, mangle_percentage=7):
    """Return *data* with roughly ``mangle_percentage`` percent of its
    characters overwritten by random bytes.

    Positions are drawn independently, so the same character may be hit
    more than once; the string length never changes.
    """
    size = len(data)
    mutations = int(size * mangle_percentage / 100)
    for _ in range(mutations):
        pos = randint(0, size - 1)
        data = data[:pos] + chr(randint(0, 255)) + data[pos + 1:]
    return data
def bofinjection(data):
    """Insert one randomly chosen overflow/format-string payload at a
    random position inside *data* and return the result.

    Bug fix: the previous code reused the payload index ``r`` as the data
    offset, which (a) limited the insertion point to the first
    ``len(overflowstrings)`` positions and (b) through the ``data[r-l:]``
    slice duplicated or dropped part of *data* whenever ``r`` exceeded
    ``len(data)``.  Payload choice and insertion point are now independent,
    and the original bytes of *data* are always preserved intact.
    """
    payload = overflowstrings[randint(0, len(overflowstrings) - 1)]
    pos = randint(0, len(data))  # both ends inclusive: prepend or append possible
    return data[:pos] + payload + data[pos:]
def fuzz(data, bit_flip_percentage = 20, bof_injection_percentage = 20, bit_flip_density = 7):
    """Maybe mutate *data*: with ``bit_flip_percentage`` percent probability
    flip random bytes (density controlled by ``bit_flip_density``), and
    independently with ``bof_injection_percentage`` percent probability
    inject an overflow payload.

    Returns ``(was_fuzzed, data)`` where ``was_fuzzed`` tells whether any
    mutation was applied.
    """
    was_fuzzed = False
    # Two independent dice rolls, one per mutation strategy.
    if randint(0, 100) <= bit_flip_percentage:
        was_fuzzed = True
        data = bitflipping(data, bit_flip_density)
    if randint(0, 100) <= bof_injection_percentage:
        was_fuzzed = True
        data = bofinjection(data)
    return was_fuzzed, data
| ausarbluhd/EternalLLC | scripts/mallory/src/fuzz.py | Python | gpl-2.0 | 1,915 |
"""
Source: https://github.com/txt/mase/blob/master/src/ok.md
# Unit tests in Python
Python has some great unit testing tools. The one
shown below is a "less-is-more" approach and is
based on [Kent Beck video on how to write a test engine in just a
few lines of code](https://www.youtube.com/watch?v=nIonZ6-4nuU).
For example usages, see [okok.py](okok.md) which can be loaded via
```
python okok.py
```
Share and enjoy.
"""
def ok(*lst):
    """Run every function in *lst* through the ``unittest`` wrapper and
    return the last one, so it is usable as a decorator on a single test.

    NOTE(review): only the first function's name is printed, and calling
    with no arguments raises (lst[0] / the loop variable do not exist).
    """
    print "### ",lst[0].__name__
    for one in lst: unittest(one)
    return one
class unittest:
    """Minimal Kent-Beck-style test runner (NOTE: deliberately shadows the
    stdlib ``unittest`` module in this file).

    Instantiating with a callable runs it; failures are tallied in the
    class attributes and reported with a traceback.  ``i`` is used in
    place of the conventional ``self`` throughout.
    """
    tries = fails = 0 # tracks the record so far
    @staticmethod
    def score():
        # Summary string; +0.001 avoids division by zero when nothing ran.
        t = unittest.tries
        f = unittest.fails
        return "# TRIES= %s FAIL= %s %%PASS = %s%%" % (
            t,f,int(round(t*100/(t+f+0.001))))
    def __init__(i,test):
        unittest.tries += 1
        try:
            test()
        except Exception,e:
            unittest.fails += 1
            i.report(test)
    def report(i,test):
        # Print the failure traceback plus the running score.
        import traceback
        print traceback.format_exc()
        # NOTE(review): trailing tokens below are dataset-extraction residue
        # fused onto the original source line; restore from upstream.
        print unittest.score(),':',test.__name__ | rchakra3/x9115rc3 | hw/code/1/ok.py | Python | gpl-2.0 | 1,020 |
__author__ = 'Filushin_DV'
import generators
import tables
import profiler
from faker import Factory
# Demo script: describe a random "city" table, generate INSERT statements
# with SqlGenerator and print them, timing the run with Profiler.
date_start = '01/01/2015'  # unused -- the date literals are repeated below
date_end = '06/06/2015'    # unused
citys = []                 # (sic) pool of random city names for the 'name' field
faker = Factory.create()
for i in range(100):
    citys.append(faker.city())
valid_parents = (10, 2, 3)  # allowed id_parent values
field_list = []
field_list.append(tables.GenField('id', 'int', 1, 1000))
field_list.append(tables.GenField('id_parent', 'int', 1, 1, *valid_parents))
field_list.append(tables.GenField('name', 'str', 1, 5, *citys))
field_list.append(tables.GenField('is_delete', 'int', 0, 1))
field_list.append(tables.GenField('added', 'date', '01/01/2015', '06/06/2015'))
field_list.append(tables.GenField('edited', 'datetime', '01/01/2015', '06/06/2015'))
table = tables.GenTable('city', 5, *field_list)  # 5 = number of rows to generate
with profiler.Profiler() as p:
    gen = generators.SqlGenerator(table)
    gen.generate_sql()
    #gen.save_script('file.sql')
    for line in gen.get_sql():
        print(line)
#print (string.punctuation)  (debug leftover; dataset metadata residue removed)
# Repeat the decimal digits of 179**10 four times, then print the 10th
# root (as a float) of the resulting integer.
digits = str(179 ** 10)
big_number = int(digits * 4)
print(big_number ** (1 / 10))
| Wladislao/Python_Problems | 3xxx/34xx/345x/3451.py | Python | gpl-2.0 | 39 |
import sys
from datetime import datetime, timedelta
from array import array
from numpy import hsplit, asarray
class ECG:
    '''Parser for ISHNE 1.0 Holter ECG files.

    Validates the magic number and the header CRC, then exposes the header
    fields (patient data, lead configuration, sampling rate, ...) as
    attributes and the samples as ``self.ecgInChannels`` -- a list with one
    column array per lead.

    Raises ``NoneFileSpecified`` for an empty filename and ``Exception``
    for a bad magic number or CRC mismatch.
    '''
    def __init__(self, filename, enc='cp1250'):
        '''Default encoding is set to cp1250 - set accordingly to your needs'''
        # ISHNE lead-specification code -> human-readable lead name
        self.leadNamesDict = {0:'Unknown', 1:'Bipolar', 2:'X biploar', 3:'Y bipolar', 4:'Z biploar', \
                              5:'I', 6:'II', 7:'III', 8:'VR', 9:'VL', 10:'VF', \
                              11:'V1', 12:'V2', 13:'V3', 14:'V4', 15:'V5', 16:'V6', \
                              17:'ES', 18:'AS', 19:'AI'}
        self.fn = filename
        self.enc = enc
        if not self.fn:
            # Bug fix: the exception was instantiated but never raised, so an
            # empty filename fell through to open('') instead.
            raise NoneFileSpecified()
        with open(self.fn, mode='rb') as ecgFile:
            self.magicNumber = ecgFile.read(8).decode(self.enc)
            if self.magicNumber != 'ISHNE1.0':
                raise Exception('File does not have \'ISHNE1.0\' string in the first 8 bytes')
            # Stored checksum over the fixed + variable header block.
            self.crc = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)
            self.headerFixedLength = 512
            self.headerVariableLength = int.from_bytes(ecgFile.read(4), byteorder='little', signed=True)
            #get back to 10th byte where header starts
            ecgFile.seek(10)
            self.headerWhole = ecgFile.read(self.headerFixedLength + self.headerVariableLength)
            crc = int(self.compute_crc(self.headerWhole),2)
            if (crc != self.crc):
                raise Exception('CRC check for file failed. Computed CRC: {0}, CRC in file: {1}'.format(crc, self.crc))
            #get back to 14th byte just after headerVariableLength
            ecgFile.seek(14)
            self.channelNumberOfSamples = int.from_bytes(ecgFile.read(4), byteorder='little', signed=True)
            self.headerVariableOffset = int.from_bytes(ecgFile.read(4), byteorder='little', signed=True)
            self.ecgBytesBlockOffset = int.from_bytes(ecgFile.read(4), byteorder='little', signed=True)
            self.fileVersion = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)
            # Fixed-width text fields are NUL-padded; keep text before first NUL.
            self.patientFirstName = ecgFile.read(40).decode(self.enc)
            self.patientFirstName = self.patientFirstName.split('\x00', 1)[0]
            self.patientLastName = ecgFile.read(40).decode(self.enc)
            self.patientLastName = self.patientLastName.split('\x00', 1)[0]
            self.patientID = ecgFile.read(20).decode(self.enc)
            self.patientID = self.patientID.split('\x00', 1)[0]
            self.patientSex = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)
            self.patientRace = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)
            #patient date of birth as [dd,mm,yy]
            dob = list()
            for i in range(0,3):
                dob.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=True))
            self.patientDateOfBirth = datetime(dob[2], dob[1], dob[0])
            # date of test recording as [dd,mm,yy]
            dor = list()
            for i in range(0,3):
                dor.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=True))
            #date of file creation as [dd,mm,yy]
            dof = list()
            for i in range(0,3):
                dof.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=True))
            # NOTE(review): built from the recording date (dor), while dof is
            # read but unused.  Possibly a workaround for files with a zeroed
            # creation date -- confirm against upstream before changing.
            self.dateOfFileCreation = datetime(dor[2], dor[1], dor[0])
            #testStart - time of test begining HH:MM:SS
            testStart = list()
            for i in range(0,3):
                testStart.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=True))
            self.datetimeStartOfTest = datetime(dor[2],dor[1],dor[0],testStart[0],testStart[1],testStart[2])
            self.numberOfLeads = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)
            # 12 lead-spec slots are always present, even if fewer leads are used.
            self.leadsSpecs = list()
            self.leadsNames = list()
            for i in range(0,12):
                spec = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)
                self.leadsSpecs.append(spec)
                self.leadsNames.append(self.leadNamesDict[spec])
            self.leadsQuality = list()
            for i in range(0,12):
                self.leadsQuality.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=True))
            # Amplitude resolution per lead (unsigned, per ISHNE spec).
            self.leadsResolution = list()
            for i in range(0,12):
                self.leadsResolution.append(int.from_bytes(ecgFile.read(2), byteorder='little', signed=False))
            self.pacemaker = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)
            self.recorderType = ecgFile.read(40).decode(self.enc)
            self.recorderType = self.recorderType.split('\x00', 1)[0]
            self.samplingRate = int.from_bytes(ecgFile.read(2), byteorder='little', signed=True)
            self.datetimeEndOfTest = self.datetimeStartOfTest + timedelta(seconds=int(self.channelNumberOfSamples/self.samplingRate))
            self.fileProperiaty = ecgFile.read(80).decode(self.enc)
            self.fileProperiaty = self.fileProperiaty.split('\x00', 1)[0]
            self.fileCopyright = ecgFile.read(80).decode(self.enc)
            self.fileCopyright = self.fileCopyright.split('\x00', 1)[0]
            # NOTE(review): two consecutive 80-byte reads into self.reserved;
            # the second overwrites the first but keeps the file position
            # advancing -- verify the field layout against the ISHNE spec.
            self.reserved = ecgFile.read(80).decode(self.enc)
            self.reserved = self.reserved.split('\x00', 1)[0]
            self.reserved = ecgFile.read(80).decode(self.enc)
            self.reserved = self.reserved.split('\x00', 1)[0]
            self.headerVariable = ecgFile.read(self.headerVariableLength).decode(self.enc)
            if len(self.headerVariable) > 0:
                self.headerVariable = self.headerVariable.split('\x00', 1)[0]
            # Sample block: interleaved little-endian int16, one column per lead.
            ecgFile.seek(self.ecgBytesBlockOffset)
            ecgBytes = array('h')
            ecgBytes.fromfile(ecgFile, self.channelNumberOfSamples * self.numberOfLeads)
            ecgBytesArray = asarray(ecgBytes)
            ecgBytesArray = ecgBytesArray.reshape(-1,self.numberOfLeads)
            self.ecgInChannels = hsplit(ecgBytesArray, self.numberOfLeads)
    def compute_crc(self, data: bytes):
        """Compute the ISHNE header checksum (a CRC-CCITT variant).

        Returns the checksum as a *string*: the concatenation of
        ``bin(high)`` and ``bin(low)`` with two characters spliced out --
        this mirrors the value this reader compares against the header,
        not a conventional integer CRC.
        """
        # rotate-left of an N-bit value
        rol = lambda val, r_bits, max_bits: \
            (val << r_bits%max_bits) & (2**max_bits-1) | \
            ((val & (2**max_bits-1)) >> (max_bits-(r_bits%max_bits)))
        b = bytearray()
        data = bytearray(data)
        crc=0xFFFF
        crchi, crclo = divmod(crc, 0x100)
        for a in data:
            a = a ^ crchi
            crchi = a
            a = a >> 4
            a = a ^ crchi
            crchi = crclo
            crclo = a
            a = rol(a,4,8)
            b=a
            a = rol(a,1,8)
            a = a & 0x1F
            crchi = a ^ crchi
            a = b & 0xF0
            crchi = a ^ crchi
            b = rol(b,1,8)
            b = b & 0xE0
            crclo = b ^ crclo
        # NOTE(review): textual splice over the bin() forms; kept as-is to
        # stay compatible with the comparison in __init__.
        checksum = bin(crchi) + bin(crclo)
        checksum = checksum[:9] + '0' + checksum[11:]
        return checksum
class NoneFileSpecified(Exception):
    '''Raised by :class:`ECG` when no filename is given (filename can not be empty).'''
| panrobot/ishneECGviewer | ecgReader.py | Python | gpl-2.0 | 7,145 |
from __future__ import print_function
import os
import sys
from datetime import datetime
import portage
import gentoolkit.pprinter as pp
from gentoolkit.query import Query
from gentoolkit.package import Package
from euscan import CONFIG, BLACKLIST_PACKAGES
from euscan import handlers, output
from euscan.out import from_mirror
from euscan.helpers import version_blacklisted
from euscan.version import is_version_stable
from euscan.ebuild import package_from_ebuild
def filter_versions(cp, versions):
    """Collapse raw scan results into one entry per version.

    ``versions`` is an iterable of ``(url, version, handler, confidence)``
    tuples.  For duplicate versions the longest (most specific) URL wins,
    and versions blacklisted for package ``cp`` are dropped.

    Returns a list of ``(cp, url, version, handler, confidence)`` tuples.
    """
    filtered = {}
    for url, version, handler, confidence in versions:
        # Try to keep the most specific url (determined by its length).
        # Bug fix: the previous code compared len(url) against
        # len(filtered[version]) -- the size of the info dict (always 3) --
        # instead of the length of the stored URL.
        if version in filtered and len(url) < len(filtered[version]["url"]):
            continue
        # Remove blacklisted versions
        if version_blacklisted(cp, version):
            continue
        filtered[version] = {
            "url": url,
            "handler": handler,
            "confidence": confidence
        }
    return [
        (cp, filtered[version]["url"], version, filtered[version]["handler"],
         filtered[version]["confidence"])
        for version in filtered
    ]
def parse_src_uri(uris):
    """Parse a SRC_URI string into ``{distfile_name: [urls]}``.

    Handles the portage ``url -> renamed_file`` operator; tokens without a
    ``://`` scheme (USE-conditionals, bare names) are skipped.
    """
    mapping = {}
    tokens = uris.split()
    idx = 0
    while idx < len(tokens):
        uri = tokens[idx]
        idx += 1
        if '://' not in uri:
            continue
        if idx < len(tokens) and tokens[idx] == "->":
            # explicit rename: consume the operator and the target name
            file_ = tokens[idx + 1]
            idx += 2
        else:
            file_ = os.path.basename(uri)
        mapping.setdefault(file_, []).append(uri)
    return mapping
def reload_gentoolkit():
    """Re-point gentoolkit's cached PORTDB handles at the current portage
    tree (needed after portage settings changed for an ebuild scan).

    Recent gentoolkit versions no longer cache PORTDB, in which case this
    function is a no-op.
    """
    import gentoolkit
    # Not used in recent versions
    if not hasattr(gentoolkit.package, 'PORTDB'):
        return
    PORTDB = portage.db[portage.root]["porttree"].dbapi
    if hasattr(gentoolkit.dbapi, 'PORTDB'):
        gentoolkit.dbapi.PORTDB = PORTDB
    if hasattr(gentoolkit.package, 'PORTDB'):
        gentoolkit.package.PORTDB = PORTDB
    if hasattr(gentoolkit.query, 'PORTDB'):
        gentoolkit.query.PORTDB = PORTDB
def scan_upstream(query, on_progress=None):
    """
    Scans the upstream searching new versions for the given query.

    ``query`` is either a package atom/name or a path ending in ``.ebuild``.
    Returns the filtered list of candidate versions (see filter_versions),
    or None when no usable package is found.  ``on_progress`` is an
    optional callback taking ``increment=<int>``.
    """
    matches = []
    if query.endswith(".ebuild"):
        cpv = package_from_ebuild(query)
        reload_gentoolkit()
        if cpv:
            matches = [Package(cpv)]
    else:
        matches = Query(query).find(
            include_masked=True,
            in_installed=False,
        )
    if not matches:
        output.ewarn(
            pp.warn("No package matching '%s'" % pp.pkgquery(query))
        )
        return None
    # Pick the highest version, skipping live (9999) ebuilds when possible.
    matches = sorted(matches)
    pkg = matches.pop()
    while '9999' in pkg.version and len(matches):
        pkg = matches.pop()
    # NOTE(review): pkg is always truthy here (pop() of a non-empty list),
    # so this guard looks dead; a 9999-only package falls through instead.
    if not pkg:
        output.ewarn(
            pp.warn("Package '%s' only have a dev version (9999)"
                    % pp.pkgquery(pkg.cp))
        )
        return None
    # useful data only for formatted output
    start_time = datetime.now()
    output.metadata("datetime", start_time.isoformat(), show=False)
    output.metadata("cp", pkg.cp, show=False)
    output.metadata("cpv", pkg.cpv, show=False)
    if on_progress:
        on_progress(increment=10)
    if pkg.cp in BLACKLIST_PACKAGES:
        output.ewarn(
            pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp))
        )
        return None
    if not CONFIG['quiet']:
        if not CONFIG['format']:
            pp.uprint(
                " * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name()))
            )
            pp.uprint()
        else:
            output.metadata("overlay", pp.section(pkg.repo_name()))
        ebuild_path = pkg.ebuild_path()
        if ebuild_path:
            output.metadata(
                "ebuild", pp.path(os.path.normpath(ebuild_path))
            )
        uris, homepage, description = pkg.environment(
            ('SRC_URI', 'HOMEPAGE', 'DESCRIPTION')
        )
        output.metadata("repository", pkg.repo_name())
        output.metadata("homepage", homepage)
        output.metadata("description", description)
    else:
        uris = pkg.environment('SRC_URI')
    cpv = pkg.cpv
    uris = parse_src_uri(uris)
    # NOTE(review): this iterates the dict's keys (distfile names), not the
    # URL lists -- 'mirror://' will rarely match a filename.  Check the
    # consumers of pkg._uris_expanded; likely an upstream bug.
    uris_expanded = [
        from_mirror(uri) if 'mirror://' in uri else uri for uri in uris
    ]
    pkg._uris = uris
    pkg._uris_expanded = uris_expanded
    versions = handlers.scan(pkg, uris, on_progress)
    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    result = filter_versions(cp, versions)
    if on_progress:
        on_progress(increment=10)
    # output scan time for formatted output
    scan_time = (datetime.now() - start_time).total_seconds()
    output.metadata("scan_time", scan_time, show=False)
    is_current_version_stable = is_version_stable(ver)
    if len(result) > 0:
        if not (CONFIG['format'] or CONFIG['quiet']):
            print("")
        for cp, url, version, handler, confidence in result:
            # Optionally drop pre-releases, globally or only while the
            # currently installed version is stable.
            if CONFIG["ignore-pre-release"]:
                if not is_version_stable(version):
                    continue
            if CONFIG["ignore-pre-release-if-stable"]:
                if is_current_version_stable and \
                        not is_version_stable(version):
                    continue
            if CONFIG['progress']:
                print("", file=sys.stderr)
            output.result(cp, version, url, handler, confidence)
    return result
| iksaif/euscan | pym/euscan/scan.py | Python | gpl-2.0 | 5,444 |
import matplotlib.pyplot as plt
import numpy as np
from image_funcs import *
from scipy.misc import imread, imsave
def grid(image, threshold):
    """Stub -- NOTE(review): the indented body of this function appears to
    have been lost (whitespace-mangled source); its parameters are unused
    by the module-level statements below.  Confirm against upstream."""
###############################################################################
import scipy.misc
# Load the nuclei image, shrink it and collapse the color channels (max).
nuclei = imread('3.jpg')
nuclei = scipy.misc.imresize(nuclei, 0.05)
nuclei = np.max(nuclei, 2)
plt.imshow(nuclei)
plt.gray()
imsave('nuclei.jpg', nuclei)
################################################################################
binary = fill_holes(nuclei)
#imsave('nuclei.jpg', binary)
# Contrast stretching: full-range first, then 2-98 percentile clipping.
from skimage.exposure import rescale_intensity
rescaled_nuclei = rescale_intensity(nuclei, in_range=(np.min(nuclei),np.max(nuclei)))
new_range = tuple(np.percentile(nuclei,(2,98)))
rescaled_nuclei = rescale_intensity(nuclei, in_range=new_range)
# Unsharp masking: subtract a blurred copy to emphasise edges.
from skimage.filter import gaussian_filter
blured = gaussian_filter(nuclei,8)
plt.imshow(blured)
highpass = nuclei - 0.8*blured
sharp = highpass + nuclei
sharp = np.floor(sharp).astype(np.uint8)
# Otsu threshold on the rescaled image.
from skimage.filter import threshold_otsu
thres = threshold_otsu(rescaled_nuclei)
binary = rescaled_nuclei > thres
# Canny edges on the sharpened image, closed with a diamond structuring
# element, filled and eroded back to get the cell mask.
from skimage.filter import canny
edges = canny(sharp, sigma = 1, high_threshold = 35., low_threshold = 14.)
from scipy.ndimage.morphology import binary_dilation, binary_erosion
diamond = np.array([0,1,0,1,1,1,0,1,0], dtype=bool).reshape((3,3))
edges = double_dilation(edges, diamond)
binary = fill_holes(edges)
binary = double_erosion(binary, diamond)
imsave('bin.jpg', binary)
| varnivey/hakoton_images | image_preparation/cells_search.py | Python | gpl-2.0 | 1,484 |
#!/usr/bin/env python3
import os
import sys
import redis
app_path = os.path.realpath('%s/../../..' % os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, '%s/lib' % app_path)
import settings
def get_db(conf):
    """Open a StrictRedis connection described by a ``{host, port, id}``
    mapping (port and database id may arrive as strings)."""
    host = conf['host']
    port = int(conf['port'])
    database = int(conf['id'])
    return redis.StrictRedis(host=host, port=port, db=database)
if __name__ == '__main__':
    import argparse

    # Command line: a single ACTION argument selecting the key group.
    parser = argparse.ArgumentParser(description="RedisDB clean-up utility")
    parser.add_argument('clean_what', metavar="ACTION",
                        help="what item group should be cleaned (session, concordance)")
    args = parser.parse_args()

    # Key-name patterns per supported action.
    patterns = {
        'session': 'session:*',
        'concordance': 'concordance:*'
    }
    if args.clean_what not in patterns:
        raise ValueError('Unknown action: %s' % args.clean_what)

    settings.load('%s/conf/config.xml' % app_path)
    db = get_db(settings.get('plugins', 'db'))

    # Delete every matching key, reporting each one.
    deleted = 0
    for key in db.keys(patterns[args.clean_what]):
        db.delete(key)
        print('deleted: %s' % key)
        deleted += 1
    print('Finished deleting %d keys' % deleted)
| czcorpus/kontext | lib/plugins/redis_db/cleanup.py | Python | gpl-2.0 | 1,129 |
"""
Loadable.Loadable subclass
"""
# This file is part of Munin.
# Munin is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# Munin is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Munin; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# This work is Copyright (C)2006 by Andreas Jacobsen
# Individual portions may be copyright by individual contributors, and
# are included in this collective work with permission of the copyright
# owners.
class cajbook(loadable.loadable):
    def __init__(self, client, conn, cursor):
        # 50 = access level required to run this command.
        loadable.loadable.__init__(self, client, conn, cursor, 50)
        # Coordinates "x:y:z" (separators . : - or space), then an eta or
        # landing tick, then an optional trailing "yes" override flag.
        self.paramre = re.compile(r"^\s*(\d+)[. :-](\d+)[. :-](\d+)\s+(\d+)(\s+(yes))?")
        self.usage = self.__class__.__name__ + " <x:y:z> (<eta>|<landing tick>)"
def execute(self, nick, username, host, target, prefix, command, user, access):
m = self.commandre.search(command)
if not m:
return 0
m = self.paramre.search(irc_msg.command_parameters)
if not m:
self.client.reply(prefix, nick, target, "Usage: %s" % (self.usage,))
return 0
x = m.group(1)
y = m.group(2)
z = m.group(3)
when = int(m.group(4))
override = m.group(6)
if access < self.level:
self.client.reply(
prefix,
nick,
target,
"You do not have enough access to use this command",
)
return 0
if int(x) != 6 or int(y) != 8:
self.client.reply(
prefix,
nick,
target,
"This command only works for the galaxy 2:5, if you need a normal booking try !book",
)
return 0
p = loadable.planet(x=x, y=y, z=z)
if not p.load_most_recent(self.conn, self.client, self.cursor):
self.client.reply(
prefix, nick, target, "No planet matching '%s:%s:%s' found" % (x, y, z)
)
return 1
else:
i = loadable.intel(pid=p.id)
if not i.load_from_db(self.conn, self.client, self.cursor):
pass
else:
if i and i.alliance and i.alliance.lower() == "ascendancy":
self.client.reply(
prefix,
nick,
target,
"%s:%s:%s is %s in Ascendancy. Quick, launch before they notice the highlight."
% (x, y, z, i.nick or "someone"),
)
return 0
curtick = self.current_tick()
tick = -1
eta = -1
if when < 80:
tick = curtick + when
eta = when
elif when < curtick:
self.client.reply(
prefix,
nick,
target,
"Can not book targets in the past. You wanted tick %s, but current tick is %s."
% (when, curtick),
)
return 1
else:
tick = when
eta = tick - curtick
if tick > 32767:
tick = 32767
args = ()
query = "SELECT t1.id AS id, t1.nick AS nick, t1.pid AS pid, t1.tick AS tick, t1.uid AS uid, t2.pnick AS pnick, t2.userlevel AS userlevel, t3.x AS x, t3.y AS y, t3.z AS z"
query += " FROM target AS t1"
query += " INNER JOIN planet_dump AS t3 ON t1.pid=t3.id"
query += " LEFT JOIN user_list AS t2 ON t1.uid=t2.id"
query += " WHERE"
query += " t1.tick > %s"
query += (
" AND t3.tick = (SELECT MAX(tick) FROM updates) AND t3.x=%s AND t3.y=%s"
)
query += " AND t3.z=%s"
self.cursor.execute(query, (tick, x, y, z))
if self.cursor.rowcount > 0 and not override:
reply = (
"There are already bookings for that target after landing pt %s (eta %s). To see status on this target, do !status %s:%s:%s."
% (tick, eta, x, y, z)
)
reply += (
" To force booking at your desired eta/landing tick, use !book %s:%s:%s %s yes (Bookers:"
% (x, y, z, tick)
)
prev = []
for r in self.cursor.fetchall():
owner = "nick:" + r["nick"]
if r["pnick"]:
owner = "user:" + r["pnick"]
prev.append("(%s %s)" % (r["tick"], owner))
reply += " " + string.join(prev, ", ")
reply += " )"
self.client.reply(prefix, nick, target, reply)
return 1
uid = None
if user:
u = loadable.user(pnick=user)
if u.load_from_db(self.conn, self.client, self.cursor):
uid = u.id
query = "INSERT INTO target (nick,pid,tick,uid) VALUES (%s,%s,%s,%s)"
try:
self.cursor.execute(query, (nick, p.id, tick, uid))
if uid:
reply = "Booked landing on %s:%s:%s tick %s for user %s" % (
p.x,
p.y,
p.z,
tick,
user,
)
else:
reply = "Booked landing on %s:%s:%s tick %s for nick %s" % (
p.x,
p.y,
p.z,
tick,
nick,
)
except psycopg.IntegrityError:
query = "SELECT t1.id AS id, t1.nick AS nick, t1.pid AS pid, t1.tick AS tick, t1.uid AS uid, t2.pnick AS pnick, t2.userlevel AS userlevel "
query += " FROM target AS t1 LEFT JOIN user_list AS t2 ON t1.uid=t2.id "
query += " WHERE t1.pid=%s AND t1.tick=%s"
self.cursor.execute(query, (p.id, tick))
book = self.cursor.fetchone()
if not book:
raise Exception(
"Integrity error? Unable to booking for pid %s and tick %s"
% (p.id, tick)
)
if book["pnick"]:
reply = (
"Target %s:%s:%s is already booked for landing tick %s by user %s"
% (p.x, p.y, p.z, book["tick"], book["pnick"])
)
else:
reply = (
"Target %s:%s:%s is already booked for landing tick %s by nick %s"
% (p.x, p.y, p.z, book["tick"], book["nick"])
)
except:
raise
self.client.reply(prefix, nick, target, reply)
return 1
| munin/munin | deprecated/cajbook.py | Python | gpl-2.0 | 7,158 |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import bpy
from rna_prop_ui import PropertyPanel
class DataButtonsPanel():
    """Mixin placing a panel in Properties > Object Data, shown only for metaballs."""
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = "data"

    @classmethod
    def poll(cls, context):
        # Draw only while the current context exposes metaball data.
        return context.meta_ball
class DATA_PT_context_metaball(DataButtonsPanel, bpy.types.Panel):
    """Header-less datablock selector for the metaball data context."""
    bl_label = ""
    bl_options = {'HIDE_HEADER'}

    def draw(self, context):
        layout = self.layout

        ob = context.object
        mball = context.meta_ball
        space = context.space_data

        if ob:
            # Normal case: pick the metaball datablock of the active object.
            layout.template_ID(ob, "data", unlink="None")
        elif mball:
            # Pinned case: the editor is pinned to a datablock, not an object.
            layout.template_ID(space, "pin_id", unlink="None")
class DATA_PT_metaball(DataButtonsPanel, bpy.types.Panel):
    """Global metaball settings: resolutions, threshold and update method."""
    bl_label = "Metaball"

    def draw(self, context):
        layout = self.layout

        mball = context.meta_ball

        split = layout.split()

        # Left column: viewport vs render tessellation resolution.
        col = split.column()
        col.label(text="Resolution:")
        sub = col.column(align=True)
        sub.prop(mball, "resolution", text="View")
        sub.prop(mball, "render_resolution", text="Render")

        # Right column: influence threshold for the implicit surface.
        col = split.column()
        col.label(text="Settings:")
        col.prop(mball, "threshold", text="Threshold")

        layout.label(text="Update:")
        layout.prop(mball, "update_method", expand=True)
class DATA_PT_metaball_element(DataButtonsPanel, bpy.types.Panel):
    """Settings for the currently selected metaball element."""
    bl_label = "Active Element"

    @classmethod
    def poll(cls, context):
        # Requires a metaball datablock with an active element.
        return (context.meta_ball and context.meta_ball.elements.active)

    def draw(self, context):
        layout = self.layout

        metaelem = context.meta_ball.elements.active

        layout.prop(metaelem, "type")

        split = layout.split()

        col = split.column(align=True)
        col.label(text="Settings:")
        col.prop(metaelem, "stiffness", text="Stiffness")
        col.prop(metaelem, "use_negative", text="Negative")
        col.prop(metaelem, "hide", text="Hide")

        # Which size axes make sense depends on the element type.
        col = split.column(align=True)

        if metaelem.type in ('CUBE', 'ELLIPSOID'):
            col.label(text="Size:")
            col.prop(metaelem, "size_x", text="X")
            col.prop(metaelem, "size_y", text="Y")
            col.prop(metaelem, "size_z", text="Z")

        elif metaelem.type == 'TUBE':
            # Tubes are only sized along their axis.
            col.label(text="Size:")
            col.prop(metaelem, "size_x", text="X")

        elif metaelem.type == 'PLANE':
            col.label(text="Size:")
            col.prop(metaelem, "size_x", text="X")
            col.prop(metaelem, "size_y", text="Y")
class DATA_PT_custom_props_metaball(DataButtonsPanel, PropertyPanel, bpy.types.Panel):
    """Standard custom-properties panel bound to the metaball datablock."""
    COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
    _context_path = "object.data"
    _property_type = bpy.types.MetaBall
def register():
    # NOTE(review): intentionally empty -- registration of these UI classes
    # appears to be handled elsewhere by Blender's script loader; confirm.
    pass


def unregister():
    pass


if __name__ == "__main__":
    register()
| 244xiao/blender-java | blender-java/src/resources/release/scripts/ui/properties_data_metaball.py | Python | gpl-2.0 | 3,814 |
from django.core.validators import RegexValidator
from django.db import models
from event.models import Event
class Job(models.Model):
    """An event-scoped volunteer job with a name, date range and description."""
    # Explicit surrogate key (identical to Django's implicit default pk).
    id = models.AutoField(primary_key=True)
    event = models.ForeignKey(Event)
    # Job title: letters, whitespace and apostrophes only.
    # BUG FIX: the previous pattern ^[(A-Z)|(a-z)|(\s)|(\')]+$ placed literal
    # '(', ')' and '|' inside the character class, accidentally accepting
    # those characters as valid name input.
    name = models.CharField(
        max_length=75,
        validators=[
            RegexValidator(
                r"^[A-Za-z\s']+$",
            ),
        ],
    )
    start_date = models.DateField()
    end_date = models.DateField()
    # Free text: letters, digits, whitespace and basic punctuation (. , - ! ').
    # Same character-class fix as for `name` above.
    description = models.TextField(
        blank=True,
        validators=[
            RegexValidator(
                r"^[A-Za-z0-9\s.,\-!']+$",
            ),
        ],
    )
| willingc/vms | vms/job/models.py | Python | gpl-2.0 | 675 |
#!/usr/bin/python3
def soma_hipotenusas(n):
    """Return the sum of all distinct integers c in [1, n] that are the
    hypotenuse of a Pythagorean triple (a**2 + b**2 == c**2, a, b >= 1).

    The previous triple-while implementation incremented the first-leg
    counter *before* the inner loop, so it actually scanned legs 2..n+1.
    That slip was harmless only because a triple's legs are always smaller
    than its hypotenuse; since any leg satisfies a < c <= n, scanning
    a, b < c is sufficient and also removes the redundant work.
    """
    hipotenusas = set()
    for c in range(1, n + 1):
        alvo = c * c
        # b starts at a so each unordered leg pair is tested once.
        if any(a * a + b * b == alvo
               for a in range(1, c)
               for b in range(a, c)):
            hipotenusas.add(c)
    return sum(hipotenusas)
| david81brs/seaport | week6/l5_hipotenusas.py | Python | gpl-2.0 | 632 |
# config.py
# Configuration for tasks application.
# Author: Julien Pecqueur (julien@peclu.net)
# License: GPL

# Application identity.
NAME = 'tasks.py'
VERSION = 0.1

# Path to tasks database (relative to the process working directory).
DB = './tasks.db'

# Interface to listen on ('0.0.0.0' binds all interfaces).
# NOTE(review): PORT is a string here -- presumably the serving framework
# accepts it as such; confirm before changing.
HOST = '0.0.0.0'
PORT = '8081'

# Debug mode (keep False in production).
DEBUG = False

# Auto-reload service in case of file change.
RELOADER = True
| jpec/tasks.py | config.py | Python | gpl-2.0 | 350 |
'''
Created on Feb 22, 2013
@author: u5682
'''
from datetime import datetime
import os, sys, pickle
import subprocess
from time import sleep
import xml.dom.minidom
from xml.dom.minidom import Node
class DistributedTask(object):
    """Description of one task for distributed execution.

    Holds the executable, its arguments, I/O paths and the input/output
    staging file lists.  A task is either created empty (fileName is None)
    or populated from an XML task-description file via fromXML().
    """

    def __init__(self, fileName=None):
        """Create an empty task, or load one from the XML file *fileName*."""
        self.creationDate = datetime.now()
        if fileName is None:  # idiom fix: identity test instead of == None
            self.executable = ""
            self.arguments = []
            self.outputPath = ""
            self.outputFile = ""
            self.errorPath = ""
            self.inputPath = ""
            self.inputSandbox = ""
            self.outputSandbox = ""
            self.workingDirectory = ""
            self.requirements = ""
            self.inputFiles = []
            self.outputFiles = []
            self.jobName = ""
            self.nativeSpecification = ""
        else:
            self.fromXML(fileName)

    def fromXML(self, fileName):
        """Populate every task field from the XML description in *fileName*."""
        # Parse the file first, then pull each field out of the DOM.
        doc = xml.dom.minidom.parse(fileName)
        self.executable = obtainText(doc, 'executable')
        self.arguments = obtainTextList(doc, 'arguments', 'argument')
        self.outputPath = obtainText(doc, 'outputPath')
        self.outputFile = obtainText(doc, 'outputFile')
        self.errorPath = obtainText(doc, 'errorPath')
        self.inputPath = obtainText(doc, 'inputPath')
        self.inputSandbox = obtainText(doc, 'inputSandbox')
        self.outputSandbox = obtainText(doc, 'outputSandbox')
        self.workingDirectory = obtainText(doc, 'workingDirectory')
        self.requirements = obtainText(doc, 'requirements')
        self.inputFiles = obtainTextList(doc, 'inputFiles', 'inputFile')
        self.outputFiles = obtainTextList(doc, 'outputFiles', 'outputFile')
        self.jobName = obtainText(doc, 'jobName')
        self.nativeSpecification = obtainText(doc, 'nativeSpecification')

    def getArguments(self):
        """Return the argument list, each entry ASCII-encoded (lossy)."""
        argumentList = []
        for arg in self.arguments:
            argumentList.append(arg.encode('ascii', 'ignore'))
        return argumentList

    def getInputFiles(self):
        """Return a copy of the input file name list.

        BUG FIX: inputFiles already holds plain strings (see obtainTextList),
        so the old code's `inputF.text` raised AttributeError.
        """
        return list(self.inputFiles)

    def getOutputFiles(self):
        """Return a copy of the output file name list (same fix as above)."""
        return list(self.outputFiles)

    def outputFilesExist(self):
        """Check that every declared output file exists under workingDirectory.

        BUG FIX: the old code read `self.taskInfo.workingDirectory` (no such
        attribute on this class) and `outputF.text` (entries are strings).
        """
        for outputF in self.outputFiles:
            requiredFile = self.workingDirectory + "/" + outputF
            if not os.path.exists(requiredFile):
                print("OUTPUT FILE MISSING: " + requiredFile)
                return False
        return True
def obtainText(node, tagName):
    """Concatenate the text content of every <tagName> element under *node*."""
    pieces = []
    for element in node.getElementsByTagName(tagName):
        for child in element.childNodes:
            if child.nodeType == Node.TEXT_NODE:
                pieces.append(child.data)
    return "".join(pieces)
def obtainTextList(node, fatherTagName, sonTagName):
    """Return the text of every <sonTagName> nested inside a <fatherTagName>.

    BUG FIX: the son elements are now looked up on each father element
    (father.getElementsByTagName) instead of on the whole *node*; the old
    code duplicated every son once per father element and also picked up
    sons located outside any father.
    """
    texts = []
    for father in node.getElementsByTagName(fatherTagName):
        for son in father.getElementsByTagName(sonTagName):
            for child in son.childNodes:
                if child.nodeType == Node.TEXT_NODE:
                    texts.append(child.data)
    return texts
| supermanue/distributedController | clusterController/DistributedTask.py | Python | gpl-2.0 | 3,026 |
'''
Ultimate Whitecream
Copyright (C) 2016 mortael
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib, urllib2, re, cookielib, os, sys, socket
import xbmc, xbmcplugin, xbmcgui, xbmcaddon
import utils, sqlite3
# Spoof a mobile Safari UA: the site then serves the plain mobile HLS
# <video> player that Playvid() scrapes for the stream url.
mobileagent = {'User-Agent': 'Mozilla/5.0 (iPad; CPU OS 9_2_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13D15 Safari/601.1'}
def Main():
    """Build the add-on's root menu: a cache-refresh action plus one
    browsable folder per Cam4 category (mode 281, starting at page 1)."""
    utils.addDir('[COLOR red]Refresh Cam4 images[/COLOR]', '', 283, '', Folder=False)
    categories = (
        ('[COLOR hotpink]Featured[/COLOR]', 'http://www.cam4.com/featured/1'),
        ('[COLOR hotpink]Females[/COLOR]', 'http://www.cam4.com/female/1'),
        ('[COLOR hotpink]Couples[/COLOR]', 'http://www.cam4.com/couple/1'),
        ('[COLOR hotpink]Males[/COLOR]', 'http://www.cam4.com/male/1'),
        ('[COLOR hotpink]Transsexual[/COLOR]', 'http://www.cam4.com/shemale/1'),
    )
    for label, url in categories:
        utils.addDir(label, url, 281, '', 1)
    xbmcplugin.endOfDirectory(utils.addon_handle)
def clean_database(showdialog=False):
    """Purge cached Cam4 thumbnails (urls containing '.systemcdn.net') from
    Kodi's texture cache database and delete the cached image files.

    Best-effort by design: every failure is swallowed so a locked or missing
    cache database never breaks the add-on.

    showdialog -- when True, show a notification once the purge finished.
    """
    pattern = '%.systemcdn.net%'
    conn = sqlite3.connect(xbmc.translatePath("special://database/Textures13.db"))
    try:
        with conn:  # commits on success, rolls back the batch on error
            # Parameterized queries instead of %-interpolated SQL strings;
            # also no longer shadows the builtin `list`.
            rows = conn.execute(
                "SELECT id, cachedurl FROM texture WHERE url LIKE ?;", (pattern,))
            for row in rows:
                conn.execute("DELETE FROM sizes WHERE idtexture LIKE ?;", (row[0],))
                # Remove the cached image file itself; ignore missing files.
                try:
                    os.remove(xbmc.translatePath("special://thumbnails/" + row[1]))
                except OSError:
                    pass
            conn.execute("DELETE FROM texture WHERE url LIKE ?;", (pattern,))
        if showdialog:
            utils.notify('Finished', 'Cam4 images cleared')
    except Exception:
        # Deliberate best-effort: cache cleanup must never crash the add-on.
        pass
    finally:
        # The old code leaked the connection; always close it.
        conn.close()
def List(url, page):
    """List Cam4 performers at *url*, one directory entry per live cam,
    plus a paging entry when the page declares a next page."""
    if utils.addon.getSetting("chaturbate") == "true":
        # NOTE(review): this reuses the "chaturbate" add-on setting to decide
        # whether to purge cached Cam4 thumbnails -- confirm the setting name.
        clean_database()
    listhtml = utils.getHtml(url, url)
    # Scrape profile url, preview image and performer name from each cam box.
    match = re.compile('profileDataBox"> <a href="([^"]+)".*?src="([^"]+)" title="Chat Now Free with ([^"]+)"', re.DOTALL | re.IGNORECASE).findall(listhtml)
    for videourl, img, name in match:
        name = utils.cleantext(name)
        videourl = "http://www.cam4.com" + videourl
        # Live streams cannot be downloaded, hence noDownload.
        utils.addDownLink(name, videourl, 282, img, '', noDownload=True)
    if re.search('<link rel="next"', listhtml, re.DOTALL | re.IGNORECASE):
        # Pagination is encoded as .../<category>/<page>; swap the number.
        npage = page + 1
        url = url.replace('/'+str(page),'/'+str(npage))
        utils.addDir('Next Page ('+str(npage)+')', url, 281, '', npage)
    xbmcplugin.endOfDirectory(utils.addon_handle)
def Playvid(url, name):
    """Resolve the cam page at *url* to its HLS stream url and play it."""
    # The mobile user agent makes the site embed a plain <video> tag whose
    # src attribute carries the HLS stream url.
    listhtml = utils.getHtml(url, '', mobileagent)
    match = re.compile('<video id=Cam4HLSPlayer class="SD" controls autoplay src="([^"]+)"> </video>', re.DOTALL | re.IGNORECASE).findall(listhtml)
    if match:
        videourl = match[0]
        iconimage = xbmc.getInfoImage("ListItem.Thumb")
        listitem = xbmcgui.ListItem(name, iconImage="DefaultVideo.png", thumbnailImage=iconimage)
        listitem.setInfo('video', {'Title': name, 'Genre': 'Porn'})
        listitem.setProperty("IsPlayable","true")
        if int(sys.argv[1]) == -1:
            # Not running as a plugin callback: play through a fresh playlist.
            pl = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
            pl.clear()
            pl.add(videourl, listitem)
            xbmc.Player().play(pl)
        else:
            # Normal plugin invocation: hand the resolved url back to Kodi.
            listitem.setPath(str(videourl))
            xbmcplugin.setResolvedUrl(utils.addon_handle, True, listitem)
#!/usr/bin/env python
"""
Use Telemac (selafin) output
==================================
"""
from datetime import timedelta, datetime
from os import sep
from pyproj import Proj
from opendrift.readers import reader_telemac_selafin
from opendrift.models.oceandrift import OceanDrift
# Example: drive an OceanDrift simulation from a Telemac 3D selafin file.
o = OceanDrift(loglevel=0)

filename='{}Telemac_3D{}r3d_tide_open_drift.slf'.format(o.test_data_folder(),sep)
#Lambert North projection string matching the selafin grid coordinates.
proj="+proj=lcc +lat_1=49.50000000000001 +lat_0=49.50000000000001 +lon_0=0 \
+k_0=0.999877341 +x_0=600000 +y_0=200000 +a=6378249.2 +b=6356515 \
+units=m +no_defs"
start_time= datetime(2021,1,1,00,00)
selafin = reader_telemac_selafin.Reader(filename=filename,proj4 = proj, start_time=start_time)
o.add_reader(selafin)
# Stranded elements are moved back to their previous position.
o.set_config('general:coastline_action', 'previous')
# start_time = selafin.start_time generally wrong
# Derive the model time step / duration from the selafin time tags (seconds).
time_step = timedelta(seconds=selafin.slf.tags["times"][1])
length=timedelta(seconds=selafin.slf.tags["times"][-1])
num_steps = len(selafin.slf.tags["times"])
# center seeds in the middle of the grid's bounding box
x,y = (selafin.x.max()-selafin.x.min())/2+selafin.x.min(),\
(selafin.y.max()-selafin.y.min())/2+selafin.y.min()
# Convert the projected seed point back to lon/lat for seeding.
p= Proj(proj, preserve_units=False)
lon, lat = p(x,y,inverse=True)
o.seed_elements(lon=lon, lat=lat, radius=20000, number= 200, z= 0, \
time= start_time)
o.run(time_step=time_step/10, duration=length)
o.plot(fast = True)
| OpenDrift/opendrift | examples/example_long_selafin.py | Python | gpl-2.0 | 1,371 |
"""
script_watcher.py: Reload watched script upon changes.
Copyright (C) 2015 Isaac Weaver
Author: Isaac Weaver <wisaac407@gmail.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
# Add-on metadata displayed in Blender's add-on manager.
bl_info = {
    "name": "Script Watcher",
    "author": "Isaac Weaver",
    "version": (0, 5),
    "blender": (2, 75, 0),
    "location": "Properties > Scene > Script Watcher",
    "description": "Reloads an external script on edits.",
    "warning": "Still in beta stage.",
    "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Development/Script_Watcher",
    "tracker_url": "https://github.com/wisaac407/blender-script-watcher/issues/new",
    "category": "Development",
}
import os, sys
import io
import traceback
import types
import bpy
from bpy.app.handlers import persistent
@persistent
def load_handler(dummy):
    """load_post handler: restore the watcher state after a .blend loads.

    If the saved scene left the watcher running and 'watch on startup' is
    enabled, the watch operator is restarted for the new file; otherwise any
    stale 'running' flag is cleared by ending the (non-)running watch.
    """
    try:
        settings = bpy.context.scene.sw_settings
        if settings.running and settings.auto_watch_on_startup:
            bpy.ops.wm.sw_watch_end('EXEC_DEFAULT')
            bpy.ops.wm.sw_watch_start('EXEC_DEFAULT')
        else:
            bpy.ops.wm.sw_watch_end('EXEC_DEFAULT')
    except Exception:
        # Never let a handler failure break file loading (was a bare except).
        print("Exception on startup check!")
def add_scrollback(ctx, text, text_type):
    """Append each line in *text* to the python console described by *ctx*.

    text_type is the scrollback entry type (e.g. 'OUTPUT' or 'ERROR');
    tabs are replaced because the console does not render them well.
    """
    for line in text:
        bpy.ops.console.scrollback_append(ctx, text=line.replace('\t', ' '),
                                          type=text_type)
class SplitIO(io.StringIO):
    """A StringIO that mirrors every write to a second stream, prefixing
    each emitted line with a Script Watcher tag."""
    PREFIX = '[Script Watcher]: '

    # True whenever the previous write ended a line, i.e. the next write
    # starts a fresh line and must be prefixed.
    _can_prefix = True

    def __init__(self, stream):
        io.StringIO.__init__(self)
        self.stream = stream

    def write(self, chunk):
        # Prefix only at the start of a line.
        text = self.PREFIX + chunk if self._can_prefix else chunk
        # Remember whether the next write begins a new line.
        self._can_prefix = text.endswith('\n')
        # Record locally and forward to the mirrored stream.
        super().write(text)
        self.stream.write(text)
# Define the script watching operator.
class WatchScriptOperator(bpy.types.Operator):
    """Watches the script for changes, reloads the script if any changes occur."""
    bl_idname = "wm.sw_watch_start"
    bl_label = "Watch Script"

    # Modal state: window-manager timer, mtime bookkeeping, watched path.
    _timer = None
    _running = False
    _times = None
    filepath = None

    def get_paths(self):
        """Find all the python paths surrounding the given filepath.

        Returns (package_dirs, watched_files).  Walks down from the script's
        directory, collecting every directory that is a python package and
        all files inside them.
        """
        dirname = os.path.dirname(self.filepath)

        paths = []
        filepaths = []

        for root, dirs, files in os.walk(dirname, topdown=True):
            if '__init__.py' in files:
                paths.append(root)
                for f in files:
                    filepaths.append(os.path.join(root, f))
            else:
                dirs[:] = []  # No __init__ so we stop walking this dir.

        # If we just have one (non __init__) file then return just that file.
        return paths, filepaths or [self.filepath]

    def get_mod_name(self):
        """Return the module name and the root path of the given python file path."""
        dir, mod = os.path.split(self.filepath)

        # Module is a package.
        if mod == '__init__.py':
            mod = os.path.basename(dir)
            dir = os.path.dirname(dir)

        # Module is a single file.
        else:
            mod = os.path.splitext(mod)[0]

        return mod, dir

    def remove_cached_mods(self):
        """Remove all the script modules from the system cache so the next
        exec re-imports them fresh."""
        paths, files = self.get_paths()
        for mod_name, mod in list(sys.modules.items()):
            if hasattr(mod, '__file__') and os.path.dirname(mod.__file__) in paths:
                del sys.modules[mod_name]

    def _reload_script_module(self):
        """Execute the watched script in a freshly created module."""
        print('Reloading script:', self.filepath)
        self.remove_cached_mods()
        try:
            f = open(self.filepath)
            paths, files = self.get_paths()

            # Get the module name and the root module path.
            mod_name, mod_root = self.get_mod_name()

            # Create the module and setup the basic properties.
            # NOTE: named '__main__' so "if __name__ == '__main__'" blocks run.
            mod = types.ModuleType('__main__')
            mod.__file__ = self.filepath
            mod.__path__ = paths
            mod.__package__ = mod_name

            # Add the module to the system module cache.
            sys.modules[mod_name] = mod

            # Finally, execute the module.
            exec(compile(f.read(), self.filepath, 'exec'), mod.__dict__)
        except IOError:
            print('Could not open script file.')
        except:
            # Report script errors without killing the watcher itself.
            sys.stderr.write("There was an error when running the script:\n" + traceback.format_exc())
        else:
            f.close()

    def reload_script(self, context):
        """Reload this script while printing the output to blenders python console."""
        # Setup stdout and stderr so script output is captured AND forwarded.
        stdout = SplitIO(sys.stdout)
        stderr = SplitIO(sys.stderr)

        sys.stdout = stdout
        sys.stderr = stderr

        # Run the script.
        self._reload_script_module()

        # Go back to the begining so we can read the streams.
        stdout.seek(0)
        stderr.seek(0)

        # Don't use readlines because that leaves trailing new lines.
        output = stdout.read().split('\n')
        output_err = stderr.read().split('\n')

        if self.use_py_console:
            # Print the output to the consoles.
            for area in context.screen.areas:
                if area.type == "CONSOLE":
                    ctx = context.copy()
                    ctx.update({"area": area})

                    # Actually print the output.
                    if output:
                        add_scrollback(ctx, output, 'OUTPUT')

                    if output_err:
                        add_scrollback(ctx, output_err, 'ERROR')

        # Cleanup: restore the real streams.
        sys.stdout = sys.__stdout__
        sys.stderr = sys.__stderr__

    def modal(self, context, event):
        # Stop when the running flag was cleared (by the cancel operator).
        if not context.scene.sw_settings.running:
            self.cancel(context)
            return {'CANCELLED'}

        # Manual reload requested via the reload operator.
        if context.scene.sw_settings.reload:
            context.scene.sw_settings.reload = False
            self.reload_script(context)
            return {'PASS_THROUGH'}

        # On each timer tick, reload when any watched file's mtime changed.
        if event.type == 'TIMER':
            for path in self._times:
                cur_time = os.stat(path).st_mtime

                if cur_time != self._times[path]:
                    self._times[path] = cur_time
                    self.reload_script(context)

        return {'PASS_THROUGH'}

    def execute(self, context):
        # Only one watcher at a time.
        if context.scene.sw_settings.running:
            return {'CANCELLED'}

        # Grab the settings and store them as local variables.
        self.filepath = bpy.path.abspath(context.scene.sw_settings.filepath)
        self.use_py_console = context.scene.sw_settings.use_py_console

        # If it's not a file, doesn't exist or permission is denied we don't proceed.
        if not os.path.isfile(self.filepath):
            self.report({'ERROR'}, 'Unable to open script.')
            return {'CANCELLED'}

        # Setup the times dict to keep track of when all the files where last edited.
        dirs, files = self.get_paths()
        self._times = dict((path, os.stat(path).st_mtime) for path in files)  # Where we store the times of all the paths.
        self._times[files[0]] = 0  # We set one of the times to 0 so the script will be loaded on startup.

        # Setup the event timer (fires modal() roughly every 0.1s).
        wm = context.window_manager
        self._timer = wm.event_timer_add(0.1, context.window)
        wm.modal_handler_add(self)

        context.scene.sw_settings.running = True

        return {'RUNNING_MODAL'}

    def cancel(self, context):
        # Tear down the timer, drop cached script modules, clear the flag.
        wm = context.window_manager
        wm.event_timer_remove(self._timer)

        self.remove_cached_mods()

        context.scene.sw_settings.running = False
class CancelScriptWatcher(bpy.types.Operator):
    """Stop watching the current script."""
    bl_idname = "wm.sw_watch_end"
    bl_label = "Stop Watching"

    def execute(self, context):
        # Setting the running flag to false will cause the modal to cancel itself.
        context.scene.sw_settings.running = False
        return {'FINISHED'}
class ReloadScriptWatcher(bpy.types.Operator):
    """Reload the current script."""
    bl_idname = "wm.sw_reload"
    bl_label = "Reload Script"

    def execute(self, context):
        # Setting the reload flag makes the watch modal re-run the script on
        # its next pass (the flag is consumed there).
        context.scene.sw_settings.reload = True
        return {'FINISHED'}
# Create the UI for the operator. NEEDS FINISHING!!
class ScriptWatcherPanel(bpy.types.Panel):
"""UI for the script watcher."""
bl_label = "Script Watcher"
bl_idname = "SCENE_PT_script_watcher"
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = "scene"
def draw(self, context):
layout = self.layout
running = context.scene.sw_settings.running
col = layout.column()
col.prop(context.scene.sw_settings, 'filepath')
col.prop(context.scene.sw_settings, 'use_py_console')
col.prop(context.scene.sw_settings, 'auto_watch_on_startup')
col.operator('wm.sw_watch_start', icon='VISIBLE_IPO_ON')
col.enabled = not running
if running:
row = layout.row(align=True)
row.operator('wm.sw_watch_end', icon='CANCEL')
row.operator('wm.sw_reload', icon='FILE_REFRESH')
class ScriptWatcherSettings(bpy.types.PropertyGroup):
    """All the script watcher settings."""
    # Internal state flags (not exposed in the panel directly).
    running = bpy.props.BoolProperty(default=False)
    reload = bpy.props.BoolProperty(default=False)

    filepath = bpy.props.StringProperty(
        name        = 'Script',
        description = 'Script file to watch for changes.',
        subtype     = 'FILE_PATH'
    )

    use_py_console = bpy.props.BoolProperty(
        name        = 'Use py console',
        description = 'Use blenders built-in python console for program output (e.g. print statments and error messages)',
        default     = False
    )

    auto_watch_on_startup = bpy.props.BoolProperty(
        name        = 'Watch on startup',
        description = 'Watch script automatically on new .blend load',
        default     = False
    )
def register():
    """Register all add-on classes, the scene settings pointer and the
    load_post handler."""
    bpy.utils.register_class(WatchScriptOperator)
    bpy.utils.register_class(ScriptWatcherPanel)
    bpy.utils.register_class(CancelScriptWatcher)
    bpy.utils.register_class(ReloadScriptWatcher)
    bpy.utils.register_class(ScriptWatcherSettings)

    bpy.types.Scene.sw_settings = \
        bpy.props.PointerProperty(type=ScriptWatcherSettings)

    bpy.app.handlers.load_post.append(load_handler)
def unregister():
    """Undo register(): remove classes, the handler and the settings pointer."""
    bpy.utils.unregister_class(WatchScriptOperator)
    bpy.utils.unregister_class(ScriptWatcherPanel)
    bpy.utils.unregister_class(CancelScriptWatcher)
    bpy.utils.unregister_class(ReloadScriptWatcher)
    bpy.utils.unregister_class(ScriptWatcherSettings)

    bpy.app.handlers.load_post.remove(load_handler)

    del bpy.types.Scene.sw_settings


if __name__ == "__main__":
    register()
| kilbee/blender-script-watcher | script_watcher.py | Python | gpl-2.0 | 12,299 |
# -*- coding: utf-8 -*-
"""Programa baseado no Teste Wisconsin
Autor: Neno Henrique Albernaz
Criado em Junho de 2008"""
########################################################################
class Carta:
    """A Wisconsin Card Sorting Test card: a count (1-4) of identical shapes
    in one of four colours.

    The Portuguese display strings are stored pre-inflected for grammatical
    gender and number; ``num``, ``img`` and ``color`` keep the raw values
    that the matching methods compare.
    """

    # (masculine, feminine) forms of the counts 1..4.
    _numeros = [(u"Um", u"Uma"),
                (u"Dois", u"Duas"),
                (u"Três", u"Três"),
                (u"Quatro", u"Quatro")]
    # (singular, plural, image basename) for each shape index.
    _formas = [(u"Triângulo", u"Triângulos", "triangulo"),
               (u"Estrela", u"Estrelas", "estrela"),
               (u"Cruz", u"Cruzes", "cruz"),
               (u"Círculo", u"Círculos", "circulo")]
    # ((masc, fem) singular, (masc, fem) plural, raw name) per colour index.
    _cores = [((u"Vermelho", u"Vermelha"), (u"Vermelhos", u"Vermelhas"), "vermelho"),
              ((u"Verde", u"Verde"), (u"Verdes", u"Verdes"), "verde"),
              ((u"Amarelo", u"Amarela"), (u"Amarelos", u"Amarelas"), "amarelo"),
              ((u"Azul", u"Azul"), (u"Azuis", u"Azuis"), "azul")]
    # Category name -> instance attribute holding the comparable raw value.
    _atributos = dict(numero='num', forma='img', cor='color')

    def __init__(self, numero, forma, cor):
        # Shape indices 1 (estrela) and 2 (cruz) are grammatically feminine.
        feminino = 1 if forma in (1, 2) else 0
        plural = 1 if numero != 1 else 0
        self.numero = self._numeros[numero - 1][feminino]
        self.forma = self._formas[forma][plural]
        self.cor = self._cores[cor][plural][feminino]
        self.num = numero
        self.img = u"/static/plugins/wisconsin/images/%s.png" % self._formas[forma][2]
        self.color = self._cores[cor][2]

    def pegaAtributosCarta(self):
        """Return the card description, e.g. u'Duas Estrelas Verdes'."""
        return u"%s %s %s" % (self.numero, self.forma, self.cor)

    def testaMesmaCategoria(self, outra_carta, categoria):
        """True when both cards share the value of *categoria*
        ('numero', 'forma' or 'cor')."""
        atributo = self._atributos[categoria]
        return getattr(self, atributo) == getattr(outra_carta, atributo)

    def testaTudoDiferente(self, outra_carta):
        """True when the two cards differ in all three categories."""
        return not any(self.testaMesmaCategoria(outra_carta, categoria)
                       for categoria in self._atributos)
########################################################################
def criaListaEstimulo():
    """Build the four stimulus (key) cards: one red triangle, two green
    stars, three yellow crosses and four blue circles."""
    # Card i+1 uses shape index i and colour index i, for i = 0..3.
    return [Carta(i + 1, i, i) for i in range(4)]
########################################################################
def criaListaResposta():
    """Build the response deck: the 64-card deck, twice (128 cards).

    Each three-character code encodes (number 1-4, shape index, colour index).
    """
    codigos = ['101', '420', '203', '130', '411', '122', '403', '330', '421', '232',
               '113', '300', '223', '112', '301', '433', '210', '332', '400', '132',
               '213', '321', '212', '303', '410', '202', '323', '430', '211', '120',
               '431', '110', '333', '422', '111', '402', '233', '312', '131', '423',
               '100', '313', '432', '201', '310', '222', '133', '302', '221', '412',
               '103', '311', '230', '401', '123', '331', '220', '102', '320', '231',
               '423', '322', '200', '122']
    baralho = [Carta(int(codigo[0]), int(codigo[1]), int(codigo[2]))
               for codigo in codigos]
    return 2 * baralho
########################################################################
def criaListaCategorias():
    """Return the sorting-rule sequence: cor, forma, numero -- twice, because
    the test passes through each category two times."""
    return ["cor", "forma", "numero"] * 2
########################################################################
def instrucoes_teste():
    """Return the participant-facing test instructions (Portuguese).

    The literal is returned verbatim (including its line breaks), so it must
    not be reflowed.
    """
    return u"""Este é um teste um pouco diferente, porque eu não posso lhe
dizer muito a respeito do que fazer. Você vai ser solicitado a associar
cada uma das cartas que vou te dar com uma dessas quatro cartas-chave
mostradas na tela. Sempre selecione o link da carta-chave que você achar
que combine com a carta que vou te dar. Eu não posso lhe dizer como
associar as cartas, mas lhe direi, cada vez, se você está certo ou errado.
Não há limite de tempo neste teste. Está Pronto? Vamos começar."""
########################################################################
# Module-level test state, built once at import time.
listaCartasResposta = criaListaResposta()
listaCartasEstimulo = criaListaEstimulo()
listaCategorias = criaListaCategorias()
# NOTE(review): criaListaResposta() returns 128 cards (the 64-card deck
# doubled) while this constant stays 64 -- confirm which one is intended.
numCartasResposta = 64
| labase/activnce | main/plugins/wisconsin/wcst.py | Python | gpl-2.0 | 4,743 |
from MAF import MAF
class IngroupSplitter:
    def __init__(self, ingroup, splitdist, junkchars='Nn'):
        """Configure the splitter.

        ingroup   -- collection of sequence names treated as the ingroup.
        splitdist -- minimum run of junk/gap columns that triggers a split.
        junkchars -- characters counted as junk in ingroup columns
                     (must not contain '-', which is handled as a gap).
        """
        self.ingroup = ingroup
        self.splitdist = splitdist
        self.junkchars = junkchars
        # NOTE(review): input validation via assert disappears under
        # `python -O`; consider raising ValueError instead.
        assert "-" not in self.junkchars
#return a list of mafs representing the old mafs that have split
#or a list containing just the original maf if no splitting is required
def split(self, maf):
in_al = []
in_pos = []
index = []
indir = []
out_al = []
out_pos = []
outdex = []
outdir = []
for i in range(maf.count()):
assert maf.start(i) <= maf.end(i), (maf.start(i), maf.end(i))
if maf.name(i) in self.ingroup:
in_al.append(maf.data(i))
in_pos.append(maf.start(i))
index.append(i)
if maf.strand(i) == '+':
indir.append(1)
else:
indir.append(-1)
else:
out_al.append(maf.data(i))
out_pos.append(maf.start(i))
outdex.append(i)
if maf.strand(i) == '+':
outdir.append(1)
else:
outdir.append(-1)
gapcount = 0
# keep track of how many gaps there are in each sequence in current block of junk
block_dashes = dict()
for j in index + outdex:
block_dashes[j] = 0
for i in range(len(in_al[0])):
ns = 0
for j in range(len(in_al)):
if in_al[j][i] not in "-":
in_pos[j] = in_pos[j] +1
else:
block_dashes[index[j]] += 1
if in_al[j][i] in self.junkchars + '-':
ns = ns + 1#indir[j]
for j in range(len(out_al)):
if out_al[j][i] not in "-":
out_pos[j] = out_pos[j] + 1#outdir[j]
else:
block_dashes[outdex[j]] += 1
if ns == len(in_al):
gapcount = gapcount +1
elif gapcount <= self.splitdist:
gapcount = 0
for j in index + outdex:
block_dashes[j] = 0
elif gapcount > self.splitdist:
maf1 = "a score=%f\n" % maf.score()
for x in range(len(index)):
ind = index[x]
start = maf.start(ind)
# size = abs(in_pos[x] - start) - gapcount
assert in_pos[x] >= start
# assert gapcount >= block_dashes[ind], (gapcount, block_dashes[ind], ind)
size = in_pos[x] - start - (gapcount - block_dashes[ind])
# if in_al[x][i] not in "-":
size = size -1
# print "#### in:", in_pos[x], gapcount, block_dashes[ind], start, size
maf1 += "s %s.%s %i %i %s %i %s\n" % (maf.name(ind), maf.chromosome(ind), start, size, maf.strand(ind), maf.srcLength(ind), maf.data(ind)[0:i-gapcount])
for x in range(len(outdex)):
ind = outdex[x]
start = maf.start(ind)
size = out_pos[x] - start - (gapcount - block_dashes[ind])
# if out_al[x][i] not in "-":
size = size -1
# print "#### out:", out_pos[x], gapcount, block_dashes[ind], start, size
maf1 += "s %s.%s %i %i %s %i %s\n" % (maf.name(ind), maf.chromosome(ind), start, size, maf.strand(ind), maf.srcLength(ind), maf.data(ind)[0:i-gapcount])
maf2 = "a score=%f\n" % maf.score()
for x in range(len(index)):
ind = index[x]
start = in_pos[x]
if in_al[x][i] not in "-":
start = start - 1#indir[x]
size = maf.length(ind) - (start - maf.start(ind))
maf2 += "s %s.%s %i %i %s %i %s\n" % (maf.name(ind), maf.chromosome(ind), start, size, maf.strand(ind), maf.srcLength(ind), maf.data(ind)[i:])
for x in range(len(outdex)):
ind = outdex[x]
start = out_pos[x]
if out_al[x][i] not in "-":
start = start - 1#outdir[x]
size = maf.length(ind) - (start - maf.start(ind))
maf2 += "s %s.%s %i %i %s %i %s\n" % (maf.name(ind), maf.chromosome(ind), start, size, maf.strand(ind), maf.srcLength(ind), maf.data(ind)[i:])
gapcount = 0
tmpmaf = MAF(maf1)
res = []
# print "## 1 ##"
# print tmpmaf
for x in range(tmpmaf.count()):
assert tmpmaf.start(x) <= tmpmaf.start(x), (tmpmaf.start(x), tmpmaf.end(x))
if(len(tmpmaf.data(0)) > 0):
res.append(tmpmaf)
tmpmaf2 = MAF(maf2)
# print "## 2 ##"
# print tmpmaf2
for x in range(tmpmaf2.count()):
assert tmpmaf2.start(x) <= tmpmaf2.start(x), (tmpmaf2.start(x), tmpmaf2.end(x))
if(len(tmpmaf2.data(0)) > 0):
for m in self.split(tmpmaf2):
res.append(m)
# for m in self.split(MAF(maf2)):
# res.append(m)
return res
return [maf]
| kaspermunch/CoalhmmPipeline | CoalhmmPipeline/IngroupSplitter.py | Python | gpl-2.0 | 5,732 |
#! /usr/bin/env python
# Copyright (C) 2009 Sebastian Garcia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
#
# Author:
# Sebastian Garcia eldraco@gmail.com
#
# Based on code from Twisted examples.
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
#
# CHANGELOG
# 0.6
# - Added some more chars to the command injection prevention.
# - Clients decide the nmap scanning rate.
# - If the server sends a --min-rate parameter, we now delete it. WE control the scan speed.
# - Exit if nmap is not installed
# - Stop sending the euid, it was a privacy violation. Now we just say if we are root or not.
#
# TODO
# - privileges on nmap
#
try:
from OpenSSL import SSL
except:
print 'You need openssl libs for python. apt-get install python-openssl'
exit(-1)
import sys
try:
from twisted.internet.protocol import ClientFactory, ReconnectingClientFactory
from twisted.protocols.basic import LineReceiver
from twisted.internet import ssl, reactor
except:
print 'You need twisted libs for python. apt-get install python-twisted'
exit(-1)
import time, getopt, shlex
from subprocess import Popen
from subprocess import PIPE
import os
import random
# Global variables
server_ip = False        # dnmap server address; must be given with -s/--server-ip
server_port = 46001      # dnmap server port (default 46001)
vernum = '0.6'           # client version, reported to the server
# Your name alias defaults to anonymous
alias='Anonymous'
debug=False              # verbose protocol tracing when True (-d)
# Do not use a max rate by default; when set (-m), it is appended to every
# nmap command as --max-rate to slow scans down.
maxrate = False
# End global variables
# Print version information.
def version():
    """Print the dnmap client banner (version, license, author)."""
    print "+----------------------------------------------------------------------+"
    print "| dnmap Client Version "+ vernum +"                                             |"
    print "| This program is free software; you can redistribute it and/or modify |"
    print "| it under the terms of the GNU General Public License as published by |"
    print "| the Free Software Foundation; either version 2 of the License, or    |"
    print "| (at your option) any later version.                                  |"
    print "|                                                                      |"
    print "| Author: Garcia Sebastian, eldraco@gmail.com                          |"
    print "| www.mateslab.com.ar                                                  |"
    print "+----------------------------------------------------------------------+"
    print
# Print help information and exit:
def usage():
    """Print the banner plus command-line help, then exit with status 1."""
    print "+----------------------------------------------------------------------+"
    print "| dnmap Client Version "+ vernum +"                                             |"
    print "| This program is free software; you can redistribute it and/or modify |"
    print "| it under the terms of the GNU General Public License as published by |"
    print "| the Free Software Foundation; either version 2 of the License, or    |"
    print "| (at your option) any later version.                                  |"
    print "|                                                                      |"
    print "| Author: Garcia Sebastian, eldraco@gmail.com                          |"
    print "| www.mateslab.com.ar                                                  |"
    print "+----------------------------------------------------------------------+"
    print "\nusage: %s <options>" % sys.argv[0]
    print "options:"
    print "  -s, --server-ip        IP address of dnmap server."
    print "  -p, --server-port      Port of dnmap server. Dnmap port defaults to 46001"
    print "  -a, --alias            Your name alias so we can give credit to you for your help. Optional"
    print "  -d, --debug            Debuging."
    print "  -m, --max-rate         Force nmaps commands to use at most this rate. Useful to slow nmap down. Adds the --max-rate parameter."
    print
    sys.exit(1)
def check_clean(line):
    """Return True when *line* contains none of the shell metacharacters
    used for command injection (';', '#', '`'); False otherwise.

    This is the only guard between a server-supplied command line and
    Popen, so the blacklist is deliberately strict.
    """
    global debug
    try:
        outbound_chars = [';', '#', '`']
        # any() replaces the original manual flag loop; same result.
        return not any(char in line for char in outbound_chars)
    except Exception as inst:
        # Fixed: the original printed 'Problem in dataReceived function',
        # a copy-paste from the caller; report the real location.
        print('Problem in check_clean function')
        print(type(inst))
        print(inst.args)
        print(inst)
class NmapClient(LineReceiver):
    """Twisted protocol for one connection to the dnmap server.

    On connect it identifies itself (client id, alias, version, whether we
    run as root) and then loops: receive an nmap command line from the
    server, sanitise it, run nmap, and stream the .nmap/.xml/.gnmap output
    back before asking for more work.
    """

    def connectionMade(self):
        """Handshake: send our identification and request the first command."""
        global client_id
        global alias
        global debug
        print 'Client connected succesfully...'
        print 'Waiting for more commands....'
        if debug:
            print ' -- Your client ID is: {0} , and your alias is: {1}'.format(str(client_id), str(alias))
        euid = os.geteuid()
        # Do not send the euid, just tell if we are root or not.
        if euid==0:
            # True
            iamroot = 1
        else:
            # False
            iamroot = 0
        # 'Client ID' text must be sent to receive another command
        line = 'Starts the Client ID:{0}:Alias:{1}:Version:{2}:ImRoot:{3}'.format(str(client_id),str(alias),vernum,iamroot)
        if debug:
            print ' -- Line sent: {0}'.format(line)
        self.sendLine(line)
        #line = 'Send more commands to Client ID:{0}:Alias:{1}:\0'.format(str(client_id),str(alias))
        line = 'Send more commands'
        if debug:
            print ' -- Line sent: {0}'.format(line)
        self.sendLine(line)

    def dataReceived(self, line):
        """Handle one message from the server.

        A 'Wait:<secs>' message pauses before re-requesting work; any
        other message is treated as an nmap command line to execute.
        """
        global debug
        global client_id
        global alias
        # If a wait is received. just wait.
        if 'Wait' in line:
            sleeptime = int(line.split(':')[1])
            time.sleep(sleeptime)
            # Ask for more
            # line = 'Send more commands to Client ID:{0}:Alias:{1}:'.format(str(client_id),str(alias))
            line = 'Send more commands'
            if debug:
                print ' -- Line sent: {0}'.format(line)
            self.sendLine(line)
        else:
            # dataReceived does not wait for end of lines or CR nor LF
            if debug:
                print "\tCommand Received: {0}".format(line.strip('\n').strip('\r'))
            # A little bit of protection from the server
            if check_clean(line):
                # Store the nmap output file so we can send it to the server later
                try:
                    nmap_output_file = line.split('-oA ')[1].split(' ')[0].strip(' ').strip("\n")
                except IndexError:
                    # No -oA on the command line: add one with a random
                    # basename so the results are not lost.
                    random_file_name = str(random.randrange(0, 100000000, 1))
                    print '+ No -oA given. We add it anyway so not to lose the results. Added -oA ' + random_file_name
                    line = line + '-oA ' + random_file_name
                    nmap_output_file = line.split('-oA ')[1].split(' ')[0].strip(' ').strip("\n")
                try:
                    nmap_returncode = -1
                    # Check for rate commands
                    # Verify that the server is NOT trying to force us to be faster. NMAP PARAMETER DEPENDENCE
                    if 'min-rate' in line:
                        temp_vect = shlex.split(line)
                        word_index = temp_vect.index('--min-rate')
                        # Just delete the --min-rate parameter with its value
                        nmap_command = temp_vect[0:word_index] + temp_vect[word_index + 1:]
                    else:
                        nmap_command = shlex.split(line)
                    # Do we have to add a max-rate parameter?
                    if maxrate:
                        nmap_command.append('--max-rate')
                        nmap_command.append(str((maxrate)))
                    # Strip the command, so we can control that only nmap is executed really
                    nmap_command = nmap_command[1:]
                    nmap_command.insert(0, 'nmap')
                    # Recreate the final command to show it
                    nmap_command_string = ''
                    for i in nmap_command:
                        nmap_command_string = nmap_command_string + i + ' '
                    print "\tCommand Executed: {0}".format(nmap_command_string)
                    # For some reason this executable thing does not work! seems to change nmap sP for sS
                    # nmap_process = Popen(nmap_command,executable='nmap',stdout=PIPE)
                    nmap_process = Popen(nmap_command, stdout=PIPE)
                    raw_nmap_output = nmap_process.communicate()[0]
                    nmap_returncode = nmap_process.returncode
                except OSError:
                    print 'You don\'t have nmap installed. You can install it with apt-get install nmap'
                    exit(-1)
                except ValueError:
                    raw_nmap_output = 'Invalid nmap arguments.'
                    print raw_nmap_output
                except Exception as inst:
                    print 'Problem in dataReceived function'
                    print type(inst)
                    print inst.args
                    print inst
                if nmap_returncode >= 0:
                    # Nmap ended ok and the files were created
                    if os.path.isfile(nmap_output_file + ".xml") and os.path.isfile(
                            nmap_output_file + ".gnmap") and os.path.isfile(nmap_output_file + ".nmap"):
                        with open(nmap_output_file + ".xml", "r") as f:
                            XMLData = f.read()
                        with open(nmap_output_file + ".gnmap", "r") as f:
                            GNmapData = f.read()
                        with open(nmap_output_file + ".nmap", "r") as f:
                            NmapData = f.read()
                        # Separators the server uses to split the three outputs.
                        xml_linesep = "\r\n#XMLOUTPUT#\r\n"
                        gnmap_linesep = "\r\n#GNMAPOUTPUT#\r\n"
                        # Tell the server that we are sending the nmap output
                        print '\tSending output to the server...'
                        line = 'Nmap Output File:{0}:'.format(nmap_output_file.strip('\n').strip('\r'))
                        if debug:
                            print ' -- Line sent: {0}'.format(line)
                        self.sendLine(line)
                        line = NmapData + xml_linesep + XMLData + gnmap_linesep + GNmapData
                        # line = raw_nmap_output + xml_linesep + XMLData + gnmap_linesep + GNmapData
                        # NOTE(review): the first label says 'GNmapData' but
                        # prints len(NmapData) -- confirm intended label.
                        print 'GNmapData: {}'.format(len(NmapData))
                        print 'xml_linesep: {}'.format(len(xml_linesep))
                        print 'XMLData: {}'.format(len(XMLData))
                        print 'gnmap_linesep: {}'.format(len(gnmap_linesep))
                        print 'GNmapData: {}'.format(len(GNmapData))
                        self.sendLine(line)
                        if debug:
                            print ' -- Line sent: {0}'.format(line)
                        line = 'Nmap Output Finished:{0}:'.format(nmap_output_file.strip('\n').strip('\r'))
                        if debug:
                            print ' -- Line sent: {0}'.format(line)
                        self.sendLine(line)
                        # Move nmap output files to its directory
                        os.system('mv *.nmap nmap_output > /dev/null 2>&1')
                        os.system('mv *.gnmap nmap_output > /dev/null 2>&1')
                        os.system('mv *.xml nmap_output > /dev/null 2>&1')
                        # Ask for another command.
                        # 'Client ID' text must be sent to receive another command
                        # NOTE(review): if the three files were NOT created we
                        # never re-request work here -- confirm intended.
                        print 'Waiting for more commands....'
                        # line = 'Send more commands to Client ID:{0}:Alias:{1}:'.format(str(client_id),str(alias))
                        line = 'Send more commands'
                        if debug:
                            print ' -- Line sent: {0}'.format(line)
                        self.sendLine(line)
            else:
                # Something strange was sent to us...
                print
                print 'WARNING! Ignoring some strange command was sent to us: {0}'.format(line)
                line = 'Send more commands'
                if debug:
                    print ' -- Line sent: {0}'.format(line)
                self.sendLine(line)
class NmapClientFactory(ReconnectingClientFactory):
    """Factory that keeps reconnecting the NmapClient protocol.

    The try/except around the class body only guards the *definition* of
    these methods, not their execution -- kept as in the original.
    """
    try:
        protocol = NmapClient

        def startedConnecting(self, connector):
            print 'Starting connection...'

        def clientConnectionFailed(self, connector, reason):
            print 'Connection failed:', reason.getErrorMessage()
            # Try to reconnect
            # NOTE(review): delegates to clientConnectionLost (not
            # clientConnectionFailed); both trigger the reconnect/retry
            # machinery -- confirm this is intentional.
            print 'Trying to reconnect. Please wait...'
            ReconnectingClientFactory.clientConnectionLost(self, connector, reason)

        def clientConnectionLost(self, connector, reason):
            print 'Connection lost. Reason: {0}'.format(reason.getErrorMessage())
            # Try to reconnect
            print 'Trying to reconnect in 10 secs. Please wait...'
            ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
    except Exception as inst:
        print 'Problem in NmapClientFactory'
        print type(inst)
        print inst.args
        print inst
def process_commands():
    """Generate a client id, create the output dir and run the reactor.

    Connects to the dnmap server over SSL and blocks in reactor.run()
    until shutdown.
    """
    global server_ip
    global server_port
    global client_id
    global factory
    try:
        print 'Client Started...'
        # Generate the client unique ID
        client_id = str(random.randrange(0, 100000000, 1))
        # Create the output directory
        print 'Nmap output files stored in \'nmap_output\' directory...'
        os.system('mkdir nmap_output > /dev/null 2>&1')
        factory = NmapClientFactory()
        # Wait at most 2 seconds between reconnection attempts.
        factory.maxDelay = 2
        reactor.connectSSL(str(server_ip), int(server_port), factory, ssl.ClientContextFactory())
        #reactor.addSystemEventTrigger('before','shutdown',myCleanUpFunction)
        reactor.run()
    except Exception as inst:
        print 'Problem in process_commands function'
        print type(inst)
        print inst.args
        print inst
def main():
    """Parse the command line options and start the dnmap client."""
    global server_ip
    global server_port
    global alias
    global debug
    global maxrate
    try:
        # Fixed: '--server-port' and '--max-rate' take a value, so their
        # long-option specs need a trailing '=' (the short specs 'p:' and
        # 'm:' already declared the argument); without it getopt raised
        # GetoptError on e.g. '--server-port=46002'.
        opts, args = getopt.getopt(sys.argv[1:], "a:dm:p:s:", ["server-ip=", "server-port=", "max-rate=", "alias=", "debug"])
    except getopt.GetoptError:
        usage()
    for opt, arg in opts:
        if opt in ("-s", "--server-ip"): server_ip = str(arg)
        if opt in ("-p", "--server-port"): server_port = arg
        if opt in ("-a", "--alias"): alias = str(arg).strip('\n').strip('\r').strip(' ')
        if opt in ("-d", "--debug"): debug = True
        if opt in ("-m", "--max-rate"): maxrate = str(arg)
    try:
        if server_ip and server_port:
            version()
            # Start connecting
            process_commands()
        else:
            usage()
    except KeyboardInterrupt:
        # CTRL-C pretty handling.
        print("Keyboard Interruption!. Exiting.")
        sys.exit(1)
# Script entry point.
if __name__ == '__main__':
    main()
| royharoush/rtools | dnc.py | Python | gpl-2.0 | 13,314 |
# Yade scene script: builds a small scene (spheres, facets, polyhedra),
# forces interactions between the spheres, runs one step and exercises
# every export method of export.VTKExporter.  Runs inside the yade
# environment, which injects sphere/facet/Vector3/Quaternion/O/
# createInteraction into the namespace.
from yade import export,polyhedra_utils

mat = PolyhedraMat()

O.bodies.append([
	sphere((0,0,0),1),
	sphere((0,3,0),1),
	sphere((0,2,4),2),
	sphere((0,5,2),1.5),
	facet([Vector3(0,-3,-1),Vector3(0,-2,5),Vector3(5,4,0)]),
	facet([Vector3(0,-3,-1),Vector3(0,-2,5),Vector3(-5,4,0)]),
	polyhedra_utils.polyhedra(mat,(1,2,3),0),
	polyhedra_utils.polyhedralBall(2,20,mat,(-2,-2,4)),
])
# Reposition/reorient the two polyhedra after creation.
O.bodies[-1].state.pos = (-2,-2,-2)
O.bodies[-1].state.ori = Quaternion((1,1,2),1)
O.bodies[-2].state.pos = (-2,-2,3)
O.bodies[-2].state.ori = Quaternion((1,2,0),1)

# Force interactions between all sphere pairs so exportInteractions has
# something to write.
createInteraction(0,1)
createInteraction(0,2)
createInteraction(0,3)
createInteraction(1,2)
createInteraction(1,3)
createInteraction(2,3)

O.step()

# Export each body/interaction family with one custom per-item attribute.
vtkExporter = export.VTKExporter('vtkExporterTesting')
vtkExporter.exportSpheres(what=[('dist','b.state.pos.norm()')])
vtkExporter.exportFacets(what=[('pos','b.state.pos')])
vtkExporter.exportInteractions(what=[('kn','i.phys.kn')])
vtkExporter.exportPolyhedra(what=[('n','b.id')])
| ThomasSweijen/yadesolute2 | examples/test/vtk-exporter/vtkExporter.py | Python | gpl-2.0 | 976 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
    """Initial migration: create the UserActivateKey model.

    Stores a per-user account-activation key with an expiry timestamp,
    backed by the legacy table name 'tcms_user_activate_keys'.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='UserActivateKey',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('activation_key', models.CharField(max_length=40, null=True, blank=True)),
                ('key_expires', models.DateTimeField(null=True, blank=True)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'db_table': 'tcms_user_activate_keys',
            },
        ),
    ]
| MrSenko/Nitrate | tcms/core/contrib/auth/migrations/0001_initial.py | Python | gpl-2.0 | 867 |
"""Unit tests for psrfits_to_sdfits.py."""
import unittest
import sys
import scipy as sp
import numpy.random as rand
import psrfits_to_sdfits as p2s
import kiyopy.custom_exceptions as ce
class TestFormatData(unittest.TestCase) :
    """Tests for p2s.format_data, which reinterprets a raw uint8 buffer.

    setUp builds the expected integer data and the equivalent raw byte
    buffer: polarisation 0 is stored unsigned (uint8) while the cross
    polarisations are stored signed (int8); the dtype reassignments below
    reinterpret the same bytes rather than converting values.
    """
    def setUp(self) :
        self.ntime = 5
        self.npol = 4
        self.nfreq = 10
        self.good_data = sp.empty((self.ntime, self.npol, self.nfreq),
                                  dtype=int)
        self.good_data[:,:,:] = sp.reshape(sp.arange(self.ntime*self.nfreq),
                                           (self.ntime, 1, self.nfreq))
        self.good_data[:,0,:] += 100
        self.good_data[:,1:,:] -= self.ntime*self.nfreq//2
        self.raw_data = sp.empty((self.ntime, self.npol, self.nfreq),
                                 dtype=sp.uint8)
        self.raw_data[:,0,:] = self.good_data[:,0,:]
        # Switch the view to int8 so the signed cross-pol values keep
        # their bit pattern, then switch back to uint8 for the raw buffer.
        self.raw_data.dtype = sp.int8
        self.raw_data[:,1:,:] = self.good_data[:,1:,:]
        self.raw_data.dtype = sp.uint8
        self.raw_data = self.raw_data.flatten()

    def test_runs(self) :
        # Smoke test: format_data accepts the raw buffer without error.
        p2s.format_data(self.raw_data, self.ntime, self.npol, self.nfreq)

    def test_requires_uint(self) :
        # Passing already-typed int data must be rejected.
        self.assertRaises(TypeError, p2s.format_data, self.good_data,
                          self.ntime, self.npol, self.nfreq)

    def test_right_answer(self):
        reformated = p2s.format_data(self.raw_data, self.ntime, self.npol,
                                     self.nfreq)
        self.assertTrue(sp.allclose(reformated, self.good_data))
class TestFoldOnCal(unittest.TestCase) :
    """Tests for finding the noise-cal on/off square wave in the data.

    setUp synthesises time-stream data with a square-wave cal signal in
    polarisation 0: random phase (first_trans) and polarity (neg),
    channel-dependent gain, a DC offset and gaussian noise.
    NOTE: uses sys.maxint, so this module is Python 2 only.
    """
    def setUp(self):
        self.ntime = 2048
        self.nfreq = 10
        self.data = sp.zeros((self.ntime, 4, self.nfreq))
        self.n_bins_cal = 64
        # Set channel dependant gain.
        self.level = 0.1*(self.nfreq + sp.arange(self.nfreq))
        # Add noise.
        self.data[:,:,:] += (0.1 * self.level
                             * rand.randn(self.ntime, 4, self.nfreq))
        # Add DC level.
        self.dc = 10 * self.level
        self.data += self.dc
        # First cal transition.
        self.first_trans = rand.randint(0, self.n_bins_cal // 2)
        # The following randomly assigns self.neg to -1 or 1.
        self.neg = 0
        while not self.neg: self.neg = rand.randint(-1, 2)
        # First upward edge:
        if self.neg == 1:
            self.offset = self.first_trans
        else:
            self.offset = self.first_trans + self.n_bins_cal // 2
        self.data[:,0,:] += self.level
        # Paint the cal square wave onto polarisation 0.
        for ii in range(self.ntime//self.n_bins_cal) :
            s = slice(self.first_trans + ii*self.n_bins_cal, self.first_trans +
                      (2*ii+1)*self.n_bins_cal//2)
            self.data[s, 0, :] += self.neg * self.level
        # Transition values and locations.
        self.t_slice = slice(self.first_trans, sys.maxint, self.n_bins_cal//2)
        self.t_vals = 0.5 + 0.1 * rand.randn(2*self.ntime//self.n_bins_cal,
                                             self.nfreq)
        self.t_vals *= - self.level

    def test_runs(self) :
        p2s.get_cal_mask(self.data, self.n_bins_cal)

    def test_right_answer_basic(self) :
        first_ind_on, n_blank = p2s.get_cal_mask(self.data, self.n_bins_cal)
        self.assertEqual(first_ind_on, (self.offset + 1) % self.n_bins_cal)
        self.assertEqual(n_blank, 2)

    def test_right_answer_partial(self) :
        # With partial-level transition bins only one bin gets blanked.
        self.data[self.t_slice, 0, :] += self.t_vals
        first_ind_on, n_blank = p2s.get_cal_mask(self.data, self.n_bins_cal)
        self.assertEqual(first_ind_on, (self.offset + 1) % self.n_bins_cal)
        self.assertEqual(n_blank, 1)

    def test_checks_cal_per(self) :
        # Cal period must divide the time axis length.
        self.assertRaises(ValueError, p2s.get_cal_mask, self.data,
                          self.n_bins_cal + 1)

    def test_fails_to_many_transitions(self) :
        self.data[self.t_slice, 0, :] += self.t_vals
        self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data,
                          self.n_bins_cal*2)
        self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data,
                          self.n_bins_cal//2)

    def test_fails_any_nan(self) :
        self.data[self.t_slice,0,:] = float('nan')
        self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data,
                          self.n_bins_cal)

    def test_fails_offs_in_ons(self) :
        # An 'off'-level bin in the middle of the on phase is an error.
        self.data[self.t_slice, 0, :] += self.t_vals
        s = slice((self.offset + 7) % self.n_bins_cal, sys.maxint,
                  self.n_bins_cal)
        self.data[s, :, :] = self.dc
        self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data,
                          self.n_bins_cal)

    def test_fails_late_on(self) :
        self.data[self.t_slice, 0, :] = self.dc
        s = slice(self.offset+1, sys.maxint, self.n_bins_cal)
        self.data[s, :, :] = self.dc
        self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data,
                          self.n_bins_cal)

    def test_fails_to_many_semi_bins(self) :
        self.data[self.t_slice, 0, :] += self.t_vals
        s = slice((self.offset + 7) % self.n_bins_cal, sys.maxint,
                  self.n_bins_cal)
        self.data[s, :, :] = self.dc + self.level * 0.7
        self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data,
                          self.n_bins_cal)

    def test_fast_flagger(self):
        # Sprinkle strong outliers; separate_cal with flag=10 should
        # recover the cal-on/off levels and down-weight flagged samples.
        for ii in range(self.ntime * self.nfreq * 4 // self.n_bins_cal // 10):
        #for ii in range(3):
            i_f = rand.randint(0, self.nfreq)
            i_t = rand.randint(0, self.ntime)
            i_p = rand.randint(0, 4)
            self.data[i_t,i_p,i_f] += self.level[i_f] * 5
        data, weights = p2s.separate_cal(self.data, self.n_bins_cal, flag=10)
        right_answer = sp.empty((4, 2, self.nfreq))
        right_answer[...] = self.dc
        right_answer[0,0,:] += self.level
        self.assertTrue(sp.allclose(data, right_answer, atol=self.level / 10))
        self.assertTrue(sp.all(weights <= 1.))
        kept_fraction = 1. - 4./self.n_bins_cal - (4./self.n_bins_cal/10)
        self.assertTrue(sp.allclose(sp.mean(weights), kept_fraction, rtol=1e-3))
class TestSeparateCal(unittest.TestCase) :
    """Unlike the tests for get_cal_mask, these tests are tightly controlled
    with no noise so we can detect deviations from expected.

    post_setup paints an exact 0/1 cal square wave at self.offset; the
    tests then vary the offset (including wrap-around cases) and
    optionally add partial-level transition bins.
    """
    def setUp(self) :
        self.ntime = 2048
        self.nfreq = 10
        self.data = sp.zeros((self.ntime, 4, self.nfreq))
        self.n_bins_cal = 64
        self.offset = 10

    def post_setup(self) :
        # Paint the square wave for the chosen offset (Python 2 only:
        # uses sys.maxint for open-ended slices).
        if self.offset > self.n_bins_cal//2 :
            last_on_start = (self.offset + self.n_bins_cal//2)% self.n_bins_cal
            self.data[:last_on_start, :, :] = 1
        for ii in range(self.ntime//self.n_bins_cal) :
            s = slice(self.offset + ii*self.n_bins_cal, self.offset +
                      (2*ii+1)*self.n_bins_cal//2)
            self.data[s, :, :] = 1
        self.t_slice_on = slice(self.offset, sys.maxint, self.n_bins_cal)
        self.t_slice_off = slice((self.offset +
                                  self.n_bins_cal//2)%self.n_bins_cal,
                                 sys.maxint, self.n_bins_cal)

    def check_answer(self) :
        # Cal-on samples must average to exactly 1, cal-off to exactly 0,
        # both with and without fast flagging enabled.
        data = self.data.copy()
        outdata, weights = p2s.separate_cal(data, self.n_bins_cal, flag=-1)
        self.assertTrue(sp.allclose(outdata[:,:,0,:], 1.0))
        self.assertTrue(sp.allclose(outdata[:,:,1,:], 0.0))
        data = self.data.copy()
        outdata, weights = p2s.separate_cal(data, self.n_bins_cal, flag=10)
        self.assertTrue(sp.allclose(outdata[:,:,0,:], 1.0))
        self.assertTrue(sp.allclose(outdata[:,:,1,:], 0.0))

    def test_works_no_transition(self) :
        self.post_setup()
        self.check_answer()

    def test_works_transition(self) :
        self.post_setup()
        self.data[self.t_slice_off, :, :] = 0.3
        self.data[self.t_slice_on, :, :] = 0.5
        self.check_answer()

    # Move the offset to the second half and make sure it works.
    def test_works_no_transition_late(self) :
        self.offset = 57
        self.post_setup()
        self.check_answer()

    def test_works_transition_late(self) :
        self.offset = 57
        self.post_setup()
        self.data[self.t_slice_off, :, :] = 0.3
        self.data[self.t_slice_on, :, :] = 0.5
        self.check_answer()

    # Test offset = 63
    def test_works_no_transition__1(self) :
        self.offset = 63
        self.post_setup()
        self.check_answer()

    def test_works_transition__1(self) :
        self.offset = 63
        self.post_setup()
        self.data[self.t_slice_off, :, :] = 0.3
        self.data[self.t_slice_on, :, :] = 0.5
        self.check_answer()

    # Test offset = 32
    def test_works_no_transition_32(self) :
        self.offset = 32
        self.post_setup()
        self.check_answer()

    def test_works_transition_32(self) :
        self.offset = 32
        self.post_setup()
        self.data[self.t_slice_off, :, :] = 0.3
        self.data[self.t_slice_on, :, :] = 0.5
        self.check_answer()

    # Test offset = 0
    def test_works_no_transition_0(self) :
        self.offset = 0
        self.post_setup()
        self.check_answer()

    def test_works_transition_0(self) :
        self.offset = 0
        self.post_setup()
        self.data[self.t_slice_off, :, :] = 0.3
        self.data[self.t_slice_on, :, :] = 0.5
        self.check_answer()
# Allow running this test module directly.
if __name__ == '__main__' :
    unittest.main()
| kiyo-masui/SDdata | sddata/tests/test_psrfits_to_sdfits.py | Python | gpl-2.0 | 9,556 |
#!/usr/bin/env python
import sys
import numpy as np
from matplotlib import pyplot as plt
def main():
    """Load a tab-separated numeric dataset and scatter-plot its classes.

    sys.argv[1] names a file whose tab-separated columns are all floats;
    the last column is the binary label (0 or 1), the rest are features.
    Class-0 points are plotted blue, class-1 points red, using the first
    two feature columns.
    """
    # 'with' guarantees the handle is closed even if parsing fails (the
    # original leaked it on error); the comprehension also replaces the
    # Python-2-only eager map()-of-lambdas idiom.
    with open(sys.argv[1]) as infile:
        data = np.array([[float(field) for field in line.strip('\n').split('\t')]
                         for line in infile])
    X = data[:, 0:-1]
    (N, D) = X.shape
    Y = data[:, -1].reshape((N, 1))
    plt.plot(X[np.where(Y == 0)[0]][:, 0], X[np.where(Y == 0)[0]][:, 1], 'b.')
    plt.plot(X[np.where(Y == 1)[0]][:, 0], X[np.where(Y == 1)[0]][:, 1], 'r.')
    plt.show()
# Script entry point.
if __name__ == '__main__':
    main()
"""
Copyright 2016, 2017 UFPE - Universidade Federal de Pernambuco
Este arquivo é parte do programa Amadeus Sistema de Gestão de Aprendizagem, ou simplesmente Amadeus LMS
O Amadeus LMS é um software livre; você pode redistribui-lo e/ou modifica-lo dentro dos termos da Licença Pública Geral GNU como publicada pela Fundação do Software Livre (FSF); na versão 2 da Licença.
Este programa é distribuído na esperança que possa ser útil, mas SEM NENHUMA GARANTIA; sem uma garantia implícita de ADEQUAÇÃO a qualquer MERCADO ou APLICAÇÃO EM PARTICULAR. Veja a Licença Pública Geral GNU para maiores detalhes.
Você deve ter recebido uma cópia da Licença Pública Geral GNU, sob o título "LICENSE", junto com este programa, se não, escreva para a Fundação do Software Livre (FSF) Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
"""
import os
import zipfile
import time
from django.db.models import Q
from django.conf import settings
from django.core.files import File
from django.shortcuts import get_object_or_404
from rest_framework import serializers
from subjects.serializers import TagSerializer
from subjects.models import Tag, Subject
from .models import Question, Alternative
class AlternativeSerializer(serializers.ModelSerializer):
    """Serializer for Alternative used when importing questions from a zip.

    ``alt_img`` arrives as a path inside the uploaded zip (available via
    the ``files`` context entry); validate() extracts the image into
    MEDIA_ROOT, moving it to a timestamp-unique name when a file with the
    same name already exists.
    """
    alt_img = serializers.CharField(required = False, allow_blank = True, max_length = 255)

    def validate(self, data):
        files = self.context.get('files', None)

        if files:
            if data["alt_img"] in files.namelist():
                file_path = os.path.join(settings.MEDIA_ROOT, data["alt_img"])

                if os.path.isfile(file_path):
                    # Name clash: extract to a tmp dir, then move to a
                    # unique destination under questions/alternatives/.
                    dst_path = os.path.join(settings.MEDIA_ROOT, "tmp")

                    path = files.extract(data["alt_img"], dst_path)

                    # Fixed: the unique name must keep this alternative's
                    # image extension ("alt_img"); the original read
                    # data["question_img"], a key that does not exist in
                    # this serializer's data and raised KeyError.
                    new_name = os.path.join("questions", os.path.join("alternatives", "alternative_" + str(time.time()) + os.path.splitext(data["alt_img"])[1]))

                    os.rename(os.path.join(dst_path, path), os.path.join(settings.MEDIA_ROOT, new_name))

                    data["alt_img"] = new_name
                else:
                    path = files.extract(data["alt_img"], settings.MEDIA_ROOT)
            else:
                data["alt_img"] = None
        else:
            data["alt_img"] = None

        return data

    class Meta:
        model = Alternative
        exclude = ('question',)
class QuestionDatabaseSerializer(serializers.ModelSerializer):
    """Serializer used to import Question objects (with tags and
    alternatives) from an uploaded zip archive.

    The zip is supplied through the ``files`` context entry and the
    destination Subject through the ``subject`` context entry.
    """
    categories = TagSerializer(many = True)
    # NOTE(review): 'get_files' is passed positionally as the serializer
    # *instance* argument -- confirm this is intentional and not meant to
    # wire up the files context.
    alt_question = AlternativeSerializer('get_files', many = True)
    question_img = serializers.CharField(required = False, allow_blank = True, max_length = 255)

    def get_subject(self, obj):
        # Destination subject comes from the serializer context.
        subject = self.context.get("subject", None)
        return subject

    def get_files(self, obj):
        # Uploaded zip archive comes from the serializer context.
        files = self.context.get("files", None)
        return files

    def validate(self, data):
        # Extract the question image from the zip, renaming to a
        # timestamp-unique path when the file already exists.
        files = self.context.get('files', None)

        if files:
            if data["question_img"] in files.namelist():
                file_path = os.path.join(settings.MEDIA_ROOT, data["question_img"])

                if os.path.isfile(file_path):
                    dst_path = os.path.join(settings.MEDIA_ROOT, "tmp")

                    path = files.extract(data["question_img"], dst_path)

                    new_name = os.path.join("questions","question_" + str(time.time()) + os.path.splitext(data["question_img"])[1])

                    os.rename(os.path.join(dst_path, path), os.path.join(settings.MEDIA_ROOT, new_name))

                    data["question_img"] = new_name
                else:
                    path = files.extract(data["question_img"], settings.MEDIA_ROOT)
            else:
                data["question_img"] = None
        else:
            data["question_img"] = None

        return data

    class Meta:
        model = Question
        exclude = ('subject', )

    def create(self, data):
        # Creates the question (if a question with the same statement does
        # not already exist in the subject), attaches tags (creating
        # missing ones) and creates the nested alternatives.
        question_data = data
        subject = self.context.get("subject", None)
        alternatives = question_data["alt_question"]
        del question_data["alt_question"]
        question = None

        if not Question.objects.filter(enunciado=question_data["enunciado"], subject=subject).exists():
            question = Question()
            question.enunciado = question_data["enunciado"]
            question.question_img = question_data["question_img"]
            question.subject = subject
            question.save()

            tags = data["categories"]
            for tag in tags:
                if not tag["name"] == "":
                    if tag["id"] == "":
                        if Tag.objects.filter(name = tag["name"]).exists():
                            tag = get_object_or_404(Tag, name = tag["name"])
                        else:
                            tag = Tag.objects.create(name = tag["name"])
                    else:
                        tag = get_object_or_404(Tag, id = tag["id"])

                    question.categories.add(tag)

            for alt in alternatives:
                Alternative.objects.create(question = question, **alt)

        # NOTE(review): returns None when the question already existed --
        # confirm callers handle that.
        return question
import random
import math
import sympy
from sympy import latex, fraction, Symbol, Rational
# Generates 1000 multiple-choice "find the slope from two points" questions
# in AMC (auto-multiple-choice) LaTeX format, written to 111815001.tex.
localid =11181500100000
letter=["a","b","c","d"]
n=[0,0,0,0,0,0]
m=[0,0,0,0,0]
f = open("111815001.tex","w") # opens the output LaTeX file 111815001.tex
for x in range(0, 1000):
    localid = localid +1
    # First entry is the correct choice; the list is shuffled below.
    writewrong=["\correctchoice{\(","\wrongchoice{\(","\wrongchoice{\(","\wrongchoice{\("]
    # Random points (n[1],n[2]) and (n[3],n[4]).
    for count in range (0,5):
        n[count]=random.randint(-20, 20)
    m[1]=n[4]-n[2]
    m[2]=n[3]-n[1]
    m[3]=n[2]-n[1]
    m[4]=n[4]-n[3]
    # Build the answer choices; vertical line (equal y... actually equal
    # n[2]==n[4]) gets 'undefined' as the correct answer.
    # NOTE(review): Rational() denominators can be zero for coincident
    # coordinates -- confirm sympy input is always valid here.
    if n[2]==n[4]:
        letter[0]='undefined'
        letter[2]=latex(Rational(-m[3],m[2]))
        letter[3]=latex(Rational(-m[4],m[3]))
        letter[1]=latex(Rational(m[4],m[3]))
    else:
        letter[0]=latex(Rational(m[1],m[2]))
        letter[1]=latex(Rational(-m[1],m[2]))
        letter[2]=latex(Rational(-m[2],m[1]))
        letter[3]=latex(Rational(m[2],m[1]))
        # Sometimes swap in an unrelated distractor value.
        zz=random.randint(1,6)
        if zz==1:
            letter[1]=latex(Rational(m[4],m[3]))
        elif zz==2:
            letter[2]=latex(Rational(m[4],m[3]))
        elif zz==3:
            letter[3]=latex(Rational(m[4],m[3]))
    # Occasionally make one distractor 'undefined' as a decoy.
    n[5]=random.randint(0,10)
    if n[2]==n[4]:
        letter[0]='undefined'
    elif n[5]==8:
        zz=random.randint(1,3)
        letter[zz]='undefined'
    # NOTE(review): len(letter) is always 4, so this guard never fails.
    if(len(letter)==4):
        for z in range (0, 4):
            writewrong[z]=writewrong[z]+str(letter[z])
    random.shuffle(writewrong)
    # Emit the AMC question environment.
    f.write("\n\n\n")
    f.write("\\element{slope}{")
    f.write("\n")
    f.write("\\begin{question}{")
    f.write(str(localid))
    f.write("}")
    f.write("\n")
    f.write("Find the slope using points: (")
    f.write(str(n[1]))
    f.write(",")
    f.write(str(n[2]))
    f.write(") and (")
    f.write(str(n[3]))
    f.write(",")
    f.write(str(n[4]))
    f.write("):")
    f.write("\n")
    f.write("\\begin{choiceshoriz}")
    f.write("\n")
    for y in range(0, 4):
        f.write("\n")
        f.write(writewrong[y])
        f.write("\)}")
        f.write("\n")
    f.write("\\end{choiceshoriz}")
    f.write("\n")
    f.write("\\end{question}")
    f.write("\n")
    f.write("}")
f.close()
| johncbolton/amcpy | 111815001.py | Python | gpl-2.0 | 2,105 |
"""
network/bluetooth_server.py
Starts a bluetooth server with max 7 connections.
Receives and sends data from/to paired client.
PS3 controller: receive only
Android App: receive/send
"""
import base64
from threading import Thread
from bluetooth import *
import protocol
import thread
import atexit
PACKET_SIZE = 990
class BluetoothServer(Thread):
    """Daemon thread that advertises an RFCOMM service and accepts clients.

    Each accepted connection is handed to its own BluetoothClientThread.
    NOTE(review): the module docstring mentions "max 7 connections" but
    listen(1) only sets the accept backlog to 1 -- confirm intended.
    """
    def __init__(self, apphandler, driver):
        super(BluetoothServer, self).__init__()
        self.daemon = True # if main is killed, this also gets killed
        self.apphandler = apphandler # pass apphandler object so we can pass control commands to it
        self.driver = driver
        self.server_sock = BluetoothSocket(RFCOMM) # create socket
        self.server_sock.bind(("", PORT_ANY)) # bind to bluetooth adapter
        self.server_sock.listen(1) # server listens to accept 1 connection at a time
        self.port = self.server_sock.getsockname()[1] # socket is bound on this port
        # advertise service
        uuid = "9d7debbc-c85d-11d1-9eb4-006008c3a19a" # ID of the service
        advertise_service(self.server_sock, "Aranha", # start service, so the app can connect
                          service_id=uuid,
                          service_classes=[uuid, SERIAL_PORT_CLASS],
                          profiles=[SERIAL_PORT_PROFILE])
        # make sure the listening socket is closed on interpreter exit
        atexit.register(self.server_sock.close)

    # accept clients and create threads for them
    def run(self):
        """Accept loop: block on accept() and spawn a client thread each time."""
        while True:
            print("Waiting for connection on RFCOMM channel %d" % self.port)
            client_sock, client_info = self.server_sock.accept() # wait for connection. if new connection, continue
            print("Accepted connection from ", client_info)
            # start client thread
            BluetoothClientThread(self.server_sock, client_sock, client_info, self.apphandler, self.driver).start()
class BluetoothClientThread(Thread):
    """Handles one connected Bluetooth client.

    Receives base64-encoded frames from the peer and dispatches each decoded
    payload to the protocol handler on its own thread. Also provides
    encode_and_send() for replying to the client.
    """

    def __init__(self, server_sock, client_sock, client_info, apphandler, driver):
        super(BluetoothClientThread, self).__init__()
        self.daemon = True  # die together with the main process
        self.server_sock = server_sock
        self.client_sock = client_sock
        self.client_info = client_info
        self.apphandler = apphandler
        self.driver = driver
        # Ensure both sockets are closed on interpreter exit.
        atexit.register(self.client_sock.close)
        atexit.register(self.server_sock.close)

    def run(self):
        """Receive loop: read base64 messages until the peer disconnects."""
        while True:
            try:
                data = base64.b64decode(self.client_sock.recv(1024))
                if len(data) == 0:
                    break  # orderly shutdown by the peer
                # Handle each message concurrently so the receive loop
                # stays responsive to further traffic.
                thread.start_new_thread(protocol.handle, (self, data, self.driver))
            except IOError:
                break  # connection reset / socket error
        # close connection
        print("Closed connection from ", self.client_info)
        self.client_sock.close()
        # Bug fix: the original assigned ``self.is_alive = False`` here,
        # which shadowed threading.Thread.is_alive(); Thread already reports
        # liveness correctly once run() returns, so the assignment is gone.

    def encode_and_send(self, header, msg):
        """Send ``msg`` base64-encoded, preceded by ``<header><length>\\x00``."""
        msg = base64.b64encode(msg)
        self.client_sock.send(header + "" + str(len(msg)) + chr(0))
        self.client_sock.send(msg)
"""
Copyright (C) 2018 MuadDib
----------------------------------------------------------------------------
"THE BEER-WARE LICENSE" (Revision 42):
@tantrumdev wrote this file. As long as you retain this notice you
can do whatever you want with this stuff. If we meet some day, and you think
this stuff is worth it, you can buy him a beer in return. - Muad'Dib
----------------------------------------------------------------------------
Overview:
Drop this PY in the plugins folder, and use whatever tools below you want.
Version:
2018.6.8
- Added Streamango and Streamcherry pairing sites
- Added <adult> tag to hide menu items unless an addon setting enabled it (see code for setting id to use
in your settings.xml)
2018.5.25
- Added <pairwith> tags
- Can use pairlist to show all sites, or specific entry from PAIR_LIST to load that site from menu
- Added <trailer> tag support to load your custom YT trailer (via plugin url) for non-imdb items
2018.5.1a
- Added <mode> and <modeurl> tags (used together in same item)
2018.5.1
- Initial Release
XML Explanations:
Tags:
<heading></heading> - Displays the entry as normal, but performs no action (not a directory or "item")
<mysettings>0/0</mysettings> - Opens settings dialog to the specified Tab and section (0 indexed)
<pairwith></pairwith> - Used for pairing with sites. See list below of supported sites with this plugin
<trailer>plugin://plugin.video.youtube/play/?video_id=ChA0qNHV1D4</trailer>
Usage Examples:
<item>
<title>[COLOR limegreen]Don't forget to folow me on twitter @tantrumdev ![/COLOR]</title>
<heading></heading>
</item>
<item>
<title>JEN: Customization</title>
<mysettings>0/0</mysettings>
<info>Open the Settings for the addon on the Customization tab</info>
</item>
<item>
<title>Pair With Sites</title>
<pairwith>pairlist</pairwith>
</item>
<item>
<title>Pair Openload</title>
<pairwith>openload</pairwith>
</item>
<item>
<title>Dune (1984)</title>
<trailer>plugin://plugin.video.youtube/play/?video_id=ChA0qNHV1D4</trailer>
<info>Provides the Trailer context link for this movie when Metadata is DISABLED in your addon.</info>
</item>
<item>
<title>JEN: General</title>
<mysettings>1/0</mysettings>
<info>Open the Settings for the addon on the General tab</info>
</item>
<item>
<title>Custom Mode</title>
<mode>Whatever</mode>
<modeurl>query=Iwant</modeurl>
<info>Sets a specific Mode for the menu item, to utilize Jen modes not normally accessible. Setting modeurl passes a custom built url= variable to go with it</info>
</item>
"""
import collections,requests,re,os,traceback,webbrowser
import koding
import __builtin__
import xbmc,xbmcaddon,xbmcgui
from koding import route
from resources.lib.plugin import Plugin
from resources.lib.util.context import get_context_items
from resources.lib.util.xml import JenItem, JenList, display_list
from unidecode import unidecode
# Add-on metadata read once from Kodi and reused by every handler below.
addon_id = xbmcaddon.Addon().getAddonInfo('id')
this_addon = xbmcaddon.Addon(id=addon_id)
addon_fanart = xbmcaddon.Addon().getAddonInfo('fanart')
addon_icon = xbmcaddon.Addon().getAddonInfo('icon')
addon_path = xbmcaddon.Addon().getAddonInfo('path')
# (key, pairing URL) for every supported host. A <pairwith> value of
# "pairlist" shows all of them in a selection dialog; any other value must
# match one of the keys below.
PAIR_LIST = [ ("openload", "https://olpair.com/pair"),
              ("streamango", "https://streamango.com/pair"),
              ("streamcherry", "https://streamcherry.com/pair"),
              ("the_video_me", "https://thevideo.us/pair"),
              ("vid_up_me", "https://vidup.me/pair"),
              ("vshare", "http://vshare.eu/pair"),
              ("flashx", "https://www.flashx.tv/?op=login&redirect=https://www.flashx.tv/pairing.php") ]
class JenTools(Plugin):
    """Jen plugin adding the misc menu tags described in the module
    docstring: <heading>, <mysettings>, <adult>, <mode>/<modeurl>,
    <pairwith> and <trailer>."""
    name = "jentools"
    priority = 200

    @staticmethod
    def _result_item(item, mode, url, folder):
        """Build the result dict shared by every tag handler.

        Only ``mode``, ``url`` and ``folder`` differ between the tags; the
        remaining fields are boilerplate taken from the parsed ``item``.
        """
        return {
            'label': item["title"],
            'icon': item.get("thumbnail", addon_icon),
            'fanart': item.get("fanart", addon_fanart),
            'mode': mode,
            'url': url,
            'folder': folder,
            'imdb': "0",
            'content': "files",
            'season': "0",
            'episode': "0",
            'info': {},
            'year': "0",
            'context': get_context_items(item),
            "summary": item.get("summary", None)
        }

    def process_item(self, item_xml):
        """Turn one supported <item> XML snippet into a Jen result dict.

        Returns None when the snippet contains none of this plugin's tags,
        letting lower-priority plugins handle it.
        """
        if "<heading>" in item_xml:
            item = JenItem(item_xml)
            return self._result_item(item, "HEADING", item.get("heading", ""), False)
        elif "<mysettings>" in item_xml:
            item = JenItem(item_xml)
            return self._result_item(item, "MYSETTINGS", item.get("mysettings", ""), False)
        elif "<adult>" in item_xml:
            # Adult menus are folders gated behind the PASSREQ handler.
            item = JenItem(item_xml)
            return self._result_item(item, "PASSREQ", item.get("adult", ""), True)
        elif "<mode>" in item_xml:
            # Custom mode: both mode and url come straight from the XML.
            item = JenItem(item_xml)
            return self._result_item(item, item.get("mode", ""), item.get("modeurl", ""), True)
        elif "<pairwith>" in item_xml:
            item = JenItem(item_xml)
            return self._result_item(item, "PAIRWITH", item.get("pairwith", ""), False)
        elif "<trailer>" in item_xml:
            item = JenItem(item_xml)
            # NOTE(review): mode/url here mirror the <pairwith> branch in the
            # original code (likely copy-paste); behaviour kept unchanged.
            result_item = self._result_item(item, "PAIRWITH", item.get("pairwith", ""), False)
            result_item["info"]["trailer"] = item.get("trailer", None)
            return result_item
        return None
@route(mode='HEADING')
def heading_handler():
    """Handler for non-clickable heading rows: do nothing, end the route.

    ``quit()`` raises SystemExit, which the bare ``except`` deliberately
    swallows (``except Exception`` would NOT catch SystemExit) -- the net
    effect is a clean no-op without an error dialog.
    """
    try:
        quit()
    except:
        pass
@route(mode="MYSETTINGS", args=["url"])
def mysettings_handler(query):
try:
xbmc.executebuiltin('Dialog.Close(busydialog)')
xbmc.executebuiltin('Addon.OpenSettings(%s)' % addon_id)
c, f = query.split('/')
xbmc.executebuiltin('SetFocus(%i)' % (int(c) + 100))
xbmc.executebuiltin('SetFocus(%i)' % (int(f) + 200))
except:
return
@route(mode="PASSREQ", args=["url"])
def password_handler(url):
adult_xml = ''
try:
the_setting = this_addon.getSetting('adult_stuff')
if the_setting == None or the_setting == '':
the_setting = 'false'
xbmcaddon.Addon().setSetting('adult_stuff', str(the_setting))
if the_setting == 'false':
adult_xml += "<item>"\
" <title>[COLOR yellow]This menu is not enabled[/COLOR]</title>"\
" <heading></heading>"\
" <thumbnail>https://nsx.np.dl.playstation.net/nsx/material/c/ce432e00ce97a461b9a8c01ce78538f4fa6610fe-1107562.png</thumbnail>"\
"</item>"
jenlist = JenList(adult_xml)
display_list(jenlist.get_list(), jenlist.get_content_type())
return
except:
return
sep_list = url.decode('base64').split('|')
dec_pass = sep_list[0]
xml_loc = sep_list[1]
input = ''
keyboard = xbmc.Keyboard(input, '[COLOR red]Are you worthy?[/COLOR]')
keyboard.doModal()
if keyboard.isConfirmed():
input = keyboard.getText()
if input == dec_pass:
if 'http' in xml_loc:
adult_xml = requests.get(xml_loc).content
else:
import xbmcvfs
xml_loc = xml_loc.replace('file://', '')
xml_file = xbmcvfs.File(os.path.join(addon_path, "xml", xml_loc))
adult_xml = xml_file.read()
xml_file.close()
else:
adult_xml += "<dir>"\
" <title>[COLOR yellow]Wrong Answer! You are not worthy[/COLOR]</title>"\
" <thumbnail>https://nsx.np.dl.playstation.net/nsx/material/c/ce432e00ce97a461b9a8c01ce78538f4fa6610fe-1107562.png</thumbnail>"\
"</dir>"
jenlist = JenList(adult_xml)
display_list(jenlist.get_list(), jenlist.get_content_type())
@route(mode="PAIRWITH", args=["url"])
def pairing_handler(url):
try:
site = ''
if 'pairlist' in url:
names = []
for item in PAIR_LIST:
the_title = 'PAIR %s' % (item[0].replace('_', ' ').capitalize())
names.append(the_title)
selected = xbmcgui.Dialog().select('Select Site',names)
if selected == -1:
return
# If you add [COLOR] etc to the title stuff in names loop above, this will strip all of that out and make it usable here
pair_item = re.sub('\[.*?]','',names[selected]).replace('Pair for ', '').replace(' ', '_').lower()
for item in PAIR_LIST:
if str(item[0]) == pair_item:
site = item[1]
break
else:
for item in PAIR_LIST:
if str(item[0]) == url:
site = item[1]
break
check_os = platform()
if check_os == 'android':
spam_time = xbmc.executebuiltin('StartAndroidActivity(,android.intent.action.VIEW,,%s)' % (site))
elif check_os == 'osx':
os.system("open -a /Applications/Safari.app %s") % (site)
else:
spam_time = webbrowser.open(site)
except:
failure = traceback.format_exc()
xbmcgui.Dialog().textviewer('Exception',str(failure))
pass
def platform():
    """Return a short identifier for the platform Kodi is running on.

    Returns None when no known platform condition matches.
    """
    conditions = (
        ('system.platform.android', 'android'),
        ('system.platform.linux', 'linux'),
        ('system.platform.windows', 'windows'),
        ('system.platform.osx', 'osx'),
        ('system.platform.atv2', 'atv2'),
        ('system.platform.ios', 'ios'),
    )
    # Same order as the original if/elif chain, so e.g. android wins
    # over linux when both conditions are true.
    for condition, label in conditions:
        if xbmc.getCondVisibility(condition):
            return label
#!/usr/bin/python
__version__ = '0.0.1'
import pysimplesoap.client
import pysimplesoap.simplexml
from zimbrasoap.soap import soap,admin,mail
| Secretions/zmdomainexport | zimbrasoap/__init__.py | Python | gpl-2.0 | 144 |
"""
WSGI config for HW3 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
# Point Django at the project settings before the application is created.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "HW3.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
# dj-static configuration
# See: https://github.com/kennethreitz/dj-static
# Cling wraps the WSGI callable so static files are served by the app itself.
application = Cling(get_wsgi_application())
| dperlee/HW3 | HW3/wsgi.py | Python | gpl-2.0 | 492 |
# -*- coding: utf-8 -*-
"""Module handling report menus contents"""
from contextlib import contextmanager
import attr
from navmazing import NavigateToAttribute
from widgetastic.widget import Text
from widgetastic_patternfly import Button
from . import CloudIntelReportsView
from . import ReportsMultiBoxSelect
from cfme.modeling.base import BaseCollection
from cfme.modeling.base import BaseEntity
from cfme.utils.appliance.implementations.ui import CFMENavigateStep
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.appliance.implementations.ui import navigator
from widgetastic_manageiq import FolderManager
from widgetastic_manageiq import ManageIQTree
class EditReportMenusView(CloudIntelReportsView):
    """View for editing one EVM group's report menu layout
    (Cloud Intel -> Reports -> Edit Report Menus accordion)."""
    title = Text("#explorer_title_text")
    reports_tree = ManageIQTree("menu_roles_treebox")
    # Buttons
    save_button = Button("Save")
    reset_button = Button("Reset")
    default_button = Button("Default")
    cancel_button = Button("Cancel")
    commit_button = Button("Commit")
    discard_button = Button("Discard")
    # Folder-structure editor and the left/right report picker.
    manager = FolderManager(".//div[@id='folder_lists']/table")
    report_select = ReportsMultiBoxSelect(
        move_into="Move selected reports right",
        move_from="Move selected reports left",
        available_items="available_reports",
        chosen_items="selected_reports"
    )

    @property
    def is_displayed(self):
        # Displayed only when the accordion is open and the tree selection
        # matches the group stored on the context object.
        return (
            self.in_intel_reports and
            self.title.text == 'Editing EVM Group "{}"'.format(self.context["object"].group) and
            self.edit_report_menus.is_opened and
            self.edit_report_menus.tree.currently_selected == [
                "All EVM Groups",
                self.context["object"].group
            ]
        )
@attr.s
class ReportMenu(BaseEntity):
    """
    Entity representing the per-group report menu; mainly needed for
    navmazing navigation.
    """
    # Name of the EVM group currently being edited (set by go_to_group).
    group = None

    def go_to_group(self, group_name):
        """Navigate to the menu-editing screen for ``group_name``.

        Returns: the displayed :py:class:`EditReportMenusView`.
        """
        self.group = group_name
        view = navigate_to(self, "EditReportMenus")
        assert view.is_displayed
        return view

    def get_folders(self, group):
        """Returns list of folders for given user group.

        Args:
            group: User group to check.
        """
        view = self.go_to_group(group)
        view.reports_tree.click_path("Top Level")
        fields = view.manager.fields
        # Read-only operation: discard so no accidental edit is saved.
        view.discard_button.click()
        return fields

    def get_subfolders(self, group, folder):
        """Returns list of sub-folders for given user group and folder.

        Args:
            group: User group to check.
            folder: Folder to read.
        """
        view = self.go_to_group(group)
        view.reports_tree.click_path("Top Level", folder)
        fields = view.manager.fields
        view.discard_button.click()
        return fields

    def add_folder(self, group, folder):
        """Adds a folder under top-level.

        Args:
            group: User group.
            folder: Name of the new folder.
        """
        # Bug fix: the original called self.manage_folder() without the
        # required ``group`` argument, raising TypeError at runtime.
        with self.manage_folder(group) as top_level:
            top_level.add(folder)

    def add_subfolder(self, group, folder, subfolder):
        """Adds a subfolder under specified folder.

        Args:
            group: User group.
            folder: Name of the folder.
            subfolder: Name of the new subdfolder.
        """
        # Bug fix: the original passed ``folder`` as the ``group`` argument,
        # navigating to the wrong group and managing the top level instead
        # of the requested folder.
        with self.manage_folder(group, folder) as fldr:
            fldr.add(subfolder)

    def reset_to_default(self, group):
        """Clicks the `Default` button.

        Args:
            group: Group to set to Default
        """
        view = self.go_to_group(group)
        view.default_button.click()
        view.save_button.click()

    @contextmanager
    def manage_subfolder(self, group, folder, subfolder):
        """Context manager to use when modifying the subfolder contents.

        You can use manager's :py:meth:`FolderManager.bail_out` classmethod
        to end and discard the changes done inside the with block.

        Args:
            group: User group.
            folder: Parent folder name.
            subfolder: Subfolder name to manage.

        Returns: Context-managed :py:class: `widgetastic_manageiq.MultiBoxSelect` instance
        """
        view = self.go_to_group(group)
        view.reports_tree.click_path("Top Level", folder, subfolder)
        try:
            yield view.report_select
        except FolderManager._BailOut:
            view.discard_button.click()
        except Exception:
            # In case of any exception, nothing will be saved
            view.discard_button.click()
            raise  # And reraise the exception
        else:
            # If no exception happens, save!
            view.commit_button.click()
            view.save_button.click()

    @contextmanager
    def manage_folder(self, group, folder=None):
        """Context manager to use when modifying the folder contents.

        You can use manager's :py:meth:`FolderManager.bail_out` classmethod
        to end and discard the changes done inside the with block. This
        context manager does not give the manager as a value to the with
        block so you have to import and use the :py:class:`FolderManager`
        class manually.

        Args:
            group: User group.
            folder: Which folder to manage. If None, top-level will be managed.

        Returns: Context-managed :py:class:`widgetastic_manageiq.FolderManager` instance
        """
        view = self.go_to_group(group)
        if folder is None:
            view.reports_tree.click_path("Top Level")
        else:
            view.reports_tree.click_path("Top Level", folder)
        try:
            yield view.manager
        except FolderManager._BailOut:
            view.manager.discard()
        except Exception:
            # In case of any exception, nothing will be saved
            view.manager.discard()
            raise  # And reraise the exception
        else:
            # If no exception happens, save!
            view.manager.commit()
            view.save_button.click()
@attr.s
class ReportMenusCollection(BaseCollection):
    """Collection object for the :py:class:'cfme.intelligence.reports.ReportMenu'."""

    # Entity class instantiated by this collection.
    ENTITY = ReportMenu
@navigator.register(ReportMenu)
class EditReportMenus(CFMENavigateStep):
    """Navigation step: Cloud Intel Reports page -> a group's menu editor."""
    VIEW = EditReportMenusView
    prerequisite = NavigateToAttribute("appliance.server", "CloudIntelReports")

    def step(self, *args, **kwargs):
        # Select the target group in the "Edit Report Menus" accordion tree.
        self.view.edit_report_menus.tree.click_path(
            "All EVM Groups",
            self.obj.group
        )
| RedHatQE/cfme_tests | cfme/intelligence/reports/menus.py | Python | gpl-2.0 | 6,600 |
#
# ADIABATIC_FLAME - A freely-propagating, premixed methane/air flat
# flame with multicomponent transport properties
#
from Cantera import *
from Cantera.OneD import *
from Cantera.OneD.FreeFlame import FreeFlame
################################################################
#
# parameter values
#
p = OneAtm # pressure
tin = 300.0 # unburned gas temperature
mdot = 0.04 # kg/m^2/s
comp = 'CH4:0.45, O2:1, N2:3.76' # premixed gas composition
# Initial (coarse) grid in metres; refinement adds points as needed.
initial_grid = [0.0, 0.001, 0.01, 0.02, 0.029, 0.03] # m
tol_ss = [1.0e-5, 1.0e-9] # [rtol atol] for steady-state
# problem
tol_ts = [1.0e-5, 1.0e-9] # [rtol atol] for time stepping
loglevel = 1 # amount of diagnostic output (0
# to 5)
refine_grid = 1 # 1 to enable refinement, 0 to
# disable
# GRI-Mech 3.0 with mixture-averaged transport; the multicomponent model
# is registered now so we can switch to it after the first converged solve.
gas = GRI30('Mix')
gas.addTransportModel('Multi')
# set its state to that of the unburned gas
gas.setState_TPX(tin, p, comp)
f = FreeFlame(gas = gas, grid = initial_grid, tfix = 600.0)
# set the upstream properties
f.inlet.set(mole_fractions = comp, temperature = tin)
f.set(tol = tol_ss, tol_time = tol_ts)
f.showSolution()
# Pass 1: solve with the energy equation frozen on a loosely refined grid
# to obtain a good starting estimate.
f.set(energy = 'off')
f.setRefineCriteria(ratio = 10.0, slope = 1, curve = 1)
f.setMaxJacAge(50, 50)
f.setTimeStep(1.0e-5, [2, 5, 10, 20, 50])
f.solve(loglevel, refine_grid)
f.save('ch4_adiabatic.xml','no_energy',
       'solution with the energy equation disabled')
# Pass 2: enable the energy equation and tighten the refinement criteria.
f.set(energy = 'on')
f.setRefineCriteria(ratio = 3.0, slope = 0.1, curve = 0.2)
f.solve(loglevel, refine_grid)
f.save('ch4_adiabatic.xml','energy',
       'solution with the energy equation enabled')
print 'mixture-averaged flamespeed = ',f.u()[0]
# Pass 3: switch to full multicomponent transport and re-solve.
gas.switchTransportModel('Multi')
f.flame.setTransportModel(gas)
f.solve(loglevel, refine_grid)
f.save('ch4_adiabatic.xml','energy_multi',
       'solution with the energy equation enabled and multicomponent transport')
# write the velocity, temperature, density, and mole fractions to a CSV file
z = f.flame.grid()
T = f.T()
u = f.u()
V = f.V()
fcsv = open('adiabatic_flame.csv','w')
writeCSV(fcsv, ['z (m)', 'u (m/s)', 'V (1/s)', 'T (K)', 'rho (kg/m3)']
         + list(gas.speciesNames()))
for n in range(f.flame.nPoints()):
    f.setGasState(n)
    writeCSV(fcsv, [z[n], u[n], V[n], T[n], gas.density()]
             +list(gas.moleFractions()))
fcsv.close()
print 'solution saved to adiabatic_flame.csv'
print 'multicomponent flamespeed = ',u[0]
f.showStats()
| HyperloopTeam/FullOpenMDAO | cantera-2.0.2/samples/python/flames/adiabatic_flame/adiabatic_flame.py | Python | gpl-2.0 | 2,623 |
'''
Vidbull urlresolver plugin
Copyright (C) 2013 Vinnydude
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
import urllib2
import urllib
from t0mm0.common.net import Net
from urlresolver.plugnplay.interfaces import UrlResolver
from urlresolver.plugnplay.interfaces import PluginSettings
from urlresolver.plugnplay import Plugin
from urlresolver import common
USER_AGENT = 'Mozilla/5.0 (Linux; Android 4.4; Nexus 5 Build/BuildID) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.0.0 Mobile Safari/537.36'
class VidbullResolver(Plugin, UrlResolver, PluginSettings):
    """URL resolver plugin for vidbull.com hosted videos."""
    implements = [UrlResolver, PluginSettings]
    name = "vidbull"
    domains = [ "vidbull.com" ]

    def __init__(self):
        # Priority is user-configurable; fall back to 100 when unset.
        p = self.get_setting('priority') or 100
        self.priority = int(p)
        self.net = Net()

    def get_media_url(self, host, media_id):
        """Fetch the video page and extract the direct <source> URL.

        Returns the stream URL on success, otherwise an unresolvable marker
        (code 3 for HTTP errors, code 0 for anything else).
        """
        try:
            # Mobile user agent: the mobile page exposes a plain <source> tag.
            headers = {
                'User-Agent': USER_AGENT
            }
            web_url = self.get_url(host, media_id)
            html = self.net.http_GET(web_url, headers=headers).content
            match = re.search('<source\s+src="([^"]+)', html)
            if match:
                return match.group(1)
            else:
                raise Exception('File Link Not Found')
        except urllib2.HTTPError as e:
            common.addon.log_error(self.name + ': got http error %d fetching %s' % (e.code, web_url))
            return self.unresolvable(code=3, msg=e)
        except Exception as e:
            common.addon.log('**** Vidbull Error occured: %s' % e)
            return self.unresolvable(code=0, msg=e)

    def get_url(self, host, media_id):
        """Build the vidbull page URL for a media id."""
        return 'http://www.vidbull.com/%s' % media_id

    def get_host_and_id(self, url):
        """Split a vidbull URL into (host, media_id); False when no match.

        Bug fix: removed the unreachable ``return ('host', 'media_id')``
        that followed the if/else (both branches already return).
        """
        r = re.search('//(.+?)/(?:embed-)?([0-9a-zA-Z]+)', url)
        if r:
            return r.groups()
        else:
            return False

    def valid_url(self, url, host):
        """True when the resolver is enabled and the URL/host is vidbull."""
        if self.get_setting('enabled') == 'false': return False
        return (re.match('http://(www.)?vidbull.com/(?:embed-)?' +
                         '[0-9A-Za-z]+', url) or
                'vidbull' in host)
| VioletRed/script.module.urlresolver | lib/urlresolver/plugins/vidbull.py | Python | gpl-2.0 | 2,788 |
# portage.py -- core Portage functionality
# Copyright 1998-2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
VERSION="HEAD"
# ===========================================================================
# START OF IMPORTS -- START OF IMPORTS -- START OF IMPORTS -- START OF IMPORT
# ===========================================================================
try:
import sys
import errno
if not hasattr(errno, 'ESTALE'):
# ESTALE may not be defined on some systems, such as interix.
errno.ESTALE = -1
import re
import types
import platform
# Temporarily delete these imports, to ensure that only the
# wrapped versions are imported by portage internals.
import os
del os
import shutil
del shutil
except ImportError as e:
sys.stderr.write("\n\n")
sys.stderr.write("!!! Failed to complete python imports. These are internal modules for\n")
sys.stderr.write("!!! python and failure here indicates that you have a problem with python\n")
sys.stderr.write("!!! itself and thus portage is not able to continue processing.\n\n")
sys.stderr.write("!!! You might consider starting python with verbose flags to see what has\n")
sys.stderr.write("!!! gone wrong. Here is the information we got for this exception:\n")
sys.stderr.write(" "+str(e)+"\n\n");
raise
try:
import portage.proxy.lazyimport
import portage.proxy as proxy
proxy.lazyimport.lazyimport(globals(),
'portage.cache.cache_errors:CacheError',
'portage.checksum',
'portage.checksum:perform_checksum,perform_md5,prelink_capable',
'portage.cvstree',
'portage.data',
'portage.data:lchown,ostype,portage_gid,portage_uid,secpass,' + \
'uid,userland,userpriv_groups,wheelgid',
'portage.dbapi',
'portage.dbapi.bintree:bindbapi,binarytree',
'portage.dbapi.cpv_expand:cpv_expand',
'portage.dbapi.dep_expand:dep_expand',
'portage.dbapi.porttree:close_portdbapi_caches,FetchlistDict,' + \
'portagetree,portdbapi',
'portage.dbapi.vartree:dblink,merge,unmerge,vardbapi,vartree',
'portage.dbapi.virtual:fakedbapi',
'portage.dep',
'portage.dep:best_match_to_list,dep_getcpv,dep_getkey,' + \
'flatten,get_operator,isjustname,isspecific,isvalidatom,' + \
'match_from_list,match_to_list',
'portage.dep.dep_check:dep_check,dep_eval,dep_wordreduce,dep_zapdeps',
'portage.eclass_cache',
'portage.exception',
'portage.getbinpkg',
'portage.locks',
'portage.locks:lockdir,lockfile,unlockdir,unlockfile',
'portage.mail',
'portage.manifest:Manifest',
'portage.output',
'portage.output:bold,colorize',
'portage.package.ebuild.doebuild:doebuild,' + \
'doebuild_environment,spawn,spawnebuild',
'portage.package.ebuild.config:autouse,best_from_dict,' + \
'check_config_instance,config',
'portage.package.ebuild.deprecated_profile_check:' + \
'deprecated_profile_check',
'portage.package.ebuild.digestcheck:digestcheck',
'portage.package.ebuild.digestgen:digestgen',
'portage.package.ebuild.fetch:fetch',
'portage.package.ebuild.getmaskingreason:getmaskingreason',
'portage.package.ebuild.getmaskingstatus:getmaskingstatus',
'portage.package.ebuild.prepare_build_dirs:prepare_build_dirs',
'portage.process',
'portage.process:atexit_register,run_exitfuncs',
'portage.update:dep_transform,fixdbentries,grab_updates,' + \
'parse_updates,update_config_files,update_dbentries,' + \
'update_dbentry',
'portage.util',
'portage.util:atomic_ofstream,apply_secpass_permissions,' + \
'apply_recursive_permissions,dump_traceback,getconfig,' + \
'grabdict,grabdict_package,grabfile,grabfile_package,' + \
'map_dictlist_vals,new_protect_filename,normalize_path,' + \
'pickle_read,pickle_write,stack_dictlist,stack_dicts,' + \
'stack_lists,unique_array,varexpand,writedict,writemsg,' + \
'writemsg_stdout,write_atomic',
'portage.util.digraph:digraph',
'portage.util.env_update:env_update',
'portage.util.ExtractKernelVersion:ExtractKernelVersion',
'portage.util.listdir:cacheddir,listdir',
'portage.util.movefile:movefile',
'portage.util.mtimedb:MtimeDB',
'portage.versions',
'portage.versions:best,catpkgsplit,catsplit,cpv_getkey,' + \
'cpv_getkey@getCPFromCPV,endversion_keys,' + \
'suffix_value@endversion,pkgcmp,pkgsplit,vercmp,ververify',
'portage.xpak',
'subprocess',
'time',
)
try:
from collections import OrderedDict
except ImportError:
proxy.lazyimport.lazyimport(globals(),
'portage.cache.mappings:OrderedDict')
import portage.const
from portage.const import VDB_PATH, PRIVATE_PATH, CACHE_PATH, DEPCACHE_PATH, \
USER_CONFIG_PATH, MODULES_FILE_PATH, CUSTOM_PROFILE_PATH, PORTAGE_BASE_PATH, \
PORTAGE_BIN_PATH, PORTAGE_PYM_PATH, PROFILE_PATH, LOCALE_DATA_PATH, \
EBUILD_SH_BINARY, SANDBOX_BINARY, BASH_BINARY, \
MOVE_BINARY, PRELINK_BINARY, WORLD_FILE, MAKE_CONF_FILE, MAKE_DEFAULTS_FILE, \
DEPRECATED_PROFILE_FILE, USER_VIRTUALS_FILE, EBUILD_SH_ENV_FILE, \
INVALID_ENV_FILE, CUSTOM_MIRRORS_FILE, CONFIG_MEMORY_FILE,\
INCREMENTALS, EAPI, MISC_SH_BINARY, REPO_NAME_LOC, REPO_NAME_FILE
except ImportError as e:
sys.stderr.write("\n\n")
sys.stderr.write("!!! Failed to complete portage imports. There are internal modules for\n")
sys.stderr.write("!!! portage and failure here indicates that you have a problem with your\n")
sys.stderr.write("!!! installation of portage. Please try a rescue portage located in the\n")
sys.stderr.write("!!! portage tree under '/usr/portage/sys-apps/portage/files/' (default).\n")
sys.stderr.write("!!! There is a README.RESCUE file that details the steps required to perform\n")
sys.stderr.write("!!! a recovery of portage.\n")
sys.stderr.write(" "+str(e)+"\n\n")
raise
if sys.hexversion >= 0x3000000:
    # Python 3: restore the removed Python 2 aliases used throughout portage.
    basestring = str
    long = int

# We use utf_8 encoding everywhere. Previously, we used
# sys.getfilesystemencoding() for the 'merge' encoding, but that had
# various problems:
#
# 1) If the locale is ever changed then it can cause orphan files due
# to changed character set translation.
#
# 2) Ebuilds typically install files with utf_8 encoded file names,
# and then portage would be forced to rename those files to match
# sys.getfilesystemencoding(), possibly breaking things.
#
# 3) Automatic translation between encodings can lead to nonsensical
# file names when the source encoding is unknown by portage.
#
# 4) It's inconvenient for ebuilds to convert the encodings of file
# names to match the current locale, and upstreams typically encode
# file names with utf_8 encoding.
#
# So, instead of relying on sys.getfilesystemencoding(), we avoid the above
# problems by using a constant utf_8 'merge' encoding for all locales, as
# discussed in bug #382199 and bug #381509.
_encodings = {
    'content' : 'utf_8',
    'fs' : 'utf_8',
    'merge' : 'utf_8',
    'repo.content' : 'utf_8',
    'stdio' : 'utf_8',
}
if sys.hexversion >= 0x3000000:
    def _unicode_encode(s, encoding=_encodings['content'], errors='backslashreplace'):
        """Encode a str to bytes; any other value passes through unchanged."""
        return s.encode(encoding, errors) if isinstance(s, str) else s

    def _unicode_decode(s, encoding=_encodings['content'], errors='replace'):
        """Decode bytes to str; any other value passes through unchanged."""
        return str(s, encoding=encoding, errors=errors) if isinstance(s, bytes) else s
else:
    def _unicode_encode(s, encoding=_encodings['content'], errors='backslashreplace'):
        """Encode a unicode string to bytes; other values pass through."""
        return s.encode(encoding, errors) if isinstance(s, unicode) else s

    def _unicode_decode(s, encoding=_encodings['content'], errors='replace'):
        """Decode a byte string to unicode; other values pass through."""
        return unicode(s, encoding=encoding, errors=errors) if isinstance(s, bytes) else s
class _unicode_func_wrapper(object):
    """
    Wraps a function, converts arguments from unicode to bytes,
    and return values to unicode from bytes. Function calls
    will raise UnicodeEncodeError if an argument fails to be
    encoded with the required encoding. Return values that
    are single strings are decoded with errors='replace'. Return
    values that are lists of strings are decoded with errors='strict'
    and elements that fail to be decoded are omitted from the returned
    list.
    """
    __slots__ = ('_func', '_encoding')

    def __init__(self, func, encoding=_encodings['fs']):
        # func: the wrapped callable; encoding: used both to encode the
        # arguments and to decode the return value(s).
        self._func = func
        self._encoding = encoding

    def __call__(self, *args, **kwargs):
        encoding = self._encoding
        # Arguments encode strictly: a non-encodable argument is a caller
        # bug and raises UnicodeEncodeError (see class docstring).
        wrapped_args = [_unicode_encode(x, encoding=encoding, errors='strict')
            for x in args]
        if kwargs:
            wrapped_kwargs = dict(
                (k, _unicode_encode(v, encoding=encoding, errors='strict'))
                for k, v in kwargs.items())
        else:
            wrapped_kwargs = {}

        rval = self._func(*wrapped_args, **wrapped_kwargs)

        # Don't use isinstance() since we don't want to convert subclasses
        # of tuple such as posix.stat_result in Python >=3.2.
        if rval.__class__ in (list, tuple):
            # Decode strictly and silently drop entries that fail.
            decoded_rval = []
            for x in rval:
                try:
                    x = _unicode_decode(x, encoding=encoding, errors='strict')
                except UnicodeDecodeError:
                    pass
                else:
                    decoded_rval.append(x)

            if isinstance(rval, tuple):
                rval = tuple(decoded_rval)
            else:
                rval = decoded_rval
        else:
            rval = _unicode_decode(rval, encoding=encoding, errors='replace')

        return rval
class _unicode_module_wrapper(object):
    """
    Wraps a module and wraps all functions with _unicode_func_wrapper.
    """
    __slots__ = ('_mod', '_encoding', '_overrides', '_cache')

    def __init__(self, mod, encoding=_encodings['fs'], overrides=None, cache=True):
        # object.__setattr__ is used because __getattribute__ is overridden;
        # overrides maps id(original attr) -> replacement object that must
        # NOT be unicode-wrapped (e.g. os.read, which handles raw bytes).
        object.__setattr__(self, '_mod', mod)
        object.__setattr__(self, '_encoding', encoding)
        object.__setattr__(self, '_overrides', overrides)
        if cache:
            cache = {}
        else:
            cache = None
        object.__setattr__(self, '_cache', cache)

    def __getattribute__(self, attr):
        # Lazily wrap each attribute on first access and memoize the result
        # when caching is enabled.
        cache = object.__getattribute__(self, '_cache')
        if cache is not None:
            result = cache.get(attr)
            if result is not None:
                return result
        result = getattr(object.__getattribute__(self, '_mod'), attr)
        encoding = object.__getattribute__(self, '_encoding')
        overrides = object.__getattribute__(self, '_overrides')
        override = None
        if overrides is not None:
            override = overrides.get(id(result))
        if override is not None:
            # Explicitly exempted attribute: return as-is.
            result = override
        elif isinstance(result, type):
            # Classes are returned unwrapped.
            pass
        elif type(result) is types.ModuleType:
            # Submodules get wrapped recursively with the same settings.
            result = _unicode_module_wrapper(result,
                encoding=encoding, overrides=overrides)
        elif hasattr(result, '__call__'):
            result = _unicode_func_wrapper(result, encoding=encoding)
        if cache is not None:
            cache[attr] = result
        return result
import os as _os
# Functions that must not be unicode-wrapped: they take or return file
# descriptors / pipes rather than path strings.
_os_overrides = {
    id(_os.fdopen)        : _os.fdopen,
    id(_os.mkfifo)        : _os.mkfifo,
    id(_os.popen)         : _os.popen,
    id(_os.read)          : _os.read,
    id(_os.system)        : _os.system,
}

# statvfs is not available on all platforms.
if hasattr(_os, 'statvfs'):
    _os_overrides[id(_os.statvfs)] = _os.statvfs

# Module-level "os" is the unicode-wrapped view of the real os module,
# using the filesystem encoding; _os_merge uses the merge encoding.
os = _unicode_module_wrapper(_os, overrides=_os_overrides,
    encoding=_encodings['fs'])
_os_merge = _unicode_module_wrapper(_os,
    encoding=_encodings['merge'], overrides=_os_overrides)

import shutil as _shutil
shutil = _unicode_module_wrapper(_shutil, encoding=_encodings['fs'])
# Imports below this point rely on the above unicode wrapper definitions.
try:
    __import__('selinux')
    import portage._selinux
    # NOTE(review): the bare name _selinux is presumably bound by the
    # module's lazy-import machinery earlier in this file — confirm.
    selinux = _unicode_module_wrapper(_selinux,
        encoding=_encodings['fs'])
    _selinux_merge = _unicode_module_wrapper(_selinux,
        encoding=_encodings['merge'])
except (ImportError, OSError) as e:
    # OSError indicates SELinux support exists but failed to load;
    # a plain ImportError (no selinux bindings) is silently ignored.
    if isinstance(e, OSError):
        sys.stderr.write("!!! SELinux not loaded: %s\n" % str(e))
    del e
    _selinux = None
    selinux = None
    _selinux_merge = None

# ===========================================================================
# END OF IMPORTS -- END OF IMPORTS -- END OF IMPORTS -- END OF IMPORTS -- END
# ===========================================================================
# Absolute path of the running Python interpreter, plus the portage
# bin/ and pym/ install locations (constants imported elsewhere).
_python_interpreter = os.path.realpath(sys.executable)
_bin_path = PORTAGE_BIN_PATH
_pym_path = PORTAGE_PYM_PATH
def _shell_quote(s):
"""
Quote a string in double-quotes and use backslashes to
escape any backslashes, double-quotes, dollar signs, or
backquotes in the string.
"""
for letter in "\\\"$`":
if letter in s:
s = s.replace(letter, "\\" + letter)
return "\"%s\"" % s
# Helper for manipulating BSD file flags; only defined on FreeBSD,
# otherwise left as None so callers can test for availability.
bsd_chflags = None

if platform.system() in ('FreeBSD',):
    class bsd_chflags(object):
        @classmethod
        def chflags(cls, path, flags, opts=""):
            """
            Run the chflags(1) binary on *path* with the octal *flags*.
            Returns None on success; raises OSError (EPERM) or
            CommandNotFound on failure.
            """
            cmd = ['chflags']
            if opts:
                cmd.append(opts)
            cmd.append('%o' % (flags,))
            cmd.append(path)
            encoding = _encodings['fs']
            if sys.hexversion < 0x3000000 or sys.hexversion >= 0x3020000:
                # Python 3.1 does not support bytes in Popen args.
                cmd = [_unicode_encode(x, encoding=encoding, errors='strict')
                    for x in cmd]
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT)
            output = proc.communicate()[0]
            status = proc.wait()
            if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
                return
            # Try to generate an ENOENT error if appropriate:
            # -h means operate on the link itself, so use lstat.
            if 'h' in opts:
                _os_merge.lstat(path)
            else:
                _os_merge.stat(path)
            # Make sure the binary exists.
            if not portage.process.find_binary('chflags'):
                raise portage.exception.CommandNotFound('chflags')
            # Now we're not sure exactly why it failed or what
            # the real errno was, so just report EPERM.
            output = _unicode_decode(output, encoding=encoding)
            e = OSError(errno.EPERM, output)
            e.errno = errno.EPERM
            e.filename = path
            e.message = output
            raise e

        @classmethod
        def lchflags(cls, path, flags):
            # Same as chflags but acts on a symlink itself (-h).
            return cls.chflags(path, flags, opts='-h')
def load_mod(name):
    """
    Import and return the object named by the dotted path *name*.

    The parent module is imported, then each remaining path component
    is resolved with getattr, so the result may be a submodule or any
    attribute reachable from the top-level package.
    """
    components = name.split('.')
    module = __import__('.'.join(components[:-1]))
    for component in components[1:]:
        module = getattr(module, component)
    return module
def getcwd():
    """
    Return the current working directory, handling the case where it
    no longer exists: chdir to "/" and report that instead.
    """
    try:
        return os.getcwd()
    except OSError:
        # The directory was removed out from under us; move to a
        # directory that is guaranteed to exist.
        os.chdir("/")
        return "/"

# Normalize the process cwd once at import time.
getcwd()
def abssymlink(symlink, target=None):
    """
    Read a symlink and return its absolute, normalized target.

    A relative target is resolved against the symlink's directory.
    If *target* is supplied it is used in place of os.readlink(symlink).
    """
    if target is None:
        target = os.readlink(symlink)
    if target[0] != '/':
        # Resolve relative to the directory containing the symlink.
        target = os.path.dirname(symlink) + "/" + target
    return os.path.normpath(target)
_doebuild_manifest_exempt_depend = 0
_testing_eapis = frozenset(["4-python", "4-slot-abi", "5-progress"])
_deprecated_eapis = frozenset(["4_pre1", "3_pre2", "3_pre1", "5_pre1", "5_pre2"])
def _eapi_is_deprecated(eapi):
return eapi in _deprecated_eapis
def eapi_is_supported(eapi):
    """
    Return True if this portage version can handle the given EAPI
    string.  Deprecated and testing EAPIs are considered supported;
    otherwise the EAPI must be a non-negative integer no greater than
    portage.const.EAPI.
    """
    if not isinstance(eapi, basestring):
        # Only call str() when necessary since with python2 it
        # can trigger UnicodeEncodeError if EAPI is corrupt.
        eapi = str(eapi)
    eapi = eapi.strip()
    # Deprecated EAPIs are still usable, just discouraged.
    if _eapi_is_deprecated(eapi):
        return True
    if eapi in _testing_eapis:
        return True
    try:
        eapi = int(eapi)
    except ValueError:
        # Non-numeric and not in the known sets: unsupported.
        eapi = -1
    if eapi < 0:
        return False
    return eapi <= portage.const.EAPI
# This pattern is specified by PMS section 7.3.1.
_pms_eapi_re = re.compile(r"^[ \t]*EAPI=(['\"]?)([A-Za-z0-9+_.-]*)\1[ \t]*([ \t]#.*)?$")
_comment_or_blank_line = re.compile(r"^\s*(#.*)?$")
def _parse_eapi_ebuild_head(f):
eapi = None
eapi_lineno = None
lineno = 0
for line in f:
lineno += 1
m = _comment_or_blank_line.match(line)
if m is None:
eapi_lineno = lineno
m = _pms_eapi_re.match(line)
if m is not None:
eapi = m.group(2)
break
return (eapi, eapi_lineno)
def _movefile(src, dest, **kwargs):
    """Calls movefile and raises a PortageException if an error occurs."""
    # movefile() signals failure by returning None.
    if movefile(src, dest, **kwargs) is None:
        raise portage.exception.PortageException(
            "mv '{0}' '{1}'".format(src, dest))
# Keys of the ebuild metadata cache.  The order is a fixed on-disk
# format and must not change; UNUSED_* are reserved placeholder slots.
auxdbkeys = (
    'DEPEND', 'RDEPEND', 'SLOT', 'SRC_URI', 'RESTRICT',
    'HOMEPAGE', 'LICENSE', 'DESCRIPTION', 'KEYWORDS', 'INHERITED',
    'IUSE', 'REQUIRED_USE', 'PDEPEND', 'PROVIDE', 'EAPI',
    'PROPERTIES', 'DEFINED_PHASES', 'UNUSED_05', 'UNUSED_04',
    'UNUSED_03', 'UNUSED_02', 'UNUSED_01',
)
auxdbkeylen = len(auxdbkeys)
def portageexit():
    """Exit hook: flush and close the portdbapi caches."""
    close_portdbapi_caches()
class _trees_dict(dict):
__slots__ = ('_running_eroot', '_target_eroot',)
def __init__(self, *pargs, **kargs):
dict.__init__(self, *pargs, **kargs)
self._running_eroot = None
self._target_eroot = None
def create_trees(config_root=None, target_root=None, trees=None, env=None,
        eprefix=None):
    """
    Build (or rebuild) the dict of package trees for the target root
    and, when ROOT != "/", also for the running system.  Returns a
    _trees_dict mapping each EROOT to lazily-constructed virtuals,
    vartree, porttree and bintree entries.
    """
    if trees is not None:
        # clean up any existing portdbapi instances
        for myroot in trees:
            portdb = trees[myroot]["porttree"].dbapi
            portdb.close_caches()
            portdbapi.portdbapi_instances.remove(portdb)
            del trees[myroot]["porttree"], myroot, portdb

    if trees is None:
        trees = _trees_dict()
    elif not isinstance(trees, _trees_dict):
        # caller passed a normal dict or something,
        # but we need a _trees_dict instance
        trees = _trees_dict(trees)

    if env is None:
        env = os.environ

    settings = config(config_root=config_root, target_root=target_root,
        env=env, eprefix=eprefix)
    settings.lock()

    trees._target_eroot = settings['EROOT']
    myroots = [(settings['EROOT'], settings)]
    if settings["ROOT"] == "/":
        # Target and running system share one config.
        trees._running_eroot = trees._target_eroot
    else:
        # When ROOT != "/" we only want overrides from the calling
        # environment to apply to the config that's associated
        # with ROOT != "/", so pass a nearly empty dict for the env parameter.
        clean_env = {}
        for k in ('PATH', 'PORTAGE_GRPNAME', 'PORTAGE_USERNAME',
            'SSH_AGENT_PID', 'SSH_AUTH_SOCK', 'TERM',
            'ftp_proxy', 'http_proxy', 'no_proxy',
            '__PORTAGE_TEST_HARDLINK_LOCKS'):
            v = settings.get(k)
            if v is not None:
                clean_env[k] = v
        settings = config(config_root=None, target_root="/",
            env=clean_env, eprefix=eprefix)
        settings.lock()
        trees._running_eroot = settings['EROOT']
        myroots.append((settings['EROOT'], settings))

    for myroot, mysettings in myroots:
        # Each tree entry is constructed lazily on first access.
        trees[myroot] = portage.util.LazyItemsDict(trees.get(myroot, {}))
        trees[myroot].addLazySingleton("virtuals", mysettings.getvirtuals)
        trees[myroot].addLazySingleton(
            "vartree", vartree, categories=mysettings.categories,
            settings=mysettings)
        trees[myroot].addLazySingleton("porttree",
            portagetree, settings=mysettings)
        trees[myroot].addLazySingleton("bintree",
            binarytree, pkgdir=mysettings["PKGDIR"], settings=mysettings)
    return trees
if VERSION == 'HEAD':
    class _LazyVersion(proxy.objectproxy.ObjectProxy):
        """
        Proxy that resolves the portage VERSION on first use by asking
        git for a description of the live checkout; falls back to the
        literal string 'HEAD' when no usable git metadata exists.
        """
        def _get_target(self):
            global VERSION
            if VERSION is not self:
                # Already resolved by a previous access.
                return VERSION
            if os.path.isdir(os.path.join(PORTAGE_BASE_PATH, '.git')):
                encoding = _encodings['fs']
                # One shell invocation: describe the checkout, and when the
                # index has modified files also emit "modified" plus the
                # commit timestamp of HEAD.
                cmd = [BASH_BINARY, "-c", ("cd %s ; git describe --tags || exit $? ; " + \
                    "if [ -n \"`git diff-index --name-only --diff-filter=M HEAD`\" ] ; " + \
                    "then echo modified ; git rev-list --format=%%ct -n 1 HEAD ; fi ; " + \
                    "exit 0") % _shell_quote(PORTAGE_BASE_PATH)]
                if sys.hexversion < 0x3000000 or sys.hexversion >= 0x3020000:
                    # Python 3.1 does not support bytes in Popen args.
                    cmd = [_unicode_encode(x, encoding=encoding, errors='strict')
                        for x in cmd]
                proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT)
                output = _unicode_decode(proc.communicate()[0], encoding=encoding)
                status = proc.wait()
                if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
                    output_lines = output.splitlines()
                    if output_lines:
                        version_split = output_lines[0].split('-')
                        if version_split:
                            VERSION = version_split[0].lstrip('v')
                            patchlevel = False
                            if len(version_split) > 1:
                                patchlevel = True
                                VERSION = "%s_p%s" % (VERSION, version_split[1])
                            if len(output_lines) > 1 and output_lines[1] == 'modified':
                                head_timestamp = None
                                if len(output_lines) > 3:
                                    try:
                                        # long() is Python-2-only.
                                        head_timestamp = long(output_lines[3])
                                    except ValueError:
                                        pass
                                timestamp = long(time.time())
                                if head_timestamp is not None and timestamp > head_timestamp:
                                    # Seconds since HEAD, used as an extra
                                    # patchlevel for modified checkouts.
                                    timestamp = timestamp - head_timestamp
                                if not patchlevel:
                                    VERSION = "%s_p0" % (VERSION,)
                                VERSION = "%s_p%d" % (VERSION, timestamp)
                            return VERSION
            # No git metadata available: keep the plain 'HEAD' marker.
            VERSION = 'HEAD'
            return VERSION
    VERSION = _LazyVersion()
if "_legacy_globals_constructed" in globals():
    # The module has been reloaded, so perform any relevant cleanup
    # and prevent memory leaks.
    if "db" in _legacy_globals_constructed:
        try:
            db
        except NameError:
            pass
        else:
            if isinstance(db, dict) and db:
                for _x in db.values():
                    try:
                        # Skip entries whose porttree was never
                        # constructed (still a lazy item).
                        if "porttree" in _x.lazy_items:
                            continue
                    except (AttributeError, TypeError):
                        continue
                    try:
                        _x = _x["porttree"].dbapi
                    except (AttributeError, KeyError):
                        continue
                    if not isinstance(_x, portdbapi):
                        continue
                    _x.close_caches()
                    try:
                        portdbapi.portdbapi_instances.remove(_x)
                    except ValueError:
                        pass
                del _x
class _LegacyGlobalProxy(proxy.objectproxy.ObjectProxy):
    """
    Proxy that defers construction of a legacy module-level global
    (settings, db, ...) until the first time it is actually used.
    """
    __slots__ = ('_name',)

    def __init__(self, name):
        proxy.objectproxy.ObjectProxy.__init__(self)
        # __slots__ + ObjectProxy attribute interception require
        # object.__setattr__ here.
        object.__setattr__(self, '_name', name)

    def _get_target(self):
        name = object.__getattribute__(self, '_name')
        from portage._legacy_globals import _get_legacy_global
        return _get_legacy_global(name)
# Names of the deprecated module-level globals; each is installed below
# as a lazy _LegacyGlobalProxy.
_legacy_global_var_names = ("archlist", "db", "features",
    "groups", "mtimedb", "mtimedbfile", "pkglines",
    "portdb", "profiledir", "root", "selinux_enabled",
    "settings", "thirdpartymirrors")

for k in _legacy_global_var_names:
    globals()[k] = _LegacyGlobalProxy(k)
# Avoid leaking the loop variable into the module namespace.
del k
# Tracks which legacy globals have actually been constructed.
_legacy_globals_constructed = set()


def _disable_legacy_globals():
    """
    This deletes the ObjectProxy instances that are used
    for lazy initialization of legacy global variables.
    The purpose of deleting them is to prevent new code
    from referencing these deprecated variables.
    """
    for name in _legacy_global_var_names:
        globals().pop(name, None)
| devurandom/portage | pym/portage/__init__.py | Python | gpl-2.0 | 21,844 |
"""
Test the CONMIN optimizer component
"""
import unittest
import numpy
# pylint: disable=F0401,E0611
from openmdao.main.api import Assembly, Component, VariableTree, set_as_top, Driver
from openmdao.main.datatypes.api import Float, Array, Str, VarTree
from openmdao.lib.casehandlers.api import ListCaseRecorder
from openmdao.main.interfaces import IHasParameters, implements
from openmdao.main.hasparameters import HasParameters
from openmdao.util.decorators import add_delegate
from openmdao.lib.drivers.conmindriver import CONMINdriver
from openmdao.util.testutil import assert_rel_error
class OptRosenSuzukiComponent(Component):
    """ From the CONMIN User's Manual:
    EXAMPLE 1 - CONSTRAINED ROSEN-SUZUKI FUNCTION. NO GRADIENT INFORMATION.

         MINIMIZE OBJ = X(1)**2 - 5*X(1) + X(2)**2 - 5*X(2) +
                        2*X(3)**2 - 21*X(3) + X(4)**2 + 7*X(4) + 50

    Subject to:

         G(1) = X(1)**2 + X(1) + X(2)**2 - X(2) +
                X(3)**2 + X(3) + X(4)**2 - X(4) - 8   .LE.0

         G(2) = X(1)**2 - X(1) + 2*X(2)**2 + X(3)**2 +
                2*X(4)**2 - X(4) - 10                 .LE.0

         G(3) = 2*X(1)**2 + 2*X(1) + X(2)**2 - X(2) +
                X(3)**2 - X(4) - 5                    .LE.0

    This problem is solved beginning with an initial X-vector of
         X = (1.0, 1.0, 1.0, 1.0)
    The optimum design is known to be
         OBJ = 6.000
    and the corresponding X-vector is
         X = (0.0, 1.0, 2.0, -1.0)
    """

    # 1D design vector; g holds the three constraint values.
    x = Array(iotype='in', low=-10, high=99)
    g = Array([1., 1., 1.], iotype='out')
    result = Float(iotype='out')
    obj_string = Str(iotype='out')
    opt_objective = Float(iotype='out')

    # pylint: disable=C0103
    def __init__(self):
        super(OptRosenSuzukiComponent, self).__init__()
        self.x = numpy.array([1., 1., 1., 1.], dtype=float)
        self.result = 0.
        # The tests use the objective '10*comp.result', hence 6.0 * 10.
        self.opt_objective = 6.*10.0
        self.opt_design_vars = [0., 1., 2., -1.]

    def execute(self):
        """calculate the new objective value"""
        x = self.x
        self.result = (x[0]**2 - 5.*x[0] + x[1]**2 - 5.*x[1] +
                       2.*x[2]**2 - 21.*x[2] + x[3]**2 + 7.*x[3] + 50)
        self.obj_string = "Bad"
        #print "rosen", self.x
        # Constraint values; feasible when each is <= 0.
        self.g[0] = (x[0]**2 + x[0] + x[1]**2 - x[1] +
                     x[2]**2 + x[2] + x[3]**2 - x[3] - 8)
        self.g[1] = (x[0]**2 - x[0] + 2*x[1]**2 + x[2]**2 +
                     2*x[3]**2 - x[3] - 10)
        self.g[2] = (2*x[0]**2 + 2*x[0] + x[1]**2 - x[1] +
                     x[2]**2 - x[3] - 5)
        #print self.x, self.g
class RosenSuzuki2D(Component):
    """ RosenSuzuki with 2D input.

    Same objective as OptRosenSuzukiComponent, but the four design
    variables are laid out in a 2x2 array.
    """

    x = Array(iotype='in', low=-10, high=99)
    result = Float(iotype='out')
    opt_objective = Float(iotype='out')

    # pylint: disable=C0103
    def __init__(self):
        super(RosenSuzuki2D, self).__init__()
        self.x = numpy.array([[1., 1.], [1., 1.]], dtype=float)
        self.result = 0.
        # Optimum of the '10*comp.result' objective used by the tests.
        self.opt_objective = 6.*10.0
        self.opt_design_vars = [0., 1., 2., -1.]

    def execute(self):
        """calculate the new objective value"""
        self.result = (self.x[0][0]**2 - 5.*self.x[0][0] +
                       self.x[0][1]**2 - 5.*self.x[0][1] +
                       2.*self.x[1][0]**2 - 21.*self.x[1][0] +
                       self.x[1][1]**2 + 7.*self.x[1][1] + 50)
class RosenSuzukiMixed(Component):
    """ RosenSuzuki with mixed scalar and 1D inputs.

    The four design variables are split across two scalars (x0, x3)
    and a length-2 array (x12).
    """

    x0 = Float(iotype='in', low=-10, high=99)
    x12 = Array(iotype='in', low=-10, high=99)
    x3 = Float(iotype='in', low=-10, high=99)
    result = Float(iotype='out')
    opt_objective = Float(iotype='out')

    # pylint: disable=C0103
    def __init__(self):
        super(RosenSuzukiMixed, self).__init__()
        self.x0 = 1.
        self.x12 = numpy.array([1., 1.], dtype=float)
        self.x3 = 1.
        self.result = 0.
        # Optimum of the '10*comp.result' objective used by the tests.
        self.opt_objective = 6.*10.0
        self.opt_design_vars = [0., 1., 2., -1.]

    def execute(self):
        """calculate the new objective value"""
        self.result = (self.x0**2 - 5.*self.x0 +
                       self.x12[0]**2 - 5.*self.x12[0] +
                       2.*self.x12[1]**2 - 21.*self.x12[1] +
                       self.x3**2 + 7.*self.x3 + 50)
class CONMINdriverTestCase(unittest.TestCase):
    """Test the CONMIN optimizer driver against the Rosen-Suzuki problem.

    Fixes relative to the original:
    - test_linear_constraint_specification passed a float (1e-6) as the
      third positional argument of assertEqual, which is the *msg*
      parameter (the tolerance argument belongs to assertAlmostEqual);
      the values are integers, so plain assertEqual is used.
    - map() was being used purely for its side effects, which only works
      with Python 2's eager map(); explicit loops are used instead.
    """

    def setUp(self):
        self.top = set_as_top(Assembly())
        self.top.add('driver', CONMINdriver())
        self.top.add('comp', OptRosenSuzukiComponent())
        self.top.driver.workflow.add('comp')
        self.top.driver.iprint = 0
        self.top.driver.itmax = 30

    def test_opt1(self):
        # Run with scalar parameters, scalar constraints, and OpenMDAO gradient.
        self.top.driver.add_objective('10*comp.result')
        for param in ('comp.x[0]', 'comp.x[1]', 'comp.x[2]', 'comp.x[3]'):
            self.top.driver.add_parameter(param)
        # pylint: disable=C0301
        for con in (
                'comp.x[0]**2+comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2+comp.x[2]+comp.x[3]**2-comp.x[3] < 8',
                'comp.x[0]**2-comp.x[0]+2*comp.x[1]**2+comp.x[2]**2+2*comp.x[3]**2-comp.x[3] < 10',
                '2*comp.x[0]**2+2*comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2-comp.x[3] < 5'):
            self.top.driver.add_constraint(con)
        self.top.recorders = [ListCaseRecorder()]
        self.top.driver.iprint = 0
        self.top.run()

        # pylint: disable=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0], 0.05)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[2], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[3], 0.05)

        # The last recorded case should reflect the final state.
        cases = self.top.recorders[0].get_iterator()
        end_case = cases[-1]
        self.assertEqual(self.top.comp.x[1],
                         end_case.get_input('comp.x[1]'))
        self.assertEqual(10*self.top.comp.result,
                         end_case.get_output('_pseudo_0.out0'))

    def test_opt1_a(self):
        # Run with scalar parameters, 1D constraint, and OpenMDAO gradient.
        self.top.driver.add_objective('10*comp.result')
        for param in ('comp.x[0]', 'comp.x[1]', 'comp.x[2]', 'comp.x[3]'):
            self.top.driver.add_parameter(param)
        self.top.driver.add_constraint('comp.g <= 0')
        self.top.driver.iprint = 0
        self.top.run()

        # pylint: disable=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[2], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[3], 0.05)

    def test_opt1_with_CONMIN_gradient(self):
        # Note: all other tests use OpenMDAO gradient
        self.top.driver.add_objective('10*comp.result')
        self.top.driver.add_parameter('comp.x[0]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[1]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[2]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[3]', fd_step=.00001)
        # pylint: disable=C0301
        for con in (
                'comp.x[0]**2+comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2+comp.x[2]+comp.x[3]**2-comp.x[3] < 8',
                'comp.x[0]**2-comp.x[0]+2*comp.x[1]**2+comp.x[2]**2+2*comp.x[3]**2-comp.x[3] < 10',
                '2*comp.x[0]**2+2*comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2-comp.x[3] < 5'):
            self.top.driver.add_constraint(con)
        self.top.driver.conmin_diff = True
        self.top.run()

        # pylint: disable=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[2], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[3], 0.05)

    def test_opt1_with_CONMIN_gradient_a(self):
        # Scalar parameters, array constraint, CONMIN gradient.
        # Note: all other tests use OpenMDAO gradient
        self.top.driver.add_objective('10*comp.result')
        self.top.driver.add_parameter('comp.x[0]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[1]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[2]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[3]', fd_step=.00001)
        self.top.driver.add_constraint('comp.g <= 0')
        self.top.driver.conmin_diff = True
        self.top.run()

        # pylint: disable=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0], 0.05)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[2], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[3], 0.05)

    def test_opt1_flippedconstraints(self):
        # Same constraints expressed with the bound on the left side.
        self.top.driver.add_objective('10*comp.result')
        for param in ('comp.x[0]', 'comp.x[1]', 'comp.x[2]', 'comp.x[3]'):
            self.top.driver.add_parameter(param)
        # pylint: disable=C0301
        for con in (
                '8 > comp.x[0]**2+comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2+comp.x[2]+comp.x[3]**2-comp.x[3]',
                '10 > comp.x[0]**2-comp.x[0]+2*comp.x[1]**2+comp.x[2]**2+2*comp.x[3]**2-comp.x[3]',
                '5 > 2*comp.x[0]**2+2*comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2-comp.x[3]'):
            self.top.driver.add_constraint(con)
        self.top.run()

        # pylint: disable=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0], 0.05)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[2], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[3], 0.05)

    def test_gradient_step_size_large(self):
        # Test that a larger value of fd step-size is less acurate
        self.top.driver.add_objective('10*comp.result')
        for param in ('comp.x[0]', 'comp.x[1]', 'comp.x[2]', 'comp.x[3]'):
            self.top.driver.add_parameter(param)
        # pylint: disable=C0301
        for con in (
                'comp.x[0]**2+comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2+comp.x[2]+comp.x[3]**2-comp.x[3] < 8.',
                'comp.x[0]**2-comp.x[0]+2*comp.x[1]**2+comp.x[2]**2+2*comp.x[3]**2-comp.x[3] < 10.',
                '2*comp.x[0]**2+2*comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2-comp.x[3] < 5.'):
            self.top.driver.add_constraint(con)
        self.top.driver.conmin_diff = True
        self.top.driver.fdch = 1.0e-6
        self.top.driver.fdchm = 1.0e-6
        self.top.run()
        baseerror = abs(self.top.comp.opt_objective - self.top.driver.eval_objective())

        # Re-run from the initial point with a much coarser step size.
        self.top.driver.fdch = .3
        self.top.driver.fdchm = .3
        self.top.comp.x = numpy.array([1., 1., 1., 1.], dtype=float)
        self.top.run()
        newerror = abs(self.top.comp.opt_objective - self.top.driver.eval_objective())

        # pylint: disable=E1101
        if baseerror > newerror:
            self.fail("Coarsening CONMIN gradient step size did not make the objective worse.")

    def test_linear_constraint_specification(self):
        # Note, just testing problem specification and setup
        self.top.driver.add_objective('comp.result')
        for param in ('comp.x[0]', 'comp.x[1]', 'comp.x[2]', 'comp.x[3]'):
            self.top.driver.add_parameter(param)
        self.top.driver.add_constraint('comp.x[1] + 3.0*comp.x[2] > 3.0', linear=True)
        self.top.driver.add_constraint('comp.x[2] + comp.x[3] > 13.0', linear=True)
        self.top.driver.add_constraint('comp.x[1] - 0.73*comp.x[3]*comp.x[2] > -12.0', linear=False)
        self.top.driver.itmax = 1
        self.top.run()

        # Fixed: these are integer flags, so compare exactly.  The
        # original passed 1e-6 as assertEqual's *msg* parameter.
        self.assertEqual(self.top.driver._cons_is_linear[0], 1)
        self.assertEqual(self.top.driver._cons_is_linear[1], 1)
        self.assertEqual(self.top.driver._cons_is_linear[2], 0)

        lcons = self.top.driver.get_constraints(linear=True)
        self.assertTrue(len(lcons) == 2)
        self.assertTrue('comp.x[2]+comp.x[3]>13.0' in lcons)
        self.assertTrue('comp.x[1]-0.73*comp.x[3]*comp.x[2]>-12.0' not in lcons)
        lcons = self.top.driver.get_constraints(linear=False)
        self.assertTrue(len(lcons) == 1)
        self.assertTrue('comp.x[2]+comp.x[3]>13.0' not in lcons)
        self.assertTrue('comp.x[1]-0.73*comp.x[3]*comp.x[2]>-12.0' in lcons)

    def test_max_iteration(self):
        self.top.driver.add_objective('comp.result')
        for param in ('comp.x[0]', 'comp.x[1]', 'comp.x[2]', 'comp.x[3]'):
            self.top.driver.add_parameter(param)
        self.top.driver.nscal = -1
        self.top.driver.itmax = 2

        # pylint: disable=C0301
        self.top.run()

        # pylint: disable=E1101
        self.assertEqual(self.top.driver.iter_count, 2)

    def test_remove(self):
        # Removing the component must clear its params/constraints/objectives.
        self.top.driver.add_objective('comp.result')
        for param in ('comp.x[0]', 'comp.x[1]', 'comp.x[2]', 'comp.x[3]'):
            self.top.driver.add_parameter(param)
        # pylint: disable=C0301
        for con in (
                'comp.x[0]**2+comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2+comp.x[2]+comp.x[3]**2-comp.x[3] < 8',
                'comp.x[0]**2-comp.x[0]+2*comp.x[1]**2+comp.x[2]**2+2*comp.x[3]**2-comp.x[3] < 10',
                '2*comp.x[0]**2+2*comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2-comp.x[3] < 5'):
            self.top.driver.add_constraint(con)

        self.top.remove('comp')
        self.assertEqual(self.top.driver.list_param_targets(), [])
        self.assertEqual(self.top.driver.list_constraints(), [])
        self.assertEqual(self.top.driver.get_objectives(), {})

    def test_initial_run(self):
        # Test the fix that put run_iteration at the top
        # of the start_iteration method
        class MyComp(Component):

            x = Float(0.0, iotype='in', low=-10, high=10)
            xx = Float(0.0, iotype='in', low=-10, high=10)
            f_x = Float(iotype='out')
            y = Float(iotype='out')

            def execute(self):
                # Fails unless the subdriver has already set xx.
                if self.xx != 1.0:
                    self.raise_exception("Lazy", RuntimeError)
                self.f_x = 2.0*self.x
                self.y = self.x

        @add_delegate(HasParameters)
        class SpecialDriver(Driver):

            implements(IHasParameters)

            def execute(self):
                self.set_parameters([1.0])

        top = set_as_top(Assembly())
        top.add('comp', MyComp())
        top.add('driver', CONMINdriver())
        top.add('subdriver', SpecialDriver())
        top.driver.workflow.add('subdriver')
        top.subdriver.workflow.add('comp')

        top.subdriver.add_parameter('comp.xx')
        top.driver.add_parameter('comp.x')
        top.driver.add_constraint('comp.y > 1.0')
        top.driver.add_objective('comp.f_x')

        top.run()
class TestContainer(VariableTree):
    """VariableTree fixture for the vartree-parameter optimization test."""
    dummy1 = Float(desc='default value of 0.0') #this value is being grabbed by the optimizer
    dummy2 = Float(11.0)
class TestComponent(Component):
    """Quadratic in dummy_data.dummy1 with its minimum at dummy1 == 3."""
    dummy_data = VarTree(TestContainer(), iotype='in')
    x = Float(iotype='out')

    def execute(self):
        self.x = (self.dummy_data.dummy1-3)**2 - self.dummy_data.dummy2
class TestAssembly(Assembly):
    """Assembly wiring TestComponent to a CONMIN driver with a vartree
    leaf as the single design variable."""

    def configure(self):
        self.add('dummy_top', TestContainer())
        self.add('comp', TestComponent())
        self.add('driver', CONMINdriver())

        self.driver.workflow.add(['comp'])
        #self.driver.iprint = 4 #debug verbosity
        self.driver.add_objective('comp.x')
        self.driver.add_parameter('comp.dummy_data.dummy1', low=-10.0, high=10.0)
class CONMINdriverTestCase2(unittest.TestCase):
    """Checks that CONMIN can drive a VariableTree leaf parameter."""

    def test_vartree_opt(self):
        blah = set_as_top(TestAssembly())
        blah.run()
        self.assertAlmostEqual(blah.comp.dummy_data.dummy1, 3.0, 1) #3.0 should be minimum
class TestCase1D(unittest.TestCase):
    """Test using 1D array connections and 1D array constraint."""

    def setUp(self):
        self.top = set_as_top(Assembly())
        self.top.add('comp', OptRosenSuzukiComponent())
        driver = self.top.add('driver', CONMINdriver())
        driver.workflow.add('comp')
        driver.iprint = 0
        driver.itmax = 30
        driver.add_objective('10*comp.result')
        # The whole array is registered as a single parameter.
        driver.add_parameter('comp.x')

    def test_conmin_gradient_a(self):
        # Run with 1D parameter, 1D constraint, and CONMIN gradient.
        self.top.driver.add_constraint('comp.g <= 0')
        self.top.driver.conmin_diff = True
        self.top.driver.fdch = .000001
        self.top.driver.fdchm = .000001
        self.top.run()

        # pylint: disable=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0], 0.05)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[2], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[3], 0.05)

    def test_conmin_gradient_s(self):
        # Run with 1D parameter, scalar constraints, and CONMIN gradient.
        # pylint: disable=C0301
        # NOTE: map() here relies on Python 2's eager map() for its
        # side effect of registering each constraint.
        map(self.top.driver.add_constraint, [
            'comp.x[0]**2+comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2+comp.x[2]+comp.x[3]**2-comp.x[3] < 8',
            'comp.x[0]**2-comp.x[0]+2*comp.x[1]**2+comp.x[2]**2+2*comp.x[3]**2-comp.x[3] < 10',
            '2*comp.x[0]**2+2*comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2-comp.x[3] < 5'])
        self.top.driver.conmin_diff = True
        self.top.driver.fdch = .000001
        self.top.driver.fdchm = .000001
        self.top.run()

        # pylint: disable=E1101
        assert_rel_error(self, self.top.driver.eval_objective(),
                         self.top.comp.opt_objective,
                         0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0], 0.05)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[2], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[3], 0.05)

    def test_openmdao_gradient_a(self):
        # Run with 1D parameter, 1D constraint, and OpenMDAO gradient.
        self.top.driver.add_constraint('comp.g <= 0')
        self.top.driver.conmin_diff = False
        self.top.run()

        # pylint: disable=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0], 0.05)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[2], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[3], 0.05)

    def test_openmdao_gradient_s(self):
        # Run with 1D parameter, scalar constraints, and OpenMDAO gradient.
        # pylint: disable=C0301
        map(self.top.driver.add_constraint, [
            'comp.x[0]**2+comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2+comp.x[2]+comp.x[3]**2-comp.x[3] < 8',
            'comp.x[0]**2-comp.x[0]+2*comp.x[1]**2+comp.x[2]**2+2*comp.x[3]**2-comp.x[3] < 10',
            '2*comp.x[0]**2+2*comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2-comp.x[3] < 5'])
        self.top.driver.conmin_diff = False
        self.top.run()

        # pylint: disable=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0], 0.05)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[2], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[3], 0.05)
class TestCase2D(unittest.TestCase):
    """Test using 2D array connections."""

    def setUp(self):
        self.top = set_as_top(Assembly())
        self.top.add('comp', RosenSuzuki2D())
        driver = self.top.add('driver', CONMINdriver())
        driver.workflow.add('comp')
        driver.iprint = 0
        driver.itmax = 30
        driver.add_objective('10*comp.result')
        # The full 2x2 array is a single parameter.
        driver.add_parameter('comp.x')
        # pylint: disable=C0301
        # NOTE: map() used for its side effects (Python 2 eager map).
        map(driver.add_constraint, [
            'comp.x[0][0]**2+comp.x[0][0]+comp.x[0][1]**2-comp.x[0][1]+comp.x[1][0]**2+comp.x[1][0]+comp.x[1][1]**2-comp.x[1][1] < 8',
            'comp.x[0][0]**2-comp.x[0][0]+2*comp.x[0][1]**2+comp.x[1][0]**2+2*comp.x[1][1]**2-comp.x[1][1] < 10',
            '2*comp.x[0][0]**2+2*comp.x[0][0]+comp.x[0][1]**2-comp.x[0][1]+comp.x[1][0]**2-comp.x[1][1] < 5'])

    def test_conmin_gradient(self):
        # Run with 2D parameter and CONMIN gradient.
        self.top.driver.conmin_diff = True
        self.top.run()

        # pylint: disable=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0][0], 0.05)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[0][1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[1][0], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[1][1], 0.05)

    def test_openmdao_gradient(self):
        # Run with 2D parameter and OpenMDAO gradient.
        self.top.driver.conmin_diff = False
        self.top.run()

        # pylint: disable=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0][0], 0.05)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[0][1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[1][0], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[1][1], 0.05)
class TestCaseMixed(unittest.TestCase):
    """Test using mixed scalar and 1D connections."""

    def setUp(self):
        self.top = set_as_top(Assembly())
        self.top.add('comp', RosenSuzukiMixed())
        driver = self.top.add('driver', CONMINdriver())
        driver.workflow.add('comp')
        driver.iprint = 0
        driver.itmax = 30
        driver.add_objective('10*comp.result')
        # Two scalars plus one length-2 array parameter.
        # NOTE: map() used for its side effects (Python 2 eager map).
        map(driver.add_parameter, ['comp.x0', 'comp.x12', 'comp.x3'])
        # pylint: disable=C0301
        map(driver.add_constraint, [
            'comp.x0**2+comp.x0+comp.x12[0]**2-comp.x12[0]+comp.x12[1]**2+comp.x12[1]+comp.x3**2-comp.x3 < 8',
            'comp.x0**2-comp.x0+2*comp.x12[0]**2+comp.x12[1]**2+2*comp.x3**2-comp.x3 < 10',
            '2*comp.x0**2+2*comp.x0+comp.x12[0]**2-comp.x12[0]+comp.x12[1]**2-comp.x3 < 5'])

    def test_conmin_gradient(self):
        # Run with mixed parameters and CONMIN gradient.
        self.top.driver.conmin_diff = True
        self.top.run()

        # pylint: disable=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x0, 0.05)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x12[0], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x12[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x3, 0.05)

    def test_openmdao_gradient(self):
        # Run with mixed parameters and OpenMDAO gradient.
        self.top.driver.conmin_diff = False
        self.top.run()

        # pylint: disable=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x0, 0.05)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x12[0], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x12[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x3, 0.05)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
| HyperloopTeam/FullOpenMDAO | lib/python2.7/site-packages/openmdao.lib-0.13.0-py2.7.egg/openmdao/lib/drivers/test/test_opt_conmin.py | Python | gpl-2.0 | 27,188 |
import unittest
from pyramid import testing
class ViewTests(unittest.TestCase):
    """Exercise the package's view callables against a dummy Pyramid request."""

    def setUp(self):
        # Fresh, isolated Pyramid configuration/registry per test.
        self.config = testing.setUp()

    def tearDown(self):
        # Discard the per-test configuration/registry.
        testing.tearDown()

    def test_my_view(self):
        # Imported here (not at module level) so test collection does not
        # require the views module to be importable.
        from .views import my_view
        response = my_view(testing.DummyRequest())
        self.assertEqual(response['project'], 'zodiacbauth')
| kellazo/zodiacbauth | src/zodiacbauth/zodiacbauth/tests.py | Python | gpl-2.0 | 388 |
# -*- coding: utf-8 -*-
#############################################################
# This file was automatically generated on 2022-01-18. #
# #
# Python Bindings Version 2.1.29 #
# #
# If you have a bugfix for this file and want to commit it, #
# please fix the bug in the generator. You can find a link #
# to the generators git repository on tinkerforge.com #
#############################################################
from collections import namedtuple
try:
from .ip_connection import Device, IPConnection, Error, create_char, create_char_list, create_string, create_chunk_data
except (ValueError, ImportError):
from ip_connection import Device, IPConnection, Error, create_char, create_char_list, create_string, create_chunk_data
GetPositionCallbackConfiguration = namedtuple('PositionCallbackConfiguration', ['period', 'value_has_to_change', 'option', 'min', 'max'])
GetSPITFPErrorCount = namedtuple('SPITFPErrorCount', ['error_count_ack_checksum', 'error_count_message_checksum', 'error_count_frame', 'error_count_overflow'])
GetIdentity = namedtuple('Identity', ['uid', 'connected_uid', 'position', 'hardware_version', 'firmware_version', 'device_identifier'])
class BrickletLinearPotiV2(Device):
    """
    59mm linear potentiometer
    """

    # NOTE: this class is auto-generated (see the file header); behavioral
    # fixes belong in the generator, not here.
    DEVICE_IDENTIFIER = 2139
    DEVICE_DISPLAY_NAME = 'Linear Poti Bricklet 2.0'
    DEVICE_URL_PART = 'linear_poti_v2' # internal

    # ID of the only callback this Bricklet offers (see register_callback).
    CALLBACK_POSITION = 4

    # Protocol function IDs used by the request methods below.
    FUNCTION_GET_POSITION = 1
    FUNCTION_SET_POSITION_CALLBACK_CONFIGURATION = 2
    FUNCTION_GET_POSITION_CALLBACK_CONFIGURATION = 3
    FUNCTION_GET_SPITFP_ERROR_COUNT = 234
    FUNCTION_SET_BOOTLOADER_MODE = 235
    FUNCTION_GET_BOOTLOADER_MODE = 236
    FUNCTION_SET_WRITE_FIRMWARE_POINTER = 237
    FUNCTION_WRITE_FIRMWARE = 238
    FUNCTION_SET_STATUS_LED_CONFIG = 239
    FUNCTION_GET_STATUS_LED_CONFIG = 240
    FUNCTION_GET_CHIP_TEMPERATURE = 242
    FUNCTION_RESET = 243
    FUNCTION_WRITE_UID = 248
    FUNCTION_READ_UID = 249
    FUNCTION_GET_IDENTITY = 255

    # Threshold options for the position callback configuration.
    THRESHOLD_OPTION_OFF = 'x'
    THRESHOLD_OPTION_OUTSIDE = 'o'
    THRESHOLD_OPTION_INSIDE = 'i'
    THRESHOLD_OPTION_SMALLER = '<'
    THRESHOLD_OPTION_GREATER = '>'

    # Modes accepted/returned by set_bootloader_mode / get_bootloader_mode.
    BOOTLOADER_MODE_BOOTLOADER = 0
    BOOTLOADER_MODE_FIRMWARE = 1
    BOOTLOADER_MODE_BOOTLOADER_WAIT_FOR_REBOOT = 2
    BOOTLOADER_MODE_FIRMWARE_WAIT_FOR_REBOOT = 3
    BOOTLOADER_MODE_FIRMWARE_WAIT_FOR_ERASE_AND_REBOOT = 4

    # Status codes returned by set_bootloader_mode.
    BOOTLOADER_STATUS_OK = 0
    BOOTLOADER_STATUS_INVALID_MODE = 1
    BOOTLOADER_STATUS_NO_CHANGE = 2
    BOOTLOADER_STATUS_ENTRY_FUNCTION_NOT_PRESENT = 3
    BOOTLOADER_STATUS_DEVICE_IDENTIFIER_INCORRECT = 4
    BOOTLOADER_STATUS_CRC_MISMATCH = 5

    # LED configurations for set_status_led_config / get_status_led_config.
    STATUS_LED_CONFIG_OFF = 0
    STATUS_LED_CONFIG_ON = 1
    STATUS_LED_CONFIG_SHOW_HEARTBEAT = 2
    STATUS_LED_CONFIG_SHOW_STATUS = 3

    def __init__(self, uid, ipcon):
        """
        Creates an object with the unique device ID *uid* and adds it to
        the IP Connection *ipcon*.
        """
        Device.__init__(self, uid, ipcon, BrickletLinearPotiV2.DEVICE_IDENTIFIER, BrickletLinearPotiV2.DEVICE_DISPLAY_NAME)

        self.api_version = (2, 0, 0)

        # Declare, per function ID, whether a response is expected; the IP
        # connection uses this for its request/response bookkeeping.
        self.response_expected[BrickletLinearPotiV2.FUNCTION_GET_POSITION] = BrickletLinearPotiV2.RESPONSE_EXPECTED_ALWAYS_TRUE
        self.response_expected[BrickletLinearPotiV2.FUNCTION_SET_POSITION_CALLBACK_CONFIGURATION] = BrickletLinearPotiV2.RESPONSE_EXPECTED_TRUE
        self.response_expected[BrickletLinearPotiV2.FUNCTION_GET_POSITION_CALLBACK_CONFIGURATION] = BrickletLinearPotiV2.RESPONSE_EXPECTED_ALWAYS_TRUE
        self.response_expected[BrickletLinearPotiV2.FUNCTION_GET_SPITFP_ERROR_COUNT] = BrickletLinearPotiV2.RESPONSE_EXPECTED_ALWAYS_TRUE
        self.response_expected[BrickletLinearPotiV2.FUNCTION_SET_BOOTLOADER_MODE] = BrickletLinearPotiV2.RESPONSE_EXPECTED_ALWAYS_TRUE
        self.response_expected[BrickletLinearPotiV2.FUNCTION_GET_BOOTLOADER_MODE] = BrickletLinearPotiV2.RESPONSE_EXPECTED_ALWAYS_TRUE
        self.response_expected[BrickletLinearPotiV2.FUNCTION_SET_WRITE_FIRMWARE_POINTER] = BrickletLinearPotiV2.RESPONSE_EXPECTED_FALSE
        self.response_expected[BrickletLinearPotiV2.FUNCTION_WRITE_FIRMWARE] = BrickletLinearPotiV2.RESPONSE_EXPECTED_ALWAYS_TRUE
        self.response_expected[BrickletLinearPotiV2.FUNCTION_SET_STATUS_LED_CONFIG] = BrickletLinearPotiV2.RESPONSE_EXPECTED_FALSE
        self.response_expected[BrickletLinearPotiV2.FUNCTION_GET_STATUS_LED_CONFIG] = BrickletLinearPotiV2.RESPONSE_EXPECTED_ALWAYS_TRUE
        self.response_expected[BrickletLinearPotiV2.FUNCTION_GET_CHIP_TEMPERATURE] = BrickletLinearPotiV2.RESPONSE_EXPECTED_ALWAYS_TRUE
        self.response_expected[BrickletLinearPotiV2.FUNCTION_RESET] = BrickletLinearPotiV2.RESPONSE_EXPECTED_FALSE
        self.response_expected[BrickletLinearPotiV2.FUNCTION_WRITE_UID] = BrickletLinearPotiV2.RESPONSE_EXPECTED_FALSE
        self.response_expected[BrickletLinearPotiV2.FUNCTION_READ_UID] = BrickletLinearPotiV2.RESPONSE_EXPECTED_ALWAYS_TRUE
        self.response_expected[BrickletLinearPotiV2.FUNCTION_GET_IDENTITY] = BrickletLinearPotiV2.RESPONSE_EXPECTED_ALWAYS_TRUE

        # Position callback payload: 9-byte frame carrying one uint8 value.
        self.callback_formats[BrickletLinearPotiV2.CALLBACK_POSITION] = (9, 'B')

        ipcon.add_device(self)

    def get_position(self):
        """
        Returns the position of the linear potentiometer. The value is
        between 0% (slider down) and 100% (slider up).

        If you want to get the value periodically, it is recommended to use the
        :cb:`Position` callback. You can set the callback configuration
        with :func:`Set Position Callback Configuration`.
        """
        self.check_validity()

        return self.ipcon.send_request(self, BrickletLinearPotiV2.FUNCTION_GET_POSITION, (), '', 9, 'B')

    def set_position_callback_configuration(self, period, value_has_to_change, option, min, max):
        """
        The period is the period with which the :cb:`Position` callback is triggered
        periodically. A value of 0 turns the callback off.

        If the `value has to change`-parameter is set to true, the callback is only
        triggered after the value has changed. If the value didn't change
        within the period, the callback is triggered immediately on change.

        If it is set to false, the callback is continuously triggered with the period,
        independent of the value.

        It is furthermore possible to constrain the callback with thresholds.

        The `option`-parameter together with min/max sets a threshold for the :cb:`Position` callback.

        The following options are possible:

        .. csv-table::
         :header: "Option", "Description"
         :widths: 10, 100

         "'x'",    "Threshold is turned off"
         "'o'",    "Threshold is triggered when the value is *outside* the min and max values"
         "'i'",    "Threshold is triggered when the value is *inside* or equal to the min and max values"
         "'<'",    "Threshold is triggered when the value is smaller than the min value (max is ignored)"
         "'>'",    "Threshold is triggered when the value is greater than the min value (max is ignored)"

        If the option is set to 'x' (threshold turned off) the callback is triggered with the fixed period.
        """
        self.check_validity()

        # Coerce arguments to the wire types; 'min'/'max' shadow the builtins
        # but are kept to match the generated public API.
        period = int(period)
        value_has_to_change = bool(value_has_to_change)
        option = create_char(option)
        min = int(min)
        max = int(max)

        self.ipcon.send_request(self, BrickletLinearPotiV2.FUNCTION_SET_POSITION_CALLBACK_CONFIGURATION, (period, value_has_to_change, option, min, max), 'I ! c B B', 0, '')

    def get_position_callback_configuration(self):
        """
        Returns the callback configuration as set by :func:`Set Position Callback Configuration`.
        """
        self.check_validity()

        return GetPositionCallbackConfiguration(*self.ipcon.send_request(self, BrickletLinearPotiV2.FUNCTION_GET_POSITION_CALLBACK_CONFIGURATION, (), '', 16, 'I ! c B B'))

    def get_spitfp_error_count(self):
        """
        Returns the error count for the communication between Brick and Bricklet.

        The errors are divided into

        * ACK checksum errors,
        * message checksum errors,
        * framing errors and
        * overflow errors.

        The errors counts are for errors that occur on the Bricklet side. All
        Bricks have a similar function that returns the errors on the Brick side.
        """
        self.check_validity()

        return GetSPITFPErrorCount(*self.ipcon.send_request(self, BrickletLinearPotiV2.FUNCTION_GET_SPITFP_ERROR_COUNT, (), '', 24, 'I I I I'))

    def set_bootloader_mode(self, mode):
        """
        Sets the bootloader mode and returns the status after the requested
        mode change was instigated.

        You can change from bootloader mode to firmware mode and vice versa. A change
        from bootloader mode to firmware mode will only take place if the entry function,
        device identifier and CRC are present and correct.

        This function is used by Brick Viewer during flashing. It should not be
        necessary to call it in a normal user program.
        """
        self.check_validity()

        mode = int(mode)

        return self.ipcon.send_request(self, BrickletLinearPotiV2.FUNCTION_SET_BOOTLOADER_MODE, (mode,), 'B', 9, 'B')

    def get_bootloader_mode(self):
        """
        Returns the current bootloader mode, see :func:`Set Bootloader Mode`.
        """
        self.check_validity()

        return self.ipcon.send_request(self, BrickletLinearPotiV2.FUNCTION_GET_BOOTLOADER_MODE, (), '', 9, 'B')

    def set_write_firmware_pointer(self, pointer):
        """
        Sets the firmware pointer for :func:`Write Firmware`. The pointer has
        to be increased by chunks of size 64. The data is written to flash
        every 4 chunks (which equals to one page of size 256).

        This function is used by Brick Viewer during flashing. It should not be
        necessary to call it in a normal user program.
        """
        self.check_validity()

        pointer = int(pointer)

        self.ipcon.send_request(self, BrickletLinearPotiV2.FUNCTION_SET_WRITE_FIRMWARE_POINTER, (pointer,), 'I', 0, '')

    def write_firmware(self, data):
        """
        Writes 64 Bytes of firmware at the position as written by
        :func:`Set Write Firmware Pointer` before. The firmware is written
        to flash every 4 chunks.

        You can only write firmware in bootloader mode.

        This function is used by Brick Viewer during flashing. It should not be
        necessary to call it in a normal user program.
        """
        self.check_validity()

        data = list(map(int, data))

        return self.ipcon.send_request(self, BrickletLinearPotiV2.FUNCTION_WRITE_FIRMWARE, (data,), '64B', 9, 'B')

    def set_status_led_config(self, config):
        """
        Sets the status LED configuration. By default the LED shows
        communication traffic between Brick and Bricklet, it flickers once
        for every 10 received data packets.

        You can also turn the LED permanently on/off or show a heartbeat.

        If the Bricklet is in bootloader mode, the LED is will show heartbeat by default.
        """
        self.check_validity()

        config = int(config)

        self.ipcon.send_request(self, BrickletLinearPotiV2.FUNCTION_SET_STATUS_LED_CONFIG, (config,), 'B', 0, '')

    def get_status_led_config(self):
        """
        Returns the configuration as set by :func:`Set Status LED Config`
        """
        self.check_validity()

        return self.ipcon.send_request(self, BrickletLinearPotiV2.FUNCTION_GET_STATUS_LED_CONFIG, (), '', 9, 'B')

    def get_chip_temperature(self):
        """
        Returns the temperature as measured inside the microcontroller. The
        value returned is not the ambient temperature!

        The temperature is only proportional to the real temperature and it has bad
        accuracy. Practically it is only useful as an indicator for
        temperature changes.
        """
        self.check_validity()

        return self.ipcon.send_request(self, BrickletLinearPotiV2.FUNCTION_GET_CHIP_TEMPERATURE, (), '', 10, 'h')

    def reset(self):
        """
        Calling this function will reset the Bricklet. All configurations
        will be lost.

        After a reset you have to create new device objects,
        calling functions on the existing ones will result in
        undefined behavior!
        """
        self.check_validity()

        self.ipcon.send_request(self, BrickletLinearPotiV2.FUNCTION_RESET, (), '', 0, '')

    def write_uid(self, uid):
        """
        Writes a new UID into flash. If you want to set a new UID
        you have to decode the Base58 encoded UID string into an
        integer first.

        We recommend that you use Brick Viewer to change the UID.
        """
        self.check_validity()

        uid = int(uid)

        self.ipcon.send_request(self, BrickletLinearPotiV2.FUNCTION_WRITE_UID, (uid,), 'I', 0, '')

    def read_uid(self):
        """
        Returns the current UID as an integer. Encode as
        Base58 to get the usual string version.
        """
        self.check_validity()

        return self.ipcon.send_request(self, BrickletLinearPotiV2.FUNCTION_READ_UID, (), '', 12, 'I')

    def get_identity(self):
        """
        Returns the UID, the UID where the Bricklet is connected to,
        the position, the hardware and firmware version as well as the
        device identifier.

        The position can be 'a', 'b', 'c', 'd', 'e', 'f', 'g' or 'h' (Bricklet Port).
        A Bricklet connected to an :ref:`Isolator Bricklet <isolator_bricklet>` is always at
        position 'z'.

        The device identifier numbers can be found :ref:`here <device_identifier>`.
        |device_identifier_constant|
        """
        # Unlike the other request methods above, this one does not call
        # check_validity() before sending.
        return GetIdentity(*self.ipcon.send_request(self, BrickletLinearPotiV2.FUNCTION_GET_IDENTITY, (), '', 33, '8s 8s c 3B 3B H'))

    def register_callback(self, callback_id, function):
        """
        Registers the given *function* with the given *callback_id*.
        """
        # Passing None unregisters any previously registered function.
        if function is None:
            self.registered_callbacks.pop(callback_id, None)
        else:
            self.registered_callbacks[callback_id] = function
# Module-level alias retained so older code importing LinearPotiV2 keeps working.
LinearPotiV2 = BrickletLinearPotiV2 # for backward compatibility
| Tinkerforge/brickv | src/brickv/bindings/bricklet_linear_poti_v2.py | Python | gpl-2.0 | 14,625 |
import unittest
import numpy
from deltaphi.category_info import RawCategoryInfo, CategoryGroup, CategoryInfoFactory
from deltaphi.fake_entities import FakeCategoryInfo
__author__ = 'Emanuele Tamponi'
class TestCategoryGroup(unittest.TestCase):
    """Tests for CategoryGroup: parent merging, sibling iteration, leaf listing."""

    def setUp(self):
        # Factory bound to a fixed three-term vocabulary.
        self.builder = CategoryInfoFactory({"a", "b", "c"})

    def _build(self, name, documents, term_counts):
        # Shorthand for constructing a CategoryInfo from raw counts.
        return self.builder.build(RawCategoryInfo(name, documents, term_counts))

    def test_build_parent_pairwise(self):
        first = self._build("C1", 100, {"a": 50, "c": 80})
        second = self._build("C2", 80, {"b": 40, "c": 20})
        parent = CategoryGroup([first, second]).build_parent_info()
        numpy.testing.assert_array_equal([50, 40, 100], parent.frequencies)
        self.assertEqual("(C1+C2)", parent.category)
        self.assertEqual(180, parent.documents)
        self.assertEqual(CategoryGroup([first, second]), parent.child_group)

    def test_build_parent_multiple(self):
        first = self._build("C1", 100, {"a": 50, "c": 80})
        second = self._build("C2", 80, {"b": 40, "c": 20})
        third = self._build("C3", 130, {"a": 20, "b": 20, "c": 30})
        parent = CategoryGroup([first, second, third]).build_parent_info()
        numpy.testing.assert_array_equal([70, 60, 130], parent.frequencies)
        self.assertEqual("(C1+C2+C3)", parent.category)
        self.assertEqual(310, parent.documents)
        self.assertEqual(CategoryGroup([first, second, third]), parent.child_group)

    def test_hierarchical_build_node(self):
        first = self._build("C1", 100, {"a": 50, "c": 80})
        second = self._build("C2", 80, {"b": 40, "c": 20})
        third = self._build("C3", 130, {"a": 20, "b": 20, "c": 30})
        inner = CategoryGroup([first, second]).build_parent_info()
        root = CategoryGroup([third, inner]).build_parent_info()
        numpy.testing.assert_array_equal([70, 60, 130], root.frequencies)
        self.assertEqual("((C1+C2)+C3)", root.category)
        self.assertEqual(310, root.documents)
        self.assertEqual(CategoryGroup([third, inner]), root.child_group)

    def test_category_group_one_vs_siblings(self):
        first = FakeCategoryInfo("C1", 4)
        second = FakeCategoryInfo("C2", 4)
        third = FakeCategoryInfo("C3", 4)
        # Each member paired against the merged parent of its siblings.
        expected_pairs = [
            (first, CategoryGroup([second, third]).build_parent_info()),
            (second, CategoryGroup([first, third]).build_parent_info()),
            (third, CategoryGroup([first, second]).build_parent_info()),
        ]
        group = CategoryGroup([first, second, third])
        for expected_pair, actual_pair in zip(expected_pairs, group.one_vs_siblings()):
            self.assertEqual(expected_pair[0], actual_pair[0])
            self.assertEqual(expected_pair[1], actual_pair[1])

    def test_leafs(self):
        leaves = [FakeCategoryInfo(name, 4) for name in ("C1", "C2", "C3", "C4")]
        first, second, third, fourth = leaves
        left_parent = CategoryGroup([first, second]).build_parent_info()
        right_parent = CategoryGroup([third, fourth]).build_parent_info()
        self.assertEqual([first, second], CategoryGroup([left_parent]).leafs())
        self.assertEqual([third, fourth], CategoryGroup([right_parent]).leafs())
        combined = CategoryGroup([left_parent, right_parent])
        self.assertEqual(leaves, combined.leafs())
        # Wrapping once more in a parent still exposes the same leaves.
        combined = CategoryGroup([combined.build_parent_info()])
        self.assertEqual(leaves, combined.leafs())
| etamponi/taxonomy-generator | deltaphi/test_category_group.py | Python | gpl-2.0 | 3,675 |
"""This module tests events that are invoked by Cloud/Infra VMs."""
import fauxfactory
import pytest
from cfme import test_requirements
from cfme.cloud.provider import CloudProvider
from cfme.cloud.provider.gce import GCEProvider
from cfme.control.explorer.policies import VMControlPolicy
from cfme.infrastructure.provider import InfraProvider
from cfme.infrastructure.provider.kubevirt import KubeVirtProvider
from cfme.markers.env_markers.provider import providers
from cfme.utils.providers import ProviderFilter
from cfme.utils.wait import wait_for
# Any infra/cloud provider that declares both provisioning and event data.
all_prov = ProviderFilter(classes=[InfraProvider, CloudProvider],
                          required_fields=['provisioning', 'events'])
# Inverted filter: excludes KubeVirt providers from the matched set.
excluded = ProviderFilter(classes=[KubeVirtProvider], inverted=True)
# Module-wide pytest marks: required fixtures, tier, provider parametrization.
pytestmark = [
    pytest.mark.usefixtures('uses_infra_providers', 'uses_cloud_providers'),
    pytest.mark.tier(2),
    pytest.mark.provider(gen_func=providers, filters=[all_prov, excluded],
                         scope='module'),
    test_requirements.events,
]
@pytest.fixture(scope="function")
def vm_crud(provider, setup_provider_modscope, small_template_modscope):
    """Yield a VM/instance entity for the provider; delete it on teardown."""
    # GCE gets a dashed prefix, other providers an underscored one.
    if provider.one_of(GCEProvider):
        prefix = 'test-events-'
    else:
        prefix = 'test_events_'
    vm_name = fauxfactory.gen_alpha(20, start=prefix).lower()
    collection = provider.appliance.provider_based_collection(provider)
    entity = collection.instantiate(vm_name, provider,
                                    template_name=small_template_modscope.name)
    yield entity
    entity.cleanup_on_provider()
@pytest.mark.rhv2
def test_vm_create(request, appliance, vm_crud, provider, register_event):
    """ Test whether vm_create_complete event is emitted.

    Prerequisities:
        * A provider that is set up and able to deploy VMs

    Steps:
        * Create a Control setup (action, policy, profile) that apply a tag on a VM when
            ``VM Create Complete`` event comes
        * Deploy the VM outside of CFME (directly in the provider)
        * Refresh provider relationships and wait for VM to appear
        * Assert the tag appears.

    Metadata:
        test_flag: provision, events

    Polarion:
        assignee: jdupuy
        casecomponent: Events
        caseimportance: high
        initialEstimate: 1/8h
    """
    # Action that tags the VM; removed again when the test ends.
    tag_action = appliance.collections.actions.create(
        fauxfactory.gen_alpha(),
        "Tag",
        dict(tag=("My Company Tags", "Environment", "Development")))
    request.addfinalizer(tag_action.delete)

    # Control policy that reacts to the VM-creation event.
    control_policy = appliance.collections.policies.create(
        VMControlPolicy,
        fauxfactory.gen_alpha()
    )
    request.addfinalizer(control_policy.delete)
    control_policy.assign_events("VM Create Complete")

    @request.addfinalizer
    def _unassign_events():
        control_policy.unassign_events("VM Create Complete")

    control_policy.assign_actions_to_event("VM Create Complete", tag_action)

    # Profile wrapping the policy, assigned to the provider under test.
    policy_profile = appliance.collections.policy_profiles.create(
        fauxfactory.gen_alpha(), policies=[control_policy])
    request.addfinalizer(policy_profile.delete)
    provider.assign_policy_profiles(policy_profile.description)
    request.addfinalizer(lambda: provider.unassign_policy_profiles(policy_profile.description))

    register_event(target_type='VmOrTemplate', target_name=vm_crud.name, event_type='vm_create')

    vm_crud.create_on_provider(find_in_cfme=True)

    def _tag_assigned():
        return any(tag.category.display_name == "Environment" and tag.display_name == "Development"
                   for tag in vm_crud.get_tags())

    wait_for(_tag_assigned, num_sec=300, delay=15, message="tags to appear")
| izapolsk/integration_tests | cfme/tests/cloud_infra_common/test_events.py | Python | gpl-2.0 | 3,523 |
# Copyright 2007-2010 by Peter Cock. All rights reserved.
# Revisions copyright 2010 by Uri Laserson. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
#
# This code is NOT intended for direct use. It provides a basic scanner
# (for use with a event consumer such as Bio.GenBank._FeatureConsumer)
# to parse a GenBank or EMBL file (with their shared INSDC feature table).
#
# It is used by Bio.GenBank to parse GenBank files
# It is also used by Bio.SeqIO to parse GenBank and EMBL files
#
# Feature Table Documentation:
# http://www.insdc.org/files/feature_table.html
# http://www.ncbi.nlm.nih.gov/projects/collab/FT/index.html
# ftp://ftp.ncbi.nih.gov/genbank/docs/
#
# 17-MAR-2009: added wgs, wgs_scafld for GenBank whole genome shotgun master records.
# These are GenBank files that summarize the content of a project, and provide lists of
# scaffold and contig files in the project. These will be in annotations['wgs'] and
# annotations['wgs_scafld']. These GenBank files do not have sequences. See
# http://groups.google.com/group/bionet.molbio.genbank/browse_thread/thread/51fb88bf39e7dc36
# http://is.gd/nNgk
# for more details of this format, and an example.
# Added by Ying Huang & Iddo Friedberg
import warnings
import os
import re
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from Bio.Alphabet import generic_alphabet, generic_protein
class InsdcScanner:
    """Basic functions for breaking up a GenBank/EMBL file into sub sections.

    The International Nucleotide Sequence Database Collaboration (INSDC)
    between the DDBJ, EMBL, and GenBank. These organisations all use the
    same "Feature Table" layout in their plain text flat file formats.

    However, the header and sequence sections of an EMBL file are very
    different in layout to those produced by GenBank/DDBJ."""

    #These constants get redefined with sensible values in the sub classes;
    #the "XXX" placeholders deliberately never match real file content.
    RECORD_START = "XXX" # "LOCUS " or "ID "
    HEADER_WIDTH = 3 # 12 or 5
    FEATURE_START_MARKERS = ["XXX***FEATURES***XXX"]
    FEATURE_END_MARKERS = ["XXX***END FEATURES***XXX"]
    FEATURE_QUALIFIER_INDENT = 0
    FEATURE_QUALIFIER_SPACER = ""
    SEQUENCE_HEADERS=["XXX"] #with right hand side spaces removed
    def __init__(self, debug=0):
        # Sanity-check that the subclass constants agree with each other;
        # the slicing logic in the parse methods relies on these invariants.
        assert len(self.RECORD_START)==self.HEADER_WIDTH
        for marker in self.SEQUENCE_HEADERS:
            assert marker==marker.rstrip()
        assert len(self.FEATURE_QUALIFIER_SPACER)==self.FEATURE_QUALIFIER_INDENT
        self.debug = debug
        # One-line look-ahead buffer; populated by set_handle()/the parsers.
        self.line = None
    def set_handle(self, handle):
        """Store the file handle to parse and reset the look-ahead buffer."""
        self.handle = handle
        self.line = ""
    def find_start(self):
        """Read in lines until find the ID/LOCUS line, which is returned.

        Any preamble (such as the header used by the NCBI on *.seq.gz archives)
        will be ignored. Returns None at end of file."""
        while True:
            # Consume any pending pushed-back line before reading new input.
            if self.line:
                line = self.line
                self.line = ""
            else:
                line = self.handle.readline()
            if not line:
                if self.debug : print "End of file"
                return None
            if line[:self.HEADER_WIDTH]==self.RECORD_START:
                if self.debug > 1: print "Found the start of a record:\n" + line
                break
            line = line.rstrip()
            if line == "//":
                if self.debug > 1: print "Skipping // marking end of last record"
            elif line == "":
                if self.debug > 1: print "Skipping blank line before record"
            else:
                #Ignore any header before the first ID/LOCUS line.
                if self.debug > 1:
                    print "Skipping header line before record:\n" + line
        # Push the ID/LOCUS line back so parse_header() can assert on it.
        self.line = line
        return line
    def parse_header(self):
        """Return list of strings making up the header

        New line characters are removed.

        Assumes you have just read in the ID/LOCUS line.
        """
        assert self.line[:self.HEADER_WIDTH]==self.RECORD_START, \
               "Not at start of record"

        header_lines = []
        while True:
            line = self.handle.readline()
            if not line:
                raise ValueError("Premature end of line during sequence data")
            line = line.rstrip()
            if line in self.FEATURE_START_MARKERS:
                if self.debug : print "Found header table"
                break
            #if line[:self.HEADER_WIDTH]==self.FEATURE_START_MARKER[:self.HEADER_WIDTH]:
            #    if self.debug : print "Found header table (?)"
            #    break
            if line[:self.HEADER_WIDTH].rstrip() in self.SEQUENCE_HEADERS:
                if self.debug : print "Found start of sequence"
                break
            if line == "//":
                raise ValueError("Premature end of sequence data marker '//' found")
            header_lines.append(line)
        # Stash the terminating line so parse_features()/parse_footer()
        # can inspect it next.
        self.line = line
        return header_lines
    def parse_features(self, skip=False):
        """Return list of tuples for the features (if present)

        Each feature is returned as a tuple (key, location, qualifiers)
        where key and location are strings (e.g. "CDS" and
        "complement(join(490883..490885,1..879))") while qualifiers
        is a list of two string tuples (feature qualifier keys and values).

        If skip is true the feature lines are read and discarded and an
        empty list is returned.

        Assumes you have already read to the start of the features table.
        """
        if self.line.rstrip() not in self.FEATURE_START_MARKERS:
            if self.debug : print "Didn't find any feature table"
            return []

        while self.line.rstrip() in self.FEATURE_START_MARKERS:
            self.line = self.handle.readline()

        features = []
        line = self.line
        while True:
            if not line:
                raise ValueError("Premature end of line during features table")
            if line[:self.HEADER_WIDTH].rstrip() in self.SEQUENCE_HEADERS:
                if self.debug : print "Found start of sequence"
                break
            line = line.rstrip()
            if line == "//":
                raise ValueError("Premature end of features table, marker '//' found")
            if line in self.FEATURE_END_MARKERS:
                if self.debug : print "Found end of features"
                line = self.handle.readline()
                break
            if line[2:self.FEATURE_QUALIFIER_INDENT].strip() == "":
                #This is an empty feature line between qualifiers. Empty
                #feature lines within qualifiers are handled below (ignored).
                line = self.handle.readline()
                continue

            if skip:
                # Discard this feature: read past its qualifier lines.
                line = self.handle.readline()
                while line[:self.FEATURE_QUALIFIER_INDENT] == self.FEATURE_QUALIFIER_SPACER:
                    line = self.handle.readline()
            else:
                #Build up a list of the lines making up this feature:
                if line[self.FEATURE_QUALIFIER_INDENT]!=" " \
                and " " in line[self.FEATURE_QUALIFIER_INDENT:]:
                    #The feature table design enforces a length limit on the feature keys.
                    #Some third party files (e.g. IGMT's EMBL like files) solve this by
                    #over indenting the location and qualifiers.
                    feature_key, line = line[2:].strip().split(None,1)
                    feature_lines = [line]
                    warnings.warn("Overindented %s feature?" % feature_key)
                else:
                    feature_key = line[2:self.FEATURE_QUALIFIER_INDENT].strip()
                    feature_lines = [line[self.FEATURE_QUALIFIER_INDENT:]]
                line = self.handle.readline()
                while line[:self.FEATURE_QUALIFIER_INDENT] == self.FEATURE_QUALIFIER_SPACER \
                or line.rstrip() == "" : # cope with blank lines in the midst of a feature
                    #Use strip to remove any harmless trailing white space AND and leading
                    #white space (e.g. out of spec files with too much intentation)
                    feature_lines.append(line[self.FEATURE_QUALIFIER_INDENT:].strip())
                    line = self.handle.readline()
                features.append(self.parse_feature(feature_key, feature_lines))
        # Remember the line that ended the table for parse_footer().
        self.line = line
        return features
    def parse_feature(self, feature_key, lines):
        """Expects a feature as a list of strings, returns a tuple (key, location, qualifiers)

        For example given this GenBank feature:

             CDS             complement(join(490883..490885,1..879))
                             /locus_tag="NEQ001"
                             /note="conserved hypothetical [Methanococcus jannaschii];
                             COG1583:Uncharacterized ACR; IPR001472:Bipartite nuclear
                             localization signal; IPR002743: Protein of unknown
                             function DUF57"
                             /codon_start=1
                             /transl_table=11
                             /product="hypothetical protein"
                             /protein_id="NP_963295.1"
                             /db_xref="GI:41614797"
                             /db_xref="GeneID:2732620"
                             /translation="MRLLLELKALNSIDKKQLSNYLIQGFIYNILKNTEYSWLHNWKK
                             EKYFNFTLIPKKDIIENKRYYLIISSPDKRFIEVLHNKIKDLDIITIGLAQFQLRKTK
                             KFDPKLRFPWVTITPIVLREGKIVILKGDKYYKVFVKRLEELKKYNLIKKKEPILEEP
                             IEISLNQIKDGWKIIDVKDRYYDFRNKSFSAFSNWLRDLKEQSLRKYNNFCGKNFYFE
                             EAIFEGFTFYKTVSIRIRINRGEAVYIGTLWKELNVYRKLDKEEREFYKFLYDCGLGS
                             LNSMGFGFVNTKKNSAR"

        Then should give input key="CDS" and the rest of the data as a list of strings
        lines=["complement(join(490883..490885,1..879))", ..., "LNSMGFGFVNTKKNSAR"]
        where the leading spaces and trailing newlines have been removed.

        Returns tuple containing: (key as string, location string, qualifiers as list)
        as follows for this example:

        key = "CDS", string
        location = "complement(join(490883..490885,1..879))", string
        qualifiers = list of string tuples:

        [('locus_tag', '"NEQ001"'),
         ('note', '"conserved hypothetical [Methanococcus jannaschii];\nCOG1583:..."'),
         ('codon_start', '1'),
         ('transl_table', '11'),
         ('product', '"hypothetical protein"'),
         ('protein_id', '"NP_963295.1"'),
         ('db_xref', '"GI:41614797"'),
         ('db_xref', '"GeneID:2732620"'),
         ('translation', '"MRLLLELKALNSIDKKQLSNYLIQGFIYNILKNTEYSWLHNWKK\nEKYFNFT..."')]

        In the above example, the "note" and "translation" were edited for compactness,
        and they would contain multiple new line characters (displayed above as \n)

        If a qualifier is quoted (in this case, everything  except codon_start and
        transl_table) then the quotes are NOT removed.

        Note that no whitespace is removed.
        """
        #Skip any blank lines
        iterator = iter(filter(None, lines))
        try:
            line = iterator.next()

            feature_location = line.strip()
            while feature_location[-1:]==",":
                #Multiline location, still more to come!
                line = iterator.next()
                feature_location += line.strip()

            qualifiers=[]

            for line in iterator:
                if line[0]=="/":
                    #New qualifier
                    i = line.find("=")
                    key = line[1:i] #does not work if i==-1
                    value = line[i+1:] #we ignore 'value' if i==-1
                    if i==-1:
                        #Qualifier with no key, e.g. /pseudo
                        key = line[1:]
                        qualifiers.append((key,None))
                    elif value[0]=='"':
                        #Quoted...
                        if value[-1]!='"' or value!='"':
                            #No closing quote on the first line...
                            while value[-1] != '"':
                                value += "\n" + iterator.next()
                        else:
                            #One single line (quoted)
                            assert value == '"'
                        if self.debug : print "Quoted line %s:%s" % (key, value)
                        #DO NOT remove the quotes...
                        qualifiers.append((key,value))
                    else:
                        #Unquoted
                        #if debug : print "Unquoted line %s:%s" % (key,value)
                        qualifiers.append((key,value))
                else:
                    #Unquoted continuation of the previous qualifier's value.
                    assert len(qualifiers) > 0
                    assert key==qualifiers[-1][0]
                    #if debug : print "Unquoted Cont %s:%s" % (key, line)
                    qualifiers[-1] = (key, qualifiers[-1][1] + "\n" + line)
            return (feature_key, feature_location, qualifiers)
        except StopIteration:
            #Bummer - the feature ran out of lines mid-location or mid-quote.
            raise ValueError("Problem with '%s' feature:\n%s" \
                             % (feature_key, "\n".join(lines)))
    def parse_footer(self):
        """returns a tuple containing a list of any misc strings, and the sequence"""
        #This is a basic bit of code to scan and discard the sequence,
        #which was useful when developing the sub classes. Subclasses
        #override this to actually capture the sequence data.
        if self.line in self.FEATURE_END_MARKERS:
            while self.line[:self.HEADER_WIDTH].rstrip() not in self.SEQUENCE_HEADERS:
                self.line = self.handle.readline()
                if not self.line:
                    raise ValueError("Premature end of file")
                self.line = self.line.rstrip()

        assert self.line[:self.HEADER_WIDTH].rstrip() in self.SEQUENCE_HEADERS, \
               "Not at start of sequence"
        while True:
            line = self.handle.readline()
            if not line : raise ValueError("Premature end of line during sequence data")
            line = line.rstrip()
            if line == "//" : break
        self.line = line
        return ([],"") #Dummy values!
def _feed_first_line(self, consumer, line):
"""Handle the LOCUS/ID line, passing data to the comsumer
This should be implemented by the EMBL / GenBank specific subclass
Used by the parse_records() and parse() methods.
"""
pass
def _feed_header_lines(self, consumer, lines):
"""Handle the header lines (list of strings), passing data to the comsumer
This should be implemented by the EMBL / GenBank specific subclass
Used by the parse_records() and parse() methods.
"""
pass
def _feed_feature_table(self, consumer, feature_tuples):
"""Handle the feature table (list of tuples), passing data to the comsumer
Used by the parse_records() and parse() methods.
"""
consumer.start_feature_table()
for feature_key, location_string, qualifiers in feature_tuples:
consumer.feature_key(feature_key)
consumer.location(location_string)
for q_key, q_value in qualifiers:
consumer.feature_qualifier_name([q_key])
if q_value is not None:
consumer.feature_qualifier_description(q_value.replace("\n"," "))
def _feed_misc_lines(self, consumer, lines):
"""Handle any lines between features and sequence (list of strings), passing data to the consumer
This should be implemented by the EMBL / GenBank specific subclass
Used by the parse_records() and parse() methods.
"""
pass
def feed(self, handle, consumer, do_features=True):
    """Feed one record from the handle into the consumer.

    This method is intended for use with the "old" code in Bio.GenBank.

    Arguments:
    handle - A handle with the information to parse.
    consumer - The consumer that should be informed of events.
    do_features - Boolean, should the features be parsed?
                  Skipping the features can be much faster.

    Returns True if a record was fed, False if none was found.
    """
    #Works with both EMBL and GenBank files provided the equivalent
    #Bio.GenBank._FeatureConsumer methods get called...
    self.set_handle(handle)
    if not self.find_start():
        #No (further) record in the input
        consumer.data = None
        return False
    #The class methods above break the file into a simplified format;
    #the first line, header and misc lines are handled by the GenBank /
    #EMBL specific subclasses, the feature table handling is shared.
    self._feed_first_line(consumer, self.line)
    self._feed_header_lines(consumer, self.parse_header())
    if do_features:
        self._feed_feature_table(consumer, self.parse_features(skip=False))
    else:
        self.parse_features(skip=True)  #discard the feature data
    #Footer and sequence:
    misc_lines, sequence_string = self.parse_footer()
    self._feed_misc_lines(consumer, misc_lines)
    consumer.sequence(sequence_string)
    #Calls to consumer.base_number() do nothing anyway
    consumer.record_end("//")
    assert self.line == "//"
    return True
def parse(self, handle, do_features=True):
    """Parse one record, returning it as a SeqRecord.

    SeqFeatures are included when do_features=True.  Returns None when
    no record was found in the handle.  See also the parse_records()
    method for use on multi-record files.
    """
    #Deferred imports to avoid a circular dependency with Bio.GenBank
    from Bio.GenBank import _FeatureConsumer
    from Bio.GenBank.utils import FeatureValueCleaner
    consumer = _FeatureConsumer(use_fuzziness = 1,
                                feature_cleaner = FeatureValueCleaner())
    if not self.feed(handle, consumer, do_features):
        return None
    return consumer.data
def parse_records(self, handle, do_features=True):
    """Iterate over the records in the handle as SeqRecord objects.

    Each record (from the ID/LOCUS line to the // line) yields one
    SeqRecord, with SeqFeatures included when do_features=True.
    This method is intended for use in Bio.SeqIO.
    """
    #Generator function: keep parsing until no further record is found
    record = self.parse(handle, do_features)
    while record is not None:
        #Sanity check the minimal identification fields were populated
        assert record.id is not None
        assert record.name != "<unknown name>"
        assert record.description != "<unknown description>"
        yield record
        record = self.parse(handle, do_features)
def parse_cds_features(self, handle,
                       alphabet=generic_protein,
                       tags2id=('protein_id','locus_tag','product')):
    """Returns SeqRecord object iterator

    Each CDS feature becomes a SeqRecord.

    alphabet - Used for any sequence found in a translation field.
    tags2id - Tuple of three strings, the feature qualifier keys to use
              for the record id, name and description respectively.

    This method is intended for use in Bio.SeqIO
    """
    self.set_handle(handle)
    while self.find_start():
        #Got an EMBL or GenBank record...
        self.parse_header() # ignore header lines!
        feature_tuples = self.parse_features()
        #self.parse_footer() # ignore footer lines!
        #Instead just skip ahead to the end of record marker by hand:
        while True:
            line = self.handle.readline()
            if not line : break
            if line[:2]=="//" : break
            self.line = line.rstrip()
        #Now go though those features...
        for key, location_string, qualifiers in feature_tuples:
            if key=="CDS":
                #Create SeqRecord
                #================
                #SeqRecord objects cannot be created with annotations, they
                #must be added afterwards. So create an empty record and
                #then populate it:
                record = SeqRecord(seq=None)
                annotations = record.annotations
                #Should we add a location object to the annotations?
                #I *think* that only makes sense for SeqFeatures with their
                #sub features...
                annotations['raw_location'] = location_string.replace(' ','')
                for (qualifier_name, qualifier_data) in qualifiers:
                    #Strip the enclosing quotes from quoted qualifier values
                    if qualifier_data is not None \
                    and qualifier_data[0]=='"' and qualifier_data[-1]=='"':
                        #Remove quotes
                        qualifier_data = qualifier_data[1:-1]
                    #Append the data to the annotation qualifier...
                    if qualifier_name == "translation":
                        assert record.seq is None, "Multiple translations!"
                        record.seq = Seq(qualifier_data.replace("\n",""), alphabet)
                    elif qualifier_name == "db_xref":
                        #its a list, possibly empty. Its safe to extend
                        record.dbxrefs.append(qualifier_data)
                    else:
                        if qualifier_data is not None:
                            #NOTE(review): the second replace is a no-op as
                            #written -- it presumably collapsed "  " to " "
                            #upstream and the literal looks space-collapsed
                            #in this copy of the file; verify.
                            qualifier_data = qualifier_data.replace("\n"," ").replace(" "," ")
                        try:
                            annotations[qualifier_name] += " " + qualifier_data
                        except KeyError:
                            #Not an addition to existing data, its the first bit
                            annotations[qualifier_name]= qualifier_data
                #Fill in the ID, Name, Description
                #=================================
                try:
                    record.id = annotations[tags2id[0]]
                except KeyError:
                    pass
                try:
                    record.name = annotations[tags2id[1]]
                except KeyError:
                    pass
                try:
                    record.description = annotations[tags2id[2]]
                except KeyError:
                    pass
                yield record
class EmblScanner(InsdcScanner):
    """For extracting chunks of information in EMBL files"""

    #NOTE(review): several fixed-width literals below (RECORD_START, the
    #FH marker, the "CO " prefix tests) look like their internal runs of
    #spaces were collapsed in this copy of the file -- verify the padding
    #against the upstream source.
    RECORD_START = "ID "
    HEADER_WIDTH = 5
    FEATURE_START_MARKERS = ["FH Key Location/Qualifiers","FH"]
    FEATURE_END_MARKERS = ["XX"] #XX can also mark the end of many things!
    FEATURE_QUALIFIER_INDENT = 21
    FEATURE_QUALIFIER_SPACER = "FT" + " " * (FEATURE_QUALIFIER_INDENT-2)
    SEQUENCE_HEADERS=["SQ", "CO"] #Remove trailing spaces

    def parse_footer(self):
        """returns a tuple containing a list of any misc strings, and the sequence"""
        assert self.line[:self.HEADER_WIDTH].rstrip() in self.SEQUENCE_HEADERS, \
            "Eh? '%s'" % self.line
        #Note that the SQ line can be split into several lines...
        misc_lines = []
        while self.line[:self.HEADER_WIDTH].rstrip() in self.SEQUENCE_HEADERS:
            misc_lines.append(self.line)
            self.line = self.handle.readline()
            if not self.line:
                raise ValueError("Premature end of file")
            self.line = self.line.rstrip()
        #After the SQ/CO header(s), expect either indented sequence data or
        #an immediate end of record marker:
        assert self.line[:self.HEADER_WIDTH] == " " * self.HEADER_WIDTH \
            or self.line.strip() == '//', repr(self.line)
        seq_lines = []
        line = self.line
        while True:
            if not line:
                raise ValueError("Premature end of file in sequence data")
            line = line.strip()
            if not line:
                raise ValueError("Blank line in sequence data")
            if line=='//':
                break
            assert self.line[:self.HEADER_WIDTH] == " " * self.HEADER_WIDTH, \
                repr(self.line)
            #Remove tailing number now, remove spaces later
            seq_lines.append(line.rsplit(None,1)[0])
            line = self.handle.readline()
            self.line = line
        return (misc_lines, "".join(seq_lines).replace(" ", ""))

    def _feed_first_line(self, consumer, line):
        #Dispatch the ID line to the old or new style handler, based on the
        #number of semi-colon separated fields.
        assert line[:self.HEADER_WIDTH].rstrip() == "ID"
        if line[self.HEADER_WIDTH:].count(";") == 6:
            #Looks like the semi colon separated style introduced in 2006
            self._feed_first_line_new(consumer, line)
        elif line[self.HEADER_WIDTH:].count(";") == 3:
            #Looks like the pre 2006 style
            self._feed_first_line_old(consumer, line)
        else:
            raise ValueError('Did not recognise the ID line layout:\n' + line)

    def _feed_first_line_old(self, consumer, line):
        #Expects an ID line in the style before 2006, e.g.
        #ID SC10H5 standard; DNA; PRO; 4870 BP.
        #ID BSUB9999 standard; circular DNA; PRO; 4214630 BP.
        assert line[:self.HEADER_WIDTH].rstrip() == "ID"
        #First field is space separated, the rest semi-colon separated:
        fields = [line[self.HEADER_WIDTH:].split(None,1)[0]]
        fields.extend(line[self.HEADER_WIDTH:].split(None,1)[1].split(";"))
        fields = [entry.strip() for entry in fields]
        """
        The tokens represent:
        0. Primary accession number
        (space sep)
        1. ??? (e.g. standard)
        (semi-colon)
        2. Topology and/or Molecule type (e.g. 'circular DNA' or 'DNA')
        3. Taxonomic division (e.g. 'PRO')
        4. Sequence length (e.g. '4639675 BP.')
        """
        consumer.locus(fields[0]) #Should we also call the accession consumer?
        consumer.residue_type(fields[2])
        consumer.data_file_division(fields[3])
        self._feed_seq_length(consumer, fields[4])

    def _feed_first_line_new(self, consumer, line):
        #Expects an ID line in the style introduced in 2006, e.g.
        #ID X56734; SV 1; linear; mRNA; STD; PLN; 1859 BP.
        #ID CD789012; SV 4; linear; genomic DNA; HTG; MAM; 500 BP.
        assert line[:self.HEADER_WIDTH].rstrip() == "ID"
        fields = [data.strip() for data in line[self.HEADER_WIDTH:].strip().split(";")]
        assert len(fields) == 7
        """
        The tokens represent:
        0. Primary accession number
        1. Sequence version number
        2. Topology: 'circular' or 'linear'
        3. Molecule type (e.g. 'genomic DNA')
        4. Data class (e.g. 'STD')
        5. Taxonomic division (e.g. 'PRO')
        6. Sequence length (e.g. '4639675 BP.')
        """
        consumer.locus(fields[0])
        #Call the accession consumer now, to make sure we record
        #something as the record.id, in case there is no AC line
        consumer.accession(fields[0])
        #TODO - How to deal with the version field? At the moment the consumer
        #will try and use this for the ID which isn't ideal for EMBL files.
        version_parts = fields[1].split()
        if len(version_parts)==2 \
        and version_parts[0]=="SV" \
        and version_parts[1].isdigit():
            consumer.version_suffix(version_parts[1])
        #Based on how the old GenBank parser worked, merge these two:
        consumer.residue_type(" ".join(fields[2:4])) #TODO - Store as two fields?
        #consumer.xxx(fields[4]) #TODO - What should we do with the data class?
        consumer.data_file_division(fields[5])
        self._feed_seq_length(consumer, fields[6])

    def _feed_seq_length(self, consumer, text):
        #e.g. "4639675 BP." -> consumer.size("4639675")
        length_parts = text.split()
        assert len(length_parts) == 2
        assert length_parts[1].upper() in ["BP", "BP.", "AA."]
        consumer.size(length_parts[0])

    def _feed_header_lines(self, consumer, lines):
        #Pass each EMBL header line to the matching consumer method, with
        #special handling for the reference (R*) line types.
        EMBL_INDENT = self.HEADER_WIDTH
        EMBL_SPACER = " " * EMBL_INDENT
        #Line types with a simple one-to-one consumer method mapping:
        consumer_dict = {
            'AC' : 'accession',
            'SV' : 'version', # SV line removed in June 2006, now part of ID line
            'DE' : 'definition',
            #'RN' : 'reference_num',
            #'RC' : reference comment... TODO
            #'RP' : 'reference_bases',
            #'RX' : reference cross reference... DOI or Pubmed
            'RG' : 'consrtm', #optional consortium
            #'RA' : 'authors',
            #'RT' : 'title',
            'RL' : 'journal',
            'OS' : 'organism',
            'OC' : 'taxonomy',
            #'DR' : data reference
            'CC' : 'comment',
            #'XX' : splitter
        }
        #We have to handle the following specially:
        #RX (depending on reference type...)
        for line in lines:
            line_type = line[:EMBL_INDENT].strip()
            data = line[EMBL_INDENT:].strip()
            if line_type == 'XX':
                pass
            elif line_type == 'RN':
                # Reformat reference numbers for the GenBank based consumer
                # e.g. '[1]' becomes '1'
                if data[0] == "[" and data[-1] == "]" : data = data[1:-1]
                consumer.reference_num(data)
            elif line_type == 'RP':
                # Reformat reference numbers for the GenBank based consumer
                # e.g. '1-4639675' becomes '(bases 1 to 4639675)'
                # and '160-550, 904-1055' becomes '(bases 160 to 550; 904 to 1055)'
                parts = [bases.replace("-"," to ").strip() for bases in data.split(",")]
                consumer.reference_bases("(bases %s)" % "; ".join(parts))
            elif line_type == 'RT':
                #Remove the enclosing quotes and trailing semi colon.
                #Note the title can be split over multiple lines.
                if data.startswith('"'):
                    data = data[1:]
                if data.endswith('";'):
                    data = data[:-2]
                consumer.title(data)
            elif line_type == 'RX':
                # EMBL support three reference types at the moment:
                # - PUBMED PUBMED bibliographic database (NLM)
                # - DOI Digital Object Identifier (International DOI Foundation)
                # - AGRICOLA US National Agriculture Library (NAL) of the US Department
                # of Agriculture (USDA)
                #
                # Format:
                # RX resource_identifier; identifier.
                #
                # e.g.
                # RX DOI; 10.1016/0024-3205(83)90010-3.
                # RX PUBMED; 264242.
                #
                # Currently our reference object only supports PUBMED and MEDLINE
                # (as these were in GenBank files?).
                key, value = data.split(";",1)
                if value.endswith(".") : value = value[:-1]
                value = value.strip()
                if key == "PUBMED":
                    consumer.pubmed_id(value)
                #TODO - Handle other reference types (here and in BioSQL bindings)
            elif line_type == 'CC':
                # Have to pass a list of strings for this one (not just a string)
                consumer.comment([data])
            elif line_type == 'DR':
                # Database Cross-reference, format:
                # DR database_identifier; primary_identifier; secondary_identifier.
                #
                # e.g.
                # DR MGI; 98599; Tcrb-V4.
                #
                # TODO - How should we store any secondary identifier?
                parts = data.rstrip(".").split(";")
                #Turn it into "database_identifier:primary_identifier" to
                #mimic the GenBank parser. e.g. "MGI:98599"
                consumer.dblink("%s:%s" % (parts[0].strip(),
                                           parts[1].strip()))
            elif line_type == 'RA':
                # Remove trailing ; at end of authors list
                consumer.authors(data.rstrip(";"))
            elif line_type == 'PR':
                # Remove trailing ; at end of the project reference
                # In GenBank files this corresponds to the old PROJECT
                # line which is being replaced with the DBLINK line.
                consumer.project(data.rstrip(";"))
            elif line_type in consumer_dict:
                #Its a semi-automatic entry!
                getattr(consumer, consumer_dict[line_type])(data)
            else:
                if self.debug:
                    print "Ignoring EMBL header line:\n%s" % line

    def _feed_misc_lines(self, consumer, lines):
        #TODO - Should we do something with the information on the SQ line(s)?
        #Only the CO (contig) lines are acted on; they may continue over
        #several lines which are concatenated before calling the consumer.
        lines.append("")
        line_iter = iter(lines)
        try:
            for line in line_iter:
                if line.startswith("CO "):
                    line = line[5:].strip()
                    contig_location = line
                    while True:
                        line = line_iter.next()
                        if not line:
                            break
                        elif line.startswith("CO "):
                            #Don't need to preseve the whitespace here.
                            contig_location += line[5:].strip()
                        else:
                            raise ValueError('Expected CO (contig) continuation line, got:\n' + line)
                    consumer.contig_location(contig_location)
            return
        except StopIteration:
            raise ValueError("Problem in misc lines before sequence")
class _ImgtScanner(EmblScanner):
    """For extracting chunks of information in IMGT (EMBL like) files (PRIVATE).

    IMGT files are like EMBL files but in order to allow longer feature types
    the features should be indented by 25 characters not 21 characters. In
    practice the IMGT flat files tend to use either 21 or 25 characters, so we
    must cope with both.

    This is private to encourage use of Bio.SeqIO rather than Bio.GenBank.
    """

    #NOTE(review): the first and third markers read identically here because
    #runs of spaces appear collapsed in this copy of the file; upstream they
    #differ in internal padding.  Verify against the original source.
    FEATURE_START_MARKERS = ["FH Key Location/Qualifiers",
                             "FH Key Location/Qualifiers (from EMBL)",
                             "FH Key Location/Qualifiers",
                             "FH"]

    def parse_features(self, skip=False):
        """Return list of tuples for the features (if present)

        Each feature is returned as a tuple (key, location, qualifiers)
        where key and location are strings (e.g. "CDS" and
        "complement(join(490883..490885,1..879))") while qualifiers
        is a list of two string tuples (feature qualifier keys and values).

        Assumes you have already read to the start of the features table.
        """
        if self.line.rstrip() not in self.FEATURE_START_MARKERS:
            if self.debug : print "Didn't find any feature table"
            return []
        while self.line.rstrip() in self.FEATURE_START_MARKERS:
            self.line = self.handle.readline()
        #Locations like "123>" (should be ">123") are a known IMGT problem;
        #this pattern is used to repair them after each feature is parsed.
        bad_position_re = re.compile(r'([0-9]+)>{1}')
        features = []
        line = self.line
        while True:
            if not line:
                raise ValueError("Premature end of line during features table")
            if line[:self.HEADER_WIDTH].rstrip() in self.SEQUENCE_HEADERS:
                if self.debug : print "Found start of sequence"
                break
            line = line.rstrip()
            if line == "//":
                raise ValueError("Premature end of features table, marker '//' found")
            if line in self.FEATURE_END_MARKERS:
                if self.debug : print "Found end of features"
                line = self.handle.readline()
                break
            if line[2:self.FEATURE_QUALIFIER_INDENT].strip() == "":
                #This is an empty feature line between qualifiers. Empty
                #feature lines within qualifiers are handled below (ignored).
                line = self.handle.readline()
                continue
            if skip:
                #Consume (and discard) this feature's continuation lines
                line = self.handle.readline()
                while line[:self.FEATURE_QUALIFIER_INDENT] == self.FEATURE_QUALIFIER_SPACER:
                    line = self.handle.readline()
            else:
                assert line[:2] == "FT"
                try:
                    feature_key, location_start = line[2:].strip().split()
                except ValueError:
                    #e.g. "FT TRANSMEMBRANE-REGION2163..2240\n"
                    #Assume indent of 25 as per IMGT spec, with the location
                    #start in column 26 (one-based).
                    feature_key = line[2:25].strip()
                    location_start = line[25:].strip()
                feature_lines = [location_start]
                line = self.handle.readline()
                while line[:self.FEATURE_QUALIFIER_INDENT] == self.FEATURE_QUALIFIER_SPACER \
                or line.rstrip() == "" : # cope with blank lines in the midst of a feature
                    #Use strip to remove any harmless trailing white space AND and leading
                    #white space (copes with 21 or 26 indents and orther variants)
                    #NOTE(review): a genuinely blank line would fail this FT
                    #assert despite the comment above -- verify the intent.
                    assert line[:2] == "FT"
                    feature_lines.append(line[self.FEATURE_QUALIFIER_INDENT:].strip())
                    line = self.handle.readline()
                feature_key, location, qualifiers = \
                    self.parse_feature(feature_key, feature_lines)
                #Try to handle known problems with IMGT locations here:
                if ">" in location:
                    #Nasty hack for common IMGT bug, should be >123 not 123>
                    #in a location string. At least here the meaning is clear,
                    #and since it is so common I don't want to issue a warning
                    #warnings.warn("Feature location %s is invalid, "
                    #              "moving greater than sign before position"
                    #              % location)
                    location = bad_position_re.sub(r'>\1',location)
                features.append((feature_key, location, qualifiers))
        self.line = line
        return features
class GenBankScanner(InsdcScanner):
"""For extracting chunks of information in GenBank files"""
RECORD_START = "LOCUS "
HEADER_WIDTH = 12
FEATURE_START_MARKERS = ["FEATURES Location/Qualifiers","FEATURES"]
FEATURE_END_MARKERS = []
FEATURE_QUALIFIER_INDENT = 21
FEATURE_QUALIFIER_SPACER = " " * FEATURE_QUALIFIER_INDENT
SEQUENCE_HEADERS=["CONTIG", "ORIGIN", "BASE COUNT", "WGS"] # trailing spaces removed
def parse_footer(self):
"""returns a tuple containing a list of any misc strings, and the sequence"""
assert self.line[:self.HEADER_WIDTH].rstrip() in self.SEQUENCE_HEADERS, \
"Eh? '%s'" % self.line
misc_lines = []
while self.line[:self.HEADER_WIDTH].rstrip() in self.SEQUENCE_HEADERS \
or self.line[:self.HEADER_WIDTH] == " "*self.HEADER_WIDTH \
or "WGS" == self.line[:3]:
misc_lines.append(self.line.rstrip())
self.line = self.handle.readline()
if not self.line:
raise ValueError("Premature end of file")
self.line = self.line
assert self.line[:self.HEADER_WIDTH].rstrip() not in self.SEQUENCE_HEADERS, \
"Eh? '%s'" % self.line
#Now just consume the sequence lines until reach the // marker
#or a CONTIG line
seq_lines = []
line = self.line
while True:
if not line:
raise ValueError("Premature end of file in sequence data")
line = line.rstrip()
if not line:
import warnings
warnings.warn("Blank line in sequence data")
line = self.handle.readline()
continue
if line=='//':
break
if line.find('CONTIG')==0:
break
if len(line) > 9 and line[9:10]!=' ':
raise ValueError("Sequence line mal-formed, '%s'" % line)
seq_lines.append(line[10:]) #remove spaces later
line = self.handle.readline()
self.line = line
#Seq("".join(seq_lines), self.alphabet)
return (misc_lines,"".join(seq_lines).replace(" ",""))
def _feed_first_line(self, consumer, line):
#####################################
# LOCUS line #
#####################################
GENBANK_INDENT = self.HEADER_WIDTH
GENBANK_SPACER = " "*GENBANK_INDENT
assert line[0:GENBANK_INDENT] == 'LOCUS ', \
'LOCUS line does not start correctly:\n' + line
#Have to break up the locus line, and handle the different bits of it.
#There are at least two different versions of the locus line...
if line[29:33] in [' bp ', ' aa ',' rc ']:
#Old...
#
# Positions Contents
# --------- --------
# 00:06 LOCUS
# 06:12 spaces
# 12:?? Locus name
# ??:?? space
# ??:29 Length of sequence, right-justified
# 29:33 space, bp, space
# 33:41 strand type
# 41:42 space
# 42:51 Blank (implies linear), linear or circular
# 51:52 space
# 52:55 The division code (e.g. BCT, VRL, INV)
# 55:62 space
# 62:73 Date, in the form dd-MMM-yyyy (e.g., 15-MAR-1991)
#
assert line[29:33] in [' bp ', ' aa ',' rc '] , \
'LOCUS line does not contain size units at expected position:\n' + line
assert line[41:42] == ' ', \
'LOCUS line does not contain space at position 42:\n' + line
assert line[42:51].strip() in ['','linear','circular'], \
'LOCUS line does not contain valid entry (linear, circular, ...):\n' + line
assert line[51:52] == ' ', \
'LOCUS line does not contain space at position 52:\n' + line
assert line[55:62] == ' ', \
'LOCUS line does not contain spaces from position 56 to 62:\n' + line
if line[62:73].strip():
assert line[64:65] == '-', \
'LOCUS line does not contain - at position 65 in date:\n' + line
assert line[68:69] == '-', \
'LOCUS line does not contain - at position 69 in date:\n' + line
name_and_length_str = line[GENBANK_INDENT:29]
while name_and_length_str.find(' ')!=-1:
name_and_length_str = name_and_length_str.replace(' ',' ')
name_and_length = name_and_length_str.split(' ')
assert len(name_and_length)<=2, \
'Cannot parse the name and length in the LOCUS line:\n' + line
assert len(name_and_length)!=1, \
'Name and length collide in the LOCUS line:\n' + line
#Should be possible to split them based on position, if
#a clear definition of the standard exists THAT AGREES with
#existing files.
consumer.locus(name_and_length[0])
consumer.size(name_and_length[1])
#consumer.residue_type(line[33:41].strip())
if line[33:51].strip() == "" and line[29:33] == ' aa ':
#Amino acids -> protein (even if there is no residue type given)
#We want to use a protein alphabet in this case, rather than a
#generic one. Not sure if this is the best way to achieve this,
#but it works because the scanner checks for this:
consumer.residue_type("PROTEIN")
else:
consumer.residue_type(line[33:51].strip())
consumer.data_file_division(line[52:55])
if line[62:73].strip():
consumer.date(line[62:73])
elif line[40:44] in [' bp ', ' aa ',' rc ']:
#New...
#
# Positions Contents
# --------- --------
# 00:06 LOCUS
# 06:12 spaces
# 12:?? Locus name
# ??:?? space
# ??:40 Length of sequence, right-justified
# 40:44 space, bp, space
# 44:47 Blank, ss-, ds-, ms-
# 47:54 Blank, DNA, RNA, tRNA, mRNA, uRNA, snRNA, cDNA
# 54:55 space
# 55:63 Blank (implies linear), linear or circular
# 63:64 space
# 64:67 The division code (e.g. BCT, VRL, INV)
# 67:68 space
# 68:79 Date, in the form dd-MMM-yyyy (e.g., 15-MAR-1991)
#
assert line[40:44] in [' bp ', ' aa ',' rc '] , \
'LOCUS line does not contain size units at expected position:\n' + line
assert line[44:47] in [' ', 'ss-', 'ds-', 'ms-'], \
'LOCUS line does not have valid strand type (Single stranded, ...):\n' + line
assert line[47:54].strip() == "" \
or line[47:54].strip().find('DNA') != -1 \
or line[47:54].strip().find('RNA') != -1, \
'LOCUS line does not contain valid sequence type (DNA, RNA, ...):\n' + line
assert line[54:55] == ' ', \
'LOCUS line does not contain space at position 55:\n' + line
assert line[55:63].strip() in ['','linear','circular'], \
'LOCUS line does not contain valid entry (linear, circular, ...):\n' + line
assert line[63:64] == ' ', \
'LOCUS line does not contain space at position 64:\n' + line
assert line[67:68] == ' ', \
'LOCUS line does not contain space at position 68:\n' + line
if line[68:79].strip():
assert line[70:71] == '-', \
'LOCUS line does not contain - at position 71 in date:\n' + line
assert line[74:75] == '-', \
'LOCUS line does not contain - at position 75 in date:\n' + line
name_and_length_str = line[GENBANK_INDENT:40]
while name_and_length_str.find(' ')!=-1:
name_and_length_str = name_and_length_str.replace(' ',' ')
name_and_length = name_and_length_str.split(' ')
assert len(name_and_length)<=2, \
'Cannot parse the name and length in the LOCUS line:\n' + line
assert len(name_and_length)!=1, \
'Name and length collide in the LOCUS line:\n' + line
#Should be possible to split them based on position, if
#a clear definition of the stand exists THAT AGREES with
#existing files.
consumer.locus(name_and_length[0])
consumer.size(name_and_length[1])
if line[44:54].strip() == "" and line[40:44] == ' aa ':
#Amino acids -> protein (even if there is no residue type given)
#We want to use a protein alphabet in this case, rather than a
#generic one. Not sure if this is the best way to achieve this,
#but it works because the scanner checks for this:
consumer.residue_type(("PROTEIN " + line[54:63]).strip())
else:
consumer.residue_type(line[44:63].strip())
consumer.data_file_division(line[64:67])
if line[68:79].strip():
consumer.date(line[68:79])
elif line[GENBANK_INDENT:].strip().count(" ")==0 :
#Truncated LOCUS line, as produced by some EMBOSS tools - see bug 1762
#
#e.g.
#
# "LOCUS U00096"
#
#rather than:
#
# "LOCUS U00096 4639675 bp DNA circular BCT"
#
# Positions Contents
# --------- --------
# 00:06 LOCUS
# 06:12 spaces
# 12:?? Locus name
if line[GENBANK_INDENT:].strip() != "":
consumer.locus(line[GENBANK_INDENT:].strip())
else:
#Must just have just "LOCUS ", is this even legitimate?
#We should be able to continue parsing... we need real world testcases!
warnings.warn("Minimal LOCUS line found - is this correct?\n" + line)
elif len(line.split())>=4 and line.split()[3] in ["aa","bp"]:
#Cope with EMBOSS seqret output where it seems the locus id can cause
#the other fields to overflow. We just IGNORE the other fields!
consumer.locus(line.split()[1])
consumer.size(line.split()[2])
warnings.warn("Malformed LOCUS line found - is this correct?\n" + line)
else:
raise ValueError('Did not recognise the LOCUS line layout:\n' + line)
def _feed_header_lines(self, consumer, lines):
#Following dictionary maps GenBank lines to the associated
#consumer methods - the special cases like LOCUS where one
#genbank line triggers several consumer calls have to be
#handled individually.
GENBANK_INDENT = self.HEADER_WIDTH
GENBANK_SPACER = " "*GENBANK_INDENT
consumer_dict = {
'DEFINITION' : 'definition',
'ACCESSION' : 'accession',
'NID' : 'nid',
'PID' : 'pid',
'DBSOURCE' : 'db_source',
'KEYWORDS' : 'keywords',
'SEGMENT' : 'segment',
'SOURCE' : 'source',
'AUTHORS' : 'authors',
'CONSRTM' : 'consrtm',
'PROJECT' : 'project',
'DBLINK' : 'dblink',
'TITLE' : 'title',
'JOURNAL' : 'journal',
'MEDLINE' : 'medline_id',
'PUBMED' : 'pubmed_id',
'REMARK' : 'remark'}
#We have to handle the following specially:
#ORIGIN (locus, size, residue_type, data_file_division and date)
#COMMENT (comment)
#VERSION (version and gi)
#REFERENCE (eference_num and reference_bases)
#ORGANISM (organism and taxonomy)
lines = filter(None,lines)
lines.append("") #helps avoid getting StopIteration all the time
line_iter = iter(lines)
try:
line = line_iter.next()
while True:
if not line : break
line_type = line[:GENBANK_INDENT].strip()
data = line[GENBANK_INDENT:].strip()
if line_type == 'VERSION':
#Need to call consumer.version(), and maybe also consumer.gi() as well.
#e.g.
# VERSION AC007323.5 GI:6587720
while data.find(' ')!=-1:
data = data.replace(' ',' ')
if data.find(' GI:')==-1:
consumer.version(data)
else:
if self.debug : print "Version [" + data.split(' GI:')[0] + "], gi [" + data.split(' GI:')[1] + "]"
consumer.version(data.split(' GI:')[0])
consumer.gi(data.split(' GI:')[1])
#Read in the next line!
line = line_iter.next()
elif line_type == 'REFERENCE':
if self.debug >1 : print "Found reference [" + data + "]"
#Need to call consumer.reference_num() and consumer.reference_bases()
#e.g.
# REFERENCE 1 (bases 1 to 86436)
#
#Note that this can be multiline, see Bug 1968, e.g.
#
# REFERENCE 42 (bases 1517 to 1696; 3932 to 4112; 17880 to 17975; 21142 to
# 28259)
#
#For such cases we will call the consumer once only.
data = data.strip()
#Read in the next line, and see if its more of the reference:
while True:
line = line_iter.next()
if line[:GENBANK_INDENT] == GENBANK_SPACER:
#Add this continuation to the data string
data += " " + line[GENBANK_INDENT:]
if self.debug >1 : print "Extended reference text [" + data + "]"
else:
#End of the reference, leave this text in the variable "line"
break
#We now have all the reference line(s) stored in a string, data,
#which we pass to the consumer
while data.find(' ')!=-1:
data = data.replace(' ',' ')
if data.find(' ')==-1:
if self.debug >2 : print 'Reference number \"' + data + '\"'
consumer.reference_num(data)
else:
if self.debug >2 : print 'Reference number \"' + data[:data.find(' ')] + '\", \"' + data[data.find(' ')+1:] + '\"'
consumer.reference_num(data[:data.find(' ')])
consumer.reference_bases(data[data.find(' ')+1:])
elif line_type == 'ORGANISM':
#Typically the first line is the organism, and subsequent lines
#are the taxonomy lineage. However, given longer and longer
#species names (as more and more strains and sub strains get
#sequenced) the oragnism name can now get wrapped onto multiple
#lines. The NCBI say we have to recognise the lineage line by
#the presense of semi-colon delimited entries. In the long term,
#they are considering adding a new keyword (e.g. LINEAGE).
#See Bug 2591 for details.
organism_data = data
lineage_data = ""
while True:
line = line_iter.next()
if line[0:GENBANK_INDENT] == GENBANK_SPACER:
if lineage_data or ";" in line:
lineage_data += " " + line[GENBANK_INDENT:]
else:
organism_data += " " + line[GENBANK_INDENT:].strip()
else:
#End of organism and taxonomy
break
consumer.organism(organism_data)
if lineage_data.strip() == "" and self.debug > 1:
print "Taxonomy line(s) missing or blank"
consumer.taxonomy(lineage_data.strip())
del organism_data, lineage_data
elif line_type == 'COMMENT':
if self.debug > 1 : print "Found comment"
#This can be multiline, and should call consumer.comment() once
#with a list where each entry is a line.
comment_list=[]
comment_list.append(data)
while True:
line = line_iter.next()
if line[0:GENBANK_INDENT] == GENBANK_SPACER:
data = line[GENBANK_INDENT:]
comment_list.append(data)
if self.debug > 2 : print "Comment continuation [" + data + "]"
else:
#End of the comment
break
consumer.comment(comment_list)
del comment_list
elif line_type in consumer_dict:
#Its a semi-automatic entry!
#Now, this may be a multi line entry...
while True:
line = line_iter.next()
if line[0:GENBANK_INDENT] == GENBANK_SPACER:
data += ' ' + line[GENBANK_INDENT:]
else:
#We now have all the data for this entry:
getattr(consumer, consumer_dict[line_type])(data)
#End of continuation - return to top of loop!
break
else:
if self.debug:
print "Ignoring GenBank header line:\n" % line
#Read in next line
line = line_iter.next()
except StopIteration:
raise ValueError("Problem in header")
def _feed_misc_lines(self, consumer, lines):
#Deals with a few misc lines between the features and the sequence
GENBANK_INDENT = self.HEADER_WIDTH
GENBANK_SPACER = " "*GENBANK_INDENT
lines.append("")
line_iter = iter(lines)
try:
for line in line_iter:
if line.find('BASE COUNT')==0:
line = line[10:].strip()
if line:
if self.debug : print "base_count = " + line
consumer.base_count(line)
if line.find("ORIGIN")==0:
line = line[6:].strip()
if line:
if self.debug : print "origin_name = " + line
consumer.origin_name(line)
if line.find("WGS ")==0 :
line = line[3:].strip()
consumer.wgs(line)
if line.find("WGS_SCAFLD")==0 :
line = line[10:].strip()
consumer.add_wgs_scafld(line)
if line.find("CONTIG")==0:
line = line[6:].strip()
contig_location = line
while True:
line = line_iter.next()
if not line:
break
elif line[:GENBANK_INDENT]==GENBANK_SPACER:
#Don't need to preseve the whitespace here.
contig_location += line[GENBANK_INDENT:].rstrip()
else:
raise ValueError('Expected CONTIG continuation line, got:\n' + line)
consumer.contig_location(contig_location)
return
except StopIteration:
raise ValueError("Problem in misc lines before sequence")
if __name__ == "__main__":
from StringIO import StringIO
gbk_example = \
"""LOCUS SCU49845 5028 bp DNA PLN 21-JUN-1999
DEFINITION Saccharomyces cerevisiae TCP1-beta gene, partial cds, and Axl2p
(AXL2) and Rev7p (REV7) genes, complete cds.
ACCESSION U49845
VERSION U49845.1 GI:1293613
KEYWORDS .
SOURCE Saccharomyces cerevisiae (baker's yeast)
ORGANISM Saccharomyces cerevisiae
Eukaryota; Fungi; Ascomycota; Saccharomycotina; Saccharomycetes;
Saccharomycetales; Saccharomycetaceae; Saccharomyces.
REFERENCE 1 (bases 1 to 5028)
AUTHORS Torpey,L.E., Gibbs,P.E., Nelson,J. and Lawrence,C.W.
TITLE Cloning and sequence of REV7, a gene whose function is required for
DNA damage-induced mutagenesis in Saccharomyces cerevisiae
JOURNAL Yeast 10 (11), 1503-1509 (1994)
PUBMED 7871890
REFERENCE 2 (bases 1 to 5028)
AUTHORS Roemer,T., Madden,K., Chang,J. and Snyder,M.
TITLE Selection of axial growth sites in yeast requires Axl2p, a novel
plasma membrane glycoprotein
JOURNAL Genes Dev. 10 (7), 777-793 (1996)
PUBMED 8846915
REFERENCE 3 (bases 1 to 5028)
AUTHORS Roemer,T.
TITLE Direct Submission
JOURNAL Submitted (22-FEB-1996) Terry Roemer, Biology, Yale University, New
Haven, CT, USA
FEATURES Location/Qualifiers
source 1..5028
/organism="Saccharomyces cerevisiae"
/db_xref="taxon:4932"
/chromosome="IX"
/map="9"
CDS <1..206
/codon_start=3
/product="TCP1-beta"
/protein_id="AAA98665.1"
/db_xref="GI:1293614"
/translation="SSIYNGISTSGLDLNNGTIADMRQLGIVESYKLKRAVVSSASEA
AEVLLRVDNIIRARPRTANRQHM"
gene 687..3158
/gene="AXL2"
CDS 687..3158
/gene="AXL2"
/note="plasma membrane glycoprotein"
/codon_start=1
/function="required for axial budding pattern of S.
cerevisiae"
/product="Axl2p"
/protein_id="AAA98666.1"
/db_xref="GI:1293615"
/translation="MTQLQISLLLTATISLLHLVVATPYEAYPIGKQYPPVARVNESF
TFQISNDTYKSSVDKTAQITYNCFDLPSWLSFDSSSRTFSGEPSSDLLSDANTTLYFN
VILEGTDSADSTSLNNTYQFVVTNRPSISLSSDFNLLALLKNYGYTNGKNALKLDPNE
VFNVTFDRSMFTNEESIVSYYGRSQLYNAPLPNWLFFDSGELKFTGTAPVINSAIAPE
TSYSFVIIATDIEGFSAVEVEFELVIGAHQLTTSIQNSLIINVTDTGNVSYDLPLNYV
YLDDDPISSDKLGSINLLDAPDWVALDNATISGSVPDELLGKNSNPANFSVSIYDTYG
DVIYFNFEVVSTTDLFAISSLPNINATRGEWFSYYFLPSQFTDYVNTNVSLEFTNSSQ
DHDWVKFQSSNLTLAGEVPKNFDKLSLGLKANQGSQSQELYFNIIGMDSKITHSNHSA
NATSTRSSHHSTSTSSYTSSTYTAKISSTSAAATSSAPAALPAANKTSSHNKKAVAIA
CGVAIPLGVILVALICFLIFWRRRRENPDDENLPHAISGPDLNNPANKPNQENATPLN
NPFDDDASSYDDTSIARRLAALNTLKLDNHSATESDISSVDEKRDSLSGMNTYNDQFQ
SQSKEELLAKPPVQPPESPFFDPQNRSSSVYMDSEPAVNKSWRYTGNLSPVSDIVRDS
YGSQKTVDTEKLFDLEAPEKEKRTSRDVTMSSLDPWNSNISPSPVRKSVTPSPYNVTK
HRNRHLQNIQDSQSGKNGITPTTMSTSSSDDFVPVKDGENFCWVHSMEPDRRPSKKRL
VDFSNKSNVNVGQVKDIHGRIPEML"
gene complement(3300..4037)
/gene="REV7"
CDS complement(3300..4037)
/gene="REV7"
/codon_start=1
/product="Rev7p"
/protein_id="AAA98667.1"
/db_xref="GI:1293616"
/translation="MNRWVEKWLRVYLKCYINLILFYRNVYPPQSFDYTTYQSFNLPQ
FVPINRHPALIDYIEELILDVLSKLTHVYRFSICIINKKNDLCIEKYVLDFSELQHVD
KDDQIITETEVFDEFRSSLNSLIMHLEKLPKVNDDTITFEAVINAIELELGHKLDRNR
RVDSLEEKAEIERDSNWVKCQEDENLPDNNGFQPPKIKLTSLVGSDVGPLIIHQFSEK
LISGDDKILNGVYSQYEEGESIFGSLF"
ORIGIN
1 gatcctccat atacaacggt atctccacct caggtttaga tctcaacaac ggaaccattg
61 ccgacatgag acagttaggt atcgtcgaga gttacaagct aaaacgagca gtagtcagct
121 ctgcatctga agccgctgaa gttctactaa gggtggataa catcatccgt gcaagaccaa
181 gaaccgccaa tagacaacat atgtaacata tttaggatat acctcgaaaa taataaaccg
241 ccacactgtc attattataa ttagaaacag aacgcaaaaa ttatccacta tataattcaa
301 agacgcgaaa aaaaaagaac aacgcgtcat agaacttttg gcaattcgcg tcacaaataa
361 attttggcaa cttatgtttc ctcttcgagc agtactcgag ccctgtctca agaatgtaat
421 aatacccatc gtaggtatgg ttaaagatag catctccaca acctcaaagc tccttgccga
481 gagtcgccct cctttgtcga gtaattttca cttttcatat gagaacttat tttcttattc
541 tttactctca catcctgtag tgattgacac tgcaacagcc accatcacta gaagaacaga
601 acaattactt aatagaaaaa ttatatcttc ctcgaaacga tttcctgctt ccaacatcta
661 cgtatatcaa gaagcattca cttaccatga cacagcttca gatttcatta ttgctgacag
721 ctactatatc actactccat ctagtagtgg ccacgcccta tgaggcatat cctatcggaa
781 aacaataccc cccagtggca agagtcaatg aatcgtttac atttcaaatt tccaatgata
841 cctataaatc gtctgtagac aagacagctc aaataacata caattgcttc gacttaccga
901 gctggctttc gtttgactct agttctagaa cgttctcagg tgaaccttct tctgacttac
961 tatctgatgc gaacaccacg ttgtatttca atgtaatact cgagggtacg gactctgccg
1021 acagcacgtc tttgaacaat acataccaat ttgttgttac aaaccgtcca tccatctcgc
1081 tatcgtcaga tttcaatcta ttggcgttgt taaaaaacta tggttatact aacggcaaaa
1141 acgctctgaa actagatcct aatgaagtct tcaacgtgac ttttgaccgt tcaatgttca
1201 ctaacgaaga atccattgtg tcgtattacg gacgttctca gttgtataat gcgccgttac
1261 ccaattggct gttcttcgat tctggcgagt tgaagtttac tgggacggca ccggtgataa
1321 actcggcgat tgctccagaa acaagctaca gttttgtcat catcgctaca gacattgaag
1381 gattttctgc cgttgaggta gaattcgaat tagtcatcgg ggctcaccag ttaactacct
1441 ctattcaaaa tagtttgata atcaacgtta ctgacacagg taacgtttca tatgacttac
1501 ctctaaacta tgtttatctc gatgacgatc ctatttcttc tgataaattg ggttctataa
1561 acttattgga tgctccagac tgggtggcat tagataatgc taccatttcc gggtctgtcc
1621 cagatgaatt actcggtaag aactccaatc ctgccaattt ttctgtgtcc atttatgata
1681 cttatggtga tgtgatttat ttcaacttcg aagttgtctc cacaacggat ttgtttgcca
1741 ttagttctct tcccaatatt aacgctacaa ggggtgaatg gttctcctac tattttttgc
1801 cttctcagtt tacagactac gtgaatacaa acgtttcatt agagtttact aattcaagcc
1861 aagaccatga ctgggtgaaa ttccaatcat ctaatttaac attagctgga gaagtgccca
1921 agaatttcga caagctttca ttaggtttga aagcgaacca aggttcacaa tctcaagagc
1981 tatattttaa catcattggc atggattcaa agataactca ctcaaaccac agtgcgaatg
2041 caacgtccac aagaagttct caccactcca cctcaacaag ttcttacaca tcttctactt
2101 acactgcaaa aatttcttct acctccgctg ctgctacttc ttctgctcca gcagcgctgc
2161 cagcagccaa taaaacttca tctcacaata aaaaagcagt agcaattgcg tgcggtgttg
2221 ctatcccatt aggcgttatc ctagtagctc tcatttgctt cctaatattc tggagacgca
2281 gaagggaaaa tccagacgat gaaaacttac cgcatgctat tagtggacct gatttgaata
2341 atcctgcaaa taaaccaaat caagaaaacg ctacaccttt gaacaacccc tttgatgatg
2401 atgcttcctc gtacgatgat acttcaatag caagaagatt ggctgctttg aacactttga
2461 aattggataa ccactctgcc actgaatctg atatttccag cgtggatgaa aagagagatt
2521 ctctatcagg tatgaataca tacaatgatc agttccaatc ccaaagtaaa gaagaattat
2581 tagcaaaacc cccagtacag cctccagaga gcccgttctt tgacccacag aataggtctt
2641 cttctgtgta tatggatagt gaaccagcag taaataaatc ctggcgatat actggcaacc
2701 tgtcaccagt ctctgatatt gtcagagaca gttacggatc acaaaaaact gttgatacag
2761 aaaaactttt cgatttagaa gcaccagaga aggaaaaacg tacgtcaagg gatgtcacta
2821 tgtcttcact ggacccttgg aacagcaata ttagcccttc tcccgtaaga aaatcagtaa
2881 caccatcacc atataacgta acgaagcatc gtaaccgcca cttacaaaat attcaagact
2941 ctcaaagcgg taaaaacgga atcactccca caacaatgtc aacttcatct tctgacgatt
3001 ttgttccggt taaagatggt gaaaattttt gctgggtcca tagcatggaa ccagacagaa
3061 gaccaagtaa gaaaaggtta gtagattttt caaataagag taatgtcaat gttggtcaag
3121 ttaaggacat tcacggacgc atcccagaaa tgctgtgatt atacgcaacg atattttgct
3181 taattttatt ttcctgtttt attttttatt agtggtttac agatacccta tattttattt
3241 agtttttata cttagagaca tttaatttta attccattct tcaaatttca tttttgcact
3301 taaaacaaag atccaaaaat gctctcgccc tcttcatatt gagaatacac tccattcaaa
3361 attttgtcgt caccgctgat taatttttca ctaaactgat gaataatcaa aggccccacg
3421 tcagaaccga ctaaagaagt gagttttatt ttaggaggtt gaaaaccatt attgtctggt
3481 aaattttcat cttcttgaca tttaacccag tttgaatccc tttcaatttc tgctttttcc
3541 tccaaactat cgaccctcct gtttctgtcc aacttatgtc ctagttccaa ttcgatcgca
3601 ttaataactg cttcaaatgt tattgtgtca tcgttgactt taggtaattt ctccaaatgc
3661 ataatcaaac tatttaagga agatcggaat tcgtcgaaca cttcagtttc cgtaatgatc
3721 tgatcgtctt tatccacatg ttgtaattca ctaaaatcta aaacgtattt ttcaatgcat
3781 aaatcgttct ttttattaat aatgcagatg gaaaatctgt aaacgtgcgt taatttagaa
3841 agaacatcca gtataagttc ttctatatag tcaattaaag caggatgcct attaatggga
3901 acgaactgcg gcaagttgaa tgactggtaa gtagtgtagt cgaatgactg aggtgggtat
3961 acatttctat aaaataaaat caaattaatg tagcatttta agtataccct cagccacttc
4021 tctacccatc tattcataaa gctgacgcaa cgattactat tttttttttc ttcttggatc
4081 tcagtcgtcg caaaaacgta taccttcttt ttccgacctt ttttttagct ttctggaaaa
4141 gtttatatta gttaaacagg gtctagtctt agtgtgaaag ctagtggttt cgattgactg
4201 atattaagaa agtggaaatt aaattagtag tgtagacgta tatgcatatg tatttctcgc
4261 ctgtttatgt ttctacgtac ttttgattta tagcaagggg aaaagaaata catactattt
4321 tttggtaaag gtgaaagcat aatgtaaaag ctagaataaa atggacgaaa taaagagagg
4381 cttagttcat cttttttcca aaaagcaccc aatgataata actaaaatga aaaggatttg
4441 ccatctgtca gcaacatcag ttgtgtgagc aataataaaa tcatcacctc cgttgccttt
4501 agcgcgtttg tcgtttgtat cttccgtaat tttagtctta tcaatgggaa tcataaattt
4561 tccaatgaat tagcaatttc gtccaattct ttttgagctt cttcatattt gctttggaat
4621 tcttcgcact tcttttccca ttcatctctt tcttcttcca aagcaacgat ccttctaccc
4681 atttgctcag agttcaaatc ggcctctttc agtttatcca ttgcttcctt cagtttggct
4741 tcactgtctt ctagctgttg ttctagatcc tggtttttct tggtgtagtt ctcattatta
4801 gatctcaagt tattggagtc ttcagccaat tgctttgtat cagacaattg actctctaac
4861 ttctccactt cactgtcgag ttgctcgttt ttagcggaca aagatttaat ctcgttttct
4921 ttttcagtgt tagattgctc taattctttg agctgttctc tcagctcctc atatttttct
4981 tgccatgact cagattctaa ttttaagcta ttcaatttct ctttgatc
//"""
# GenBank format protein (aka GenPept) file from:
# http://www.molecularevolution.org/resources/fileformats/
gbk_example2 = \
"""LOCUS AAD51968 143 aa linear BCT 21-AUG-2001
DEFINITION transcriptional regulator RovA [Yersinia enterocolitica].
ACCESSION AAD51968
VERSION AAD51968.1 GI:5805369
DBSOURCE locus AF171097 accession AF171097.1
KEYWORDS .
SOURCE Yersinia enterocolitica
ORGANISM Yersinia enterocolitica
Bacteria; Proteobacteria; Gammaproteobacteria; Enterobacteriales;
Enterobacteriaceae; Yersinia.
REFERENCE 1 (residues 1 to 143)
AUTHORS Revell,P.A. and Miller,V.L.
TITLE A chromosomally encoded regulator is required for expression of the
Yersinia enterocolitica inv gene and for virulence
JOURNAL Mol. Microbiol. 35 (3), 677-685 (2000)
MEDLINE 20138369
PUBMED 10672189
REFERENCE 2 (residues 1 to 143)
AUTHORS Revell,P.A. and Miller,V.L.
TITLE Direct Submission
JOURNAL Submitted (22-JUL-1999) Molecular Microbiology, Washington
University School of Medicine, Campus Box 8230, 660 South Euclid,
St. Louis, MO 63110, USA
COMMENT Method: conceptual translation.
FEATURES Location/Qualifiers
source 1..143
/organism="Yersinia enterocolitica"
/mol_type="unassigned DNA"
/strain="JB580v"
/serotype="O:8"
/db_xref="taxon:630"
Protein 1..143
/product="transcriptional regulator RovA"
/name="regulates inv expression"
CDS 1..143
/gene="rovA"
/coded_by="AF171097.1:380..811"
/note="regulator of virulence"
/transl_table=11
ORIGIN
1 mestlgsdla rlvrvwrali dhrlkplelt qthwvtlhni nrlppeqsqi qlakaigieq
61 pslvrtldql eekglitrht candrrakri klteqsspii eqvdgvicst rkeilggisp
121 deiellsgli dklerniiql qsk
//
"""
embl_example="""ID X56734; SV 1; linear; mRNA; STD; PLN; 1859 BP.
XX
AC X56734; S46826;
XX
DT 12-SEP-1991 (Rel. 29, Created)
DT 25-NOV-2005 (Rel. 85, Last updated, Version 11)
XX
DE Trifolium repens mRNA for non-cyanogenic beta-glucosidase
XX
KW beta-glucosidase.
XX
OS Trifolium repens (white clover)
OC Eukaryota; Viridiplantae; Streptophyta; Embryophyta; Tracheophyta;
OC Spermatophyta; Magnoliophyta; eudicotyledons; core eudicotyledons; rosids;
OC eurosids I; Fabales; Fabaceae; Papilionoideae; Trifolieae; Trifolium.
XX
RN [5]
RP 1-1859
RX PUBMED; 1907511.
RA Oxtoby E., Dunn M.A., Pancoro A., Hughes M.A.;
RT "Nucleotide and derived amino acid sequence of the cyanogenic
RT beta-glucosidase (linamarase) from white clover (Trifolium repens L.)";
RL Plant Mol. Biol. 17(2):209-219(1991).
XX
RN [6]
RP 1-1859
RA Hughes M.A.;
RT ;
RL Submitted (19-NOV-1990) to the EMBL/GenBank/DDBJ databases.
RL Hughes M.A., University of Newcastle Upon Tyne, Medical School, Newcastle
RL Upon Tyne, NE2 4HH, UK
XX
FH Key Location/Qualifiers
FH
FT source 1..1859
FT /organism="Trifolium repens"
FT /mol_type="mRNA"
FT /clone_lib="lambda gt10"
FT /clone="TRE361"
FT /tissue_type="leaves"
FT /db_xref="taxon:3899"
FT CDS 14..1495
FT /product="beta-glucosidase"
FT /EC_number="3.2.1.21"
FT /note="non-cyanogenic"
FT /db_xref="GOA:P26204"
FT /db_xref="InterPro:IPR001360"
FT /db_xref="InterPro:IPR013781"
FT /db_xref="UniProtKB/Swiss-Prot:P26204"
FT /protein_id="CAA40058.1"
FT /translation="MDFIVAIFALFVISSFTITSTNAVEASTLLDIGNLSRSSFPRGFI
FT FGAGSSAYQFEGAVNEGGRGPSIWDTFTHKYPEKIRDGSNADITVDQYHRYKEDVGIMK
FT DQNMDSYRFSISWPRILPKGKLSGGINHEGIKYYNNLINELLANGIQPFVTLFHWDLPQ
FT VLEDEYGGFLNSGVINDFRDYTDLCFKEFGDRVRYWSTLNEPWVFSNSGYALGTNAPGR
FT CSASNVAKPGDSGTGPYIVTHNQILAHAEAVHVYKTKYQAYQKGKIGITLVSNWLMPLD
FT DNSIPDIKAAERSLDFQFGLFMEQLTTGDYSKSMRRIVKNRLPKFSKFESSLVNGSFDF
FT IGINYYSSSYISNAPSHGNAKPSYSTNPMTNISFEKHGIPLGPRAASIWIYVYPYMFIQ
FT EDFEIFCYILKINITILQFSITENGMNEFNDATLPVEEALLNTYRIDYYYRHLYYIRSA
FT IRAGSNVKGFYAWSFLDCNEWFAGFTVRFGLNFVD"
FT mRNA 1..1859
FT /experiment="experimental evidence, no additional details
FT recorded"
XX
SQ Sequence 1859 BP; 609 A; 314 C; 355 G; 581 T; 0 other;
aaacaaacca aatatggatt ttattgtagc catatttgct ctgtttgtta ttagctcatt 60
cacaattact tccacaaatg cagttgaagc ttctactctt cttgacatag gtaacctgag 120
tcggagcagt tttcctcgtg gcttcatctt tggtgctgga tcttcagcat accaatttga 180
aggtgcagta aacgaaggcg gtagaggacc aagtatttgg gataccttca cccataaata 240
tccagaaaaa ataagggatg gaagcaatgc agacatcacg gttgaccaat atcaccgcta 300
caaggaagat gttgggatta tgaaggatca aaatatggat tcgtatagat tctcaatctc 360
ttggccaaga atactcccaa agggaaagtt gagcggaggc ataaatcacg aaggaatcaa 420
atattacaac aaccttatca acgaactatt ggctaacggt atacaaccat ttgtaactct 480
ttttcattgg gatcttcccc aagtcttaga agatgagtat ggtggtttct taaactccgg 540
tgtaataaat gattttcgag actatacgga tctttgcttc aaggaatttg gagatagagt 600
gaggtattgg agtactctaa atgagccatg ggtgtttagc aattctggat atgcactagg 660
aacaaatgca ccaggtcgat gttcggcctc caacgtggcc aagcctggtg attctggaac 720
aggaccttat atagttacac acaatcaaat tcttgctcat gcagaagctg tacatgtgta 780
taagactaaa taccaggcat atcaaaaggg aaagataggc ataacgttgg tatctaactg 840
gttaatgcca cttgatgata atagcatacc agatataaag gctgccgaga gatcacttga 900
cttccaattt ggattgttta tggaacaatt aacaacagga gattattcta agagcatgcg 960
gcgtatagtt aaaaaccgat tacctaagtt ctcaaaattc gaatcaagcc tagtgaatgg 1020
ttcatttgat tttattggta taaactatta ctcttctagt tatattagca atgccccttc 1080
acatggcaat gccaaaccca gttactcaac aaatcctatg accaatattt catttgaaaa 1140
acatgggata cccttaggtc caagggctgc ttcaatttgg atatatgttt atccatatat 1200
gtttatccaa gaggacttcg agatcttttg ttacatatta aaaataaata taacaatcct 1260
gcaattttca atcactgaaa atggtatgaa tgaattcaac gatgcaacac ttccagtaga 1320
agaagctctt ttgaatactt acagaattga ttactattac cgtcacttat actacattcg 1380
ttctgcaatc agggctggct caaatgtgaa gggtttttac gcatggtcat ttttggactg 1440
taatgaatgg tttgcaggct ttactgttcg ttttggatta aactttgtag attagaaaga 1500
tggattaaaa aggtacccta agctttctgc ccaatggtac aagaactttc tcaaaagaaa 1560
ctagctagta ttattaaaag aactttgtag tagattacag tacatcgttt gaagttgagt 1620
tggtgcacct aattaaataa aagaggttac tcttaacata tttttaggcc attcgttgtg 1680
aagttgttag gctgttattt ctattatact atgttgtagt aataagtgca ttgttgtacc 1740
agaagctatg atcataacta taggttgatc cttcatgtat cagtttgatg ttgagaatac 1800
tttgaattaa aagtcttttt ttattttttt aaaaaaaaaa aaaaaaaaaa aaaaaaaaa 1859
//
"""
    # Smoke-test the scanners against the embedded example records above.
    print "GenBank CDS Iteration"
    print "====================="
    # CDS-feature iteration over a single record...
    g = GenBankScanner()
    for record in g.parse_cds_features(StringIO(gbk_example)):
        print record
    # ...with custom qualifier-to-identifier mapping...
    g = GenBankScanner()
    for record in g.parse_cds_features(StringIO(gbk_example2),
                                       tags2id=('gene','locus_tag','product')):
        print record
    # ...and over two concatenated records in one stream.
    g = GenBankScanner()
    for record in g.parse_cds_features(StringIO(gbk_example + "\n" + gbk_example2),
                                       tags2id=('gene','locus_tag','product')):
        print record
    print
    print "GenBank Iteration"
    print "================="
    # Whole-record iteration, with and without feature parsing.
    g = GenBankScanner()
    for record in g.parse_records(StringIO(gbk_example),do_features=False):
        print record.id, record.name, record.description
        print record.seq
    g = GenBankScanner()
    for record in g.parse_records(StringIO(gbk_example),do_features=True):
        print record.id, record.name, record.description
        print record.seq
    g = GenBankScanner()
    for record in g.parse_records(StringIO(gbk_example2),do_features=False):
        print record.id, record.name, record.description
        print record.seq
    g = GenBankScanner()
    for record in g.parse_records(StringIO(gbk_example2),do_features=True):
        print record.id, record.name, record.description
        print record.seq
    print
    print "EMBL CDS Iteration"
    print "=================="
    e = EmblScanner()
    for record in e.parse_cds_features(StringIO(embl_example)):
        print record
    print
    print "EMBL Iteration"
    print "=============="
    e = EmblScanner()
    for record in e.parse_records(StringIO(embl_example),do_features=True):
        print record.id, record.name, record.description
        print record.seq
| BlogomaticProject/Blogomatic | opt/blog-o-matic/usr/lib/python/Bio/GenBank/Scanner.py | Python | gpl-2.0 | 79,599 |
from django.conf.urls import url
from . import views
# URL routes for the Kegbot HTTP API.  Order matters: Django uses the first
# pattern that matches, and the final empty pattern is a catch-all.
urlpatterns = [
    # General endpoints
    url(r"^status/?$", views.get_status),
    url(r"^version/?$", views.get_version),
    # API authorization
    url(r"^login/?$", views.login),
    url(r"^logout/?$", views.logout),
    url(r"^get-api-key/?$", views.get_api_key),
    url(r"^devices/link/?$", views.link_device_new),
    url(r"^devices/link/status/(?P<code>[^/]+)?$", views.link_device_status),
    # Kegbot objects
    url(r"^auth-tokens/(?P<auth_device>[\w\.]+)/(?P<token_value>\w+)/?$", views.get_auth_token),
    url(
        r"^auth-tokens/(?P<auth_device>[\w\.]+)/(?P<token_value>\w+)/assign/?$",
        views.assign_auth_token,
    ),
    url(r"^controllers/?$", views.all_controllers),
    url(r"^controllers/(?P<controller_id>\d+)/?$", views.get_controller),
    url(r"^drinks/?$", views.all_drinks),
    url(r"^drinks/last/?$", views.last_drink),
    url(r"^drinks/(?P<drink_id>\d+)/?$", views.get_drink),
    url(r"^drinks/(?P<drink_id>\d+)/add-photo/?$", views.add_drink_photo),
    url(r"^cancel-drink/?$", views.cancel_drink),
    url(r"^events/?$", views.all_events),
    url(r"^flow-meters/?$", views.all_flow_meters),
    url(r"^flow-meters/(?P<flow_meter_id>\d+)/?$", views.get_flow_meter),
    url(r"^flow-toggles/?$", views.all_flow_toggles),
    url(r"^flow-toggles/(?P<flow_toggle_id>\d+)/?$", views.get_flow_toggle),
    url(r"^kegs/?$", views.all_kegs),
    url(r"^kegs/(?P<keg_id>\d+)/?$", views.get_keg),
    url(r"^kegs/(?P<keg_id>\d+)/end/?$", views.end_keg),
    url(r"^kegs/(?P<keg_id>\d+)/drinks/?$", views.get_keg_drinks),
    url(r"^kegs/(?P<keg_id>\d+)/events/?$", views.get_keg_events),
    url(r"^kegs/(?P<keg_id>\d+)/sessions/?$", views.get_keg_sessions),
    url(r"^kegs/(?P<keg_id>\d+)/stats/?$", views.get_keg_stats),
    url(r"^keg-sizes/?$", views.get_keg_sizes),
    url(r"^pictures/?$", views.pictures),
    url(r"^sessions/?$", views.all_sessions),
    url(r"^sessions/current/?$", views.current_session),
    url(r"^sessions/(?P<session_id>\d+)/?$", views.get_session),
    url(r"^sessions/(?P<session_id>\d+)/stats/?$", views.get_session_stats),
    url(r"^taps/?$", views.all_taps),
    url(r"^taps/(?P<meter_name_or_id>[\w\.-]+)/activate/?$", views.tap_activate),
    url(r"^taps/(?P<meter_name_or_id>[\w\.-]+)/calibrate/?$", views.tap_calibrate),
    url(r"^taps/(?P<meter_name_or_id>[\w\.-]+)/spill/?$", views.tap_spill),
    url(r"^taps/(?P<meter_name_or_id>[\w\.-]+)/connect-meter/?$", views.tap_connect_meter),
    url(r"^taps/(?P<meter_name_or_id>[\w\.-]+)/disconnect-meter/?$", views.tap_disconnect_meter),
    url(r"^taps/(?P<meter_name_or_id>[\w\.-]+)/connect-toggle/?$", views.tap_connect_toggle),
    url(r"^taps/(?P<meter_name_or_id>[\w\.-]+)/disconnect-toggle/?$", views.tap_disconnect_toggle),
    url(r"^taps/(?P<meter_name_or_id>[\w\.-]+)/?$", views.tap_detail),
    url(r"^thermo-sensors/?$", views.all_thermo_sensors),
    url(r"^thermo-sensors/(?P<sensor_name>[^/]+)/?$", views.get_thermo_sensor),
    url(r"^thermo-sensors/(?P<sensor_name>[^/]+)/logs/?$", views.get_thermo_sensor_logs),
    url(r"^taps/(?P<meter_name_or_id>[\w\.-]+)/connect-thermo/?$", views.tap_connect_thermo),
    url(r"^taps/(?P<meter_name_or_id>[\w\.-]+)/disconnect-thermo/?$", views.tap_disconnect_thermo),
    url(r"^users/?$", views.user_list),
    # BUGFIX: the username class previously used "+-_", which inside [...] is
    # an accidental ASCII range '+' (0x2B) .. '_' (0x5F) also matching '/',
    # ':', '?', '[', etc.  The hyphen is now last, so it is literal: usernames
    # may contain word characters plus '@', '.', '+', '_' and '-'.
    url(r"^users/(?P<username>[\w@.+_-]+)/drinks/?$", views.get_user_drinks),
    url(r"^users/(?P<username>[\w@.+_-]+)/events/?$", views.get_user_events),
    url(r"^users/(?P<username>[\w@.+_-]+)/stats/?$", views.get_user_stats),
    url(r"^users/(?P<username>[\w@.+_-]+)/photo/?$", views.user_photo),
    url(r"^users/(?P<username>[\w@.+_-]+)/?$", views.get_user),
    url(r"^new-user/?$", views.register),
    url(r"^stats/?$", views.get_system_stats),
    # Deprecated endpoints
    url(r"^sound-events/?$", views.all_sound_events),
    # Catch-all
    url(r"", views.default_handler),
]
| Kegbot/kegbot-server | pykeg/web/api/urls.py | Python | gpl-2.0 | 3,947 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# --- BEGIN_HEADER ---
#
# mv - [insert a few words of module description on this line]
# Copyright (C) 2003-2009 The MiG Project lead by Brian Vinter
#
# This file is part of MiG.
#
# MiG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# MiG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# -- END_HEADER ---
#
import cgi
import cgitb
# Render rich tracebacks as HTML so CGI failures are debuggable in-browser.
cgitb.enable()
from shared.functionality.mv import main
from shared.cgiscriptstub import run_cgi_script
# Thin CGI wrapper: all real work happens in shared.functionality.mv.main.
run_cgi_script(main)
| heromod/migrid | mig/cgi-bin/mv.py | Python | gpl-2.0 | 1,092 |
# -*- coding: utf-8 -*-
"""
The moo Editor
~~~~~~~~~~~~~~
:copyright: 2005-2008 by The PIDA Project
:license: GPL 2 or later (see README/COPYING/LICENSE)
"""
# Standard Libs
import os
import gtk
import gobject
import re
from gtk import gdk
# UGLY UGLY workarround as suggested by muntyan_
# this will be changed someday when therue will be a correct
# api for this.
from pida.core.environment import home, workspace_name
SYS_DATA = os.environ.get("XDG_DATA_DIRS",
"/usr/share:/usr/local/share")
MOO_DATA_DIRS=os.pathsep.join((
str(home()/'moo'),
os.path.join(os.path.dirname(__file__), "shared"),
os.pathsep.join([os.path.join(x, "moo")
for x in SYS_DATA.split(os.pathsep)]),
"/usr/share/moo",
"/usr/local/share/moo",
"/usr/share/pida",
"/usr/local/share/pida",
))
os.environ['MOO_DATA_DIRS'] = MOO_DATA_DIRS
def _load_pix(fn):
    """Return a GdkPixbuf for *fn*, loaded from this module's pixmaps/ dir."""
    #XXX: not zip save
    base = os.path.dirname(__file__)
    return gtk.gdk.pixbuf_new_from_file(os.path.join(base, 'pixmaps', fn))
_PIXMAPS = {
'bookmark': _load_pix('bookmark.png'),
'debugger_breakpoint': _load_pix('breakpoint.png'),
'debugger_position': _load_pix('breakpoint.png'),
}
# Moo Imports
try:
import moo
except ImportError:
moo = None
# PIDA Imports
from pida.ui.views import PidaView
from pida.core.editors import EditorService, EditorActionsConfig
from pida.core.actions import TYPE_NORMAL, TYPE_TOGGLE
from pida.core.events import EventsConfig
from pida.core.document import DocumentException
from pida.core.options import OptionsConfig, choices
from pida.ui.completer import (PidaCompleter, PidaCompleterWindow,
SuggestionsList)
from pygtkhelpers.gthreads import GeneratorTask, gcall, AsyncTask
from pida.core.languages import Suggestion
from pida.ui.languages import PidaDocWindow
# locale
from pida.core.locale import Locale
locale = Locale('mooedit')
_ = locale.gettext
from .langs import build_mapping, MAPPINGS
class MooeditMain(PidaView):
"""Main Mooedit View.
This View contains a gtk.Notebook for displaying buffers.
"""
def create_ui(self):
self._embed = MooeditEmbed(self)
self.add_main_widget(self._embed)
#AA Needs implementing
#EA really? I didn't see it called anytime.
# Did it with relay to the service for now.
def grab_input_focus(self):
print "\n\ngrab_input_focus\n\n"
self.svc.grab_focus()
pass
class MooeditOptionsConfig(OptionsConfig):
    """User-configurable options for the mooedit service."""

    def create_options(self):
        """Register the service options (tab titles and autocompletion)."""
        # How buffer titles are rendered in the notebook tabs.
        self.create_option(
            'display_type',
            _('Display notebook title'),
            choices({'filename':_('Filename'), 'fullpath':_('Full path'),
                'project_or_filename':_('Project relative path or filename')}),
            'project_or_filename',
            _('Text to display in the Notebook'),
        )
        # Master switch for the autocompleter.
        self.create_option(
            'autocomplete',
            _('Enable Autocompleter'),
            bool,
            True,
            _('Shall the Autocompleter be active'),
        )
        # Minimum number of typed characters before the popup opens.
        self.create_option(
            'auto_chars',
            _('Autocompleter chars'),
            int,
            3,
            _('Open Autocompleter after howmany characters'),
        )
        # Whether typing an attribute accessor (e.g. '.') opens the popup.
        self.create_option(
            'auto_attr',
            _('On attribute'),
            bool,
            True,
            _('Open Autocompleter after attribute accessor'),
        )
class MooeditPreferences(PidaView):
    """Mooedit Preferences View.

    Embeds the native Mooedit preferences page and adds
    Apply/Cancel/OK buttons around it.
    """

    label_text = _('Mooedit Preferences')
    icon_name = 'package_utilities'

    def create_ui(self):
        """Build the preferences page plus its button box."""
        # The preferences widget is supplied by the embedded moo editor.
        prefs = self.svc._editor_instance.prefs_page()
        prefs.emit('init')
        prefs.show()
        vbox = gtk.VBox()
        vbox.pack_start(prefs)
        self._prefs = prefs
        bb = gtk.HButtonBox()
        bb.set_spacing(6)
        bb.set_layout(gtk.BUTTONBOX_END)
        self._apply_but = gtk.Button(stock=gtk.STOCK_APPLY)
        self._apply_but.connect('clicked', self.cb_apply_button_clicked)
        self._ok_but = gtk.Button(stock=gtk.STOCK_OK)
        self._ok_but.connect('clicked', self.cb_ok_button_clicked)
        self._cancel_but = gtk.Button(stock=gtk.STOCK_CANCEL)
        self._cancel_but.connect('clicked', self.cb_cancel_button_clicked)
        bb.pack_start(self._apply_but)
        bb.pack_start(self._cancel_but)
        bb.pack_start(self._ok_but)
        bb.show_all()
        vbox.pack_start(bb)
        vbox.show()
        self.add_main_widget(vbox)

    def cb_ok_button_clicked(self, button):
        # OK = apply settings, then close the view (via the toggle action).
        self._apply()
        self.svc.show_preferences(self.svc.get_action('mooedit_preferences').set_active(False))

    def cb_apply_button_clicked(self, button):
        self._apply()

    def cb_cancel_button_clicked(self, button):
        # Close without applying.
        self.svc.show_preferences(self.svc.get_action('mooedit_preferences').set_active(False))

    def _apply(self):
        """Commit the moo preferences and persist the editor state."""
        self._prefs.emit('apply')
        self.svc.save_moo_state()
        # Older editor instances may not implement apply_prefs.
        try:
            self.svc._editor_instance.apply_prefs()
        except AttributeError:
            pass

    def can_be_closed(self):
        # Keep the toggle action in sync when the view is closed externally.
        self.svc.get_action('mooedit_preferences').set_active(False)
class MooeditEmbed(gtk.Notebook):
    """Mooedit Embed

    This is the actual Notebook that holds our buffers.
    """

    def __init__(self, mooedit):
        gtk.Notebook.__init__(self)
        self.set_scrollable(True)
        self.popup_enable()
        # Back-reference to the owning MooeditMain view.
        self._mooedit = mooedit
        self.show_all()

    def _create_tab(self, document):
        """Build the tab widget (title label + close button) for *document*."""
        editor = document.editor
        hb = gtk.HBox(spacing=2)
        # The label is stashed on the editor so it can be updated later.
        editor._label = gtk.Label()
        ns = self._mooedit.svc._get_document_title(document)
        editor._label.set_markup(ns)
        editor._label._markup = ns
        # Small stock close button to the right of the title.
        b = gtk.Button()
        b.set_border_width(0)
        b.connect("clicked", self._close_cb, document)
        b.set_relief(gtk.RELIEF_NONE)
        b.set_size_request(20, 20)
        img = gtk.Image()
        img.set_from_stock(gtk.STOCK_CLOSE, gtk.ICON_SIZE_MENU)
        b.add(img)
        # Alignments above/below keep the button vertically centered.
        vb = gtk.VBox()
        vb.pack_start(gtk.Alignment())
        vb.pack_start(b, expand=False)
        vb.pack_start(gtk.Alignment())
        hb.pack_start(editor._label)
        hb.pack_start(vb, expand=False)
        hb.show_all()
        return hb

    def _close_cb(self, btn, document):
        # Route the close through the buffer service so state stays consistent.
        self._mooedit.svc.boss.get_service('buffer').cmd('close_file', document=document)
class MooeditView(gtk.ScrolledWindow):
    """Mooedit View

    A gtk.ScrolledWindow containing the editor instance we get from mooedit,
    plus the bookkeeping for line markers (bookmarks, breakpoints, ...).
    """

    def __init__(self, document):
        gtk.ScrolledWindow.__init__(self)
        self.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
        self.set_editor(document.editor)
        self.document = document
        # Markers currently displayed in this view.
        self.line_markers = []
        self.show_all()

    def set_editor(self, editor):
        """Embed the moo editor widget and subscribe to buffer changes."""
        self.editor = editor
        self.editor.props.buffer.connect('changed', self.on_changed)
        self.add(self.editor)
        self.editor.show()

    def on_changed(self, textbuffer):
        #FIXME: this doesn't work, nor does connect_after work correctly.
        # this is always one changed event to late. as the markers line position
        # is updated after this event :(
        self.editor.props.buffer.do_changed(textbuffer)
        # Resynchronise our marker objects with the moo marker positions.
        for lm in self.line_markers:
            lm.update(lm._moo_marker.get_line()+1)
        return True

    def close(self):
        """Remove all line marks from the buffer and detach the inputter."""
        buf = self.editor.get_buffer()
        for lm in self.line_markers:
            if hasattr(lm, '_moo_marker'):
                lm._moo_marker.props.visible = False
                buf.delete_line_mark(lm._moo_marker)
                del lm._moo_marker
        self.editor.inputter.disconnect()

    def update_marker(self, marker):
        """Create, move or delete the moo line mark backing *marker*.

        A marker with line == -1 is treated as a deletion request.
        """
        if marker.line == -1:
            # should be deleted
            if marker in self.line_markers and hasattr(marker, '_moo_marker'):
                marker._moo_marker.props.visible = False
                self.editor.props.buffer.delete_line_mark(marker._moo_marker)
                self.line_markers.remove(marker)
            return
        if not hasattr(marker, '_moo_marker'):
            lm = moo.edit.LineMark()
            # BUGFIX: the fallback used to be the *string* 'bookmark', which
            # is not a valid pixbuf; fall back to the bookmark pixbuf instead.
            lm.set_pixbuf(_PIXMAPS.get(marker.type, _PIXMAPS['bookmark']))
            #lm.set_markup('BOO')
            lm.props.visible = True
            marker._moo_marker = lm
        buf = self.editor.props.buffer
        # Clamp the target line into the buffer's valid range.
        if marker not in self.line_markers:
            self.line_markers.append(marker)
            buf.add_line_mark(marker._moo_marker,
                min(max(0, int(marker.line)-1), buf.get_line_count()))
            marker._moo_marker.props.visible = True
        else:
            buf.move_line_mark(marker._moo_marker,
                min(max(0, int(marker.line)-1), buf.get_line_count()))
class MooeditActionsConfig(EditorActionsConfig):
    """Mooedit Actions Config

    This defines some menu items for the edit menu, plus the
    (accelerator-less) key actions used by the completer popup.
    """

    def create_actions(self):
        EditorActionsConfig.create_actions(self)
        # --- File actions ---
        self.create_action(
            'mooedit_save_as',
            TYPE_NORMAL,
            _('Save _as'),
            _('Save file as'),
            gtk.STOCK_SAVE_AS,
            self.on_save_as,
            '<Shift><Control>S'
        )
        self.create_action(
            'mooedit_reload',
            TYPE_NORMAL,
            _('Reload'),
            _('Reload file content'),
            gtk.STOCK_REFRESH,
            self.on_reload,
            ''
        )
        self.create_action(
            'mooedit_preferences',
            TYPE_TOGGLE,
            _('Edit Mooedit Preferences'),
            _('Show the editors preferences dialog'),
            gtk.STOCK_PREFERENCES,
            self.on_project_preferences,
        )
        # --- Search / navigation actions ---
        self.create_action(
            'mooedit_find',
            TYPE_NORMAL,
            _('_Find in buffer'),
            _('Find'),
            gtk.STOCK_FIND,
            self.on_find,
            '<Control>F'
        )
        self.create_action(
            'mooedit_find_next',
            TYPE_NORMAL,
            _('Find _next in buffer'),
            '',
            gtk.STOCK_GO_FORWARD,
            self.on_find_next,
            'F3',
        )
        self.create_action(
            'mooedit_find_prev',
            TYPE_NORMAL,
            _('Find previous in buffer'),
            '',
            gtk.STOCK_GO_BACK,
            self.on_find_prev,
            '<Shift>F3',
        )
        self.create_action(
            'mooedit_replace',
            TYPE_NORMAL,
            _('Find and _replace'),
            _('Find & replace'),
            gtk.STOCK_FIND_AND_REPLACE,
            self.on_replace,
            '<Control>R',
        )
        self.create_action(
            'mooedit_find_word_next',
            TYPE_NORMAL,
            _('Find current word down'),
            '',
            gtk.STOCK_GO_BACK,
            self.on_find_word_next,
            'F4',
        )
        self.create_action(
            'mooedit_find_word_prev',
            TYPE_NORMAL,
            _('Find _current word up'),
            '',
            gtk.STOCK_GO_FORWARD,
            self.on_find_word_prev,
            '<Shift>F4',
        )
        self.create_action(
            'mooedit_goto',
            TYPE_NORMAL,
            _('_Goto line'),
            _('Goto line'),
            gtk.STOCK_GO_DOWN,
            self.on_goto,
            '<Control>G',
        )
        self.create_action(
            'mooedit_last_edit',
            TYPE_NORMAL,
            _('Goto _last edit place'),
            _('Goto last edit place'),
            gtk.STOCK_JUMP_TO,
            self.on_last_edit,
            '<Control>q',
        )
        # --- Comment actions ---
        self.create_action(
            'mooedit_comment',
            TYPE_NORMAL,
            _('Comment'),
            _('Comment current selection'),
            '',
            self.on_comment,
            '',
        )
        self.create_action(
            'mooedit_uncomment',
            TYPE_NORMAL,
            _('Uncomment'),
            _('Uncomment current selection'),
            '',
            self.on_uncomment,
            '',
        )
        # --- Completer key actions ---
        # These have no callback and their accelerators are disconnected:
        # the text widget handles the shortcuts itself, and the service is
        # notified of keybinding changes via opt.add_notify below.
        act = self.create_action(
            'mooedit_completer_close',
            TYPE_NORMAL,
            _('Close completer'),
            _('Close completer'),
            '',
            None,
            'Escape',
        )
        act.disconnect_accelerator()
        act.opt.add_notify(self.on_completer_change)
        act = self.create_action(
            'mooedit_complete_toggle',
            TYPE_NORMAL,
            _('Toggels the autocompleter'),
            _('Toggels the autocompleter'),
            '',
            None,
            '<Control>space',
        )
        act.disconnect_accelerator()
        act.opt.add_notify(self.on_completer_change)
        act = self.create_action(
            'mooedit_completer_next',
            TYPE_NORMAL,
            _('Next suggestion'),
            _('Next suggestion'),
            '',
            None,
            'Down',
        )
        act.disconnect_accelerator()
        act.opt.add_notify(self.on_completer_change)
        act = self.create_action(
            'mooedit_completer_prev',
            TYPE_NORMAL,
            _('Previous suggestion'),
            _('Previous suggestion'),
            '',
            None,
            'Up',
        )
        act.disconnect_accelerator()
        act.opt.add_notify(self.on_completer_change)
        act = self.create_action(
            'mooedit_completer_accept',
            TYPE_NORMAL,
            _('Accept suggestion'),
            _('Accept suggestion'),
            '',
            None,
            'Tab',
        )
        act.disconnect_accelerator()
        act.opt.add_notify(self.on_completer_change)

    def on_completer_change(self, *args):
        # A completer keybinding option changed; refresh the cached keyvals.
        self.svc._update_keyvals()
        return False

    def on_project_preferences(self, action):
        self.svc.show_preferences(action.get_active())

    def on_save_as(self, action):
        # open in current filebrowser path
        moo.utils.prefs_new_key_string('Editor/last_dir')
        moo.utils.prefs_set_string('Editor/last_dir',
            self.svc.boss.cmd('filemanager', 'get_browsed_path'))
        self.svc._current.editor.save_as()

    def on_reload(self, action):
        self.svc.reload_document(self.svc._current.document)

    def on_find(self, action):
        self.svc._current.editor.emit('find-interactive')

    def on_find_next(self, action):
        self.svc._current.editor.emit('find-next-interactive')

    def on_find_prev(self, action):
        self.svc._current.editor.emit('find-prev-interactive')

    def on_replace(self, action):
        self.svc._current.editor.emit('replace-interactive')

    def on_find_word_next(self, action):
        self.svc._current.editor.emit('find-word-at-cursor', True)

    def on_find_word_prev(self, action):
        self.svc._current.editor.emit('find-word-at-cursor', False)

    def on_goto(self, action):
        # Only broadcast a document-goto event if the line actually changed.
        cl = self.svc.get_current_line()
        self.svc._current.editor.emit('goto-line-interactive')
        nl = self.svc.get_current_line()
        if cl != nl:
            self.svc.boss.get_service('buffer').emit('document-goto',
                document=self.svc._current, line=nl)

    def on_last_edit(self, action):
        self.svc.boss.editor.goto_last_edit()

    def on_comment(self, action):
        self.svc._current.editor.emit('comment')

    def on_uncomment(self, action):
        self.svc._current.editor.emit('uncomment')
class PidaMooInput(object):
    """
    Handles all customizations in the input event handling of the editor.
    It handles autocompletion and snippets for example.

    Two text marks bracket the completer's working area in the buffer:
    completer_pos marks where the word started, completer_start where the
    user-typed part ends, completer_end where the suggested tail ends.
    """
    def __init__(self, svc, editor, document):
        # svc: the mooedit service; editor: a moo edit widget;
        # document: the PIDA document shown in that editor
        self.svc = svc
        self.editor = editor
        self.document = document
        # popup window hosting the completion list widget
        self.completer_window = PidaCompleterWindow(type_=gtk.WINDOW_POPUP,
            show_input=False)
        self.completer = self.completer_window.widget
        self.completer.show_all()
        self.completer.connect("user-accept", self.accept)
        self.completer.connect("suggestion-selected", self.suggestion_selected)
        self.editor.connect("cursor-moved", self.on_cursor_moved)
        self.model = SuggestionsList()
        self.completer.set_model(self.model)
        #self.completer.hide()
        #self.completer_visible = False
        self.completer_added = False
        self.completer_pos = 0
        self.completer_pos_user = 0
        # two markers are used to mark the text inserted by the completer
        self.completer_start = None
        self.completer_end = None
        # when True the popup is shown automatically once results arrive
        self.show_auto = False
        # background GeneratorTask feeding the completion list
        self._task = None
        # db stuff for the autocompleter: words of 3-100 chars harvested
        # from the buffer, cached per line
        self.list_matcher = re.compile("""\w{3,100}""")
        self.list_cache = {}
        self.list_all = set()
        editor.connect("key-press-event", self.on_keypress)
        editor.connect("focus-out-event", self.on_do_hide)
        editor.get_toplevel().connect("focus-out-event", self.on_do_hide)
        #editor.connect_after("key-press-event", self.on_after_keypress)
    def disconnect(self):
        # detach every signal handler installed in __init__ so the editor
        # can be garbage collected
        self.editor.disconnect_by_func(self.on_keypress)
        self.editor.disconnect_by_func(self.on_do_hide)
        #try:
        #    self.editor.get_toplevel().disconnect_by_func(self.on_do_hide)
        #except ValueError: pass
        self.completer.disconnect_by_func(self.accept)
        self.completer.disconnect_by_func(self.suggestion_selected)
        self.editor.disconnect_by_func(self.on_cursor_moved)
    #def on_
    def update_completer_and_add(self, cmpl, start, ignore=()):
        """
        Returns items for completion widgets
        """
        # we run the language completer first and the we add our own results
        # to the completer list
        if cmpl:
            for i in cmpl.run(self.svc.get_current_word(),
                              unicode(self.editor.get_text()), start):
                try:
                    if i not in ignore:
                        yield i
                except Exception, e:
                    self.svc.log.exception(e)
        #self.update_completer()
        # NOTE(review): `y` is never used -- leftover from a removed counter
        y = 0
        # iterate a copy: list_all may be updated concurrently by
        # update_completer while this generator runs
        clst = self.list_all.copy()
        for x in clst:
            if x not in ignore:
                yield x
    def get_completer_visible(self):
        # visibility of the popup's underlying gdk window
        if self.completer_window and self.completer_window.window and \
            self.completer_window.window.is_visible():
            return True
        return False
    def set_completer_visible(self, value):
        # setter is a no-op: visibility is controlled via show()/hide();
        # assignments like `self.completer_visible = False` are ignored
        pass
    completer_visible = property(get_completer_visible, set_completer_visible)
    def on_do_hide(self, *args, **kwargs):
        # focus-out handler: always close the popup
        self.hide()
    def toggle_popup(self):
        """Show the completion popup if hidden, hide it otherwise."""
        if self.completer_visible:
            self.hide()
        else:
            self.show()
    def hide(self):
        """Close the popup and remove any suggested text and marks."""
        if not self.completer_visible:
            return
        self.completer_window.hide()
        #self.completer.hide_all()
        self.completer_visible = False
        self.show_auto = False
        # delete markers
        buf = self.editor.get_buffer()
        self._delete_suggested()
        if self.completer_start:
            buf.delete_mark(self.completer_start)
            self.completer_start = None
        if self.completer_end:
            buf.delete_mark(self.completer_end)
            self.completer_end = None
    def _get_start(self, i):
        """Move iter *i* backwards to the first character of the word."""
        info = self.svc.boss.get_service('language').get_info(self.document)
        while i.get_char() in info.word:
            if not i.backward_char():
                break
        else:
            # loop ended because we stepped onto a non-word char: step back
            # onto the word's first character
            i.forward_char()
        return i
    def show(self, visible=True, show_auto=True):
        """Set up marks, position the popup at the cursor and start the
        background task that fills the suggestion list."""
        #self.completer_pos = self.completer_pos_user = \
        #        self.editor.props.buffer.props.cursor_position
        cmpl = self.svc.boss.get_service('language').get_completer(self.document)
        info = self.svc.boss.get_service('language').get_info(self.document)
        if info:
            self.completer.ignore_case = not info.case_sensitive
        else:
            self.completer.ignore_case = False
        buf = self.editor.get_buffer()
        cpos = buf.props.cursor_position
        # we may already in a word. so we have to find the start as base
        i = buf.get_iter_at_offset(cpos)
        i.backward_char()
        self._get_start(i)
        start = i.get_offset()
        self.completer_pos = buf.create_mark('completer_pos',
                                             buf.get_iter_at_offset(start),
                                             left_gravity=True)
        self.completer_start = buf.create_mark('completer_start',
                                               buf.get_iter_at_offset(cpos),
                                               left_gravity=True)
        self.completer_end = buf.create_mark('completer_end',
                                             buf.get_iter_at_offset(cpos))
        # translate the cursor location into absolute screen coordinates
        rec = self.editor.get_iter_location(
                self.editor.props.buffer.get_iter_at_offset(
                    buf.props.cursor_position))
        pos = self.editor.buffer_to_window_coords(gtk.TEXT_WINDOW_WIDGET,
                                                  rec.x, rec.y + rec.height)
        #tw = self.editor.window.get_toplevel()
        #abspos = tw.get_position()
        abspos = self.editor.window.get_origin()
        rpos = (pos[0]+abspos[0], pos[1]+abspos[1])
        #self.completer_window.show_all()
        #self.completer_window.move(rpos[0],rpos[1])
        self.completer.place(rpos[0],rpos[1] - rec.height, rec.height)
        self.completer_window.set_transient_for(self.svc.boss.window)
        #self.completer_window.window.set_accept_focus(False)
        #self.completer_window.window.set_focus_on_map(False)
        #self.completer_window.window.set_skip_taskbar_hint(True)
        #self.completer_window.window.set_skip_pager_hint(True)
        self.editor.grab_focus()
        #if not self.completer_added:
        #self.editor.add_child_in_window(self.completer,
        #                               gtk.TEXT_WINDOW_TOP,
        #                               pos[0],
        #                               pos[1])
        #
        #    self.completer_window.show_all()
        #    #self.completer_window.move(pos[0], pos[1])
        #    self.completer_added = True
        #else:
        #    self.completer_window.show_all()
        #self.completer_window.move(pos[0], pos[1])
        #self.editor.move_child(self.completer, pos[0], pos[1])
        #self.boss.get_service('language').
        self.model.clear()
        # NOTE(review): `pos` here is the window-coords *tuple* computed
        # above, so `start != pos` is always True; this was probably meant
        # to compare against `cpos` -- confirm before changing
        if start != pos:
            # seed the filter with the word fragment already typed
            self.completer.filter = buf.get_text(
                buf.get_iter_at_offset(start),
                buf.get_iter_at_offset(cpos))
        else:
            self.completer.filter = ""
        self._task = GeneratorTask(self.update_completer_and_add,
                                   self.add_str)
        self._task.start(cmpl, start, ignore=(self.svc.get_current_word(),))
        self.show_auto = show_auto
        if visible:
            self.completer_window.show()
            self.completer.show_all()
        #self.completer_visible = True
    def accept(self, widget, suggestion):
        """User accepted *suggestion*: replace the typed part and close."""
        self._delete_typed()
        self._insert_typed(suggestion)
        self.hide()
    def _get_complete(self):
        # full completion text: typed part plus suggested tail
        buf = self.editor.get_buffer()
        i1 = buf.get_iter_at_mark(self.completer_pos)
        i2 = buf.get_iter_at_mark(self.completer_end)
        return buf.get_text(i1, i2)
    def _get_typed(self):
        # only the part the user actually typed
        buf = self.editor.get_buffer()
        i1 = buf.get_iter_at_mark(self.completer_pos)
        i2 = buf.get_iter_at_mark(self.completer_start)
        return buf.get_text(i1, i2)
    def _delete_typed(self):
        # remove the user-typed word fragment from the buffer
        buf = self.editor.props.buffer
        i1 = buf.get_iter_at_mark(self.completer_pos)
        i2 = buf.get_iter_at_mark(self.completer_start)
        buf.delete(i1, i2)
    def _insert_typed(self, text):
        # insert *text* at the word start and re-seat both marks around it
        buf = self.editor.props.buffer
        i1 = buf.get_iter_at_mark(self.completer_pos)
        buf.insert(i1, text)
        buf.move_mark(self.completer_start, i1)
        i1.backward_chars(len(text))
        buf.move_mark(self.completer_pos, i1)
    def _append_typed(self, char):
        # extend the typed fragment by one character
        if not char:
            return
        self._replace_typed(self._get_typed() + char)
    def _replace_typed(self, text):
        # swap the typed fragment for *text*, keeping the start mark at its end
        buf = self.editor.props.buffer
        i1 = buf.get_iter_at_mark(self.completer_pos)
        i2 = buf.get_iter_at_mark(self.completer_start)
        buf.delete(i1, i2)
        buf.insert(i1, text)
        #i1.backward_chars(len(text))
        buf.move_mark(self.completer_start, i1)
    def _get_suggested(self):
        # the auto-inserted suggestion tail (between start and end marks)
        buf = self.editor.props.buffer
        i1 = buf.get_iter_at_mark(self.completer_start)
        i2 = buf.get_iter_at_mark(self.completer_end)
        return buf.get_text(i1, i2)
    def _delete_suggested(self):
        # remove the suggestion tail; tolerate missing marks (popup closed)
        buf = self.editor.props.buffer
        if not self.completer_start or not self.completer_end:
            return
        i1 = buf.get_iter_at_mark(self.completer_start)
        i2 = buf.get_iter_at_mark(self.completer_end)
        buf.delete(i1, i2)
    def d(self):
        # debugging helper: dump cursor and mark offsets to stdout
        buf = self.editor.props.buffer
        if self.completer_start:
            print "cur", buf.props.cursor_position
            print "pos", buf.get_iter_at_mark(self.completer_pos).get_offset()
            print "start", buf.get_iter_at_mark(self.completer_start).get_offset()
            print "end", buf.get_iter_at_mark(self.completer_end).get_offset()
    def _replace_suggested(self, text, mark=True):
        # swap the suggestion tail for *text*; when mark is True, select it
        # so the next keystroke overwrites it
        buf = self.editor.props.buffer
        i1 = buf.get_iter_at_mark(self.completer_start)
        i2 = buf.get_iter_at_mark(self.completer_end)
        buf.delete(i1, i2)
        buf.insert(i1, text)
        i2 = buf.get_iter_at_mark(self.completer_end)
        if mark:
            buf.select_range(
                i2,
                i1)
    def _get_missing(self, word):
        # returns the missing part a suggestion that was already typed
        return word[len(self._get_typed()):]
        #buf.place_cursor(i1)
        #return i
    def suggestion_selected(self, widget, suggestion):
        """Preview *suggestion*: the typed prefix keeps the suggestion's
        casing, the remainder is inserted selected after the cursor."""
        pos = self.completer_pos_user #editor.props.buffer.props.cursor_position
        #buf.
        #intext = self._get_missing(suggestion)
        typed = self._get_typed()
        self._delete_typed()
        self._replace_typed(suggestion[:len(typed)])
        self._replace_suggested(suggestion[len(typed):])
        #self.editor.get_buffer().insert_at_cursor(suggestion)
        #self.completer_visible = False
    def tokenize(self, text):
        #tokenize the text into usable autocompleter chunks
        return self.list_matcher.findall(text)
    def update_completer(self, full=False):
        #update the state of simple internal completer
        self.list_all.clear()
        buf = self.editor.get_buffer()
        it = buf.get_iter_at_offset(buf.props.cursor_position)
        if buf.get_line_count() != len(self.list_cache) or full:
            # full update of cache
            lines = range(0, buf.get_line_count())
        else:
            # incremental update. we update the current line + above and below
            lines = range(max(it.get_line()-1, 0),
                          min(it.get_line()+1, buf.get_line_count()) + 1)
        for line in lines:
            its = buf.get_iter_at_line(line)
            if its.ends_line():
                # empty line: nothing to tokenize
                self.list_cache[line] = []
                continue
            ite = its.copy()
            ite.forward_to_line_end()
            ite.forward_char()
            self.list_cache[line] = self.tokenize(buf.get_text(its, ite))
        # rebuild the flat word set from the per-line cache
        for val in self.list_cache.itervalues():
            self.list_all.update(val)
    def add_str(self, line):
        """Callback of the GeneratorTask: add one suggestion to the list."""
        #print "add line", line
        if len(self.completer) > 3000:
            #emergency stop
            self.svc.log.info(
                _("Emergency stop of completer: Too many entries"))
            self._task.stop()
            return
        if isinstance(line, Suggestion):
            self.completer.add_str(line, type_=line.type_)
        else:
            self.completer.add_str(line)
        # if we are in show_auto mode, the completion window
        # is delayed until we have the first visible item.
        if not self.completer_visible and self.show_auto and \
           self.editor.get_toplevel().has_toplevel_focus() and \
           self.editor.is_focus():
            if len(self.completer.model):
                self.completer_window.show()
    def on_cursor_moved(self, widget, itr):
        # close the popup when the cursor leaves the completion range
        buf = self.editor.get_buffer()
        pos = buf.props.cursor_position
        if self.completer_visible and (
                pos < buf.get_iter_at_mark(self.completer_pos).get_offset()
             or pos > buf.get_iter_at_mark(self.completer_end).get_offset()
            ):
            # buffer is visible, but the position of the cursor is not longer
            # in the suggestion range.
            self.hide()
    def on_keypress(self, editor, event):
        """Central key handler: dispatches completer shortcuts, forwards the
        key to moo, then updates filter/suggestion state afterwards."""
        #print event
        if event.type == gdk.KEY_PRESS and self.svc.opt('autocomplete'):
            modifiers = event.get_state() & gtk.accelerator_get_default_mod_mask()
            #print event.keyval, event.state, modifiers
            #print event.keyval & modifiers
            #print int(modifiers)
            #print self.svc.key_toggle
            #print self.svc.key_close
            #print self.svc.key_next
            #print self.svc.key_prev
            #print self.svc.key_accept
            # etest: does the event match a (keyval, modifiers) pref tuple?
            def etest(pref):
                return event.keyval == pref[0] and modifiers == pref[1]
            #tab 65289
            if etest(self.svc.key_toggle):
                #self.completion.present()
                self.toggle_popup()
                return True
            # enter tab
            elif etest((gtk.keysyms.Return, 0)):
                # Return only accepts when a suggestion tail is shown
                if self.completer_visible and \
                    len(self._get_suggested()):
                    self.accept(None, self._get_complete())
                    return True
            elif etest(self.svc.key_accept):
                if self.completer_visible:
                    self.accept(None, self._get_complete())
                    return True
            # key up, key down, ?, pgup, pgdown
            elif any((etest(self.svc.key_next), etest(self.svc.key_prev),
                      etest((gtk.keysyms.Page_Up,0)),
                      etest((gtk.keysyms.Page_Down,0)))):
                #(65362, 65364, 65293, 65366, 65365):
                if self.completer_visible:
                    # route navigation keys to the completion list
                    self.completer.on_key_press_event(editor, event)
                    return True
            elif etest(self.svc.key_close): # esc
                self.hide()
            #elif event.keyval == 65056:
            #    return True
            #elif event.keyval == 65515:
            #    # show
            #    return True
            # FIXME: this should usally be done via connect_after
            # and the code later should be a extra function
            # but doesn't work as this super function returns True
            # and stops the processing of connect_after functions
            modified = self.editor.do_key_press_event(editor, event)
            #print modified, repr(event.string)
            #self.d()
            #if self.completer_start:
            #    buf = self.editor.get_buffer()
            #    buf.move_mark(self.completer_start,
            #                  buf.get_iter_at_offset(buf.props.cursor_position))
            #if modified:
            #    task = AsyncTask(work_callback=self.update_completer)
            #    task.start()
            if self.completer_visible:
                if event.keyval in (gtk.keysyms.BackSpace, gtk.keysyms.Delete): # delete
                    # once again the buffer problem
                    typed = self._get_typed()
                    if not len(typed):
                        # word fully deleted: close the popup
                        self.hide()
                    else:
                        self.completer.filter = typed
                elif len(event.string):
                    info = self.svc.boss.get_service('language').get_info(self.document)
                    if event.string not in info.word:
                        # non-word character ends the completion
                        self.hide()
                    else:
                        #print "will delete", self._get_suggested(), self._get_typed()
                        if self.completer_start:
                            buf = self.editor.get_buffer()
                            buf.move_mark(self.completer_start,
                                          buf.get_iter_at_offset(buf.props.cursor_position))
                        self.completer.filter = self._get_typed()
                        return True
            # we have to retest as the completer could just have been closed by
            # a non word character but an attrib char should open it again
            if not self.completer_visible:
                info = self.svc.boss.get_service('language').get_info(self.document)
                buf = self.editor.get_buffer()
                it = buf.get_iter_at_offset(buf.props.cursor_position)
                if self.svc.opt('auto_attr'):
                    # we have to build a small buffer, because the character
                    # typed is not in the buffer yet
                    for x in info.completer_open:
                        end = it.copy()
                        end.backward_chars(len(x))
                        rv = it.backward_search(x, gtk.TEXT_SEARCH_TEXT_ONLY, end)
                        if rv and x[-1] == event.string:
                            # attribute opener typed: show popup (deferred,
                            # not yet visible) once the main loop is idle
                            gcall(self.show, visible=False, show_auto=True)
                            break
                if self.show_auto:
                    # the completer should be shown, but the user typed a non word
                    # character so break up
                    if len(event.string) and event.string not in info.word:
                        self.show_auto = False
                    elif len(event.string):
                        #print "append typed", self._get_suggested(), self._get_typed()
                        self._delete_suggested()
                        self._append_typed(event.string)
                        self.completer.filter = self._get_typed()
                #if self.svc.opt('auto_char'):
                #    info = self.svc.boss.get_service('language').get_info(self.document)
                #    buf = self.editor.get_buffer()
                #    it = buf.get_iter_at_offset(buf.props.cursor_position)
                #    # we have to build a small buffer, because the character
                #    # typed is not in the buffer yet
                #    it2 = buf.get_iter_at_offset(max(buf.props.cursor_position-self.svc.opt('auto_char'), 0))
                #    sbuf = buf.get_text(it, it2) + event.string
                #    print sbuf
                #    for x in info.attributerefs:
                #        if sbuf.rfind(x) == len(sbuf)-1 and \
                #           sbuf[-1] == event.string:
                #            gcall(self.show)
                #            return
                    #res = it.backward_search(x, gtk.TEXT_SEARCH_TEXT_ONLY)
                    #print res
                    #print res[0].get_offset(), res[1].get_offset(), it.get_offset(), buf.props.cursor_position
                    #if res and res[1].get_offset() == it.get_offset()+1:
                    #    self.show()
                    #    break
            #self.completer.filter += event.string
            #self.completer_pos_user += len(event.string)
            if modified:
                #prio of 50 is higher then
                gobject.idle_add(self.update_completer,
                                 gobject.PRIORITY_HIGH)
            #self.update_completer()
            #    task = AsyncTask(work_callback=self.update_completer)
            return True
class MooeditEventsConfig(EventsConfig):
    """Wires foreign service events into the mooedit service."""
    def subscribe_all_foreign(self):
        """Listen for marker and doctype changes from other services."""
        self.subscribe_foreign('editor', 'marker-changed',
                               self.marker_changed)
        self.subscribe_foreign('buffer', 'document-typchanged',
                               self.doctype_changed)
    def marker_changed(self, marker):
        """Forward a marker change to the service."""
        self.svc.on_marker_changed(marker)
    def doctype_changed(self, document):
        """Sync the editor's syntax highlighting with the new doctype."""
        editor = getattr(document, 'editor', None)
        if document.doctype and editor:
            editor.set_lang(MAPPINGS.get(document.doctype.internal, None))
# Service class
class Mooedit(EditorService):
"""Moo Editor Interface for PIDA
Let's you enjoy all the GUI love from mooedit with all the superb IDE
features PIDA has to offer. Use with caution, may lead to addiction.
"""
options_config = MooeditOptionsConfig
actions_config = MooeditActionsConfig
events_config = MooeditEventsConfig
    def pre_start(self):
        """Build the moo editor instance and embed it into the PIDA window.

        Loads moo's rc/state prefs (workspace-specific rc wins if present),
        creates the editor instance and the embedding notebook, and wires
        up tab switching, drag-and-drop and focus handling.
        Returns True on success, False when setup failed (traceback printed).
        """
        # mooedit is able to open empty documents
        self._last_modified = None
        self._docwin = None
        self.features.publish('new_file')
        try:
            self.script_path = os.path.join(pida_home, 'pida_mooedit.rc')
            self._state_path = os.path.join(pida_home, 'pida_mooedit.state')
            try:
                moo.utils.prefs_load(sys_files=None, file_rc=self.script_path, file_state=self._state_path)
            except gobject.GError:
                # missing/unreadable prefs are not fatal; moo falls back to
                # its defaults
                pass
            # if a workspace specific rc file exists, load it and make it the current one
            if os.path.exists(os.path.join(pida_home, 'pida_mooedit.%s.rc' %workspace_name())):
                self.script_path = os.path.join(pida_home, 'pida_mooedit.%s.rc' %workspace_name())
                try:
                    moo.utils.prefs_load(sys_files=None, file_rc=self.script_path, file_state=None)
                except gobject.GError:
                    pass
            self._editor_instance = moo.edit.create_editor_instance()
            moo.edit.plugin_read_dirs()
            # document.unique_id -> MooeditView
            self._documents = {}
            self._current = None
            self._main = MooeditMain(self)
            self._preferences = MooeditPreferences(self)
            self._embed = self._main._embed
            self._embed.connect("switch-page", self._changed_page)
            self._embed.connect("drag_drop", self._drag_drop_cb)
            self._embed.connect("drag_motion", self._drag_motion_cb)
            self._embed.connect ("drag_data_received", self._drag_data_recv)
            self._embed.connect('focus-out-event', self.do_doc_destroy)
            self.boss.window.connect('focus-out-event', self.do_doc_destroy)
            # accept in-app notebook tab drags (info 1) and uri drops (info 2)
            self._embed.drag_dest_set(0, [
                ("GTK_NOTEBOOK_TAB", gtk.TARGET_SAME_APP, 1),
                ("text/uri-list", 0, 2)],
                gtk.gdk.ACTION_COPY | gtk.gdk.ACTION_MOVE)
            self.boss.cmd('window', 'add_view', paned='Editor', view=self._main)
            return True
        except Exception, err:
            import traceback
            traceback.print_exc()
            return False
def start(self):
# we only disable the buttons if no document is loaded
# session may already have loaded docs
if not len(self._documents):
self.update_actions(enabled=False)
self.get_action('mooedit_last_edit').set_sensitive(False)
self._update_keyvals()
self.boss.get_service('editor').emit('started')
# build a mapping table
build_mapping(moo.edit.lang_mgr_default(),
self.boss.get_service('language').doctypes)
return True
def on_marker_changed(self, marker):
# called when a marker changed. update the editor
for view in self._documents.itervalues():
# we iterate over all markers so they
if view.document.filename == marker.filename:
view.update_marker(marker)
def save_moo_state(self):
moo.utils.prefs_save(self.script_path, self._state_path)
def show_preferences(self, visible):
if visible:
self.boss.cmd('window', 'add_view', paned='Plugin',
view=self._preferences)
else:
self.boss.cmd('window', 'remove_view',
view=self._preferences)
def pre_stop(self):
views = [view for view in self._documents.values()]
rv = True
for view in views:
editor_close = view.editor.close()
if not editor_close:
rv = False
else:
self._embed.remove_page(self._embed.page_num(view))
return rv
def update_actions(self, enabled=True):
all = True
if not enabled:
all = False
self.get_action('save').set_sensitive(all)
self.get_action('mooedit_save_as').set_sensitive(all)
self.get_action('cut').set_sensitive(all)
self.get_action('copy').set_sensitive(all)
self.get_action('paste').set_sensitive(all)
if enabled and self._current and self._current.editor:
self.get_action('undo').set_sensitive(self._current.editor.can_undo())
self.get_action('redo').set_sensitive(self._current.editor.can_redo())
else:
self.get_action('undo').set_sensitive(all)
self.get_action('redo').set_sensitive(all)
self.get_action('focus_editor').set_sensitive(all)
self.get_action('mooedit_goto').set_sensitive(all)
self.get_action('mooedit_find').set_sensitive(all)
self.get_action('mooedit_find_next').set_sensitive(all)
self.get_action('mooedit_find_prev').set_sensitive(all)
self.get_action('mooedit_find_word_next').set_sensitive(all)
self.get_action('mooedit_find_word_prev').set_sensitive(all)
self.get_action('mooedit_replace').set_sensitive(all)
def _update_keyvals(self):
self.key_toggle = gtk.accelerator_parse(
self.get_keyboard_options()['mooedit_complete_toggle'].value)
self.key_close = gtk.accelerator_parse(
self.get_keyboard_options()['mooedit_completer_close'].value)
self.key_next = gtk.accelerator_parse(
self.get_keyboard_options()['mooedit_completer_next'].value)
self.key_prev = gtk.accelerator_parse(
self.get_keyboard_options()['mooedit_completer_prev'].value)
self.key_accept = gtk.accelerator_parse(
self.get_keyboard_options()['mooedit_completer_accept'].value)
def open(self, document):
"""Open a document"""
if document.unique_id not in self._documents.keys():
if self._load_file(document):
self._embed.set_current_page(-1)
if self._embed.get_n_pages() > 0:
self.update_actions()
if document.is_new:
self.get_action('save').set_sensitive(True)
else:
self.get_action('save').set_sensitive(False)
else:
#EA: the file was already open. we switch to it.
self._embed.set_current_page(self._embed.page_num(self._documents[document.unique_id]))
self.update_actions()
def open_list(self, documents):
good = None
for doc in documents:
try:
good = self._load_file(doc)
except DocumentException, err:
#self.log.exception(err)
self.boss.get_service('editor').emit('document-exception', error=err)
# we open the last good document now normally again to
# make system consistent
if good:
self.open(doc)
def close(self, document):
"""Close a document"""
# remove the last modified reference as it is not available when closed
if not self._documents.has_key(document.unique_id):
return True
if self._last_modified and self._last_modified[0].document == document:
self._last_modified = None
self.get_action('mooedit_last_edit').set_sensitive(False)
closing = self._documents[document.unique_id].editor.close()
if closing:
self._documents[document.unique_id].close()
self._embed.remove_page(self._embed.page_num(self._documents[document.unique_id]))
del self._documents[document.unique_id]
if self._embed.get_n_pages() == 0:
self.update_actions(enabled=False)
return closing
    def save(self):
        """Save the current document"""
        # man, medit resets the language on save
        olang = self._current.editor.props.buffer.get_lang()
        self._current.editor.save()
        # restore the language twice: synchronously and again from the main
        # loop, in case moo resets it after this handler returns
        self._current.editor.set_lang(olang)
        gcall(self._current.editor.set_lang, olang)
        self.boss.cmd('buffer', 'current_file_saved')
    def save_as(self):
        """Save the current document"""
        # medit also resets the language on save-as; preserve and restore it
        olang = self._current.editor.props.buffer.get_lang()
        self._current.editor.save_as()
        # restore synchronously and again from the main loop (see save())
        self._current.editor.set_lang(olang)
        gcall(self._current.editor.set_lang, olang)
        self.boss.cmd('buffer', 'current_file_saved')
def cut(self):
"""Cut to the clipboard"""
self._current.editor.emit('cut-clipboard')
def copy(self):
"""Copy to the clipboard"""
self._current.editor.emit('copy-clipboard')
def paste(self):
"""Paste from the clipboard"""
self._current.editor.emit('paste-clipboard')
def undo(self):
self._current.editor.undo()
self.get_action('redo').set_sensitive(True)
if not self._current.editor.can_undo():
self.get_action('undo').set_sensitive(False)
def redo(self):
self._current.editor.redo()
self.get_action('undo').set_sensitive(True)
if not self._current.editor.can_redo():
self.get_action('redo').set_sensitive(False)
def goto_line(self, line):
"""Goto a line"""
self._current.editor.move_cursor(line-1, 0, False, True)
self.boss.get_service('buffer').emit('document-goto',
document=self._current.document, line=line-1)
def goto_last_edit(self):
if self._last_modified:
view, count = self._last_modified
self.open(view.document)
itr = view.editor.get_buffer().get_iter_at_offset(count)
view.editor.get_buffer().place_cursor(itr)
view.editor.scroll_to_iter(itr, 0.05, use_align=True)
    def set_path(self, path):
        # part of the editor-service interface; mooedit has no notion of a
        # working path, so this is deliberately a no-op
        pass
def grab_focus(self):
if self._current is not None:
self._current.editor.grab_focus()
def _changed_page(self, notebook, page, page_num):
self._current = self._embed.get_nth_page(page_num)
self.boss.cmd('buffer', 'open_file', document=self._current.document)
    def reload_document(self, document):
        """
        Reloads a document from disc
        """
        # TODO: moo does no export reload functionality, so this really sucks
        # strategy: disconnect and close the old moo editor, create a fresh
        # one on the same file, splice it into the existing view and rewire
        # the signal handlers
        view = self._documents[document.unique_id]
        buf = document.editor.get_buffer()
        # remember the cursor line so it can be restored after the reload
        last_line = buf.get_iter_at_offset(buf.props.cursor_position)\
                                .get_line()
        document.editor.disconnect_by_func(self._buffer_status_changed)
        document.editor.disconnect_by_func(self._buffer_renamed)
        document.editor.get_buffer().disconnect_by_func(self._buffer_changed)
        closing = document.editor.close()
        if closing:
            # keep the tab label; only the editor widget is replaced
            label = document.editor._label
            view.remove(document.editor)
            editor = self._editor_instance.create_doc(document.filename)
            editor._label = label
            editor.inputter = PidaMooInput(self, editor, document)
            document.editor = editor
            view.set_editor(editor)
            # restore the cursor from the main loop, after the widget exists
            gcall(editor.move_cursor, last_line, 0, False, True)
            document.editor.connect("doc_status_changed", self._buffer_status_changed, view)
            document.editor.connect("filename-changed", self._buffer_renamed, view)
            document.editor.get_buffer().connect("changed", self._buffer_changed, view)
            document.editor.emit("doc_status_changed")
    def _load_file(self, document):
        """Create a moo editor for *document*, wrap it in a MooeditView and
        append it to the notebook.

        Returns True on success; wraps any failure in a DocumentException.
        A None document opens an empty, unnamed buffer.
        """
        try:
            if document is None:
                editor = self._editor_instance.new_doc()
            else:
                editor = self._editor_instance.create_doc(document.filename)
                document.editor = editor
            editor.inputter = PidaMooInput(self, editor, document)
            editor.props.show_line_marks = True
            editor.props.enable_bookmarks = False
            #FIXME: this should be implemented but needs some code and a pref
            #editor.props.enable_folding = True
            #ind = PidaMooIndenter(editor, document)
            #print ind
            #editor.set_indenter(ind)
            view = MooeditView(document)
            # dirty-state flags used by _buffer_status_changed for the
            # '*' (modified) and '!' (changed on disk) label prefixes
            view._star = False
            view._exclam = False
            document.editor.connect("doc_status_changed", self._buffer_status_changed, view)
            document.editor.connect("filename-changed", self._buffer_renamed, view)
            document.editor.get_buffer().connect("changed", self._buffer_changed, view)
            label = self._embed._create_tab(document)
            self._documents[document.unique_id] = view
            self._embed.append_page(view, label)
            self._embed.set_tab_reorderable(view, True)
            #self._embed.set_tab_detachable(view, True)
            self._current = view
            return True
        except Exception, err:
            #self.log.exception(err)
            # NOTE(review): for document=None, document.unique_id above would
            # raise and end up here as well -- verify callers never pass None
            raise DocumentException(err.message, document=document, orig=err)
    def _buffer_status_changed(self, buffer, view):
        """Reflect the moo edit status flags in the tab label and actions.

        '*' prefixes a modified buffer, '!' one that changed on disk; a
        clean status restores the plain label and disables 'save'.
        """
        status = view.editor.get_status()
        if moo.edit.EDIT_MODIFIED & status == moo.edit.EDIT_MODIFIED:
            if not self._current.editor.can_redo():
                self.get_action('redo').set_sensitive(False)
            if not view._star:
                # switch the '!' prefix (if any) to '*'
                s = view.editor._label._markup
                if view._exclam:
                    view._exclam = False
                ns = "*" + s
                view.editor._label.set_markup(ns)
                view._star = True
            self.get_action('undo').set_sensitive(True)
            self.get_action('save').set_sensitive(True)
        # CLEAN and NEW are treated as "no decoration"
        if moo.edit.EDIT_CLEAN & status == moo.edit.EDIT_CLEAN:
            status = 0
        if moo.edit.EDIT_NEW & status == moo.edit.EDIT_NEW:
            status = 0
        if moo.edit.EDIT_CHANGED_ON_DISK & status == moo.edit.EDIT_CHANGED_ON_DISK:
            if not view._exclam:
                # switch the '*' prefix (if any) to '!'
                s = view.editor._label._markup
                if view._star:
                    view._star = False
                ns = "!" + s
                view.editor._label.set_markup(ns)
                view._exclam = True
            self.get_action('save').set_sensitive(True)
        if status == 0:
            if view._star or view._exclam:
                # NOTE(review): `s` below is unused -- leftover assignment
                s = view.editor._label.get_text()
                # restore the undecorated markup stored on the label
                ns = view.editor._label._markup
                view._exclam = False
                view._star = False
                view.editor._label.set_markup(ns)
            self.get_action('save').set_sensitive(False)
def _buffer_changed(self, buffer, view):
self._last_modified = (view, buffer.props.cursor_position)
self.get_action('mooedit_last_edit').set_sensitive(True)
def _buffer_modified(self, buffer, view):
s = view.editor._label.get_text()
ns = "*" + s
view.editor._label.set_markup(ns)
view.editor._label._markup(ns)
def _buffer_renamed(self, buffer, new_name, view):
view.document.filename = new_name
ns = self._get_document_title(view.document)
view.editor._label.set_markup(ns)
view.editor._label._markup = ns
view._exclam = False
view._star = False
def _get_document_title(self, document):
dsp = self.opt('display_type')
if dsp == 'filename':
return document.get_markup(document.markup_string_if_project)
elif dsp == 'fullpath':
return document.get_markup(document.markup_string_fullpath)
return document.markup
def _drag_motion_cb (self, widget, context, x, y, time):
list = widget.drag_dest_get_target_list()
target = widget.drag_dest_find_target(context, list)
if target is None:
return False
else:
if target == "text/uri-list":
context.drag_status(gtk.gdk.ACTION_COPY, time)
else:
widget.drag_get_data(context, "GTK_NOTEBOOK_TAB", time)
return True
def _drag_drop_cb (self, widget, context, x, y, time):
list = widget.drag_dest_get_target_list()
target = widget.drag_dest_find_target (context, list);
if (target == "text/uri-list"):
widget.drag_get_data (context, "text/uri-list", time)
else:
context.finish (False, False, time)
return True
def _drag_data_recv(self, widget, context, x, y, selection, targetType, time):
if targetType == 2:
for filename in selection.get_uris():
widget._mooedit.svc.boss.cmd('buffer', 'open_file', file_name=filename[7:])
return True
else:
return False
def get_content(self, editor):
return editor.get_buffer().props.text
def set_content(self, editor, text):
return editor.get_buffer().set_text(text)
    def _get_current_word_pos(self):
        # returns the start, endposition of the current word and the text
        # "word" here means a whitespace-delimited token around the cursor
        buf = self._current.editor.get_buffer()
        cursor = buf.props.cursor_position
        try:
            # moo stores the text always as utf-8 in the internal buffer
            txt = buf.props.text.decode('utf-8')
        except UnicodeDecodeError:
            # fall back to the raw bytes for non-utf-8 content
            txt = buf.props.text
        start = cursor-1
        end = cursor
        # FIXME: maybe this is faster with a regular expression
        # scan right until the next whitespace (or end of text)
        while end < len(txt):
            if txt[end].isspace():
                break
            end += 1
        # this isn't handled easy with a regular expression as its a
        # forward lookup. maybe we could search for whitespace and guess
        # as startstring max(0, cursor-10) and if it doesn't find anything
        # we use the full buffer and use the last find...
        # scan left until whitespace; step back onto the first word char
        while start >= 0:
            if txt[start].isspace():
                start += 1
                break
            start -= 1
        start = max(start, 0)
        return (start, end, txt)
def get_current_word(self):
"""
Returns the word the cursor is in or the selected text
"""
start, end, txt = self._get_current_word_pos()
return txt[start:end]
def call_with_current_word(self, callback):
start, end, txt = self._get_current_word_pos()
rv = txt[start:end]
if rv:
callback(rv)
    def call_with_selection(self, callback):
        """Invoke *callback* with the selected text; no-op without a
        selection or when the buffer is not valid utf-8."""
        if not self._current.editor.has_selection():
            return
        buf = self._current.editor.get_buffer()
        tmb = buf.get_selection_bounds()
        try:
            # slice the decoded text by the selection's character offsets
            rv = buf.props.text.decode('utf-8') \
                    [tmb[0].get_offset():tmb[1].get_offset()]
        except UnicodeDecodeError:
            # the buf.props.text is raw binary. so we have to convert it to
            # unicode
            return
        callback(rv)
def call_with_selection_or_word(self, callback):
if self._current.editor.has_selection():
self.call_with_selection(callback)
else:
self.call_with_current_word(callback)
def insert_text(self, text):
self._current.editor.get_buffer().insert_at_cursor(text)
def delete_current_word(self):
start, end, txt = self._get_current_word_pos()
buf = self._current.editor.get_buffer()
buf.delete(buf.get_iter_at_offset(start),
buf.get_iter_at_offset(end))
def get_current_line(self):
if not self._current:
return None
buf = self._current.editor.get_buffer()
i = buf.get_iter_at_offset(buf.props.cursor_position)
return i.get_line()+1
    def replace_line(self, editor, lineno, text):
        """
        Replace a line in the editor. lineno is index 0 based.
        """
        buf = editor.get_buffer()
        it1 = buf.get_iter_at_line(lineno)
        it2 = buf.get_iter_at_line(lineno)
        # move the second iter to the end of the line (excluding the newline)
        it2.forward_to_line_end()
        # delete() revalidates both iters to the deletion point, so it1 can
        # be reused for the insert below
        buf.delete(it1, it2)
        buf.insert(it1, text)
def get_cursor_position(self):
buf = self._current.editor.get_buffer()
return buf.props.cursor_position
def set_cursor_position(self, position, scroll=True):
#FIXME: return current position
buf = self._current.editor.get_buffer()
itr = buf.get_iter_at_offset(position)
buf.place_cursor(itr)
if scroll:
itr = buf.get_iter_at_offset(position)
self._current.editor.scroll_to_iter(itr, 0.05, use_align=True)
def do_doc_destroy(self, *args):
if self._docwin:
self._docwin.destroy()
self._docwin = None
    def on_doc_destroy(self, *args):
        # Disconnect the buffer's 'cursor-moved' handler that was attached
        # when the documentation window was shown. Expects self._editor_mi
        # to hold that connection's handler id — verify it is assigned
        # wherever the connection is made, otherwise this raises
        # AttributeError.
        self._current.editor.props.buffer.disconnect(self._editor_mi)
def show_documentation(self):
buf = self._current.editor.props.buffer
rec = self._current.editor.get_iter_location(
buf.get_iter_at_offset(
buf.props.cursor_position))
pos = self._current.editor.buffer_to_window_coords(
gtk.TEXT_WINDOW_WIDGET,
rec.x, rec.y)
abspos = self._current.editor.window.get_origin()
rpos = (pos[0]+abspos[0], pos[1]+abspos[1])
dm = self.boss.get_service('language').get_documentator(
self._current.document)
if not dm:
return
docu = dm.get_documentation(buf.props.text,
buf.props.cursor_position)
#print docus
if self._docwin:
self._docwin.destroy()
if not docu:
self.boss.get_service('notify').notify(
data=_('No documentation found'), timeout=2000)
return
pd = PidaDocWindow(documentation=docu)
if not pd.valid:
self.notify_user(_("No documentation found"),
title=_("Show documentation"),
quick=True)
return
pd.connect("destroy-event", self.on_doc_destroy)
self._current.editor.props.buffer.connect(
'cursor-moved', self.do_doc_destroy)
pd.move(rpos[0], rpos[1] + rec.height)
self._docwin = pd
pd.present()
    def define_sign_type(self, name, icon, linehl, text, texthl):
        """Editor-API stub: sign types are not implemented by this backend."""
        pass
    def undefine_sign_type(self, name):
        """Editor-API stub: sign types are not implemented by this backend."""
        pass
    def show_sign(self, type, filename, line):
        """Editor-API stub: signs are not implemented by this backend."""
        pass
    def hide_sign(self, type, filename, line):
        """Editor-API stub: signs are not implemented by this backend."""
        pass
@staticmethod
def get_sanity_errors():
if moo is None:
return [
"medit python bindings are missing"
]
#XXX: version checks
# Required Service attribute for service loading: the loader looks up the
# module-level name ``Service``.
Service = Mooedit
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
| fermat618/pida | pida/editors/mooedit/mooedit.py | Python | gpl-2.0 | 60,125 |
#!/usr/bin/python
__author__ = 'Markus Bajones'
__license__ = 'GPL'
__version__ = '1.0.0'
__email__ = 'markus.bajones@gmail.com'
"""
- download ROS key from keyserver and install it
- show settings (mirror/package selection/ROS distro)
- install selected packages
- rosdep update, and init
"""
import ImageTk
import lsb_release
import Tkinter as tk
import ttk as ttk
import subprocess
import os
class ROSInstallerGUI:
    """Tk GUI guiding the user through a ROS installation.

    Lets the user pick the ROS distribution, package selection, mirror and
    shell, then drives the privileged install helper (``root_tools.py``),
    ``rosdep update`` and the shell rc-file setup.
    """
    def __init__(self, master):
        """Build the main window with its logo and action buttons."""
        # Selectable package sets: (label shown in the GUI, apt suffix).
        self.packages = [('Full desktop install (recommended)', 'desktop-full'),
            ('Desktop install', 'desktop'),
            ('Base install', 'ros-base')]
        # Known package mirrors: (label, base URL).
        self.mirrors = [
            ('packages.ros.org', 'http://packages.ros.org'),
            ('mirrors.ustc.edu.cn', 'http://mirrors.ustc.edu.cn'),
            ('mirror.sysu.edu.cn', 'http://mirror.sysu.edu.cn'),
            ('ros.exbot.net', 'http://ros.exbot.net'),
            ('mirror-eu.packages.ros.org', 'http://mirror-eu.packages.ros.org'),
            ('mirror-ap.packages.ros.org', 'http://mirror-ap.packages.ros.org'),
            ('packages.ros.org.ros.informatik.uni-freiburg.de', 'http://packages.ros.org.ros.informatik.uni-freiburg.de'),
            ('mirror.umd.edu/packages.ros.org', 'http://mirror.umd.edu/packages.ros.org'),
            ('mobilerobots.case.edu/mirror/packages.ros.org', 'http://mobilerobots.case.edu/mirror/packages.ros.org'),
            ('ros.fei.edu.br/archive-ros/packages.ros.org', 'http://ros.fei.edu.br/archive-ros/packages.ros.org'),
            ('ftp.tudelft.nl', 'http://ftp.tudelft.nl')
            ]
        self.ros_version = ['indigo', 'jade']
        self.shell = ['bash', 'zsh']
        # Tk variables backing the settings radio/check buttons, with defaults.
        self.select_pkg = tk.StringVar()
        self.select_mirror = tk.StringVar()
        self.select_ros_version = tk.StringVar()
        self.select_shell = tk.StringVar()
        self.select_catkin = tk.IntVar()
        self.select_pkg.set('desktop-full')
        self.select_mirror.set('http://packages.ros.org')
        self.select_ros_version.set('indigo')
        self.select_shell.set('bash')
        self.select_catkin.set(1)
        # Progress flags shown in the final summary.
        self.installed, self.rosdep_update, self.shell_written = False, False, False
        self.master = master
        master.title("ROS Installer GUI")
        photoimage = ImageTk.PhotoImage(file="rosorg-logo1.png")
        label = tk.Label(image=photoimage)
        # Keep a reference so the image is not garbage-collected.
        label.image = photoimage
        label.pack(fill=tk.X)
        self.message_settings = tk.StringVar()
        self.message_settings.set('Customize settings')
        self.settings_button = tk.Button(master, textvar=self.message_settings, command=self.show_settings)
        self.settings_button.pack(fill=tk.X)
        self.message = tk.StringVar()
        self.message.set('Install ROS and initalize rosdep')
        self.install_button = tk.Button(master, textvariable=self.message, command=self.install_ros)
        self.install_button.pack(fill=tk.X)
        self.shell_button = tk.Button(master, text='Setup shell configuration', command=self.write_shell_config)
        self.shell_button.pack(fill=tk.X)
        self.close_button = tk.Button(master, text="Close", command=self.explain_next_steps)
        self.close_button.pack(fill=tk.X)

    def show_settings(self):
        """Open the settings dialog with one tab per option group."""
        newWindow = tk.Toplevel(self.master)
        newWindow.title("Settings")
        notebook = ttk.Notebook(newWindow)
        tab1 = ttk.Frame(notebook)
        tab2 = ttk.Frame(notebook)
        tab3 = ttk.Frame(notebook)
        tab4 = ttk.Frame(notebook)
        notebook.add(tab1, text='ROS version and packages')
        notebook.add(tab2, text='ROS version')
        notebook.add(tab3, text='Shell')
        notebook.add(tab4, text='Mirrors')
        notebook.pack(fill=tk.X)
        for text, pkg in self.packages:
            b = tk.Radiobutton(tab1, text=text, variable=self.select_pkg, value=pkg)
            b.pack(anchor=tk.W)
        b = tk.Checkbutton(tab1, text="Include catkin tools?", variable=self.select_catkin)
        b.pack(anchor=tk.W)
        for text in self.ros_version:
            b = tk.Radiobutton(tab2, text=text, variable=self.select_ros_version, value=text)
            b.pack(anchor=tk.W)
        for text in self.shell:
            b = tk.Radiobutton(tab3, text=text, variable=self.select_shell, value=text)
            b.pack(anchor=tk.W)
        for text, mirror in self.mirrors:
            b = tk.Radiobutton(tab4, text=text, variable=self.select_mirror, value=mirror)
            b.pack(anchor=tk.W)
        button = tk.Button(newWindow, text ="Done", command = newWindow.destroy)
        button.pack()
        self.settings_button.config(bg='green')
        self.master.update()

    def explain_next_steps(self):
        """Show a summary window telling the user what (if anything) is left to do."""
        newWindow = tk.Toplevel(self.master)
        newWindow.title('Final steps')
        message = tk.StringVar()
        if self.installed and self.rosdep_update and self.shell_written:
            message.set('ROS installed, rosdep updated and .{}rc written.\n'
                'Next you need to create a catkin workspace'.format(self.select_shell.get()))
        else:
            message.set('Some step did not execute or had an error.\n\n'
                'If you think this is fine you can proceed to create your catkin workspace.\n'
                'Otherwise check the output on the terminal for more information.\n')
        label = tk.Label(newWindow, textvariable=message, anchor=tk.W, justify=tk.LEFT)
        label.pack()
        button = tk.Button(newWindow, text ="Done", command = self.master.quit)
        button.pack()

    def check_ubuntu_ros(self):
        """Return True iff the running Ubuntu release supports the chosen ROS distro."""
        data = {'indigo': ['13.10', '14.04'],
                'jade': ['14.04', '14.10', '15.04']}
        release = lsb_release.get_distro_information()['RELEASE']
        if release in data[self.select_ros_version.get()]:
            return True
        return False

    def install_ros(self):
        """Validate the distro choice, then run the privileged install helper
        and (on success) ``rosdep update``."""
        if not self.check_ubuntu_ros():
            self.settings_button.config(bg='red')
            self.message_settings.set('Check ROS version.')
            self.master.update()
            return
        self.message.set('Executing...')
        self.install_button.config(bg='green')
        self.master.update()
        mirror, ros_pkgs, ros_version = self.select_mirror.get(), self.select_pkg.get(), self.select_ros_version.get()
        ros_pkgs = '-'.join(['ros', ros_version, ros_pkgs])
        catkin = ''
        if self.select_catkin.get():
            catkin = 'python-catkin-tools'
        print(mirror, ros_version, ros_pkgs, catkin)
        try:
            # check_call() raises CalledProcessError on a non-zero exit
            # status; the previously used subprocess.call() never raises it,
            # which made the error branch below unreachable and reported
            # failed installs as successful.
            subprocess.check_call(['pkexec', os.getcwd()+'/root_tools.py', mirror, ros_version, ros_pkgs, catkin])
            self.message.set('Done')
            self.update_rosdep()
            self.installed = True
        except subprocess.CalledProcessError as e:
            self.message.set('Something went wrong. Please check the terminal output')
            self.install_button.config(bg='red')
            self.installed = False
        self.master.update()

    def update_rosdep(self):
        """Run ``rosdep update`` and record whether it succeeded."""
        try:
            subprocess.check_call(['/usr/bin/rosdep', 'update'])
            self.rosdep_update = True
        except subprocess.CalledProcessError as e:
            print("rosdep executed with errors. [{err}]".format(err=str(e)))
            self.rosdep_update = False

    def write_shell_config(self):
        """Append the ROS setup.<shell> source line to the user's rc file,
        unless an identical line is already present."""
        shell = self.select_shell.get()
        content = "".join(['source /opt/ros/', self.select_ros_version.get(), '/setup.', shell])
        # renamed from 'file' to avoid shadowing the builtin
        rc_file = os.path.join(os.environ['HOME'], "".join(['.',shell,'rc']))
        try:
            needs_line = True
            if os.path.exists(rc_file):
                # read via a context manager so the fd is not leaked
                with open(rc_file) as f:
                    needs_line = content not in f.read()
            if needs_line:
                with open(rc_file, 'a+') as f:
                    f.write(content+'\n')
                print("{file} written successfully.".format(file=rc_file))
            else:
                print("'{}' already in {}".format(content, rc_file))
            self.shell_written = True
        except IOError as e:
            print('Could not read or write {file}. Error was {err}'.format(file=rc_file, err=e))
            self.shell_written = False
        self.shell_button.config(bg='green')
        self.master.update()
if __name__ == '__main__':
    # Create the Tk root window, attach the installer GUI and enter the
    # event loop.
    root_window = tk.Tk()
    installer = ROSInstallerGUI(root_window)
    root_window.mainloop()
| bajo/ros_installer_gui | tkmain.py | Python | gpl-2.0 | 8,340 |
#!/usr/bin/env python
#Copyright 2007,2008,2012 Sebastian Hagen
# This file is part of gonium.
#
# gonium is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# gonium is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from collections import deque
import logging
import struct
import socket
from io import BytesIO
import random
from .base import *
from .. import ip_address
from ..ip_address import ip_address_build
from ..fdm.packet import AsyncPacketSock
from ..fdm.stream import AsyncDataStream
# ----------------------------------------------------------------------------- question / RR sections
class ValueVerifier:
   """Mixin providing range checks for DNS NAMEs and record TYPE codes."""
   NAME_MIN = 0
   NAME_MAX = 255
   TYPE_MIN = 1
   TYPE_MAX = 255
   @classmethod
   def name_validate(cls, name):
      """Raise ValueError unless len(name) is within [NAME_MIN, NAME_MAX]."""
      length = len(name)
      if (length < cls.NAME_MIN) or (length > cls.NAME_MAX):
         raise ValueError('NAME {!a} is invalid'.format(name,))
   @classmethod
   def type_validate(cls, rtype):
      """Raise ValueError unless int(rtype) is within [TYPE_MIN, TYPE_MAX]."""
      value = int(rtype)
      if (value < cls.TYPE_MIN) or (value > cls.TYPE_MAX):
         raise ValueError('TYPE {!a} is invalid'.format(rtype,))
class DNSQuestion(ValueVerifier, DNSReprBase):
   """One entry of a DNS question section: (QNAME, QTYPE, QCLASS)."""
   fields = ('name', 'type', 'rclass')
   def __init__(self, name, rtype, rclass=CLASS_IN):
      """Validate and store the question fields."""
      self.name_validate(name)
      self.type_validate(rtype)
      self.name = name
      self.type = rtype
      self.rclass = rclass
   @classmethod
   def build_from_binstream(cls, binstream):
      """Parse one question section (NAME, then TYPE/CLASS) from *binstream*."""
      name = DomainName.build_from_binstream(binstream)
      raw = binstream.read(4)
      if (len(raw) < 4):
         raise ValueError('Insufficient data in binstream')
      (rtype, rclass) = struct.unpack(b'>HH', raw)
      return cls(name, rtype, rclass)
   def binary_repr(self):
      """Return binary representation of this question section"""
      tail = struct.pack(b'>HH', self.type, self.rclass)
      return self.name.binary_repr() + tail
   def __eq__(self, other):
      # questions compare equal iff their wire representations match
      return (self.binary_repr() == other.binary_repr())
   def __ne__(self, other):
      return not (self == other)
   def __hash__(self):
      # hash is derived from the wire representation, consistent with __eq__
      return hash(self.binary_repr())
class ResourceRecord(ValueVerifier, DNSReprBase):
   """A DNS resource record: (NAME, TYPE, CLASS, TTL, RDATA)."""
   fields = ('name', 'type', 'rclass', 'ttl', 'rdata')
   def __init__(self, name, rtype, ttl, rdata, rclass=CLASS_IN):
      """Validate and store the RR fields; *rdata* is an RDATA instance."""
      self.name_validate(name)
      self.type_validate(rtype)
      self.name = name
      self.type = rtype
      self.rclass = rclass
      self.ttl = ttl
      self.rdata = rdata
   @classmethod
   def build_from_binstream(cls, binstream):
      """Parse one RR from *binstream*; the RDATA parser is selected by TYPE."""
      name = DomainName.build_from_binstream(binstream)
      # fixed-size part after the name: TYPE, CLASS, TTL, RDLENGTH
      tctl_str = binstream.read(10)
      if (len(tctl_str) < 10):
         raise ValueError('Insufficient data in binstream')
      (rtype, rclass, ttl, rdlength) = struct.unpack(b'>HHLH', tctl_str)
      rdata = RDATA.class_get(rtype).build_from_binstream(binstream, rdlength)
      return cls(name, rtype, ttl, rdata, rclass=rclass)
   def binary_repr(self):
      """Return binary representation of this RR"""
      rdata_str = self.rdata.binary_repr()
      return (self.name.binary_repr() + struct.pack(b'>HHLH', self.type,
         self.rclass, self.ttl, len(rdata_str)) + rdata_str)
# ----------------------------------------------------------------------------- DNS Frames and Headers
class DNSHeader(DNSReprBase):
   """The fixed 12-byte DNS message header (RFC 1035, section 4.1.1)."""
   ID_MIN = QDC_MIN = ANC_MIN = NSC_MIN = ARC_MIN = 0
   ID_MAX = QDC_MAX = ANC_MAX = NSC_MAX = ARC_MAX = 65535
   QR_MIN = AA_MIN = TC_MIN = RD_MIN = RA_MIN = False
   QR_MAX = AA_MAX = TC_MAX = RD_MAX = RA_MAX = True
   OPCODE_MIN = 0
   OPCODE_MAX = 2
   RCODE_MIN = 0
   RCODE_MAX = 5
   fields = ('id', 'response', 'opcode', 'authoritative_answer', 'truncation',
      'recursion_desired', 'recursion_available', 'response_code', 'qdcount',
      'ancount', 'nscount', 'arcount')
   def __init__(self, id, response=False, opcode=0, authoritative_answer=False,
         truncation=False, recursion_desired=True, recursion_available=False,
         response_code=0, qdcount=0, ancount=0, nscount=0, arcount=0):
      """Validate all fields against their protocol limits and store them."""
      self.limit_verify(self.ID_MIN, self.ID_MAX, id)
      self.limit_verify(self.QR_MIN, self.QR_MAX, response)
      self.limit_verify(self.OPCODE_MIN, self.OPCODE_MAX, opcode)
      self.limit_verify(self.AA_MIN, self.AA_MAX, authoritative_answer)
      self.limit_verify(self.TC_MIN, self.TC_MAX, truncation)
      self.limit_verify(self.RD_MIN, self.RD_MAX, recursion_desired)
      self.limit_verify(self.RA_MIN, self.RA_MAX, recursion_available)
      self.limit_verify(self.RCODE_MIN, self.RCODE_MAX, response_code)
      self.limit_verify(self.QDC_MIN, self.QDC_MAX, qdcount)
      self.limit_verify(self.ANC_MIN, self.ANC_MAX, ancount)
      self.limit_verify(self.NSC_MIN, self.NSC_MAX, nscount)
      self.limit_verify(self.ARC_MIN, self.ARC_MAX, arcount)
      self.id = id
      self.response = response
      self.opcode = opcode
      self.authoritative_answer = authoritative_answer
      self.truncation = truncation
      self.recursion_desired = recursion_desired
      self.recursion_available = recursion_available
      self.response_code = response_code
      self.qdcount = qdcount
      self.ancount = ancount
      self.nscount = nscount
      self.arcount = arcount
   @staticmethod
   def limit_verify(limit_min, limit_max, val):
      """Raise ValueError unless limit_min <= val <= limit_max."""
      if not (limit_min <= val <= limit_max):
         raise ValueError('Expected value to lie between {} and {}; got {!a} instead.'.format(limit_min, limit_max, val))
   @classmethod
   def build_from_binstream(cls, binstream):
      """Read 12 bytes from *binstream* and parse them as a DNS header."""
      s = binstream.read(12)
      if (len(s) < 12):
         raise ValueError('Insufficient data in stream')
      return cls.build_from_binstring(s)
   @classmethod
   def build_from_binstring(cls, binstring):
      """Parse a 12-byte string as a DNS header; rejects non-zero Z bits."""
      if (len(binstring) != 12):
         raise ValueError('Binstring {!a} has invalid length'.format(binstring,))
      (id, flags_1, flags_2, qdcount, ancount, nscount, arcount) = \
         struct.unpack(b'>HBBHHHH', binstring)
      # first flag octet: QR(1) Opcode(4) AA(1) TC(1) RD(1)
      qr = bool(flags_1 >> 7)
      opcode = (flags_1 % 128) >> 3
      aa = bool((flags_1 % 8) >> 2)
      tc = bool((flags_1 % 4) >> 1)
      rd = bool(flags_1 % 2)
      # second flag octet: RA(1) Z(3) RCODE(4)
      ra = bool(flags_2 >> 7)
      Z = (flags_2 % 128) >> 4
      rcode = flags_2 % 16
      if (Z != 0):
         raise ValueError('Got non-zero value in Z header field')
      return cls(id, qr, opcode, aa, tc, rd, ra, rcode, qdcount,
         ancount, nscount, arcount)
   def binary_repr(self):
      """Return binary representation of this DNS Header"""
      # Bit layout of the first flag octet (RFC 1035, 4.1.1):
      # QR(1) | Opcode(4) | AA(1) | TC(1) | RD(1).
      # The previous version dropped AA entirely and wrote TC into AA's bit
      # position, so serialized headers did not round-trip through
      # build_from_binstring().
      flags_1 = (
         (self.response << 7) |
         (self.opcode << 3) |
         (self.authoritative_answer << 2) |
         (self.truncation << 1) |
         self.recursion_desired
      )
      flags_2 = (self.recursion_available << 7) + self.response_code
      return struct.pack(b'>HBBHHHH', self.id, flags_1, flags_2, self.qdcount,
         self.ancount, self.nscount, self.arcount)
class DNSFrame(DNSReprBase):
   """A complete DNS message: header plus question/answer/NS/additional sections."""
   fields = ('questions', 'answers', 'ns_records', 'ar', 'header')
   def __init__(self, questions, answers=(), ns_records=(),
         ar=(), header=None, *args, **kwargs):
      """Store the sections; if *header* is None, one is synthesized with the
      section counts (extra args are forwarded to the DNSHeader constructor)."""
      self.questions = questions
      self.answers = answers
      self.ns_records = ns_records
      self.ar = ar
      if (header is None):
         header = DNSHeader(qdcount=len(questions), ancount=len(answers),
            nscount=len(ns_records), arcount=len(ar), *args, **kwargs)
      self.header = header
   @classmethod
   def build_from_binstream(cls, binstream):
      """Parse a whole DNS message from *binstream*; section lengths are taken
      from the header's count fields."""
      header = DNSHeader.build_from_binstream(binstream)
      questions = tuple([DNSQuestion.build_from_binstream(binstream) for i in range(header.qdcount)])
      answers = tuple([ResourceRecord.build_from_binstream(binstream) for i in range(header.ancount)])
      ns_records = tuple([ResourceRecord.build_from_binstream(binstream) for i in range(header.nscount)])
      ar = tuple([ResourceRecord.build_from_binstream(binstream) for i in range(header.arcount)])
      return cls(header=header, questions=questions, answers=answers,
         ns_records=ns_records, ar=ar)
   def binary_repr(self):
      """Return binary representation of this DNS frame (header + all sections)"""
      return (self.header.binary_repr() +
         b''.join([s.binary_repr() for s in (tuple(self.questions) +
            tuple(self.answers) + tuple(self.ns_records) +
            tuple(self.ar))]))
# ----------------------------------------------------------------------------- statekeeping
class DNSQuery:
   """Class representing outstanding local requests
   Callback args: dns_request, response_frame
   response_frame will be None iff the query timeouted."""
   def __init__(self, lookup_manager, result_handler, id, question, timeout):
      """Register the query with *lookup_manager* and, if *timeout* is not
      None, arm a timeout timer on its event dispatcher."""
      if (not hasattr(question.binary_repr,'__call__')):
         raise ValueError('Value {!a} for argument question is invalid'.format(question,))
      self.id = id
      self.result_handler = result_handler
      self.question = question
      self.la = lookup_manager
      self.la.query_add(self)
      if not (timeout is None):
         self.tt = self.la.event_dispatcher.set_timer(timeout, self.timeout_process, parent=self)
      else:
         # Previously self.tt was left unset for untimed queries, which made
         # potential_response_process() and clean_up() raise AttributeError.
         self.tt = None
   def __eq__(self, other):
      # queries are identified by (frame id, question section)
      return ((self.id == other.id) and (self.question == other.question))
   def __ne__(self, other):
      return not (self == other)
   def __hash__(self):
      return hash((self.id, self.question))
   def timeout_process(self):
      """Process a timeout on this query"""
      self.tt = None
      self.la.query_forget(self)
      try:
         self.failure_report()
      finally:
         self.la = None
   def failure_report(self):
      """Call callback handler with dummy results indicating lookup failure"""
      self.result_handler(self, None)
   def is_response(self, response):
      """Check a response for whether it answers our query. Do not process it further either way."""
      return (tuple(response.questions) == (self.question,))
   def potential_response_process(self, response):
      """Check a response for whether it answers our query, and if so process it.
      Returns whether the response was accepted."""
      if (not self.is_response(response)):
         return False
      # cancel the pending timeout, if one was armed
      if not (self.tt is None):
         self.tt.cancel()
         self.tt = None
      try:
         self.result_handler(self, response)
      finally:
         self.la = None
      return True
   def clean_up(self):
      """Cancel request, if still pending"""
      if not (self.tt is None):
         self.tt.cancel()
         self.tt = None
      if not (self.la is None):
         self.la.query_forget(self)
         self.la = None
   def get_dns_frame(self):
      """Return a DNSFrame carrying this query's question and id."""
      return DNSFrame(questions=(self.question,), id=self.id)
class ResolverConfig:
   """Nameserver configuration, typically parsed from /etc/resolv.conf."""
   DEFAULT_FN = '/etc/resolv.conf'
   PORT = 53
   def __init__(self, nameservers):
      # ordered list of nameserver addresses; the first one is used
      self.ns = nameservers
   @classmethod
   def build_from_file(cls, fn=None):
      """Parse 'nameserver' entries from a resolv.conf-style file.

      Unreadable files and unparsable addresses are silently skipped; if no
      usable nameserver is found, localhost (v4 and v6) is used as fallback.
      """
      if (fn is None):
         fn = cls.DEFAULT_FN
      nameservers = []
      try:
         f = open(fn, 'r')
      except IOError:
         pass
      else:
         # context manager closes the file even if parsing raises
         # (previously the close was skipped on any exception in the loop)
         with f:
            for line in f:
               words = line.split()
               if (not words):
                  continue
               if (words[0].startswith('#')):
                  continue
               if (words[0] == 'nameserver'):
                  if (len(words) > 1):
                     try:
                        ip = ip_address_build(words[1])
                     except ValueError:
                        continue
                     nameservers.append(ip)
                  continue
      if (not nameservers):
         nameservers = [ip_address_build(s) for s in ('127.0.0.1','::1')]
      return cls(nameservers)
   def get_addr(self):
      """Return (host, port) of the primary (first) nameserver."""
      return (str(self.ns[0]), self.PORT)
   def build_lookup_manager(self, ed):
      """Construct a DNSLookupManager on event dispatcher *ed* for the primary NS."""
      return DNSLookupManager(ed, ns_addr=self.get_addr())
class DNSTCPStream(AsyncDataStream):
   """DNS-over-TCP stream: messages are framed by a 2-byte big-endian
   length prefix (RFC 1035, section 4.2.2)."""
   def __init__(self, *args, **kwargs):
      super().__init__(*args, **kwargs)
      # minimum bytes needed before process_input() is useful: one length prefix
      self.size = 2
   def process_input(self, data):
      """Split buffered input into complete DNS messages and hand them to
      process_msgs(); partial trailing data is kept in the inbuf."""
      bytes_used = 0
      bytes_left = len(data)
      msgs = []
      while (bytes_left > 2):
         # l: payload length announced by the 2-byte prefix
         (l,) = struct.unpack('>H', data[bytes_used:bytes_used+2])
         wb = l + 2
         if (wb > bytes_left):
            # incomplete message: remember how many bytes we need in total
            self.size = wb
            break
         msgs.append(data[bytes_used+2:bytes_used+wb])
         bytes_used += wb
         bytes_left -= wb
      else:
         # loop finished without break: back to waiting for a fresh prefix
         self.size = 2
      self.discard_inbuf_data(bytes_used)
      if (msgs):
         self.process_msgs(msgs)
   def send_query(self, query):
      """Serialize *query*'s DNS frame and send it with a length prefix.
      Raises ValueError if the frame exceeds the 16-bit length field."""
      frame_data = query.get_dns_frame().binary_repr()
      try:
         header = struct.pack('>H', (len(frame_data)))
      except struct.error as exc:
         # NOTE(review): chains from the exception *class*, not from `exc` —
         # looks unintended, though it is valid Python; confirm.
         raise ValueError('Too much data.') from struct.error
      self.send_bytes((header, frame_data))
class DNSLookupManager:
   """Tracks outstanding DNS queries against one nameserver.

   Queries are sent over UDP first; truncated responses are retried over a
   lazily opened TCP connection to the same nameserver.
   """
   logger = logging.getLogger('gonium.dns_resolving.DNSLookupManager')
   log = logger.log
   def __init__(self, event_dispatcher, ns_addr, addr_family=None):
      """Open the UDP socket towards *ns_addr* ((host, port) pair); the
      address family defaults to the one of the parsed nameserver address."""
      self.event_dispatcher = event_dispatcher
      self.cleaning_up = False
      if (not (len(ns_addr) == 2)):
         raise ValueError('Argument ns_addr should have two elements; got {!a}'.format(ns_addr,))
      ip_addr = ip_address.ip_address_build(ns_addr[0])
      if (addr_family is None):
         addr_family = ip_addr.AF
      sock = socket.socket(addr_family, socket.SOCK_DGRAM)
      self.sock_udp = AsyncPacketSock(self.event_dispatcher, sock)
      self.sock_udp.process_input = self.data_process
      self.sock_udp.process_close = self.close_process
      self.sock_tcp = None
      self.sock_tcp_connected = False
      # queries waiting for the TCP connection to come up
      self._qq_tcp = deque()
      # Normalize ns_addr argument
      self.ns_addr = (str(ip_addr), int(ns_addr[1]))
      # frame id -> list of outstanding DNSQuery instances
      self.queries = {}
   def _have_tcp_connection(self):
      # True iff a TCP stream exists and is fully connected
      s = self.sock_tcp
      return (s and (s.state == s.CS_UP))
   def _send_tcp(self, query):
      """Send *query* over TCP, queueing it if the connection is not up yet."""
      if (self._have_tcp_connection()):
         try:
            self.sock_tcp.send_query(query)
         except ValueError:
            self.log(40, '{!a} unable to send query over TCP:'.format(self), exc_info=True)
            # fail the query from the event loop instead of synchronously
            self.event_dispatcher.set_timer(0, query.timeout_process, parent=self, interval_relative=False)
      else:
         if (self.sock_tcp is None):
            self._make_tcp_sock()
         self._qq_tcp.append(query)
      return
   def _make_tcp_sock(self):
      """Start an asynchronous TCP connection to the nameserver."""
      self.sock_tcp = s = DNSTCPStream(run_start=False)
      s.process_close = self._process_tcp_close
      s.connect_async_sock(self.event_dispatcher, ip_address_build(self.ns_addr[0]), self.ns_addr[1], connect_callback=self._process_tcp_connect)
      s.process_msgs = self._process_tcp_msgs
   def _process_tcp_connect(self, conn):
      # flush queries queued while the connection was being established
      for query in self._qq_tcp:
         self.sock_tcp.send_query(query)
      self._qq_tcp.clear()
   def _process_tcp_close(self):
      self.sock_tcp = None
   def _process_tcp_msgs(self, msgs):
      # TCP messages are fed through the same path as UDP frames
      for msg in msgs:
         self.data_process(msg, self.ns_addr, tcp=True)
   def data_process(self, data, source, tcp=False):
      """Parse an incoming frame and dispatch it to the matching query.

      Spurious frames (wrong source, unknown id, unexpected question) are
      logged and dropped; truncated UDP responses trigger a TCP retry.
      """
      try:
         dns_frame = DNSFrame.build_from_binstream(BytesIO(data))
      except ValueError:
         self.log(30, '{!a} got frame {!a} not parsable as dns data from {!a}. Ignoring. Parsing error was:'.format(self, bytes(data), source), exc_info=True)
         return
      if (source != self.ns_addr):
         self.log(30, '{!a} got spurious udp frame from {!a}; target NS is at {!a}. Ignoring.'.format(self, source, self.ns_addr))
         return
      if (not (dns_frame.header.id in self.queries)):
         self.log(30, '{!a} got spurious (unexpected id) query dns response {!a} from {!a}. Ignoring.'.format(self, dns_frame, source))
         return
      def log_spurious():
         self.log(30, '{!a} got spurious (unexpected question section) query dns response {!a} from {!a}. Ignoring.'.format(self, dns_frame, source))
      if (dns_frame.header.truncation):
         if (tcp):
            self.log(30, '{!a} got truncated dns response {!a} over TCP from {!a}. Ignoring.'.format(self, dns_frame, source))
            return
         self.log(25, '{!a} got truncated dns response {!a} from {!a}. Retrying over TCP.'.format(self, dns_frame, source))
         for query in self.queries[dns_frame.header.id]:
            if (query.is_response(dns_frame)):
               self._send_tcp(query)
               break
         else:
            log_spurious()
         return
      # iterate over a copy: the accepted query is removed from the list
      for query in self.queries[dns_frame.header.id][:]:
         if (query.potential_response_process(dns_frame)):
            self.queries[dns_frame.header.id].remove(query)
            break
      else:
         log_spurious()
   def query_forget(self, query):
      """Forget outstanding dns query"""
      self.queries[query.id].remove(query)
      try:
         self._qq_tcp.remove(query)
      except ValueError:
         pass
   def id_suggestion_get(self):
      """Return suggestion for a frame id to use"""
      # retry until an id without outstanding queries is found
      while True:
         rv = random.randint(0, 2**16-1)
         if not (rv in self.queries):
            return rv
   def query_add(self, query):
      """Register new outstanding dns query and send query frame"""
      dns_frame_str = query.get_dns_frame().binary_repr()
      if (not (query.id in self.queries)):
         self.queries[query.id] = []
      query_list = self.queries[query.id]
      if (query in query_list):
         raise ValueError('query {!a} is already registered with {!a}.'.format(query, self))
      query_list.append(query)
      self.sock_udp.fl.sendto(dns_frame_str, self.ns_addr)
   def close_process(self):
      """Process close of UDP socket"""
      #if not (fd == self.sock_udp.fd):
      #   raise ValueError('{!a} is not responsible for fd {!a}'.fomat(self, fd))
      if (not self.cleaning_up):
         self.log(30, 'UDP socket of {!a} is unexpectedly being closed.'.format(self))
      self.sock_udp = None
      # Don't raise an exception here; this is most likely being called as a
      # result of another exception, which we wouldn't want to mask.
   def clean_up(self):
      """Shutdown instance, if still active"""
      self.cleaning_up = True
      if not (self.sock_udp is None):
         self.sock_udp.clean_up()
         self.sock_udp = None
      # fail all outstanding queries before dropping them
      for query_list in self.queries.values():
         for query in query_list[:]:
            query.failure_report()
            query.clean_up()
      self.queries.clear()
      self._qq_tcp.clear()
      self.cleaning_up = False
   def build_simple_query(self, *args, **kwargs):
      """Convenience constructor for a SimpleDNSQuery bound to this manager."""
      return SimpleDNSQuery(self, *args, **kwargs)
class DNSLookupResult:
   """Aggregated outcome of a DNS lookup: answers plus additional records."""
   def __init__(self, query_name, answers, additional_records):
      self.query_name = query_name
      self.answers = answers
      self.additional_records = additional_records
   def get_rr_bytypes(self, rtypes):
      """Return the payloads (data_get()) of all answers whose TYPE is in *rtypes*."""
      return tuple([a.data_get() for a in self.answers if (a.type in rtypes)])
   def get_rr_A(self):
      """Return the IPv4 addresses from A answers."""
      return self.get_rr_bytypes((RDATA_A.type,))
   def get_rr_AAAA(self):
      """Return the IPv6 addresses from AAAA answers."""
      return self.get_rr_bytypes((RDATA_AAAA.type,))
   def get_rr_ip_addresses(self):
      """Return all IP addresses (A and AAAA answers)."""
      return self.get_rr_bytypes((RDATA_A.type, RDATA_AAAA.type))
   def get_rr_MX(self):
      """Return the payloads of MX answers."""
      return self.get_rr_bytypes((RDATA_MX.type,))
   def get_rr_TXT(self):
      """Return the payloads of TXT answers."""
      return self.get_rr_bytypes((RDATA_TXT.type,))
   def __repr__(self):
      return '%s%s' % (self.__class__.__name__, (self.query_name, self.answers,
         self.additional_records))
   def __bool__(self):
      # A result object is always truthy, even when it carries no answers.
      # This was previously spelled __nonzero__ — the Python 2 protocol name,
      # which Python 3 never calls — making it dead code in this py3 module.
      return True
   # kept as an alias for any legacy caller using the old protocol name
   __nonzero__ = __bool__
class SimpleDNSQuery:
   """DNSQuery wrapper with more comfortable call syntax

   Issues one DNSQuery per requested qtype and calls *result_handler* once
   with a merged DNSLookupResult (or None if every sub-query failed).
   """
   logger = logging.getLogger('gonium.dns_resolving.SimpleDNSQuery')
   log = logger.log
   def __init__(self, lookup_manager, result_handler, query_name, qtypes, timeout):
      """Normalize *query_name* and start one sub-query per entry of *qtypes*."""
      if (isinstance(query_name, str)):
         query_name = query_name.encode('ascii')
      # strip a single trailing dot (fully-qualified spelling)
      if (query_name.endswith(b'.') and not query_name.endswith(b'..')):
         query_name = query_name[:-1]
      query_name = DomainName(query_name)
      self.lookup_manager = lookup_manager
      self.result_handler = result_handler
      self.query_name = query_name
      self.qtypes = qtypes
      self.results = []
      self.queries = []
      # True if any requested qtype matches all answer types (e.g. ANY)
      self.qtype_special = False
      for qtype in self.qtypes:
         question = DNSQuestion(query_name, qtype)
         self.queries.append(DNSQuery(lookup_manager=lookup_manager,
            result_handler=self.query_result_handler,
            id=lookup_manager.id_suggestion_get(), question=question,
            timeout=timeout))
         if (qtype in QTYPES_SPECIAL):
            self.qtype_special = True
   def query_result_handler(self, query, result):
      """Process result for wrapped query"""
      self.results.append(result)
      # once every sub-query reported back, merge the results
      if (len(self.results) >= len(self.qtypes)):
         self.query_results_process()
   def query_results_process(self):
      """Collocate and return query results"""
      valid_results = []
      valid_ars = []
      # names whose answers we accept; grows as CNAMEs are followed
      names_valid = set((self.query_name,))
      results = [x for x in self.results if (not (x is None))]
      if (len(results) == 0):
         self.result_handler(self, None)
         return
      for result in results:
         if (result is None):
            continue
         for answer in result.answers:
            if (not (answer.name in names_valid)):
               self.log(30, "{!a} got bogus answer {!a}; didn't expect this name. Ignoring.".format(self, answer))
               continue
            if (answer.type == RDATA_CNAME.type):
               names_valid.add(answer.rdata.domain_name)
            elif (not ((answer.type in self.qtypes) or self.qtype_special)):
               self.log(30, "{!a} got bogus answer {!a}; didn't expect this type. Ignoring.".format(self, answer))
               continue
            if not (answer.rdata in valid_results):
               valid_results.append(answer.rdata)
         for ar in result.ar:
            if not (ar in valid_ars):
               valid_ars.append(ar)
      res = DNSLookupResult(self.query_name, valid_results, valid_ars)
      try:
         self.result_handler(self, res)
      except BaseException as exc:
         # deliberately log-and-continue: a broken handler must not leave
         # this wrapper half-torn-down
         self.log(40, 'Error on DNS lookup result processing for {!a}:'.format(res), exc_info=True)
      self.result_handler = None
      self.lookup_manager = None
      self.queries = ()
   def clean_up(self):
      """Cancel request, if still pending"""
      for query in self.queries:
         # previously written as "query.clean_up" (no parentheses), so the
         # sub-queries were never actually cancelled
         query.clean_up()
      self.queries = ()
      self.result_handler = None
      self.lookup_manager = None
| sh01/gonium | src/dns_resolving/stub.py | Python | gpl-2.0 | 23,265 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Red Hat, Inc.
#
# Authors:
# Thomas Woerner <twoerner@redhat.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os.path

from firewall.config import COMMANDS
from firewall.core.logger import log
from firewall.core.prog import runProg
# FirewallError and the error codes are raised by check_name()/check_type()
# below but were never imported, causing a NameError on any validation
# failure.
from firewall.errors import FirewallError, INVALID_NAME, INVALID_TYPE
from firewall.functions import tempFile, readfile
# Maximum length accepted for an ipset name (see ipset(8)).
IPSET_MAXNAMELEN = 32
# Set types this wrapper currently drives; the commented-out entries exist
# in ipset(8) but are not supported here yet.
IPSET_TYPES = [
    # bitmap and set types are currently not supported
    # "bitmap:ip",
    # "bitmap:ip,mac",
    # "bitmap:port",
    # "list:set",
    "hash:ip",
    #"hash:ip,port",
    #"hash:ip,port,ip",
    #"hash:ip,port,net",
    #"hash:ip,mark",
    "hash:net",
    #"hash:net,net",
    #"hash:net,port",
    #"hash:net,port,net",
    #"hash:net,iface",
    "hash:mac",
]
# Options accepted on "ipset create"; values describe the expected argument.
IPSET_CREATE_OPTIONS = {
    "family": "inet|inet6",
    "hashsize": "value",
    "maxelem": "value",
    "timeout": "value in secs",
    # "counters": None,
    # "comment": None,
}
class ipset:
    def __init__(self):
        # Path of the ipset(8) binary, taken from firewalld's command table.
        self._command = COMMANDS["ipset"]
    def __run(self, args):
        """Execute the ipset binary with *args*; return its output.

        Raises ValueError (with the command line and output) when ipset
        exits with a non-zero status.
        """
        # convert to string list
        _args = ["%s" % item for item in args]
        log.debug2("%s: %s %s", self.__class__, self._command, " ".join(_args))
        (status, ret) = runProg(self._command, _args)
        if status != 0:
            raise ValueError("'%s %s' failed: %s" % (self._command,
                                                     " ".join(_args), ret))
        return ret
    def check_name(self, name):
        """Raise FirewallError(INVALID_NAME) if *name* exceeds the maximum
        ipset name length (IPSET_MAXNAMELEN)."""
        if len(name) > IPSET_MAXNAMELEN:
            raise FirewallError(INVALID_NAME,
                                "ipset name '%s' is not valid" % name)
def supported_types(self):
ret = { }
output = ""
try:
output = self.__run(["--help"])
except ValueError as e:
log.debug1("ipset error: %s" % e)
lines = output.splitlines()
in_types = False
for line in lines:
#print(line)
if in_types:
splits = line.strip().split(None, 2)
ret[splits[0]] = splits[2]
if line.startswith("Supported set types:"):
in_types = True
return ret
    def check_type(self, type_name):
        """Raise FirewallError(INVALID_TYPE) if *type_name* is overlong or
        not one of the supported IPSET_TYPES."""
        if len(type_name) > IPSET_MAXNAMELEN or type_name not in IPSET_TYPES:
            raise FirewallError(INVALID_TYPE,
                                "ipset type name '%s' is not valid" % type_name)
def create(self, set_name, type_name, options=None):
self.check_name(set_name)
self.check_type(type_name)
args = [ "create", set_name, type_name ]
if options:
for k,v in options.items():
args.append(k)
if v != "":
args.append(v)
return self.__run(args)
def destroy(self, set_name):
self.check_name(set_name)
return self.__run([ "destroy", set_name ])
def add(self, set_name, entry, options=None):
args = [ "add", set_name, entry ]
if options:
args.append("%s" % " ".join(options))
return self.__run(args)
def delete(self, set_name, entry, options=None):
args = [ "del", set_name, entry ]
if options:
args.append("%s" % " ".join(options))
return self.__run(args)
def test(self, set_name, entry, options=None):
args = [ "test", set_name, entry ]
if options:
args.append("%s" % " ".join(options))
return self.__run(args)
def list(self, set_name=None):
args = [ "list" ]
if set_name:
args.append(set_name)
return self.__run(args).split()
def save(self, set_name=None):
args = [ "save" ]
if set_name:
args.append(set_name)
return self.__run(args)
def restore(self, set_name, type_name, entries,
create_options=None, entry_options=None):
self.check_name(set_name)
self.check_type(type_name)
temp_file = tempFile()
if ' ' in set_name:
set_name = "'%s'" % set_name
args = [ "create", set_name, type_name, "-exist" ]
if create_options:
for k,v in create_options.items():
args.append(k)
if v != "":
args.append(v)
temp_file.write("%s\n" % " ".join(args))
for entry in entries:
if ' ' in entry:
entry = "'%s'" % entry
if entry_options:
temp_file.write("add %s %s %s\n" % (set_name, entry,
" ".join(entry_options)))
else:
temp_file.write("add %s %s\n" % (set_name, entry))
temp_file.close()
stat = os.stat(temp_file.name)
log.debug2("%s: %s restore %s", self.__class__, self._command,
"%s: %d" % (temp_file.name, stat.st_size))
args = [ "restore" ]
(status, ret) = runProg(self._command, args,
stdin=temp_file.name)
if log.getDebugLogLevel() > 2:
try:
lines = readfile(temp_file.name)
except:
pass
else:
i = 1
for line in readfile(temp_file.name):
log.debug3("%8d: %s" % (i, line), nofmt=1, nl=0)
if not line.endswith("\n"):
log.debug3("", nofmt=1)
i += 1
os.unlink(temp_file.name)
if status != 0:
raise ValueError("'%s %s' failed: %s" % (self._command,
" ".join(args), ret))
return ret
def flush(self, set_name):
args = [ "flush" ]
if set_name:
args.append(set_name)
return self.__run(args)
def rename(self, old_set_name, new_set_name):
return self.__run([ "rename", old_set_name, new_set_name ])
def swap(self, set_name_1, set_name_2):
return self.__run([ "swap", set_name_1, set_name_2 ])
def version(self):
return self.__run([ "version" ])
def check_ipset_name(ipset):
    """Return True if the given ipset name does not exceed IPSET_MAXNAMELEN."""
    return len(ipset) <= IPSET_MAXNAMELEN
| divereigh/firewalld | src/firewall/core/ipset.py | Python | gpl-2.0 | 6,843 |
from minieigen import *
from woo.dem import *
import woo.core, woo.models
from math import *
import numpy
class PourFeliciter(woo.core.Preprocessor,woo.pyderived.PyWooObject):
    '''Empty placeholder preprocessor: declares no attributes and builds
    nothing when called.'''
    # NOTE(review): the original docstring was identical to NewtonsCradle's
    # ("Showcase for custom packing predicates, and importing surfaces from
    # STL.") and did not match this class.
    _classTraits=None
    _PAT=woo.pyderived.PyAttrTrait # less typing
    _attrTraits=[
    ]
    def __init__(self,**kw):
        # standard woo preprocessor boilerplate: init base, apply keyword args
        woo.core.Preprocessor.__init__(self)
        self.wooPyInit(self.__class__,woo.core.Preprocessor,**kw)
    def __call__(self):
        # preprocessor builds the simulation when called
        pass
class NewtonsCradle(woo.core.Preprocessor,woo.pyderived.PyWooObject):
    '''Newton's cradle demo: a row of spheres, each suspended on a pair of
    cables (trusses); the first :obj:`nFall` spheres start swung out by
    :obj:`fallAngle` and fall under gravity.'''
    # NOTE(review): the original docstring was copy-pasted from another demo
    # class ("custom packing predicates ... STL") and did not match.
    _classTraits=None
    _PAT=woo.pyderived.PyAttrTrait # less typing
    _attrTraits=[
        _PAT(int,'nSpheres',5,'Total number of spheres'),
        _PAT(int,'nFall',1,'The number of spheres which are out of the equilibrium position at the beginning.'),
        _PAT(float,'fallAngle',pi/4.,unit='deg',doc='Initial angle of falling spheres.'),
        _PAT(float,'rad',.005,unit='m',doc='Radius of spheres'),
        _PAT(Vector2,'cabHtWd',(.1,.1),unit='m',doc='Height and width of the suspension'),
        _PAT(float,'cabRad',.0005,unit='m',doc='Radius of the suspending cables'),
        _PAT(woo.models.ContactModelSelector,'model',woo.models.ContactModelSelector(name='Hertz',restitution=.99,numMat=(1,2),matDesc=['spheres','cables'],mats=[FrictMat(density=3e3,young=2e8),FrictMat(density=.001,young=2e8)]),doc='Select contact model. The first material is for spheres; the second, optional, material, is for the suspension cables.'),
        _PAT(Vector3,'gravity',(0,0,-9.81),'Gravity acceleration'),
        _PAT(int,'plotEvery',10,'How often to collect plot data'),
        _PAT(float,'dtSafety',.7,':obj:`woo.core.Scene.dtSafety`')
    ]
    def __init__(self,**kw):
        # standard woo preprocessor boilerplate: init base, apply keyword args
        woo.core.Preprocessor.__init__(self)
        self.wooPyInit(self.__class__,woo.core.Preprocessor,**kw)
    def __call__(self):
        # preprocessor builds the simulation when called
        pre=self
        S=woo.core.Scene(fields=[DemField(gravity=pre.gravity)],dtSafety=self.dtSafety)
        S.pre=pre.deepcopy()
        # equilibrium x-positions of the spheres, spaced one diameter apart
        xx=numpy.linspace(0,(pre.nSpheres-1)*2*pre.rad,num=pre.nSpheres)
        mat=pre.model.mats[0]
        # second material (if given) is used for the suspension cables
        cabMat=(pre.model.mats[1] if len(pre.model.mats)>1 else mat)
        ht=pre.cabHtWd[0]
        for i,x in enumerate(xx):
            # color gradient along the row (capped below 1.)
            color=min(.999,(x/xx[-1]))
            # the first nFall spheres start displaced by fallAngle on a
            # circular arc of radius ht; the rest hang in equilibrium
            s=Sphere.make((x,0,0) if i>=pre.nFall else (x-ht*sin(pre.fallAngle),0,ht-ht*cos(pre.fallAngle)),radius=pre.rad,mat=mat,color=color)
            n=s.shape.nodes[0]
            S.dem.par.add(s)
            # sphere's node is integrated
            S.dem.nodesAppend(n)
            # two cables per sphere, attached left/right of the row plane;
            # their upper endpoints are fully blocked (fixed supports)
            for p in [Vector3(x,-pre.cabHtWd[1]/2,pre.cabHtWd[0]),Vector3(x,pre.cabHtWd[1]/2,pre.cabHtWd[0])]:
                t=Truss.make([n,p],radius=pre.cabRad,wire=False,color=color,mat=cabMat,fixed=None)
                t.shape.nodes[1].blocked='xyzXYZ'
                S.dem.par.add(t)
        S.engines=DemField.minimalEngines(model=pre.model,dynDtPeriod=20)+[
            IntraForce([In2_Truss_ElastMat()]),
            woo.core.PyRunner(self.plotEvery,'S.plot.addData(i=S.step,t=S.time,total=S.energy.total(),relErr=(S.energy.relErr() if S.step>1000 else 0),**S.energy)'),
        ]
        S.lab.dynDt.maxRelInc=1e-6
        S.trackEnergy=True
        S.plot.plots={'i':('total','**S.energy')}
        return S
| sjl767/woo | py/pre/toys.py | Python | gpl-2.0 | 3,537 |
'''File Tree scanning related functions and classes'''
import os
class DiskObjInfo(object):
    '''Hold information to identify an object on the disk.

    Identity (equality, hashing, str()) is defined by the path relative to
    the scan root; the actual on-disk path is carried alongside.
    '''

    def __init__(self, rel_path, path_on_disk):
        # BUGFIX: strip a trailing '/' before taking the basename; otherwise
        # directory rel_paths (which end in '/') always yielded name == ''.
        self.__name = os.path.basename(rel_path.rstrip('/'))
        self.__rel_path = rel_path
        self.__path_on_disk = path_on_disk

    @property
    def name(self):
        '''Name of the disk object without path'''
        return self.__name

    @property
    def rel_path(self):
        '''Path to disk object relative to scan path'''
        return self.__rel_path

    @property
    def path_on_disk(self):
        '''Actual path to disk object'''
        return self.__path_on_disk

    def __eq__(self, obj):
        # Equal when the other object exposes the same rel_path; objects
        # without a rel_path attribute are not comparable.
        try:
            return obj.rel_path == self.__rel_path
        except AttributeError:
            return NotImplemented

    def __hash__(self):
        # BUGFIX: __eq__ without __hash__ left the default identity hash,
        # so equal objects could land in different dict/set buckets.
        return hash(self.__rel_path)

    def __str__(self):
        return self.__rel_path
class FileInfo(DiskObjInfo):
    '''Information about a single file found during a scan.'''

    TYPE_FLAG = 'F'

    def __init__(self, rel_path, path_on_disk=None):
        # Directory rel_paths end in '/'; reject those here.
        if rel_path[-1] == '/':
            raise Exception("File path should not end in '/'")
        super(FileInfo, self).__init__(rel_path, path_on_disk)

    @property
    def is_file(self):
        '''True: this object represents a file.'''
        return True

    @property
    def is_dir(self):
        '''False: this object never represents a directory.'''
        return False

    @property
    def fileobj_type(self):
        '''Single-character type flag ('F') identifying this object kind.'''
        return self.TYPE_FLAG
class DirInfo(DiskObjInfo):
    '''Information about a single directory found during a scan.'''

    TYPE_FLAG = 'D'

    def __init__(self, rel_path, path_on_disk=None):
        # Directory rel_paths must carry a trailing '/'.
        if rel_path[-1] != '/':
            raise Exception("Dir path should end in '/'")
        super(DirInfo, self).__init__(rel_path, path_on_disk)

    @property
    def is_file(self):
        '''False: this object never represents a file.'''
        return False

    @property
    def is_dir(self):
        '''True: this object represents a directory.'''
        return True

    @property
    def fileobj_type(self):
        '''Single-character type flag ('D') identifying this object kind.'''
        return self.TYPE_FLAG
def find_files_for_sync(root_path):
    '''Yield FileInfo/DirInfo objects for everything under root_path.

    Recursively walks the tree (like os.walk) and yields one info object per
    file and per directory, with paths relative to root_path using '/' as the
    separator.  If root_path itself is a file, a single FileInfo is yielded.

    Given:
        /home/nate/picture.png
        /home/nate/secret/
        /home/nate/secret/recipe.txt
    and argument '/home/nate', yields FileInfo('picture.png'),
    DirInfo('secret/') and FileInfo('secret/recipe.txt').

    @param root_path: Path on disk to search under
    @return: Generator of DiskObjInfo objects relative to root path
    '''
    # Sanity checks.
    # TODO: what if directory disappears during scan? Handle more cleanly?
    if not os.path.exists(root_path):
        raise Exception("Invalid path: " + root_path)
    if os.path.isfile(root_path):
        yield FileInfo(os.path.basename(root_path), root_path)
        return
    if not os.path.isdir(root_path):
        raise Exception("Not a directory: " + root_path)

    # Map the helper's type flags onto the info classes.
    factories = {'D': DirInfo, 'F': FileInfo}
    for _obj_name, rel_path, disk_path, obj_type in _find_file_paths(root_path):
        factory = factories.get(obj_type)
        if factory is not None:
            yield factory(rel_path, disk_path)
def _find_file_paths(root_path, rel_path=None):
'''Helper for find_files_for_sync which doesn't worry about DiskObjInfo classes'''
for obj_name in os.listdir(root_path):
if obj_name not in ['.', '..']:
disk_path = os.path.join(root_path, obj_name)
obj_rel_path = None
if rel_path is None:
obj_rel_path = obj_name
else:
obj_rel_path = rel_path + '/' + obj_name
if os.path.isfile(disk_path):
yield obj_name, obj_rel_path, disk_path, 'F'
elif os.path.isdir(disk_path):
yield obj_name, obj_rel_path + '/', disk_path, 'D'
for sub_obj in _find_file_paths(disk_path, obj_rel_path):
yield sub_obj
| shearern/rsync-usb | src/rsync_usb/ftree.py | Python | gpl-2.0 | 4,574 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Redefine Partner.logo as an ImageField uploading to 'partners'
    (French verbose name and help text)."""

    dependencies = [
        ('core', '0028_auto_20151026_0926'),
    ]

    operations = [
        migrations.AlterField(
            model_name='partner',
            name='logo',
            # help_text/verbose_name are user-facing French strings; they are
            # runtime data and must stay exactly as-is.
            field=models.ImageField(default=None, upload_to='partners', verbose_name='Logo', help_text="Envoyez le logo du partenaire ici.<br />Il doit faire 150x150px. Si la largeur est différente de la hauteur, l'image apparaitra déformée."),
        ),
    ]
| agripo/website | core/migrations/0029_auto_20151028_1609.py | Python | gpl-2.0 | 595 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South migration for the message app: creates the
    'message_message' table."""

    def forwards(self, orm):
        """Apply: create the Message table with all its columns."""
        # Adding model 'Message'
        db.create_table(u'message_message', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created', self.gf('model_utils.fields.AutoCreatedField')(default=datetime.datetime.now)),
            ('modified', self.gf('model_utils.fields.AutoLastModifiedField')(default=datetime.datetime.now)),
            ('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'], null=True, blank=True)),
            ('object_id', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('group_id', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('type', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('subject', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('content', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('sender', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='sent_messages', null=True, to=orm['auth.User'])),
            ('recipient', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='received_messages', null=True, to=orm['auth.User'])),
            ('sent_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
            ('read_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('sender_deleted_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('recipient_deleted_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('status', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('send_email', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ))
        db.send_create_signal(u'message', ['Message'])

    def backwards(self, orm):
        """Revert: drop the Message table."""
        # Deleting model 'Message'
        db.delete_table(u'message_message')

    # Frozen ORM snapshot used by South while running this migration;
    # generated code -- do not edit by hand.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'message.message': {
            'Meta': {'ordering': "['-sent_at', '-id']", 'object_name': 'Message'},
            'content': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'group_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'object_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'read_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'recipient': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'received_messages'", 'null': 'True', 'to': u"orm['auth.User']"}),
            'recipient_deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'send_email': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'sender': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sent_messages'", 'null': 'True', 'to': u"orm['auth.User']"}),
            'sender_deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'sent_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'subject': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'type': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        }
    }

    complete_apps = ['message']
#Boss Ogg - A Music Server
#(c)2003 by Ted Kulp (wishy@comcast.net)
#This project's homepage is: http://bossogg.wishy.org
#
#This program is free software; you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation; either version 2 of the License, or
#(at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from boss3.util import Logger
from boss3.util.Session import *
import boss3.xmlrpc.bossxmlrpclib as xmlrpclib
class Util:
    """
    Util realm. This will contain basic functions that don't
    really fit anywhere else. They can generally have a very low
    security level.

    util("version"): Returns version information about the running
    server.

    Parameters:
     * None

    Returns:
     * Struct
      * version - string
      * name - string
    """

    def handleRequest(self, cmd, argstuple):
        """Dispatch a 'util' realm command.

        Returns the command interface's result for known commands, or None
        when the session has no 'cmdint' or the command is unknown.
        """
        session = Session()
        # Idiom fix: build the argument list with list() instead of a manual
        # append loop.  NOTE(review): args is currently unused by the only
        # supported command ("version") but is kept for interface parity.
        args = list(argstuple)
        if session.hasKey('cmdint'):
            cmdint = session['cmdint']
            if cmd == "version":
                return cmdint.util.version()
# vim:ts=8 sw=8 noet
| tedkulp/bossogg | boss3/xmlrpc/Util.py | Python | gpl-2.0 | 1,506 |
## begin license ##
#
# "Meresco Lucene" is a set of components and tools to integrate Lucene into Meresco
#
# Copyright (C) 2013-2016, 2019-2021 Seecr (Seek You Too B.V.) https://seecr.nl
# Copyright (C) 2013-2014 Stichting Bibliotheek.nl (BNL) http://www.bibliotheek.nl
# Copyright (C) 2015-2016, 2019 Koninklijke Bibliotheek (KB) http://www.kb.nl
# Copyright (C) 2016, 2020-2021 Stichting Kennisnet https://www.kennisnet.nl
# Copyright (C) 2021 Data Archiving and Network Services https://dans.knaw.nl
# Copyright (C) 2021 SURF https://www.surf.nl
# Copyright (C) 2021 The Netherlands Institute for Sound and Vision https://beeldengeluid.nl
#
# This file is part of "Meresco Lucene"
#
# "Meresco Lucene" is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# "Meresco Lucene" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Meresco Lucene"; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
## end license ##
from .utils import simplifiedDict
from meresco.components.json import JsonDict
from simplejson.decoder import JSONDecodeError
class ComposedQuery(object):
    """Specification of a query composed over multiple Lucene cores.

    Collects per-core queries, filters, facets, drilldowns, rank queries,
    matches and unites; results are produced from the designated
    ``resultsFrom`` core.  Most ``add*``/``set*`` methods return ``self``
    so calls can be chained.
    """

    def __init__(self, resultsFromCore, query=None):
        # Per-core administration; cores are registered lazily as they are
        # referenced by the add*/set* methods.
        self.cores = set()
        self._queries = {}
        self._filterQueries = {}
        self._excludeFilterQueries = {}
        self._facets = {}
        self._drilldownQueries = {}
        self._otherCoreFacetFilters = {}
        self._rankQueries = {}
        self._matches = {}
        self._unites = []
        self._sortKeys = []
        self.resultsFrom = resultsFromCore
        if query:
            self.setCoreQuery(resultsFromCore, query=query)
        else:
            self.cores.add(resultsFromCore)

    def _makeProperty(name, defaultValue=None):
        # Class-body helper (removed again below with ``del``): builds a
        # plain get/set property backed by the given attribute name.
        return property(
            fget=lambda self: getattr(self, name, defaultValue),
            fset=lambda self, value: setattr(self, name, value)
        )
    # Simple pass-through execution options.
    stop = _makeProperty('_stop')
    start = _makeProperty('_start')
    sortKeys = _makeProperty('_sortKeys')
    suggestionRequest = _makeProperty('_suggestionRequest')
    dedupField = _makeProperty('_dedupField')
    dedupSortField = _makeProperty('_dedupSortField')
    storedFields = _makeProperty('_storedFields')
    clustering = _makeProperty('_clustering')
    clusteringConfig = _makeProperty('_clusteringConfig')
    unqualifiedTermFields = _makeProperty('_unqualifiedTermFields')
    rankQueryScoreRatio = _makeProperty('_rankQueryScoreRatio')
    relationalFilterJson = _makeProperty('_relationalFilterJson')
    del _makeProperty

    def setCoreQuery(self, core, query, filterQueries=None, facets=None):
        """Set the main query for a core, optionally with filters/facets."""
        self.cores.add(core)
        self._queries[core] = query
        if not filterQueries is None:
            for filterQuery in filterQueries:
                self.addFilterQuery(core, filterQuery)
        if not facets is None:
            for facet in facets:
                self.addFacet(core, facet)
        return self

    def addFilterQuery(self, core, query):
        self.cores.add(core)
        self._filterQueries.setdefault(core, []).append(query)
        return self

    def addExcludeFilterQuery(self, core, query):
        self.cores.add(core)
        self._excludeFilterQueries.setdefault(core, []).append(query)
        return self

    def addFacet(self, core, facet):
        self.cores.add(core)
        self._facets.setdefault(core, []).append(facet)
        return self

    def addDrilldownQuery(self, core, drilldownQuery):
        self.cores.add(core)
        self._drilldownQueries.setdefault(core, []).append(drilldownQuery)
        return self

    def addOtherCoreFacetFilter(self, core, query):
        self.cores.add(core)
        self._otherCoreFacetFilters.setdefault(core, []).append(query)
        return self

    def setRankQuery(self, core, query):
        self.cores.add(core)
        self._rankQueries[core] = query
        return self

    def addMatch(self, matchCoreASpec, matchCoreBSpec):
        """Register how two cores are joined; the spec for the results core
        must contain a 'uniqueKey' entry."""
        self._matches[(matchCoreASpec['core'], matchCoreBSpec['core'])] = (matchCoreASpec, matchCoreBSpec)
        resultsFromCoreSpecFound = False
        for matchCoreSpec in [matchCoreASpec, matchCoreBSpec]:
            coreName = matchCoreSpec['core']
            if coreName == self.resultsFrom:
                resultsFromCoreSpecFound = True
                try:
                    matchCoreSpec['uniqueKey']
                except KeyError:
                    raise ValueError("Match for result core '%s' must have a uniqueKey specification." % self.resultsFrom)
        if not resultsFromCoreSpecFound:
            raise ValueError("Match that does not include resultsFromCore ('%s') not yet supported" % self.resultsFrom)
        return self

    def addUnite(self, uniteCoreASpec, uniteCoreBSpec):
        """Register a single unite (OR-combination) of two per-core queries;
        at most one unite is supported."""
        if len(self.unites) > 0:
            raise ValueError("No more than 1 addUnite supported")
        for uniteCoreSpec in (uniteCoreASpec, uniteCoreBSpec):
            self.cores.add(uniteCoreSpec['core'])
        self._unites.append(Unite(self, uniteCoreASpec, uniteCoreBSpec))
        return self

    def addSortKey(self, sortKey):
        # sortKey is a dict; its 'core' defaults to the results core.
        core = sortKey.get('core', self.resultsFrom)
        self.cores.add(core)
        self._sortKeys.append(sortKey)

    def queryFor(self, core):
        return self._queries.get(core)

    def excludeFilterQueriesFor(self, core):
        return self._excludeFilterQueries.get(core, [])

    def filterQueriesFor(self, core):
        return self._filterQueries.get(core, [])

    def facetsFor(self, core):
        return self._facets.get(core, [])

    def drilldownQueriesFor(self, core):
        return self._drilldownQueries.get(core, [])

    def otherCoreFacetFiltersFor(self, core):
        return self._otherCoreFacetFilters.get(core, [])

    def rankQueryFor(self, core):
        return self._rankQueries.get(core)

    def keyName(self, core, otherCore):
        """Return the key field name used to join 'core' with 'otherCore'."""
        if core == otherCore: #TODO: Needed for filters/rank's in same core as queried core
            for matchCoreASpec, matchCoreBSpec in self._matches.values():
                if matchCoreASpec['core'] == core:
                    coreSpec = matchCoreASpec
                    break
                elif matchCoreBSpec['core'] == core:
                    coreSpec = matchCoreBSpec
                    break
        else:
            coreSpec, _ = self._matchCoreSpecs(core, otherCore)
        return coreSpec.get('uniqueKey', coreSpec.get('key'))

    def keyNames(self, core):
        """Return the set of key names joining 'core' with every other core."""
        keyNames = set()
        for coreName in self.cores:
            if coreName != core:
                keyNames.add(self.keyName(core, coreName))
        return keyNames

    def queriesFor(self, core):
        """Main query plus filter queries for 'core', skipping None/empty."""
        return [q for q in [self.queryFor(core)] + self.filterQueriesFor(core) if q]

    @property
    def unites(self):
        # Defensive copy so callers cannot mutate the internal list.
        return self._unites[:]

    @property
    def filterQueries(self):
        return self._filterQueries.items()

    @property
    def numberOfUsedCores(self):
        return len(self.cores)

    def isSingleCoreQuery(self):
        return self.numberOfUsedCores == 1

    def coresInMatches(self):
        return set(c for matchKey in self._matches.keys() for c in matchKey)

    def validate(self):
        """Check that every used core has a match with the results core and
        that relationalFilterJson (if set) parses as JSON."""
        for core in self.cores:
            if core == self.resultsFrom:
                continue
            try:
                self._matchCoreSpecs(self.resultsFrom, core)
            except KeyError:
                raise ValueError("No match set for cores %s" % str((self.resultsFrom, core)))
        if self.relationalFilterJson:
            try:
                JsonDict.loads(self.relationalFilterJson)
            except JSONDecodeError:
                raise ValueError("Value '%s' for 'relationalFilterJson' can not be parsed as JSON." % self.relationalFilterJson)

    def convertWith(self, **converts):
        """Convert every stored query in place using one converter per core;
        the results core's converter also receives the composed query (and
        unqualifiedTermFields, when set) as keyword arguments."""
        def convertQuery(core, query):
            if query is None:
                return None
            convertFunction = converts[core]
            if core == self.resultsFrom:
                kwargs = {'composedQuery': self}
                if self.unqualifiedTermFields:
                    kwargs['unqualifiedTermFields'] = self.unqualifiedTermFields
                return convertFunction(query, **kwargs)
            return convertFunction(query)
        self._queries = dict((core, convertQuery(core, v)) for core, v in self._queries.items())
        self._filterQueries = dict((core, [convertQuery(core, v) for v in values]) for core, values in self._filterQueries.items())
        self._excludeFilterQueries = dict((core, [convertQuery(core, v) for v in values]) for core, values in self._excludeFilterQueries.items())
        self._rankQueries = dict((core, convertQuery(core, v)) for core, v in self._rankQueries.items())
        for unite in self._unites:
            unite.convertQuery(convertQuery)
        self._otherCoreFacetFilters = dict((core, [convertQuery(core, v) for v in values]) for core, values in self._otherCoreFacetFilters.items())

    def asDict(self):
        """Serialize to a plain dict (match keys joined with '->')."""
        result = dict(vars(self))
        result['_matches'] = dict(('->'.join(key), value) for key, value in result['_matches'].items())
        result['_unites'] = [unite.asDict() for unite in self._unites]
        result['cores'] = list(sorted(self.cores))
        return result

    @classmethod
    def fromDict(cls, dct):
        """Inverse of asDict(): rebuild a ComposedQuery from a plain dict."""
        cq = cls(dct['resultsFrom'])
        matches = dct['_matches']
        dct['_matches'] = dict((tuple(key.split('->')), value) for key, value in matches.items())
        dct['_unites'] = [Unite.fromDict(cq, uniteDict) for uniteDict in dct['_unites']]
        dct['cores'] = set(dct['cores'])
        for attr, value in dct.items():
            setattr(cq, attr, value)
        return cq

    def _matchCoreSpecs(self, *cores):
        # Matches are stored under one (coreA, coreB) ordering; try both.
        try:
            coreASpec, coreBSpec = self._matches[cores]
        except KeyError:
            coreBSpec, coreASpec = self._matches[tuple(reversed(cores))]
        return coreASpec, coreBSpec

    def __repr__(self):
        return "%s%s" % (self.__class__.__name__, self.asDict())

    def infoDict(self):
        return {
            'type': self.__class__.__name__,
            'query': simplifiedDict(dict((k.replace('_', ''), v) for k,v in self.asDict().items()))
        }
class Unite(object):
    """Pair of per-core query specs (core name + query) registered on a
    ComposedQuery via addUnite()."""

    def __init__(self, parent, coreASpec, coreBSpec):
        self._parent = parent
        self.coreASpec = coreASpec
        self.coreBSpec = coreBSpec

    def queries(self):
        """Yield (query-dict, resultKeyName) for both sides of the unite."""
        coreA = self.coreASpec['core']
        coreB = self.coreBSpec['core']
        keyNameA = self._parent.keyName(coreA, coreB)
        keyNameB = self._parent.keyName(coreB, coreA)
        resultKeyName = keyNameA if self._parent.resultsFrom == coreA else keyNameB
        for spec, keyName in ((self.coreASpec, keyNameA), (self.coreBSpec, keyNameB)):
            yield dict(core=spec['core'], query=spec['query'], keyName=keyName), resultKeyName

    def convertQuery(self, convertQueryFunction):
        """Convert both stored queries in place with convertQueryFunction(core, query)."""
        self.coreASpec['query'] = convertQueryFunction(self.coreASpec['core'], self.coreASpec['query'])
        self.coreBSpec['query'] = convertQueryFunction(self.coreBSpec['core'], self.coreBSpec['query'])

    def asDict(self):
        """Serialize as {'A': [core, query], 'B': [core, query]}."""
        specs = (self.coreASpec, self.coreBSpec)
        return dict(zip('AB', ([spec['core'], spec['query']] for spec in specs)))

    @classmethod
    def fromDict(cls, parent, dct):
        """Inverse of asDict()."""
        coreA, queryA = dct['A']
        coreB, queryB = dct['B']
        return cls(parent, dict(core=coreA, query=queryA), dict(core=coreB, query=queryB))
| seecr/meresco-lucene | meresco/lucene/composedquery.py | Python | gpl-2.0 | 11,941 |
# Python imports
# Lib imports
from PyInquirer import style_from_dict, Token
# Application imports
class StylesMixin:
    """
    The StylesMixin has style methods that get called and
    return their respective objects.
    """

    def _solid(self, color, pointer=None):
        """Build a single-color PyInquirer style.

        All tokens use *color*; the pointer may use a different *pointer*
        color (defaults to *color*).  Factors out the duplication between
        orange/red/purple/green, which only differed in the hex values.
        """
        pointer = pointer or color
        return style_from_dict({
            Token.Pointer: pointer + ' bold',
            Token.QuestionMark: color + ' bold',
            Token.Separator: color,
            Token.Selected: color,
            Token.Instruction: '',  # default
            Token.Answer: color + ' bold',
            Token.Question: '',  # default
        })

    def default(self):
        # Mixed palette; kept explicit because it does not fit _solid().
        return style_from_dict({
            Token.Separator: '#6C6C6C',
            Token.QuestionMark: '#FF9D00 bold',
            Token.Selected: '#5F819D',
            Token.Pointer: '#FF9D00 bold',
            Token.Instruction: '',  # default
            Token.Answer: '#5F819D bold',
            Token.Question: '',
        })

    def orange(self):
        return self._solid('#FF9D00', pointer='#6C6C6C')

    def red(self):
        return self._solid('#c70e0e')

    def purple(self):
        return self._solid('#673ab7')

    def green(self):
        return self._solid('#29a116', pointer='#ffde00')
| RaveYoda/Shellmen | src/core/mixins/StylesMixin.py | Python | gpl-2.0 | 2,111 |
# Portions Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# highlight.py - highlight extension implementation file
#
# Copyright 2007-2009 Adam Hupp <adam@hupp.org> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
#
# The original module was split in an interface and an implementation
# file to defer pygments loading and speedup extension setup.
from __future__ import absolute_import
from edenscm.mercurial import demandimport, encoding, util
# pygments (and its use of pkgutil/pkg_resources) must not go through
# Mercurial's lazy-import machinery; exempt those modules and import
# pygments eagerly inside the deactivated() block.
demandimport.ignore.extend(["pkgutil", "pkg_resources", "__main__"])

with demandimport.deactivated():
    import pygments
    import pygments.formatters
    import pygments.lexers
    import pygments.util

# Local aliases for the pygments entry points used by pygmentize() below.
highlight = pygments.highlight
ClassNotFound = pygments.util.ClassNotFound
guess_lexer = pygments.lexers.guess_lexer
guess_lexer_for_filename = pygments.lexers.guess_lexer_for_filename
TextLexer = pygments.lexers.TextLexer
HtmlFormatter = pygments.formatters.HtmlFormatter

# <link> tag appended to the hgweb "header" template; {url} is substituted
# by the template engine.
SYNTAX_CSS = '\n<link rel="stylesheet" href="{url}highlightcss" ' 'type="text/css" />'
def pygmentize(field, fctx, style, tmpl, guessfilenameonly=False):
    """Syntax-highlight template *field* for file context *fctx*.

    Returns early (leaving the template untouched) when the file is binary,
    no suitable pygments lexer is found, or the file is plain text.
    Otherwise installs a 'colorize' template filter that emits the
    highlighted lines one at a time.
    """
    # append a <link ...> to the syntax highlighting css
    old_header = tmpl.load("header")
    if SYNTAX_CSS not in old_header:
        new_header = old_header + SYNTAX_CSS
        tmpl.cache["header"] = new_header

    text = fctx.data()
    if util.binary(text):
        return

    # str.splitlines() != unicode.splitlines() because "reasons"
    # (strip the extra control characters unicode treats as line breaks)
    for c in "\x0c\x1c\x1d\x1e":
        if c in text:
            text = text.replace(c, "")

    # Pygments is best used with Unicode strings:
    # <http://pygments.org/docs/unicode/>
    # NOTE(review): bytes.decode/str.encode usage here assumes Python 2
    # semantics -- TODO confirm on Python 3.
    text = text.decode(encoding.encoding, "replace")

    # To get multi-line strings right, we can't format line-by-line
    try:
        lexer = guess_lexer_for_filename(fctx.path(), text[:1024], stripnl=False)
    except (ClassNotFound, ValueError):
        # guess_lexer will return a lexer if *any* lexer matches. There is
        # no way to specify a minimum match score. This can give a high rate of
        # false positives on files with an unknown filename pattern.
        if guessfilenameonly:
            return

        try:
            lexer = guess_lexer(text[:1024], stripnl=False)
        except (ClassNotFound, ValueError):
            # Don't highlight unknown files
            return

    # Don't highlight text files
    if isinstance(lexer, TextLexer):
        return

    formatter = HtmlFormatter(nowrap=True, style=style)
    colorized = highlight(text, lexer, formatter)
    # One highlighted line per template line; the filter consumes the
    # generator lazily, one call per line.
    coloriter = (s.encode(encoding.encoding, "replace") for s in colorized.splitlines())
    tmpl.filters["colorize"] = lambda x: next(coloriter)

    oldl = tmpl.cache[field]
    newl = oldl.replace("line|escape", "line|colorize")
    tmpl.cache[field] = newl
| facebookexperimental/eden | eden/hg-server/edenscm/hgext/highlight/highlight.py | Python | gpl-2.0 | 3,008 |
"""Code for constructing CTMCs and computing transition probabilities
in them."""
from numpy import zeros
from scipy import matrix
from scipy.linalg import expm
class CTMC(object):
    """Class representing the CTMC for the back-in-time coalescent."""

    def __init__(self, state_space, rates_table):
        """Create the CTMC based on a state space and a mapping
        from transition labels to rates.

        :param state_space: The state space the CTMC is over.
        :type state_space: IMCoalHMM.CoalSystem
        :param rates_table: A table where transition rates can
         be looked up.
        :type rates_table: dict
        """
        # Remember this, just to decouple state space from CTMC
        # in other parts of the code...
        self.state_space = state_space

        no_states = len(state_space.states)
        # noinspection PyCallingNonCallable
        self.rate_matrix = matrix(zeros((no_states, no_states)))
        for src, trans, dst in state_space.transitions:
            self.rate_matrix[src, dst] = rates_table[trans]

        # Each diagonal entry is minus the total rate out of its state, so
        # every row of the rate matrix sums to zero.
        # NOTE: range() replaces the Python-2-only xrange(); for looping the
        # two are interchangeable, and this keeps the module importable on
        # Python 3 as well.
        for i in range(no_states):
            self.rate_matrix[i, i] = - self.rate_matrix[i, :].sum()

        # Memoised transition probability matrices, keyed by time interval.
        self.prob_matrix_cache = dict()

    def probability_matrix(self, delta_t):
        """Computes the transition probability matrix for a
        time period of delta_t.

        :param delta_t: The time period the CTMC should run for.
        :type delta_t: float

        :returns: The probability transition matrix
        :rtype: matrix
        """
        if delta_t not in self.prob_matrix_cache:
            self.prob_matrix_cache[delta_t] = expm(self.rate_matrix * delta_t)
        return self.prob_matrix_cache[delta_t]
# We cache the CTMCs because in the optimisations, especially the models with a large number
# of parameters, we are creating the same CTMCs again and again and computing the probability
# transition matrices is where we spend most of the time.
from cache import Cache

CTMC_CACHE = Cache()


def make_ctmc(state_space, rates_table):
    """Create the CTMC based on a state space and a mapping
    from transition labels to rates, reusing a cached instance when the
    same (state space, rates) pair has been seen before.

    :param state_space: The state space the CTMC is over.
    :type state_space: IMCoalHMM.CoalSystem
    :param rates_table: A table where transition rates can be looked up.
    :type rates_table: dict
    """
    # dicts are unhashable, so freeze the rates into a tuple for the key.
    # NOTE(review): the key depends on the dict's iteration order, so two
    # equal tables built in different orders may miss the cache -- harmless
    # (only costs a recomputation), but worth confirming.
    cache_key = (state_space, tuple(rates_table.items()))
    if cache_key not in CTMC_CACHE:
        CTMC_CACHE[cache_key] = CTMC(state_space, rates_table)
    return CTMC_CACHE[cache_key]
| mailund/IMCoalHMM | src/IMCoalHMM/CTMC.py | Python | gpl-2.0 | 2,581 |
# import data_structures.binary_search_trees.rope as rope
import data_structures.binary_search_trees.set_range_sum as set_range_sum
import data_structures.binary_search_trees.tree_orders as tree_orders
import pytest
import os
import sys
import resource
CI = os.environ.get('CI') == 'true'
# Helpers
class BinarySearchTree:
    """Reference binary search tree used by the tests in this module.

    Supports dict-like access (``tree[k] = v``, ``tree[k]``, ``k in tree``)
    plus ``len``.  Keys are unique; putting an existing key replaces its
    payload without growing the tree.
    """

    def __init__(self):
        self.root = None   # TreeNode at the top of the tree, or None
        self.size = 0      # number of distinct keys stored

    def length(self):
        return self.size

    def __len__(self):
        return self.size

    def __iter__(self):
        # Delegates to the root node's iterator (fails on an empty tree).
        return self.root.__iter__()

    def get(self, key):
        """Return the payload stored under ``key``, or None if absent."""
        if self.root:
            res = self.find(key, self.root)
            if res and res.key == key:
                return res.payload
            else:
                return None
        else:
            return None

    def find(self, key, node):
        """Return the node holding ``key``, or -- if the key is absent --
        the node that would become its parent on insertion."""
        if node.key == key:
            return node
        if key < node.key:
            if not node.has_left_child():
                return node
            return self.find(key, node.left_child)
        else:
            if not node.has_right_child():
                return node
            return self.find(key, node.right_child)

    def __getitem__(self, key):
        return self.get(key)

    def __contains__(self, key):
        if self.get(key):
            return True
        else:
            return False

    def put(self, key, val):
        """Insert ``key``/``val``, replacing the payload if the key exists.

        BUG FIX: the old debug prints had their messages swapped ("put on
        empty" when the tree was non-empty and vice versa) -- they have been
        removed.  BUG FIX: ``size`` was incremented on every call, so
        replacing an existing key inflated ``len(tree)``; it now grows only
        when a new node is actually created.
        """
        if self.root:
            inserted = self._put(key, val, self.root)
        else:
            self.root = TreeNode(key, val)
            inserted = True
        if inserted:
            self.size += 1

    def _put(self, key, val, node):
        """Insert under the subtree rooted at ``node``.

        Returns True when a new node was created, False when an existing
        key's payload was replaced.
        """
        _parent = self.find(key, node)
        if _parent.key == key:  # already exists, replace values
            _parent.replace_node_data(key, val, _parent.left_child,
                                      _parent.right_child)
            return False
        # At this point is guaranteed that _parent has a null child on the
        # side where the new key belongs.
        if key < _parent.key:
            assert not _parent.has_left_child()
            _parent.left_child = TreeNode(key, val, parent=_parent)
        else:
            assert not _parent.has_right_child()
            _parent.right_child = TreeNode(key, val, parent=_parent)
        return True

    def __setitem__(self, k, v):
        """
        Allows usage of [].
        :param k:
        :param v:
        :return:
        """
        self.put(k, v)
class TreeNode:
    """Single node of the BinarySearchTree: a key/payload pair plus links
    to its children and parent."""

    def __init__(self, key, val, left=None, right=None, parent=None):
        self.key = key
        self.payload = val
        self.left_child = left
        self.right_child = right
        self.parent = parent

    def has_left_child(self):
        return self.left_child

    def has_right_child(self):
        return self.right_child

    def is_left_child(self):
        # BUG FIX: the attribute is ``left_child`` -- the old camelCase
        # ``leftChild`` lookup raised AttributeError whenever a parent existed.
        return self.parent and self.parent.left_child == self

    def is_right_child(self):
        # BUG FIX: same camelCase typo as is_left_child (``rightChild``).
        return self.parent and self.parent.right_child == self

    def is_root(self):
        return not self.parent

    def is_leaf(self):
        return not (self.right_child or self.left_child)

    def has_any_children(self):
        return self.right_child or self.left_child

    def has_both_children(self):
        return self.right_child and self.left_child

    def replace_node_data(self, key, value, lc, rc):
        """Overwrite this node's key/payload/children and re-parent the
        new children to point back at this node."""
        self.key = key
        self.payload = value
        self.left_child = lc
        self.right_child = rc
        if self.has_left_child():
            self.left_child.parent = self
        if self.has_right_child():
            self.right_child.parent = self
@pytest.mark.timeout(6)
class TestTreeOrders:
    """Checks tree_orders.TreeOrders traversals (in/pre/post-order) against
    hand-computed expectations for two sample trees."""
    @classmethod
    def setup_class(cls):
        """ setup any state specific to the execution of the given class (which
        usually contains tests).
        """
        # The traversals recurse once per node, so deep inputs need a larger
        # recursion limit and a bigger C stack.
        sys.setrecursionlimit(10 ** 6)  # max depth of recursion
        resource.setrlimit(resource.RLIMIT_STACK, (2 ** 27, 2 ** 27))
    # Each case: node count, key array, and left/right child indices
    # (-1 = no child), followed by the three expected traversal orders.
    @pytest.mark.parametrize("n,key,left,right,exp_inorder,exp_preorder,exp_postorder", [
        (5,
         [4, 2, 5, 1, 3],
         [1, 3, -1, -1, -1],
         [2, 4, -1, -1, -1],
         [1, 2, 3, 4, 5], [4, 2, 1, 3, 5], [1, 3, 2, 5, 4]),
        (10,
         [0, 10, 20, 30, 40, 50, 60, 70, 80, 90],
         [7, -1, -1, 8, 3, -1, 1, 5, -1, -1],
         [2, -1, 6, 9, -1, -1, -1, 4, -1, -1],
         [50, 70, 80, 30, 90, 40, 0, 20, 10, 60],
         [0, 70, 50, 40, 30, 80, 90, 20, 60, 10],
         [50, 80, 90, 30, 40, 70, 10, 60, 20, 0])
    ])
    def test_samples(self, n,key,left,right,exp_inorder,exp_preorder,exp_postorder):
        """Build the tree and compare all three traversal orders."""
        tree = tree_orders.TreeOrders(n, key, left, right)
        assert exp_inorder == tree.order(tree.in_order)
        assert exp_preorder == tree.order(tree.pre_order)
        assert exp_postorder == tree.order(tree.post_order)
@pytest.mark.timeout(120)
class TestSetRangeSum:
    """Drives set_range_sum.RangeSumProcessor with scripted command
    sequences: '+ x' insert, '- x' delete, '? x' membership query,
    's l r' range sum.  Only queries produce output lines."""
    @classmethod
    def setup_class(cls):
        """ setup any state specific to the execution of the given class (which
        usually contains tests).
        """
        # Reset the module-level tree root so earlier test runs cannot leak
        # state into this class.
        # NOTE(review): ``del`` raises AttributeError if ``root`` was never
        # set on the module -- confirm set_range_sum always defines it.
        del set_range_sum.root
        set_range_sum.root = None
    @pytest.mark.parametrize(
        "test_input,expected", [(
            (
                "? 1",
                "+ 1",
                "? 1",
                "+ 2",
                "s 1 2",
                "+ 1000000000",
                "? 1000000000",
                "- 1000000000",
                "? 1000000000",
                "s 999999999 1000000000",
                "- 2",
                "? 2",
                "- 0",
                "+ 9",
                "s 0 9"
            ),
            [
                "Not found",
                "Found",
                "3",
                "Found",
                "Not found",
                "1",
                "Not found",
                "10",
            ]), (
            (
                "? 0",
                "+ 0",
                "? 0",
                "- 0",
                "? 0",
            ),
            [
                "Not found",
                "Found",
                "Not found"
            ]), (
            (
                "+ 491572259",
                "? 491572259",
                "? 899375874",
                "s 310971296 877523306",
                "+ 352411209",
            ),
            [
                "Found",
                "Not found",
                "491572259"
            ]),
        # (
        #     (
        #         "s 88127140 859949755",
        #         "s 407584225 906606553",
        #         "+ 885530090",
        #         "+ 234423189",
        #         "s 30746291 664192454",
        #         "+ 465752492",
        #         "s 848498590 481606032",
        #         "+ 844636782",
        #         "+ 251529178",
        #         "+ 182631153",
        #     ),
        #     [
        #         "0",
        #         "0",
        #         "234423189"
        #     ])
    ])
    def test_samples(self, test_input, expected):
        """Feed each command to the processor and collect non-empty replies."""
        result = []
        processor = set_range_sum.RangeSumProcessor()
        for cmd in test_input:
            res = processor.process(cmd)
            if res:
                result.append(res)
        assert result == expected
    # def test_input_files(self):
    #     result = []
    #     processor = set_range_sum.RangeSumProcessor()
    #     for cmd in test_input:
    #         res = processor.process(cmd)
    #         if res:
    #             result.append(res)
    #     assert result == expected
| chaicko/AlgorithmicToolbox | test/data_structures/test_binary_search_trees.py | Python | gpl-3.0 | 7,486 |
# ============================================================================
# FILE: size.py
# AUTHOR: Shougo Matsushita <Shougo.Matsu at gmail.com>
# License: MIT license
# ============================================================================
from defx.base.column import Base, Highlights
from defx.context import Context
from defx.util import Nvim, readable, Candidate
import typing
class Column(Base):
    """Defx column that renders a right-aligned, human-readable file size."""

    # Units for successive 1024-fold reductions of the raw byte count.
    _UNITS = ('KB', 'MB', 'GB', 'TB')

    def __init__(self, vim: Nvim) -> None:
        super().__init__(vim)
        self.name = 'size'
        self.has_get_with_highlights = True
        self._length = 9

    def get_with_highlights(
        self, context: Context, candidate: Candidate
    ) -> typing.Tuple[str, Highlights]:
        """Return the padded size text plus its highlight region."""
        path = candidate['action__path']
        if not readable(path) or path.is_dir():
            # Directories and unreadable entries show as blank padding.
            return (' ' * self._length, [])
        number, unit = self._get_size(path.stat().st_size)
        text = '{:>6s}{:>3s}'.format(number, unit)
        return (text, [(self.highlight_name, self.start, self._length)])

    def _get_size(self, size: float) -> typing.Tuple[str, str]:
        """Scale a byte count down to the largest unit that keeps the
        value below 1024, returning (formatted number, unit suffix)."""
        step = 1024
        if size < step:
            return (str(size), 'B')
        for unit in self._UNITS:
            size /= step
            if size < step:
                return ('{:.1f}'.format(size), unit)
        return ('INF', '')

    def length(self, context: Context) -> int:
        """Column width in screen cells."""
        return self._length

    def highlight_commands(self) -> typing.List[str]:
        """Vim commands that link this column's highlight group."""
        return [f'highlight default link {self.highlight_name} Constant']
| SpaceVim/SpaceVim | bundle/defx.nvim/rplugin/python3/defx/column/size.py | Python | gpl-3.0 | 1,686 |
from collections import OrderedDict
import pytest
from ucca import textutil
from ucca.constructions import CATEGORIES_NAME, DEFAULT, CONSTRUCTIONS, extract_candidates
from .conftest import PASSAGES, loaded, loaded_valid, multi_sent, crossing, discontiguous, l1_passage, empty
"""Tests the constructions module functions and classes."""
def assert_spacy_not_loaded(*args, **kwargs):
    """Monkeypatch target for textutil.get_nlp: any invocation means the
    pre-annotated passage wrongly triggered a spaCy load, so fail."""
    del kwargs
    del args
    assert False, "Should not load spaCy when passage is pre-annotated"
def extract_and_check(p, constructions=None, expected=None):
    """Extract construction candidates from passage ``p`` and, when
    ``expected`` is given, assert the per-construction edge counts match."""
    d = OrderedDict((construction, [candidate.edge for candidate in candidates]) for construction, candidates in
                    extract_candidates(p, constructions=constructions).items() if candidates)
    if expected is not None:
        # Compare {construction name: edge count} histograms.
        hist = {c.name: len(e) for c, e in d.items()}
        assert hist == expected, " != ".join(",".join(sorted(h)) for h in (hist, expected))
# Each case pairs a passage factory with the expected histogram of
# construction-name -> candidate-edge-count over ALL known constructions.
@pytest.mark.parametrize("create, expected", (
    (loaded, {'P': 1, 'remote': 1, 'E': 3, 'primary': 15, 'U': 2, 'F': 1, 'C': 3, 'A': 1, 'D': 1, 'L': 2, 'mwe': 2,
              'H': 5, 'implicit': 1, 'main_rel': 1}),
    (loaded_valid, {'P': 1, 'remote': 1, 'E': 3, 'primary': 15, 'U': 2, 'F': 1, 'C': 3, 'A': 1, 'D': 1, 'L': 2,
                    'mwe': 2, 'H': 5, 'implicit': 1, 'main_rel': 1}),
    (multi_sent, {'U': 4, 'P': 3, 'mwe': 2, 'H': 3, 'primary': 6, 'main_rel': 2}),
    (crossing, {'U': 3, 'P': 2, 'remote': 1, 'mwe': 1, 'H': 2, 'primary': 3, 'main_rel': 2}),
    (discontiguous, {'G': 1, 'U': 2, 'E': 2, 'primary': 13, 'P': 3, 'F': 1, 'C': 1, 'A': 3, 'D': 2,
                     'mwe': 6, 'H': 3, 'implicit':3, 'main_rel': 2}),
    (l1_passage, {'P': 2, 'mwe': 4, 'H': 3, 'primary': 11, 'U': 2, 'A': 5, 'D': 1, 'L': 2, 'remote': 2, 'S': 1,
                  'implicit':1, 'main_rel': 3}),
    (empty, {}),
))
def test_extract_all(create, expected):
    """Candidate extraction over CONSTRUCTIONS yields the expected counts."""
    extract_and_check(create(), constructions=CONSTRUCTIONS, expected=expected)
@pytest.mark.parametrize("create", PASSAGES)
@pytest.mark.parametrize("constructions", (DEFAULT, [CATEGORIES_NAME]), ids=("default", CATEGORIES_NAME))
def test_extract(create, constructions, monkeypatch):
    """Extraction on pre-annotated passages must not load spaCy: patch
    get_nlp with a failing stub and just run the extraction."""
    monkeypatch.setattr(textutil, "get_nlp", assert_spacy_not_loaded)
    extract_and_check(create(), constructions=constructions)
| danielhers/ucca | ucca/tests/test_constructions.py | Python | gpl-3.0 | 2,357 |
#!/usr/bin/python
import feedparser
import wget
import sqlite3
import time
# Tumblr RSS feeds to poll; the per-feed delay spreads one full pass
# over roughly an hour.
RssUrlList = ['http://postitforward.tumblr.com/rss','http://for-war3-blog-blog.tumblr.com/rss']
sleep=3600/len(RssUrlList)
def mkdir(path):
    """Create directory ``path`` (including parents) if it does not exist.

    Leading/trailing whitespace and a trailing backslash are stripped from
    the path first.  Safe to call when the directory already exists.
    """
    import os
    path = path.strip()
    path = path.rstrip("\\")
    if not os.path.exists(path):
        try:
            os.makedirs(path)
        except OSError:
            # BUG FIX: the old exists-then-create sequence raced with other
            # creators (TOCTOU).  Ignore the error only if the directory now
            # exists; re-raise real failures such as permission errors.
            if not os.path.isdir(path):
                raise
conn = sqlite3.connect('tumblr.db')
def DownloadVideo(rss_url):
    """Fetch ``rss_url``, scrape tumblr video ids from post descriptions and
    download any not yet recorded in the per-blog SQLite table.

    The table name is derived from the blog's hostname; each row stores
    (blog url, video id, post timestamp).  Posts older than the newest
    recorded timestamp are skipped.

    NOTE(review): SQL statements are built via string interpolation -- the
    table name and video id come from feed content, so this is vulnerable
    to SQL injection; parameterized queries would be safer.  The code also
    relies on the Python-2-only ``.next()`` cursor method.
    """
    feeds = feedparser.parse(rss_url)
    # Strip "http://" prefix and "tumblr.com/rss" suffix, drop dashes.
    table=rss_url[7:-15].replace('-','')
    try:
        conn.execute('''CREATE TABLE %s(BLOG TEXT, ADDRESS TEXT PRIMARY KEY, DATE REAL)'''% table)
        conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('%s','new','0')" % (table,rss_url))
#        conn.execute("SELECT * FROM TUMBLR WHERE BLOG == %s").next()
    except:
        # Table already exists (or creation failed); proceed either way.
        pass
#    conn.execute('''CREATE TABLE(BLOG TEXT, ADDRESS TEXT PRIMARY KEY, DATE TEXT);''')
#    conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('rss_url','TEST','TEST')" % table)
    mkdir(rss_url[7:-4])
    for post in feeds.entries:
        thisposttime=float(time.mktime(time.strptime(post.published[:-6],"%a, %d %b %Y %H:%M:%S")))
        # Stop once we reach the newest post already recorded.
        if conn.execute("SELECT MAX(DATE) FROM %s"%table).next()[0] == thisposttime:
            break
        if post.description.find("video_file") == -1:
            continue
        # Extract the "tumblr_<id>" token from the <source src=...> tag.
        sourceadd= post.description.find("source src=")
        tumblradd= post.description[sourceadd:].find("tumblr_")
        typeadd  = post.description[sourceadd:][tumblradd:].find("type=\"video")
        video_id=post.description[sourceadd:][tumblradd:][:typeadd-2]
        if video_id.find("/") !=-1:
            video_id=video_id[:video_id.find("/")]
        try:
            # Already downloaded? (raises StopIteration/exception when absent)
            list(conn.execute("SELECT * FROM %s WHERE ADDRESS == '%s'"%(table,video_id)).next())
        except:
            print(post.title + ": " + post.link + post.published+"\n")
            wget.download("http://vt.tumblr.com/"+video_id+".mp4",rss_url[7:-4])
            print("\n")
            conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('%s','%s','%f')" % (table,rss_url,video_id,time.mktime(time.strptime(post.published[:-6],"%a, %d %b %Y %H:%M:%S"))))
            #wget.download(get_download_url("https://your.appspot.com/fetch.php?url="+post.link),rss_url[7:-4])
    conn.commit()
# Poll all feeds forever, pausing between feeds so a full cycle takes ~1 hour.
while(1):
    for rss_url in RssUrlList:
        print("Downloading "+rss_url)
        DownloadVideo(rss_url)
        print("Sleep "+str(sleep)+" seconds")
        time.sleep(sleep)
| yanyuechuixue/tumblr2onedriver | Main.py | Python | gpl-3.0 | 2,513 |
#!/usr/bin/env python
##############################################################################
#
# diffpy.pyfullprof by DANSE Diffraction group
# Simon J. L. Billinge
# (c) 2010 Trustees of the Columbia University
# in the City of New York. All rights reserved.
#
# File coded by: Wenduo Zhou
#
# See AUTHORS.txt for a list of people who contributed.
# See LICENSE.txt for license information.
#
##############################################################################
__id__ = "$Id: phase.py 6843 2013-01-09 22:14:20Z juhas $"
from diffpy.pyfullprof.rietveldclass import RietveldClass
from diffpy.pyfullprof.utilfunction import verifyType
from diffpy.pyfullprof.infoclass import BoolInfo
from diffpy.pyfullprof.infoclass import EnumInfo
from diffpy.pyfullprof.infoclass import FloatInfo
from diffpy.pyfullprof.infoclass import IntInfo
from diffpy.pyfullprof.infoclass import RefineInfo
from diffpy.pyfullprof.infoclass import StringInfo
from diffpy.pyfullprof.infoclass import ObjectInfo
class Phase(RietveldClass):
    """
    Phase contains all the information only belongs to a single phase

    attributes
        _contributionlist -- list instance containing all the contribution related to this Phase
    """

    ParamDict = {
        "Name": StringInfo("Name", "Phase Name", ""),
        "Jbt": EnumInfo("Jbt", "Structure factor model and refinement method", 0,
            {0: "treated with Rietveld method, refining a given structural model",
            1: "treated with Rietveld method, pure magnetic",
            -1: "treated with Rietveld method, pure magentic with magnetic moments",
            2: "profile matching method with constant scale factor",
            -2: "profile matching method with constant scale factor and given CODFiln.hkl",
            3: "profile matching method with constant relative intensities for the current phase",
            -3: "profile matching method with given structure factor in CODFiln.hkl",
            4: "intensities of nuclear reflection from Rigid body groups",
            5: "intensities of magnetic reflection from conical magnetic structure in real space",
            10: "nuclear and magnetic phase with Cartesian magnetic moment",
            -10:"nuclear and magnetic phase with spherical magnetic moment",
            15: "commensurate modulated crystal structure with Cartesian magnetic components",
            -15:"commensurate modulated crystal structure with spherical magnetic components"},
            [0,1,-1,2,-2,3,-3,4,5,10,-10,15,-15]),
        "Comment": StringInfo("Comment", "Allowed Options", ""),
        "a": RefineInfo("a", "a", 1.0),
        "b": RefineInfo("b", "b", 1.0),
        "c": RefineInfo("c", "c", 1.0),
        "alpha": RefineInfo("alpha", "alpha", 90.0),
        "beta": RefineInfo("beta", "beta", 90.0),
        "gamma": RefineInfo("gamma", "gamma", 90.0),
        "Spacegroup": StringInfo("Spacegroup", "Space group", ""),
        "ATZ": FloatInfo("ATZ", "weight percent coefficient", 1.0, "", 0.0, None),
        "Isy": EnumInfo("Isy", "Symmetry opertor reading control", 0,
            {0: "symmetry operators generated automatically from the space group symbol",
            1: "user-input symmetry operator",
            -1: "user-input symmetry operator",
            2: "basis function"},
            [0, 1, -1, 2]),
        "Str": EnumInfo("Str", "size strain reading control", 0,
            {0: "using selected models",
            1: "generalized formulation of strains parameters be used",
            -1: "using selected models and generalized formulation of strains parameters be used",
            2: "generalized formulation of size parameters be used",
            3: "generalized formulation of strain and size parameters be used"},
            [0, 1, -1, 2, 3]),
        "Jvi": EnumInfo("Jvi", "Output options", 0,
            {0: "no operation",
            3: "unknow operation",
            1: "",
            2: "",
            11: ""},
            [0, 3, 1, 2, 11]),
        "Jdi": EnumInfo("Jdi", "Crystallographic output options", 0,
            {0: "",
            1: "",
            -1: "",
            2: "",
            3: "",
            4: ""},
            [0, 1, -1, 2, 3, 4]),
        "Dis_max": FloatInfo("Dis_max", "maximum distance between atoms to output", 0.0),
        "Ang_max": FloatInfo("Ang_max", "maximum angle between atoms to output", 0.0),
        "BVS": StringInfo("BVS", "BVS calculation flag", ""),
        "Tolerance": FloatInfo("Tolerance", "Tolerance for the ionic radius", 0.0, "%"),
        "Hel": BoolInfo("Hel", "Control to constrain a magnetic structure to be helicoidal", False),
        "Sol": BoolInfo("Sol", "Additional hkl-dependent shifts reading control", False),
        "Nat": IntInfo("Nat", "Atom Number", 0, 0, None),
        "Dis": IntInfo("Dis", "distance restraint number", 0, 0, None),
        "MomMA": IntInfo("MomMA", "number of angle/magnetic restraint", 0, 0, None),
        "MomMoment": IntInfo("MomMoment", "number of magnetic restraints", 0, 0, None),
        "MomAngles": IntInfo("MomAngles", "number of angle restraints", 0, 0, None),
        "Furth": IntInfo("Furth", "user defined parameter number", 0, 0, None),
        "Nvk": IntInfo("Nvk", "number of propagation vector", 0),
        "More": BoolInfo("More", "flag for using Jvi, Jdi, Hel, Sol, Mom and Ter", False),
        "N_Domains":IntInfo("N_Domains", "Number of Domains/twins", 0, 0, None),
    }
    ObjectDict = {
        "OperatorSet": ObjectInfo("OperatorSet", "OperatorSet"),
    }
    ObjectListDict = {
        "TimeRev": ObjectInfo("TimeRev", "TimeRev", 0, 1),
        "Atom" : ObjectInfo("SetAtom", "Atom", 0, None),
        "PropagationVector": ObjectInfo("SetPropagationVector", "PropagationVector", 0, None),
        "DistanceRestraint": ObjectInfo("SetDistanceRestraint", "DistanceRestraint", 0, None),
        "AngleRestraint": ObjectInfo("SetAngleRestraint", "AngleRestraint", 0, None),
        "MomentRestraint": ObjectInfo("SetMomentRestraint", "MomentRestraint", 0, None),
        "TransformationMatrixSet": ObjectInfo("TransformationMatrixSet", "TransformationMatrixSet", 0, 1),
    }

    def __init__(self, parent):
        """
        initialization:
        """
        RietveldClass.__init__(self, parent)
        # initialize subclass-object
        operatorset = OperatorSet(self)
        self.set("OperatorSet", operatorset)
        # initialize attributes
        self._contributionlist = []
        return

    def isCrystalPhase(self):
        """
        tell the user whether this phase is a crystal phase or not

        Return  --  True/False
        """
        jbt = self.get("Jbt")
        if jbt == 0 or jbt == 2:
            rvalue = True
        else:
            rvalue = False
        return rvalue

    def needAtoms(self):
        """Return False for Le-Bail fits (Jbt == 2), True otherwise."""
        jbt = self.get("Jbt")
        if jbt==2:
            return False
        else:
            return True

    def addContribution(self, contribution):
        """
        add a Contribution pertinent to this phase

        return  --  None

        contribution    --  Contribution instance
        """
        from diffpy.pyfullprof.contribution import Contribution
        verifyType(contribution, Contribution)
        self._contributionlist.append(contribution)
        return

    def getContribution(self):
        """
        get the list of Contribution of this phase

        return  --  list of Contribution
        """
        return self._contributionlist

    def delContribution(self, contribution):
        """
        remove the Contribution instance from the Contribution-list

        return  --  None

        contribution    --  instance of Contribution

        Exception
        1. if contribution is not in self._contributionlist
        """
        # BUG FIX: Contribution was only imported inside addContribution, so
        # this method previously raised NameError instead of validating.
        from diffpy.pyfullprof.contribution import Contribution
        verifyType(contribution, Contribution)
        self._contributionlist.remove(contribution)
        return

    def shiftOrigin(self, dx, dy, dz):
        """
        for the Space group with multiple origin, the implementation in FullProf
        is to shift the position of each atom by a specified amount

        return  --  None

        dx  --  float, -1 < dx < 1
        dy  --  float, -1 < dy < 1
        dz  --  float, -1 < dz < 1
        """
        verifyType(dx, float)
        verifyType(dy, float)
        verifyType(dz, float)
        # check range
        if abs(dx)>1 or abs(dy)>1 or abs(dz)>1:
            # BUG FIX: the old code omitted the "%" operator (calling the
            # format string as if it were a function) and misspelled "out".
            errmsg = "Phase.shiftOrigin(%-5s, %-5s, %-5s), Shift amount out of range" % \
                (str(dx), str(dy), str(dz))
            # NOTE(review): RietError is not imported at the top of this
            # module; confirm this is its canonical location.
            from diffpy.pyfullprof.exception import RietError
            raise RietError(errmsg)
        # set shift
        for atom in self.get("Atom"):
            atom.shiftPosition(dx, dy, dz)
        return

    def set(self, param_name, value, index=None):
        """
        Phase extending RietveldClass.set() method

        Arguments:
          param_name  --  string, parameter name
          value       --  instance, value to set
        """
        rvalue = RietveldClass.set(self, param_name, value, index=index)
        # Any of these optional-line parameters forces the "More" flag on.
        if param_name in ("Jvi", "Jdi", "Hel", "Sol", "Mom", "Ter"):
            self.set("More", True)
        return rvalue

    def validate(self):
        """
        validate of class Phase

        Synchronizes the derived FullProf counters (Nat, Dis, MomMA, Nvk)
        with the actual object lists, then checks the configuration.
        """
        rvalue = RietveldClass.validate(self)
        errmsg = ""
        # FullProf parameter synchronization
        # atom
        nat = len(self.get("Atom"))
        self.set("Nat", nat)
        # dis
        dis = len(self.get("DistanceRestraint"))
        self.set("Dis", dis)
        # momma
        ang = len(self.get("AngleRestraint"))
        mom = len(self.get("MomentRestraint"))
        if ang != 0 and mom != 0:
            # raise X(msg) form is valid on both Python 2 and Python 3.
            raise NotImplementedError("Angular Restraint and Moment Restraint cannot be used simultaneously")
        self.set("MomMA", ang+mom)
        # nvk
        nvk = len(self.get("PropagationVector"))
        self.set("Nvk", nvk)
        # Situation validation
        # 1. Jbt and Atom Number
        jbt = self.get("Jbt")
        if abs(jbt) != 2 and nat == 0:
            # not Le-Bail (profile match)
            rvalue = False
            errmsg += "No Atom is defined while Jbt = %-5s (Not Le-Bail)"% (jbt)
        # error message output
        if errmsg != "":
            prtmsg = "Phase Invalid Setup: %-60s"% (errmsg)
            print(prtmsg)
        if rvalue is not True:
            print("Invalidity Detected In %-10s"% (self.__class__.__name__))
        return rvalue
class TimeRev(RietveldClass):
    """
    Time revolving of magnetic

    attribute:
    - NS: independent symmetry operator number
    - TimeRev_1
    - ...
    - TimeRev_NS
    """
    ParamDict = {
        "NS": IntInfo("NS", "independent symmetry operator number", 6),
    }
    ObjectDict = {}
    ObjectListDict = {}
    def __init__(self, parent, ns):
        """
        initialization

        ns = NS

        NOTE(review): the body is entirely commented-out dead code (it
        depended on a "data" module that no longer exists), so ``ns`` is
        currently ignored and the instance is never initialized via
        RietveldClass.__init__ -- confirm whether this class is still used.
        """
        # PJ:  data module does not exist dead code, commented out
        '''
        import data
        RietveldClass.__init__(self, parent)
        if ns > 0:
            self.set("NS", ns)
        else:
            raise NotImplementedError, "NS = " + str(ns) + " cannot be 0"
        for i in xrange(0, self.get("NS")+1):
            param_name = "TimeRev"+str(i)
            TimeRev.__dict__[param_name] = data.IntData(self.ParamDict[param_name].get("default"))
        '''
        return
# make doc string
# Populate TimeRev.ParamDict with TimeRev0..TimeRev{MaxNS} time-reversal
# operator entries (each restricted to +1/-1).  Uses Python-2 xrange.
MaxNS = 20
for i in xrange(0, MaxNS+1):
    param_name = "TimeRev"+str(i)
    TimeRev.ParamDict[param_name] = EnumInfo(param_name, "time reversal operator", -1,
            {1: "1",
            -1: "-1"},
            [1, -1])
""" OperatorSet Suite """

class OperatorSet(RietveldClass):
    """
    base class for operator set including symmetry operator set and basis function set

    attribute
    """
    # Empty metadata dicts: concrete subclasses provide their own entries.
    ParamDict = {}
    ObjectDict = {}
    ObjectListDict = {}
    def __init__(self, parent):
        """
        initialization
        """
        RietveldClass.__init__(self, parent)
        return
class OperatorSetSymmetry(OperatorSet):
    """
    the main container of a set of symmetry operators
    """
    # BUG FIX: the original dict literal defined the key "Nsym" twice; the
    # first entry ("crystallographic symmetry operators number", with range
    # constraints) was silently overwritten by the later one.  Only the
    # surviving definition is kept here, preserving the effective mapping.
    ParamDict = {
        "Cen": EnumInfo("Cen", "centrosymmetry flag", 1,
            {1: "1",
            2: "2"},
            [1, 2]),
        "Laue": EnumInfo("Laue", "Laue class for magnetic symmetry", 1,
            {1: "1",
            2: "",
            3: "",
            4: "",
            5: "",
            6: "",
            7: "",
            8: "",
            9: "",
            10: "",
            11: "",
            12: "",
            13: "",
            14: ""},
            [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]),
        "Nsym": IntInfo("Nsym", "symmetry operator number", 0),
        "MagMat": IntInfo("MagMat", "magnetic matrix number", 0),
        "DepMat": IntInfo("DepMat", "displacement matrix number", 0),
    }
    ObjectDict = {}
    ObjectListDict = {
        "OperatorCombo": ObjectInfo("SetOperatorCombo", "OperatorCombo", 0, None),
    }

    def __init__(self, parent):
        """
        initialization
        """
        RietveldClass.__init__(self, parent)
        return
class OperatorSetBasisFunction(OperatorSet):
    """Operator set variant describing irreducible-representation basis
    functions (used when Isy selects basis-function mode)."""
    ParamDict = {
        "Ireps": IntInfo("Ireps", "number of irreducible representation", 0, 0, None),
        "Complex": EnumInfo("Complex", "atomic basis funtion complex number or not", 0,
            {0: "real",
            1: "complex"},
            [0, 1]),
    }
    ObjectDict = {}
    ObjectListDict = {
        "Icompl": ObjectInfo("SetIcompl", "Icompl", 0, 1),
    }
    def __init__(self, parent):
        """
        initialization
        """
        RietveldClass.__init__(self, parent)
        return
""" Operator Combo Suite """

class OperatorCombo(RietveldClass):
    """Base container pairing one symmetry matrix with its associated
    magnetic/displacement or basis-function data (see subclasses)."""
    ParamDict = {}
    ObjectDict = {
        "SymmetryMatrix": ObjectInfo("SymmetryMatrix", "SymmetryMatrix"),
    }
    ObjectListDict = {}
    def __init__(self, parent):
        """
        initalization
        """
        RietveldClass.__init__(self, parent)
        return
class OperatorComboSymmetry(OperatorCombo):
    """OperatorCombo carrying magnetic and displacement rotation matrices."""
    ParamDict = {}
    ObjectDict = {}
    ObjectListDict = {
        "MagneticMatrix": ObjectInfo("SetMagneticMatrix", "RotationalMatrix", 0, None),
        "DisplaceMatrix": ObjectInfo("SetDisplaceMatrix", "RotationalMatrix", 0, None),
    }
    def __init__(self, parent):
        """
        initialization: extending
        """
        OperatorCombo.__init__(self, parent)
        return

# Merge the base class's metadata into the subclass dictionaries.
OperatorComboSymmetry.ParamDict.update(OperatorCombo.ParamDict)
OperatorComboSymmetry.ObjectDict.update(OperatorCombo.ObjectDict)
OperatorComboSymmetry.ObjectListDict.update(OperatorCombo.ObjectListDict)
class OperatorComboBasisFunction(OperatorCombo):
    """OperatorCombo carrying irreducible-representation basis functions."""
    ParamDict = {}
    ObjectDict = {}
    ObjectListDict = {
        "BasisFunction": ObjectInfo("SetBasisFunction", "BasisFunction", 0, None),
    }
    def __init__(self, parent):
        """
        initialization:  extending
        """
        OperatorCombo.__init__(self, parent)
        return

# Merge the base class's metadata into the subclass dictionaries.
OperatorComboBasisFunction.ParamDict.update(OperatorCombo.ParamDict)
OperatorComboBasisFunction.ObjectDict.update(OperatorCombo.ObjectDict)
OperatorComboBasisFunction.ObjectListDict.update(OperatorCombo.ObjectListDict)
""" Icompl Suite """

class Icompl(RietveldClass):
    """
    ICOMPL up to 9 integers
    Isy = -2
    """
    ParamDict = {
    }
    ObjectDict = {}
    ObjectListDict = {}
    def __init__(self, parent, nbas):
        """
        initialization

        nbas -- number of basis functions; one Ireps<i> flag is added per
        basis function.

        NOTE(review): this mutates the CLASS-level ParamDict, so entries
        accumulate across instances with different nbas values -- confirm
        this sharing is intended.  Also note the getter key "Default"
        differs in case from the "default" used elsewhere in this module;
        verify against InfoClass's actual key.
        """
        RietveldClass.__init__(self, parent)
        for i in xrange(0, nbas):
            param_name = "Ireps"+str(i)
            self.ParamDict[param_name] = IntInfo(param_name, "real/pure imaginary BSF coefficient flags"+str(i), 0)
            self.__dict__[param_name] = self.ParamDict[param_name].get("Default")
        return
""" Basis Function Suite """

class BasisFunction(RietveldClass):
    """Real components (R1..R3) of one atomic basis function."""
    # BUG FIX: the descriptions for R2 and R3 both read "component 1"
    # (copy-paste); they now name their actual component.
    ParamDict = {
        "R1": IntInfo("R1", "basis function real component 1", 0),
        "R2": IntInfo("R2", "basis function real component 2", 0),
        "R3": IntInfo("R3", "basis function real component 3", 0),
    }
    ObjectDict = {}
    ObjectListDict = {}

    def __init__(self, parent):
        """
        initialization
        """
        RietveldClass.__init__(self, parent)
        return
class BasisFunctionComplex(BasisFunction):
    """Basis function with imaginary components (I1..I3) in addition to the
    real components inherited from BasisFunction."""
    # BUG FIX: the descriptions for I2 and I3 both read "component 1"
    # (copy-paste); they now name their actual component.
    ParamDict = {
        "I1": IntInfo("I1", "basis function complex component 1", 0),
        "I2": IntInfo("I2", "basis function complex component 2", 0),
        "I3": IntInfo("I3", "basis function complex component 3", 0),
    }
    ObjectDict = {}
    ObjectListDict = {}

    def __init__(self, parent):
        """
        initialization: extending
        """
        BasisFunction.__init__(self, parent)
        return

BasisFunctionComplex.ParamDict.update(BasisFunction.ParamDict)
BasisFunctionComplex.ObjectDict.update(BasisFunction.ObjectDict)
BasisFunctionComplex.ObjectListDict.update(BasisFunction.ObjectListDict)
""" SymmetryMatrix Suite """

class SymmetryMatrix(RietveldClass):
    """Base class for a symmetry operator; concrete subclasses store it as
    a 3x3 integer matrix + translation, or as x/y/z strings."""
    ParamDict = {}
    ObjectDict = {}
    ObjectListDict = {}
    def __init__(self, parent):
        """
        initialization
        """
        RietveldClass.__init__(self, parent)
        return
class SymmetryMatrix33(SymmetryMatrix):
    """Symmetry operator as a 3x3 integer rotation part (S11..S33) plus a
    fractional translation vector (T1..T3)."""
    ParamDict = {
        "S11": IntInfo("S11", "S[1, 1]", 0),
        "S12": IntInfo("S12", "S[1, 2]", 0),
        "S13": IntInfo("S13", "S[1, 3]", 0),
        "S21": IntInfo("S21", "S[2, 1]", 0),
        "S22": IntInfo("S22", "S[2, 2]", 0),
        "S23": IntInfo("S23", "S[2, 3]", 0),
        "S31": IntInfo("S31", "S[3, 1]", 0),
        "S32": IntInfo("S32", "S[3, 2]", 0),
        "S33": IntInfo("S33", "S[3, 3]", 0),
        "T1": FloatInfo("T1", "T[1]", 0.0),
        "T2": FloatInfo("T2", "T[2]", 0.0),
        "T3": FloatInfo("T3", "T[3]", 0.0),
    }
    ObjectDict = {}
    ObjectListDict = {}
    def __init__(self, parent):
        """
        initialization: extending
        """
        SymmetryMatrix.__init__(self, parent)
        return

# Merge the base class's metadata into the subclass dictionaries.
SymmetryMatrix33.ParamDict.update(SymmetryMatrix.ParamDict)
SymmetryMatrix33.ObjectDict.update(SymmetryMatrix.ObjectDict)
SymmetryMatrix33.ObjectListDict.update(SymmetryMatrix.ObjectListDict)
class SymmetryMatrixAlpha(SymmetryMatrix):
    """Symmetry operator in alphanumeric (Jones-faithful style) form, one
    string per x/y/z component."""
    ParamDict = {
        "X": StringInfo("X", "X-component", ""),
        "Y": StringInfo("Y", "Y-component", ""),
        "Z": StringInfo("Z", "Z-component", ""),
    }
    ObjectDict = {}
    ObjectListDict = {}
    def __init__(self, parent):
        """
        initialization: extending
        """
        SymmetryMatrix.__init__(self, parent)
        return

# Merge the base class's metadata into the subclass dictionaries.
SymmetryMatrixAlpha.ParamDict.update(SymmetryMatrix.ParamDict)
SymmetryMatrixAlpha.ObjectDict.update(SymmetryMatrix.ObjectDict)
SymmetryMatrixAlpha.ObjectListDict.update(SymmetryMatrix.ObjectListDict)
""" RotationalMatrix Suite """

class RotationalMatrix(RietveldClass):
    """Base class for a magnetic/displacement rotation operator; concrete
    subclasses store it numerically (3x3 + phase) or alphanumerically."""
    ParamDict = {}
    ObjectDict = {}
    ObjectListDict = {}
    def __init__(self, parent):
        """
        initialization
        """
        RietveldClass.__init__(self, parent)
        return
class RotationalMatrix33(RotationalMatrix):
    """Rotation operator as a 3x3 integer matrix (R11..R33) plus a magnetic
    phase (in units of 2*pi)."""
    ParamDict = {
        "R11": IntInfo("R11", "R[1, 1]", 0),
        "R12": IntInfo("R12", "R[1, 2]", 0),
        "R13": IntInfo("R13", "R[1, 3]", 0),
        "R21": IntInfo("R21", "R[2, 1]", 0),
        "R22": IntInfo("R22", "R[2, 2]", 0),
        "R23": IntInfo("R23", "R[2, 3]", 0),
        "R31": IntInfo("R31", "R[3, 1]", 0),
        "R32": IntInfo("R32", "R[3, 2]", 0),
        "R33": IntInfo("R33", "R[3, 3]", 0),
        "Phase": FloatInfo("Phase", "Phase", 0.0, "2PI"),
    }
    ObjectDict = {}
    ObjectListDict = {}

    def __init__(self, parent):
        """
        initialization: extending
        """
        RotationalMatrix.__init__(self, parent)
        return

# BUG FIX: these three lines previously updated RotationalMatrix's OWN
# dictionaries with themselves (a no-op typo), so RotationalMatrix33 never
# merged its base-class metadata like every sibling subclass does.
RotationalMatrix33.ParamDict.update(RotationalMatrix.ParamDict)
RotationalMatrix33.ObjectDict.update(RotationalMatrix.ObjectDict)
RotationalMatrix33.ObjectListDict.update(RotationalMatrix.ObjectListDict)
class RotationalMatrixAlpha(RotationalMatrix):
    """Rotation operator in alphanumeric form (x/y/z strings) plus a
    magnetic phase (in units of 2*pi)."""
    ParamDict = {
        "X": StringInfo("X", "X-component", ""),
        "Y": StringInfo("Y", "Y-component", ""),
        "Z": StringInfo("Z", "Z-component", ""),
        "Phase": FloatInfo("Phase", "Phase", 0.0, "2PI"),
    }
    ObjectDict = {}
    ObjectListDict = {}
    def __init__(self, parent):
        """
        initialization: extending
        """
        RotationalMatrix.__init__(self, parent)
        return

# Merge the base class's metadata into the subclass dictionaries.
RotationalMatrixAlpha.ParamDict.update(RotationalMatrix.ParamDict)
RotationalMatrixAlpha.ObjectDict.update( RotationalMatrix.ObjectDict)
RotationalMatrixAlpha.ObjectListDict.update(RotationalMatrix.ObjectListDict)
""" Restraint Suite """

class DistanceRestraint(RietveldClass):
    """
    soft distance contraints

    attribute:
    - CATOD1: StringInfo("CATOD1", "Atom 1", ""),
    - CATOD2: StringInfo("CATOD2", "Atom 2", ""),
    - ITnum: IntInfo("ITnum", "symmetry operator number", 0),
    - T1: FloatInfo("T1", "translation part 1 of symmetry operator", 0.0),
    - T2: FloatInfo("T2", "translation part 2 of symmetry operator", 0.0),
    - T3: FloatInfo("T3", "translation part 3 of symmetry operator", 0.0),
    - Dist: FloatInfo("Dist", "required distance", 1.0),
    - Sigma: FloatInfo("Sigma", "required distance deviation", 1.0),
    """
    ParamDict = {
        "CATOD1": StringInfo("CATOD1", "Atom 1", ""),
        "CATOD2": StringInfo("CATOD2", "Atom 2", ""),
        "ITnum": IntInfo("ITnum", "symmetry operator number", 0),
        "T1": FloatInfo("T1", "translation part 1 of symmetry operator", 0.0),
        "T2": FloatInfo("T2", "translation part 2 of symmetry operator", 0.0),
        "T3": FloatInfo("T3", "translation part 3 of symmetry operator", 0.0),
        "Dist": FloatInfo("Dist", "required distance", 1.0),
        "Sigma": FloatInfo("Sigma", "required distance deviation", 1.0),
    }
    # CONSISTENCY FIX: every sibling RietveldClass subclass in this module
    # declares these two (empty) metadata dicts; they were missing here.
    ObjectDict = {}
    ObjectListDict = {}

    def __init__(self, parent):
        """initialization"""
        RietveldClass.__init__(self, parent)
        return
class AngleRestraint(RietveldClass):
    """
    Soft (restraint) angle constraint among three atoms.

    Parameters:
    - CATOD1, CATOD2, CATOD3: names of the three atoms defining the angle
    - ITnum1, ITnum2:         symmetry operator numbers
    - T1, T2, T3:             translation parts of the symmetry operator of ITnum1
    - t1, t2, t3:             translation parts of the symmetry operator of ITnum2
    - Angle:                  required angle
    - Sigma:                  allowed deviation of the angle
    """

    ParamDict = {
        "CATOD1": StringInfo("CATOD1", "Atom 1", ""),
        "CATOD2": StringInfo("CATOD2", "Atom 2", ""),
        "CATOD3": StringInfo("CATOD3", "Atom 3", ""),
        "ITnum1": IntInfo("ITnum1", "symmetry operator number 1", 0),
        "ITnum2": IntInfo("ITnum2", "symmetry operator number 2", 0),
        "T1": FloatInfo("T1", "translation part 1 of symmetry operator of ITnum1", 0.0),
        "T2": FloatInfo("T2", "translation part 2 of symmetry operator of ITnum1", 0.0),
        "T3": FloatInfo("T3", "translation part 3 of symmetry operator of ITnum1", 0.0),
        "t1": FloatInfo("t1", "translation part 1 of symmetry operator of ITnum2", 0.0),
        "t2": FloatInfo("t2", "translation part 2 of symmetry operator of ITnum2", 0.0),
        "t3": FloatInfo("t3", "translation part 3 of symmetry operator of ITnum2", 0.0),
        "Angle": FloatInfo("Angle", "required angle", 1.0),
        "Sigma": FloatInfo("Sigma", "required angle deviation", 1.0),
    }
    ObjectDict = {}
    ObjectListDict = {}

    def __init__(self, parent):
        """
        Initialize by delegating to the RietveldClass base class.

        Bug fix: the original signature was ``__init__(self)`` while the
        body referenced an undefined name ``parent`` (NameError on every
        instantiation).  The parent argument is now accepted explicitly,
        matching every sibling class in this module.
        """
        RietveldClass.__init__(self, parent)
        return
class MomentRestrain:
    """
    Soft magnetic-moment constraint.

    Plain value container (note: unlike its siblings this class does not
    derive from RietveldClass).

    Attributes:
    - CATOM:  constrained atom name
    - Moment: required moment value
    - Sigma:  allowed deviation
    """

    def __init__(self):
        """Create an empty constraint with zeroed defaults."""
        self.CATOM, self.Moment, self.Sigma = "", 0.0, 0.0
class PropagationVector(RietveldClass):
    """
    A single propagation vector with refinable X, Y and Z components.
    """

    # All three components share one shape, so generate the table.
    ParamDict = {
        axis: RefineInfo(axis, "propagation vector - %s" % axis.lower(), 0.0)
        for axis in ("X", "Y", "Z")
    }
    ObjectDict = {}
    ObjectListDict = {}

    def __init__(self, parent):
        """Initialize by delegating to the RietveldClass base class."""
        RietveldClass.__init__(self, parent)
        return
class TransformationMatrixSet(RietveldClass):
    """
    Container holding a 3x3 transformation matrix (T11..T33) together
    with an origin-shift vector (Or_sh1..Or_sh3).
    """

    # The nine matrix elements share one shape, so generate them row by
    # row.  (The description string reproduces the original spelling
    # "Tranformation" byte-for-byte.)
    ParamDict = {
        "T%d%d" % (row, col): FloatInfo(
            "T%d%d" % (row, col),
            "Tranformation Matrix Element %d %d" % (row, col),
            0.0,
        )
        for row in (1, 2, 3)
        for col in (1, 2, 3)
    }
    # Origin-shift components are listed explicitly because their
    # descriptions are not uniform (the first one carries a trailing
    # space in the original).
    ParamDict["Or_sh1"] = FloatInfo("Or_sh1", "Origin Shift Vector 1 ", 0.0)
    ParamDict["Or_sh2"] = FloatInfo("Or_sh2", "Origin Shift Vector 2", 0.0)
    ParamDict["Or_sh3"] = FloatInfo("Or_sh3", "Origin Shift Vector 3", 0.0)

    ObjectDict = {}
    ObjectListDict = {}

    def __init__(self, parent):
        """Initialize by delegating to the RietveldClass base class."""
        RietveldClass.__init__(self, parent)
        return
| xpclove/autofp | diffpy/pyfullprof/phase.py | Python | gpl-3.0 | 27,563 |
import logging
from sqlalchemy import engine_from_config
from courier.scripts import settings
from courier.models import DeclarativeBase, DBSession, db_views, populate_lookups
# Set to True to enable SQLAlchemy engine statement logging in main().
LOG = False
def main(DBSession, engine):
    """
    Drop and recreate all tables and views, then repopulate lookup rows.

    Note: the parameters deliberately shadow the module-level DBSession
    and engine, so the rebuild can be driven with an alternate
    session/engine pair.
    """
    # set up logging
    if LOG:
        logging.basicConfig()
        logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
    # build tables & views
    # Views are dropped first -- presumably because they select from the
    # tables about to be dropped; keep this ordering.
    db_views.drop_views(engine)
    DeclarativeBase.metadata.bind = engine
    DeclarativeBase.metadata.drop_all()
    DeclarativeBase.metadata.create_all(engine)
    db_views.build_views(engine)
    # populate lookups
    populate_lookups(DBSession)
if __name__ == '__main__':
    # configure session
    # Build the engine from the scripts' settings module, bind the shared
    # session, then rebuild the schema.
    engine = engine_from_config(settings, prefix='sqlalchemy.')
    DBSession.configure(bind=engine)
    main(DBSession, engine)
| alienlike/courier | courier/scripts/create_tables.py | Python | gpl-3.0 | 816 |
#! /usr/bin/env python
import sys, cv, cv2, os
import numpy as np
import subprocess, signal
import math
import atexit
import cPickle as pickle
from sklearn.cluster import DBSCAN
from sklearn import metrics, preprocessing
import pymeanshift as pms
from optparse import OptionParser
import time
# Command-line interface: input frame directory, starting frame index,
# playback frame rate, optional crop of the Husky image header, and the
# directory that receives the rendered video.
parser = OptionParser()
parser.add_option("-i", "--input", dest="input_dir", help="directory with frames")
parser.add_option("-s", "--start", dest="start_frame", default="0", help="frame to start on")
parser.add_option("-r", "--framerate", dest="framerate", default="30", help="playback rate")
parser.add_option("-c", "--crop-husky", dest="crop", action="store_true", default=False, help="crop out the image header from the Husky?")
parser.add_option("--save", dest="save", action="store", default=None, help="directory to save the rendered frames to")
(options, args) = parser.parse_args()
video = None
framerate = int(options.framerate)
frame = int(options.start_frame)
while True:
framefile = "%s%sframe%04d.jpg" % (options.input_dir, os.sep, frame)
print framefile
if not os.path.isfile(framefile):
print "done"
break
img = cv2.imread(framefile)
if options.crop:
img = img[20:, :]
if video == None:
vidfile = "%s%svideo.avi" % (options.save, os.sep)
height, width, layers = img.shape
video = cv2.VideoWriter(vidfile, cv2.cv.CV_FOURCC('M','J','P','G'), framerate, (width, height), True)
video.write(img)
frame += 1
cv2.destroyAllWindows()
| AutonomyLab/husky | opencv-utilities/video-creator.py | Python | gpl-3.0 | 1,535 |
import types
from flask import jsonify
from werkzeug.exceptions import default_exceptions
from werkzeug.exceptions import HTTPException
from twiml_server import app
def make_json_app():
    """
    Register a JSON error handler on the shared Flask ``app`` for every
    default HTTP error code, so errors are returned as a JSON body
    ``{"message": ...}`` instead of the default HTML page.
    """
    def make_json_error(ex):
        # Serialize the exception message; keep the HTTP status code for
        # HTTPExceptions, report 500 for anything else.
        response = jsonify(message=str(ex))
        response.status_code = (ex.code
                                if isinstance(ex, HTTPException)
                                else 500)
        return response

    # Iterating the mapping directly yields its keys -- identical to the
    # original dict.iterkeys() on Python 2, and also valid on Python 3.
    for code in default_exceptions:
        app.error_handler_spec[None][code] = make_json_error
| vfulco/twilio_server | twiml_server/common/util.py | Python | gpl-3.0 | 541 |