| code (stringlengths 2-1.05M) | repo_name (stringlengths 5-104) | path (stringlengths 4-251) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int32, 2-1.05M) |
|---|---|---|---|---|---|
import cv2

__author__ = 'def'


class Environment:
    def __init__(self, image, collision_checker):
        self.image_to_load = image

        # Load image as environment:
        self.image = image
        self.y_limit, self.x_limit = self.image.shape

        # Get obstacles from image
        self.obstacles = []
        self.findObstacles()

        # Set collision_checker
        self.collision_checker = collision_checker

    def findObstacles(self):
        # Find external contours on a copy (findContours may modify its input).
        # Note: this two-value unpacking matches OpenCV 2.x/4.x; OpenCV 3.x
        # returns (image, contours, hierarchy).
        image2 = self.image.copy()
        contours, _ = cv2.findContours(image2, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)

        # Simplify contours (Douglas-Peucker approximation, 2 px tolerance)
        # hull_contours = [cv2.convexHull(contour) for contour in contours]
        simple_contours = [cv2.approxPolyDP(contour, 2, True) for contour in contours]
        self.obstacles = simple_contours

    def is_valid(self, point):
        # A point is valid only if it collides with no obstacle
        for obstacle in self.obstacles:
            if self.collision_checker.collides(obstacle, point):
                return False
        return True

    def is_line_valid(self, origin, end):
        # A segment is valid only if it collides with no obstacle
        for obstacle in self.obstacles:
            if self.collision_checker.line_collides(obstacle, origin, end):
                return False
        return True
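
# A minimal usage sketch: Environment expects a caller-supplied collision
# checker, so SimpleChecker here is a hypothetical stand-in built on OpenCV's
# cv2.pointPolygonTest containment test; the real project supplies its own.
if __name__ == '__main__':
    import numpy as np

    class SimpleChecker:
        def collides(self, obstacle, point):
            # pointPolygonTest returns >= 0 for points inside or on the contour
            return cv2.pointPolygonTest(obstacle, point, False) >= 0

        def line_collides(self, obstacle, origin, end):
            # Crude sketch: only the two endpoints are tested
            return self.collides(obstacle, origin) or self.collides(obstacle, end)

    # Black 100x100 map with a single white square acting as an obstacle
    img = np.zeros((100, 100), dtype=np.uint8)
    img[40:60, 40:60] = 255
    env = Environment(img, SimpleChecker())
    print(env.is_valid((10.0, 10.0)))  # True: far from the obstacle
    print(env.is_valid((50.0, 50.0)))  # False: inside the obstacle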
| JavierIH/platano | algorithm/Environment.py | Python | gpl-2.0 | 1,294 |
"""
Creer par Antoine Leonard
antoine@antbig.fr
Class qui contient une ligne de couleur
"""
class Line:
def __init__(self):
self.__colors = []
def addcolor(self,color):
self.__colors.append(color)
def getnbcolor(self):
return len(self.__colors)
def hascolor(self, color):
return color in self.__colors
def iscoloratposition(self, color, position):
if position >= len(self.__colors):
return False
return self.__colors[position] == color
def __str__(self):
stringvalue = ""
for color in self.__colors:
stringvalue += (color.getname()+" ")
return stringvalue
def getcolor(self, position):
return self.__colors[position]
def compareto(self, line):
return LineResult(self, line)
def getallcolors(self):
return self.__colors
def setColor(self, color, position):
if len(self.__colors) > position:
self.__colors.insert(position, color)
counttry = 0
class LineResult:
def __init__(self, line, compareto):
global counttry
counttry += 1
self.__line = line
self.__compareto = compareto
self.__result = []
self.__process()
def __process(self):
copyofcompareto = list(self.__compareto.getallcolors())
for position in range(0, self.__line.getnbcolor()):
if self.__line.getcolor(position) == self.__compareto.getcolor(position):
self.__result.append("OK")
copyofcompareto.remove(self.__line.getcolor(position))
else:
self.__result.append("NON")
for position in range(0, self.__line.getnbcolor()):
if self.__line.getcolor(position) in copyofcompareto and self.__result[position] != "OK":
copyofcompareto.remove(self.__line.getcolor(position))
self.__result.pop(position)
self.__result.insert(position, "COLOR")
def getresult(self):
return self.__result
def iscorrect(self):
return len(self.__result) == self.__result.count("OK")
def getCorrect(self):
return self.__result.count("OK")
def getRigthColor(self):
return self.__result.count("COLOR")
def getNON(self):
return self.__result.count("NON")
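
# A minimal usage sketch: Line stores arbitrary color objects, and plain
# strings stand in for them here, which is enough for compareto() since it
# only tests equality and membership.
if __name__ == '__main__':
    secret = Line()
    for c in ("red", "blue", "green", "red"):
        secret.addcolor(c)
    guess = Line()
    for c in ("red", "green", "blue", "yellow"):
        guess.addcolor(c)
    result = guess.compareto(secret)
    print(result.getresult())  # ['OK', 'COLOR', 'COLOR', 'NON']
    print(result.getCorrect(), result.getRightColor(), result.getNON())  # 1 2 1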
| antbig/MasterMind | Line.py | Python | gpl-2.0 | 2,372 |
# -*- coding: utf-8 -*-
#-------------------------------------------------
#--
#-- edge to Draft BSpline
#--
#-- microelly 2016 v 0.1
#--
#-- GNU Lesser General Public License (LGPL)
#-------------------------------------------------

import Draft
import FreeCADGui, FreeCAD


def run():
    # Take the first selected object and sample its first edge
    e = FreeCADGui.Selection.getSelection()[0]
    pts = e.Shape.Edge1.Curve.discretize(20)
    # Build a Draft B-spline through the samples and hide the source object
    Draft.makeBSpline(pts)
    FreeCAD.ActiveDocument.ActiveObject.Label = "BSpline for " + e.Label
    e.ViewObject.hide()
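
# Usage note: this macro is meant to run inside FreeCAD with an object
# selected whose shape has at least one edge; the 20 passed to discretize()
# is the number of sample points and can be raised for a smoother spline.
# The function is not called automatically:
# run()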
| microelly2/freeCAD_macro | misc/edge2bspline.py | Python | gpl-2.0 | 482 |
from setuptools import setup

setup(
    name='txt2tags',
    version='2.7',
    url='http://txt2tags.org',
    author='Aurelio Jargas',
    author_email='verde@aurelio.net',
    description="Document generator. Reads a text file with minimal markup such as **bold** and //italic// and converts it to various formats",
    long_description=open('README').read(),
    include_package_data=True,
    scripts=['txt2tags'],
)
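# Usage note: with this setup.py, a typical install is `pip install .` from
# the project root (or `python setup.py install` on older toolchains).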
| austin1howard/txt2tags | setup.py | Python | gpl-2.0 | 434 |
# encoding: utf-8
# module PyQt4.QtCore
# from /usr/lib/python3/dist-packages/PyQt4/QtCore.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import sip as __sip

from .QObject import QObject


class QSharedMemory(QObject):
    """
    QSharedMemory(QObject parent=None)
    QSharedMemory(str, QObject parent=None)
    """
    def attach(self, QSharedMemory_AccessMode_mode=None):  # real signature unknown; restored from __doc__
        """ QSharedMemory.attach(QSharedMemory.AccessMode mode=QSharedMemory.ReadWrite) -> bool """
        return False

    def constData(self):  # real signature unknown; restored from __doc__
        """ QSharedMemory.constData() -> sip.voidptr """
        pass

    def create(self, p_int, QSharedMemory_AccessMode_mode=None):  # real signature unknown; restored from __doc__
        """ QSharedMemory.create(int, QSharedMemory.AccessMode mode=QSharedMemory.ReadWrite) -> bool """
        return False

    def data(self):  # real signature unknown; restored from __doc__
        """ QSharedMemory.data() -> sip.voidptr """
        pass

    def detach(self):  # real signature unknown; restored from __doc__
        """ QSharedMemory.detach() -> bool """
        return False

    def error(self):  # real signature unknown; restored from __doc__
        """ QSharedMemory.error() -> QSharedMemory.SharedMemoryError """
        pass

    def errorString(self):  # real signature unknown; restored from __doc__
        """ QSharedMemory.errorString() -> str """
        return ""

    def isAttached(self):  # real signature unknown; restored from __doc__
        """ QSharedMemory.isAttached() -> bool """
        return False

    def key(self):  # real signature unknown; restored from __doc__
        """ QSharedMemory.key() -> str """
        return ""

    def lock(self):  # real signature unknown; restored from __doc__
        """ QSharedMemory.lock() -> bool """
        return False

    def nativeKey(self):  # real signature unknown; restored from __doc__
        """ QSharedMemory.nativeKey() -> str """
        return ""

    def setKey(self, p_str):  # real signature unknown; restored from __doc__
        """ QSharedMemory.setKey(str) """
        pass

    def setNativeKey(self, p_str):  # real signature unknown; restored from __doc__
        """ QSharedMemory.setNativeKey(str) """
        pass

    def size(self):  # real signature unknown; restored from __doc__
        """ QSharedMemory.size() -> int """
        return 0

    def unlock(self):  # real signature unknown; restored from __doc__
        """ QSharedMemory.unlock() -> bool """
        return False

    def __init__(self, *__args):  # real signature unknown; restored from __doc__ with multiple overloads
        pass

    AccessMode = None  # (!) real value is ''
    AlreadyExists = 4
    InvalidSize = 2
    KeyError = 3
    LockError = 6
    NoError = 0
    NotFound = 5
    OutOfResources = 7
    PermissionDenied = 1
    ReadOnly = 0
    ReadWrite = 1
    SharedMemoryError = None  # (!) real value is ''
    UnknownError = 8
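
# A minimal usage sketch against a real PyQt4 install (kept as comments since
# it needs Qt); "my-segment-key" is an arbitrary example key, and every call
# below appears in the stub above:
#
#   from PyQt4.QtCore import QSharedMemory
#   shm = QSharedMemory("my-segment-key")
#   if not shm.create(1024):                     # 1024 bytes, ReadWrite default
#       if shm.error() == QSharedMemory.AlreadyExists:
#           shm.attach()                         # reuse the existing segment
#   shm.lock()
#   try:
#       buf = shm.data()                         # sip.voidptr to the raw block
#   finally:
#       shm.unlock()
#   shm.detach()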
| ProfessorX/Config | .PyCharm30/system/python_stubs/-1247971765/PyQt4/QtCore/QSharedMemory.py | Python | gpl-2.0 | 3,062 |
"""
This module is imported from the pandas package __init__.py file
in order to ensure that the core.config options registered here will
be available as soon as the user loads the package. if register_option
is invoked inside specific modules, they will not be registered until that
module is imported, which may or may not be a problem.
If you need to make sure options are available even before a certain
module is imported, register them here rather then in the module.
"""
import warnings
import pandas.core.config as cf
from pandas.core.config import (is_int, is_bool, is_text, is_instance_factory,
is_one_of_factory, get_default_val)
from pandas.core.format import detect_console_encoding
#
# options from the "display" namespace
pc_precision_doc = """
: int
Floating point output precision (number of significant digits). This is
only a suggestion
"""
pc_colspace_doc = """
: int
Default space for DataFrame columns.
"""
pc_max_rows_doc = """
: int
If max_rows is exceeded, switch to truncate view. Depending on
`large_repr`, objects are either centrally truncated or printed as
a summary view. 'None' value means unlimited.
In case python/IPython is running in a terminal and `large_repr`
equals 'truncate' this can be set to 0 and pandas will auto-detect
the height of the terminal and print a truncated object which fits
the screen height. The IPython notebook, IPython qtconsole, or
IDLE do not run in a terminal and hence it is not possible to do
correct auto-detection.
"""
pc_max_cols_doc = """
: int
If max_cols is exceeded, switch to truncate view. Depending on
`large_repr`, objects are either centrally truncated or printed as
a summary view. 'None' value means unlimited.
In case python/IPython is running in a terminal and `large_repr`
equals 'truncate' this can be set to 0 and pandas will auto-detect
the width of the terminal and print a truncated object which fits
the screen width. The IPython notebook, IPython qtconsole, or IDLE
do not run in a terminal and hence it is not possible to do
correct auto-detection.
"""
pc_max_categories_doc = """
: int
This sets the maximum number of categories pandas should output when
printing out a `Categorical` or a Series of dtype "category".
"""
pc_max_info_cols_doc = """
: int
max_info_columns is used in DataFrame.info method to decide if
per column information will be printed.
"""
pc_nb_repr_h_doc = """
: boolean
When True, IPython notebook will use html representation for
pandas objects (if it is available).
"""
pc_date_dayfirst_doc = """
: boolean
    When True, prints and parses dates with the day first, e.g. 20/01/2005
"""
pc_date_yearfirst_doc = """
: boolean
    When True, prints and parses dates with the year first, e.g. 2005/01/20
"""
pc_pprint_nest_depth = """
: int
Controls the number of nested levels to process when pretty-printing
"""
pc_multi_sparse_doc = """
: boolean
"sparsify" MultiIndex display (don't display repeated
elements in outer levels within groups)
"""
pc_encoding_doc = """
: str/unicode
Defaults to the detected encoding of the console.
Specifies the encoding to be used for strings returned by to_string,
these are generally strings meant to be displayed on the console.
"""
float_format_doc = """
: callable
The callable should accept a floating point number and return
a string with the desired format of the number. This is used
in some places like SeriesFormatter.
See core.format.EngFormatter for an example.
"""
max_colwidth_doc = """
: int
The maximum width in characters of a column in the repr of
a pandas data structure. When the column overflows, a "..."
placeholder is embedded in the output.
"""
colheader_justify_doc = """
: 'left'/'right'
Controls the justification of column headers. used by DataFrameFormatter.
"""
pc_expand_repr_doc = """
: boolean
Whether to print out the full DataFrame repr for wide DataFrames across
multiple lines, `max_columns` is still respected, but the output will
wrap-around across multiple "pages" if its width exceeds `display.width`.
"""
pc_show_dimensions_doc = """
: boolean or 'truncate'
Whether to print out dimensions at the end of DataFrame repr.
If 'truncate' is specified, only print out the dimensions if the
frame is truncated (e.g. not display all rows and/or columns)
"""
pc_line_width_doc = """
: int
Deprecated.
"""
pc_east_asian_width_doc = """
: boolean
    Whether to use the Unicode East Asian Width to calculate the display text
    width.
    Enabling this may affect performance (default: False)
"""
pc_ambiguous_as_wide_doc = """
: boolean
    Whether to handle Unicode characters in the Ambiguous category as Wide
    (width=2) (default: False)
"""
pc_latex_repr_doc = """
: boolean
Whether to produce a latex DataFrame representation for jupyter
environments that support it.
(default: False)
"""
pc_line_width_deprecation_warning = """\
line_width has been deprecated, use display.width instead (currently both are
identical)
"""
pc_height_deprecation_warning = """\
height has been deprecated.
"""
pc_width_doc = """
: int
Width of the display in characters. In case python/IPython is running in
a terminal this can be set to None and pandas will correctly auto-detect
the width.
Note that the IPython notebook, IPython qtconsole, or IDLE do not run in a
terminal and hence it is not possible to correctly detect the width.
"""
pc_height_doc = """
: int
Deprecated.
"""
pc_chop_threshold_doc = """
: float or None
    if set to a float value, all float values smaller than the given threshold
    will be displayed as exactly 0 by repr and friends.
"""
pc_max_seq_items = """
: int or None
    When pretty-printing a long sequence, no more than `max_seq_items`
    will be printed. If items are omitted, they will be denoted by the
    addition of "..." to the resulting string.
    If set to None, the number of items to be printed is unlimited.
"""
pc_max_info_rows_doc = """
: int or None
df.info() will usually show null-counts for each column.
For large frames this can be quite slow. max_info_rows and max_info_cols
limit this null check only to frames with smaller dimensions than
specified.
"""
pc_large_repr_doc = """
: 'truncate'/'info'
For DataFrames exceeding max_rows/max_cols, the repr (and HTML repr) can
show a truncated table (the default from 0.13), or switch to the view from
df.info() (the behaviour in earlier versions of pandas).
"""
pc_mpl_style_doc = """
: bool
Setting this to 'default' will modify the rcParams used by matplotlib
to give plots a more pleasing visual style by default.
Setting this to None/False restores the values to their initial value.
"""
pc_mpl_style_deprecation_warning = """
mpl_style has been deprecated and will be removed in a future version.
Use `matplotlib.pyplot.style.use` instead.
"""
pc_memory_usage_doc = """
: bool, string or None
    This specifies if the memory usage of a DataFrame should be displayed when
    df.info() is called. Valid values: True, False, 'deep'
"""
pc_latex_escape = """
: bool
    This specifies if the to_latex method of a DataFrame escapes special
    characters. Valid values: False, True
"""
pc_latex_longtable = """
: bool
    This specifies if the to_latex method of a DataFrame uses the longtable
    format. Valid values: False, True
"""
style_backup = dict()
def mpl_style_cb(key):
warnings.warn(pc_mpl_style_deprecation_warning, FutureWarning,
stacklevel=5)
import sys
from pandas.tools.plotting import mpl_stylesheet
global style_backup
val = cf.get_option(key)
if 'matplotlib' not in sys.modules.keys():
if not val: # starting up, we get reset to None
return val
raise Exception("matplotlib has not been imported. aborting")
import matplotlib.pyplot as plt
if val == 'default':
style_backup = dict([(k, plt.rcParams[k]) for k in mpl_stylesheet])
plt.rcParams.update(mpl_stylesheet)
elif not val:
if style_backup:
plt.rcParams.update(style_backup)
return val
with cf.config_prefix('display'):
cf.register_option('precision', 6, pc_precision_doc, validator=is_int)
cf.register_option('float_format', None, float_format_doc)
cf.register_option('column_space', 12, validator=is_int)
cf.register_option('max_info_rows', 1690785, pc_max_info_rows_doc,
validator=is_instance_factory((int, type(None))))
cf.register_option('max_rows', 60, pc_max_rows_doc,
validator=is_instance_factory([type(None), int]))
cf.register_option('max_categories', 8, pc_max_categories_doc,
validator=is_int)
cf.register_option('max_colwidth', 50, max_colwidth_doc, validator=is_int)
cf.register_option('max_columns', 20, pc_max_cols_doc,
validator=is_instance_factory([type(None), int]))
cf.register_option('large_repr', 'truncate', pc_large_repr_doc,
validator=is_one_of_factory(['truncate', 'info']))
cf.register_option('max_info_columns', 100, pc_max_info_cols_doc,
validator=is_int)
cf.register_option('colheader_justify', 'right', colheader_justify_doc,
validator=is_text)
cf.register_option('notebook_repr_html', True, pc_nb_repr_h_doc,
validator=is_bool)
cf.register_option('date_dayfirst', False, pc_date_dayfirst_doc,
validator=is_bool)
cf.register_option('date_yearfirst', False, pc_date_yearfirst_doc,
validator=is_bool)
cf.register_option('pprint_nest_depth', 3, pc_pprint_nest_depth,
validator=is_int)
cf.register_option('multi_sparse', True, pc_multi_sparse_doc,
validator=is_bool)
cf.register_option('encoding', detect_console_encoding(), pc_encoding_doc,
validator=is_text)
cf.register_option('expand_frame_repr', True, pc_expand_repr_doc)
cf.register_option('show_dimensions', 'truncate', pc_show_dimensions_doc,
validator=is_one_of_factory([True, False, 'truncate']))
cf.register_option('chop_threshold', None, pc_chop_threshold_doc)
cf.register_option('max_seq_items', 100, pc_max_seq_items)
cf.register_option('mpl_style', None, pc_mpl_style_doc,
validator=is_one_of_factory([None, False, 'default']),
cb=mpl_style_cb)
cf.register_option('height', 60, pc_height_doc,
validator=is_instance_factory([type(None), int]))
cf.register_option('width', 80, pc_width_doc,
validator=is_instance_factory([type(None), int]))
# redirected to width, make defval identical
cf.register_option('line_width', get_default_val('display.width'),
pc_line_width_doc)
cf.register_option('memory_usage', True, pc_memory_usage_doc,
validator=is_one_of_factory([None, True,
False, 'deep']))
cf.register_option('unicode.east_asian_width', False,
pc_east_asian_width_doc, validator=is_bool)
cf.register_option('unicode.ambiguous_as_wide', False,
                   pc_ambiguous_as_wide_doc, validator=is_bool)
cf.register_option('latex.repr', False,
pc_latex_repr_doc, validator=is_bool)
cf.register_option('latex.escape', True, pc_latex_escape,
validator=is_bool)
cf.register_option('latex.longtable', False, pc_latex_longtable,
validator=is_bool)
cf.deprecate_option('display.line_width',
msg=pc_line_width_deprecation_warning,
rkey='display.width')
cf.deprecate_option('display.height', msg=pc_height_deprecation_warning,
rkey='display.max_rows')
tc_sim_interactive_doc = """
: boolean
Whether to simulate interactive mode for purposes of testing
"""
with cf.config_prefix('mode'):
cf.register_option('sim_interactive', False, tc_sim_interactive_doc)
use_inf_as_null_doc = """
: boolean
True means treat None, NaN, INF, -INF as null (old way),
False means None and NaN are null, but INF, -INF are not null
(new way).
"""
# We don't want to start importing everything at the global context level
# or we'll hit circular deps.
def use_inf_as_null_cb(key):
from pandas.core.common import _use_inf_as_null
_use_inf_as_null(key)
with cf.config_prefix('mode'):
cf.register_option('use_inf_as_null', False, use_inf_as_null_doc,
cb=use_inf_as_null_cb)
# user warnings
chained_assignment = """
: string
Raise an exception, warn, or no action if trying to use chained assignment,
The default is warn
"""
with cf.config_prefix('mode'):
cf.register_option('chained_assignment', 'warn', chained_assignment,
validator=is_one_of_factory([None, 'warn', 'raise']))
# Set up the io.excel specific configuration.
writer_engine_doc = """
: string
The default Excel writer engine for '{ext}' files. Available options:
'{default}' (the default){others}.
"""
with cf.config_prefix('io.excel'):
# going forward, will be additional writers
for ext, options in [('xls', ['xlwt']), ('xlsm', ['openpyxl'])]:
default = options.pop(0)
if options:
options = " " + ", ".join(options)
else:
options = ""
doc = writer_engine_doc.format(ext=ext, default=default,
others=options)
cf.register_option(ext + '.writer', default, doc, validator=str)
def _register_xlsx(engine, other):
cf.register_option('xlsx.writer', engine,
writer_engine_doc.format(ext='xlsx', default=engine,
others=", '%s'" % other),
validator=str)
try:
# better memory footprint
import xlsxwriter # noqa
_register_xlsx('xlsxwriter', 'openpyxl')
except ImportError:
# fallback
_register_xlsx('openpyxl', 'xlsxwriter')
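
# A minimal usage sketch: the options registered above are driven through the
# public pandas API, and the option names below come straight from the
# register_option calls in this file.
#
#   import pandas as pd
#   pd.set_option('display.max_rows', 120)        # checked by the validator
#   pd.get_option('display.precision')            # -> 6, the default above
#   with pd.option_context('display.width', 120):
#       pass                                      # temporary override
#   pd.reset_option('display.max_rows')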
| pjryan126/solid-start-careers | store/api/zillow/venv/lib/python2.7/site-packages/pandas/core/config_init.py | Python | gpl-2.0 | 14,480 |
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')))
from unittest import TestCase
try:
    from unittest import mock
except ImportError:
    # Python 2 fallback: the standalone mock package
    import mock
import logging
from crmsh.crash_test import utils, main, config
class TestMyLoggingFormatter(TestCase):
@classmethod
def setUpClass(cls):
"""
Global setUp.
"""
def setUp(self):
"""
Test setUp.
"""
self.fence_info_inst = utils.FenceInfo()
def tearDown(self):
"""
Test tearDown.
"""
@classmethod
def tearDownClass(cls):
"""
Global tearDown.
"""
class TestFenceInfo(TestCase):
@classmethod
def setUpClass(cls):
"""
Global setUp.
"""
def setUp(self):
"""
Test setUp.
"""
self.fence_info_inst = utils.FenceInfo()
def tearDown(self):
"""
Test tearDown.
"""
@classmethod
def tearDownClass(cls):
"""
Global tearDown.
"""
@mock.patch('crmsh.crash_test.utils.get_property')
def test_fence_enabled_false(self, mock_get_property):
mock_get_property.return_value = None
res = self.fence_info_inst.fence_enabled
self.assertEqual(res, False)
mock_get_property.assert_called_once_with("stonith-enabled")
@mock.patch('crmsh.crash_test.utils.get_property')
def test_fence_enabled_true(self, mock_get_property):
mock_get_property.return_value = "True"
res = self.fence_info_inst.fence_enabled
self.assertEqual(res, True)
mock_get_property.assert_called_once_with("stonith-enabled")
@mock.patch('crmsh.crash_test.utils.msg_error')
@mock.patch('crmsh.crash_test.utils.get_property')
def test_fence_action_none(self, mock_get_property, mock_error):
mock_get_property.return_value = None
res = self.fence_info_inst.fence_action
self.assertEqual(res, None)
mock_get_property.assert_called_once_with("stonith-action")
mock_error.assert_called_once_with('Cluster property "stonith-action" should be reboot|off|poweroff')
@mock.patch('crmsh.crash_test.utils.get_property')
def test_fence_action(self, mock_get_property):
mock_get_property.return_value = "reboot"
res = self.fence_info_inst.fence_action
self.assertEqual(res, "reboot")
mock_get_property.assert_called_once_with("stonith-action")
@mock.patch('crmsh.crash_test.utils.get_property')
def test_fence_timeout(self, mock_get_property):
mock_get_property.return_value = "60s"
res = self.fence_info_inst.fence_timeout
self.assertEqual(res, "60")
mock_get_property.assert_called_once_with("stonith-timeout")
@mock.patch('crmsh.crash_test.utils.get_property')
def test_fence_timeout_default(self, mock_get_property):
mock_get_property.return_value = None
res = self.fence_info_inst.fence_timeout
self.assertEqual(res, config.FENCE_TIMEOUT)
mock_get_property.assert_called_once_with("stonith-timeout")
class TestUtils(TestCase):
    '''
    Unit tests for crash_test/utils.py
    '''
@classmethod
def setUpClass(cls):
"""
Global setUp.
"""
def setUp(self):
"""
Test setUp.
"""
def tearDown(self):
"""
Test tearDown.
"""
@classmethod
def tearDownClass(cls):
"""
Global tearDown.
"""
@mock.patch('crmsh.crash_test.utils.datetime')
def test_now(self, mock_datetime):
mock_now = mock.Mock()
mock_datetime.now.return_value = mock_now
mock_now.strftime.return_value = "2019/07/05 14:44:55"
result = utils.now()
self.assertEqual(result, "2019/07/05 14:44:55")
mock_datetime.now.assert_called_once_with()
mock_now.strftime.assert_called_once_with("%Y/%m/%d %H:%M:%S")
@mock.patch('crmsh.crash_test.utils.get_handler')
def test_manage_handler(self, mock_get_handler):
mock_get_handler.return_value = "handler"
utils.logger = mock.Mock()
utils.logger.removeHandler = mock.Mock()
utils.logger.addHandler = mock.Mock()
with utils.manage_handler("type1", keep=False):
pass
mock_get_handler.assert_called_once_with(utils.logger, "type1")
utils.logger.removeHandler.assert_called_once_with("handler")
utils.logger.addHandler.assert_called_once_with("handler")
@mock.patch('crmsh.crash_test.utils.manage_handler')
def test_msg_raw(self, mock_handler):
utils.logger = mock.Mock()
utils.logger.log = mock.Mock()
utils.msg_raw("level1", "msg1")
mock_handler.assert_called_once_with("stream", True)
utils.logger.log.assert_called_once_with("level1", "msg1")
@mock.patch('crmsh.crash_test.utils.msg_raw')
def test_msg_info(self, mock_raw):
utils.msg_info("msg1")
mock_raw.assert_called_once_with(logging.INFO, "msg1", True)
@mock.patch('crmsh.crash_test.utils.msg_raw')
def test_msg_warn(self, mock_raw):
utils.msg_warn("msg1")
mock_raw.assert_called_once_with(logging.WARNING, "msg1", True)
@mock.patch('crmsh.crash_test.utils.msg_raw')
def test_msg_error(self, mock_raw):
utils.msg_error("msg1")
mock_raw.assert_called_once_with(logging.ERROR, "msg1", True)
@mock.patch('os.fsync')
@mock.patch('json.dumps')
@mock.patch('builtins.open', create=True)
def test_json_dumps(self, mock_open_file, mock_dumps, mock_fsync):
main.ctx = mock.Mock(jsonfile="file1", task_list={"process_name": "xin", "age": 38})
mock_open_write = mock.mock_open()
file_handle = mock_open_write.return_value.__enter__.return_value
mock_open_file.return_value = mock_open_write.return_value
mock_dumps.return_value = "data"
utils.json_dumps()
mock_open_file.assert_called_once_with("file1", "w")
mock_dumps.assert_called_once_with(main.ctx.task_list, indent=2)
file_handle.write.assert_called_once_with("data")
file_handle.flush.assert_called_once_with()
mock_fsync.assert_called_once_with(file_handle)
@mock.patch('crmsh.crash_test.utils.crmshutils.this_node')
@mock.patch('crmsh.crash_test.utils.msg_error')
@mock.patch('crmsh.crash_test.utils.crmshutils.get_stdout_stderr')
def test_this_node_false(self, mock_run, mock_error, mock_this_node):
mock_run.return_value = (1, None, "error data")
mock_this_node.return_value = "node1"
res = utils.this_node()
self.assertEqual(res, "node1")
mock_run.assert_called_once_with("crm_node --name")
mock_error.assert_called_once_with("error data")
mock_this_node.assert_called_once_with()
@mock.patch('crmsh.crash_test.utils.crmshutils.get_stdout_stderr')
def test_this_node(self, mock_run):
mock_run.return_value = (0, "data", None)
res = utils.this_node()
self.assertEqual(res, "data")
mock_run.assert_called_once_with("crm_node --name")
@mock.patch('crmsh.crash_test.utils.datetime')
def test_str_to_datetime(self, mock_datetime):
utils.str_to_datetime("Mon Nov 2 15:37:11 2020", "%a %b %d %H:%M:%S %Y")
mock_datetime.strptime.assert_called_once_with("Mon Nov 2 15:37:11 2020", "%a %b %d %H:%M:%S %Y")
@mock.patch('crmsh.crash_test.utils.crmshutils.get_stdout_stderr')
def test_corosync_port_list(self, mock_run):
output = """
totem.interface.0.bindnetaddr (str) = 10.10.10.121
totem.interface.0.mcastaddr (str) = 239.101.40.63
totem.interface.0.mcastport (u16) = 5405
totem.interface.0.ttl (u8) = 1
totem.interface.1.bindnetaddr (str) = 20.20.20.121
totem.interface.1.mcastaddr (str) = 239.6.213.31
totem.interface.1.mcastport (u16) = 5407
totem.interface.1.ttl (u8) = 1
"""
mock_run.return_value = (0, output, None)
result = utils.corosync_port_list()
expected = ['5405', '5407']
self.assertListEqual(result, expected)
mock_run.assert_called_once_with("corosync-cmapctl totem.interface")
def test_get_handler(self):
mock_handler1 = mock.Mock(_name="test1_handler")
mock_handler2 = mock.Mock(_name="test2_handler")
mock_logger = mock.Mock(handlers=[mock_handler1, mock_handler2])
res = utils.get_handler(mock_logger, "test1_handler")
self.assertEqual(res, mock_handler1)
@mock.patch('os.getuid')
def test_is_root(self, mock_getuid):
mock_getuid.return_value = 0
self.assertEqual(utils.is_root(), True)
mock_getuid.assert_called_once_with()
@mock.patch('crmsh.crash_test.utils.crmshutils.to_ascii')
@mock.patch('os.path.basename')
@mock.patch('builtins.open')
@mock.patch('os.path.join')
@mock.patch('os.listdir')
def test_get_process_status_false(self, mock_listdir, mock_join, mock_open_file, mock_basename, mock_to_ascii):
mock_listdir.return_value = ['1', '2', 'none']
mock_join.side_effect = ['/proc/1/cmdline', '/proc/2/cmdline']
mock_open_read_1 = mock.mock_open(read_data=b'/usr/sbin/cmd1\x00--user\x00')
mock_open_read_2 = mock.mock_open(read_data=b'/usr/sbin/cmd2\x00')
mock_open_file.side_effect = [
mock_open_read_1.return_value,
mock_open_read_2.return_value
]
mock_to_ascii.side_effect = [
"/usr/sbin/cmd1\x00--user\x00",
"/usr/sbin/cmd2\x00"
]
mock_basename.side_effect = ["cmd1", "cmd2"]
rc, pid = utils.get_process_status("sbd")
self.assertEqual(rc, False)
self.assertEqual(pid, -1)
mock_listdir.assert_called_once_with('/proc')
mock_join.assert_has_calls([
mock.call('/proc', '1', 'cmdline'),
mock.call('/proc', '2', 'cmdline')
])
mock_open_file.assert_has_calls([
mock.call('/proc/1/cmdline', 'rb'),
mock.call('/proc/2/cmdline', 'rb')
])
mock_to_ascii.assert_has_calls([
mock.call(b'/usr/sbin/cmd1\x00--user\x00'),
mock.call(b'/usr/sbin/cmd2\x00')
])
@mock.patch('crmsh.crash_test.utils.crmshutils.to_ascii')
@mock.patch('os.path.basename')
@mock.patch('builtins.open')
@mock.patch('os.path.join')
@mock.patch('os.listdir')
def test_get_process_status(self, mock_listdir, mock_join, mock_open_file, mock_basename, mock_to_ascii):
mock_listdir.return_value = ['1', '2', 'none']
mock_join.side_effect = ['/proc/1/cmdline', '/proc/2/cmdline']
mock_open_read_1 = mock.mock_open(read_data=b'/usr/sbin/cmd1\x00--user\x00')
mock_open_read_2 = mock.mock_open(read_data=b'/usr/sbin/sbd\x00')
mock_open_file.side_effect = [
mock_open_read_1.return_value,
mock_open_read_2.return_value
]
mock_to_ascii.side_effect = [
"/usr/sbin/cmd1\x00--user\x00",
"/usr/sbin/sbd\x00"
]
mock_basename.side_effect = ["cmd1", "sbd"]
rc, pid = utils.get_process_status("sbd")
self.assertEqual(rc, True)
self.assertEqual(pid, 2)
mock_listdir.assert_called_once_with('/proc')
mock_join.assert_has_calls([
mock.call('/proc', '1', 'cmdline'),
mock.call('/proc', '2', 'cmdline')
])
mock_open_file.assert_has_calls([
mock.call('/proc/1/cmdline', 'rb'),
mock.call('/proc/2/cmdline', 'rb')
])
mock_to_ascii.assert_has_calls([
mock.call(b'/usr/sbin/cmd1\x00--user\x00'),
mock.call(b'/usr/sbin/sbd\x00')
])
@mock.patch('crmsh.crash_test.utils.msg_error')
@mock.patch('crmsh.crash_test.utils.crmshutils.get_stdout_stderr')
def test_check_node_status_error_cmd(self, mock_run, mock_error):
mock_run.return_value = (1, None, "error")
res = utils.check_node_status("node1", "member")
self.assertEqual(res, False)
mock_run.assert_called_once_with("crm_node -l")
mock_error.assert_called_once_with("error")
@mock.patch('crmsh.crash_test.utils.msg_error')
@mock.patch('crmsh.crash_test.utils.crmshutils.get_stdout_stderr')
def test_check_node_status(self, mock_run, mock_error):
output = """
1084783297 15sp2-1 member
1084783193 15sp2-2 lost
"""
mock_run.return_value = (0, output, None)
res = utils.check_node_status("15sp2-2", "member")
self.assertEqual(res, False)
res = utils.check_node_status("15sp2-1", "member")
self.assertEqual(res, True)
mock_run.assert_has_calls([
mock.call("crm_node -l"),
mock.call("crm_node -l")
])
mock_error.assert_not_called()
@mock.patch('crmsh.crash_test.utils.crmshutils.get_stdout_stderr')
def test_online_nodes_empty(self, mock_run):
mock_run.return_value = (0, "data", None)
res = utils.online_nodes()
self.assertEqual(res, [])
mock_run.assert_called_once_with("crm_mon -1")
@mock.patch('crmsh.crash_test.utils.crmshutils.get_stdout_stderr')
def test_online_nodes(self, mock_run):
output = """
Node List:
* Online: [ 15sp2-1 15sp2-2 ]
"""
mock_run.return_value = (0, output, None)
res = utils.online_nodes()
self.assertEqual(res, ["15sp2-1", "15sp2-2"])
mock_run.assert_called_once_with("crm_mon -1")
@mock.patch('crmsh.crash_test.utils.crmshutils.get_stdout_stderr')
def test_get_property_none(self, mock_run):
mock_run.return_value = (1, None, "error")
res = utils.get_property("test")
self.assertEqual(res, None)
mock_run.assert_called_once_with("crm configure get_property test")
@mock.patch('crmsh.crash_test.utils.crmshutils.get_stdout_stderr')
def test_get_property(self, mock_run):
mock_run.return_value = (0, "data", None)
res = utils.get_property("test")
self.assertEqual(res, "data")
mock_run.assert_called_once_with("crm configure get_property test")
@mock.patch('crmsh.crash_test.utils.online_nodes')
def test_peer_node_list_empty(self, mock_online):
mock_online.return_value = None
res = utils.peer_node_list()
self.assertEqual(res, [])
mock_online.assert_called_once_with()
@mock.patch('crmsh.crash_test.utils.this_node')
@mock.patch('crmsh.crash_test.utils.online_nodes')
def test_peer_node_list(self, mock_online, mock_this_node):
mock_online.return_value = ["node1", "node2"]
mock_this_node.return_value = "node1"
res = utils.peer_node_list()
self.assertEqual(res, ["node2"])
mock_online.assert_called_once_with()
# Test is_valid_sbd():
@classmethod
@mock.patch('os.path.exists')
def test_is_valid_sbd_not_exist(cls, mock_os_path_exists):
"""
Test device not exist
"""
dev = "/dev/disk/by-id/scsi-device1"
mock_os_path_exists.return_value = False
res = utils.is_valid_sbd(dev)
assert res is False
@classmethod
@mock.patch('crmsh.crash_test.utils.msg_error')
@mock.patch('crmsh.utils.get_stdout_stderr')
@mock.patch('os.path.exists')
def test_is_valid_sbd_cmd_error(cls, mock_os_path_exists,
mock_sbd_check_header, mock_msg_err):
"""
Test device is not valid sbd
"""
dev = "/dev/disk/by-id/scsi-device1"
mock_os_path_exists.return_value = True
mock_sbd_check_header.return_value = (-1, None, "Unknown error!")
mock_msg_err.return_value = ""
res = utils.is_valid_sbd(dev)
mock_msg_err.assert_called_once_with("Unknown error!")
assert res is False
@classmethod
@mock.patch('crmsh.crash_test.utils.msg_error')
@mock.patch('crmsh.utils.get_stdout_stderr')
@mock.patch('os.path.exists')
def test_is_valid_sbd_not_sbd(cls, mock_os_path_exists,
mock_sbd_check_header, mock_msg_err):
"""
Test device is not SBD device
"""
dev = "/dev/disk/by-id/scsi-device1"
err_output = """
==Dumping header on disk {}
==Header on disk {} NOT dumped
sbd failed; please check the logs.
""".format(dev, dev)
mock_os_path_exists.return_value = True
mock_sbd_check_header.return_value = (1, "==Dumping header on disk {}".format(dev),
err_output)
res = utils.is_valid_sbd(dev)
assert res is False
mock_msg_err.assert_called_once_with(err_output)
@classmethod
@mock.patch('crmsh.utils.get_stdout_stderr')
@mock.patch('os.path.exists')
def test_is_valid_sbd_is_sbd(cls, mock_os_path_exists,
mock_sbd_check_header):
"""
Test device is not SBD device
"""
dev = "/dev/disk/by-id/scsi-device1"
std_output = """
==Dumping header on disk {}
Header version : 2.1
UUID : f4c99362-6522-46fc-8ce4-7db60aff19bb
Number of slots : 255
Sector size : 512
Timeout (watchdog) : 5
Timeout (allocate) : 2
Timeout (loop) : 1
Timeout (msgwait) : 10
==Header on disk {} is dumped
""".format(dev, dev)
mock_os_path_exists.return_value = True
mock_sbd_check_header.return_value = (0, std_output, None)
res = utils.is_valid_sbd(dev)
assert res is True
# Test find_candidate_sbd() and _find_match_count()
@classmethod
@mock.patch('glob.glob')
@mock.patch('os.path.basename')
@mock.patch('os.path.dirname')
def test_find_candidate_no_dev(cls, mock_os_path_dname, mock_os_path_bname,
mock_glob):
"""
Test no suitable device
"""
mock_os_path_dname.return_value = "/dev/disk/by-id"
mock_os_path_bname.return_value = "scsi-label_CN_devA"
mock_glob.return_value = []
res = utils.find_candidate_sbd("/not-exist-folder/not-exist-dev")
assert res == ""
@classmethod
@mock.patch('crmsh.crash_test.utils.is_valid_sbd')
@mock.patch('glob.glob')
@mock.patch('os.path.basename')
@mock.patch('os.path.dirname')
def test_find_candidate_no_can(cls, mock_os_path_dname, mock_os_path_bname,
mock_glob, mock_is_valid_sbd):
"""
Test no valid candidate device
"""
mock_os_path_dname.return_value = "/dev/disk/by-id"
mock_os_path_bname.return_value = "scsi-label_CN_devA"
mock_glob.return_value = ["/dev/disk/by-id/scsi-label_DE_devA",
"/dev/disk/by-id/scsi-label_DE_devB",
"/dev/disk/by-id/scsi-label_DE_devC",
"/dev/disk/by-id/scsi-label_DE_devD"]
mock_is_valid_sbd.side_effect = [False, False, False, False]
res = utils.find_candidate_sbd("/dev/disk/by-id/scsi-label_CN_devA")
assert res == ""
@classmethod
@mock.patch('crmsh.crash_test.utils.is_valid_sbd')
@mock.patch('glob.glob')
@mock.patch('os.path.basename')
@mock.patch('os.path.dirname')
def test_find_candidate_has_multi(cls, mock_os_path_dname, mock_os_path_bname,
mock_glob, mock_is_valid_sbd):
"""
Test has multiple valid candidate devices
"""
mock_os_path_dname.return_value = "/dev/disk/by-id"
mock_os_path_bname.return_value = "scsi-label_CN_devA"
mock_glob.return_value = ["/dev/disk/by-id/scsi-label_DE_devA",
"/dev/disk/by-id/scsi-label_DE_devB",
"/dev/disk/by-id/scsi-label_CN_devC",
"/dev/disk/by-id/scsi-label_CN_devD",
"/dev/disk/by-id/scsi-mp_China_devE",
"/dev/disk/by-id/scsi-mp_China_devF"]
mock_is_valid_sbd.side_effect = [True, False, False, True, True, False]
res = utils.find_candidate_sbd("/dev/disk/by-id/scsi-label_CN_devA")
assert res == "/dev/disk/by-id/scsi-label_CN_devD"
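
# Usage note: these are standard unittest TestCase classes; from the
# repository root they can be run with, e.g.,
#   python -m pytest test/unittests/test_crashtest_utils.py
# or discovered via `python -m unittest`.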
| dmuhamedagic/crmsh | test/unittests/test_crashtest_utils.py | Python | gpl-2.0 | 20,445 |
# vim:ts=4:sw=4:sts=4:et
# -*- coding: utf-8 -*-
"""Classes related to graph clustering."""
from copy import deepcopy
from math import pi
from io import StringIO
from igraph import community_to_membership
from igraph.configuration import Configuration
from igraph.datatypes import UniqueIdGenerator
from igraph.drawing.colors import ClusterColoringPalette
from igraph.statistics import Histogram
from igraph.summary import _get_wrapper_for_width
from igraph.utils import str_to_orientation
class Clustering:
"""Class representing a clustering of an arbitrary ordered set.
This is now used as a base for L{VertexClustering}, but it might be
useful for other purposes as well.
Members of an individual cluster can be accessed by the C{[]} operator:
>>> cl = Clustering([0,0,0,0,1,1,1,2,2,2,2])
>>> cl[0]
[0, 1, 2, 3]
The membership vector can be accessed by the C{membership} property:
>>> cl.membership
[0, 0, 0, 0, 1, 1, 1, 2, 2, 2, 2]
The number of clusters can be retrieved by the C{len} function:
>>> len(cl)
3
You can iterate over the clustering object as if it were a regular list
of clusters:
>>> for cluster in cl:
... print(" ".join(str(idx) for idx in cluster))
...
0 1 2 3
4 5 6
7 8 9 10
If you need all the clusters at once as lists, you can simply convert
the clustering object to a list:
>>> cluster_list = list(cl)
>>> print(cluster_list)
[[0, 1, 2, 3], [4, 5, 6], [7, 8, 9, 10]]
"""
    def __init__(self, membership, params=None):
        """Constructor.
        @param membership: the membership list -- that is, the cluster
        index to which each element of the set belongs.
        @param params: additional parameters to be stored in this
        object's dictionary."""
self._membership = list(membership)
if len(self._membership) > 0:
self._len = max(m for m in self._membership if m is not None) + 1
else:
self._len = 0
if params:
self.__dict__.update(params)
def __getitem__(self, idx):
"""Returns the members of the specified cluster.
@param idx: the index of the cluster
@return: the members of the specified cluster as a list
@raise IndexError: if the index is out of bounds"""
if idx < 0 or idx >= self._len:
raise IndexError("cluster index out of range")
return [i for i, e in enumerate(self._membership) if e == idx]
def __iter__(self):
"""Iterates over the clusters in this clustering.
This method will return a generator that generates the clusters
one by one."""
clusters = [[] for _ in range(self._len)]
for idx, cluster in enumerate(self._membership):
clusters[cluster].append(idx)
return iter(clusters)
def __len__(self):
"""Returns the number of clusters.
@return: the number of clusters
"""
return self._len
def __str__(self):
return self.summary(verbosity=1, width=78)
    def as_cover(self):
        """Returns a L{Cover} that contains the same clusters as this clustering."""
        # The base Clustering has no _graph attribute; Cover wraps the
        # clustering itself (the graph-bound case is VertexClustering.as_cover)
        return Cover(self)
def compare_to(self, other, *args, **kwds):
"""Compares this clustering to another one using some similarity or
distance metric.
This is a convenience method that simply calls L{compare_communities}
with the two clusterings as arguments. Any extra positional or keyword
argument is also forwarded to L{compare_communities}."""
return compare_communities(self, other, *args, **kwds)
@property
def membership(self):
"""Returns the membership vector."""
return self._membership[:]
@property
def n(self):
"""Returns the number of elements covered by this clustering."""
return len(self._membership)
def size(self, idx):
"""Returns the size of a given cluster.
@param idx: the cluster in which we are interested.
"""
return len(self[idx])
def sizes(self, *args):
"""Returns the size of given clusters.
The indices are given as positional arguments. If there are no
positional arguments, the function will return the sizes of all clusters.
"""
counts = [0] * len(self)
for x in self._membership:
counts[x] += 1
if args:
return [counts[idx] for idx in args]
return counts
def size_histogram(self, bin_width=1):
"""Returns the histogram of cluster sizes.
@param bin_width: the bin width of the histogram
@return: a L{Histogram} object
"""
return Histogram(bin_width, self.sizes())
def summary(self, verbosity=0, width=None):
"""Returns the summary of the clustering.
The summary includes the number of items and clusters, and also the
list of members for each of the clusters if the verbosity is nonzero.
@param verbosity: determines whether the cluster members should be
printed. Zero verbosity prints the number of items and clusters only.
@return: the summary of the clustering as a string.
"""
out = StringIO()
print(
"Clustering with %d elements and %d clusters"
% (
len(self._membership),
len(self),
),
file=out,
)
if verbosity < 1:
return out.getvalue().strip()
ndigits = len(str(len(self)))
wrapper = _get_wrapper_for_width(width, subsequent_indent=" " * (ndigits + 3))
for idx, cluster in enumerate(self._formatted_cluster_iterator()):
wrapper.initial_indent = "[%*d] " % (ndigits, idx)
print("\n".join(wrapper.wrap(cluster)), file=out)
return out.getvalue().strip()
def _formatted_cluster_iterator(self):
"""Iterates over the clusters and formats them into a string to be
presented in the summary."""
for cluster in self:
yield ", ".join(str(member) for member in cluster)
class VertexClustering(Clustering):
"""The clustering of the vertex set of a graph.
This class extends L{Clustering} by linking it to a specific L{Graph} object
and by optionally storing the modularity score of the clustering.
It also provides some handy methods like getting the subgraph corresponding
to a cluster and such.
@note: since this class is linked to a L{Graph}, destroying the graph by the
C{del} operator does not free the memory occupied by the graph if there
exists a L{VertexClustering} that references the L{Graph}.
"""
# Allow None to be passed to __plot__ as the "palette" keyword argument
_default_palette = None
def __init__(
self,
graph,
membership=None,
modularity=None,
params=None,
modularity_params=None,
):
"""Creates a clustering object for a given graph.
@param graph: the graph that will be associated to the clustering
@param membership: the membership list. The length of the list must
be equal to the number of vertices in the graph. If C{None}, every
vertex is assumed to belong to the same cluster.
@param modularity: the modularity score of the clustering. If C{None},
it will be calculated when needed.
@param params: additional parameters to be stored in this object.
@param modularity_params: arguments that should be passed to
L{Graph.modularity} when the modularity is (re)calculated. If the
original graph was weighted, you should pass a dictionary
containing a C{weight} key with the appropriate value here.
"""
if membership is None:
Clustering.__init__(self, [0] * graph.vcount(), params)
else:
if len(membership) != graph.vcount():
raise ValueError("membership list has invalid length")
Clustering.__init__(self, membership, params)
self._graph = graph
self._modularity = modularity
self._modularity_dirty = modularity is None
if modularity_params is None:
self._modularity_params = {}
else:
self._modularity_params = dict(modularity_params)
@classmethod
def FromAttribute(cls, graph, attribute, intervals=None, params=None):
"""Creates a vertex clustering based on the value of a vertex attribute.
Vertices having the same attribute will correspond to the same cluster.
@param graph: the graph on which we are working
@param attribute: name of the attribute on which the clustering
is based.
@param intervals: for numeric attributes, you can either pass a single
number or a list of numbers here. A single number means that the
vertices will be put in bins of that width and vertices ending up
in the same bin will be in the same cluster. A list of numbers
specify the bin positions explicitly; e.g., C{[10, 20, 30]} means
that there will be four categories: vertices with the attribute
value less than 10, between 10 and 20, between 20 and 30 and over 30.
Intervals are closed from the left and open from the right.
@param params: additional parameters to be stored in this object.
@return: a new VertexClustering object
"""
from bisect import bisect
def safeintdiv(x, y):
"""Safe integer division that handles None gracefully"""
if x is None:
return None
return int(x / y)
def safebisect(intervals, x):
"""Safe list bisection that handles None gracefully"""
if x is None:
return None
return bisect(intervals, x)
try:
_ = iter(intervals)
iterable = True
except TypeError:
iterable = False
if intervals is None:
vec = graph.vs[attribute]
elif iterable:
intervals = list(intervals)
vec = [safebisect(intervals, x) for x in graph.vs[attribute]]
else:
intervals = float(intervals)
vec = [safeintdiv(x, intervals) for x in graph.vs[attribute]]
idgen = UniqueIdGenerator()
idgen[None] = None
vec = [idgen[i] for i in vec]
return cls(graph, vec, None, params)
def as_cover(self):
"""Returns a L{VertexCover} that contains the same clusters as this
clustering."""
return VertexCover(self._graph, self)
def cluster_graph(self, combine_vertices=None, combine_edges=None):
"""Returns a graph where each cluster is contracted into a single
vertex.
In the resulting graph, vertex M{i} represents cluster M{i} in this
clustering. Vertex M{i} and M{j} will be connected if there was
at least one connected vertex pair M{(a, b)} in the original graph such
that vertex M{a} was in cluster M{i} and vertex M{b} was in cluster
M{j}.
@param combine_vertices: specifies how to derive the attributes of
the vertices in the new graph from the attributes of the old ones.
See L{Graph.contract_vertices()<igraph._igraph.GraphBase.contract_vertices>}
for more details.
@param combine_edges: specifies how to derive the attributes of the
edges in the new graph from the attributes of the old ones. See
L{Graph.simplify()<igraph._igraph.GraphBase.simplify>} for more details.
If you specify C{False} here, edges will not be combined, and the
number of edges between the vertices representing the original
clusters will be equal to the number of edges between the members of
those clusters in the original graph.
@return: the new graph.
"""
result = self.graph.copy()
result.contract_vertices(self.membership, combine_vertices)
if combine_edges is not False:
result.simplify(combine_edges=combine_edges)
return result
def crossing(self):
"""Returns a boolean vector where element M{i} is C{True} iff edge
M{i} lies between clusters, C{False} otherwise."""
membership = self.membership
return [
membership[v1] != membership[v2] for v1, v2 in self.graph.get_edgelist()
]
@property
def modularity(self):
"""Returns the modularity score"""
if self._modularity_dirty:
return self._recalculate_modularity_safe()
return self._modularity
q = modularity
@property
def graph(self):
"""Returns the graph belonging to this object"""
return self._graph
def recalculate_modularity(self):
"""Recalculates the stored modularity value.
This method must be called before querying the modularity score of the
clustering through the class member C{modularity} or C{q} if the
graph has been modified (edges have been added or removed) since the
creation of the L{VertexClustering} object.
@return: the new modularity score
"""
self._modularity = self._graph.modularity(
self._membership, **self._modularity_params
)
self._modularity_dirty = False
return self._modularity
def _recalculate_modularity_safe(self):
"""Recalculates the stored modularity value and swallows all exceptions
raised by the modularity function (if any).
@return: the new modularity score or C{None} if the modularity function
could not be calculated.
"""
try:
return self.recalculate_modularity()
except Exception:
return None
finally:
self._modularity_dirty = False
def subgraph(self, idx):
"""Get the subgraph belonging to a given cluster.
Precondition: the vertex set of the graph hasn't been modified since the
moment the cover was constructed.
@param idx: the cluster index
@return: a copy of the subgraph
"""
return self._graph.subgraph(self[idx])
def subgraphs(self):
"""Gets all the subgraphs belonging to each of the clusters.
Precondition: the vertex set of the graph hasn't been modified since the
moment the cover was constructed.
@return: a list containing copies of the subgraphs
"""
return [self._graph.subgraph(cl) for cl in self]
def giant(self):
"""Returns the largest cluster of the clustered graph.
The largest cluster is a cluster for which no larger cluster exists in
the clustering. It may also be known as the I{giant community} if the
clustering represents the result of a community detection function.
Precondition: the vertex set of the graph hasn't been modified since the
moment the cover was constructed.
@note: there can be multiple largest clusters, this method will return
the copy of an arbitrary one if there are multiple largest clusters.
@return: a copy of the largest cluster.
"""
ss = self.sizes()
max_size = max(ss)
return self.subgraph(ss.index(max_size))
def __plot__(self, context, bbox, palette, *args, **kwds):
"""Plots the clustering to the given Cairo context in the given
bounding box.
This is done by calling L{Graph.__plot__()} with the same arguments, but
coloring the graph vertices according to the current clustering (unless
overridden by the C{vertex_color} argument explicitly).
This method understands all the positional and keyword arguments that
are understood by L{Graph.__plot__()}, only the differences will be
highlighted here:
- C{mark_groups}: whether to highlight some of the vertex groups by
colored polygons. Besides the values accepted by L{Graph.__plot__}
(i.e., a dict mapping colors to vertex indices, a list containing
lists of vertex indices, or C{False}), the following are also
accepted:
- C{True}: all the groups will be highlighted, the colors matching
the corresponding color indices from the current palette
(see the C{palette} keyword argument of L{Graph.__plot__}.
- A dict mapping cluster indices or tuples of vertex indices to
color names. The given clusters or vertex groups will be
highlighted by the given colors.
- A list of cluster indices. This is equivalent to passing a
dict mapping numeric color indices from the current palette
to cluster indices; therefore, the cluster referred to by element
I{i} of the list will be highlighted by color I{i} from the
palette.
The value of the C{plotting.mark_groups} configuration key is also
taken into account here; if that configuration key is C{True} and
C{mark_groups} is not given explicitly, it will automatically be set
to C{True}.
In place of lists of vertex indices, you may also use L{VertexSeq}
instances.
In place of color names, you may also use color indices into the
current palette. C{None} as a color name will mean that the
corresponding group is ignored.
- C{palette}: the palette used to resolve numeric color indices to RGBA
values. By default, this is an instance of L{ClusterColoringPalette}.
@see: L{Graph.__plot__()} for more supported keyword arguments.
"""
if "edge_color" not in kwds and "color" not in self.graph.edge_attributes():
# Set up a default edge coloring based on internal vs external edges
colors = ["grey20", "grey80"]
kwds["edge_color"] = [
colors[is_crossing] for is_crossing in self.crossing()
]
if palette is None:
palette = ClusterColoringPalette(len(self))
if "mark_groups" not in kwds:
if Configuration.instance()["plotting.mark_groups"]:
kwds["mark_groups"] = self
else:
kwds["mark_groups"] = _handle_mark_groups_arg_for_clustering(
kwds["mark_groups"], self
)
if "vertex_color" not in kwds:
kwds["vertex_color"] = self.membership
return self._graph.__plot__(context, bbox, palette, *args, **kwds)
def _formatted_cluster_iterator(self):
"""Iterates over the clusters and formats them into a string to be
presented in the summary."""
if self._graph.is_named():
names = self._graph.vs["name"]
for cluster in self:
yield ", ".join(str(names[member]) for member in cluster)
else:
for cluster in self:
yield ", ".join(str(member) for member in cluster)
###############################################################################
class Dendrogram:
"""The hierarchical clustering (dendrogram) of some dataset.
A hierarchical clustering means that we know not only the way the
elements are separated into groups, but also the exact history of
how individual elements were joined into larger subgroups.
This class internally represents the hierarchy by a matrix with n rows
and 2 columns -- or more precisely, a list of lists of size 2. This is
exactly the same as the original format used by C{igraph}'s C core.
The M{i}th row of the matrix contains the indices of the two clusters
being joined in time step M{i}. The joint group will be represented by
the ID M{n+i}, with M{i} starting from one. The ID of the joint group
will be referenced in the upcoming steps instead of any of its individual
members. So, IDs less than or equal to M{n} (where M{n} is the number of
rows in the matrix) mean the original members of the dataset (with ID
from 0 to M{n}), while IDs up from M{n+1} mean joint groups. As an
example, take a look at the dendrogram and the internal representation of
a given clustering of five nodes::
      0 -+
         |
      1 -+-+
           |
      2 ---+-+        <====>      [[0, 1], [3, 4], [2, 5], [6, 7]]
             |
      3 -+   |
         |   |
      4 -+---+---
"""
def __init__(self, merges):
"""Creates a hierarchical clustering.
@param merges: the merge history either in matrix or tuple format"""
self._merges = [tuple(pair) for pair in merges]
self._nmerges = len(self._merges)
if self._nmerges:
self._nitems = max(self._merges[-1]) - self._nmerges + 2
else:
self._nitems = 0
self._names = None
    @staticmethod
    def _convert_matrix_to_tuple_repr(merges, n=None):
        """Converts the matrix representation of a clustering to a tuple
        representation.
        @param merges: the matrix representation of the clustering
        @return: the tuple representation of the clustering
        """
        if n is None:
            n = len(merges) + 1
        # Lists are required here: range objects cannot be mutated or appended to
        tuple_repr = list(range(n))
        idxs = list(range(n))
        for rowidx, row in enumerate(merges):
            i, j = row
            try:
                idxi, idxj = idxs[i], idxs[j]
                tuple_repr[idxi] = (tuple_repr[idxi], tuple_repr[idxj])
                tuple_repr[idxj] = None
            except IndexError:
                raise ValueError(
                    "malformed matrix, subgroup referenced "
                    + "before being created in step %d" % rowidx
                )
            # The merged group's tuple lives at idxi, so the new group ID
            # (n + rowidx) must resolve to that index on later lookups
            idxs.append(idxi)
        return [x for x in tuple_repr if x is not None]
def _traverse_inorder(self):
"""Conducts an inorder traversal of the merge tree.
The inorder traversal returns the nodes on the last level in the order
they should be drawn so that no edges cross each other.
@return: the result of the inorder traversal in a list."""
result = []
seen_nodes = set()
for node_index in reversed(range(self._nitems + self._nmerges)):
if node_index in seen_nodes:
continue
stack = [node_index]
while stack:
last = stack.pop()
seen_nodes.add(last)
if last < self._nitems:
# 'last' is a regular node so the traversal ends here, we
# can append it to the results
result.append(last)
else:
# 'last' is a merge node, so let us proceed with the entry
# where this merge node was created
stack.extend(self._merges[last - self._nitems])
return result
def __str__(self):
return self.summary(verbosity=1)
def format(self, format="newick"):
"""Formats the dendrogram in a foreign format.
Currently only the Newick format is supported.
Example:
>>> d = Dendrogram([(2, 3), (0, 1), (4, 5)])
>>> d.format()
'((2,3)4,(0,1)5)6;'
>>> d.names = list("ABCDEFG")
>>> d.format()
'((C,D)E,(A,B)F)G;'
"""
if format == "newick":
n = self._nitems + self._nmerges
if self._names is None:
nodes = list(range(n))
else:
nodes = list(self._names)
if len(nodes) < n:
nodes.extend("" for _ in range(n - len(nodes)))
for k, (i, j) in enumerate(self._merges, self._nitems):
nodes[k] = "(%s,%s)%s" % (nodes[i], nodes[j], nodes[k])
nodes[i] = nodes[j] = None
return nodes[-1] + ";"
raise ValueError("unsupported format: %r" % format)
def summary(self, verbosity=0, max_leaf_count=40):
"""Returns the summary of the dendrogram.
The summary includes the number of leafs and branches, and also an
ASCII art representation of the dendrogram unless it is too large.
@param verbosity: determines whether the ASCII representation of the
dendrogram should be printed. Zero verbosity prints only the number
of leafs and branches.
@param max_leaf_count: the maximal number of leafs to print in the
ASCII representation. If the dendrogram has more leafs than this
limit, the ASCII representation will not be printed even if the
verbosity is larger than or equal to 1.
@return: the summary of the dendrogram as a string.
"""
out = StringIO()
print(
"Dendrogram, %d elements, %d merges"
% (
self._nitems,
self._nmerges,
),
file=out,
)
if self._nitems == 0 or verbosity < 1 or self._nitems > max_leaf_count:
return out.getvalue().strip()
print("", file=out)
positions = [None] * self._nitems
inorder = self._traverse_inorder()
distance = 2
level_distance = 2
nextp = 0
for idx, element in enumerate(inorder):
positions[element] = nextp
inorder[idx] = str(element)
nextp += max(distance, len(inorder[idx]) + 1)
width = max(positions) + 1
# Print the nodes on the lowest level
print((" " * (distance - 1)).join(inorder), file=out)
midx = 0
max_community_idx = self._nitems
while midx < self._nmerges:
char_array = [" "] * width
for position in positions:
if position >= 0:
char_array[position] = "|"
char_str = "".join(char_array)
for _ in range(level_distance - 1):
print(char_str, file=out) # Print the lines
cidx_incr = 0
while midx < self._nmerges:
id1, id2 = self._merges[midx]
if id1 >= max_community_idx or id2 >= max_community_idx:
break
midx += 1
pos1, pos2 = positions[id1], positions[id2]
positions[id1], positions[id2] = -1, -1
if pos1 > pos2:
pos1, pos2 = pos2, pos1
positions.append((pos1 + pos2) // 2)
dashes = "-" * (pos2 - pos1 - 1)
char_array[pos1 : (pos2 + 1)] = "`%s'" % dashes
cidx_incr += 1
max_community_idx += cidx_incr
print("".join(char_array), file=out)
return out.getvalue().strip()
def _item_box_size(self, context, horiz, idx):
"""Calculates the amount of space needed for drawing an
individual vertex at the bottom of the dendrogram."""
if self._names is None or self._names[idx] is None:
x_bearing, _, _, height, x_advance, _ = context.text_extents("")
else:
x_bearing, _, _, height, x_advance, _ = context.text_extents(
str(self._names[idx])
)
if horiz:
return x_advance - x_bearing, height
return height, x_advance - x_bearing
def _plot_item(self, context, horiz, idx, x, y):
"""Plots a dendrogram item to the given Cairo context
@param context: the Cairo context we are plotting on
@param horiz: whether the dendrogram is horizontally oriented
@param idx: the index of the item
@param x: the X position of the item
@param y: the Y position of the item
"""
if self._names is None or self._names[idx] is None:
return
height = self._item_box_size(context, True, idx)[1]
if horiz:
context.move_to(x, y + height)
context.show_text(str(self._names[idx]))
else:
context.save()
context.translate(x, y)
context.rotate(-pi / 2.0)
context.move_to(0, height)
context.show_text(str(self._names[idx]))
context.restore()
def __plot__(self, context, bbox, palette, *args, **kwds):
"""Draws the dendrogram on the given Cairo context
Supported keyword arguments are:
- C{orientation}: the orientation of the dendrogram. Must be one of
the following values: C{left-right}, C{bottom-top}, C{right-left}
or C{top-bottom}. Individual elements are always placed at the
former edge and merges are performed towards the latter edge.
Possible aliases: C{horizontal} = C{left-right},
C{vertical} = C{bottom-top}, C{lr} = C{left-right},
C{rl} = C{right-left}, C{tb} = C{top-bottom}, C{bt} = C{bottom-top}.
The default is C{left-right}.
"""
from igraph.layout import Layout
if self._names is None:
self._names = [str(x) for x in range(self._nitems)]
orientation = str_to_orientation(
kwds.get("orientation", "lr"), reversed_vertical=True
)
horiz = orientation in ("lr", "rl")
# Get the font height
font_height = context.font_extents()[2]
# Calculate space needed for individual items at the
# bottom of the dendrogram
item_boxes = [
self._item_box_size(context, horiz, idx) for idx in range(self._nitems)
]
# Small correction for cases when the right edge of the labels is
# aligned with the tips of the dendrogram branches
ygap = 2 if orientation == "bt" else 0
xgap = 2 if orientation == "lr" else 0
item_boxes = [(x + xgap, y + ygap) for x, y in item_boxes]
# Calculate coordinates
layout = Layout([(0, 0)] * self._nitems, dim=2)
inorder = self._traverse_inorder()
if not horiz:
x, y = 0, 0
for idx, element in enumerate(inorder):
layout[element] = (x, 0)
x += max(font_height, item_boxes[element][0])
for id1, id2 in self._merges:
y += 1
layout.append(((layout[id1][0] + layout[id2][0]) / 2.0, y))
# Mirror or rotate the layout if necessary
if orientation == "bt":
layout.mirror(1)
else:
x, y = 0, 0
for idx, element in enumerate(inorder):
layout[element] = (0, y)
y += max(font_height, item_boxes[element][1])
for id1, id2 in self._merges:
x += 1
layout.append((x, (layout[id1][1] + layout[id2][1]) / 2.0))
# Mirror or rotate the layout if necessary
if orientation == "rl":
layout.mirror(0)
# Rescale layout to the bounding box
maxw = max(e[0] for e in item_boxes)
maxh = max(e[1] for e in item_boxes)
# w, h: width and height of the area containing the dendrogram
# tree without the items.
# delta_x, delta_y: displacement of the dendrogram tree
width, height = float(bbox.width), float(bbox.height)
delta_x, delta_y = 0, 0
if horiz:
width -= maxw
if orientation == "lr":
delta_x = maxw
else:
height -= maxh
if orientation == "tb":
delta_y = maxh
if horiz:
delta_y += font_height / 2.0
else:
delta_x += font_height / 2.0
layout.fit_into(
(delta_x, delta_y, width - delta_x, height - delta_y),
keep_aspect_ratio=False,
)
context.save()
context.translate(bbox.left, bbox.top)
context.set_source_rgb(0.0, 0.0, 0.0)
context.set_line_width(1)
# Draw items
if horiz:
sgn = 0 if orientation == "rl" else -1
for idx in range(self._nitems):
x = layout[idx][0] + sgn * item_boxes[idx][0]
y = layout[idx][1] - item_boxes[idx][1] / 2.0
self._plot_item(context, horiz, idx, x, y)
else:
sgn = 1 if orientation == "bt" else 0
for idx in range(self._nitems):
x = layout[idx][0] - item_boxes[idx][0] / 2.0
y = layout[idx][1] + sgn * item_boxes[idx][1]
self._plot_item(context, horiz, idx, x, y)
# Draw dendrogram lines
if not horiz:
for idx, (id1, id2) in enumerate(self._merges):
x0, y0 = layout[id1]
x1, y1 = layout[id2]
x2, y2 = layout[idx + self._nitems]
context.move_to(x0, y0)
context.line_to(x0, y2)
context.line_to(x1, y2)
context.line_to(x1, y1)
context.stroke()
else:
for idx, (id1, id2) in enumerate(self._merges):
x0, y0 = layout[id1]
x1, y1 = layout[id2]
x2, y2 = layout[idx + self._nitems]
context.move_to(x0, y0)
context.line_to(x2, y0)
context.line_to(x2, y1)
context.line_to(x1, y1)
context.stroke()
context.restore()
@property
def merges(self):
"""Returns the performed merges in matrix format"""
return deepcopy(self._merges)
@property
def names(self):
"""Returns the names of the nodes in the dendrogram"""
return self._names
@names.setter
def names(self, items):
"""Sets the names of the nodes in the dendrogram"""
if items is None:
self._names = None
return
items = list(items)
if len(items) < self._nitems:
raise ValueError("must specify at least %d names" % self._nitems)
n = self._nitems + self._nmerges
self._names = items[:n]
if len(self._names) < n:
self._names.extend("" for _ in range(n - len(self._names)))
class VertexDendrogram(Dendrogram):
"""The dendrogram resulting from the hierarchical clustering of the
vertex set of a graph."""
def __init__(
self, graph, merges, optimal_count=None, params=None, modularity_params=None
):
"""Creates a dendrogram object for a given graph.
@param graph: the graph that will be associated to the clustering
@param merges: the merges performed given in matrix form.
@param optimal_count: the optimal number of clusters where the
dendrogram should be cut. This is a hint usually provided by the
clustering algorithm that produces the dendrogram. C{None} means
that such a hint is not available; the optimal count will then be
selected based on the modularity in such a case.
@param params: additional parameters to be stored in this object.
@param modularity_params: arguments that should be passed to
L{Graph.modularity} when the modularity is (re)calculated. If the
original graph was weighted, you should pass a dictionary
containing a C{weight} key with the appropriate value here.
"""
Dendrogram.__init__(self, merges)
self._graph = graph
self._optimal_count = optimal_count
if modularity_params is None:
self._modularity_params = {}
else:
self._modularity_params = dict(modularity_params)
def as_clustering(self, n=None):
"""Cuts the dendrogram at the given level and returns a corresponding
L{VertexClustering} object.
@param n: the desired number of clusters. Merges are replayed from the
beginning until the membership vector has exactly M{n} distinct elements
or until there are no more recorded merges, whichever happens first.
If C{None}, the optimal count hint given by the clustering algorithm
will be used. If the optimal count was not given either, it will be
calculated by selecting the level where the modularity is maximal.
@return: a new L{VertexClustering} object.
"""
if n is None:
n = self.optimal_count
num_elts = self._graph.vcount()
idgen = UniqueIdGenerator()
membership = community_to_membership(self._merges, num_elts, num_elts - n)
membership = [idgen[m] for m in membership]
return VertexClustering(
self._graph, membership, modularity_params=self._modularity_params
)
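# A minimal usage sketch (assuming the standard igraph API; the Zachary
# karate club graph is only an example):
#
# >>> from igraph import Graph
# >>> dendrogram = Graph.Famous("Zachary").community_fastgreedy()
# >>> clusters = dendrogram.as_clustering()  # cut at the optimal level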
@property
def optimal_count(self):
"""Returns the optimal number of clusters for this dendrogram.
If an optimal count hint was given at construction time, this
property simply returns the hint. If such a count was not given,
this method calculates the optimal number of clusters by maximizing
the modularity along all the possible cuts in the dendrogram.
"""
if self._optimal_count is not None:
return self._optimal_count
n = self._graph.vcount()
max_q, optimal_count = 0, 1
for step in range(min(n - 1, len(self._merges))):
membs = community_to_membership(self._merges, n, step)
q = self._graph.modularity(membs, **self._modularity_params)
if q > max_q:
optimal_count = n - step
max_q = q
self._optimal_count = optimal_count
return optimal_count
@optimal_count.setter
def optimal_count(self, value):
self._optimal_count = max(int(value), 1)
def __plot__(self, context, bbox, palette, *args, **kwds):
"""Draws the vertex dendrogram on the given Cairo context
See L{Dendrogram.__plot__} for the list of supported keyword
arguments."""
from igraph.drawing.metamagic import AttributeCollectorBase
class VisualVertexBuilder(AttributeCollectorBase):
_kwds_prefix = "vertex_"
label = None
builder = VisualVertexBuilder(self._graph.vs, kwds)
self._names = [vertex.label for vertex in builder]
self._names = [
name if name is not None else str(idx)
for idx, name in enumerate(self._names)
]
result = Dendrogram.__plot__(self, context, bbox, palette, *args, **kwds)
del self._names
return result
###############################################################################
class Cover:
"""Class representing a cover of an arbitrary ordered set.
Covers are similar to clusterings, but each element of the set may
belong to more than one cluster in a cover, and elements not belonging
to any cluster are also allowed.
L{Cover} instances provide a similar API as L{Clustering} instances;
for instance, iterating over a L{Cover} will iterate over the clusters
just like with a regular L{Clustering} instance. However, they are not
derived from each other or from a common superclass, and there might
be functions that exist only in one of them or the other.
Clusters of an individual cover can be accessed by the C{[]} operator:
>>> cl = Cover([[0,1,2,3], [2,3,4], [0,1,6]])
>>> cl[0]
[0, 1, 2, 3]
The membership vector can be accessed by the C{membership} property.
Note that contrary to L{Clustering} instances, the membership vector
will contain lists that contain the cluster indices each item belongs
to:
>>> cl.membership
[[0, 2], [0, 2], [0, 1], [0, 1], [1], [], [2]]
The number of clusters can be retrieved by the C{len} function:
>>> len(cl)
3
You can iterate over the cover as if it were a regular list of
clusters:
>>> for cluster in cl:
... print(" ".join(str(idx) for idx in cluster))
...
0 1 2 3
2 3 4
0 1 6
If you need all the clusters at once as lists, you can simply convert
the cover to a list:
>>> cluster_list = list(cl)
>>> print(cluster_list)
[[0, 1, 2, 3], [2, 3, 4], [0, 1, 6]]
L{Clustering} objects can readily be converted to L{Cover} objects
using the constructor:
>>> clustering = Clustering([0, 0, 0, 0, 1, 1, 1, 2, 2, 2])
>>> cover = Cover(clustering)
>>> list(clustering) == list(cover)
True
"""
def __init__(self, clusters, n=0):
"""Constructs a cover with the given clusters.
@param clusters: the clusters in this cover, as a list or iterable.
Each cluster is specified by a list or tuple that contains the
IDs of the items in this cluster. IDs start from zero.
@param n: the total number of elements in the set that is covered
by this cover. If it is less than the number of unique elements
found in all the clusters, we will simply use the number of unique
elements, so it is safe to leave this at zero. You only have to
specify this parameter if there are some elements that are covered
by none of the clusters.
"""
self._clusters = [list(cluster) for cluster in clusters]
try:
self._n = max(max(cluster) + 1 for cluster in self._clusters if cluster)
except ValueError:
self._n = 0
self._n = max(n, self._n)
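# e.g. Cover([[0, 1]], n=4) covers four elements, two of which belong
# to no cluster:
#
# >>> Cover([[0, 1]], n=4).membership
# [[0], [0], [], []]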
def __getitem__(self, index):
"""Returns the cluster with the given index."""
return self._clusters[index]
def __iter__(self):
"""Iterates over the clusters in this cover."""
return iter(self._clusters)
def __len__(self):
"""Returns the number of clusters in this cover."""
return len(self._clusters)
def __str__(self):
"""Returns a string representation of the cover."""
return self.summary(verbosity=1, width=78)
@property
def membership(self):
"""Returns the membership vector of this cover.
The membership vector of a cover covering I{n} elements is a list of
length I{n}, where element I{i} contains the cluster indices of the
I{i}th item.
"""
result = [[] for _ in range(self._n)]
for idx, cluster in enumerate(self):
for item in cluster:
result[item].append(idx)
return result
@property
def n(self):
"""Returns the number of elements in the set covered by this cover."""
return self._n
def size(self, idx):
"""Returns the size of a given cluster.
@param idx: the cluster in which we are interested.
"""
return len(self[idx])
def sizes(self, *args):
"""Returns the size of given clusters.
The indices are given as positional arguments. If there are no
positional arguments, the function will return the sizes of all clusters.
"""
if args:
return [len(self._clusters[idx]) for idx in args]
return [len(cluster) for cluster in self]
def size_histogram(self, bin_width=1):
"""Returns the histogram of cluster sizes.
@param bin_width: the bin width of the histogram
@return: a L{Histogram} object
"""
return Histogram(bin_width, self.sizes())
def summary(self, verbosity=0, width=None):
"""Returns the summary of the cover.
The summary includes the number of items and clusters, and also the
list of members for each of the clusters if the verbosity is nonzero.
@param verbosity: determines whether the cluster members should be
printed. Zero verbosity prints the number of items and clusters only.
@return: the summary of the cover as a string.
"""
out = StringIO()
print("Cover with %d clusters" % len(self), file=out)
if verbosity < 1:
return out.getvalue().strip()
ndigits = len(str(len(self)))
wrapper = _get_wrapper_for_width(width, subsequent_indent=" " * (ndigits + 3))
for idx, cluster in enumerate(self._formatted_cluster_iterator()):
wrapper.initial_indent = "[%*d] " % (ndigits, idx)
print("\n".join(wrapper.wrap(cluster)), file=out)
return out.getvalue().strip()
def _formatted_cluster_iterator(self):
"""Iterates over the clusters and formats them into a string to be
presented in the summary."""
for cluster in self:
yield ", ".join(str(member) for member in cluster)
class VertexCover(Cover):
"""The cover of the vertex set of a graph.
This class extends L{Cover} by linking it to a specific L{Graph} object.
It also provides some handy methods like getting the subgraph corresponding
to a cluster and such.
@note: since this class is linked to a L{Graph}, destroying the graph by the
C{del} operator does not free the memory occupied by the graph if there
exists a L{VertexCover} that references the L{Graph}.
"""
def __init__(self, graph, clusters=None):
"""Creates a cover object for a given graph.
@param graph: the graph that will be associated to the cover
@param clusters: the list of clusters. If C{None}, it is assumed
that there is only a single cluster that covers the whole graph.
"""
if clusters is None:
clusters = [range(graph.vcount())]
Cover.__init__(self, clusters, n=graph.vcount())
if self._n > graph.vcount():
raise ValueError(
"cluster list contains vertex ID larger than the "
"number of vertices in the graph"
)
self._graph = graph
def crossing(self):
"""Returns a boolean vector where element M{i} is C{True} iff edge
M{i} lies between clusters, C{False} otherwise."""
membership = [frozenset(cluster) for cluster in self.membership]
return [
membership[v1].isdisjoint(membership[v2])
for v1, v2 in self.graph.get_edgelist()
]
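# A minimal sketch (hypothetical cover): for the cover [[0, 1], [1, 2]] on
# a triangle graph, only the edge (0, 2) crosses, because vertices 0 and 2
# share no cluster:
#
# >>> from igraph import Graph
# >>> VertexCover(Graph([(0, 1), (0, 2), (1, 2)]), [[0, 1], [1, 2]]).crossing()
# [False, True, False]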
@property
def graph(self):
"""Returns the graph belonging to this object"""
return self._graph
def subgraph(self, idx):
"""Get the subgraph belonging to a given cluster.
Precondition: the vertex set of the graph hasn't been modified since the
moment the cover was constructed.
@param idx: the cluster index
@return: a copy of the subgraph
"""
return self._graph.subgraph(self[idx])
def subgraphs(self):
"""Gets all the subgraphs belonging to each of the clusters.
Precondition: the vertex set of the graph hasn't been modified since the
moment the cover was constructed.
@return: a list containing copies of the subgraphs
"""
return [self._graph.subgraph(cl) for cl in self]
def __plot__(self, context, bbox, palette, *args, **kwds):
"""Plots the cover to the given Cairo context in the given
bounding box.
This is done by calling L{Graph.__plot__()} with the same arguments, but
drawing nice colored blobs around the vertex groups.
This method understands all the positional and keyword arguments that
are understood by L{Graph.__plot__()}, only the differences will be
highlighted here:
- C{mark_groups}: whether to highlight the vertex clusters by
colored polygons. Besides the values accepted by L{Graph.__plot__}
(i.e., a dict mapping colors to vertex indices, a list containing
lists of vertex indices, or C{False}), the following are also
accepted:
- C{True}: all the clusters will be highlighted, the colors matching
the corresponding color indices from the current palette
(see the C{palette} keyword argument of L{Graph.__plot__}.
- A dict mapping cluster indices or tuples of vertex indices to
color names. The given clusters or vertex groups will be
highlighted by the given colors.
- A list of cluster indices. This is equivalent to passing a
dict mapping numeric color indices from the current palette
to cluster indices; therefore, the cluster referred to by element
I{i} of the list will be highlighted by color I{i} from the
palette.
The value of the C{plotting.mark_groups} configuration key is also
taken into account here; if that configuration key is C{True} and
C{mark_groups} is not given explicitly, it will automatically be set
to C{True}.
In place of lists of vertex indices, you may also use L{VertexSeq}
instances.
In place of color names, you may also use color indices into the
current palette. C{None} as a color name will mean that the
corresponding group is ignored.
- C{palette}: the palette used to resolve numeric color indices to RGBA
values. By default, this is an instance of L{ClusterColoringPalette}.
@see: L{Graph.__plot__()} for more supported keyword arguments.
"""
if "edge_color" not in kwds and "color" not in self.graph.edge_attributes():
# Set up a default edge coloring based on internal vs external edges
colors = ["grey20", "grey80"]
kwds["edge_color"] = [
colors[is_crossing] for is_crossing in self.crossing()
]
if "palette" in kwds:
palette = kwds["palette"]
else:
palette = ClusterColoringPalette(len(self))
if "mark_groups" not in kwds:
if Configuration.instance()["plotting.mark_groups"]:
kwds["mark_groups"] = self
else:
kwds["mark_groups"] = _handle_mark_groups_arg_for_clustering(
kwds["mark_groups"], self
)
return self._graph.__plot__(context, bbox, palette, *args, **kwds)
def _formatted_cluster_iterator(self):
"""Iterates over the clusters and formats them into a string to be
presented in the summary."""
if self._graph.is_named():
names = self._graph.vs["name"]
for cluster in self:
yield ", ".join(str(names[member]) for member in cluster)
else:
for cluster in self:
yield ", ".join(str(member) for member in cluster)
class CohesiveBlocks(VertexCover):
"""The cohesive block structure of a graph.
Instances of this type are created by L{Graph.cohesive_blocks()}. See
the documentation of L{Graph.cohesive_blocks()} for an explanation of
what cohesive blocks are.
This class provides a few more methods that make handling of cohesive
block structures easier.
"""
def __init__(self, graph, blocks=None, cohesion=None, parent=None):
"""Constructs a new cohesive block structure for the given graph.
If any of I{blocks}, I{cohesion} or I{parent} is C{None}, all the
arguments will be ignored and L{Graph.cohesive_blocks()} will be
called to calculate the cohesive blocks. Otherwise, these three
variables should describe the *result* of a cohesive block structure
calculation. Chances are that you never have to construct L{CohesiveBlocks}
instances directly, just use L{Graph.cohesive_blocks()}.
@param graph: the graph itself
@param blocks: a list containing the blocks; each block is described
as a list containing vertex IDs.
@param cohesion: the cohesion of each block. The length of this list
must be equal to the length of I{blocks}.
@param parent: the parent block of each block. Negative values or
C{None} mean that there is no parent block for that block. There
should be exactly one such root block, which covers the entire graph.
@see: Graph.cohesive_blocks()
"""
if blocks is None or cohesion is None or parent is None:
blocks, cohesion, parent = graph.cohesive_blocks()
VertexCover.__init__(self, graph, blocks)
self._cohesion = cohesion
self._parent = parent
for idx, p in enumerate(self._parent):
if p < 0:
self._parent[idx] = None
def cohesion(self, idx):
"""Returns the cohesion of the group with the given index."""
return self._cohesion[idx]
def cohesions(self):
"""Returns the list of cohesion values for each group."""
return self._cohesion[:]
def hierarchy(self):
"""Returns a new graph that describes the hierarchical relationships
between the groups.
The new graph will be a directed tree; an edge will point from
vertex M{i} to vertex M{j} if group M{i} is a superset of group M{j}.
In other words, the edges point downwards.
"""
from igraph import Graph
edges = [
pair for pair in zip(self._parent, range(len(self))) if pair[0] is not None
]
return Graph(edges, directed=True)
def max_cohesion(self, idx):
"""Finds the maximum cohesion score among all the groups that contain
the given vertex."""
result = 0
for cohesion, cluster in zip(self._cohesion, self._clusters):
if idx in cluster:
result = max(result, cohesion)
return result
def max_cohesions(self):
"""For each vertex in the graph, returns the maximum cohesion score
among all the groups that contain the vertex."""
result = [0] * self._graph.vcount()
for cohesion, cluster in zip(self._cohesion, self._clusters):
for idx in cluster:
result[idx] = max(result[idx], cohesion)
return result
def parent(self, idx):
"""Returns the parent group index of the group with the given index
or C{None} if the given group is the root."""
return self._parent[idx]
def parents(self):
"""Returns the list of parent group indices for each group or C{None}
if the given group is the root."""
return self._parent[:]
def __plot__(self, context, bbox, palette, *args, **kwds):
"""Plots the cohesive block structure to the given Cairo context in
the given bounding box.
Since a L{CohesiveBlocks} instance is also a L{VertexCover}, keyword
arguments accepted by L{VertexCover.__plot__()} are also accepted here.
The only difference is that the vertices are colored according to their
maximal cohesions by default, and groups are marked by colored blobs
except the last group which encapsulates the whole graph.
See the documentation of L{VertexCover.__plot__()} for more details.
"""
prepare_groups = False
if "mark_groups" not in kwds:
if Configuration.instance()["plotting.mark_groups"]:
prepare_groups = True
elif kwds["mark_groups"] is True:
prepare_groups = True
if prepare_groups:
colors = [pair for pair in enumerate(self.cohesions()) if pair[1] > 1]
kwds["mark_groups"] = colors
if "vertex_color" not in kwds:
kwds["vertex_color"] = self.max_cohesions()
return VertexCover.__plot__(self, context, bbox, palette, *args, **kwds)
def _handle_mark_groups_arg_for_clustering(mark_groups, clustering):
"""Handles the mark_groups=... keyword argument in plotting methods of
clusterings.
This is an internal method, you shouldn't need to mess around with it.
Its purpose is to handle the extended semantics of the mark_groups=...
keyword argument in the C{__plot__} method of L{VertexClustering} and
L{VertexCover} instances, namely the feature that numeric IDs are resolved
to clusters automatically.
"""
# Handle the case of mark_groups = True, mark_groups containing a list or
# tuple of cluster IDs, and mark_groups yielding (cluster ID, color)
# pairs
if mark_groups is True:
group_iter = ((group, color) for color, group in enumerate(clustering))
elif isinstance(mark_groups, dict):
group_iter = mark_groups.items()
elif hasattr(mark_groups, "__getitem__") and hasattr(mark_groups, "__len__"):
# Lists, tuples
try:
first = mark_groups[0]
except Exception:
# Hmm. Maybe not a list or tuple?
first = None
if first is not None:
# Okay. Is the first element of the list a single number?
if isinstance(first, int):
# Yes. Seems like we have a list of cluster indices.
# Assign color indices automatically.
group_iter = ((group, color) for color, group in enumerate(mark_groups))
else:
# No. Seems like we have good ol' group-color pairs.
group_iter = mark_groups
else:
group_iter = mark_groups
elif hasattr(mark_groups, "__iter__"):
# Iterators etc
group_iter = mark_groups
else:
group_iter = {}.items()
def cluster_index_resolver():
for group, color in group_iter:
if isinstance(group, int):
group = clustering[group]
yield group, color
return cluster_index_resolver()
##############################################################
def _prepare_community_comparison(comm1, comm2, remove_none=False):
"""Auxiliary method that takes two community structures either as
membership lists or instances of L{Clustering}, and returns a
tuple whose two elements are membership lists.
This is used by L{compare_communities} and L{split_join_distance}.
@param comm1: the first community structure as a membership list or
as a L{Clustering} object.
@param comm2: the second community structure as a membership list or
as a L{Clustering} object.
@param remove_none: whether to remove C{None} entries from the membership
lists. If C{remove_none} is C{False}, a C{None} entry in either C{comm1}
or C{comm2} will result in an exception. If C{remove_none} is C{True},
C{None} values are filtered away and only the remaining lists are
compared.
"""
def _ensure_list(obj):
if isinstance(obj, Clustering):
return obj.membership
return list(obj)
vec1, vec2 = _ensure_list(comm1), _ensure_list(comm2)
if len(vec1) != len(vec2):
raise ValueError("the two membership vectors must be equal in length")
if remove_none and (None in vec1 or None in vec2):
idxs_to_remove = [
i for i in range(len(vec1)) if vec1[i] is None or vec2[i] is None
]
idxs_to_remove.reverse()
n = len(vec1)
for i in idxs_to_remove:
n -= 1
vec1[i], vec1[n] = vec1[n], vec1[i]
vec2[i], vec2[n] = vec2[n], vec2[i]
del vec1[n:]
del vec2[n:]
return vec1, vec2
def compare_communities(comm1, comm2, method="vi", remove_none=False):
"""Compares two community structures using various distance measures.
@param comm1: the first community structure as a membership list or
as a L{Clustering} object.
@param comm2: the second community structure as a membership list or
as a L{Clustering} object.
@param method: the measure to use. C{"vi"} or C{"meila"} means the
variation of information metric of Meila (2003), C{"nmi"} or C{"danon"}
means the normalized mutual information as defined by Danon et al (2005),
C{"split-join"} means the split-join distance of van Dongen (2000),
C{"rand"} means the Rand index of Rand (1971), C{"adjusted_rand"}
means the adjusted Rand index of Hubert and Arabie (1985).
@param remove_none: whether to remove C{None} entries from the membership
lists. This is handy if your L{Clustering} object was constructed using
L{VertexClustering.FromAttribute} using an attribute which was not defined
for all the vertices. If C{remove_none} is C{False}, a C{None} entry in
either C{comm1} or C{comm2} will result in an exception. If C{remove_none}
is C{True}, C{None} values are filtered away and only the remaining lists
are compared.
@return: the calculated measure.
@newfield ref: Reference
@ref: Meila M: Comparing clusterings by the variation of information.
In: Scholkopf B, Warmuth MK (eds). Learning Theory and Kernel
Machines: 16th Annual Conference on Computational Learning Theory
and 7th Kernel Workshop, COLT/Kernel 2003, Washington, DC, USA.
Lecture Notes in Computer Science, vol. 2777, Springer, 2003.
ISBN: 978-3-540-40720-1.
@ref: Danon L, Diaz-Guilera A, Duch J, Arenas A: Comparing community
structure identification. J Stat Mech P09008, 2005.
@ref: van Dongen D: Performance criteria for graph clustering and Markov
cluster experiments. Technical Report INS-R0012, National Research
Institute for Mathematics and Computer Science in the Netherlands,
Amsterdam, May 2000.
@ref: Rand WM: Objective criteria for the evaluation of clustering
methods. J Am Stat Assoc 66(336):846-850, 1971.
@ref: Hubert L and Arabie P: Comparing partitions. Journal of
Classification 2:193-218, 1985.
"""
import igraph._igraph
vec1, vec2 = _prepare_community_comparison(comm1, comm2, remove_none)
return igraph._igraph._compare_communities(vec1, vec2, method)
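# A minimal usage sketch (hypothetical membership vectors); the variation
# of information is zero for identical partitions up to relabeling:
#
# >>> compare_communities([0, 0, 1, 1], [1, 1, 0, 0], method="vi")
# 0.0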
def split_join_distance(comm1, comm2, remove_none=False):
"""Calculates the split-join distance between two community structures.
The split-join distance is a distance measure defined on the space of
partitions of a given set. It is the sum of the projection distance of
one partition from the other and vice versa, where the projection
number of A from B is calculated as follows:
1. For each set in A, find the set in B with which it has the
maximal overlap, and take note of the size of the overlap.
2. Take the sum of the maximal overlap sizes for each set in A.
3. Subtract the sum from M{n}, the number of elements in the
partition.
Note that the projection distance is asymmetric, that's why it has to be
calculated in both directions and then added together. This function
returns the projection distance of C{comm1} from C{comm2} and the
projection distance of C{comm2} from C{comm1}, and returns them in a pair.
The actual split-join distance is the sum of the two distances. The reason
why it is presented this way is that one of the elements being zero
implies that one of the partitions is a subpartition of the other (and if
it is close to zero, then one of the partitions is close to being a
subpartition of the other).
@param comm1: the first community structure as a membership list or
as a L{Clustering} object.
@param comm2: the second community structure as a membership list or
as a L{Clustering} object.
@param remove_none: whether to remove C{None} entries from the membership
lists. This is handy if your L{Clustering} object was constructed using
L{VertexClustering.FromAttribute} using an attribute which was not defined
for all the vertices. If C{remove_none} is C{False}, a C{None} entry in
either C{comm1} or C{comm2} will result in an exception. If C{remove_none}
is C{True}, C{None} values are filtered away and only the remaining lists
are compared.
@return: the projection distance of C{comm1} from C{comm2} and vice versa
in a tuple. The split-join distance is the sum of the two.
@newfield ref: Reference
@ref: van Dongen D: Performance criteria for graph clustering and Markov
cluster experiments. Technical Report INS-R0012, National Research
Institute for Mathematics and Computer Science in the Netherlands,
Amsterdam, May 2000.
@see: L{compare_communities()} with C{method = "split-join"} if you are
not interested in the individual projection distances but only the
sum of them.
"""
import igraph._igraph
vec1, vec2 = _prepare_community_comparison(comm1, comm2, remove_none)
return igraph._igraph._split_join_distance(vec1, vec2)
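# A minimal usage sketch (hypothetical vectors): the second partition below
# refines the first, so its projection distance from the first is zero:
#
# >>> split_join_distance([0, 0, 0, 0], [0, 0, 1, 1])
# (2, 0)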
|
igraph/python-igraph
|
src/igraph/clustering.py
|
Python
|
gpl-2.0
| 65,054
|
#!/usr/bin/env python3
"""
Decode URL query strings.
"""
import argparse
import glob
import os
import signal
import sys
import urllib.parse
from typing import List
class Options:
"""
Options class
"""
def __init__(self) -> None:
self._args: argparse.Namespace = None
self.parse(sys.argv)
def get_urls(self) -> List[str]:
"""
Return list of URLs.
"""
return self._args.urls
def _parse_args(self, args: List[str]) -> None:
parser = argparse.ArgumentParser(
description="Decode URL query strings.",
)
parser.add_argument(
'urls',
nargs='+',
metavar='url',
help="URL query string.",
)
self._args = parser.parse_args(args)
def parse(self, args: List[str]) -> None:
"""
Parse arguments
"""
self._parse_args(args[1:])
class Main:
"""
Main class
"""
def __init__(self) -> None:
try:
self.config()
sys.exit(self.run())
except (EOFError, KeyboardInterrupt):
sys.exit(114)
except SystemExit as exception:
sys.exit(exception)
sys.exit(0)
@staticmethod
def config() -> None:
"""
Configure program
"""
if hasattr(signal, 'SIGPIPE'):
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
if os.name == 'nt':
argv = []
for arg in sys.argv:
files = glob.glob(arg) # Fixes Windows globbing bug
if files:
argv.extend(files)
else:
argv.append(arg)
sys.argv = argv
@staticmethod
def run() -> int:
"""
Start program
"""
options = Options()
for url in options.get_urls():
print(urllib.parse.unquote(url))
return 0
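# A usage sketch (hypothetical query string):
#
#     $ urldecode hello%20world%3Fx%3D1
#     hello world?x=1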
if __name__ == '__main__':
if '--pydoc' in sys.argv:
help(__name__)
else:
Main()
|
drtuxwang/system-config
|
bin/urldecode.py
|
Python
|
gpl-2.0
| 2,063
|
#! /usr/bin/env python
# -*- coding: latin-1 -*-
# Copyright (C) 2006 Universitat Pompeu Fabra
#
# Permission is hereby granted to distribute this software for
# non-commercial research purposes, provided that this copyright
# notice is included with any such distribution.
#
# THIS SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
# EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
# SOFTWARE IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU
# ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
#
# Elaborated by Hector Palacios, hlp@ldc.usb.ve, hectorpal@gmail.com
# Joint work with Hector Geffner.
import sys
import os
name = "ring"
init_certainty = False
deterministic = False
def usage():
print """
usage: %s <n> {det|nondet} {ui}
generates files <x>-%s-<n>/d.pddl and <x>-%s-<n>/p.pddl
(where <x> is the 2nd parameter: det or nondet)
for the deterministic (det) or non-deterministic (nondet) ring
problem with <n> rooms
(as used by the MBP and KACMBP conformant planners)
ui => total uncertainty at initial state
otherwise windows are open and locked
""" % (sys.argv[0], name, name)
sys.exit(1)
if len(sys.argv) < 3 or len(sys.argv) > 4:
usage()
if sys.argv[2] == 'det':
deterministic = True
name = "det-"+name
elif sys.argv[2] == 'nondet':
deterministic = False
name = "nondet-"+name
else:
print 'Error on 2nd parameter'
usage()
if len(sys.argv) == 4:
if sys.argv[3] == 'ui':
init_certainty = True
else:
print 'Error on last parameter'
usage()
n = int(sys.argv[1])
if init_certainty:
name_p="%s-ui-%d" % (name, n)
else:
name_p="%s-%d" % (name, n)
path=name_p
os.system("/bin/rm -rf ./"+path)
os.makedirs(path)
problem = file(path + "/p.pddl", "w")
domain = file(path + "/d.pddl", "w")
limit = n+1
def next(i):
if i < n:
return i+1
else:
return next(i-n)
def prev(i):
if i > 1:
return i-1
else:
return prev(i+n)
# Axiom: windows can change by themselves
def window_can_change():
global deterministic
if deterministic:
return ""
r=""
for i in range(1,limit):
r += """
(when (and (not (locked w%s)) (not (pos w%s)))
(oneof (closed w%s) (not (closed w%s))))
""" % (i,i,i,i)
return r
print >> domain, """
(define (domain ring)
(:requirements :typing :equality)
(:types window)
(:constants """,
for i in range(1,limit):
print >> domain, ("w%s " % i),
print >> domain, """- window)
(:predicates
(pos ?w - window)
(closed ?w - window)
(locked ?w - window)
)
(:action fwd
:effect
(and""",
for i in range(1,limit):
print >> domain, """
(when (pos w%s) (and (pos w%s) (not (pos w%s)))) """ %(i,next(i),i),
print >> domain, window_can_change(),
print >> domain, """
)
)
(:action bwd
:effect
(and""",
for i in range(1,limit):
print >> domain, """
(when (pos w%s) (and (pos w%s) (not (pos w%s)))) """ %(i,prev(i),i),
print >> domain, window_can_change(),
print >> domain, """
)
)
(:action open
:effect
(and """,
for i in range(1,limit):
print >> domain, """
(when (and (pos w%s) (not (locked w%s))) (not (closed w%s))) """ %(i,i,i),
print >> domain, window_can_change(),
print >> domain, """
)
)
(:action close
:effect
(and """,
for i in range(1,limit):
print >> domain, """
(when (and (pos w%s) (not (locked w%s))) (closed w%s)) """ %(i,i,i),
print >> domain, window_can_change(),
print >> domain, """
)
)
(:action lock
:effect
(and""",
for i in range(1,limit):
print >> domain, """
(when (pos w%s) (locked w%s)) """ %(i,i),
print >> domain, window_can_change(),
print >> domain, """
)
)
(:action unlock
:effect
(and""",
for i in range(1,limit):
print >> domain, """
(when (pos w%s) (not (locked w%s))) """ %(i,i),
print >> domain, window_can_change(),
print >> domain, """
)
)
)
"""
print >> problem, """
(define (problem ring-p-%s)
(:domain ring-d-%s)
(:init
(and """ % (n,n),
if init_certainty:
for i in range(1,limit):
print >> problem, """
(or (not (locked w%s)) (locked w%s))
(or (not (closed w%s)) (closed w%s)) """ % (i,i,i,i),
else:
for i in range(1,limit):
print >> problem, """
(locked w%s)
;(not (closed w%s)) """ % (i,i),
print >> problem, """
(oneof""",
for i in range(1,limit):
print >> problem, " (pos w%s)" % i,
print >> problem, """)
)
)
""",
print >> problem, """
(:goal (and """,
for i in range(1,limit):
print >> problem, """
;(locked w%s)
(closed w%s)""" % (i,i),
print >> problem, """
)
)
)
"""
|
PlanTool/plantool
|
code/Uncertainty/T0/translator/generators/m-ring-mbp.py
|
Python
|
gpl-2.0
| 5,086
|
# -*- coding: utf-8 -*-
#Library from Google
from google.appengine.api import users
from google.appengine.api import urlfetch
from google.appengine.api import images
#Library System
import datetime
import cgi
import os
import sys
import logging
import urllib
import re
#Library
from manager.utils.utils import *
#Models for application
from manager.model.page import *
from manager.libs.sessions import *
def CategoryRootExist(category_short):
categories = PageCategory.query(PageCategory.category_parent == None, PageCategory.name_short == category_short).get()
if categories:
return True
else:
return False
def CategoryChildExist(category_parent,category_short):
catparentid = category_parent.key.id()
categories = PageCategory.query(PageCategory.name_short == category_short)
# Scan every candidate with this short name; it only exists as a child
# if one of them has the same parent category.
for c in categories:
    if c.category_parent is not None and catparentid == c.category_parent.id():
        return True
return False
class CategoryView(BaseHandler):
def get(self):
email = users.get_current_user().email()
userID = users.get_current_user().user_id()
categories_array = []
categories = PageCategory.query(ancestor=site_key()).filter(PageCategory.category_parent == None).order(PageCategory.name_short)
for c in categories:
if c.name_short != 'empty':
subcategories = PageCategory.query(PageCategory.category_parent == c.key)
categories_array.append({'category':c,'subcategories':subcategories})
template_values = {
'titlepopup': 'Categorias',
'categories': categories_array
}
template_name = '/manager/view/categorypopup.html'
self.renderTemplate(template_name,template_values)
class CategoryEdit(BaseHandler):
def get(self):
key = cgi.escape(self.request.get('key'))
if not key:
categories = model.page.PageCategory.all()
categories.filter('category_parent =', None)
categories.order('name_short')
totalcategories = categories.count()
categories = categories.fetch(totalcategories)
template_values = {
'titlepopup': 'Categorias',
'categories': categories
}
path = os.path.join(os.curdir, '..', '..','view','admin','categorypopup_view.html')
self.response.out.write(template.render(path, template_values))
return
category = model.page.PageCategory.get(key)
template_values = {
'titlepopup': 'Categorias',
'category': category
}
template_name = '/view/admin/categorypopup_edit.html'
self.renderTemplate(template_name,template_values)
def post(self):
name = cgi.escape(self.request.get('category'))
summary = cgi.escape(self.request.get('summary'))
key = cgi.escape(self.request.get('key'))
catexist = model.page.CategoryExist(name)
if catexist:
category = model.page.PageCategory.get(key)
category.summary = summary
category.put()
self.response.out.write(u'<p>Guardado el resumen <a href="/admin/category/edit">Regresar</a> </p>')
return
category = model.page.PageCategory.get(key)
category.name = validstringName(name)
category_valida = model.page.namecategory_short(name)
category.name_short = category_valida
category.summary = summary
category.put()
self.response.out.write(u'<p>Guardado <a href="/admin/category/edit">Regresar</a> </p>')
class CategoryPost(BaseHandler):
def post(self, action):
email = users.get_current_user().email()
userID = users.get_current_user().user_id()
if action == 'add':
category = cgi.escape(self.request.get('category'))
parent = cgi.escape(self.request.get('categories'))
parentbool = cgi.escape(self.request.get('parentbool'))
if category:
if not parentbool == 'parent':
category_valida = short_text(category)
boolCatExist = CategoryRootExist(category_valida)
if boolCatExist:
self.response.write('La categoría existe <a href="/admin/category/view">Volver</a>')
else:
idcategory = GenId()
category = PageCategory(
name = validstringName(category),
name_short = category_valida,
idcategory = idcategory,
date_publication = datetime.datetime.utcnow(),
userID = users.get_current_user().user_id(),
category_parent = None,
parent = site_key()
)
category.put()
self.redirect('/admin/category/view')
else:
if not parent:
self.response.write('No existen categorías padres creadas <a href="/admin/category/view">Volver</a>')
return
categoryparent = PageCategory.get_by_id(int(parent),parent=site_key())
category_valida = short_text(category)
boolCatExist = CategoryChildExist(categoryparent,category_valida)
if boolCatExist:
self.response.out.write('La categoría existe <a href="/admin/category/view">Volver</a>')
else:
idcategory = GenId()
category = PageCategory(
name = validstringName(category),
name_short = category_valida,
idcategory = idcategory,
date_publication = datetime.datetime.utcnow(),
userID = users.get_current_user().user_id(),
category_parent = categoryparent.key,
parent = site_key(),
)
category.put()
self.redirect('/admin/category/view')
else:
self.response.write('Error ingresando la categoría <a href="/admin/category/view">Volver</a>')
app = webapp2.WSGIApplication([
RedirectRoute('/admin/category/view',CategoryView,name="categoryviewadmin",strict_slash=True),
RedirectRoute('/admin/category/edit',CategoryEdit,name="categoryeditadmin",strict_slash=True),
RedirectRoute('/admin/category/<action>',CategoryPost,name="categorypostdmin",strict_slash=True)
],debug=debugconfig,config=confighandler)
|
Ivebo/Ivebo-Sites
|
manager/controller/category.py
|
Python
|
gpl-2.0
| 7,310
|
"""
Example using MySQL Connector/Python showing:
* sending multiple statements and iterating over the results
"""
import asyncio
import mysql_async.connector
@asyncio.coroutine
def main(config):
output = []
db = mysql_async.connector.Connect(**config)
yield from db.connect()
cursor = yield from db.cursor()
# Drop table if exists, and create it new
stmt_drop = "DROP TABLE IF EXISTS names"
yield from cursor.execute(stmt_drop)
stmt_create = (
"CREATE TABLE names ("
" id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT, "
" name VARCHAR(30) DEFAULT '' NOT NULL, "
" info TEXT , "
" age TINYINT UNSIGNED DEFAULT '30', "
" PRIMARY KEY (id))"
)
yield from cursor.execute(stmt_create)
info = "abc" * 10000
stmts = [
"INSERT INTO names (name) VALUES ('Geert')",
"SELECT COUNT(*) AS cnt FROM names",
"INSERT INTO names (name) VALUES ('Jan'),('Michel')",
"SELECT name FROM names",
]
# Note 'multi=True' when calling cursor.execute()
# Use next_exec_result() to get each result.
result = yield from cursor.execute(' ; '.join(stmts), multi=True)
while True:
if result.with_rows:
if result.statement == stmts[3]:
output.append("Names in table: " +
' '.join([name[0] for name in (yield from result.fetchall())]))
else:
rd = yield from result.fetchone()
output.append(
"Number of rows: {0}".format(rd[0]))
else:
output.append("Inserted {0} row{1}".format(
result.rowcount, 's' if result.rowcount > 1 else ''))
if not (yield from result.next_exec_result()):
break
yield from cursor.execute(stmt_drop)
yield from cursor.close()
db.close()
print(output)
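# With an empty 'names' table, the loop above collects roughly:
# ['Inserted 1 row', 'Number of rows: 1', 'Inserted 2 rows',
#  'Names in table: Geert Jan Michel']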
if __name__ == '__main__':
config = {
'unix_socket': '/var/run/mysqld/mysqld.sock',
'database': 'test',
'user': 'user1',
'password': 'user1',
'charset': 'utf8',
'use_unicode': True,
'get_warnings': True,
}
loop = asyncio.get_event_loop()
loop.run_until_complete(main(config))
|
netpie/mysql-connector-python-async
|
example/multi_resualtsets.py
|
Python
|
gpl-2.0
| 2,259
|
# Adding the modules from the main path
import os, sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
# Most important tools are assertion tests via `eq_`
from nose.tools import *
# Mock objects to allow for unit tests in complex environments
import mock
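# A minimal sketch of how these imports are typically used in a test
# module (hypothetical tests):
#
# def test_addition():
#     eq_(1 + 1, 2)
#
# def test_with_mock():
#     fake = mock.Mock(return_value=42)
#     eq_(fake(), 42)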
|
gsec/eZchat
|
tests/test_tools.py
|
Python
|
gpl-2.0
| 269
|
# -*- coding: utf-8 -*-
import os
import re
import traceback
import xml.etree.ElementTree as ET
import xbmc
import xbmcgui
import xbmcvfs
# Import the common settings
from settings import Settings
from settings import log
from settings import os_path_join
from settings import os_path_split
from settings import dir_exists
from VideoParser import VideoParser
########################################################
# Class to store all the details for a given extras file
########################################################
class BaseExtrasItem():
def __init__(self, directory, filename, isFileMatchExtra=False, defaultFanArt=""):
self.directory = directory
self.filename = filename
self.plot = None
# Setup the icon and thumbnail images
self.thumbnailImage = ""
self.iconImage = ""
# Set the default fanart - this will be over-written if there is a better one
self.fanart = defaultFanArt
self._loadImages(filename)
self.duration = None
# Record if the match was by filename rather than in Extras sub-directory
self.isFileMatchingExtra = isFileMatchExtra
# Check if there is an NFO file to process
if not self._loadNfoInfo(filename):
# Get the ordering and display details from the filename
(self.orderKey, self.displayName) = self._generateOrderAndDisplay(filename)
# eq and lt defined for sorting order only
def __eq__(self, other):
if other is None:
return False
# Check key, display then filename - all need to be the same for equals
return ((self.orderKey, self.displayName, self.directory, self.filename, self.isFileMatchingExtra) ==
(other.orderKey, other.displayName, other.directory, other.filename, other.isFileMatchingExtra))
def __lt__(self, other):
# Order in key, display then filename
return ((self.orderKey, self.displayName, self.directory, self.filename, self.isFileMatchingExtra) <
(other.orderKey, other.displayName, other.directory, other.filename, other.isFileMatchingExtra))
# Returns the name to display to the user for the file
def getDisplayName(self):
# Update the display name to allow for the ":" character in the name
# (stored in the filename as the HTML escape "&#58;")
return self.displayName.replace(".sample", "").replace("&#58;", ":")
# Return the filename for the extra
def getFilename(self):
return self.filename
# Gets the file that needs to be passed to the player
def getMediaFilename(self):
# Check to see if the filename actually holds a directory
# If that is the case, we will only support it being a DVD Directory Image
# So check to see if the expected file is set
vobFile = self.getVOBFile()
if vobFile is not None:
return vobFile
return self.filename
# Gets the path to the VOB playable file, or None if not a VOB
def getVOBFile(self):
# Check to see if the filename actually holds a directory
# If that is the case, we will only support it being a DVD Directory Image
# So check to see if the expected file is set
videoTSDir = os_path_join(self.filename, 'VIDEO_TS')
if dir_exists(videoTSDir):
ifoFile = os_path_join(videoTSDir, 'VIDEO_TS.IFO')
if xbmcvfs.exists(ifoFile):
return ifoFile
# Also check for BluRay
videoBluRayDir = os_path_join(self.filename, 'BDMV')
if dir_exists(videoBluRayDir):
dbmvFile = os_path_join(videoBluRayDir, 'index.bdmv')
if xbmcvfs.exists(dbmvFile):
return dbmvFile
return None
# Compare if a filename matches the existing one
def isFilenameMatch(self, compareToFilename):
srcFilename = self.filename
tgtFilename = compareToFilename
try:
srcFilename = srcFilename.decode("utf-8")
except:
pass
try:
tgtFilename = tgtFilename.decode("utf-8")
except:
pass
if srcFilename == tgtFilename:
return True
return False
def getDirectory(self):
return self.directory
def isFileMatchExtra(self):
return self.isFileMatchingExtra
def getOrderKey(self):
return self.orderKey
def getPlot(self):
return self.plot
def getThumbnailImage(self):
return self.thumbnailImage
def getIconImage(self):
return self.iconImage
def getFanArt(self):
return self.fanart
# Returns the duration in seconds
def getDuration(self):
if self.duration is None:
try:
# Parse the video file for the duration
self.duration = VideoParser().getVideoLength(self.filename)
log("BaseExtrasItem: Duration retrieved is = %d" % self.duration)
except:
log("BaseExtrasItem: Failed to get duration from %s" % self.filename)
log("BaseExtrasItem: %s" % traceback.format_exc())
self.duration = 0
return self.duration
def getDisplayDuration(self, forcedDuration=0):
durationInt = forcedDuration
if forcedDuration < 1:
durationInt = self.getDuration()
displayDuration = ""
seconds = 0
minutes = 0
hours = 0
# Convert the duration into a viewable format
if durationInt > 0:
seconds = durationInt % 60
if durationInt > 60:
minutes = ((durationInt - seconds) % 3600) / 60
# Default the display to MM:SS
displayDuration = "%02d:%02d" % (minutes, seconds)
# Only add the hours if really needed
if durationInt > 3600:
hours = (durationInt - (minutes * 60) - seconds) / 3600
displayDuration = "%02d:%s" % (hours, displayDuration)
# Return the formatted duration string
return displayDuration
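# Worked examples for the conversion above: getDisplayDuration(65)
# gives "01:05", while getDisplayDuration(3725) gives "01:02:05"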
# Load the Correct set of images for icons and thumbnails
# Image options are
# (NFO - Will overwrite these values)
# <filename>.tbn/jpg
# <filename>-poster.jpg (Auto picked up by player)
# <filename>-thumb.jpg
# poster.jpg
# folder.jpg
#
# See:
# http://wiki.xbmc.org/?title=Thumbnails
# http://wiki.xbmc.org/index.php?title=Frodo_FAQ#Local_images
def _loadImages(self, filename):
imageList = []
# Find out the name of the image files
fileNoExt = os.path.splitext(filename)[0]
# Start by searching for the filename match
fileNoExtImage = self._loadImageFile(fileNoExt)
if fileNoExtImage != "":
imageList.append(fileNoExtImage)
# Check for -poster added to the end
fileNoExtImage = self._loadImageFile(fileNoExt + "-poster")
if fileNoExtImage != "":
imageList.append(fileNoExtImage)
if len(imageList) < 2:
# Check for -thumb added to the end
fileNoExtImage = self._loadImageFile(fileNoExt + "-thumb")
if fileNoExtImage != "":
imageList.append(fileNoExtImage)
if len(imageList) < 2:
# Check for poster.jpg
fileDir = os_path_join(self.directory, "poster")
fileNoExtImage = self._loadImageFile(fileDir)
if fileNoExtImage != "":
imageList.append(fileNoExtImage)
if len(imageList) < 2:
# Check for folder.jpg
fileDir = os_path_join(self.directory, "folder")
fileNoExtImage = self._loadImageFile(fileDir)
if fileNoExtImage != "":
imageList.append(fileNoExtImage)
# Set the first one to the thumbnail, and the second to the icon
if len(imageList) > 0:
self.thumbnailImage = imageList[0]
if len(imageList) > 1:
self.iconImage = imageList[1]
# Now check for the fanart
# Check for -fanart added to the end
fileNoExtImage = self._loadImageFile(fileNoExt + "-fanart")
if fileNoExtImage != "":
self.fanart = fileNoExtImage
else:
# Check for fanart.jpg
fileDir = os_path_join(self.directory, "fanart")
fileNoExtImage = self._loadImageFile(fileDir)
if fileNoExtImage != "":
self.fanart = fileNoExtImage
# Searched for a given image name under different extensions
def _loadImageFile(self, fileNoExt):
if xbmcvfs.exists(fileNoExt + ".tbn"):
return fileNoExt + ".tbn"
if xbmcvfs.exists(fileNoExt + ".png"):
return fileNoExt + ".png"
if xbmcvfs.exists(fileNoExt + ".jpg"):
return fileNoExt + ".jpg"
return ""
# Parses the filename to work out the display name and order key
def _generateOrderAndDisplay(self, filename):
# First thing is to trim the display name from the filename
# Get just the filename, don't need the full path
displayName = os_path_split(filename)[1]
# Remove the file extension (e.g .avi)
displayName = os.path.splitext(displayName)[0]
# Remove anything before the -extras- tag (if it exists)
extrasTag = Settings.getExtrasFileTag()
if (extrasTag != "") and (extrasTag in displayName):
justDescription = displayName.split(extrasTag, 1)[1]
if len(justDescription) > 0:
displayName = justDescription
result = (displayName, displayName)
# Search for the order which will be written as [n]
# Followed by the display name
match = re.search(r"^\[(?P<order>.+)\](?P<Display>.*)", displayName)
if match:
orderKey = match.group('order')
if orderKey != "":
result = (orderKey, match.group('Display'))
return result
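# e.g. a file named "[2] Deleted Scenes.mkv" (hypothetical) yields the
# order key "2" and the display name " Deleted Scenes", assuming no
# -extras- tag is configured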
# Check for an NFO file for this video and reads details out of it
# if it exists
def _loadNfoInfo(self, filename):
# Find out the name of the NFO file
nfoFileName = os.path.splitext(filename)[0] + ".nfo"
log("BaseExtrasItem: Searching for NFO file: %s" % nfoFileName)
# Return False if file does not exist
if not xbmcvfs.exists(nfoFileName):
log("BaseExtrasItem: No NFO file found: %s" % nfoFileName)
return False
returnValue = False
try:
# Need to first load the contents of the NFO file into
# a string, this is because the XML File Parse option will
# not handle formats like smb://
nfoFile = xbmcvfs.File(nfoFileName, 'r')
nfoFileStr = nfoFile.read()
nfoFile.close()
# Create an XML parser
try:
nfoXml = ET.ElementTree(ET.fromstring(nfoFileStr))
except:
log("BaseExtrasItem: Trying encoding to UTF-8 with ignore")
nfoXml = ET.ElementTree(ET.fromstring(nfoFileStr.decode("UTF-8", 'ignore')))
rootElement = nfoXml.getroot()
log("BaseExtrasItem: Root element is = %s" % rootElement.tag)
# Check which format if being used
if rootElement.tag == "movie":
log("BaseExtrasItem: Movie format NFO detected")
# <movie>
# <title>Who knows</title>
# <sorttitle>Who knows 1</sorttitle>
# </movie>
# Get the title
self.displayName = nfoXml.findtext('title')
# Get the sort key
self.orderKey = nfoXml.findtext('sorttitle')
# Get the plot
self.plot = nfoXml.findtext('plot')
elif rootElement.tag == "tvshow":
log("BaseExtrasItem: TvShow format NFO detected")
# <tvshow>
# <title>Who knows</title>
# <sorttitle>Who knows 1</sorttitle>
# </tvshow>
# Get the title
self.displayName = nfoXml.findtext('title')
# Get the sort key
self.orderKey = nfoXml.findtext('sorttitle')
# Get the plot
self.plot = nfoXml.findtext('plot')
elif rootElement.tag == "episodedetails":
log("BaseExtrasItem: TvEpisode format NFO detected")
# <episodedetails>
# <title>Who knows</title>
# <season>2</season>
# <episode>1</episode>
# </episodedetails>
# Get the title
self.displayName = nfoXml.findtext('title')
# Get the plot
self.plot = nfoXml.findtext('plot')
# Get the sort key
season = nfoXml.findtext('season')
episode = nfoXml.findtext('episode')
# Need to use the season and episode to order the list
if (season is None) or (season == ""):
season = "0"
if (episode is None) or (episode == ""):
episode = "0"
self.orderKey = "%02d_%02d" % (int(season), int(episode))
else:
self.displayName = None
self.orderKey = None
log("BaseExtrasItem: Unknown NFO format")
# Now get the thumbnail - always called the same regardless of
            # movie or TV
thumbnail = self._getNfoThumb(nfoXml)
if thumbnail is not None:
self.thumbnailImage = thumbnail
# Now get the fanart - always called the same regardless of
            # movie or TV
fanart = self._getNfoFanart(nfoXml)
if (fanart is not None) and (fanart != ""):
self.fanart = fanart
del nfoXml
if (self.displayName is not None) and (self.displayName != ""):
returnValue = True
# If there is no order specified, use the display name
if (self.orderKey is None) or (self.orderKey == ""):
self.orderKey = self.displayName
log("BaseExtrasItem: Using sort key %s for %s" % (self.orderKey, self.displayName))
except:
log("BaseExtrasItem: Failed to process NFO: %s" % nfoFileName, xbmc.LOGERROR)
log("BaseExtrasItem: %s" % traceback.format_exc(), xbmc.LOGERROR)
returnValue = False
return returnValue
# Sets the title for a given extras file
def setTitle(self, newTitle, isTV=False):
log("BaseExtrasItem: Setting title to %s" % newTitle)
self.displayName = newTitle
# Find out the name of the NFO file
nfoFileName = os.path.splitext(self.filename)[0] + ".nfo"
log("BaseExtrasItem: Searching for NFO file: %s" % nfoFileName)
try:
nfoFileStr = None
newNfoRequired = False
if xbmcvfs.exists(nfoFileName):
# Need to first load the contents of the NFO file into
# a string, this is because the XML File Parse option will
# not handle formats like smb://
nfoFile = xbmcvfs.File(nfoFileName, 'r')
nfoFileStr = nfoFile.read()
nfoFile.close()
# Check to ensure we have some NFO data
if (nfoFileStr is None) or (nfoFileStr == ""):
# Create a default NFO File
# Need to create a new file if one does not exist
log("BaseExtrasItem: No NFO file found, creating new one: %s" % nfoFileName)
tagType = 'movie'
if isTV:
tagType = 'tvshow'
nfoFileStr = ("<%s>\n <title> </title>\n</%s>\n" % (tagType, tagType))
newNfoRequired = True
# Create an XML parser
try:
nfoXml = ET.ElementTree(ET.fromstring(nfoFileStr))
except:
log("BaseExtrasItem: Trying encoding to UTF-8 with ignore")
nfoXml = ET.ElementTree(ET.fromstring(nfoFileStr.decode("UTF-8", 'ignore')))
# Get the title element
titleElement = nfoXml.find('title')
# Make sure the title exists in the file
if titleElement is None:
log("BaseExtrasItem: title element not found")
return False
# Set the title to the new value
titleElement.text = newTitle
# Only set the sort title if already set
sorttitleElement = nfoXml.find('sorttitle')
if sorttitleElement is not None:
sorttitleElement.text = newTitle
# Save the file back to the filesystem
newNfoContent = ET.tostring(nfoXml.getroot(), encoding="UTF-8")
del nfoXml
nfoFile = xbmcvfs.File(nfoFileName, 'w')
try:
nfoFile.write(newNfoContent)
except:
log("BaseExtrasItem: Failed to write NFO: %s" % nfoFileName, xbmc.LOGERROR)
log("BaseExtrasItem: %s" % traceback.format_exc(), xbmc.LOGERROR)
# Make sure we close the file handle
nfoFile.close()
                # If there was no file before, make sure we delete any partial file
if newNfoRequired:
xbmcvfs.delete(nfoFileName)
return False
nfoFile.close()
except:
log("BaseExtrasItem: Failed to write NFO: %s" % nfoFileName, xbmc.LOGERROR)
log("BaseExtrasItem: %s" % traceback.format_exc(), xbmc.LOGERROR)
return False
return True
# Sets the title for a given extras file
def setPlot(self, newPlot, isTV=False):
log("BaseExtrasItem: Setting plot to %s" % newPlot)
self.plot = newPlot
# Find out the name of the NFO file
nfoFileName = os.path.splitext(self.filename)[0] + ".nfo"
log("BaseExtrasItem: Searching for NFO file: %s" % nfoFileName)
try:
nfoFileStr = None
newNfoRequired = False
if xbmcvfs.exists(nfoFileName):
# Need to first load the contents of the NFO file into
# a string, this is because the XML File Parse option will
# not handle formats like smb://
nfoFile = xbmcvfs.File(nfoFileName, 'r')
nfoFileStr = nfoFile.read()
nfoFile.close()
# Check to ensure we have some NFO data
if (nfoFileStr is None) or (nfoFileStr == ""):
# Create a default NFO File
# Need to create a new file if one does not exist
log("BaseExtrasItem: No NFO file found, creating new one: %s" % nfoFileName)
tagType = 'movie'
if isTV:
tagType = 'tvshow'
nfoFileStr = ("<%s>\n <plot> </plot>\n</%s>\n" % (tagType, tagType))
newNfoRequired = True
# Create an XML parser
try:
nfoXml = ET.ElementTree(ET.fromstring(nfoFileStr))
except:
log("BaseExtrasItem: Trying encoding to UTF-8 with ignore")
nfoXml = ET.ElementTree(ET.fromstring(nfoFileStr.decode("UTF-8", 'ignore')))
# Get the plot element
plotElement = nfoXml.find('plot')
# Make sure the title exists in the file
if plotElement is None:
log("BaseExtrasItem: plot element not found")
return False
# Set the plot to the new value
plotElement.text = newPlot
# Save the file back to the file-system
newNfoContent = ET.tostring(nfoXml.getroot(), encoding="UTF-8")
del nfoXml
nfoFile = xbmcvfs.File(nfoFileName, 'w')
try:
nfoFile.write(newNfoContent)
except:
log("BaseExtrasItem: Failed to write NFO: %s" % nfoFileName, xbmc.LOGERROR)
log("BaseExtrasItem: %s" % traceback.format_exc(), xbmc.LOGERROR)
# Make sure we close the file handle
nfoFile.close()
                # If there was no file before, make sure we delete any partial file
if newNfoRequired:
xbmcvfs.delete(nfoFileName)
return False
nfoFile.close()
except:
log("BaseExtrasItem: Failed to write NFO: %s" % nfoFileName, xbmc.LOGERROR)
log("BaseExtrasItem: %s" % traceback.format_exc(), xbmc.LOGERROR)
return False
return True
# Reads the thumbnail information from an NFO file
def _getNfoThumb(self, nfoXml):
# Get the thumbnail
thumbnail = nfoXml.findtext('thumb')
if (thumbnail is not None) and (thumbnail != ""):
# Found the thumb entry, check if this is a local path
# which just has a filename, this is the case if there are
# no forward slashes and no back slashes
if thumbnail.startswith('..') or (("/" not in thumbnail) and ("\\" not in thumbnail)):
thumbnail = os_path_join(self.directory, thumbnail)
else:
thumbnail = None
return thumbnail
# Reads the fanart information from an NFO file
def _getNfoFanart(self, nfoXml):
# Get the fanart
fanart = nfoXml.findtext('fanart')
if (fanart is not None) and (fanart != ""):
# Found the fanart entry, check if this is a local path
# which just has a filename, this is the case if there are
# no forward slashes and no back slashes
if fanart.startswith('..') or (("/" not in fanart) and ("\\" not in fanart)):
fanart = os_path_join(self.directory, fanart)
else:
fanart = None
return fanart
####################################################################
# Extras item that extends the base type to supply extra information
# that can be read or set via a database
####################################################################
class ExtrasItem(BaseExtrasItem):
def __init__(self, directory, filename, isFileMatchExtra=False, extrasDb=None, defaultFanArt=""):
self.extrasDb = extrasDb
self.watched = 0
self.totalDuration = -1
self.resumePoint = 0
BaseExtrasItem.__init__(self, directory, filename, isFileMatchExtra, defaultFanArt)
self._loadState()
# Note: An attempt was made to re-use the existing Kodi database to
# read the playcount to work out if a video file has been watched,
# however this did not seem to work, call was:
# json_query = xbmc.executeJSONRPC('{"jsonrpc": "2.0", "method": "Files.GetFileDetails", "params": {"file": "%s", "media": "video", "properties": [ "playcount" ]},"id": 1 }' % filename)
# Even posted on the forum, but this hasn't resolved it:
# http://forum.xbmc.org/showthread.php?tid=177368
    # UPDATE: Found out what the problem was, with Windows paths you need to additionally escape them!
# self.getFilename().replace("\\", "\\\\")
# However, as it turns out, we can't use the official database, as it only stores the "playcount"
# (The number of time the file has been played) and nothing about the resume point for partially
# played files
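    # i.e. a sketch of the escaping that is needed first:
    #   escapedName = self.getFilename().replace("\\", "\\\\")
    # before substituting the escaped name into the JSON-RPC query above.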
def getWatched(self):
return self.watched
# If the playing progress should be recorded for this file, things like
# ISO's and VOBs do not handle this well as the incorrect values are
# returned from the player
def shouldStoreProgress(self):
if self.getVOBFile() is not None:
return False
# Get the extension of the file
fileExt = os.path.splitext(self.getFilename())[1]
if (fileExt is None) or (fileExt == "") or (fileExt.lower() == '.iso'):
return False
# Default is true
return True
def setTotalDuration(self, totalDuration):
# Do not set the total duration on DVD Images as
# this will be incorrect
if not self.shouldStoreProgress():
return
self.totalDuration = totalDuration
def getTotalDuration(self):
if self.totalDuration < 0:
self.totalDuration = self.getDuration()
return self.totalDuration
def getDisplayDuration(self):
return BaseExtrasItem.getDisplayDuration(self, self.totalDuration)
def setResumePoint(self, currentPoint):
# Do not set the resume point on DVD Images as
# this will be incorrect
if not self.shouldStoreProgress():
return
# Now set the flag to show if it has been watched
# Consider watched if within 15 seconds of the end
if (currentPoint + 15 > self.totalDuration) and (self.totalDuration > 0):
self.watched = 1
self.resumePoint = 0
        # Consider not watched if played for less than 6 seconds
elif currentPoint < 6:
self.watched = 0
self.resumePoint = 0
# Otherwise save the resume point
else:
self.watched = 0
self.resumePoint = currentPoint
def getResumePoint(self):
return self.resumePoint
# Get the string display version of the Resume time
def getDisplayResumePoint(self):
# Split the time up ready for display
minutes, seconds = divmod(self.resumePoint, 60)
hoursString = ""
        if minutes >= 60:
# Need to collect hours if needed
hours, minutes = divmod(minutes, 60)
hoursString = "%02d:" % hours
newLabel = "%s%02d:%02d" % (hoursString, minutes, seconds)
return newLabel
def isResumable(self):
if self.watched == 1 or self.resumePoint < 1:
return False
return True
def saveState(self):
# Do not save the state on DVD Images as
# this will be incorrect
if not self.shouldStoreProgress():
return
if self.extrasDb is None:
log("ExtrasItem: Database not enabled")
return
log("ExtrasItem: Saving state for %s" % self.getFilename())
rowId = -1
# There are some cases where we want to remove the entries from the database
# This is the case where the resume point is 0, watched is 0
if (self.resumePoint == 0) and (self.watched == 0):
# There are some media files that we can only get the duration from if they have been played
# so just make sure we can get the duration again before we blat this entry
origDuration = self.duration
if (self.totalDuration > 0) and (self.getDuration() < 1):
self.duration = origDuration
# We currently have a duration and can't calculate it, so just do the update
rowId = self.extrasDb.insertOrUpdate(self.getFilename(), self.resumePoint, self.totalDuration, self.getWatched())
else:
self.extrasDb.delete(self.getFilename())
self.duration = origDuration
else:
rowId = self.extrasDb.insertOrUpdate(self.getFilename(), self.resumePoint, self.totalDuration, self.getWatched())
return rowId
def _loadState(self):
if self.extrasDb is None:
log("ExtrasItem: Database not enabled")
return
log("ExtrasItem: Loading state for %s" % self.getFilename())
returnData = self.extrasDb.select(self.getFilename())
if returnData is not None:
self.resumePoint = returnData['resumePoint']
self.totalDuration = returnData['totalDuration']
self.watched = returnData['watched']
def createListItem(self, path="", parentTitle="", tvShowTitle="", defaultIconImage=""):
# Label2 is used to store the duration in HH:MM:SS format
anItem = xbmcgui.ListItem(self.getDisplayName(), self.getDisplayDuration(), path=path)
anItem.setProperty("FileName", self.getFilename())
anItem.setInfo('video', {'PlayCount': self.getWatched()})
anItem.setInfo('video', {'Title': parentTitle})
# We store the duration here (it is only in minutes for Helix-v14 - and does not
# look very good if displayed), so we also set Label2 to a viewable value
        # However for Isengard-v15 this was changed to be seconds
intDuration = self.getDuration()
# Only add the duration if there is one
if intDuration > 0:
anItem.setInfo('video', {'Duration': intDuration})
if tvShowTitle != "":
anItem.setInfo('video', {'TvShowTitle': tvShowTitle})
# If the plot is supplied, then set it
plot = self.getPlot()
if (plot is not None) and (plot != ""):
anItem.setInfo('video', {'Plot': plot})
# If the order sort title is supplied, then set it
orderKey = self.getOrderKey()
if (orderKey is not None) and (orderKey != ""):
anItem.setInfo('video', {'sorttitle': orderKey})
# If both the Icon and Thumbnail is set, the list screen will choose to show
# the thumbnail
if self.getIconImage() != "":
anItem.setIconImage(self.getIconImage())
if self.getThumbnailImage() != "":
anItem.setThumbnailImage(self.getThumbnailImage())
# Set the default image if available
if defaultIconImage != "":
if (self.getIconImage() == "") and (self.getThumbnailImage() == ""):
anItem.setIconImage(defaultIconImage)
# The following two will give us the resume flag
anItem.setProperty("TotalTime", str(self.getTotalDuration()))
anItem.setProperty("ResumeTime", str(self.getResumePoint()))
# Set the background image
anItem.setProperty("Fanart_Image", self.getFanArt())
return anItem
|
robwebset/script.videoextras
|
resources/lib/ExtrasItem.py
|
Python
|
gpl-2.0
| 30,282
|
#!/usr/bin/env python
class ReplyMarkup(object):
def to_json(self):
raise NotImplementedError
def __str__(self):
return self.to_json()
|
pi0/python-telegram-bot
|
telegram/replymarkup.py
|
Python
|
gpl-2.0
| 162
|
# -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo Marcxml mapping test."""
from __future__ import absolute_import, print_function
from datetime import datetime
from invenio_pidstore.models import PersistentIdentifier
from invenio_records import Record
from zenodo.modules.records.serializers import marcxml_v1
def test_full_record(app, db, full_record):
"""Test MARC21 serialization of full record."""
    # Add embargo date and OAI-PMH set information.
    full_record['embargo_date'] = '0900-12-31'
    full_record['_oai'] = {
        "id": "oai:zenodo.org:1",
        "sets": ["user-zenodo", "user-ecfunded"]
    }
    # Create record and PID.
    record = Record.create(full_record)
record.model.updated = datetime.utcnow()
pid = PersistentIdentifier(pid_type='recid', pid_value='2')
assert record.validate() is None
expected = {
u'control_number': u'12345',
u'date_and_time_of_latest_transaction': (
record.model.updated.strftime("%Y%m%d%H%M%S.0")),
u'resource_type': {
u'subtype': u'book',
u'type': u'publication'
},
u'title_statement': {
u'title': u'Test title'
},
u'publication_distribution_imprint': [
{u'date_of_publication_distribution': u'2014-02-27'},
],
u'main_entry_personal_name': {
u'affiliation': u'CERN',
u'personal_name': u'Doe, John',
u'authority_record_control_number_or_standard_number': [
u'(gnd)170118215', u'(orcid)0000-0002-1694-233X'
]
},
u'added_entry_personal_name': [
{
u'affiliation': u'CERN',
u'personal_name': u'Doe, Jane',
u'authority_record_control_number_or_standard_number': [
u'(orcid)0000-0002-1825-0097'
]
},
{
u'affiliation': u'CERN',
u'personal_name': u'Smith, John',
},
{
u'affiliation': u'CERN',
u'personal_name': u'Nowak, Jack',
u'authority_record_control_number_or_standard_number': [
u'(gnd)170118215'
]
},
{
u'affiliation': u'CERN',
u'relator_code': [u'oth'],
u'personal_name': u'Smith, Other',
u'authority_record_control_number_or_standard_number': [
u'(orcid)0000-0002-1825-0097'
]
},
{
u'personal_name': u'Hansen, Viggo',
u'relator_code': [u'oth'],
},
{
u'affiliation': u'CERN',
u'relator_code': [u'dtm'],
u'personal_name': u'Kowalski, Manager'
},
{
u'relator_code': [u'ths'],
u'personal_name': u'Smith, Professor'
},
],
u'summary': {
u'summary': u'Test Description'
},
u'index_term_uncontrolled': [
{u'uncontrolled_term': u'kw1'},
{u'uncontrolled_term': u'kw2'},
{u'uncontrolled_term': u'kw3'},
],
u'subject_added_entry_topical_term': [
{
u'topical_term_or_geographic_name_entry_element': u'cc-by',
u'source_of_heading_or_term': u'opendefinition.org',
u'level_of_subject': u'Primary',
u'thesaurus': u'Source specified in subfield $2',
},
{
u'topical_term_or_geographic_name_entry_element': u'Astronomy',
u'authority_record_control_number_or_standard_number': (
u'(url)http://id.loc.gov/authorities/subjects/sh85009003'),
u'level_of_subject': u'Primary',
},
],
u'general_note': {
u'general_note': u'notes'
},
u'information_relating_to_copyright_status': {
u'copyright_status': u'open'
},
u'terms_governing_use_and_reproduction_note': {
u'uniform_resource_identifier': u'http://zenodo.org',
u'terms_governing_use_and_reproduction': u'Creative Commons'
},
u'communities': [
u'zenodo',
],
u'funding_information_note': [
{u'grant_number': u'1234', u'text_of_note': u'Grant Title'},
{u'grant_number': u'4321', u'text_of_note': u'Title Grant'}
],
u'host_item_entry': [
{
u'main_entry_heading': u'10.1234/foo.bar',
u'note': u'doi',
u'relationship_information': u'cites',
},
{
u'main_entry_heading': u'1234.4321',
u'note': u'arxiv',
u'relationship_information': u'cites',
},
{
u'main_entry_heading': u'Staszkowka',
u'edition': u'Jol',
u'title': u'Bum',
u'related_parts': u'1-2',
u'international_standard_book_number': u'978-0201633610',
},
],
u'other_standard_identifier': [
{
u'standard_number_or_code': u'10.1234/foo.bar',
u'source_of_number_or_code': u'doi',
},
{
u'standard_number_or_code': (
u'urn:lsid:ubio.org:namebank:11815'),
u'source_of_number_or_code': u'lsid',
u'qualifying_information': u'alternateidentifier',
},
{
u'standard_number_or_code': u'2011ApJS..192...18K',
u'source_of_number_or_code': u'issn',
u'qualifying_information': u'alternateidentifier',
},
{
u'standard_number_or_code': u'10.1234/alternate.doi',
u'source_of_number_or_code': u'doi',
u'qualifying_information': u'alternateidentifier',
}
],
u'references': [
{
u'raw_reference': u'Doe, John et al (2012). Some title. '
'Zenodo. 10.5281/zenodo.12'
}, {
u'raw_reference': u'Smith, Jane et al (2012). Some title. '
'Zenodo. 10.5281/zenodo.34'
}
],
u'added_entry_meeting_name': [{
u'date_of_meeting': u'23-25 June, 2014',
u'meeting_name_or_jurisdiction_name_as_entry_element':
u'The 13th Biennial HITRAN Conference',
u'number_of_part_section_meeting': u'VI',
u'miscellaneous_information': u'HITRAN13',
u'name_of_part_section_of_a_work': u'1',
u'location_of_meeting':
u'Harvard-Smithsonian Center for Astrophysics'
}],
u'conference_url': 'http://hitran.org/conferences/hitran-13-2014/',
u'dissertation_note': {
u'name_of_granting_institution': u'I guess important',
},
u'journal': {
'issue': '2',
'pages': '20',
'volume': '20',
'title': 'Bam',
'year': '2014',
},
# missing files
# missing language
u'embargo_date': '0900-12-31',
u'_oai': {
u'sets': [u'user-zenodo', u'user-ecfunded'],
u'id': u'oai:zenodo.org:1'
},
u'_files': [
{
'uri': 'https://zenodo.org/record/12345/files/test',
'checksum': 'md5:11111111111111111111111111111111',
'type': 'txt',
'size': 4,
},
],
'leader': {
'base_address_of_data': '00000',
'bibliographic_level': 'monograph_item',
'character_coding_scheme': 'marc-8',
'descriptive_cataloging_form': 'unknown',
'encoding_level': 'unknown',
'indicator_count': 2,
'length_of_the_implementation_defined_portion': 0,
'length_of_the_length_of_field_portion': 4,
'length_of_the_starting_character_position_portion': 5,
'multipart_resource_record_level':
'not_specified_or_not_applicable',
'record_length': '00000',
'record_status': 'new',
'subfield_code_count': 2,
'type_of_control': 'no_specified_type',
'type_of_record': 'language_material',
'undefined': 0,
},
}
# Dump MARC21 JSON structure and compare against expected JSON.
preprocessed_record = marcxml_v1.preprocess_record(record=record, pid=pid)
assert_dict(
expected,
marcxml_v1.schema_class().dump(preprocessed_record).data
)
# Assert that we can output MARCXML.
marcxml_v1.serialize(record=record, pid=pid)
def test_minimal_record(app, db, minimal_record):
"""Test minimal record."""
# Create record and pid.
record = Record.create(minimal_record)
record.model.updated = datetime.utcnow()
pid = PersistentIdentifier(pid_type='recid', pid_value='2')
assert record.validate() is None
expected = {
u'date_and_time_of_latest_transaction': (
record.model.updated.strftime("%Y%m%d%H%M%S.0")),
u'publication_distribution_imprint': [{
'date_of_publication_distribution': record['publication_date']
}],
u'control_number': '123',
u'information_relating_to_copyright_status': {
'copyright_status': 'open'
},
u'summary': {
'summary': 'My description'
},
u'main_entry_personal_name': {
'personal_name': 'Test'
},
u'resource_type': {
'type': 'software'
},
u'title_statement': {
'title': 'Test'
},
u'leader': {
'base_address_of_data': '00000',
'bibliographic_level': 'monograph_item',
'character_coding_scheme': 'marc-8',
'descriptive_cataloging_form': 'unknown',
'encoding_level': 'unknown',
'indicator_count': 2,
'length_of_the_implementation_defined_portion': 0,
'length_of_the_length_of_field_portion': 4,
'length_of_the_starting_character_position_portion': 5,
'multipart_resource_record_level':
'not_specified_or_not_applicable',
'record_length': '00000',
'record_status': 'new',
'subfield_code_count': 2,
'type_of_control': 'no_specified_type',
'type_of_record': 'computer_file',
'undefined': 0,
},
}
data = marcxml_v1.schema_class().dump(marcxml_v1.preprocess_record(
pid=pid,
record=record)).data
assert_dict(expected, data)
marcxml_v1.serialize(pid=pid, record=record)
def assert_array(a1, a2):
"""Check array."""
for i in range(0, len(a1)):
if isinstance(a1[i], dict):
assert_dict(a1[i], a2[i])
elif isinstance(a1[i], list) or isinstance(a1[i], tuple):
assert_array(a1[i], a2[i])
else:
assert a1[i] in a2
assert len(a1) == len(a2)
def assert_dict(a1, a2):
"""Check dict."""
for (k, v) in a1.items():
assert k in a2
if isinstance(v, dict):
assert_dict(v, a2[k])
elif isinstance(v, list) or isinstance(v, tuple):
assert_array(v, a2[k])
else:
assert a2[k] == v
assert len(a2) == len(a1)
|
tiborsimko/zenodo
|
tests/unit/records/test_schemas_marcxml.py
|
Python
|
gpl-2.0
| 12,706
|
#!/usr/bin/env python2
# Rekall Memory Forensics
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Author: Michael Cohen scudette@google.com
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
__author__ = "Michael Cohen <scudette@google.com>"
"""Location handlers for a stand alone HTTP server.
"""
import cStringIO
import hashlib
import logging
import time
from wsgiref import handlers
import requests
from requests import adapters
from rekall_lib.types import location
from rekall_lib import crypto
from rekall_lib import serializer
from rekall_lib import utils
from rekall import session
from rekall_agent import common
import sseclient
MAX_BUFF_SIZE = 10*1024*1024
class URLPolicy(serializer.SerializedObject):
"""Expresses the policy for managing URLs."""
schema = [
dict(name="path_prefix",
doc="The path prefix to enforce."),
dict(name="path_template", default="",
doc="The path template to expand."),
dict(name="expires", type="epoch",
doc="When does this policy expire"),
dict(name="access", type="choices", repeated=True, default=["READ"],
choices=["READ", "WRITE", "LIST"],
doc="The allowed access pattern for this operation."),
dict(name="public", type="bool",
doc="If set the uploaded object will be public."),
]
def _join_url(base, *components):
return base.rstrip("/") + "/" + utils.join_path(*components).lstrip("/")
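# e.g. _join_url("http://server/", "/a/", "b") -> "http://server/a/b"
# (assuming utils.join_path joins its arguments with "/")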
class HTTPLocationImpl(common.AgentConfigMixin, location.HTTPLocation):
"""A stand along HTTP server location."""
def __init__(self, *args, **kwargs):
super(HTTPLocationImpl, self).__init__(*args, **kwargs)
if not isinstance(self._session, session.Session):
raise TypeError("%s must be instantiated with a Rekall Session" %
self.__class__)
def get_requests_session(self):
requests_session = self._session.GetParameter("requests_session")
        if requests_session is None:
# To make sure we can use the requests session in the threadpool we
# need to make sure that the connection pool can block. Otherwise it
# will raise when it runs out of connections and the threads will be
# terminated.
requests_session = requests.Session()
requests_session.mount("https://", adapters.HTTPAdapter(
pool_connections=10, pool_maxsize=300, max_retries=10,
pool_block=True))
requests_session.mount("http://", adapters.HTTPAdapter(
pool_connections=10, pool_maxsize=300, max_retries=10,
pool_block=True))
self._session.SetCache("requests_session", requests_session)
return requests_session
def expand_path(self, **kwargs):
"""Expand the complete path using the client's config."""
return self.path_template.format(
**common.Interpolator(self._session, **kwargs))
def to_path(self, **kwargs):
return utils.join_path(self.path_prefix, self.expand_path(**kwargs))
def _get_parameters(self, if_modified_since=None, **kwargs):
if not self.path_prefix and not self.base:
raise IOError("No base URL specified.")
subpath = self.expand_path(**kwargs)
if subpath:
path = utils.join_path(self.path_prefix, subpath)
else:
path = self.path_prefix
if path:
base_url = _join_url(self.base, path)
else:
base_url = self.base
headers = {
"Cache-Control": "private",
}
if if_modified_since:
headers["If-Modified-Since"] = handlers.format_date_time(
if_modified_since)
return base_url, {}, headers, path
def read_file(self, **kw):
url_endpoint, _, headers, _ = self._get_parameters(**kw)
self.add_signature("", url_endpoint, headers)
resp = self.get_requests_session().get(
url_endpoint, headers=headers)
if resp.ok:
return resp.content
logging.warning("Error: %s", resp.content)
return ""
def add_signature(self, data, url, headers):
# Calculate the signature on the data.
private_key = self._config.client.writeback.private_key
assertion = crypto.HTTPAssertion.from_keywords(
timestamp=time.time(),
url=url).to_json()
signature = crypto.HTTPSignature.from_keywords(
assertion=assertion,
client_id=self._config.client.writeback.client_id,
public_key=private_key.public_key(),
signature=private_key.sign(assertion + data))
headers["x-rekall-signature"] = signature.to_json()
def write_file(self, data, **kwargs):
url_endpoint, params, headers, base_url = self._get_parameters(**kwargs)
self.add_signature(data, url_endpoint, headers)
resp = self.get_requests_session().post(
url_endpoint, data=data,
params=params, headers=headers)
self._session.logging.debug("Uploaded file: %s (%s bytes)",
base_url, len(data))
return self._report_error(None, resp)
def _report_error(self, completion_routine, response=None,
message=None):
if response:
# Only include the text in case of error.
if not response.ok:
status = location.Status(response.status_code, response.text)
else:
status = location.Status(response.status_code)
else:
status = location.Status(500, message)
if response is None or not response.ok:
if completion_routine:
return completion_routine(status)
            if response is not None:
                raise IOError(response.text)
            raise IOError(message)
else:
if completion_routine:
completion_routine(status)
return location.Status(200, response.content)
class BlobUploaderImpl(HTTPLocationImpl, location.BlobUploader):
def write_file(self, data, **kwargs):
spec = location.BlobUploadSpecs.from_json(self.read_file(**kwargs))
# Upload the file to the blob endpoint.
resp = self.get_requests_session().post(
spec.url, files={spec.name: cStringIO.StringIO(data)})
self._session.logging.debug("Uploaded file: %s (%s bytes)",
spec.url, len(data))
return self._report_error(None, resp)
class Reader(object):
"""Wrap a file like object in a multi-form boundary."""
def __init__(self, fd):
self.fd = fd
self._boundary = "---------------------------735323031399963166993862150"
self._start = (
"--" + self._boundary + "\r\n" +
'Content-Disposition: form-data; name="file"; filename="a.bin"\r\n' +
'Content-Type: application/octet-stream\r\n\r\n')
self._start_stream = cStringIO.StringIO(self._start)
self._end = "\r\n--" + self._boundary + "--\r\n\r\n"
self._end_stream = cStringIO.StringIO(self._end)
self.len = len(self._start) + self.get_len(self.fd) + len(self._end)
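        # The stream read back is therefore: _start + file bytes + _end, i.e. a
        # single "file" form part wrapped in the boundary declared above.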
def content_type(self):
return str(
'multipart/form-data;boundary="{0}"'.format(self._boundary)
)
def get_len(self, fd):
"""Figure out the total length of fd."""
fd.seek(0, 2)
res = fd.tell()
fd.seek(0)
return res
def read(self, length):
to_read = length
result = ""
for fd in (self._start_stream, self.fd, self._end_stream):
data = fd.read(to_read)
result += data
to_read -= len(data)
if to_read == 0:
return result
return result
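# A minimal usage sketch for Reader (hypothetical, mirroring the upload call in
# upload_file_object below):
#   with open("sample.bin", "rb") as fd:
#       reader = Reader(fd)
#       requests.post(upload_url, data=reader, headers={
#           "Content-Type": reader.content_type(),
#           "Content-Length": str(reader.len)})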
class FileUploadLocationImpl(HTTPLocationImpl, location.FileUploadLocation):
def upload_file_object(self, fd, file_information=None, **kw):
"""Upload a local file.
Read data from fd. If file_information is provided, then we use this to
report about the file.
"""
reader = Reader(fd)
if file_information is None:
file_information = location.FileInformation.from_keywords(
filename=fd.name,
st_size=reader.len,
)
request = location.FileUploadRequest.from_keywords(
flow_id=self.flow_id,
file_information=file_information)
url_endpoint, _, headers, _ = self._get_parameters(**kw)
data = request.to_json()
self.add_signature(data, url_endpoint, headers)
resp = self.get_requests_session().post(
url_endpoint, data=data,
headers=headers)
if resp.ok:
response = location.FileUploadResponse.from_json(resp.content)
# Upload the file to the blob endpoint. This must be a multipart
# form with streamed file upload. TODO: Think about
# transfer-encoding gzip. It will be tricky because we need to know
            # the length. It does not appear that the AppEngine SDK supports
# chunked encoding, but the production server does support it.
# https://stackoverflow.com/questions/13127500/does-appengine-blobstore-support-chunked-transfer-encoding-for-uploads-status-4
resp = self.get_requests_session().post(
response.url, data=reader, headers={
"Content-Type": reader.content_type(),
"Content-Length": str(reader.len),
}
)
if resp.ok:
self._session.logging.debug(
"Uploaded file: %s (%s bytes)",
file_information.filename, fd.tell())
else:
self._session.logging.warn(
"Error uploading file: %s", resp.content)
class FirbaseNotifier(HTTPLocationImpl, location.NotificationLocation):
"""Read notifications from the server."""
def Start(self, callback):
client_id = self._config.client.writeback.client_id
client_id_hash = hashlib.sha1(client_id).hexdigest()
url_endpoint = "%s/%s.json" % (self.base, client_id_hash)
headers = {}
headers['Accept'] = 'text/event-stream'
while 1:
resp = self.get_requests_session().get(
url_endpoint, headers=headers, stream=True)
client = sseclient.SSEClient(resp)
for event in client.events():
# Ignore keep alive events.
if event.event in ['put', 'patch']:
self._session.logging.debug("FirebseNotifier woke up.")
callback(event.data)
self._session.logging.debug(
"Firebase connection reset, backing off.")
time.sleep(60)
|
dsweet04/rekall
|
rekall-agent/rekall_agent/locations/http.py
|
Python
|
gpl-2.0
| 11,603
|
"""w0401 dependency
"""
from __future__ import print_function
from . import func_w0401
if func_w0401:
print(input)
|
PyCQA/pylint
|
tests/input/w0401_cycle.py
|
Python
|
gpl-2.0
| 122
|
import pytest
import os
from pathlib import Path
from tyssue import Sheet, History, Epithelium, RNRGeometry
from tyssue.core.history import HistoryHdf5
from tyssue.generation import three_faces_sheet, extrude
def test_simple_history():
sheet = Sheet("3", *three_faces_sheet())
history = History(sheet)
assert "dx" in history.datasets["edge"].columns
for element in sheet.datasets:
assert sheet.datasets[element].shape[0] == history.datasets[element].shape[0]
history.record()
assert sheet.datasets["vert"].shape[0] * 2 == history.datasets["vert"].shape[0]
history.record()
assert sheet.datasets["vert"].shape[0] * 3 == history.datasets["vert"].shape[0]
assert sheet.datasets["face"].shape[0] * 3 == history.datasets["face"].shape[0]
mono = Epithelium("eptm", extrude(sheet.datasets))
histo2 = History(mono)
for element in mono.datasets:
assert mono.datasets[element].shape[0] == histo2.datasets[element].shape[0]
def test_warning():
sheet = Sheet("3", *three_faces_sheet())
with pytest.warns(UserWarning):
History(
sheet, extra_cols={"edge": ["dx"], "face": ["area"], "vert": ["segment"]}
)
def test_retrieve():
sheet = Sheet("3", *three_faces_sheet())
history = History(sheet)
sheet_ = history.retrieve(0)
for elem, dset in sheet_.datasets.items():
assert dset.shape[0] == sheet.datasets[elem].shape[0]
assert "area" in sheet_.datasets["face"].columns
with pytest.warns(UserWarning):
sheet_ = history.retrieve(1)
for elem, dset in sheet_.datasets.items():
assert dset.shape[0] == sheet.datasets[elem].shape[0]
sheet.vert_df.loc[0, "x"] = 100.0
sheet.face_df["area"] = 100.0
history.record()
sheet_ = history.retrieve(1)
for elem, dset in sheet_.datasets.items():
assert dset.shape[0] == sheet.datasets[elem].shape[0]
print(dset)
assert sheet_.datasets["vert"].loc[0, "x"] == 100.0
assert sheet_.datasets["face"].loc[0, "area"] == 100.0
history.record()
sheet_ = history.retrieve(2)
assert sheet_.datasets["face"].loc[0, "area"] == 100.0
sheet_ = history.retrieve(1)
assert sheet_.datasets["face"].loc[0, "area"] == 100.0
def test_overwrite_time():
sheet = Sheet("3", *three_faces_sheet())
history = History(sheet)
history.record(time_stamp=1)
history.record(time_stamp=1)
sheet_ = history.retrieve(1)
assert sheet_.Nv == sheet.Nv
def test_overwrite_time_hdf5():
sheet = Sheet("3", *three_faces_sheet())
history = HistoryHdf5(sheet, hf5file="out.hf5")
history.record(time_stamp=1)
history.record(time_stamp=1)
sheet_ = history.retrieve(1)
os.remove("out.hf5")
assert sheet_.Nv == sheet.Nv
def test_retrieve_bulk():
eptm = Epithelium("3", extrude(three_faces_sheet()[0]))
RNRGeometry.update_all(eptm)
history = History(eptm)
eptm_ = history.retrieve(0)
RNRGeometry.update_all(eptm_)
def test_historyHDF5_path_warning():
sheet = Sheet("3", *three_faces_sheet())
with pytest.warns(UserWarning):
history = HistoryHdf5(sheet)
history.record(time_stamp=0)
with pytest.warns(UserWarning):
history = HistoryHdf5(sheet, hf5file="out.hf5")
history.record(time_stamp=0)
for p in Path(".").glob("out*.hf5"):
p.unlink()
def test_historyHDF5_retrieve():
sheet = Sheet("3", *three_faces_sheet())
history = HistoryHdf5(sheet, hf5file="out.hf5")
for element in sheet.datasets:
assert sheet.datasets[element].shape[0] == history.datasets[element].shape[0]
history.record(time_stamp=0)
history.record(time_stamp=1)
sheet_ = history.retrieve(0)
for elem, dset in sheet_.datasets.items():
assert dset.shape[0] == sheet.datasets[elem].shape[0]
assert dset.time.unique()[0] == 0
sheet_ = history.retrieve(1)
for elem, dset in sheet_.datasets.items():
assert dset.shape[0] == sheet.datasets[elem].shape[0]
assert dset.time.unique()[0] == 1
for p in Path(".").glob("out*.hf5"):
p.unlink()
def test_historyHDF5_save_every():
sheet = Sheet("3", *three_faces_sheet())
history = HistoryHdf5(
sheet,
save_every=2,
dt=1,
hf5file="out.hf5",
)
for element in sheet.datasets:
assert sheet.datasets[element].shape[0] == history.datasets[element].shape[0]
for i in range(6):
history.record(time_stamp=i)
sheet_ = history.retrieve(0)
for elem, dset in sheet_.datasets.items():
assert dset.shape[0] == sheet.datasets[elem].shape[0]
assert dset.time.unique()[0] == 0
sheet_ = history.retrieve(1)
for elem, dset in sheet_.datasets.items():
assert dset.shape[0] == sheet.datasets[elem].shape[0]
assert dset.time.unique()[0] == 0
sheet_ = history.retrieve(2)
for elem, dset in sheet_.datasets.items():
assert dset.shape[0] == sheet.datasets[elem].shape[0]
assert dset.time.unique()[0] == 2
for p in Path(".").glob("out*.hf5"):
p.unlink()
def test_historyHDF5_itemsize():
sheet = Sheet("3", *three_faces_sheet())
sheet.vert_df["segment"] = "apical"
history = HistoryHdf5(
sheet,
hf5file="out.hf5",
)
for element in sheet.datasets:
assert sheet.datasets[element].shape[0] == history.datasets[element].shape[0]
sheet.vert_df.loc[0, "segment"] = ""
history.record(time_stamp=1)
sheet.vert_df.loc[0, "segment"] = "lateral"
history.record(time_stamp=2)
sheet.face_df.loc[0, "area"] = 12.0
history.record(time_stamp=3, sheet=sheet)
sheet1_ = history.retrieve(1)
assert sheet1_.vert_df.loc[0, "segment"] == ""
sheet2_ = history.retrieve(2)
assert sheet2_.vert_df.loc[0, "segment"] == "lateral"
sheet3_ = history.retrieve(3)
assert sheet3_.face_df.loc[0, "area"] == 12.0
for p in Path(".").glob("out*.hf5"):
p.unlink()
def test_historyHDF5_save_other_sheet():
sheet = Sheet("3", *three_faces_sheet())
history = HistoryHdf5(
sheet,
save_only={"edge": ["dx"], "face": ["area"], "vert": ["segment"]},
hf5file="out.hf5",
)
for element in sheet.datasets:
assert sheet.datasets[element].shape[0] == history.datasets[element].shape[0]
sheet.face_df.loc[0, "area"] = 1.0
history.record(time_stamp=1)
sheet.face_df.loc[0, "area"] = 12.0
history.record(time_stamp=2, sheet=sheet)
sheet1_ = history.retrieve(1)
assert sheet1_.face_df.loc[0, "area"] == 1.0
sheet2_ = history.retrieve(2)
assert sheet2_.face_df.loc[0, "area"] == 12.0
for p in Path(".").glob("out*.hf5"):
p.unlink()
def test_historyHDF5_from_archive():
sheet = Sheet("3", *three_faces_sheet())
history = HistoryHdf5(sheet, hf5file="test.hf5")
history.record()
history.record()
history.record()
retrieved = HistoryHdf5.from_archive("test.hf5")
try:
assert isinstance(retrieved.sheet, type(sheet))
finally:
os.remove("test.hf5")
def test_to_and_from_archive():
sheet = Sheet("3", *three_faces_sheet())
history = History(sheet)
history.record()
history.record()
history.record()
history.to_archive("test.hf5")
history_h = HistoryHdf5.from_archive("test.hf5")
sheet_ = history_h.retrieve(2)
try:
assert sheet_.Nv == sheet.Nv
finally:
os.remove("test.hf5")
def test_unsaved_col():
sheet = Sheet("3", *three_faces_sheet())
history = HistoryHdf5(
sheet,
hf5file="test.hf5",
)
history.record()
history.record()
sheet.face_df["new_col"] = 0
with pytest.warns(UserWarning):
history.record()
os.remove("test.hf5")
def test_change_col_types():
sheet = Sheet("3", *three_faces_sheet())
history = HistoryHdf5(
sheet,
hf5file="test.hf5",
)
history.record()
history.record()
sheet.face_df["z"] = "abc"
with pytest.raises(ValueError):
history.record()
os.remove("test.hf5")
|
CellModels/tyssue
|
tests/core/test_history.py
|
Python
|
gpl-2.0
| 8,115
|
#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2012 mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# ails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
# =================================================================== #
# _ ____ ___ ____ #
# / \ | _ \_ _| | _ \ ___ ___ _ _ #
# / _ \ | |_) | |_____| | | |/ _ \ / __| | | | #
# / ___ \| __/| |_____| |_| | (_) | (__| |_| | #
# /_/ \_\_| |___| |____/ \___/ \___|\__,_| #
# #
# =================================================================== #
#
# A sorter is used for allowing the user to sort the queried data
# according to a certain logic. All sorters declare in plugins/views/*.py
# are available for the user.
#
# Each sorter is a dictionary with the following keys:
#
# "title": Name of the sorter to be displayed in view editor
# "columns": Livestatus-columns needed be the sort algorithm
# "cmp": Comparison function
#
# The function cmp does the actual sorting. During sorting it
# will be called with two data rows as arguments and must
# return -1, 0 or 1:
#
# -1: The first row is smaller than the second (should be output first)
# 0: Both rows are equivalent
# 1: The first row is greater than the second.
#
# The rows are dictionaries from column names to values. Each row
# represents one item in the Livestatus table, for example one host,
# one service, etc.
# =================================================================== #
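# Illustrative example (hypothetical sorter, not shipped with Check_MK): order
# services by the length of their plugin output, following the convention
# documented above.
def cmp_output_length(r1, r2):
    return cmp(len(r1["service_plugin_output"]), len(r2["service_plugin_output"]))
multisite_sorters["svc_output_length"] = {
    "title"   : _("Length of plugin output (example)"),
    "columns" : [ "service_plugin_output" ],
    "cmp"     : cmp_output_length,
}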
def cmp_state_equiv(r):
if r["service_has_been_checked"] == 0:
return -1
s = r["service_state"]
if s <= 1:
return s
else:
return 5 - s # swap crit and unknown
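# (effective service sort keys: pending -1, OK 0, WARN 1, UNKNOWN 2, CRIT 3)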
def cmp_host_state_equiv(r):
if r["host_has_been_checked"] == 0:
return -1
s = r["host_state"]
if s == 0:
return 0
else:
        return 3 - s # swap down and unreachable
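# (effective host sort keys: pending -1, UP 0, UNREACHABLE 1, DOWN 2)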
def cmp_svc_states(r1, r2):
return cmp(cmp_state_equiv(r1), cmp_state_equiv(r2))
def cmp_hst_states(r1, r2):
return cmp(cmp_host_state_equiv(r1), cmp_host_state_equiv(r2))
multisite_sorters["svcstate"] = {
"title" : _("Service state"),
"columns" : ["service_state", "service_has_been_checked"],
"cmp" : cmp_svc_states
}
multisite_sorters["hoststate"] = {
"title" : _("Host state"),
"columns" : ["host_state", "host_has_been_checked"],
"cmp" : cmp_hst_states
}
def cmp_site_host(r1, r2):
c = cmp(r1["site"], r2["site"])
if c != 0:
return c
else:
return cmp_simple_string("host_name", r1, r2)
multisite_sorters["site_host"] = {
"title" : _("Host"),
"columns" : ["site", "host_name" ],
"cmp" : cmp_site_host
}
def cmp_site_alias(r1, r2):
return cmp(config.site(r1["site"])["alias"], config.site(r2["site"])["alias"])
multisite_sorters["sitealias"] = {
"title" : _("Site Alias"),
"columns" : ["site" ],
"cmp" : cmp_site_alias
}
def cmp_host_tags(r1, r2):
return cmp(get_host_tags(r1), get_host_tags(r2))
multisite_sorters["host"] = {
"title" : _("Host Tags (Check_MK)"),
"columns" : [ "host_custom_variable_names", "host_custom_variable_values" ],
"cmp" : cmp_host_tags,
}
# name title column sortfunction
declare_simple_sorter("svcdescr", _("Service description"), "service_description", cmp_simple_string)
declare_simple_sorter("svcoutput", _("Service plugin output"), "service_plugin_output", cmp_simple_string)
declare_simple_sorter("svc_long_plugin_output", _("Long output of check plugin"), "service_long_plugin_output", cmp_simple_string)
declare_simple_sorter("site", _("Site"), "site", cmp_simple_string)
declare_simple_sorter("stateage", _("Service state age"), "service_last_state_change", cmp_simple_number)
declare_simple_sorter("servicegroup", _("Servicegroup"), "servicegroup_alias", cmp_simple_string)
declare_simple_sorter("hostgroup", _("Hostgroup"), "hostgroup_alias", cmp_simple_string)
# Service
declare_1to1_sorter("svc_check_command", cmp_simple_string)
declare_1to1_sorter("svc_contacts", cmp_string_list)
declare_1to1_sorter("svc_contact_groups", cmp_string_list)
declare_1to1_sorter("svc_check_age", cmp_simple_number, col_num = 1)
declare_1to1_sorter("svc_next_check", cmp_simple_number, reverse = True)
declare_1to1_sorter("svc_next_notification", cmp_simple_number, reverse = True)
declare_1to1_sorter("svc_last_notification", cmp_simple_number)
declare_1to1_sorter("svc_check_latency", cmp_simple_number)
declare_1to1_sorter("svc_check_duration", cmp_simple_number)
declare_1to1_sorter("svc_attempt", cmp_simple_number)
declare_1to1_sorter("svc_check_type", cmp_simple_number)
declare_1to1_sorter("svc_in_downtime", cmp_simple_number)
declare_1to1_sorter("svc_in_notifper", cmp_simple_number)
declare_1to1_sorter("svc_notifper", cmp_simple_string)
declare_1to1_sorter("svc_flapping", cmp_simple_number)
declare_1to1_sorter("svc_notifications_enabled", cmp_simple_number)
declare_1to1_sorter("svc_is_active", cmp_simple_number)
declare_1to1_sorter("svc_group_memberlist", cmp_string_list)
declare_1to1_sorter("svc_acknowledged", cmp_simple_number)
def cmp_perfdata_nth_value(r1, r2, n):
return cmp(savefloat(get_perfdata_nth_value(r1, n)), savefloat(get_perfdata_nth_value(r2, n)))
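# e.g. with service_perf_data like "rta=0.25;200;500 pl=0;60;80", value number
# 1 would compare on 0.25 (assuming get_perfdata_nth_value, defined elsewhere,
# returns the n-th metric's value from the perf-data string).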
# Register one sorter per perf-data value (1..10). The index is bound via a
# default argument, since Python closures bind names late.
for num in range(1, 11):
    multisite_sorters['svc_perf_val%02d' % num] = {
        "title"   : _("Service performance data - value number %02d") % num,
        "columns" : [ 'service_perf_data' ],
        "cmp"     : lambda r1, r2, n=num: cmp_perfdata_nth_value(r1, r2, n - 1),
    }
# Host
declare_1to1_sorter("alias", cmp_simple_string)
declare_1to1_sorter("host_address", cmp_simple_string)
declare_1to1_sorter("host_plugin_output", cmp_simple_string)
declare_1to1_sorter("host_perf_data", cmp_simple_string)
declare_1to1_sorter("host_check_command", cmp_simple_string)
declare_1to1_sorter("host_state_age", cmp_simple_number, col_num = 1)
declare_1to1_sorter("host_check_age", cmp_simple_number, col_num = 1)
declare_1to1_sorter("host_next_check", cmp_simple_number, reverse = True)
declare_1to1_sorter("host_next_notification", cmp_simple_number, reverse = True)
declare_1to1_sorter("host_last_notification", cmp_simple_number)
declare_1to1_sorter("host_check_latency", cmp_simple_number)
declare_1to1_sorter("host_check_duration", cmp_simple_number)
declare_1to1_sorter("host_attempt", cmp_simple_number)
declare_1to1_sorter("host_check_type", cmp_simple_number)
declare_1to1_sorter("host_in_notifper", cmp_simple_number)
declare_1to1_sorter("host_notifper", cmp_simple_string)
declare_1to1_sorter("host_flapping", cmp_simple_number)
declare_1to1_sorter("host_is_active", cmp_simple_number)
declare_1to1_sorter("host_in_downtime", cmp_simple_number)
declare_1to1_sorter("host_acknowledged", cmp_simple_number)
declare_1to1_sorter("num_services", cmp_simple_number)
declare_1to1_sorter("num_services_ok", cmp_simple_number)
declare_1to1_sorter("num_services_warn", cmp_simple_number)
declare_1to1_sorter("num_services_crit", cmp_simple_number)
declare_1to1_sorter("num_services_unknown", cmp_simple_number)
declare_1to1_sorter("num_services_pending", cmp_simple_number)
declare_1to1_sorter("host_parents", cmp_string_list)
declare_1to1_sorter("host_childs", cmp_string_list)
declare_1to1_sorter("host_group_memberlist", cmp_string_list)
declare_1to1_sorter("host_contacts", cmp_string_list)
declare_1to1_sorter("host_contact_groups", cmp_string_list)
def cmp_host_problems(r1, r2):
return cmp(r1["host_num_services"] - r1["host_num_services_ok"] - r1["host_num_services_pending"],
r2["host_num_services"] - r2["host_num_services_ok"] - r2["host_num_services_pending"])
multisite_sorters["num_problems"] = {
"title" : _("Number of problems"),
"columns" : [ "host_num_services", "host_num_services_ok", "host_num_services_pending" ],
"cmp" : cmp_host_problems,
}
# Hostgroup
declare_1to1_sorter("hg_num_services", cmp_simple_number)
declare_1to1_sorter("hg_num_services_ok", cmp_simple_number)
declare_1to1_sorter("hg_num_services_warn", cmp_simple_number)
declare_1to1_sorter("hg_num_services_crit", cmp_simple_number)
declare_1to1_sorter("hg_num_services_unknown", cmp_simple_number)
declare_1to1_sorter("hg_num_services_pending", cmp_simple_number)
declare_1to1_sorter("hg_num_hosts_up", cmp_simple_number)
declare_1to1_sorter("hg_num_hosts_down", cmp_simple_number)
declare_1to1_sorter("hg_num_hosts_unreach", cmp_simple_number)
declare_1to1_sorter("hg_num_hosts_pending", cmp_simple_number)
declare_1to1_sorter("hg_name", cmp_simple_string)
declare_1to1_sorter("hg_alias", cmp_simple_string)
# Servicegroup
declare_1to1_sorter("sg_num_services", cmp_simple_number)
declare_1to1_sorter("sg_num_services_ok", cmp_simple_number)
declare_1to1_sorter("sg_num_services_warn", cmp_simple_number)
declare_1to1_sorter("sg_num_services_crit", cmp_simple_number)
declare_1to1_sorter("sg_num_services_unknown", cmp_simple_number)
declare_1to1_sorter("sg_num_services_pending", cmp_simple_number)
declare_1to1_sorter("sg_name", cmp_simple_string)
declare_1to1_sorter("sg_alias", cmp_simple_string)
# Comments
declare_1to1_sorter("comment_id", cmp_simple_number)
declare_1to1_sorter("comment_author", cmp_simple_string)
declare_1to1_sorter("comment_comment", cmp_simple_string)
declare_1to1_sorter("comment_time", cmp_simple_number)
declare_1to1_sorter("comment_expires", cmp_simple_number, reverse = True)
declare_1to1_sorter("comment_what", cmp_simple_number)
declare_simple_sorter("comment_type", _("Comment type"), "comment_type", cmp_simple_number)
# Downtimes
declare_1to1_sorter("downtime_id", cmp_simple_number)
declare_1to1_sorter("downtime_author", cmp_simple_string)
declare_1to1_sorter("downtime_comment", cmp_simple_string)
declare_1to1_sorter("downtime_fixed", cmp_simple_number)
declare_1to1_sorter("downtime_type", cmp_simple_number)
declare_simple_sorter("downtime_what", _("Downtime type (host/service)"), "is_service", cmp_simple_number)
declare_simple_sorter("downtime_start_time", _("Downtime start"), "downtime_start_time", cmp_simple_number)
declare_simple_sorter("downtime_end_time", _("Downtime end"), "downtime_end_time", cmp_simple_number)
declare_simple_sorter("downtime_entry_time", _("Downtime entry time"), "downtime_entry_time", cmp_simple_number)
# Log
declare_1to1_sorter("log_plugin_output", cmp_simple_string)
declare_1to1_sorter("log_attempt", cmp_simple_string)
declare_1to1_sorter("log_state_type", cmp_simple_string)
declare_1to1_sorter("log_type", cmp_simple_string)
declare_1to1_sorter("log_contact_name", cmp_simple_string)
declare_1to1_sorter("log_time", cmp_simple_number)
declare_1to1_sorter("log_lineno", cmp_simple_number)
import time
def get_day_start_timestamp(t):
st = time.localtime(int(t))
start = int(time.mktime(time.struct_time((st[0], st[1], st[2], 0, 0, 0, st[6], st[7], st[8]))))
end = start + 86399
return start, end
def cmp_date(column, r1, r2):
# need to calculate with the timestamp of the day. Using 00:00:00 at the given day.
# simply calculating with 86400 does not work because of timezone problems
r1_date = get_day_start_timestamp(r1[column])
r2_date = get_day_start_timestamp(r2[column])
return cmp(r2_date, r1_date)
declare_1to1_sorter("log_date", cmp_date)
# Alert statistics
declare_simple_sorter("alerts_ok", _("Number of recoveries"), "alerts_ok", cmp_simple_number)
declare_simple_sorter("alerts_warn", _("Number of warnings"), "alerts_warn", cmp_simple_number)
declare_simple_sorter("alerts_crit", _("Number of critical alerts"), "alerts_crit", cmp_simple_number)
declare_simple_sorter("alerts_unknown", _("Number of unknown alerts"), "alerts_unknown", cmp_simple_number)
declare_simple_sorter("alerts_problem", _("Number of problem alerts"), "alerts_problem", cmp_simple_number)
# Aggregations
declare_simple_sorter("aggr_name", _("Aggregation name"), "aggr_name", cmp_simple_string)
declare_simple_sorter("aggr_group", _("Aggregation group"), "aggr_group", cmp_simple_string)
|
sileht/check_mk
|
web/plugins/views/sorters.py
|
Python
|
gpl-2.0
| 16,550
|
# encoding: utf-8
# module PyKDE4.kdeui
# from /usr/lib/python3/dist-packages/PyKDE4/kdeui.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyKDE4.kdecore as __PyKDE4_kdecore
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtGui as __PyQt4_QtGui
import PyQt4.QtSvg as __PyQt4_QtSvg
class KExtendableItemDelegate(__PyQt4_QtGui.QStyledItemDelegate):
# no doc
def contractAll(self, *args, **kwargs): # real signature unknown
pass
def contractItem(self, *args, **kwargs): # real signature unknown
pass
def contractPixmap(self, *args, **kwargs): # real signature unknown
pass
def extenderCreated(self, *args, **kwargs): # real signature unknown
pass
def extenderDestroyed(self, *args, **kwargs): # real signature unknown
pass
def extenderRect(self, *args, **kwargs): # real signature unknown
pass
def extendItem(self, *args, **kwargs): # real signature unknown
pass
def extendPixmap(self, *args, **kwargs): # real signature unknown
pass
def isExtended(self, *args, **kwargs): # real signature unknown
pass
def paint(self, *args, **kwargs): # real signature unknown
pass
def setContractPixmap(self, *args, **kwargs): # real signature unknown
pass
def setExtendPixmap(self, *args, **kwargs): # real signature unknown
pass
def sizeHint(self, *args, **kwargs): # real signature unknown
pass
def updateExtenderGeometry(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
auxDataRoles = None # (!) real value is ''
ShowExtensionIndicatorRole = 232
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247971765/PyKDE4/kdeui/KExtendableItemDelegate.py
|
Python
|
gpl-2.0
| 1,751
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
author by jacksyen[hyqiu.syen@gmail.com]
---------------------------------------
Helper class for sending emails
"""
import smtplib
from email.header import Header
from email.MIMEText import MIMEText
from email.MIMEMultipart import MIMEMultipart
from log import logger
import helper.aop as aop
import webglobal.globals as gk7
class SendMail:
def __init__(self):
        # Mail service
self.server = smtplib.SMTP(gk7.EMAIL.get('smtp'), gk7.EMAIL.get('port'), timeout=gk7.EMAIL.get('timeout'))
self.server.starttls()
self.server.login(gk7.EMAIL.get('user'), gk7.EMAIL.get('pwd'))
self.from_mail = gk7.EMAIL.get('user')
self.encode = gk7.EMAIL.get('encode')
@aop.exec_time
def close(self):
        if self.server:
            self.server.quit()
            self.server.close()
'''
    Send an email. Raises an exception on failure.
    file_path: path of the attachment
    tomail: recipient's email address
    title: mail subject
    author: author (used as the sender's display name)
'''
@aop.exec_time
def send(self, file_path, tomail, title, author):
file_name = file_path.split('/')[-1]
        # Build the message with the attachment
msg = MIMEMultipart()
msg['From'] = ("%s <" + self.from_mail +">") %(Header(author, self.encode),)
msg['To'] = tomail
msg['Subject'] = Header(title, self.encode)
#msg["Accept-Language"]="zh-CN"
#msg["Accept-Charset"]="ISO-8859-1,utf-8"
att = MIMEText(open(file_path, 'rb').read(), 'base64', self.encode)
att["Content-Type"] = 'application/octet-stream'
att.add_header("Content-Disposition", "attachment", filename = file_name.encode('utf-8'))
msg.attach(att)
        # Send the mail
        try:
            logger.info(u'Start sending mail to %s...', tomail)
            self.server.sendmail(msg['From'], tomail, msg.as_string())
            logger.info(u'Finished sending mail to %s', tomail)
        except Exception, err:
            logger.error(u'Failed to send mail to %s: %s', tomail, err)
            raise Exception, 'Failed to send mail to %s: %s' % (tomail, err)
finally:
self.close()
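

# --- Editor's note: a hypothetical usage sketch; the file path, address,
# title and author below are made up, and the gk7 EMAIL settings imported
# above must be configured for this to actually send anything.
if __name__ == '__main__':
    mailer = SendMail()
    mailer.send('/tmp/book.mobi', 'reader@example.com', u'My Book', u'gk7')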
|
liujianpc/gk7-douban
|
helper/mail.py
|
Python
|
gpl-2.0
| 2,160
|
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Ales Nosek <ales.nosek@gmail.com>
#
# This file is part of LinuxBand.
#
# LinuxBand is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This file includes code found in MMA's 12.01 parse.py file,
written by Bob van der Poel <bob@mellowood.ca>
This module does all file parsing. Most commands
are passed to the track classes; however, things
like TIME, SEQRND, etc. which just set global flags
are completely handled here.
"""
from linuxband.glob import Glob
from linuxband.mma.bar_chords import BarChords
from linuxband.mma.bar_info import BarInfo
from linuxband.mma.song_data import SongData
########################################
# File processing. Mostly jumps to pats
########################################
def parse(inpath):
"""
Process a mma input file.
"""
song_bar_info = []
song_bar_chords = []
song_bar_count = 0
bar_number = 0
bar_info = BarInfo()
bar_chords = BarChords()
while True:
curline = inpath.readline()
# EOF
if not curline:
            song_bar_info.append(bar_info) # song_bar_info always has one element more than song_bar_chords
song_bar_count = bar_number
return SongData(song_bar_info, song_bar_chords, song_bar_count)
""" convert 0xa0 (non-breakable space) to 0x20 (regular space).
"""
curline = curline.replace('\xa0', '\x20')
# empty line
if curline.rstrip('\n').strip() == '':
bar_info.add_line([Glob.A_UNKNOWN, curline])
continue
l = curline.split()
# line beginning with macro
if l[0][0] == '$':
wline = get_wrapped_line(inpath, curline)
wline.insert(0, Glob.A_UNKNOWN)
bar_info.add_line(wline)
continue
""" Handle BEGIN and END here. This is outside of the Repeat/End
and variable expand loops so SHOULD be pretty bullet proof.
Note that the beginData stuff is global to this module ... the
Include/Use directives check to make sure we're not doing that
inside a Begin/End.
beginData[] is a list which we append to as more Begins are
encountered.
The placement here is pretty deliberate. Variable expand comes
later so you can't macroize BEGIN ... I think this makes sense.
The tests for 'begin', 'end' and the appending of the current
begin[] stuff have to be here, in this order.
"""
action = l[0].upper() # 1st arg in line
# parse BEGIN and END block
if action == 'BEGIN':
block_action = l[1].upper()
begin_block = parse_begin_block(inpath, curline)
if block_action in supported_block_actions:
tokens = parse_supported_block_action(block_action, begin_block)
begin_block = tokens
begin_block.insert(0, Glob.A_BEGIN_BLOCK)
begin_block.insert(1, block_action)
bar_info.add_line(begin_block)
continue
# parse MSET block
if action == 'MSET':
mset_block = parse_mset_block(inpath, curline)
mset_block.insert(0, Glob.A_UNKNOWN)
bar_info.add_line(mset_block)
continue
# parse IF - ENDIF block
if action == 'IF':
if_block = parse_if_block(inpath, curline)
if_block.insert(0, Glob.A_UNKNOWN)
bar_info.add_line(if_block)
continue
# supported commands
if action in supported_actions:
wline = get_wrapped_line_join(inpath, curline)
tokens = parse_supported_action(action, wline)
tokens.insert(0, action)
bar_info.add_line(tokens)
continue
# if the command is in the simple function table
if action in simple_funcs:
wline = get_wrapped_line(inpath, curline)
wline.insert(0, Glob.A_UNKNOWN)
bar_info.add_line(wline)
continue
""" We have several possibilities ...
1. The command is a valid assigned track name,
2. The command is a valid track name, but needs to be
dynamically allocated,
3. It's really a chord action
"""
# track function BASS/DRUM/APEGGIO/CHORD ...
if '-' in action:
trk_class = action.split('-', 1)[0]
else:
trk_class = action
if trk_class in trk_classes:
            # parsing a track sequence? (need at least two tokens before reading l[1])
            parse_seq = len(l) >= 2 and l[1].upper() == 'SEQUENCE'
wline = []
while True:
wline.extend(get_wrapped_line(inpath, curline))
if not parse_seq: break
""" Count the number of { and } and if they don't match read more lines and
append. If we get to the EOF then we're screwed and we error out. """
wline2 = ''.join(wline)
if wline2.count('{') == wline2.count('}'): break
curline = inpath.readline()
if not curline:
raise ValueError("Reached EOF, Sequence {}s do not match")
wline.insert(0, Glob.A_UNKNOWN)
bar_info.add_line(wline)
continue
# join the wrapped line into one line
wline = get_wrapped_line_join(inpath, curline)
if wline[0].replace('\\\n', '').strip() == '':
# line is a comment or empty wrapped line
act = Glob.A_REMARK if wline[1].strip() else Glob.A_UNKNOWN
bar_info.add_line([act, wline[0], wline[1]])
continue
l, eol = wline
### Gotta be a chord data line!
""" A data line can have an optional bar number at the start
of the line. Makes debugging input easier. The next
block strips leading integers off the line. Note that
a line number on a line by itself it okay.
"""
before_number = ''
if action.isdigit(): # isdigit() matches '1', '1234' but not '1a'!
l2 = l.lstrip()
before_number_len = len(l) - len(l2)
before_number = l[0:before_number_len]
l = l2
numstr = l.split()[0]
bar_chords.set_number(int(numstr))
l = l[len(numstr):] # remove number
if len(l.strip()) == 0: # ignore empty lines
bar_info.add_line([ Glob.A_UNKNOWN, wline[0] + wline[1] ])
continue
""" We now have a valid line. It'll look something like:
'Cm', '/', 'z', 'F#@4.5' { lyrics } [ solo ] * 2
Special processing in needed for 'z' options in chords. A 'z' can
be of the form 'CHORDzX', 'z!' or just 'z'.
"""
after_number = None
last_chord = []
ctable = []
i = 0
solo_count = 0
lyrics_count = 0
mismatched_solo = "Mismatched {}s for solo found in chord line"
mismatched_lyrics = "Mismatched []s for lyrics found in chord line"
while True:
chars = ''
while i < len(l):
ch = l[i]
if ch == '{':
""" Extract solo(s) from line ... this is anything in {}s.
The solo data is pushed into RIFFs and discarded from
the current line.
"""
solo_count += 1
elif ch == '[':
""" Set lyrics from [stuff] in the current line.
NOTE: lyric.extract() inserts previously created
data from LYRICS SET and inserts the chord names
if that flag is active.
"""
lyrics_count += 1
elif ch == '}':
solo_count -= 1
if solo_count < 0:
raise ValueError(mismatched_solo)
elif ch == ']':
lyrics_count -= 1
if lyrics_count < 0:
raise ValueError(mismatched_lyrics)
elif ch == '*':
""" A bar can have an optional repeat count. This must
be at the end of bar in the form '* xx'.
"""
pass
elif ch in '\t\n\\ 0123456789': # white spaces, \ and repeat count
pass
elif solo_count == 0 and lyrics_count == 0: # found beginning of the chord
break
chars += ch
i += 1
if i == len(l): # no more chord is coming
if solo_count != 0:
raise ValueError(mismatched_solo)
if lyrics_count != 0:
raise ValueError(mismatched_lyrics)
if after_number is None:
after_number = chars
else:
last_chord.append(chars)
ctable.append(last_chord)
break
else: # chord beginning
if after_number is None:
after_number = chars
else:
last_chord.append(chars)
ctable.append(last_chord)
chord_begin = i
# find the end of the chord
while i < len(l):
if l[i] in '{}[]*\t\n\\ ':
break
i += 1
# chord examples: '/', 'z', 'Am7@2', 'Am6zC@3'
c = l[chord_begin:i]
last_chord = [ c ]
# the trailing string of the last chord can possibly include '\n' after which
# it would be difficult to add further chords. Therefore move the trailing string
# of the last chord to eol
eol = last_chord[1] + eol
last_chord[1] = ''
bar_chords.set_before_number(before_number)
bar_chords.set_after_number(after_number)
bar_chords.set_eol(eol)
bar_chords.set_chords(ctable)
song_bar_info.append(bar_info)
song_bar_chords.append(bar_chords)
bar_number = bar_number + 1
bar_info = BarInfo()
bar_chords = BarChords()
def get_wrapped_line(inpath, curline):
"""
Reads the whole wrapped line ('\' at the end) and stores it in a list.
The lines in the list are not modified and are the same as in the file
"""
result = []
while True:
if not curline:
raise ValueError("Reached EOF, the last line is not complete")
result.append(curline)
curline = curline.strip()
if not curline or not(curline[-1] == '\\'):
break
curline = inpath.readline()
return result
def get_wrapped_line_join(inpath, curline):
"""
Reads the wrapped line and joins it into one.
Returns array of two strings:
1) the line content which will be further parsed
2) comment with '\n' at the end
If you join those strings you get exactly what was stored in the file
"""
wrapped = get_wrapped_line(inpath, curline)
line = ''
comment = ''
i = 0
while i < len(wrapped):
l = wrapped[i]
if comment:
comment = comment + l
else:
if '//' in l:
l, comm = l.split('//', 1)
comment = '//' + comm
line = line + l
else:
line = line + l
i = i + 1
return [line, comment]
def parse_begin_block(inpath, curline):
beginDepth = 1
result = [ curline ]
while True:
curline = inpath.readline()
if not curline:
raise ValueError("Reached EOF while looking for End")
l = curline.split()
action = None
if len(l) > 0:
action = l[0].upper()
if action == 'BEGIN':
beginDepth = beginDepth + 1
if action == 'END':
beginDepth = beginDepth - 1
result.append(curline)
if beginDepth == 0:
break
return result
def parse_mset_block(inpath, curline):
l = curline.split()
if len(l) < 2:
raise ValueError("Use: MSET VARIABLE_NAME <lines> MsetEnd")
result = [curline]
while True:
curline = inpath.readline()
if not curline:
raise ValueError("Reached EOF while looking for MSetEnd")
l = curline.split()
action = None
if len(l) > 0:
action = l[0].upper()
result.append(curline)
if action in ("MSETEND", 'ENDMSET'):
break
return result
def parse_if_block(inpath, curline):
ifDepth = 1
result = [curline]
while True:
curline = inpath.readline()
if not curline:
raise ValueError("Reached EOF while looking for EndIf")
l = curline.split()
action = None
if len(l) > 0:
action = l[0].upper()
if action == 'IF':
ifDepth = ifDepth + 1
if action in ('ENDIF', 'IFEND'):
ifDepth = ifDepth - 1
result.append(curline)
if ifDepth == 0:
break
return result
def parse_supported_action(action, wline):
line = []
if action == Glob.A_AUTHOR: # ['Author', ' Bob van der Poel\n']
line = tokenize_line(wline[0], 1)
elif action == Glob.A_DEF_GROOVE: # ['DefGroove', ' ', 'ModernJazz', ' ModernJazz with just a piano and guitar.\n']
line = tokenize_line(wline[0], 2)
elif action == Glob.A_GROOVE: # ['Groove', ' ', 'Tango', ' LightTango LightTangoSus LightTango\n']
line = tokenize_line(wline[0], 2)
elif action == Glob.A_REPEAT: # nothing to parse
line = [ wline[0] ]
elif action == Glob.A_REPEAT_END: # ['RepeatEnd', ' ', '2', '\n'] or ['RepeatEnd', '\n' ]
line = tokenize_line(wline[0], 2)
elif action == Glob.A_REPEAT_ENDING: #
line = tokenize_line(wline[0], 2)
elif action == Glob.A_TEMPO: # ['Tempo', ' ', '120', '\n']
line = tokenize_line(wline[0], 2)
elif action == Glob.A_TIME: # ['Time', ' ', '4'. '\n' ]
line = tokenize_line(wline[0], 2)
line.append(wline[1])
return line
def parse_supported_block_action(block_action, begin_block):
return [ begin_block[0], ''.join(begin_block[1:-1]), begin_block[-1] ]
def tokenize_line(line, limit):
"""
Split the line into tokens and characters in between.
Example:
['Time', ' ', '4', '\n']
['Timesig', ' ', '4', ' ', '4', '\n']
['DefGroove', ' ', 'ModernJazz', ' ModernJazz with just a piano and guitar.\n']
"""
chars_between = '\t\n\\ '
tokenized_line = []
count = 0
start = 0
end = 0
read_token = True
while start < len(line):
if read_token:
while end < len(line) and line[end] not in chars_between:
end += 1
tokenized_line.append(line[start:end])
count += 1
if count == limit:
tokenized_line.append(line[end:])
break
else:
while end < len(line) and line[end] in chars_between:
end += 1
tokenized_line.append(line[start:end])
read_token = not read_token
start = end
return tokenized_line
""" =================================================================
Command jump tables. These need to be at the end of this module
to avoid undefined name errors. The tables are only used in
the parse() function.
The first table is for the simple commands ... those which DO NOT
have a leading track name. The second table is for commands which
require a leading track name.
The alphabetic order is NOT needed, just convenient.
"""
simple_funcs = \
'ADJUSTVOLUME', \
'ALLGROOVES', \
'ALLTRACKS', \
'AUTHOR', \
'AUTOSOLOTRACKS', \
'BEATADJUST', \
'CHANNELPREF', \
'CHORDADJUST', \
'COMMENT', \
'CRESC', \
'CUT', \
'DEBUG', \
'DEC', \
'DECRESC', \
'DEFALIAS', \
'DEFCHORD', \
'DEFGROOVE', \
'DELETE', \
'DOC', \
'DOCVAR', \
'DRUMVOLTR', \
'ELSE', \
'ENDIF', \
'ENDMSET', \
'ENDREPEAT', \
'EOF', \
'FERMATA', \
'GOTO', \
'GROOVE', \
'GROOVECLEAR', \
'IF', \
'IFEND', \
'INC', \
'INCLUDE', \
'KEYSIG', \
'LABEL', \
'LYRIC', \
'MIDIDEF', \
'MIDI', \
    'MIDICOPYRIGHT', \
    'MIDICUE', \
    'MIDIFILE', \
    'MIDIINC', \
    'MIDIMARK', \
    'MIDISPLIT', \
    'MIDITEXT', \
    'MIDITNAME', \
'MMAEND', \
'MMASTART', \
'MSET', \
'MSETEND', \
'NEWSET', \
'PATCH', \
'PRINT', \
'PRINTACTIVE', \
'PRINTCHORD', \
'REPEAT', \
'REPEATEND', \
'REPEATENDING', \
'RESTART', \
'RNDSEED', \
'RNDSET', \
'SEQ', \
'SEQCLEAR', \
'SEQRND', \
'SEQRNDWEIGHT', \
'SEQSIZE', \
'SET', \
'SETAUTOLIBPATH', \
'SETINCPATH', \
'SETLIBPATH', \
'SETMIDIPLAYER', \
'SETOUTPATH', \
'SETSYNCTONE', \
'SHOWVARS', \
'STACKVALUE', \
'SWELL', \
'SWINGMODE', \
'SYNCHRONIZE', \
'TEMPO', \
'TIME', \
'TIMESIG', \
'TONETR', \
'TRUNCATE', \
'UNSET', \
'USE', \
'VARCLEAR', \
'VEXPAND', \
'VOICEVOLTR', \
'VOICETR', \
'VOLUME', \
'TRANSPOSE'
trackFuncs = \
'ACCENT', \
    'ARPEGGIATE', \
    'ARTICULATE', \
'CHANNEL', \
'DUPRIFF', \
'MIDIVOLUME', \
'MIDICRESC', \
'MIDIDECRESC', \
'CHSHARE', \
'COMPRESS', \
'COPY', \
'CRESC', \
'CUT', \
'DECRESC', \
'DELAY', \
'DIRECTION', \
'DRUMTYPE', \
'DUPROOT', \
'FORCEOUT', \
'GROOVE', \
'HARMONY', \
'HARMONYONLY', \
'HARMONYVOLUME', \
'INVERT', \
'LIMIT', \
'MALLET', \
    'MIDICLEAR', \
    'MIDICUE', \
'MIDIDEF', \
'MIDIGLIS', \
'MIDIPAN', \
'MIDISEQ', \
    'MIDITEXT', \
'MIDITNAME', \
'MIDIVOICE', \
'OCTAVE', \
'OFF', \
'ON', \
    'ORNAMENT', \
    'TUNING', \
    'CAPO', \
'RANGE', \
'RESTART', \
'RIFF', \
'RSKIP', \
'RTIME', \
'RVOLUME', \
'SCALETYPE', \
'SEQCLEAR', \
'SEQRND', \
'SEQUENCE', \
'SEQRNDWEIGHT', \
'SWELL', \
    'MIDINOTE', \
'NOTESPAN', \
'STRUM', \
'TONE', \
'UNIFY', \
'VOICE', \
'VOICING', \
'VOLUME', \
'DEFINE'
trk_classes = \
'BASS', \
'CHORD', \
'ARPEGGIO', \
'SCALE', \
'DRUM', \
'WALK', \
'MELODY', \
'SOLO', \
'ARIA', \
'PLECTRUM'
supported_actions = \
Glob.A_AUTHOR, \
Glob.A_DEF_GROOVE, \
Glob.A_GROOVE, \
Glob.A_REPEAT, \
Glob.A_REPEAT_END, \
Glob.A_REPEAT_ENDING, \
Glob.A_TEMPO, \
Glob.A_TIME
supported_block_actions = \
Glob.A_DOC
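

# --- Editor's note: a minimal, self-contained check of two helpers above;
# illustrative only, not part of the original module (Python 2, like the
# rest of this file).
if __name__ == '__main__':
    from StringIO import StringIO
    f = StringIO('Groove Tango \\\n LightTango // comment\n')
    # the wrapped line is joined, with the trailing comment split off
    assert get_wrapped_line_join(f, f.readline()) == \
        ['Groove Tango \\\n LightTango ', '// comment\n']
    # tokens and the whitespace between them are both preserved
    assert tokenize_line('Tempo 120\n', 2) == ['Tempo', ' ', '120', '\n']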
|
noseka1/linuxband
|
src/main/python/linuxband/mma/parse.py
|
Python
|
gpl-2.0
| 19,612
|
# -*- coding: utf-8 -*-
import argparse
import importlib
def server():
parser = argparse.ArgumentParser()
parser.add_argument('--host', help="listen host")
parser.add_argument('--port', help="listen port")
args = parser.parse_args()
service_server = importlib.import_module("koenig.server")
service_server.server(args.host, args.port).serve()
def client():
parser = argparse.ArgumentParser()
parser.add_argument('--host', help="listen host")
parser.add_argument('--port', help="listen port")
args = parser.parse_args()
service_client = importlib.import_module("koenig")
with service_client.koenig_client(args.host, args.port) as c:
c.ping()
import IPython
IPython.embed()
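

# --- Editor's note: these two functions look like console entry points; a
# hypothetical setup.py stanza (not shown in this file) might wire them up as:
#
#   entry_points={
#       'console_scripts': [
#           'koenig-server = koenig.utils.console:server',
#           'koenig-client = koenig.utils.console:client',
#       ],
#   }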
|
streethacker/koenig
|
koenig/utils/console.py
|
Python
|
gpl-2.0
| 751
|
# -*- coding: utf-8 -*-
cont = 0
for i in range(5):
valor = int(raw_input())
if (valor % 2 == 0):
cont = cont + 1
print("%d valores pares" % cont)
|
bergolho1337/URI-Online-Judge
|
Basicos/Python/1065/main.py
|
Python
|
gpl-2.0
| 163
|
#!/usr/bin/python
import xml.etree.ElementTree as ET
import os, sys, magic
def getFileType(filename):
# cmd = shlex.split('file --mime-type {0}'.format(filename))
# mime_type = subprocess.check_output(cmd).split()[-1]
# return mime_type
return magic.from_file(filename)
def main(argv):
if len(argv) >= 1:
xmlFile = argv[0]
if getFileType(xmlFile).upper().find('XML') != -1:
xmlTree = ET.parse(xmlFile)
xml = xmlTree.getroot()
fecha = xml.get('fecha').replace("-","").replace("T","_").replace(":","")
noCertificado = xml.get('noCertificado')
subTotal = xml.get('subTotal')
total = xml.get('total')
emisorRFC = xml.find('{http://www.sat.gob.mx/cfd/3}Emisor').get('rfc')
emisorNombre = xml.find('{http://www.sat.gob.mx/cfd/3}Emisor').get('nombre')
receptorRFC = xml.find('{http://www.sat.gob.mx/cfd/3}Receptor').get('rfc')
receptorNombre = xml.find('{http://www.sat.gob.mx/cfd/3}Receptor').get('nombre')
impuestos = xml.find('{http://www.sat.gob.mx/cfd/3}Impuestos').get('totalImpuestosTrasladados')
newFileName = "%s_%s_%s_%s_%s_%s_%s" % (fecha, noCertificado, emisorRFC, receptorRFC, subTotal, impuestos, total)
print newFileName
else:
print "Debe especificarse un archivo en formato XML"
else:
print "Debe especificarse el nombre del archivo de entrada"
print "$ setGetCfdi.py archivo.xml"
if __name__ == "__main__":
main(sys.argv[1:])
|
hackob/read-cfdi-files
|
setGetCfdi.py
|
Python
|
gpl-2.0
| 1,624
|
# -*- coding: utf-8 -*-
# =============================================================================
# Federal University of Rio Grande do Sul (UFRGS)
# Connectionist Artificial Intelligence Laboratory (LIAC)
# Renato de Pontes Pereira - rppereira@inf.ufrgs.br
# =============================================================================
# Copyright (c) 2011 Renato de Pontes Pereira, renato.ppontes at gmail dot com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# =============================================================================
'''
The liac-arff module implements functions to read and write ARFF files in
Python. It was created in the Connectionist Artificial Intelligence Laboratory
(LIAC), which takes place at the Federal University of Rio Grande do Sul
(UFRGS), in Brazil.
ARFF (Attribute-Relation File Format) is a file format specially created to
describe datasets commonly used in machine learning experiments and software.
This file format was created to be used in Weka, the best-known software for
automated machine learning experiments.
An ARFF file can be divided into two sections: header and data. The Header
describes the metadata of the dataset, including a general description of the
dataset, its name and its attributes. The source below is an example of a
header section in an XOR dataset::
%
% XOR Dataset
%
% Created by Renato Pereira
% rppereira@inf.ufrgs.br
% http://inf.ufrgs.br/~rppereira
%
%
@RELATION XOR
@ATTRIBUTE input1 REAL
@ATTRIBUTE input2 REAL
@ATTRIBUTE y REAL
The Data section of an ARFF file describes the observations of the dataset; in
the case of the XOR dataset::
@DATA
0.0,0.0,0.0
0.0,1.0,1.0
1.0,0.0,1.0
1.0,1.0,0.0
%
%
%
Notice that several lines start with a ``%`` symbol, denoting a comment; thus,
lines beginning with ``%`` are ignored, except for the description part at the
beginning of the file. The declarations ``@RELATION``, ``@ATTRIBUTE``, and
``@DATA`` are all case insensitive and obligatory.
For more information and details about the ARFF file description, consult
http://www.cs.waikato.ac.nz/~ml/weka/arff.html
ARFF Files in Python
~~~~~~~~~~~~~~~~~~~~
This module uses built-in Python objects to represent a deserialized ARFF
file. A dictionary is used as the container of the data and metadata of ARFF,
and has the following keys:
- **description**: (OPTIONAL) a string with the description of the dataset.
- **relation**: (OBLIGATORY) a string with the name of the dataset.
- **attributes**: (OBLIGATORY) a list of attributes with the following
template::
(attribute_name, attribute_type)
    the attribute_name is a string, and attribute_type must be a string
    or a list of strings.
- **data**: (OBLIGATORY) a list of data instances. Each data instance must be
a list with values, depending on the attributes.
The above keys must follow the case described, i.e., the keys are case
sensitive. The attribute type ``attribute_type`` must be one of these strings
(they are not case sensitive): ``NUMERIC``, ``INTEGER``, ``REAL`` or
``STRING``. For nominal attributes, the ``attribute_type`` must be a list of
strings.
In this format, the XOR dataset presented above can be represented as a Python
object as::
xor_dataset = {
'description': 'XOR Dataset',
'relation': 'XOR',
'attributes': [
('input1', 'REAL'),
('input2', 'REAL'),
('y', 'REAL'),
],
'data': [
[0.0, 0.0, 0.0],
[0.0, 1.0, 1.0],
[1.0, 0.0, 1.0],
[1.0, 1.0, 0.0]
]
}
Features
~~~~~~~~
This module provides several features, including:
- Read and write ARFF files using Python built-in structures, such as
  dictionaries and lists;
- Supports the following attribute types: NUMERIC, REAL, INTEGER, STRING, and
NOMINAL;
- Has an interface similar to other built-in modules such as ``json``, or
``zipfile``;
- Supports read and write the descriptions of files;
- Supports missing values and names with spaces;
- Supports unicode values and names;
- Fully compatible with Python 2.6+ and Python 3.4+;
- Under `MIT License <http://opensource.org/licenses/MIT>`_
'''
__author__ = 'ambrosys'
__author_email__ = 'renato.ppontes@gmail.com'
__version__ = '2.0.1'
import re
import csv
import sys
# CONSTANTS ===================================================================
_SIMPLE_TYPES = ['NUMERIC', 'REAL', 'INTEGER', 'STRING']
_TK_DESCRIPTION = '%'
_TK_COMMENT = '%'
_TK_RELATION = '@RELATION'
_TK_ATTRIBUTE = '@ATTRIBUTE'
_TK_DATA = '@DATA'
_TK_VALUE = ''
_RE_RELATION = re.compile(r'^(\".*\"|\'.*\'|\S*)$', re.UNICODE)
_RE_ATTRIBUTE = re.compile(r'^(\".*\"|\'.*\'|\S*)\s+(.+)$', re.UNICODE)
_RE_TYPE_NOMINAL = re.compile(r'^\{\s*((\".*\"|\'.*\'|\S*)\s*,\s*)*(\".*\"|\'.*\'|\S*)}$', re.UNICODE)
_RE_ESCAPE = re.compile(r'\\\'|\\\"|\\\%|[\\"\'%]')
_ESCAPE_DCT = {
' ': ' ',
"'": "\\'",
'"': '\\"',
'%': '\\%',
'\\': '\\',
'\\\'': '\\\'',
'\\"': '\\"',
'\\%': '\\%',
}
# =============================================================================
# COMPATIBILITY WITH PYTHON 3.3 ===============================================
# NOTE: ``__builtins__`` may be a dict or a module depending on how this file
# is loaded, so test the interpreter version instead of probing __builtins__.
if sys.version_info[0] >= 3:
    unicode = str
    basestring = str
    xrange = range
# =============================================================================
# EXCEPTIONS ==================================================================
class ArffException(Exception):
message = None
def __init__(self):
self.line = -1
def __str__(self):
return self.message%self.line
class BadRelationFormat(ArffException):
'''Error raised when the relation declaration is in an invalid format.'''
message = 'Bad @RELATION format, at line %d.'
class BadAttributeFormat(ArffException):
'''Error raised when some attribute declaration is in an invalid format.'''
    message = 'Bad @ATTRIBUTE format, at line %d.'
class BadDataFormat(ArffException):
'''Error raised when some data instance is in an invalid format.'''
message = 'Bad @DATA instance format, at line %d.'
class BadAttributeType(ArffException):
'''Error raised when some invalid type is provided into the attribute
declaration.'''
message = 'Bad @ATTRIBUTE type, at line %d.'
class BadNominalValue(ArffException):
    '''Error raised when a value is used in some data instance but is not
    declared in its respective attribute declaration.'''
message = 'Data value not found in nominal declaration, at line %d.'
class BadNumericalValue(ArffException):
    '''Error raised when an invalid numerical value is used in some data
    instance.'''
message = 'Invalid numerical value, at line %d.'
class BadLayout(ArffException):
'''Error raised when the layout of the ARFF file has something wrong.'''
message = 'Invalid layout of the ARFF file, at line %d.'
class BadObject(ArffException):
    '''Error raised when the object representing the ARFF file has something
    wrong.'''
    def __init__(self, msg='Invalid object.'):
        self.msg = msg
    def __str__(self):
        return '%s' % self.msg
# =============================================================================
# INTERNAL ====================================================================
def encode_string(s):
def replace(match):
return _ESCAPE_DCT[match.group(0)]
return u"'" + _RE_ESCAPE.sub(replace, s) + u"'"
class Conversor(object):
'''Conversor is a helper used for converting ARFF types to Python types.'''
def __init__(self, type_, values=None):
        '''Constructor.'''
self.values = values
if type_ == 'NUMERIC' or type_ == 'REAL':
self._conversor = self._float
elif type_ == 'STRING':
self._conversor = self._string
elif type_ == 'INTEGER':
self._conversor = self._integer
elif type_ == 'NOMINAL':
self._conversor = self._nominal
else:
raise BadAttributeType()
def _float(self, value):
'''Convert the value to float.'''
try:
return float(value)
except ValueError as e:
raise BadNumericalValue()
def _integer(self, value):
'''Convert the value to integer.'''
try:
return int(float(value))
except ValueError as e:
raise BadNumericalValue()
def _string(self, value):
'''Convert the value to string.'''
return unicode(value)
def _nominal(self, value):
'''Verify the value of nominal attribute and convert it to string.'''
if value not in self.values:
raise BadNominalValue()
return self._string(value)
def __call__(self, value):
'''Convert a ``value`` to a given type.
        This function also verifies if the value is an empty string or a
        missing value; in either case, it returns None.
'''
value = value.strip(' ').strip('\"\'')
if value == u'?' or value == u'':
return None
return self._conversor(value)
# =============================================================================
# ADVANCED INTERFACE ==========================================================
class ArffDecoder(object):
'''An ARFF decoder.'''
def __init__(self):
'''Constructor.'''
self._conversors = []
self._current_line = 0
def _decode_comment(self, s):
'''(INTERNAL) Decodes a comment line.
Comments are single line strings starting, obligatorily, with the ``%``
character, and can have any symbol, including whitespaces or special
characters.
This method must receive a normalized string, i.e., a string without
padding, including the "\r\n" characters.
:param s: a normalized string.
:return: a string with the decoded comment.
'''
res = re.sub('^\%( )?', '', s)
return res
def _decode_relation(self, s):
'''(INTERNAL) Decodes a relation line.
The relation declaration is a line with the format ``@RELATION
<relation-name>``, where ``relation-name`` is a string. The string must
be quoted if the name includes spaces, otherwise this method will raise
a `BadRelationFormat` exception.
This method must receive a normalized string, i.e., a string without
padding, including the "\r\n" characters.
:param s: a normalized string.
:return: a string with the decoded relation name.
'''
_, v = s.split(' ', 1)
v = v.strip()
if not _RE_RELATION.match(v):
raise BadRelationFormat()
res = unicode(v.strip('"\''))
return res
def _decode_attribute(self, s):
'''(INTERNAL) Decodes an attribute line.
The attribute is the most complex declaration in an arff file. All
attributes must follow the template::
@attribute <attribute-name> <datatype>
where ``attribute-name`` is a string, quoted if the name contains any
whitespace, and ``datatype`` can be:
- Numerical attributes as ``NUMERIC``, ``INTEGER`` or ``REAL``.
- Strings as ``STRING``.
- Dates (NOT IMPLEMENTED).
- Nominal attributes with format:
{<nominal-name1>, <nominal-name2>, <nominal-name3>, ...}
The nominal names follow the rules for the attribute names, i.e., they
must be quoted if the name contains whitespaces.
This method must receive a normalized string, i.e., a string without
padding, including the "\r\n" characters.
:param s: a normalized string.
:return: a tuple (ATTRIBUTE_NAME, TYPE_OR_VALUES).
'''
_, v = s.split(' ', 1)
v = v.strip()
# Verify the general structure of declaration
m = _RE_ATTRIBUTE.match(v)
if not m:
raise BadAttributeFormat()
# Extracts the raw name and type
name, type_ = m.groups()
# Extracts the final name
name = unicode(name.strip('"\''))
# Extracts the final type
if _RE_TYPE_NOMINAL.match(type_):
# If follows the nominal structure, parse with csv reader.
values = next(csv.reader([type_.strip('{} ')]))
values = [unicode(v_.strip(' ').strip('"\'')) for v_ in values]
type_ = values
else:
# If not nominal, verify the type name
type_ = unicode(type_).upper()
if type_ not in ['NUMERIC', 'REAL', 'INTEGER', 'STRING']:
raise BadAttributeType()
return (name, type_)
def _decode_data(self, s):
'''(INTERNAL) Decodes a line of data.
        Data instances follow the csv format, i.e., attribute values are
        delimited by commas. After the csv conversion, this method uses the
        ``_conversors`` list to convert each value. Obviously, the values must
        follow the same order as their respective attributes.
This method must receive a normalized string, i.e., a string without
padding, including the "\r\n" characters.
:param s: a normalized string.
:return: a list with values.
'''
values = next(csv.reader([s.strip(' ')]))
if len(values) != len(self._conversors):
raise BadDataFormat()
values = [self._conversors[i](values[i]) for i in xrange(len(values))]
return values
def _decode(self, s):
        '''Do the actual work of ``decode``.'''
# If string, convert to a list of lines
if isinstance(s, basestring):
s = s.strip('\r\n ').replace('\r\n', '\n').split('\n')
# Create the return object
obj = {
u'description': u'',
u'relation': u'',
u'attributes': [],
u'data': []
}
# Read all lines
STATE = _TK_DESCRIPTION
for row in s:
self._current_line += 1
# Ignore empty lines
row = row.strip(' \r\n')
if not row: continue
u_row = row.upper()
# DESCRIPTION -----------------------------------------------------
if u_row.startswith(_TK_DESCRIPTION) and STATE == _TK_DESCRIPTION:
obj['description'] += self._decode_comment(row) + '\n'
# -----------------------------------------------------------------
# RELATION --------------------------------------------------------
elif u_row.startswith(_TK_RELATION):
if STATE != _TK_DESCRIPTION:
raise BadLayout()
STATE = _TK_RELATION
obj['relation'] = self._decode_relation(row)
# -----------------------------------------------------------------
# ATTRIBUTE -------------------------------------------------------
elif u_row.startswith(_TK_ATTRIBUTE):
if STATE != _TK_RELATION and STATE != _TK_ATTRIBUTE:
raise BadLayout()
STATE = _TK_ATTRIBUTE
attr = self._decode_attribute(row)
obj['attributes'].append(attr)
if isinstance(attr[1], (list, tuple)):
conversor = Conversor('NOMINAL', attr[1])
else:
conversor = Conversor(attr[1])
self._conversors.append(conversor)
# -----------------------------------------------------------------
# DATA ------------------------------------------------------------
elif u_row.startswith(_TK_DATA):
if STATE != _TK_ATTRIBUTE:
raise BadLayout()
STATE = _TK_DATA
# -----------------------------------------------------------------
# COMMENT ---------------------------------------------------------
elif u_row.startswith(_TK_COMMENT):
pass
# -----------------------------------------------------------------
# DATA INSTANCES --------------------------------------------------
elif STATE == _TK_DATA:
obj['data'].append(self._decode_data(row))
# -----------------------------------------------------------------
# UNKNOWN INFORMATION ---------------------------------------------
else:
raise BadLayout()
# -----------------------------------------------------------------
if obj['description'].endswith('\n'):
obj['description'] = obj['description'][:-1]
return obj
def decode(self, s):
'''Returns the Python representation of a given ARFF file.
        When a file object is passed as an argument, this method reads lines
        iteratively, avoiding loading unnecessary information into memory.
:param s: a string or file object with the ARFF file.
'''
try:
return self._decode(s)
except ArffException as e:
# print e
e.line = self._current_line
raise e
class ArffEncoder(object):
'''An ARFF encoder.'''
def _encode_comment(self, s=''):
'''(INTERNAL) Encodes a comment line.
Comments are single line strings starting, obligatorily, with the ``%``
character, and can have any symbol, including whitespaces or special
characters.
If ``s`` is None, this method will simply return an empty comment.
:param s: (OPTIONAL) string.
:return: a string with the encoded comment line.
'''
return u'%s %s'%(_TK_COMMENT, s)
def _encode_relation(self, name):
        '''(INTERNAL) Encodes a relation line.
The relation declaration is a line with the format ``@RELATION
<relation-name>``, where ``relation-name`` is a string.
:param name: a string.
:return: a string with the encoded relation declaration.
'''
if ' ' in name:
name = '"%s"'%name
return u'%s %s'%(_TK_RELATION, name)
def _encode_attribute(self, name, type_):
'''(INTERNAL) Encodes an attribute line.
The attribute follow the template::
@attribute <attribute-name> <datatype>
where ``attribute-name`` is a string, and ``datatype`` can be:
- Numerical attributes as ``NUMERIC``, ``INTEGER`` or ``REAL``.
- Strings as ``STRING``.
- Dates (NOT IMPLEMENTED).
- Nominal attributes with format:
{<nominal-name1>, <nominal-name2>, <nominal-name3>, ...}
        This method must receive the name of the attribute and its type; if
        the attribute type is nominal, ``type_`` must be a list of values.
:param name: a string.
:param type_: a string or a list of string.
:return: a string with the encoded attribute declaration.
'''
if ' ' in name:
name = '"%s"'%name
if isinstance(type_, (tuple, list)):
type_ = [u'"%s"'%t if ' ' in t else u'%s'%t for t in type_]
type_ = u'{%s}'%(u', '.join(type_))
return u'%s %s %s'%(_TK_ATTRIBUTE, name, type_)
def _encode_data(self, data):
'''(INTERNAL) Encodes a line of data.
        Data instances follow the csv format, i.e., attribute values are
        delimited by commas.
:param data: a list of values.
:return: a string with the encoded data line.
'''
new_data = []
for v in data:
s = unicode(v)
for escape_char in _ESCAPE_DCT:
if escape_char in s:
s = encode_string(s)
break
new_data.append(s)
return u','.join(new_data)
def encode(self, obj):
'''Encodes a given object to an ARFF file.
:param obj: the object containing the ARFF information.
        :return: the ARFF file as a unicode string.
'''
data = [row for row in self.iter_encode(obj)]
return u'\n'.join(data)
def iter_encode(self, obj):
'''The iterative version of `arff.ArffEncoder.encode`.
        This iteratively encodes a given object and yields, one by one, the
        lines of the ARFF file.
:param obj: the object containing the ARFF information.
:return: (yields) the ARFF file as unicode strings.
'''
# DESCRIPTION
if obj.get('description', None):
for row in obj['description'].split('\n'):
yield self._encode_comment(row)
# RELATION
if not obj.get('relation'):
raise BadObject('Relation name not found or with invalid value.')
yield self._encode_relation(obj['relation'])
yield u''
# ATTRIBUTES
if not obj.get('attributes'):
raise BadObject('Attributes not found.')
for attr in obj['attributes']:
# Verify for bad object format
if not isinstance(attr, (tuple, list)) or \
len(attr) != 2 or \
not isinstance(attr[0], basestring):
raise BadObject('Invalid attribute declaration "%s"'%str(attr))
if isinstance(attr[1], basestring):
# Verify for invalid types
if attr[1] not in _SIMPLE_TYPES:
raise BadObject('Invalid attribute type "%s"'%str(attr))
# Verify for bad object format
elif not isinstance(attr[1], (tuple, list)):
raise BadObject('Invalid attribute type "%s"'%str(attr))
yield self._encode_attribute(attr[0], attr[1])
yield u''
# DATA
yield _TK_DATA
if not obj.get('data'):
raise BadObject('Data declaration not found.')
for inst in obj['data']:
yield self._encode_data(inst)
# FILLER
yield self._encode_comment()
yield self._encode_comment()
yield self._encode_comment()
# =============================================================================
# BASIC INTERFACE =============================================================
def load(fp):
'''Load a file-like object containing the ARFF document and convert it into
a Python object.
:param fp: a file-like object.
:return: a dictionary.
'''
decoder = ArffDecoder()
return decoder.decode(fp)
def loads(s):
'''Convert a string instance containing the ARFF document into a Python
object.
:param s: a string object.
:return: a dictionary.
'''
decoder = ArffDecoder()
return decoder.decode(s)
def dump(obj, fp):
'''Serialize an object representing the ARFF document to a given file-like
object.
:param obj: a dictionary.
:param fp: a file-like object.
'''
encoder = ArffEncoder()
generator = encoder.iter_encode(obj)
    last_row = next(generator)  # built-in next() works on Python 2 and 3
for row in generator:
fp.write(last_row + u'\n')
last_row = row
fp.write(last_row)
return fp
def dumps(obj):
'''Serialize an object representing the ARFF document, returning a string.
:param obj: a dictionary.
:return: a string with the ARFF document.
'''
encoder = ArffEncoder()
return encoder.encode(obj)
# =============================================================================
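

# --- Editor's note: a minimal usage sketch of the basic interface above;
# illustrative only, not part of the original module.
if __name__ == '__main__':
    xor = {
        'description': 'XOR Dataset',
        'relation': 'XOR',
        'attributes': [('input1', 'REAL'), ('input2', 'REAL'), ('y', 'REAL')],
        'data': [[0.0, 0.0, 0.0], [0.0, 1.0, 1.0], [1.0, 0.0, 1.0], [1.0, 1.0, 0.0]],
    }
    text = dumps(xor)   # serialize the dictionary to an ARFF string
    obj = loads(text)   # parse it back into a dictionary
    assert obj['relation'] == 'XOR'
    assert obj['data'] == xor['data']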
|
Ambrosys/climatelearn
|
climatelearn/learning/arf.py
|
Python
|
gpl-2.0
| 24,951
|
"""Library to handle database interactions.
The master list of variants are stored in M tables named master_chrom_1, master_chrom_2, ... master_chrom_M
Columns::
rowid - corresponds to index
pos -
stop -
ref -
alt -
p - probability of variant (to make the sfs)
Sample data is stored in M tables named sample_chrom_1, ....
Columns::
rowid - sample id
gen - generation
serial - serial within the generation
data - binary blob
Chromosome data is stored as a sequence of 4N bytes
30 bits represent the index and 2 bits represent the genotype
This is stored as a blob in the vlist field
"""
import sqlite3 as sq
import struct
import numpy
import mitty.lib.variants as vr
def sample_table_name(sample_name, chrom_name):
return 'sample_{:s}_chrom_{:s}'.format(sample_name, chrom_name)
def connect(db_name='population.sqlite3'):
"""Connect to the database
:param db_name: The database name
:returns conn: connection object"""
conn = sq.connect(db_name)
conn.text_factory = str # Otherwise our ref and alts will be unicode, bleh!
return conn
def save_master_list(conn, chrom, ml):
"""
:param conn: connection object
:param chrom: chromosome number
:param ml: master list of variants
:return: nothing
THIS WIPES ANY PREVIOUS DATA AND WRITES THE LIST AFRESH
"""
assert type(chrom) == int, 'Chromosome must be a number'
assert chrom > 0, 'Chromosome numbering starts at 1'
assert ml.sorted, 'Master list has not been sorted. Please check your program'
assert len(ml) <= 1073741823, 'Master list has more than 2^30-1 variants.' # I want whoever gets here to mail me: kaushik.ghose@sbgenomics.com
conn.execute("DROP TABLE IF EXISTS master_chrom_{:d}".format(chrom))
conn.execute("CREATE TABLE master_chrom_{:d} (pos INTEGER, stop INTEGER, ref TEXT, alt TEXT, p FLOAT)".format(chrom))
insert_clause = "INSERT INTO master_chrom_{:d} (pos, stop, ref, alt, p) VALUES (?, ?, ?, ?, ?)".format(chrom)
conn.executemany(insert_clause, ml.variants.tolist())
conn.commit()
def load_master_list(conn, chrom):
"""
:param conn: connection object
:param chrom: chromosome number
:returns ml: master list of variants
"""
assert type(chrom) == int, 'Chromosome must be a number'
assert chrom > 0, 'Chromosome numbering starts at 1'
# # Surely numpy has a better way of doing this, but fromiter does not work
# c = []
# for col in ['pos', 'stop', 'ref', 'alt', 'p']:
# c += [[r for r in conn.execute("SELECT {:s} FROM master_chrom_{:d}".format(col, chrom))]]
#ml = vr.VariantList(*c)
ml = vr.VariantList()
try:
rows = [c for c in conn.execute("SELECT * FROM master_chrom_{:d}".format(chrom))]
dtype = [('pos', 'i4'), ('stop', 'i4'), ('ref', 'object'), ('alt', 'object'), ('p', 'f2')]
ml.variants = numpy.array(rows, dtype=dtype)
except sq.OperationalError:
    pass  # TODO: log some kind of warning? Or assume we just don't have any variants for that chromosome?
ml.sorted = True # We assume that this had been sorted etc. before saving
return ml
def save_sample(conn, gen, serial, chrom, variants):
"""Save the sample chromosomes into the database
:param conn: connection object
:param gen: generation
:param serial: serial number within the generation
:param chrom: chromosome number
:param variants: list of tuples as returned by generate_chromosome
:return: rowid corresponding to this sample
Chromosome data is stored as a sequence of 4N bytes
30 bits represent the index and 2 bits represent the genotype
This is stored as a blob in the vlist field
"""
c = conn.cursor()
c.execute("CREATE TABLE IF NOT EXISTS sample_chrom_{:d} (gen INTEGER, serial INTEGER, vlist BLOB)".format(chrom))
c.execute("INSERT INTO sample_chrom_{:d} (gen, serial, vlist) VALUES (?, ?, ?)".format(chrom), (gen, serial,
sq.Binary(struct.pack('{:d}I'.format(len(variants)), *[v[0] << 2 | v[1] for v in variants]))))
conn.commit()
return c.lastrowid
def load_sample(conn, gen, serial, chrom):
"""Load the sample in the database
:param conn: connection object
:param gen: generation
:param serial: serial number within the generation
:param chrom: chromosome number
:return: variants: list of tuples same as returned by generate_chromosome
"""
c = conn.cursor()
try:
c.execute("SELECT vlist FROM sample_chrom_{:d} WHERE gen==? AND serial==?".format(chrom), (gen, serial))
row = next(c, None)
except sq.OperationalError:
row = None # We assume that no such table means, simply, no variants in that chromosome
return [(b >> 2, b & 0x3) for b in struct.unpack('{:d}I'.format(len(row[0]) / 4), row[0])] if row is not None else []
def save_chromosome_metadata(conn, chrom, seq_id, seq_len, seq_md5):
"""Save chromosome sequence metadata in db
:param conn: connection object
:param chrom: chromosome number
:param seq_id: sequence id as found in fasta
:param seq_len: sequence length
:param seq_md5:md5 hash of sequence string
"""
c = conn.cursor()
c.execute("CREATE TABLE IF NOT EXISTS chrom_metadata (chrom INTEGER, seq_id TEXT, seq_len INTEGER, seq_md5 TEXT)")
c.execute("INSERT INTO chrom_metadata (chrom, seq_id, seq_len, seq_md5) VALUES (?, ?, ?, ?)", (chrom, seq_id, seq_len, seq_md5))
conn.commit()
def load_chromosome_metadata(conn, chrom):
"""Load chromosome metadata
:param conn: connection object
:param chrom: chromosome number
:returns chrom, seq_id, seq_len, seq_md5
"""
c = conn.cursor()
c.execute("SELECT chrom, seq_id, seq_len, seq_md5 FROM chrom_metadata WHERE chrom=?", (chrom,))
row = next(c, None)
return row
def chromosomes_in_db(conn):
"""Total number of chromosomes - same as in original genome"""
c = conn.execute("SELECT chrom, seq_id, seq_len, seq_md5 FROM chrom_metadata ORDER BY rowid ASC")
return [row for row in c]
def populated_chromosomes_in_db(conn):
"""Chromosomes with at least one variant in the master list."""
c = conn.execute("SELECT chrom, seq_id, seq_len, seq_md5 FROM chrom_metadata ORDER BY rowid ASC")
return [row for row in c if variants_in_master_list(conn, row[0])]
def variants_in_master_list(conn, chrom):
count = 0
c = conn.execute("SELECT name FROM sqlite_master WHERE TYPE='table' AND name='master_chrom_{:d}'".format(chrom))
if len(c.fetchall()) > 0:
c = conn.execute("SELECT COUNT(rowid) FROM master_chrom_{:d}".format(chrom))
    count = next(c)[0]
return count
def samples_in_db(conn):
c = conn.execute("SELECT name FROM sqlite_master WHERE TYPE='table' AND name LIKE 'sample_chrom_%'")
  table = next(c)[0]
  c = conn.execute("SELECT COUNT(rowid) FROM {:s}".format(table))
  return int(next(c)[0])
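

# --- Editor's note: minimal sketch of the 4-byte packing described in the
# module docstring (30 bits of index, 2 bits of genotype); the values below
# are made up, and this is not part of the original module.
if __name__ == '__main__':
  variants = [(5, 2), (17, 1)]  # (index, genotype) pairs
  blob = struct.pack('{:d}I'.format(len(variants)),
                     *[idx << 2 | gt for idx, gt in variants])
  unpacked = [(b >> 2, b & 0x3)
              for b in struct.unpack('{:d}I'.format(len(blob) / 4), blob)]
  assert unpacked == variants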
|
latticelabs/Mitty
|
mitty/lib/db.py
|
Python
|
gpl-2.0
| 6,769
|
#!/usr/bin/env python3
# coding: utf-8
from __future__ import unicode_literals, print_function
import re
import zlib
import hashlib
from molbiox.algor.traverse import Traverser
class Hasher(object):
"""
Common interface for hashlib & zlib hash functions
"""
__hfuncs__ = {
'adler32': zlib.adler32,
'crc32': zlib.crc32,
'md5': hashlib.md5,
'sha1': hashlib.sha1,
'sha224': hashlib.sha224,
'sha256': hashlib.sha256,
'sha384': hashlib.sha384,
'sha512': hashlib.sha512,
}
__hlens__ = {
32: 'md5',
40: 'sha1',
56: 'sha224',
64: 'sha256',
96: 'sha384',
128: 'sha512',
}
def __init__(self, name):
if name not in self.__hfuncs__:
raise ValueError("'{}' is not a supported hash function".format(name))
        if name in {'adler32', 'crc32'}:
self.hash = getattr(zlib, name) # a checksum function
self.emulate = True
self.im = self.hash(b'') # intermediate result
else:
self.hash = getattr(hashlib, name)() # a hash object
self.emulate = False
self.im = 0 # not used for hashlib functions
def update(self, data):
# hashlib.md* / hashlib.sha*
if not self.emulate:
self.hash.update(data)
return
# zlib.adler32 / zlib.crc32
if self.im is None:
self.im = self.hash(data)
else:
self.im = self.hash(data, self.im)
def hexdigest(self):
# hashlib.md* / hashlib.sha*
if not self.emulate:
return self.hash.hexdigest()
# zlib.adler32 / zlib.crc32
return hex(self.im & 0xffffffff).replace('0x', '')
@classmethod
def from_signature(cls, signature):
name = signature.split(':')[0].lower()
if name not in cls.__hfuncs__:
name = cls.__hlens__.get(len(signature), 'md5')
return cls(name)
class SigDictionary(dict):
"""
A dict with signature (main class in this module)
"""
def __init__(self, *args, **kwargs):
super(SigDictionary, self).__init__(*args, **kwargs)
self._signature = ''
def sign(self, signature):
self._signature = signature.lower()
def inspect(self):
"""
Inspect integrity of a dict object
:raise ValueError: if fail
:return: self
"""
if not self._signature:
return
hasher = Hasher.from_signature(self._signature)
hvalue = re.sub(r'^[a-z0-9]+:', '', self._signature)
if hvalue != self.calc(hasher):
raise ValueError('object is corrupted')
def calc(self, hasher):
"""
:param hasher: a Hasher object
:return: a string (signature)
"""
travs = Traverser(self, lambda x: hasher.update(x.encode('ascii')))
travs.traverse()
return hasher.hexdigest()
def build(self):
hasher = Hasher('md5')
hvalue = self.calc(hasher)
signature = 'md5:{}'.format(hvalue)
self.sign(signature)
return signature
def format(self):
pairs = ((repr(k), repr(v)) for k, v in self.items())
body = ''.join('\t{}:\t{},\n'.format(*p) for p in pairs)
body = '{\n' + body + '}'
if self._signature:
self.inspect() # always make sure of the integrity
return body + " + Sig('{}')".format(self._signature)
else:
return body
def print_(self, *args, **kwargs):
print(self.format(), *args, **kwargs)
class Sig(object):
def __init__(self, signature, cheat=False):
self.signature = signature
self.cheat = cheat
def __add__(self, other):
if not isinstance(other, SigDictionary):
other = SigDictionary(other)
if self.cheat:
return other
other.sign(self.signature)
return other
def __radd__(self, other):
return self + other
def __repr__(self):
return '{}({})'.format(self.__class__.__name__, self.signature)
def __str__(self):
return self.signature
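

# --- Editor's note: minimal illustration of the classes above; not part of
# the original module. crc32(b'abc') is 0x352441c2; the dict contents are
# made up, and determinism of the signature relies on the imported Traverser.
if __name__ == '__main__':
    h = Hasher('crc32')
    h.update(b'abc')
    assert h.hexdigest() == '352441c2'
    d = SigDictionary({'k': 'v'})
    signature = d.build()   # e.g. 'md5:...'
    d.inspect()             # raises ValueError if the contents were altered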
|
frozflame/molbiox
|
molbiox/frame/signature.py
|
Python
|
gpl-2.0
| 4,227
|
#!/usr/bin/env python
#
# sally-annoyifier.py - randomise a time reporting string, for Sally
#
# Copyright (C) 2018 Michael Davies <michael@the-davies.net>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
# 02111-1307, USA.
#
import os
import random
import sys
def _get_project_names():
home = os.environ.get('HOME')
if not home:
print("%s: ${HOME} not defined" % os.path.basename(sys.argv[0]))
print("Exiting...")
sys.exit(1)
filename = home + "/.project-names"
try:
fd = open(filename, 'r')
return [line.rstrip() for line in fd]
except (OSError, IOError) as e:
print("%s: %s" % (os.path.basename(sys.argv[0]), e))
print("Please create this file, with one project name per line")
print("Exiting...")
sys.exit(2)
if __name__ == '__main__':
pn = _get_project_names()
random.shuffle(pn)
print("COP-29: %s standup" % ','.join(pn))
|
mrda/junkcode
|
sally-annoyifier.py
|
Python
|
gpl-2.0
| 1,572
|
from datetime import datetime
from django.contrib.auth.models import User
from django.test import TestCase
from envirocon.envirocon_main.models import GroundWorkClass
class TestGroundWorkClassModels(TestCase):
def setUp(self):
self.gwc = GroundWorkClass()
def test_gwc_init(self):
self.assertIsNotNone(self.gwc)
self.assertEquals(unicode(self.gwc.faculty_group.name),
'Another Test Class Faculty')
self.assertEquals(unicode(self.gwc.course_group.name),
'Another Test Class Students')
self.assertIsNotNone(self.gwc.course)
self.assertIsNotNone(self.gwc.game)
self.assertIsNotNone(self.gwc.faculty_team)
def test_gwc_init_creator(self):
cr = User.objects.create()
ngwc = GroundWorkClass(creator=cr, title='New Name')
self.assertIsNotNone(ngwc.creator)
def test_next_month(self):
nm = self.gwc.next_month()
d = datetime.today()
nxtmn = datetime(d.year + (d.month + 1) / 12,
((d.month + 1) % 12) + 1, d.day)
self.assertEquals(nm, nxtmn)
def test_copy_survey_srvy_none(self):
pass
|
ccnmtl/envirocon
|
envirocon/envirocon_main/tests/test_models.py
|
Python
|
gpl-2.0
| 1,200
|
######################################################################
# This file should be kept compatible with Python 2.3, see PEP 291. #
######################################################################
"""create and manipulate C data types in Python"""
import os as _os, sys as _sys
__version__ = "1.1.0"
import _ffi
from _ctypes import Union, Structure, Array
from _ctypes import _Pointer
from _ctypes import CFuncPtr as _CFuncPtr
from _ctypes import __version__ as _ctypes_version
from _ctypes import RTLD_LOCAL, RTLD_GLOBAL
from _ctypes import ArgumentError
from struct import calcsize as _calcsize
if __version__ != _ctypes_version:
raise Exception("Version number mismatch", __version__, _ctypes_version)
if _os.name in ("nt", "ce"):
from _ctypes import FormatError
DEFAULT_MODE = RTLD_LOCAL
if _os.name == "posix" and _sys.platform == "darwin":
# On OS X 10.3, we use RTLD_GLOBAL as default mode
# because RTLD_LOCAL does not work at least on some
# libraries. OS X 10.3 is Darwin 7, so we check for
# that.
if int(_os.uname()[2].split('.')[0]) < 8:
DEFAULT_MODE = RTLD_GLOBAL
from _ctypes import FUNCFLAG_CDECL as _FUNCFLAG_CDECL, \
FUNCFLAG_PYTHONAPI as _FUNCFLAG_PYTHONAPI, \
FUNCFLAG_USE_ERRNO as _FUNCFLAG_USE_ERRNO, \
FUNCFLAG_USE_LASTERROR as _FUNCFLAG_USE_LASTERROR
"""
WINOLEAPI -> HRESULT
WINOLEAPI_(type)
STDMETHODCALLTYPE
STDMETHOD(name)
STDMETHOD_(type, name)
STDAPICALLTYPE
"""
def create_string_buffer(init, size=None):
"""create_string_buffer(aString) -> character array
create_string_buffer(anInteger) -> character array
create_string_buffer(aString, anInteger) -> character array
"""
if isinstance(init, (str, unicode)):
if size is None:
size = len(init)+1
buftype = c_char * size
buf = buftype()
buf.value = init
return buf
elif isinstance(init, (int, long)):
buftype = c_char * init
buf = buftype()
return buf
raise TypeError(init)
def c_buffer(init, size=None):
## "deprecated, use create_string_buffer instead"
## import warnings
## warnings.warn("c_buffer is deprecated, use create_string_buffer instead",
## DeprecationWarning, stacklevel=2)
return create_string_buffer(init, size)
_c_functype_cache = {}
def CFUNCTYPE(restype, *argtypes, **kw):
"""CFUNCTYPE(restype, *argtypes,
use_errno=False, use_last_error=False) -> function prototype.
restype: the result type
argtypes: a sequence specifying the argument types
The function prototype can be called in different ways to create a
callable object:
prototype(integer address) -> foreign function
prototype(callable) -> create and return a C callable function from callable
prototype(integer index, method name[, paramflags]) -> foreign function calling a COM method
prototype((ordinal number, dll object)[, paramflags]) -> foreign function exported by ordinal
prototype((function name, dll object)[, paramflags]) -> foreign function exported by name
"""
flags = _FUNCFLAG_CDECL
if kw.pop("use_errno", False):
flags |= _FUNCFLAG_USE_ERRNO
if kw.pop("use_last_error", False):
flags |= _FUNCFLAG_USE_LASTERROR
if kw:
raise ValueError("unexpected keyword argument(s) %s" % kw.keys())
try:
return _c_functype_cache[(restype, argtypes, flags)]
except KeyError:
class CFunctionType(_CFuncPtr):
_argtypes_ = argtypes
_restype_ = restype
_flags_ = flags
_c_functype_cache[(restype, argtypes, flags)] = CFunctionType
return CFunctionType
if _os.name in ("nt", "ce"):
from _ctypes import LoadLibrary as _dlopen
from _ctypes import FUNCFLAG_STDCALL as _FUNCFLAG_STDCALL
if _os.name == "ce":
# 'ce' doesn't have the stdcall calling convention
_FUNCFLAG_STDCALL = _FUNCFLAG_CDECL
_win_functype_cache = {}
def WINFUNCTYPE(restype, *argtypes, **kw):
# docstring set later (very similar to CFUNCTYPE.__doc__)
flags = _FUNCFLAG_STDCALL
if kw.pop("use_errno", False):
flags |= _FUNCFLAG_USE_ERRNO
if kw.pop("use_last_error", False):
flags |= _FUNCFLAG_USE_LASTERROR
if kw:
raise ValueError("unexpected keyword argument(s) %s" % kw.keys())
try:
return _win_functype_cache[(restype, argtypes, flags)]
except KeyError:
class WinFunctionType(_CFuncPtr):
_argtypes_ = argtypes
_restype_ = restype
_flags_ = flags
_win_functype_cache[(restype, argtypes, flags)] = WinFunctionType
return WinFunctionType
if WINFUNCTYPE.__doc__:
WINFUNCTYPE.__doc__ = CFUNCTYPE.__doc__.replace("CFUNCTYPE", "WINFUNCTYPE")
elif _os.name == "posix":
from _ctypes import dlopen as _dlopen
from _ctypes import sizeof, byref, addressof, alignment, resize
from _ctypes import get_errno, set_errno
from _ctypes import _SimpleCData
def _check_size(typ, typecode=None):
    # Check sizeof(ctypes_type) against struct.calcsize. This
# should protect somewhat against a misconfigured libffi.
from struct import calcsize
if typecode is None:
# Most _type_ codes are the same as used in struct
typecode = typ._type_
actual, required = sizeof(typ), calcsize(typecode)
if actual != required:
raise SystemError("sizeof(%s) wrong: %d instead of %d" % \
(typ, actual, required))
class py_object(_SimpleCData):
_type_ = "O"
def __repr__(self):
try:
return super(py_object, self).__repr__()
except ValueError:
return "%s(<NULL>)" % type(self).__name__
_check_size(py_object, "P")
class c_short(_SimpleCData):
_type_ = "h"
_check_size(c_short)
class c_ushort(_SimpleCData):
_type_ = "H"
_check_size(c_ushort)
class c_long(_SimpleCData):
_type_ = "l"
_check_size(c_long)
class c_ulong(_SimpleCData):
_type_ = "L"
_check_size(c_ulong)
if _calcsize("i") == _calcsize("l"):
# if int and long have the same size, make c_int an alias for c_long
c_int = c_long
c_uint = c_ulong
else:
class c_int(_SimpleCData):
_type_ = "i"
_check_size(c_int)
class c_uint(_SimpleCData):
_type_ = "I"
_check_size(c_uint)
class c_float(_SimpleCData):
_type_ = "f"
_check_size(c_float)
class c_double(_SimpleCData):
_type_ = "d"
_check_size(c_double)
class c_longdouble(_SimpleCData):
_type_ = "g"
if sizeof(c_longdouble) == sizeof(c_double):
c_longdouble = c_double
if _calcsize("l") == _calcsize("q"):
# if long and long long have the same size, make c_longlong an alias for c_long
c_longlong = c_long
c_ulonglong = c_ulong
else:
class c_longlong(_SimpleCData):
_type_ = "q"
_check_size(c_longlong)
class c_ulonglong(_SimpleCData):
_type_ = "Q"
## def from_param(cls, val):
## return ('d', float(val), val)
## from_param = classmethod(from_param)
_check_size(c_ulonglong)
class c_ubyte(_SimpleCData):
_type_ = "B"
c_ubyte.__ctype_le__ = c_ubyte.__ctype_be__ = c_ubyte
# backward compatibility:
##c_uchar = c_ubyte
_check_size(c_ubyte)
class c_byte(_SimpleCData):
_type_ = "b"
c_byte.__ctype_le__ = c_byte.__ctype_be__ = c_byte
_check_size(c_byte)
class c_char(_SimpleCData):
_type_ = "c"
c_char.__ctype_le__ = c_char.__ctype_be__ = c_char
_check_size(c_char)
class c_char_p(_SimpleCData):
_type_ = "z"
if _os.name == "nt":
def __repr__(self):
if not windll.kernel32.IsBadStringPtrA(self, -1):
return "%s(%r)" % (self.__class__.__name__, self.value)
return "%s(%s)" % (self.__class__.__name__, cast(self, c_void_p).value)
else:
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, cast(self, c_void_p).value)
_check_size(c_char_p, "P")
class c_void_p(_SimpleCData):
_type_ = "P"
c_voidp = c_void_p # backwards compatibility (to a bug)
_check_size(c_void_p)
class c_bool(_SimpleCData):
_type_ = "?"
from _ctypes import POINTER, pointer, _pointer_type_cache
def _reset_cache():
_pointer_type_cache.clear()
_c_functype_cache.clear()
if _os.name in ("nt", "ce"):
_win_functype_cache.clear()
# _SimpleCData.c_wchar_p_from_param
POINTER(c_wchar).from_param = c_wchar_p.from_param
# _SimpleCData.c_char_p_from_param
POINTER(c_char).from_param = c_char_p.from_param
_pointer_type_cache[None] = c_void_p
# XXX for whatever reasons, creating the first instance of a callback
# function is needed for the unittests on Win64 to succeed. This MAY
# be a compiler bug, since the problem occurs only when _ctypes is
# compiled with the MS SDK compiler. Or an uninitialized variable?
CFUNCTYPE(c_int)(lambda: None)
try:
from _ctypes import set_conversion_mode
except ImportError:
pass
else:
if _os.name in ("nt", "ce"):
set_conversion_mode("mbcs", "ignore")
else:
set_conversion_mode("ascii", "strict")
class c_wchar_p(_SimpleCData):
_type_ = "Z"
class c_wchar(_SimpleCData):
_type_ = "u"
def create_unicode_buffer(init, size=None):
"""create_unicode_buffer(aString) -> character array
create_unicode_buffer(anInteger) -> character array
create_unicode_buffer(aString, anInteger) -> character array
"""
if isinstance(init, (str, unicode)):
if size is None:
size = len(init)+1
buftype = c_wchar * size
buf = buftype()
buf.value = init
return buf
elif isinstance(init, (int, long)):
buftype = c_wchar * init
buf = buftype()
return buf
raise TypeError(init)
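    # Illustrative usage (sketch), mirroring create_string_buffer but with
    # c_wchar elements and unicode values:
    #   buf = create_unicode_buffer(u"abc")   # 4 wide chars incl. trailing NUL
    #   buf = create_unicode_buffer(10)       # zero-initialized, 10 wide chars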
# XXX Deprecated
def SetPointerType(pointer, cls):
if _pointer_type_cache.get(cls, None) is not None:
raise RuntimeError("This type already exists in the cache")
if id(pointer) not in _pointer_type_cache:
raise RuntimeError("What's this???")
pointer.set_type(cls)
_pointer_type_cache[cls] = pointer
del _pointer_type_cache[id(pointer)]
# XXX Deprecated
def ARRAY(typ, len):
return typ * len
################################################################
class CDLL(object):
"""An instance of this class represents a loaded dll/shared
library, exporting functions using the standard C calling
convention (named 'cdecl' on Windows).
The exported functions can be accessed as attributes, or by
indexing with the function name. Examples:
<obj>.qsort -> callable object
<obj>['qsort'] -> callable object
Calling the functions releases the Python GIL during the call and
reacquires it afterwards.
"""
_func_flags_ = _FUNCFLAG_CDECL
_func_restype_ = c_int
def __init__(self, name, mode=DEFAULT_MODE, handle=None,
use_errno=False,
use_last_error=False):
self._name = name
flags = self._func_flags_
if use_errno:
flags |= _FUNCFLAG_USE_ERRNO
if use_last_error:
flags |= _FUNCFLAG_USE_LASTERROR
class _FuncPtr(_CFuncPtr):
_flags_ = flags
_restype_ = self._func_restype_
self._FuncPtr = _FuncPtr
if handle is None:
if flags & _FUNCFLAG_CDECL:
self._handle = _ffi.CDLL(name, mode)
else:
self._handle = _ffi.WinDLL(name, mode)
else:
self._handle = handle
def __repr__(self):
return "<%s '%s', handle %r at 0x%x>" % (
self.__class__.__name__, self._name, self._handle,
id(self) & (_sys.maxint * 2 + 1))
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
raise AttributeError(name)
func = self.__getitem__(name)
setattr(self, name, func)
return func
def __getitem__(self, name_or_ordinal):
func = self._FuncPtr((name_or_ordinal, self))
if not isinstance(name_or_ordinal, (int, long)):
func.__name__ = name_or_ordinal
return func
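# Illustrative use of CDLL (sketch; the library name is platform-dependent and
# assumed here):
#   libc = CDLL("libc.so.6")
#   libc.printf("%d bottles\n", 99)   # functions resolve as attributes
#   libc["printf"]                    # ... or by indexing with the name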
# Not in PyPy
#class PyDLL(CDLL):
# """This class represents the Python library itself. It allows to
# access Python API functions. The GIL is not released, and
# Python exceptions are handled correctly.
# """
# _func_flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI
if _os.name in ("nt", "ce"):
class WinDLL(CDLL):
"""This class represents a dll exporting functions using the
Windows stdcall calling convention.
"""
_func_flags_ = _FUNCFLAG_STDCALL
# XXX Hm, what about HRESULT as normal parameter?
# Mustn't it derive from c_long then?
from _ctypes import _check_HRESULT, _SimpleCData
class HRESULT(_SimpleCData):
_type_ = "l"
# _check_retval_ is called with the function's result when it
# is used as restype. It checks for the FAILED bit, and
# raises a WindowsError if it is set.
#
# The _check_retval_ method is implemented in C, so that the
# method definition itself is not included in the traceback
# when it raises an error - that is what we want (and Python
# doesn't have a way to raise an exception in the caller's
# frame).
_check_retval_ = _check_HRESULT
class OleDLL(CDLL):
"""This class represents a dll exporting functions using the
Windows stdcall calling convention, and returning HRESULT.
HRESULT error values are automatically raised as WindowsError
exceptions.
"""
_func_flags_ = _FUNCFLAG_STDCALL
_func_restype_ = HRESULT
class LibraryLoader(object):
def __init__(self, dlltype):
self._dlltype = dlltype
def __getattr__(self, name):
if name[0] == '_':
raise AttributeError(name)
dll = self._dlltype(name)
setattr(self, name, dll)
return dll
def __getitem__(self, name):
return getattr(self, name)
def LoadLibrary(self, name):
return self._dlltype(name)
cdll = LibraryLoader(CDLL)
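# Sketch: the module-level loader resolves libraries as attributes or via
# LoadLibrary; the sonames below are assumptions:
#   libm = cdll.LoadLibrary("libm.so.6")   # POSIX
#   cdll.msvcrt                            # Windows C runtime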
# not on PyPy
#pydll = LibraryLoader(PyDLL)
if _os.name in ("nt", "ce"):
windll = LibraryLoader(WinDLL)
oledll = LibraryLoader(OleDLL)
if _os.name == "nt":
GetLastError = windll.kernel32.GetLastError
else:
GetLastError = windll.coredll.GetLastError
from _ctypes import get_last_error, set_last_error
def WinError(code=None, descr=None):
if code is None:
code = GetLastError()
if descr is None:
descr = FormatError(code).strip()
return WindowsError(code, descr)
if sizeof(c_uint) == sizeof(c_void_p):
c_size_t = c_uint
c_ssize_t = c_int
elif sizeof(c_ulong) == sizeof(c_void_p):
c_size_t = c_ulong
c_ssize_t = c_long
elif sizeof(c_ulonglong) == sizeof(c_void_p):
c_size_t = c_ulonglong
c_ssize_t = c_longlong
# functions
from _ctypes import _memmove_addr, _memset_addr, _string_at_addr, _cast_addr
## void *memmove(void *, const void *, size_t);
memmove = CFUNCTYPE(c_void_p, c_void_p, c_void_p, c_size_t)(_memmove_addr)
## void *memset(void *, int, size_t)
memset = CFUNCTYPE(c_void_p, c_void_p, c_int, c_size_t)(_memset_addr)
def PYFUNCTYPE(restype, *argtypes):
class CFunctionType(_CFuncPtr):
_argtypes_ = argtypes
_restype_ = restype
_flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI
return CFunctionType
def cast(obj, typ):
try:
c_void_p.from_param(obj)
except TypeError, e:
raise ArgumentError(str(e))
return _cast_addr(obj, obj, typ)
_string_at = PYFUNCTYPE(py_object, c_void_p, c_int)(_string_at_addr)
def string_at(ptr, size=-1):
"""string_at(addr[, size]) -> string
Return the string at addr."""
return _string_at(ptr, size)
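# Illustrative usage (sketch):
#   buf = create_string_buffer("abcdef")
#   string_at(buf, 3)    # -> 'abc'
#   string_at(buf)       # size defaults to -1: read up to the first NUL byte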
try:
from _ctypes import _wstring_at_addr
except ImportError:
pass
else:
_wstring_at = PYFUNCTYPE(py_object, c_void_p, c_int)(_wstring_at_addr)
def wstring_at(ptr, size=-1):
"""wstring_at(addr[, size]) -> string
Return the string at addr."""
return _wstring_at(ptr, size)
if _os.name in ("nt", "ce"): # COM stuff
def DllGetClassObject(rclsid, riid, ppv):
try:
ccom = __import__("comtypes.server.inprocserver", globals(), locals(), ['*'])
except ImportError:
return -2147221231 # CLASS_E_CLASSNOTAVAILABLE
else:
return ccom.DllGetClassObject(rclsid, riid, ppv)
def DllCanUnloadNow():
try:
ccom = __import__("comtypes.server.inprocserver", globals(), locals(), ['*'])
except ImportError:
return 0 # S_OK
return ccom.DllCanUnloadNow()
from ctypes._endian import BigEndianStructure, LittleEndianStructure
# Fill in specifically-sized types
c_int8 = c_byte
c_uint8 = c_ubyte
for kind in [c_short, c_int, c_long, c_longlong]:
if sizeof(kind) == 2: c_int16 = kind
elif sizeof(kind) == 4: c_int32 = kind
elif sizeof(kind) == 8: c_int64 = kind
for kind in [c_ushort, c_uint, c_ulong, c_ulonglong]:
if sizeof(kind) == 2: c_uint16 = kind
elif sizeof(kind) == 4: c_uint32 = kind
elif sizeof(kind) == 8: c_uint64 = kind
del(kind)
_reset_cache()
|
BartoszCichecki/onlinepython
|
onlinepython/pypy-2.4.0-win32/lib-python/2.7/ctypes/__init__.py
|
Python
|
gpl-2.0
| 17,513
|
# Morphux-IRC-BOT
# Quote save system
# By Noich
import time
import json
import sys
# sys.setdefaultencoding() does not exist here!
class Quote:
def command(self):
self.config = {
"command": {
"quote": {
"function": self.fetchQuote,
"usage": "quote [number]",
"help": "fetches a quote from previously saved quotes"
},
"savequote": {
"function": self.saveQuote,
"usage": "savequote #author [quote]",
"help": "saves a quote with its author"
},
"qsearch": {
"function": self.searchQuote,
"usage": "qsearch [args]",
"help": "searches a quote based on given args"
}
}
}
with open ("/morphux/Morphux-IRC-Bot/modules/quote/quote.json", "r") as myfile:
data=myfile.read().replace('\n', '')
data = data.replace("\\\"", "'")
string = data#.decode('unicode_escape')
self.quoteList = json.loads(string)
self.quote = [None, None, None]
return self.config
def fetchQuote(self, Morphux, infos):
if (len(infos['args']) == 0):
Morphux.sendMessage("I have " + str(len(self.quoteList)) +" quotes available. Choose one", infos['nick']);
elif (infos['args'][0].isdigit() == False):
Morphux.sendMessage("No such quote - Nice Try!", infos['nick']);
elif (int(infos['args'][0]) < len(self.quoteList)):
Morphux.sendMessage("Quote #" + infos['args'][0] + ", By: " + str(self.quoteList[int(infos['args'][0])][1]) + ", " + str(self.quoteList[int(infos['args'][0])][0]), infos['nick']);
            Morphux.sendMessage(self.quoteList[int(infos['args'][0])][2]);
else:
Morphux.sendMessage("No such quote - Nice Try!", infos['nick']);
def saveQuote(self, Morphux, infos):
if (len(infos['args']) == 0):
            Morphux.sendMessage("I don't have anything to save, c'mon!", infos['nick']);
else:
if (infos['args'][0] == ""):
Morphux.sendMessage("OMG BUG");
elif (infos['args'][0][0] != '#'):
Morphux.sendMessage("Your first argument is supposed to be #author. God you're stupid.", infos['nick']);
else:
self.quote = [None, None, None];
i = 0
self.quote[0] = time.strftime("%d/%m/%Y")
self.quote[1] = str(infos['args'][0][1:])
for arg in infos['args']:
if (i > 0):
if self.quote[2] is None:
self.quote[2] = str(arg);
else:
self.quote[2] += str(' ')
self.quote[2] += str(arg);
else:
i = 1;
self.quoteList[len(self.quoteList):] = [self.quote]
with open('modules/quote/quote.json', 'w') as f:
json.dump(self.quoteList, f);
f.close()
Morphux.sendMessage("Quote saved as #" + str(len(self.quoteList) - 1), infos['nick'])
def searchQuote(self, Morphux, infos):
if (len(infos['args']) == 0):
Morphux.sendMessage("Need a keyword", infos['nick']);
else:
i = 0
result = ""
while i < len(self.quoteList):
if (infos['args'][0] in self.quoteList[i][2]):
result = result + str(i) + ","
i = i + 1
if (result == ""):
Morphux.sendMessage("No quote found.", infos['nick']);
else:
Morphux.sendMessage("Quote(s) found: " + result[:-1], infos['nick']);
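# Sketch of the on-disk quote.json layout implied by the code above: a JSON
# list of [date, author, text] triples, e.g.
#   [["21/04/2016", "Noich", "an example quote"]]
# From IRC (command prefix assumed): "savequote #Noich an example quote",
# then "quote 0" to fetch it back.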
|
Morphux/IRC-Bot
|
modules/quote/quote.py
|
Python
|
gpl-2.0
| 4,157
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bonjour_py.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
jameskane05/bonjour_py
|
manage.py
|
Python
|
gpl-2.0
| 253
|
##
# Copyright 2012-2020 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
Utility module for working with github
:author: Jens Timmerman (Ghent University)
:author: Kenneth Hoste (Ghent University)
:author: Toon Willems (Ghent University)
"""
import base64
import copy
import getpass
import glob
import os
import random
import re
import socket
import sys
import tempfile
import time
from datetime import datetime, timedelta
from distutils.version import LooseVersion
from easybuild.base import fancylogger
from easybuild.framework.easyconfig.easyconfig import EASYCONFIGS_ARCHIVE_DIR
from easybuild.framework.easyconfig.easyconfig import copy_easyconfigs, copy_patch_files, det_file_info
from easybuild.framework.easyconfig.easyconfig import process_easyconfig
from easybuild.framework.easyconfig.parser import EasyConfigParser
from easybuild.tools.build_log import EasyBuildError, print_msg, print_warning
from easybuild.tools.config import build_option
from easybuild.tools.filetools import apply_patch, change_dir, copy_dir, copy_easyblocks, copy_framework_files
from easybuild.tools.filetools import det_patched_files, download_file, extract_file
from easybuild.tools.filetools import get_easyblock_class_name, mkdir, read_file, symlink, which, write_file
from easybuild.tools.py2vs3 import HTTPError, URLError, ascii_letters, urlopen
from easybuild.tools.systemtools import UNKNOWN, get_tool_version
from easybuild.tools.utilities import nub, only_if_module_is_available
_log = fancylogger.getLogger('github', fname=False)
try:
import keyring
HAVE_KEYRING = True
except ImportError as err:
_log.warning("Failed to import 'keyring' Python module: %s" % err)
HAVE_KEYRING = False
try:
from easybuild.base.rest import RestClient
HAVE_GITHUB_API = True
except ImportError as err:
_log.warning("Failed to import from 'easybuild.base.rest' Python module: %s" % err)
HAVE_GITHUB_API = False
try:
import git
from git import GitCommandError
except ImportError as err:
_log.warning("Failed to import 'git' Python module: %s", err)
GITHUB_URL = 'https://github.com'
GITHUB_API_URL = 'https://api.github.com'
GITHUB_DIR_TYPE = u'dir'
GITHUB_EB_MAIN = 'easybuilders'
GITHUB_EASYBLOCKS_REPO = 'easybuild-easyblocks'
GITHUB_EASYCONFIGS_REPO = 'easybuild-easyconfigs'
GITHUB_FRAMEWORK_REPO = 'easybuild-framework'
GITHUB_DEVELOP_BRANCH = 'develop'
GITHUB_FILE_TYPE = u'file'
GITHUB_PR_STATE_OPEN = 'open'
GITHUB_PR_STATES = [GITHUB_PR_STATE_OPEN, 'closed', 'all']
GITHUB_PR_ORDER_CREATED = 'created'
GITHUB_PR_ORDERS = [GITHUB_PR_ORDER_CREATED, 'updated', 'popularity', 'long-running']
GITHUB_PR_DIRECTION_DESC = 'desc'
GITHUB_PR_DIRECTIONS = ['asc', GITHUB_PR_DIRECTION_DESC]
GITHUB_MAX_PER_PAGE = 100
GITHUB_MERGEABLE_STATE_CLEAN = 'clean'
GITHUB_PR = 'pull'
GITHUB_RAW = 'https://raw.githubusercontent.com'
GITHUB_STATE_CLOSED = 'closed'
HTTP_STATUS_OK = 200
HTTP_STATUS_CREATED = 201
HTTP_STATUS_NO_CONTENT = 204
KEYRING_GITHUB_TOKEN = 'github_token'
URL_SEPARATOR = '/'
VALID_CLOSE_PR_REASONS = {
'archived': 'uses an archived toolchain',
'inactive': 'no activity for > 6 months',
'obsolete': 'obsoleted by more recent PRs',
'retest': 'closing and reopening to trigger tests',
}
class Githubfs(object):
"""This class implements some higher level functionality on top of the Github api"""
def __init__(self, githubuser, reponame, branchname="master", username=None, password=None, token=None):
"""Construct a new githubfs object
:param githubuser: the github user's repo we want to use.
:param reponame: The name of the repository we want to use.
        :param branchname: The name of the branch to use (defaults to master)
:param username: (optional) your github username.
:param password: (optional) your github password.
:param token: (optional) a github api token.
"""
if token is None:
token = fetch_github_token(username)
self.log = fancylogger.getLogger(self.__class__.__name__, fname=False)
self.gh = RestClient(GITHUB_API_URL, username=username, password=password, token=token)
self.githubuser = githubuser
self.reponame = reponame
self.branchname = branchname
@staticmethod
def join(*args):
"""This method joins 'paths' inside a github repository"""
args = [x for x in args if x]
return URL_SEPARATOR.join(args)
def get_repo(self):
"""Returns the repo as a Github object (from agithub)"""
return self.gh.repos[self.githubuser][self.reponame]
def get_path(self, path):
"""returns the path as a Github object (from agithub)"""
endpoint = self.get_repo()['contents']
if path:
for subpath in path.split(URL_SEPARATOR):
endpoint = endpoint[subpath]
return endpoint
@staticmethod
def isdir(githubobj):
"""Check if this path points to a directory"""
if isinstance(githubobj, (list, tuple)):
return True
else:
try:
return githubobj['type'] == GITHUB_DIR_TYPE
except Exception:
return False
@staticmethod
def isfile(githubobj):
"""Check if this path points to a file"""
try:
return githubobj['type'] == GITHUB_FILE_TYPE
except Exception:
return False
def listdir(self, path):
"""List the contents of a directory"""
path = self.get_path(path)
listing = path.get(ref=self.branchname)
self.log.debug("listdir response: %s" % str(listing))
if listing[0] == 200:
return listing[1]
else:
self.log.warning("error: %s" % str(listing))
raise EasyBuildError("Invalid response from github (I/O error)")
def walk(self, top=None, topdown=True):
"""
Walk the github repo in an os.walk like fashion.
"""
isdir, listdir = self.isdir, self.listdir
        # If this fails we blow up, since permissions on a github repo are recursive anyway.
githubobjs = listdir(top)
# listdir works with None, but we want to show a decent 'root dir' name
dirs, nondirs = [], []
for githubobj in githubobjs:
if isdir(githubobj):
dirs.append(str(githubobj['name']))
else:
nondirs.append(str(githubobj['name']))
if topdown:
yield top, dirs, nondirs
for name in dirs:
new_path = self.join(top, name)
for x in self.walk(new_path, topdown):
yield x
if not topdown:
yield top, dirs, nondirs
def read(self, path, api=True):
"""Read the contents of a file and return it
Or, if api=False it will download the file and return the location of the downloaded file"""
        # we don't need to use the api for this, but can also use raw.github.com
# https://raw.github.com/easybuilders/easybuild/master/README.rst
if not api:
outfile = tempfile.mkstemp()[1]
url = '/'.join([GITHUB_RAW, self.githubuser, self.reponame, self.branchname, path])
download_file(os.path.basename(path), url, outfile)
return outfile
else:
obj = self.get_path(path).get(ref=self.branchname)[1]
if not self.isfile(obj):
raise GithubError("Error: not a valid file: %s" % str(obj))
return base64.b64decode(obj['content'])
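# Illustrative use of Githubfs (sketch; repo, branch and path are examples):
#   gfs = Githubfs(GITHUB_EB_MAIN, GITHUB_EASYCONFIGS_REPO, 'develop')
#   for top, dirs, files in gfs.walk('easybuild'):
#       print(top, files)
#   txt = gfs.read('README.rst')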
class GithubError(Exception):
"""Error raised by the Githubfs"""
pass
def github_api_get_request(request_f, github_user=None, token=None, **kwargs):
"""
Helper method, for performing get requests to GitHub API.
:param request_f: function that should be called to compose request, providing a RestClient instance
:param github_user: GitHub user name (to try and obtain matching GitHub token if none is provided)
:param token: GitHub token to use
:return: tuple with return status and data
"""
if github_user is None:
github_user = build_option('github_user')
if token is None:
token = fetch_github_token(github_user)
url = request_f(RestClient(GITHUB_API_URL, username=github_user, token=token))
try:
status, data = url.get(**kwargs)
except socket.gaierror as err:
_log.warning("Error occurred while performing get request: %s", err)
status, data = 0, None
_log.debug("get request result for %s: status: %d, data: %s", url.url, status, data)
return (status, data)
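# Sketch: composing a request with the helper above; the PR number is a
# hypothetical example.
#   status, data = github_api_get_request(
#       lambda gh: gh.repos[GITHUB_EB_MAIN][GITHUB_EASYCONFIGS_REPO].pulls[1234])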
def github_api_put_request(request_f, github_user=None, token=None, **kwargs):
"""
Helper method, for performing put requests to GitHub API.
:param request_f: function that should be called to compose request, providing a RestClient instance
:param github_user: GitHub user name (to try and obtain matching GitHub token if none is provided)
:param token: GitHub token to use
:return: tuple with return status and data
"""
if github_user is None:
github_user = build_option('github_user')
if token is None:
token = fetch_github_token(github_user)
url = request_f(RestClient(GITHUB_API_URL, username=github_user, token=token))
try:
status, data = url.put(**kwargs)
except socket.gaierror as err:
_log.warning("Error occurred while performing put request: %s", err)
status, data = 0, {'message': err}
if status == 200:
_log.info("Put request successful: %s", data['message'])
elif status in [405, 409]:
raise EasyBuildError("FAILED: %s", data['message'])
else:
raise EasyBuildError("FAILED: %s", data.get('message', "(unknown reason)"))
    _log.debug("put request result for %s: status: %d, data: %s", url.url, status, data)
return (status, data)
def fetch_latest_commit_sha(repo, account, branch='master', github_user=None, token=None):
"""
Fetch latest SHA1 for a specified repository and branch.
:param repo: GitHub repository
:param account: GitHub account
:param branch: branch to fetch latest SHA1 for
:param github_user: name of GitHub user to use
:param token: GitHub token to use
:return: latest SHA1
"""
status, data = github_api_get_request(lambda x: x.repos[account][repo].branches,
github_user=github_user, token=token, per_page=GITHUB_MAX_PER_PAGE)
if status != HTTP_STATUS_OK:
raise EasyBuildError("Failed to get latest commit sha for branch %s from %s/%s (status: %d %s)",
branch, account, repo, status, data)
res = None
for entry in data:
if entry[u'name'] == branch:
res = entry['commit']['sha']
break
if res is None:
error_msg = "No branch with name %s found in repo %s/%s" % (branch, account, repo)
if len(data) >= GITHUB_MAX_PER_PAGE:
error_msg += "; only %d branches were checked (too many branches in %s/%s?)" % (len(data), account, repo)
raise EasyBuildError(error_msg + ': ' + ', '.join([x[u'name'] for x in data]))
return res
def download_repo(repo=GITHUB_EASYCONFIGS_REPO, branch='master', account=GITHUB_EB_MAIN, path=None, github_user=None):
"""
Download entire GitHub repo as a tar.gz archive, and extract it into specified path.
:param repo: repo to download
:param branch: branch to download
:param account: GitHub account to download repo from
:param path: path to extract to
:param github_user: name of GitHub user to use
"""
# make sure path exists, create it if necessary
if path is None:
path = tempfile.mkdtemp()
# add account subdir
path = os.path.join(path, account)
mkdir(path, parents=True)
extracted_dir_name = '%s-%s' % (repo, branch)
base_name = '%s.tar.gz' % branch
latest_commit_sha = fetch_latest_commit_sha(repo, account, branch, github_user=github_user)
expected_path = os.path.join(path, extracted_dir_name)
latest_sha_path = os.path.join(expected_path, 'latest-sha')
# check if directory already exists, don't download if 'latest-sha' file indicates that it's up to date
if os.path.exists(latest_sha_path):
sha = read_file(latest_sha_path).split('\n')[0].rstrip()
if latest_commit_sha == sha:
_log.debug("Not redownloading %s/%s as it already exists: %s" % (account, repo, expected_path))
return expected_path
url = URL_SEPARATOR.join([GITHUB_URL, account, repo, 'archive', base_name])
target_path = os.path.join(path, base_name)
_log.debug("downloading repo %s/%s as archive from %s to %s" % (account, repo, url, target_path))
download_file(base_name, url, target_path, forced=True)
_log.debug("%s downloaded to %s, extracting now" % (base_name, path))
base_dir = extract_file(target_path, path, forced=True, change_into_dir=False)
change_dir(base_dir)
extracted_path = os.path.join(base_dir, extracted_dir_name)
# check if extracted_path exists
if not os.path.isdir(extracted_path):
raise EasyBuildError("%s should exist and contain the repo %s at branch %s", extracted_path, repo, branch)
write_file(latest_sha_path, latest_commit_sha, forced=True)
_log.debug("Repo %s at branch %s extracted into %s" % (repo, branch, extracted_path))
return extracted_path
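# Illustrative call (sketch): fetch the develop branch of the default
# easyconfigs repo into a temporary directory.
#   path = download_repo(branch=GITHUB_DEVELOP_BRANCH)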
def fetch_easyblocks_from_pr(pr, path=None, github_user=None):
    """Fetch patched easyblock files for a particular PR."""
return fetch_files_from_pr(pr, path, github_user, github_repo=GITHUB_EASYBLOCKS_REPO)
def fetch_easyconfigs_from_pr(pr, path=None, github_user=None):
"""Fetch patched easyconfig files for a particular PR."""
return fetch_files_from_pr(pr, path, github_user, github_repo=GITHUB_EASYCONFIGS_REPO)
def fetch_files_from_pr(pr, path=None, github_user=None, github_repo=None):
"""Fetch patched files for a particular PR."""
if github_user is None:
github_user = build_option('github_user')
if github_repo is None:
github_repo = GITHUB_EASYCONFIGS_REPO
if path is None:
if github_repo == GITHUB_EASYCONFIGS_REPO:
path = build_option('pr_path')
elif github_repo == GITHUB_EASYBLOCKS_REPO:
path = os.path.join(tempfile.gettempdir(), 'ebs_pr%s' % pr)
else:
raise EasyBuildError("Unknown repo: %s" % github_repo)
if path is None:
path = tempfile.mkdtemp()
else:
# make sure path exists, create it if necessary
mkdir(path, parents=True)
github_account = build_option('pr_target_account')
if github_repo == GITHUB_EASYCONFIGS_REPO:
easyfiles = 'easyconfigs'
elif github_repo == GITHUB_EASYBLOCKS_REPO:
easyfiles = 'easyblocks'
else:
raise EasyBuildError("Don't know how to fetch files from repo %s", github_repo)
subdir = os.path.join('easybuild', easyfiles)
_log.debug("Fetching %s from %s/%s PR #%s into %s", easyfiles, github_account, github_repo, pr, path)
pr_data, _ = fetch_pr_data(pr, github_account, github_repo, github_user)
pr_merged = pr_data['merged']
pr_closed = pr_data['state'] == GITHUB_STATE_CLOSED and not pr_merged
pr_target_branch = pr_data['base']['ref']
_log.info("Target branch for PR #%s: %s", pr, pr_target_branch)
# download target branch of PR so we can try and apply the PR patch on top of it
repo_target_branch = download_repo(repo=github_repo, account=github_account, branch=pr_target_branch,
github_user=github_user)
# determine list of changed files via diff
diff_fn = os.path.basename(pr_data['diff_url'])
diff_filepath = os.path.join(path, diff_fn)
download_file(diff_fn, pr_data['diff_url'], diff_filepath, forced=True)
diff_txt = read_file(diff_filepath)
_log.debug("Diff for PR #%s:\n%s", pr, diff_txt)
patched_files = det_patched_files(txt=diff_txt, omit_ab_prefix=True, github=True, filter_deleted=True)
_log.debug("List of patched files for PR #%s: %s", pr, patched_files)
final_path = None
# try to apply PR patch on top of target branch, unless the PR is closed or already merged
if pr_merged:
_log.info("PR is already merged, so using current version of PR target branch")
final_path = repo_target_branch
elif not pr_closed:
try:
_log.debug("Trying to apply PR patch %s to %s...", diff_filepath, repo_target_branch)
apply_patch(diff_filepath, repo_target_branch, use_git_am=True)
_log.info("Using %s which included PR patch to test PR #%s", repo_target_branch, pr)
final_path = repo_target_branch
except EasyBuildError as err:
            _log.warning("Ignoring problem that occurred when applying PR patch: %s", err)
if final_path is None:
if pr_closed:
print_warning("Using %s from closed PR #%s" % (easyfiles, pr))
# obtain most recent version of patched files
for patched_file in [f for f in patched_files if subdir in f]:
# path to patch file, incl. subdir it is in
fn = patched_file.split(subdir)[1].strip(os.path.sep)
sha = pr_data['head']['sha']
full_url = URL_SEPARATOR.join([GITHUB_RAW, github_account, github_repo, sha, patched_file])
_log.info("Downloading %s from %s", fn, full_url)
download_file(fn, full_url, path=os.path.join(path, fn), forced=True)
final_path = path
# symlink directories into expected place if they're not there yet
if final_path != path:
dirpath = os.path.join(final_path, subdir)
for eb_dir in os.listdir(dirpath):
symlink(os.path.join(dirpath, eb_dir), os.path.join(path, os.path.basename(eb_dir)))
# sanity check: make sure all patched files are downloaded
files = []
for patched_file in [f for f in patched_files if subdir in f]:
fn = patched_file.split(easyfiles)[1].strip(os.path.sep)
full_path = os.path.join(path, fn)
if os.path.exists(full_path):
files.append(full_path)
else:
raise EasyBuildError("Couldn't find path to patched file %s", full_path)
return files
def create_gist(txt, fn, descr=None, github_user=None, github_token=None):
"""Create a gist with the provided text."""
dry_run = build_option('dry_run') or build_option('extended_dry_run')
if descr is None:
descr = "(none)"
if github_token is None:
github_token = fetch_github_token(github_user)
body = {
"description": descr,
"public": True,
"files": {
fn: {
"content": txt,
}
}
}
if dry_run:
status, data = HTTP_STATUS_CREATED, {'html_url': 'https://gist.github.com/DRY_RUN'}
else:
g = RestClient(GITHUB_API_URL, username=github_user, token=github_token)
status, data = g.gists.post(body=body)
if status != HTTP_STATUS_CREATED:
raise EasyBuildError("Failed to create gist; status %s, data: %s", status, data)
return data['html_url']
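# Illustrative call (sketch; the contents, file name and description are
# hypothetical):
#   url = create_gist(log_txt, 'easybuild_test_report.md', descr='test report')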
def delete_gist(gist_id, github_user=None, github_token=None):
"""Delete gist with specified ID."""
if github_token is None:
github_token = fetch_github_token(github_user)
gh = RestClient(GITHUB_API_URL, username=github_user, token=github_token)
status, data = gh.gists[gist_id].delete()
if status != HTTP_STATUS_NO_CONTENT:
        raise EasyBuildError("Failed to delete gist with ID %s: status %s, data: %s", gist_id, status, data)
def post_comment_in_issue(issue, txt, account=GITHUB_EB_MAIN, repo=GITHUB_EASYCONFIGS_REPO, github_user=None):
"""Post a comment in the specified PR."""
if not isinstance(issue, int):
try:
issue = int(issue)
except ValueError as err:
raise EasyBuildError("Failed to parse specified pull request number '%s' as an int: %s; ", issue, err)
dry_run = build_option('dry_run') or build_option('extended_dry_run')
msg = "Adding comment to %s issue #%s: '%s'" % (repo, issue, txt)
if dry_run:
msg = "[DRY RUN] " + msg
print_msg(msg, log=_log, prefix=False)
if not dry_run:
github_token = fetch_github_token(github_user)
g = RestClient(GITHUB_API_URL, username=github_user, token=github_token)
pr_url = g.repos[account][repo].issues[issue]
status, data = pr_url.comments.post(body={'body': txt})
if not status == HTTP_STATUS_CREATED:
raise EasyBuildError("Failed to create comment in PR %s#%d; status %s, data: %s", repo, issue, status, data)
def init_repo(path, repo_name, silent=False):
"""
Initialize a new Git repository at the specified location.
:param path: location where Git repository should be initialized
:param repo_name: name of Git repository
:param silent: keep quiet (don't print any messages)
"""
repo_path = os.path.join(path, repo_name)
if not os.path.exists(repo_path):
mkdir(repo_path, parents=True)
# clone repo in git_working_dirs_path to repo_path
git_working_dirs_path = build_option('git_working_dirs_path')
if git_working_dirs_path:
workdir = os.path.join(git_working_dirs_path, repo_name)
if os.path.exists(workdir):
print_msg("cloning git repo from %s..." % workdir, silent=silent)
try:
workrepo = git.Repo(workdir)
workrepo.clone(repo_path)
except GitCommandError as err:
raise EasyBuildError("Failed to clone git repo at %s: %s", workdir, err)
    # initialize repo in repo_path
try:
repo = git.Repo.init(repo_path)
except GitCommandError as err:
raise EasyBuildError("Failed to init git repo at %s: %s", repo_path, err)
_log.debug("temporary git working directory ready at %s", repo_path)
return repo
def setup_repo_from(git_repo, github_url, target_account, branch_name, silent=False):
"""
Set up repository by checking out specified branch from repository at specified URL.
:param git_repo: git.Repo instance
:param github_url: URL to GitHub repository
:param target_account: name of GitHub account that owns GitHub repository at specified URL
:param branch_name: name of branch to check out
:param silent: keep quiet (don't print any messages)
"""
_log.debug("Cloning from %s", github_url)
# salt to use for names of remotes/branches that are created
salt = ''.join(random.choice(ascii_letters) for _ in range(5))
remote_name = 'pr_target_account_%s_%s' % (target_account, salt)
origin = git_repo.create_remote(remote_name, github_url)
if not origin.exists():
raise EasyBuildError("%s does not exist?", github_url)
# git fetch
# can't use --depth to only fetch a shallow copy, since pushing to another repo from a shallow copy doesn't work
print_msg("fetching branch '%s' from %s..." % (branch_name, github_url), silent=silent)
try:
res = origin.fetch()
except GitCommandError as err:
raise EasyBuildError("Failed to fetch branch '%s' from %s: %s", branch_name, github_url, err)
if res:
if res[0].flags & res[0].ERROR:
raise EasyBuildError("Fetching branch '%s' from remote %s failed: %s", branch_name, origin, res[0].note)
else:
_log.debug("Fetched branch '%s' from remote %s (note: %s)", branch_name, origin, res[0].note)
else:
raise EasyBuildError("Fetching branch '%s' from remote %s failed: empty result", branch_name, origin)
# git checkout -b <branch>; git pull
if hasattr(origin.refs, branch_name):
origin_branch = getattr(origin.refs, branch_name)
else:
raise EasyBuildError("Branch '%s' not found at %s", branch_name, github_url)
_log.debug("Checking out branch '%s' from remote %s", branch_name, github_url)
try:
origin_branch.checkout(b=branch_name)
except GitCommandError as err:
alt_branch = '%s_%s' % (branch_name, salt)
_log.debug("Trying to work around checkout error ('%s') by using different branch name '%s'", err, alt_branch)
try:
origin_branch.checkout(b=alt_branch, force=True)
except GitCommandError as err:
raise EasyBuildError("Failed to check out branch '%s' from repo at %s: %s", alt_branch, github_url, err)
return remote_name
def setup_repo(git_repo, target_account, target_repo, branch_name, silent=False, git_only=False):
"""
    Set up repository by checking out specified branch for specified GitHub account/repository.
:param git_repo: git.Repo instance
:param target_account: name of GitHub account that owns GitHub repository
    :param target_repo: name of GitHub repository
:param branch_name: name of branch to check out
:param silent: keep quiet (don't print any messages)
:param git_only: only use git@github.com repo URL, skip trying https://github.com first
"""
tmpl_github_urls = [
'git@github.com:%s/%s.git',
]
if not git_only:
tmpl_github_urls.insert(0, 'https://github.com/%s/%s.git')
res = None
errors = []
for tmpl_github_url in tmpl_github_urls:
github_url = tmpl_github_url % (target_account, target_repo)
try:
res = setup_repo_from(git_repo, github_url, target_account, branch_name, silent=silent)
break
except EasyBuildError as err:
errors.append("Checking out branch '%s' from %s failed: %s" % (branch_name, github_url, err))
if res:
return res
else:
raise EasyBuildError('\n'.join(errors))
@only_if_module_is_available('git', pkgname='GitPython')
def _easyconfigs_pr_common(paths, ecs, start_branch=None, pr_branch=None, start_account=None, commit_msg=None):
"""
Common code for new_pr and update_pr functions:
* check whether all supplied paths point to existing files
* create temporary clone of target git repository
* fetch/checkout specified starting branch
* copy files to right location
* stage/commit all files in PR branch
* push PR branch to GitHub (to account specified by --github-user)
:param paths: paths to categorized lists of files (easyconfigs, files to delete, patches)
:param ecs: list of parsed easyconfigs, incl. for dependencies (if robot is enabled)
:param start_branch: name of branch to use as base for PR
:param pr_branch: name of branch to push to GitHub
:param start_account: name of GitHub account to use as base for PR
:param commit_msg: commit message to use
"""
# we need files to create the PR with
non_existing_paths = []
ec_paths = []
if paths['easyconfigs'] or paths['py_files']:
for path in paths['easyconfigs'] + paths['py_files']:
if not os.path.exists(path):
non_existing_paths.append(path)
else:
ec_paths.append(path)
if non_existing_paths:
raise EasyBuildError("One or more non-existing paths specified: %s", ', '.join(non_existing_paths))
if not any(paths.values()):
raise EasyBuildError("No paths specified")
pr_target_repo = det_pr_target_repo(paths)
if pr_target_repo is None:
raise EasyBuildError("Failed to determine target repository, please specify it via --pr-target-repo!")
# initialize repository
git_working_dir = tempfile.mkdtemp(prefix='git-working-dir')
git_repo = init_repo(git_working_dir, pr_target_repo)
repo_path = os.path.join(git_working_dir, pr_target_repo)
if pr_target_repo not in [GITHUB_EASYCONFIGS_REPO, GITHUB_EASYBLOCKS_REPO, GITHUB_FRAMEWORK_REPO]:
raise EasyBuildError("Don't know how to create/update a pull request to the %s repository", pr_target_repo)
if start_account is None:
start_account = build_option('pr_target_account')
if start_branch is None:
# if start branch is not specified, we're opening a new PR
# account to use is determined by active EasyBuild configuration (--github-org or --github-user)
target_account = build_option('github_org') or build_option('github_user')
# if branch to start from is specified, we're updating an existing PR
start_branch = build_option('pr_target_branch')
else:
# account to target is the one that owns the branch used to open PR
# (which may be different from account used to push update!)
target_account = start_account
# set up repository
setup_repo(git_repo, start_account, pr_target_repo, start_branch)
_log.debug("git status: %s", git_repo.git.status())
# copy easyconfig files to right place
target_dir = os.path.join(git_working_dir, pr_target_repo)
print_msg("copying files to %s..." % target_dir)
file_info = COPY_FUNCTIONS[pr_target_repo](ec_paths, os.path.join(git_working_dir, pr_target_repo))
# figure out commit message to use
if commit_msg:
cnt = len(file_info['paths_in_repo'])
_log.debug("Using specified commit message for all %d new/modified files at once: %s", cnt, commit_msg)
elif pr_target_repo == GITHUB_EASYCONFIGS_REPO and all(file_info['new']) and not paths['files_to_delete']:
# automagically derive meaningful commit message if all easyconfig files are new
commit_msg = "adding easyconfigs: %s" % ', '.join(os.path.basename(p) for p in file_info['paths_in_repo'])
if paths['patch_files']:
commit_msg += " and patches: %s" % ', '.join(os.path.basename(p) for p in paths['patch_files'])
elif pr_target_repo == GITHUB_EASYBLOCKS_REPO and all(file_info['new']):
commit_msg = "adding easyblocks: %s" % ', '.join(os.path.basename(p) for p in file_info['paths_in_repo'])
else:
raise EasyBuildError("A meaningful commit message must be specified via --pr-commit-msg when "
"modifying/deleting files or targeting the framework repo.")
# figure out to which software name patches relate, and copy them to the right place
if paths['patch_files']:
patch_specs = det_patch_specs(paths['patch_files'], file_info, [target_dir])
print_msg("copying patch files to %s..." % target_dir)
patch_info = copy_patch_files(patch_specs, target_dir)
# determine path to files to delete (if any)
deleted_paths = []
for fn in paths['files_to_delete']:
fullpath = os.path.join(repo_path, fn)
if os.path.exists(fullpath):
deleted_paths.append(fullpath)
else:
# if no existing relative path is specified, assume just the easyconfig file name is provided
hits = glob.glob(os.path.join(repo_path, 'easybuild', 'easyconfigs', '*', '*', fn))
if len(hits) == 1:
deleted_paths.append(hits[0])
else:
raise EasyBuildError("Path doesn't exist or file to delete isn't found in target branch: %s", fn)
dep_info = {
'ecs': [],
'paths_in_repo': [],
'new': [],
}
# include missing easyconfigs for dependencies, if robot is enabled
if ecs is not None:
abs_paths = [os.path.realpath(os.path.abspath(path)) for path in ec_paths]
dep_paths = [ec['spec'] for ec in ecs if os.path.realpath(ec['spec']) not in abs_paths]
_log.info("Paths to easyconfigs for missing dependencies: %s", dep_paths)
all_dep_info = copy_easyconfigs(dep_paths, target_dir)
# only consider new easyconfig files for dependencies (not updated ones)
for idx in range(len(all_dep_info['ecs'])):
if all_dep_info['new'][idx]:
for key in dep_info:
dep_info[key].append(all_dep_info[key][idx])
# checkout target branch
if pr_branch is None:
if ec_paths and pr_target_repo == GITHUB_EASYCONFIGS_REPO:
label = file_info['ecs'][0].name + re.sub('[.-]', '', file_info['ecs'][0].version)
else:
label = ''.join(random.choice(ascii_letters) for _ in range(10))
pr_branch = '%s_new_pr_%s' % (time.strftime("%Y%m%d%H%M%S"), label)
# create branch to commit to and push;
# use force to avoid errors if branch already exists (OK since this is a local temporary copy of the repo)
git_repo.create_head(pr_branch, force=True).checkout()
_log.info("New branch '%s' created to commit files to", pr_branch)
# stage
_log.debug("Staging all %d new/modified easyconfigs", len(file_info['paths_in_repo']))
git_repo.index.add(file_info['paths_in_repo'])
git_repo.index.add(dep_info['paths_in_repo'])
if paths['patch_files']:
_log.debug("Staging all %d new/modified patch files", len(patch_info['paths_in_repo']))
git_repo.index.add(patch_info['paths_in_repo'])
# stage deleted files
if deleted_paths:
git_repo.index.remove(deleted_paths)
# overview of modifications
if build_option('extended_dry_run'):
print_msg("\nFull patch:\n", log=_log, prefix=False)
print_msg(git_repo.git.diff(cached=True) + '\n', log=_log, prefix=False)
diff_stat = git_repo.git.diff(cached=True, stat=True)
if not diff_stat:
raise EasyBuildError("No changed files found when comparing to current develop branch. "
"Refused to make empty pull request.")
# commit
git_repo.index.commit(commit_msg)
push_branch_to_github(git_repo, target_account, pr_target_repo, pr_branch)
return file_info, deleted_paths, git_repo, pr_branch, diff_stat, pr_target_repo
def create_remote(git_repo, account, repo, https=False):
"""
Create remote in specified git working directory for specified account & repository.
:param git_repo: git.Repo instance to use (after init_repo & setup_repo)
:param account: GitHub account name
:param repo: repository name
:param https: use https:// URL rather than git@
"""
if https:
github_url = 'https://github.com/%s/%s.git' % (account, repo)
else:
github_url = 'git@github.com:%s/%s.git' % (account, repo)
salt = ''.join(random.choice(ascii_letters) for _ in range(5))
remote_name = 'github_%s_%s' % (account, salt)
try:
remote = git_repo.create_remote(remote_name, github_url)
except GitCommandError as err:
raise EasyBuildError("Failed to create remote %s for %s: %s", remote_name, github_url, err)
return remote
def push_branch_to_github(git_repo, target_account, target_repo, branch):
"""
Push specified branch to GitHub from specified git repository.
:param git_repo: git.Repo instance to use (after init_repo & setup_repo)
:param target_account: GitHub account name
:param target_repo: repository name
:param branch: name of branch to push
"""
# push to GitHub
remote = create_remote(git_repo, target_account, target_repo)
dry_run = build_option('dry_run') or build_option('extended_dry_run')
github_url = 'git@github.com:%s/%s.git' % (target_account, target_repo)
push_branch_msg = "pushing branch '%s' to remote '%s' (%s)" % (branch, remote.name, github_url)
if dry_run:
print_msg(push_branch_msg + ' [DRY RUN]', log=_log)
else:
print_msg(push_branch_msg, log=_log)
try:
res = remote.push(branch)
except GitCommandError as err:
raise EasyBuildError("Failed to push branch '%s' to GitHub (%s): %s", branch, github_url, err)
if res:
if res[0].ERROR & res[0].flags:
raise EasyBuildError("Pushing branch '%s' to remote %s (%s) failed: %s",
branch, remote, github_url, res[0].summary)
else:
_log.debug("Pushed branch %s to remote %s (%s): %s", branch, remote, github_url, res[0].summary)
else:
raise EasyBuildError("Pushing branch '%s' to remote %s (%s) failed: empty result",
branch, remote, github_url)
def is_patch_for(patch_name, ec):
"""Check whether specified patch matches any patch in the provided EasyConfig instance."""
res = False
patches = copy.copy(ec['patches'])
for ext in ec['exts_list']:
if isinstance(ext, (list, tuple)) and len(ext) == 3 and isinstance(ext[2], dict):
ext_options = ext[2]
patches.extend(ext_options.get('patches', []))
for patch in patches:
if isinstance(patch, (tuple, list)):
patch = patch[0]
if patch == patch_name:
res = True
break
return res
def det_patch_specs(patch_paths, file_info, ec_dirs):
""" Determine software names for patch files """
print_msg("determining software names for patch files...")
patch_specs = []
for patch_path in patch_paths:
soft_name = None
patch_file = os.path.basename(patch_path)
# consider patch lists of easyconfigs being provided
for ec in file_info['ecs']:
if is_patch_for(patch_file, ec):
soft_name = ec['name']
break
if soft_name:
patch_specs.append((patch_path, soft_name))
else:
# fall back on scanning all eb files for patches
print("Matching easyconfig for %s not found on the first try:" % patch_path)
print("scanning all easyconfigs to determine where patch file belongs (this may take a while)...")
soft_name = find_software_name_for_patch(patch_file, ec_dirs)
if soft_name:
patch_specs.append((patch_path, soft_name))
else:
# still nothing found
raise EasyBuildError("Failed to determine software name to which patch file %s relates", patch_path)
return patch_specs
def find_software_name_for_patch(patch_name, ec_dirs):
"""
Scan all easyconfigs in the robot path(s) to determine which software a patch file belongs to
:param patch_name: name of the patch file
    :param ec_dirs: list of directories to consider when looking for easyconfigs
:return: name of the software that this patch file belongs to (if found)
"""
soft_name = None
all_ecs = []
for ec_dir in ec_dirs:
for (dirpath, _, filenames) in os.walk(ec_dir):
for fn in filenames:
if fn != 'TEMPLATE.eb' and not fn.endswith('.py'):
path = os.path.join(dirpath, fn)
rawtxt = read_file(path)
if 'patches' in rawtxt:
all_ecs.append(path)
nr_of_ecs = len(all_ecs)
for idx, path in enumerate(all_ecs):
if soft_name:
break
rawtxt = read_file(path)
try:
ecs = process_easyconfig(path, validate=False)
for ec in ecs:
if is_patch_for(patch_name, ec['ec']):
soft_name = ec['ec']['name']
break
except EasyBuildError as err:
_log.debug("Ignoring easyconfig %s that fails to parse: %s", path, err)
sys.stdout.write('\r%s of %s easyconfigs checked' % (idx + 1, nr_of_ecs))
sys.stdout.flush()
sys.stdout.write('\n')
return soft_name
def check_pr_eligible_to_merge(pr_data):
"""
Check whether PR is eligible for merging.
:param pr_data: PR data obtained through GitHub API
:return: boolean value indicates whether PR is eligible
"""
res = True
def not_eligible(msg):
"""Helper function to warn about PR not being eligible for merging"""
print_msg("%s => not eligible for merging!" % msg, stderr=True, prefix=False)
return False
target = '%s/%s' % (pr_data['base']['repo']['owner']['login'], pr_data['base']['repo']['name'])
print_msg("Checking eligibility of %s PR #%s for merging..." % (target, pr_data['number']), prefix=False)
# check target branch, must be branch name specified in --pr-target-branch (usually 'develop')
pr_target_branch = build_option('pr_target_branch')
msg_tmpl = "* targets %s branch: %%s" % pr_target_branch
if pr_data['base']['ref'] == pr_target_branch:
print_msg(msg_tmpl % 'OK', prefix=False)
else:
res = not_eligible(msg_tmpl % "FAILED; found '%s'" % pr_data['base']['ref'])
# check test suite result, Travis must give green light
msg_tmpl = "* test suite passes: %s"
if pr_data['status_last_commit'] == 'success':
print_msg(msg_tmpl % 'OK', prefix=False)
elif pr_data['status_last_commit'] == 'pending':
res = not_eligible(msg_tmpl % "pending...")
elif pr_data['status_last_commit'] in ['error', 'failure']:
res = not_eligible(msg_tmpl % "FAILED")
else:
res = not_eligible(msg_tmpl % "(result unknown)")
if pr_data['base']['repo']['name'] == GITHUB_EASYCONFIGS_REPO:
# check for successful test report (checked in reverse order)
msg_tmpl = "* last test report is successful: %s"
test_report_regex = re.compile(r"^Test report by @\S+")
test_report_found = False
for comment in pr_data['issue_comments'][::-1]:
comment = comment['body']
if test_report_regex.search(comment):
if 'SUCCESS' in comment:
print_msg(msg_tmpl % 'OK', prefix=False)
test_report_found = True
break
elif 'FAILED' in comment:
res = not_eligible(msg_tmpl % 'FAILED')
test_report_found = True
break
else:
print_warning("Failed to determine outcome of test report for comment:\n%s" % comment)
if not test_report_found:
res = not_eligible(msg_tmpl % "(no test reports found)")
# check for approved review
approved_review_by = []
for review in pr_data['reviews']:
if review['state'] == 'APPROVED':
approved_review_by.append(review['user']['login'])
msg_tmpl = "* approved review: %s"
if approved_review_by:
print_msg(msg_tmpl % 'OK (by %s)' % ', '.join(approved_review_by), prefix=False)
else:
res = not_eligible(msg_tmpl % 'MISSING')
# check whether a milestone is set
msg_tmpl = "* milestone is set: %s"
if pr_data['milestone']:
print_msg(msg_tmpl % "OK (%s)" % pr_data['milestone']['title'], prefix=False)
else:
res = not_eligible(msg_tmpl % 'no milestone found')
return res
def reasons_for_closing(pr_data):
"""
Look for valid reasons to close PR by comparing with existing easyconfigs.
"""
if pr_data['status_last_commit']:
print_msg("Status of last commit is %s\n" % pr_data['status_last_commit'].upper(), prefix=False)
if pr_data['issue_comments']:
last_comment = pr_data['issue_comments'][-1]
timestamp = last_comment['updated_at'].replace('T', ' at ')[:-1]
username = last_comment['user']['login']
print_msg("Last comment on %s, by %s, was:\n\n%s" % (timestamp, username, last_comment['body']), prefix=False)
if pr_data['reviews']:
last_review = pr_data['reviews'][-1]
timestamp = last_review['submitted_at'].replace('T', ' at ')[:-1]
username = last_review['user']['login']
state, body = last_review['state'], last_review['body']
print_msg("Last reviewed on %s by %s, state %s\n\n%s" % (timestamp, username, state, body), prefix=False)
possible_reasons = []
print_msg("No activity since %s" % pr_data['updated_at'].replace('T', ' at ')[:-1], prefix=False)
# check if PR is inactive for more than 6 months
last_updated = datetime.strptime(pr_data['updated_at'], "%Y-%m-%dT%H:%M:%SZ")
if datetime.now() - last_updated > timedelta(days=180):
possible_reasons.append('inactive')
robot_paths = build_option('robot_path')
pr_files = [path for path in fetch_easyconfigs_from_pr(pr_data['number']) if path.endswith('.eb')]
obsoleted = []
uses_archived_tc = []
for pr_file in pr_files:
pr_ec = EasyConfigParser(pr_file).get_config_dict()
pr_tc = '%s-%s' % (pr_ec['toolchain']['name'], pr_ec['toolchain']['version'])
print_msg("* %s-%s" % (pr_ec['name'], pr_ec['version']), prefix=False)
for robot_path in robot_paths:
# check if PR easyconfig uses an archived toolchain
path = os.path.join(robot_path, EASYCONFIGS_ARCHIVE_DIR, pr_tc[0].lower(), pr_tc.split('-')[0])
for (dirpath, _, filenames) in os.walk(path):
for fn in filenames:
if fn.endswith('.eb'):
ec = EasyConfigParser(os.path.join(dirpath, fn)).get_config_dict()
if ec.get('easyblock') == 'Toolchain':
if 'versionsuffix' in ec:
archived_tc = '%s-%s%s' % (ec['name'], ec['version'], ec.get('versionsuffix'))
else:
archived_tc = '%s-%s' % (ec['name'], ec['version'])
if pr_tc == archived_tc:
print_msg(" - uses archived toolchain %s" % pr_tc, prefix=False)
uses_archived_tc.append(pr_ec)
# check if there is a newer version of PR easyconfig
newer_versions = set()
for (dirpath, _, filenames) in os.walk(os.path.join(robot_path, pr_ec['name'].lower()[0], pr_ec['name'])):
for fn in filenames:
if fn.endswith('.eb'):
ec = EasyConfigParser(os.path.join(dirpath, fn)).get_config_dict()
if LooseVersion(ec['version']) > LooseVersion(pr_ec['version']):
newer_versions.add(ec['version'])
if newer_versions:
print_msg(" - found newer versions %s" % ", ".join(sorted(newer_versions)), prefix=False)
obsoleted.append(pr_ec)
if uses_archived_tc:
possible_reasons.append('archived')
if any([e['name'] in pr_data['title'] for e in obsoleted]):
possible_reasons.append('obsolete')
return possible_reasons
def close_pr(pr, motivation_msg=None):
"""
Close specified pull request
:param pr: PR number
:param motivation_msg: string containing motivation for closing the PR
"""
github_user = build_option('github_user')
if github_user is None:
raise EasyBuildError("GitHub user must be specified to use --close-pr")
pr_target_account = build_option('pr_target_account')
pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO
pr_data, _ = fetch_pr_data(pr, pr_target_account, pr_target_repo, github_user, full=True)
if pr_data['state'] == GITHUB_STATE_CLOSED:
raise EasyBuildError("PR #%d from %s/%s is already closed.", pr, pr_target_account, pr_target_repo)
pr_owner = pr_data['user']['login']
msg = "\n%s/%s PR #%s was submitted by %s, " % (pr_target_account, pr_target_repo, pr, pr_owner)
msg += "you are using GitHub account '%s'\n" % github_user
msg += "\nPR Title: \"%s\"\n" % pr_data['title']
print_msg(msg, prefix=False)
dry_run = build_option('dry_run') or build_option('extended_dry_run')
reopen = motivation_msg == VALID_CLOSE_PR_REASONS['retest']
if not motivation_msg:
print_msg("No reason or message specified, looking for possible reasons\n")
possible_reasons = reasons_for_closing(pr_data)
if not possible_reasons:
raise EasyBuildError("No reason specified and none found from PR data, "
"please use --close-pr-reasons or --close-pr-msg")
else:
motivation_msg = ", ".join([VALID_CLOSE_PR_REASONS[reason] for reason in possible_reasons])
print_msg("\nNo reason specified but found possible reasons: %s.\n" % motivation_msg, prefix=False)
msg = "@%s, this PR is being closed for the following reason(s): %s." % (pr_data['user']['login'], motivation_msg)
if not reopen:
msg += "\nPlease don't hesitate to reopen this PR or add a comment if you feel this contribution is still "
msg += "relevant.\nFor more information on our policy w.r.t. closing PRs, see "
msg += "https://easybuild.readthedocs.io/en/latest/Contributing.html"
msg += "#why-a-pull-request-may-be-closed-by-a-maintainer"
post_comment_in_issue(pr, msg, account=pr_target_account, repo=pr_target_repo, github_user=github_user)
if dry_run:
print_msg("[DRY RUN] Closed %s/%s PR #%s" % (pr_target_account, pr_target_repo, pr), prefix=False)
if reopen:
print_msg("[DRY RUN] Reopened %s/%s PR #%s" % (pr_target_account, pr_target_repo, pr), prefix=False)
else:
github_token = fetch_github_token(github_user)
if github_token is None:
raise EasyBuildError("GitHub token for user '%s' must be available to use --close-pr", github_user)
g = RestClient(GITHUB_API_URL, username=github_user, token=github_token)
pull_url = g.repos[pr_target_account][pr_target_repo].pulls[pr]
body = {'state': 'closed'}
status, data = pull_url.post(body=body)
if not status == HTTP_STATUS_OK:
raise EasyBuildError("Failed to close PR #%s; status %s, data: %s", pr, status, data)
if reopen:
body = {'state': 'open'}
status, data = pull_url.post(body=body)
if not status == HTTP_STATUS_OK:
raise EasyBuildError("Failed to reopen PR #%s; status %s, data: %s", pr, status, data)
def list_prs(params, per_page=GITHUB_MAX_PER_PAGE, github_user=None):
"""
List pull requests according to specified selection/order parameters
:param params: 3-tuple with selection parameters for PRs (<state>, <sort>, <direction>),
see https://developer.github.com/v3/pulls/#parameters
"""
parameters = {
'state': params[0],
'sort': params[1],
'direction': params[2],
'per_page': per_page,
}
print_msg("Listing PRs with parameters: %s" % ', '.join(k + '=' + str(parameters[k]) for k in sorted(parameters)))
pr_target_account = build_option('pr_target_account')
pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO
pr_data, _ = fetch_pr_data(None, pr_target_account, pr_target_repo, github_user, **parameters)
lines = []
for pr in pr_data:
lines.append("PR #%s: %s" % (pr['number'], pr['title']))
return '\n'.join(lines)
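# Example use of list_prs (illustrative values; the 3-tuple follows the GitHub
# API pull request parameters referenced in the docstring above):
def _example_list_prs():  # hypothetical helper, not part of the original module
    # the 10 most recently updated open PRs
    return list_prs(('open', 'updated', 'desc'), per_page=10, github_user='example-user')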
def merge_pr(pr):
"""
Merge specified pull request
"""
github_user = build_option('github_user')
if github_user is None:
raise EasyBuildError("GitHub user must be specified to use --merge-pr")
pr_target_account = build_option('pr_target_account')
pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO
pr_data, pr_url = fetch_pr_data(pr, pr_target_account, pr_target_repo, github_user, full=True)
msg = "\n%s/%s PR #%s was submitted by %s, " % (pr_target_account, pr_target_repo, pr, pr_data['user']['login'])
msg += "you are using GitHub account '%s'\n" % github_user
print_msg(msg, prefix=False)
if pr_data['user']['login'] == github_user:
raise EasyBuildError("Please do not merge your own PRs!")
force = build_option('force')
dry_run = build_option('dry_run') or build_option('extended_dry_run')
def merge_url(gh):
"""Utility function to fetch merge URL for a specific PR."""
return gh.repos[pr_target_account][pr_target_repo].pulls[pr].merge
if check_pr_eligible_to_merge(pr_data) or force:
print_msg("\nReview %s merging pull request!\n" % ("OK,", "FAILed, yet forcibly")[force], prefix=False)
comment = "Going in, thanks @%s!" % pr_data['user']['login']
post_comment_in_issue(pr, comment, account=pr_target_account, repo=pr_target_repo, github_user=github_user)
if dry_run:
print_msg("[DRY RUN] Merged %s/%s pull request #%s" % (pr_target_account, pr_target_repo, pr), prefix=False)
else:
body = {
'commit_message': pr_data['title'],
'sha': pr_data['head']['sha'],
}
github_api_put_request(merge_url, github_user, body=body)
else:
print_warning("Review indicates this PR should not be merged (use -f/--force to do so anyway)")
@only_if_module_is_available('git', pkgname='GitPython')
def new_branch_github(paths, ecs, commit_msg=None):
"""
Create new branch on GitHub using specified files
:param paths: paths to categorized lists of files (easyconfigs, files to delete, patches, files with .py extension)
:param ecs: list of parsed easyconfigs, incl. for dependencies (if robot is enabled)
:param commit_msg: commit message to use
"""
branch_name = build_option('pr_branch_name')
if commit_msg is None:
commit_msg = build_option('pr_commit_msg')
# create branch, commit files to it & push to GitHub
res = _easyconfigs_pr_common(paths, ecs, pr_branch=branch_name, commit_msg=commit_msg)
return res
@only_if_module_is_available('git', pkgname='GitPython')
def new_pr_from_branch(branch_name, title=None, descr=None, pr_target_repo=None, pr_metadata=None, commit_msg=None):
"""
Create new pull request from specified branch on GitHub.
"""
if descr is None:
descr = build_option('pr_descr')
if commit_msg is None:
commit_msg = build_option('pr_commit_msg')
if title is None:
title = build_option('pr_title') or commit_msg
pr_target_account = build_option('pr_target_account')
pr_target_branch = build_option('pr_target_branch')
if pr_target_repo is None:
pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO
# fetch GitHub token (required to perform actions on GitHub)
github_user = build_option('github_user')
if github_user is None:
raise EasyBuildError("GitHub user must be specified to open a pull request")
github_token = fetch_github_token(github_user)
if github_token is None:
raise EasyBuildError("GitHub token for user '%s' must be available to open a pull request", github_user)
# GitHub organisation or GitHub user where branch is located
github_account = build_option('github_org') or github_user
if pr_metadata:
file_info, deleted_paths, diff_stat = pr_metadata
else:
# initialize repository
git_working_dir = tempfile.mkdtemp(prefix='git-working-dir')
git_repo = init_repo(git_working_dir, pr_target_repo)
# check out PR branch, and sync with current develop
setup_repo(git_repo, github_account, pr_target_repo, branch_name)
print_msg("syncing '%s' with current '%s/develop' branch..." % (branch_name, pr_target_account), log=_log)
sync_with_develop(git_repo, branch_name, pr_target_account, pr_target_repo)
# checkout target branch, to obtain diff with PR branch
# make sure right branch is being used by checking it out via remotes/*
print_msg("checking out target branch '%s/%s'..." % (pr_target_account, pr_target_branch), log=_log)
remote = create_remote(git_repo, pr_target_account, pr_target_repo, https=True)
git_repo.git.fetch(remote.name)
if pr_target_branch in [b.name for b in git_repo.branches]:
git_repo.delete_head(pr_target_branch, force=True)
full_target_branch_ref = 'remotes/%s/%s' % (remote.name, pr_target_branch)
git_repo.git.checkout(full_target_branch_ref, track=True, force=True)
diff_stat = git_repo.git.diff(full_target_branch_ref, branch_name, stat=True)
print_msg("determining metadata for pull request based on changed files...", log=_log)
# figure out list of new/changed & deletes files compared to target branch
difflist = git_repo.head.commit.diff(branch_name)
changed_files, ec_paths, deleted_paths, patch_paths = [], [], [], []
for diff in difflist:
path = diff.b_path
changed_files.append(path)
if diff.deleted_file:
deleted_paths.append(path)
elif path.endswith('.eb'):
ec_paths.append(path)
elif path.endswith('.patch'):
patch_paths.append(path)
if changed_files:
from_branch = '%s/%s' % (github_account, branch_name)
to_branch = '%s/%s' % (pr_target_account, pr_target_branch)
msg = ["found %d changed file(s) in '%s' relative to '%s':" % (len(changed_files), from_branch, to_branch)]
if ec_paths:
msg.append("* %d new/changed easyconfig file(s):" % len(ec_paths))
msg.extend([" " + x for x in ec_paths])
if patch_paths:
msg.append("* %d patch(es):" % len(patch_paths))
msg.extend([" " + x for x in patch_paths])
if deleted_paths:
msg.append("* %d deleted file(s)" % len(deleted_paths))
msg.append([" " + x for x in deleted_paths])
print_msg('\n'.join(msg), log=_log)
else:
raise EasyBuildError("No changes in '%s' branch compared to current 'develop' branch!", branch_name)
# copy repo while target branch is still checked out
tmpdir = tempfile.mkdtemp()
target_dir = os.path.join(tmpdir, pr_target_repo)
copy_dir(os.path.join(git_working_dir, pr_target_repo), target_dir, force_in_dry_run=True)
# check out PR branch to determine info on changed/added files relative to target branch
        # make sure right branch is being used by checking it out via remotes/*
print_msg("checking out PR branch '%s/%s'..." % (github_account, branch_name), log=_log)
remote = create_remote(git_repo, github_account, pr_target_repo, https=True)
git_repo.git.fetch(remote.name)
if branch_name in [b.name for b in git_repo.branches]:
git_repo.delete_head(branch_name, force=True)
git_repo.git.checkout('remotes/%s/%s' % (remote.name, branch_name), track=True, force=True)
# path to easyconfig files is expected to be absolute in det_file_info
ec_paths = [os.path.join(git_working_dir, pr_target_repo, x) for x in ec_paths]
file_info = det_file_info(ec_paths, target_dir)
labels = []
if pr_target_repo == GITHUB_EASYCONFIGS_REPO:
# label easyconfigs for new software and/or new easyconfigs for existing software
if any(file_info['new_folder']):
labels.append('new')
if any(file_info['new_file_in_existing_folder']):
labels.append('update')
# only use most common toolchain(s) in toolchain label of PR title
toolchains = ['%(name)s/%(version)s' % ec['toolchain'] for ec in file_info['ecs']]
toolchains_counted = sorted([(toolchains.count(tc), tc) for tc in nub(toolchains)])
toolchain_label = ','.join([tc for (cnt, tc) in toolchains_counted if cnt == toolchains_counted[-1][0]])
# only use most common module class(es) in moduleclass label of PR title
classes = [ec['moduleclass'] for ec in file_info['ecs']]
classes_counted = sorted([(classes.count(c), c) for c in nub(classes)])
class_label = ','.join([tc for (cnt, tc) in classes_counted if cnt == classes_counted[-1][0]])
elif pr_target_repo == GITHUB_EASYBLOCKS_REPO:
if any(file_info['new']):
labels.append('new')
if title is None:
if pr_target_repo == GITHUB_EASYCONFIGS_REPO:
if file_info['ecs'] and all(file_info['new']) and not deleted_paths:
# mention software name/version in PR title (only first 3)
names_and_versions = nub(["%s v%s" % (ec.name, ec.version) for ec in file_info['ecs']])
if len(names_and_versions) <= 3:
main_title = ', '.join(names_and_versions)
else:
main_title = ', '.join(names_and_versions[:3] + ['...'])
title = "{%s}[%s] %s" % (class_label, toolchain_label, main_title)
# if Python is listed as a dependency, then mention Python version(s) in PR title
pyver = []
for ec in file_info['ecs']:
# iterate over all dependencies (incl. build dependencies & multi-deps)
for dep in ec.dependencies():
if dep['name'] == 'Python':
# check whether Python is listed as a multi-dep if it's marked as a build dependency
if dep['build_only'] and 'Python' not in ec['multi_deps']:
continue
else:
pyver.append(dep['version'])
if pyver:
title += " w/ Python %s" % ' + '.join(sorted(nub(pyver)))
elif pr_target_repo == GITHUB_EASYBLOCKS_REPO:
if file_info['eb_names'] and all(file_info['new']) and not deleted_paths:
plural = 's' if len(file_info['eb_names']) > 1 else ''
title = "new easyblock%s for %s" % (plural, (', '.join(file_info['eb_names'])))
if title is None:
raise EasyBuildError("Don't know how to make a PR title for this PR. "
"Please include a title (use --pr-title)")
full_descr = "(created using `eb --new-pr`)\n"
if descr is not None:
full_descr += descr
# create PR
pr_target_branch = build_option('pr_target_branch')
dry_run = build_option('dry_run') or build_option('extended_dry_run')
msg = '\n'.join([
'',
"Opening pull request%s" % ('', " [DRY RUN]")[dry_run],
"* target: %s/%s:%s" % (pr_target_account, pr_target_repo, pr_target_branch),
"* from: %s/%s:%s" % (github_account, pr_target_repo, branch_name),
"* title: \"%s\"" % title,
"* labels: %s" % (', '.join(labels) or '(none)'),
"* description:",
'"""',
full_descr,
'"""',
"* overview of changes:\n%s" % diff_stat,
'',
])
print_msg(msg, log=_log, prefix=False)
if not dry_run:
g = RestClient(GITHUB_API_URL, username=github_user, token=github_token)
pulls_url = g.repos[pr_target_account][pr_target_repo].pulls
body = {
'base': pr_target_branch,
'head': '%s:%s' % (github_account, branch_name),
'title': title,
'body': full_descr,
}
status, data = pulls_url.post(body=body)
if not status == HTTP_STATUS_CREATED:
raise EasyBuildError("Failed to open PR for branch %s; status %s, data: %s", branch_name, status, data)
print_msg("Opened pull request: %s" % data['html_url'], log=_log, prefix=False)
if labels:
# post labels
pr = data['html_url'].split('/')[-1]
pr_url = g.repos[pr_target_account][pr_target_repo].issues[pr]
try:
status, data = pr_url.labels.post(body=labels)
if status == HTTP_STATUS_OK:
print_msg("Added labels %s to PR#%s" % (', '.join(labels), pr), log=_log, prefix=False)
except HTTPError as err:
_log.info("Failed to add labels to PR# %s: %s." % (pr, err))
def new_pr(paths, ecs, title=None, descr=None, commit_msg=None):
"""
Open new pull request using specified files
:param paths: paths to categorized lists of files (easyconfigs, files to delete, patches)
:param ecs: list of parsed easyconfigs, incl. for dependencies (if robot is enabled)
:param title: title to use for pull request
    :param descr: description to use for pull request
:param commit_msg: commit message to use
"""
if commit_msg is None:
commit_msg = build_option('pr_commit_msg')
# create new branch in GitHub
res = new_branch_github(paths, ecs, commit_msg=commit_msg)
file_info, deleted_paths, _, branch_name, diff_stat, pr_target_repo = res
new_pr_from_branch(branch_name, title=title, descr=descr, pr_target_repo=pr_target_repo,
pr_metadata=(file_info, deleted_paths, diff_stat), commit_msg=commit_msg)
def det_account_branch_for_pr(pr_id, github_user=None, pr_target_repo=None):
"""Determine account & branch corresponding to pull request with specified id."""
if github_user is None:
github_user = build_option('github_user')
if github_user is None:
raise EasyBuildError("GitHub username (--github-user) must be specified!")
pr_target_account = build_option('pr_target_account')
if pr_target_repo is None:
pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO
pr_data, _ = fetch_pr_data(pr_id, pr_target_account, pr_target_repo, github_user)
# branch that corresponds with PR is supplied in form <account>:<branch_label>
account = pr_data['head']['label'].split(':')[0]
branch = ':'.join(pr_data['head']['label'].split(':')[1:])
github_target = '%s/%s' % (pr_target_account, pr_target_repo)
print_msg("Determined branch name corresponding to %s PR #%s: %s" % (github_target, pr_id, branch), log=_log)
return account, branch
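# Worked sketch of the head label parsing above (hypothetical label value): a
# label such as 'someuser:20240101_new_pr_foo' splits into the account and the
# branch; re-joining on ':' keeps branch names that contain colons intact.
def _example_label_split():  # hypothetical helper, not part of the original module
    label = 'someuser:20240101_new_pr_foo'
    account = label.split(':')[0]
    branch = ':'.join(label.split(':')[1:])
    return account, branch  # -> ('someuser', '20240101_new_pr_foo')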
def det_pr_target_repo(paths):
"""Determine target repository for pull request from given cagetorized list of files
:param paths: paths to categorized lists of files (easyconfigs, files to delete, patches, .py files)
"""
pr_target_repo = build_option('pr_target_repo')
# determine target repository for PR based on which files are provided
# (see categorize_files_by_type function)
if pr_target_repo is None:
_log.info("Trying to derive target repository based on specified files...")
easyconfigs, files_to_delete, patch_files, py_files = [paths[key] for key in sorted(paths.keys())]
# Python files provided, and no easyconfig files or patches
if py_files and not (easyconfigs or patch_files):
_log.info("Only Python files provided, no easyconfig files or patches...")
# if all Python files are easyblocks, target repo should be easyblocks;
# otherwise, target repo is assumed to be framework
if all([get_easyblock_class_name(path) for path in py_files]):
pr_target_repo = GITHUB_EASYBLOCKS_REPO
_log.info("All Python files are easyblocks, target repository is assumed to be %s", pr_target_repo)
else:
pr_target_repo = GITHUB_FRAMEWORK_REPO
_log.info("Not all Python files are easyblocks, target repository is assumed to be %s", pr_target_repo)
# if no Python files are provided, only easyconfigs & patches, or if files to delete are .eb files,
# then target repo is assumed to be easyconfigs
elif easyconfigs or patch_files or (files_to_delete and all(x.endswith('.eb') for x in files_to_delete)):
pr_target_repo = GITHUB_EASYCONFIGS_REPO
_log.info("Only easyconfig and patch files found, target repository is assumed to be %s", pr_target_repo)
else:
_log.info("No Python files, easyconfigs or patches found, can't derive target repository...")
return pr_target_repo
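# Illustrative sketch of the derivation rules above (assumes --pr-target-repo
# is not set; the key names are inferred from the sorted unpacking in
# det_pr_target_repo and the values are made up):
def _example_det_pr_target_repo():  # hypothetical helper, not part of the original module
    paths = {
        'easyconfigs': ['toy-0.0.eb'],  # only easyconfig files provided...
        'files_to_delete': [],
        'patch_files': [],
        'py_files': [],
    }
    # ...so the easyconfigs repository is derived as target repo
    return det_pr_target_repo(paths)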
@only_if_module_is_available('git', pkgname='GitPython')
def update_branch(branch_name, paths, ecs, github_account=None, commit_msg=None):
"""
Update specified branch in GitHub using specified files
:param paths: paths to categorized lists of files (easyconfigs, files to delete, patches)
:param github_account: GitHub account where branch is located
:param ecs: list of parsed easyconfigs, incl. for dependencies (if robot is enabled)
:param commit_msg: commit message to use
"""
if commit_msg is None:
commit_msg = build_option('pr_commit_msg')
if commit_msg is None:
raise EasyBuildError("A meaningful commit message must be specified via --pr-commit-msg when using --update-pr")
if github_account is None:
github_account = build_option('github_user') or build_option('github_org')
_, _, _, _, diff_stat, pr_target_repo = _easyconfigs_pr_common(paths, ecs, start_branch=branch_name,
pr_branch=branch_name, start_account=github_account,
commit_msg=commit_msg)
print_msg("Overview of changes:\n%s\n" % diff_stat, log=_log, prefix=False)
full_repo = '%s/%s' % (github_account, pr_target_repo)
msg = "pushed updated branch '%s' to %s" % (branch_name, full_repo)
if build_option('dry_run') or build_option('extended_dry_run'):
msg += " [DRY RUN]"
print_msg(msg, log=_log)
@only_if_module_is_available('git', pkgname='GitPython')
def update_pr(pr_id, paths, ecs, commit_msg=None):
"""
Update specified pull request using specified files
:param pr_id: ID of pull request to update
:param paths: paths to categorized lists of files (easyconfigs, files to delete, patches)
:param ecs: list of parsed easyconfigs, incl. for dependencies (if robot is enabled)
:param commit_msg: commit message to use
"""
pr_target_repo = det_pr_target_repo(paths)
if pr_target_repo is None:
raise EasyBuildError("Failed to determine target repository, please specify it via --pr-target-repo!")
github_account, branch_name = det_account_branch_for_pr(pr_id, pr_target_repo=pr_target_repo)
update_branch(branch_name, paths, ecs, github_account=github_account, commit_msg=commit_msg)
full_repo = '%s/%s' % (build_option('pr_target_account'), pr_target_repo)
msg = "updated https://github.com/%s/pull/%s" % (full_repo, pr_id)
if build_option('dry_run') or build_option('extended_dry_run'):
msg += " [DRY RUN]"
print_msg(msg, log=_log)
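# Typical command-line flow that ends up in update_pr (illustrative invocation,
# using the --update-pr and --pr-commit-msg options referenced above):
#   eb --update-pr 12345 Foo-1.2.3-GCC-10.2.0.eb --pr-commit-msg "add missing dependency"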
def check_online_status():
"""
Check whether we currently are online
Return True if online, else a list of error messages
"""
# Try repeatedly and with different URLs to cater for flaky servers
# E.g. Github returned "HTTP Error 403: Forbidden" and "HTTP Error 406: Not Acceptable" randomly
# Timeout and repeats set to total 1 minute
urls = [GITHUB_API_URL + '/rate_limit', GITHUB_URL, GITHUB_API_URL]
num_repeats = 6
errors = set() # Use set to record only unique errors
for attempt in range(num_repeats):
# Cycle through URLs
url = urls[attempt % len(urls)]
try:
urlopen(url, timeout=10)
errors = None
break
except URLError as err:
errors.add('%s: %s' % (url, err))
return sorted(errors) if errors else True
def check_github():
"""
Check status of GitHub integration, and report back.
* check whether GitHub username is available
* check whether a GitHub token is available, and whether it works
* check whether git and GitPython are available
* check whether push access to own GitHub repositories works
* check whether creating gists works
    * check whether a location for local working directories for Git repositories is available (not strictly needed)
"""
debug = build_option('debug')
# start by assuming that everything works, individual checks will disable action that won't work
status = {}
for action in ['--from-pr', '--new-pr', '--review-pr', '--upload-test-report', '--update-pr']:
status[action] = True
print_msg("\nChecking status of GitHub integration...\n", log=_log, prefix=False)
# check whether we're online; if not, half of the checks are going to fail...
print_msg("Making sure we're online...", log=_log, prefix=False, newline=False)
online_state = check_online_status()
if online_state is True:
print_msg("OK\n", log=_log, prefix=False)
else:
print_msg("FAIL (%s)", ', '.join(online_state), log=_log, prefix=False)
raise EasyBuildError("checking status of GitHub integration must be done online")
# GitHub user
print_msg("* GitHub user...", log=_log, prefix=False, newline=False)
github_user = build_option('github_user')
github_account = build_option('github_org') or build_option('github_user')
if github_user is None:
check_res = "(none available) => FAIL"
status['--new-pr'] = status['--update-pr'] = status['--upload-test-report'] = False
else:
check_res = "%s => OK" % github_user
print_msg(check_res, log=_log, prefix=False)
# check GitHub token
print_msg("* GitHub token...", log=_log, prefix=False, newline=False)
github_token = fetch_github_token(github_user)
if github_token is None:
check_res = "(no token found) => FAIL"
else:
# don't print full token, should be kept secret!
partial_token = '%s..%s' % (github_token[:3], github_token[-3:])
token_descr = partial_token + " (len: %d)" % len(github_token)
if validate_github_token(github_token, github_user):
check_res = "%s => OK (validated)" % token_descr
else:
check_res = "%s => FAIL (validation failed)" % token_descr
if 'FAIL' in check_res:
status['--new-pr'] = status['--update-pr'] = status['--upload-test-report'] = False
print_msg(check_res, log=_log, prefix=False)
# check git command
print_msg("* git command...", log=_log, prefix=False, newline=False)
git_cmd = which('git')
git_version = get_tool_version('git')
if git_cmd:
if git_version in [UNKNOWN, None]:
check_res = "%s version => FAIL" % git_version
else:
check_res = "OK (\"%s\")" % git_version
else:
check_res = "(not found) => FAIL"
if 'FAIL' in check_res:
status['--new-pr'] = status['--update-pr'] = False
print_msg(check_res, log=_log, prefix=False)
# check GitPython module
print_msg("* GitPython module...", log=_log, prefix=False, newline=False)
if 'git' in sys.modules:
git_check = True
git_attrs = ['GitCommandError', 'Repo']
for attr in git_attrs:
git_check &= attr in dir(git)
if git_check:
check_res = "OK (GitPython version %s)" % git.__version__
else:
check_res = "FAIL (import ok, but module doesn't provide what is expected)"
else:
check_res = "FAIL (import failed)"
if 'FAIL' in check_res:
status['--new-pr'] = status['--update-pr'] = False
print_msg(check_res, log=_log, prefix=False)
# test push access to own GitHub repository: try to clone repo and push a test branch
msg = "* push access to %s/%s repo @ GitHub..." % (github_account, GITHUB_EASYCONFIGS_REPO)
print_msg(msg, log=_log, prefix=False, newline=False)
git_working_dir = tempfile.mkdtemp(prefix='git-working-dir')
git_repo, res, push_err = None, None, None
branch_name = 'test_branch_%s' % ''.join(random.choice(ascii_letters) for _ in range(5))
try:
git_repo = init_repo(git_working_dir, GITHUB_EASYCONFIGS_REPO, silent=not debug)
remote_name = setup_repo(git_repo, github_account, GITHUB_EASYCONFIGS_REPO, 'master',
silent=not debug, git_only=True)
git_repo.create_head(branch_name)
res = getattr(git_repo.remotes, remote_name).push(branch_name)
except Exception as err:
_log.warning("Exception when testing push access to %s/%s: %s", github_account, GITHUB_EASYCONFIGS_REPO, err)
push_err = err
if res:
if res[0].flags & res[0].ERROR:
_log.warning("Error occurred when pushing test branch to GitHub: %s", res[0].summary)
check_res = "FAIL (error occurred)"
else:
check_res = "OK"
elif github_user:
if 'git' in sys.modules:
ver, req_ver = git.__version__, '1.0'
if LooseVersion(ver) < LooseVersion(req_ver):
check_res = "FAIL (GitPython version %s is too old, should be version %s or newer)" % (ver, req_ver)
else:
check_res = "FAIL (unexpected exception: %s)" % push_err
else:
check_res = "FAIL (GitPython is not available)"
else:
check_res = "FAIL (no GitHub user specified)"
if 'FAIL' in check_res:
status['--new-pr'] = status['--update-pr'] = False
print_msg(check_res, log=_log, prefix=False)
# cleanup: delete test branch that was pushed to GitHub
if git_repo and push_err is None:
try:
getattr(git_repo.remotes, remote_name).push(branch_name, delete=True)
except GitCommandError as err:
sys.stderr.write("WARNING: failed to delete test branch from GitHub: %s\n" % err)
# test creating a gist
print_msg("* creating gists...", log=_log, prefix=False, newline=False)
gist_url = None
try:
gist_url = create_gist("This is just a test", 'test.txt', descr='test123', github_user=github_user,
github_token=github_token)
gist_id = gist_url.split('/')[-1]
_log.info("Gist with ID %s successfully created, now deleting it again...", gist_id)
delete_gist(gist_id, github_user=github_user, github_token=github_token)
_log.info("Gist with ID %s deleted!", gist_id)
except Exception as err:
_log.warning("Exception occurred when trying to create & delete gist: %s", err)
if gist_url and re.match('https://gist.github.com/[0-9a-f]+$', gist_url):
check_res = "OK"
else:
check_res = "FAIL (gist_url: %s)" % gist_url
status['--upload-test-report'] = False
print_msg(check_res, log=_log, prefix=False)
    # check whether a location for local working directories for Git repositories is available (not strictly needed)
    print_msg("* location of Git working dirs... ", log=_log, prefix=False, newline=False)
git_working_dirs_path = build_option('git_working_dirs_path')
if git_working_dirs_path:
check_res = "OK (%s)" % git_working_dirs_path
else:
check_res = "not found (suboptimal)"
print_msg(check_res, log=_log, prefix=False)
# report back
if all(status.values()):
msg = "\nAll checks PASSed!\n"
else:
msg = '\n'.join([
'',
"One or more checks FAILed, GitHub configuration not fully complete!",
"See http://easybuild.readthedocs.org/en/latest/Integration_with_GitHub.html#configuration for help.",
'',
])
print_msg(msg, log=_log, prefix=False)
print_msg("Status of GitHub integration:", log=_log, prefix=False)
for action in sorted(status):
res = ("not supported", 'OK')[status[action]]
print_msg("* %s: %s" % (action, res), log=_log, prefix=False)
print_msg('', prefix=False)
def fetch_github_token(user):
"""Fetch GitHub token for specified user from keyring."""
token, msg = None, None
if user is None:
msg = "No GitHub user name provided, required for fetching GitHub token."
elif not HAVE_KEYRING:
msg = "Failed to obtain GitHub token from keyring, "
msg += "required Python module https://pypi.python.org/pypi/keyring is not available."
else:
try:
token = keyring.get_password(KEYRING_GITHUB_TOKEN, user)
except Exception as err:
_log.warning("Exception occurred when fetching GitHub token: %s", err)
if token is None:
python_cmd = '; '.join([
"import getpass, keyring",
"keyring.set_password(\"%s\", \"%s\", getpass.getpass())" % (KEYRING_GITHUB_TOKEN, user),
])
msg = '\n'.join([
"Failed to obtain GitHub token for %s" % user,
"Use the following procedure to install a GitHub token in your keyring:",
"$ python -c '%s'" % python_cmd,
])
if token is None:
# failed to obtain token, log message explaining why
_log.warning(msg)
else:
_log.info("Successfully obtained GitHub token for user %s from keyring." % user)
return token
@only_if_module_is_available('keyring')
def install_github_token(github_user, silent=False):
"""
Install specified GitHub token for specified user.
:param github_user: GitHub user to install token for
:param silent: keep quiet (don't print any messages)
"""
if github_user is None:
raise EasyBuildError("GitHub user must be specified to install GitHub token")
# check if there's a token available already
current_token = fetch_github_token(github_user)
if current_token:
current_token = '%s..%s' % (current_token[:3], current_token[-3:])
if build_option('force'):
msg = "WARNING: overwriting installed token '%s' for user '%s'..." % (current_token, github_user)
print_msg(msg, prefix=False, silent=silent)
else:
raise EasyBuildError("Installed token '%s' found for user '%s', not overwriting it without --force",
current_token, github_user)
# get token to install
token = getpass.getpass(prompt="Token: ").strip()
# validate token before installing it
print_msg("Validating token...", prefix=False, silent=silent)
valid_token = validate_github_token(token, github_user)
if valid_token:
print_msg("Token seems to be valid, installing it.", prefix=False, silent=silent)
else:
raise EasyBuildError("Token validation failed, not installing it. Please verify your token and try again.")
# install token
keyring.set_password(KEYRING_GITHUB_TOKEN, github_user, token)
print_msg("Token '%s..%s' installed!" % (token[:3], token[-3:]), prefix=False, silent=silent)
def validate_github_token(token, github_user):
"""
Check GitHub token:
    * see if it conforms to expectations (40 characters long, only hexadecimal characters [0-9a-f])
* see if it can be used for authenticated access
"""
sha_regex = re.compile('^[0-9a-f]{40}')
# token should be 40 characters long, and only contain characters in [0-9a-f]
sanity_check = bool(sha_regex.match(token))
if sanity_check:
_log.info("Sanity check on token passed")
else:
_log.warning("Sanity check on token failed; token doesn't match pattern '%s'", sha_regex.pattern)
# try and determine sha of latest commit in easybuilders/easybuild-easyconfigs repo through authenticated access
sha = None
try:
sha = fetch_latest_commit_sha(GITHUB_EASYCONFIGS_REPO, GITHUB_EB_MAIN, github_user=github_user, token=token)
except Exception as err:
_log.warning("An exception occurred when trying to use token for authenticated GitHub access: %s", err)
token_test = bool(sha_regex.match(sha or ''))
if token_test:
_log.info("GitHub token can be used for authenticated GitHub access, validation passed")
return sanity_check and token_test
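# Minimal sketch of the sanity check above (fake token; classic GitHub tokens
# were 40 hexadecimal characters):
def _example_token_sanity_check():  # hypothetical helper, not part of the original module
    fake_token = '0123456789abcdef0123456789abcdef01234567'  # 40 hex chars, made up
    return bool(re.compile('^[0-9a-f]{40}').match(fake_token))  # -> True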
def find_easybuild_easyconfig(github_user=None):
"""
    Fetch the easyconfig (.eb) file for the latest EasyBuild version from GitHub
:param github_user: name of GitHub user to use when querying GitHub
"""
dev_repo = download_repo(GITHUB_EASYCONFIGS_REPO, branch='develop', account=GITHUB_EB_MAIN, github_user=github_user)
eb_parent_path = os.path.join(dev_repo, 'easybuild', 'easyconfigs', 'e', 'EasyBuild')
files = os.listdir(eb_parent_path)
# find most recent version
file_versions = []
for eb_file in files:
txt = read_file(os.path.join(eb_parent_path, eb_file))
for line in txt.split('\n'):
if re.search(r'^version\s*=', line):
scope = {}
exec(line, scope)
version = scope['version']
file_versions.append((LooseVersion(version), eb_file))
if file_versions:
fn = sorted(file_versions)[-1][1]
else:
raise EasyBuildError("Couldn't find any EasyBuild easyconfigs")
eb_file = os.path.join(eb_parent_path, fn)
return eb_file
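# Minimal sketch of the exec-based version extraction used above (made-up
# easyconfig line):
def _example_version_extraction():  # hypothetical helper, not part of the original module
    line = "version = '4.5.0'"  # a typical 'version = ...' easyconfig line
    scope = {}
    exec(line, scope)
    return scope['version']  # -> '4.5.0'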
def fetch_pr_data(pr, pr_target_account, pr_target_repo, github_user, full=False, **parameters):
"""Fetch PR data from GitHub"""
def pr_url(gh):
"""Utility function to fetch data for a specific PR."""
if pr is None:
return gh.repos[pr_target_account][pr_target_repo].pulls
else:
return gh.repos[pr_target_account][pr_target_repo].pulls[pr]
status, pr_data = github_api_get_request(pr_url, github_user, **parameters)
if status != HTTP_STATUS_OK:
raise EasyBuildError("Failed to get data for PR #%d from %s/%s (status: %d %s)",
pr, pr_target_account, pr_target_repo, status, pr_data)
if full:
# also fetch status of last commit
def status_url(gh):
"""Helper function to grab status of latest commit."""
return gh.repos[pr_target_account][pr_target_repo].commits[pr_data['head']['sha']].status
status, status_data = github_api_get_request(status_url, github_user, **parameters)
if status != HTTP_STATUS_OK:
raise EasyBuildError("Failed to get status of last commit for PR #%d from %s/%s (status: %d %s)",
pr, pr_target_account, pr_target_repo, status, status_data)
pr_data['status_last_commit'] = status_data['state']
# also fetch comments
def comments_url(gh):
"""Helper function to grab comments for this PR."""
return gh.repos[pr_target_account][pr_target_repo].issues[pr].comments
status, comments_data = github_api_get_request(comments_url, github_user, **parameters)
if status != HTTP_STATUS_OK:
raise EasyBuildError("Failed to get comments for PR #%d from %s/%s (status: %d %s)",
pr, pr_target_account, pr_target_repo, status, comments_data)
pr_data['issue_comments'] = comments_data
# also fetch reviews
def reviews_url(gh):
"""Helper function to grab reviews for this PR"""
return gh.repos[pr_target_account][pr_target_repo].pulls[pr].reviews
status, reviews_data = github_api_get_request(reviews_url, github_user, **parameters)
if status != HTTP_STATUS_OK:
raise EasyBuildError("Failed to get reviews for PR #%d from %s/%s (status: %d %s)",
pr, pr_target_account, pr_target_repo, status, reviews_data)
pr_data['reviews'] = reviews_data
return pr_data, pr_url
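# Illustrative use of fetch_pr_data (hypothetical PR number and user); with
# full=True the returned dict is augmented with the extra keys set above:
#   pr_data, _ = fetch_pr_data(1234, account, repo, 'example-user', full=True)
#   pr_data['status_last_commit'], pr_data['issue_comments'], pr_data['reviews']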
def sync_with_develop(git_repo, branch_name, github_account, github_repo):
"""Sync specified branch with develop branch."""
# pull in latest version of 'develop' branch from central repository
msg = "pulling latest version of '%s' branch from %s/%s..." % (GITHUB_DEVELOP_BRANCH, github_account, github_repo)
print_msg(msg, log=_log)
remote = create_remote(git_repo, github_account, github_repo, https=True)
# fetch latest version of develop branch
pull_out = git_repo.git.pull(remote.name, GITHUB_DEVELOP_BRANCH)
_log.debug("Output of 'git pull %s %s': %s", remote.name, GITHUB_DEVELOP_BRANCH, pull_out)
# fetch to make sure we can check out the 'develop' branch
fetch_out = git_repo.git.fetch(remote.name)
_log.debug("Output of 'git fetch %s': %s", remote.name, fetch_out)
_log.debug("Output of 'git branch -a': %s", git_repo.git.branch(a=True))
_log.debug("Output of 'git remote -v': %s", git_repo.git.remote(v=True))
# create 'develop' branch (with force if one already exists),
git_repo.create_head(GITHUB_DEVELOP_BRANCH, remote.refs.develop, force=True).checkout()
# check top of git log
git_log_develop = git_repo.git.log('-n 3')
_log.debug("Top of 'git log' for %s branch:\n%s", GITHUB_DEVELOP_BRANCH, git_log_develop)
# checkout PR branch, and merge develop branch in it (which will create a merge commit)
print_msg("merging '%s' branch into PR branch '%s'..." % (GITHUB_DEVELOP_BRANCH, branch_name), log=_log)
git_repo.git.checkout(branch_name)
merge_out = git_repo.git.merge(GITHUB_DEVELOP_BRANCH)
_log.debug("Output of 'git merge %s':\n%s", GITHUB_DEVELOP_BRANCH, merge_out)
# check git log, should show merge commit on top
post_merge_log = git_repo.git.log('-n 3')
_log.debug("Top of 'git log' after 'git merge %s':\n%s", GITHUB_DEVELOP_BRANCH, post_merge_log)
def sync_pr_with_develop(pr_id):
"""Sync pull request with specified ID with current develop branch."""
github_user = build_option('github_user')
if github_user is None:
raise EasyBuildError("GitHub user must be specified to use --sync-pr-with-develop")
target_account = build_option('pr_target_account')
target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO
pr_account, pr_branch = det_account_branch_for_pr(pr_id)
# initialize repository
git_working_dir = tempfile.mkdtemp(prefix='git-working-dir')
git_repo = init_repo(git_working_dir, target_repo)
setup_repo(git_repo, pr_account, target_repo, pr_branch)
sync_with_develop(git_repo, pr_branch, target_account, target_repo)
# push updated branch back to GitHub (unless we're doing a dry run)
return push_branch_to_github(git_repo, pr_account, target_repo, pr_branch)
def sync_branch_with_develop(branch_name):
"""Sync branch with specified name with current develop branch."""
github_user = build_option('github_user')
if github_user is None:
raise EasyBuildError("GitHub user must be specified to use --sync-branch-with-develop")
target_account = build_option('pr_target_account')
target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO
# initialize repository
git_working_dir = tempfile.mkdtemp(prefix='git-working-dir')
git_repo = init_repo(git_working_dir, target_repo)
# GitHub organisation or GitHub user where branch is located
github_account = build_option('github_org') or github_user
setup_repo(git_repo, github_account, target_repo, branch_name)
sync_with_develop(git_repo, branch_name, target_account, target_repo)
# push updated branch back to GitHub (unless we're doing a dry run)
return push_branch_to_github(git_repo, github_account, target_repo, branch_name)
# copy functions for --new-pr
COPY_FUNCTIONS = {
GITHUB_EASYCONFIGS_REPO: copy_easyconfigs,
GITHUB_EASYBLOCKS_REPO: copy_easyblocks,
GITHUB_FRAMEWORK_REPO: copy_framework_files,
}
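# Illustrative sketch of how this dispatch table is used when staging files for
# a pull request (hypothetical call; the exact signatures of the copy functions
# are not shown in this excerpt):
#   copy_function = COPY_FUNCTIONS[pr_target_repo]
#   file_info = copy_function(paths_for_repo, target_dir)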
|
pescobar/easybuild-framework
|
easybuild/tools/github.py
|
Python
|
gpl-2.0
| 92,199
|
#!/usr/bin/python
#Andrew Hannebrink
#This is a configuration file written in Python source code for ease of manipulation.
#Save it at /usr/local/etc/dhcpcfg.py. This file is read when dhcplog.py, dhcpsearch.py,
#rest.py, and dhcppurge.py run. These parameters are necessary for the program to work,
#and they may contain sensitive information, so it is suggested to create a dedicated user
#whose sole purpose is to own and execute the files associated with this software system.
logFilePath = '/var/log/infoblox.log.1'
sqlHost = 'localhost'
sqlUser = '<PUT MYSQL USER HERE>'
sqlPasswd = '<PUT MYSQL USER\'S PASSWORD HERE>'
db = '<NAME OF DATABASE>'
senderEmail = '<EMAIL ADDRESS USED TO SEND DIAGNOSTIC REPORTS>'
senderPassword = '<PASSWORD FOR EMAIL ADDRESS USED TO SEND DIAGNOSTIC REPORTS>'
recipientEmails = ['<DIAGNOSTIC REPORT RECIPIENT EMAIL ADDRESS #1>', '<DIAGNOSTIC REPORT RECIPIENT EMAIL ADDRESS #2>']
maxLease = 180 # NUMBER OF DAYS FROM LAST DHCPACK TO SET USERS' DHCP LEASE EXPIRATION DATES
ibxUrl = '<INFOBLOX GRID MASTER URL>' # e.g. 'https://gm.ip.wustl.edu/wapi/v1.2/'
infobloxUser = '<GRID MASTER LOGIN USERNAME>'
infobloxPasswd = '<GRID MASTER LOGIN PASSWORD>'
dbExpirationMonths = 6 # MONTHS TO KEEP RECORDS IN HISTORY TABLE (DON'T MAKE THIS LARGER THAN 11)
|
andrewhannebrink/DHCPACK-Logger-and-Infoblox-Lease-Updater
|
dhcpcfg.py
|
Python
|
gpl-2.0
| 1,259
|
# ------------------------------------------------------------------------------- #
# grid_converter.py - Converts a grid map with population to the following format #
# lon, lat, population #
# Author: Thyago Mota #
# Date: 02/01/2014 #
# ------------------------------------------------------------------------------- #
import datetime, shapely, sys, math
from datetime import datetime, date, time
from shapely.geometry import Polygon, Point
from math import modf
def help():
print('Use: ' + sys.argv[0] + ' input_file geometry_file output_file')
# ------------------------------------------------------------------------------- #
# Some definitions #
# ------------------------------------------------------------------------------- #
FEEDBACK_NUM_RECORDS = 100
NUM_ARGS = 4
TOLERANCE = 0.00001
ARCGIS_NO_DATA_VALUE = -3.40282346639e+038
# ------------------------------------------------------------------------------- #
# Script begins #
# ------------------------------------------------------------------------------- #
startTime = datetime.now()
print('Start time: ' + str(startTime.hour) + ':' + str(startTime.minute) + ':' + str(startTime.second))
# ------------------------------------------------------------------------------- #
# Command line validation #
# Parameters (required): input_file geometry_file output_file                    #
# ------------------------------------------------------------------------------- #
if len(sys.argv) != NUM_ARGS:
help()
exit(1)
# ------------------------------------------------------------------------------- #
# Opening of files #
# ------------------------------------------------------------------------------- #
print('Trying to open the input and geometry files for reading')
try:
input = open(sys.argv[1], 'rt')
except:
print('Could not open file ' + sys.argv[1])
exit(2)
try:
geometry = open(sys.argv[2], 'rt')
except:
print('Could not open file ' + sys.argv[2])
input.close()
exit(3)
try:
output = open(sys.argv[3], 'wt')
except:
print('Could not open file ' + sys.argv[3])
input.close()
geometry.close()
exit(4)
print('Success!')
# ------------------------------------------------------------------------------- #
# Reading geometry file #
# ------------------------------------------------------------------------------- #
print('Reading geometry file')
geoData = []
for line in geometry:
line = line.replace('\n', '')
data = line.split(' ')
for i in range(0, len(data)):
d = data[i].split(',')
geoData.append([float(d[0]), float(d[1])])
geometry.close()
print('Geometry file looking good :-)')
#print(geoData)
# ------------------------------------------------------------------------------- #
# Creating a polygon based on geometry #
# ------------------------------------------------------------------------------- #
print('Creating a polygon based on geometry')
poly = Polygon(geoData)
print('That was easy!')
# ------------------------------------------------------------------------------- #
# Reading metadata #
# ------------------------------------------------------------------------------- #
print('Metadata:')
for i in range(0, 6):
line = input.readline()
line = line.replace('\n', '')
line = " ".join(line.split()) # eliminates duplicate whitespaces
data = line.split(' ')
#print(data[1])
if i == 0:
nCols = int(data[1])
elif i == 1:
nRows = int(data[1])
elif i == 2:
xllCorner = float(data[1])
elif i == 3:
yllCorner = float(data[1])
elif i == 4:
cellSize = float(data[1])
else:
noDataValue = float(data[1])
print('\tnCols: \t\t' + str(nCols))
print('\tnRows: \t\t' + str(nRows))
print('\txllCorner: \t' + str(xllCorner))
print('\tyllCorner: \t' + str(yllCorner))
print('\tcellSize: \t' + str(cellSize))
print('\tnoDataValue: \t' + str(noDataValue))
print('Grid box:')
print('\t(' + str(xllCorner) + ',' + str(yllCorner) + ')')
print('\t(' + str(xllCorner + nCols * cellSize) + ',' + str(yllCorner) + ')')
print('\t(' + str(xllCorner) + ',' + str(yllCorner + nRows * cellSize) + ')')
print('\t(' + str(xllCorner + nCols * cellSize) + ',' + str(yllCorner + nRows * cellSize) + ')')
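# For reference, the six metadata lines parsed above follow the ESRI ASCII grid
# header layout, e.g. (values illustrative):
#   ncols        3600
#   nrows        1800
#   xllcorner    -180.0
#   yllcorner    -90.0
#   cellsize     0.1
#   NODATA_value -9999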
# ------------------------------------------------------------------------------- #
# Reading the grid #
# ------------------------------------------------------------------------------- #
grid = [ [ 0. for j in xrange(nCols) ] for i in xrange(nRows) ]
i = 0
totalUnbounded = 0
for line in input:
line = line.replace('\n', '')
if line[0] == ' ':
line = line[1:]
data = line.split(' ')
for j in xrange(nCols):
value = float(data[j])
if value == ARCGIS_NO_DATA_VALUE or value == noDataValue or value < 0:
continue
grid[i][j] = value
totalUnbounded = totalUnbounded + value
i = i + 1
input.close()
print('Total unbounded: ' + str(totalUnbounded))
# ------------------------------------------------------------------------------- #
# Writing the new file #
# ------------------------------------------------------------------------------- #
print('Writing the new file')
totalBounded = 0
for i in xrange(nRows):
#print('Line ' + str(i+1) + ' of ' + str(nRows))
    lat = yllCorner + (nRows - i) * cellSize - cellSize/2 # cellSize/2 below the row's top edge, i.e. the cell center instead of top-left (rows are stored top-down)
for j in xrange(nCols):
if grid[i][j] == 0:
continue
lon = xllCorner + j * cellSize + cellSize/2 # cellSize/2 to have values centered instead of top-left
point = Point(lon, lat)
if point.within(poly):
totalBounded = totalBounded + grid[i][j]
output.write(str.format('{0:.5f}', lon) + ',' + str.format('{0:.5f}', lat) + ',' + str.format('{0:.2f}', grid[i][j]) + '\n')
output.close()
print('Total bounded: ' + str(totalBounded))
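# Each output row is "lon,lat,population", e.g. (illustrative):
#   -104.98765,39.12345,1234.56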
# ------------------------------------------------------------------------------- #
# Script ends #
# ------------------------------------------------------------------------------- #
endTime = datetime.now()
print('End time: ' + str(endTime.hour) + ':' + str(endTime.minute) + ':' + str(endTime.second))
elapsedTime = endTime - startTime
print('Elapsed time: ' + str(elapsedTime))
|
sgonzalez/cell-tower-population
|
grid_converter.py
|
Python
|
gpl-2.0
| 7,013
|
from __future__ import division
import logging
import base64
import hmac
import random
import re
import os.path
import string
from binascii import unhexlify
from collections import namedtuple
from copy import deepcopy
from hashlib import sha256
from io import BytesIO
from math import ceil
from .flvconcat import FLVTagConcat
from .segmented import (SegmentedStreamReader, SegmentedStreamWriter, SegmentedStreamWorker)
from .stream import Stream
from .wrappers import StreamIOIterWrapper
from ..cache import Cache
from ..compat import parse_qsl, urljoin, urlparse, urlunparse, bytes, range
from ..exceptions import StreamError, PluginError
from ..utils import absolute_url, swfdecompress
from ..packages.flashmedia import F4V, F4VError
from ..packages.flashmedia.box import Box
from ..packages.flashmedia.tag import ScriptData, Tag, TAG_TYPE_SCRIPT
log = logging.getLogger(__name__)
# Akamai HD player verification key
# Use unhexlify() rather than bytes.fromhex() for compatibility with Python 2
# as well as Python 3. Note that in Python 3.2 (but not 3.3+), unhexlify()
# only accepts a byte string.
AKAMAIHD_PV_KEY = unhexlify(b"BD938D5EE6D9F42016F9C56577B6FDCF415FE4B184932B785AB32BCADC9BB592")
# Some streams hosted by Akamai seem to require a hdcore parameter
# to function properly.
HDCORE_VERSION = "3.1.0"
# Fragment URL format
FRAGMENT_URL = "{url}{identifier}{quality}Seg{segment}-Frag{fragment}"
Fragment = namedtuple("Fragment", "segment fragment duration url")
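# Illustrative expansion of FRAGMENT_URL (hypothetical values):
#   FRAGMENT_URL.format(url="http://example.com/stream", identifier="", quality="",
#                       segment=1, fragment=42)
#   -> "http://example.com/streamSeg1-Frag42"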
class HDSStreamWriter(SegmentedStreamWriter):
def __init__(self, reader, *args, **kwargs):
options = reader.stream.session.options
kwargs["retries"] = options.get("hds-segment-attempts")
kwargs["threads"] = options.get("hds-segment-threads")
kwargs["timeout"] = options.get("hds-segment-timeout")
SegmentedStreamWriter.__init__(self, reader, *args, **kwargs)
duration, tags = None, []
if self.stream.metadata:
duration = self.stream.metadata.value.get("duration")
tags = [Tag(TAG_TYPE_SCRIPT, timestamp=0, data=self.stream.metadata)]
self.concater = FLVTagConcat(tags=tags, duration=duration, flatten_timestamps=True)
def fetch(self, fragment, retries=None):
if self.closed or not retries:
return
try:
request_params = self.stream.request_params.copy()
params = request_params.pop("params", {})
params.pop("g", None)
return self.session.http.get(fragment.url, stream=True, timeout=self.timeout, exception=StreamError, params=params, **request_params)
except StreamError as err:
log.error("Failed to open fragment {0}-{1}: {2}", fragment.segment, fragment.fragment, err)
return self.fetch(fragment, retries - 1)
def write(self, fragment, res, chunk_size=8192):
fd = StreamIOIterWrapper(res.iter_content(chunk_size))
self.convert_fragment(fragment, fd)
def convert_fragment(self, fragment, fd):
mdat = None
try:
f4v = F4V(fd, raw_payload=True)
# Fast forward to mdat box
for box in f4v:
if box.type == "mdat":
mdat = box.payload.data
break
except F4VError as err:
log.error("Failed to parse fragment {0}-{1}: {2}", fragment.segment, fragment.fragment, err)
return
if not mdat:
log.error("No MDAT box found in fragment {0}-{1}", fragment.segment, fragment.fragment)
return
try:
for chunk in self.concater.iter_chunks(buf=mdat, skip_header=True):
self.reader.buffer.write(chunk)
if self.closed:
break
else:
log.debug("Download of fragment {0}-{1} complete", fragment.segment, fragment.fragment)
except IOError as err:
if "Unknown tag type" in str(err):
log.error("Unknown tag type found, this stream is probably encrypted")
self.close()
return
log.error("Error reading fragment {0}-{1}: {2}", fragment.segment, fragment.fragment, err)
class HDSStreamWorker(SegmentedStreamWorker):
def __init__(self, *args, **kwargs):
SegmentedStreamWorker.__init__(self, *args, **kwargs)
self.bootstrap = self.stream.bootstrap
self.current_segment = -1
self.current_fragment = -1
self.first_fragment = 1
self.last_fragment = -1
self.end_fragment = None
self.bootstrap_minimal_reload_time = 2.0
self.bootstrap_reload_time = self.bootstrap_minimal_reload_time
self.invalid_fragments = set()
self.live_edge = self.session.options.get("hds-live-edge")
self.update_bootstrap()
def update_bootstrap(self):
log.debug("Updating bootstrap")
if isinstance(self.bootstrap, Box):
bootstrap = self.bootstrap
else:
bootstrap = self.fetch_bootstrap(self.bootstrap)
self.live = bootstrap.payload.live
self.profile = bootstrap.payload.profile
self.timestamp = bootstrap.payload.current_media_time
self.identifier = bootstrap.payload.movie_identifier
self.time_scale = bootstrap.payload.time_scale
self.segmentruntable = bootstrap.payload.segment_run_table_entries[0]
self.fragmentruntable = bootstrap.payload.fragment_run_table_entries[0]
self.first_fragment, last_fragment = self.fragment_count()
fragment_duration = self.fragment_duration(last_fragment)
if last_fragment != self.last_fragment:
bootstrap_changed = True
self.last_fragment = last_fragment
else:
bootstrap_changed = False
if self.current_fragment < 0:
if self.live:
current_fragment = last_fragment
# Less likely to hit edge if we don't start with last fragment,
# default buffer is 10 sec.
fragment_buffer = int(ceil(self.live_edge / fragment_duration))
current_fragment = max(self.first_fragment,
current_fragment - (fragment_buffer - 1))
log.debug("Live edge buffer {0} sec is {1} fragments",
self.live_edge, fragment_buffer)
# Make sure we don't have a duration set when it's a
# live stream since it will just confuse players anyway.
self.writer.concater.duration = None
else:
current_fragment = self.first_fragment
self.current_fragment = current_fragment
log.debug("Current timestamp: {0}", self.timestamp / self.time_scale)
log.debug("Current segment: {0}", self.current_segment)
log.debug("Current fragment: {0}", self.current_fragment)
log.debug("First fragment: {0}", self.first_fragment)
log.debug("Last fragment: {0}", self.last_fragment)
log.debug("End fragment: {0}", self.end_fragment)
self.bootstrap_reload_time = fragment_duration
if self.live and not bootstrap_changed:
log.debug("Bootstrap not changed, shortening timer")
self.bootstrap_reload_time /= 2
self.bootstrap_reload_time = max(self.bootstrap_reload_time,
self.bootstrap_minimal_reload_time)
def fetch_bootstrap(self, url):
res = self.session.http.get(url,
exception=StreamError,
**self.stream.request_params)
return Box.deserialize(BytesIO(res.content))
def fragment_url(self, segment, fragment):
url = absolute_url(self.stream.baseurl, self.stream.url)
return FRAGMENT_URL.format(url=url,
segment=segment,
fragment=fragment,
identifier="",
quality="")
def fragment_count(self):
table = self.fragmentruntable.payload.fragment_run_entry_table
first_fragment, end_fragment = None, None
for i, fragmentrun in enumerate(table):
if fragmentrun.discontinuity_indicator is not None:
if fragmentrun.discontinuity_indicator == 0:
break
elif fragmentrun.discontinuity_indicator > 0:
continue
if first_fragment is None:
first_fragment = fragmentrun.first_fragment
end_fragment = fragmentrun.first_fragment
fragment_duration = (fragmentrun.first_fragment_timestamp +
fragmentrun.fragment_duration)
if self.timestamp > fragment_duration:
offset = ((self.timestamp - fragment_duration) /
fragmentrun.fragment_duration)
end_fragment += int(offset)
if first_fragment is None:
first_fragment = 1
if end_fragment is None:
end_fragment = 1
return first_fragment, end_fragment
def fragment_duration(self, fragment):
fragment_duration = 0
table = self.fragmentruntable.payload.fragment_run_entry_table
time_scale = self.fragmentruntable.payload.time_scale
for i, fragmentrun in enumerate(table):
if fragmentrun.discontinuity_indicator is not None:
self.invalid_fragments.add(fragmentrun.first_fragment)
# Check for the last fragment of the stream
if fragmentrun.discontinuity_indicator == 0:
if i > 0:
prev = table[i - 1]
self.end_fragment = prev.first_fragment
break
elif fragmentrun.discontinuity_indicator > 0:
continue
if fragment >= fragmentrun.first_fragment:
fragment_duration = fragmentrun.fragment_duration / time_scale
return fragment_duration
def segment_from_fragment(self, fragment):
table = self.segmentruntable.payload.segment_run_entry_table
for segment, start, end in self.iter_segment_table(table):
if start - 1 <= fragment <= end:
return segment
else:
segment = 1
return segment
def iter_segment_table(self, table):
# If the first segment in the table starts at the beginning we
# can go from there, otherwise we start from the end and use the
# total fragment count to figure out where the last segment ends.
if table[0].first_segment == 1:
prev_frag = self.first_fragment - 1
for segmentrun in table:
start = prev_frag + 1
end = prev_frag + segmentrun.fragments_per_segment
yield segmentrun.first_segment, start, end
prev_frag = end
else:
prev_frag = self.last_fragment + 1
for segmentrun in reversed(table):
start = prev_frag - segmentrun.fragments_per_segment
end = prev_frag - 1
yield segmentrun.first_segment, start, end
prev_frag = start
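    # Worked sketch of iter_segment_table (hypothetical run table): with
    # first_fragment=1 and two segments of 5 fragments each, the forward branch
    # yields (segment=1, start=1, end=5) then (segment=2, start=6, end=10); the
    # reversed branch walks the same windows backwards from last_fragment.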
def valid_fragment(self, fragment):
return fragment not in self.invalid_fragments
def iter_segments(self):
while not self.closed:
fragments = range(self.current_fragment, self.last_fragment + 1)
fragments = filter(self.valid_fragment, fragments)
for fragment in fragments:
self.current_fragment = fragment + 1
self.current_segment = self.segment_from_fragment(fragment)
fragment_duration = int(self.fragment_duration(fragment) * 1000)
fragment_url = self.fragment_url(self.current_segment, fragment)
fragment = Fragment(self.current_segment, fragment,
fragment_duration, fragment_url)
log.debug("Adding fragment {0}-{1} to queue", fragment.segment, fragment.fragment)
yield fragment
# End of stream
stream_end = self.end_fragment and fragment.fragment >= self.end_fragment
if self.closed or stream_end:
return
if self.wait(self.bootstrap_reload_time):
try:
self.update_bootstrap()
except StreamError as err:
log.warning("Failed to update bootstrap: {0}", err)
class HDSStreamReader(SegmentedStreamReader):
__worker__ = HDSStreamWorker
__writer__ = HDSStreamWriter
def __init__(self, stream, *args, **kwargs):
SegmentedStreamReader.__init__(self, stream, *args, **kwargs)
class HDSStream(Stream):
"""
Implements the Adobe HTTP Dynamic Streaming protocol
*Attributes:*
- :attr:`baseurl` Base URL
- :attr:`url` Base path of the stream, joined with the base URL when
fetching fragments
- :attr:`bootstrap` Either a URL pointing to the bootstrap or a
bootstrap :class:`Box` object used for initial information about
the stream
- :attr:`metadata` Either `None` or a :class:`ScriptData` object
that contains metadata about the stream, such as height, width and
bitrate
"""
__shortname__ = "hds"
def __init__(self, session, baseurl, url, bootstrap, metadata=None,
timeout=60, **request_params):
Stream.__init__(self, session)
self.baseurl = baseurl
self.url = url
self.bootstrap = bootstrap
self.metadata = metadata
self.timeout = timeout
# Deep copy request params to make it mutable
self.request_params = deepcopy(request_params)
parsed = urlparse(self.url)
if parsed.query:
params = parse_qsl(parsed.query)
if params:
if not self.request_params.get("params"):
self.request_params["params"] = {}
self.request_params["params"].update(params)
self.url = urlunparse(
(parsed.scheme, parsed.netloc, parsed.path, None, None, None)
)
def __repr__(self):
return "<HDSStream({0!r}, {1!r}, {2!r}, metadata={3!r}, timeout={4!r})>".format(self.baseurl, self.url, self.bootstrap, self.metadata, self.timeout)
def __json__(self):
if isinstance(self.bootstrap, Box):
bootstrap = base64.b64encode(self.bootstrap.serialize())
else:
bootstrap = self.bootstrap
if isinstance(self.metadata, ScriptData):
metadata = self.metadata.__dict__
else:
metadata = self.metadata
return dict(type=HDSStream.shortname(), baseurl=self.baseurl,
url=self.url, bootstrap=bootstrap, metadata=metadata,
params=self.request_params.get("params", {}),
headers=self.request_params.get("headers", {}))
def open(self):
reader = HDSStreamReader(self)
reader.open()
return reader
@classmethod
def parse_manifest(cls, session, url, timeout=60, pvswf=None, is_akamai=False,
**request_params):
"""Parses a HDS manifest and returns its substreams.
:param url: The URL to the manifest.
:param timeout: How long to wait for data to be returned from
from the stream before raising an error.
:param is_akamai: force adding of the akamai parameters
:param pvswf: URL of player SWF for Akamai HD player verification.
"""
# private argument, should only be used in recursive calls
raise_for_drm = request_params.pop("raise_for_drm", False)
if not request_params:
request_params = {}
request_params["headers"] = request_params.get("headers", {})
request_params["params"] = request_params.get("params", {})
# These params are reserved for internal use
request_params.pop("exception", None)
request_params.pop("stream", None)
request_params.pop("timeout", None)
request_params.pop("url", None)
if "akamaihd" in url or is_akamai:
request_params["params"]["hdcore"] = HDCORE_VERSION
request_params["params"]["g"] = cls.cache_buster_string(12)
res = session.http.get(url, exception=IOError, **request_params)
manifest = session.http.xml(res, "manifest XML", ignore_ns=True,
exception=IOError)
if manifest.findtext("drmAdditionalHeader"):
log.debug("Omitting HDS stream protected by DRM: {}", url)
if raise_for_drm:
raise PluginError("{} is protected by DRM".format(url))
log.warning("Some or all streams are unavailable as they are protected by DRM")
return {}
parsed = urlparse(url)
baseurl = manifest.findtext("baseURL")
baseheight = manifest.findtext("height")
bootstraps = {}
streams = {}
if not baseurl:
baseurl = urljoin(url, os.path.dirname(parsed.path))
if not baseurl.endswith("/"):
baseurl += "/"
for bootstrap in manifest.findall("bootstrapInfo"):
name = bootstrap.attrib.get("id") or "_global"
url = bootstrap.attrib.get("url")
if url:
box = absolute_url(baseurl, url)
else:
data = base64.b64decode(bytes(bootstrap.text, "utf8"))
box = Box.deserialize(BytesIO(data))
bootstraps[name] = box
pvtoken = manifest.findtext("pv-2.0")
if pvtoken:
if not pvswf:
raise IOError("This manifest requires the 'pvswf' parameter "
"to verify the SWF")
params = cls._pv_params(session, pvswf, pvtoken, **request_params)
request_params["params"].update(params)
child_drm = False
for media in manifest.findall("media"):
url = media.attrib.get("url")
bootstrapid = media.attrib.get("bootstrapInfoId", "_global")
href = media.attrib.get("href")
if url and bootstrapid:
bootstrap = bootstraps.get(bootstrapid)
if not bootstrap:
continue
bitrate = media.attrib.get("bitrate")
streamid = media.attrib.get("streamId")
height = media.attrib.get("height")
if height:
quality = height + "p"
elif bitrate:
quality = bitrate + "k"
elif streamid:
quality = streamid
elif baseheight:
quality = baseheight + "p"
else:
quality = "live"
metadata = media.findtext("metadata")
if metadata:
metadata = base64.b64decode(bytes(metadata, "utf8"))
metadata = ScriptData.deserialize(BytesIO(metadata))
else:
metadata = None
stream = HDSStream(session, baseurl, url, bootstrap,
metadata=metadata, timeout=timeout,
**request_params)
streams[quality] = stream
elif href:
url = absolute_url(baseurl, href)
try:
                    child_streams = cls.parse_manifest(
                        session, url, timeout=timeout, is_akamai=is_akamai,
                        raise_for_drm=True, **request_params)
except PluginError:
child_drm = True
child_streams = {}
for name, stream in child_streams.items():
# Override stream name if bitrate is available in parent
# manifest but not the child one.
bitrate = media.attrib.get("bitrate")
if bitrate and not re.match(r"^(\d+)k$", name):
name = bitrate + "k"
streams[name] = stream
if child_drm:
log.warning("Some or all streams are unavailable as they are protected by DRM")
return streams
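    # Hedged note: the mapping returned by parse_manifest() is keyed by
    # quality name ("720p", "1500k", "live", ...) with HDSStream values,
    # e.g. {"720p": <HDSStream ...>, "1500k": <HDSStream ...>}.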
@classmethod
def _pv_params(cls, session, pvswf, pv, **request_params):
"""Returns any parameters needed for Akamai HD player verification.
Algorithm originally documented by KSV, source:
http://stream-recorder.com/forum/showpost.php?p=43761&postcount=13
"""
try:
data, hdntl = pv.split(";")
except ValueError:
data = pv
hdntl = ""
cache = Cache(filename="stream.json")
key = "akamaihd-player:" + pvswf
cached = cache.get(key)
request_params = deepcopy(request_params)
headers = request_params.pop("headers", {})
if cached:
headers["If-Modified-Since"] = cached["modified"]
swf = session.http.get(pvswf, headers=headers, **request_params)
if cached and swf.status_code == 304: # Server says not modified
hash = cached["hash"]
else:
# Calculate SHA-256 hash of the uncompressed SWF file, base-64
# encoded
hash = sha256()
hash.update(swfdecompress(swf.content))
hash = base64.b64encode(hash.digest()).decode("ascii")
modified = swf.headers.get("Last-Modified", "")
# Only save in cache if a valid date is given
if len(modified) < 40:
cache.set(key, dict(hash=hash, modified=modified))
msg = "st=0~exp=9999999999~acl=*~data={0}!{1}".format(data, hash)
auth = hmac.new(AKAMAIHD_PV_KEY, msg.encode("ascii"), sha256)
pvtoken = "{0}~hmac={1}".format(msg, auth.hexdigest())
# The "hdntl" parameter can be accepted as a cookie or passed in the
# query string, but the "pvtoken" parameter can only be in the query
# string
params = [("pvtoken", pvtoken)]
params.extend(parse_qsl(hdntl, keep_blank_values=True))
return params
@staticmethod
def cache_buster_string(length):
return "".join([random.choice(string.ascii_uppercase) for i in range(length)])
|
repotvsupertuga/tvsupertuga.repository
|
script.module.streamlink.base/resources/lib/streamlink/stream/hds.py
|
Python
|
gpl-2.0
| 22,547
|
#!/usr/bin/env python
import datetime
import requests
import os
import time
from grader import grader, tester
import hashlib
import random
import lowlevelhttptests
from subprocess import Popen, PIPE, STDOUT
import os.path
import socket
import sys
BIN = "./lisod"
MIME = {
'.html' : 'text/html',
'.css' : 'text/css',
'.png' : 'image/png',
'.jpg' : 'image/jpeg',
'.gif' : 'image/gif',
'' : 'application/octet-stream'
}
class project1cp2tester(tester):
def __init__(self, test_name, testsuit):
super(project1cp2tester, self).__init__(test_name, testsuit)
def check_headers(self, response_type, headers, length_content, ext):
self.pAssertEqual(headers['Server'].lower(), 'liso/1.0')
try:
datetime.datetime.strptime(headers['Date'],\
'%a, %d %b %Y %H:%M:%S %Z')
except KeyError:
print 'Bad Date header'
except Exception:
print 'Bad Date header: %s' % (headers['Date'])
self.pAssertEqual(int(headers['Content-Length']), length_content)
#self.pAssertEqual(headers['Connection'].lower(), 'close')
if response_type == 'GET' or response_type == 'HEAD':
header_set = set(['connection', 'content-length',
'date', 'last-modified',
'server', 'content-type'])
self.pAssertEqual(set(), header_set - set(headers.keys()))
if headers['Content-Type'].lower() != MIME[ext]:
print 'MIME got %s expected %s'\
% (headers['Content-Type'].lower(), MIME[ext])
self.pAssertTrue(headers['Content-Type'].lower() == MIME[ext]\
or headers['Content-Type'].lower() == MIME['.html'])
try:
datetime.datetime.strptime(headers['Last-Modified'],\
'%a, %d %b %Y %H:%M:%S %Z')
except Exception:
print 'Bad Last-Modified header: %s' \
% (headers['Last-Modified'])
elif response_type == 'POST':
header_set = set(['connection', 'content-length',
'date', 'server'])
self.pAssertEqual(set(), header_set - set(headers.keys()))
else:
self.fail('Unsupported Response Type...')
def test_HEAD_headers(self):
print '----- Testing Headers -----'
if self.testsuite.process is None:
self.skipTest("server failed to start. skip this test")
time.sleep(1)
for test in self.testsuite.tests:
dummy_root, ext = os.path.splitext(test)
response = requests.head(test % (self.testsuite.ip, self.testsuite.port), timeout=10.0)
self.check_headers(response.request.method,
response.headers,
self.testsuite.tests[test][1],
ext)
self.testsuite.scores['test_HEAD_headers'] = 1
def test_HEAD(self):
print '----- Testing HEAD -----'
if self.testsuite.process is None:
self.skipTest("server failed to start. skip this test")
time.sleep(1)
for test in self.testsuite.tests:
response = requests.head(test % (self.testsuite.ip, self.testsuite.port), timeout=10.0)
self.pAssertEqual(200, response.status_code)
self.testsuite.scores['test_HEAD'] = 1
if not lowlevelhttptests.check_correct_HEAD(self.testsuite.ip,\
self.testsuite.port):
self.testsuite.scores['test_HEAD'] -= 0.5
print "HEAD must not return Content!"
def test_GET(self):
print '----- Testing GET -----'
if self.testsuite.process is None:
self.skipTest("server failed to start. skip this test")
time.sleep(1)
for test in self.testsuite.tests:
response = requests.get(test % (self.testsuite.ip, self.testsuite.port), timeout=10.0)
contenthash = hashlib.sha256(response.content).hexdigest()
self.pAssertEqual(200, response.status_code)
self.pAssertEqual(contenthash, self.testsuite.tests[test][0])
self.testsuite.scores['test_GET'] = 1
err, res = lowlevelhttptests.check_correct_GET(self.testsuite.ip,\
self.testsuite.port)
if res > 1:
self.testsuite.scores['test_GET'] -= 0.5
print "Received %d responses, should get only 1" % res
if err:
print "And, some of them reported error" % res
def test_POST(self):
print '----- Testing POST -----'
if self.testsuite.process is None:
self.skipTest("server failed to start. skip this test")
time.sleep(1)
for test in self.testsuite.tests:
# for checkpoint 2, this should time out; we told them to
# swallow the data and ignore
try:
response = requests.post(test % (self.testsuite.ip, self.testsuite.port),\
data='dummy data', timeout=3.0)
#except requests.exceptions.Timeout:
except requests.exceptions.RequestException:
#print 'timeout'
continue
except socket.timeout:
#print 'socket.timeout'
continue
# if they do return something, make sure it's OK
self.pAssertEqual(200, response.status_code)
self.testsuite.scores['test_POST'] = 1
def test_bad(self):
print '----- Testing Bad Requests-----'
if self.testsuite.process is None:
self.skipTest("server failed to start. skip this test")
time.sleep(1)
for test in self.testsuite.bad_tests:
response = requests.head(test % (self.testsuite.ip, self.testsuite.port), timeout=3.0)
self.pAssertEqual(404, response.status_code)
self.testsuite.scores['test_bad'] = 1
class project1cp2grader(grader):
def __init__(self, checkpoint):
super(project1cp2grader, self).__init__()
self.process = 'nasty-hack'
self.checkpoint = checkpoint
self.tests = {
'http://%s:%d/index.html' :
('f5cacdcb48b7d85ff48da4653f8bf8a7c94fb8fb43407a8e82322302ab13becd', 802),
'http://%s:%d/images/liso_header.png' :
('abf1a740b8951ae46212eb0b61a20c403c92b45ed447fe1143264c637c2e0786', 17431),
'http://%s:%d/style.css' :
('575150c0258a3016223dd99bd46e203a820eef4f6f5486f7789eb7076e46736a', 301)
}
self.bad_tests = [
'http://%s:%d/bad.html'
]
def prepareTestSuite(self):
super(project1cp2grader, self)
self.suite.addTest(project1cp2tester('test_HEAD_headers', self))
self.suite.addTest(project1cp2tester('test_HEAD', self))
self.suite.addTest(project1cp2tester('test_GET', self))
self.suite.addTest(project1cp2tester('test_POST', self))
self.suite.addTest(project1cp2tester('test_bad', self))
self.scores['test_HEAD_headers'] = 0
self.scores['test_HEAD'] = 0
self.scores['test_GET'] = 0
self.scores['test_POST'] = 0
self.scores['test_bad'] = 0
def setUp(self, argv):
self.ip = argv[1]
self.port = int(argv[2])
#self.port = 9999
self.tmp_dir = "../"
self.priv_key = os.path.join(self.tmp_dir, 'grader.key')
self.cert = os.path.join(self.tmp_dir, 'grader.crt')
self.www = os.path.join(self.tmp_dir, 'www/')
self.cgi = os.path.join(self.tmp_dir, 'cgi/cgi_script.py')
print '\nUsing ports: %d' % (self.port)
if __name__ == '__main__':
if len(sys.argv) < 3:
sys.stderr.write('Usage: ./grader1cp2.py <ip> <port>')
sys.exit(1)
p1cp2grader = project1cp2grader("checkpoint-2")
p1cp2grader.prepareTestSuite()
p1cp2grader.setUp(sys.argv)
results = p1cp2grader.runTests()
p1cp2grader.reportScores()
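
# --- Hedged spot-check helper (not part of the original grader) ---
# Mirrors test_HEAD for a single URL; ip and port are placeholders.
def manual_head_check(ip, port):
    r = requests.head('http://%s:%d/index.html' % (ip, port), timeout=10.0)
    return r.status_code == 200 and r.headers['Server'].lower() == 'liso/1.0'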
|
tavaresdong/TinyWeb
|
liso/cp2/proj1_cp2/grader1cp2.py
|
Python
|
gpl-2.0
| 8,066
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'openshift.views.home', name='home'),
url(r'^getCat/', 'openshift.views.getCat', name='getCat'),
url(r'^sendCat/', 'openshift.views.sendCat', name='sendCat'),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
from django.conf import settings
if settings.DEBUG:
urlpatterns += patterns('', (r'^media\/(?P<path>.*)$', 'django.views.static.serve',{'document_root': settings.STATIC_ROOT}),)
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns += staticfiles_urlpatterns()
|
Honeybunch/GetCats
|
wsgi/openshift/urls.py
|
Python
|
gpl-2.0
| 929
|
def roombafunc(msg, rseed, rows, cols, matracoef):
print "Initializing RoombaFunction Cipher System"
print "plaintext: " + msg
print "key: " + str((rseed, rows, cols, matracoef))
print "\n\nBeginning step 1: Alphabet Shuffle..."
import random
random.seed(rseed)
alphalist = ["A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z",0,1,2,3,4,5,6,7,8,9," ","-",",",".","!","?"]
random.shuffle(alphalist)
print "Step 1: Alphabet Shuffle completed successfully."
print "Shuffled alphabet list:"
print alphalist
print "\nBeginning step 2: Loading Alphabet Into Coding Matrix #1..."
if (rows*cols == 42):
alphatable = {}
x = 1
y = 1
for letter in alphalist:
if (x == (cols+1)):
x = 1
y += 1
alphatable[letter] = (x,y)
print "filling in " + str(x) + "," + str(y)
x += 1
print "Step 2: Loading Alphabet Into Coding Matrix #1 completed successfully."
print "42 space matrix loaded with shuffled character list:"
print alphatable
else:
return "Product of row and column arguments must equal 42. Aborting."
print "\n\nBeginning step 3: First Cipher Pass (Coordinate Conversion)..."
msg2 = []
for letter in msg:
if letter in alphatable:
msg2.append(alphatable[letter][0])
msg2.append(alphatable[letter][1])
print "Encoded " + letter + " as " + str(alphatable[letter]) + "."
else:
if letter.islower():
if letter.upper() in alphatable:
msg2.append(alphatable[letter.upper()][0])
msg2.append(alphatable[letter.upper()][1])
print "Lowercase letter " + letter + " detected. converted to uppercase letter " + letter.upper() + " and encoded as " + str(alphatable[letter.upper()]) + "."
else:
print "Unsupported letter " + letter + " detected. letter omitted from ciphertext."
else:
print "Unsupported letter " + letter + " detected. letter omitted from ciphertext."
print "Step 3: First Cipher Pass (Coordinate Conversion) completed successfully."
print "Intermediate ciphertext (plaintext converted to coordinates): \n"
print msg2
print "\n\nBeginning step 4: Preparation For Second Cipher Pass..."
if (len(msg2)%3) != 0:
if (len(msg2)%3) == 1:
msg2.append(0)
msg2.append(0)
print "Added 2 zeros for padding."
if (len(msg2)%3) == 2:
msg2.append(0)
print "Added 1 zero for padding."
print "Step 4: Preparation For Second Cipher Pass completed successfully."
print "\n\nBeginning step 5: Generating Code Matrix #2..."
random.seed(rseed)
multmatrix = []
msg3 = []
for number in xrange(0,9):
multmatrix.append(random.randint((-1*matracoef), (matracoef)))
print "Multmatrix position " + str(number + 1) + " set to " + str(multmatrix[number]) +"."
det = (multmatrix[0]*multmatrix[4]*multmatrix[8]+multmatrix[1]*multmatrix[5]*multmatrix[6]+multmatrix[2]*multmatrix[3]*multmatrix[7])- (multmatrix[2]*multmatrix[4]*multmatrix[6]+multmatrix[0]*multmatrix[5]*multmatrix[7]+multmatrix[1]*multmatrix[3]*multmatrix[8])
print det
if (det == 0):
return "Matrix determinant = 0. Aborting."
print "Step 5: Generating Code Matrix #2 completed successfully."
print "\n\nBeginning step 6: Final Cipher Pass (Matrix Multiplication)..."
for inc in xrange(0,len(msg2)/3):
i = inc*3
msg3.append(msg2[i]*multmatrix[0]+msg2[i+1]*multmatrix[3]+msg2[i+2]*multmatrix[6])
msg3.append(msg2[i]*multmatrix[1]+msg2[i+1]*multmatrix[4]+msg2[i+2]*multmatrix[7])
msg3.append(msg2[i]*multmatrix[2]+msg2[i+1]*multmatrix[5]+msg2[i+2]*multmatrix[8])
print "Step 6: Final Cipher Pass (Matrix Multiplication) completed successfully."
print "\n\nFinal ciphertext (intermediate ciphertext plus matrix multiplication pass):"
return msg3
roombafunc("Alcari is an alcari",50,6,7,9000)
|
ZeroK-RTS/SpringRTS-Tools
|
MidSteed/Other Py/RoombaFunction CryptoSystem/the code.py
|
Python
|
gpl-2.0
| 3,817
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""A simple wrapper class for the matplotlib chart module."""
# Copyright 2002, 2003 St James Software
#
# This file is part of jToolkit.
#
# jToolkit is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# jToolkit is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with jToolkit; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import cStringIO, tempfile, os, math
from jToolkit.widgets import widgets
from jToolkit.data import dates
from jToolkit import errors
import random, os, time
import sys
sys.stderr = sys.stdout #Solve a problem with the font manager
if not hasattr(sys, "argv"):
# matplotlib expects an argv...
sys.argv = []
matplotlib_import_error = None
matplotlib_import_traceback = None
# TODO: improve error handling here...
try:
# TODO: change to from matplotlib import matlab...
os.environ['MATPLOTLIBDATA'] = os.path.join(sys.prefix, 'share', 'matplotlib')
import matplotlib
matplotlib.use('Agg')
import matplotlib.pylab as matlab
from matplotlib import ticker
from matplotlib import dates as matdates
except Exception, e:
# not importing these functions will cause charts to fail...
errorhandler = errors.ConsoleErrorHandler()
matplotlib_import_error = str(e)
matplotlib_import_traceback = errorhandler.traceback_str()
# print "error importing matplotlib. jToolkit.widgets.chart functions will fail later."
raise
#We need this lock for gdchart, as it's not thread-safe
#from threading import Lock
#gdchartLock = Lock()
#Graph type constants
LINE_CHART = 0
BAR_CHART = 1
class jMajorDateLocator(matdates.DayLocator):
def __call__(self):
self.verify_intervals()
vmin, vmax = self.viewInterval.get_bounds()
ret = [vmin]
ret.extend(matdates.DayLocator.__call__(self))
ret.append(vmax)
return ret
class jMinorLinearLocator(ticker.LinearLocator):
"""The purpose of this class is to create a LinearLocator that does not return vmin or vmax"""
def __init__(self, numticks=None, presets=None):
if numticks is not None:
numticks += 2 # We strip these out in __call__
ticker.LinearLocator.__init__(self, numticks, presets)
def __call__(self):
self.verify_intervals()
vmin, vmax = self.viewInterval.get_bounds()
ret = ticker.LinearLocator.__call__(self)[1:-1]
return ret
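# Illustrative behaviour (hedged): with view bounds (0, 10) and
# numticks=3, the parent LinearLocator would yield [0, 5, 10]; this
# subclass asks for two extra ticks internally and strips the endpoints,
# returning only the interior ticks [2.5, 5.0, 7.5].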
class jMinuteLocator(matdates.MinuteLocator):
"""The purpose of this class is to ignore the change of day tick, so it doesn't overlap and look ugly"""
def __call__(self):
dates = matdates.MinuteLocator.__call__(self)
days = matdates.DayLocator()
days.set_view_interval(self.viewInterval)
days.set_data_interval(self.dataInterval)
daychanges = days()
realdates = []
for date in dates:
if date not in daychanges:
realdates.append(date)
return realdates
class jSecondLocator(matdates.SecondLocator):
"""The purpose of this class is to ignore the change of day tick, so it doesn't overlap and look ugly"""
def __call__(self):
dates = matdates.SecondLocator.__call__(self)
days = matdates.DayLocator()
days.set_view_interval(self.viewInterval)
days.set_data_interval(self.dataInterval)
daychanges = days()
realdates = []
for date in dates:
if date not in daychanges:
realdates.append(date)
return realdates
class Chart(widgets.ContentWidget):
def __init__(self, charttable, newattribs={}):
self.charttable = charttable
if not newattribs.has_key('chartType'):
newattribs['chartType'] = LINE_CHART
if not newattribs.has_key('dpi'):
newattribs['dpi'] = 80
self.content_type = 'image/png'
widgets.ContentWidget.__init__(self, "", [], newattribs)
self.getdata()
self.sizeimage()
self.figure = matlab.new_figure_manager(1,(self.im_width,self.im_height),self.attribs['dpi'])
def getdata(self):
"""Subclasses of this should implement this uniquely"""
self.xdata = []
self.ydata = []
def getimage(self):
"""Subclasses of this should implement this uniquely"""
return ''
def sizeimage(self):
"""Subclasses of this should implement this uniquely"""
self.im_width = 8.75
self.im_height = 5.6
def getimagefromtempfile(self):
#Save temporary file
# tempfilename = os.path.join(tempfile.gettempdir(),'temp.png')
retrieved = 0
while not retrieved:
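            # Retry with a fresh random filename until the figure has been
            # written and read back (hedged reading: this guards against
            # temp-name collisions and transient I/O errors).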
try:
tempfilename = str(random.randint(100000,999999)) + '.png'
if hasattr(self.figure,'figure'):
self.figure.figure.print_figure(tempfilename,self.attribs['dpi'])
else: # matplotlib 0.50
self.figure.canvas.print_figure(tempfilename,self.attribs['dpi'])
#Read and report
f = open(tempfilename,'rb')
img = f.read()
f.close()
retrieved = 1
os.remove(tempfilename)
except:
(etype, evalue, trace) = sys.exc_info()
# print "Temp file error:",etype,evalue
# import traceback
# traceback.print_tb(trace)
return img
def gethtml(self):
# gdchartLock.acquire()
try:
self.drawimage()
except:
            print "Unexpected Error:"
            print errors.ConsoleErrorHandler().traceback_str()
            raise
# gdchartLock.release()
# return None
# gdchartLock.release()
img = self.getimagefromtempfile()
return img
standardColours = [(0.0, 0.0, 1.0), # blue
(0.0, 0.5, 0.0), # green
(1.0, 0.0, 0.0), # red
(0.0, 0.75, 0.75), # cyan
(0.75, 0, 0.75), # magenta
(0.75, 0.75, 0), # yellow
(0.0, 0.0, 0.0), # black
'#4682B4', # Steelblue
'#7FFF00', # Chartreuse
'#FF7F50', # Coral
'#808000', # Olive
'#FF4500', # Orangered
]
#This class now assumes a text storage format for x values. See below for subclass which handles dates
class LineChart(Chart):
def __init__(self, charttable, xcolumn, ycolumns, ylabels=None, newattribs={}):
if not newattribs.has_key('datalimits'):
newattribs['datalimits'] = {}
if not newattribs.has_key('numticks'):
newattribs['numticks'] = 10
if not newattribs.has_key('chartcolours'):
newattribs['chartcolours'] = standardColours
self.ylabels = ylabels
self.normalisedValues = len(newattribs['datalimits']) > 0
self.xcolumn = xcolumn
self.ycolumns = ycolumns
self.xMajorLocator = None
self.xMajorFormatter = None
self.xMinorLocator = None
self.xMinorFormatter = None
Chart.__init__(self, charttable, newattribs)
def sizeimage(self):
self.im_width = self.attribs['width']/self.attribs['dpi']
if self.attribs.has_key('height'):
self.im_height = self.attribs['height']/self.attribs['dpi']
else:
self.im_height = 5.6
def mapfromNone(self, value):
if value == None or value == '':
return 0
else:
return value
def getdata(self):
chartdata = self.charttable.gettablerows(self.attribs.get('filter',None))
xlabels = [str(row[self.xcolumn]) for row in chartdata]
self.ydata = [[self.mapfromNone(row[ycolumn]) for row in chartdata] for ycolumn in self.ycolumns]
self.legendlabels = [str(ycolumn) for ycolumn in self.ycolumns]
#x axis tick labels should be uniformly distributed in this case
startVal = self.attribs.get('startval', 0)
        endVal = self.attribs.get('endval', len(xlabels))  # xlabels is what this subclass builds; self.xdata is never set here
self.xvalues = matlab.arange(startVal, endVal, float(endVal - startVal)/len(xlabels))
self.xMajorLocator = ticker.LinearLocator(self.attribs.get('numticks',10))
self.xMajorFormatter = ticker.FixedFormatter(xlabels)
def normaliseValue(self, val, min, max, ymin, ymax):
"""Adjust value between min and max to be between ymin and ymax"""
temp = (val - min)/(max - min)
return temp*(ymax - ymin) + ymin
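    # Worked example (illustrative): normaliseValue(5, 0, 10, 0, 100)
    # -> (5 - 0)/(10 - 0) = 0.5, then 0.5*(100 - 0) + 0 = 50.0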
def normaliseData(self, ymin, ymax):
"""If limits are provided for y values, normalise to between those limits"""
newData = []
for i, dataset in enumerate(self.ydata):
#Don't factor limited data into ymin, ymax
            if (self.legendlabels is not None and len(self.legendlabels) > i
                    and self.attribs['datalimits'].get(self.legendlabels[i], None) is not None):
min, max = self.attribs['datalimits'][self.legendlabels[i]]
newSet = [self.normaliseValue(val,min,max,ymin,ymax) for val in dataset]
newData.append(newSet)
else:
newData.append(dataset[:])
self.ydata = newData
def getAxisMap(self, rangelimits):
# Override this function for better management of axes
# This version returns one set of axes
minEU = None
maxEU = None
for plotname in rangelimits.keys():
ymin, ymax = rangelimits[plotname]
if maxEU is None or ymax > maxEU:
maxEU = ymax
            if minEU is None or ymin < minEU:
minEU = ymin
axismap = [((minEU, maxEU), rangelimits.keys())]
return axismap
def drawimage(self):
#Set the min/max of each axis
plotnametodata = {}
for i, dataset in enumerate(self.ydata):
#Don't factor limited data into ymin, ymax
datalimits = self.attribs['datalimits'].get(self.legendlabels[i],None)
# Remember to check that the limits themselves have not been set to None
            if (self.legendlabels is not None and len(self.legendlabels) > i
                    and datalimits is not None and datalimits[0] is not None
                    and datalimits[1] is not None and datalimits[0] != datalimits[1]):
ymax, ymin = datalimits
else:
ymin = None
ymax = None
for value in dataset:
if ymin is None or value < ymin:
ymin = value
if ymax is None or value > ymax:
ymax = value
self.attribs['datalimits'][self.legendlabels[i]] = [ymax, ymin]
plotnametodata[self.legendlabels[i]] = dataset
# Create the mapping the right way round
tagtolimitmap = {}
for plotname in self.attribs['datalimits'].keys():
ymax, ymin = self.attribs['datalimits'][plotname]
tagtolimitmap[plotname] = (ymin, ymax)
#Set the size of the subplot big enough to handle times
#[left, bottom, width, height] out of 1
axismap = {}
axismap = self.getAxisMap(tagtolimitmap)
axes = []
subplotTotal = len(axismap)
if (subplotTotal != 0):
plotSize = .85 / subplotTotal
for i, (axis, tags) in enumerate(axismap):
axes.append(self.figure.canvas.figure.add_subplot(subplotTotal,1,i+1,axisbg='w'))
axes[-1].set_position([.08,i*plotSize+.15,.9,plotSize-.02])
self.figure.canvas.figure.set_figsize_inches(self.im_width, self.im_height)
#Plot each dataset
plots = []
for i, (axis, tags) in enumerate(axismap):
for plotname in tags:
if not self.attribs.get('plotdate',0):
plots.append(axes[i].plot(self.xvalues, plotnametodata[plotname]))
else:
plots.append(axes[i].plot_date(self.xvalues,plotnametodata[plotname],fmt="-"))
#Set the min/max of each axis
for i, (axis, tags) in enumerate(axismap):
ymin, ymax = axis
if ymin == ymax:
ymax += 1
ymin -= 1
axes[i].set_ylim([math.floor(ymin),math.ceil(ymax)])
# We can set the colour of the lines here
# with the variable plots
chartColours = self.attribs.get('chartcolours',standardColours)
plotNum = 0
for lines in plots:
for line in lines:
line.set_color(chartColours[plotNum % len(chartColours)])
plotNum += 1
for ax in axes:
ax.set_xlim([self.attribs.get('startval',None), self.attribs.get('endval',None)])
if (len(axes) > 0):
if self.xMajorLocator:
for ax in axes:
ax.xaxis.set_major_locator(self.xMajorLocator)
if self.xMajorFormatter:
axes[0].xaxis.set_major_formatter(self.xMajorFormatter)
if self.xMinorLocator:
for ax in axes:
ax.xaxis.set_minor_locator(self.xMinorLocator)
if self.xMinorFormatter:
axes[0].xaxis.set_minor_formatter(self.xMinorFormatter)
for ax in axes:
ax.set_xlim([self.attribs.get('startval',None), self.attribs.get('endval',None)])
for ax in axes[1:]:
labels = ax.get_xticklabels()
labels.extend([tick.label1 for tick in ax.xaxis.get_minor_ticks()])
for label in labels:
label.set_alpha(0)
label.set_color('w')
if (len(axes) > 0):
labels = axes[0].get_xticklabels()
labels.extend([tick.label1 for tick in axes[0].xaxis.get_minor_ticks()])
for label in labels:
label.set_rotation('vertical')
#Draw a legend, but only if there are any plots to draw
if self.legendlabels:
for i, (axis, tags) in enumerate(axismap):
axes[i].legend(tags,2)
leg = axes[i].get_legend()
leg.get_frame().set_fill(False)
# ax.autoscale_view()
NUM_TIME_LABELS = 10
def pottime2date(tm):
"""Converts an object from a time object to a date object safely"""
if not type(tm).__name__ == "time":
return tm
return dates.WinPyTimeToDate(tm)
class DateLineChart(LineChart):
def __init__(self, charttable, xcolumn, ycolumns, ylabels=None, newattribs={}):
        newattribs['plotdate'] = 1  # key must match the lowercase 'plotdate' lookup in drawimage()
LineChart.__init__(self, charttable, xcolumn, ycolumns, ylabels=ylabels, newattribs=newattribs)
def getdata(self):
filter = self.attribs.get('filter',None)
chartdata = self.charttable.gettablerows(filter)
startDate = self.attribs.get('startdate',None)
endDate = self.attribs.get('enddate',None)
if startDate == None:
if chartdata:
startDate = matdates.date2num(chartdata[0][self.xcolumn])
else:
startDate = matdates.date2num(dates.currentdate())
else:
startDate = matdates.date2num(startDate)
if endDate == None:
if chartdata:
endDate = matdates.date2num(chartdata[-1][self.xcolumn])
else:
endDate = matdates.date2num(dates.currentdate() - dates.days(1))
else:
endDate = matdates.date2num(endDate)
self.attribs['startval'] = startDate
self.attribs['endval'] = endDate
if chartdata == []: #No data
self.xvalues = [startDate, endDate]
self.ydata = [[0 for xvalue in self.xvalues] for ycolumn in self.ycolumns]
else:
self.xvalues = [matdates.date2num(pottime2date(row[self.xcolumn])) for row in chartdata]
self.ydata = [[self.mapfromNone(row[ycolumn]) for row in chartdata] for ycolumn in self.ycolumns]
self.legendlabels = [str(ycolumn) for ycolumn in self.ycolumns]
gradationUnits = self.attribs.get('gradationunits',None)
numOfGradations = self.attribs.get('numofgradations',None)
gradationSize = self.attribs.get('gradationsize',None)
if gradationUnits is None or numOfGradations is None or gradationSize is None:
self.xMajorLocator = jMajorDateLocator(time.timezone/3600)
# self.xMajorLocator = matdates.DayLocator(100)
self.xMajorFormatter = matdates.DateFormatter(self.attribs.get('xMajorFormat','%y-%m-%d'))
self.xMinorLocator = jMinorLinearLocator(10)
self.xMinorFormatter = matdates.DateFormatter(self.attribs.get('xMinorFormat','%H:%M:%S'))
elif gradationUnits == 'days':
self.xMajorLocator = matdates.MonthLocator()
self.xMajorFormatter = matdates.DateFormatter('%y-%m-%d')
self.xMinorLocator = matdates.DayLocator(interval=gradationSize)
self.xMinorFormatter = matdates.DateFormatter('%m-%d')
elif gradationUnits == 'hours':
self.xMajorLocator = matdates.DayLocator()
self.xMajorFormatter = matdates.DateFormatter('%y-%m-%d')
self.xMinorLocator = matdates.HourLocator(byhour=range(1,24),interval=gradationSize)
self.xMinorFormatter = matdates.DateFormatter('%H:%M')
else:
self.xMajorLocator = matdates.DayLocator()
self.xMajorFormatter = matdates.DateFormatter('%y-%m-%d')
if gradationUnits == 'minutes':
self.xMinorLocator = jMinuteLocator(interval=gradationSize)
else:
self.xMinorLocator = jSecondLocator(interval=gradationSize)
self.xMinorFormatter = matdates.DateFormatter('%H:%M:%S')
class CurrentValueLegendChart(Chart):
"""This class creates a bar chart which acts as a legend and a current value reporter"""
def __init__(self, charttable, xcolumns, colours, newattribs={}):
self.xcolumns = xcolumns
newattribs['chartType'] = BAR_CHART
Chart.__init__(self, charttable, newattribs)
#Turn colours into an array exactly len(self.xcolumns) long
repeatcolours = len(self.xcolumns) / len(colours)
endcolours = len(self.xcolumns) % len(colours)
self.colours = colours*repeatcolours + colours[:endcolours]
def getdata(self):
self.xdata = [str(xcolumn) for xcolumn in self.xcolumns]
chartdata = self.charttable.gettablerows(self.attribs.get('filter',None))
finalrow = chartdata[len(chartdata)-1]
self.ydata = [[finalrow[xcolumn] for xcolumn in self.xcolumns]]
def drawimage(self):
#Find the longest text on the x axis
maxtextlen = 0
for text in self.xdata:
if len(text) > maxtextlen:
maxtextlen = len(text)
#Convert it to a proportion of the image
bottomProportion = .1 + maxtextlen*.013
heightProportion = .99 - bottomProportion
#Set the size of the subplot big enough to handle times
#[left, bottom, width, height] out of 1
self.figure.add_axes([.125,bottomProportion,.825,heightProportion],'w')
#Set the min/max of each axis
ymin = sys.maxint
ymax = -sys.maxint+1
for value in self.ydata[0]:
if value < ymin:
ymin = value
if value > ymax:
ymax = value
self.figure.get_current_axis().set_xlim([0,len(self.xdata)+1])
self.figure.get_current_axis().set_ylim([math.floor(ymin),math.ceil(ymax)])
self.figure.get_current_axis().set_xticks(matlab.arange(len(self.xdata))+0.25)
self.figure.get_current_axis().set_xticklabels(self.xdata,rotation='vertical')
originY = None
if ymin < 0 and ymax > 0:
originY = 0
self.figure.get_current_axis().bar(matlab.arange(len(self.xdata)),self.ydata[0],0.5,color=self.colours,originY=originY)
def sizeimage(self):
self.im_width = 3
self.im_height = 4 #This should take tagname length into account
def test():
"""tests using some values that were giving problems"""
import datetime
startDate = datetime.datetime(2004, 12, 8, 14, 9, 34, 6000)
endDate = datetime.datetime(2004, 12, 8, 15, 9, 34, 6000)
newattribs = {"startDate": startDate, "endDate": endDate, 'width': 775, 'height': 550, 'dpi': 80}
ylabels = ['MinEU', 'MaxEU']
xcolumn = 'logtime'
ycolumns = ['asd']
class charttabletest:
def gettablerows(self, filter):
return [{'logtime':startDate, 'asd':20}, {'logtime':endDate, 'asd':40}]
charttable = charttabletest()
chart = DateLineChart(charttable, xcolumn, ycolumns, ylabels, newattribs)
open("test.png", "wb").write(chart.gethtml())
if __name__ == "__main__":
test()
|
cc-archive/jtoolkit
|
jToolkit/widgets/chart.py
|
Python
|
gpl-2.0
| 19,564
|
# -*- coding: iso-8859-1 -*-
# -----------------------------------------------------------------------
# eventserver.py - XBMC-compatible eventserver
# -----------------------------------------------------------------------
# $Id$
#
# JSONRPC and XBMC eventserver to be used for XBMC-compatible
# remotes. Only tested with Yatse so far. If something is not working,
# do not blame the remote, blame this plugin.
#
# Not all API calls are implemented yet.
#
# -----------------------------------------------------------------------
# Freevo - A Home Theater PC framework
# Copyright (C) 2014 Dirk Meyer, et al.
#
# First Edition: Dirk Meyer <https://github.com/Dischi>
# Maintainer: Dirk Meyer <https://github.com/Dischi>
#
# Please see the file AUTHORS for a complete list of authors.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MER-
# CHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# ----------------------------------------------------------------------- */
# python imports
import logging
import struct
# freevo imports
from ... import core as freevo
from ..input.eventmap import EVENTMAP
# get logging object
log = logging.getLogger('freevo')
# keymap used for the button mapping. If a key is not found the
# plugin.input EVENTMAP is used.
keymap = {
'menu': {
'title': freevo.MENU_SUBMENU,
'contextmenu': freevo.MENU_SUBMENU,
'back': freevo.MENU_BACK_ONE_MENU,
},
'videoplayer': {
'back': freevo.STOP,
'skipminus': freevo.Event(freevo.SEEK, -60),
'reverse': freevo.Event(freevo.SEEK, -10),
'forward': freevo.Event(freevo.SEEK, 10),
'skipplus': freevo.Event(freevo.SEEK, 60),
'left': freevo.Event(freevo.SEEK, -60),
'right': freevo.Event(freevo.SEEK, 60),
'info': freevo.TOGGLE_OSD,
'osd': freevo.VIDEO_CHANGE_ASPECT,
'display': freevo.VIDEO_CHANGE_ASPECT,
},
'audioplayer': {
'back': freevo.STOP,
'skipminus': freevo.PLAYLIST_PREV,
'reverse': freevo.Event(freevo.SEEK, -10),
'forward': freevo.Event(freevo.SEEK, 10),
'skipplus': freevo.PLAYLIST_NEXT,
'left': freevo.Event(freevo.SEEK, -60),
'right': freevo.Event(freevo.SEEK, 60),
},
'imageviewer': {
'back': freevo.STOP,
'skipminus': freevo.PLAYLIST_PREV,
'skipplus': freevo.PLAYLIST_NEXT,
'left': freevo.Event(freevo.IMAGE_ROTATE, 'left'),
'right': freevo.Event(freevo.IMAGE_ROTATE, 'right'),
},
}
# Muted is not supported by Freevo but Yatse uses it to test the
# eventserver.
muted = False
def handle(data):
"""
Callback for UDP eventserver events
"""
header = struct.unpack('!4c2bhLLL', data[:20])
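    # Header layout (hedged reading of the XBMC eventserver protocol):
    # four signature chars ("XBMC"), major/minor version bytes, a packet
    # type short (header[6]), then sequence, max sequence and payload
    # size as unsigned 32-bit ints -- 20 bytes in total.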
if header[6] == 0x03: # BUTTON
key = data[38:].split('\0')[1]
app = freevo.taskmanager.applications[-1]
if app.name in keymap and key in keymap[app.name]:
freevo.Event(keymap[app.name][key]).post(event_source='user')
return True
if app.eventmap in EVENTMAP and key.upper() in EVENTMAP[app.eventmap]:
EVENTMAP[app.eventmap][key.upper()].post(event_source='user')
return True
log.error('unmapped key: %s' % key)
return True
if header[6] == 0x0A: # ACTION
action = data[33:].strip()
if action.startswith('ActivateWindow'):
window = action[action.find('(')+1:action.find(')')]
if window == 'Pictures':
freevo.Event(freevo.MENU_GOTO_MEDIA).post('image', event_source='user')
elif window == 'MusicLibrary':
freevo.Event(freevo.MENU_GOTO_MEDIA).post('audio', event_source='user')
elif window == 'Videos,MovieTitles':
freevo.Event(freevo.MENU_GOTO_MEDIA).post('video', 'movie', event_source='user')
elif window == 'Videos,TvShowTitles':
freevo.Event(freevo.MENU_GOTO_MEDIA).post('video', 'tv', event_source='user')
elif window == 'Home':
freevo.Event(freevo.MENU_GOTO_MAINMENU).post(event_source='user')
else:
log.error('ActivateWindow: unsupported window: %s' % window)
elif action.startswith('Mute'):
global muted
muted = not muted
else:
log.error('unsupported eventserver action: %s' % action)
else:
log.error('unsupported packet type: %s' % int(header[6]))
def input(key, params):
if key == 'executeaction':
key = params['action']
app = freevo.taskmanager.applications[-1]
if app.name in keymap and key in keymap[app.name]:
freevo.Event(keymap[app.name][key]).post(event_source='user')
return True
if app.eventmap in EVENTMAP and key.upper() in EVENTMAP[app.eventmap]:
EVENTMAP[app.eventmap][key.upper()].post(event_source='user')
return True
log.error('unmapped key: %s' % key)
return True
|
freevo/freevo2
|
src/plugins/jsonrpc/eventserver.py
|
Python
|
gpl-2.0
| 5,552
|
#!/usr/bin/env python
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import os
import time
import pymongo
import datetime
from pymongo import ASCENDING, DESCENDING
if "Darwin" in os.popen("uname").read():
MONGOSERVER = 'localhost'
else:
MONGOSERVER = '192.168.167.192'
MONGOPORT = 27017
def getDB():
conn = pymongo.Connection(MONGOSERVER, MONGOPORT)
db = conn.newsrivr
return db
def getCollUsers():
db = getDB()
coll = db.users
return coll
def getCollUnprocessedTweets():
db = getDB()
coll = db.tweets
return coll
def getCollDrops():
db = getDB()
coll = db.drops
return coll
def getCollStream():
db = getDB()
coll = db.stream
return coll
def getCollImageMd5s():
db = getDB()
coll = db.imagemd5
return coll
def main():
ids = set()
cnt = 0
for i in getCollDrops().find():
ids.add(i["id_str"])
cnt += 1
print(len(ids), cnt)
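    # NOTE (hedged reading): the early return below disables the merge
    # pass further down; as written, main() only reports the counts.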
return
cnt2 = 0
for id in ids:
cd = None
for d in getCollDrops().find({"id_str":id}):
if type(d["newsrivr_userid_md5"])!=type([]):
if not cd:
cd = d
cd["newsrivr_userid_md5"] = [cd["newsrivr_userid_md5"]]
else:
cd["newsrivr_userid_md5"].append(d["newsrivr_userid_md5"])
getCollDrops().remove({"id_str":id}, safe=True)
cnt2 += 1
if cnt2%1000==0:
print(cnt2)
time.sleep(1)
getCollDrops().save(cd, safe=True)
if __name__=='__main__':
main()
print("done")
|
erikdejonge/newsrivr
|
daemons/one_tweet_one_drop.py
|
Python
|
gpl-2.0
| 1,554
|
#!/usr/bin/env python3
import unittest
import sys
sys.path = ["/opt/thinlinc/modules"] + sys.path
import subprocess
import re
import os
import tempfile
class ObfuscateTest(unittest.TestCase):
def mkstemp(self):
"""wrapper for mkstemp, calling mktemp if mkstemp is not available"""
if hasattr(tempfile, "mkstemp"):
return tempfile.mkstemp()
else:
fname = tempfile.mktemp()
return os.open(fname, os.O_RDWR|os.O_CREAT), fname
def run_pyobfuscate(self, testfile, args=[]):
cmdline = ["../pyobfuscate"] + args + [testfile]
p = subprocess.Popen(cmdline,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = p.communicate()
assert b'' == stderr, "pyobfuscate wrote to stderr: %s" % stderr
return stdout.decode()
def obfuscate_and_write(self, testfile, outfile, args=[]):
with open(outfile, 'w') as f:
f.write(self.run_pyobfuscate(testfile, args))
def run_src(self, src):
f, fname = self.mkstemp()
os.write(f, src.encode())
os.close(f)
retcode = subprocess.call([sys.executable, fname])
os.remove(fname)
return retcode
def obfuscate_and_run_file(self, testfile, args=[]):
output = self.run_pyobfuscate(testfile, args)
return self.run_src(output)
def obfuscate_and_run_src(self, src, args=[]):
f, fname = self.mkstemp()
os.write(f, src)
os.close(f)
retcode = self.obfuscate_and_run_file(fname, args)
os.remove(fname)
return retcode
def test_DontKeepblanks(self):
"""Don't keep blanks unless told so"""
output = self.run_pyobfuscate("testfiles/commblanks.py")
        assert None == re.search(r"^$", output, re.M), "Blank lines in output"
lines = output.split("\n")
assert "#" == lines[0][0], "First line is not a comment"
assert 42 == self.run_src(output)
def test_Keepblanks(self):
"""Keep blanks when told so"""
output = self.run_pyobfuscate("testfiles/commblanks.py",
args=["--keepblanks"])
lines = output.split("\n")
assert '' == lines[5], "Blank lines removed"
assert 42 == self.run_src(output)
def test_lambdaGlobal(self):
"""Support lambda constructs referencing global functions.
Test inspired by log message for revision 1.15"""
assert 42 == self.obfuscate_and_run_file("testfiles/lambda_global.py"), "Incorrect value returned after obfuscation"
def test_power(self):
"""Handle power operator correctly. Bug 1411"""
assert 4 == self.obfuscate_and_run_file("testfiles/power.py"), "Incorrect value returned after obfuscation"
def test_keywordfunc(self):
"""Handle keyword functions correctly.
Test inspired by revision 1.8 and revision 1.9"""
assert 42 == self.obfuscate_and_run_file("testfiles/keywordfunc.py"), "Incorrect value returned after obfuscation"
def test_importlist(self):
"""Handle from <x> import <y>"""
self.obfuscate_and_write("testfiles/tobeimported.py",
"generated/tobeimported.py")
self.obfuscate_and_write("testfiles/import.py",
"generated/import.py")
assert 42 == subprocess.call([sys.executable, "generated/import.py"]), "Incorrect value returned after obfuscation"
def test_import_package(self):
"""Handle from x.y import z"""
self.obfuscate_and_write("testfiles/package/tobeimported.py",
"generated/package/tobeimported.py")
self.obfuscate_and_write("testfiles/package/__init__.py",
"generated/package/__init__.py")
self.obfuscate_and_write("testfiles/importpackage.py",
"generated/importpackage.py")
assert 42 == subprocess.call([sys.executable,
"generated/importpackage.py"],
env={"PYTHONPATH":"generated"}), "Incorrect value returned after obfuscation"
def test_import_package_as(self):
"""Handle from x.y import z as a"""
self.obfuscate_and_write("testfiles/package/tobeimported.py",
"generated/package/tobeimported.py")
self.obfuscate_and_write("testfiles/package/__init__.py",
"generated/package/__init__.py")
self.obfuscate_and_write("testfiles/importpackage_as.py",
"generated/importpackage_as.py")
assert 42 == subprocess.call([sys.executable,
"generated/importpackage_as.py"],
env={"PYTHONPATH":"generated"}), "Incorrect value returned after obfuscation"
def test_import_everything(self):
self.obfuscate_and_write("testfiles/tobeimported_everything.py",
"generated/tobeimported_everything.py")
self.obfuscate_and_write("testfiles/importall_star.py",
"generated/importall_star.py")
assert 42 == subprocess.call([sys.executable, "generated/importall_star.py"]), "Incorrect value returned after obfuscation"
def test_import_dyn_all(self):
"""Verify that trying to import from a file with dynamic __all__ fails"""
cmdline = ["../pyobfuscate", "testfiles/dyn_all.py"]
p = subprocess.Popen(cmdline,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = p.communicate()
assert -1 != stderr.find(b"__all__ is not a list of constants"), "pyobufscate didn't bail out with an error on file with dynamic __all__"
def test_import_with_keywords(self):
"""Verify that an imported class, defined in __all__, does not get obfuscated keyword arguments"""
self.obfuscate_and_write("testfiles/keywordclass.py",
"generated/keywordclass.py")
self.obfuscate_and_write("testfiles/keywordclassuser.py",
"generated/keywordclassuser.py")
assert 42 == subprocess.call([sys.executable, "generated/keywordclassuser.py"]), "Incorrect value returned after obfuscation"
def test_global_stmt(self):
"""Verify use of 'global' keyword"""
assert 42 == self.obfuscate_and_run_file("testfiles/global.py"), "Incorrect value returned after obfuscation"
def test_definition_after_use(self):
"""Verify that a function defined after it's used works as expected"""
output = self.run_pyobfuscate("testfiles/defafteruse.py")
assert 42 == self.run_src(output), "Incorrect value returned after obfuscation"
def test_bug1583(self):
"""Verify that bug 1583 is not present (lambda obfuscation problems)"""
output = self.run_pyobfuscate("testfiles/bug1583.py")
assert 42 == self.run_src(output), "Incorrect value returned after obfuscation"
def test_bug1673(self):
"""Verify that bug 1673 is not present (global variable handling)"""
output = self.run_pyobfuscate("testfiles/bug1673.py")
assert 49 == self.run_src(output), "Incorrect value returned after obfuscation"
def test_allpublic(self):
""" Verify that we respect the --allpublic flag but still keep
functions starting with '_' hidden """
self.obfuscate_and_write("testfiles/tobeimported_noall.py",
"generated/tobeimported_noall.py",
args=["--allpublic"])
self.obfuscate_and_write("testfiles/allpublic.py",
"generated/allpublic.py")
res = subprocess.call([sys.executable, "generated/allpublic.py"])
assert 1 == res, "Incorrect value returned after obfuscation"
def test_allpublic_hidden(self):
""" Verify that we still keep functions starting with '_' hidden """
self.obfuscate_and_write("testfiles/tobeimported_noall.py",
"generated/tobeimported_noall.py",
args=["--allpublic"])
self.obfuscate_and_write("testfiles/allpublic_hidden.py",
"generated/allpublic_hidden.py")
res = subprocess.call([sys.executable, "generated/allpublic_hidden.py"])
assert 1 == res, "Incorrect value returned after obfuscation"
if "__main__" == __name__:
unittest.main()
|
astrand/pyobfuscate
|
test/test_pyobfuscate.py
|
Python
|
gpl-2.0
| 8,719
|
#!/usr/bin/env python
def count_sort(list):
max_value = max(list)
bucket = [0 for _ in xrange(max_value+1)]
for value in list:
bucket[value] += 1
# print "bucket:", bucket
for i in xrange(1, len(bucket)):
# if i == 0: continue
bucket[i] += bucket[i-1]
# print "cumulative bucket:", bucket
new_list = [0 for _ in xrange(len(list))]
for i in xrange(len(list)):
# print i, list[i], "->new_list", bucket[list[i]]
bucket[list[i]] -= 1
new_list[bucket[list[i]]] = list[i]
return new_list
def radix_sort(list, base=10):
exp = 1
max_value = max(list)
if max_value < 1: return list
bucket = [0 for _ in xrange(base)]
    # use <= so the most significant digit of max_value is still processed
    while exp <= max_value:
for i in xrange(base):
bucket[i] = 0
# split to bucket
for value in list:
digit = value//exp % base
# print value, digit
bucket[digit] += 1
# print "bucket:", bucket
# cumulative sum
for i in xrange(1, len(bucket)):
bucket[i] += bucket[i-1]
# print "cumulative bucket:", bucket
# reposition
new_list = [0 for _ in xrange(len(list))]
for i in xrange(len(list) - 1, -1, -1):
digit = list[i]//exp % base
bucket[digit] -= 1
new_list[bucket[digit]] = list[i]
# print "list[i]:", list[i], "digit: ", digit, "bucket[digit]: ", bucket[digit]
# print "list after iteration:", new_list
list = new_list
exp *= base
return list
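
# --- Hedged usage sketch (not in the original file) ---
# Both sorts assume non-negative integers; the values are illustrative.
if __name__ == "__main__":
    data = [170, 45, 75, 90, 802, 24, 2, 66]
    print "count_sort:", count_sort(data)
    print "radix_sort:", radix_sort(data)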
|
alvin777/excelsior
|
sort/radix_sort.py
|
Python
|
gpl-2.0
| 1,594
|
##
# Copyright 2012-2020 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for gqacml compiler toolchain (includes GCC, QLogicMPI, ACML, BLACS, ScaLAPACK and FFTW).
:author: Kenneth Hoste (Ghent University)
"""
from easybuild.toolchains.gcc import GccToolchain
from easybuild.toolchains.fft.fftw import Fftw
from easybuild.toolchains.linalg.acml import Acml
from easybuild.toolchains.linalg.scalapack import ScaLAPACK
from easybuild.toolchains.mpi.qlogicmpi import QLogicMPI
class Gqacml(GccToolchain, QLogicMPI, Acml, ScaLAPACK, Fftw):
"""Compiler toolchain with GCC, QLogic MPI, ACML, ScaLAPACK and FFTW."""
NAME = 'gqacml'
SUBTOOLCHAIN = GccToolchain.NAME
|
pescobar/easybuild-framework
|
easybuild/toolchains/gqacml.py
|
Python
|
gpl-2.0
| 1,671
|
# -*- coding: utf-8 -*-
# Asymmetric Base Framework - A collection of utilities for django frameworks
# Copyright (C) 2013 Asymmetric Ventures Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from __future__ import absolute_import, division, print_function, unicode_literals
from django.forms import * # @UnusedWildImport pylint: disable-msg=W0401
from django import forms # @Reimport
from django.forms.models import modelformset_factory, BaseModelFormSet, \
inlineformset_factory
from django.utils.encoding import force_text
from django.utils.html import conditional_escape
from django.conf import settings
import jinja2
from asymmetricbase.jinja import jinja_env
from asymmetricbase.forms.monkey_patch_django import monkey_patch_django
from .datetimetzfield import DateTimeTZField
HTML5 = getattr(settings, 'ASYM_HTML5', False)
HTML5_WIDGETS = getattr(settings, 'ASYM_HTML5_WIDGETS', {})
if HTML5:
from asymmetricbase.forms.html5_widgets import * # pylint: disable-msg=W0401
class BaseFormMixin(object):
required_css_class = 'field_required'
error_css_class = 'field_error'
def __init__(self, *args, **kwargs):
super(BaseFormMixin, self).__init__(*args, **kwargs)
if not hasattr(self, 'Meta'):
return
validate = getattr(self.Meta, 'validate', {})
field_info = getattr(self.Meta, 'field_info', {})
widgets = getattr(self.Meta, 'widgets', {})
for name, field in self.fields.items():
newattrs = {}
if isinstance(field, (DateField,)):
newattrs.update({'class' : 'datepicker placeholder', 'title' : 'YYYY-mm-dd'})
if hasattr(field, 'min_date_range'):
newattrs.update({'data-min_date_range' : field.min_date_range.strftime("%Y/%m/%d")})
if isinstance(field, forms.DecimalField):
field.localize = True
field.widget.is_localized = True
if getattr(widgets.get(name, {}), 'input_type', '') != 'hidden':
if HTML5_WIDGETS.get('email', False) and isinstance(field, forms.EmailField):
field.widget.input_type = 'email'
if HTML5_WIDGETS.get('number', False) and isinstance(field, (forms.IntegerField, forms.DecimalField)):
field.widget.input_type = 'number'
if field.max_value is not None:
newattrs.update({'max' : field.max_value})
if field.min_value is not None:
newattrs.update({'min' : field.min_value})
if isinstance(field, forms.DecimalField):
# get step from field.decimal_places
newattrs.update(
step = '0.{}1'.format('0' * (field.decimal_places - 1)) if field.decimal_places > 0 else '1'
)
if name in validate:
validate_string = validate[name]
newattrs.update({'data-validate' : validate_string})
if name in field_info:
info = field_info[name]
field_data = info.pop('data', {})
newattrs.update(info)
for key, value in field_data.items():
key = "data-{}".format(key)
newattrs.update({ key : value})
# re-add data back into the meta
info['data'] = field_data
field.widget.attrs.update(newattrs)
# A default Meta class that can be used in any subclass
class Meta(object): pass
class Form(BaseFormMixin, forms.Form):
def _html_output(self, *args, **kwargs):
return jinja2.Markup(super(Form, self)._html_output(*args, **kwargs))
def __html__(self):
return self.as_table()
class ModelForm(BaseFormMixin, forms.ModelForm):
template_module = jinja_env.get_template('asymmetricbase/forms/form_rows.djhtml').module
def __init__(self, *args, **kwargs):
self._meta.exclude = tuple(set(('uuid',) + (self._meta.exclude or ())))
super(ModelForm, self).__init__(*args, **kwargs)
def _html_output(self, *args, **kwargs):
return jinja2.Markup(super(ModelForm, self)._html_output(*args, **kwargs))
def _render_html_template(self, template_macro, errors_on_separate_row, required_mark = '*'):
top_errors = self.non_field_errors()
output, hidden_fields = [], []
for name, field in self.fields.items():
bf = self[name]
macro_args = {
'label' : '',
'field' : bf,
'help_text' : '',
'errors' : '',
'html_class_attr' : '',
'is_error_row' : errors_on_separate_row,
'required_mark' : ''
}
bf_errors = self.error_class([conditional_escape(error) for error in bf.errors])
macro_args['errors'] = force_text(bf_errors)
macro_args['required_mark'] = required_mark if field.required else u''
if bf.is_hidden:
if bf.errors:
top_errors.extend([u'(Hidden field {}) {}'.format(name, force_text(e)) for e in bf_errors])
hidden_fields.append(unicode(bf))
else:
css_classes = bf.css_classes()
if css_classes:
macro_args['html_class_attr'] = jinja2.Markup(' class="{}"'.format(css_classes))
if errors_on_separate_row and bf_errors:
output.append(template_macro(**macro_args))
if bf.label:
label = conditional_escape(force_text(bf.label))
if self.label_suffix:
if label[-1] not in ':?.!':
label += self.label_suffix
label_css = 'required' if field.required else u''
macro_args['label'] = bf.label_tag(label, attrs = {'class' : label_css}) or u''
else:
macro_args['label'] = u''
if field.help_text:
macro_args['help_text'] = force_text(field.help_text)
else:
macro_args['help_text'] = u''
output.append(template_macro(**macro_args))
if top_errors:
output.insert(0, template_macro(label = '', field = '', help_text = '', html_class_attr = '', is_error_row = True, errors = top_errors))
if hidden_fields:
output.append(u''.join(hidden_fields))
return jinja2.Markup('\n'.join(output))
def as_table(self):
return self._render_html_template(self.template_module.as_table, False)
def as_tr(self):
return '<tr>{}</tr>'.format(self._render_html_template(self.template_module.as_tr, False))
def as_ul(self):
return self._render_html_template(self.template_module.as_ul, False)
def as_p(self):
return self._render_html_template(self.template_module.as_p, True)
def is_valid_and_save(self, commit = True):
if self.is_valid():
return self.save(commit)
return False
class TwoColumnTableLayout(object):
"""
This is basically a class to override the default as_table function.
Note that this does NOT inherit from forms.(Model)Form since we don't
want to override any other methods/attributes.
To use this form, in the inheritance list, it must come before any other
classes that implement the as_table() method to take advantage of C3 MRO
"""
def as_table(self):
return self._render_html_template(self.template_module.as_table_twocol, False)
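# Hedged usage sketch: TwoColumnTableLayout must precede the form base in
# the inheritance list so C3 MRO picks up its as_table(); the model and
# field names below are illustrative.
# class ArticleForm(TwoColumnTableLayout, ModelForm):
#     class Meta:
#         model = Article
#         fields = ('title', 'body')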
class AsymBaseModelFormSet(BaseModelFormSet):
def is_valid_and_save(self, commit = True):
if self.is_valid():
return self.save(commit)
return False
def _define_wrapper_class(form, args, kwargs):
formargs = kwargs.pop('formargs', {})
exclude = kwargs.pop('exclude', ())
kwargs.setdefault('formset', AsymBaseModelFormSet)
class WrapperClass(form):
def __init__(self, *args, **kwargs):
kwargs.update(formargs)
super(WrapperClass, self).__init__(*args, **kwargs)
for fname in exclude:
del self.fields[fname]
kwargs.update(dict(form = WrapperClass))
def make_modelformset_factory(model, form = ModelForm, *args, **kwargs):
_define_wrapper_class(form, args, kwargs)
return modelformset_factory(model, *args, **kwargs)
def make_inlineformset_factory(parent_model, model, form = ModelForm, *args, **kwargs):
_define_wrapper_class(form, args, kwargs)
return inlineformset_factory(parent_model, model, *args, **kwargs)
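# Hedged usage sketch (model and field names are illustrative):
# ArticleFormSet = make_modelformset_factory(Article, extra=2,
#                                            exclude=('internal_note',))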
monkey_patch_django()
|
AsymmetricVentures/asymmetricbase
|
asymmetricbase/forms/__init__.py
|
Python
|
gpl-2.0
| 8,311
|
''' Read/Write a table in the STAR format
This module reads from and writes to the STAR format, which is an
alternative to XML.
An example of the file:
.. container:: bottomnav, topic
| data_images
| loop\_
| _rlnImageName
| _rlnDefocusU
| _rlnDefocusV
| _rlnDefocusAngle
| _rlnVoltage
| _rlnAmplitudeContrast
| _rlnSphericalAberration
| 000001@/lmb/home/scheres/data/VP7/all_images.mrcs 13538 13985 109.45 300 0.15 2
| 000002@/lmb/home/scheres/data/VP7/all_images.mrcs 13293 13796 109.45 300 0.15 2
| 000003@/lmb/home/scheres/data/VP7/all_images.mcrs 13626 14085 109.45 300 0.15 2
It supports the following attributes:
- Extension: star
- Filter: Star (\*.star)
.. Created on Sep 28, 2010
.. codeauthor:: Robert Langlois <rl2528@columbia.edu>
'''
from .. import format_utility
import logging
_logger = logging.getLogger(__name__)
_logger.setLevel(logging.INFO)
def read_header(filename, header=[], data_found=False, tablename=None, **extra):
'''Parses the header on the first line of the Star file
.. sourcecode:: py
>>> import os
>>> os.system("more data.star")
data_images
loop_
_rlnImageName
_rlnDefocusU
_rlnDefocusV
_rlnDefocusAngle
_rlnVoltage
_rlnAmplitudeContrast
_rlnSphericalAberration
000001@/lmb/home/scheres/data/VP7/all_images.mrcs 13538 13985 109.45 300 0.15 2
000002@/lmb/home/scheres/data/VP7/all_images.mrcs 13293 13796 109.45 300 0.15 2
000003@/lmb/home/scheres/data/VP7/all_images.mrcs 13626 14085 109.45 300 0.15 2
>>> header = []
>>> fin = open("data.star", 'r')
>>> factory, first_vals = read_header(fin, header)
>>> header
["_rlnImageName","_rlnDefocusU","_rlnDefocusV","_rlnDefocusAngle","_rlnVoltage","_rlnAmplitudeContrast","_rlnSphericalAberration"]
:Parameters:
filename : str or stream
Input filename or stream
header : list
List of strings overriding parsed header
data_found : bool
`data_` line has already been read
extra : dict
Unused keyword arguments
:Returns:
    header : list
            List of strings describing the header columns
    first_vals : list
            Values parsed from the first data line
'''
fin = open(filename, 'r') if isinstance(filename, str) else filename
try:
if not data_found:
while True: # Remove header comments
line = fin.readline()
if line == "": raise format_utility.ParseFormatError, "Not a star file or empty"
if len(line) >= 5 and line[:5] == "data_": break
if tablename is not None: tablename.append(line.strip())
while True:
line = fin.readline()
if line == "": raise format_utility.ParseFormatError, "Not a star file or empty"
line = line.strip()
if line != "": break
tmpheader = []
if line == "loop_":
_logger.debug("Found loop - header has labels")
while True:
line = fin.readline()
if line == "": raise format_utility.ParseFormatError, "Unexpected end of header"
if line[0] != "_": break
line = line.strip()
idx = line.find('#')
if idx != -1:
line = line[:idx].strip()
tmpheader.append(line[1:])
        while line == "" or line[0] == ';' or line[0] == '#':
line = fin.readline()
if line == "": raise format_utility.ParseFormatError, "Unexpected end of file"
line = line.strip()
first_vals = parse_line(line, extra.get('numeric'))
tot = len(first_vals)
else:
            while line == "" or line[0] == ';' or line[0] == '#':
line = fin.readline()
if line == "": raise format_utility.ParseFormatError, "Unexpected end of file"
line = line.strip()
first_vals = parse_line(line, extra.get('numeric'))
tot = len(first_vals)
tmpheader.extend(["column"+str(i+1) for i in xrange(tot)])
_logger.debug("create-header: "+str(header))
if isinstance(header, dict):
if len(header) == 0: raise ValueError, "Dictionary header cannot have zero elements"
for key, val in header.iteritems():
tmpheader[val] = key
elif len(header) == 0: header.extend(tmpheader)
if tot != len(header):
raise format_utility.ParseFormatError, "Header does not match the file: %d != %d -> %s -> %s -> %s"%(tot, len(header), line, str(first_vals), header)
if isinstance(filename, str): fin.close()
return header, first_vals
except:
fin.close()
raise
else:
fin.close()
raise format_utility.ParseFormatError, "Cannot parse header of Star document file - end of document"
def reader(filename, header=[], numeric=False, columns=None, **extra):
'''Creates a Star read iterator
.. sourcecode:: py
>>> import os
>>> os.system("more data.star")
data_images
loop_
_rlnImageName
_rlnDefocusU
_rlnDefocusV
_rlnDefocusAngle
_rlnVoltage
_rlnAmplitudeContrast
_rlnSphericalAberration
000001@/lmb/home/scheres/data/VP7/all_images.mrcs 13538 13985 109.45 300 0.15 2
000002@/lmb/home/scheres/data/VP7/all_images.mrcs 13293 13796 109.45 300 0.15 2
>>> header = []
>>> fin = open("data.star", 'r')
>>> factory, first_vals = read_header(fin, header)
>>> header
["_rlnImageName","_rlnDefocusU","_rlnDefocusV","_rlnDefocusAngle","_rlnVoltage","_rlnAmplitudeContrast","_rlnSphericalAberration"]
>>> [first_vals]+map(factory, reader(fin, header, numeric=True))
[ BasicTuple(rlnImageName="000001@/lmb/home/scheres/data/VP7/all_images.mrcs", rlnDefocusU=13538, rlnDefocusV=13985, rlnDefocusAngle=109.45, rlnVoltage=300, rlnAmplitudeContrast=0.15, rlnSphericalAberration=2),
BasicTuple(rlnImageName="000002@/lmb/home/scheres/data/VP7/all_images.mrcs", rlnDefocusU=13293, rlnDefocusV=13796, rlnDefocusAngle=109.45, rlnVoltage=300, rlnAmplitudeContrast=0.15, rlnSphericalAberration=2) ]
:Parameters:
filename : str or stream
Input filename or input stream
header : list
List of strings overriding parsed header
numeric : boolean
If true then convert string values to numeric
columns : list
List of columns to read otherwise None (all columns)
extra : dict
Unused keyword arguments
:Returns:
val : iterator
Star read iterator
'''
fin = open(filename, 'r') if isinstance(filename, str) else filename
try:
while True:
line = fin.readline()
if line == "": break
line = line.strip()
if line[:5] == 'data_': raise format_utility.MultipleEntryException, line
if line == "" or line[0] == ';' or line[0] == '#': continue
yield parse_line(line, numeric, columns, len(header))
except format_utility.MultipleEntryException, exp:
raise exp
except:
fin.close()
else:
fin.close()
def parse_line(line, numeric=False, columns=None, hlen=None):
''' Parse a line of values in the CSV format
>>> parse_line("000001@/lmb/home/scheres/data/VP7/all_images.mrcs 13538 13985 109.45 300 0.15 2", True)
["000001@/lmb/home/scheres/data/VP7/all_images.mrcs", 13538, 13985, 109.45, 300, 0.15, 2]
:Parameters:
line : str
String to parse
numeric : boolean
If true then convert string values to numeric
columns : list
List of columns to read otherwise None (all columns)
hlen : int
Number of elements in the header, optional
:Returns:
val : list
List of values parsed from input line
'''
vals = line.split()
if hlen is not None and hlen != len(vals):
raise format_utility.ParseFormatError, "Header length does not match values: "+str(hlen)+" != "+str(len(vals))+" --> "+str(vals)
    if columns is not None: vals = [vals[i] for i in columns]  # a plain list cannot be indexed by a list of column indices
if numeric: return [format_utility.convert(v) for v in vals]
return vals
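# Example (sketch) of the columns filter: keep only indices 0 and 2, with
# numeric conversion applied to the surviving values.
#
#     parse_line("img.mrcs 13538 13985", numeric=True, columns=[0, 2])
#     # -> ['img.mrcs', 13985]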
############################################################################################################
# Write format #
############################################################################################################
def write_header(filename, values, mode, header, tag="", blockcode="images", **extra):
'''Write a comma separated value (Star) header
.. sourcecode:: py
>>> BasicTuple = namedtuple("BasicTuple", "id,select,peak")
>>> values = [ BasicTuple("1/1", 1, 0.00025182), BasicTuple("1/2", 1, 0.00023578) ]
>>> write_header("data.star", values)
>>> import os
>>> os.system("more data.star")
data_images
loop_
_rlnImageName
_rlnDefocusU
_rlnDefocusV
_rlnDefocusAngle
_rlnVoltage
_rlnAmplitudeContrast
_rlnSphericalAberration
:Parameters:
filename : str or stream
Output filename or stream
values : container
Value container such as a list or an ndarray
mode : str
Write mode - if 'a', do not write header
header : list
List of strings describing columns in data
tag : str
Tag for each header value, e.g. tag=rln
blockcode : str
Label for the data block
extra : dict
Unused keyword arguments
:Returns:
header : list
List of strings describing columns in data
'''
if mode != 'a':
fout = open(filename, 'w') if isinstance(filename, str) else filename
fout.write('data_'+blockcode+'\n')
fout.write('loop_\n')
for h in header:
fout.write("_"+tag+h+'\n')
if isinstance(filename, str): fout.close()
return header
def write_values(filename, values, star_separator=' ', **extra):
'''Write comma separated value (Star) values
.. sourcecode:: py
>>> BasicTuple = namedtuple("BasicTuple", "id,select,peak")
>>> values = [ BasicTuple(rlnImageName="000001@/lmb/home/scheres/data/VP7/all_images.mrcs", rlnDefocusU=13538, rlnDefocusV=13985, rlnDefocusAngle=109.45, rlnVoltage=300, rlnAmplitudeContrast=0.15, rlnSphericalAberration=2),
BasicTuple(rlnImageName="000002@/lmb/home/scheres/data/VP7/all_images.mrcs", rlnDefocusU=13293, rlnDefocusV=13796, rlnDefocusAngle=109.45, rlnVoltage=300, rlnAmplitudeContrast=0.15, rlnSphericalAberration=2) ]
>>> write_values("data.star", values)
>>> import os
>>> os.system("more data.star")
000001@/lmb/home/scheres/data/VP7/all_images.mrcs 13538 13985 109.45 300 0.15 2
000002@/lmb/home/scheres/data/VP7/all_images.mrcs 13293 13796 109.45 300 0.15 2
:Parameters:
filename : str or stream
Output filename or stream
values : container
Value container such as a list or an ndarray
extra : dict
Unused keyword arguments
'''
fout = open(filename, 'w') if isinstance(filename, str) else filename
for v in values:
        fout.write(star_separator.join(v)+"\n")
if isinstance(filename, str): fout.close()
def float_format():
''' Format for a floating point number
:Returns:
return_val : str
%11g
'''
return "%11g"
############################################################################################################
# Extension and Filters #
############################################################################################################
def extension():
'''Get extension of Star format
:Returns:
val : str
File extension - star
'''
return "star"
def filter():
'''Get filter of Star format
:Returns:
val : str
File filter - Star (\*.star)
'''
return "Star (*.star)"
|
ezralanglois/arachnid
|
arachnid/core/metadata/formats/star.py
|
Python
|
gpl-2.0
| 12,950
|
#!/usr/bin/python
import requests
class ImageDownloader:
def __init__(self, destination):
self.destination = destination
    def download(self, url, name):
        # Stream the remote image to disk; return the local path, or None on failure.
        r = requests.get(url, stream=True)
        if r.status_code == 200:
            try:
                with open("%s%s" % (self.destination, name), 'wb') as f:
                    for chunk in r.iter_content():
                        f.write(chunk)
            except Exception:
                # Any download or write error yields None rather than raising
                return None
        else:
            return None
        return "%s%s" % (self.destination, name)
if __name__ == "__main__":
print "save image from remote url to local folder"
image_downloader = ImageDownloader('/tmp/')
image_local_path = image_downloader.download(
'http://ecx.images-amazon.com/images/I/51DU8kmcx1L._SP160,160,0,T_.jpg',
'black_ice.jpg')
if image_local_path is not None:
print "Image saved to : %s" % image_local_path
else:
print "Error"
|
Ganapati/DjangoZik
|
infos_grabber/imageDownloader.py
|
Python
|
gpl-2.0
| 975
|
"""
Settings
--------
Change a user's password/email or delete an account.
"""
from Acquisition import aq_inner
from euphorie.client import MessageFactory as _
from euphorie.client.model import Account
from euphorie.client.model import AccountChangeRequest
from euphorie.client.model import get_current_account
from euphorie.client.utils import CreateEmailTo
from euphorie.client.utils import randomString
from plone import api
from plone.autoform import directives
from plone.autoform.form import AutoExtensibleForm
from plone.supermodel import model
from Products.CMFCore.utils import getToolByName
from Products.Five import BrowserView
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from Products.MailHost.MailHost import MailHostError
from Products.statusmessages.interfaces import IStatusMessage
from six.moves.urllib.parse import urlencode
from z3c.form import button
from z3c.form import form
from z3c.form.interfaces import WidgetActionExecutionError
from z3c.saconfig import Session
from z3c.schema.email import RFC822MailAddress
from zope import schema
from zope.i18n import translate
from zope.interface import directlyProvides
from zope.interface import Invalid
from zope.interface import invariant
import datetime
import logging
import smtplib
import socket
log = logging.getLogger(__name__)
class PasswordChangeSchema(model.Schema):
old_password = schema.Password(
title=_(u"label_old_password", default=u"Current Password"), required=True
)
directives.widget(old_password="z3c.form.browser.password.PasswordFieldWidget")
new_password = schema.Password(
title=_(u"label_new_password", default=u"Desired password")
)
directives.widget(new_password="z3c.form.browser.password.PasswordFieldWidget")
new_password_confirmation = schema.Password(
title=_(u"label_new_password_confirmation", default=u"Again password")
)
directives.widget(
new_password_confirmation="z3c.form.browser.password.PasswordFieldWidget"
)
@invariant
def validate_same_value(data):
if data.new_password != data.new_password_confirmation:
raise Invalid(_("Password doesn't compare with confirmation value"))
class AccountDeleteSchema(model.Schema):
password = schema.Password(
title=_(u"Your password for confirmation"), required=True
)
directives.widget(password="z3c.form.browser.password.PasswordFieldWidget")
class EmailChangeSchema(model.Schema):
loginname = RFC822MailAddress(title=_(u"Email address/account name"), required=True)
directives.widget("loginname", type="email")
password = schema.Password(
title=_(u"Your password for confirmation"), required=True
)
directives.widget(password="z3c.form.browser.password.PasswordFieldWidget")
class AccountSettings(AutoExtensibleForm, form.Form):
"""View name: @@account-settings"""
template = ViewPageTemplateFile("templates/account-settings.pt")
schema = PasswordChangeSchema
ignoreContext = True
label = _(u"title_change_password", default=u"Change password")
def updateWidgets(self):
super(AccountSettings, self).updateWidgets()
self.widgets["old_password"].addClass("password")
self.widgets["new_password"].addClass("password")
@button.buttonAndHandler(_(u"Save changes"), name="save")
def handleSave(self, action):
flash = IStatusMessage(self.request).addStatusMessage
(data, errors) = self.extractData()
if errors:
for error in errors:
flash(error.message, "notice")
return
user = get_current_account()
if not data["new_password"]:
flash(_(u"There were no changes to be saved."), "notice")
return
login_view = api.content.get_view(
name="login",
context=self.context,
request=self.request,
)
error = login_view.check_password_policy(data["new_password"])
if error:
raise WidgetActionExecutionError("new_password", Invalid(error))
if not user.verify_password(data["old_password"]):
raise WidgetActionExecutionError(
"old_password", Invalid(_(u"Invalid password"))
)
user.password = data["new_password"]
flash(_(u"Your password was successfully changed."), "success")
@button.buttonAndHandler(_("button_cancel", default=u"Cancel"), name="cancel")
def handleCancel(self, action):
self.request.response.redirect(self.request.client.absolute_url())
class DeleteAccount(AutoExtensibleForm, form.Form):
""""View name: @@account-delete"""
template = ViewPageTemplateFile("templates/account-delete.pt")
schema = AccountDeleteSchema
ignoreContext = True
label = _(u"title_account_delete", default=u"Delete account")
def updateWidgets(self):
super(DeleteAccount, self).updateWidgets()
self.widgets["password"].addClass("password")
def logout(self):
pas = getToolByName(self.context, "acl_users")
pas.resetCredentials(self.request, self.request.response)
@button.buttonAndHandler(_(u"Delete account"), name="delete")
def handleDelete(self, action):
(data, errors) = self.extractData()
if errors:
return
user = get_current_account()
if not user.verify_password(data["password"]):
raise WidgetActionExecutionError(
"password", Invalid(_(u"Invalid password"))
)
Session.delete(user)
self.logout()
self.request.response.redirect(self.request.client.absolute_url())
@button.buttonAndHandler(_("button_cancel", default=u"Cancel"), name="cancel")
def handleCancel(self, action):
self.request.response.redirect(self.request.client.absolute_url())
class NewEmail(AutoExtensibleForm, form.Form):
template = ViewPageTemplateFile("templates/new-email.pt")
schema = EmailChangeSchema
email_template = ViewPageTemplateFile("templates/confirm-email.pt")
ignoreContext = True
label = _(u"title_account_settings", default=u"Account settings")
@property
def email_from_name(self):
return api.portal.get_registry_record("plone.email_from_name")
@property
def email_from_address(self):
return api.portal.get_registry_record("plone.email_from_address")
def updateFields(self):
super(NewEmail, self).updateFields()
self.fields["password"].ignoreContext = True
def updateWidgets(self):
super(NewEmail, self).updateWidgets()
self.widgets["password"].addClass("password")
def getContent(self):
user = get_current_account()
directlyProvides(user, EmailChangeSchema)
return user
def initiateRequest(self, account, login):
flash = IStatusMessage(self.request).addStatusMessage
# Make it work when acl_users is in Memcached: We need to fetch the
# account again, to prevent DetachedInstanceError
account_query = Session.query(Account).filter(Account.id == account.id)
if not account_query.count():
log.error("Account could not be fetched")
flash(_(u"An error occured while sending the confirmation email."), "error")
return False
account = account_query.one()
if account.change_request is None:
account.change_request = AccountChangeRequest()
account.change_request.id = randomString()
account.change_request.expires = datetime.datetime.now() + datetime.timedelta(
days=7
)
account.change_request.value = login
client_url = self.request.client.absolute_url()
confirm_url = "%s/confirm-change?%s" % (
client_url,
urlencode({"key": account.change_request.id}),
)
mailhost = getToolByName(self.context, "MailHost")
body = self.email_template(
account=account,
new_login=login,
client_url=client_url,
confirm_url=confirm_url,
)
subject = translate(
_(u"Confirm OiRA email address change"), context=self.request
)
mail = CreateEmailTo(
self.email_from_name, self.email_from_address, login, subject, body
)
try:
mailhost.send(mail, login, self.email_from_address, immediate=True)
log.info("Sent email confirmation to %s", account.email)
except MailHostError as e:
log.error(
"MailHost error sending email confirmation to %s: %s", account.email, e
)
flash(_(u"An error occured while sending the confirmation email."), "error")
return False
except smtplib.SMTPException as e:
log.error(
"smtplib error sending the confirmation email to %s: %s",
account.email,
e,
)
flash(_(u"An error occured while sending the confirmation email."), "error")
return False
except socket.error as e:
log.error(
"Socket error sending confirmation email to %s: %s", account.email, e[1]
)
flash(_(u"An error occured while sending the confirmation email."), "error")
return False
return True
@button.buttonAndHandler(_(u"Save changes"), name="save")
def handleSave(self, action):
flash = IStatusMessage(self.request).addStatusMessage
(data, errors) = self.extractData()
if errors:
return
url = self.context.absolute_url()
user = get_current_account()
if not user.verify_password(data["password"]):
raise WidgetActionExecutionError(
"password", Invalid(_(u"Invalid password"))
)
settings_url = "%s/account-settings" % url
if not data["loginname"] or data["loginname"].strip() == user.loginname:
self.request.response.redirect(settings_url)
flash(_(u"There were no changes to be saved."), "notice")
return
login = data["loginname"].strip().lower()
existing = Session.query(Account.id).filter(Account.loginname == login)
if existing.count():
raise WidgetActionExecutionError(
"loginname", Invalid(_(u"This email address is not available."))
)
self.initiateRequest(user, login)
flash(
_(
"email_change_pending",
default=(
u"Please confirm your new email address by clicking on "
u"the link in the email that will be sent in a few "
u'minutes to "${email}". Please note that the new '
u"email address is also your new login name."
),
mapping={"email": data["loginname"]},
),
"warning",
)
self.request.response.redirect("%s/" % url)
@button.buttonAndHandler(_("button_cancel", default=u"Cancel"), name="cancel")
def handleCancel(self, action):
self.request.response.redirect(self.request.client.absolute_url())
class ChangeEmail(BrowserView):
def __call__(self):
url = "%s/" % aq_inner(self.context).absolute_url()
flash = IStatusMessage(self.request).addStatusMessage
key = self.request.get("key")
if key is None:
flash(_(u"This request could not be processed."), "warning")
self.request.response.redirect(url)
return
request = Session.query(AccountChangeRequest).get(key)
if request is None:
flash(_(u"This request could not be processed."), "warning")
self.request.response.redirect(url)
return
request.account.loginname = request.value
Session.delete(request)
flash(_("Your email address has been updated."), "success")
self.request.response.redirect(url)
|
euphorie/Euphorie
|
src/euphorie/client/browser/settings.py
|
Python
|
gpl-2.0
| 12,114
|
"""templates.unix.restore.adjust.FSTab Module"""
import cairn
def getClass():
return FSTab()
class FSTab(object):
def run(self, sysdef):
return True
|
redshodan/cairn
|
src/python/cairn/sysdefs/templates/unix/restore/adjust/FSTab.py
|
Python
|
gpl-2.0
| 162
|
#!/usr/bin/env python
import sys
from PyQt4 import QtGui, QtCore, QtDeclarative
from mcc2.frontends.services.controllers import Controller
from mcc2.frontends.services.models import ServiceModel
from mcc2.frontends.services.proxy import ProxyServiceModel
class SystemServicesView(QtDeclarative.QDeclarativeView):
def __init__(self, parent=None):
QtDeclarative.QDeclarativeView.__init__(self, parent)
self.proxyServiceModel = ProxyServiceModel(parent=self)
self.serviceModel = ServiceModel(self.proxyServiceModel)
self.serviceModel.populate()
self.proxyServiceModel.setSourceModel(self.serviceModel)
self.controller = Controller(self)
self.context = self.rootContext()
self.context.setContextProperty('controller', self.controller)
self.context.setContextProperty('serviceModel', self.proxyServiceModel)
self.setSource(QtCore.QUrl('/usr/share/mandriva/mcc2/frontends/services/views/SystemServices.qml'))
self.setResizeMode(QtDeclarative.QDeclarativeView.SizeRootObjectToView)
self.setWindowTitle('Mandriva Control Center - System Services')
def start():
app = QtGui.QApplication(sys.argv)
locale = QtCore.QLocale.system()
translator = QtCore.QTranslator()
i18n_file = 'SystemServices_' + locale.name() + '.qm'
i18n_path = '/usr/share/mandriva/mcc2/frontends/services/views/i18n/'
if (translator.load(i18n_file, i18n_path)):
app.installTranslator(translator)
view = SystemServicesView()
view.show()
app.exec_()
if __name__ == '__main__':
start()
|
wiliamsouza/mandriva-control-center
|
mcc2/frontends/services/services.py
|
Python
|
gpl-2.0
| 1,611
|
__author__ = 'roelvdberg@gmail.com'
import unittest
import lxml.etree as etree
import crawler.webpage
def fetch(self, url=None, download=True, *args, **kwargs):
"""
Fetches the content of a file, based on a filepath(url).
Changes fetch so that it loads a file from disk instead of from url.
:param url: the path from which content will be downloaded.
:param download: default: True, if set to False, the url content will
not be downloaded. The parse method will look at the html content
given on initialization.
"""
if download and not self.html:
if url is None:
url = self.url
with open(url, 'r') as response:
self.html = response.read()
self.parse(*args, **kwargs)
crawler.webpage.WebpageRaw.fetch = fetch
crawler.webpage.Webpage.fetch = fetch
crawler.webpage.Links.fetch = fetch
import crawler.validate as validate
def add_links(self, link_container, depth=0, base_url=None):
"""
Add a list of urls to self at a certain depth.
:param link_container: list of urls
:param depth: depth at which the urls have been harvested
:param base_url: base at which the urls have been harvested
"""
if not base_url:
base_url = self.base[0]
try:
for url_ in link_container.links:
if "#" in url_:
url_ = url_.split('#')[0]
if not len(url_):
continue
if not validate.url_explicit(url_):
continue
self.add(url_, depth)
except AttributeError:
pass
import logging
import threading
import dateutil.parser
try:
import webpage
from crawl import BaseUrl
import robot
import model
except ImportError:
    import crawler.webpage as webpage  # rebind so the default argument below still resolves
    from crawler.crawl import BaseUrl
    import crawler.robot as robot
    import crawler.model as model
LOG_FILENAME = "testlog.log"
# setup logger
logger = logging.getLogger(__name__)
logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
print_logger = logging.StreamHandler()
print_logger.setLevel(logging.DEBUG)
logger.addHandler(print_logger)
def init(self, base, link_queue, historic_links, page=webpage.WebpageRaw,
base_url=None, depth=0, database_lock=None):
"""
:param base: base url string .
:param link_queue: queue from base url.
:param historic_links: historic links from base url.
:param page: WebPage class or one of its children.
:param base_url: BaseUrl object that at least contains this website.
:param depth: crawl depth of this website.
"""
if not database_lock:
self.database_lock = threading.RLock()
else:
self.database_lock = database_lock
self.session = model.Session()
self.base = base
self.has_content = True
self.robot_txt = robot.Txt('/'.join([base, 'robots.txt']))
self.robot_txt.read()
if base_url:
self.base_url = base_url
else:
self.base_url = BaseUrl(base, self.database_lock)
_, _, links = self.base_url.base_queue.get()
self.links = link_queue
self.depth = depth
try:
print('trying to add sitemap')
            logger.debug('CHECK THIS: ' + str(self.robot_txt.sitemap.links))
print('at least I tried', self.base)
for i, link in enumerate(self.robot_txt.sitemap.links):
if self.robot_txt.sitemap.xml:
try:
site = self.session.query(model.Webpage).filter_by(
url=link).order_by(
model.Webpage.crawl_modified).all()[-1]
modified = dateutil.parser.parse(
self.robot_txt.sitemap.modified_time[i])
if site.crawl_modified > modified:
with base_url.lock:
self.links.put(link)
except IndexError:
with base_url.lock:
self.links.put(link)
else:
with base_url.lock:
self.links.put(link)
with base_url.lock:
historic_links += self.robot_txt.sitemap.links
except AttributeError:
logger.debug('SITEMAP NOT FOUND FOR: ' + self.base)
self.webpage = page
try:
import sitemap
import crawl
except ImportError:
import crawler.sitemap as sitemap
import crawler.crawl as crawl
crawl.Website.__init__ = init
def gz_file_fetch(self, url=None, download=None, *args, **kwargs):
if not url:
url = self.url
with open(url, 'rb') as f:
self.html = f.read()
self.parse()
sitemap.GunZip.fetch = gz_file_fetch
try:
from webpage import Webpage
from crawl import Website
from datetime_from_html import WebPageDateTime
from test.webpage_testcases import *
except ImportError:
from crawler.webpage import Webpage
from crawler.crawl import Website
from crawler.datetime_from_html import WebPageDateTime
from test.webpage_testcases import *
with open('sitemapindexlocation.txt', 'r') as f:
sitemapindexlocation = f.read()
sitemap_base = sitemapindexlocation.split('/')[:-1]
print(sitemapindexlocation)
sitemap.XmlSitemap.fetch = fetch
sitemap.XmlSitemapIndex.fetch = fetch
sitemap.XmlUrlset.fetch = fetch
class TestSitemap(unittest.TestCase):
sitemap = sitemap.Sitemap(url=sitemapindexlocation, base='')
def test_sitemap_load(self):
print(self.sitemap.links)
#
# def test_agent(self):
# agent = self.webpage.agent
# self.assertEqual(2, len(agent))
# self.assertTrue('User-Agent' in agent[1].keys())
# self.assertIsInstance(agent[0], bytes)
#
# TEST_URL = 'http://python.org/'
#
#
#
# class TestBaseFetcher(unittest.TestCase):
# webpage = Webpage(TEST_URL)
#
# def test_agent(self):
# agent = self.webpage.agent
# self.assertEqual(2, len(agent))
# self.assertTrue('User-Agent' in agent[1].keys())
# self.assertIsInstance(agent[0], bytes)
#
# class TestWebsite(unittest.TestCase):
#
# def test_parse(self):
# pass
#
# class TestDateTime(unittest.TestCase):
#
# def setUp(self):
# self.papers = papers
# self.paper_trees = [(html, etree.HTML(html))for html in self.papers]
# self.wpdt = WebPageDateTime()
#
# def test_dt(self):
# for html, htmltree in self.paper_trees:
# self.wpdt.method(html, htmltree)
#
# def tearDown(self):
# pass
if __name__ == '__main__':
unittest.main()
# x = TestDateTime
# x.setUp()
# x.test()
|
RoelvandenBerg/nlnieuwscrawler
|
test/test_crawl.py
|
Python
|
gpl-2.0
| 6,577
|
#!/usr/bin/env python
from numpy import random
from shogun.Classifier import LibSVM
from shogun.Features import RealFeatures, Labels
from shogun.Kernel import LinearKernel
num_feats=23
num_vec=42
scale=2.1
size_cache=10
C=0.017
epsilon=1e-5
tube_epsilon=1e-2
svm=LibSVM()
svm.set_C(C, C)
svm.set_epsilon(epsilon)
svm.set_tube_epsilon(tube_epsilon)
for i in xrange(3):
data_train=random.rand(num_feats, num_vec)
data_test=random.rand(num_feats, num_vec)
feats_train=RealFeatures(data_train)
feats_test=RealFeatures(data_test)
labels=Labels(random.rand(num_vec).round()*2-1)
svm.set_kernel(LinearKernel(size_cache, scale))
svm.set_labels(labels)
kernel=svm.get_kernel()
print "kernel cache size: %s" % (kernel.get_cache_size())
kernel.init(feats_test, feats_test)
svm.train()
kernel.init(feats_train, feats_test)
print svm.apply().get_labels()
#kernel.remove_lhs_and_rhs()
#import pdb
#pdb.set_trace()
|
ratschlab/ASP
|
testsuite/python_modular/test_svm_kernel_multiple.py
|
Python
|
gpl-2.0
| 929
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# These are unit tests which don't need a MySQL database. Tests which need to
# talk to external services belong in the IntegrationTests subdir.
import errno
import gzip
import os
import unittest
from tempfile import mkdtemp
import pkg_resources
from lxml import etree
from shutil import copy, rmtree
from sqlalchemy.dialects.mysql.base import MySQLDialect
from sqlalchemy.dialects.sqlite.base import SQLiteDialect
from sqlalchemy.schema import MetaData, Table, Column
from sqlalchemy.types import Integer, Unicode
from turbogears.config import update
from bkr.server.model.sql import ConditionalInsert
from bkr.server.model.tasklibrary import TaskLibrary
class ConditionalInsertTest(unittest.TestCase):
# We want ConditionalInsert to work with both MySQL (for real code) and
# SQLite (for unit tests) so each test case needs to check both versions of
# the compiled statement.
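    # Typical runtime usage (sketch; the session and table are hypothetical):
    # insert a row only when no row with the same unique value already exists:
    #
    #     session.execute(ConditionalInsert(table, {table.c.name: u'asdf'}))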
def test_unique_params_only(self):
metadata = MetaData()
table = Table('table', metadata,
Column('id', Integer, primary_key=True),
Column('name', Unicode(16), nullable=False, unique=True),
)
clause = ConditionalInsert(table, {table.c.name: 'asdf'})
        # there is a bug upstream in pylint so we have to disable it for
        # SQLAlchemy 0.9.
# https://bitbucket.org/logilab/astroid/issue/39/support-for-sqlalchemy
# pylint: disable=E1120
compiled = clause.compile(dialect=MySQLDialect())
self.assertEquals(str(compiled),
'INSERT INTO `table` (name)\n'
'SELECT %s\n'
'FROM DUAL\n'
'WHERE NOT (EXISTS (SELECT 1 \n'
'FROM `table` \n'
'WHERE `table`.name = %s FOR UPDATE))')
self.assertEquals(compiled.positiontup, ['name', 'name_1'])
self.assertEquals(compiled.params, {'name': 'asdf', 'name_1': 'asdf'})
# pylint: disable=E1120
compiled = clause.compile(dialect=SQLiteDialect())
self.assertEquals(str(compiled),
'INSERT INTO "table" (name)\n'
'SELECT ?\n'
'WHERE NOT (EXISTS (SELECT 1 \n'
'FROM "table" \n'
'WHERE "table".name = ?))')
self.assertEquals(compiled.positiontup, ['name', 'name_1'])
self.assertEquals(compiled.params, {'name': 'asdf', 'name_1': 'asdf'})
def test_with_extra_params(self):
metadata = MetaData()
table = Table('table', metadata,
Column('id', Integer, primary_key=True),
Column('name', Unicode(16), nullable=False, unique=True),
Column('extra', Unicode(16), nullable=False),
)
clause = ConditionalInsert(table, {table.c.name: 'asdf'},
{table.c.extra: 'something'})
# pylint: disable=E1120
compiled = clause.compile(dialect=MySQLDialect())
self.assertEquals(str(compiled),
'INSERT INTO `table` (name, extra)\n'
'SELECT %s, %s\n'
'FROM DUAL\n'
'WHERE NOT (EXISTS (SELECT 1 \n'
'FROM `table` \n'
'WHERE `table`.name = %s FOR UPDATE))')
self.assertEquals(compiled.positiontup, ['name', 'extra', 'name_1'])
self.assertEquals(compiled.params, {'name': 'asdf',
'extra': 'something', 'name_1': 'asdf'})
# pylint: disable=E1120
compiled = clause.compile(dialect=SQLiteDialect())
self.assertEquals(str(compiled),
'INSERT INTO "table" (name, extra)\n'
'SELECT ?, ?\n'
'WHERE NOT (EXISTS (SELECT 1 \n'
'FROM "table" \n'
'WHERE "table".name = ?))')
self.assertEquals(compiled.positiontup, ['name', 'extra', 'name_1'])
self.assertEquals(compiled.params, {'name': 'asdf',
'extra': 'something', 'name_1': 'asdf'})
class TaskLibraryTest(unittest.TestCase):
def setUp(self):
test_rpmspath = mkdtemp(prefix='beaker-task-library-test-rpms')
self.addCleanup(rmtree, test_rpmspath)
# hack to override descriptor for rpmspath
class TestTaskLibrary(TaskLibrary):
rpmspath = test_rpmspath
self.tasklibrary = TestTaskLibrary()
self.assertEquals(self.tasklibrary.rpmspath, test_rpmspath)
def tearDown(self):
# Make sure sane value is left after test run
update({'beaker.createrepo_command': 'createrepo_c'})
def _hash_repodata_file(self, content, total=0):
"""Returns an int type representation of the XML contents.
Ordering is not important, as the int representations (returned
via the hash() function) are all just added together.
"""
for child in content.getchildren():
for pck in child.keys():
pck_k_v_text = '%s%s' % (pck.strip(), child.get(pck).strip())
total += hash(pck_k_v_text)
if child.text is not None:
child_text = child.text.strip()
if child_text:
total += hash(child_text)
total += hash(child.tag.strip())
            # Hash each subtree independently; seeding the recursion with the
            # running total would double-count it and make the result order-dependent.
            total += self._hash_repodata_file(child)
return total
def _assert_xml_equivalence(self, file1, file2):
file1_content = etree.fromstring(file1.read().strip())
file2_content = etree.fromstring(file2.read().strip())
hashed_file1 = self._hash_repodata_file(file1_content)
hashed_file2 = self._hash_repodata_file(file2_content)
# Assert the contents of the files are indeed the same
self.assertEquals(hashed_file2, hashed_file1)
def test_createrepo_c_command(self):
update({'beaker.createrepo_command': 'createrepo_c'})
rpm_file = pkg_resources.resource_filename(
'bkr.server.tests',
'tmp-distribution-beaker-task_test-2.0-5.noarch.rpm')
copy(rpm_file, self.tasklibrary.rpmspath)
try:
self.tasklibrary.update_repo()
except OSError as e:
            if e.errno == errno.ENOENT:
raise unittest.SkipTest('Could not find createrepo_c')
def test_invalid_createrepo_command_fail(self):
update({'beaker.createrepo_command': 'iamnotarealcommand'})
rpm_file = pkg_resources.resource_filename(
'bkr.server.tests',
'tmp-distribution-beaker-task_test-2.0-5.noarch.rpm')
copy(rpm_file, self.tasklibrary.rpmspath)
with self.assertRaises(OSError):
self.tasklibrary.update_repo()
def test_update_repo(self):
rpm_file = pkg_resources.resource_filename(
'bkr.server.tests',
'tmp-distribution-beaker-task_test-2.0-5.noarch.rpm')
copy(rpm_file, self.tasklibrary.rpmspath)
self.tasklibrary.update_repo()
# https://bugzilla.redhat.com/show_bug.cgi?id=1101402
def test_update_repo_cleans_stale_dot_repodata(self):
# createrepo_c refuses to run if .repodata exists
update({'beaker.createrepo_command': 'createrepo_c'})
os.mkdir(os.path.join(self.tasklibrary.rpmspath, '.repodata'))
self.tasklibrary.update_repo()
def test_unlink_rpm(self):
rpm_file = pkg_resources.resource_filename(
'bkr.server.tests',
'tmp-distribution-beaker-task_test-2.0-5.noarch.rpm')
copy(rpm_file, self.tasklibrary.rpmspath)
        self.tasklibrary.unlink_rpm('tmp-distribution-beaker-task_test-2.0-5.noarch.rpm')
self.assertTrue(not os.path.exists(
os.path.join(self.tasklibrary.rpmspath,
'tmp-distribution-beaker-task_test-2.0-5.noarch.rpm')))
# This tests that it does not throw an exception
# if the file has been removed
self.tasklibrary.unlink_rpm('tmp-distribution-beaker-task_test-2.0-5.noarch.rpm')
def test_make_snapshot_repo(self):
recipe_repo_parent = mkdtemp(prefix='beaker-test_make_snapshot_repo')
self.addCleanup(rmtree, recipe_repo_parent)
rpm_file = pkg_resources.resource_filename(
'bkr.server.tests',
'tmp-distribution-beaker-task_test-2.0-5.noarch.rpm')
copy(rpm_file, self.tasklibrary.rpmspath)
repo_dir = os.path.join(self.tasklibrary.rpmspath, 'repodata')
# Assert we don't already have a repodata folder
self.assertFalse(os.path.exists(repo_dir))
self.tasklibrary.make_snapshot_repo(recipe_repo_parent)
# It should now be there in the rpmspath
self.assertTrue(os.path.exists(repo_dir))
repo_dir_list = os.listdir(repo_dir)
recipe_repo_dir = os.path.join(recipe_repo_parent, 'repodata')
recipe_repo_dir_list = os.listdir(recipe_repo_dir)
# Assert the contents at least appear to be the same
self.assertItemsEqual(recipe_repo_dir_list, repo_dir_list)
# Now test the actual content
for filename in repo_dir_list:
if filename.endswith(".gz"):
open_file = gzip.open
elif filename.endswith(".xml"):
open_file = open
else:
raise AssertionError('Expected gzip or xml, not %r' % filename)
repo_filename = os.path.join(repo_dir, filename)
recipe_repo_filename = os.path.join(recipe_repo_dir, filename)
repo_file = open_file(repo_filename)
recipe_repo_file = open_file(recipe_repo_filename)
self._assert_xml_equivalence(repo_file, recipe_repo_file)
|
beaker-project/beaker
|
Server/bkr/server/tests/test_model.py
|
Python
|
gpl-2.0
| 10,199
|
#! /usr/bin/env python
"""! @package input
"""
from utils.xml_format import parse_xml
from utils.error_handling import InputError
def compute_name(object_name):
"""! @brief Compute attribute/module name from object name as follows: 'ObjectName' attribute/module name is 'object_name'.
@param object_name String containing name of the object, e.g. 'LexicalEntry'.
@return The corresponding attribute/module name, e.g. 'lexical_entry'.
"""
name = ''
for c in object_name:
# Detect first letter of a word: it is an uppercase letter
if c.isupper():
# Add an underscore separator between words if needed
if name != '':
name += '_'
name += c.lower()
return name
def factory(object_name, attributes):
"""! @brief This function is an object factory. Indeed, from an object name and its attributes, it creates a Python object and sets its attributes.
@param object_name A Python string containing the object name, for instance 'LexicalEntry'.
@param attributes A Python dictionary containing pairs of attribute name (as a Python string) and value, for instance {'partOfSpeech': 'n'}.
"""
# Compute module name from object name
module_name = compute_name(object_name)
# Find the package in which the object class is defined, in order to be able to import the correct Python module
import sys, os, glob
running_path = sys.path[0]
if os.name == 'posix':
# Unix-style path
separator = '/'
else:
# Windows-style path
separator = '\\'
full_path = glob.glob(running_path + separator + ".." + separator + ".." + separator + "pylmflib" + separator + "*" + separator + module_name + ".py")
if len(full_path) < 1:
# No file with this name exists
raise InputError(module_name + ".py", "No file named '%s' exists in the library. It is not allowed, so please solve this issue by renaming files correctly." % (module_name + ".py"))
elif len(full_path) > 1:
# Several files with this name exist
raise InputError(module_name + ".py", "Several files named '%s' exist in the library. It is not allowed, so please solve this issue by renaming files correctly. Here is the list of found files with this name: %s" % ((module_name + ".py"), str(full_path)))
# Retrieve package name from full path
package_name = full_path[0].split(separator)[-2]
# Import object module: "package.module"
    object_module = __import__(package_name + "." + module_name, fromlist=[object_name])  # a non-empty fromlist makes __import__ return the submodule itself
# Retrieve object class from module
object_class = getattr(object_module, object_name)
# Create an instance of it
instance = object_class()
# Set class attributes
for attribute in attributes.iteritems():
setattr(instance, attribute[0], attribute[1])
return instance
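# Usage sketch (attribute values taken from the docstring example): build an
# LMF object by class name and set its attributes in one call.
#
#     entry = factory('LexicalEntry', {'partOfSpeech': 'n'})
#     entry.partOfSpeech  # -> 'n'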
def xml_lmf_read(filename):
"""! @brief Read an XML LMF file.
@param filename The name of the XML LMF file to read with full path, for instance 'user/input.xml'.
@return A Lexical Resource instance containing all lexicons.
"""
root = parse_xml(filename)
# Create an object instance corresponding to the XML root element
root_instance = factory(root.tag, root.attrib)
# Parse XML sub-elements and create instance childs
get_sub_elements(root_instance, root)
return root_instance
def get_sub_elements(instance, element):
"""! @brief This function recursively parses the given XML element and creates corresponding LMF instances with their attributes.
@param instance An LMF object instance.
@param element An XML element.
"""
for sub_element in element:
# XML elements "feat" are modelized by LMF class attributes
if sub_element.tag == "feat":
# "feat" elements have 2 XML attributes: one for LMF attribute name ("att"), a second for LMF attribute value ("val")
setattr(instance, sub_element.attrib["att"], sub_element.attrib["val"])
elif sub_element.tag == "a":
# "a" elements are HTML links => do not consider them
pass
else:
# Create LMF instances corresponding to XML sub-elements
sub_instance = factory(sub_element.tag, sub_element.attrib)
# Root LMF object must own the child objects
attr_name = compute_name(sub_element.tag)
attr_value = getattr(instance, attr_name)
if type(attr_value) is list:
# If this attribute is a list, append the new value to the list
attr_value.append(sub_instance)
else:
# Simply set the value
setattr(instance, attr_name, sub_instance)
# Repeat the same operation recursively
get_sub_elements(sub_instance, sub_element)
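# Usage sketch (the filename is a placeholder, as in the docstring above):
# read a whole XML LMF file into a Lexical Resource object tree.
#
#     lexical_resource = xml_lmf_read('user/input.xml')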
|
buret/pylmflib
|
pylmflib/input/xml_lmf.py
|
Python
|
gpl-2.0
| 4,817
|
#!/usr/bin/env python
from __future__ import division
from numpy import *
import numpy.fft as fft
n=3
x=arange(2**n)
print x
#x_ = fft.fftshift(fft.fft(x))
x_ = fft.fft(x)
print x_
|
jornada/swifft
|
test_ft.py
|
Python
|
gpl-2.0
| 184
|
#!/usr/bin/python -tt
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
# (C) 2009 - Tim Lauridsen <timlau@fedoraproject.org>
# Yum Extender Constants
'''
Yum Extender Constants
'''
import os
import sys
import pango
import time
from xdg import BaseDirectory
# We want these lines, but don't want pylint to whine about the imports not being used
# pylint: disable-msg=W0611
from yumexbase import _, P_ # lint:ok
# pylint: enable-msg=W0611
from yum.constants import *
# Constant
__yumex_version__ = "3.0.17"
YUMEX_LOG = 'yumex.verbose'
FILTER_ACTIONS = {'updates' : 'u', 'available': 'i', 'installed' : 'r', \
'obsoletes' : 'o', 'downgrade' : 'do', 'reinstall' : 'ri', 'localinstall' : 'li'}
ACTIONS_FILTER = { 'u' : 'updates', 'i' : 'available', \
'r' : 'installed' , 'o' : 'obsoletes', \
'do' : 'downgrade', 'ri' : 'reinstall', 'li' : 'localinstall' }
# Paths
BIN_PATH = os.path.abspath(os.path.dirname(sys.argv[0]))
if BIN_PATH.endswith('/bin'):
IS_PROD = True
else:
IS_PROD = False
if IS_PROD:
MAIN_PATH = '/usr/share/yumex'
else:
MAIN_PATH = BIN_PATH
BUILDER_FILE = MAIN_PATH + '/yumex.glade'
BUILDER_PKGINST = MAIN_PATH + '/pkginst.glade'
if IS_PROD:
PIXMAPS_PATH = '/usr/share/pixmaps/yumex'
elif MAIN_PATH.endswith('test'):
PIXMAPS_PATH = MAIN_PATH + '/../../gfx'
else:
PIXMAPS_PATH = MAIN_PATH + '/../gfx'
OLD_CONF_FILE = os.environ['HOME'] + "/.yumex.conf"
CONF_DIR = BaseDirectory.save_config_path('yumex')
CONF_FILE = os.path.join(CONF_DIR,'yumex.conf')
TIMESTAMP_FILE = os.path.join(CONF_DIR,'update_timestamp.conf')
MIN_UPDATE_INTERVAL = 5
# icons
ICON_YUMEX = PIXMAPS_PATH + "/yumex-icon.png"
ICON_PACKAGES = PIXMAPS_PATH + '/button-packages.png'
ICON_GROUPS = PIXMAPS_PATH + '/button-group.png'
ICON_QUEUE = PIXMAPS_PATH + '/button-queue.png'
ICON_OUTPUT = PIXMAPS_PATH + '/button-output.png'
ICON_REPOS = PIXMAPS_PATH + '/button-repo.png'
ICON_HISTORY = PIXMAPS_PATH + '/button-history.png'
ICON_SPINNER = PIXMAPS_PATH + '/spinner.gif'
ICON_SMALL_SPINNER = PIXMAPS_PATH + '/spinner-small.gif'
ICON_TRAY_ERROR = PIXMAPS_PATH + '/tray-error.png'
ICON_TRAY_NO_UPDATES = PIXMAPS_PATH + '/tray-no-updates.png'
ICON_TRAY_UPDATES = PIXMAPS_PATH + '/tray-updates.png'
ICON_TRAY_WORKING = PIXMAPS_PATH + '/tray-working.png'
ICON_TRAY_INFO = PIXMAPS_PATH + '/tray-info.png'
# NOTE: The package filter radio buttons in the top of the package page
PKG_FILTERS_STRINGS = (_('updates'), _('available'), _('installed'))
PKG_FILTERS_ENUMS = ('updates', 'available', 'installed')
REPO_HIDE = ['source', 'debuginfo']
RECENT_LIMIT = time.time() - (3600 * 24 * 14)
# Max Window size
#gdkRootWindow = gtk._root_window()
#MAX_HEIGHT = gdkRootWindow.height
#MAX_WIDHT = gdkRootWindow.width
#DEFAULT_FONT = gdkRootWindow.get_pango_context().get_font_description()
# Fonts
XSMALL_FONT = pango.FontDescription("sans 6")
SMALL_FONT = pango.FontDescription("sans 8")
BIG_FONT = pango.FontDescription("sans 12")
# STRINGS
REPO_INFO_MAP = {
'repomd' : _("Downloading repository information for the %s repository"),
'primary' : _("Downloading Package information for the %s repository"),
'primary_db' : _("Downloading Package information for the %s repository"),
'filelists' : _("Downloading Filelist information for the %s repository"),
'filelists_db' : _("Downloading Filelist information for the %s repository"),
'other' : _("Downloading Changelog information for the %s repository"),
'other_db' : _("Downloading Changelog information for the %s repository"),
'group' : _("Downloading Group information for the %s repository"),
'metalink' : _("Downloading metalink information for the %s repository"),
'presto' : _("Downloading Delta update information for the %s repository"),
'prestodelta' : _("Downloading Delta update information for the %s repository"),
'updateinfo' : _("Downloading Update information for the %s repository")
}
TASK_PENDING = 1
TASK_RUNNING = 2
TASK_COMPLETE = 3
TASK_ICONS = {TASK_PENDING : 'gtk-media-stop',
TASK_RUNNING : 'gtk-refresh',
TASK_COMPLETE : 'gtk-yes' }
CATEGORY_AGE = [
('1', _('0 - 7 Days')),
('2', _('7 - 14 Days')),
('3', _('14 - 21 Days')),
('4', _('21 - 30 days')),
('5', _('30 - 90 days')),
('6', _('90+ days'))]
CATEGORY_SIZE = [
('1', '0 KB - 100 KB'),
('2', '100 KB - 1 MB'),
('3', '1 MB - 10 MB'),
('4', '10 MB - 50 MB'),
('5', '50+ MB')]
SIZE_RANGES = {
'1' : (0, 100 * 1024),
'2' : (100 * 1024, 1024 * 1024),
'3' : (1024 * 1024, 10 * 1024 * 1024),
'4' : (10 * 1024 * 1024, 50 * 1024 * 1024),
'5' : (50 * 1024 * 1024, 1024 * 1024 * 1024)
}
HISTORY_KEYS = ['True-Install', 'Install', 'Update', 'Erase', \
'Dep-Install', 'Reinstall', 'Obsoleted', 'Downgrade', \
'Updated', 'Downgraded', 'Obsoleting']
PACKAGE_LOAD_MSG = {
'all' : _('Getting all packages'),
'installed' : _('Getting installed packages'),
'available' : _('Getting available packages'),
'updates' : _('Getting available updates'),
'obsoletes' : _('Getting available obsoletes')
}
# RPM Completed action messages
RPM_ACTIONS = {
TS_UPDATE: _('%s is updated'),
TS_ERASE: _('%s is erased'),
TS_INSTALL: _('%s is installed'),
TS_TRUEINSTALL: _('%s is installed'),
TS_OBSOLETED: _('%s is obsoleted'),
TS_OBSOLETING: _('%s is installed'),
    TS_UPDATED: _('%s is cleaned up')
}
HISTORY_NEW_STATES = ['Update', 'Downgrade', 'Obsoleting']
HISTORY_OLD_STATES = ['Updated', 'Downgraded', 'Obsoleted']
HISTORY_UPDATE_STATES = ['Update', 'Downgrade', 'Updated', 'Downgraded']
HISTORY_SORT_ORDER = ['Install', 'True-Install', 'Reinstall', 'Update', 'Downgrade', 'Obsoleting', 'Obsoleted', 'Erase', 'Dep-Install' ]
HISTORY_STATE_LABLES = {
'Update' : _('Updated packages'),
'Downgrade' : _('Downgraded packages'),
'Obsoleting' : _('Obsoleting packages'),
'Obsoleted' : _('Obsoleted packages'),
'Erase' : _('Erased packages'),
'Install' : _('Installed packages'),
'True-Install' : _('Installed packages'),
'Dep-Install' : _('Installed for dependencies'),
'Reinstall' : _('Reinstalled packages')}
# Queue autocomplete lookup
QUEUE_COMMANDS = {
'ins' : 'install',
'era' : 'erase',
'upd' : 'update',
'rem' : 'remove',
'rei' : 'reinstall',
'dow' : 'downgrade'
}
QUEUE_PACKAGE_TYPES = {
'i' : 'install',
'u' : 'update',
'r' : 'remove',
'o' : 'obsolete',
'ri' : 'reinstall',
'do' : 'downgrade',
'li' : 'localinstall'
}
YUMEX_CMDLINE_CMDS = ['search', 'install', 'remove', 'update', 'downgrade', 'reinstall']
SEARCH_KEYS_VALUES = {
'name' : _("Name"),
'summary' : _("Summary"),
'description' : _("Description"),
'arch' : _("Arch")}
SEARCH_KEYS_ORDER = ['name', 'summary', 'description', 'arch']
# backend error messages
BACKEND_ERRMSG = {
5 : _("The Yum Extender backend failed because it could not exchange information\n"
"with the frontend (gui)\n\n"
"check /usr/tmp/yumex-dump-<date>-<time>.log for details\n\n"
"Try to restart Yum Extender and retry your actions")
}
|
timlau/yumex
|
src/yumexbase/constants.py
|
Python
|
gpl-2.0
| 7,935
|
#!/usr/bin/python
"""Preferences Editor for Terminator.
Load a UIBuilder config file, display it,
populate it with our current config, then optionally read that back out and
write it to a config file
"""
import os
from gi.repository import GObject, Gtk, Gdk
from .util import dbg, err
from . import config
from .keybindings import Keybindings, KeymapError
from .translation import _
from .encoding import TerminatorEncoding
from .terminator import Terminator
def color2hex(widget):
"""Pull the colour values out of a Gtk ColorPicker widget and return them
    as 8-bit hex values, since its default behaviour is to give 16-bit values"""
widcol = widget.get_color()
return('#%02x%02x%02x' % (widcol.red>>8, widcol.green>>8, widcol.blue>>8))
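# Worked example (pure arithmetic, no widget needed): a 16-bit Gdk colour
# component of 0xcc00 shifted right by 8 bits gives 0xcc, so the colour
# (red=0xcc00, green=0x0000, blue=0x0000) formats as '#cc0000'.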
# FIXME: We need to check that we have represented all of Config() below
class PrefsEditor:
"""Class implementing the various parts of the preferences editor"""
config = None
registry = None
keybindings = None
window = None
builder = None
layouteditor = None
previous_layout_selection = None
previous_profile_selection = None
colorschemevalues = {'black_on_yellow': 0,
'black_on_white': 1,
'grey_on_black': 2,
'green_on_black': 3,
'white_on_black': 4,
'orange_on_black': 5,
'ambience': 6,
'solarized_light': 7,
'solarized_dark': 8,
'custom': 9}
colourschemes = {'grey_on_black': ['#aaaaaa', '#000000'],
'black_on_yellow': ['#000000', '#ffffdd'],
'black_on_white': ['#000000', '#ffffff'],
'white_on_black': ['#ffffff', '#000000'],
'green_on_black': ['#00ff00', '#000000'],
'orange_on_black': ['#e53c00', '#000000'],
'ambience': ['#ffffff', '#300a24'],
'solarized_light': ['#657b83', '#fdf6e3'],
'solarized_dark': ['#839496', '#002b36']}
palettevalues = {'tango': 0,
'linux': 1,
'xterm': 2,
'rxvt': 3,
'ambience': 4,
'solarized': 5,
'custom': 6}
palettes = {'tango': '#000000:#cc0000:#4e9a06:#c4a000:#3465a4:\
#75507b:#06989a:#d3d7cf:#555753:#ef2929:#8ae234:#fce94f:#729fcf:\
#ad7fa8:#34e2e2:#eeeeec',
'linux': '#000000:#aa0000:#00aa00:#aa5500:#0000aa:\
#aa00aa:#00aaaa:#aaaaaa:#555555:#ff5555:#55ff55:#ffff55:#5555ff:\
#ff55ff:#55ffff:#ffffff',
'xterm': '#000000:#cd0000:#00cd00:#cdcd00:#1e90ff:\
#cd00cd:#00cdcd:#e5e5e5:#4c4c4c:#ff0000:#00ff00:#ffff00:#4682b4:\
#ff00ff:#00ffff:#ffffff',
'rxvt': '#000000:#cd0000:#00cd00:#cdcd00:#0000cd:\
#cd00cd:#00cdcd:#faebd7:#404040:#ff0000:#00ff00:#ffff00:#0000ff:\
#ff00ff:#00ffff:#ffffff',
'ambience': '#2e3436:#cc0000:#4e9a06:#c4a000:\
#3465a4:#75507b:#06989a:#d3d7cf:#555753:#ef2929:#8ae234:#fce94f:\
#729fcf:#ad7fa8:#34e2e2:#eeeeec',
'solarized': '#073642:#dc322f:#859900:#b58900:\
#268bd2:#d33682:#2aa198:#eee8d5:#002b36:#cb4b16:#586e75:#657b83:\
#839496:#6c71c4:#93a1a1:#fdf6e3'}
keybindingnames = { 'zoom_in' : 'Increase font size',
'zoom_out' : 'Decrease font size',
'zoom_normal' : 'Restore original font size',
'new_tab' : 'Create a new tab',
'cycle_next' : 'Focus the next terminal',
'cycle_prev' : 'Focus the previous terminal',
'go_next' : 'Focus the next terminal',
'go_prev' : 'Focus the previous terminal',
'go_up' : 'Focus the terminal above',
'go_down' : 'Focus the terminal below',
'go_left' : 'Focus the terminal left',
'go_right' : 'Focus the terminal right',
'rotate_cw' : 'Rotate terminals clockwise',
'rotate_ccw' : 'Rotate terminals counter-clockwise',
'split_horiz' : 'Split horizontally',
'split_vert' : 'Split vertically',
'close_term' : 'Close terminal',
'copy' : 'Copy selected text',
'paste' : 'Paste clipboard',
'toggle_scrollbar' : 'Show/Hide the scrollbar',
'search' : 'Search terminal scrollback',
'page_up' : 'Scroll upwards one page',
'page_down' : 'Scroll downwards one page',
'page_up_half' : 'Scroll upwards half a page',
'page_down_half' : 'Scroll downwards half a page',
'line_up' : 'Scroll upwards one line',
'line_down' : 'Scroll downwards one line',
'close_window' : 'Close window',
'resize_up' : 'Resize the terminal up',
'resize_down' : 'Resize the terminal down',
'resize_left' : 'Resize the terminal left',
'resize_right' : 'Resize the terminal right',
'move_tab_right' : 'Move the tab right',
'move_tab_left' : 'Move the tab left',
'toggle_zoom' : 'Maximise terminal',
'scaled_zoom' : 'Zoom terminal',
'next_tab' : 'Switch to the next tab',
'prev_tab' : 'Switch to the previous tab',
'switch_to_tab_1' : 'Switch to the first tab',
'switch_to_tab_2' : 'Switch to the second tab',
'switch_to_tab_3' : 'Switch to the third tab',
'switch_to_tab_4' : 'Switch to the fourth tab',
'switch_to_tab_5' : 'Switch to the fifth tab',
'switch_to_tab_6' : 'Switch to the sixth tab',
'switch_to_tab_7' : 'Switch to the seventh tab',
'switch_to_tab_8' : 'Switch to the eighth tab',
'switch_to_tab_9' : 'Switch to the ninth tab',
'switch_to_tab_10' : 'Switch to the tenth tab',
'full_screen' : 'Toggle fullscreen',
'reset' : 'Reset the terminal',
'reset_clear' : 'Reset and clear the terminal',
'group_all' : 'Group all terminals',
'group_all_toggle' : 'Group/Ungroup all terminals',
'ungroup_all' : 'Ungroup all terminals',
'group_tab' : 'Group terminals in tab',
'group_tab_toggle' : 'Group/Ungroup terminals in tab',
'ungroup_tab' : 'Ungroup terminals in tab',
'new_window' : 'Create a new window',
'new_terminator' : 'Spawn a new Terminator process',
'broadcast_off' : 'Don\'t broadcast key presses',
'broadcast_group' : 'Broadcast key presses to group',
'broadcast_all' : 'Broadcast key events to all',
'insert_number' : 'Insert terminal number',
'insert_padded' : 'Insert zero padded terminal number',
'edit_window_title': 'Edit window title',
'layout_launcher' : 'Open layout launcher window'
}
def __init__ (self, term):
self.config = config.Config()
self.config.base.reload()
self.term = term
self.builder = Gtk.Builder()
self.keybindings = Keybindings()
try:
            # Figure out where our library is on-disk so we can open our glade file
(head, _tail) = os.path.split(config.__file__)
librarypath = os.path.join(head, 'preferences.glade')
gladefile = open(librarypath, 'r')
gladedata = gladefile.read()
except Exception as ex:
print("Failed to find preferences.glade")
print(ex)
return
self.builder.add_from_string(gladedata)
self.window = self.builder.get_object('prefswin')
icon_theme = Gtk.IconTheme()
try:
icon = icon_theme.load_icon('terminator-preferences', 48, 0)
except (NameError, GObject.GError):
dbg('Unable to load 48px Terminator preferences icon')
icon = self.window.render_icon(Gtk.STOCK_DIALOG_INFO, Gtk.IconSize.BUTTON)
self.window.set_icon(icon)
self.layouteditor = LayoutEditor(self.builder)
self.builder.connect_signals(self)
self.layouteditor.prepare()
self.window.show_all()
try:
self.config.inhibit_save()
self.set_values()
except Exception as e:
err('Unable to set values: %s' % e)
self.config.uninhibit_save()
def on_closebutton_clicked(self, _button):
"""Close the window"""
terminator = Terminator()
terminator.reconfigure()
self.window.destroy()
del(self)
def set_values(self):
"""Update the preferences window with all the configuration from
Config()"""
guiget = self.builder.get_object
## Global tab
# Mouse focus
focus = self.config['focus']
active = 0
if focus == 'click':
active = 1
elif focus in ['sloppy', 'mouse']:
active = 2
widget = guiget('focuscombo')
widget.set_active(active)
# Terminal separator size
termsepsize = self.config['handle_size']
widget = guiget('handlesize')
widget.set_value(float(termsepsize))
# Window geometry hints
geomhint = self.config['geometry_hinting']
widget = guiget('wingeomcheck')
widget.set_active(geomhint)
# Window state
option = self.config['window_state']
if option == 'hidden':
active = 1
elif option == 'maximise':
active = 2
elif option == 'fullscreen':
active = 3
else:
active = 0
widget = guiget('winstatecombo')
widget.set_active(active)
# Window borders
widget = guiget('winbordercheck')
widget.set_active(not self.config['borderless'])
# Tab bar position
option = self.config['tab_position']
widget = guiget('tabposcombo')
if option == 'bottom':
active = 1
elif option == 'left':
active = 2
elif option == 'right':
active = 3
elif option == 'hidden':
active = 4
else:
active = 0
widget.set_active(active)
# scroll_tabbar
widget = guiget('scrolltabbarcheck')
widget.set_active(self.config['scroll_tabbar'])
# homogeneous_tabbar
widget = guiget('homogeneouscheck')
widget.set_active(self.config['homogeneous_tabbar'])
#Hide from taskbar
widget = guiget('hidefromtaskbcheck')
widget.set_active(self.config['hide_from_taskbar'])
#Always on top
widget = guiget('alwaysontopcheck')
widget.set_active(self.config['always_on_top'])
#Hide on lose focus
widget = guiget('hideonlosefocuscheck')
widget.set_active(self.config['hide_on_lose_focus'])
#Show on all workspaces
widget = guiget('stickycheck')
widget.set_active(self.config['sticky'])
#Hide size text from the title bar
widget = guiget('title_hide_sizetextcheck')
widget.set_active(self.config['title_hide_sizetext'])
#Always split with profile
widget = guiget('always_split_with_profile')
widget.set_active(self.config['always_split_with_profile'])
## Profile tab
# Populate the profile list
widget = guiget('profilelist')
liststore = widget.get_model()
profiles = self.config.list_profiles()
self.profileiters = {}
for profile in profiles:
if profile == 'default':
editable = False
else:
editable = True
self.profileiters[profile] = liststore.append([profile, editable])
selection = widget.get_selection()
selection.connect('changed', self.on_profile_selection_changed)
selection.select_iter(self.profileiters['default'])
## Layouts tab
widget = guiget('layoutlist')
liststore = widget.get_model()
layouts = self.config.list_layouts()
self.layoutiters = {}
for layout in layouts:
if layout == 'default':
editable = False
else:
editable = True
self.layoutiters[layout] = liststore.append([layout, editable])
selection = widget.get_selection()
selection.connect('changed', self.on_layout_selection_changed)
selection.select_iter(self.layoutiters['default'])
# Now set up the selection changed handler for the layout itself
widget = guiget('LayoutTreeView')
selection = widget.get_selection()
selection.connect('changed', self.on_layout_item_selection_changed)
## Keybindings tab
widget = guiget('keybindingtreeview')
liststore = widget.get_model()
liststore.set_sort_column_id(0, Gtk.SortType.ASCENDING)
keybindings = self.config['keybindings']
for keybinding in keybindings:
keyval = 0
mask = 0
value = keybindings[keybinding]
if value is not None and value != '':
try:
(keyval, mask) = self.keybindings._parsebinding(value)
except KeymapError:
pass
liststore.append([keybinding, self.keybindingnames[keybinding],
keyval, mask])
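        # Illustrative note (the binding string is hypothetical): a stored
        # value such as '<Shift><Control>o' is parsed by _parsebinding()
        # into a (keyval, mask) pair -- the keyval of 'o' plus the
        # Shift|Control modifier mask -- which fills the accelerator
        # columns of the liststore above.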
def set_profile_values(self, profile):
"""Update the profile values for a given profile"""
self.config.set_profile(profile)
guiget = self.builder.get_object
dbg('PrefsEditor::set_profile_values: Setting profile %s' % profile)
## General tab
# Use system font
widget = guiget('system_font_checkbutton')
widget.set_active(self.config['use_system_font'])
self.on_system_font_checkbutton_toggled(widget)
# Font selector
widget = guiget('font_selector')
if self.config['use_system_font'] == True:
fontname = self.config.get_system_font()
if fontname is not None:
widget.set_font_name(fontname)
else:
widget.set_font_name(self.config['font'])
# Allow bold text
widget = guiget('allow_bold_checkbutton')
widget.set_active(self.config['allow_bold'])
# Icon terminal bell
widget = guiget('icon_bell_checkbutton')
widget.set_active(self.config['icon_bell'])
# Audible terminal bell
widget = guiget('audible_bell_checkbutton')
widget.set_active(self.config['audible_bell'])
# WM_URGENT terminal bell
widget = guiget('urgent_bell_checkbutton')
widget.set_active(self.config['urgent_bell'])
# Show titlebar
widget = guiget('show_titlebar')
widget.set_active(self.config['show_titlebar'])
# Copy on selection
widget = guiget('copy_on_selection')
widget.set_active(self.config['copy_on_selection'])
# Cursor shape
widget = guiget('cursor_shape_combobox')
if self.config['cursor_shape'] == 'underline':
active = 1
elif self.config['cursor_shape'] == 'ibeam':
active = 2
else:
active = 0
widget.set_active(active)
# Cursor blink
widget = guiget('cursor_blink')
widget.set_active(self.config['cursor_blink'])
# Cursor colour
widget = guiget('cursor_color')
try:
widget.set_color(Gdk.color_parse(self.config['cursor_color']))
except ValueError:
self.config['cursor_color'] = "#FFFFFF"
widget.set_color(Gdk.color_parse(self.config['cursor_color']))
## Command tab
# Login shell
widget = guiget('login_shell_checkbutton')
widget.set_active(self.config['login_shell'])
# Login records
widget = guiget('update_records_checkbutton')
widget.set_active(self.config['update_records'])
# Use Custom command
widget = guiget('use_custom_command_checkbutton')
widget.set_active(self.config['use_custom_command'])
self.on_use_custom_command_checkbutton_toggled(widget)
# Custom Command
widget = guiget('custom_command_entry')
widget.set_text(self.config['custom_command'])
# Exit action
widget = guiget('exit_action_combobox')
if self.config['exit_action'] == 'restart':
widget.set_active(1)
elif self.config['exit_action'] == 'hold':
widget.set_active(2)
else:
# Default is to close the terminal
widget.set_active(0)
## Colors tab
# Use system colors
widget = guiget('use_theme_colors_checkbutton')
widget.set_active(self.config['use_theme_colors'])
# Colorscheme
widget = guiget('color_scheme_combobox')
scheme = None
for ascheme in self.colourschemes:
forecol = self.colourschemes[ascheme][0]
backcol = self.colourschemes[ascheme][1]
if self.config['foreground_color'].lower() == forecol and \
self.config['background_color'].lower() == backcol:
scheme = ascheme
break
if scheme not in self.colorschemevalues:
if self.config['foreground_color'] in [None, ''] or \
self.config['background_color'] in [None, '']:
scheme = 'grey_on_black'
else:
scheme = 'custom'
# NOTE: The scheme is set in the GUI widget after the fore/back colours
# Foreground color
widget = guiget('foreground_colorpicker')
widget.set_color(Gdk.color_parse(self.config['foreground_color']))
if scheme == 'custom':
widget.set_sensitive(True)
else:
widget.set_sensitive(False)
# Background color
widget = guiget('background_colorpicker')
widget.set_color(Gdk.color_parse(self.config['background_color']))
if scheme == 'custom':
widget.set_sensitive(True)
else:
widget.set_sensitive(False)
# Now actually set the scheme
widget = guiget('color_scheme_combobox')
widget.set_active(self.colorschemevalues[scheme])
# Palette scheme
widget = guiget('palette_combobox')
palette = None
for apalette in self.palettes:
if self.config['palette'].lower() == self.palettes[apalette]:
palette = apalette
if palette not in self.palettevalues:
if self.config['palette'] in [None, '']:
palette = 'rxvt'
else:
palette = 'custom'
# NOTE: The palette selector is set after the colour pickers
# Palette colour pickers
colourpalette = self.config['palette'].split(':')
for i in range(1, 17):
widget = guiget('palette_colorpicker_%d' % i)
widget.set_color(Gdk.color_parse(colourpalette[i - 1]))
# Now set the palette selector widget
widget = guiget('palette_combobox')
widget.set_active(self.palettevalues[palette])
# Titlebar colors
for bit in ['title_transmit_fg_color', 'title_transmit_bg_color',
'title_receive_fg_color', 'title_receive_bg_color',
'title_inactive_fg_color', 'title_inactive_bg_color']:
widget = guiget(bit)
widget.set_color(Gdk.color_parse(self.config[bit]))
# Inactive terminal shading
widget = guiget('inactive_color_offset')
widget.set_value(float(self.config['inactive_color_offset']))
# Use custom URL handler
widget = guiget('use_custom_url_handler_checkbox')
widget.set_active(self.config['use_custom_url_handler'])
self.on_use_custom_url_handler_checkbutton_toggled(widget)
# Custom URL handler
widget = guiget('custom_url_handler_entry')
widget.set_text(self.config['custom_url_handler'])
## Scrolling tab
# Scrollbar position
widget = guiget('scrollbar_position_combobox')
value = self.config['scrollbar_position']
if value == 'left':
widget.set_active(0)
elif value in ['disabled', 'hidden']:
widget.set_active(2)
else:
widget.set_active(1)
# Scrollback lines
widget = guiget('scrollback_lines_spinbutton')
widget.set_value(self.config['scrollback_lines'])
# Scrollback infinite
widget = guiget('scrollback_infinite')
widget.set_active(self.config['scrollback_infinite'])
        # Scroll on output
widget = guiget('scroll_on_output_checkbutton')
widget.set_active(self.config['scroll_on_output'])
# Scroll on keystroke
widget = guiget('scroll_on_keystroke_checkbutton')
widget.set_active(self.config['scroll_on_keystroke'])
## Compatibility tab
# Backspace key
widget = guiget('backspace_binding_combobox')
value = self.config['backspace_binding']
if value == 'control-h':
widget.set_active(1)
elif value == 'ascii-del':
widget.set_active(2)
elif value == 'escape-sequence':
widget.set_active(3)
else:
widget.set_active(0)
# Delete key
widget = guiget('delete_binding_combobox')
value = self.config['delete_binding']
if value == 'control-h':
widget.set_active(1)
elif value == 'ascii-del':
widget.set_active(2)
elif value == 'escape-sequence':
widget.set_active(3)
else:
widget.set_active(0)
# Encoding
rowiter = None
widget = guiget('encoding_combobox')
encodingstore = guiget('EncodingListStore')
value = self.config['encoding']
encodings = TerminatorEncoding().get_list()
#encodings.sort(lambda x, y: cmp(x[2].lower(), y[2].lower()))
encodings.sort()
for encoding in encodings:
if encoding[1] is None:
continue
label = "%s %s" % (encoding[2], encoding[1])
rowiter = encodingstore.append([label, encoding[1]])
if encoding[1] == value:
widget.set_active_iter(rowiter)
def set_layout(self, layout_name):
"""Set a layout"""
self.layouteditor.set_layout(layout_name)
def on_wingeomcheck_toggled(self, widget):
"""Window geometry setting changed"""
self.config['geometry_hinting'] = widget.get_active()
self.config.save()
def on_homogeneous_toggled(self, widget):
"""homogeneous_tabbar setting changed"""
guiget = self.builder.get_object
self.config['homogeneous_tabbar'] = widget.get_active()
scroll_toggled = guiget('scrolltabbarcheck')
if widget.get_active():
scroll_toggled.set_sensitive(True)
else:
scroll_toggled.set_active(True)
scroll_toggled.set_sensitive(False)
self.config.save()
def on_scroll_toggled(self, widget):
"""scroll_tabbar setting changed"""
self.config['scroll_tabbar'] = widget.get_active()
self.config.save()
def on_winbordercheck_toggled(self, widget):
"""Window border setting changed"""
self.config['borderless'] = not widget.get_active()
self.config.save()
def on_hidefromtaskbcheck_toggled(self, widget):
"""Hide from taskbar setting changed"""
self.config['hide_from_taskbar'] = widget.get_active()
self.config.save()
def on_alwaysontopcheck_toggled(self, widget):
"""Always on top setting changed"""
self.config['always_on_top'] = widget.get_active()
self.config.save()
def on_hideonlosefocuscheck_toggled(self, widget):
"""Hide on lose focus setting changed"""
self.config['hide_on_lose_focus'] = widget.get_active()
self.config.save()
def on_stickycheck_toggled(self, widget):
"""Sticky setting changed"""
self.config['sticky'] = widget.get_active()
self.config.save()
def on_title_hide_sizetextcheck_toggled(self, widget):
"""Window geometry setting changed"""
self.config['title_hide_sizetext'] = widget.get_active()
self.config.save()
def on_always_split_with_profile_toggled(self, widget):
"""Always split with profile setting changed"""
self.config['always_split_with_profile'] = widget.get_active()
self.config.save()
def on_allow_bold_checkbutton_toggled(self, widget):
"""Allow bold setting changed"""
self.config['allow_bold'] = widget.get_active()
self.config.save()
def on_show_titlebar_toggled(self, widget):
"""Show titlebar setting changed"""
self.config['show_titlebar'] = widget.get_active()
self.config.save()
def on_copy_on_selection_toggled(self, widget):
"""Copy on selection setting changed"""
self.config['copy_on_selection'] = widget.get_active()
self.config.save()
def on_cursor_blink_toggled(self, widget):
"""Cursor blink setting changed"""
self.config['cursor_blink'] = widget.get_active()
self.config.save()
def on_icon_bell_checkbutton_toggled(self, widget):
"""Icon bell setting changed"""
self.config['icon_bell'] = widget.get_active()
self.config.save()
def on_audible_bell_checkbutton_toggled(self, widget):
"""Audible bell setting changed"""
self.config['audible_bell'] = widget.get_active()
self.config.save()
def on_urgent_bell_checkbutton_toggled(self, widget):
"""Window manager bell setting changed"""
self.config['urgent_bell'] = widget.get_active()
self.config.save()
def on_login_shell_checkbutton_toggled(self, widget):
"""Login shell setting changed"""
self.config['login_shell'] = widget.get_active()
self.config.save()
def on_update_records_checkbutton_toggled(self, widget):
"""Update records setting changed"""
self.config['update_records'] = widget.get_active()
self.config.save()
def on_scroll_background_checkbutton_toggled(self, widget):
"""Scroll background setting changed"""
self.config['scroll_background'] = widget.get_active()
self.config.save()
def on_scroll_on_keystroke_checkbutton_toggled(self, widget):
"""Scroll on keystrong setting changed"""
self.config['scroll_on_keystroke'] = widget.get_active()
self.config.save()
def on_scroll_on_output_checkbutton_toggled(self, widget):
"""Scroll on output setting changed"""
self.config['scroll_on_output'] = widget.get_active()
self.config.save()
def on_delete_binding_combobox_changed(self, widget):
"""Delete binding setting changed"""
selected = widget.get_active()
if selected == 1:
value = 'control-h'
elif selected == 2:
value = 'ascii-del'
elif selected == 3:
value = 'escape-sequence'
else:
value = 'automatic'
self.config['delete_binding'] = value
self.config.save()
def on_backspace_binding_combobox_changed(self, widget):
"""Backspace binding setting changed"""
selected = widget.get_active()
if selected == 1:
value = 'control-h'
elif selected == 2:
value = 'ascii-del'
elif selected == 3:
            value = 'escape-sequence'
else:
value = 'automatic'
self.config['backspace_binding'] = value
self.config.save()
def on_encoding_combobox_changed(self, widget):
"""Encoding setting changed"""
selected = widget.get_active_iter()
liststore = widget.get_model()
value = liststore.get_value(selected, 1)
self.config['encoding'] = value
self.config.save()
def on_scrollback_lines_spinbutton_value_changed(self, widget):
"""Scrollback lines setting changed"""
value = widget.get_value_as_int()
self.config['scrollback_lines'] = value
self.config.save()
def on_scrollback_infinite_toggled(self, widget):
"""Scrollback infiniteness changed"""
spinbutton = self.builder.get_object('scrollback_lines_spinbutton')
value = widget.get_active()
if value == True:
spinbutton.set_sensitive(False)
else:
spinbutton.set_sensitive(True)
self.config['scrollback_infinite'] = value
self.config.save()
def on_scrollbar_position_combobox_changed(self, widget):
"""Scrollbar position setting changed"""
selected = widget.get_active()
if selected == 1:
value = 'right'
elif selected == 2:
value = 'hidden'
else:
value = 'left'
self.config['scrollbar_position'] = value
self.config.save()
def on_darken_background_scale_change_value(self, widget, scroll, value):
"""Background darkness setting changed"""
self.config['background_darkness'] = round(value, 2)
self.config.save()
def on_palette_combobox_changed(self, widget):
"""Palette selector changed"""
value = None
guiget = self.builder.get_object
active = widget.get_active()
for key in list(self.palettevalues.keys()):
if self.palettevalues[key] == active:
value = key
if value == 'custom':
sensitive = True
else:
sensitive = False
for num in range(1, 17):
picker = guiget('palette_colorpicker_%d' % num)
picker.set_sensitive(sensitive)
if value in self.palettes:
palette = self.palettes[value]
palettebits = palette.split(':')
for num in range(1, 17):
# Update the visible elements
picker = guiget('palette_colorpicker_%d' % num)
picker.set_color(Gdk.color_parse(palettebits[num - 1]))
elif value == 'custom':
palettebits = []
for num in range(1, 17):
picker = guiget('palette_colorpicker_%d' % num)
palettebits.append(color2hex(picker))
palette = ':'.join(palettebits)
else:
err('Unknown palette value: %s' % value)
return
self.config['palette'] = palette
self.config.save()
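    # Illustrative note: self.config['palette'] holds the 16 palette colours
    # as one colon-separated string of hex values (hence the split(':') and
    # range(1, 17) above), e.g. '#000000:#cc0000: ... :#ffffff'; the colours
    # shown here are hypothetical.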
def on_background_colorpicker_color_set(self, widget):
"""Background color changed"""
self.config['background_color'] = color2hex(widget)
self.config.save()
def on_foreground_colorpicker_color_set(self, widget):
"""Foreground color changed"""
self.config['foreground_color'] = color2hex(widget)
self.config.save()
def on_palette_colorpicker_color_set(self, widget):
"""A palette colour changed"""
palette = None
palettebits = []
guiget = self.builder.get_object
# FIXME: We do this at least once elsewhere. refactor!
for num in range(1, 17):
picker = guiget('palette_colorpicker_%d' % num)
value = color2hex(picker)
palettebits.append(value)
palette = ':'.join(palettebits)
self.config['palette'] = palette
self.config.save()
def on_exit_action_combobox_changed(self, widget):
"""Exit action changed"""
selected = widget.get_active()
if selected == 1:
value = 'restart'
elif selected == 2:
value = 'hold'
else:
value = 'close'
self.config['exit_action'] = value
self.config.save()
def on_custom_url_handler_entry_changed(self, widget):
"""Custom URL handler value changed"""
self.config['custom_url_handler'] = widget.get_text()
self.config.save()
def on_custom_command_entry_changed(self, widget):
"""Custom command value changed"""
self.config['custom_command'] = widget.get_text()
self.config.save()
def on_cursor_color_color_set(self, widget):
"""Cursor colour changed"""
self.config['cursor_color'] = color2hex(widget)
self.config.save()
def on_cursor_shape_combobox_changed(self, widget):
"""Cursor shape changed"""
selected = widget.get_active()
if selected == 1:
value = 'underline'
elif selected == 2:
value = 'ibeam'
else:
value = 'block'
self.config['cursor_shape'] = value
self.config.save()
def on_font_selector_font_set(self, widget):
"""Font changed"""
self.config['font'] = widget.get_font_name()
self.config.save()
def on_title_receive_bg_color_color_set(self, widget):
"""Title receive background colour changed"""
self.config['title_receive_bg_color'] = color2hex(widget)
self.config.save()
def on_title_receive_fg_color_color_set(self, widget):
"""Title receive foreground colour changed"""
self.config['title_receive_fg_color'] = color2hex(widget)
self.config.save()
def on_title_inactive_bg_color_color_set(self, widget):
"""Title inactive background colour changed"""
self.config['title_inactive_bg_color'] = color2hex(widget)
self.config.save()
def on_title_transmit_bg_color_color_set(self, widget):
"""Title transmit backgruond colour changed"""
self.config['title_transmit_bg_color'] = color2hex(widget)
self.config.save()
def on_title_inactive_fg_color_color_set(self, widget):
"""Title inactive foreground colour changed"""
self.config['title_inactive_fg_color'] = color2hex(widget)
self.config.save()
def on_title_transmit_fg_color_color_set(self, widget):
"""Title transmit foreground colour changed"""
self.config['title_transmit_fg_color'] = color2hex(widget)
self.config.save()
def on_inactive_color_offset_change_value(self, widget, scroll, value):
"""Inactive color offset setting changed"""
if value > 1.0:
value = 1.0
self.config['inactive_color_offset'] = round(value, 2)
self.config.save()
def on_handlesize_change_value(self, widget, scroll, value):
"""Handle size changed"""
value = int(value)
if value > 5:
value = 5
self.config['handle_size'] = value
self.config.save()
def on_focuscombo_changed(self, widget):
"""Focus type changed"""
selected = widget.get_active()
if selected == 1:
value = 'click'
elif selected == 2:
value = 'mouse'
else:
value = 'system'
self.config['focus'] = value
self.config.save()
def on_tabposcombo_changed(self, widget):
"""Tab position changed"""
selected = widget.get_active()
if selected == 1:
value = 'bottom'
elif selected == 2:
value = 'left'
elif selected == 3:
value = 'right'
elif selected == 4:
value = 'hidden'
else:
value = 'top'
self.config['tab_position'] = value
self.config.save()
def on_winstatecombo_changed(self, widget):
"""Window state changed"""
selected = widget.get_active()
if selected == 1:
value = 'hidden'
elif selected == 2:
value = 'maximise'
elif selected == 3:
value = 'fullscreen'
else:
value = 'normal'
self.config['window_state'] = value
self.config.save()
def on_profileaddbutton_clicked(self, _button):
"""Add a new profile to the list"""
guiget = self.builder.get_object
treeview = guiget('profilelist')
model = treeview.get_model()
values = [ r[0] for r in model ]
newprofile = _('New Profile')
if newprofile in values:
i = 1
while newprofile in values:
i = i + 1
newprofile = '%s %d' % (_('New Profile'), i)
if self.config.add_profile(newprofile):
res = model.append([newprofile, True])
if res:
path = model.get_path(res)
treeview.set_cursor(path, start_editing=True)
self.layouteditor.update_profiles()
def on_profileremovebutton_clicked(self, _button):
"""Remove a profile from the list"""
guiget = self.builder.get_object
treeview = guiget('profilelist')
selection = treeview.get_selection()
(model, rowiter) = selection.get_selected()
profile = model.get_value(rowiter, 0)
if profile == 'default':
# We shouldn't let people delete this profile
return
self.previous_profile_selection = None
self.config.del_profile(profile)
model.remove(rowiter)
selection.select_iter(model.get_iter_first())
self.layouteditor.update_profiles()
def on_layoutaddbutton_clicked(self, _button):
"""Add a new layout to the list"""
terminator = Terminator()
current_layout = terminator.describe_layout()
guiget = self.builder.get_object
treeview = guiget('layoutlist')
model = treeview.get_model()
values = [ r[0] for r in model ]
name = _('New Layout')
if name in values:
i = 1
while name in values:
i = i + 1
name = '%s %d' % (_('New Layout'), i)
if self.config.add_layout(name, current_layout):
res = model.append([name, True])
if res:
path = model.get_path(res)
treeview.set_cursor(path, start_editing=True)
self.config.save()
def on_layoutremovebutton_clicked(self, _button):
"""Remove a layout from the list"""
guiget = self.builder.get_object
treeview = guiget('layoutlist')
selection = treeview.get_selection()
(model, rowiter) = selection.get_selected()
layout = model.get_value(rowiter, 0)
if layout == 'default':
# We shouldn't let people delete this layout
return
self.previous_selection = None
self.config.del_layout(layout)
model.remove(rowiter)
selection.select_iter(model.get_iter_first())
self.config.save()
def on_use_custom_url_handler_checkbutton_toggled(self, checkbox):
"""Toggling the use_custom_url_handler checkbox needs to alter the
sensitivity of the custom_url_handler entrybox"""
guiget = self.builder.get_object
widget = guiget('custom_url_handler_entry')
value = checkbox.get_active()
widget.set_sensitive(value)
self.config['use_custom_url_handler'] = value
self.config.save()
def on_use_custom_command_checkbutton_toggled(self, checkbox):
"""Toggling the use_custom_command checkbox needs to alter the
sensitivity of the custom_command entrybox"""
guiget = self.builder.get_object
widget = guiget('custom_command_entry')
value = checkbox.get_active()
widget.set_sensitive(value)
self.config['use_custom_command'] = value
self.config.save()
def on_system_font_checkbutton_toggled(self, checkbox):
"""Toggling the use_system_font checkbox needs to alter the
sensitivity of the font selector"""
guiget = self.builder.get_object
widget = guiget('font_selector')
value = checkbox.get_active()
widget.set_sensitive(not value)
self.config['use_system_font'] = value
self.config.save()
def on_reset_compatibility_clicked(self, widget):
"""Reset the confusing and annoying backspace/delete options to the
safest values"""
guiget = self.builder.get_object
widget = guiget('backspace_binding_combobox')
widget.set_active(2)
widget = guiget('delete_binding_combobox')
widget.set_active(3)
def on_profile_selection_changed(self, selection):
"""A different profile was selected"""
(listmodel, rowiter) = selection.get_selected()
if not rowiter:
# Something is wrong, just jump to the first item in the list
treeview = selection.get_tree_view()
liststore = treeview.get_model()
selection.select_iter(liststore.get_iter_first())
return
profile = listmodel.get_value(rowiter, 0)
self.set_profile_values(profile)
self.previous_profile_selection = profile
widget = self.builder.get_object('profileremovebutton')
if profile == 'default':
widget.set_sensitive(False)
else:
widget.set_sensitive(True)
def on_profile_name_edited(self, cell, path, newtext):
"""Update a profile name"""
oldname = cell.get_property('text')
if oldname == newtext or oldname == 'default':
return
dbg('PrefsEditor::on_profile_name_edited: Changing %s to %s' %
(oldname, newtext))
self.config.rename_profile(oldname, newtext)
self.config.save()
widget = self.builder.get_object('profilelist')
model = widget.get_model()
itera = model.get_iter(path)
model.set_value(itera, 0, newtext)
if oldname == self.previous_profile_selection:
self.previous_profile_selection = newtext
def on_layout_selection_changed(self, selection):
"""A different layout was selected"""
self.layouteditor.on_layout_selection_changed(selection)
def on_layout_item_selection_changed(self, selection):
"""A different item in the layout was selected"""
self.layouteditor.on_layout_item_selection_changed(selection)
def on_layout_profile_chooser_changed(self, widget):
"""A different profile has been selected for this item"""
self.layouteditor.on_layout_profile_chooser_changed(widget)
def on_layout_profile_command_changed(self, widget):
"""A different command has been entered for this item"""
self.layouteditor.on_layout_profile_command_activate(widget)
def on_layout_profile_workingdir_changed(self, widget):
"""A different working directory has been entered for this item"""
self.layouteditor.on_layout_profile_workingdir_activate(widget)
def on_layout_name_edited(self, cell, path, newtext):
"""Update a layout name"""
oldname = cell.get_property('text')
if oldname == newtext or oldname == 'default':
return
dbg('Changing %s to %s' % (oldname, newtext))
self.config.rename_layout(oldname, newtext)
self.config.save()
widget = self.builder.get_object('layoutlist')
model = widget.get_model()
itera = model.get_iter(path)
model.set_value(itera, 0, newtext)
if oldname == self.previous_layout_selection:
self.previous_layout_selection = newtext
if oldname == self.layouteditor.layout_name:
self.layouteditor.layout_name = newtext
def on_color_scheme_combobox_changed(self, widget):
"""Update the fore/background colour pickers"""
value = None
guiget = self.builder.get_object
active = widget.get_active()
for key in list(self.colorschemevalues.keys()):
if self.colorschemevalues[key] == active:
value = key
fore = guiget('foreground_colorpicker')
back = guiget('background_colorpicker')
if value == 'custom':
fore.set_sensitive(True)
back.set_sensitive(True)
else:
fore.set_sensitive(False)
back.set_sensitive(False)
forecol = None
backcol = None
if value in self.colourschemes:
forecol = self.colourschemes[value][0]
backcol = self.colourschemes[value][1]
elif value == 'custom':
forecol = color2hex(fore)
backcol = color2hex(back)
else:
err('Unknown colourscheme value: %s' % value)
return
fore.set_color(Gdk.color_parse(forecol))
back.set_color(Gdk.color_parse(backcol))
self.config['foreground_color'] = forecol
self.config['background_color'] = backcol
self.config.save()
def on_use_theme_colors_checkbutton_toggled(self, widget):
"""Update colour pickers"""
guiget = self.builder.get_object
active = widget.get_active()
scheme = guiget('color_scheme_combobox')
fore = guiget('foreground_colorpicker')
back = guiget('background_colorpicker')
if active:
for widget in [scheme, fore, back]:
widget.set_sensitive(False)
else:
scheme.set_sensitive(True)
self.on_color_scheme_combobox_changed(scheme)
self.config['use_theme_colors'] = active
self.config.save()
def on_cellrenderer_accel_edited(self, liststore, path, key, mods, _code):
"""Handle an edited keybinding"""
celliter = liststore.get_iter_from_string(path)
liststore.set(celliter, 2, key, 3, mods)
binding = liststore.get_value(liststore.get_iter(path), 0)
accel = Gtk.accelerator_name(key, mods)
self.config['keybindings'][binding] = accel
self.config.save()
def on_cellrenderer_accel_cleared(self, liststore, path):
"""Handle the clearing of a keybinding accelerator"""
celliter = liststore.get_iter_from_string(path)
liststore.set(celliter, 2, 0, 3, 0)
binding = liststore.get_value(liststore.get_iter(path), 0)
self.config['keybindings'][binding] = None
self.config.save()
class LayoutEditor:
profile_ids_to_profile = None
profile_profile_to_ids = None
layout_name = None
layout_item = None
builder = None
treeview = None
treestore = None
config = None
def __init__(self, builder):
"""Initialise ourself"""
self.config = config.Config()
self.builder = builder
def prepare(self, layout=None):
"""Do the things we can't do in __init__"""
self.treeview = self.builder.get_object('LayoutTreeView')
self.treestore = self.builder.get_object('LayoutTreeStore')
self.update_profiles()
if layout:
self.set_layout(layout)
def set_layout(self, layout_name):
"""Load a particular layout"""
self.layout_name = layout_name
store = self.treestore
layout = self.config.layout_get_config(layout_name)
listitems = {}
store.clear()
children = list(layout.keys())
i = 0
while children != []:
child = children.pop()
child_type = layout[child]['type']
parent = layout[child]['parent']
if child_type != 'Window' and parent not in layout:
# We have an orphan!
err('%s is an orphan in this layout. Discarding' % child)
continue
try:
parentiter = listitems[parent]
except KeyError:
if child_type == 'Window':
parentiter = None
else:
# We're not ready for this widget yet
children.insert(0, child)
continue
if child_type == 'VPaned':
child_type = 'Vertical split'
elif child_type == 'HPaned':
child_type = 'Horizontal split'
listitems[child] = store.append(parentiter, [child, child_type])
treeview = self.builder.get_object('LayoutTreeView')
treeview.expand_all()
def update_profiles(self):
"""Update the list of profiles"""
self.profile_ids_to_profile = {}
self.profile_profile_to_ids= {}
chooser = self.builder.get_object('layout_profile_chooser')
model = chooser.get_model()
model.clear()
profiles = self.config.list_profiles()
profiles.sort()
i = 0
for profile in profiles:
self.profile_ids_to_profile[i] = profile
self.profile_profile_to_ids[profile] = i
model.append([profile])
i = i + 1
def on_layout_selection_changed(self, selection):
"""A different layout was selected"""
(listmodel, rowiter) = selection.get_selected()
if not rowiter:
# Something is wrong, just jump to the first item in the list
selection.select_iter(self.treestore.get_iter_first())
return
layout = listmodel.get_value(rowiter, 0)
self.set_layout(layout)
self.previous_layout_selection = layout
widget = self.builder.get_object('layoutremovebutton')
if layout == 'default':
widget.set_sensitive(False)
else:
widget.set_sensitive(True)
command = self.builder.get_object('layout_profile_command')
chooser = self.builder.get_object('layout_profile_chooser')
workdir = self.builder.get_object('layout_profile_workingdir')
command.set_sensitive(False)
chooser.set_sensitive(False)
workdir.set_sensitive(False)
def on_layout_item_selection_changed(self, selection):
"""A different item in the layout was selected"""
(treemodel, rowiter) = selection.get_selected()
if not rowiter:
return
item = treemodel.get_value(rowiter, 0)
self.layout_item = item
self.set_layout_item(item)
def set_layout_item(self, item_name):
"""Set a layout item"""
layout = self.config.layout_get_config(self.layout_name)
layout_item = layout[self.layout_item]
command = self.builder.get_object('layout_profile_command')
chooser = self.builder.get_object('layout_profile_chooser')
workdir = self.builder.get_object('layout_profile_workingdir')
if layout_item['type'] != 'Terminal':
command.set_sensitive(False)
chooser.set_sensitive(False)
workdir.set_sensitive(False)
return
command.set_sensitive(True)
chooser.set_sensitive(True)
workdir.set_sensitive(True)
if 'command' in layout_item and layout_item['command'] != '':
command.set_text(layout_item['command'])
else:
command.set_text('')
if 'profile' in layout_item and layout_item['profile'] != '':
chooser.set_active(self.profile_profile_to_ids[layout_item['profile']])
else:
chooser.set_active(0)
if 'directory' in layout_item and layout_item['directory'] != '':
workdir.set_text(layout_item['directory'])
else:
workdir.set_text('')
def on_layout_profile_chooser_changed(self, widget):
"""A new profile has been selected for this item"""
if not self.layout_item:
return
profile = widget.get_active_text()
layout = self.config.layout_get_config(self.layout_name)
layout[self.layout_item]['profile'] = profile
self.config.save()
def on_layout_profile_command_activate(self, widget):
"""A new command has been entered for this item"""
command = widget.get_text()
layout = self.config.layout_get_config(self.layout_name)
layout[self.layout_item]['command'] = command
self.config.save()
def on_layout_profile_workingdir_activate(self, widget):
"""A new working directory has been entered for this item"""
workdir = widget.get_text()
layout = self.config.layout_get_config(self.layout_name)
layout[self.layout_item]['directory'] = workdir
self.config.save()
if __name__ == '__main__':
from . import util
util.DEBUG = True
from . import terminal
TERM = terminal.Terminal()
PREFEDIT = PrefsEditor(TERM)
Gtk.main()
|
m45t3r/t3rminator
|
terminatorlib/prefseditor.py
|
Python
|
gpl-2.0
| 53,737
|
import os
import sys
import re
from collections import deque
flagAllowCycle = False
for op in sys.argv:
if op == '-a':
flagAllowCycle = True
lines = sys.stdin.readlines()
nodes = {}
edges = []
lastNode = None
for i in range(1, len(lines)):
match = re.match(r' <way id="(\d+)">', lines[i])
if match:
lastNode = None
continue
match = re.match(r' <nd id="(\d+)" lat="([0-9\.\-]+)" lon="([0-9\.\-]+)"\/>', lines[i])
if match:
nodes[ match.group(1) ] = (match.group(2), match.group(3))
if lastNode != None:
edges += [ (lastNode, match.group(1)) ]
lastNode = match.group(1)
continue
nextNodes = {} # node -> list of neighbor nodes
for e in edges:
(na, nb) = e
if na == nb: # i don't know why this can happen in osm
continue
if na not in nextNodes:
nextNodes[na] = []
    if nb not in nextNodes[na]: # it's strange that neighbor nodes can appear several times
nextNodes[na] += [nb]
if nb not in nextNodes:
nextNodes[nb] = []
    if na not in nextNodes[nb]: # it's strange that neighbor nodes can appear several times
nextNodes[nb] += [na]
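# A minimal sketch of the structure built above (node ids are hypothetical):
# for a three-node way A-B-C the undirected adjacency list becomes
#   nextNodes == {'A': ['B'], 'B': ['A', 'C'], 'C': ['B']}
# so a node is an endpoint or a junction exactly when len(nextNodes[x]) != 2.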
sys.stderr.write('Total # of nodes: ' + str(len(nodes)) + '\n')
sys.stderr.write('Total # of edges: ' + str(len(edges)) + '\n')
#asking = ['122905200', '1903812544', '1903812541', '1903812539', '1903812527', '1903812524', '122905197']
#for ask in asking:
# print ask, nextNodes[ask]
#exit()
trajCnt = 0
tracedNodes = set()
tracedEdges = set()
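# For each connected component (found by the BFS below), nodes of degree != 2
# are the "critical" segment ends; every untraced edge leaving a critical node
# is walked through degree-2 nodes until the next critical node and emitted as
# one polyline of id,lat,lon triples.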
for n in nextNodes:
if n not in tracedNodes:
connectedNodes = set()
queue = deque([n])
while len(queue) > 0:
nn = queue.popleft()
connectedNodes.add(nn)
for nnn in nextNodes[nn]:
if nnn not in connectedNodes and nnn not in queue:
queue += [nnn]
tracedNodes |= connectedNodes
criticalNodes = [ x for x in connectedNodes if len(nextNodes[x]) != 2 ]
jointNodes = [ x for x in connectedNodes if len(nextNodes[x]) > 2 ]
if len(criticalNodes) == 0 and flagAllowCycle:
            criticalNodes = [ next(iter(connectedNodes)) ] # sets are not indexable; pick an arbitrary node to break the cycle
if len(jointNodes) < 100:
continue
        sys.stderr.write('found component with ' + str(len(criticalNodes)) + ' critical nodes\n')
idx = 0
for nn in criticalNodes:
idx += 1
#sys.stderr.write(str(idx) + '\n')
for tn in nextNodes[nn]:
if (nn, tn) not in tracedEdges:
seg = [nn, tn]
#sys.stderr.write(str(nn) + ' ->' + str(tn))
tracedEdges.add( (nn, tn) )
tracedEdges.add( (tn, nn) )
while seg[-1] not in criticalNodes:
nnn = seg[-1]
find = 0
#sys.stderr.write(' ')
for tnn in nextNodes[nnn]:
if (nnn, tnn) not in tracedEdges:
tracedEdges.add( (nnn, tnn) )
tracedEdges.add( (tnn, nnn) )
seg += [tnn]
#sys.stderr.write('->' + str(tnn))
find += 1
if find != 1:
sys.stderr.write('error\n')
print >>sys.stderr, nnn
print >>sys.stderr, nextNodes[nnn]
print >>sys.stderr, (nnn in criticalNodes)
quit()
segCoor = [ x + ',' + nodes[x][0] + ',' + nodes[x][1] for x in seg ]
#sys.stderr.write('\n')
print ",".join( tuple(segCoor) )
|
nesl/mercury
|
Services/Mapping/fixProgram/fix.py
|
Python
|
gpl-2.0
| 3,782
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# pyILPER 1.2.4 for Linux
#
# An emulator for virtual HP-IL devices for the PIL-Box
# derived from ILPER 1.4.5 for Windows
# Copyright (c) 2008-2013 Jean-Francois Garnier
# C++ version (c) 2013 Christoph Gießelink
# Python Version (c) 2015 Joachim Siebold
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# HP2225B virtual device classes ---------------------------------------------
#
# Changelog
# 30.12.2018 jsi:
# - initial version
# 02.01.2019 jsi:
# - added support for ^N and ^O for bold mode on and off
# - allow ESC&kS in addition to ESC&k0S to switch to normal character pitch
# 04.01.2019 jsi:
# - support zero length graphics chunks
# - switchable print color
#
import copy
import queue
import threading
import re
from math import floor
from PyQt5 import QtCore, QtWidgets, QtGui, QtPrintSupport
from .pilcore import UPDATE_TIMER, PDF_ORIENTATION_PORTRAIT
from .pilconfig import PILCONFIG
from .pilcharconv import charconv, barrconv, CHARSET_HP2225
from .pildevbase import cls_pildevbase
from .pilwidgets import cls_tabgeneric, LogCheckboxWidget, T_INTEGER, O_DEFAULT, T_STRING
from .pilcore import *
#
# constants --------------------------------------------------------------
#
PDF_LINES=70 # number of lines in pdf output
PDF_MARGINS=50 # margins (top,bot,left,right) of pdf output
PDF_MAX_COLS=3 # max number of columns in pdf output
PDF_COLUMN_SPACING=80 # spacing between columns
PDF_LINE_SPACING=0 # linespacing in (relative) pixel
# GUI commands
CMD_LF_PRESSED= 0
CMD_LF_RELEASED= 1
CMD_FF_PRESSED= 2
CMD_CLEAR= 3
# HPIL-Thread commands
REMOTECMD_CLEAR=0 # clear device
REMOTECMD_LOG=1 # log something
REMOTECMD_STATUS=2 # printer status information:
REMOTECMD_TEXT=3 # print characters according to current status
REMOTECMD_GRAPHICS=4 # print graphics according to current status
REMOTECMD_CR=5 # carriage return
REMOTECMD_LF=6 # line feed
REMOTECMD_HLF=7 # half line feed
REMOTECMD_BS=8 # backspace
REMOTECMD_FF=9 # form feed
REMOTECMD_TERMGRAPHICS=10 # end of graphics
ELEMENT_FF=0
ELEMENT_TEXT=1
ELEMENT_GRAPHICS=2
# Printer constants
BUFFER_LINE_H=2 # number of dots for a buffer line height
PRINTER_WIDTH_HIGH= 1280 # width in dots for high resolution
PRINTER_WIDTH_LOW= 640 # width in dots for low resolution
HP2225B_FONT_PIXELSIZE=28 # pixel size of the font used
HP2225B_MAX_LINES= 69 # maximum number of lines for a page
# Font widths
# Print mode norm : 80 chars/line, character width 16 dots
# Print mode expand : 40 chars/line, character width 32 dots
# Print mode compressed : 142 chars/line, character width 9 dots
# Print mode expand/compressed : 71 chars/line, character width 18 dots
FONT_WIDTH= [16, 32, 9, 18 ]
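# Informal consistency check: each pitch fills the 1280-dot line width, e.g.
# 80 chars * 16 dots = 1280 and 40 * 32 = 1280, while 142 * 9 = 1278 and
# 71 * 18 = 1278 come out just under PRINTER_WIDTH_HIGH.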
#
# this is a hack because Qt on Macos does not display expanded fonts correctly
#
if isMACOS():
FONT_STRETCH= [100, 110, 56, 110]
else:
FONT_STRETCH= [100, 200, 56, 113]
BUFFER_SIZE_NAMES=["5 Pages", "10 Pages","20 Pages","50 Pages"]
BUFFER_SIZE_VALUES=[2500, 5000, 10000, 25000]
#
# Print colors
#
HP2225_COLOR_BLACK=0
HP2225_COLOR_RED=1
HP2225_COLOR_BLUE=2
HP2225_COLOR_GREEN=3
COLOR_NAMES= [ "black", "red", "blue", "green" ]
HP2225_COLORS=[QtCore.Qt.black, QtCore.Qt.red, QtCore.Qt.blue, QtCore.Qt.green]
#
# HP2225B tab widget ---------------------------------------------------------
#
class cls_tabhp2225b(cls_tabgeneric):
def __init__(self,parent,name):
super().__init__(parent,name)
self.name=name
#
# this parameter is global
#
self.papersize=PILCONFIG.get("pyilper","papersize")
#
# init local parameter
#
self.screenwidth=PILCONFIG.get(self.name,"hp2225b_screenwidth",-1)
self.scrollupbuffersize=PILCONFIG.get(self.name,"hp2225b_scrollupbuffersize",1)
self.printcolor=PILCONFIG.get(self.name,"hp2225b_printcolor",HP2225_COLOR_BLACK)
#
# create Printer GUI object
#
self.guiobject=cls_hp2225bWidget(self,self.name,self.papersize)
#
# add gui object
#
self.add_guiobject(self.guiobject)
#
# add cascading config menu
#
self.add_configwidget()
#
# add local config option
#
self.cBut.add_option("Screen width","hp2225b_screenwidth",T_INTEGER,[O_DEFAULT,640,960,1280])
self.cBut.add_option("Buffer size","hp2225b_scrollupbuffersize",T_STRING,BUFFER_SIZE_NAMES)
self.cBut.add_option("Print color","hp2225b_printcolor",T_STRING,COLOR_NAMES)
#
# add logging control widget
#
self.add_logging()
#
# create IL-Interface object, notify printer processor object
#
self.pildevice= cls_pilhp2225b(self.guiobject)
self.guiobject.set_pildevice(self.pildevice)
self.cBut.config_changed_signal.connect(self.do_tabconfig_changed)
#
# handle changes of tab config options
#
def do_tabconfig_changed(self):
self.loglevel= PILCONFIG.get(self.name,"loglevel",0)
self.guiobject.reconfigure()
super().do_tabconfig_changed()
#
# reconfigure: reconfigure the gui object
#
def reconfigure(self):
self.guiobject.reconfigure()
#
# enable pildevice and gui object
#
def enable(self):
super().enable()
self.parent.commthread.register(self.pildevice,self.name)
self.pildevice.setactive(self.active)
self.pildevice.enable()
self.guiobject.enable()
#
# disable pildevice and gui object
#
def disable(self):
self.pildevice.disable()
self.guiobject.disable()
super().disable()
#
# active/inactive: enable/disable GUI controls
#
def toggle_active(self):
super().toggle_active()
self.guiobject.toggle_active()
#
# becomes visible, refresh content, activate update
#
def becomes_visible(self):
self.guiobject.becomes_visible()
return
#
# becomes invisible, deactivate update
#
def becomes_invisible(self):
self.guiobject.becomes_invisible()
return
#
#
# hp2225b widget classes - GUI component of the HP2225B HP-IL printer
#
class cls_hp2225bWidget(QtWidgets.QWidget):
def __init__(self,parent,name,papersize):
super().__init__()
self.name= name
self.parent= parent
self.papersize= papersize
self.pildevice= None
#
# printer status that controls the appearance of the printer output
#
self.pdf_rows=480 # text length in rows
self.char_attr=0 # character pitch
self.char_bold=False # bold mode
self.char_underline=False # underline mode
self.hiRes=False # high resolution of graphics output
self.lpi6=True # lines/inch
self.wrapEOL=False # EOL wrap
#
# line coordinates
#
self.pos_y=0 # in 4 dots (1280dpi) steps
self.pos_x=0 # in 1280dpi steps
self.graphics_counter=0 # number of graphics lines
#
# create user interface of printer widget
#
self.hbox=QtWidgets.QHBoxLayout()
self.hbox.addStretch(1)
#
# scrolled printer view
#
self.printview=cls_ScrolledHp2225bView(self,self.name,self.papersize)
self.hbox.addWidget(self.printview)
self.vbox=QtWidgets.QVBoxLayout()
#
# Clear Button
#
self.clearButton= QtWidgets.QPushButton("Clear")
self.clearButton.setEnabled(False)
self.clearButton.setAutoDefault(False)
self.vbox.addWidget(self.clearButton)
self.clearButton.clicked.connect(self.do_clear)
#
# LF Button
#
self.LFButton= QtWidgets.QPushButton("LF")
self.LFButton.setEnabled(False)
self.LFButton.setAutoDefault(False)
self.vbox.addWidget(self.LFButton)
self.LFButton.pressed.connect(self.do_LF_pressed)
self.LFButton.released.connect(self.do_LF_released)
#
# FF Button
#
self.FFButton= QtWidgets.QPushButton("FF")
self.FFButton.setEnabled(False)
self.FFButton.setAutoDefault(False)
self.vbox.addWidget(self.FFButton)
self.FFButton.pressed.connect(self.do_FF_pressed)
#
# PDF Button
#
self.pdfButton= QtWidgets.QPushButton("PDF")
self.pdfButton.setEnabled(False)
self.pdfButton.setAutoDefault(False)
self.vbox.addWidget(self.pdfButton)
self.pdfButton.clicked.connect(self.do_pdf)
self.vbox.addStretch(1)
self.hbox.addLayout(self.vbox)
self.hbox.addStretch(1)
self.setLayout(self.hbox)
#
# initialize GUI command queue and lock
#
self.gui_queue= queue.Queue()
self.gui_queue_lock= threading.Lock()
#
# initialize refresh timer
#
self.UpdateTimer=QtCore.QTimer()
self.UpdateTimer.setSingleShot(True)
self.UpdateTimer.timeout.connect(self.process_queue)
#
# initialize timer for the repeated pressed LF action
#
self.repeatedLFpressedTimer=QtCore.QTimer()
self.repeatedLFpressedTimer.timeout.connect(self.repeated_LFpressed)
self.repeatedLFpressedTimer.setInterval(1500)
#
# set HP-IL device object
#
def set_pildevice(self,pildevice):
self.pildevice=pildevice
#
# enable: start timer, send mode to virtual device, update check boxes
#
def enable(self):
self.UpdateTimer.start(UPDATE_TIMER)
self.toggle_active()
return
#
# disable, clear the GUI queue, stop the timer
#
def disable(self):
self.gui_queue_lock.acquire()
while True:
try:
self.gui_queue.get_nowait()
self.gui_queue.task_done()
except queue.Empty:
break
self.gui_queue_lock.release()
self.UpdateTimer.stop()
return
#
# becomes visible
#
def becomes_visible(self):
self.printview.becomes_visible()
#
# becomes invisible, do nothing
#
def becomes_invisible(self):
pass
#
# active/inactive: enable/disable GUI controls
#
def toggle_active(self):
if self.parent.active:
self.clearButton.setEnabled(True)
self.LFButton.setEnabled(True)
self.FFButton.setEnabled(True)
self.pdfButton.setEnabled(True)
else:
self.clearButton.setEnabled(False)
self.LFButton.setEnabled(False)
self.FFButton.setEnabled(False)
self.pdfButton.setEnabled(False)
#
# reconfigure
#
def reconfigure(self):
self.printview.reconfigure()
return
#
# action scripts
#
def do_clear(self):
self.printview.reset()
self.pildevice.put_cmd(CMD_CLEAR)
return
def do_FF_pressed(self):
self.put_cmd([REMOTECMD_FF])
return
def do_LF_pressed(self):
self.repeatedLFpressedTimer.start()
self.put_cmd([REMOTECMD_LF])
return
def do_LF_released(self):
self.repeatedLFpressedTimer.stop()
return
def do_pdf(self):
filename=cls_PdfOptions.getPdfOptions()
if filename== "":
return
self.printview.pdf(filename,self.pdf_rows)
return
#
# put command into the GUI-command queue, this is called by the thread component
#
def put_cmd(self,item):
self.gui_queue_lock.acquire()
self.gui_queue.put(item)
self.gui_queue_lock.release()
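    #
    # Note: queue.Queue is thread-safe on its own; the extra lock is used
    # so that process_queue() below can drain all pending items as one
    # atomic batch relative to the HP-IL thread calling put_cmd().
    #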
#
# repeated LF pressed action
#
def repeated_LFpressed(self):
self.put_cmd([REMOTECMD_LF])
#
# process commands in the GUI command queue, this is called by a timer event
#
def process_queue(self):
items=[]
self.gui_queue_lock.acquire()
while True:
try:
i=self.gui_queue.get_nowait()
items.append(i)
self.gui_queue.task_done()
except queue.Empty:
break
self.gui_queue_lock.release()
if len(items):
for c in items:
self.process(c)
self.UpdateTimer.start(UPDATE_TIMER)
return
#
# GUI command processing, commands issued by the HP-IL thread
#
# Printer coordinate system
#
    # The internal scene coordinate system of this program is dots at
    # a resolution of 192 dpi. The HP2225B operates
    # either at 96x96dpi or 96x192dpi resolution. Thus we always have even
    # values for the y coordinate.
#
# Constants for movement and positioning
# Dots per line: 1280
# Print mode norm : 80 chars/line, character width 16 dots
# Print mode expand : 40 chars/line, character width 32 dots
# Print mode compressed : 142 chars/line, character width 9 dots
# Print mode expand/compressed : 71 chars/line, character width 18 dots
# Character height always 16 dots
# Line spacing 8 dots at 8 lines/inch, 16 dots at 6 lines/inch
    # Line feed is 24 dots at 8 lines/inch, 32 dots at 6 lines/inch
    # Half line feed is 12 dots at 8 lines/inch and 16 dots at 6 lines/inch
    # Graphics line: 16 dots height, 1280 dots width
    # A graphics dot is 2x2 dots at low res and 2x1 dot at high res
# Graphics data is 80 bytes at low res and 160 bytes at high res
#
# Here we use the following coordinate system:
# x= 1 dot (resolution 1280)
# y= 4 dots (resolution 1280)
# LF= 6 / 8 , half LF= 3 / 4
#
def process(self,item):
cmd= item[0]
#
        # clear graphics view
#
if cmd== REMOTECMD_CLEAR:
self.printview.reset()
self.pos_x=0
# print("GUI: reset")
self.graphics_counter=0
#
# carriage return go to beginning of the current line
#
elif cmd== REMOTECMD_CR:
# print("GUI: cr")
self.pos_x=0
self.graphics_counter=0
#
# line feed advance according to line spacing
#
elif cmd== REMOTECMD_LF:
# print("GUI: lf")
if self.lpi6:
self.printview.advance(8)
else:
self.printview.advance(6)
self.graphics_counter=0
#
# Form feed, we need that for the PDF output later
#
elif cmd== REMOTECMD_FF:
# print("GUI: ff")
if self.lpi6:
self.printview.advance(8)
else:
self.printview.advance(6)
self.graphics_counter=0
self.printview.add([ELEMENT_FF])
#
# advance one half line feed
#
elif cmd== REMOTECMD_HLF:
# print("GUI: half lf")
self.graphics_counter=0
if self.lpi6:
self.printview.advance(4)
else:
self.printview.advance(3)
#
# Backspace, go back one character, use current font width
#
elif cmd== REMOTECMD_BS:
# print("GUI: bs")
self.graphics_counter=0
l= FONT_WIDTH[self.char_attr]
self.pos_x-=l
if self.pos_x < 0:
self.pos_x=0
#
# update configuration triggered by an escape sequence
#
elif cmd== REMOTECMD_STATUS:
# print("GUI status", item[1])
self.pdf_rows=item[1][0]
self.char_attr=item[1][1]
self.char_bold=item[1][2]
self.char_underline=item[1][3]
self.hiRes=item[1][4]
self.lpi6=item[1][5]
self.wrapEOL=item[1][6]
#
        # text element; if EOL wrap is off, any text that exceeds a single
        # line is discarded, otherwise it is wrapped onto the next line
#
elif cmd== REMOTECMD_TEXT:
self.graphics_counter=0
# print("GUI text", self.pos_x, item[1])
txt_list= item[1]
while txt_list is not None:
#
# new length of row
#
newlen=self.pos_x+len(txt_list)*FONT_WIDTH[self.char_attr]
#
# exceeds row
#
if newlen> PRINTER_WIDTH_HIGH:
fit_in_row=len(txt_list)- round((newlen-PRINTER_WIDTH_HIGH)/
FONT_WIDTH[self.char_attr])
#
# txt contains the characters that fit in the current row
#
txt=bytearray(txt_list[:fit_in_row])
#
# if eolWrap is off we throw away the remaining content, otherwise
# keep it
#
if self.wrapEOL:
txt_list= txt_list[fit_in_row:]
else:
txt_list= None
else:
#
# text fits into current row
#
fit_in_row= len(txt_list)
txt=bytearray(txt_list)
txt_list= None
#
# add it to the current line in the view
#
self.printview.add([ELEMENT_TEXT,self.pos_x,self.char_attr,
self.char_bold,self.char_underline,barrconv(txt,CHARSET_HP2225)])
self.pos_x+= fit_in_row* FONT_WIDTH[self.char_attr]
#
# if we have remaining text in txt_list then do a cr/lf
#
if txt_list is not None:
if self.lpi6:
self.printview.advance(8)
else:
self.printview.advance(6)
self.graphics_counter=0
self.pos_x=0
#
        # graphics, we can have only 2 graphics lines at a single printer row
#
elif cmd== REMOTECMD_GRAPHICS:
self.pos_x=0
# print("GUI: graphics",self.graphics_counter, item[1])
self.printview.add([ELEMENT_GRAPHICS,self.graphics_counter,self.hiRes,item[1]])
self.graphics_counter+=1
if self.graphics_counter == 2:
self.graphics_counter=0
self.printview.advance(1)
#
# terminate graphics, advance 4 dots
#
elif cmd== REMOTECMD_TERMGRAPHICS:
self.graphics_counter=0
self.printview.advance(1)
#
# log line
#
elif cmd== REMOTECMD_LOG:
self.parent.cbLogging.logWrite(item[1])
self.parent.cbLogging.logFlush()
#
# custom class for scrolled hp2225b output widget ----------------------------
#
class cls_ScrolledHp2225bView(QtWidgets.QWidget):
def __init__(self,parent,name,papersize):
super().__init__(parent)
self.parent=parent
self.name=name
#
# create window and scrollbars
#
self.hbox= QtWidgets.QHBoxLayout()
self.scrollbar= QtWidgets.QScrollBar()
self.hp2225bwidget= cls_hp2225bView(self,self.name,papersize)
self.hp2225bwidget.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.hp2225bwidget.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.hbox.addWidget(self.hp2225bwidget)
self.hbox.addWidget(self.scrollbar)
self.setLayout(self.hbox)
#
# Initialize scrollbar
#
self.scrollbar.valueChanged.connect(self.do_scrollbar)
self.scrollbar.setEnabled(True)
self.reset()
#
# scrollbar value changed action
#
def do_scrollbar(self):
self.hp2225bwidget.do_scroll(self.scrollbar.value())
#
# reset output window
#
def reset(self):
self.hp2225bwidget.reset()
self.scrollbar.setMinimum(0)
self.scrollbar.setMaximum(0)
self.scrollbar.setSingleStep(1)
#
# generate pdf output
#
def pdf(self,filename,pdf_rows):
self.hp2225bwidget.pdf(filename,pdf_rows)
#
# becomes visible/invisible: nothing to do
#
def becomes_visible(self):
return
def becomes_invisible(self):
return
#
# reconfigure
#
def reconfigure(self):
self.hp2225bwidget.reconfigure()
return
#
# add elements
#
def add(self,element):
self.hp2225bwidget.add(element)
#
# advance
#
def advance(self,n):
self.hp2225bwidget.advance(n)
#
# custom class for hp2225b output -----------------------------------------
#
class cls_hp2225bView(QtWidgets.QGraphicsView):
def __init__(self,parent,name,papersize):
super().__init__()
self.parent=parent
self.name= name
self.screenwidth= -1
self.printcolor= QtCore.Qt.black
self.w=-1
self.h=-1
self.rows= 0
self.linebuffersize= -1
self.papersize= papersize
#
# set the font and font size
self.font=QtGui.QFont(FONT)
self.font.setPixelSize(HP2225B_FONT_PIXELSIZE)
metrics=QtGui.QFontMetrics(self.font)
#
# Initialize line bitmap buffer
#
self.lb= [ ]
self.lb_current= 0
self.lb_anz=0
self.lb_position=0
self.printscene=None
self.reconfigure()
return
def reconfigure(self):
#
# re/configure the printview widget
#
tmp=BUFFER_SIZE_VALUES[PILCONFIG.get(self.name,"hp2225b_scrollupbuffersize")]
if tmp != self.linebuffersize:
self.linebuffersize=tmp
if self.printscene is not None:
self.printscene.reset()
self.lb= [None]* self.linebuffersize
self.lb_current= 0
self.lb_anz=0
self.lb_position=0
tmp=PILCONFIG.get_dual(self.name,"hp2225b_screenwidth")
if tmp != self.screenwidth:
self.screenwidth=tmp
self.w=self.screenwidth
#
# set fixed width
#
self.setFixedWidth(self.w)
#
# reconfigure scene if it exists
#
if self.printscene is not None:
self.printscene.reconfigure(self.screenwidth,self.printcolor)
self.do_resize()
#
# print color
#
tmp=HP2225_COLORS[PILCONFIG.get(self.name,"hp2225b_printcolor")]
if tmp != self.printcolor:
self.printcolor=tmp
if self.printscene is not None:
self.printscene.reconfigure(self.screenwidth,self.printcolor)
self.do_resize()
#
# initialize scene if it does not exist
#
if self.printscene is None:
self.printscene= cls_hp2225b_scene(self,self.font, self.screenwidth,self.printcolor)
self.setScene(self.printscene)
self.reset()
return
#
# reset output window
#
def reset(self):
for i in range(0,self.linebuffersize):
if self.lb[i] is not None:
self.lb[i]= None
self.lb_current= 0
self.lb_anz=0
self.lb_position=0
self.printscene.reset()
#
# resize event, adjust the scene size, reposition everything and redraw
#
def resizeEvent(self,event):
self.do_resize()
def do_resize(self):
h=self.height()
#
# compute the number of rows that will fit into the current window size
#
self.rows= floor(h /BUFFER_LINE_H /2 * PRINTER_WIDTH_HIGH /self.screenwidth)
# print("resize view dimensions ",self.screenwidth,h);
# print("resize view rows: ",self.rows)
# print("resize view: fit in view", PRINTER_WIDTH_HIGH, self.rows*2*BUFFER_LINE_H))
#
# adjust the size of the print scene
#
self.printscene.set_scenesize(self.rows)
#
# now transform the scene into the current view, force transformation
# to identity if we use a screen width of 1280
#
if self.screenwidth != PRINTER_WIDTH_HIGH:
self.fitInView(0,0,PRINTER_WIDTH_HIGH,self.rows*2*BUFFER_LINE_H)
else:
self.resetTransform()
#
# now adjust the scroll bar parameters
#
scroll_max=self.lb_current- self.rows
if scroll_max < 0:
scroll_max=0
# print("scrollbar adjustment: ", self.lb_current,scroll_max)
# print("---")
self.parent.scrollbar.setMaximum(scroll_max)
self.parent.scrollbar.setPageStep(self.rows)
self.printscene.update_scene()
return
#
# PDF output. Text length configuration is not supported at the moment
#
def pdf(self,filename,pdf_rows):
self.printer=QtPrintSupport.QPrinter (QtPrintSupport.QPrinter.HighResolution)
self.printer.setOrientation(QtPrintSupport.QPrinter.Portrait)
self.printer.setOutputFormat(QtPrintSupport.QPrinter.PdfFormat)
self.pdfscene=QtWidgets.QGraphicsScene()
#
      # page setup: we use 192 dpi dots as scene units and set the left
      # and right margins so that we get a print width of 6.7 inches.
      # The height of 60 lines is 10 inches.
      # DIN A4: 0.79 inches = 151 dots
      # Letter: 0.9 inches = 173 dots
      #
      #
      # A4 format is 8.27 inches x 11.7 inches
#
if self.papersize== PDF_FORMAT_A4:
self.printer.setPageSize(QtPrintSupport.QPrinter.A4)
lmargin= 151
tmargin= 163
scene_w= 1280 + lmargin*2
scene_h= 1920 + tmargin*2
self.pdfscene.setSceneRect(0,0,scene_w,scene_h)
else:
#
# Letter format is 8.5 inches x 11 inches
#
self.printer.setPageSize(QtPrintSupport.QPrinter.Letter)
lmargin= 173
tmargin= 96
scene_w= 1280 + lmargin*2
scene_h= 1920 + tmargin*2
self.pdfscene.setSceneRect(0,0,scene_w,scene_h)
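      #
      # geometry check (added note, not from the original): 1280 dots at
      # 192 dpi is 6.67 inches of print width; with two 151 dot margins the
      # A4 scene is 1582 dots = 8.24 inches wide, close to the 8.27 inch
      # A4 paper width
      #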
self.painter= QtGui.QPainter()
self.printer.setOutputFileName(filename)
self.painter.begin(self.printer)
pdfitems=[]
anzitems=0
delta= BUFFER_LINE_H*2
horizontal_margin=floor((480-pdf_rows)/2)*delta
rowcount=0
y=tmargin + horizontal_margin
#
# print all items to pdf
#
for i in range(0,self.lb_anz):
s= self.lb[i]
#
# we have a form feed element, issue new page
#
if s is not None:
if s[0][0]== ELEMENT_FF:
# print("FF ",rowcount, pdf_rows)
self.pdfscene.render(self.painter)
self.printer.newPage()
for l in reversed(range(anzitems)):
self.pdfscene.removeItem(pdfitems[l])
del pdfitems[-1]
anzitems=0
y=tmargin + horizontal_margin
rowcount=0
# print("reset y to ",y,tmargin,horizontal_margin)
item=cls_hp2225b_line(s,self.font,self.printcolor)
pdfitems.append(item)
self.pdfscene.addItem(item)
item.setPos(lmargin,y)
# print("pdf item added ",rowcount,y,s)
anzitems+=1
# else:
# print("none element")
rowcount+=1
y+= delta
#
# does the next line fit into the page, if not issue page break
# The character height is always 16px.
#
if rowcount > pdf_rows:
# print("page break ",rowcount, pdf_rows)
self.pdfscene.render(self.painter)
self.printer.newPage()
for l in reversed(range(anzitems)):
self.pdfscene.removeItem(pdfitems[l])
del pdfitems[-1]
anzitems=0
rowcount=0
y=tmargin + horizontal_margin
# print("reset y to ",y,tmargin,horizontal_margin)
#
# output remaining data and terminate printing
#
if anzitems > 0:
self.pdfscene.render(self.painter)
for l in reversed(range(anzitems)):
self.pdfscene.removeItem(pdfitems[l])
del pdfitems[-1]
self.painter.end()
#
#
# Mouse wheel event
#
def wheelEvent(self,event):
numDegrees= event.angleDelta()/8
delta=0
step= round (8 * self.w / PRINTER_WIDTH_HIGH)
if numDegrees.y() is not None:
if numDegrees.y() < 0:
delta=step
if numDegrees.y() > 0:
delta=-step
event.accept()
if self.lb_current < self.rows:
return
if self.lb_position+delta < 0:
delta=-self.lb_position
if self.lb_position+delta+self.rows > self.lb_current:
delta=self.lb_current-(self.lb_position + self.rows )
self.lb_position+=delta
self.parent.scrollbar.setValue(self.lb_position)
self.printscene.update_scene()
return
#
# external methods
#
# add element
#
def add(self,elem):
# print("View add element: ",self.lb_current,elem)
if self.lb[self.lb_current] is None:
self.lb[self.lb_current]= [ ]
self.lb[self.lb_current].append(elem)
self.printscene.update_scene()
return
#
# advance
#
def advance(self,n):
if self.lb_anz+n < self.linebuffersize:
self.lb_anz+=n
self.lb_current+=n
else:
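         # rotate the ring buffer left by n and blank the freed slots,
         # e.g. n=2, size=5: [a,b,c,d,e] -> [c,d,e,None,None]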
self.lb=self.lb[n:] + self.lb[:n]
for i in range (0,n):
self.lb[i-n]=None
self.lb_position= self.lb_current- (self.rows)
if self.lb_position < 0:
self.lb_position=0
# print("View advance: ",n,self.lb_current, self.lb_position)
self.parent.scrollbar.setMaximum(self.lb_position)
self.parent.scrollbar.setValue(self.lb_position)
self.printscene.update_scene()
return
#
# scroll bar action
#
def do_scroll(self,value):
self.lb_position=value
self.printscene.update_scene()
#
# pdf output
#
def do_pdf(self,filename):
return
#
# custom class for HP2225B graphics scene
#
class cls_hp2225b_scene(QtWidgets.QGraphicsScene):
def __init__(self,parent,font,screenwidth,printcolor):
super().__init__()
self.rows= 0
self.w=0
self.h=0
self.parent=parent
self.si= None
self.font=font
self.reconfigure(screenwidth,printcolor)
return
#
# re/configure graphics scene
#
def reconfigure(self,screenwidth,printcolor):
self.screenwidth=screenwidth
self.w= PRINTER_WIDTH_HIGH
self.h= BUFFER_LINE_H *2
self.printcolor=printcolor
return
#
# set or change the size of the scene
#
def set_scenesize(self,rows):
self.reset()
self.rows= rows
self.si= [None] * rows
self.setSceneRect(0,0,self.w,(self.h*(self.rows)))
# print("Scene size ",self.w,self.h*self.rows)
#
# clear window and reset
#
def reset(self):
for i in range(0,self.rows):
if self.si[i] is not None:
self.removeItem(self.si[i])
self.si[i]=None
#
# update graphics scene
#
def update_scene(self):
for i in range(0,self.rows):
if self.si[i] is not None:
self.removeItem(self.si[i])
self.si[i]=None
start= self.parent.lb_position
end= start+self.rows
if end >= self.parent.lb_anz:
end=self.parent.lb_anz
y=0
j=0
for i in range(start,end):
self.si[j]=self.parent.lb[i]
if self.parent.lb[i] is not None:
self.si[j]=cls_hp2225b_line(self.parent.lb[i], self.font,self.printcolor)
self.addItem(self.si[j])
self.si[j].setPos(0,y)
y+=self.h
j+=1
# print("Scene updated: ",start,end)
#
# custom class HP2225B print line
#
class cls_hp2225b_line(QtWidgets.QGraphicsItem):
def __init__(self,itemlist, font, color):
super().__init__()
self.itemlist= itemlist
self.font=font
self.color=color
metrics=QtGui.QFontMetrics(self.font)
self.font_height=metrics.height()
self.rect= QtCore.QRectF(0,0,PRINTER_WIDTH_HIGH,self.font_height)
# self.flags=QtCore.Qt.AlignLeft | QtCore.Qt.AlignTop | QtCore.Qt.TextDontClip
# self.flags=QtCore.Qt.AlignLeft | QtCore.Qt.AlignTop
def setPos(self,x,y):
super().setPos(x,y)
def boundingRect(self):
return self.rect
#
# paint elements
#
def paint(self,painter,option,widget):
posx=0
for item in self.itemlist:
#
# Ignore element form feed
#
if item[0]== ELEMENT_FF:
continue
#
# Paint text, align each character so that we get exactly the
# number of characters per row as the original printer
#
elif item[0]== ELEMENT_TEXT:
painter.setPen(self.color)
posx=item[1]
self.font.setBold(item[3])
self.font.setUnderline(item[4])
self.font.setStretch(FONT_STRETCH[item[2]])
posy=self.font_height-12
painter.setFont(self.font)
for c in item[5]:
painter.drawText(posx,posy,c)
# bounding_rect= QtCore.QRect(posx,0,posx+ (FONT_WIDTH[item[2]]),self.font_height)
# painter.drawText(bounding_rect, self.flags, c)
posx+= FONT_WIDTH[item[2]]
continue
#
# Paint raster graphics elements. They always begin at column 0
# We have at most two graphics row elements. The y resolution
         # is always 96 dpi and the x resolution may be either 96 or 192
# dpi according to the hiRes mode
#
elif item[0]==ELEMENT_GRAPHICS:
painter.setPen(self.color)
posy=item[1]*2
hiRes=item[2]
posx=0
for i in item[3]:
mask=0x80
if posx>=PRINTER_WIDTH_HIGH:
break
for j in range (0,8):
if hiRes:
if i & mask:
painter.fillRect(posx,posy,1,2,self.color)
posx+=1
else:
if i & mask:
painter.fillRect(posx,posy,2,2,self.color)
posx+=2
mask= mask >> 1
return
#
# custom class open pdf output file and set options
#
class cls_PdfOptions(QtWidgets.QDialog):
def __init__(self):
super().__init__()
self.filename="hp2225b.pdf"
self.setWindowTitle('HP2225B PDF output')
self.vlayout = QtWidgets.QVBoxLayout()
self.setLayout(self.vlayout)
self.glayout = QtWidgets.QGridLayout()
self.vlayout.addLayout(self.glayout)
self.glayout.addWidget(QtWidgets.QLabel("PDF Output Options"),0,0,1,3)
self.glayout.addWidget(QtWidgets.QLabel("Output file:"),1,0)
self.filename="hp2225b.pdf"
self.lfilename=QtWidgets.QLabel(self.filename)
self.glayout.addWidget(self.lfilename,1,1)
self.butchange=QtWidgets.QPushButton("Change")
self.butchange.setFixedWidth(60)
self.glayout.addWidget(self.butchange,1,2)
self.buttonBox = QtWidgets.QDialogButtonBox()
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setCenterButtons(True)
self.buttonBox.accepted.connect(self.do_ok)
self.buttonBox.button(QtWidgets.QDialogButtonBox.Ok).setEnabled(False)
self.buttonBox.rejected.connect(self.do_cancel)
self.hlayout = QtWidgets.QHBoxLayout()
self.hlayout.addWidget(self.buttonBox)
self.vlayout.addLayout(self.hlayout)
self.butchange.clicked.connect(self.change_pdffile)
def get_pdfFilename(self):
dialog=QtWidgets.QFileDialog()
dialog.setWindowTitle("Enter PDF file name")
dialog.setAcceptMode(QtWidgets.QFileDialog.AcceptSave)
dialog.setFileMode(QtWidgets.QFileDialog.AnyFile)
dialog.setDefaultSuffix("pdf")
dialog.setNameFilters( ["PDF (*.pdf )", "All Files (*)"] )
dialog.setOptions(QtWidgets.QFileDialog.DontUseNativeDialog)
if dialog.exec():
return dialog.selectedFiles()
def change_pdffile(self):
flist= self.get_pdfFilename()
if flist is None:
return
self.filename= flist [0]
self.lfilename.setText(self.filename)
self.buttonBox.button(QtWidgets.QDialogButtonBox.Ok).setEnabled(True)
def do_ok(self):
super().accept()
def do_cancel(self):
super().reject()
@staticmethod
def getPdfOptions():
dialog= cls_PdfOptions()
result= dialog.exec()
if result== QtWidgets.QDialog.Accepted:
return dialog.lfilename.text()
else:
return ""
#
# HP2225B emulator (thread component) --------------------------------------
#
class cls_hp2225b(QtCore.QObject):
BUF_EMPTY=0
BUF_TEXT=1
BUF_GRAPHICS=2
def __init__(self,parent,guiobject):
super().__init__()
self.pildevice=parent
self.guiobject= guiobject
self.esc= False # escape mode
self.esc_seq="" # escape sequence
self.esc_prefix="" # prefix of combined esc sequences
self.num_graphics=-1 # number of graphics bytes
self.ignore_crlf=False # flag to ignore cr/lf between graphics chunks
self.apgot=False # flag avoid printing graphics over text
#
# printer status that controls the appearance of the printer output and
# is therefore handled in the GUI component
#
      self.text_length= 60        # text length given in lines
self.char_attr=0 # character pitch
self.char_bold=False # bold mode
self.char_underline=False # underline mode
self.hiRes=False # high resolution of graphics output
self.lpi6=False # lines/inch
self.pdf_rows=480 # number of rows for pdf output
self.wrapEOL=False # EOL wrap
self.empty_line=False # detect empty text lines, this disables
# ignoring cr/lf.
#
# printer status which is handled here
#
self.ltermMode=0 # line termination mode
self.altMode= False # alternate control mode
self.displayFunctions=False # display functions mode
#
# buffer for accumulated text and graphics data b e t w e e n control
# characters or escape sequences
#
self.buf_status=self.BUF_EMPTY # buffer status
self.buf_data= [ ]
self.log_line=""
self.reset()
#
#
def reset(self):
#
# reset variables to default
#
self.esc= False
self.esc_seq=""
self.esc_prefix=""
self.num_graphics=-1
self.ignore_crlf=False
self.apgot=False
self.text_length=60
self.pdf_rows=480
self.char_attr=0
self.char_bold= False
self.char_underline=False
self.ltermMode=0
self.hiRes=False
self.lpi6=True
self.wrapEOL=False
self.displayFunctions=False
self.log_line=""
self.empty_line=False
self.buf_clear()
#
# send clear command to GUI
#
self.guiobject.put_cmd([REMOTECMD_CLEAR])
self.put_status()
return
#
# clear data buffer
#
def buf_clear(self):
self.buf_status= self.BUF_EMPTY
self.buf_data= [ ]
#
# flush buffer, send data to printer
#
def buf_flush(self):
if self.buf_status == self.BUF_EMPTY:
return
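      # hand the GUI thread its own deep copy of the buffer contents so
      # that later buffer changes cannot affect the queued command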
data_copy= copy.deepcopy(self.buf_data)
if self.buf_status == self.BUF_TEXT:
self.guiobject.put_cmd([REMOTECMD_TEXT,data_copy])
# print("put cmd text",data_copy)
else:
self.guiobject.put_cmd([REMOTECMD_GRAPHICS,data_copy])
# print("put cmd graphics",data_copy)
self.buf_clear()
#
# send status
#
def put_status(self):
#
# if we have data in the buffer, clear it first
#
self.buf_flush()
#
# send printer status to GUI
#
self.guiobject.put_cmd([REMOTECMD_STATUS,[self.pdf_rows,self.char_attr,self.char_bold,self.char_underline,self.hiRes,self.lpi6,self.wrapEOL]])
#
# set/clear alternate control mode (has no effect)
#
def setAltMode(self,mode):
self.altMode= mode
if self.altMode:
print("hp2225: entering ESC/P command mode. This mode is not supported and the emulator will ignore all data.")
else:
print("hp2225: returning to PCL command mode.")
return
#
   # process escape sequences, HP command set
#
def process_esc_hp(self):
#
# normal width
#
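      # (note: the low two bits of char_attr encode the pitch selected by
      # the ESC &k#S sequences below: 0=normal, 1=expanded, 2=compressed,
      # 3=expanded-compressed)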
if self.esc_seq=="&k0S" or self.esc_seq=="&kS":
self.char_attr= (self.char_attr & 0x0C)
self.put_status()
return
#
# expanded width
#
elif self.esc_seq=="&k1S":
self.char_attr= (self.char_attr & 0x0C) | 0x01
self.put_status()
return
#
# compressed width
#
elif self.esc_seq=="&k2S":
self.char_attr= (self.char_attr & 0x0C) | 0x02
self.put_status()
return
#
# expanded-compressed width
#
elif self.esc_seq=="&k3S":
self.char_attr= (self.char_attr & 0x0C) | 0x03
self.put_status()
return
#
# bold mode on
#
elif self.esc_seq=="(s1B":
self.char_bold= True
self.put_status()
return
#
# bold mode off
#
elif self.esc_seq=="(s0B":
self.char_bold=False
self.put_status()
return
#
# underline mode on
#
elif self.esc_seq=="&dD":
self.char_underline=True
self.put_status()
return
#
# underline mode off
#
elif self.esc_seq=="&d@":
self.char_underline=False
self.put_status()
return
#
# 6 lines/inch
#
elif self.esc_seq=="&l6D":
self.lpi6=True
self.put_status()
return
#
# 8 lines/inch
#
elif self.esc_seq=="&l8D":
self.lpi6=False
self.put_status()
return
#
# perforation skip on (has no effect)
#
elif self.esc_seq=="&l1L":
return
#
# perforation skip off (has no effect)
#
elif self.esc_seq=="&l0L":
return
#
# EOL wrap on
#
elif self.esc_seq=="&s0C":
self.wrapEOL=True
self.put_status()
return
#
# EOL wrap off
#
elif self.esc_seq=="&s1C":
self.wrapEOL=False
self.put_status()
return
#
# display functions on
#
elif self.esc_seq=="Y":
self.displayFunctions=True
return
#
# display functions off
#
elif self.esc_seq=="Z":
self.displayFunctions=False
return
#
# unidirectional printing (has no effect)
#
elif self.esc_seq=="&k0W":
return
#
   # bidirectional printing (has no effect)
#
elif self.esc_seq=="&k1W":
return
#
# half line feed
#
elif self.esc_seq=="=":
self.half_lf()
return
#
# avoid printing graphics over text
#
elif self.esc_seq=="*rA":
self.apgot= True
return
#
# terminate graphics
#
elif self.esc_seq=="*rB":
self.ignore_crlf= False
self.guiobject.put_cmd([REMOTECMD_TERMGRAPHICS])
return
#
# line termination mode 0
#
elif self.esc_seq=="&k0G":
self.ltermMode=0
return
#
# line termination mode 1
#
elif self.esc_seq=="&k1G":
self.ltermMode=1
return
#
# line termination mode 2
#
elif self.esc_seq=="&k2G":
self.ltermMode=2
return
#
# line termination mode 3
#
elif self.esc_seq=="&k3G":
self.ltermMode=3
return
#
# self test (has no effect)
#
elif self.esc_seq=="z":
return
#
# reset printer
#
elif self.esc_seq=="E":
self.reset()
return
#
# Graphics dot row
#
elif self.esc_seq.startswith("*b") and self.esc_seq.endswith("W"):
ret=re.findall(r"\d+",self.esc_seq)
if ret== []:
return
try:
n=int(ret[0])
except ValueError:
return
if n<0 or n> 255:
return
self.num_graphics=n
self.begin_graphics()
return
#
# graphics resolution
#
elif self.esc_seq.startswith("*r") and self.esc_seq.endswith("S"):
ret=re.findall(r"\d+",self.esc_seq)
if ret== []:
return
try:
n=int(ret[0])
except ValueError:
return
if n<=640:
self.hiRes=False
else:
self.hiRes=True
self.put_status()
return
#
# page length (has no effect)
#
elif self.esc_seq.startswith("&l") and self.esc_seq.endswith("P"):
return
#
# Text length
#
elif self.esc_seq.startswith("&l") and self.esc_seq.endswith("F"):
ret=re.findall(r"\d+",self.esc_seq)
if ret== []:
return
try:
n=int(ret[0])
except ValueError:
return
self.text_length=n
#
      # the text length cannot exceed 80 lines at 8 lpi or 60 lines at
      # 6 lpi. The maximum print area is limited to 10 inches in this
      # emulator. We now compute the number of rows the new text length
      # will occupy in the pdf file
#
if self.text_length < 1:
self.text_length=1
if self.lpi6:
if self.text_length> 60:
self.text_length=60
self.pdf_rows= self.text_length* 8
else:
if self.text_length> 80:
self.text_length=80
self.pdf_rows= self.text_length * 6
self.pdf_rows-=1
self.put_status()
return
else:
print("hp2225: illegal escape sequence ignored: ", self.esc_seq)
return
#
# begin graphics
#
def begin_graphics(self):
#
# flush any pending text and go to BOL
#
# print("begin new graphics ",self.num_graphics)
if self.buf_status== self.BUF_TEXT:
self.cr()
#
# if the avoid printing graphics over text command was issued do a linefeed
#
if self.apgot:
self.lf()
self.apgot= False
self.ignore_crlf= True
self.buf_status= self.BUF_GRAPHICS
self.empty_line= False
return
#
# printer data processor HP command set
#
def process_char_hp(self,ch):
#
# if there are graphics data, append it to buffer and return
#
if self.num_graphics > 0:
self.num_graphics-=1
self.buf_data.append(ch)
return
#
# last byte of graphics received, flush buffer and proceed
#
if self.num_graphics==0:
self.buf_flush()
# print("graphics flushed ", self.buf_status)
self.num_graphics= -1
#
# process ESC sequences
#
if (self.esc== False) and (ch== 0x1B):
self.esc= True
self.esc_seq=""
self.esc_prefix=""
self.empty_line=False
return
if self.esc:
#
# ESC | or escape sequence terminated with capital letter
#
# if ch == 0x7c or (ch >= 0x41 and ch <= 0x5A):
self.empty_line=False
if chr(ch) in "SBD@LPFCYZW=AGzE":
self.esc_seq+= chr(ch)
if self.esc_prefix!="":
self.esc_seq= self.esc_prefix+self.esc_seq
self.process_esc_hp()
self.esc= False
self.esc_seq=""
self.esc_prefix=""
return
#
# repeated escape sequence terminated with lowercase letter
#
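         # e.g. ESC &k1s2S is split into "&k1S" and "&k2S": the "&k"
         # prefix is saved once and prepended to every following part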
if chr(ch) in "sdlpfcwabg" and len(self.esc_seq)>2:
if self.esc_prefix == "":
self.esc_prefix= self.esc_seq[:2]
self.esc_seq= self.esc_seq[2:]
self.esc_seq= self.esc_prefix+self.esc_seq+chr(ch).upper()
self.process_esc_hp()
self.esc_seq=""
return
#
# still in escape sequence, accumulate characters
#
self.esc_seq+= chr(ch)
return
#
# not in escape sequence, process control and data characters
#
# Backspace:
#
if (ch == 0x08):
if self.ignore_crlf:
return
self.buf_flush()
self.guiobject.put_cmd([REMOTECMD_BS])
#
# CR
#
elif (ch == 0x0D):
if self.ignore_crlf:
return
self.cr()
if self.ltermMode & 0x01:
self.lf()
#
# LF
#
elif (ch == 0x0A):
if self.empty_line:
if self.ignore_crlf:
self.ignore_crlf= False
self.empty_line= True
if self.ignore_crlf:
return
if self.ltermMode & 0x02:
self.cr()
self.lf()
#
# FF
#
elif (ch == 0x0C):
if self.ltermMode & 0x02 or self.ltermMode & 0x03:
self.cr()
self.ff()
#
# bold mode on ^N
#
elif (ch == 0x0E):
self.empty_line= False
self.char_bold=True
self.put_status()
#
# bold mode off ^O
#
elif (ch == 0x0F):
self.empty_line= False
self.char_bold=False
self.put_status()
#
# normal character
#
else:
self.empty_line= False
self.ignore_crlf=False
if ((ch >=0x20 and ch < 127) or (ch > 159 and ch< 255)):
self.buf_status= self.BUF_TEXT
assert self.buf_status== self.BUF_EMPTY or self.buf_status== self.BUF_TEXT
self.buf_data.append(ch)
self.log_line+= charconv(chr(ch), CHARSET_HP2225)
#
# process printer data for display functions mode
#
def process_char_df(self,ch):
if self.esc:
self.esc= False
if ch== 0x5A: # ESC Z terminates display functions mode
self.displayFunctions= False
if ch == 0x1B:
self.esc=True
self.buf_status= self.BUF_TEXT
self.buf_data.append(ch)
self.log_line+= charconv(chr(ch), CHARSET_HP2225)
if (ch == 0x0A):
self.cr()
self.lf()
return
#
# process printer data for the alternate control mode (not implemented!)
#
def process_char_alt(self,ch):
return
#
# process printer data
#
def process_char(self,ch):
if self.altMode:
self.process_char_alt(ch)
else:
if self.displayFunctions:
self.process_char_df(ch)
else:
self.process_char_hp(ch)
return
#
# line positioning: cr, lf, ff
#
def cr(self):
self.buf_flush()
self.guiobject.put_cmd([REMOTECMD_CR])
return
def lf(self):
self.buf_flush()
self.guiobject.put_cmd([REMOTECMD_LF])
if self.log_line != "":
self.log_line+="\n"
self.guiobject.put_cmd([REMOTECMD_LOG,self.log_line])
self.log_line=""
return
def half_lf(self):
self.buf_flush()
self.guiobject.put_cmd([REMOTECMD_HLF])
return
def ff(self):
self.buf_flush()
self.guiobject.put_cmd([REMOTECMD_FF])
if self.log_line != "":
self.log_line+="\n"
self.guiobject.put_cmd([REMOTECMD_LOG,self.log_line])
self.log_line=""
return
#
# process commands sent from the GUI
#
def process(self,command):
if command== CMD_CLEAR:
self.reset()
return
#
# HP-IL virtual HP2225B object class ---------------------------------------
#
class cls_pilhp2225b(cls_pildevbase):
def __init__(self,guiobject):
super().__init__()
#
# overloaded variable initialization
#
self.__aid__ = 0x23 # accessory id
      self.__defaddr__ = 5         # default address after AAU
self.__did__ = "HP2225B" # device id
self.__status_len__=2 # device status is 2 bytes
#
# private vars
#
self.__ddlcmd__=False # ddl command was sent
#
# object specific variables
#
self.__guiobject__= guiobject
#
# initialize remote command queue and lock
#
self.__print_queue__= queue.Queue()
self.__print_queue_lock__= threading.Lock()
#
# printer processor
#
self.__printer__=cls_hp2225b(self,self.__guiobject__)
#
# the printer status is a l w a y s:
# - first byte : 0xA1 (everything is fine, no service request)
# - second byte: 0x04 (buffer empty)
#
self.set_status(0x04A1)
#
# public (overloaded) --------
#
# enable: reset
#
def enable(self):
self.__ddlcmd__= False
self.__printer__.reset()
return
#
# disable: clear the printer command queue
#
def disable(self):
self.__print_queue_lock__.acquire()
while True:
try:
self.__print_queue__.get_nowait()
self.__print_queue__.task_done()
except queue.Empty:
break
self.__print_queue_lock__.release()
#
# process frames
#
def process(self,frame):
if self.__isactive__:
self.process_print_queue()
frame= super().process(frame)
return frame
#
# process the printer command queue
#
def process_print_queue(self):
items=[]
self.__print_queue_lock__.acquire()
while True:
try:
i=self.__print_queue__.get_nowait()
items.append(i)
self.__print_queue__.task_done()
except queue.Empty:
break
self.__print_queue_lock__.release()
if len(items):
for c in items:
self.__printer__.process(c)
return
#
# put command into the print-command queue
#
def put_cmd(self,item):
self.__print_queue_lock__.acquire()
self.__print_queue__.put(item)
self.__print_queue_lock__.release()
#
# set status (2 byte status information)
#
def set_status(self,s):
self.__setstatus__(s)
return s
#
# get status byte (2 byte status information)
#
def get_status(self):
s=self.__getstatus__()
return s
#
# private (overloaded) --------
#
#
# output character to HP2225B
#
def __indata__(self,frame):
n= frame & 0xFF
#
if self.__ddlcmd__:
if n== 18:
self.__printer__.setAltMode(True)
if n==0:
self.__printer__.setAltMode(False)
self.__ddlcmd__= False
else:
self.__printer__.process_char(n)
#
# ddl command implementation
#
def __cmd_ext__(self,frame):
n= frame & 0xFF
t= n>>5
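      # the top three bits of the low byte select the command group
      # (0b101 = DDL, Device Dependent Listener); the low five bits
      # carry the command argument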
if t==5: # DDL
n=n & 31
if (self.__ilstate__ & 0xC0) == 0x80: # are we listener?
self.__devl__= n & 0xFF
if n== 6:
self.__ddlcmd__= True
return(frame)
#
# clear device: reset printer
#
def __clear_device__(self):
super().__clear_device__()
#
# clear printer queue
#
self.__print_queue_lock__.acquire()
while True:
try:
self.__print_queue__.get_nowait()
self.__print_queue__.task_done()
except queue.Empty:
break
self.__print_queue_lock__.release()
#
# reset device
#
self.__printer__.reset()
|
bug400/pyilper
|
pyilper/pilhp2225b.py
|
Python
|
gpl-2.0
| 53,988
|
#-----------------------------------------------------------------------------------
#Copyright (c) 2015 Lab Sensor Solutions, Inc.
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
#-----------------------------------------------------------------------------------
#
# Author: D.Paley
# Date: 4/3/2015
# Release: ALPHA
#
#-----------------------------------------------------------------------------------
#
from GAP_Vendor_Events import *
from HCI_EXT_Events import *
import os
import sys
import time
from threading import Thread
class Return_Handler(Thread):
Device = None
GVEvents = GAP_Vendor_Events()
HCIEvents = HCI_EXT_Events()
def __init__(self, BTDongle=None):
''' input the BT USB Dongle connection
'''
Thread.__init__(self)
self.Device = BTDongle
print "Initialize Return Handler"
self.PacketOptions = {0x01: self.CommandPacket,
0x02: self.AsyncPacket,
0x03: self.SyncPacket,
0x04: self.EventPacket,
}
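        # these indicator bytes follow the standard Bluetooth HCI UART
        # (H4) transport: 0x01 Command, 0x02 ACL data, 0x03 SCO data,
        # 0x04 Event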
self.EventCode = {0xFF: 'HCI_LE_ExtEvent',
}
def CommandPacket(self):
print "Found Command Packet"
def AsyncPacket(self):
print "Found Async Packet"
def SyncPacket(self):
print "Found Sync Packet"
def EventPacket(self):
print "- Type\t\t : 0x04 (Event)"
        # - should be the start of a new received command/event - let's check
event_code = ord(self.Device.read(size=1))
print "- EventCode\t : 0x%x (HCI_LE_ExtEvent)" % (event_code)
if event_code in self.EventCode:
# this is the size of incoming packet (remaining)
Data_Length = ord(self.Device.read(size=1))
print "- Data Length \t : 0x%.2x (%d) bytes" % (Data_Length, Data_Length)
event = ord(self.Device.read(size=1))
event |= ord(self.Device.read(size=1)) << 8
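            # the 16-bit event opcode arrives little-endian: e.g. the
            # bytes 0x7F 0x06 decode to event 0x067F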
# print "Event is ",hex(event)
packet = []
            for i in range(Data_Length - 2):  # the two event-code bytes were already read
packet.append(ord(self.Device.read(size=1)))
# print "Packet size is %d"%len(packet)
if (self.GVEvents.Event_Lookup(event)):
# we have a GAP Vendor Event - pass to handle
print "- Event \t : %s (%s)" % (hex(event), self.GVEvents.Event_Lookup(event))
self.GVEvents.Decode_Packet(event, packet)
elif self.HCIEvents.Event_Lookup(event):
# we have a HCI Event - pass to handle
print "- Event \t : %x : (%s)" % (hex(event), self.HCIEvents.Event_Lookup(event))
self.HCIEvents.Decode_Packet(event, packet)
else:
# Not sure what we have - error condition
print '*** ERROR *** - received unknown Event ' + str(event_code)
print " - " + str(packet)
# print "Packet length remaining %d"%len(packet)
def run(self):
while(self.Device.isOpen()): # Read a new packet
if (self.Device.inWaiting() > 0):
print "\n================================================="
a = ord(self.Device.read(size=1))
                if 1 <= a <= 4:  # only packet types 0x01-0x04 have handlers
self.PacketOptions[a]()
time.sleep(1)
|
DanielPaley/TI-CC254x-USB
|
Return_Handler.py
|
Python
|
gpl-2.0
| 4,412
|
#!/usr/bin/env python
import threading
import rospy
import Queue
from panels import ActionModulationTest
from Tkinter import Tk
from theatrebot_action_modulation.msg import ActionDesireMessage
from std_msgs.msg import String
from ttk import Frame
class interface_test_action_modulation():
def __init__(self):
info_queue = Queue.Queue()
data_queue = Queue.Queue()
rospy.init_node('interface_test_action_modulation', anonymous=True)
pubNewAction = rospy.Publisher('new_action',ActionDesireMessage,queue_size=1)
pubNewEmotion = rospy.Publisher('new_emotion',String,queue_size=1)
app = ActionModulationTest.ActionModulationTest(info_queue,data_queue)
app.start()
rospy.spin()
app.join()
    def pub_information(self):
print 'send info'
if __name__ == '__main__':
try:
interface = interface_test_action_modulation()
except rospy.ROSInterruptException: pass
|
julianangel/EmotionBot
|
ROS/test_action_modulation/src/ActionTest.py
|
Python
|
gpl-2.0
| 969
|
import textwrap
from skoolkittest import SkoolKitTestCase
from skoolkit.ctlparser import CtlParser
CTL = """; Test control file for parse_ctl
@ 30000 start
b $7530 Data at 30000
N 30000 Block start comment
T 30002,10 Message in the data block
N 30012 Mid-block comment
M 30012,15 This comment covers the following two sub-blocks
W 30012,8
C 30020,7
30050,5,3:c2 Complex DEFB with a blank directive
# This is a control file comment
c 30100 Routine at 30100
D 30100 Description of routine at 30100
R 30100 A Some value
R 30100 BC Some other value
@ $7595 label=LOOP
E 30100 First paragraph of the end comment for the routine at 30100
E 30100 Second paragraph of the end comment for the routine at 30100
% This is another control file comment
g 30200 Game status buffer entry at 30200
30200,10,1 Blank directive in a 'g' block
i 30300 Ignored block at 30300
t 30400 Message at 30400
30450,7,4:n3 Complex DEFM with a blank directive
u 30500 Unused block at 30500
30500,2 Blank directive in a 'u' block
B 30502,3
B 30510,12,3
B 30530,20,2*7,1*3,3
B 30560,21,6,5,4,3,2,1
; This is yet another control file comment
w 30600 Words at 30600
S 30620,7
s 30700 Zeroes at 30700
B 30720,10,1,c3:2,1:c1*2
N 30730 Another mid-block comment
T 30730,15,10:n5"""
class CtlParserTest(SkoolKitTestCase):
def _get_ctl_parser(self, ctl, min_address=0, max_address=65536):
ctl_parser = CtlParser()
ctls = [ctl] if isinstance(ctl, str) else ctl
ctlfiles = [self.write_text_file(textwrap.dedent(s).strip()) for s in ctls]
ctl_parser.parse_ctls(ctlfiles, min_address, max_address)
return ctl_parser
def _check_blocks(self, blocks):
addresses = [b.start for b in blocks]
self.assertEqual(sorted(addresses), addresses)
def _check_ctls(self, exp_ctls, blocks):
ctls = {b.start: b.ctl for b in blocks}
self.assertEqual(exp_ctls, ctls)
def _check_headers(self, exp_headers, blocks):
headers = {b.start: b.header for b in blocks}
self.assertEqual(exp_headers, headers)
def _check_footers(self, exp_footers, blocks):
footers = {b.start: b.footer for b in blocks}
self.assertEqual(exp_footers, footers)
def _check_entry_asm_directives(self, exp_entry_asm_directives, blocks):
entry_asm_directives = {b.start: b.asm_directives for b in blocks}
self.assertEqual(exp_entry_asm_directives, entry_asm_directives)
def _check_asm_data_directives(self, exp_asm_data_directives, blocks):
asm_data_directives = {b.start: b.asm_data_directives for b in blocks}
self.assertEqual(exp_asm_data_directives, asm_data_directives)
def _check_titles(self, exp_titles, blocks):
titles = {b.start: b.title for b in blocks}
self.assertEqual(exp_titles, titles)
def _check_descriptions(self, exp_descriptions, blocks):
descriptions = {b.start: b.description for b in blocks}
self.assertEqual(exp_descriptions, descriptions)
def _check_registers(self, exp_registers, blocks):
registers = {b.start: b.registers for b in blocks}
self.assertEqual(exp_registers, registers)
def _check_end_comments(self, exp_end_comments, blocks):
end_comments = {b.start: b.end_comment for b in blocks}
self.assertEqual(exp_end_comments, end_comments)
def _check_subctls(self, exp_subctls, blocks):
subctls = {s.start: s.ctl for b in blocks for s in b.blocks}
self.assertEqual(exp_subctls, subctls)
def _check_mid_block_comments(self, exp_mid_block_comments, blocks):
mid_block_comments = {s.start: s.header for b in blocks for s in b.blocks}
self.assertEqual(exp_mid_block_comments, mid_block_comments)
def _check_instruction_comments(self, exp_instruction_comments, blocks):
instruction_comments = {s.start: s.comment for b in blocks for s in b.blocks}
self.assertEqual(exp_instruction_comments, instruction_comments)
def _check_sublengths(self, exp_sublengths, blocks):
sublengths = {s.start: s.sublengths for b in blocks for s in b.blocks}
self.assertEqual(exp_sublengths, sublengths)
def _check_multiline_comments(self, exp_multiline_comments, blocks):
multiline_comments = {s.start: s.multiline_comment for b in blocks for s in b.blocks}
self.assertEqual(exp_multiline_comments, multiline_comments)
def _check_instruction_asm_directives(self, exp_directives, blocks):
directives = {}
for b in blocks:
for s in b.blocks:
directives.update(s.asm_directives)
self.assertEqual(exp_directives, directives)
def _check_ignoreua_directives(self, exp_entry_directives, exp_other_directives, blocks):
entry_directives = {}
other_directives = {}
for b in blocks:
entry_directives[b.start] = b.ignoreua_directives
for s in b.blocks:
for address, dirs in s.ignoreua_directives.items():
other_directives[address] = dirs
self.assertEqual(exp_entry_directives, entry_directives)
self.assertEqual(exp_other_directives, other_directives)
def _test_asm_directives(self, ctl, exp_entry_directives, exp_instruction_directives):
blocks = self._get_ctl_parser(ctl).get_blocks()
self._check_entry_asm_directives(exp_entry_directives, blocks)
self._check_instruction_asm_directives(exp_instruction_directives, blocks)
def test_predefined_ctls_acquire_start_and_org_directives(self):
ctl_parser = CtlParser({16384: 'c', 32768: 'i'})
exp_entry_asm_directives = {16384: ['start', 'org']}
self._check_entry_asm_directives(exp_entry_asm_directives, ctl_parser.get_blocks())
def test_parse_ctl(self):
ctl_parser = self._get_ctl_parser(CTL)
blocks = ctl_parser.get_blocks()
self._check_blocks(blocks)
exp_ctls = {
30000: 'b',
30100: 'c',
30200: 'g',
30300: 'i',
30400: 't',
30500: 'u',
30600: 'w',
30700: 's'
}
self._check_ctls(exp_ctls, blocks)
exp_subctls = {
30000: 'b',
30002: 't',
30012: 'w',
30020: 'c',
30027: 'b',
30050: 'b',
30055: 'b',
30100: 'c',
30200: 'b',
30210: 'g',
30300: 'i',
30400: 't',
30450: 't',
30457: 't',
30500: 'b',
30502: 'b',
30505: 'u',
30510: 'b',
30522: 'u',
30530: 'b',
30532: 'b',
30534: 'b',
30536: 'b',
30538: 'b',
30540: 'b',
30542: 'b',
30544: 'b',
30545: 'b',
30546: 'b',
30547: 'b',
30550: 'u',
30560: 'b',
30566: 'b',
30571: 'b',
30575: 'b',
30578: 'b',
30580: 'b',
30581: 'u',
30600: 'w',
30620: 's',
30627: 'w',
30700: 's',
30720: 'b',
30721: 'b',
30726: 'b',
30728: 'b',
30730: 't',
30745: 's'
}
self._check_subctls(exp_subctls, blocks)
exp_mid_block_comments = {
30000: [['Block start comment']],
30002: (),
30012: [['Mid-block comment']],
30020: (),
30027: (),
30050: (),
30055: (),
30100: (),
30200: (),
30210: (),
30300: (),
30400: (),
30450: (),
30457: (),
30500: (),
30502: (),
30505: (),
30510: (),
30522: (),
30530: (),
30532: (),
30534: (),
30536: (),
30538: (),
30540: (),
30542: (),
30544: (),
30545: (),
30546: (),
30547: (),
30550: (),
30560: (),
30566: (),
30571: (),
30575: (),
30578: (),
30580: (),
30581: (),
30600: (),
30620: (),
30627: (),
30700: (),
30720: (),
30721: (),
30726: (),
30728: (),
30730: [['Another mid-block comment']],
30745: ()
}
self._check_mid_block_comments(exp_mid_block_comments, blocks)
exp_titles = {
30000: ['Data at 30000'],
30100: ['Routine at 30100'],
30200: ['Game status buffer entry at 30200'],
30300: ['Ignored block at 30300'],
30400: ['Message at 30400'],
30500: ['Unused block at 30500'],
30600: ['Words at 30600'],
30700: ['Zeroes at 30700']
}
self._check_titles(exp_titles, blocks)
exp_instruction_comments = {
30000: (),
30002: [(0, 'Message in the data block')],
30012: [(0, '')],
30020: [(0, '')],
30027: (),
30050: [(0, 'Complex DEFB with a blank directive')],
30055: (),
30100: (),
30200: [(0, "Blank directive in a 'g' block")],
30210: (),
30300: (),
30400: (),
30450: [(0, 'Complex DEFM with a blank directive')],
30457: (),
30500: [(0, "Blank directive in a 'u' block")],
30502: [(0, '')],
30505: (),
30510: [(0, '')],
30522: (),
30530: [(0, '')],
30532: (),
30534: (),
30536: (),
30538: (),
30540: (),
30542: (),
30544: (),
30545: (),
30546: (),
30547: (),
30550: (),
30560: [(0, '')],
30566: (),
30571: (),
30575: (),
30578: (),
30580: (),
30581: (),
30600: (),
30620: [(0, '')],
30627: (),
30700: (),
30720: [(0, '')],
30721: (),
30726: (),
30728: (),
30730: [(0, '')],
30745: ()
}
self._check_instruction_comments(exp_instruction_comments, blocks)
exp_descriptions = {
30000: (),
30100: [['Description of routine at 30100']],
30200: (),
30300: (),
30400: (),
30500: (),
30600: (),
30700: ()
}
self._check_descriptions(exp_descriptions, blocks)
exp_registers = {
30000: (),
30100: [['A Some value'], ['BC Some other value']],
30200: (),
30300: (),
30400: (),
30500: (),
30600: (),
30700: ()
}
self._check_registers(exp_registers, blocks)
exp_end_comments = {
30000: (),
30100: [
['First paragraph of the end comment for the routine at 30100'],
['Second paragraph of the end comment for the routine at 30100']
],
30200: (),
30300: (),
30400: (),
30500: (),
30600: (),
30700: ()
}
self._check_end_comments(exp_end_comments, blocks)
exp_sublengths = {
30000: ((0, 'n'),),
30002: ((0, 'c'),),
30012: ((0, 'n'),),
30020: ((0, 'n'),),
30027: ((0, 'n'),),
30050: ((3, 'n'), (2, 'c')),
30055: ((0, 'n'),),
30100: ((0, 'n'),),
30200: ((1, 'n'),),
30210: ((0, 'n'),),
30300: ((0, 'n'),),
30400: ((0, 'c'),),
30450: ((4, 'c'), (3, 'n')),
30457: ((0, 'c'),),
30500: ((0, 'n'),),
30502: ((0, 'n'),),
30505: ((0, 'n'),),
30510: ((3, 'n'),),
30522: ((0, 'n'),),
30530: ((2, 'n'),),
30532: ((2, 'n'),),
30534: ((2, 'n'),),
30536: ((2, 'n'),),
30538: ((2, 'n'),),
30540: ((2, 'n'),),
30542: ((2, 'n'),),
30544: ((1, 'n'),),
30545: ((1, 'n'),),
30546: ((1, 'n'),),
30547: ((3, 'n'),),
30550: ((0, 'n'),),
30560: ((6, 'n'),),
30566: ((5, 'n'),),
30571: ((4, 'n'),),
30575: ((3, 'n'),),
30578: ((2, 'n'),),
30580: ((1, 'n'),),
30581: ((0, 'n'),),
30600: ((0, 'n'),),
30620: ((0, 'n'),),
30627: ((0, 'n'),),
30700: ((0, 'n'),),
30720: ((1, 'n'),),
30721: ((3, 'c'), (2, 'n')),
30726: ((1, 'n'), (1, 'c')),
30728: ((1, 'n'), (1, 'c')),
30730: ((10, 'c'), (5, 'n')),
30745: ((0, 'n'),)
}
self._check_sublengths(exp_sublengths, blocks)
exp_multiline_comments = {
30000: None,
30002: None,
30012: (30027, [(0, 'This comment covers the following two sub-blocks')]),
30020: None,
30027: None,
30050: None,
30055: None,
30100: None,
30200: None,
30210: None,
30300: None,
30400: None,
30450: None,
30457: None,
30500: None,
30502: None,
30505: None,
30510: None,
30522: None,
30530: (30550, [(0, '')]),
30532: None,
30534: None,
30536: None,
30538: None,
30540: None,
30542: None,
30544: None,
30545: None,
30546: None,
30547: None,
30550: None,
30560: (30581, [(0, '')]),
30566: None,
30571: None,
30575: None,
30578: None,
30580: None,
30581: None,
30600: None,
30620: None,
30627: None,
30700: None,
30720: (30730, [(0, '')]),
30721: None,
30726: None,
30728: None,
30730: None,
30745: None
}
self._check_multiline_comments(exp_multiline_comments, blocks)
exp_entry_asm_directives = {
30000: ['start'],
30100: [],
30200: [],
30300: [],
30400: [],
30500: [],
30600: [],
30700: []
}
self._check_entry_asm_directives(exp_entry_asm_directives, blocks)
exp_instruction_asm_directives = {
30101: ['label=LOOP']
}
self._check_instruction_asm_directives(exp_instruction_asm_directives, blocks)
def test_two_ctl_files(self):
ctl1 = """
b 30000
c 30010
"""
ctl2 = """
g 30020
w 30022
"""
blocks = self._get_ctl_parser((ctl1, ctl2)).get_blocks()
exp_ctls = {
30000: 'b',
30010: 'c',
30020: 'g',
30022: 'w'
}
self._check_ctls(exp_ctls, blocks)
def test_blank_directive_out_of_order(self):
ctl = """
c 65534
b 65535
65534,1 This is a C directive
"""
blocks = self._get_ctl_parser(ctl).get_blocks()
exp_subctls = {
65534: 'c',
65535: 'b'
}
self._check_subctls(exp_subctls, blocks)
def test_blank_directive_with_no_containing_block(self):
ctl = """
30000
b 30001
"""
ctl_parser = CtlParser()
ctlfile = self.write_text_file(textwrap.dedent(ctl))
ctl_parser.parse_ctls([ctlfile])
warnings = self.err.getvalue().split('\n')[0:-1:2]
exp_warnings = ['WARNING: Ignoring line 1 in {} (blank directive with no containing block):'.format(ctlfile)]
self.assertEqual(exp_warnings, warnings)
exp_subctls = {30001: 'b'}
self._check_subctls(exp_subctls, ctl_parser.get_blocks())
def test_parse_ctl_with_min_address(self):
ctl_parser = self._get_ctl_parser(CTL, 30700)
blocks = ctl_parser.get_blocks()
exp_ctls = {30700: 's'}
self._check_ctls(exp_ctls, blocks)
exp_subctls = {
30700: 's',
30720: 'b',
30721: 'b',
30726: 'b',
30728: 'b',
30730: 't',
30745: 's'
}
self._check_subctls(exp_subctls, blocks)
exp_mid_block_comments = {
30700: (),
30720: (),
30721: (),
30726: (),
30728: (),
30730: [['Another mid-block comment']],
30745: ()
}
self._check_mid_block_comments(exp_mid_block_comments, blocks)
exp_titles = {30700: ['Zeroes at 30700']}
self._check_titles(exp_titles, blocks)
exp_instruction_comments = {
30700: (),
30720: [(0, '')],
30721: (),
30726: (),
30728: (),
30730: [(0, '')],
30745: ()
}
self._check_instruction_comments(exp_instruction_comments, blocks)
exp_descriptions = {30700: ()}
self._check_descriptions(exp_descriptions, blocks)
exp_registers = {30700: ()}
self._check_registers(exp_registers, blocks)
exp_end_comments = {30700: ()}
self._check_end_comments(exp_end_comments, blocks)
exp_sublengths = {
30700: ((0, 'n'),),
30720: ((1, 'n'),),
30721: ((3, 'c'), (2, 'n')),
30726: ((1, 'n'), (1, 'c')),
30728: ((1, 'n'), (1, 'c')),
30730: ((10, 'c'), (5, 'n')),
30745: ((0, 'n'),)
}
self._check_sublengths(exp_sublengths, blocks)
exp_multiline_comments = {
30700: None,
30720: (30730, [(0, '')]),
30721: None,
30726: None,
30728: None,
30730: None,
30745: None
}
self._check_multiline_comments(exp_multiline_comments, blocks)
exp_entry_asm_directives = {30700: []}
self._check_entry_asm_directives(exp_entry_asm_directives, blocks)
self._check_instruction_asm_directives({}, blocks)
def test_parse_ctl_with_max_address(self):
ctl_parser = self._get_ctl_parser(CTL, max_address=30200)
blocks = ctl_parser.get_blocks()
self._check_blocks(blocks)
exp_ctls = {
30000: 'b',
30100: 'c'
}
self._check_ctls(exp_ctls, blocks)
exp_subctls = {
30000: 'b',
30002: 't',
30012: 'w',
30020: 'c',
30027: 'b',
30050: 'b',
30055: 'b',
30100: 'c'
}
self._check_subctls(exp_subctls, blocks)
exp_mid_block_comments = {
30000: [['Block start comment']],
30002: (),
30012: [['Mid-block comment']],
30020: (),
30027: (),
30050: (),
30055: (),
30100: ()
}
self._check_mid_block_comments(exp_mid_block_comments, blocks)
exp_titles = {
30000: ['Data at 30000'],
30100: ['Routine at 30100']
}
self._check_titles(exp_titles, blocks)
exp_instruction_comments = {
30000: (),
30002: [(0, 'Message in the data block')],
30012: [(0, '')],
30020: [(0, '')],
30027: (),
30050: [(0, 'Complex DEFB with a blank directive')],
30055: (),
30100: ()
}
self._check_instruction_comments(exp_instruction_comments, blocks)
exp_descriptions = {
30000: (),
30100: [['Description of routine at 30100']]
}
self._check_descriptions(exp_descriptions, blocks)
exp_registers = {
30000: (),
30100: [['A Some value'], ['BC Some other value']]
}
self._check_registers(exp_registers, blocks)
exp_end_comments = {
30000: (),
30100: [
['First paragraph of the end comment for the routine at 30100'],
['Second paragraph of the end comment for the routine at 30100']
]
}
self._check_end_comments(exp_end_comments, blocks)
exp_sublengths = {
30000: ((0, 'n'),),
30002: ((0, 'c'),),
30012: ((0, 'n'),),
30020: ((0, 'n'),),
30027: ((0, 'n'),),
30050: ((3, 'n'), (2, 'c')),
30055: ((0, 'n'),),
30100: ((0, 'n'),)
}
self._check_sublengths(exp_sublengths, blocks)
exp_multiline_comments = {
30000: None,
30002: None,
30012: (30027, [(0, 'This comment covers the following two sub-blocks')]),
30020: None,
30027: None,
30050: None,
30055: None,
30100: None
}
self._check_multiline_comments(exp_multiline_comments, blocks)
exp_entry_asm_directives = {
30000: ['start'],
30100: []
}
self._check_entry_asm_directives(exp_entry_asm_directives, blocks)
exp_instruction_asm_directives = {
30101: ['label=LOOP']
}
self._check_instruction_asm_directives(exp_instruction_asm_directives, blocks)
def test_parse_ctl_with_min_and_max_addresses(self):
ctl_parser = self._get_ctl_parser(CTL, 30100, 30300)
blocks = ctl_parser.get_blocks()
self._check_blocks(blocks)
exp_ctls = {
30100: 'c',
30200: 'g'
}
self._check_ctls(exp_ctls, blocks)
exp_subctls = {
30100: 'c',
30200: 'b',
30210: 'g'
}
self._check_subctls(exp_subctls, blocks)
exp_mid_block_comments = {
30100: (),
30200: (),
30210: ()
}
self._check_mid_block_comments(exp_mid_block_comments, blocks)
exp_titles = {
30100: ['Routine at 30100'],
30200: ['Game status buffer entry at 30200']
}
self._check_titles(exp_titles, blocks)
exp_instruction_comments = {
30100: (),
30200: [(0, "Blank directive in a 'g' block")],
30210: ()
}
self._check_instruction_comments(exp_instruction_comments, blocks)
exp_descriptions = {
30100: [['Description of routine at 30100']],
30200: ()
}
self._check_descriptions(exp_descriptions, blocks)
exp_registers = {
30100: [['A Some value'], ['BC Some other value']],
30200: ()
}
self._check_registers(exp_registers, blocks)
exp_end_comments = {
30100: [
['First paragraph of the end comment for the routine at 30100'],
['Second paragraph of the end comment for the routine at 30100']
],
30200: ()
}
self._check_end_comments(exp_end_comments, blocks)
exp_sublengths = {
30100: ((0, 'n'),),
30200: ((1, 'n'),),
30210: ((0, 'n'),)
}
self._check_sublengths(exp_sublengths, blocks)
exp_multiline_comments = {
30100: None,
30200: None,
30210: None
}
self._check_multiline_comments(exp_multiline_comments, blocks)
exp_entry_asm_directives = {
30100: [],
30200: []
}
self._check_entry_asm_directives(exp_entry_asm_directives, blocks)
exp_instruction_asm_directives = {
30101: ['label=LOOP']
}
self._check_instruction_asm_directives(exp_instruction_asm_directives, blocks)
def test_invalid_lines(self):
ctl_specs = [
(' 30000,1', 'blank directive with no containing block'),
('B 30745,15,5:X10', 'invalid integer'),
('T 30760,5,2:Y3', 'invalid integer'),
('W 30765,5,1:B4', 'invalid integer'),
('S 30770,10,T8:2', 'invalid integer'),
('B 30780,10,h,5', 'invalid integer'),
('C 40000,Q', 'invalid integer'),
('@ FEDCB label=Z', 'invalid ASM directive address'),
('@ 49152', 'invalid ASM directive declaration'),
('b 50000,20', 'extra parameters after address'),
('c 50000,20', 'extra parameters after address'),
('g 50000,20', 'extra parameters after address'),
('i 50000,20', 'extra parameters after address'),
('s 50000,20', 'extra parameters after address'),
('t 50000,20', 'extra parameters after address'),
('u 50000,20', 'extra parameters after address'),
('w 50000,20', 'extra parameters after address'),
('D 50000,20 Desc.', 'extra parameters after address'),
('E 50000,20 End.', 'extra parameters after address'),
('N 50000,20 Note.', 'extra parameters after address'),
('R 50000,20 A 10', 'extra parameters after address'),
('b b50010', 'invalid address'),
('d 50020', 'invalid directive'),
('! 50030', 'invalid directive'),
('@ 50000 ignoreua:g', "invalid @ignoreua directive suffix: 'g'"),
('L 51000', 'loop length not specified'),
('L 51000,10', 'loop count not specified')
]
ctls = [spec[0] for spec in ctl_specs]
ctl_parser = CtlParser()
ctlfile = self.write_text_file('\n'.join(ctls))
ctl_parser.parse_ctls([ctlfile])
exp_warnings = []
for line_no, (ctl, error_msg) in enumerate(ctl_specs, 1):
if error_msg:
exp_warnings.append('WARNING: Ignoring line {} in {} ({}):'.format(line_no, ctlfile, error_msg))
warnings = self.err.getvalue().split('\n')[:-1]
self.assertEqual(exp_warnings, warnings[0::2])
invalid_ctls = [spec[0] for spec in ctl_specs if spec[1]]
self.assertEqual(invalid_ctls, warnings[1::2])
def test_comments(self):
ctl = """
# This is a comment
b 32768
% This is also a comment
w 32769
; This is a comment too
"""
ctl_parser = self._get_ctl_parser(ctl)
self.assertEqual(self.err.getvalue(), '')
self._check_ctls({32768: 'b', 32769: 'w'}, ctl_parser.get_blocks())
def test_bases(self):
ctl = """
c 50000 Test numeric instruction operand bases
50000,b
50002,h2
50006,hb
50010,6,d2,nb4
50016,,c2,dc4
50022,6,b2
50028,b6,n2,2,h2
"""
ctl_parser = self._get_ctl_parser(ctl)
exp_sublengths = {
50000: ((0, 'b'),),
50002: ((0, 'h'),),
50004: ((0, 'n'),),
50006: ((0, 'hb'),),
50010: ((2, 'd'),),
50012: ((4, 'nb'),),
50016: ((2, 'c'),),
50018: ((4, 'dc'),),
50022: ((2, 'b'),),
50028: ((2, 'n'),),
50030: ((2, 'b'),),
50032: ((2, 'h'),),
50034: ((0, 'n'),)
}
self._check_sublengths(exp_sublengths, ctl_parser.get_blocks())
def test_byte_formats(self):
ctl = """
b 40000 Test byte formats
40000,b 5 bytes in binary format
40005,b5 5 more bytes in binary format
B 40010,b10,5,d3,h2 5 binary, 3 decimal, 2 hex
B 40020,b,2:d3:h5 2 binary, 3 decimal, 5 hex, one line
40030,,b6,3,h1 6 binary, 3 default, 1 hex
40040,10,b5:2:h3 5 binary, 2 default, 3 hex, one line
40050,10,1,c9 1 default, 9 text
40060,10,h4:c6 4 hex, 6 text, one line
T 40070,10,3,b7 3 text, 7 binary
T 40080,10,2:h8 2 text, 8 hex, one line
T 40090,10,5,n5 5 text, 5 default
"""
ctl_parser = self._get_ctl_parser(ctl)
exp_sublengths = {
40000: ((0, 'b'),),
40005: ((0, 'b'),),
40010: ((5, 'b'),),
40015: ((3, 'd'),),
40018: ((2, 'h'),),
40020: ((2, 'b'), (3, 'd'), (5, 'h')),
40030: ((6, 'b'),),
40036: ((3, 'n'),),
40039: ((1, 'h'),),
40040: ((5, 'b'), (2, 'n'), (3, 'h')),
40050: ((1, 'n'),),
40051: ((9, 'c'),),
40060: ((4, 'h'), (6, 'c')),
40070: ((3, 'c'),),
40073: ((7, 'b'),),
40080: ((2, 'c'), (8, 'h')),
40090: ((5, 'c'),),
40095: ((5, 'n'),),
40100: ((0, 'n'),)
}
self._check_sublengths(exp_sublengths, ctl_parser.get_blocks())
def test_word_formats(self):
ctl = """
w 40000 Test word formats
40000,10 5 default
40010,b10 5 words in binary format
W 40020,b10,6,d2,h2 3 binary, 1 decimal, 1 hex
W 40030,b10,4:d4:h2 2 binary, 2 decimal, 1 hex, one line
40040,10,b2,4,h4 1 binary, 2 default, 2 hex
40050,10,b2:6:h2 1 binary, 3 default, 1 hex, one line
"""
ctl_parser = self._get_ctl_parser(ctl)
exp_sublengths = {
40000: ((0, 'n'),),
40010: ((0, 'b'),),
40020: ((6, 'b'),),
40026: ((2, 'd'),),
40028: ((2, 'h'),),
40030: ((4, 'b'), (4, 'd'), (2, 'h')),
40040: ((2, 'b'),),
40042: ((4, 'n'),),
40046: ((4, 'h'),),
40050: ((2, 'b'), (6, 'n'), (2, 'h')),
40060: ((0, 'n'),)
}
self._check_sublengths(exp_sublengths, ctl_parser.get_blocks())
def test_s_directives_with_no_byte_value(self):
ctl = """
s 50000 Test s/S directives with no byte value
50000,10
50010,b10
50020,d10
50030,h10
S 50040,b20,5,d5,h5
S 50060,d20,b5,5,h5
S 50080,h20,b5,d5,5
50100,20,b5,d5,5
"""
ctl_parser = self._get_ctl_parser(ctl)
exp_sublengths = {
50000: ((0, 'n'),),
50010: ((0, 'b'),),
50020: ((0, 'd'),),
50030: ((0, 'h'),),
50040: ((5, 'b'),),
50045: ((5, 'd'),),
50050: ((5, 'h'),),
50060: ((5, 'b'),),
50065: ((5, 'd'),),
50070: ((5, 'h'),),
50080: ((5, 'b'),),
50085: ((5, 'd'),),
50090: ((5, 'h'),),
50100: ((5, 'b'),),
50105: ((5, 'd'),),
50110: ((5, 'n'),),
50120: ((0, 'n'),)
}
self._check_sublengths(exp_sublengths, ctl_parser.get_blocks())
def test_s_directives_with_byte_values(self):
ctl = """
s 50120 Test s/S directives with byte values
50120,20,d20:b%10001000
50140,20,20:h$44
50160,12,10:h10,h2:2
50172,8,2:c",",2:c";",4:c"!"
50180,70,5:c"*"*2,58:c":",2:c" "
50250,10,4:c"\\"",6:c"\\\\"
"""
ctl_parser = self._get_ctl_parser(ctl)
exp_sublengths = {
50120: ((20, 'd'), (136, 'b')),
50140: ((20, 'n'), (68, 'h')),
50160: ((10, 'n'), (10, 'h')),
50170: ((2, 'h'), (2, 'n')),
50172: ((2, 'n'), (44, 'c')),
50174: ((2, 'n'), (59, 'c')),
50176: ((4, 'n'), (33, 'c')),
50180: ((5, 'n'), (42, 'c')),
50185: ((5, 'n'), (42, 'c')),
50190: ((58, 'n'), (58, 'c')),
50248: ((2, 'n'), (32, 'c')),
50250: ((4, 'n'), (34, 'c')),
50254: ((6, 'n'), (92, 'c')),
50260: ((0, 'n'),)
}
self._check_sublengths(exp_sublengths, ctl_parser.get_blocks())
def test_s_directives_with_blank_byte_values(self):
ctl = """
s 60000 Test s/S directives with blank byte values
60000,20,20:c
60020,40,c"(":b
60060,10,h$0A:d
60070,10,10:h
60080,10,b%1010:n
60090,10,5:c*2
"""
ctl_parser = self._get_ctl_parser(ctl)
exp_sublengths = {
60000: ((20, 'n'), (0, 'c')),
60020: ((40, 'c'), (0, 'b')),
60060: ((10, 'h'), (0, 'd')),
60070: ((10, 'n'), (0, 'h')),
60080: ((10, 'b'), (0, 'n')),
60090: ((5, 'n'), (0, 'c')),
60095: ((5, 'n'), (0, 'c')),
60100: ((0, 'n'),)
}
self._check_sublengths(exp_sublengths, ctl_parser.get_blocks())
def test_assemble_directives(self):
ctl = """
@ 30000 assemble=1
c 30000 Routine at 30000
@ 30001 assemble=0
"""
exp_entry_directives = {
30000: ['assemble=1']
}
exp_instruction_directives = {
30001: ['assemble=0']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_bfix_directives(self):
ctl = """
@ 40000 bfix=XOR A
c 40000 Routine at 40000
@ 40001 bfix=XOR B
"""
exp_entry_directives = {
40000: []
}
exp_instruction_directives = {
40000: ['bfix=XOR A'],
40001: ['bfix=XOR B']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_defb_directives(self):
ctl = """
@ 30000 defb=49152:13
c 30000 Routine at 30000
@ 30001 defb=14
"""
exp_entry_directives = {
30000: ['defb=49152:13']
}
exp_instruction_directives = {
30001: ['defb=14']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_defs_directives(self):
ctl = """
@ 40000 defs=32768:10,2
c 40000 Routine at 40000
@ 40001 defs=11,3
"""
exp_entry_directives = {
40000: ['defs=32768:10,2']
}
exp_instruction_directives = {
40001: ['defs=11,3']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_defw_directives(self):
ctl = """
@ 50000 defw=24576:32767
c 50000 Routine at 50000
@ 50001 defw=65535
"""
exp_entry_directives = {
50000: ['defw=24576:32767']
}
exp_instruction_directives = {
50001: ['defw=65535']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_end_directives(self):
ctl = """
c 40000 Routine at 40000
@ 40001 end
@ 40002 end
c 40002 Routine at 40002
"""
exp_entry_directives = {
40000: [],
40002: ['end']
}
exp_instruction_directives = {
40001: ['end']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_equ_directives(self):
ctl = """
@ 50000 equ=ATTRS=22528
c 50000 Routine at 50000
@ 50001 equ=UDG=23675
"""
exp_entry_directives = {
50000: ['equ=ATTRS=22528'],
}
exp_instruction_directives = {
50001: ['equ=UDG=23675']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_expand_directives(self):
ctl = """
@ 32768 expand=#LET(a=2)
c 32768 Routine at 32768
@ 32769 expand=#DEFINE2(MOD,#EVAL({0}%{1}))
"""
exp_entry_directives = {
32768: ['expand=#LET(a=2)']
}
exp_instruction_directives = {
32769: ['expand=#DEFINE2(MOD,#EVAL({0}%{1}))']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_if_directives(self):
ctl = """
@ 40000 if({asm})(replace=/foo/bar)
c 40000 Routine at 40000
@ 40001 if({case}==1)(replace=/FOO/foo)
"""
exp_entry_directives = {
40000: ['if({asm})(replace=/foo/bar)']
}
exp_instruction_directives = {
40001: ['if({case}==1)(replace=/FOO/foo)']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_ignoreua_directives(self):
ctl = """
@ 30000 ignoreua:t
c 30000 Routine at 30000
c 30001 Routine
@ 30001 ignoreua:d
D 30001 Description of the routine at 30001
@ 30001 ignoreua:r
R 30001 HL 30001
@ 30001 ignoreua
30001 Instruction-level comment at 30001
c 30002 Routine
@ 30003 ignoreua:m
N 30003 Mid-block comment above 30003.
@ 30003 ignoreua:i
30003 Instruction-level comment at 30003
c 30004 Routine
@ 30004 ignoreua:i
30004,1 Instruction-level comment at 30004
@ 30005 ignoreua:m
N 30005 Mid-block comment above 30005.
@ 30004 ignoreua:e
E 30004 End comment for the routine at 30004.
"""
blocks = self._get_ctl_parser(ctl).get_blocks()
exp_entry_directives = {
30000: {'t': ''},
30001: {'d': '', 'r': ''},
30002: {},
30004: {'e': ''}
}
exp_other_directives = {
30000: {},
30001: {'i': ''},
30003: {'i': '', 'm': ''},
30004: {'i': ''},
30005: {'m': ''}
}
self._check_ignoreua_directives(exp_entry_directives, exp_other_directives, blocks)
def test_ignoreua_directives_with_values(self):
ctl = """
@ 30000 ignoreua:t=30000
c 30000 Routine at 30000
c 30001 Routine
@ 30001 ignoreua:d=30001
D 30001 Description of the routine at 30001
@ 30001 ignoreua:r=30001
R 30001 HL 30001
@ 30001 ignoreua=30000,30001
30001 Instruction-level comment at 30001
c 30002 Routine
@ 30003 ignoreua:m=30003
N 30003 Mid-block comment above 30003.
@ 30003 ignoreua:i=30002,30003
30003 Instruction-level comment at 30003
c 30004 Routine
@ 30004 ignoreua:i=30004
30004,1 Instruction-level comment at 30004
@ 30005 ignoreua:m=30005
N 30005 Mid-block comment above 30005.
@ 30004 ignoreua:e=30000,30004
E 30004 End comment for the routine at 30004.
"""
blocks = self._get_ctl_parser(ctl).get_blocks()
exp_entry_directives = {
30000: {'t': '=30000'},
30001: {'d': '=30001', 'r': '=30001'},
30002: {},
30004: {'e': '=30000,30004'}
}
exp_other_directives = {
30000: {},
30001: {'i': '=30000,30001'},
30003: {'i': '=30002,30003', 'm': '=30003'},
30004: {'i': '=30004'},
30005: {'m': '=30005'}
}
self._check_ignoreua_directives(exp_entry_directives, exp_other_directives, blocks)
def test_isub_directives(self):
ctl = """
@ 40000 isub=LD A,1
c 40000 Routine at 40000
@ 40002 isub=LD A,2
"""
exp_entry_directives = {
40000: []
}
exp_instruction_directives = {
40000: ['isub=LD A,1'],
40002: ['isub=LD A,2']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_keep_directives(self):
ctl = """
@ 50000 keep
c 50000 Routine at 50000
@ 50003 keep
"""
exp_entry_directives = {
50000: []
}
exp_instruction_directives = {
50000: ['keep'],
50003: ['keep']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_label_directives(self):
ctl = """
@ 60000 label=START
c 60000 Routine at 60000
@ 60003 label=LOOP
"""
exp_entry_directives = {
60000: []
}
exp_instruction_directives = {
60000: ['label=START'],
60003: ['label=LOOP']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_nowarn_directives(self):
ctl = """
@ 40000 nowarn
c 40000 Routine at 40000
@ 40003 nowarn
"""
exp_entry_directives = {
40000: []
}
exp_instruction_directives = {
40000: ['nowarn'],
40003: ['nowarn']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_ofix_directives(self):
ctl = """
@ 50000 ofix=LD HL,12345
c 50000 Routine at 50000
@ 50003 ofix=CALL 34567
"""
exp_entry_directives = {
50000: []
}
exp_instruction_directives = {
50000: ['ofix=LD HL,12345'],
50003: ['ofix=CALL 34567']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_org_directives(self):
ctl = """
@ 60000 org=60000
c 60000 Routine at 60000
@ 60001 org
"""
exp_entry_directives = {
60000: ['org=60000'],
}
exp_instruction_directives = {
60001: ['org']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_refs_directives(self):
ctl = """
@ 30000 refs=40000:60000
c 30000 Routine at 30000
@ 30001 refs=40000,50000
"""
exp_entry_directives = {
30000: []
}
exp_instruction_directives = {
30000: ['refs=40000:60000'],
30001: ['refs=40000,50000']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_rem_directives(self):
ctl = """
@ 30000 rem=It begins
c 30000 Routine at 30000
@ 30010 rem=It ends
"""
exp_entry_directives = {
30000: []
}
exp_instruction_directives = {
30000: ['rem=It begins'],
30010: ['rem=It ends']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_remote_directives(self):
ctl = """
@ 32768 remote=main:29012,29015
c 32768 Routine at 32768
@ 32769 remote=start:20112
"""
exp_entry_directives = {
32768: ['remote=main:29012,29015']
}
exp_instruction_directives = {
32769: ['remote=start:20112']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_replace_directives(self):
ctl = """
@ 40000 replace=/foo/bar
c 40000 Routine at 40000
@ 40001 replace=/baz/qux
"""
exp_entry_directives = {
40000: ['replace=/foo/bar'],
}
exp_instruction_directives = {
40001: ['replace=/baz/qux']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_rfix_directives(self):
ctl = """
@ 50000 rfix=LD BC,0
c 50000 Routine at 50000
@ 50002 rfix=LD HL,0
"""
exp_entry_directives = {
50000: []
}
exp_instruction_directives = {
50000: ['rfix=LD BC,0'],
50002: ['rfix=LD HL,0']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_rsub_directives(self):
ctl = """
@ 60000 rsub=LD DE,0
c 60000 Routine at 60000
@ 60002 rsub=LD IX,0
"""
exp_entry_directives = {
60000: []
}
exp_instruction_directives = {
60000: ['rsub=LD DE,0'],
60002: ['rsub=LD IX,0']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_set_directives(self):
ctl = """
@ 30000 set-crlf=1
c 30000 Routine at 30000
@ 30001 set-tab=1
"""
exp_entry_directives = {
30000: ['set-crlf=1'],
}
exp_instruction_directives = {
30001: ['set-tab=1']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_ssub_directives(self):
ctl = """
@ 40000 ssub=INC HL
            c 40000 Routine at 40000
@ 40001 ssub=INC BC
"""
exp_entry_directives = {
40000: []
}
exp_instruction_directives = {
40000: ['ssub=INC HL'],
40001: ['ssub=INC BC']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_start_directives(self):
ctl = """
@ 50000 start
c 50000 Routine at 50000
@ 50001 start
"""
exp_entry_directives = {
50000: ['start'],
}
exp_instruction_directives = {
50001: ['start']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_writer_directives(self):
ctl = """
@ 60000 writer=x.y.z
c 60000 Routine at 60000
@ 60001 writer=foo.bar.baz
"""
exp_entry_directives = {
60000: ['writer=x.y.z'],
}
exp_instruction_directives = {
60001: ['writer=foo.bar.baz']
}
self._test_asm_directives(ctl, exp_entry_directives, exp_instruction_directives)
def test_order_of_entry_asm_directives_is_preserved(self):
ctl = """
@ 30000 start
@ 30000 equ=ATTRS=22528
@ 30000 replace=/foo/bar
@ 30000 replace=/baz/qux
c 30000 Routine at 30000
"""
blocks = self._get_ctl_parser(ctl).get_blocks()
exp_entry_directives = {
30000: ['start', 'equ=ATTRS=22528', 'replace=/foo/bar', 'replace=/baz/qux']
}
self._check_entry_asm_directives(exp_entry_directives, blocks)
def test_header_block(self):
ctl = """
> 60000 ; This is a header.
c 60000 Routine
"""
exp_headers = {60000: ['; This is a header.']}
exp_footers = {60000: ()}
blocks = self._get_ctl_parser(ctl).get_blocks()
self._check_headers(exp_headers, blocks)
self._check_footers(exp_footers, blocks)
def test_two_header_blocks(self):
ctl = """
> 30000 ; This is a header.
> 30000
> 30000 ; This is another header.
c 30000 Routine
"""
exp_headers = {
30000: [
'; This is a header.',
'',
'; This is another header.'
]
}
exp_footers = {30000: ()}
blocks = self._get_ctl_parser(ctl).get_blocks()
self._check_headers(exp_headers, blocks)
self._check_footers(exp_footers, blocks)
def test_header_block_containing_asm_data_directives(self):
ctl = """
> 60000 @defb=1
> 60000 @defs=2
> 60000 @defw=3
c 60000 Routine
"""
exp_headers = {60000: ['@defb=1', '@defs=2', '@defw=3']}
exp_footers = {60000: ()}
exp_asm_data_directives = {60000: ['defb=1', 'defs=2', 'defw=3']}
blocks = self._get_ctl_parser(ctl).get_blocks()
self._check_headers(exp_headers, blocks)
self._check_footers(exp_footers, blocks)
self._check_asm_data_directives(exp_asm_data_directives, blocks)
def test_header_block_unaffected_by_dot_directives(self):
ctl = """
> 40000 ; This is the start of the header.
. This is an intervening dot directive.
> 40000 ; This is the end of the header.
. Another dot directive to ignore.
c 40000 Routine
"""
exp_headers = {
40000: [
'; This is the start of the header.',
'; This is the end of the header.'
]
}
blocks = self._get_ctl_parser(ctl).get_blocks()
self._check_headers(exp_headers, blocks)
def test_footer_block(self):
ctl = """
c 50000 Routine
> 50000,1 ; This is a footer.
"""
exp_headers = {50000: ()}
exp_footers = {50000: ['; This is a footer.']}
blocks = self._get_ctl_parser(ctl).get_blocks()
self._check_headers(exp_headers, blocks)
self._check_footers(exp_footers, blocks)
def test_two_footer_blocks(self):
ctl = """
c 60000 Routine
> 60000,1 ; This is a footer.
> 60000,1
> 60000,1 ; This is another footer.
"""
exp_headers = {60000: ()}
exp_footers = {
60000: [
'; This is a footer.',
'',
'; This is another footer.'
]
}
blocks = self._get_ctl_parser(ctl).get_blocks()
self._check_headers(exp_headers, blocks)
self._check_footers(exp_footers, blocks)
def test_footer_block_containing_asm_data_directives(self):
ctl = """
c 60000 Routine
> 60000,1 @defb=1
> 60000,1 @defs=2
> 60000,1 @defw=3
"""
exp_headers = {60000: ()}
exp_footers = {60000: ['@defb=1', '@defs=2', '@defw=3']}
exp_asm_data_directives = {60000: ['defb=1', 'defs=2', 'defw=3']}
blocks = self._get_ctl_parser(ctl).get_blocks()
self._check_headers(exp_headers, blocks)
self._check_footers(exp_footers, blocks)
self._check_asm_data_directives(exp_asm_data_directives, blocks)
def test_footer_block_unaffected_by_dot_directives(self):
ctl = """
c 40000 Routine
> 40000,1 ; This is the start of the footer.
. This is an intervening dot directive.
> 40000,1 ; This is the end of the footer.
. Another dot directive to ignore.
"""
exp_footers = {
40000: [
'; This is the start of the footer.',
'; This is the end of the footer.'
]
}
blocks = self._get_ctl_parser(ctl).get_blocks()
self._check_footers(exp_footers, blocks)
def test_registers(self):
ctl = """
c 40000 Routine
R 40000 BC Important value
R 40000 DE
R 40000
R 40000 HL Another important value
"""
ctl_parser = self._get_ctl_parser(ctl)
blocks = ctl_parser.get_blocks()
self.assertEqual(len(blocks), 1)
exp_registers = [
['BC Important value'],
['DE'],
[''],
['HL Another important value']
]
self.assertEqual(exp_registers, blocks[0].registers)
def test_N_directive(self):
ctl = """
c 40000 Routine
D 40000 Description.
N 40000 Paragraph 1.
N 40000 Paragraph 2.
N 40001 Mid-routine comment.
"""
ctl_parser = self._get_ctl_parser(ctl)
blocks = ctl_parser.get_blocks()
self.assertEqual(len(blocks), 1)
self.assertEqual([['Description.']], blocks[0].description)
sub_blocks = blocks[0].blocks
self.assertEqual(len(sub_blocks), 2)
self.assertEqual([['Paragraph 1.'], ['Paragraph 2.']], sub_blocks[0].header)
self.assertEqual([['Mid-routine comment.']], sub_blocks[1].header)
def test_M_directive_terminates_previous_sub_block(self):
ctl = """
c 65533
65533 This sub-block is terminated by the following M directive
M 65534,2 This spans an implicit "C" sub-block and a "B" sub-block
B 65535,1
"""
ctl_parser = self._get_ctl_parser(ctl)
blocks = ctl_parser.get_blocks()
self.assertEqual(len(blocks), 1)
sub_blocks = blocks[0].blocks
exp_subctls = {
65533: ('c', [(0, 'This sub-block is terminated by the following M directive')], None),
65534: ('c', (), (65536, [(0, 'This spans an implicit "C" sub-block and a "B" sub-block')])),
65535: ('b', [(0, '')], None)
}
subctls = {b.start:(b.ctl, b.comment, b.multiline_comment) for b in sub_blocks}
self.assertEqual(exp_subctls, subctls)
def test_M_directive_followed_by_sub_block_with_sublengths(self):
ctl = """
b 40000 Data
M 40000 This spans a "B" sub-block with sublengths and a "W" sub-block
B 40000,3,2,1
W 40003,2
i 40005
"""
ctl_parser = self._get_ctl_parser(ctl)
blocks = ctl_parser.get_blocks()
self.assertEqual(len(blocks), 2)
sub_blocks = blocks[0].blocks
exp_subctls = {
40000: ('b', (40005, [(0, 'This spans a "B" sub-block with sublengths and a "W" sub-block')])),
40002: ('b', None),
40003: ('w', None)
}
subctls = {b.start:(b.ctl, b.multiline_comment) for b in sub_blocks}
self.assertEqual(exp_subctls, subctls)
def test_loop(self):
start = 30000
length = 25
count = 2
end = start + length * count
ctl = """
@ 30000 start
@ 30000 org=30000
c 30000 This entry should not be repeated
D 30000 This entry description should not be repeated
R 30000 HL This register should not be repeated
30000,5 Begin
B 30005,5,1,2
N 30010 A mid-block comment
M 30010,10 A multi-line comment
S 30010,6
W 30016,4,4
@ 30020 label=END
T 30020,5,4:n1 End
E 30000 This end comment should not be repeated
L {},{},{}
""".format(start, length, count)
ctl_parser = self._get_ctl_parser(ctl)
blocks = ctl_parser.get_blocks()
self.assertEqual(len(blocks), 1)
block = blocks[0]
sub_blocks = block.blocks
sub_block_map = {b.start: b for b in sub_blocks}
# Check B, C, S, T and W sub-blocks
exp_subctls = {}
exp_sublengths = {}
for a in range(start, end, length):
for offset, subctl, sublengths in (
(0, 'c', ((0, 'n'),)),
(5, 'b', ((1, 'n'),)),
(6, 'b', ((2, 'n'),)),
(10, 's', ((0, 'n'),)),
(16, 'w', ((4, 'n'),)),
(20, 't', ((4, 'c'), (1, 'n')),),
(25, 'c', ((0, 'n'),))
):
address = a + offset
exp_subctls[address] = subctl
exp_sublengths[address] = sublengths
self._check_subctls(exp_subctls, blocks)
self._check_sublengths(exp_sublengths, blocks)
# Check mid-block comments
offset = 10
for a in range(start + offset, end, length):
self.assertEqual([['A mid-block comment']], sub_block_map[a].header)
# Check multi-line comments
offset = 10
for a in range(start + offset, end, length):
self.assertEqual((a + offset, [(0, 'A multi-line comment')]), sub_block_map[a].multiline_comment)
# Check entry-level directives (c, D, E, R)
self._check_ctls({start: 'c'}, blocks)
self.assertEqual([['This entry description should not be repeated']], block.description)
self.assertEqual([['HL This register should not be repeated']], block.registers)
self.assertEqual([['This end comment should not be repeated']], block.end_comment)
# Check entry-level ASM directives
self.assertEqual(['start', 'org=30000'], block.asm_directives)
# Check instruction-level ASM directives
exp_directives = {start + 20: ['label=END']}
self._check_instruction_asm_directives(exp_directives, blocks)
def test_loop_including_entries(self):
start = 40000
length = 25
count = 3
end = start + length * count
ctl = """
@ 40000 start
@ 40000 org=40000
c 40000 This entry should be repeated
D 40000 This entry description should be repeated
R 40000 HL This register should be repeated
40000,5 Begin
B 40005,5,1,2
N 40010 A mid-block comment
M 40010,10 A multi-line comment
S 40010,6
W 40016,4,4
@ 40020 label=END
T 40020,5,4:n1 End
E 40000 This end comment should be repeated
L {},{},{},1
""".format(start, length, count)
ctl_parser = self._get_ctl_parser(ctl)
blocks = ctl_parser.get_blocks()
sub_block_map = {s.start: s for b in blocks for s in b.blocks}
# Check B, C, S, T and W sub-blocks
i = 0
exp_subctls = {}
exp_sublengths = {}
for a in range(start, end, length):
sub_blocks = blocks[i].blocks
for offset, subctl, sublengths in (
(0, 'c', ((0, 'n'),)),
(5, 'b', ((1, 'n'),)),
(6, 'b', ((2, 'n'),)),
(10, 's', ((0, 'n'),)),
(16, 'w', ((4, 'n'),)),
(20, 't', ((4, 'c'), (1, 'n')),),
(25, 'c', ((0, 'n'),))
):
address = a + offset
exp_subctls[address] = subctl
exp_sublengths[address] = sublengths
i += 1
self._check_subctls(exp_subctls, blocks)
self._check_sublengths(exp_sublengths, blocks)
# Check mid-block comments
offset = 10
for a in range(start + offset, end, length):
self.assertEqual([['A mid-block comment']], sub_block_map[a].header)
# Check multi-line comments
exp_multiline_comments = {}
for a in range(start, end, length):
for b in (0, 5, 6, 10, 16, 20, 25):
address = a + b
if b == 5:
exp_multiline_comments[address] = (address + 3, [(0, '')])
elif b == 10:
exp_multiline_comments[address] = (address + 10, [(0, 'A multi-line comment')])
else:
exp_multiline_comments[address] = None
self._check_multiline_comments(exp_multiline_comments, blocks)
# Check entry-level directives (c, D, E, R)
exp_ctls = {}
exp_descriptions = {}
exp_registers = {}
exp_end_comments = {}
        for a in range(start, end, length):
exp_ctls[a] = 'c'
exp_descriptions[a] = [['This entry description should be repeated']]
exp_registers[a] = [['HL This register should be repeated']]
exp_end_comments[a] = [['This end comment should be repeated']]
self._check_ctls(exp_ctls, blocks)
self._check_descriptions(exp_descriptions, blocks)
self._check_registers(exp_registers, blocks)
self._check_end_comments(exp_end_comments, blocks)
# Check entry-level ASM directives
self.assertEqual(['start', 'org=40000'], blocks[0].asm_directives)
for block in blocks[1:]:
self.assertEqual([], block.asm_directives)
# Check instruction-level ASM directives
exp_directives = {start + 20: ['label=END']}
self._check_instruction_asm_directives(exp_directives, blocks)
def test_loop_crossing_64k_boundary(self):
ctl = """
u 65532
W 65532,2
L 65532,2,3
"""
ctl_parser = self._get_ctl_parser(ctl)
warnings = self.err.getvalue().split('\n')
# Check warning
self.assertEqual(warnings[0], 'WARNING: Loop crosses 64K boundary:')
self.assertEqual(warnings[1], 'L 65532,2,3')
# Check that the W sub-block is repeated anyway
exp_subctls = {65532: 'w', 65534: 'w'}
self._check_subctls(exp_subctls, ctl_parser.get_blocks())
def test_loop_with_entries_crossing_64k_boundary(self):
ctl = """
b 65534
L 65534,1,4,1
"""
ctl_parser = self._get_ctl_parser(ctl)
warnings = self.err.getvalue().split('\n')
# Check warning
self.assertEqual(warnings[0], 'WARNING: Loop crosses 64K boundary:')
self.assertEqual(warnings[1], 'L 65534,1,4,1')
# Check that there is no block that starts past the boundary
blocks = ctl_parser.get_blocks()
self.assertEqual(blocks[-1].start, 65535)
def test_loop_is_trimmed_by_max_address(self):
ctl = """
b 30000
N 30000 A comment
M 30000,10 Some bytes and text
B 30000,5
T 30005,5,4:n1
B 30010,10 Some more bytes
L 30000,20,3
"""
blocks = self._get_ctl_parser(ctl, max_address=30040).get_blocks()
exp_subctls = {
30000: 'b',
30005: 't',
30010: 'b',
30020: 'b',
30025: 't',
30030: 'b'
}
self._check_subctls(exp_subctls, blocks)
exp_mid_block_comments = {
30000: [['A comment']],
30005: (),
30010: (),
30020: [['A comment']],
30025: (),
30030: ()
}
self._check_mid_block_comments(exp_mid_block_comments, blocks)
exp_instruction_comments = {
30000: [(0, '')],
30005: [(0, '')],
30010: [(0, 'Some more bytes')],
30020: [(0, '')],
30025: [(0, '')],
30030: [(0, 'Some more bytes')]
}
self._check_instruction_comments(exp_instruction_comments, blocks)
exp_multiline_comments = {
30000: (30010, [(0, 'Some bytes and text')]),
30005: None,
30010: None,
30020: (30030, [(0, 'Some bytes and text')]),
30025: None,
30030: None,
}
self._check_multiline_comments(exp_multiline_comments, blocks)
exp_sublengths = {
30000: ((0, 'n'),),
30005: ((4, 'c'), (1, 'n')),
30010: ((0, 'n'),),
30020: ((0, 'n'),),
30025: ((4, 'c'), (1, 'n')),
30030: ((0, 'n'),)
}
self._check_sublengths(exp_sublengths, blocks)
def test_loop_with_entries_is_trimmed_by_max_address(self):
ctl = """
b 30000 A block of bytes
D 30000 This is a block of bytes
R 30000 A 0
B 30000,5
T 30005,5
E 30000 The end
L 30000,10,3,1
"""
blocks = self._get_ctl_parser(ctl, max_address=30020).get_blocks()
exp_ctls = {
30000: 'b',
30010: 'b'
}
self._check_ctls(exp_ctls, blocks)
exp_subctls = {
30000: 'b',
30005: 't',
30010: 'b',
30015: 't'
}
self._check_subctls(exp_subctls, blocks)
exp_descriptions = {
30000: [['This is a block of bytes']],
30010: [['This is a block of bytes']]
}
self._check_descriptions(exp_descriptions, blocks)
exp_registers = {
30000: [['A 0']],
30010: [['A 0']]
}
self._check_registers(exp_registers, blocks)
exp_end_comments = {
30000: [['The end']],
30010: [['The end']]
}
self._check_end_comments(exp_end_comments, blocks)
def test_terminate_multiline_comments(self):
ctl = """
c 30000
M 30000 No length specified, should end at 30002
c 30002
M 30002 No length specified, should end at 30003
N 30003 This comment implicitly terminates the M directive above
c 30004
M 30004,5 Excessive length specified, should end at 30006
c 30006
M 30006,2 Excessive length specified, should end at 30007
N 30007 This comment implicitly terminates the M directive above
c 30008
"""
blocks = self._get_ctl_parser(ctl).get_blocks()
m_comment_end_map = {s.start: s.multiline_comment[0] for b in blocks for s in b.blocks if s.multiline_comment}
exp_end_map = {
30000: 30002,
30002: 30003,
30004: 30006,
30006: 30007
}
self.assertEqual(exp_end_map, m_comment_end_map)
def test_dot_directive_with_entry_titles(self):
ctl = """
b 30000 A title split
. over two lines
c 30100 A title
. split over
. three lines
g 30200 One line, never wrapped
.
i 30300 One
. Two
s 30400 One
. Two
. Three
t 30500 Another one-liner, never wrapped
.
u 30600 Not
. used
w 30700 Some
. words
; Test a blank title with a blank continuation line
b 30800
.
c 30900
. Line 1 here
g 31000
. Trailing blank line
.
i 31100
.
. Leading blank line
s 31200
. Title
.
. Description.
.
. A The accumulator
.
. Start comment.
t 31300
. This title has
. an indent on the second line
"""
exp_titles = {
30000: ['A title split', 'over two lines'],
30100: ['A title', 'split over', 'three lines'],
30200: ['One line, never wrapped', ''],
30300: ['One', 'Two'],
30400: ['One', 'Two', 'Three'],
30500: ['Another one-liner, never wrapped', ''],
30600: ['Not', 'used'],
30700: ['Some', 'words'],
30800: ['', ''],
30900: ['', 'Line 1 here'],
31000: ['', 'Trailing blank line', ''],
31100: ['', '', 'Leading blank line'],
31200: ['', 'Title', '', 'Description.', '', 'A The accumulator', '', 'Start comment.'],
31300: ['', 'This title has', ' an indent on the second line']
}
blocks = self._get_ctl_parser(ctl).get_blocks()
self._check_titles(exp_titles, blocks)
def test_dot_directive_with_instruction_comments(self):
ctl = """
b 30000
B 30000,1 Single instruction,
. two comment lines
C 30001,1 Single instruction, one comment line, never wrapped
.
S 30002,2,1 Two instructions,
. two comment lines
T 30004,2,1 Two instructions,
. three comment
. lines
W 30006,2,2 Two instructions, one comment line, never wrapped
.
M 30010,2 Two instructions of different types,
. two comment lines
B 30010,1
S 30011,1
; Test a blank comment with a blank continuation line
B 30012,1
.
C 30013,1
. Line 1 here
S 30014,1
. Trailing blank line
.
T 30015,1
.
. Leading blank line
W 30016,2
. Line 1
.
. Line 3 (with a blank line 2)
B 30018,1
. This comment has
. an indent on the second line
"""
blocks = self._get_ctl_parser(ctl).get_blocks()
exp_instruction_comments = {
30000: [(0, 'Single instruction,'), (0, 'two comment lines')],
30001: [(0, 'Single instruction, one comment line, never wrapped'), (0, '')],
30002: [(0, 'Two instructions,'), (0, 'two comment lines')],
30004: [(0, 'Two instructions,'), (0, 'three comment'), (0, 'lines')],
30006: [(0, 'Two instructions, one comment line, never wrapped'), (0, '')],
30008: (),
30010: [(0, '')],
30011: [(0, '')],
30012: [(0, ''), (0, '')],
30013: [(0, ''), (0, 'Line 1 here')],
30014: [(0, ''), (0, 'Trailing blank line'), (0, '')],
30015: [(0, ''), (0, ''), (0, 'Leading blank line')],
30016: [(0, ''), (0, 'Line 1'), (0, ''), (0, 'Line 3 (with a blank line 2)')],
30018: [(0, ''), (0, 'This comment has'), (0, ' an indent on the second line')],
30019: ()
}
self._check_instruction_comments(exp_instruction_comments, blocks)
exp_multiline_comments = {
30000: None,
30001: None,
30002: None,
30004: None,
30006: None,
30008: None,
30010: (30012, [(0, 'Two instructions of different types,'), (0, 'two comment lines')]),
30011: None,
30012: None,
30013: None,
30014: None,
30015: None,
30016: None,
30018: None,
30019: None
}
self._check_multiline_comments(exp_multiline_comments, blocks)
def test_dot_directive_with_entry_descriptions(self):
ctl = """
b 40000
D 40000 This description
. spans two lines.
b 40001 Two 'D' directives
D 40001 This description spans only one line even though it would normally be wrapped over two lines.
.
D 40001
. This description
. spans three
. lines.
b 40002
D 40002
. Another long description that spans only one line but would normally be wrapped over two lines.
b 40003 Test a blank description with a blank continuation line
D 40003
.
b 40004
D 40004
. Trailing blank line.
.
b 40005
D 40005
.
. Leading blank line.
b 40006
D 40006
. Paragraph 1.
.
. Paragraph 2.
b 40007
D 40007
. This description has
. an indented second line.
"""
exp_descriptions = {
40000: [['This description', 'spans two lines.']],
40001: [
['This description spans only one line even though it would normally be wrapped over two lines.', ''],
['', 'This description', 'spans three', 'lines.']
],
40002: [['', 'Another long description that spans only one line but would normally be wrapped over two lines.']],
40003: [['', '']],
40004: [['', 'Trailing blank line.', '']],
40005: [['', '', 'Leading blank line.']],
40006: [['', 'Paragraph 1.', '', 'Paragraph 2.']],
40007: [['', 'This description has', ' an indented second line.']]
}
blocks = self._get_ctl_parser(ctl).get_blocks()
self._check_descriptions(exp_descriptions, blocks)
def test_dot_directive_with_block_start_and_mid_block_comments(self):
ctl = """
b 50000
N 50000 This comment
. spans two lines.
N 50000 This comment spans only one line even though it would normally be wrapped over two lines.
.
N 50001
. This comment
. spans three
. lines.
N 50002
. Another long comment that spans only one line but would normally be wrapped over two lines.
; Test a blank comment with a blank continuation line
N 50003
.
N 50004
. Trailing blank line.
.
N 50005
.
. Leading blank line.
N 50006
. Paragraph 1.
.
. Paragraph 2.
N 50007
. This comment has
. an indented second line.
"""
exp_mid_block_comments = {
50000: [
['This comment', 'spans two lines.'],
['This comment spans only one line even though it would normally be wrapped over two lines.', '']
],
50001: [['', 'This comment', 'spans three', 'lines.']],
50002: [['', 'Another long comment that spans only one line but would normally be wrapped over two lines.']],
50003: [['', '']],
50004: [['', 'Trailing blank line.', '']],
50005: [['', '', 'Leading blank line.']],
50006: [['', 'Paragraph 1.', '', 'Paragraph 2.']],
50007: [['', 'This comment has', ' an indented second line.']]
}
blocks = self._get_ctl_parser(ctl).get_blocks()
self._check_mid_block_comments(exp_mid_block_comments, blocks)
def test_dot_directive_with_block_end_comments(self):
ctl = """
b 50000
E 50000 This comment
. spans two lines.
E 50000 This comment spans only one line even though it would normally be wrapped over two lines.
.
b 50001
E 50001
. This comment
. spans three
. lines.
b 50002
E 50002
. Another long comment that spans only one line but would normally be wrapped over two lines.
b 50003 Test a blank comment with a blank continuation line
E 50003
.
b 50004
E 50004
. Trailing blank line.
.
b 50005
E 50005
.
. Leading blank line.
b 50006
E 50006
. Paragraph 1.
.
. Paragraph 2.
b 50007
E 50007
. This comment has
. an indented second line.
"""
exp_end_comments = {
50000: [
['This comment', 'spans two lines.'],
['This comment spans only one line even though it would normally be wrapped over two lines.', '']
],
50001: [['', 'This comment', 'spans three', 'lines.']],
50002: [['', 'Another long comment that spans only one line but would normally be wrapped over two lines.']],
50003: [['', '']],
50004: [['', 'Trailing blank line.', '']],
50005: [['', '', 'Leading blank line.']],
50006: [['', 'Paragraph 1.', '', 'Paragraph 2.']],
50007: [['', 'This comment has', ' an indented second line.']]
}
blocks = self._get_ctl_parser(ctl).get_blocks()
self._check_end_comments(exp_end_comments, blocks)
def test_dot_directive_with_registers(self):
ctl = """
c 60000
R 60000 BC This description
. spans two lines
R 60000 DE This description spans only one line even though it would normally be wrapped over two lines
.
c 60001
R 60001
. HL This description
. spans three
. lines
c 60002
R 60002
. A Another long description that spans only one line but would normally be wrapped over two lines
c 60003 Test a blank register description with a blank continuation line
R 60003
.
c 60004
R 60004
. IX Trailing blank line
.
c 60005
R 60005
.
. IY Leading blank line
c 60006
R 60006
. SP
.
. Stack pointer
c 60007
R 60007
. A Input
. O:B Output
"""
exp_registers = {
60000: [
['BC This description', 'spans two lines'],
['DE This description spans only one line even though it would normally be wrapped over two lines', '']
],
60001: [['', 'HL This description', 'spans three', 'lines']],
60002: [['', 'A Another long description that spans only one line but would normally be wrapped over two lines']],
60003: [['', '']],
60004: [['', 'IX Trailing blank line', '']],
60005: [['', '', 'IY Leading blank line']],
60006: [['', 'SP', '', 'Stack pointer']],
60007: [['', ' A Input', 'O:B Output']]
}
blocks = self._get_ctl_parser(ctl).get_blocks()
self._check_registers(exp_registers, blocks)
def test_dot_directives_with_max_address(self):
ctl = """
c 40000
C 40000,1
. Begin
C 40001,1
. End
"""
exp_instruction_comments = {
40000: [
(0, ''),
(0, 'Begin')
]
}
ctl_parser = self._get_ctl_parser(ctl, max_address=40001)
blocks = ctl_parser.get_blocks()
self._check_instruction_comments(exp_instruction_comments, blocks)
def test_colon_directive(self):
ctl = """
b 30000
B 30000,2,1
. The first two comment lines
: belong to the first DEFB.
. And these two comment lines
: belong to the second DEFB.
"""
exp_instruction_comments = {
30000: [
(0, ''),
(0, 'The first two comment lines'),
(1, 'belong to the first DEFB.'),
(0, 'And these two comment lines'),
(1, 'belong to the second DEFB.')
],
30002: ()
}
blocks = self._get_ctl_parser(ctl).get_blocks()
self._check_instruction_comments(exp_instruction_comments, blocks)
|
skoolkid/skoolkit
|
tests/test_ctlparser.py
|
Python
|
gpl-3.0
| 80,566
|
import init_location
from model import *
import UnitTests.Model.test_Authors_db as test_Authors_db
import UnitTests.Model.test_comments_db as test_comments_db
import UnitTests.Model.test_Post_db as test_Post_db
import UnitTests.Model.test_Images_db as test_Images_db
import UnitTests.Model.test_URL_db as test_URL_db
import UnitTests.Model.test_Friend_Requests_db as test_Friend_Requests_db
import UnitTests.Model.test_Author_Relationships_db as test_Author_Relationships_db
import UnitTests.Model.test_Servers_db as test_Servers_db
# import Server.main
import main
app = main.app
def runServer():
main.run()
def runTests():
    # Run each model test suite in turn against a clean database.
    for test_module in (test_Authors_db, test_Post_db, test_comments_db,
                        test_Images_db, test_URL_db, test_Friend_Requests_db,
                        test_Author_Relationships_db, test_Servers_db):
        test_module.runTest()
        DELETE_ALL()
if __name__ == "__main__":
runServer()
# runTests()
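# By default this script launches the server; swap the two calls above to run
# the model test suite against a clean database instead.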
|
CMPUT404-Fall2016/cmput404-project
|
RunMe.py
|
Python
|
gpl-3.0
| 1,028
|
# -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico; if not, see <http://www.gnu.org/licenses/>.
from MaKaC.services.implementation import resources
from MaKaC.services.implementation import error
from MaKaC.services.interface.rpc import description
from MaKaC.plugins import PluginsHolder
from indico.modules.rb import services as rb_services
from indico.modules.rb.services import (
aspects as rb_aspect_services,
blockings as rb_blocking_services,
locations as rb_location_services,
rooms as rb_room_services
)
def importModule(name):
mod = __import__(name)
components = name.split('.')
for comp in components[1:]:
mod = getattr(mod, comp)
return mod
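# Note: importModule(name) behaves like importlib.import_module(name); the
# manual getattr walk is needed because __import__ returns the top-level
# package rather than the leaf module.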
def updateMethodMapWithPlugins():
methodMap.update(PluginsHolder().getById('ajaxMethodMap').getAJAXDict())
methodMap = {
'resources.timezones.getAll': resources.GetTimezones,
# rb base
'roomBooking.getDateWarning': rb_services.GetDateWarning,
# rb locations
'roomBooking.locations.list': rb_location_services.RoomBookingListLocations,
# rb rooms
'roomBooking.rooms.list': rb_room_services.RoomBookingListRooms,
'roomBooking.rooms.availabilitySearch': rb_room_services.RoomBookingAvailabilitySearchRooms,
'roomBooking.rooms.fullNameList': rb_room_services.RoomBookingFullNameListRooms,
'roomBooking.locationsAndRooms.listWithGuids': rb_room_services.RoomBookingListLocationsAndRoomsWithGuids,
'roomBooking.locationsAndRooms.getLink': rb_room_services.RoomBookingLocationsAndRoomsGetLink,
'roomBooking.room.bookingPermission': rb_room_services.BookingPermission,
# rb aspects
'roomBooking.mapaspects.create': rb_aspect_services.RoomBookingMapCreateAspect,
'roomBooking.mapaspects.update': rb_aspect_services.RoomBookingMapUpdateAspect,
'roomBooking.mapaspects.remove': rb_aspect_services.RoomBookingMapRemoveAspect,
'roomBooking.mapaspects.list': rb_aspect_services.RoomBookingMapListAspects,
# rb blockings
'roombooking.blocking.approve': rb_blocking_services.RoomBookingBlockingApprove,
'roombooking.blocking.reject': rb_blocking_services.RoomBookingBlockingReject,
# system
'system.describe': description.describe,
'system.error.report': error.SendErrorReport
}
endpointMap = {
"event": importModule("MaKaC.services.implementation.conference"),
"user": importModule('MaKaC.services.implementation.user'),
"contribution": importModule('MaKaC.services.implementation.contribution'),
"session": importModule('MaKaC.services.implementation.session'),
"schedule": importModule('MaKaC.services.implementation.schedule'),
"search": importModule('MaKaC.services.implementation.search'),
"material": importModule('MaKaC.services.implementation.material'),
"reviewing": importModule('MaKaC.services.implementation.reviewing'),
"minutes": importModule('MaKaC.services.implementation.minutes'),
"news": importModule('MaKaC.services.implementation.news'),
"plugins": importModule('MaKaC.services.implementation.plugins'),
"category": importModule('MaKaC.services.implementation.category'),
"upcomingEvents": importModule('MaKaC.services.implementation.upcoming'),
"timezone": importModule('MaKaC.services.implementation.timezone'),
"scheduler": importModule('MaKaC.services.implementation.scheduler'),
"abstractReviewing": importModule('MaKaC.services.implementation.abstractReviewing'),
"abstract": importModule('MaKaC.services.implementation.abstract'),
"abstracts": importModule('MaKaC.services.implementation.abstracts'),
"admin": importModule('MaKaC.services.implementation.admin'),
"reportNumbers": importModule('MaKaC.services.implementation.reportNumbers'),
"oauth": importModule('MaKaC.services.implementation.oauth'),
"registration": importModule('MaKaC.services.implementation.registration')
}
|
pferreir/indico-backup
|
indico/MaKaC/services/interface/rpc/handlers.py
|
Python
|
gpl-3.0
| 4,556
|
from django.test import TestCase, tag
from django.utils import timezone
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from dashboard.tests.loader import load_model_objects, fixtures_standard
from dashboard.models import ExtractedText, QANotes
@tag("loader")
class ExtractedTest(TestCase):
def setUp(self):
self.objects = load_model_objects()
def test_extracted_doc_date_validation(self):
# check validation for proper length string
text = ExtractedText(
doc_date="Wednesday, January 21, 2014",
data_document=self.objects.doc,
extraction_script=self.objects.script,
)
self.assertRaises(ValidationError, text.clean_fields)
# check validation not thrown for arbitrary date string less than 25 chars
text = ExtractedText(
doc_date="January 1984",
data_document=self.objects.doc,
extraction_script=self.objects.script,
)
try:
text.clean()
except ValidationError:
self.fail("clean() raised ExceptionType unexpectedly!")
# check validation not thrown if doc_date is null
text = ExtractedText(
data_document=self.objects.doc, extraction_script=self.objects.script
)
try:
text.clean()
except ValidationError:
self.fail("clean() raised ExceptionType unexpectedly!")
def test_long_qa_notes(self):
self.objects.extext.qa_edited = True
note = QANotes.objects.create(extracted_text=self.objects.extext)
self.assertEqual(note.qa_notes, "")
note.qa_notes = "A short QA note"
        try:
            note.clean()
        except Exception as ex:
            template = "An exception of type {0} occurred. Arguments:\n{1!r}"
            message = template.format(type(ex).__name__, ex.args)
            self.fail(message)
        # clean() should also accept an arbitrarily long note without raising.
        long_note = "A long QA note" * 200
        note.qa_notes = long_note
        try:
            note.clean()
        except Exception as ex:
            template = "An exception of type {0} occurred. Arguments:\n{1!r}"
            message = template.format(type(ex).__name__, ex.args)
            self.fail(message)
class ExtractedTestWithSeedData(TestCase):
fixtures = fixtures_standard
def test_approvable(self):
for et in ExtractedText.objects.filter(qa_edited=False):
self.assertTrue(et.is_approvable(), "The record should be approvable")
# Turn it into an edited record
et.qa_edited = True
et.save()
self.assertFalse(
et.is_approvable(), "Now the record should not be approvable"
)
qa = QANotes.objects.create(
extracted_text=et, qa_notes="Some notes have been added"
)
self.assertTrue(
et.is_approvable(),
"The record should be approvable after adding a note",
)
# Make the qa_notes field blank
qa.qa_notes = ""
qa.save()
self.assertFalse(
et.is_approvable(),
"The record should not be approvable if the notes exist but are blank",
)
|
HumanExposure/factotum
|
dashboard/tests/unit/test_extracted_text.py
|
Python
|
gpl-3.0
| 3,259
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class PyScadaSMBusConfig(AppConfig):
name = 'pyscada.smbus'
verbose_name = _("PyScada SMBus Devices")
    def ready(self):
        # Importing the signals module registers the app's signal handlers.
        import pyscada.smbus.signals  # noqa: F401
|
trombastic/PyScada
|
pyscada/smbus/apps.py
|
Python
|
gpl-3.0
| 325
|
# Program runs on Windows 7 32 bits with python-2.7.1 + numpy-1.6.0 + scipy-0.9.0 + matplotlib-1.0.1
# contact: Arnaud Reveillere a.reveillere@brgm.fr / arnaud.reveillere@gmail.com
# Import some matplotlib function (only required for the plots)
from pylab import figure, xlabel, ylabel, plot, legend, subplot, savefig, axis
# Add the repository where the SAMBA model is located (absolute or relative path)
from sys import path
path.append('../')
from SAMBA import *
###########################################################
# Parameters describing the problem
###########################################################
# Bottom aquifer
h_b = 30. # m height
w_b = 0.12 # porosity
k_b = 6.67e-13 # m2 permeability
Cr_b = 4.5e-10 # Pa-1 pore compressibility
# Top aquifer
h_t = 80. # m height
w_t = 0.2 # porosity
k_t = 1.e-12 # m2 permeability
Cr_t = 4.5e-10 # Pa-1 pore compressibility
MixingCoef = 0.        # fraction of the lifting brine that mixes into the top aquifer
# T,P initial conditions
T_lb = 65.             # Celsius     temperature at the bottom of the leak
gradT = 0.03           # Celsius/m   geothermal gradient
P_lb = 146.e5 # Pa pressure at the bottom of the leak
# Leak properties
r_l =.1102 # m leak radius
h_l = 820. # m total height
h_pc = 0. # m porous column height; 0 < h_pc <= h_l
w_pc = 1. # porous column porosity
k_pc = 1. # m2 porous column permeability
# Injection parameters
d = 3025. # m injection - leak distance
Q0 = 200./3600 # m3/s injection volumetric flow rate
t_inj = 35/365.25*3.1557e7 # s injection duration
t_sim = 100/365.25*3.1557e7 # s simulation duration
# Brine salinities (Salt mass fractions, no unit)
Xs_b = 0.035 # bottom aquifer
Xs_t = 0.001 # top aquifer
Xs0_lifted = 0.001 # bottom of the leak
gradXs_lifted = 0. # m-1 salinity gradient
###########################################################
# Semi-analytical resolution
###########################################################
# Semi-analytical resolution
[t, Ql, zi, DP_lb, DP_c, DP_lt,
InjectionForce, DensityDifference, PorousColumnResistance, AquifersResistance, TubingPressureLoss,
MonotonicQl, gamma
]=LeakageSolution(h_b, w_b, k_b, Cr_b, # Bottom aquifer properties
h_t, w_t, k_t, Cr_t, MixingCoef, # Top aquifer properties
T_lb, gradT, P_lb, # T,P initial conditions
r_l, h_l, h_pc, w_pc, k_pc, # leak properties
Xs_b, Xs_t, Xs0_lifted, gradXs_lifted, # Brines salinities as salt mass fraction
d, Q0, t_inj, t_sim, # Leak to injection distance, injection flow rate and duration, simulation time
True, True, True, True, # Isothermal leakage, tubing P losses included, Compute driving P
"Example_3_SAMBA_User_Guide.txt", 3000) # Export results in text file
###########################################################
# Plots
###########################################################
# Modify some units.
Ql = 3600*Ql # m3 per hour
t = t*365.25/3.1557e7 # days
# Plot the flow rate, the lifting - lifted brines interface position and the over-pressures (in MPa)
figure(1)
subplot(311)
plot(t, Ql)
ylabel('$Q_L\ (m^3.h^{-1})$')
subplot(312)
plot(t, zi)
ylabel('$z_I\ (m)$')
subplot(313)
plot(t, 1.e-6*DP_lt, label = r'$P_{L+}$')
plot(t, 1.e-6*DP_lb, label = r'$P_{L-}$')
ylabel(r'$P-P(t=0)\ (MPa)$')
legend(loc = 'best')
xlabel('$\mathrm{Time}\ (days)$')
savefig('Example_3_SAMBA_User_Guide_FlowRate&Pressures.png', dpi = 200)
# Plot driving and countering pressures over time
figure(2)
plot(t, 1.e-6*InjectionForce, label = "$\mathrm{Injection\ pressurization}$")
plot(t, 1.e-6*DensityDifference, label = "$\mathrm{Brines\ density\ difference}$")
plot(t, 1.e-6*AquifersResistance, label = "$\mathrm{Aquifers\ flow\ resistance}$")
plot(t, 1.e-6*TubingPressureLoss, label = "$\mathrm{Tubing\ pressure\ loss}$")
ylabel('$\mathrm{Driving\ and\ countering\ pressures}\ (MPa)$')
legend(loc = 'best')
xlabel('$\mathrm{Time}\ (days)$')
savefig("Example_3_SAMBA_User_Guide_Drinving&CounteringPressures.png", format = 'png', dpi = 200)
# Plot the leakage flow rate decomposition into monotonic components and the gamma parameter
figure(3)
subplot(211)
k=0
for Qlk in MonotonicQl:
plot(t, 3600*GetDimensionalFlow(Qlk, Q0), label = '$Q_{L,%s}$' % k)
k += 1
ylabel('$Q_{L,k}\ (m^3.h^{-1})$')
legend(loc = 'center right')
subplot(212)
k=0
for g in gamma:
plot(t, g, label = '$\gamma_{%s}$' % k)
k += 1
ylabel('$\gamma_k$')
legend(loc = 'center right')
xlabel('$\mathrm{Time}\ (days)$')
savefig("Example_3_SAMBA_User_Guide_ConvolutionIntegralsApproximation.png", format = 'png', dpi = 200)
|
arnaud-reveillere/SAMBA
|
Examples/SAMBA_User_Guide_Example_3.py
|
Python
|
gpl-3.0
| 5,169
|
"""
Routes and views for attendances CRUD.
"""
from flask_login import login_required, current_user
from flask import Blueprint, render_template, redirect, url_for, request
from models.factory import PosGraduationFactory
from settings.extensions import ExtensionsManager
from views.forms.content import AttendanceForm
from bson.json_util import dumps
from bson.objectid import ObjectId
crud_attendances = Blueprint('crud_attendances', __name__, url_prefix='/admin')
@crud_attendances.route('/editar_contato/', methods=['GET', 'POST'])
@login_required
def edit_attendance():
form = AttendanceForm()
pfactory = PosGraduationFactory(current_user.pg_initials)
    dao = pfactory.attendances_dao()
    # Serialize the current attendance document for rendering in the template.
    attendance_json = dumps(dict(dao.find_one()))
if form.validate_on_submit() and form.create.data:
new_attendance = {
'location' : {
'building' : form.building.data,
'floor' : form.floor.data,
'room' : form.room.data,
'opening' : form.opening.data
},
'email': form.email.data,
'calendar': form.calendar.data,
'phones' : {
'0' : {
'type' : form.type1.data,
'number': form.phone1.data
},
'1' : {
'type': form.type2.data,
'number': form.phone2.data
},
'2' : {
'type': form.type3.data,
'number': form.phone3.data
}
}
}
dao.find_one_and_update({'_id' : ObjectId(form.attendance_id.data)}, {
'$set': new_attendance
})
return redirect(
url_for(
'crud_attendances.edit_attendance',
success_msg='Contato editado com sucesso.'))
return render_template(
'admin/edit_attendance.html',
        attendance=attendance_json,
form=form,
success_msg=request.args.get('success_msg')
)
"""
@crud_attendances.route('/deletar_artigo/', methods=['GET', 'POST'])
@login_required
def delete_article():
form = ArticleForm()
pfactory = PosGraduationFactory(current_user.pg_initials)
dao = pfactory.publications_dao()
json = pfactory.publications_dao().find_one()
json = dict(json)
json = dumps(json)
if form.validate_on_submit() and form.create.data:
index = str(form.index.data)
dao.find_one_and_update(None, {
'$set': {'articles.' + index + '.deleted' : ''}
})
return redirect(
url_for(
'crud_attendances.delete_article',
success_msg='Arigo deletado com sucesso.'))
return render_template(
'admin/delete_articles.html',
publications=json,
form=form,
success_msg=request.args.get('success_msg')
)
"""
|
Mazuh/Minerva
|
views/crud_attendances.py
|
Python
|
gpl-3.0
| 2,990
|
# -*- coding: utf-8 -*-
# Copyright (C) projekt-und-partner.com, 2010
# Author: Michael Jenny <michael.jenny%40projekt-und-partner.com>
import random
from sqlalchemy import *
from migrate import *
from migrate.changeset import *
from migrate.changeset.constraint import ForeignKeyConstraint
class upgradedataobj(object):
def generate_random_identifier(self):
n_id = random.randint(0, 100000000)
id = "%08d" % n_id
return id
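# Note: the same identifier generator also exists as a module-level function,
# attached to the migrate engine in upgrade() below.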
metadata = MetaData()
p2_model = Table('p2_model',
metadata,
Column('plan_identifier', String(length=255), primary_key=True, nullable=False, autoincrement=False),
Column('fk_default_form', String(10), index=True),
Column('klass', String(255), nullable=True, unique=True),
Column('table', String(255), nullable=True),
mysql_engine='InnoDB',
)
# Layout options
# 0 : form layout (absolute positioning)
# 1 : list layout (document flow)
# 2 : tree layout (document flow)
p2_form = Table('p2_form',
metadata,
Column('form_identifier', String(16), primary_key=True, autoincrement=False),
Column('form_name', String(63)),
Column('fk_p2_model',
ForeignKey('p2_model.plan_identifier', onupdate="CASCADE"),
nullable=True),
Column('css', String(1023), nullable=False, default=''),
Column('fk_p2_formlayout',
ForeignKey('p2_formlayout.id', onupdate="CASCADE"),
default='FORM'),
mysql_engine='InnoDB',
)
p2_widget = Table('p2_widget',
metadata,
Column('widget_identifier', String(10), primary_key=True, autoincrement=False),
Column('fk_p2_form', ForeignKey('p2_form.form_identifier')),
Column('widget_type', ForeignKey('p2_model.klass', onupdate="CASCADE")),
Column('css', String(1023), nullable=False, default=''),
Column('tab_order', Integer, nullable=False, default=0),
mysql_engine='InnoDB',
)
#p2_widget_type = Table('p2_widget_type', metadata,
# Column('id', String(63), primary_key=True, autoincrement=False),
# mysql_engine='InnoDB')
p2_cardinality = Table('p2_cardinality',
metadata,
Column('id', String(length=32), primary_key=True, nullable=False, autoincrement=False),
Column('cardinality', String(64), nullable=False),
mysql_engine='InnoDB',
)
#p2_embform_characteristic = Table('p2_embform_characteristic',
# metadata,
# Column('id', String(length=32), primary_key=True, nullable=False, autoincrement=False),
# Column('title', String(64), nullable=False),
# mysql_engine='InnoDB',
#)
p2_relation = Table('p2_relation',
metadata,
Column('id', String(10), primary_key=True, autoincrement=False),
Column('foreignkeycol', String(63), nullable=True),
Column('foreignkeycol2', String(63), nullable=True), # Required for n:m relations
Column('source_table', String(255), nullable=True),
Column('target_table', String(255), nullable=True),
Column('xref_table', String(255), nullable=True),
Column('fk_p2_cardinality', ForeignKey('p2_cardinality.id'), nullable=False),
mysql_engine='InnoDB')
p2_linkage = Table('p2_linkage',
metadata,
Column('id', String(10), primary_key=True, autoincrement=False),
Column('attr_name', String(63), nullable=True),
Column('ref_key', String(63), nullable=True),
Column('back_populates', String(63), nullable=True),
Column('cascade', String(255), nullable=True),
Column('fk_p2_relation', ForeignKey('p2_relation.id', onupdate="CASCADE"), nullable=True),
Column('post_update', Boolean), #http://www.sqlalchemy.org/docs/05/mappers.html#rows-that-point-to-themselves-mutually-dependent-rows
Column('fk_source_model', ForeignKey('p2_model.plan_identifier', onupdate="CASCADE"), nullable=True),
Column('fk_target_model', ForeignKey('p2_model.plan_identifier', onupdate="CASCADE"), nullable=True),
mysql_engine='InnoDB')
p2_span = Table('p2_span',
metadata,
Column('span_identifier', String(10)),
Column('fk_p2_widget', ForeignKey('p2_widget.widget_identifier')),
Column('span_name', String(63), index=True),
Column('span_type', ForeignKey('p2_model.klass', onupdate="CASCADE")),
Column('span_value', String(255), nullable=True),
Column('css', String(1023), nullable=False, default=''),
Column('visible', Boolean, nullable=True, default=True),
                Column('order', Integer, primary_key=True, autoincrement=True), # Primary key is only temporary. We are interested in the autoincrement functionality. See below.
mysql_engine='InnoDB',
)
# HACK: We want an autoincrementing order field. This allows the spans to retain the insert order.
# http://stackoverflow.com/questions/2937229/set-auto-increment-using-sqlalchemy-with-mysql-on-columns-with-non-primary-keys/5410205#5410205
from sqlalchemy.schema import DDL
DDL("ALTER TABLE p2_span DROP PRIMARY KEY, ADD UNIQUE KEY(`order`); ALTER TABLE p2_span ADD PRIMARY KEY(`span_identifier`);", on='mysql').execute_at('after-create', p2_span)
# This table inherits all attributes from p2_span!
p2_span_alphanumeric = Table('p2_span_alphanumeric',
metadata,
Column('span_identifier', String(10), ForeignKey('p2_span.span_identifier', onupdate="CASCADE"), primary_key=True), # Joined table inheritance!
Column('attr_name', String(63), nullable=True),
Column('field_identifier', String(63), nullable=True),
#Column('multi_line', Boolean, nullable=True),
Column('fk_field_type', ForeignKey('p2_fieldtype.id'), nullable=True),
Column('required', Boolean, nullable=True, default=True),
mysql_engine='InnoDB'
)
# This table inherits all attributes from p2_span!
p2_span_checkbox = Table('p2_span_checkbox',
metadata,
Column('span_identifier', String(10), ForeignKey('p2_span.span_identifier', onupdate="CASCADE"), primary_key=True), # Joined table inheritance!
Column('attr_name', String(63), nullable=True),
Column('field_identifier', String(63), nullable=True),
mysql_engine='InnoDB'
)
# This table inherits all attributes from p2_span!
p2_span_dropdown = Table('p2_span_dropdown',
metadata,
Column('span_identifier', String(10), ForeignKey('p2_span.span_identifier', onupdate="CASCADE"), primary_key=True),
Column('fk_p2_linkage', ForeignKey('p2_linkage.id', onupdate="CASCADE")),
Column('plan_identifier', String(63), nullable=True),
Column('attr_name', String(63), nullable=True),
Column('required', Boolean, nullable=True, default=False),
mysql_engine='InnoDB'
)
# This table inherits all attributes from p2_span!
p2_span_embeddedform = Table('p2_span_embeddedform',
metadata,
Column('span_identifier', String(10), ForeignKey('p2_span.span_identifier', onupdate="CASCADE"), primary_key=True),
Column('form_name', String(63), nullable=True),
Column('plan_identifier', String(63), nullable=True),
Column('filter_clause', String(255), nullable=True),
Column('editable', Boolean, default=True),
Column('fk_p2_linkage', ForeignKey('p2_linkage.id', onupdate="CASCADE"), nullable=True),
#Column('fk_characteristic', ForeignKey('p2_embform_characteristic.id', onupdate="CASCADE"), nullable=False, default="LIST"),
#Column('adjacency_linkage', String(10), nullable=True),
mysql_engine='InnoDB'
)
# Joined table inheritance. This table inherits all attributes from p2_span!
p2_span_fileupload = Table('p2_span_fileupload',
metadata,
Column('span_identifier', String(10), ForeignKey('p2_span.span_identifier', onupdate="CASCADE"), primary_key=True),
Column('fk_p2_linkage', ForeignKey('p2_linkage.id', onupdate="CASCADE")),
Column('fk_p2_relation', ForeignKey('p2_relation.id', onupdate="CASCADE")),
mysql_engine='InnoDB'
)
# This table inherits all attributes from p2_span!
p2_span_action = Table('p2_span_action',
metadata,
Column('span_identifier', String(10), ForeignKey('p2_span.span_identifier', onupdate="CASCADE"), primary_key=True),
Column('aktion', String(63), nullable=True),
mysql_engine='InnoDB'
)
p2_archetype = Table('p2_archetype',
metadata,
Column('id', String(10), primary_key=True, autoincrement=False),
mysql_engine='InnoDB'
)
p2_media = Table('p2_media',
metadata,
Column('id', String(10), primary_key=True, autoincrement=True),
Column('filename', String(255), nullable=True),
Column('size', Integer, nullable=True),
Column('data', BLOB(16777215)),
Column('thumbnail', BLOB),
Column('mime_type', String(63)),
mysql_engine='InnoDB'
)
p2_fieldtype = Table('p2_fieldtype',
metadata,
Column('id', String(10), primary_key=True, nullable=False, autoincrement=False),
Column('field_type', String(32), nullable=False),
mysql_engine='InnoDB',
)
p2_country = Table('p2_country',
metadata,
Column('id', String(10), primary_key=True, nullable=False, autoincrement=False),
Column('country_name', String(64), nullable=False),
Column('country_iso_code_2', CHAR(length=2), nullable=False),
Column('country_iso_code_3', CHAR(length=3), nullable=False),
mysql_engine='InnoDB',
)
p2_formlayout = Table('p2_formlayout', metadata,
Column('id', String(10),
primary_key=True, autoincrement=False),
Column('name', String(32)),
mysql_engine='InnoDB')
def upgrade(migrate_engine):
# Upgrade operations go here.
metadata.bind = migrate_engine
metadata.create_all()
ForeignKeyConstraint(columns=[p2_model.c.fk_default_form] , refcolumns=[p2_form.c.form_identifier]).create()
# data dict for user data during upgrade/downgrade process
migrate_engine.data = {}
def generate_random_identifier():
n_id = random.randint(0, 100000000)
id = "%08d" % n_id
return id
migrate_engine.generate_random_identifier = generate_random_identifier
def downgrade(migrate_engine):
metadata.bind = migrate_engine
ForeignKeyConstraint(columns=[p2_model.c.fk_default_form] , refcolumns=[p2_form.c.form_identifier]).drop()
# Operations to reverse the above upgrade go here.
p2_span_dropdown.drop(migrate_engine)
p2_span_checkbox.drop(migrate_engine)
p2_span_alphanumeric.drop(migrate_engine)
p2_span_action.drop(migrate_engine)
p2_span_fileupload.drop(migrate_engine)
p2_span_embeddedform.drop(migrate_engine)
p2_linkage.drop(migrate_engine)
p2_relation.drop(migrate_engine)
#p2_embform_characteristic.drop(migrate_engine)
p2_span.drop(migrate_engine)
p2_archetype.drop(migrate_engine)
p2_widget.drop(migrate_engine)
p2_form.drop(migrate_engine)
p2_model.drop(migrate_engine)
p2_fieldtype.drop(migrate_engine)
    p2_cardinality.drop(migrate_engine)
    p2_country.drop(migrate_engine)
    p2_formlayout.drop(migrate_engine)
#p2_widget_type.drop()
|
prinzdezibel/p2.datashackle.repository
|
p2/datashackle/repository/versions/001_initial_schema.py
|
Python
|
gpl-3.0
| 12,267
|
"""
Tests to check that all pages work as expected with a 200 status code and not 404 or 500 for example.
"""
# local imports
from base_tester import URLTestCase
class CasesTestCase(URLTestCase):
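    # Observed contract of the inherited helper (defined in base_tester, not
    # shown here): _check_url(path, user_pk, expected_status=200) appears to
    # log in as the user with that primary key (None = anonymous) and assert
    # the response status code.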
def test_view_all_cases(self):
self._check_url('/cases/', None, 401) # not logged in
self._check_url('/cases/', 1) # login as admin
self._check_url('/cases/', 19) # login as a case manager
        self._check_url('/cases/', 11) # login as an investigator
self._check_url('/cases/', 7) # login as a QA
self._check_url('/cases/', 33) # login as a requester
self._check_url('/cases/', 37) # login as an authoriser
def test_view_case(self):
self._check_url('/cases/test_doesnt_exist/', 1, 404) # login as admin, but wrong case
self._check_url('/cases/2/', None, 401) # not logged in
self._check_url('/cases/2/', 1) # login as admin
self._check_url('/cases/2/', 19) # login as a case manager
self._check_url('/cases/2/', 11) # login as an investigator
self._check_url('/cases/2/', 7) # login as a QA
self._check_url('/cases/2/', 33, 403) # login as a requester
self._check_url('/cases/2/', 39, 403) # login as an authoriser
self._check_url('/cases/2/', 18) # login as a primary case manager for this case
self._check_url('/cases/2/', 5) # login as a primary investigator for this case
self._check_url('/cases/2/', 3) # login as a secondary investigator for this case
self._check_url('/cases/2/', 4) # login as a primary QA for this case
self._check_url('/cases/2/', 2) # login as a secondary QA for this case
self._check_url('/cases/2/', 28) # login as a requester for this case
self._check_url('/cases/2/', 38) # login as a authoriser for this case
def test_view_private_case(self):
self._check_url('/cases/1/', None, 401) # not logged in
self._check_url('/cases/1/', 1) # login as admin
self._check_url('/cases/1/', 19, 403) # login as a case manager
self._check_url('/cases/1/', 11, 403) # login as an investigator
self._check_url('/cases/1/', 7, 403) # login as a QA
self._check_url('/cases/1/', 28, 403) # login as a requester
self._check_url('/cases/1/', 38, 403) # login as an authoriser
self._check_url('/cases/1/', 17) # login as a primary case manager for this case
self._check_url('/cases/1/', 18) # login as a secondary case manager for this case
self._check_url('/cases/1/', 3) # login as a primary investigator for this case
self._check_url('/cases/1/', 4) # login as a primary QA for this case
self._check_url('/cases/1/', 2) # login as a secondary QA for this case
self._check_url('/cases/1/', 27) # login as a requester for this case
self._check_url('/cases/1/', 37) # login as an authoriser for this case
def test_add_case(self):
self._check_url('/cases/add/', None, 401) # not logged in
self._check_url('/cases/add/', 1) # login as admin
self._check_url('/cases/add/', 19) # login as a case manager
self._check_url('/cases/add/', 11, 403) # login as an investigator
self._check_url('/cases/add/', 7, 403) # login as a QA
self._check_url('/cases/add/', 33) # login as a requester
self._check_url('/cases/add/', 39, 403) # login as an authoriser
def test_edit_case(self):
self._check_url('/cases/edit/test_doesnt_exist/', 1, 404) # login as admin, but wrong case
self._check_url('/cases/edit/2/', None, 401) # not logged in
self._check_url('/cases/edit/2/', 1) # login as admin
self._check_url('/cases/edit/2/', 19, 403) # login as a case manager
self._check_url('/cases/edit/2/', 11, 403) # login as an investigator
self._check_url('/cases/edit/2/', 7, 403) # login as a QA
self._check_url('/cases/edit/2/', 33, 403) # login as a requester
self._check_url('/cases/edit/2/', 39, 403) # login as an authoriser
self._check_url('/cases/edit/2/', 18) # login as a primary case manager for this case
self._check_url('/cases/edit/2/', 5, 403) # login as a primary investigator for this case
self._check_url('/cases/edit/2/', 3, 403) # login as a secondary investigator for this case
self._check_url('/cases/edit/2/', 4, 403) # login as a primary QA for this case
self._check_url('/cases/edit/2/', 2, 403) # login as a secondary QA for this case
self._check_url('/cases/edit/2/', 28) # login as a requester for this case
self._check_url('/cases/edit/2/', 38, 403) # login as an authoriser for this case
def test_close_case(self):
self._check_url('/cases/close/test_doesnt_exist/', 1, 404) # login as admin, but wrong case
self._check_url('/cases/close/2/', None, 401) # not logged in
self._check_url('/cases/close/2/', 1) # login as admin
self._check_url('/cases/close/2/', 19, 403) # login as a case manager
self._check_url('/cases/close/2/', 11, 403) # login as an investigator
self._check_url('/cases/close/2/', 7, 403) # login as a QA
self._check_url('/cases/close/2/', 33, 403) # login as a requester
self._check_url('/cases/close/2/', 39, 403) # login as an authoriser
self._check_url('/cases/close/2/', 18) # login as a primary case manager for this case
self._check_url('/cases/close/2/', 5, 403) # login as a primary investigator for this case
self._check_url('/cases/close/2/', 3, 403) # login as a secondary investigator for this case
self._check_url('/cases/close/2/', 4, 403) # login as a primary QA for this case
self._check_url('/cases/close/2/', 2, 403) # login as a secondary QA for this case
self._check_url('/cases/close/2/', 28, 403) # login as a requester for this case
self._check_url('/cases/close/2/', 38, 403) # login as an authoriser for this case
def test_change_status_case(self):
self._check_url('/cases/change_status/test_doesnt_exist/?status=Closed', 1, 404) # login as admin, but wrong case
self._check_url('/cases/change_status/2/', 1, 404) # login as admin, but no actual status change
self._check_url('/cases/change_status/2/?status=Testing', 1, 404) # login as admin, but wrong type of status
self._check_url('/cases/change_status/2/?status=Closed', None, 401) # not logged in
self._check_url('/cases/change_status/2/?status=Closed', 1) # login as admin
self._check_url('/cases/change_status/2/?status=Closed', 19, 403) # login as a case manager
self._check_url('/cases/change_status/2/?status=Closed', 11, 403) # login as an investigator
self._check_url('/cases/change_status/2/?status=Closed', 7, 403) # login as a QA
self._check_url('/cases/change_status/2/?status=Closed', 33, 403) # login as a requester
self._check_url('/cases/change_status/2/?status=Closed', 39, 403) # login as an authoriser
self._check_url('/cases/change_status/2/?status=Closed', 18) # login as a primary case manager for this case
self._check_url('/cases/change_status/2/?status=Closed', 5, 403) # login as a primary investigator for this case
self._check_url('/cases/change_status/2/?status=Closed', 3, 403) # login as a secondary investigator for this case
self._check_url('/cases/change_status/2/?status=Closed', 4, 403) # login as a primary QA for this case
self._check_url('/cases/change_status/2/?status=Closed', 2, 403) # login as a secondary QA for this case
self._check_url('/cases/change_status/2/?status=Closed', 28) # login as a requester for this case
self._check_url('/cases/change_status/2/?status=Closed', 38, 403) # login as an authoriser for this case
def test_view_upload_file(self):
# all those who can view cases can view case file uploads
self._check_url('/cases/3/uploads/1/', 1, 404) # login as admin, but wrong case
self._check_url('/cases/test/uploads/1/', 1, 404) # login as admin, but wrong case
self._check_url('/cases/2/uploads/4/', 1, 404) # login as admin, but wrong upload
self._check_url('/cases/2/uploads/test/', 1, 404) # login as admin, but wrong upload
self._check_url('/cases/2/uploads/1/', None, 401) # not logged in
self._check_url('/cases/2/uploads/1/', 1) # login as admin
self._check_url('/cases/2/uploads/1/', 19) # login as a case manager
self._check_url('/cases/2/uploads/1/', 11) # login as an investigator
self._check_url('/cases/2/uploads/1/', 7) # login as a QA
self._check_url('/cases/2/uploads/1/', 33, 403) # login as a requester
self._check_url('/cases/2/uploads/1/', 39, 403) # login as an authoriser
self._check_url('/cases/2/uploads/1/', 18) # login as a primary case manager for this case
self._check_url('/cases/2/uploads/1/', 5) # login as a primary investigator for this case
self._check_url('/cases/2/uploads/1/', 3) # login as a secondary investigator for this case
self._check_url('/cases/2/uploads/1/', 4) # login as a primary QA for this case
self._check_url('/cases/2/uploads/1/', 2) # login as a secondary QA for this case
self._check_url('/cases/2/uploads/1/', 28) # login as a requester for this case
self._check_url('/cases/2/uploads/1/', 38) # login as an authoriser for this case
def test_delete_upload_file(self):
# only admins, the requester and case manager of the case can delete files
self._check_url('/cases/3/uploads/1/delete/', 1, 404) # login as admin, but wrong case
self._check_url('/cases/test/uploads/1/delete/', 1, 404) # login as admin, but wrong case
self._check_url('/cases/2/uploads/4/delete/', 1, 404) # login as admin, but wrong upload
self._check_url('/cases/2/uploads/test/delete/', 1, 404) # login as admin, but wrong upload
self._check_url('/cases/2/uploads/1/delete/', None, 401) # not logged in
self._check_url('/cases/2/uploads/1/delete/', 1) # login as admin
self._check_url('/cases/2/uploads/1/delete/', 19, 403) # login as a case manager
self._check_url('/cases/2/uploads/1/delete/', 11, 403) # login as an investigator
self._check_url('/cases/2/uploads/1/delete/', 7, 403) # login as a QA
self._check_url('/cases/2/uploads/1/delete/', 33, 403) # login as a requester
self._check_url('/cases/2/uploads/1/delete/', 39, 403) # login as an authoriser
self._check_url('/cases/2/uploads/1/delete/', 18) # login as a primary case manager for this case
self._check_url('/cases/2/uploads/1/delete/', 5, 403) # login as a primary investigator for this case
self._check_url('/cases/2/uploads/1/delete/', 3, 403) # login as a secondary investigator for this case
self._check_url('/cases/2/uploads/1/delete/', 4, 403) # login as a primary QA for this case
self._check_url('/cases/2/uploads/1/delete/', 2, 403) # login as a secondary QA for this case
self._check_url('/cases/2/uploads/1/delete/', 28) # login as a requester for this case
self._check_url('/cases/2/uploads/1/delete/', 38, 403) # login as an authoriser for this case
def test_authorise_case(self):
self._check_url('/cases/authorise/test_doesnt_exist/', 1, 404) # login as admin, but wrong case
self._check_url('/cases/authorise/12/', None, 401) # not logged in
self._check_url('/cases/authorise/12/', 1, 403) # login as admin
self._check_url('/cases/authorise/12/', 19, 403) # login as a case manager
self._check_url('/cases/authorise/12/', 11, 403) # login as an investigator
self._check_url('/cases/authorise/12/', 7, 403) # login as a QA
self._check_url('/cases/authorise/12/', 33, 403) # login as a requester
self._check_url('/cases/authorise/12/', 37, 403) # login as an authoriser
self._check_url('/cases/authorise/12/', 18, 403) # login as a primary case manager for this case
self._check_url('/cases/authorise/12/', 39) # login as an authoriser for this case
# case not pending, aka already auth/denied
self._check_url('/cases/authorise/6/', 39, 403) # login as an authoriser for this case
|
ubunteroz/foreman
|
foreman/tests/url_tests/test_case_pages.py
|
Python
|
gpl-3.0
| 12,470
|
# euler.py - Solve ODE systems using the explicit Euler method
# Copyright (C) 2017 Shiva Iyer <shiva.iyer AT g m a i l DOT c o m>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from numpy import zeros
def euler(f, Y0, t):
    '''Integrate the ODE system dY/dt = f(t, Y) with the explicit Euler
    method. Y0 is a column vector of initial values and t an array of time
    points; returns Y with one column of state values per time point.'''
    Y = zeros([Y0.size, t.size])
    Y[:,[0]] = Y0[:,[0]].copy()
    for i in range(1, t.size):
        Y[:,[i]] = Y[:,[i-1]] + (t[i] - t[i-1])*f(t[i-1], Y[:,[i-1]])
    return Y
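if __name__ == "__main__":
    # Minimal usage sketch (illustrative, not part of the original module):
    # integrate dy/dt = -y with y(0) = 1; the exact solution is exp(-t).
    from numpy import array, exp, linspace
    t = linspace(0.0, 1.0, 101)
    Y = euler(lambda ti, Yi: -Yi, array([[1.0]]), t)
    print("max abs error vs exp(-t):", abs(Y[0, :] - exp(-t)).max())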
|
Shiva-Iyer/euler
|
ode/euler.py
|
Python
|
gpl-3.0
| 995
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
__all__ = (
"draw_entry",
"draw_km",
"draw_kmi",
"draw_filtered",
"draw_hierarchy",
"draw_keymaps",
)
import bpy
from bpy.app.translations import pgettext_iface as iface_
from bpy.app.translations import contexts as i18n_contexts
def _indented_layout(layout, level):
indentpx = 16
if level == 0:
level = 0.0001 # Tweak so that a percentage of 0 won't split by half
indent = level * indentpx / bpy.context.region.width
split = layout.split(percentage=indent)
col = split.column()
col = split.column()
return col
def draw_entry(display_keymaps, entry, col, level=0):
idname, spaceid, regionid, children = entry
for km, kc in display_keymaps:
if km.name == idname and km.space_type == spaceid and km.region_type == regionid:
draw_km(display_keymaps, kc, km, children, col, level)
'''
km = kc.keymaps.find(idname, space_type=spaceid, region_type=regionid)
if not km:
kc = defkc
km = kc.keymaps.find(idname, space_type=spaceid, region_type=regionid)
if km:
draw_km(kc, km, children, col, level)
'''
def draw_km(display_keymaps, kc, km, children, layout, level):
km = km.active()
layout.context_pointer_set("keymap", km)
col = _indented_layout(layout, level)
row = col.row()
row.prop(km, "show_expanded_children", text="", emboss=False)
row.label(text=km.name, text_ctxt=i18n_contexts.id_windowmanager)
subrow = row.row()
subrow.alignment = 'RIGHT'
if km.is_user_modified:
subrow.operator("wm.keymap_restore", text="Restore")
if km.is_modal:
subrow.label(text="", icon='LINKED')
del subrow
if km.show_expanded_children:
if children:
# Put the Parent key map's entries in a 'global' sub-category
# equal in hierarchy to the other children categories
subcol = _indented_layout(col, level + 1)
subrow = subcol.row()
subrow.prop(km, "show_expanded_items", text="", emboss=False)
subrow.label(text=iface_("%s (Global)") % km.name, translate=False)
else:
km.show_expanded_items = True
# Key Map items
if km.show_expanded_items:
for kmi in km.keymap_items:
draw_kmi(display_keymaps, kc, km, kmi, col, level + 1)
# "Add New" at end of keymap item list
col = _indented_layout(col, level + 1)
subcol = col.split(percentage=0.2).column()
subcol.operator("wm.keyitem_add", text="Add New", text_ctxt=i18n_contexts.id_windowmanager,
icon='ZOOMIN')
col.separator()
# Child key maps
if children:
subcol = col.column()
row = subcol.row()
for entry in children:
draw_entry(display_keymaps, entry, col, level + 1)
def draw_kmi(display_keymaps, kc, km, kmi, layout, level):
map_type = kmi.map_type
col = _indented_layout(layout, level)
if kmi.show_expanded:
col = col.column(align=True)
box = col.box()
else:
box = col.column()
split = box.split(percentage=0.05)
# header bar
row = split.row()
row.prop(kmi, "show_expanded", text="", emboss=False)
row = split.row()
row.prop(kmi, "active", text="", emboss=False)
if km.is_modal:
row.prop(kmi, "propvalue", text="")
else:
row.label(text=kmi.name)
row = split.row()
row.prop(kmi, "map_type", text="")
if map_type == 'KEYBOARD':
row.prop(kmi, "type", text="", full_event=True)
elif map_type == 'MOUSE':
row.prop(kmi, "type", text="", full_event=True)
elif map_type == 'NDOF':
row.prop(kmi, "type", text="", full_event=True)
elif map_type == 'TWEAK':
subrow = row.row()
subrow.prop(kmi, "type", text="")
subrow.prop(kmi, "value", text="")
elif map_type == 'TIMER':
row.prop(kmi, "type", text="")
else:
row.label()
if (not kmi.is_user_defined) and kmi.is_user_modified:
row.operator("wm.keyitem_restore", text="", icon='BACK').item_id = kmi.id
else:
row.operator("wm.keyitem_remove", text="", icon='X').item_id = kmi.id
# Expanded, additional event settings
if kmi.show_expanded:
box = col.box()
split = box.split(percentage=0.4)
sub = split.row()
if km.is_modal:
sub.prop(kmi, "propvalue", text="")
else:
# One day...
#~ sub.prop_search(kmi, "idname", bpy.context.window_manager, "operators_all", text="")
sub.prop(kmi, "idname", text="")
if map_type not in {'TEXTINPUT', 'TIMER'}:
sub = split.column()
subrow = sub.row(align=True)
if map_type == 'KEYBOARD':
subrow.prop(kmi, "type", text="", event=True)
subrow.prop(kmi, "value", text="")
elif map_type in {'MOUSE', 'NDOF'}:
subrow.prop(kmi, "type", text="")
subrow.prop(kmi, "value", text="")
subrow = sub.row()
subrow.scale_x = 0.75
subrow.prop(kmi, "any")
subrow.prop(kmi, "shift")
subrow.prop(kmi, "ctrl")
subrow.prop(kmi, "alt")
subrow.prop(kmi, "oskey", text="Cmd")
subrow.prop(kmi, "key_modifier", text="", event=True)
# Operator properties
box.template_keymap_item_properties(kmi)
# Modal key maps attached to this operator
if not km.is_modal:
kmm = kc.keymaps.find_modal(kmi.idname)
if kmm:
draw_km(display_keymaps, kc, kmm, None, layout, level + 1)
layout.context_pointer_set("keymap", km)
_EVENT_TYPES = set()
_EVENT_TYPE_MAP = {}
def draw_filtered(display_keymaps, filter_type, filter_text, layout):
if filter_type == 'NAME':
def filter_func(kmi):
return (filter_text in kmi.idname.lower() or
filter_text in kmi.name.lower())
else:
if not _EVENT_TYPES:
enum = bpy.types.Event.bl_rna.properties["type"].enum_items
_EVENT_TYPES.update(enum.keys())
_EVENT_TYPE_MAP.update({item.name.replace(" ", "_").upper(): key
for key, item in enum.items()})
del enum
_EVENT_TYPE_MAP.update({
"`": 'ACCENT_GRAVE',
"*": 'NUMPAD_ASTERIX',
"/": 'NUMPAD_SLASH',
"RMB": 'RIGHTMOUSE',
"LMB": 'LEFTMOUSE',
"MMB": 'MIDDLEMOUSE',
})
            _EVENT_TYPE_MAP.update({
                "%d" % i: "NUMPAD_%d" % i for i in range(10)
            })
# done with once off init
        filter_text_split = filter_text.strip().split()
# Modifier {kmi.attribute: name} mapping
key_mod = {
"ctrl": "ctrl",
"alt": "alt",
"shift": "shift",
"cmd": "oskey",
"oskey": "oskey",
"any": "any",
}
# KeyMapItem like dict, use for comparing against
# attr: state
kmi_test_dict = {}
        # initialize? - so if a kmi has a MOD assigned it won't show up.
#~ for kv in key_mod.values():
#~ kmi_test_dict[kv] = False
# altname: attr
for kk, kv in key_mod.items():
if kk in filter_text_split:
filter_text_split.remove(kk)
kmi_test_dict[kv] = True
        # what's left should be the event type
if len(filter_text_split) > 1:
return False
elif filter_text_split:
kmi_type = filter_text_split[0].upper()
if kmi_type not in _EVENT_TYPES:
# replacement table
kmi_type_test = _EVENT_TYPE_MAP.get(kmi_type)
if kmi_type_test is None:
# print("Unknown Type:", kmi_type)
# Partial match
for k, v in _EVENT_TYPE_MAP.items():
if kmi_type in k:
kmi_type_test = v
break
if kmi_type in v:
kmi_type_test = v
break
if kmi_type_test is None:
return False
kmi_type = kmi_type_test
del kmi_type_test
kmi_test_dict["type"] = kmi_type
# main filter func, runs many times
def filter_func(kmi):
for kk, ki in kmi_test_dict.items():
if getattr(kmi, kk) != ki:
return False
return True
for km, kc in display_keymaps:
km = km.active()
layout.context_pointer_set("keymap", km)
filtered_items = [kmi for kmi in km.keymap_items if filter_func(kmi)]
if filtered_items:
col = layout.column()
row = col.row()
row.label(text=km.name, icon='DOT')
row.label()
row.label()
if km.is_user_modified:
row.operator("wm.keymap_restore", text="Restore")
else:
row.label()
for kmi in filtered_items:
draw_kmi(display_keymaps, kc, km, kmi, col, 1)
# "Add New" at end of keymap item list
col = _indented_layout(layout, 1)
subcol = col.split(percentage=0.2).column()
subcol.operator("wm.keyitem_add", text="Add New", icon='ZOOMIN')
return True
def draw_hierarchy(display_keymaps, layout):
from bpy_extras import keyconfig_utils
for entry in keyconfig_utils.KM_HIERARCHY:
draw_entry(display_keymaps, entry, layout)
def draw_keymaps(context, layout):
from bpy_extras import keyconfig_utils
wm = context.window_manager
kc = wm.keyconfigs.user
spref = context.space_data
col = layout.column()
sub = col.column()
subsplit = sub.split()
subcol = subsplit.column()
row = subcol.row(align=True)
#~ row.prop_search(wm.keyconfigs, "active", wm, "keyconfigs", text="Key Config:")
text = bpy.path.display_name(wm.keyconfigs.active.name)
if not text:
text = "Blender (default)"
row.menu("USERPREF_MT_keyconfigs", text=text)
row.operator("wm.keyconfig_preset_add", text="", icon='ZOOMIN')
row.operator("wm.keyconfig_preset_add", text="", icon='ZOOMOUT').remove_active = True
#~ layout.context_pointer_set("keyconfig", wm.keyconfigs.active)
#~ row.operator("wm.keyconfig_remove", text="", icon='X')
row.separator()
rowsub = row.split(align=True, percentage=0.33)
# postpone drawing into rowsub, so we can set alert!
col.separator()
display_keymaps = keyconfig_utils.keyconfig_merge(kc, kc)
filter_type = spref.filter_type
filter_text = spref.filter_text.strip()
if filter_text:
filter_text = filter_text.lower()
ok = draw_filtered(display_keymaps, filter_type, filter_text, col)
else:
draw_hierarchy(display_keymaps, col)
ok = True
# go back and fill in rowsub
rowsub.prop(spref, "filter_type", text="")
rowsubsub = rowsub.row(align=True)
if not ok:
rowsubsub.alert = True
rowsubsub.prop(spref, "filter_text", text="", icon='VIEWZOOM')
|
cschenck/blender_sim
|
fluid_sim_deps/blender-2.69/2.69/scripts/modules/rna_keymap_ui.py
|
Python
|
gpl-3.0
| 12,331
|
from jobs.models import Job
from django.contrib import admin
admin.site.register(Job)
|
lanqiyi/LanqiyiTestToolSet
|
jobs/admin.py
|
Python
|
gpl-3.0
| 86
|
#example2.py source code
import turtle #1
t=turtle.Pen() #2
turtle.bgcolor('green') #3
t.color('blue') #4
t.begin_fill() #5
for x in range(1,73): #6
t.forward(250) #7
t.right(185) #8
t.end_fill() #9
|
ganeshluna/python-100-projects
|
example2.py
|
Python
|
gpl-3.0
| 278
|
def usAl(taban, us):
    # Iterative exponentiation by squaring: computes taban ** us.
    # (Identifiers are Turkish: taban = base, us = exponent, sonuc = result.)
    sonuc = 1
    while us > 0:
        if us % 2 == 0:
            us = us // 2
            taban = taban * taban
        else:
            us = us - 1
            sonuc = sonuc * taban
            us = us // 2
            taban = taban * taban
    return sonuc

print(usAl(2, 5))
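# Illustrative sanity check (not in the original): exponentiation by squaring
# should agree with Python's built-in ** operator.
for b, e in ((2, 5), (3, 7), (10, 0)):
    assert usAl(b, e) == b ** e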
|
canberkkoc/AlgoritmaAnalizi
|
Hafta6/OzyinelemesizUsAlma1.py
|
Python
|
gpl-3.0
| 299
|
"""Test the FEI stream readers.
Because there is no official description of the format, these tests just check
consistency between ``array_to_stream`` and ``stream_to*array``. In the
particular case of stream to sparse array, we use dask to compute the array
in order to mimic the usage in the FEI EMD reader.
"""
import numpy as np
import dask.array as da
import pytest
from hyperspy.misc.io.fei_stream_readers import (
array_to_stream, stream_to_array, stream_to_sparse_COO_array, sparse_installed)
@pytest.mark.parametrize("lazy", (True, False))
def test_dense_stream(lazy):
arr = np.random.randint(0, 65535, size=(2, 3, 4, 5)).astype("uint16")
stream = array_to_stream(arr)
if lazy:
if not sparse_installed:
pytest.skip("The sparse package is not installed")
arrs = da.from_array(stream_to_sparse_COO_array(
stream, spatial_shape=(3, 4), sum_frames=False, channels=5,
last_frame=2), chunks=(1, 1, 2, 5))
arrs = arrs.compute()
assert (arrs == arr).all()
else:
arrs = stream_to_array(
stream, spatial_shape=(3, 4), sum_frames=False, channels=5,
last_frame=2)
assert (arrs == arr).all()
@pytest.mark.parametrize("lazy", (True, False))
def test_empty_stream(lazy):
arr = np.zeros((2, 3, 4, 5), dtype="uint16")
stream = array_to_stream(arr)
if lazy:
if not sparse_installed:
pytest.skip("The sparse package is not installed")
arrs = da.from_array(stream_to_sparse_COO_array(
stream, spatial_shape=(3, 4), sum_frames=False, channels=5,
last_frame=2), chunks=(1, 1, 2, 5))
arrs = arrs.compute()
assert not arrs.any()
else:
arrs = stream_to_array(
stream, spatial_shape=(3, 4), sum_frames=False, channels=5,
last_frame=2)
assert not arrs.any()
@pytest.mark.parametrize("lazy", (True, False))
def test_sparse_stream(lazy):
arr = np.zeros((2, 3, 4, 5), dtype="uint16")
arr[0, 0, 0, 0] = 1
arr[-1, -1, -1, -1] = 2
arr[1, 1, 3, 3] = 3
stream = array_to_stream(arr)
if lazy:
if not sparse_installed:
pytest.skip("The sparse package is not installed")
arrs = da.from_array(stream_to_sparse_COO_array(
stream, spatial_shape=(3, 4), sum_frames=False, channels=5,
last_frame=2), chunks=(1, 1, 2, 5))
arrs = arrs.compute()
assert (arrs == arr).all()
else:
arrs = stream_to_array(
stream, spatial_shape=(3, 4), sum_frames=False, channels=5,
last_frame=2)
assert (arrs == arr).all()
|
magnunor/hyperspy
|
hyperspy/tests/misc/test_fei_stream_readers.py
|
Python
|
gpl-3.0
| 2,664
|
'''
This file is part of TSLoad.
Copyright 2013, Sergey Klyaus, ITMO University
TSLoad is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 3.
TSLoad is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with TSLoad. If not, see <http://www.gnu.org/licenses/>.
'''
from storm.locals import *
from storm.store import Store
from tsload.util.stormx import TableSchema, UniqueConstraint
class Agent(object):
__storm_table__ = 'agent'
id = Int(primary=True)
uuid = UUID(allow_none=True)
uuid.unique = True
agentType = Enum(map = {'load': 0,
'monitor': 1})
# Basic hostinfo information
hostname = Unicode()
domainname = Unicode()
osname = RawStr()
release = RawStr()
machineArch = RawStr()
numCPUs = Int()
numCores = Int()
memTotal = Int()
# Registration information
lastOnline = DateTime()
class AgentResource(object):
__storm_table__ = 'agent_resource'
id = Int(primary=True)
aid = Int()
agent = Reference(aid, Agent.id)
resourceClass = Enum(map = {'cpu': 0,
'disk': 1,
'fs': 2,
'net': 3})
name = Unicode()
resourceType = RawStr()
data = JSON()
state = Int()
class AgentResourceChild(object):
__storm_table__ = 'agent_resource_child'
id = Int(primary = True)
pid = Int()
cid = Int()
parent = Reference(pid, AgentResource.id)
child = Reference(cid, AgentResource.id)
uniqParentChild = UniqueConstraint(pid, cid)
class WorkloadType(object):
__storm_table__ = 'agent_workload_type'
id = Int(primary = True)
aid = Int()
agent = Reference(aid, Agent.id)
name = Unicode()
module = RawStr()
modulePath = RawStr()
classList = RawStr()
class WorkloadParam(object):
__storm_table__ = 'agent_workload_param'
id = Int(primary = True)
wltid = Int()
workloadType = Reference(wltid, WorkloadType.id)
name = Unicode()
paramData = JSON()
class ExperimentProfile(object):
__storm_table__ = 'experiment_profile'
id = Int(primary = True)
# This field references user id inside user database
userId = Int()
name = Unicode()
description = Unicode()
creationDate = DateTime()
class ExperimentThreadPool(object):
__storm_table__ = 'experiment_threadpool'
id = Int(primary = True)
aid = Int(allow_none=True)
agent = Reference(aid, Agent.id)
eid = Int()
profile = Reference(eid, ExperimentProfile.id)
name = Unicode()
numWorkers = Int()
# TODO: Threadpool binding API
class ExperimentWorkload(object):
__storm_table__ = 'experiment_workload'
id = Int(primary = True)
eid = Int()
profile = Reference(eid, ExperimentProfile.id)
tpid = Int(allow_none=True)
threadpool = Reference(tpid, ExperimentThreadPool.id)
wltid = Int(allow_none=True)
workloadType = Reference(wltid, WorkloadType.id)
name = Unicode()
params = JSON()
stepsId = RawStr(allow_none=True) # References to TSDB
class ExperimentWorkloadResource(object):
__storm_table__ = 'experiment_workload_resource'
id = Int(primary = True)
wid = Int()
workload = Reference(wid, ExperimentWorkload.id)
rid = Int()
resource = Reference(rid, AgentResource.id)
rmode = Int()
def createExpsvcDB(connString):
database = create_database(connString)
store = Store(database)
TableSchema(database, Agent).create(store)
TableSchema(database, AgentResource).create(store)
TableSchema(database, AgentResourceChild).create(store)
TableSchema(database, WorkloadType).create(store)
TableSchema(database, WorkloadParam).create(store)
TableSchema(database, ExperimentProfile).create(store)
TableSchema(database, ExperimentThreadPool).create(store)
TableSchema(database, ExperimentWorkload).create(store)
TableSchema(database, ExperimentWorkloadResource).create(store)
store.commit()
store.close()
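if __name__ == '__main__':
    # Minimal usage sketch (illustrative, not part of TSLoad): create the
    # experiment-service schema in a local SQLite file. The connection string
    # follows Storm's create_database() conventions; the file name is an
    # arbitrary example.
    createExpsvcDB('sqlite:expsvc.db')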
|
myaut/tsload
|
server/tsload/expsvc/model.py
|
Python
|
gpl-3.0
| 4,661
|
#!./venv/bin/python3
# File: tmoney.py
import re
import src.config as config
wo_symbol_re = r"""
[-]?
(?P<d1>[0-9]*)
[.] # If no currency symbol, must have a decimal point.
(?P<c1>[0-9]{0,2})?
[.]? # Want to know if there is a second decimal point!
"""
before_symbol_re = r"""
[-]?
{}
[-]?
(?P<d1>\d+)
([.](?P<c1>\d{{0,2}}))?
[.]?
"""
with_symbol = before_symbol_re.format(
config.DEFAULT_CURRENCY_SYMBOL)
pat_w_symbol = re.compile(with_symbol, re.VERBOSE)
pat_wo_symbol = re.compile(wo_symbol_re, re.VERBOSE)
def get_currency_value(string,
pat_w_symbol=pat_w_symbol,
debug=False):
"""
Uses regex to find a currency value within the string.
Depends on the expressions precompiled in this module.
Those expressions in turn depend on the currency symbol
set in src.config.DEFAULT_CURRENCY_SYMBOL
Returns a float if successful, None if not.
"""
value = cents = ''
negative = False
res = pat_w_symbol.search(string)
if not res:
if debug: print("failed w symbol")
res = pat_wo_symbol.search(string)
if res:
match = res.group()
if debug:
print("got a match: {}".format(match))
if match.count('.') > 1:
if debug:
print("failing: >1 decimal")
return
if match.count("-"):
if debug:
print("negative sign found")
negative = True
value = res.group("d1")
if debug: print("value is {}".format(value))
if not value:
if debug:
print("no dollar value. => '0'")
value = "0"
value = float(value)
cents = res.group("c1")
if debug: print("cents is {}".format(cents))
if not cents:
if debug: print("no cents. => '0'")
cents = "0"
if len(cents) == 1: cents += "0"
cents = float(cents)
if debug: print("final cents: {}".format(cents))
value += cents / 100
if negative: value = -value
if value != 0:
return value
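# Illustrative behaviour, inferred from the data2test cases below (and assuming
# config.DEFAULT_CURRENCY_SYMBOL is a regex-escaped dollar sign):
#   get_currency_value("cost is $12.50")  -> 12.5
#   get_currency_value("45")              -> None (no symbol and no decimal point)
#   get_currency_value("45.33.2")         -> None (two decimal points)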
data2test = (
("nothing here", "None"),
("-45", "None"),
("45", "None"),
("cost is -45", "None"),
("-45 is the price", "None"),
("-45.", "-45.00"),
("45.", "45.00"),
("cost is -45.", "-45.00"),
("-45. is the price", "-45.00"),
("-45.2", "-45.20"),
("45.2", "45.20"),
("cost is -45.2", "-45.20"),
("cost is 45.2", "45.20"),
("-45.2 is the price", "-45.20"),
("45.2 is the price", "45.20"),
("-45.33", "-45.33"),
("45.33", "45.33"),
("cost is -45.33", "-45.33"),
("-45.33 is the price", "-45.33"),
("cost is 45.33", "45.33"),
("45.33 is the price", "45.33"),
("-45.33.", "None"),
("45.33.2", "None"),
("cost is -45.33.78", "None"),
("-45.33. is the price", "None"),
("cost is 45.33.2", "None"),
("45.33.2 is the price", "None"),
("nothing here", "None"),
("-$45", "-45.00"),
("$-45", "-45.00"),
("cost is -45", "None"),
("$-45 is the price", "-45.00"),
("-$45.", "-45.00"),
("$-45.", "-45.00"),
("cost is -$45.", "-45.00"),
("$-45. is the price", "-45.00"),
("-$45.2", "-45.20"),
("$-45.2", "-45.20"),
("cost is -$45.2", "-45.20"),
("$-45.2 is the price", "-45.20"),
("-$45.33", "-45.33"),
("$-45.33", "-45.33"),
("cost is -$45.33", "-45.33"),
("$-45.33 is the price", "-45.33"),
("-$45.33.", "None"),
("$-45.33.9", "None"),
("cost is -$45.33.99", "None"),
("$-45.33.99 is the price", "None"),
("Dr 1111 5000.00", '5000.00'),
("Dr 1111 5000.00", '5000.00'),
("3100 5000.00 Cr", '5000.00'),
("3100 5000.00Cr", '5000.00'),
("3100 5000.0Cr", '5000.00'),
("3100 5000.Cr", '5000.00'),
("3100 5000Cr", "None"),
("1111 Cr 5000.00", '5000.00'),
("1511 Dr 5000.00", '5000.00'),
("no dr or cr found", "None"),
)
def test_data(list_of_2_tuples,
symbol=config.DEFAULT_CURRENCY_SYMBOL,
debug=False):
# debug=True):
"""
    Each 2-tuple in the first parameter consists of a string which,
    when passed to the get_currency_value function, should result in
    its returning the second component of the tuple.
    Returns a 3-tuple, each component of which is a list: the first
    lists successes; the second lists failures where there was a match
    but the returned value was not the one specified; and the third
    lists the cases where there was no match.
"""
successes = []
wrong_match = []
no_match = []
for subject, expected in list_of_2_tuples:
res = get_currency_value(subject,
debug=False)
# debug=True)
if res:
if "{:.2f}".format(res) == expected:
successes.append("{:>32} {:.2f} {}"
.format(subject, res, expected))
else:
wrong_match.append("{:>32} {:.2f} {}"
.format(subject, res, expected))
else:
no_match.append("{:>32} => no match - expect {}"
.format(subject, expected))
return (successes, wrong_match, no_match)
def populate(successes, wrong_match, no_match, res):
"""
Assumes that res is a (3) tuple as returned by test_data.
The three lists of res are appended to the first three params
as appropriate.
"""
successes.extend(res[0])
wrong_match.extend(res[1])
no_match.extend(res[2])
def test_foreign_currencies(dict_of_lists):
successes = []
wrong_match = []
no_match = []
for key in config.CURRENCY_SYMBOLS.keys():
data = dict_of_lists[key]
res = test_data(data)
populate(successes, wrong_match, no_match, res)
return (successes, wrong_match, no_match)
def display(successes, wrong_match, no_match):
print("Successes:")
print("\n".join(successes))
print("Matched but wrong:")
print("\n".join(wrong_match))
print("No match:")
print("\n".join(no_match))
if __name__ == "__main__":
# test(after_symbol, test_data_with_sign)
# test(wo_symbol_re, test_data_without_sign)
double_symbol=config.CURRENCY_SYMBOLS["Iceland krona"]
successes = []
wrong_match = []
no_match = []
populate(successes, wrong_match, no_match,
test_data(data2test,
# config.CURRENCY_SYMBOLS["Iceland krona"],
# debug = True))
debug = False))
display(successes, wrong_match, no_match)
|
alexKleider/debk
|
src/money.py
|
Python
|
gpl-3.0
| 6,704
|
# standard library
import http.client
import urllib
import json
import logging
# Django
from django.shortcuts import render, redirect
from django.contrib.auth.decorators import login_required
# local Django
from exercise.models import (
Exercise, UserExercise, TestCaseExercise, UserExerciseSubmission
)
from exercise import constants
from exercise.forms import SubmitExerciseForm
from achievement.views import (
verify_correct_exercise_achievement, verify_score_achievement, verify_submited_exercises_achievement
)
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(constants.PROJECT_NAME)
@login_required
def list_exercises_not_deprecated(request):
logger.info("List exercises not deprecated page.")
exercises = Exercise.objects.filter(
deprecated=constants.IS_NOT_DEPRECATED)
user = request.user
list_exercises = get_user_submissions_exercise(user, exercises)
return render(request, 'exercises.html', {
'list_exercises': list_exercises
})
@login_required
def show_exercise(request, id):
exercise = Exercise.objects.get(id=id, deprecated=0)
logger.info("Show exercises not deprecated page.")
# Get current logged user.
user = request.user
assert user is not None, "User not logged in."
# Get the current exercise of the user.
user_exercise = get_current_user_exercise(user, exercise)
# Show the user code in the field if the code exists.
form = SubmitExerciseForm(None,
initial={constants.CODE_NAME: user_exercise.code})
# String list for the JSON.
input_exercise = get_all_input_exercise(exercise)
# String list to compare with response.
output_exercise = get_all_output_exercise(exercise)
# Verify if user should access a tip.
user_missed_exercise = False
if user_exercise.number_submission > 0 and user_exercise.status is False:
user_missed_exercise = True
else:
# Nothing to do.
pass
return render(request, 'description_exercise.html', {
'exercise': exercise,
'user_exercise': user_exercise,
'form': form,
'input_exercise': input_exercise[0],
'output_exercise': output_exercise[0],
'user_missed_exercise': user_missed_exercise
})
@login_required
def process_user_exercise(request, id):
user = request.user
form = SubmitExerciseForm(request.POST)
exercise = Exercise.objects.get(id=id, deprecated=0)
if form.is_valid():
logger.info("Code form was valid.")
# Source code sent by the user.
source_code = form.cleaned_data.get(constants.CODE_NAME)
# String list for the JSON.
input_exercise = get_all_input_exercise(exercise)
# Get the current exercise of the user.
user_exercise = get_current_user_exercise(user, exercise)
# Receives the JSON response from API.
api_result = submit_exercise(source_code, input_exercise)
# Defines whether the submitted code can be compiled
code_has_been_compiled = verify_compilation_source_code(api_result)
        # If compilation succeeded, extract data from the API response.
if code_has_been_compiled is True:
# Sum all runtime of test cases.
runtime = extract_time(api_result)
# Get the outputs of test cases.
stdout = extract_stdout(api_result)
else:
            # Initialize the required variables with default values
stdout = []
runtime = 0.0
# String list to compare with response.
output_exercise = get_all_output_exercise(exercise)
# Define if user exercise if correct or not
status = exercise_status(stdout, output_exercise)
# Define if user has scored or not in this exercise
scored = scores_exercise(user_exercise.scored, user, exercise.score, status)
user_exercise.update_or_creates(
source_code, exercise,
user, runtime, status, scored)
UserExerciseSubmission.submit(user_exercise)
# Used to unlock correct exercise achievements everytime this method is called.
verify_correct_exercise_achievement(user, request)
# Used to unlock submited exercises achievement everytime this method is called.
verify_submited_exercises_achievement(user, request)
# Used to unlock score achievements when the user receives points from exercises.
verify_score_achievement(user, request)
else:
logger.info("The code form was invalid.")
# Nothing to do.
pass
return redirect('show_exercise', id=id)
def scores_exercise(scored, user, score, status):
if not scored:
logger.info("The user has not scored.")
# if the user has not scored before
if status:
logger.info("Set score to the user.")
# if the exercise is correct
user.score += score
user.save()
return True
else:
logger.info("The exercise is incorrect.")
# but it is incorrect
return False
else:
logger.info("The user has already scored.")
# the user has already scored in that exercise
return True
def get_current_user_exercise(user, exercise):
try:
user_exercise = UserExercise.objects.get(user=user, exercise=exercise)
except UserExercise.DoesNotExist:
user_exercise = UserExercise()
return user_exercise
def submit_exercise(source_code, input_exercise):
conn = http.client.HTTPConnection("api.hackerrank.com")
conn.request("POST", "/checker/submission.json", urllib.parse.urlencode({
"source": source_code,
"lang": 1,
"testcases": json.dumps(input_exercise),
"api_key": constants.HACKERRANK_API_KEY,
"wait": "true",
"format": "json"
}), {
"accept": "application/json",
"content-type": "application/x-www-form-urlencoded"
})
logger.info("The exercise submission and the API response were made.")
result = conn.getresponse().read().decode('utf-8')
return result
def extract_time(api_result):
list_time = json.loads(api_result)['result']['time']
sum_time = constants.INITIAL_SUM
for time in list_time:
sum_time += time
logger.info("The runtime extraction was taken from the API response.")
return sum_time
def extract_stdout(api_result):
stdout = json.loads(api_result)['result']['stdout']
logger.info("The stdout extraction was taken from the API response.")
return stdout
def exercise_status(actual_output, original_output):
if actual_output == original_output:
logger.info("The exercise is correct.")
return True
else:
return False
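# Illustrative: exercise_status(["4\n"], ["4\n"]) returns True, while
# exercise_status(["4\n"], ["5\n"]) returns False (the stdout list must match
# the expected outputs exactly).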
def get_all_input_exercise(exercise):
test_cases = TestCaseExercise.objects.filter(exercise=exercise)
list_input_exercise = []
for test_case in test_cases:
current_input_exercise = str(test_case.input_exercise)
list_input_exercise.append(current_input_exercise)
logger.info("The inputs for the exercise from database have been organized.")
return list_input_exercise
def get_all_output_exercise(exercise):
test_cases = TestCaseExercise.objects.filter(exercise=exercise)
list_output_exercise = []
for test_case in test_cases:
current_output_exercise = str(test_case.output_exercise)
list_output_exercise.append(current_output_exercise)
logger.info("The outputs for the exercise from database have been organized.")
return list_output_exercise
# This method gets the number of submissions made by the user for each exercise.
def get_user_submissions_exercise(user, exercises):
assert user is not None, "User not logged."
logger.info("Inside get_user_submissions_exercise")
list_user_exercise = []
# Getting informations about submissions in exercises made by user.
for exercise in exercises:
user_exercise = None
try:
user_exercise = UserExercise.objects.get(user=user, exercise=exercise)
except UserExercise.DoesNotExist:
user_exercise = UserExercise()
logger.debug("Exercise: "+exercise.title+" Status: "+str(user_exercise.status))
list_user_exercise.append(user_exercise)
assert len(exercises) == len(list_user_exercise), "The list of submissions has a different number of exercises."
zipped_data = zip(exercises, list_user_exercise)
list_user_submissions = list(zipped_data)
return list_user_submissions
# This method gets the last five exercises submitted by the user.
def get_user_exercises_last_submissions(user):
assert user is not None, "User not logged."
user_exercises_list = UserExercise.objects.filter(user=user).order_by('-date_submission')[:5]
return user_exercises_list
# Defines whether the submitted code can be compiled
def verify_compilation_source_code(api_result):
message = json.loads(api_result)['result']['message']
    # True if the code compiled, False if it did not
if message is not None:
return True
else:
return False
|
CodaMais/CodaMais
|
CodaMais/exercise/views.py
|
Python
|
gpl-3.0
| 9,244
|
import math
# Class definitions
class Entity:
    '''A base class for DXF entities: lines and arcs.
    The class attribute delta contains the maximum distance in x and y
    direction between endpoints that are considered coincident.'''
delta = 0.005
_anoent = "Argument is not an entity!"
def __init__(self, x1=0, y1=0, x2=0, y2=0):
        '''Creates an Entity from (x1, y1) to (x2, y2).'''
        # Start- and endpoint
self.x1 = float(x1)
self.y1 = float(y1)
self.x2 = float(x2)
self.y2 = float(y2)
# Bounding box
self.xmin = min(x1, x2)
self.ymin = min(y1, y2)
self.xmax = max(x1, x2)
self.ymax = max(y1, y2)
# Endpoints swapped indicator
self.sw = False
def fits(self, index, other):
'''Checks if another entity fits onto this one.
index -- end of the entity to test, either 1 or 2.
other -- Entity to test.
Returns 0 if the other entity doesn't fit. Otherwise returns 1 or 2
indicating the new free end of other.'''
assert isinstance(other, Entity), Entity._anoent
if index == 1:
if (math.fabs(self.x1-other.x1) < Entity.delta and
math.fabs(self.y1-other.y1) < Entity.delta):
# return free end of other
return 2
elif (math.fabs(self.x1-other.x2) < Entity.delta and
math.fabs(self.y1-other.y2) < Entity.delta):
return 1
elif index == 2:
if (math.fabs(self.x2-other.x1) < Entity.delta and
math.fabs(self.y2-other.y1) < Entity.delta):
return 2
elif (math.fabs(self.x2-other.x2) < Entity.delta and
math.fabs(self.y2-other.y2) < Entity.delta):
return 1
return 0 # doesn't fit!
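    # Illustrative: with l1 = Line(0, 0, 1, 0) and l2 = Line(1, 0, 1, 1),
    # l1.fits(2, l2) returns 2 -- the entities share point (1, 0), leaving
    # end 2 of l2 free.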
def getbb(self):
'''Returns a tuple containing the bounding box of an entity in the
format (xmin, ymin, xmax, ymax).'''
return (self.xmin, self.ymin, self.xmax, self.ymax)
def move(self, dx, dy):
self.x1 += dx
self.x2 += dx
self.y1 += dy
self.y2 += dy
def swap(self):
'''Swap (x1, y1) and (x2, y2)'''
(self.x1, self.x2) = (self.x2, self.x1)
(self.y1, self.y2) = (self.y2, self.y1)
self.sw = not self.sw
def length(self):
'''Returns the length of the entity.'''
raise NotImplementedError
def startpoint(self):
'''Returns the (x1, y1).'''
return (self.x1, self.y1)
def endpoint(self):
'''Returns the (x2, y2).'''
return (self.x2, self.y2)
    def __lt__(self, other):
        '''The (xmin, ymin) corner of the bounding box will be used for
        sorting. Sort by ymin first, then xmin.'''
        assert isinstance(other, Entity), Entity._anoent
        if self.ymin == other.ymin:
            return self.xmin < other.xmin
        return self.ymin < other.ymin
    def __gt__(self, other):
        assert isinstance(other, Entity), Entity._anoent
        if self.ymin == other.ymin:
            return self.xmin > other.xmin
        return self.ymin > other.ymin
def __eq__(self, other):
assert isinstance(other, Entity), Entity._anoent
return self.xmin == other.xmin and self.ymin == other.ymin
class Line(Entity):
'''A class for a line entity, from point (x1, y1) to (x2, y2)'''
def __init__(self, x1, y1, x2, y2):
'''Creates a Line from (x1, y1) to (x2, y2).'''
Entity.__init__(self, x1, y1, x2, y2)
def __str__(self):
fs = "#LINE from ({:.3f},{:.3f}) to ({:.3f},{:.3f})"
fs = fs.format(self.x1, self.y1, self.x2, self.y2)
if self.sw:
fs += " (swapped)"
return fs
    def length(self):
        '''Returns the length of a Line.'''
        dx = self.x2 - self.x1
        dy = self.y2 - self.y1
        return math.sqrt(dx*dx + dy*dy)
    def points(self):
        return (self.startpoint(), self.endpoint())
class Arc(Entity):
    '''A class for an arc entity, centered at (cx, cy) with radius R, from
angle a1 to a2.
Class properties:
Arc.segmentsize -- Maximum length of the segment when an arc is rendered
as a list of connected line segments.
Arc.as_segments -- Whether an arc should be output as a list of
connected line segments. True by default.'''
segmentsize = 1
as_segments = True
def __init__(self, cx, cy, R, a1, a2):
        '''Creates an Arc centered at (cx, cy) with radius R, running from
        a1 degrees ccw to a2 degrees.'''
if a2 < a1:
a2 += 360
assert a2 > a1, 'Arcs are defined CCW, so a2 must be greater than a1'
self.cx = float(cx)
self.cy = float(cy)
self.R = float(R)
self.a1 = float(a1)
self.a2 = float(a2)
self.segments = None
x1 = cx+R*math.cos(math.radians(a1))
y1 = cy+R*math.sin(math.radians(a1))
x2 = cx+R*math.cos(math.radians(a2))
y2 = cy+R*math.sin(math.radians(a2))
Entity.__init__(self, x1, y1, x2, y2)
# Refine bounding box
        A1 = int(a1) // 90
        A2 = int(a2) // 90
for ang in range(A1, A2):
(px, py) = (cx+R*math.cos(math.radians(90*ang)),
cy+R*math.sin(math.radians(90*ang)))
if px > self.xmax:
self.xmax = px
elif px < self.xmin:
self.xmin = px
if py > self.ymax:
self.ymax = py
elif py < self.ymin:
self.ymin = py
def _gensegments(self):
        '''Subdivide the arc into a list of line segments of at most
        Arc.segmentsize units in length. Return the list of segments.'''
fr = float(Arc.segmentsize)/self.R
if fr > 1:
step = self.a2-self.a1
else:
ang = math.asin(fr)/math.pi*180
cnt = math.floor((self.a2-self.a1)/ang) + 1
step = (self.a2-self.a1)/cnt
sa = self.a1
ea = self.a2
if self.sw:
sa = self.a2
ea = self.a1
step = -step
angs = _frange(sa, ea, step)
pnts = [(self.cx+self.R*math.cos(math.radians(a)),
self.cy+self.R*math.sin(math.radians(a))) for a in angs]
llist = []
for j in range(1, len(pnts)):
i = j-1
llist.append(Line(pnts[i][0], pnts[i][1], pnts[j][0], pnts[j][1]))
return llist
def __str__(self):
s = "#ARC from ({:.3f},{:.3f}) to ({:.3f},{:.3f}), radius {:.3f}"
s = s.format(self.x1, self.y1, self.x2, self.y2, self.R)
if self.sw:
s += " (swapped)"
return s
def move(self, dx, dy):
Entity.move(self, dx, dy)
self.cx += dx
self.cy += dy
if self.segments:
for s in self.segments:
s.move(dx, dy)
def get_segments(self):
        if self.segments is None:
self.segments = self._gensegments()
return self.segments
def length(self):
'''Returns the length of an arc.'''
angle = math.radians(self.a2-self.a1)
return self.R*angle
class Contour(Entity):
'''A class for a list of connected Entities'''
def __init__(self, ent):
'''Creates a contour from an initial entity.'''
assert isinstance(ent, Entity), Entity._anoent
Entity.__init__(self, ent.x1, ent.y1, ent.x2, ent.y2)
self.ent = [ent]
self.nument = 1
def append(self, ent):
        '''Appends an entity to the contour, if one of the ends of entity
matches the end of the last entity. Returns True if matched, otherwise
False.'''
assert isinstance(ent, Entity), Entity._anoent
last = self.ent[-1]
newfree = last.fits(2, ent)
if newfree == 0:
return False
self.ent.append(ent)
self.nument += 1
(self.xmin, self.ymin,
self.xmax, self.ymax) = merge_bb(self.getbb(), ent.getbb())
if newfree == 1:
ent.swap()
self.x2 = ent.x2
self.y2 = ent.y2
return True
def prepend(self, ent):
        '''Prepends an entity to the contour, if one of the ends of entity
matches the end of the first entity. Returns True if matched,
otherwise False.'''
assert isinstance(ent, Entity), Entity._anoent
first = self.ent[0]
newfree = first.fits(1, ent)
if newfree == 0:
return False
self.ent.insert(0, ent)
self.nument += 1
(self.xmin, self.ymin,
self.xmax, self.ymax) = merge_bb(self.getbb(), ent.getbb())
if newfree == 2:
ent.swap()
self.x1 = ent.x1
self.y1 = ent.y1
return True
def __str__(self):
outstr = "#Contour [boundingbox: {:.3f}, {:.3f}, {:.3f}, {:.3f}]\n"
outstr = outstr.format(self.xmin, self.ymin, self.xmax, self.ymax)
for e in self.ent:
outstr += "#" + str(e) + "\n"
return outstr[0:-1]
def dxfdata(self):
s = ""
for e in self.ent:
s += e.dxfdata()
return s
    def pdfdata(self):
        # Flat coordinate list: the start point followed by each end point.
        rl = [self.ent[0].x1, self.ent[0].y1]
        for e in self.ent:
            rl.extend((e.x2, e.y2))
        return rl
def ncdata(self):
(s1, s2) = self.ent[0].ncdata()
for e in self.ent[1:]:
(f1, f2) = e.ncdata()
s2 += f2
return (s1, s2)
def length(self):
'''Returns the length of a contour.'''
il = [e.length() for e in self.ent]
return sum(il)
# Function definitions.
def _frange(start, end, step):
'''A range function for floats.
start -- beginning of the range.
end -- end of the range.
step -- size of the step between numbers.
Returns a list of floating point numbers. If the difference between start
and end isn't a multiple of step, end will not be included in the list.'''
assert start != end, "Start and end cannot have the same value!"
assert step != 0.0, "Step cannot be 0!"
if start < end:
assert step > 0.0, "Step must be positive if start < end!"
else:
        assert step < 0.0, "Step must be negative if start > end!"
rv = [start]
a = start
if step > 0.0:
while a < end:
a += step
rv.append(a)
else:
while a > end:
a += step
rv.append(a)
return rv
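# Example (illustrative):
#   _frange(0.0, 1.0, 0.25) -> [0.0, 0.25, 0.5, 0.75, 1.0]
# With a step that does not evenly divide the span, the final value can
# overshoot end (e.g. a step of 0.3 yields a last value near 1.2).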
def merge_bb(a, b):
'''The bounding boxes a and b are tuples (xmin, ymin, xmax,
ymax). Calculate and return a bounding box that contains a and b.'''
xmin = min(a[0], b[0])
ymin = min(a[1], b[1])
xmax = max(a[2], b[2])
ymax = max(a[3], b[3])
return (xmin, ymin, xmax, ymax)
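# Example (illustrative):
#   merge_bb((0, 0, 1, 1), (0.5, -1, 2, 0.5)) -> (0, -1, 2, 1)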
def find_contours(lol, loa):
'''Find polylines in the list of lines and list of arcs.
lol -- list of lines
loa -- list of arcs.
Returns a list of contours and a list of remaining lines and a list of
remaining arcs as a tuple.'''
remlines = []
remarcs = []
elements = lol[:]+loa[:]
loc = []
while len(elements) > 0:
first = elements.pop(0)
cn = Contour(first)
oldlen = cn.nument
while True:
n = 0
while n < len(elements):
if cn.append(elements[n]) or cn.prepend(elements[n]):
del elements[n]
else:
n += 1
if cn.nument == oldlen:
break
oldlen = cn.nument
if cn.nument > 1:
loc.append(cn)
else:
if isinstance(first, Line):
remlines.append(first)
elif isinstance(first, Arc):
remarcs.append(first)
return (loc, remlines, remarcs)
def gen_arc_other(origin,center,destination,direction):
radius = twoPointDistance(origin,center)
a1 = angle(origin,center)
a2 = angle(destination,center)
cx,cy = center
return Arc(cx,cy,radius,a1,a2)
def angle(point,center):
x1,y1 = point
x2,y2 = center
dx = x1-x2
dy = y1-y2
return math.degrees(math.atan2(dx,dy))
def twoPointDistance(start_point,center_point):
x1,y1 = start_point
x2,y2 = center_point
dx = x1-x2
dy = y1-y2
return math.sqrt(dx ** 2 + dy ** 2)
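if __name__ == '__main__':
    # Minimal demonstration (illustrative, not part of the original module):
    # three connected line segments should be merged into a single Contour.
    demo_lines = [Line(0, 0, 1, 0), Line(1, 0, 1, 1), Line(1, 1, 0, 1)]
    contours, rem_lines, rem_arcs = find_contours(demo_lines, [])
    print(contours[0])
    print("leftover lines: {}, leftover arcs: {}".format(len(rem_lines),
                                                         len(rem_arcs)))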
|
voglster/Drawbot
|
DrawbotApp/dxfgeom.py
|
Python
|
gpl-3.0
| 12,546
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Equation of heat
----------------
Let us start with a mathematical formulation ...
.. math::
\nabla\cdot( a \cdot \nabla u ) + \frac{\partial u}{\partial t} + C = 0
.. math::
- \Delta u & = 1 \quad{\mathrm{in}}\quad\Omega\\
u & = 0 \quad{\mathrm{on}}\quad\partial\Omega\\
We will solve this equation on the square :math:`\Omega=[-1, 1]^2`
"""
import pygimli as pg
from pygimli.solver import solve
"""
"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import animation
from pygimli.viewer import show
from pygimli.mplviewer import drawStreams
import time
grid = pg.createGrid(x=np.linspace(-1.0, 1.0, 31),
y=np.linspace(-1.0, 1.0, 31))
vals = pg.RVector(grid.cellCount(), 1.)
for c in grid.cells():
if abs(c.center()[0]) < 0.1:
vals[c.id()] = 10.0
#grid = grid.createP2()
times = np.arange(0, 2, 1./20)
#material?
neumannBC = [[1, -0.5], # left
[2, 0.5]] # right
dirichletBC = [[3, lambda b, t: 1.0 + np.cos(2.0 * np.pi * t)], # top
[4, 1.0]] #bottom
pg.showLater(1)
ax = show(grid)[0]
ax.figure.canvas.draw()
#plt.ion()
#plt.show()
u = solve(grid, a=vals, f=0.5,
times=times,
u0=pg.RVector(grid.nodeCount(), 0.0),
duBoundary=neumannBC,
uBoundary=dirichletBC,
##plotTimeStep=updateDrawU,
verbose=False, progress=True)
uMin = min(u.flat)
uMax = max(u.flat)
show(grid, u[0], axes=ax)
"""
.. image:: PLOT2RST.current_figure
:scale: 75
"""
def gen():
mesh = gen.mesh
u = gen.u
for i, ui in enumerate(u[1:]):
yield i, len(u), ui, mesh
gen.mesh = grid
gen.u = u
def animate(data):
i = data[0]
imax = data[1]
ui = data[2]
mesh = data[3]
print(i,'/', imax)
global ax
ax.clear()
ax = show(mesh, data=ui, showLater=True, axes=ax,
levels=np.linspace(0, 3, 16))[0]
if min(ui) != max(ui):
pass
#drawStreams(ax, mesh, ui)
anim = animation.FuncAnimation(plt.gcf(), animate,
gen,
interval=2)
"""
.. animate:: anim fps=10 bitrate=1024 dpi=92
"""
#plt.show()
pg.showNow()
#plt.show()
|
KristoferHellman/gimli
|
doc/tutorials/modelling/develop/plot_4-mod-fem-heat.py
|
Python
|
gpl-3.0
| 2,356
|
import json
import os
import re
import shutil
import subprocess
import urllib
import uuid
import requests
import validators
from celery.utils.log import get_task_logger
from flask import current_app
from app.utils import replace, clear_dir, unzip, get_build_tools_version, change_theme
from app.utils.assets import resize_launcher_icon, resize_background_image, save_logo
from app.utils.libs.asset_resizer import DENSITY_TYPES
from app.utils.notification import Notification
logger = get_task_logger(__name__)
def ignore_files(path, names):
logger.info('Working in %s' % path)
    # Ignore build output and IDE/Gradle metadata directories
return ("build", ".gradle", ".idea")
class Generator:
"""
The app generator. This is where it all begins :)
"""
def __init__(self, config, via_api=False, identifier=None, task_handle=None, build_type=None, theme_colors=None):
if not identifier:
self.identifier = str(uuid.uuid4())
else:
self.identifier = identifier
self.task_handle = task_handle
self.update_status('Starting the generator')
self.config = config
self.working_dir = config['WORKING_DIR']
self.src_dir = config['APP_SOURCE_DIR']
self.creator_email = ''
self.is_auth_enabled = False
self.event_name = ''
self.app_name = ''
self.app_working_dir = os.path.abspath(self.working_dir + '/' + self.identifier + '/android-src/')
self.app_background_image = os.path.abspath(config['BASE_DIR'] + '/app/static/assets/background.jpg')
self.app_launcher_icon = os.path.abspath(config['BASE_DIR'] + '/app/static/assets/ic_launcher.png')
self.app_package_name = 'org.fossasia.openevent.' + self.app_name.replace(" ", "")
self.app_temp_assets = os.path.abspath(self.working_dir + '/' + self.identifier + '/assets-src/')
self.api_link = ''
self.apk_path = ''
self.via_api = via_api
self.build_type = build_type
self.theme_colors = theme_colors
def get_path(self, relative_path):
"""
Get the path to a resource relative to the app source
:param relative_path:
:return:
"""
return os.path.abspath(self.app_working_dir + '/' + relative_path)
def get_temp_asset_path(self, relative_path):
"""
Get the path to a resource relative to the temp assets dir
:param relative_path:
:return:
"""
return os.path.abspath(self.app_temp_assets + '/' + relative_path)
def normalize(self, creator_email, endpoint_url=None, is_auth_enabled=False, zip_file=None):
"""
Normalize the required data irrespective of the source
:param creator_email:
:param is_auth_enabled:
:param endpoint_url:
:param zip_file:
:return:
"""
self.update_status('Normalizing source data')
if not endpoint_url and not zip_file:
raise Exception('endpoint_url or zip_file is required')
if endpoint_url:
self.api_link = endpoint_url
os.makedirs(self.app_temp_assets)
event_info = requests.get(endpoint_url + '/event').json()
self.download_event_data()
else:
unzip(zip_file, self.app_temp_assets)
with open(self.get_temp_asset_path('event')) as json_data:
event_info = json.load(json_data)
event_id = event_info['id']
if os.path.isfile(self.get_temp_asset_path('meta')):
with open(self.get_temp_asset_path('meta')) as json_data:
meta = json.load(json_data)
root_url = meta['root_url']
if root_url:
self.api_link = root_url + '/api/v1/events/' + str(event_id)
self.event_name = event_info['name']
self.app_name = self.event_name
self.creator_email = creator_email
self.is_auth_enabled = is_auth_enabled
self.update_status('Processing background image and logo')
background_image = event_info['background_image'].strip() if event_info['background_image'] else ''
logo = event_info['logo'].strip() if event_info['logo'] else ''
if background_image != '':
if background_image.startswith("/"):
self.app_background_image = self.get_temp_asset_path(background_image)
elif validators.url(background_image):
self.app_background_image = self.get_temp_asset_path('background.png')
urllib.urlretrieve(background_image, self.app_background_image)
if logo != '':
if logo.startswith("/"):
self.app_launcher_icon = self.get_temp_asset_path(logo)
elif validators.url(logo):
self.app_launcher_icon = self.get_temp_asset_path('logo.png')
urllib.urlretrieve(logo, self.app_launcher_icon)
def generate(self, should_notify=True):
"""
Generate the app
:return: the path to the generated apk
"""
logger.info('Working directory: %s' % self.app_working_dir)
self.update_status('Preparing parent source code')
self.prepare_source()
        self.app_package_name = 'org.fossasia.openevent.' + re.sub(r'\W+', '', self.app_name)
logger.info('App package name: %s' % self.app_package_name)
config = {
'email': self.creator_email,
'app-name': self.app_name,
'api-link': self.api_link,
'is-auth-enabled': self.is_auth_enabled
}
self.update_status('Generating app configuration')
with open(self.get_path("app/src/main/assets/config.json"), "w+") as config_file:
config_file.write(json.dumps(config))
self.update_status('Generating launcher icons & background image')
save_logo(self.app_launcher_icon, self.app_working_dir)
resize_launcher_icon(self.app_launcher_icon, self.app_working_dir)
resize_background_image(self.app_background_image, self.app_working_dir)
self.update_status('Updating resources')
replace(self.get_path("app/src/main/res/values/strings.xml"), 'OpenEvent', self.app_name)
replace(self.get_path("app/src/main/res/layout/nav_header.xml"), 'twitter', 'background')
replace(self.get_path("app/build.gradle"), '"org.fossasia.openevent"', '"%s"' % self.app_package_name)
if self.theme_colors:
self.update_status('Setting theme colors')
change_theme(self.get_path("app/src/main/res/values/color.xml"), self.theme_colors)
self.update_status('Loading assets')
for f in os.listdir(self.app_temp_assets):
path = os.path.join(self.app_temp_assets, f)
if os.path.isfile(path):
logger.info('Copying %s' % path)
shutil.copyfile(path, self.get_path("app/src/main/assets/" + f))
images_path = os.path.join(self.app_temp_assets, 'images')
speakers_path = os.path.join(images_path, 'speakers')
if os.path.isdir(speakers_path):
logger.info('Copying %s' % speakers_path)
shutil.copytree(speakers_path, self.get_path("app/src/main/assets/images/speakers/"))
sponsors_path = os.path.join(images_path, 'sponsors')
if os.path.isdir(sponsors_path):
logger.info('Copying %s' % sponsors_path)
shutil.copytree(sponsors_path, self.get_path("app/src/main/assets/images/sponsors/"))
self.update_status('Preparing android build tools')
build_tools_version = get_build_tools_version(self.get_path('app/build.gradle'))
logger.info('Detected build tools version: %s' % build_tools_version)
build_tools_path = os.path.abspath(os.environ.get('ANDROID_HOME') + '/build-tools/' + build_tools_version)
logger.info('Detected build tools path: %s' % build_tools_path)
self.update_status('Building android application package')
self.run_command([os.path.abspath(self.config['BASE_DIR'] + '/scripts/build_apk.sh'), build_tools_path, self.build_type])
self.update_status('Application package generated')
self.apk_path = self.get_path('release.apk')
logger.info('Generated apk path: %s' % self.apk_path)
if should_notify:
self.notify()
apk_url = '/static/releases/%s.apk' % self.identifier
logger.info('Final apk download path: %s' % apk_url)
shutil.move(self.apk_path, os.path.abspath(self.config['BASE_DIR'] + '/app/' + apk_url))
self.update_status('SUCCESS', message=apk_url)
self.cleanup()
return apk_url
def download_event_data(self):
"""
Download all event data from api i.e. event, speakers, sessions etc..
:return:
"""
logger.info('Downloading event data')
self.save_file_in_temp_assets('event')
self.save_file_in_temp_assets('microlocations')
self.save_file_in_temp_assets('sessions')
self.save_file_in_temp_assets('speakers')
self.save_file_in_temp_assets('sponsors')
self.save_file_in_temp_assets('tracks')
self.save_file_in_temp_assets('sessions/types')
logger.info('Download complete')
def save_file_in_temp_assets(self, end_point='event'):
"""
Save response from specified end_point in temp assets directory
:param end_point:
:return:
"""
if self.api_link:
response = requests.get(self.api_link + '/' + end_point)
file = open(self.get_temp_asset_path(end_point), "w+")
file.write(response.text)
file.close()
logger.info('%s file saved', end_point)
def prepare_source(self):
"""
Prepare the app-specific source based off the parent
:return:
"""
logger.info('Preparing source code.')
logger.info('Copying source from %s to %s' % (self.src_dir, self.app_working_dir))
shutil.copytree(self.src_dir, self.app_working_dir, ignore=ignore_files)
for density in DENSITY_TYPES:
mipmap_dir = self.get_path("app/src/main/res/mipmap-%s" % density)
if os.path.exists(mipmap_dir):
shutil.rmtree(mipmap_dir, True)
clear_dir(self.get_path("app/src/main/assets/"))
def cleanup(self):
"""
        Clean up the working directory and the uploaded zip once the build is done
:return:
"""
logger.info('Cleaning up %s' % self.working_dir)
shutil.rmtree(os.path.abspath(self.working_dir + '/' + self.identifier + '/'))
zip_file = os.path.join(self.config['UPLOAD_DIR'], self.identifier)
if os.path.isfile(zip_file):
os.remove(zip_file)
def notify(self, completed=True, apk_path=None, error=None):
"""
Notify the creator of success or failure of the app generation
:param completed:
:param apk_path:
:param error:
:return:
"""
if completed and apk_path and not error:
Notification.send(
to=self.creator_email,
subject='Your android application for %s has been generated ' % self.event_name,
message='Hi,<br><br>'
'Your android application for the \'%s\' event has been generated. '
                    'The apk file has been attached to this email.<br><br>'
'Thanks,<br>'
'Open Event App Generator' % self.event_name,
file_attachment=apk_path,
via_api=self.via_api
)
else:
Notification.send(
to=self.creator_email,
                subject='Your android application for %s could not be generated' % self.event_name,
                message='Hi,<br><br> '
                        'Your android application for the \'%s\' event could not be generated. '
'The error message has been provided below.<br><br>'
'<code>%s</code><br><br>'
'Thanks,<br>'
'Open Event App Generator' % (self.event_name, str(error) if error else ''),
file_attachment=apk_path,
via_api=self.via_api
)
def update_status(self, state, exception=None, message=None, skip_log=False):
if not skip_log:
logger.info(state)
if self.task_handle:
if not current_app.config.get('CELERY_ALWAYS_EAGER'):
meta = {}
if exception:
meta = {'exc': exception}
if message:
meta = {'message': message}
self.task_handle.update_state(
state=state, meta=meta
)
def run_command(self, command):
logger.info('Running command: %s', command)
process = subprocess.Popen(command,
stdout=subprocess.PIPE,
cwd=self.app_working_dir,
env=os.environ.copy())
while True:
output = process.stdout.readline()
if output == '' and process.poll() is not None:
break
if output:
logger.info('> %s', output)
self.generate_status_updates(output.strip())
rc = process.poll()
return rc
def generate_status_updates(self, output_line):
if 'Starting process \'Gradle build daemon\'' in output_line:
self.update_status('Starting gradle builder', skip_log=True)
elif 'Creating configuration' in output_line:
self.update_status('Creating configuration', skip_log=True)
elif 'Parsing the SDK' in output_line:
self.update_status('Preparing Android SDK', skip_log=True)
elif 'app:preBuild' in output_line:
self.update_status('Running pre-build tasks', skip_log=True)
elif 'Loading library manifest' in output_line:
self.update_status('Loading libraries', skip_log=True)
elif 'Merging' in output_line:
self.update_status('Merging resources', skip_log=True)
elif 'intermediates' in output_line:
self.update_status('Generating intermediates', skip_log=True)
elif 'is not translated' in output_line:
self.update_status('Processing strings', skip_log=True)
elif 'generateFdroidReleaseAssets' in output_line:
self.update_status('Processing strings', skip_log=True)
elif 'Adding PreDexTask' in output_line:
self.update_status('Adding pre dex tasks', skip_log=True)
elif 'Dexing' in output_line:
self.update_status('Dexing classes', skip_log=True)
elif 'packageGoogleplayRelease' in output_line:
self.update_status('Packaging release', skip_log=True)
elif 'assembleRelease' in output_line:
self.update_status('Assembling release', skip_log=True)
elif 'BUILD SUCCESSFUL' in output_line:
self.update_status('Build successful. Starting the signing process.', skip_log=True)
elif 'signing' in output_line:
self.update_status('Signing the package.', skip_log=True)
elif 'jar signed' in output_line:
self.update_status('Package signed.', skip_log=True)
elif 'zipaligning' in output_line:
self.update_status('Verifying the package.', skip_log=True)
elif 'Verification successful' in output_line:
self.update_status('Package verified.', skip_log=True)
elif output_line == 'done':
self.update_status('Application has been generated. Please wait.', skip_log=True)
|
saheedm/MASTERS
|
open-event-android-development/apk-generator/v2/app/generator/generator.py
|
Python
|
gpl-3.0
| 15,882
|
# -*- coding: utf-8 -*-
#
# gPodder - A media aggregator and podcast client
# Copyright (c) 2005-2009 Thomas Perl and the gPodder Team
#
# gPodder is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# gPodder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import dbus
import dbus.glib
import hildon
import osso
import gconf
class FremantleRotation(object):
"""thp's screen rotation for Maemo 5
Simply instantiate an object of this class and let it auto-rotate
your StackableWindows depending on the device orientation.
If you need to relayout a window, connect to its "configure-event"
signal and measure the ratio of width/height and relayout for that.
You can set the mode for rotation to AUTOMATIC (default), NEVER or
ALWAYS with the set_mode() method.
"""
AUTOMATIC, NEVER, ALWAYS = range(3)
# Human-readable captions for the above constants
MODE_CAPTIONS = ("Automatic", "Landscape", "Portrait")
# Privately-used constants
_PORTRAIT, _LANDSCAPE = ('portrait', 'landscape')
_ENABLE_ACCEL = 'req_accelerometer_enable'
_DISABLE_ACCEL = 'req_accelerometer_disable'
# Defined in mce/dbus-names.h
_MCE_SERVICE = 'com.nokia.mce'
_MCE_REQUEST_PATH = '/com/nokia/mce/request'
_MCE_REQUEST_IF = 'com.nokia.mce.request'
def __init__(self, app_name, main_window=None, version='1.0', mode=0, app=None):
"""Create a new rotation manager
app_name ... The name of your application (for osso.Context)
main_window ... The root window (optional, hildon.StackableWindow)
version ... The version of your application (optional, string)
mode ... Initial mode for this manager (default: AUTOMATIC)
"""
self._orientation = None
self._main_window = main_window
self._stack = hildon.WindowStack.get_default()
self._mode = -1
self.app = app
app_id = '-'.join((app_name, self.__class__.__name__))
self._osso_context = osso.Context(app_id, version, False)
self._last_dbus_orientation = self._get_current_orientation()
program = hildon.Program.get_instance()
program.connect('notify::is-topmost', self._on_topmost_changed)
system_bus = dbus.Bus.get_system()
system_bus.add_signal_receiver(self._on_orientation_signal, \
signal_name='sig_device_orientation_ind', \
dbus_interface='com.nokia.mce.signal', \
path='/com/nokia/mce/signal')
self.set_mode(mode)
def _get_current_orientation(self):
"""Return the current orientation
        Returns portrait only if the device is known to be in portrait
        mode; otherwise returns landscape (landscape mode or unknown).
"""
if self._send_mce_request('get_device_orientation', True) \
== self._PORTRAIT:
return self._PORTRAIT
else:
return self._LANDSCAPE
def get_mode(self):
"""Get the currently-set rotation mode
This will return one of three values: AUTOMATIC, ALWAYS or NEVER.
"""
return self._mode
def set_mode(self, new_mode):
"""Set the rotation mode
You can set the rotation mode to AUTOMATIC (use hardware rotation
info), ALWAYS (force portrait) and NEVER (force landscape).
"""
if new_mode not in (self.AUTOMATIC, self.ALWAYS, self.NEVER):
raise ValueError('Unknown rotation mode')
if self._mode != new_mode:
if self._mode == self.AUTOMATIC:
# Remember the current "automatic" orientation for later
self._last_dbus_orientation = self._orientation
# Tell MCE that we don't need the accelerometer anymore
self._send_mce_request(self._DISABLE_ACCEL)
if new_mode == self.NEVER:
self._orientation_changed(self._LANDSCAPE)
elif new_mode == self.ALWAYS:
self._orientation_changed(self._PORTRAIT)
elif new_mode == self.AUTOMATIC:
# Restore the last-known "automatic" orientation
self._orientation_changed(self._last_dbus_orientation)
# Tell MCE that we need the accelerometer again
self._send_mce_request(self._ENABLE_ACCEL)
self._mode = new_mode
def _send_mce_request(self, request, wait_reply=False):
rpc = osso.Rpc(self._osso_context)
return rpc.rpc_run(self._MCE_SERVICE,
self._MCE_REQUEST_PATH,
self._MCE_REQUEST_IF,
request,
wait_reply=wait_reply,
use_system_bus=True)
def _on_topmost_changed(self, program, property_spec):
# XXX: This seems to never get called on Fremantle(?)
if self._mode == self.AUTOMATIC:
if program.get_is_topmost():
self._send_mce_request(self._ENABLE_ACCEL)
else:
self._send_mce_request(self._DISABLE_ACCEL)
def _get_main_window(self):
if self._main_window:
# If we have gotten the main window as parameter, return it and
# don't try "harder" to find another window using the stack
return self._main_window
else:
# The main window is at the "bottom" of the window stack, and as
# the list we get with get_windows() is sorted "topmost first", we
# simply take the last item of the list to get our main window
windows = self._stack.get_windows()
if windows:
return windows[-1]
else:
return None
def _orientation_changed(self, orientation):
if self._orientation == orientation:
# Ignore repeated requests
return
flags = 0
if orientation != self._LANDSCAPE:
flags |= hildon.PORTRAIT_MODE_SUPPORT
if orientation == self._PORTRAIT:
flags |= hildon.PORTRAIT_MODE_REQUEST
window = self._get_main_window()
if window is not None:
hildon.hildon_gtk_window_set_portrait_flags(window, flags)
self._orientation = orientation
#if orientation == self._PORTRAIT:
# try:
# self.app.disp.clear()
# self.app.disp.displayFeed()
# except:
# pass
def _on_orientation_signal(self, orientation, stand, face, x, y, z):
if orientation in (self._PORTRAIT, self._LANDSCAPE):
if (self._mode == self.AUTOMATIC) and (not gconf.client_get_default().get_bool('/system/osso/af/slide-open')):
# Automatically set the rotation based on hardware orientation, if the keyboard is not open
self._orientation_changed(orientation)
else:
# Ignore orientation changes for non-automatic modes, but save
# the current orientation for "automatic" mode later on
self._last_dbus_orientation = orientation
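# Editorial usage sketch (an assumption, not part of gPodder): minimal wiring
# of FremantleRotation into a Maemo 5 application; requires hildon and gtk.
if __name__ == '__main__':
    import gtk
    win = hildon.StackableWindow()
    win.set_title('Rotation demo')
    win.connect('destroy', gtk.main_quit)
    win.show_all()
    # AUTOMATIC mode (the default) follows the hardware accelerometer
    rotation = FremantleRotation('rotation-demo', main_window=win)
    gtk.main()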
|
crasu/feedingit
|
src/portrait.py
|
Python
|
gpl-3.0
| 7,587
|
'''
This file is part of pyShop
pyShop is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
pyShop is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with pyShop. If not, see <http://www.gnu.org/licenses/>.
Copyright (c) Steve "Uru" West 2012
This file contains some global settings for the cart system
'''
#Contains the value that will be used to reference the cart inside the session.
CART_SESSION_NAME = 'cart_'
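# Editorial sketch (hypothetical usage, not part of pyShop): a view would
# typically key the cart data in the session with this value, e.g.
#
#   from cart.settings import CART_SESSION_NAME
#   cart = request.session.get(CART_SESSION_NAME, {})
#   request.session[CART_SESSION_NAME] = cart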
|
Uruwolf/pyshop
|
cart/settings.py
|
Python
|
gpl-3.0
| 839
|
"""Unit tests for the ``synse_server.cmd.plugin`` module."""
import pytest
from synse_grpc import api, client
from synse_server import cmd, errors
from synse_server.plugin import Plugin
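# Editorial note: the `simple_plugin` fixture used below comes from the test
# suite's conftest (not shown here); from the assertions in these tests it is
# assumed to be roughly:
#
#   Plugin(
#       info={'id': '123', 'tag': 'test/foo',
#             'vcs': 'https://github.com/vapor-ware/synse-server'},
#       version={},
#       client=client.PluginClientV3('localhost:5432', 'tcp'),
#   )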
@pytest.mark.asyncio
async def test_plugin_not_found(mocker):
# Mock test data
mock_get = mocker.patch('synse_server.plugin.manager.get', return_value=None)
# --- Test case -----------------------------
plugin_id = '123456'
with pytest.raises(errors.NotFound):
await cmd.plugin(plugin_id)
mock_get.assert_called_once()
mock_get.assert_called_with(plugin_id)
@pytest.mark.asyncio
async def test_plugin_client_error(mocker):
# Mock test data
mock_get = mocker.patch(
'synse_server.plugin.manager.get',
return_value=Plugin(
info={'id': '123456', 'tag': 'test-plugin'},
version={},
client=client.PluginClientV3('localhost:5001', 'tcp'),
),
)
mock_health = mocker.patch(
'synse_grpc.client.PluginClientV3.health',
side_effect=ValueError(),
)
# --- Test case -----------------------------
plugin_id = '123456'
with pytest.raises(errors.ServerError):
await cmd.plugin(plugin_id)
mock_get.assert_called_once()
mock_get.assert_called_with(plugin_id)
mock_health.assert_called_once()
@pytest.mark.asyncio
async def test_plugin_ok(mocker, simple_plugin):
# Mock test data
mocker.patch.dict('synse_server.plugin.manager.plugins', {
'123': simple_plugin,
})
mock_health = mocker.patch(
'synse_grpc.client.PluginClientV3.health',
return_value=api.V3Health(
timestamp='2019-04-22T13:30:00Z',
status=api.OK,
checks=[],
),
)
mock_refresh = mocker.patch(
'synse_server.plugin.manager.refresh',
)
# --- Test case -----------------------------
plugin_id = '123'
resp = await cmd.plugin(plugin_id)
assert resp == {
'id': '123', # from simple_plugin fixture
'tag': 'test/foo', # from simple_plugin fixture
'vcs': 'https://github.com/vapor-ware/synse-server', # from simple_plugin fixture
'active': True,
'network': { # from simple_plugin fixture
'address': 'localhost:5432',
'protocol': 'tcp',
},
'version': {}, # from simple_plugin fixture
'health': {
'timestamp': '2019-04-22T13:30:00Z',
'status': 'OK',
'checks': [],
}
}
mock_health.assert_called_once()
mock_refresh.assert_not_called()
@pytest.mark.asyncio
async def test_plugin_ok_refresh(mocker, simple_plugin):
# Mock test data
mock_get = mocker.patch(
'synse_server.plugin.manager.get',
return_value=simple_plugin,
)
mock_health = mocker.patch(
'synse_grpc.client.PluginClientV3.health',
return_value=api.V3Health(
timestamp='2019-04-22T13:30:00Z',
status=api.OK,
checks=[],
),
)
mock_refresh = mocker.patch(
'synse_server.plugin.manager.refresh',
)
# --- Test case -----------------------------
plugin_id = '123'
resp = await cmd.plugin(plugin_id)
assert resp == {
'id': '123', # from simple_plugin fixture
'tag': 'test/foo', # from simple_plugin fixture
'vcs': 'https://github.com/vapor-ware/synse-server', # from simple_plugin fixture
'active': True,
'network': { # from simple_plugin fixture
'address': 'localhost:5432',
'protocol': 'tcp',
},
'version': {}, # from simple_plugin fixture
'health': {
'timestamp': '2019-04-22T13:30:00Z',
'status': 'OK',
'checks': [],
}
}
mock_get.assert_called_once()
mock_get.assert_called_with(plugin_id)
mock_health.assert_called_once()
mock_refresh.assert_called_once()
@pytest.mark.asyncio
async def test_plugins_no_plugin(mocker):
# Mock test data
mocker.patch.dict('synse_server.plugin.manager.plugins', {})
mock_refresh = mocker.patch(
'synse_server.plugin.manager.refresh',
)
# --- Test case -----------------------------
resp = await cmd.plugins(refresh=False)
assert resp == []
mock_refresh.assert_called_once()
@pytest.mark.asyncio
async def test_plugins_ok(mocker, simple_plugin):
# Mock test data
mocker.patch.dict('synse_server.plugin.manager.plugins', {
'123': simple_plugin,
})
mock_refresh = mocker.patch(
'synse_server.plugin.manager.refresh',
)
# --- Test case -----------------------------
resp = await cmd.plugins(refresh=False)
assert resp == [
{ # from simple_plugin fixture
'id': '123',
'tag': 'test/foo',
'active': True,
},
]
mock_refresh.assert_not_called()
@pytest.mark.asyncio
async def test_plugins_ok_with_refresh(mocker, simple_plugin):
# Mock test data
mocker.patch.dict('synse_server.plugin.manager.plugins', {
'123': simple_plugin,
})
mock_refresh = mocker.patch(
'synse_server.plugin.manager.refresh',
)
# --- Test case -----------------------------
resp = await cmd.plugins(refresh=True)
assert resp == [
{ # from simple_plugin fixture
'id': '123',
'tag': 'test/foo',
'active': True,
},
]
mock_refresh.assert_called()
@pytest.mark.asyncio
@pytest.mark.usefixtures('patch_utils_rfc3339now')
async def test_plugin_health_no_plugins(mocker):
# Mock test data
mocker.patch.dict('synse_server.plugin.manager.plugins', {})
mock_refresh = mocker.patch(
'synse_server.plugin.manager.refresh',
)
# --- Test case -----------------------------
resp = await cmd.plugin_health()
assert resp == {
'status': 'healthy',
'updated': '2019-04-22T13:30:00Z', # from fixture: patch_utils_rfc3339now
'healthy': [],
'unhealthy': [],
'active': 0,
'inactive': 0,
}
mock_refresh.assert_called_once()
@pytest.mark.asyncio
@pytest.mark.usefixtures('patch_utils_rfc3339now')
async def test_plugin_health_healthy(mocker, simple_plugin):
# Mock test data
mocker.patch.dict('synse_server.plugin.manager.plugins', {
'123': simple_plugin,
})
mock_health = mocker.patch(
'synse_grpc.client.PluginClientV3.health',
return_value=api.V3Health(
timestamp='2019-04-22T13:30:00Z',
status=api.OK,
checks=[],
),
)
mock_refresh = mocker.patch(
'synse_server.plugin.manager.refresh',
)
# --- Test case -----------------------------
resp = await cmd.plugin_health()
assert resp == {
'status': 'healthy',
'updated': '2019-04-22T13:30:00Z', # from fixture: patch_utils_rfc3339now
'healthy': ['123'],
'unhealthy': [],
'active': 1,
'inactive': 0,
}
mock_health.assert_called_once()
mock_refresh.assert_not_called()
@pytest.mark.asyncio
@pytest.mark.usefixtures('patch_utils_rfc3339now')
async def test_plugin_health_unhealthy(mocker, simple_plugin):
# Mock test data
mocker.patch.dict('synse_server.plugin.manager.plugins', {
'123': simple_plugin,
})
mock_health = mocker.patch(
'synse_grpc.client.PluginClientV3.health',
return_value=api.V3Health(
timestamp='2019-04-22T13:30:00Z',
status=api.FAILING,
checks=[],
),
)
mock_refresh = mocker.patch(
'synse_server.plugin.manager.refresh',
)
# --- Test case -----------------------------
resp = await cmd.plugin_health()
assert resp == {
'status': 'unhealthy',
'updated': '2019-04-22T13:30:00Z', # from fixture: patch_utils_rfc3339now
'healthy': [],
'unhealthy': ['123'],
'active': 1,
'inactive': 0,
}
mock_health.assert_called_once()
mock_refresh.assert_not_called()
@pytest.mark.asyncio
@pytest.mark.usefixtures('patch_utils_rfc3339now')
async def test_plugin_health_inactive(mocker, simple_plugin):
# Mock test data
mocker.patch.dict('synse_server.plugin.manager.plugins', {
'123': simple_plugin,
})
mock_health = mocker.patch(
'synse_grpc.client.PluginClientV3.health',
side_effect=ValueError(),
)
mock_refresh = mocker.patch(
'synse_server.plugin.manager.refresh',
)
# --- Test case -----------------------------
resp = await cmd.plugin_health()
assert resp == {
'status': 'unhealthy',
'updated': '2019-04-22T13:30:00Z', # from fixture: patch_utils_rfc3339now
'healthy': [],
'unhealthy': [],
'active': 0,
'inactive': 1,
}
mock_health.assert_called_once()
mock_refresh.assert_not_called()
|
vapor-ware/synse-server
|
tests/unit/cmd/test_plugin.py
|
Python
|
gpl-3.0
| 8,992
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Name:
qgis2geowe.py
Author:
Patricio Soriano :: SIGdeletras.com
Description:
    Python script to generate a GeoWE project from QGIS.
"""
import os
import zipfile
import json
import time
import shutil
from qgis.gui import QgsMessageBar
from PyQt4.QtGui import QMessageBox
def rgb_to_hex(rgb):
return '#' + '%02x%02x%02x' % rgb
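# Illustrative example (editorial): rgb_to_hex((255, 0, 128)) returns '#ff0080'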
# Metadata variables for the GeoWE project
geoweprojVersion = '1.0.0'
geoweprojTile = 'Proyecto GeoWE2QGIS'
geoweprojDescription = 'Ejemplo de proyecto GeoWE generado desde QGIS'
geoweprojDate = (time.strftime("%d/%m/%Y"))
# Directory where the project will be saved (the trailing slash is required,
# since the paths below are built by plain string concatenation)
prjurl = '/home/user/folder/'
#prjurl = 'C:/folder/'
# GeoWE project name
geoweprojName = 'geowe-project-qgis'
selected_layers = iface.layerTreeView().selectedLayers()
# Check whether any layers are selected
if selected_layers:
    # Generate the project zip file
zf = zipfile.ZipFile(prjurl + geoweprojName +".zip", "w")
    # Create the project file
fo = open(prjurl + "geowe-project.prj", "w")
    # Project metadata
fo.write('{"version": "' + geoweprojVersion + '", "title": "'
+ geoweprojTile + '", "description": "' + geoweprojDescription
+ '", "date": "' + geoweprojDate + '", "vectors": [')
    # Create a CRS object for EPSG:4326. Mandatory for GeoWE projects
crs4326 = QgsCoordinateReferenceSystem(4326,
QgsCoordinateReferenceSystem.EpsgCrsId)
    # Create a folder for the geojson files
geojsonfolder = (prjurl + "geojson")
os.mkdir(geojsonfolder, 0755)
    # Convert the layers loaded in QGIS to GeoJSON and add them to the project
for l in selected_layers:
        # create the geojson
qgis.core.QgsVectorFileWriter.writeAsVectorFormat(l,
geojsonfolder + '/' + l.name() + '.geojson', 'utf-8',
crs4326, 'GeoJson')
data = json.loads(open(geojsonfolder + '/' +
l.name() + '.geojson').read())
jsonString = json.dumps(data)
double_encode = json.dumps(jsonString)
fo.write('{"name": "' + l.name().encode('utf-8') + '", "content": ')
fo.write(double_encode)
props = l.rendererV2().symbol().symbolLayer(0).properties()
if l.wkbType() == QGis.WKBLineString:
            # get properties when the layer is a line layer
            # print 'Layer is a line layer'
colorlist = eval((props['line_color']))
hexColor = rgb_to_hex((colorlist[0], colorlist[1], colorlist[2]))
fo.write(
", \"style\": {\"fillColor\": \"" + hexColor + '", "fillOpacity": 0.70, "strokeColor": "' + hexColor +
'", "strokeWidth": 3 } } ,')
else:
colorlist = eval((props['color']))
outlineColorlist = eval((props['outline_color']))
hexColor = rgb_to_hex((colorlist[0], colorlist[1], colorlist[2]))
hexOutlineColor = rgb_to_hex((outlineColorlist[0],
outlineColorlist[1], outlineColorlist[2]))
fo.write(
', "style": {"fillColor": "' + hexColor + '", "fillOpacity": 0.70, "strokeColor": "'
+ hexOutlineColor + '", "strokeWidth": 3 } } ,')
fo.write('] }')
fo.close()
projfile = prjurl + "geowe-project.prj"
zf.write(projfile,os.path.basename(projfile))
os.remove(prjurl + "geowe-project.prj")
zf.close()
shutil.rmtree(geojsonfolder)
    QMessageBox.information(iface.mainWindow(), "QGIS2GeoWE", 'The project\
    \'%s.zip\' with %s layers was generated successfully in the folder %s\
    ' % (geoweprojName, len(selected_layers), prjurl))
else:
iface.messageBar().pushMessage("Error", "Please select one or more layers!", QgsMessageBar.CRITICAL, 6)
|
sigdeletras/qgis2geowe
|
qgis2geowe.py
|
Python
|
gpl-3.0
| 3,954
|
#listComprehension.py
#Garik Sadovy
#gcsadovy
# Create a list of all uppercase field names
fieldNames = ['FID', 'Shape', 'COVER', 'RECNO']
fieldNames2 = [ i.upper() for i in fieldNames ]
print "1. All cap field names:", fieldNames2
# Create a list of rounded float values
strList = ['3.34', '1.07', '4.21', '4.56', '4.5']
intList = [round(float(i)) for i in strList] #modify this (round to the nearest integer, not truncate: gives [3.0, 1.0, 4.0, 5.0, 5.0])
print "2. Rounded float values:", intList
# Create a list of reciprocal values (the reciprocal of a number n is defined as 1/n)
values = [8.0, 4.0, 4.0, 1.0, 5.0, 4.0, 4.0, 2.0]
reciprocal = [(1/float(i)) for i in values] #modify this
print "3. The reciprocal values:", reciprocal
# Create a list in which all the slash marks ('/') are replaced with underscores ('_').
fieldNames = [ "FireType/Protection-Type", "Time/Date", "Condition/Status/Role"]
fieldNames2 = [i.replace('/', '_') for i in fieldNames] #modify this
print "4. No slashes:", fieldNames2
# Create a list of output file names
import os
inputFiles = os.listdir("C:/Temp")
# Sample output below for inputFiles = ["COVER.shp", "Fires.shp", "Data.txt"]
outputFiles = [(os.path.basename(i[:-4]))+"out"+(os.path.basename(i[-4:])) for i in inputFiles] #modify this
print "5. Output files:", outputFiles
# Create a list file extensions -- You may assume file extensions are the last
# 4 characters or for an extra challenge, find a solution using the 'os.path.splitext' method
import arcpy
inputFiles = os.listdir("C:/Temp")
# Sample output below for inputFiles = ["COVER.shp", "Fires.shp", "Data.txt"]
extensions = [os.path.basename(i[-4:]) for i in inputFiles] #modify this
print "6. File extensions:", extensions
|
gcsadovy/generalPY
|
listComprehension.py
|
Python
|
gpl-3.0
| 1,709
|
# (C) British Crown Copyright 2014, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for the `iris.quickplot.outline` function."""
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
from iris.tests.unit.plot import TestGraphicStringCoord
if tests.MPL_AVAILABLE:
import iris.quickplot as qplt
@tests.skip_plot
class TestStringCoordPlot(TestGraphicStringCoord):
def test_yaxis_labels(self):
qplt.outline(self.cube, coords=('bar', 'str_coord'))
self.assertBoundsTickLabels('yaxis')
def test_xaxis_labels(self):
qplt.outline(self.cube, coords=('str_coord', 'bar'))
self.assertBoundsTickLabels('xaxis')
if __name__ == "__main__":
tests.main()
|
scollis/iris
|
lib/iris/tests/unit/quickplot/test_outline.py
|
Python
|
gpl-3.0
| 1,408
|
import random, cmath, math
def color(x, y, phase, opacity):
phase *= 2 * math.pi
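    # radial rainbow: hue rotates with distance from the centre (6, 4) and with phase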
rad, phi = cmath.polar(complex(x-6, y-4))
r, g, b = (math.cos(-rad*.5+phase)+1.)/2, (math.cos(-rad*.5+phase+2/3.*math.pi)+1.)/2, (math.cos(-rad*.5+phase+4/3.*math.pi)+1.)/2
return int(r**2 *255*opacity),int(g**2 *255*opacity),int(b**2 *255*opacity)
class SpaceInvader(object):
def __init__(self, w, h):
self.c, self.p = -1, -1
self.w, self.h = w, h
_ = 0
X = 1
self.sprite= (
[
[_,_,_,_,_,_,_,_,_,_,_,_,],
[_,_,_,X,_,_,_,_,_,X,_,_,],
[_,_,_,_,X,_,_,_,X,_,_,_,],
[_,_,_,X,X,X,X,X,X,X,_,_,],
[_,_,X,X,_,X,X,X,_,X,X,_,],
[_,X,X,X,X,X,X,X,X,X,X,X,],
[_,X,_,X,X,X,X,X,X,X,_,X,],
[_,X,_,X,_,_,_,_,_,X,_,X,],
[_,_,_,_,X,X,_,X,X,_,_,_,],
[_,_,_,_,_,_,_,_,_,_,_,_,],
],
[
[_,_,_,_,_,_,_,_,_,_,_,_,],
[_,_,_,X,_,_,_,_,_,X,_,_,],
[_,X,_,_,X,_,_,_,X,_,_,X,],
[_,X,_,X,X,X,X,X,X,X,_,X,],
[_,X,X,X,_,X,X,X,_,X,X,X,],
[_,X,X,X,X,X,X,X,X,X,X,X,],
[_,_,X,X,X,X,X,X,X,X,X,_,],
[_,_,_,X,_,_,_,_,_,X,_,_,],
[_,_,X,_,_,_,_,_,_,_,X,_,],
[_,_,_,_,_,_,_,_,_,_,_,_,],
])
def next(self):
self.c += 1
        self.c %= len(self.sprite) * 25  # two sprite frames, shown for 25 ticks each
self.p += 1
self.p %= 64
phase = self.p/64.
return [ [ color(x, y, phase, self.sprite[int(self.c/25)][y][x]) for x in xrange(self.w) ] for y in xrange(self.h) ]
animations = [ SpaceInvader ]
|
MerlijnWajer/lewd
|
animations/wobbleinvader.py
|
Python
|
gpl-3.0
| 1,677
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: postgresql_user
short_description: Adds or removes a user (role) from a PostgreSQL database.
description:
- Add or remove PostgreSQL users (roles) from a remote host and, optionally,
grant the users access to an existing database or tables.
- The fundamental function of the module is to create, or delete, roles from
a PostgreSQL cluster. Privilege assignment, or removal, is an optional
step, which works on one database at a time. This allows for the module to
be called several times in the same module to modify the permissions on
different databases, or to grant permissions to already existing users.
- A user cannot be removed until all the privileges have been stripped from
    the user. In such a situation, if the module tries to remove the user it
    will fail. To avoid this, the fail_on_user option signals the module to
    try to remove the user but to keep going if that is not possible; the
module will report if changes happened and separately if the user was
removed or not.
version_added: "0.6"
options:
name:
description:
- name of the user (role) to add or remove
required: true
default: null
password:
description:
      - set the user's password; before 1.4 this was required.
- >
When passing an encrypted password, the encrypted parameter must also be true, and it must be generated with the format
C('str[\\"md5\\"] + md5[ password + username ]'), resulting in a total of 35 characters. An easy way to do this is:
C(echo \\"md5`echo -n \\"verysecretpasswordJOE\\" | md5`\\"). Note that if the provided password string is already in
MD5-hashed format, then it is used as-is, regardless of encrypted parameter.
default: null
db:
description:
- name of database where permissions will be granted
default: null
fail_on_user:
description:
- if C(yes), fail when user can't be removed. Otherwise just log and continue
default: 'yes'
choices: [ "yes", "no" ]
port:
description:
- Database port to connect to.
default: 5432
login_user:
description:
- User (role) used to authenticate with PostgreSQL
default: postgres
login_password:
description:
- Password used to authenticate with PostgreSQL
default: null
login_host:
description:
- Host running PostgreSQL.
default: localhost
login_unix_socket:
description:
- Path to a Unix domain socket for local connections
default: null
priv:
description:
- "PostgreSQL privileges string in the format: C(table:priv1,priv2)"
default: null
role_attr_flags:
description:
- "PostgreSQL role attributes string in the format: CREATEDB,CREATEROLE,SUPERUSER"
default: ""
choices: [ "[NO]SUPERUSER","[NO]CREATEROLE", "[NO]CREATEUSER", "[NO]CREATEDB",
"[NO]INHERIT", "[NO]LOGIN", "[NO]REPLICATION", "[NO]BYPASSRLS" ]
state:
description:
- The user (role) state
default: present
choices: [ "present", "absent" ]
encrypted:
description:
- whether the password is stored hashed in the database. boolean. Passwords can be passed already hashed or unhashed, and postgresql ensures the
stored password is hashed when encrypted is set.
default: false
version_added: '1.4'
expires:
description:
- The date at which the user's password is to expire.
- If set to C('infinity'), user's password never expire.
- Note that this value should be a valid SQL date and time type.
default: null
version_added: '1.4'
no_password_changes:
description:
- if C(yes), don't inspect database for password changes. Effective when C(pg_authid) is not accessible (such as AWS RDS). Otherwise, make
password changes as necessary.
default: 'no'
choices: [ "yes", "no" ]
version_added: '2.0'
ssl_mode:
description:
- Determines whether or with what priority a secure SSL TCP/IP connection will be negotiated with the server.
- See https://www.postgresql.org/docs/current/static/libpq-ssl.html for more information on the modes.
- Default of C(prefer) matches libpq default.
default: prefer
choices: [disable, allow, prefer, require, verify-ca, verify-full]
version_added: '2.3'
ssl_rootcert:
description:
- Specifies the name of a file containing SSL certificate authority (CA) certificate(s). If the file exists, the server's certificate will be
verified to be signed by one of these authorities.
default: null
version_added: '2.3'
conn_limit:
description:
- Specifies the user connection limit.
default: null
version_added: '2.4'
notes:
- The default authentication assumes that you are either logging in as or
sudo'ing to the postgres account on the host.
- This module uses psycopg2, a Python PostgreSQL database adapter. You must
ensure that psycopg2 is installed on the host before using this module. If
the remote host is the PostgreSQL server (which is the default case), then
PostgreSQL must also be installed on the remote host. For Ubuntu-based
systems, install the postgresql, libpq-dev, and python-psycopg2 packages
on the remote host before using this module.
- If the passlib library is installed, then passwords that are encrypted
in the DB but not encrypted when passed as arguments can be checked for
changes. If the passlib library is not installed, unencrypted passwords
stored in the DB encrypted will be assumed to have changed.
- If you specify PUBLIC as the user, then the privilege changes will apply
to all users. You may not specify password or role_attr_flags when the
PUBLIC user is specified.
- The ssl_rootcert parameter requires at least Postgres version 8.4 and I(psycopg2) version 2.4.3.
requirements: [ psycopg2 ]
author: "Ansible Core Team"
'''
EXAMPLES = '''
# Create django user and grant access to database and products table
- postgresql_user:
db: acme
name: django
password: ceec4eif7ya
priv: "CONNECT/products:ALL"
# Create rails user, grant privilege to create other databases and demote rails from super user status
- postgresql_user:
name: rails
password: secret
role_attr_flags: CREATEDB,NOSUPERUSER
# Remove test user privileges from acme
- postgresql_user:
db: acme
name: test
priv: "ALL/products:ALL"
state: absent
fail_on_user: no
# Remove test user from test database and the cluster
- postgresql_user:
db: test
name: test
priv: ALL
state: absent
# Set user's password with no expire date
- postgresql_user:
db: acme
name: django
password: mysupersecretword
priv: "CONNECT/products:ALL"
    expires: infinity
# Example privileges string format
# INSERT,UPDATE/table:SELECT/anothertable:ALL
# Remove an existing user's password
- postgresql_user:
db: test
user: test
password: NULL
'''
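# Editorial sketch (illustrative, not part of the upstream module): building a
# password hash in the documented 'md5' + md5(password + username) format,
# suitable for passing to this module together with encrypted=yes. Assumes
# Python 2 str inputs, matching the rest of this module.
def example_md5_password(password, username):
    from hashlib import md5 as _md5  # local alias; the module imports md5 below
    return 'md5' + _md5(password + username).hexdigest()  # 3 + 32 = 35 chars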
import itertools
import re
import traceback
from hashlib import md5
try:
import psycopg2
import psycopg2.extras
except ImportError:
postgresqldb_found = False
else:
postgresqldb_found = True
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.database import pg_quote_identifier, SQLParseError
from ansible.module_utils._text import to_bytes, to_native
from ansible.module_utils.six import iteritems
FLAGS = ('SUPERUSER', 'CREATEROLE', 'CREATEUSER', 'CREATEDB', 'INHERIT', 'LOGIN', 'REPLICATION')
FLAGS_BY_VERSION = {'BYPASSRLS': 90500}
VALID_PRIVS = dict(table=frozenset(('SELECT', 'INSERT', 'UPDATE', 'DELETE', 'TRUNCATE', 'REFERENCES', 'TRIGGER', 'ALL')),
database=frozenset(
('CREATE', 'CONNECT', 'TEMPORARY', 'TEMP', 'ALL')),
)
# map to cope with idiosyncrasies of SUPERUSER and LOGIN
PRIV_TO_AUTHID_COLUMN = dict(SUPERUSER='rolsuper', CREATEROLE='rolcreaterole',
CREATEUSER='rolcreateuser', CREATEDB='rolcreatedb',
INHERIT='rolinherit', LOGIN='rolcanlogin',
REPLICATION='rolreplication', BYPASSRLS='rolbypassrls')
class InvalidFlagsError(Exception):
pass
class InvalidPrivsError(Exception):
pass
# ===========================================
# PostgreSQL module specific support methods.
#
def user_exists(cursor, user):
# The PUBLIC user is a special case that is always there
if user == 'PUBLIC':
return True
query = "SELECT rolname FROM pg_roles WHERE rolname=%(user)s"
cursor.execute(query, {'user': user})
return cursor.rowcount > 0
def user_add(cursor, user, password, role_attr_flags, encrypted, expires, conn_limit):
"""Create a new database user (role)."""
# Note: role_attr_flags escaped by parse_role_attrs and encrypted is a
# literal
query_password_data = dict(password=password, expires=expires)
query = ['CREATE USER %(user)s' %
{"user": pg_quote_identifier(user, 'role')}]
if password is not None:
query.append("WITH %(crypt)s" % {"crypt": encrypted})
query.append("PASSWORD %(password)s")
if expires is not None:
query.append("VALID UNTIL %(expires)s")
if conn_limit is not None:
query.append("CONNECTION LIMIT %(conn_limit)s" % {"conn_limit": conn_limit})
query.append(role_attr_flags)
query = ' '.join(query)
cursor.execute(query, query_password_data)
return True
def user_should_we_change_password(current_role_attrs, user, password, encrypted):
"""Check if we should change the user's password.
Compare the proposed password with the existing one, comparing
hashes if encrypted. If we can't access it assume yes.
"""
if current_role_attrs is None:
# on some databases, E.g. AWS RDS instances, there is no access to
# the pg_authid relation to check the pre-existing password, so we
# just assume password is different
return True
# Do we actually need to do anything?
pwchanging = False
if password is not None:
# 32: MD5 hashes are represented as a sequence of 32 hexadecimal digits
# 3: The size of the 'md5' prefix
# When the provided password looks like a MD5-hash, value of
# 'encrypted' is ignored.
if ((password.startswith('md5') and len(password) == 32 + 3) or encrypted == 'UNENCRYPTED'):
if password != current_role_attrs['rolpassword']:
pwchanging = True
elif encrypted == 'ENCRYPTED':
hashed_password = 'md5{0}'.format(md5(to_bytes(password) + to_bytes(user)).hexdigest())
if hashed_password != current_role_attrs['rolpassword']:
pwchanging = True
return pwchanging
def user_alter(db_connection, module, user, password, role_attr_flags, encrypted, expires, no_password_changes, conn_limit):
"""Change user password and/or attributes. Return True if changed, False otherwise."""
changed = False
cursor = db_connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
# Note: role_attr_flags escaped by parse_role_attrs and encrypted is a
# literal
if user == 'PUBLIC':
if password is not None:
module.fail_json(msg="cannot change the password for PUBLIC user")
elif role_attr_flags != '':
module.fail_json(msg="cannot change the role_attr_flags for PUBLIC user")
else:
return False
# Handle passwords.
if not no_password_changes and (password is not None or role_attr_flags != '' or expires is not None or conn_limit is not None):
# Select password and all flag-like columns in order to verify changes.
try:
select = "SELECT * FROM pg_authid where rolname=%(user)s"
cursor.execute(select, {"user": user})
# Grab current role attributes.
current_role_attrs = cursor.fetchone()
except psycopg2.ProgrammingError:
current_role_attrs = None
db_connection.rollback()
pwchanging = user_should_we_change_password(current_role_attrs, user, password, encrypted)
role_attr_flags_changing = False
if role_attr_flags:
role_attr_flags_dict = {}
for r in role_attr_flags.split(' '):
if r.startswith('NO'):
role_attr_flags_dict[r.replace('NO', '', 1)] = False
else:
role_attr_flags_dict[r] = True
for role_attr_name, role_attr_value in role_attr_flags_dict.items():
if current_role_attrs[PRIV_TO_AUTHID_COLUMN[role_attr_name]] != role_attr_value:
role_attr_flags_changing = True
if expires is not None:
cursor.execute("SELECT %s::timestamptz;", (expires,))
expires_with_tz = cursor.fetchone()[0]
expires_changing = expires_with_tz != current_role_attrs.get('rolvaliduntil')
else:
expires_changing = False
conn_limit_changing = (conn_limit is not None and conn_limit != current_role_attrs['rolconnlimit'])
if not pwchanging and not role_attr_flags_changing and not expires_changing and not conn_limit_changing:
return False
alter = ['ALTER USER %(user)s' % {"user": pg_quote_identifier(user, 'role')}]
if pwchanging:
alter.append("WITH %(crypt)s" % {"crypt": encrypted})
alter.append("PASSWORD %(password)s")
alter.append(role_attr_flags)
elif role_attr_flags:
alter.append('WITH %s' % role_attr_flags)
if expires is not None:
alter.append("VALID UNTIL %(expires)s")
if conn_limit is not None:
alter.append("CONNECTION LIMIT %(conn_limit)s" % {"conn_limit": conn_limit})
query_password_data = dict(password=password, expires=expires)
try:
cursor.execute(' '.join(alter), query_password_data)
changed = True
except psycopg2.InternalError as e:
if e.pgcode == '25006':
# Handle errors due to read-only transactions indicated by pgcode 25006
# ERROR: cannot execute ALTER ROLE in a read-only transaction
changed = False
module.fail_json(msg=e.pgerror, exception=traceback.format_exc())
return changed
else:
raise psycopg2.InternalError(e)
elif no_password_changes and role_attr_flags != '':
# Grab role information from pg_roles instead of pg_authid
select = "SELECT * FROM pg_roles where rolname=%(user)s"
cursor.execute(select, {"user": user})
# Grab current role attributes.
current_role_attrs = cursor.fetchone()
role_attr_flags_changing = False
if role_attr_flags:
role_attr_flags_dict = {}
for r in role_attr_flags.split(' '):
if r.startswith('NO'):
role_attr_flags_dict[r.replace('NO', '', 1)] = False
else:
role_attr_flags_dict[r] = True
for role_attr_name, role_attr_value in role_attr_flags_dict.items():
if current_role_attrs[PRIV_TO_AUTHID_COLUMN[role_attr_name]] != role_attr_value:
role_attr_flags_changing = True
if not role_attr_flags_changing:
return False
alter = ['ALTER USER %(user)s' %
{"user": pg_quote_identifier(user, 'role')}]
if role_attr_flags:
alter.append('WITH %s' % role_attr_flags)
try:
cursor.execute(' '.join(alter))
except psycopg2.InternalError as e:
if e.pgcode == '25006':
# Handle errors due to read-only transactions indicated by pgcode 25006
# ERROR: cannot execute ALTER ROLE in a read-only transaction
changed = False
module.fail_json(msg=e.pgerror, exception=traceback.format_exc())
return changed
else:
raise psycopg2.InternalError(e)
# Grab new role attributes.
cursor.execute(select, {"user": user})
new_role_attrs = cursor.fetchone()
# Detect any differences between current_ and new_role_attrs.
changed = current_role_attrs != new_role_attrs
return changed
def user_delete(cursor, user):
"""Try to remove a user. Returns True if successful otherwise False"""
cursor.execute("SAVEPOINT ansible_pgsql_user_delete")
try:
cursor.execute("DROP USER %s" % pg_quote_identifier(user, 'role'))
except:
cursor.execute("ROLLBACK TO SAVEPOINT ansible_pgsql_user_delete")
cursor.execute("RELEASE SAVEPOINT ansible_pgsql_user_delete")
return False
cursor.execute("RELEASE SAVEPOINT ansible_pgsql_user_delete")
return True
def has_table_privileges(cursor, user, table, privs):
"""
Return the difference between the privileges that a user already has and
the privileges that they desire to have.
:returns: tuple of:
* privileges that they have and were requested
* privileges they currently hold but were not requested
* privileges requested that they do not hold
"""
cur_privs = get_table_privileges(cursor, user, table)
have_currently = cur_privs.intersection(privs)
other_current = cur_privs.difference(privs)
desired = privs.difference(cur_privs)
return (have_currently, other_current, desired)
def get_table_privileges(cursor, user, table):
if '.' in table:
schema, table = table.split('.', 1)
else:
schema = 'public'
query = '''SELECT privilege_type FROM information_schema.role_table_grants
WHERE grantee=%s AND table_name=%s AND table_schema=%s'''
cursor.execute(query, (user, table, schema))
return frozenset([x[0] for x in cursor.fetchall()])
def grant_table_privileges(cursor, user, table, privs):
# Note: priv escaped by parse_privs
privs = ', '.join(privs)
query = 'GRANT %s ON TABLE %s TO %s' % (
privs, pg_quote_identifier(table, 'table'), pg_quote_identifier(user, 'role'))
cursor.execute(query)
def revoke_table_privileges(cursor, user, table, privs):
# Note: priv escaped by parse_privs
privs = ', '.join(privs)
query = 'REVOKE %s ON TABLE %s FROM %s' % (
privs, pg_quote_identifier(table, 'table'), pg_quote_identifier(user, 'role'))
cursor.execute(query)
def get_database_privileges(cursor, user, db):
priv_map = {
'C': 'CREATE',
'T': 'TEMPORARY',
'c': 'CONNECT',
}
query = 'SELECT datacl FROM pg_database WHERE datname = %s'
cursor.execute(query, (db,))
datacl = cursor.fetchone()[0]
if datacl is None:
return set()
r = re.search(r'%s\\?"?=(C?T?c?)/[^,]+,?' % user, datacl)
if r is None:
return set()
o = set()
for v in r.group(1):
o.add(priv_map[v])
return normalize_privileges(o, 'database')
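# Illustrative note (editorial): a pg_database.datacl entry such as
# '{joe=CTc/postgres}' matches the regex above with 'CTc' captured for user
# joe, which priv_map expands to {'CREATE', 'TEMPORARY', 'CONNECT'}.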
def has_database_privileges(cursor, user, db, privs):
"""
Return the difference between the privileges that a user already has and
the privileges that they desire to have.
:returns: tuple of:
* privileges that they have and were requested
* privileges they currently hold but were not requested
* privileges requested that they do not hold
"""
cur_privs = get_database_privileges(cursor, user, db)
have_currently = cur_privs.intersection(privs)
other_current = cur_privs.difference(privs)
desired = privs.difference(cur_privs)
return (have_currently, other_current, desired)
def grant_database_privileges(cursor, user, db, privs):
# Note: priv escaped by parse_privs
privs = ', '.join(privs)
if user == "PUBLIC":
query = 'GRANT %s ON DATABASE %s TO PUBLIC' % (
privs, pg_quote_identifier(db, 'database'))
else:
query = 'GRANT %s ON DATABASE %s TO %s' % (
privs, pg_quote_identifier(db, 'database'),
pg_quote_identifier(user, 'role'))
cursor.execute(query)
def revoke_database_privileges(cursor, user, db, privs):
# Note: priv escaped by parse_privs
privs = ', '.join(privs)
if user == "PUBLIC":
query = 'REVOKE %s ON DATABASE %s FROM PUBLIC' % (
privs, pg_quote_identifier(db, 'database'))
else:
query = 'REVOKE %s ON DATABASE %s FROM %s' % (
privs, pg_quote_identifier(db, 'database'),
pg_quote_identifier(user, 'role'))
cursor.execute(query)
def revoke_privileges(cursor, user, privs):
if privs is None:
return False
revoke_funcs = dict(table=revoke_table_privileges,
database=revoke_database_privileges)
check_funcs = dict(table=has_table_privileges,
database=has_database_privileges)
changed = False
for type_ in privs:
for name, privileges in iteritems(privs[type_]):
# Check that any of the privileges requested to be removed are
# currently granted to the user
differences = check_funcs[type_](cursor, user, name, privileges)
if differences[0]:
revoke_funcs[type_](cursor, user, name, privileges)
changed = True
return changed
def grant_privileges(cursor, user, privs):
if privs is None:
return False
grant_funcs = dict(table=grant_table_privileges,
database=grant_database_privileges)
check_funcs = dict(table=has_table_privileges,
database=has_database_privileges)
changed = False
for type_ in privs:
for name, privileges in iteritems(privs[type_]):
# Check that any of the privileges requested for the user are
# currently missing
differences = check_funcs[type_](cursor, user, name, privileges)
if differences[2]:
grant_funcs[type_](cursor, user, name, privileges)
changed = True
return changed
def parse_role_attrs(cursor, role_attr_flags):
"""
Parse role attributes string for user creation.
Format:
attributes[,attributes,...]
Where:
attributes := CREATEDB,CREATEROLE,NOSUPERUSER,...
[ "[NO]SUPERUSER","[NO]CREATEROLE", "[NO]CREATEUSER", "[NO]CREATEDB",
"[NO]INHERIT", "[NO]LOGIN", "[NO]REPLICATION",
"[NO]BYPASSRLS" ]
Note: "[NO]BYPASSRLS" role attribute introduced in 9.5
"""
flags = frozenset(role.upper() for role in role_attr_flags.split(',') if role)
valid_flags = frozenset(itertools.chain(FLAGS, get_valid_flags_by_version(cursor)))
valid_flags = frozenset(itertools.chain(valid_flags, ('NO%s' % flag for flag in valid_flags)))
if not flags.issubset(valid_flags):
raise InvalidFlagsError('Invalid role_attr_flags specified: %s' %
' '.join(flags.difference(valid_flags)))
return ' '.join(flags)
def normalize_privileges(privs, type_):
new_privs = set(privs)
if 'ALL' in new_privs:
new_privs.update(VALID_PRIVS[type_])
new_privs.remove('ALL')
if 'TEMP' in new_privs:
new_privs.add('TEMPORARY')
new_privs.remove('TEMP')
return new_privs
def parse_privs(privs, db):
"""
Parse privilege string to determine permissions for database db.
Format:
privileges[/privileges/...]
Where:
privileges := DATABASE_PRIVILEGES[,DATABASE_PRIVILEGES,...] |
TABLE_NAME:TABLE_PRIVILEGES[,TABLE_PRIVILEGES,...]
"""
if privs is None:
return privs
o_privs = {
'database': {},
'table': {}
}
for token in privs.split('/'):
if ':' not in token:
type_ = 'database'
name = db
priv_set = frozenset(x.strip().upper()
for x in token.split(',') if x.strip())
else:
type_ = 'table'
name, privileges = token.split(':', 1)
priv_set = frozenset(x.strip().upper()
for x in privileges.split(',') if x.strip())
if not priv_set.issubset(VALID_PRIVS[type_]):
raise InvalidPrivsError('Invalid privs specified for %s: %s' %
(type_, ' '.join(priv_set.difference(VALID_PRIVS[type_]))))
priv_set = normalize_privileges(priv_set, type_)
o_privs[type_][name] = priv_set
return o_privs
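# Illustrative example (editorial): parse_privs('CONNECT/products:ALL', 'acme')
# returns {'database': {'acme': set(['CONNECT'])},
#          'table': {'products': set of every table privilege, with ALL
#                    expanded by normalize_privileges}}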
def get_valid_flags_by_version(cursor):
"""
Some role attributes were introduced after certain versions. We want to
compile a list of valid flags against the current Postgres version.
"""
current_version = cursor.connection.server_version
return [
flag
for flag, version_introduced in FLAGS_BY_VERSION.items()
if current_version >= version_introduced
]
# ===========================================
# Module execution.
#
def main():
module = AnsibleModule(
argument_spec=dict(
login_user=dict(default="postgres"),
login_password=dict(default="", no_log=True),
login_host=dict(default=""),
login_unix_socket=dict(default=""),
user=dict(required=True, aliases=['name']),
password=dict(default=None, no_log=True),
state=dict(default="present", choices=["absent", "present"]),
priv=dict(default=None),
db=dict(default=''),
port=dict(default='5432'),
fail_on_user=dict(type='bool', default='yes'),
role_attr_flags=dict(default=''),
encrypted=dict(type='bool', default='no'),
no_password_changes=dict(type='bool', default='no'),
expires=dict(default=None),
ssl_mode=dict(default='prefer', choices=[
'disable', 'allow', 'prefer', 'require', 'verify-ca', 'verify-full']),
ssl_rootcert=dict(default=None),
conn_limit=dict(default=None)
),
supports_check_mode=True
)
user = module.params["user"]
password = module.params["password"]
state = module.params["state"]
fail_on_user = module.params["fail_on_user"]
db = module.params["db"]
if db == '' and module.params["priv"] is not None:
module.fail_json(msg="privileges require a database to be specified")
privs = parse_privs(module.params["priv"], db)
no_password_changes = module.params["no_password_changes"]
if module.params["encrypted"]:
encrypted = "ENCRYPTED"
else:
encrypted = "UNENCRYPTED"
expires = module.params["expires"]
sslrootcert = module.params["ssl_rootcert"]
conn_limit = module.params["conn_limit"]
if not postgresqldb_found:
module.fail_json(msg="the python psycopg2 module is required")
# To use defaults values, keyword arguments must be absent, so
# check which values are empty and don't include in the **kw
# dictionary
params_map = {
"login_host": "host",
"login_user": "user",
"login_password": "password",
"port": "port",
"db": "database",
"ssl_mode": "sslmode",
"ssl_rootcert": "sslrootcert"
}
kw = dict((params_map[k], v) for (k, v) in iteritems(module.params)
if k in params_map and v != "" and v is not None)
# If a login_unix_socket is specified, incorporate it here.
is_localhost = "host" not in kw or kw["host"] == "" or kw["host"] == "localhost"
if is_localhost and module.params["login_unix_socket"] != "":
kw["host"] = module.params["login_unix_socket"]
if psycopg2.__version__ < '2.4.3' and sslrootcert is not None:
module.fail_json(
            msg='psycopg2 must be at least 2.4.3 in order to use the ssl_rootcert parameter')
try:
db_connection = psycopg2.connect(**kw)
cursor = db_connection.cursor(
cursor_factory=psycopg2.extras.DictCursor)
except TypeError as e:
if 'sslrootcert' in e.args[0]:
module.fail_json(
msg='Postgresql server must be at least version 8.4 to support sslrootcert')
module.fail_json(msg="unable to connect to database: %s" % to_native(e), exception=traceback.format_exc())
except Exception as e:
module.fail_json(msg="unable to connect to database: %s" % to_native(e), exception=traceback.format_exc())
try:
role_attr_flags = parse_role_attrs(cursor, module.params["role_attr_flags"])
except InvalidFlagsError as e:
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
kw = dict(user=user)
changed = False
user_removed = False
if state == "present":
if user_exists(cursor, user):
try:
changed = user_alter(db_connection, module, user, password,
role_attr_flags, encrypted, expires, no_password_changes, conn_limit)
except SQLParseError as e:
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
else:
try:
changed = user_add(cursor, user, password,
role_attr_flags, encrypted, expires, conn_limit)
except psycopg2.ProgrammingError as e:
module.fail_json(msg="Unable to add user with given requirement "
"due to : %s" % to_native(e),
exception=traceback.format_exc())
except SQLParseError as e:
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
try:
changed = grant_privileges(cursor, user, privs) or changed
except SQLParseError as e:
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
else:
if user_exists(cursor, user):
if module.check_mode:
changed = True
kw['user_removed'] = True
else:
try:
changed = revoke_privileges(cursor, user, privs)
user_removed = user_delete(cursor, user)
except SQLParseError as e:
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
changed = changed or user_removed
if fail_on_user and not user_removed:
msg = "unable to remove user"
module.fail_json(msg=msg)
kw['user_removed'] = user_removed
if changed:
if module.check_mode:
db_connection.rollback()
else:
db_connection.commit()
kw['changed'] = changed
module.exit_json(**kw)
if __name__ == '__main__':
main()
|
tsdmgz/ansible
|
lib/ansible/modules/database/postgresql/postgresql_user.py
|
Python
|
gpl-3.0
| 31,487
|
# encoding: utf-8
"""
Pensieve: a tool for interacting with and modifying media through openCV and
other python tools
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2017, Gary Hendrick.
#
# This file is part of Pensieve.
#
# Pensieve is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Pensieve is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Pensieve. If not, see <http://www.gnu.org/licenses/>.
#
# The full license is in the file LICENSE, distributed with this software.
#-----------------------------------------------------------------------------
import os
import platform
import pprint
import sys
from pensieve.core import release
def pkg_info(pkg_path):
"""Return dict describing the context of this package
Parameters
----------
pkg_path : str
path containing __init__.py for package
Returns
-------
context : dict
with named parameters of interest
"""
return dict(
ipython_version=release.version,
ipython_path=pkg_path,
sys_version=sys.version,
sys_executable=sys.executable,
sys_platform=sys.platform,
platform=platform.platform(),
os_name=os.name,
)
def get_sys_info():
"""Return useful information about IPython and the system, as a dict."""
p = os.path
path = p.realpath(p.dirname(p.abspath(p.join(__file__, '..'))))
return pkg_info(path)
def sys_info():
"""Return useful information about IPython and the system, as a string.
Examples
--------
::
In [2]: print sys_info()
{'commit_hash': '144fdae', # random
'commit_source': 'repository',
'ipython_path': '/home/fperez/usr/lib/python2.6/site-packages/IPython',
'ipython_version': '0.11.dev',
'os_name': 'posix',
'platform': 'Linux-2.6.35-22-generic-i686-with-Ubuntu-10.10-maverick',
'sys_executable': '/usr/bin/python',
'sys_platform': 'linux2',
'sys_version': '2.6.6 (r266:84292, Sep 15 2010, 15:52:39) \\n[GCC 4.4.5]'}
"""
return pprint.pformat(get_sys_info())
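if __name__ == '__main__':
    # Hedged demo, not part of the original module: print the collected
    # environment description when the file is run directly.
    print(sys_info())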
|
GaryHendrick/Pensieve
|
pensieve/utils/sysinfo.py
|
Python
|
gpl-3.0
| 2,648
|
from microbit import *
import neopixel
import random
num_of_pix = 24
np = neopixel.NeoPixel(pin0, num_of_pix) # create a NeoPixel object on pin0
p = 0 # set pixel pointer to 0
while True:
#Set Random RGB Values
r = random.randint(0,255)
g = random.randint(0,255)
b = random.randint(0,255)
p = random.randint(0,23) # random LED
np.clear()
    np[p] = (r,g,b) # set the chosen pixel to the random colour
np.show()
sleep(5)
|
markcarline/CoderDojo
|
Microbit/Microbit Halo/Flash Randomly - THIS IS THE BAD ONE.py
|
Python
|
gpl-3.0
| 474
|
import hashlib
import sys
import random
import os
'''
--- Part Two ---
As the door slides open, you are presented with a second door that uses a slightly more inspired security mechanism.
Clearly unimpressed by the last version (in what movie is the password decrypted in order?!), the Easter Bunny
engineers have worked out a better solution.
Instead of simply filling in the password from left to right, the hash now also indicates the position within the
password to fill. You still look for hashes that begin with five zeroes; however, now, the sixth character
represents the position (0-7), and the seventh character is the character to put in that position.
A hash result of 000001f means that f is the second character in the password. Use only the first result for each
position, and ignore invalid positions.
For example, if the Door ID is abc:
The first interesting hash is from abc3231929, which produces 0000015...; so, 5 goes in position 1: _5______.
In the previous method, 5017308 produced an interesting hash; however, it is ignored, because it specifies an
invalid position (8).
The second interesting hash is at index 5357525, which produces 000004e...; so, e goes in position
4: _5__e___.
You almost choke on your popcorn as the final character falls into place, producing the password 05ace8e3.
Given the actual Door ID and this new method, what is the password? Be extra proud of your solution if it uses a
cinematic "decrypting" animation.
Your puzzle answer was 694190cd.
Both parts of this puzzle are complete! They provide two gold stars: **
'''
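# A minimal sketch of the rule described above (illustrative, not part of the
# original solution): per the puzzle text, Door ID "abc" at index 3231929
# yields a hash beginning "0000015", so digest[5] names the position and
# digest[6] the character to place there.
_demo_digest = hashlib.md5(b'abc3231929').hexdigest()
assert _demo_digest.startswith('00000')
_demo_position, _demo_letter = _demo_digest[5], _demo_digest[6]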
class MovieDecrypter(object):
def __init__(self, prefix='uqwqemis'):
os.system('clear')
self.prefix = prefix
self.password = [[],[],[],[],[],[],[],[]]
self.solved = 0
self.solver()
print()
def solver(self):
index = 0
while self.solved <= 7:
if index % 10 == 0:
self.print_routine()
index += 1
string_to_hash = self.prefix + str(index)
hash_object = hashlib.md5(string_to_hash.encode('utf-8'))
            digest = hash_object.hexdigest()[:7]
            if digest[:5] == '00000':
                self.check_hash(digest[5], digest[6])
self.print_routine()
def check_hash(self, position, letter):
try:
if not self.password[int(position)]:
self.password[int(position)] = letter
self.solved += 1
except IndexError:
pass
except ValueError:
pass
def print_routine(self):
print_list = []
for char in self.password:
if char:
print_list.append(char)
else:
print_list.append(random.choice('!@#$%^&*()_-+={}\|/?.><,`~'))
print_list = ''.join(print_list)
sys.stdout.write("\r %s" % print_list)
sys.stdout.flush()
MovieDecrypter()
|
tetrismegistus/advent16
|
day5/day5_2.py
|
Python
|
gpl-3.0
| 3,157
|
import sys
from PySide2.QtWidgets import (
QWidget,
QSlider,
QApplication,
QStyleOptionSlider,
QSizePolicy,
QStyle,
)
from PySide2.QtCore import Qt, QRect, QSize, Signal
from PySide2.QtGui import QPainter, QPaintEvent, QMouseEvent, QPalette, QBrush
class RangeSlider(QWidget):
rangeChanged = Signal(int, int)
def __init__(self, parent=None):
super().__init__(parent)
self.first_position = 1
self.second_position = 8
self.tick_visible = False
self.opt = QStyleOptionSlider()
self.opt.minimum = 0
self.opt.maximum = 10
self.setTickPosition(QSlider.TicksAbove)
self.setTickInterval(1)
self.setSizePolicy(
QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed, QSizePolicy.Slider)
)
def setRangeLimit(self, minimum: int, maximum: int):
self.opt.minimum = minimum
self.opt.maximum = maximum
def setRange(self, start: int, end: int):
self.first_position = start
self.second_position = end
def getRange(self):
return (self.first_position, self.second_position)
def setTickPosition(self, position: QSlider.TickPosition):
self.opt.tickPosition = position
def setTickInterval(self, ti: int):
self.opt.tickInterval = ti
def setTickVisible(self, visible: bool):
self.tick_visible = visible
def paintEvent(self, event: QPaintEvent):
painter = QPainter(self)
# Draw rule
self.opt.initFrom(self)
self.opt.rect = self.rect()
self.opt.sliderPosition = 0
if self.tick_visible:
self.opt.subControls = QStyle.SC_SliderGroove | QStyle.SC_SliderTickmarks
else:
self.opt.subControls = QStyle.SC_SliderGroove
# Draw GROOVE
self.style().drawComplexControl(QStyle.CC_Slider, self.opt, painter)
# Draw INTERVAL
color = self.palette().color(QPalette.Highlight)
color.setAlpha(160)
painter.setBrush(QBrush(color))
painter.setPen(Qt.NoPen)
self.opt.sliderPosition = self.first_position
x_left_handle = (
self.style()
.subControlRect(QStyle.CC_Slider, self.opt, QStyle.SC_SliderHandle)
.right()
)
self.opt.sliderPosition = self.second_position
x_right_handle = (
self.style()
.subControlRect(QStyle.CC_Slider, self.opt, QStyle.SC_SliderHandle)
.left()
)
groove_rect = self.style().subControlRect(
QStyle.CC_Slider, self.opt, QStyle.SC_SliderGroove
)
selection = QRect(
x_left_handle,
groove_rect.y(),
x_right_handle - x_left_handle,
groove_rect.height(),
).adjusted(-1, 1, 1, -1)
painter.drawRect(selection)
# Draw first handle
self.opt.subControls = QStyle.SC_SliderHandle
self.opt.sliderPosition = self.first_position
self.style().drawComplexControl(QStyle.CC_Slider, self.opt, painter)
# Draw second handle
self.opt.sliderPosition = self.second_position
self.style().drawComplexControl(QStyle.CC_Slider, self.opt, painter)
def mousePressEvent(self, event: QMouseEvent):
self.opt.sliderPosition = self.first_position
self._first_sc = self.style().hitTestComplexControl(
QStyle.CC_Slider, self.opt, event.pos(), self
)
self.opt.sliderPosition = self.second_position
self._second_sc = self.style().hitTestComplexControl(
QStyle.CC_Slider, self.opt, event.pos(), self
)
def mouseMoveEvent(self, event: QMouseEvent):
distance = self.opt.maximum - self.opt.minimum
pos = self.style().sliderValueFromPosition(
0, distance, event.pos().x(), self.rect().width()
)
if self._first_sc == QStyle.SC_SliderHandle:
if pos <= self.second_position:
self.first_position = pos
self.update()
return
if self._second_sc == QStyle.SC_SliderHandle:
if pos >= self.first_position:
self.second_position = pos
self.update()
def mouseReleaseEvent(self, event: QMouseEvent):
self.rangeChanged.emit(self.first_position, self.second_position)
def sizeHint(self):
""" override """
SliderLength = 84
TickSpace = 5
w = SliderLength
h = self.style().pixelMetric(QStyle.PM_SliderThickness, self.opt, self)
if (
self.opt.tickPosition & QSlider.TicksAbove
or self.opt.tickPosition & QSlider.TicksBelow
):
h += TickSpace
return (
self.style()
.sizeFromContents(QStyle.CT_Slider, self.opt, QSize(w, h), self)
.expandedTo(QApplication.globalStrut())
)
if __name__ == "__main__":
app = QApplication(sys.argv)
w = RangeSlider()
w.show()
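    # Hedged usage sketch (not in the original demo): widen the limits, preset
    # a sub-range, and log the final range whenever a handle is released.
    w.setRangeLimit(0, 100)
    w.setRange(20, 80)
    w.rangeChanged.connect(lambda start, end: print("range:", start, end))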
# q = QSlider()
# q.show()
app.exec_()
|
labsquare/CuteVariant
|
cutevariant/gui/widgets/range_slider.py
|
Python
|
gpl-3.0
| 5,112
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# firewall.py
#
# Copyright © 2013-2018 Antergos
# Based on parts of ufw code © 2012 Canonical
#
# This file is part of Cnchi.
#
# Cnchi is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Cnchi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# The following additional terms are in effect as per Section 7 of the license:
#
# The preservation of all legal notices and author attributions in
# the material or in the Appropriate Legal Notices displayed
# by works containing it is required.
#
# You should have received a copy of the GNU General Public License
# along with Cnchi; If not, see <http://www.gnu.org/licenses/>.
""" Manage ufw setup """
import logging
from misc.run_cmd import chroot_call
try:
import ufw
_UFW = True
except ImportError:
_UFW = False
# When testing, no _() is available
try:
_("")
except NameError as err:
def _(message):
return message
def run(params, dest_dir="/install"):
""" Setup ufw """
cmd = ["ufw"]
cmd.extend(params)
if not _UFW:
# Could not import ufw module (missing?)
# Will call ufw command directly
try:
chroot_call(cmd, dest_dir)
except OSError as os_error:
logging.warning(os_error)
return
app_action = False
    # Note: a --force flag would shift argument positions (like --dry-run below) when detecting 'app'
idx = 1
if len(cmd) > 1 and cmd[1].lower() == "--dry-run":
idx += 1
if len(cmd) > idx and cmd[idx].lower() == "app":
app_action = True
res = ""
try:
cmd_line = ufw.frontend.parse_command(cmd)
frontend = ufw.frontend.UFWFrontend(cmd_line.dryrun)
if app_action and 'type' in cmd_line.data and cmd_line.data['type'] == 'app':
res = frontend.do_application_action(
cmd_line.action, cmd_line.data['name'])
else:
bailout = False
if cmd_line.action == "enable" and not cmd_line.force and \
not frontend.continue_under_ssh():
res = _("Aborted")
bailout = True
if not bailout:
if 'rule' in cmd_line.data:
res = frontend.do_action(
cmd_line.action,
cmd_line.data['rule'],
cmd_line.data['iptype'],
cmd_line.force)
else:
res = frontend.do_action(
cmd_line.action,
"",
"",
cmd_line.force)
except (ValueError, ufw.UFWError) as ufw_error:
logging.error(ufw_error)
# Error using ufw module
# Will call ufw command directly
try:
chroot_call(cmd, dest_dir)
except OSError as os_error:
logging.warning(os_error)
return
logging.debug(res)
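if __name__ == '__main__':
    # Hedged usage sketch, not part of Cnchi: enable the firewall inside the
    # default /install chroot, forcing past the interactive SSH-session prompt.
    run(["--force", "enable"])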
|
Antergos/Cnchi
|
src/installation/firewall.py
|
Python
|
gpl-3.0
| 3,318
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import os
import sys; sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), ".."))
from gtsam import *
def vector(vec):
return np.array([vec.coeff(i) for i in range(vec.rows())])
def matrix(mat):
return np.array([[mat.coeff(i, j) for j in range(mat.cols())] for i in range(mat.rows())])
v2 = VectorXd(2)
print (vector(v2))
p2 = Point2(v2)
p2.print_()
print (vector(p2.vector()))
v3 = VectorXd(3)
print (vector(v3))
p3 = Point3(v3)
p3.print_()
print (vector(p3.vector()))
from math import pi
r2 = Rot2(pi/4)
r2.print_()
print (r2.theta())
print (matrix(r2.matrix()))
r3 = Rot3_quaternion(1/3**.5, 1/3**.5, 0., 1/3**.5)
r3.print_()
print (vector(r3.quaternion()))
print (matrix(r3.matrix()))
p = Pose2(1., 2., -1.)
p.print_()
pp = Pose3(r3, p3)
pp.print_()
s = Diagonal_Sigmas(v3)
s.print_("")
s2 = Isotropic_Sigma(3, 4.)
s2.print_("")
k = 1
f = PriorFactor_Pose2(k, p, s)
f.print_("")
ks = symbol('x', 1)
print (symbolChr(ks), symbolIndex(ks))
fs = PriorFactor_Pose2(ks, p, s)
fs.print_("")
g = NonlinearFactorGraph()
g.add(fs)
g.print_()
i = Values()
i.insert(ks, p)
i.print_()
i.exists(k)
i.exists(ks)
ps = LevenbergMarquardtParams()
ps.getVerbosityLM()
l = LevenbergMarquardtOptimizer(g, i, ps)
r = l.optimize()
r.print_()
m = Marginals(g, r)
cov = m.marginalCovariance(ks)
print (matrix(cov))
g.resize(0)
g.print_()
i.insert(k, p3)
i.print_()
Values(i.filter_Point3()).print_()
i.clear()
i.print_()
i.exists(ks)
NonlinearEquality_Pose2(k, p).print_()
BetweenFactor_Pose2(ks, k, p, s).print_("")
c = Cal3_S2(7., -5., 0.5, .25, .35)
c.print_()
print (matrix(c.K()))
c2 = Cal3DS2(7., -5., 0.5, .25, .35, 1., 0.4, .1, .15)
c2.print_()
print (matrix(c2.K()))
print (c2.k1(), c2.k2(), c2.p1(), c2.p2())
isam = NonlinearISAM()
g.add(fs)
i.insert(ks, p)
g.add(PriorFactor_Point3(k, p3, s2))
i.insert(k, p3)
isam.update(g, i)
isam.print_()
isam.printStats()
est = isam.estimate()
est.print_()
cov = isam.marginalCovariance(k)
print (matrix(cov))
isam.saveGraph("test_graph.dot")
isam2_params = ISAM2Params()
isam2_params.getFactorization()
isam2_params.factorization = Factorization.QR
isam2_params.getFactorization()
isam2 = ISAM2(isam2_params)
isam2.update(g, i)
isam2.update()
isam2.print_()
isam2.printStats()
est = isam2.calculateEstimate()
est.print_()
est = isam2.calculateBestEstimate()
est.print_()
cov = isam2.marginalCovariance(k)
print (matrix(cov))
|
Eliasvan/Multiple-Quadrotor-SLAM
|
Work/python_libs/gtsam/test.py
|
Python
|
gpl-3.0
| 2,474
|
import os
import json
import sys
import datetime
import dateutil.parser as dp
import logging
import utils
if __name__ == "__main__":
logging.basicConfig(filename=utils.get_logfile(), level=utils.log_level)
utils.log_intro(__file__)
event_loc = utils.jsondump_dir
current_time = datetime.datetime.now()
list_of_events = os.listdir(event_loc)
old_event_ids = set()
old_event_locs = []
for event in list_of_events:
event_path = os.path.join(event_loc, event)
with open(event_path) as event_file:
ev_json = json.load(event_file)
ev_id = ev_json["id"]
if "end_time" in ev_json:
ev_time = ev_json["end_time"]
else:
ev_time = ev_json["start_time"]
result = dp.parse(ev_time)
if (result.replace(tzinfo=None) < current_time):
old_event_locs.append(event_path)
old_event_ids.add(ev_id)
for event in old_event_locs:
os.remove(event)
    # if any events were removed, log them
if old_event_ids:
logging.info("{} events removed:".format(len(old_event_ids)))
for event in old_event_ids:
logging.info(event)
|
tonisbones/thisiship
|
event_cleanup.py
|
Python
|
gpl-3.0
| 1,252
|