repo_name stringlengths 6 97 | path stringlengths 3 341 | text stringlengths 8 1.02M |
|---|---|---|
thocoo/gamma-desk | gdesk/core/history.py | import sqlite3
import time
import shutil
from pathlib import Path
try:
import msvcrt
except ModuleNotFoundError:
#Not on Linux
pass
from .conf import config
class LogDir(object):
    """Manage ``rootpath/log.N`` directories and their lock files.

    A ``cmdlog.lock`` file held open with an msvcrt read lock marks a log
    directory as in use.  Windows releases the lock when the owning process
    dies, so other instances can detect stale directories by attempting to
    delete the lock file (Windows-only mechanism).
    """

    def __init__(self, rootpath):
        self.rootpath = Path(rootpath)
        if not self.rootpath.exists():
            self.rootpath.mkdir(parents=True)

    def make_lock_file(self, lock_file):
        """
        Make an empty lock file in the logpath.

        A Windows read lock is attached to the file.
        The lock will be removed by Windows if the process dies.
        When booting another instance of Ghawk, this lock file will
        be used (the possibility to remove it) to detect whether the
        logpath is currently in use by another Ghawk process.
        """
        self.lock_file = lock_file
        self.lock_file.touch()
        self.lock_file_ptr = open(str(self.lock_file), 'r')
        msvcrt.locking(self.lock_file_ptr.fileno(), msvcrt.LK_RLCK, 1024)

    def release_lock_file(self):
        """
        Release and remove the current lock file.

        When this process dies instead, the lock is removed by Windows
        but the file itself is left behind.
        """
        msvcrt.locking(self.lock_file_ptr.fileno(), msvcrt.LK_UNLCK, 1024)
        self.lock_file_ptr.close()
        self.lock_file.unlink()

    def get_active_lock_files(self):
        """Return the lock files still held by a living process.

        Side effect: stale lock files (deletable because no process holds
        a lock on them) are removed while scanning.
        """
        active_locks = []
        for p in self.rootpath.glob('log.*'):
            lock = p / 'cmdlog.lock'
            if lock.exists():
                try:
                    lock.unlink()
                except PermissionError:
                    # Delete refused: another process still holds the lock.
                    active_locks.append(lock)
        return active_locks

    def remove_inactive_log_dirs(self):
        """Delete every log.N directory that is not locked by a process."""
        active_locks = self.get_active_lock_files()
        for p in self.rootpath.glob('log.*'):
            if (p / 'cmdlog.lock') not in active_locks:
                shutil.rmtree(str(p))

    def find_log_path(self):
        """
        Find a suitable rootpath/log.N path.

        Which logpath contains an active lock file?
        Create a new log.N if needed.

        Returns (logpath, priorlogpath); priorlogpath is a locked
        directory whose logs should be imported, or None.
        """
        priorlogpath = None
        for n in range(config['max_log_paths']):
            logpath_propose = self.rootpath / ('log.%d' % n)
            if logpath_propose.exists():
                # Is the path in use by another process?
                if not (logpath_propose / 'cmdlog.lock').exists():
                    logpath = logpath_propose
                    break
                else:
                    try:
                        (logpath_propose / 'cmdlog.lock').unlink()
                        # Successful delete of the lock file:
                        # no process was using it.
                        logpath = logpath_propose
                        break
                    # Narrowed from a bare except: unlink failures are
                    # OSError (PermissionError on a held lock).
                    except OSError:
                        # Logs from this logpath should be copied to own logs.
                        priorlogpath = logpath_propose
            else:
                logpath_propose.mkdir()
                logpath = logpath_propose
                break
        else:
            print(f'Maximum number of {config["max_log_paths"]} log dirs reached')
            print(f'Please, remove deprecated dirs')
            print(f'{self.rootpath }')
            raise SystemExit()
        self.make_lock_file(logpath / 'cmdlog.lock')
        self.logpath = logpath
        self.priorlogpath = priorlogpath
        return logpath, priorlogpath
class History(object):
    """Command and path history stored in a SQLite database.

    The database lives in ``<logdir>/ghhist.db``, or in memory when no
    logdir is given.
    """

    def __init__(self, logdir):
        self.init_server(logdir)

    def init_server(self, logdir=None):
        """Open the history database and create the tables.

        :param logdir: directory holding ghhist.db; None selects an
            in-memory database.
        """
        if logdir is None:
            self.server_file = None
            self.server = sqlite3.connect(':memory:')
        else:
            logdir = Path(logdir)
            if not logdir.exists():
                logdir.mkdir(parents=True)
            logdir = logdir.absolute()
            self.server_file = logdir / 'ghhist.db'
            #Python 3.6 doesn't understand Path
            self.server = sqlite3.connect(str(self.server_file))
        self.define_tables()

    def import_command_history(self, other_logfile):
        """Append the command history of another ghhist.db database."""
        # Parameter binding also handles paths containing brackets/quotes.
        self.server.execute('ATTACH ? AS OTHERDB', (str(other_logfile),))
        q = 'INSERT INTO CMDHIST (TIME, CMD) SELECT TIME, CMD FROM [OTHERDB].[CMDHIST]'
        self.server.execute(q)
        self.server.commit()
        self.server.execute('DETACH OTHERDB')

    def define_tables(self):
        """Create the CMDHIST and PATHHIST tables if they don't exist."""
        query = """CREATE TABLE IF NOT EXISTS CMDHIST (
            ID INTEGER PRIMARY KEY, TIME TEXT, CMD TEXT)"""
        self.server.execute(query)
        query = """CREATE TABLE IF NOT EXISTS PATHHIST (
            ID INTEGER PRIMARY KEY, CATEGORY TEXT, TIME TEXT, PATH TEXT)"""
        self.server.execute(query)
        # Migration for databases created before CATEGORY existed;
        # the ALTER fails harmlessly when the column is already there.
        query = """ALTER TABLE PATHHIST ADD COLUMN CATEGORY TEXT"""
        try:
            self.server.execute(query)
        except sqlite3.OperationalError:
            pass

    def execfetch(self, query, parameters=()):
        """Execute *query* and yield the result rows one by one."""
        cur = self.server.cursor()
        cur.execute(query, parameters)
        yield from cur

    def logcmd(self, cmd):
        """Store a command line; return the new row id."""
        now = time.strftime('%Y-%m-%d %H:%M:%S')
        query = "INSERT INTO CMDHIST (TIME, CMD) VALUES (?, ?)"
        cur = self.server.execute(query, (now, cmd,))
        self.server.commit()
        # Reset the history scroll position used by the console.
        self.skip = -1
        return cur.lastrowid

    def storepath(self, path, delete_old_entry=True, category='image'):
        """Store a recently used path; return the new row id.

        :param delete_old_entry: drop earlier entries of the same path so
            it moves to the top of the recent list.
        """
        if delete_old_entry:
            self.server.execute("DELETE FROM PATHHIST WHERE PATH = ?", (path,))
        now = time.strftime('%Y-%m-%d %H:%M:%S')
        query = "INSERT INTO PATHHIST (CATEGORY, TIME, PATH) VALUES (?, ?, ?)\n"
        cur = self.server.execute(query, (category, now, path,))
        self.server.commit()
        return cur.lastrowid

    def yield_recent_paths(self, count=20, category='image'):
        """Yield (id, time, path) rows, most recent first."""
        for row in self.execfetch("SELECT ID, TIME, PATH FROM PATHHIST WHERE CATEGORY = ? ORDER BY ID DESC LIMIT ?", (category, count)):
            yield row

    def retrievecmd(self, part='', from_id=None, distinct=True, back=True, prefix=True):
        """Return (id, cmd) of the next matching history entry.

        Returns (from_id, part) unchanged when there is no match.
        """
        query = self.make_retrieve_query(1, part, from_id, distinct, back, prefix)
        for cmdid, cmd in self.execfetch(query):
            return cmdid, cmd
        return from_id, part

    def tail(self, count=20, part='', from_id=None, distinct=False, back=True, prefix=True, reverse=True):
        """Return up to *count* matching (id, cmd) rows."""
        query = self.make_retrieve_query(count, part, from_id, distinct, back, prefix)
        cmds = list(self.execfetch(query))
        if reverse:
            return cmds[::-1]
        else:
            return cmds

    def make_retrieve_query(self, count=20, part='', from_id=None, distinct=False, back=True, prefix=True):
        """Build the SELECT statement used by retrievecmd and tail.

        :param part: search text matched against the start of the command
            (anywhere in the command when prefix is False)
        :param from_id: only ids strictly before (back) or after this one
        :param distinct: collapse duplicate commands, keeping the last id
        """
        if back:
            order = 'DESC'
            compare = '<'
        else:
            order = 'ASC'
            compare = '>'
        if from_id is None or from_id == 0:
            rng = ''
        else:
            # int() guards against SQL injection via from_id.
            rng = f" WHERE ID {compare} {int(from_id)}"
        wild = '' if prefix else '%'
        # Single quotes in the search text must be doubled for SQL.
        part = part.replace("'", "''")
        if distinct:
            query = (f"SELECT LASTID AS ID, CMD FROM (SELECT MAX(ID) AS LASTID, CMD FROM CMDHIST "
                     f"WHERE CMD LIKE '{wild}{part}%' GROUP BY CMD ORDER BY LASTID {order}){rng} LIMIT {count}")
        else:
            # Bug fix: the range filter used to be injected inside the inner
            # select as well, yielding two WHERE clauses (invalid SQL) when
            # from_id was set; it now only filters the outer select.  The
            # prefix flag is also honoured here, consistent with the
            # distinct branch.
            query = (f"SELECT ID, CMD FROM (SELECT ID, CMD FROM CMDHIST "
                     f"WHERE CMD LIKE '{wild}{part}%' ORDER BY ID {order}){rng} LIMIT {count}")
        return query
|
thocoo/gamma-desk | gdesk/panels/imgview/opencv.py | <gh_stars>0
import os
import time
import collections
from pathlib import Path
import types
from collections.abc import Iterable
import queue
import logging
import numpy as np
logger = logging.getLogger(__name__)
from ... import config, gui
if config.get('qapp', False):
#only import qt stuff if process is gui
from ...panels import CheckMenu
from ...dialogs.formlayout import fedit
else:
#fake it
CheckMenu = object
# The nested functions are still callable
# from a non qt process
import cv2
class OpenCvMenu(CheckMenu):
    """'OpenCV' menu of the image viewer.

    Adds resize, blur, filter and demosaic entries.  Each entry pops a
    fedit dialog, then runs the cv2 call on the current image (gui.vs)
    inside the selected console task and shows the result.
    """

    # Dialog option tables shared between the menu entries.  Insertion
    # order matters: fedit returns a 1-based index into the key list.
    INTERPOLATIONS = {
        "Nearest": cv2.INTER_NEAREST,
        "Linear": cv2.INTER_LINEAR,
        "Cubic": cv2.INTER_CUBIC,
        "Lanczos4": cv2.INTER_LANCZOS4,
        "Area": cv2.INTER_AREA}

    BORDERS = {
        'Reflect 101': cv2.BORDER_REFLECT_101,
        'Constant': cv2.BORDER_CONSTANT,
        'Replicate': cv2.BORDER_REPLICATE,
        'Reflect': cv2.BORDER_REFLECT,
        'Transparant': cv2.BORDER_TRANSPARENT,
        'Isolated': cv2.BORDER_ISOLATED}

    # bilateralFilter additionally supports the Wrap border mode.
    BILATERAL_BORDERS = {
        'Reflect 101': cv2.BORDER_REFLECT_101,
        'Constant': cv2.BORDER_CONSTANT,
        'Replicate': cv2.BORDER_REPLICATE,
        'Reflect': cv2.BORDER_REFLECT,
        'Wrap': cv2.BORDER_WRAP,
        'Transparant': cv2.BORDER_TRANSPARENT,
        'Isolated': cv2.BORDER_ISOLATED}

    BAYER_CONFIGS = {
        "BG": cv2.COLOR_BayerBG2BGR,
        "GB": cv2.COLOR_BayerGB2BGR,
        "RG": cv2.COLOR_BayerRG2BGR,
        "GR": cv2.COLOR_BayerGR2BGR}

    def __init__(self, name, parentMenu=None, basePanel=None):
        super().__init__(name, parentMenu)
        #Process
        basePanel.addMenuItem(self, 'Resize', self.image_resize,
            statusTip="Resize the image", icon='resize_picture.png')
        basePanel.addMenuItem(self, 'Box Blur', self.box_blur,
            statusTip="Blur the image using a box sized kernel", icon='blur.png')
        basePanel.addMenuItem(self, 'Gaussian Blur', self.gaussian_blur,
            statusTip="Blur using guassian kernel", icon='blur.png')
        basePanel.addMenuItem(self, 'Median Blur', self.median_blur,
            statusTip="Blur using median filter", icon='blur.png')
        basePanel.addMenuItem(self, 'Bilateral Filter', self.bilateral,
            statusTip="Apply Bilateral Filter")
        basePanel.addMenuItem(self, 'Laplacian', self.laplacian,
            statusTip="Calculates the Laplacian")
        basePanel.addMenuItem(self, 'Box Filter', self.box,
            statusTip="The sum of the pixel values overlapping the filter")
        basePanel.addMenuItem(self, 'Square Box Filter', self.sqrbox,
            statusTip="The sum of squares of the pixel values overlapping the filter")
        basePanel.addMenuItem(self, 'Demosaic', self.demosaic,
            statusTip="Demosaicing using bilinear interpolation", icon='things_digital.png')

    def _console_call(self, func, args):
        """Run *func(*args)* inside the currently selected console task.

        The nested functions stay callable from a non-Qt process.
        """
        panel = gui.qapp.panels.selected('console')
        panel.task.call_func(func, args=args)

    def image_resize(self):
        """Ask for a new size and interpolation, then resize the image."""
        shape = gui.vs.shape[:2]
        interpolkeys = list(self.INTERPOLATIONS.keys())
        form = [("width", shape[1]), ("height", shape[0]), ("interpolation", [5] + interpolkeys)]
        results = fedit(form, title='Resize')
        if results is None: return
        width, height, interpolind = results
        interpol = self.INTERPOLATIONS[interpolkeys[interpolind-1]]

        def console_run(width, height, interpol):
            array = cv2.resize(gui.vs, (width, height), interpolation=interpol)
            gui.show(array)

        self._console_call(console_run, (width, height, interpol))

    def box_blur(self):
        """Blur with a square box kernel of the chosen size."""
        form = [("Kernel Size", 15)]
        results = fedit(form, title='Box Blur')
        if results is None: return
        ksize = results[0]

        def console_run(ksize):
            array = cv2.blur(gui.vs, ksize=(ksize, ksize))
            gui.show(array)

        self._console_call(console_run, (ksize,))

    def gaussian_blur(self):
        """Blur with a Gaussian kernel of the chosen size."""
        form = [("Kernel Size", 15)]
        results = fedit(form, title='Guassian Blur')
        if results is None: return
        ksize = results[0]

        def console_run(ksize):
            array = cv2.GaussianBlur(gui.vs, ksize=(ksize, ksize), sigmaX=0)
            gui.show(array)

        self._console_call(console_run, (ksize,))

    def median_blur(self):
        """Blur with a median filter of the chosen aperture."""
        form = [("Kernel Size", 15)]
        results = fedit(form, title='Median Blur')
        if results is None: return
        ksize = results[0]

        def console_run(ksize):
            array = cv2.medianBlur(gui.vs, ksize=ksize)
            gui.show(array)

        self._console_call(console_run, (ksize,))

    def bilateral(self):
        """Apply an edge-preserving bilateral filter."""
        border_keys = list(self.BILATERAL_BORDERS.keys())
        form = [
            ("Diameter", 10),
            ("Sigma Color", 10.0),
            ("Sigma Space", 10.0),
            ("Border", [1] + border_keys)]
        results = fedit(form, title='Bilateral Filter')
        if results is None: return
        d, sigma_color, sigma_space, border_index = results
        border = self.BILATERAL_BORDERS[border_keys[border_index - 1]]

        def console_run(d, sigma_color, sigma_space, border):
            array = cv2.bilateralFilter(gui.vs, d, sigma_color, sigma_space, border)
            gui.show(array)

        self._console_call(console_run, (d, sigma_color, sigma_space, border))

    def laplacian(self):
        """Calculate the Laplacian of the image."""
        border_keys = list(self.BORDERS.keys())
        form = [
            ("Desired Depth", 5),
            ("Aperture Size", 1),
            ("Scale", 1.0),
            ("Delta", 1.0),
            ("Border", [1] + border_keys)]
        results = fedit(form, title='Laplacian')
        if results is None: return
        ddepth, ksize, scale, delta, border_index = results
        border = self.BORDERS[border_keys[border_index - 1]]

        def console_run(ddepth, ksize, scale, delta, border):
            array = cv2.Laplacian(gui.vs, ddepth, ksize=ksize, scale=scale, delta=delta, borderType=border)
            gui.show(array)

        self._console_call(console_run, (ddepth, ksize, scale, delta, border))

    def box(self):
        """Apply a (optionally normalized) box filter."""
        border_keys = list(self.BORDERS.keys())
        form = [
            ("Depth", -1),
            ("Kernel Size", 5),
            ("Normalize", False),
            ("Border", [1] + border_keys)]
        results = fedit(form, title='Box Filter')
        if results is None: return
        ddepth, ksize, normalize, border_index = results
        border = self.BORDERS[border_keys[border_index - 1]]

        def console_run(ddepth, ksize, normalize, border):
            array = cv2.boxFilter(gui.vs, ddepth=ddepth, ksize=(ksize, ksize), normalize=normalize, borderType=border)
            gui.show(array)

        self._console_call(console_run, (ddepth, ksize, normalize, border))

    def sqrbox(self):
        """Apply a square box filter (sum of squared pixel values)."""
        border_keys = list(self.BORDERS.keys())
        form = [
            ("Depth", -1),
            ("Kernel Size", 5),
            ("Normalize", False),
            ("Border", [1] + border_keys)]
        results = fedit(form, title='Square Box Filter')
        if results is None: return
        ddepth, ksize, normalize, border_index = results
        border = self.BORDERS[border_keys[border_index - 1]]

        def console_run(ddepth, ksize, normalize, border):
            array = cv2.sqrBoxFilter(gui.vs, ddepth=ddepth, ksize=(ksize, ksize), normalize=normalize, borderType=border)
            gui.show(array)

        self._console_call(console_run, (ddepth, ksize, normalize, border))

    def demosaic(self):
        """Demosaic a Bayer-pattern image using bilinear interpolation."""
        bayerconfigkeys = list(self.BAYER_CONFIGS.keys())
        form = [("Bayer Config", [1] + bayerconfigkeys)]
        results = fedit(form, title='Demosaic')
        if results is None: return
        bayerconfigind = results[0]
        bayerconfig = self.BAYER_CONFIGS[bayerconfigkeys[bayerconfigind-1]]

        def console_run(bayerconfig):
            array = cv2.demosaicing(gui.vs, code=bayerconfig)
            gui.show(array)

        self._console_call(console_run, (bayerconfig,))
|
thocoo/gamma-desk | gdesk/panels/base.py | import pathlib
import random
import logging
from collections import OrderedDict
from qtpy import QtCore, QtGui, QtWidgets
from qtpy.QtCore import Qt, Signal
from qtpy.QtGui import QKeySequence, QIcon, QPixmap, QCursor
from qtpy.QtWidgets import (QApplication, QWidget, QAction, QMainWindow, QDockWidget, QToolButton,
QMdiSubWindow, QDockWidget, QShortcut, QPushButton, QMenu, QRadioButton, QButtonGroup)
from .. import gui, config
from ..gcore.utils import getMenuTrace, relax_menu_text, relax_menu_trace
logger = logging.getLogger(__name__)
respath = pathlib.Path(config['respath'])
class FuncToPanel(object):
    """Freeze a callable together with its positional arguments.

    Used to connect Qt signals (which pass their own arguments) to a
    call with predetermined arguments.
    """

    def __init__(self, func, *args):
        self.func = func
        self.args = args

    def __call__(self):
        # Also return the result: Qt signal slots ignore it, but direct
        # callers can use it (the original discarded it).
        return self.func(*self.args)
class CheckMenu(QtWidgets.QMenu):
    """QMenu that refreshes the checked/enabled state of its actions from
    registered callables every time it is shown.
    """

    def __init__(self, name, parent=None):
        super().__init__(name, parent)
        if not parent is None:
            parent.addMenu(self)
        # (action, callable) pairs polled on every showEvent.
        self.action_checked_calls = []
        self.action_enable_calls = []

    def showEvent(self, event):
        for action, checked_call in self.action_checked_calls:
            action.setChecked(checked_call())
        for action, enable_call in self.action_enable_calls:
            action.setEnabled(enable_call())

    def addAction(self, *args, **kwargs):
        """Add an action to the menu.

        The extra keyword arguments ``checkcall`` and ``enablecall``
        register callables that drive the action's checked and enabled
        state on every show.

        Bug fix: these custom keywords are now stripped before delegating
        to QMenu.addAction, which does not accept them.
        """
        checkcall = kwargs.pop('checkcall', None)
        enablecall = kwargs.pop('enablecall', None)
        action = super().addAction(*args, **kwargs)
        # Some Qt bindings return None when an existing QAction is added.
        if action is None: action = args[0]
        if checkcall is not None:
            action.setCheckable(True)
            self.action_checked_calls.append((action, checkcall))
        if enablecall is not None:
            self.action_enable_calls.append((action, enablecall))
        return action
class PanelsMenu(QMenu):
    """Menu listing the panels of the given categories.

    Each entry is a checkable action; triggering it calls
    ``func(category, panid)`` (used to toggle panel bindings).
    """

    def __init__(self, parent, name, categories, func):
        super().__init__(name, parent)
        qapp = QApplication.instance()
        self.panels = qapp.panels
        self.categories = categories
        # func is a bound method; __self__ is the panel that owns it.
        self.panel = func.__self__
        self.func = func

    def showEvent(self, event):
        # Rebuild the entries each time the menu pops up, so newly
        # created or closed panels are reflected.
        self.initactions()

    def initactions(self):
        """Clear and repopulate the menu with one action per panel."""
        self.clear()
        # NOTE(review): 'actions' shadows QWidget.actions(); the list also
        # keeps the QAction objects alive while the menu is shown.
        self.actions = []
        for category in self.categories:
            if not category in self.panels.keys(): continue
            panels = self.panels[category]
            keys = sorted(panels.keys())
            for panid in keys:
                # Skip the panel that owns this menu.
                if category == self.parent().category and panid == self.parent().panid: continue
                panel = panels[panid]
                action = QAction(panel.windowTitle())
                action.setCheckable(True)
                # Checked when the owning panel is bound to this panel.
                action.setChecked((category, panel.panid) in self.panel.bindings)
                action.triggered.connect(FuncToPanel(self.func, category, panel.panid))
                self.addAction(action)
                self.actions.append(action)
            self.addSeparator()
class MyStatusBar(QWidget):
    """Compact status-bar replacement.

    A fixed-height, grey, horizontal strip to which widgets can be added
    and from which they can be removed.
    """

    def __init__(self, parent=None):
        super().__init__(parent)
        hboxlayout = QtWidgets.QHBoxLayout()
        hboxlayout.setContentsMargins(0, 0, 0, 0)
        self.setLayout(hboxlayout)
        self.setMinimumWidth(100)
        # Height follows the current font, plus a 2 pixel margin.
        fontmetric = QtGui.QFontMetrics(self.font())
        fontheight = fontmetric.height()
        self.setFixedHeight(fontheight + 2)
        # Light grey background.
        pal = self.palette()
        pal.setColor(QtGui.QPalette.Background, QtGui.QColor(192,192,192))
        self.setPalette(pal)
        self.setAutoFillBackground(True)

    def addWidget(self, widget, stretch=0, alignment=None):
        """Append *widget* to the strip with optional stretch/alignment."""
        widget.setParent(self)
        if not alignment is None:
            self.layout().addWidget(widget, stretch, alignment)
        else:
            self.layout().addWidget(widget, stretch)

    def removeWidget(self, widget):
        """Detach *widget* from the strip and its layout."""
        widget.setParent(None)
        self.layout().removeWidget(widget)
def thisPanel(widget):
    """Walk up the widget hierarchy to the BasePanel owning *widget*.

    A widget with a 'container' attribute resolves through the panel
    registry instead.  Returns None when no owner is found.
    """
    node = widget
    while node is not None and not isinstance(node, BasePanel):
        if hasattr(node, 'container'):
            cat, panid = node.container.panelIds[0]
            return gui.qapp.panels[cat][panid]
        if hasattr(node, 'parentWidget'):
            # Used by matplotlib widgets
            step_up = node.parentWidget
        else:
            step_up = node.parent
        node = step_up()
    return node

def selectThisPanel(widget):
    """Select the panel that owns *widget*."""
    owner = thisPanel(widget)
    owner.select()
class BasePanel(QMainWindow):
    """Base class for all dockable Gamma Desk panels.

    A panel registers itself in ``qapp.panels[category][panid]`` on
    construction, and keeps a list of (category, panid) bindings to
    other panels that its actions should target.
    """

    # Overridden by subclasses; used by userPanelClasses() grouping.
    panelCategory = None
    panelShortName = None
    # Subclasses with userVisible False are hidden from the user menus.
    userVisible = True

    def __init__(self, parent, panid=None, category='console'):
        super().__init__()
        self.baseWindowName = None
        self.category = category
        #Also creates the category if not exists
        id_exists = self.qapp.panels.id_exists(self.category, panid)
        if panid is None: panid = self.qapp.panels.new_id(self.category)
        self.panid = panid
        # Register this instance in the global panel registry.
        self.qapp.panels[self.category][self.panid] = self
        # (category, panid) tuples of panels this panel is bound to.
        self.bindings = []
        self.long_title = self.short_title
        self.setWindowTitle(self.short_title)
        self.setFocusPolicy(Qt.StrongFocus)
        #self.setAttribute(Qt.WA_DeleteOnClose, True)
        self.use_global_menu = True
        #self.myMainWidget = MyMainWidget()
        #super().setCentralWidget(self.myMainWidget)
        # NOTE(review): selIcon is built here but never attached to a
        # widget — presumably leftover; confirm before removing.
        selIcon = QIcon()
        selIcon.addFile(str(respath / 'icons' / 'mark_16px.png'), state=QIcon.On)
        selIcon.addFile(str(respath / 'icons' / 'unmark_16px.png'), state=QIcon.Off)
        self.setAutoFillBackground(True)
        self.statusBar().setSizeGripEnabled(False)

    @property
    def qapp(self):
        """The running QApplication instance."""
        return QApplication.instance()

    @property
    def short_title(self):
        """Title of the form 'category#panid'."""
        return f'{self.category}#{self.panid}'

    @property
    def baseWindow(self):
        """Top-level window holding this panel's dock container, or None."""
        #return self.qapp.windows.get(self.baseWindowName, None)
        container = self.get_container()
        if not container is None:
            return container.parent()
        else:
            return None

    def duplicate(self, floating=False):
        """Create a new panel of the same class; optionally floating."""
        newpanel = gui.qapp.panels.new_panel(type(self), None, None, floating=floating)
        return newpanel

    def addMenuItem(self, menu, text, triggered, checkcall=None, enabled=True, statusTip=None, icon=None, enablecall=None):
        """Create a QAction named *text* on *menu*, wired to *triggered*.

        checkcall/enablecall are callables polled by CheckMenu to update
        the action's checked/enabled state.  A key sequence configured
        for this menu trace is attached as a widget-context shortcut.
        Returns the created action.
        """
        menuTrace = getMenuTrace(menu)
        menuTrace.append(text)
        catMenyShortCuts = self.qapp.menuCallShortCuts.get(self.category, {})
        keySequence = catMenyShortCuts.get(relax_menu_trace(menuTrace), None)
        # if not keySequence is None:
        #     Note that the \t will place the keySequence in a nice second column
        #     Like a real shortcut
        #     text = f'{text}\t[{keySequence}]'
        action = QAction(text, self, enabled=enabled, statusTip=statusTip)
        #action = TriggerlessShortcutAction(text, self, enabled=enabled, statusTip=statusTip)
        action.triggered.connect(triggered)
        if not keySequence is None:
            action.setShortcut(QtGui.QKeySequence(keySequence))
            #Disable the shortcut,
            #it is handled by the qapp.panelsDialog window
            #Shortcuts are send to the correct selected panel
            action.setShortcutContext(Qt.WidgetShortcut)
        if isinstance(icon, str):
            icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / icon))
        if not icon is None:
            action.setIcon(icon)
        menu.addAction(action, checkcall=checkcall, enablecall=enablecall)
        return action

    def addBaseMenu(self, bindCategories=[]):
        """Create the standard 'bind to' menu and hide the local menu bar.

        NOTE(review): mutable default argument; harmless here because the
        list is never mutated.
        """
        self.bindMenu = PanelsMenu(self, 'bind to', bindCategories, self.toggleBindingTo)
        self.menuBar().hide()

    def toggleBindingTo(self, category, panid):
        """Add the binding, or remove it when it already exists."""
        added = self.addBindingTo(category, panid)
        if added is None:
            self.removeBindingTo(category, panid)

    def addBindingTo(self, category, panid):
        """Bind this panel to (category, panid).

        Returns the target panel, or None if the binding already existed.
        """
        try:
            index = self.bindings.index((category, panid))
            return None
        except ValueError:
            pass
        targetPanel = self.qapp.panels[category][panid]
        self.bindings.append((category, panid))
        return targetPanel

    def removeBindingTo(self, category, panid):
        """Remove the binding to (category, panid).

        Returns the target panel, or None if there was no such binding.
        """
        try:
            index = self.bindings.index((category, panid))
        except ValueError:
            logger.debug(f'{category, panid} not in to the bindings')
            return None
        targetPanel = self.qapp.panels[category][panid]
        self.bindings.pop(index)
        return targetPanel

    def panIdsOfBounded(self, category):
        """Panel ids of all bindings in the given category."""
        return [bindpanid for bindcat, bindpanid in self.bindings if bindcat == category]

    def bindedPanel(self, category, pos=0):
        """The pos-th bound panel of *category*, or None if none bound."""
        panids = self.panIdsOfBounded(category)
        if len(panids) == 0:
            return None
        return self.qapp.panels[category][panids[pos]]

    def targetPanels(self, category):
        """Panels of *category* that this panel's actions should target.

        Falls back to the currently selected panel of the category when
        nothing is bound.  NOTE(review): implicitly returns None when
        nothing is bound and nothing is selected.
        """
        panids = self.panIdsOfBounded(category)
        if len(panids) == 0:
            panel = gui.qapp.panels.selected(category)
            if not panel is None: return [panel]
        else:
            return [self.qapp.panels[category][panid] for panid in panids]

    def select(self):
        """Make this panel the selected one of its category.

        Updates the bind button group and the active menu bar.
        Returns whether the panel was already selected.
        """
        thisWasSelected = self.isSelected()
        self.qapp.panels.move_to_end(self)
        if self.category in self.qapp.panels.ezm.bindGroups.keys():
            bindButton = self.qapp.panels.ezm.bindGroups[self.category].button(self.panid)
            if not bindButton is None:
                bindButton.setChecked(True)
        if not self.use_global_menu:
            self.menuBar().show()
        else:
            self.push_menu_to_main()
        baseWindow = self.baseWindow
        if not baseWindow is None:
            baseWindow.setPanelInfo(self)
        return thisWasSelected

    def isSelected(self):
        """True if this panel is the selected panel of its category."""
        return self.qapp.panels.selected(self.category).panid == self.panid

    def push_menu_to_main(self):
        """Move this panel's menus to the main window's menu bar."""
        mainWindow = self.baseWindow
        if mainWindow is None: return
        mainWindow.remove_panel_menu()
        for child in self.menuBar().children():
            if isinstance(child, QMenu) and child.title() != '':
                mainWindow.menuBar().addMenu(child)
        self.menuBar().hide()

    def pull_menu_from_main(self):
        """Take the menus back from the main window to the local bar."""
        mainWindow = self.baseWindow
        if mainWindow is None: return
        menubar = self.menuBar()
        mainWindow.remove_panel_menu()
        menubar.show()

    def toggleMenu(self):
        """Switch between the global and the panel-local menu bar."""
        self.use_global_menu = not self.use_global_menu
        self.select()

    def toggleStatusBar(self):
        """Show or hide the panel's status bar."""
        statusBar = self.statusBar()
        if statusBar.isVisible():
            statusBar.hide()
        else:
            statusBar.show()

    def detach(self):
        """Detach the panel from its parent widget.

        NOTE(review): statusBar is fetched but unused here.
        """
        statusBar = self.statusBar()
        self.setParent(None)
        if self.use_global_menu:
            self.pull_menu_from_main()

    def show_me(self):
        """Raise and activate the window containing this panel."""
        container = self.get_container()
        if not container is None:
            window = container.parent()
            if window.isMinimized():
                window.showNormal()
            else:
                window.show()
            window.raise_()
            gui.qapp.setActiveWindow(window)
        else:
            self.show()

    def unregister(self):
        """Remove this panel from the global panel registry."""
        self.qapp.panels[self.category].pop(self.panid)

    @classmethod
    def userPanelClasses(cls):
        """Collect subclasses recursively.

        Called on BasePanel itself: returns a dict mapping panelCategory
        to the list of classes.  Called on a subclass: returns a flat
        list of (panelCategory, class) tuples.
        """
        l = []
        for SubClass in cls.__subclasses__():
            l.extend(SubClass.userPanelClasses())
            l.append((SubClass.panelCategory, SubClass))
        if cls is BasePanel:
            result = dict()
            for category, Cls in l:
                if not category in result.keys():
                    result[category] = []
                result[category].append(Cls)
            return result
        else:
            return l

    def get_container(self):
        """The DockContainer ancestor of this panel, or None."""
        from ..ezdock.ezdock import DockContainer
        candidate = self
        while not (candidate is None or isinstance(candidate, DockContainer)):
            candidate = candidate.parent()
        return candidate

    def mousePressEvent(self, event):
        # Clicking anywhere in the panel selects it.
        self.select()

    def close_panel(self):
        """Detach, unbind, unregister and delete this panel and window."""
        container = self.get_container()
        window, laystruct = container.detach('panel', self.category, self.panid, False)
        self.qapp.processEvents()
        self.qapp.panels.removeBindingsTo(self.category, self.panid)
        self.unregister()
        window.unregister()
        self.deleteLater()
        window.deleteLater()
|
thocoo/gamma-desk | gdesk/panels/imgview/__init__.py | from ... import config
if config.get('qapp', False):
from .imgview import ImageViewer, ImageProfilePanel
from .proxy import ImageGuiProxy |
thocoo/gamma-desk | gdesk/utils/shared.py | #
# SharedArray can be initialized in Parent or Child process
# Uses anonymous mmap with tagnames
# A SharedArray can be send of multiprocessing queues
# Pickles only the mmap, tagname, sizes, ... but not the buffer.
import numpy as np
import os, time
import mmap, _winapi
import tempfile
class SharedArray:
    """
    Using shared memory.
    If pickled to another process, the memory is not copied.

    Windows-only: relies on anonymous mmaps with tagnames and _winapi
    to detect name collisions.  Pickling transfers only (name, dtype,
    shape); the receiving process reopens the same mmap by tagname.
    """
    #part of code copied from multiprocessing.heap module
    _rand = tempfile._RandomNameSequence()

    def __init__(self, shape, dtype=float):
        #convert dtype string to real dtype
        self.dtype = np.dtype(dtype)
        self.shape = shape
        # Probe up to 100 candidate tagnames until a fresh map is created.
        for i in range(100):
            name = 'pym-%d-%s' % (os.getpid(), next(self._rand))
            buf = mmap.mmap(-1, self.bytesize, name)
            if _winapi.GetLastError() == 0:
                break
            # We have reopened a preexisting mmap: try another name.
            buf.close()
        else:
            raise FileExistsError('Cannot find name for new mmap')
        self.name = name
        self._ndarray = None  # lazily created numpy view (see ndarray)
        self._bindex = None
        self.base = buf

    @staticmethod
    def from_ndarray(array):
        """Create a SharedArray holding a copy of *array*."""
        sa = SharedArray(array.shape, array.dtype)
        sa.ndarray[:] = array
        return sa

    @property
    def size(self):
        """Total number of elements."""
        return np.multiply.reduce(self.shape, dtype='int64')

    @property
    def ndim(self):
        """Number of dimensions."""
        return len(self.shape)

    @property
    def bytesize(self):
        """Size of the buffer in bytes."""
        return self.size * self.dtype.itemsize

    def __getstate__(self):
        # Pickle only the metadata; the buffer itself is shared, not copied.
        return (self.name, self.dtype, self.shape)

    def __setstate__(self, state):
        self.name, self.dtype, self.shape = self._state = state
        # Reopen the existing mmap by tagname in the receiving process.
        self.base = mmap.mmap(-1, self.bytesize, self.name)
        assert _winapi.GetLastError() == _winapi.ERROR_ALREADY_EXISTS
        self._ndarray = None

    def _as_ndarray(self):
        """Wrap the shared buffer in a numpy array (zero-copy)."""
        arr = np.frombuffer(self.base, self.dtype, self.size).reshape(self.shape)
        return arr

    @property
    def ndarray(self):
        """Cached numpy view on the shared buffer."""
        if self._ndarray is None:
            self._ndarray = self._as_ndarray()
        return self._ndarray

    def __getitem__(self, slices):
        return self.ndarray.__getitem__(slices)

    def __setitem__(self, slices, other):
        return self.ndarray.__setitem__(slices, other)

    def __str__(self):
        return self.ndarray.__str__()

    def __repr__(self):
        # Present as a SharedArray instead of a plain ndarray.
        s = self.ndarray.__repr__()
        s = s.replace('array', 'SharedArray')
        return s

    def __dir__(self):
        d = dir(self.ndarray)
        d.extend(self.__dict__.keys())
        return d

    def __getattr__(self, attr):
        # Delegate unknown attributes (mean, sum, ...) to the ndarray view.
        return getattr(self.ndarray, attr)
|
thocoo/gamma-desk | gdesk/test/__init__.py | <filename>gdesk/test/__init__.py
import unittest
class GammaDeskSuite(unittest.TestCase):
panid = 1
def test_screenstate_1(self):
from gdesk import gui
from pylab import plt
from pathlib import Path
gui.load_layout('console')
samplePath = Path(r'./samples')
gui.img.select(1)
gui.img.open(samplePath / 'kodim05.png')
gui.img.zoom_fit()
plt.plot(gui.vs.mean(2).mean(1))
plt.title('Column means of image 1')
plt.xlabel('Column Number')
plt.ylabel('Mean')
plt.grid()
plt.show()
gui.img.select(2)
gui.img.open(samplePath / 'kodim23.png')
gui.img.zoom_full()
plt.figure()
plt.plot(gui.vs.mean(2).mean(0))
plt.title('Row means of image 2')
plt.xlabel('Row Number')
plt.ylabel('Mean')
plt.grid()
plt.show()
def test_small_loop_and_print(self):
import time
import sys
from gdesk import gui
gui.clc()
expectedOutput = ''
for i in range(42):
line = f'i = {i}'
print(line)
expectedOutput += f'{line}\n'
time.sleep(0.01)
sys.stdout.flush()
text = gui.console.text()
assert expectedOutput == text
def test_menu_file(self):
from gdesk import gui
gui.load_layout('image, levels & console')
gui.img.select(1)
gui.img.menu(['File', 'New...'], 1920, 1080, 4, 'uint8', 127)
self.assertEqual(gui.vs.shape[0], 1080)
self.assertEqual(gui.vs.shape[1], 1920)
self.assertEqual(gui.vs.shape[2], 4)
self.assertEqual(gui.vs.dtype, 'uint8')
self.assertEqual(gui.vs.min(), 127)
self.assertEqual(gui.vs.max(), 127)
gui.img.menu(['File', 'New...'], 3840, 2160, 1, 'uint16', 2**16-1)
self.assertEqual(gui.vs.shape[0], 2160)
self.assertEqual(gui.vs.shape[1], 3840)
self.assertEqual(len(gui.vs.shape), 2)
self.assertEqual(gui.vs.dtype, 'uint16')
self.assertEqual(gui.vs.min(), 2**16-1)
self.assertEqual(gui.vs.max(), 2**16-1)
gui.img.menu(['File', 'New...'], 640, 480, 3, 'double', 0.5)
self.assertEqual(gui.vs.shape[0], 480)
self.assertEqual(gui.vs.shape[1], 640)
self.assertEqual(gui.vs.shape[2], 3)
self.assertEqual(gui.vs.dtype, 'double')
self.assertEqual(gui.vs.min(), 0.5)
self.assertEqual(gui.vs.max(), 0.5)
#https://imageio.readthedocs.io/en/stable/standardimages.html
gui.img.menu(['File', 'Open Image...'], 'imageio:astronaut.png')
gui.img.menu(['File', 'Open Image...'], 'imageio:wood.jpg')
gui.img.menu(['File', 'Open Image...'], 'imageio:camera.png')
gui.img.cmap('turbo')
def test_menu_canvas(self):
from gdesk import gui
import scipy.misc
arr = scipy.misc.face()
height, width = arr.shape[:2]
gui.load_layout('image, levels & console')
gui.img.select(1)
gui.show(arr)
gui.img.zoom_fit()
gui.menu_trigger('image', None, ['Canvas', 'Flip Horizontal'])
gui.menu_trigger('image', None, ['Canvas', 'Flip Vertical'])
gui.menu_trigger('image', None, ['Canvas', 'Rotate Left 90'])
gui.menu_trigger('image', None, ['Canvas', 'Rotate Right 90'])
gui.menu_trigger('image', None, ['Canvas', 'Rotate 180'])
gui.menu_trigger('image', None, ['Canvas', 'Resize Canvas...'], width+64, height+32)
gui.menu_trigger('image', None, ['Canvas', 'Resize Canvas...'], width, height)
assert (arr == gui.vs).all()
def test_menu_view(self):
gui.load_layout('image, levels & console')
gui.img.select(1)
gui.img.menu(['File', 'Open Image...'], 'imageio:camera.png')
gui.img.menu(['View', 'Refresh'])
gui.img.menu(['View', 'Zoom In'])
gui.img.menu(['View', 'Zoom Out'])
gui.img.menu(['View', 'Zoom', 'Zoom 100%'])
gui.img.menu(['View', 'Zoom', 'Zoom Fit'])
gui.img.menu(['View', 'Zoom', 'Zoom Full'])
gui.img.menu(['View', 'Zoom', 'Zoom Auto'])
gui.img.menu(['View', 'Zoom', 'Zoom Exact...'], 50)
gui.img.menu(['View', 'Default Offset & Gain'])
gui.img.menu(['View', 'Set Current as Default'])
gui.img.menu(['View', 'Offset & Gain...'], 0, 1, 2, 'grey')
gui.img.menu(['View', 'Black & White...'], 10, 245, 'turbo')
gui.img.menu(['View', 'Grey & Gain...'], 127, 4, 'jet')
gui.img.menu(['View', 'Gain to Min-Max'])
gui.img.menu(['View', 'Gain to Sigma', 'Gain to Sigma 1'])
gui.img.menu(['View', 'Gain to Sigma', 'Gain to Sigma 2'])
gui.img.menu(['View', 'Gain to Sigma', 'Gain to Sigma 3'])
gui.img.menu(['View', 'HQ Zoom Out'])
gui.img.menu(['View', 'Bind', 'Bind All Image Viewers'])
gui.img.menu(['View', 'Bind', 'Unbind All Image Viewers'])
gui.img.menu(['View', 'Bind', 'Absolute Zoom Link'])
gui.img.menu(['View', 'Colormap...'], 'grey')
gui.img.menu(['View', 'Colormap...'], 'clip')
gui.img.menu(['View', 'Colormap...'], 'turbo')
gui.img.menu(['View', 'Colormap...'], 'jet')
gui.img.menu(['View', 'Colormap...'], 'invert')
gui.img.menu(['View', 'Colormap...'], 'hot')
gui.img.menu(['View', 'Colormap...'], 'cold')
gui.img.menu(['View', 'Background Color...'], 58, 110, 165)
gui.img.menu(['View', 'Selection Color...'], 255, 0, 0)
gui.img.menu(['View', 'Value Format', 'Decimal'])
gui.img.menu(['View', 'Value Format', 'Hex'])
gui.img.menu(['View', 'Value Format', 'Binary'])
gui.img.menu(['View', 'Show/Hide Profiles'])
def test_menu_image_1(self):
from gdesk import gui
import imageio
arr = imageio.imread('imageio:astronaut.png')
gui.load_layout('image, levels & console')
gui.img.select(1)
imgpanid = gui.show(arr)
gui.img.zoom_full()
gui.menu_trigger('image', imgpanid, ['Image', 'Invert'])
gui.menu_trigger('image', imgpanid, ['Image', 'Swap RGB | BGR'])
gui.menu_trigger('image', imgpanid, ['Image', 'Adjust Lighting...'], 255, -1)
gui.menu_trigger('image', imgpanid, ['Image', 'Swap RGB | BGR'])
assert (arr == gui.vs).all()
def test_menu_image_2(self):
from gdesk import gui
import imageio
arr = imageio.imread('imageio:astronaut.png')
gui.load_layout('image, levels & console')
gui.img.select(1)
gui.show(arr)
gui.menu_trigger('image', None, ['Image', 'to Monochroom'])
gui.menu_trigger('image', None, ['Edit', 'Show Prior Image'])
gui.menu_trigger('image', None, ['Image', 'to Photometric Monochroom'])
gui.menu_trigger('image', None, ['Edit', 'Show Prior Image'])
assert (arr == gui.vs).all()
def test_code_3(self):
    """Plot column and row means of the current image and ask the user to verify.

    Interactive check: plots are shown via matplotlib and the tester
    confirms visually.
    """
    from gdesk import gui
    from pylab import plt

    # Column means of the currently shown image (gui.vs).
    plt.plot(gui.vs.mean(1))
    plt.grid(True)
    plt.title('Column Means')
    plt.show()

    plt.figure()
    # Row means.
    plt.plot(gui.vs.mean(0))
    plt.grid(True)
    plt.title('Row Means')
    plt.show()

    # NOTE(review): answer is currently unused — no assert on it; confirm intent.
    answer = gui.question('Looks everything OK?')
    plt.close('all')
    # presumably GammaDeskSuite.panid was stored by an earlier test — TODO confirm
    gui.menu_trigger('image', GammaDeskSuite.panid, ['Edit', 'Show Prior Image'])
def test_colors(self):
    """Print an ANSI-colored banner and ask the user to confirm it renders well."""
    import sys
    from gdesk import gui
    from pylab import plt

    gui.clc()
    plt.close('all')
    gui.load_layout('console')

    # ASCII-art banner; raw strings keep the backslashes literal.
    banner = r" _____ ______ _ " + "\n" \
        r"| __ \ | _ \ | | " + "\n" \
        r"| | \/ __ _ _ __ ___ _ __ ___ __ _ | | | | ___ ___ | | __" + "\n" \
        r"| | __ / _` || '_ ` _ \ | '_ ` _ \ / _` | | | | | / _ \/ __|| |/ /" + "\n" \
        r"| |_\ \| (_| || | | | | || | | | | || (_| | | |/ / | __/\__ \| < " + "\n" \
        r" \____/ \__,_||_| |_| |_||_| |_| |_| \__,_| |___/ \___||___/|_|\_" + "\\"

    # One 256-color foreground index per banner line (fading gradient).
    fgs = [227, 227, 222, 217, 212, 207]
    for fg, line in zip(fgs, banner.splitlines()):
        bg = 236
        # Bold + 256-color background/foreground escape codes, reset at end of line.
        pline = '\033[1m\033[48;5;{0:03d}m\033[38;5;{1:03d}m'.format(bg, fg) + line + '\033[0m'
        print(pline)

    answer = gui.question('Does GammaDesk look nice in the console output?')
    assert answer
def test_calc_pi_break(self):
    """Calculate pi digits and break after a few seconds.

    A background thread triggers the console's 'Async Break' after 1 s,
    which raises KeyboardInterrupt in this (printing) thread.
    """
    import sys
    import time
    import threading
    from gdesk import shell, gui

    interpreter = shell.this_interpreter()
    ptid = threading.get_ident()
    panid = gui.console.selected()

    def delayedBreak():
        # Redirect this helper thread's output to the test thread, then
        # fire the async break on the selected console panel.
        time.sleep(1)
        gui.redirects[threading.get_ident()] = ptid
        gui.menu_trigger('console', panid, ['Execution', 'Async Break'])

    threading.Thread(target=delayedBreak).start()

    def calcPi():
        # Unbounded spigot algorithm: yields decimal digits of pi one at
        # a time using only integer arithmetic.
        q, r, t, k, n, l = 1, 0, 1, 1, 3, 3
        while True:
            if 4*q+r-t < n*t:
                # Next digit is stable: emit it and shift the state.
                yield n
                nr = 10*(r-n*t)
                n = ((10*(3*q+r))//t)-10*n
                q *= 10
                r = nr
            else:
                # Not enough precision yet: consume another series term.
                nr = (2*q+r)*l
                nn = (q*(7*k)+2+(r*l))//(t*l)
                q *= k
                t *= l
                l += 2
                k += 1
                n = nn
                r = nr

    pi_digits = calcPi()
    i = 0
    # Print digits in rows of 64, prefixed with the running digit count.
    print(f"{i:05d} ", end='')
    try:
        for d in pi_digits:
            sys.stdout.write(str(d))
            i += 1
            if (i % 64) == 0: print(f"\n{i:05d} ", end='')
    except KeyboardInterrupt:
        # The async break raised here as intended; clear the break flag.
        interpreter.break_sent = False
def suite():
    """Build the interactive GammaDesk test suite in its intended run order."""
    ordered_tests = (
        'test_screenstate_1',
        'test_small_loop_and_print',
        'test_calc_pi_break',
        'test_colors',
        'test_menu_file',
        'test_menu_canvas',
        'test_menu_image_1',
        'test_menu_image_2',
        'test_code_3',
    )
    collected = unittest.TestSuite()
    for test_name in ordered_tests:
        collected.addTest(GammaDeskSuite(test_name))
    return collected
|
thocoo/gamma-desk | gdesk/panels/window.py | <reponame>thocoo/gamma-desk<gh_stars>0
import sys, io
import os
import threading
import time
from pathlib import Path
from collections import OrderedDict
import logging
import psutil
import pprint
import enum
from qtpy import QtCore, QtGui, QtWidgets
from qtpy.QtCore import (QFile, QFileInfo, QPoint, QSettings, QSignalMapper, QTimer,
QSize, QTextStream, Qt, QObject, QMetaObject, Slot, QUrl, QByteArray)
from qtpy.QtGui import QIcon, QKeySequence, QPixmap
from qtpy.QtWidgets import (QAction, QApplication, QFileDialog, QMainWindow, QShortcut, QDialog, QLabel,
QMdiArea, QMessageBox, QTextEdit, QWidget, QStyle, QStyleFactory, QActionGroup, QButtonGroup)
from qtpy.QtWidgets import QMenu, QDockWidget, QListWidget
from qtpy import QtWidgets
from .. import config, gui
from .base import CheckMenu
from ..ezdock import overlay
from ..gcore.utils import getMenuAction, relax_menu_trace, relax_menu_text
from ..dialogs.main import NewPanelMenu, ShowMenu, WindowMenu
from ..dialogs.formlayout import fedit
from ..dialogs.about import AboutScreen
respath = Path(config['respath'])
logger = logging.getLogger(__name__)
def debug_print(message):
    """Write *message* plus a newline straight to the real stdout.

    Bypasses any sys.stdout redirection and flushes immediately, so the
    text appears even if the process dies right after.
    """
    stream = sys.__stdout__
    stream.write(f'{message}\n')
    stream.flush()
class DragState(enum.Enum):
    """State machine for dragging a whole window into another window's layout."""
    placed = enum.auto()        # window is at rest (not being dragged)
    titlePressed = enum.auto()  # mouse pressed on the title bar, drag not yet started
    dragging = enum.auto()      # window is being moved; dock overlays are shown
class MainWindow(QMainWindow):
    """
    The Main QT Window.

    Hosts a dockable panel container and supports Ctrl+title-bar dragging
    to dock the whole window into another window's layout (see DragState).
    """

    # Queued signal so window moves can be requested from any thread.
    moveQueued = QtCore.Signal(object)

    def __init__(self, qapp, name, parentWinName=None):
        """Create a window named *name*; optionally as tool window of *parentWinName*."""
        super(MainWindow, self).__init__()
        self.qapp = qapp
        self.name = name
        self.panels = qapp.panels
        self.dragState = DragState.placed
        # NOTE(review): attribute access with no visible effect — confirm
        # whether this is meant to assert that ezm already exists.
        self.qapp.panels.ezm

        if not parentWinName is None:
            # Become a tool window of the given parent window.
            self.setParent(self.qapp.windows[parentWinName])
            self.setWindowFlags(self.windowFlags() | Qt.Tool)
            #self.setWindowFlags(self.windowFlags() | Qt.WindowStaysOnTopHint)

        self.container = self.qapp.panels.ezm.new_container(self, self.name)
        self.setCentralWidget(self.container)
        self.setWindowTitle(f'[{self.name}]')

        self.createMenus()
        self.createStatusBar()

        #Hiding panelsDialog will disable all shortcuts
        sc = QShortcut(QKeySequence("Ctrl+Shift+Alt+F12"), self,
            lambda: self.qapp.panelsDialog.setVisible(not self.qapp.panelsDialog.isVisible()))
        sc.setContext(Qt.ApplicationShortcut)

        self.moveQueued.connect(self.moveWindow, Qt.QueuedConnection)

        self.activeCategory = None
        self.activePanId = None
        self.priorHoverButton = None

    def moveWindow(self, pos):
        """Slot for moveQueued: move this window to *pos* (runs on the GUI thread)."""
        self.move(pos)

    def setPanelInfo(self, panel=None):
        """Reflect *panel*'s titles in the window title and status bar.

        With ``panel=None`` the window reverts to its bare ``[name]`` title
        and the active category/panid are cleared.
        """
        if panel is None:
            self.setWindowTitle(f'[{self.name}]')
            self.panelName.setText('')
            self.activeCategory = None
            self.activePanId = None
            return

        self.setWindowTitle(f'[{self.name}] {panel.long_title}')
        self.panelName.setText(panel.short_title)
        self.activeCategory = panel.category
        self.activePanId = panel.panid

    def keyPressEvent(self, event):
        # Intentionally swallow key presses; shortcuts are registered globally.
        pass

    def createMenus(self):
        """Build the layout menu, the window (layout-edit) menu and the
        two menu-bar corner tool buttons."""
        self.layoutMenu = QtWidgets.QMenu()

        panDiaAct = QAction("Panels Dialog", self, triggered=self.qapp.showDialog)
        panDiaAct.setIcon(QIcon(str(respath / 'icons' / 'px16' / 'application_view_gallery.png')))
        self.layoutMenu.addAction(panDiaAct)

        self.newPanelMenu = NewPanelMenu(self, showIcon=True)
        self.layoutMenu.addMenu(self.newPanelMenu)

        self.panelMenu = ShowMenu(self, showIcon=True)
        self.layoutMenu.addMenu(self.panelMenu)

        self.layoutMenu.addMenu(WindowMenu(self, showIcon=True))

        def addWindowMenuItem(caption, function, icon=None):
            # Append the configured shortcut (if any) to the caption, resolve
            # an icon filename, and register the action on self.windowMenu.
            keySequence = self.qapp.menuCallShortCuts.get('window', {}).get((relax_menu_text(caption),), None)
            if not keySequence is None:
                caption = f'{caption}\t{keySequence}'
            if isinstance(icon, str):
                icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / icon))
            if not icon is None:
                action = QAction(caption, self.windowMenu, triggered=function, icon=icon)
            else:
                action = QAction(caption, self.windowMenu, triggered=function)
            self.windowMenu.addAction(action)

        self.windowMenu = CheckMenu("&Layout Edit")
        self.windowMenu.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'layout_edit.png')))
        self.layoutMenu.addMenu(self.windowMenu)

        self.toolWinAction = QAction(self.winActionLabel(), self, triggered=self.asToolWindow)
        self.windowMenu.addAction(self.toolWinAction, checkcall=self.isToolWindow)

        addWindowMenuItem("Distribute", self.container.distribute, 'layouts_six_grid.png')
        addWindowMenuItem("Drop In", lambda: self.qapp.panels.ezm.drop_in(self.container),
            'layouts_body_select.png')
        addWindowMenuItem("Screenshot to Clipboard", self.screenShot, 'lcd_tv_image.png')
        addWindowMenuItem("Cycle Tag Level", self.cycle_tag_level)
        addWindowMenuItem("Full Screen", self.fullScreen, 'view_fullscreen_view.png')
        addWindowMenuItem("Hide/Show Menu && Statusbar", self.toggleMenuStatusbar)
        addWindowMenuItem('Save Layout...', self.qapp.panelsDialog.layoutMenu.saveLayout)

        self.layoutMenu.addSeparator()
        self.qapp.panelsDialog.layoutMenu.addLayoutActions(self.layoutMenu)
        self.layoutMenu.addSeparator()

        act = QAction("E&xit", self, shortcut=QKeySequence.Quit,
            statusTip="Exit the application",
            triggered=QApplication.instance().quit)
        act.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'door_out.png')))
        self.layoutMenu.addAction(act)

        # Top-left corner button: opens the panels dialog, popup is layoutMenu.
        self.panelsDialogBtn = QtWidgets.QToolButton(self)
        self.panelsDialogBtn.setIcon(QIcon(str(respath / 'icons' / 'px16' / 'application_view_gallery.png')))
        self.panelsDialogBtn.clicked.connect(self.qapp.showDialog)
        self.panelsDialogBtn.setMenu(self.layoutMenu)
        self.panelsDialogBtn.setPopupMode(QtWidgets.QToolButton.MenuButtonPopup)
        self.menuBar().setCornerWidget(self.panelsDialogBtn, corner= Qt.TopLeftCorner)

        # Top-right corner button: cycles the tag level, popup is windowMenu.
        self.cycleTabBtn = QtWidgets.QToolButton(self)
        self.cycleTabBtn.setIcon(QIcon(str(respath / 'icons' / 'px16' / 'layout_content.png')))
        self.cycleTabBtn.clicked.connect(self.cycle_tag_level)
        self.cycleTabBtn.setMenu(self.windowMenu)
        self.cycleTabBtn.setPopupMode(QtWidgets.QToolButton.MenuButtonPopup)
        self.menuBar().setCornerWidget(self.cycleTabBtn, corner= Qt.TopRightCorner)

    @property
    def windowName(self):
        # Public alias for the internal window name.
        return self.name

    def cycle_tag_level(self):
        """Cycle the tag level of this window's panel container."""
        self.container.cycle_tag_level()

    def remove_panel_menu(self):
        """Rebuild the menu bar keeping only its QMenu children (drops panel menus)."""
        main_childs = []
        for main_child in self.menuBar().children():
            main_childs.append(main_child)
        self.menuBar().clear()
        for child in main_childs:
            if isinstance(child, QMenu):
                self.menuBar().addMenu(child)

    def screenShot(self):
        """Grab this window's pixels and place the image on the clipboard."""
        pixmap = self.grab()
        qimage = pixmap.toImage()
        clipboard = self.qapp.clipboard()
        clipboard.setImage(qimage)

    def asToolWindow(self, windowName=None):
        """Attach this window as a tool window of *windowName*, or detach it.

        Called without a name on a non-tool window, a dialog asks the user
        to pick a parent ('None' detaches).
        """
        if (not self.isToolWindow()) and windowName is None:
            # Offer all non-tool windows except this one as parent candidates.
            winNames = [winname for winname, window in gui.qapp.windows.items()
                if not (window.isToolWindow() or winname == self.name)]
            winNames.append('None')
            winNameIndex = fedit([('Window', [1]+winNames)])[0]
            winName = winNames[winNameIndex-1]
        else:
            winName = windowName
            assert winName != self.name

        if winName in [None, 'None']:
            # Detach: drop the Tool flag first, then unparent.
            self.setWindowFlags(self.windowFlags() & ~Qt.Tool)
            self.setParent(None)
            #setParent doc: Sets the parent of the widget to parent , and resets the window flags.
            self.toolWinAction.setText(self.winActionLabel())
            #BUG
            # It seems that the window don't accept panel drops anymore
            # Only windows which had parent=None at __init__, seems to accept panel drops
            # Or windows with parent != None
            # But not windows are not reparent to None
            # Removing the Tool flag before setting parent to None seems to solve it
            self.show()
            return

        self.setParent(self.qapp.windows[winName])
        self.toolWinAction.setText(self.winActionLabel())
        self.setWindowFlags(self.windowFlags() | Qt.Tool)
        #It seems that the child behavior (on top of parent, minimize together with parent)
        #is lost as soon the parent is hidden and shown again
        #Toggling the stay on top flags restores the parent-child behavior
        self.setWindowFlags(self.windowFlags() | Qt.WindowStaysOnTopHint)
        self.qapp.processEvents()
        self.setWindowFlags(self.windowFlags() & ~Qt.WindowStaysOnTopHint)
        self.show()

    def winActionLabel(self):
        """Label for the tool-window menu action, reflecting the current parent."""
        if self.parent() is None:
            return 'Tool of...'
        else:
            return f'Tool of {self.parent().name}'

    def isToolWindow(self):
        """Return True if this window currently has the Qt.Tool flag."""
        return (self.windowFlags() & Qt.Tool) == Qt.Tool

    def createStatusBar(self):
        """Create the status bar with a panel-name label (right-click opens panel menu)."""
        self.panelName = QLabel('')
        self.panelName.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
        self.panelName.customContextMenuRequested.connect(self.showPanelMenu)
        self.statusBar().addWidget(self.panelName,1)

    def showPanelMenu(self, point=None):
        """Pop up the panel menu at the current cursor position."""
        self.panelMenu.exec_(QtGui.QCursor().pos())

    def fullScreen(self):
        """Toggle between full-screen and normal window state."""
        if not self.isFullScreen():
            self.showFullScreen()
        else:
            self.showNormal()

    def toggleMenuStatusbar(self):
        """Hide or show the menu bar and the status bar together."""
        if self.menuBar().isVisible():
            self.menuBar().hide()
            self.statusBar().hide()
        else:
            self.menuBar().show()
            self.statusBar().show()

    def unregister(self):
        """Remove this window from the application registries and delete it."""
        #Detach all tool windows of this window
        for window in self.qapp.windows.values():
            if window.parent() == self:
                window.asToolWindow(None)
        #Unregister this window
        self.qapp.windows.pop(self.name)
        self.qapp.panels.ezm.containers.pop(self.name)
        self.deleteLater()
        self.qapp.hideWindow()

    def getWidgetUnderMouse(self):
        '''
        Get the widget outside this window under the current mouse position.
        '''
        # Temporarily make this window mouse-transparent so widgetAt() can
        # see through it to whatever lies underneath.
        self.setAttribute(Qt.WA_TransparentForMouseEvents, True)
        pos = QtGui.QCursor.pos()
        widget = gui.qapp.widgetAt(pos)
        self.setAttribute(Qt.WA_TransparentForMouseEvents, False)
        if widget is None: return None, None
        locPos = widget.mapFromGlobal(pos)
        return widget, locPos

    def moveEvent(self, event):
        """Drive the drag state machine while the window is being moved."""
        if self.dragState == DragState.placed: return

        elif self.dragState == DragState.titlePressed:
            # First move after a Ctrl+title press: start the drag, dim the
            # window and show the dock overlays on the drop targets.
            self.dragState = DragState.dragging
            self.setWindowOpacity(0.5)
            self.qapp.panels.ezm.drop_in(self.container, hide=False, allWindows=self.dragToAllWindows)

        elif self.dragState == DragState.dragging:
            # Track hover over dock buttons, previewing the drop position.
            widget, locPos = self.getWidgetUnderMouse()
            if widget is None: return
            if self.priorHoverButton == widget: return
            if not self.priorHoverButton is None:
                self.priorHoverButton.endPreview()
                # It seems that sometimes, the priorHoverButton is not valid?
                # Traceback (most recent call last):
                #   File "c:\tools\gh2\venv\lib\site-packages\ghawk2\panels\window.py", line 320, in moveEvent
                #     self.priorHoverButton.endPreview()
                #   File "c:\tools\gh2\venv\lib\site-packages\ghawk2\ezdock\overlay.py", line 59, in endPreview
                #     self.parent().endPreview()
                # RuntimeError: Internal C++ object (HoverButton) already deleted.
                self.priorHoverButton = None
            if isinstance(widget, overlay.HoverButton):
                widget.startPreview()
                self.priorHoverButton = widget

    def event(self, event):
        """Intercept non-client (title bar) mouse events to implement window docking."""
        if self.dragState == DragState.placed:
            if event.type() == QtCore.QEvent.NonClientAreaMouseButtonPress:
                if event.modifiers() & QtCore.Qt.ControlModifier:
                    # Ctrl+drag docks to all windows; Ctrl+Shift also for tool windows.
                    if not self.isToolWindow() or (event.modifiers() & QtCore.Qt.ShiftModifier):
                        self.startMoving(True)
                    else:
                        self.startMoving(False)
                else:
                    return super().event(event)
                return True

        elif event.type() == QtCore.QEvent.NonClientAreaMouseButtonDblClick \
                or event.type() == QtCore.QEvent.Resize:
            # Double click or resize during a drag cancels the docking.
            self.windowPlaced(canceled=True)

        elif event.type() == QtCore.QEvent.NonClientAreaMouseButtonRelease:
            self.windowPlaced()
            return True

        return super().event(event)

    def startMoving(self, allWindows=True):
        """Arm the drag state machine after a Ctrl+title-bar press."""
        self.dragState = DragState.titlePressed
        self.dragToAllWindows = allWindows
        self.priorHoverButton = None

    def windowPlaced(self, canceled=False):
        """Finish a drag: dock into the hovered button, or cancel and restore."""
        if not canceled:
            widget, locPos = self.getWidgetUnderMouse()
            if isinstance(widget, overlay.HoverButton):
                widget.clicked.emit()
                self.qapp.deleteEmptyWindows()
            else:
                # Released somewhere that is not a dock button.
                canceled = True

        if canceled:
            self.qapp.panels.ezm.hide_overlays()

        self.setWindowOpacity(1.0)
        self.dragState = DragState.placed
        self.priorHoverButton = None

    def closeEvent(self, event):
        # Closing a window only hides it; the application manages lifetimes.
        self.qapp.hideWindow(self)
        event.accept()
thocoo/gamma-desk | gdesk/version.py | <reponame>thocoo/gamma-desk
"""Version details of Gamma Desk."""

# Version as a tuple of ints: (major, minor, micro).
VERSION_INFO = (0, 2, 0)

# Version as a dotted string, e.g. '0.2.0'.
VERSION = '.'.join(str(v) for v in VERSION_INFO)
|
thocoo/gamma-desk | gdesk/graphics/rulers.py | from qtpy import QtCore, QtGui, QtWidgets
from ..utils.ticks import tickValues, Ticks
# Fonts per tick level: index 0 = major ticks, 1 = intermediate, 2 = minor.
fonts = []
# fonts.append(QtGui.QFont('Arial', 12))
# fonts.append(QtGui.QFont('Arial', 10))
# fonts.append(QtGui.QFont('Arial', 9))
fonts.append(QtGui.QFont('Arial', 8))
fonts.append(QtGui.QFont('Arial', 7))
fonts.append(QtGui.QFont('Arial', 5))

# Grid-line pens per tick level: solid for major, dashed and dotted for
# the finer subdivisions, in progressively lighter greys.
grid_pens = []
grid_pens.append(QtGui.QPen(QtGui.QColor(159,159,159), 0, QtCore.Qt.SolidLine))
grid_pens.append(QtGui.QPen(QtGui.QColor(191,191,191), 0, QtCore.Qt.DashLine))
grid_pens.append(QtGui.QPen(QtGui.QColor(223,223,223), 0, QtCore.Qt.DotLine))
class LabelItem(QtWidgets.QGraphicsLineItem):
    """A tick mark with a text label, optionally extended by a full-height grid line.

    *level* selects the font and grid pen (0 = major ... 2 = minor).
    """

    def __init__(self, text='', level=0, grid=False, parent=None, scene=None):
        # Qt5 removed the ``scene`` keyword from the QGraphicsItem
        # constructors; forwarding it raises TypeError. Follow the pattern
        # already used in TickedRuler.__init__ and only forward the parent.
        # The ``scene`` parameter is kept (and ignored) for compatibility.
        super().__init__(parent=parent)
        self.setLine(0, 0, 0, 10)
        # Keep the tick and label at a constant pixel size, independent of zoom.
        self.setFlags(QtWidgets.QGraphicsItem.ItemIgnoresTransformations)
        self.label = QtWidgets.QGraphicsTextItem(text, self)
        self.label.setFont(fonts[level])
        self.label.setPos(-1, 2)
        if grid:
            self.gline = QtWidgets.QGraphicsLineItem(self)
            self.gline.setPen(grid_pens[level])
            self.gline.setLine(0, -1e6, 0, 0)
            #self.gline.setZValue(0)

    def setRightAlign(self):
        """Right-align the text label relative to the tick position."""
        self.label.setPos(2 - self.label.boundingRect().width(), 2)
class GridItem(QtWidgets.QGraphicsLineItem):
    """A label-less full-height grid line, styled by tick *level*."""

    def __init__(self, level=0, parent=None, scene=None):
        # Qt5 removed the ``scene`` keyword from QGraphicsItem constructors
        # (see TickedRuler.__init__); the parameter is kept but ignored.
        super().__init__(parent=parent)
        self.setFlags(QtWidgets.QGraphicsItem.ItemIgnoresTransformations)
        self.gline = QtWidgets.QGraphicsLineItem(self)
        self.gline.setPen(grid_pens[level])
        self.gline.setLine(0, -1e6, 0, 0)
class yAxisLabel(QtWidgets.QGraphicsLineItem):
    """A numeric axis label on a translucent background rectangle."""

    def __init__(self, text='', fontNumber=0, parent=None, scene=None):
        # Qt5 removed the ``scene`` keyword from QGraphicsItem constructors
        # (see TickedRuler.__init__); the parameter is kept but ignored.
        super().__init__(parent=parent)
        self.setFlags(QtWidgets.QGraphicsItem.ItemIgnoresTransformations)
        # Semi-transparent background so the label stays readable over curves.
        self.bgrect = QtWidgets.QGraphicsRectItem(-40, -10, 40, 20, parent=self)
        self.bgrect.setPen(QtGui.QPen(QtGui.QColor(250,250,250, 200)))
        self.bgrect.setBrush(QtGui.QBrush(QtGui.QColor(250,250,250, 200), QtCore.Qt.SolidPattern))
        self.label = QtWidgets.QGraphicsTextItem(text, self)
        self.label.setDefaultTextColor(QtGui.QColor(120,120,120))
        self.label.setFont(fonts[fontNumber])
        # Right-align the text against the anchor point.
        self.label.setPos(-self.label.boundingRect().width(), -10)

    def setRightAlign(self):
        """Right-align the label so its right edge sits at x = 2."""
        self.label.setPos(2 - self.label.boundingRect().width(), 2)
class SubDivisionX(QtWidgets.QGraphicsLineItem):
    """A short (3 px) vertical minor-tick mark for the x ruler."""

    def __init__(self, parent=None, scene=None):
        # Qt5 removed the ``scene`` keyword from QGraphicsItem constructors
        # (see TickedRuler.__init__); the parameter is kept but ignored.
        super().__init__(parent=parent)
        self.setLine(0, 0, 0, 3)
        self.setFlags(QtWidgets.QGraphicsItem.ItemIgnoresTransformations)
class SubDivisionY(QtWidgets.QGraphicsLineItem):
    """A short (3 px) horizontal minor-tick mark for the y ruler."""

    def __init__(self, parent=None, scene=None):
        # Qt5 removed the ``scene`` keyword from QGraphicsItem constructors
        # (see TickedRuler.__init__); the parameter is kept but ignored.
        super().__init__(parent=parent)
        self.setLine(0, 0, 3, 0)
        self.setFlags(QtWidgets.QGraphicsItem.ItemIgnoresTransformations)
#I think QGraphicsItemGroup is better, but i prevents moving the indicators
#I don't know why?
#class TickedRuler(QtWidgets.QGraphicsItemGroup):
class TickedRuler(QtWidgets.QGraphicsPolygonItem):
    """A ruler with tick marks, labels and optional grid lines.

    *orientation* is 0 for a horizontal ruler and +/-90 for vertical rulers
    (the sign selects on which side the labels are drawn).
    Labels are kept incrementally in ``labelItems`` (tick value -> item) so
    pan/zoom updates only add/remove the ticks that changed.
    """

    def __init__(self, orientation, start, stop, scale, noDecimals=True, parent=None, scene=None):
        #super().__init__(parent=parent, scene=scene)
        super().__init__(parent=parent)
        #self.setPos(0,0)
        #self.setLine(-1, -1, 1, 1)
        #self.setPen(QtGui.QPen(QtGui.QColor(200,50,50),1))
        #self.setBrush(QtGui.QBrush(QtGui.QColor(200,50,50), QtCore.Qt.SolidPattern))
        self.orientation = orientation
        self.noDecimals = noDecimals
        self.create_ticks(start, stop, scale)
        self.init_bg()
        self.labelItems = dict()  # tick value -> LabelItem / SubDivision item
        self.make_labels(self.ticks.push_values)

    def create_ticks(self, start, stop, scale):
        """Compute the tick positions for the given range and pixel scale."""
        self.start = start
        self.stop = stop
        self.scale = scale
        #self.thicks = tickValues(self.start, self.stop, self.scale, 40, self.noDecimals)
        self.ticks = Ticks(self.start, self.stop, self.scale, 60, self.noDecimals)

    @property
    def thicks(self):
        # Backward-compatible alias for the computed tick values.
        return self.ticks.values

    def update_labels(self, start, stop, scale, grid=False):
        """Incrementally update the ruler after a pan/zoom.

        Removes ticks that left the range and adds the new ones, then
        stretches the axis line to the new extent.
        """
        self.start = start
        self.stop = stop
        self.scale = scale
        self.ticks.update(start, stop, scale)
        self.remove_labels(self.ticks.pop_values)
        self.make_labels(self.ticks.push_values, grid)
        if self.orientation == 0:
            self.axline.setLine(start, 0, stop, 0)
        else:
            self.axline.setLine(0, start, 0, stop)

    def init_bg(self):
        """Create the white background strip and the axis line for this orientation."""
        if self.orientation == 0:
            self.bgrect = QtWidgets.QGraphicsRectItem(-1e6, 0, 2e6, 22, parent=self)
            self.axline = QtWidgets.QGraphicsLineItem(self.start, 0, self.stop, 0, parent=self)
        if self.orientation == 90:
            self.bgrect = QtWidgets.QGraphicsRectItem(0, -1e6, 22, 2e6, parent=self)
            self.axline = QtWidgets.QGraphicsLineItem(0, self.start, 0, self.stop, parent=self)
        if self.orientation == -90:
            self.bgrect = QtWidgets.QGraphicsRectItem(-22, -1e6, 22, 2e6, parent=self)
            self.axline = QtWidgets.QGraphicsLineItem(0, self.start, 0, self.stop, parent=self)
        self.bgrect.setFlags(QtWidgets.QGraphicsItem.ItemIgnoresTransformations)
        self.bgrect.setPen(QtGui.QPen(QtGui.QColor(255,255,255), 0))
        self.bgrect.setBrush(QtGui.QBrush(QtGui.QColor(255,255,255), QtCore.Qt.SolidPattern))
        self.axline.setPen(QtGui.QPen(QtGui.QColor(0,0,0), 0))

    def remove_labels(self, pop_values):
        """Detach and forget the items for the ticks listed per level in *pop_values*."""
        if len(pop_values) == 0:
            return
        for k in pop_values[0]:
            self.labelItems[k].setParentItem(None)
            self.labelItems.pop(k)
        for k in pop_values[1]:
            self.labelItems[k].setParentItem(None)
            self.labelItems.pop(k)
        for k in pop_values[2]:
            self.labelItems[k].setParentItem(None)
            self.labelItems.pop(k)

    def make_labels(self, push_values, grid=False):
        """Create label/subdivision items for the new ticks in *push_values*.

        Levels 0 and 1 get text labels; level 2 only small subdivision marks.
        """
        if self.noDecimals:
            fmt = "%d"
        else:
            fmt = "%0.5g"
        if self.orientation == 0:
            for i in push_values[0]:
                i0 = LabelItem(fmt % i, 0, grid, self)
                i0.setPos(i, 0)
                self.labelItems[i] = i0
            for i in push_values[1]:
                i0 = LabelItem(fmt % i, 1, grid, self)
                i0.setPos(i, 0)
                self.labelItems[i] = i0
            for i in push_values[2]:
                line = SubDivisionX(parent=self)
                line.setPos(i, 0)
                self.labelItems[i] = line
        if abs(self.orientation) == 90:
            for i in push_values[0]:
                i0 = LabelItem(fmt % i, 0, grid, self)
                i0.setRightAlign()
                i0.setPos(0, i)
                i0.setRotation(-self.orientation)
                self.labelItems[i] = i0
            for i in push_values[1]:
                i0 = LabelItem(fmt % i, 1, grid, self)
                i0.setRightAlign()
                i0.setPos(0, i)
                i0.setRotation(-self.orientation)
                self.labelItems[i] = i0
            for i in push_values[2]:
                line = SubDivisionY(parent=self)
                line.setPos(0, i)
                self.labelItems[i] = line
#class Grid(QtWidgets.QGraphicsLineItem):
class Grid(QtWidgets.QGraphicsItemGroup):
    """Grid lines that mirror the ticks of an associated TickedRuler.

    The grid reuses the ruler's tick bookkeeping: lines are added/removed
    incrementally based on the ruler's push/pop values.
    """

    def __init__(self, ruler=None, parent=None, scene=None):
        #super().__init__(parent=parent, scene=scene)
        super().__init__(parent=parent)
        # self.setLine(-1, 1, 1, -1)
        # self.setPen(QtGui.QPen(QtGui.QColor(50,200,50),1))
        self.ruler = ruler
        self.labelItems = dict()  # tick value -> GridItem
        self.make_labels(self.ticks.push_values)

    @property
    def ticks(self):
        # Tick bookkeeping is owned by the associated ruler.
        return self.ruler.ticks

    @property
    def orientation(self):
        return self.ruler.orientation

    def update_labels(self, grid=True):
        """Sync the grid lines with the ruler's current ticks."""
        self.remove_labels(self.ticks.pop_values)
        self.make_labels(self.ticks.push_values, grid)

    def remove_labels(self, pop_values):
        """Detach and forget the grid items for the ticks in *pop_values*."""
        if len(pop_values) == 0:
            return
        for k in pop_values[0]:
            self.labelItems[k].setParentItem(None)
            self.labelItems.pop(k)
        for k in pop_values[1]:
            self.labelItems[k].setParentItem(None)
            self.labelItems.pop(k)
        for k in pop_values[2]:
            self.labelItems[k].setParentItem(None)
            self.labelItems.pop(k)

    def make_labels(self, push_values, grid=False):
        """Create a GridItem per new tick, styled by tick level (0/1/2)."""
        if self.orientation == 0:
            for i in push_values[0]:
                i0 = GridItem(0, self)
                i0.setPos(i, 0)
                self.labelItems[i] = i0
            for i in push_values[1]:
                i0 = GridItem(1, self)
                i0.setPos(i, 0)
                self.labelItems[i] = i0
            for i in push_values[2]:
                line = GridItem(2, self)
                line.setPos(i, 0)
                self.labelItems[i] = line
        if abs(self.orientation) == 90:
            for i in push_values[0]:
                i0 = GridItem(0, self)
                i0.setPos(0, i)
                i0.setRotation(-self.orientation)
                self.labelItems[i] = i0
            for i in push_values[1]:
                i0 = GridItem(1, self)
                i0.setPos(0, i)
                i0.setRotation(-self.orientation)
                self.labelItems[i] = i0
            for i in push_values[2]:
                line = GridItem(2, self)
                line.setPos(0, i)
                line.setRotation(-self.orientation)
                self.labelItems[i] = line
class Axis(QtWidgets.QGraphicsLineItem):
    """In-plot axis labels created from pre-computed tick levels.

    *thicks* is a sequence of (spacing, values) pairs per tick level; only
    levels with spacing > 15 px get labels, to avoid overlapping text.
    """

    def __init__(self, plotAngle, start, stop, thicks, parent=None, scene=None):
        # Qt5 removed the ``scene`` keyword from QGraphicsItem constructors
        # (see TickedRuler.__init__); the parameter is kept but ignored.
        super().__init__(parent=parent)
        self.setLine(0, 0, 0, 0)
        self.start = start
        self.stop = stop
        self.thicks = thicks
        self.plotAngle = plotAngle
        self.createAxis()

    def createAxis(self):
        """Place a yAxisLabel at every sufficiently spaced tick position."""
        if self.plotAngle == 0:
            for thickLevel in range(len(self.thicks)):
                if self.thicks[thickLevel][0] > 15:
                    for i in self.thicks[thickLevel][1]:
                        label = yAxisLabel('%0.5g' % i, thickLevel, self)
                        label.setPos(0, i)
        else:
            # Rotated plot: labels run along the other axis, rotated upright.
            for thickLevel in range(len(self.thicks)):
                if self.thicks[thickLevel][0] > 15:
                    for i in self.thicks[thickLevel][1]:
                        label = yAxisLabel('%0.5g' % i, thickLevel, self)
                        label.setPos(i, 0)
                        label.setRotation(-90)
|
thocoo/gamma-desk | gdesk/matplotbe/__init__.py | <reponame>thocoo/gamma-desk<gh_stars>0
"""
Render to qt from agg.
"""
from .. import config
import os
import ctypes
import sys
import threading
import pickle
import logging
import warnings
from distutils.version import LooseVersion
import matplotlib
from matplotlib.transforms import Bbox
from matplotlib.figure import Figure
from matplotlib import cbook
from matplotlib._pylab_helpers import Gcf
from matplotlib.backend_bases import FigureCanvasBase, FigureManagerBase
from matplotlib.backends.backend_agg import FigureCanvasAgg
from matplotlib.backends.backend_qt5 import (
_BackendQT5, FigureCanvasQT, FigureManagerQT,
NavigationToolbar2QT, backend_version)
from matplotlib.backends.qt_compat import QT_API
from matplotlib.backends.backend_template import FigureCanvasTemplate, FigureManagerTemplate
from .. import gui
if config['qapp']:
    # Qt and the plot panel machinery are only available when a Qt
    # application is configured; headless child processes skip them.
    from qtpy import QtCore, QtGui
    from ..panels.matplot import PlotPanel

# Select the device-pixel-ratio helper and private attribute name matching
# the installed matplotlib version.
# NOTE(review): the original indentation was ambiguous here — the 3.2.x
# branch uses QtGui, which is only imported under config['qapp']; confirm
# whether this chain should live inside that guard.
if LooseVersion(matplotlib.__version__) < LooseVersion('3.2'):
    warnings.warn(
        f'Matplotlib version {matplotlib.__version__} not supported.\n'
        f'Version should be 3.2.x, 3.3.x, 3.4.x or 3.5.x')
elif LooseVersion(matplotlib.__version__) < LooseVersion('3.3'):
    # Version 3.2.x
    setDevicePixelRatio = QtGui.QImage.setDevicePixelRatio
    DEV_PIXEL_RATIO_ATTR = "_dpi_ratio"
elif LooseVersion(matplotlib.__version__) < LooseVersion('3.4'):
    # Version 3.3.x
    from matplotlib.backends.qt_compat import _setDevicePixelRatioF
    setDevicePixelRatio = _setDevicePixelRatioF
    DEV_PIXEL_RATIO_ATTR = "_dpi_ratio"
elif LooseVersion(matplotlib.__version__) < LooseVersion('3.5'):
    # Version 3.4.x
    from matplotlib.backends.qt_compat import _setDevicePixelRatio
    setDevicePixelRatio = _setDevicePixelRatio
    DEV_PIXEL_RATIO_ATTR = "_dpi_ratio"
elif LooseVersion(matplotlib.__version__) >= LooseVersion('3.5'):
    # Version 3.5.x and later: the attribute was renamed.
    from matplotlib.backends.qt_compat import _setDevicePixelRatio
    setDevicePixelRatio = _setDevicePixelRatio
    DEV_PIXEL_RATIO_ATTR = "_device_pixel_ratio"

logger = logging.getLogger(__name__)

# Figures may be created from worker threads; silence matplotlib's warning.
warnings.filterwarnings("ignore", "Starting a Matplotlib GUI outside of the main thread will likely fail.")
def draw_if_interactive():
    """
    For image backends - is not required.
    For GUI backends - this should be overridden if drawing should be done in
    interactive python mode.
    """
    if not matplotlib.is_interactive():
        return
    show()
def show(*, block=None):
    """
    For image backends - is not required.
    For GUI backends - show() is usually the last line of a pyplot script and
    tells the backend that it is time to draw. In interactive mode, this
    should do nothing.
    """
    if matplotlib.is_interactive():
        return
    # Show every registered figure, not only the active one.
    for fig_manager in Gcf.get_all_fig_managers():
        fig_manager.show()
def new_figure_manager(num, *args, FigureClass=Figure, **kwargs):
    """Create a new figure manager instance.

    If a main-level app must be created, this (and
    new_figure_manager_given_figure) is the usual place to do it.
    """
    figure = FigureClass(*args, **kwargs)
    return new_figure_manager_given_figure(num, figure)
def new_figure_manager_given_figure(num, figure):
    """Create a new figure manager instance for the given figure."""
    if gui.valid() and gui._qapp is not None:
        # Normal case: build the Qt canvas on the GUI thread.
        canvas = gui.gui_call(FigureCanvasGh2, figure)
        return FigureManagerGh2(canvas, num)
    # Coming from another process (e.g. a figure depickled from the
    # interprocess queue): FigureCanvasGh2/FigureManagerQT are not picklable,
    # so fall back to the headless base canvas — no gui call here.
    canvas = FigureCanvasBase(figure)
    return FigureManagerGh2Child(canvas, num)
class FigureCanvasGh2(FigureCanvasAgg, FigureCanvasQT):
    """Qt canvas that renders via Agg and paints the result into the widget."""

    def __init__(self, figure):
        # Must pass 'figure' as kwarg to Qt base class.
        super().__init__(figure=figure)

    @property
    def dev_pixel_ratio(self):
        """Device pixel ratio under the version-specific private attribute
        name (``_dpi_ratio`` before matplotlib 3.5, ``_device_pixel_ratio``
        from 3.5 on) — see DEV_PIXEL_RATIO_ATTR at module level."""
        return getattr(self, DEV_PIXEL_RATIO_ATTR)

    def paintEvent(self, event):
        """
        Copy the image from the Agg canvas to the qt.drawable.

        In Qt, all drawing should be done inside of here when a widget is
        shown onscreen.
        """
        logger.debug('calling paintEvent')
        if matplotlib.__version__[:3] in ['3.2', '3.3']:
            if self._update_dpi():
                # The dpi update triggered its own paintEvent.
                return
        self._draw_idle()  # Only does something if a draw is pending.
        # If the canvas does not have a renderer, then give up and wait for
        # FigureCanvasAgg.draw(self) to be called.
        if not hasattr(self, 'renderer'):
            return
        painter = QtGui.QPainter(self)
        try:
            # See documentation of QRect: bottom() and right() are off
            # by 1, so use left() + width() and top() + height().
            rect = event.rect()
            # scale rect dimensions using the screen dpi ratio to get
            # correct values for the Figure coordinates (rather than
            # QT5's coords)
            width = rect.width() * self.dev_pixel_ratio
            height = rect.height() * self.dev_pixel_ratio
            left, top = self.mouseEventCoords(rect.topLeft())
            # shift the "top" by the height of the image to get the
            # correct corner for our coordinate system
            bottom = top - height
            # same with the right side of the image
            right = left + width
            # create a buffer using the image bounding box
            bbox = Bbox([[left, bottom], [right, top]])
            reg = self.copy_from_bbox(bbox)
            buf = cbook._unmultiplied_rgba8888_to_premultiplied_argb32(
                memoryview(reg))
            # clear the widget canvas
            painter.eraseRect(rect)
            qimage = QtGui.QImage(buf, buf.shape[1], buf.shape[0],
                                  QtGui.QImage.Format_ARGB32_Premultiplied)
            setDevicePixelRatio(qimage, self.dev_pixel_ratio)
            # set origin using original QT coordinates
            origin = QtCore.QPoint(rect.left(), rect.top())
            painter.drawImage(origin, qimage)
            # Adjust the buf reference count to work around a memory
            # leak bug in QImage under PySide on Python 3.
            if QT_API in ('PySide', 'PySide2'):
                ctypes.c_long.from_address(id(buf)).value = 1
            self._draw_rect_callback(painter)
        finally:
            painter.end()

    def draw_idle(self):
        """Queue a redraw; safe to call from non-GUI threads via gui_call."""
        logger.debug('calling draw_idle')
        gui.gui_call(FigureCanvasQT.draw_idle, self)

    def destroy(self, *args):
        gui.gui_call(FigureCanvasQT.destroy, self, *args)

    def blit(self, bbox=None):
        # docstring inherited
        # If bbox is None, blit the entire canvas. Otherwise
        # blit only the area defined by the bbox.
        if bbox is None and self.figure:
            bbox = self.figure.bbox
        # repaint uses logical pixels, not physical pixels like the renderer.
        # BUGFIX: use the version-independent dev_pixel_ratio property;
        # self._dpi_ratio no longer exists on matplotlib >= 3.5 (renamed to
        # _device_pixel_ratio), which made blit raise AttributeError there.
        l, b, w, h = [int(pt / self.dev_pixel_ratio) for pt in bbox.bounds]
        t = b + h
        self.repaint(l, self.renderer.height / self.dev_pixel_ratio - t, w, h)

    def print_figure(self, *args, **kwargs):
        """Render the figure to a file, then refresh the on-screen canvas."""
        super().print_figure(*args, **kwargs)
        self.draw()
def make_and_hide_plot_panel(PanelClass, parentName=None, panid=None, floating=False,
                             position=None, size=None, args=(), kwargs=None):
    """Create a new plot panel through the panel manager and return it.

    BUGFIX: the default for *kwargs* was a mutable ``{}`` shared between
    calls; use None and create a fresh dict per call instead.
    """
    if kwargs is None:
        kwargs = {}
    panel = gui._qapp.panels.new_panel(PanelClass, parentName, panid, floating, position, size, args, kwargs)
    # if not matplotlib.is_interactive():
    #     panel.window().hide()
    return panel
class FigureManagerGh2(FigureManagerBase):
    """
    Wrap everything up into a window for the pylab interface

    For non interactive backends, the base class does all the work
    """

    def __init__(self, canvas, num):
        super().__init__(canvas, num)
        # In interactive mode the panel is created right away; otherwise it
        # is created lazily on the first show() call.
        if matplotlib.is_interactive():
            self.panel = gui.gui_call(
                make_and_hide_plot_panel, PlotPanel, 'main', self.num, None,
                args=(self.canvas,))
        else:
            self.panel = None

    def show(self):
        """
        For GUI backends, show the figure window and redraw.
        For non-GUI backends, raise an exception to be caught
        by :meth:`~matplotlib.figure.Figure.show`, for an
        optional warning.
        """
        if not gui.valid():
            raise ValueError(f'gui called from unknown thread {os.getpid()}/{threading.current_thread()}')
        if self.panel is None:
            self.panel = gui.gui_call(
                make_and_hide_plot_panel, PlotPanel, 'main', self.num, None,
                args=(self.canvas,))
        gui.gui_call(PlotPanel.show_me, self.panel)
        gui.gui_call(PlotPanel.refresh, self.panel)

    def destroy(self, *args):
        """Close the associated plot panel, if the plot category still exists."""
        if 'plot' not in gui._qapp.panels.keys():
            return
        if self.panel is not None:
            gui.gui_call(PlotPanel.close_panel, self.panel)
class FigureManagerGh2Child(FigureManagerBase):
    """
    Wrap everything up into a window for the pylab interface

    For non interactive backends, the base class does all the work
    """

    def __init__(self, canvas, num):
        super().__init__(canvas, num)
        # No panel in a child process; the GUI process shows the figure.
        self.panel = None

    def show(self):
        """
        For GUI backends, show the figure window and redraw.
        For non-GUI backends, raise an exception to be caught
        by :meth:`~matplotlib.figure.Figure.show`, for an
        optional warning.
        """
        if not gui.valid():
            raise ValueError(f'gui called from unknown thread {os.getpid()}/{threading.current_thread()}')
        gui.plot.show(self.canvas.figure)
# Names looked up by matplotlib's backend loading machinery.
FigureCanvas = FigureCanvasGh2
FigureManager = FigureManagerGh2
|
thocoo/gamma-desk | gdesk/panels/matplot/plotpanel.py | <gh_stars>0
import sys
import time
import logging
import pickle
from pathlib import Path
from matplotlib.backends.backend_qt5agg import FigureCanvas, NavigationToolbar2QT as NavigationToolbar
from matplotlib._pylab_helpers import Gcf
from matplotlib.backends.backend_template import FigureCanvasTemplate, FigureManagerTemplate
import pylab
from qtpy import QtWidgets, QtCore, QtGui
from ... import gui, config
from .. import BasePanel, CheckMenu
# Resource directory (icons etc.) taken from the application config.
respath = Path(config['respath'])
logger = logging.getLogger(__name__)
# Original Gcf.set_active, kept so gcf_set_active() can delegate to it
# and restore_gcf_set_active() can undo the monkey-patch below.
set_active_backup = Gcf.set_active
def gcf_set_active(manager):
    """Replacement for ``Gcf.set_active`` that routes activation to the gui.

    Called by pyplot when a figure becomes active, e.g. on
    ``pyplot.figure(number)`` or a click on the canvas.  When our backend
    is the configured one and the figure already has a plot panel, select
    that panel; otherwise fall back to matplotlib's original behaviour.
    """
    configured_backend = config.get("matplotlib", {}).get("backend", None)
    if pylab.get_backend() != configured_backend:
        set_active_backup(manager)
        return
    open_plot_panels = gui._qapp.panels.get('plot', {})
    if manager.num in open_plot_panels:
        gui.plot.select(manager.num)
    else:
        set_active_backup(manager)
#Hacking pyplot !
# Monkey-patch: route figure activation through gcf_set_active above.
Gcf.set_active = gcf_set_active
def restore_gcf_set_active():
    """Undo the monkey-patch and restore matplotlib's original Gcf.set_active."""
    Gcf.set_active = set_active_backup
class PlotPanel(BasePanel):
    """Dockable panel hosting one matplotlib figure.

    Wraps a FigureCanvas plus the standard matplotlib navigation toolbar
    and offers File (open/save/close) and View (refresh/grid/tight)
    menus.
    """

    panelCategory = 'plot'
    panelShortName = 'basic'
    userVisible = True

    classIconFile = str(respath / 'icons' / 'px16' / 'chart_curve.png')

    def __init__(self, parent=None, pid=None, figure_or_canvas=None):
        """Build the panel around ``figure_or_canvas``.

        :param figure_or_canvas: either a Figure (exposing ``.canvas``)
            or a FigureCanvas (exposing ``.figure``).
        """
        super().__init__(parent, pid, 'plot')

        self.fileMenu = self.menuBar().addMenu("&File")
        self.addMenuItem(self.fileMenu, 'Open...' , self.openFigure)
        self.addMenuItem(self.fileMenu, 'Save As...' , self.saveFigure)
        self.addMenuItem(self.fileMenu, 'Close' , self.close_me_from_menu, icon = 'cross.png')
        self.addMenuItem(self.fileMenu, 'Close Others', self.close_others)

        self.viewMenu = CheckMenu("&View", self.menuBar())
        self.menuBar().addMenu(self.viewMenu)

        self.addMenuItem(self.viewMenu, 'Refresh', self.refresh,
            statusTip = "Refresh the plot",
            icon = 'update.png')
        self.addMenuItem(self.viewMenu, 'Grid', self.grid,
            statusTip = "Show/hide grid",
            icon = 'layer_grid.png')
        self.addMenuItem(self.viewMenu, 'Tight', self.tight,
            statusTip = "Adjust the padding between and around subplots",
            icon = 'canvas_size.png')
        self.addMenuItem(self.viewMenu, 'Interactive', self.toggle_interactive,
            checkcall=lambda: pylab.isinteractive())

        self.addBaseMenu()

        # Accept either a figure or a canvas and derive the other.
        if hasattr(figure_or_canvas, 'canvas'):
            self.figure = figure_or_canvas
            self.canvas = figure_or_canvas.canvas
        else:
            self.figure = figure_or_canvas.figure
            self.canvas = figure_or_canvas

        self.setCentralWidget(self.canvas)
        self.nav = NavigationToolbar(self.canvas, self)
        self.nav.setIconSize(QtCore.QSize(20, 20))
        self.addToolBar(self.nav)
        self.statusBar().hide()

    def showNewFigure(self, figure):
        """Replace the panel's figure with ``figure`` (e.g. from a pickle)."""
        from ...matplotbe import FigureCanvasGh2
        # Keep the existing manager but point it at a fresh canvas.
        mgr = self.canvas.manager
        self.figure = figure
        self.canvas = FigureCanvasGh2(figure)
        self.canvas.manager = mgr
        mgr.canvas = self.canvas
        self.setCentralWidget(self.canvas)
        # The toolbar is bound to the old canvas; rebuild it.
        self.removeToolBar(self.nav)
        self.nav = NavigationToolbar(self.canvas, self)
        self.nav.setIconSize(QtCore.QSize(20,20))
        self.addToolBar(self.nav)
        self.canvas.draw_idle()

    def openFigure(self):
        """Load a pickled figure from disk and display it in this panel."""
        filter = "Pickle (*.pkl)"
        filepath, selectedFilter = gui.getfile(filter=filter, title='Open Figure')
        if filepath == '':
            return
        # Bug fix: the loaded figure used to be discarded (and the file
        # handle leaked); now it is actually shown in the panel.
        with open(filepath, 'rb') as fp:
            figure = pickle.load(fp)
        self.showNewFigure(figure)

    def saveFigure(self):
        """Save the figure as png/svg, or pickle it for later reloading."""
        figure = self.figure
        filter = "Portable Network Graphics (*.png)"
        filter += ";;Scalable Vector Graphics (*.svg)"
        filter += ";;Pickle (*.pkl)"
        filepath, selectedFilter = gui.putfile(filter=filter, title='Save Figure as', defaultfilter="Scalable Vector Graphics (*.svg)")
        if filepath == '':
            return
        if selectedFilter == "Pickle (*.pkl)":
            # pickle is imported at module level.
            with open(filepath, 'wb') as fp:
                pickle.dump(figure, fp)
        else:
            plt = gui.prepareplot()
            plt.savefig(filepath)

    def grid(self):
        """Toggle the grid on every axes of the figure."""
        f = self.figure
        for axe in f.axes:
            axe.grid()
        self.canvas.draw_idle()

    def tight(self):
        """Apply tight_layout to adjust padding between and around subplots."""
        self.figure.tight_layout()
        self.canvas.draw_idle()

    def toggle_interactive(self):
        """Flip pylab's interactive mode."""
        pylab.interactive(not pylab.isinteractive())

    def refresh(self):
        """Request a redraw of the canvas."""
        self.canvas.draw_idle()

    def select(self):
        # Keep matplotlib's notion of the active figure in sync with
        # the selected panel.
        manager = self.canvas.manager
        set_active_backup(manager)
        super().select()

    def close_me_from_menu(self):
        #Use the matplotlib backend to close
        pylab.close(self.panid)

    def close_others(self):
        """Close every open figure except this one."""
        other_panids = list(Gcf.figs.keys())
        for panid in other_panids:
            if panid == self.panid: continue
            pylab.close(panid)

    def close_panel(self):
        #Called from the matplotlib backend
        super().close_panel()
|
thocoo/gamma-desk | gdesk/core/watcher.py | import logging
import time
import sys, os
import threading
import queue
import socket
import json
import zmq
from . import conf
from .conf import config
from .gui_proxy import gui
logger = logging.getLogger(__name__)
# Single process-wide ZMQ context shared by server and client sockets.
context = zmq.Context()
class CommandServer(object):
    """ZMQ REP server receiving commands for a running gui instance.

    Requests are dicts ``{'cmd': name, 'args': tuple}`` arriving either
    over a tcp socket (socket loop) or an in-process queue (queue loop);
    they are forwarded to the gui thread through the qapp handover.
    """

    def __init__(self, shell):
        self.shell = shell
        self.server_thread = None
        self.socket_loop = False
        self.queue_loop = False
        self.cmd_queue = queue.Queue()

    def start(self, qapp):
        """Bind to a free port in the configured range and start serving.

        Returns True on success, False when no port in the range is free.
        """
        self.socket = context.socket(zmq.REP)
        min_port = config.get('zmq_watcher_min_port', 5550)
        max_port = config.get('zmq_watcher_max_port', 5560)
        try:
            self.port = self.socket.bind_to_random_port('tcp://*',
                min_port=min_port, max_port=max_port, max_tries=100)
            logger.info(self.host_info())
        except zmq.error.ZMQError as err:
            if err.strerror == 'Address in use':
                self.socket.close()
                # Bug fix: self.port is never assigned when binding fails,
                # so the old message referencing it raised AttributeError.
                logger.info(f'No free watcher port in range {min_port}-{max_port}')
                return False
            else:
                raise
        # Publish the chosen port so other processes can find this server.
        with open(self.shell.logdir.logpath / 'cmdserver.json', 'w') as fp:
            info = {'port': self.port}
            json.dump(info, fp)
        self.socket_loop = True
        self.server_thread = threading.Thread(target=self.recv_socket_socket_loop, args=(qapp.handover,))
        self.server_thread.daemon = True  # setDaemon() is deprecated
        self.server_thread.start()
        return True

    def host_info(self):
        """Return a human readable summary of the port and host addresses."""
        hostname_ex, aliaslist, ipaddrlist = socket.gethostbyname_ex(socket.gethostname())
        message = []
        message.append(f'Watcher port : {self.port}')
        message.append(f'Hostname : {hostname_ex}')
        message.append(f'Aliases : {aliaslist}')
        message.append(f'IP addresses : {ipaddrlist}')
        return '\n'.join(message)

    def start_queue_loop(self, qapp):
        """Start the thread serving commands posted on ``cmd_queue``."""
        self.queue_loop = True
        self.cmd_queue_thread = threading.Thread(target=self.recv_queue_socket_loop, args=(qapp.handover,))
        self.cmd_queue_thread.daemon = True  # setDaemon() is deprecated
        self.cmd_queue_thread.start()

    def stop(self):
        """Stop the socket loop by sending it a 'close' request."""
        self.socket_loop = False
        self.socket = context.socket(zmq.REQ)
        self.socket.connect(f"tcp://localhost:{self.port}")
        self.socket.send_pyobj('close')
        self.socket.close()

    @staticmethod
    def open_images(*image_paths):
        """Open every given path as a new image in the gui."""
        for image_path in image_paths:
            gui.img.open(image_path, new=True)

    @staticmethod
    def connect_process(cqs_config=None):
        """Attach a child console to this instance's command queues."""
        from .tasks import ZmqQueues
        print(f'cqs_config:{cqs_config}')
        if cqs_config is None:
            cqs = conf.config_objects['cqs']
        else:
            cqs = ZmqQueues.from_json(cqs_config)
            cqs.setup_as_client()
        gui.qapp.panels['console'][0].get_container().window().hide()
        gui.qapp.panels.select_or_new('console', None, 'child', args=(cqs,))
        return cqs_config

    @staticmethod
    def connect_zmq_process(cqs_config=None):
        """Attach a child console over ZMQ queues; return the queue config."""
        from .tasks import ZmqQueues
        if cqs_config is None:
            cqs = ZmqQueues()
            cqs.setup_as_server()
        else:
            cqs = ZmqQueues.from_json(cqs_config)
            cqs.setup_as_client(cqs.hostname_ex)
        gui.qapp.panels['console'][0].get_container().window().hide()
        gui.qapp.panels.select_or_new('console', None, 'child', args=(cqs,))
        return cqs.to_json()

    @staticmethod
    def execute_file(init_file, console_id):
        """Execute a script file in the given console."""
        gui.console.execute_file(init_file, console_id)

    @staticmethod
    def execute_code(init_code, console_id):
        """Execute a code string in the given console."""
        from .. import gui
        gui.console.execute_code(init_code, console_id)

    @staticmethod
    def start_kernel(kerneltype='ipykernel', connectfile='', rundir=None, child=True, threaded=False):
        """Start a jupyter kernel, optionally in a child console."""
        from .. import gui
        from ..external import jupyter
        if child:
            gui.console.child(jupyter.start_kernel, kerneltype, connectfile, rundir, threaded)
        else:
            pass

    def recv_socket_socket_loop(self, handover):
        """Serve requests on the REP socket until 'close' is received."""
        while self.socket_loop:
            request = self.socket.recv_pyobj()
            if request == 'close':
                break
            cmd, args = request['cmd'], request['args']
            answer = self.execute_command(handover, cmd, args)
            # A REP socket must answer every request; echo the request
            # when the command produced no explicit answer.
            self.socket.send_pyobj(answer or request)
        self.socket.close()
        logger.info('Watcher stopped')  # typo 'stoppped' fixed

    def recv_queue_socket_loop(self, handover):
        """Serve requests posted on ``cmd_queue`` until 'close' is received."""
        while self.queue_loop:
            request = self.cmd_queue.get()
            if request == 'close':
                break
            cmd, args = request['cmd'], request['args']
            # Queue requests have no reply channel; the answer is dropped.
            self.execute_command(handover, cmd, args)

    def execute_command(self, handover, cmd, args):
        """Dispatch ``cmd`` to the gui thread; return its answer, if any."""
        if cmd is None:
            return
        handlers = {
            'open_images': CommandServer.open_images,
            'start_kernel': CommandServer.start_kernel,
            'connect_process': CommandServer.connect_process,
            'connect_zmq_process': CommandServer.connect_zmq_process,
            'execute_file': CommandServer.execute_file,
            'execute_code': CommandServer.execute_code,
        }
        handler = handlers.get(cmd)
        # Unknown commands fall through and return None, as before.
        if handler is not None:
            return handover.send(True, handler, *args)
class CommandClient(object):
    """ZMQ REQ client sending a single command to a CommandServer.

    Implements a lazy-pirate style retry: poll with a timeout and
    recreate the socket between retries.
    """

    def __init__(self, port=None, host='localhost'):
        self.timeout = 10000  # poll timeout in milliseconds
        self.retries = 1
        self.port = port or config.get('watcher_port', 5998)
        self.host = host

    def send(self, message, timeout=None, retries=None):
        """Send ``message`` and return the server's reply.

        Returns None when the server did not answer within ``retries``
        attempts of ``timeout`` milliseconds each.
        """
        timeout = timeout or self.timeout
        retries_left = retries or self.retries
        server_endpoint = f"tcp://{self.host}:{self.port}"
        # Consistency fix: use the module logger, not the root logger.
        logger.info(f"Connecting to server {server_endpoint}...")
        self.socket = context.socket(zmq.REQ)
        self.socket.connect(server_endpoint)
        logger.debug(f"Sending {message}")
        self.socket.send_pyobj(message)
        try:
            while True:
                if (self.socket.poll(timeout) & zmq.POLLIN) != 0:
                    return self.socket.recv_pyobj()
                retries_left -= 1
                logger.warning("No response from server")
                # Socket is confused. Close and remove it.
                self.socket.setsockopt(zmq.LINGER, 0)
                self.socket.close()
                if retries_left == 0:
                    logger.error("Server seems to be offline, abandoning")
                    return None
                logger.info("Reconnecting to server...")
                # Create new connection
                self.socket = context.socket(zmq.REQ)
                self.socket.connect(server_endpoint)
                logger.info("Resending (%s)", message)
                self.socket.send_pyobj(message)
        finally:
            # Resource fix: the socket used to leak on a successful reply.
            if not self.socket.closed:
                self.socket.setsockopt(zmq.LINGER, 0)
                self.socket.close()
thocoo/gamma-desk | gdesk/rectable/base.py | <filename>gdesk/rectable/base.py
import textwrap
import shutil
from collections import OrderedDict
import numpy as np
from .styles import styles
# Minimum growth increment (rows) for the backing structured array.
INCR = 100
# Default render width: the current terminal width minus a small margin.
MAXWIDTH = shutil.get_terminal_size().columns - 4
DEFAULT_STYLE = 'rst-simple'
class ColumnAttributes(object):
    """Small mapping of user-defined attributes attached to one column."""

    def __init__(self, rectable, colname):
        self._rectable = rectable
        self._colname = colname
        self._attrs = {}

    def keys(self):
        """Return the attribute names as a tuple."""
        return tuple(self._attrs)

    def __getitem__(self, key):
        return self._attrs[key]

    def __setitem__(self, key, value):
        self._attrs[key] = value
class ColumnInfo(object):
    """Accessor object for a single named column of a RecordTable."""

    def __init__(self, rectable, colname):
        self.rectable = rectable
        self.colname = colname
        self.attrs = ColumnAttributes(self, colname)

    def distinct(self):
        """Return the unique values occurring in this column."""
        return np.unique(self.rectable.sa[self.colname])

    def get_vector(self):
        """Return the column data from the table's structured array."""
        return self.rectable.sa[self.colname]

    def set_vector(self, value):
        """Assign new data to the column."""
        self.rectable.sa[self.colname] = value

    # Read/write access to the underlying column data.
    vector = property(get_vector, set_vector)
class More(object):
    """Console pager over a block of text, ``limit`` lines at a time.

    ``repr()`` renders the current page; accessing the ``more``
    attribute advances one page and prints it.
    """

    def __init__(self, text):
        self.text = text
        self.all_lines = self.text.splitlines()
        self.skip = 0    # index of the first line of the current page
        self.limit = 20  # page size in lines

    def _window(self):
        # Current page: at most ``limit`` lines starting at ``skip``.
        return '\n'.join(self.all_lines[self.skip:self.skip + self.limit])

    @property
    def more(self):
        # Advance one page and print it (triggered by attribute access).
        self.skip += self.limit
        print(self._window())

    def __repr__(self):
        return self._window()
class RecordTable(object):
    """Growable table backed by a numpy structured array.

    The backing array ``_strarr`` over-allocates in steps of ``INCR``;
    ``_stop`` marks the number of valid rows.  ``sa`` exposes the valid
    slice.  Rendering is done by :meth:`tabulate`.
    """

    def __init__(self, fields=[], data=None, size=0, dtype=None):
        """
        fields = ['foo', 'bar', 'baz']
        or
        fields = [('foo', 'i4'), ('bar', 'f4'), ('baz', 'S10')]

        data may be a numpy array or (duck-typed) a pandas DataFrame.
        """
        # NOTE(review): mutable default ``fields=[]`` — only iterated, never
        # mutated, so harmless in practice.
        # Plain string fields become object ('O') columns.
        dtype = [(field, 'O') if isinstance(field, str) else field for field in fields] if dtype is None else dtype
        if data is None:
            self._strarr = np.empty(size, dtype=dtype)
        elif isinstance(data, np.ndarray):
            self._strarr = np.array(data, dtype=dtype)
            size = data.size
        else:
            #if data is dataframe
            df = data
            #Remove index??
            self._strarr = df.to_records(index=False)
            size = len(df)
        # Number of valid rows (the backing array may be larger).
        self._stop = size
        self.base = None
        # Row currently being built via the ``|`` cell syntax.
        self.active_row = None
        self.maxcolwidth = None
        self.maxwidth = MAXWIDTH
        # When True, embedded newlines in cells are kept as line breaks.
        self.keepnextlines = True
        self._colinfo = OrderedDict()
        for colname in self._strarr.dtype.names:
            self._colinfo[colname] = ColumnInfo(self, colname)
        self._index_colnames = []

    def set_index(self, colnames):
        """Declare one or more columns as the table index."""
        if isinstance(colnames, str):
            self._index_colnames = [colnames]
        else:
            assert isinstance(colnames, tuple) or isinstance(colnames, list)
            self._index_colnames = list(colnames)

    def get_column_info(self, colname):
        """Return the ColumnInfo accessor for ``colname``."""
        return self._colinfo[colname]

    @classmethod
    def empty(cls, dtype, size=0):
        # NOTE(review): this never returns ``self`` and ignores both
        # arguments — looks unfinished; callers get None.  TODO confirm
        # intent before fixing.
        self = cls()

    def get_size(self):
        return self._stop

    def set_size(self, length):
        # Grow the backing array by at least INCR when needed; shrinking
        # only moves ``_stop``, the storage is kept.
        incr = (length - len(self._strarr))
        min_incr = max(incr, INCR)
        self._stop = length
        if self._stop > len(self._strarr):
            self._strarr = np.resize(self._strarr, len(self._strarr) + min_incr)

    # Number of valid rows; assignment grows the table.
    size = property(get_size, set_size)

    @property
    def sa(self):
        """The valid slice of the backing structured array."""
        return self._strarr[0:self._stop]

    @property
    def ra(self):
        """The valid rows as a numpy record array (no copy)."""
        return np.rec.array(self.sa, copy=False)

    def copy(self):
        """Return a new RecordTable with a copy of the valid rows."""
        return RecordTable(data=self.sa, dtype=self.sa.dtype)

    @property
    def df(self):
        """The valid rows as a pandas DataFrame (pandas imported lazily)."""
        from pandas import DataFrame
        return DataFrame(self.sa)

    def set_header(self, header, dtypes=None):
        """Rename the columns; extra names create new object columns."""
        curr_colnames = self.colnames
        if len(header) > len(curr_colnames):
            for colname in header[len(curr_colnames):]:
                self.add_column(colname)
        self.colnames = header

    def __truediv__(self, colname):
        # ``table / 'name'`` appends a column and allows chaining.
        self.add_column(colname)
        return self

    def add_record(self, *record_items, **record_dict):
        """Append a record given positionally or as keyword arguments."""
        if len(record_dict) > 0:
            self.add_record_dict(record_dict)
        else:
            self.add_record_tuple(record_items)

    def add_record_dict(self, record_as_dict):
        # Reorder the dict values to the table's column order.
        record = tuple(record_as_dict[field] for field in self.colnames)
        self.add_record_tuple(record)

    def add_record_tuple(self, record):
        # Assigning one past the end grows the table (see __setitem__).
        self[self.size] = record

    def add_row(self, row):
        self.add_record_tuple(tuple(row))

    def add_column(self, name, title=None, data=None, dtype=object, method='linear'):
        """Append a column, optionally filled with ``data``.

        method='linear' aligns data with existing rows (growing the table
        if data is longer); method='orthogonal' forms the cartesian
        product of existing rows with the data values.
        """
        source = self.sa
        existing_descr = source.dtype.descr
        # An empty table reports a single unnamed float column; drop it.
        existing_descr = [] if existing_descr == [('','<f8')] else existing_descr
        # Rebuild the array with the extra ((title, name), dtype) field.
        self._strarr = np.empty(len(source), dtype=existing_descr+[((title,name), dtype)])
        colnames = source.dtype.names
        if not colnames is None:
            for colname in colnames:
                self._strarr[colname] = source[colname]
        if not data is None:
            m = len(data)
            if method == 'linear':
                if m > self.size:
                    self.size = m
                    self.sa[name] = data
                elif m < self.size:
                    self.sa[name][:m] = data
                else:
                    self.sa[name] = data
            elif method == 'orthogonal':
                prior_n = self.size
                self.size = self.size * m
                self._strarr = np.tile(self.sa, m)
                self.sa[name] = np.repeat(data, prior_n)

    def __getitem__(self, slices):
        """Index by mask table, column name, int or slice.

        Array-like results are wrapped in a new RecordTable; a column
        name returns its ColumnInfo accessor.
        """
        if isinstance(slices, RecordTable):
            # A RecordTable used as index acts as a boolean mask.
            slices = slices.sa.view(bool)
        elif isinstance(slices, str):
            #result = self.sa.__getitem__(slices)
            result = self.get_column_info(slices)
            return result
        result = self.sa.__getitem__(slices)
        if isinstance(result, np.ndarray):
            arr = RecordTable(dtype=result.dtype)
            arr._strarr = result
            arr._stop = result.size
            return arr
        else:
            return result

    def __setitem__(self, slices, values):
        # Assigning past the end grows the table first.
        if isinstance(slices, int) and slices > (self.size-1):
            self.size = slices + 1
        self.sa.__setitem__(slices, values)

    def get_colnames(self):
        if self._strarr.dtype.names is None:
            return []
        return self._strarr.dtype.names

    def set_colnames(self, colnames):
        self._strarr.dtype.names = colnames

    colnames = property(get_colnames, set_colnames)
    # Alias kept for backward compatibility.
    field_names = colnames

    def __str__(self):
        return self.tabulate(style=DEFAULT_STYLE)

    def _operate_as_sa(self, func, other):
        # Apply a numpy comparison/logic op between the two tables' arrays.
        # NOTE(review): ``result`` is passed as the ``fields`` positional
        # argument of RecordTable(), not as ``data`` — the result data is
        # apparently dropped; presumably ``data=result`` was meant.
        # TODO confirm against callers before changing.
        if isinstance(other, RecordTable):
            other = other.sa
        result = func(self.sa, other)
        return RecordTable(result, dtype=[('mask', bool)])

    def __or__(self, cell):
        # NOTE(review): this definition is dead code — it is silently
        # shadowed by the second ``__or__`` defined further down.
        if not self.active_row is None:
            self.active_row.end()
        self.active_row = self.new_record()
        self.active_row.add_cell(cell)
        return self.active_row

    def __eq__(self, other):
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable (Python sets __hash__ to None).
        return self._operate_as_sa(np.ndarray.__eq__, other)

    def __ne__(self, other):
        return self._operate_as_sa(np.ndarray.__ne__, other)

    def __lt__(self, other):
        return self._operate_as_sa(np.ndarray.__lt__, other)

    def __le__(self, other):
        return self._operate_as_sa(np.ndarray.__le__, other)

    def __gt__(self, other):
        return self._operate_as_sa(np.ndarray.__gt__, other)

    def __ge__(self, other):
        return self._operate_as_sa(np.ndarray.__ge__, other)

    def __and__(self, other):
        return self._operate_as_sa(np.ndarray.__and__, other)

    def __or__(self, other):
        # This definition wins over the cell-appending __or__ above.
        return self._operate_as_sa(np.ndarray.__or__, other)

    def __xor__(self, other):
        return self._operate_as_sa(np.ndarray.__xor__, other)

    def __len__(self):
        return self.get_size()

    def new_record(self):
        """Return a Record builder bound to this table."""
        return Record(self)

    def end(self):
        """Finish and commit the row currently being built, if any."""
        if not self.active_row is None:
            self.active_row.end()
        self.active_row = None

    def tabulate(self, style='rst-simple', haligns='l', valigns='t', maxwidth=None, debug=False, auto_index='pos'):
        """Render the table as text using one of the registered styles.

        haligns/valigns are per-column alignment strings ('l', 'c', 'r'
        and 't', 'c', 'b'); the last character is repeated for the
        remaining columns.  auto_index adds a leading row-number column
        titled with its value.  Columns that do not fit in maxwidth are
        wrapped into extra sub-tables.
        """
        if not style in styles.keys():
            raise AttributeError(f"Style {style} doesn't exists. Choose between:\n{styles.keys()}")
        style_param = styles[style]
        # ``art`` rows: 0=top rule, 1=header, 2=header rule, 3=data row,
        # 4=row separator, 5=bottom rule; each entry is (left, fill,
        # mid-join, right).
        art = style_param['art']
        hhaligns = style_param['hhaligns']
        # Pad alignment strings to one character per column.
        hhaligns = hhaligns.ljust(len(self.colnames), hhaligns[-1])
        haligns = haligns.ljust(len(self.colnames), haligns[-1])
        valigns = valigns.ljust(len(self.colnames), valigns[-1])
        align_map = dict(l = '<', c = '^', r = '>')
        def wrap_paragraph(text):
            # Wrap each source line separately, keeping line breaks.
            lines = []
            for line in text.splitlines():
                lines.extend(txtwr.wrap(line))
            return [''] if len(lines) == 0 else lines
        def wrap_ignore_new_lines(text):
            lines = txtwr.wrap(text)
            return [''] if len(lines) == 0 else lines
        def lines_html(text):
            return [text.replace('\n', '<br>')]
        # Every cell is stored as a list of wrapped lines.
        dtype_obj = [(colname, 'O') for colname in self.colnames]
        if auto_index:
            dtype_obj = [('AUTO_INDEX', 'O')] + dtype_obj
        arr_str = np.empty(self.size, dtype=dtype_obj)
        if auto_index:
            arr_str['AUTO_INDEX'] = [[str(item)] for item in range(self.size)]
        maxwidth = maxwidth or self.maxwidth
        maxcolwidth = maxwidth if self.maxcolwidth is None else self.maxcolwidth
        # Reserve room for the table borders (and the index column).
        if auto_index:
            maxcolwidth_index = max(3, max(max(len(line) for line in field) for field in arr_str['AUTO_INDEX']))
            maxcolwidth = min(maxwidth - len(art[3][0]) - len(art[3][3]) - maxcolwidth_index - len(art[3][2]), maxcolwidth)
        else:
            maxcolwidth = min(maxwidth - len(art[3][0]) - len(art[3][3]), maxcolwidth)
        txtwr = textwrap.TextWrapper(maxcolwidth)
        if style == 'html':
            lines_wrapper = lines_html
        else:
            if self.keepnextlines:
                lines_wrapper = wrap_paragraph
            else:
                lines_wrapper = wrap_ignore_new_lines
        for colname in self.colnames:
            arr_str[colname] = [lines_wrapper(str(item)) for item in self.sa[colname]]
        colnames = list(self.colnames)
        titles = list(self.colnames)
        if auto_index:
            colnames = ['AUTO_INDEX'] + colnames
            titles = [auto_index] + titles
            hhaligns = 'r' + hhaligns
            haligns = 'r' + haligns
            valigns = 't' + valigns
        # Column width = widest of title and any wrapped cell line.
        if self.size == 0:
            colwidths = [len(title) for title in titles]
        else:
            colwidths = []
            for colname, title in zip(colnames, titles):
                colwidths.append(max(len(title), max(max(len(line) for line in field) for field in arr_str[colname])))
        result = ''
        def add_line(s):
            nonlocal result
            result += s + '\n'
        def calc_width_for_selection(selection):
            # Total rendered width of the given column positions.
            width = len(art[3][0]) + len(art[3][3])
            width += sum(colwidths[pos] + len(art[3][2]) for pos in selection)
            width -= len(art[3][2])
            return width
        # Greedily partition columns into groups that fit in maxwidth;
        # the index column (when present) is repeated in every group.
        selections = []
        if auto_index:
            minumum_selection = [0]
        else:
            minumum_selection = []
        leftover_selection = list(range(len(colnames)))
        [leftover_selection.pop(pos) for pos in minumum_selection]
        start = 0
        stop = 0
        while stop <= len(leftover_selection):
            stop += 1
            scan_selection = minumum_selection.copy()
            scan_selection.extend(leftover_selection[start:stop])
            width = calc_width_for_selection(scan_selection)
            if debug:
                print(f'Start {start}; Stop {stop}; Selection {scan_selection}; Width {width}')
            if (width > maxwidth):
                selections.append(scan_selection[:-1])
                start = stop - 1
            elif stop > len(leftover_selection):
                selections.append(scan_selection)
        if debug:
            print(selections)
        # Render each column group as its own sub-table.
        for selection in selections:
            if debug:
                add_line(''.join(str(i % 10) for i in range(maxwidth)))
            selcolwidth = [colwidths[pos] for pos in selection]
            selhaligns = [haligns[pos] for pos in selection]
            selhhaligns = [hhaligns[pos] for pos in selection]
            seltitles = [titles[pos] for pos in selection]
            if not art[0] is None:
                add_line(art[0][0] + art[0][2].join(art[0][1] * colwidth for colwidth in selcolwidth) + art[0][3])
            if not art[1] is None:
                head_template = art[1][0] + art[1][2].join(f"{{{i}:{align_map[align]}{width}}}" for i, (width, align) in enumerate(zip(selcolwidth, selhhaligns))) + art[1][3]
                add_line(head_template.format(*seltitles))
            if not art[2] is None:
                add_line(art[2][0] + art[2][2].join(art[2][1] * colwidth for colwidth in selcolwidth) + art[2][3])
            row_template = art[3][0] + art[3][2].join(f"{{{i}:{align_map[align]}{width}}}" for i, (width, align) in enumerate(zip(selcolwidth, selhaligns))) + art[3][3]
            for ind, record in enumerate(arr_str):
                selrecord = [record[pos] for pos in selection]
                last = ind == (arr_str.size-1)
                # A record spans as many text lines as its tallest cell.
                line_counts = [len(field) for field in selrecord]
                max_line_count = max(line_counts)
                for linenr in range(max_line_count):
                    line = []
                    for fnr, field in enumerate(selrecord):
                        # NOTE(review): valigns is indexed by position in
                        # the selection, not by the original column index
                        # — TODO confirm this is intended.
                        valign = valigns[fnr]
                        if valign == 't':
                            start = 0
                            stop = len(field)
                        elif valign == 'c':
                            start = (max_line_count - len(field)) // 2
                            stop = start + len(field)
                        elif valign == 'b':
                            start = max_line_count - len(field)
                            stop = max_line_count
                        if start <= linenr < stop:
                            line.append(field[linenr-start])
                        else:
                            line.append('')
                    add_line(row_template.format(*line))
                if not last and not art[4] is None:
                    add_line(art[4][0] + art[4][2].join(art[4][1] * colwidth for colwidth in selcolwidth) + art[4][3])
                elif last and not art[5] is None:
                    add_line(art[5][0] + art[5][2].join(art[5][1] * colwidth for colwidth in selcolwidth) + art[5][3])
        return result

    def get_html_string(self):
        """Render the table as an html <table> string."""
        htmlstr = '<table>\n' + self.tabulate(style='html', maxwidth=65536) + '</table>\n'
        return htmlstr

    def to_clipboard_html(self):
        """Put an html rendering of the table on the clipboard."""
        from .htmlclip import PutHtml
        htmlstr = '<table>\n' + self.tabulate(style='html') + '</table>\n'
        PutHtml(htmlstr)

    def to_clipboard_pre_formatted(self):
        """Put the plain-text rendering on the Windows clipboard."""
        import win32clipboard as cb
        try:
            cb.OpenClipboard()
            cb.EmptyClipboard()
            prestr = self.tabulate()
            cb.SetClipboardText(prestr)
        finally:
            cb.CloseClipboard()

    def show(self, ipython=False):
        """Print the table, or display it as html inside IPython."""
        if ipython:
            ht = self.get_html_string()
            from IPython.core.display import display, HTML
            display(HTML(ht))
        else:
            print(self)
class Record(object):
    """A table row under construction, filled cell by cell via ``|``."""

    def __init__(self, table):
        self.table = table
        self.cells = []

    def add_cell(self, cell):
        """Append one cell value to the pending row."""
        self.cells.append(cell)

    def __or__(self, cell):
        # ``record | a | b`` appends cells and allows chaining.
        self.add_cell(cell)
        return self

    def end(self):
        """Commit the collected cells to the owning table."""
        self.table.add_record(self.cells)
|
thocoo/gamma-desk | gdesk/ezdock/dockwidgets.py | import collections
import importlib
import pprint
import logging
import pathlib
from qtpy.QtWidgets import *
from qtpy.QtCore import *
from qtpy.QtGui import *
from qtpy import QtCore, QtGui, QtWidgets
from .. import gui, config
from ..panels.base import BasePanel
from .laystruct import LayoutStruct
from .docks import DockBase
from .boxes import DockHBox, DockVBox, DockBox
# Resource directory (icons etc.) taken from the application config.
respath = pathlib.Path(config['respath'])
logger = logging.getLogger(__name__)
# Feature toggles used while developing the dock framework.
DEBUG = {'Use_ScrollBox': True}
class DockTabBar(QTabBar):
    """Tab bar supporting drag-to-detach and per-tab corner widgets."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setMovable(True)
        fontmetric = QtGui.QFontMetrics(self.font())
        # Cached font height, used to size the per-tab buttons.
        self.fontheight = fontmetric.height()
        # Window currently dragged with the right mouse button, if any.
        self.movingWindow = None
        self.movingWindowOffset = None

    def add_tab_menu(self, index, leftWidget, rightWidget=None):
        """Place custom widgets on the left/right side of tab ``index``."""
        if index >= 0:
            if not leftWidget is None:
                self.setTabButton(index, QTabBar.LeftSide, leftWidget)
            if not rightWidget is None:
                self.setTabButton(index, QTabBar.RightSide, rightWidget)

    def mouseDoubleClickEvent(self, event):
        # Double click without modifiers detaches the current tab into
        # its own window, positioned at the click location.
        ezm = gui.qapp.panels.ezm
        widget = self.parent().currentWidget()
        container = widget.get_container()
        globalpos = widget.mapToGlobal(self.pos())
        if event.modifiers() == 0:
            window, node = self.detach()
            if not window is container.parent():
                window.move(globalpos)
            return

    def mousePressEvent(self, event):
        super().mousePressEvent(event)
        widget = self.parent().currentWidget()
        # Remember where the press happened; mouseMoveEvent uses it as a
        # drag threshold reference.
        self.drag_start_pos = event.pos()
        if isinstance(widget, BasePanel):
            if event.button() == Qt.LeftButton:
                widget.select()

    def mouseReleaseEvent(self, event):
        widget = self.parent().currentWidget()
        if event.button() == Qt.RightButton:
            if not self.movingWindow is None:
                # Finish an in-progress right-button window drag.
                self.movingWindow.windowPlaced()
                self.movingWindow = None
            elif isinstance(widget, BasePanel):
                #Show the panel menu
                menu = QMenu('menu')
                for child in widget.menuBar().children():
                    #One of the QMenu children seems to be itself
                    if isinstance(child, QMenu) and child.title() != '':
                        menu.addMenu(child)
                menu.exec_(QCursor.pos())
        else:
            super().mouseReleaseEvent(event)

    def mouseMoveEvent(self, event):
        movePoint = self.drag_start_pos - event.pos()
        # Squared distance; 32 acts as a small drag-start threshold.
        moveDistance = movePoint.x() ** 2 + movePoint.y() ** 2
        if event.buttons() == Qt.RightButton and moveDistance > 32:
            if self.movingWindow is None:
                # Start the drag: detach into a window and move it along
                # with the cursor.
                pos = self.mapToGlobal(QPoint(0, 0))
                self.movingWindowOffset = QtGui.QCursor.pos() - pos
                window, node = self.detach()
                window.startMoving()
                self.movingWindow = window
            else:
                self.movingWindow.move(QtGui.QCursor.pos() - self.movingWindowOffset)
        else:
            super().mouseMoveEvent(event)

    def detach(self):
        """Detach the current tab (panel or layout) into a new window.

        Returns the (window, node) pair produced by the container.
        """
        if isinstance(self.parent(), DockTag):
            widget = self.parent()
        elif isinstance(self.parent(), DockTab):
            widget = self.parent().currentWidget()
        if isinstance(widget, BasePanel):
            #It this case still used?
            logger.debug('Detaching BasePanel')
            container = self.parent().get_container()
            geo = widget.geometry()
            window, node = container.detach('panel', widget.category, widget.panid, True, geo.width(), geo.height())
        else:
            logger.debug('Detaching Layout')
            window, node = widget.detach()
        return window, node
class DockTabBase(QTabWidget, DockBase):
    """Common base of DockTab/DockTag: a QTabWidget with dock helpers.

    Adds the top-right 'pin' menu (duplicate, split, screenshot, ...)
    and an optional top-left collapse button for 'h'/'v' boxes.
    """

    def __init__(self, parent=None, collapse=None):
        QTabWidget.__init__(self, parent=parent)
        if DEBUG['Use_ScrollBox']:
            # Top-right corner button opening the 'pin' actions menu.
            self.toprightbtn = QtWidgets.QToolButton(self)
            self.toprightbtn.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'application_split.png')))
            #self.toprightbtn.setText('...')
            self.toprightbtn.setPopupMode(QtWidgets.QToolButton.InstantPopup)
            self.setCornerWidget(self.toprightbtn, Qt.TopRightCorner)
            self.pinMenu = QtWidgets.QMenu('pin')
            if config.get("scroll_area", False):
                self.pinMenu.addAction(QtWidgets.QAction('Move to Pinned/Scroll Area', self, triggered=self.moveToOtherArea))
            self.pinMenu.addAction(QtWidgets.QAction('Duplicate', self, triggered=self.duplicate,
                icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'application_double.png'))))
            self.pinMenu.addAction(QtWidgets.QAction('Split Horizontal', self, triggered=self.splitHorizontal,
                icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'layouts_split.png'))))
            self.pinMenu.addAction(QtWidgets.QAction('Split Vertical', self, triggered=self.splitVertical,
                icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'layouts_split_vertical.png'))))
            self.pinMenu.addSeparator()
            action = QtWidgets.QAction('Screenshot to Clipboard', self, triggered=self.screenShot)
            action.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'lcd_tv_image.png')))
            self.pinMenu.addAction(action)
            self.pinMenu.addAction(QtWidgets.QAction('Toggle global Menu usage', self, triggered=self.toggleMenu,
                icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'menubar.png'))))
            self.pinMenu.addAction(QtWidgets.QAction('Show/Hide Status Bar', self, triggered=self.toggleStatusBar,
                icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'status_bar.png'))))
            self.toprightbtn.setMenu(self.pinMenu)
        if collapse == 'h':
            # Horizontal collapse button: present but currently disabled.
            self.topleftbtn = QtWidgets.QToolButton(self)
            self.topleftbtn.setIcon(gui.qapp.resizeicon)
            self.topleftbtn.setCheckable(True)
            self.topleftbtn.setDisabled(True)
            #self.topleftbtn.clicked.connect(self.toggleHorizontalContent)
            self.setCornerWidget(self.topleftbtn, Qt.TopLeftCorner)
        elif collapse == 'v':
            # Vertical collapse: delegates to the parent box's collapse().
            self.topleftbtn = QtWidgets.QToolButton(self)
            self.topleftbtn.setIcon(gui.qapp.resizeicon)
            self.topleftbtn.setCheckable(True)
            self.topleftbtn.clicked.connect(lambda: self.parent().collapse(self))
            self.setCornerWidget(self.topleftbtn, Qt.TopLeftCorner)
        self.setTabBarAutoHide(False)
        self.setTabBar(DockTabBar())
        self.title = None
        self.setAutoFillBackground(True)
        self.collapsed = False

    def duplicate(self):
        """Duplicate the current panel and show the copy."""
        panel = self.currentWidget()
        newpanel = panel.duplicate()
        newpanel.show_me()

    def splitHorizontal(self):
        """Duplicate the current panel and insert the copy to its right."""
        panel = self.currentWidget()
        container = panel.get_container()
        newpanel = panel.duplicate(floating=True)
        container.insert((newpanel.category, newpanel.panid), 'right', (panel.category, panel.panid))
        return panel.panid

    def splitVertical(self):
        """Duplicate the current panel and insert the copy below it."""
        panel = self.currentWidget()
        container = panel.get_container()
        newpanel = panel.duplicate(floating=True)
        container.insert((newpanel.category, newpanel.panid), 'bottom', (panel.category, panel.panid))
        return panel.panid

    def moveToOtherArea(self):
        """Move this tab widget between the pinned and scroll areas."""
        self.get_dock_box().moveToOtherArea(self)

    def collapseVertical(self):
        """Toggle between collapsed (tab bar only) and full height."""
        if self.collapsed:
            # Expand: restore pages and the heights saved on collapse.
            for pos in range(self.count()):
                self.widget(pos).show()
            self.setMinimumHeight(self.priorMinimumHeight)
            self.setMaximumHeight(self.priorMaximumHeight)
            try:
                self.parent().changeWidgetSize(self, self.priorHeight, fixed=False)
            except:
                pass
            self.collapsed = False
            self.topleftbtn.setChecked(False)
        else:
            # Collapse: hide pages, shrink to the tab bar height.
            for pos in range(self.count()):
                self.widget(pos).hide()
            self.priorMinimumHeight = self.minimumHeight()
            self.priorMaximumHeight = self.maximumHeight()
            self.priorHeight = self.height()
            tabbarheight = self.tabBar().height()
            try:
                self.parent().changeWidgetSize(self, tabbarheight, fixed=True)
                self.setFixedHeight(tabbarheight)
            except:
                self.setFixedHeight(tabbarheight)
            self.collapsed = True
            self.topleftbtn.setChecked(True)

    def set_tab_header(self, widget, title):
        """Attach the bind button (with its bind menu) to the widget's tab."""
        index = self.indexOf(widget)
        bindButton = QToolButton(widget)
        bindButton.setIcon(gui.qapp.bindicon)
        #bindButton.setFixedHeight(26)
        bindButton.setCheckable(True)
        if widget.isSelected():
            bindButton.setChecked(True)
        bindButton.clicked.connect(widget.select)
        bindButton.setPopupMode(QToolButton.MenuButtonPopup)
        bindButton.setMenu(widget.bindMenu)
        bindButton.setFixedHeight(self.tabBar().fontheight + 4)
        gui.qapp.panels.ezm.add_button_to_bindgroup(widget.category, widget.panid, bindButton)
        self.tabBar().add_tab_menu(index, bindButton, None)

    def mouseReleaseEvent(self, event):
        if event.button() == Qt.RightButton:
            # Right click outside the tabs opens the window's panel menu.
            self.get_container().parent().panelMenu.exec_(QCursor.pos())
        super().mouseReleaseEvent(event)

    def screenShot(self):
        """Copy an image of the current page to the clipboard."""
        pixmap = self.currentWidget().grab()
        qimage = pixmap.toImage()
        clipboard = gui.qapp.clipboard()
        clipboard.setImage(qimage)

    def toggleMenu(self):
        """Toggle the current panel between local and global menu bar."""
        panel = self.currentWidget()
        panel.use_global_menu = not panel.use_global_menu
        panel.select()

    def toggleStatusBar(self):
        """Show or hide the current panel's status bar."""
        panel = self.currentWidget()
        statusBar = panel.statusBar()
        if statusBar.isVisible():
            statusBar.hide()
        else:
            statusBar.show()
class DockTab(DockTabBase):
    """Tab widget hosting panels (and layouts) as plain tabs."""

    category = 'tab'

    def __init__(self, parent=None, collapse=None):
        super().__init__(parent, collapse)
        tab_height = self.tabBar().fontheight + 6
        self.setStyleSheet("QTabBar::tab { height: " + str(tab_height) + ";}")

    def addWidget(self, widget, title=None):
        """Add ``widget`` as a tab; panels also get their bind button."""
        self.addTab(widget, title)
        if isinstance(widget, BasePanel):
            self.set_tab_header(widget, title)
class DockTag(DockTabBase):
    """Tab widget variant used to tag layout boxes and panels.

    The background colour hints at the content type: bluish for
    vertical boxes, greenish for horizontal boxes, light grey otherwise.
    """

    category = 'tag'

    def __init__(self, parent=None, collapse=None):
        super().__init__(parent, collapse)

    def addWidget(self, widget, title=None):
        """Add ``widget`` under ``title`` and style the tag accordingly."""
        # Bug fix: the widget used to be passed to addTab() twice (once
        # here and once more at the end of the method); add it only once.
        self.addTab(widget, title)
        pal = self.palette()
        if title.startswith('vbox'):
            self.setTabPosition(QTabWidget.West)
            pal.setColor(QPalette.Background, QColor(192,192,224))
        elif title.startswith('hbox'):
            pal.setColor(QPalette.Background, QColor(192,224,192))
        else:
            self.setStyleSheet("QTabBar::tab { height: " + str(self.tabBar().fontheight + 6) + ";}")
            pal.setColor(QPalette.Background, QColor(242,242,242))
        self.setPalette(pal)
        if isinstance(widget, BasePanel):
            self.set_tab_header(widget, title)
class DockContainer(QWidget):
    def __init__(self, manager, parent=None, name='main'):
        super().__init__(parent=parent)
        #QWidget needs a top level QLayout object
        #Note that a QSplitter is not a QLayout object
        layout = QVBoxLayout()
        layout.setContentsMargins(0,0,0,0)
        layout.setSpacing(0)
        self.taglevel = None
        self.setLayout(layout)
        self.manager = manager
        self.name = name
        # Top-level layout widget produced by update_layout().
        self.laywidget = None
        # (category, panid) pairs of the panels hosted in this container.
        self.panelIds = []
        self.tabindex = 0
        self.vboxindex = 0
        self.hboxindex = 0

    @property
    def all_panels(self):
        # All panels known to the manager, not only the ones hosted here.
        return self.manager.panels

    def is_empty(self, check=False):
        """Return True when no panels are hosted.

        With check=True the layout structure is queried instead of the
        cached panel id list.
        """
        if check:
            return self.get_layout_struct().is_empty()
        else:
            return len(self.panelIds) == 0

    def panel_count(self):
        """Return the number of panels hosted in this container."""
        return len(self.panelIds)

    def update_layout(self, layout_struct):
        """Tear down the current widget tree and rebuild it from ``layout_struct``."""
        if not self.laywidget is None:
            self.detach_panels()
            self.layout().removeWidget(self.laywidget)
        self.tabindex = 0
        self.vboxindex = 0
        self.hboxindex = 0
        parentnode = {'type': 'layout', 'category': 'root'}
        if self.taglevel is None:
            # Hide the tag level for a single-panel layout when configured.
            if layout_struct.root.get('type', None) == 'panel' and config.get("hide_solo_panel", False):
                self.taglevel = 0
            else:
                self.taglevel = 1
        self.laywidget = self.make_layout_widget_branch(layout_struct.root, parentnode)
        self.laywidget.show()
        #self.show_all(self.laywidget)
        self.layout().addWidget(self.laywidget)

    def detach_panels(self):
        """Detach every hosted panel from its parent widget and clear the list."""
        for cat, panid in self.panelIds:
            try:
                panel = self.all_panels[cat][panid]
                panel.detach()
            except KeyError:
                print(f'Could not find panel {cat}#{panid}')
        self.panelIds.clear()

    def detach_top(self):
        # Detach the root node of the current layout, reusing this window.
        l = self.get_layout_struct()
        return self.detach(l.root['type'], l.root['category'], l.root['id'], False)

    def detach(self, nodetype, category, nodeid, to_new_window=True, width=640, height=480):
        """Remove a node from this container's layout.

        Returns (window, layout): the window now holding the node and
        the popped layout structure.
        """
        drop_layout = self.get_layout_struct()
        place_layout = drop_layout.pop_node(nodetype, category, nodeid) #tag or tab?
        drop_layout.compact()
        place_layout.compact()
        if drop_layout.is_empty():
            # Nothing remains here: reuse this container's own window.
            return self.parent(), place_layout
        self.update_layout(drop_layout)
        window = self.manager.new_window_using_layout(place_layout, width, height, self.parent().name)
        return window, place_layout

    def compact(self):
        """Simplify the layout structure and rebuild the widgets."""
        ls = self.get_layout_struct()
        ls.compact()
        self.update_layout(ls)

    def distribute(self):
        """Compact, redistribute sizes, and rebuild the widgets."""
        ls = self.get_layout_struct()
        ls.compact()
        ls.distribute()
        self.update_layout(ls)
def insert(self, panel, relative_pos, to_panel):
"""
:param dict insert_node: (category, panid)
:param str relative_pos: 'tab', 'top', 'bottom', 'left' or 'right'
:param refpanqualid: (category, panid)
"""
ls = self.get_layout_struct()
ls.compact()
ls.insert_panel(panel, relative_pos, to_panel)
self.update_layout(LayoutStruct())
self.update_layout(ls)
def make_layout_widget_branch(self, node, parentnode=None, ind=None):
if len(node.keys()) == 0:
lay = DockTab()
return lay
elif node['type'] == 'panel':
#No layout required at all
cat = node['category']
panid = node['id']
panel = self.all_panels[cat][panid]
panel.title = panel.short_title
self.panelIds.append((cat, panid))
lay = panel
elif node['type'] == 'layout':
if node['category'] == 'hbox':
self.hboxindex += 1
lay = DockHBox()
lay.title = f'hbox#{self.hboxindex}'
elif node['category'] == 'vbox':
self.vboxindex += 1
lay = DockVBox()
lay.title = f'vbox#{self.vboxindex}'
elif node['category'] == 'tab':
self.tabindex += 1
if parentnode['category'] == 'hbox':
lay = DockTab(None, 'h')
elif parentnode['category'] == 'vbox':
lay = DockTab(None, 'v')
else:
lay = DockTab()
lay.title = f'tab#{self.tabindex}'
else:
raise TypeError(f'Unknown node category {node}')
else:
raise TypeError(f'Unknown node type {node}')
lay.nodeinfo = {'parent': parentnode, 'index': ind}
if isinstance(lay, DockBox):
items = node.get('items', [])
pin_sizes = node.get('sizes', [])
scroll_sizes = node.get('scroll', [])
areas = len(pin_sizes) * [DockBox.PinArea] + len(scroll_sizes) * [DockBox.ScrollArea]
for ind, (item, area) in enumerate(zip(items, areas)):
branch = self.make_layout_widget_branch(item, node, ind)
lay.addWidget(branch, area=area, title=branch.title)
else:
for ind, item in enumerate(node.get('items', [])):
branch = self.make_layout_widget_branch(item, node, ind)
lay.addWidget(branch, title=branch.title)
if node['type'] == 'layout':
if 'sizes' in node.keys():
lay.setSizes(node['sizes'])
if 'scroll' in node.keys():
lay.setSizes(node['scroll'], area=DockBox.ScrollArea)
if 'pinscroll' in node.keys():
lay.setSizes(node['pinscroll'], area=DockBox.SplitArea)
if 'active' in node.keys():
lay.setCurrentIndex(node['active'])
if parentnode['category'] == 'tab':
return lay
if (node['type'] == 'panel' and self.taglevel in [0,1]) or self.taglevel == 2:
if parentnode['category'] == 'hbox':
tag = DockTag(None, 'h')
elif parentnode['category'] == 'vbox':
tag = DockTag(None, 'v')
else:
tag = DockTag()
if self.taglevel == 0:
tag.setTabBarAutoHide(True)
tag.title = lay.title
tag.addWidget(lay, title=lay.title)
lay = tag
return lay
def get_layout_struct(self):
layout_widget = self.laywidget
if not layout_widget is None and layout_widget.parent() == self:
root = DockContainer.from_layout_widget_branch(layout_widget)
ls = LayoutStruct()
ls.root = root
else:
ls = LayoutStruct()
return ls
@staticmethod
def from_layout_widget_branch(layout_widget):
node = {}
if isinstance(layout_widget, (DockHBox, DockVBox)):
if layout_widget.orientation() == Qt.Orientation.Horizontal:
node['type'] = 'layout'
node['category'] = 'hbox'
node['id'] = layout_widget.title
else:
node['type'] = 'layout'
node['category'] = 'vbox'
node['id'] = layout_widget.title
node['sizes'] = layout_widget.sizes()
try:
node['pinscroll'] = layout_widget.sizes(DockBox.SplitArea)
node['scroll'] = layout_widget.sizes(DockBox.ScrollArea)
except:
pass
elif isinstance(layout_widget, DockTab):
node['type'] = 'layout'
node['category'] = 'tab'
node['id'] = layout_widget.title
node['active'] = layout_widget.currentIndex()
elif isinstance(layout_widget, DockTag):
node['type'] = 'layout'
node['category'] = 'tag'
node['id'] = layout_widget.title
else:
panel = layout_widget
node = {'type': 'panel', 'category': panel.category, 'id': panel.panid}
return node
node['items'] = []
if isinstance(layout_widget, DockBox):
for index in range(layout_widget.count(DockBox.PinArea)):
widget = layout_widget.widget(index, DockBox.PinArea)
subnode = DockContainer.from_layout_widget_branch(widget)
node['items'].append(subnode)
for index in range(layout_widget.count(DockBox.ScrollArea)):
widget = layout_widget.widget(index, DockBox.ScrollArea)
subnode = DockContainer.from_layout_widget_branch(widget)
node['items'].append(subnode)
else:
for index in range(layout_widget.count()):
widget = layout_widget.widget(index)
subnode = DockContainer.from_layout_widget_branch(widget)
node['items'].append(subnode)
return node
def show_all(self, layout_widget):
if not (isinstance(layout_widget, QSplitter) or isinstance(layout_widget, (DockTab, DockTag))):
layout_widget.show()
else:
for index in range(layout_widget.count()):
widget = layout_widget.widget(index)
self.show_all(widget)
def cycle_tag_level(self):
if len(self.panelIds) < 2:
self.taglevel = (self.taglevel + 1) % 2
else:
self.taglevel = (self.taglevel + 1) % 3
self.compact()
|
thocoo/gamma-desk | gdesk/graphics/plotview.py | from qtpy import QtCore, QtGui, QtOpenGL, QtWidgets
from .point import Point
from . import functions as fn
# Binding-agnostic signal alias (qtpy maps it to PyQt's pyqtSignal or PySide's Signal).
QtSignal = QtCore.Signal

# NOTE(review): unconditionally True even though OpenGL support is optional;
# the QtOpenGL import above is assumed to have succeeded -- verify.
HAVE_OPENGL = True
class PlotView(QtWidgets.QGraphicsView):
    """
    Re-implementation of QGraphicsView without scrollbars.
    Allow unambiguous control of the viewed coordinate range.
    """
    # Emitted after any transform/pan/zoom change so attached rulers,
    # grids and linked views can refresh themselves.
    matrixUpdated = QtSignal()
    doubleClicked = QtSignal()
    scaleXUpdated = QtSignal()
    panXUpdated = QtSignal()
    scaleYUpdated = QtSignal()
    panYUpdated = QtSignal()

    def __init__(self, parent=None, background='default'):
        super().__init__(parent)
        # There is the experimental options of using OpenGl
        self.useOpenGL(False)
        self.setCacheMode(self.CacheBackground)
        self.setBackground(background)
        self.setFocusPolicy(QtCore.Qt.StrongFocus)
        self.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.setTransformationAnchor(QtWidgets.QGraphicsView.NoAnchor)
        self.setResizeAnchor(QtWidgets.QGraphicsView.NoAnchor)
        #self.setResizeAnchor(QtWidgets.QGraphicsView.AnchorViewCenter)
        self.setAlignment(QtCore.Qt.AlignLeft | QtCore.Qt.AlignTop)
        #self.setViewportUpdateMode(QtWidgets.QGraphicsView.MinimalViewportUpdate)
        self.setViewportUpdateMode(QtWidgets.QGraphicsView.NoViewportUpdate)
        #self.setStyleSheet( "QGraphicsView { border-style: none; }" )
        #self.setMouseTracking(True)
        #self.range = QtCore.QRectF(0, 0, 1, 1)
        # NOTE(review): this list attribute shadows the inherited
        # QGraphicsView.scale() method -- intentional here, but verify callers.
        self.scale = [100 / 2**16, 100 / 2**16]   # [x, y] view pixels per scene unit
        self.center = [2**15, 2**15]              # scene coordinates at the view center
        self.lastMousePos = None
        self.fixScaleX = False    # when True the wheel does not zoom the x axis
        self.fixScaleY = False    # when True the wheel does not zoom the y axis
        fullrange = QtCore.QRectF(-1e6, -1e6, 2e6, 2e6)
        self.setSceneRect(fullrange)
        self.updateMatrix()
        self.initMenu()

    @property
    def auto_zoom(self):
        """
        Check whether the 'Auto Zoom' checkbox is checked.

        :return bool: True when auto zoom is checked / enabled.
        """
        return self.autoAction.isChecked()

    def initMenu(self):
        """Build the right-click context menu with the 'Auto Zoom' toggle."""
        self.menu = QtWidgets.QMenu(self)
        self.autoAction = QtWidgets.QAction('Auto Zoom', self)
        self.autoAction.setCheckable(True)
        self.autoAction.setChecked(True)
        self.autoAction.triggered.connect(self.toggleAutoZoom)
        self.menu.addAction(self.autoAction)

    def toggleAutoZoom(self):
        #just toggle the check boxs
        pass

    def useOpenGL(self, b=True):
        """Swap the viewport between an OpenGL widget and a plain QWidget."""
        if b:
            if not HAVE_OPENGL:
                raise Exception("Requested to use OpenGL with QGraphicsView, but QtOpenGL module is not available.")
            v = QtOpenGL.QGLWidget()
        else:
            v = QtWidgets.QWidget()
        self.setViewport(v)

    def setBackground(self, background):
        """
        Set the background color of the GraphicsView.
        To make the background transparent, use background=None.
        """
        self._background = background
        if background == 'default':
            background = (250, 250, 250)
        if background is None:
            self.setBackgroundRole(QtGui.QPalette.NoRole)
        else:
            brush = fn.mkBrush(background)
            self.setBackgroundBrush(brush)

    def updateMatrix(self, propagate=True):
        """Apply the current scale and center to the view transform.

        NOTE(review): the *propagate* argument is unused -- confirm.
        """
        #t = self.transform()
        self.limitRefreshRange()
        # Pure scale transform; panning is done through centerOn below.
        self.setTransform(QtGui.QTransform(\
            self.scale[0], 0            , 0,\
            0            , self.scale[1], 0,\
            0            , 0            , 1))
        #self.ensureVisible(self.range,0,0)
        self.centerOn(*self.center)
        #self.setSceneRect(self.range)
        #self.fitInView(self.range, QtCore.Qt.IgnoreAspectRatio)
        self.matrixUpdated.emit()
        self.viewport().update()
        #self.updateSceneRect(self.range)

    def limitRefreshRange(self):
        """Shrink the scene rect to the visible area plus a 20 px margin."""
        self.range = QtCore.QRectF()
        self.range.setWidth((self.width() + 20)/ self.scale[0])
        self.range.setHeight((self.height() + 20) / self.scale[1])
        self.range.moveCenter(QtCore.QPointF(*self.center))
        self.setSceneRect(self.range)

    def translate(self, dx, dy):
        """Pan the view center by (dx, dy) scene units.

        NOTE(review): overrides QGraphicsView.translate() with different
        semantics (scene units instead of a transform translation) -- verify.
        """
        self.center = [self.center[0] +dx, self.center[1] + dy]
        self.updateMatrix()
        if dx != 0:
            self.panXUpdated.emit()
        if dy != 0:
            self.panYUpdated.emit()

    def setXCenter(self, pos):
        """Move the horizontal view center to scene x coordinate *pos*."""
        self.center[0] = pos
        self.limitRefreshRange()
        t = self.transform()
        self.setTransform(QtGui.QTransform(\
            self.scale[0], 0      , 0,\
            0            , t.m22(), 0,\
            0            , 0      , 1))
        self.centerOn(*self.center)
        self.scaleXUpdated.emit()
        self.panXUpdated.emit()
        self.matrixUpdated.emit()
        self.viewport().update()

    def setXPosScale(self, pos, scale):
        """Set the x scale and place scene x = *pos* at the view's left edge."""
        if scale == 0:
            # A zero scale would produce a degenerate transform.
            return
        self.scale[0] = scale
        self.center[0] = pos + self.width() / 2.0 / scale
        self.limitRefreshRange()
        t = self.transform()
        self.setTransform(QtGui.QTransform(\
            scale, 0      , 0,\
            0    , t.m22(), 0,\
            0    , 0      , 1))
        self.centerOn(*self.center)
        self.scaleXUpdated.emit()
        self.panXUpdated.emit()
        self.matrixUpdated.emit()
        self.viewport().update()
        #self.updateSceneRect(self.range)

    def setYCenter(self, pos):
        """Move the vertical view center to scene y coordinate *pos*."""
        self.center[1] = pos
        self.limitRefreshRange()
        t = self.transform()
        self.setTransform(QtGui.QTransform(\
            t.m11(), 0            , 0,\
            0      , self.scale[1], 0,\
            0      , 0            , 1))
        self.centerOn(*self.center)
        self.scaleYUpdated.emit()
        self.panYUpdated.emit()
        self.matrixUpdated.emit()
        self.viewport().update()

    def setYPosScale(self, pos, scale):
        """Set the y scale and place scene y = *pos* at the view's top edge."""
        if scale == 0:
            return
        self.scale[1] = scale
        self.center[1] = pos + self.height() / 2.0 / scale
        self.limitRefreshRange()
        t = self.transform()
        self.setTransform(QtGui.QTransform(\
            t.m11(), 0    , 0,\
            0      , scale, 0,\
            0      , 0    , 1))
        self.centerOn(*self.center)
        self.scaleYUpdated.emit()
        self.panYUpdated.emit()
        self.matrixUpdated.emit()
        self.viewport().update()
        #self.updateSceneRect(self.range)

    def getXLimits(self):
        """Return the scene x coordinates of the left and right view edges."""
        x0 = self.center[0] - self.width() / 2 / self.scale[0]
        x1 = self.center[0] + self.width() / 2 / self.scale[0]
        return x0, x1

    def setXLimits(self, x0, x1, left_border=0, right_border=0):
        """Fit the scene x range [x0, x1] inside the view minus the borders (pixels)."""
        scale = (self.width() - left_border - right_border)/ (x1 - x0)
        self.setXPosScale(x0, scale)

    def getYLimits(self):
        """Return the scene y coordinates of the top and bottom view edges."""
        y0 = self.center[1] - self.height() / 2 / self.scale[1]
        y1 = self.center[1] + self.height() / 2 / self.scale[1]
        return y0, y1

    def setYLimits(self, y0, y1, bottom_border=0, top_border=0):
        """Fit the scene y range inside the view minus the borders (pixels).

        NOTE(review): the (y0 - y1) divisor yields a negative scale when
        y1 > y0, flipping the y axis -- presumably intentional; confirm.
        """
        scale = (self.height() - bottom_border - top_border) / (y0 - y1)
        self.setYPosScale(y1, scale)

    def refresh(self):
        """Force a repaint of the viewport."""
        self.viewport().update()

    def setScale(self, sx, sy):
        """Set both axis scales (view pixels per scene unit) and update."""
        self.scale = [sx, sy]
        self.updateMatrix()

    def mousePressEvent(self, ev):
        #QtWidgets.QGraphicsView.mousePressEvent(self, ev)
        if (ev.buttons() == QtCore.Qt.LeftButton):
            # Remember the press position for the panning done in mouseMoveEvent.
            self.lastMousePos = Point(ev.pos())
            self.mousePressPos = ev.pos()
        if (ev.buttons() == QtCore.Qt.RightButton):
            pos = QtGui.QCursor.pos()
            self.menu.exec_(pos)
        self.clickAccepted = ev.isAccepted()

    def mouseMoveEvent(self, ev):
        if self.lastMousePos is None:
            self.lastMousePos = Point(ev.pos())
        delta = Point(ev.pos() - self.lastMousePos.toPoint())
        self.lastMousePos = Point(ev.pos())
        # QtWidgets.QGraphicsView.mouseMoveEvent(self, ev)
        if ev.buttons() in [QtCore.Qt.MidButton, QtCore.Qt.LeftButton]:  ## Allow panning by left or mid button.
            # Convert the pixel delta to scene units before panning.
            px = self.pixelSize()
            tr = -delta * px
            if self.fixScaleX:
                tr[0] = 0
            if self.fixScaleY:
                tr[1] = 0
            self.translate(tr[0], tr[1])

    def mouseDoubleClickEvent(self, event):
        self.doubleClicked.emit()

    def wheelEvent(self, ev):
        """Zoom the non-fixed axes by 1.001**delta around the current center.

        NOTE(review): ev.delta() is the Qt4-style QWheelEvent API (deprecated
        in Qt5) -- confirm the installed binding still provides it.
        """
        # QtWidgets.QGraphicsView.wheelEvent(self, ev)
        if not self.fixScaleX and not self.fixScaleY:
            self.scale[0] = self.scale[0] * 1.001 ** ev.delta()
            self.scale[1] = self.scale[1] * 1.001 ** ev.delta()
            self.updateMatrix()
            self.scaleXUpdated.emit()
            self.panXUpdated.emit()
            self.scaleYUpdated.emit()
            self.panYUpdated.emit()
        elif self.fixScaleX and not self.fixScaleY:
            self.scale[1] = self.scale[1] * 1.001 ** ev.delta()
            self.updateMatrix()
            # NOTE(review): this branch changes the y scale but emits the X
            # signals -- looks like a copy-paste slip; confirm which signals
            # the connected rulers actually expect before changing.
            self.scaleXUpdated.emit()
            self.panXUpdated.emit()
        elif not self.fixScaleX and self.fixScaleY:
            self.scale[0] = self.scale[0] * 1.001 ** ev.delta()
            self.updateMatrix()
            # NOTE(review): x scale changed but panYUpdated emitted --
            # presumably panXUpdated was intended; confirm.
            self.scaleXUpdated.emit()
            self.panYUpdated.emit()

    def pixelSize(self):
        """Return vector with the length and width of one view pixel in scene coordinates"""
        p0 = Point(0,0)
        p1 = Point(1,1)
        tr = self.transform().inverted()[0]
        p01 = tr.map(p0)
        p11 = tr.map(p1)
        return Point(p11 - p01)
|
thocoo/gamma-desk | gdesk/graphics/items.py | import numpy as np
from qtpy import QtCore, QtGui, QtWidgets
from .functions import arrayToQPath
QtSignal = QtCore.Signal
class ItemSignal(object):
    """Minimal single-receiver signal for QGraphicsItem subclasses.

    QGraphicsItem does not inherit QObject, so real Qt signals are not
    available; this object stores one callback and invokes it on emit().
    """

    def __init__(self):
        self.func = None

    def connect(self, func):
        """Register *func* as the receiver, replacing any previous one."""
        self.func = func

    def emit(self, *args):
        """Invoke the registered receiver with *args*; no-op when unconnected."""
        if self.func is not None:
            self.func(*args)
class VectorCurve(QtWidgets.QGraphicsPathItem):
    """Painted curve path that also keeps its source x/y sample vectors.

    The stored vectors are read back later (e.g. by Indicator.updates_ylabels)
    to interpolate the curve value at an arbitrary x position.
    """

    def __init__(self, path, xvector, yvector):
        super().__init__(path)
        self.xvector = xvector  # x samples backing the painted path
        self.yvector = yvector  # y samples backing the painted path
def createCurve(x, y, color=None, z=0, fill=50, zero_ends=True):
    """Build a VectorCurve graphics item for the samples (x, y).

    :param x: sequence of x sample positions
    :param y: sequence of y sample values
    :param color: pen color accepted by QColor; None selects black
    :param z: z-value for the item (only applied when non-zero)
    :param fill: alpha of the fill brush, or None for no fill
    :param zero_ends: close the outline down to y=0 at both ends so a
        fill brush reaches the x axis
    :return: the configured VectorCurve item
    """
    if color is None:
        pen = QtGui.QPen(QtCore.Qt.black, 0, QtCore.Qt.SolidLine)
        if fill is not None:
            # NOTE(review): alpha is fixed at 100 here while the colored
            # branch uses *fill* -- confirm this asymmetry is intended.
            brush = QtGui.QBrush(QtGui.QColor(0,0,0,100))
    else:
        pen = QtGui.QPen(color, 0, QtCore.Qt.SolidLine)
        R,G,B,A = QtGui.QColor(color).toTuple()
        if fill is not None:
            brush = QtGui.QBrush(QtGui.QColor(R,G,B,fill))

    if zero_ends:
        # Prepend/append zero-valued points so the filled area closes on the axis.
        path = arrayToQPath(np.r_[x[0], x, x[-1]], np.r_[0, y, 0])
    else:
        path = arrayToQPath(x, y)

    curve = VectorCurve(path, np.array(x), np.array(y))
    if z != 0:
        curve.setZValue(z)
    curve.setPen(pen)
    if fill is not None:
        curve.setBrush(brush)
    return curve
class LabelItem(QtWidgets.QGraphicsPolygonItem):
    """Movable balloon-shaped label that hangs below its anchor point.

    Used by Indicator to display the x position; ignores view transformations
    so it keeps a constant size in pixels.
    """

    def __init__(self, text='', color=QtGui.QColor(0,0,0), parent=None, scene=None):
        # NOTE(review): the scene= keyword was removed from QGraphicsItem
        # constructors in Qt5 -- confirm the installed binding accepts it.
        super().__init__(parent=parent, scene=scene)
        self.makePolygon(40)
        self.setPen(QtGui.QPen(color))
        self.setBrush(QtGui.QColor(240, 240, 240))
        self.setFlag(QtWidgets.QGraphicsItem.ItemIgnoresTransformations, True)
        self.setFlag(QtWidgets.QGraphicsItem.ItemIsMovable, True)
        #self.setFlag(QtWidgets.QGraphicsItem.ItemIsSelectable, False)
        self.label = QtWidgets.QGraphicsTextItem('', self)
        self.label.setFont(QtGui.QFont('Arial', 8))
        self.label.setPos(-1, 0)
        self.updateText(text)

    def makePolygon(self, box_width):
        """Rebuild the balloon outline for a text box of *box_width* pixels."""
        polygon = QtGui.QPolygonF()
        # Tip at the anchor, box hanging 5..20 px below it.
        polygon.append(QtCore.QPointF(0, 0))
        polygon.append(QtCore.QPointF(box_width / 2, 5))
        polygon.append(QtCore.QPointF(box_width / 2, 20))
        polygon.append(QtCore.QPointF(-box_width / 2, 20))
        polygon.append(QtCore.QPointF(-box_width / 2, 5))
        polygon.append(QtCore.QPointF(0, 0))
        self.setPolygon(polygon)

    def updateText(self, text):
        """Set the label text, recenter it and resize the balloon to fit."""
        self.label.setPlainText(text)
        self.label.setPos(- self.label.boundingRect().width() / 2, 2)
        self.makePolygon(self.label.boundingRect().width())

    def mouseMoveEvent(self, e):
        # Dragging the label drags the whole parent indicator.
        self.parentItem().mouseMoveEvent(e)

    def mouseReleaseEvent(self, event):
        # Forward the release to the parent so its signal fires too.
        self.parentItem().mouseReleaseEvent(event)
        super().mouseReleaseEvent(event)
class YLabelItem(QtWidgets.QGraphicsPolygonItem):
    """Value label shown to the right of an indicator line at a curve's y position.

    Like LabelItem it keeps a constant pixel size; *offset* is a vertical
    declutter shift applied when neighbouring labels would overlap.
    """

    def __init__(self, text='', color=QtGui.QColor(0,0,0), parent=None, scene=None):
        # NOTE(review): the scene= keyword was removed from QGraphicsItem
        # constructors in Qt5 -- confirm the installed binding accepts it.
        super().__init__(parent=parent, scene=scene)
        self.setPen(QtGui.QPen(color))
        self.setBrush(QtGui.QColor(240, 240, 240))
        self.setFlag(QtWidgets.QGraphicsItem.ItemIgnoresTransformations, True)
        self.setFlag(QtWidgets.QGraphicsItem.ItemIsMovable, True)
        self.label = QtWidgets.QGraphicsTextItem('', self)
        self.label.setFont(QtGui.QFont('Arial', 8))
        self.offset = 0  # extra vertical shift set by declutter_ylabels
        self.updateText(text)

    def update_offset(self, offset):
        """Apply a declutter offset and rebuild the outline."""
        self.offset = offset
        self.makePolygon()

    def makePolygon(self):
        """Rebuild the pointer-plus-box outline for the current text width/offset."""
        box_width = self.text_width
        offset = self.offset
        polygon = QtGui.QPolygonF()
        polygon.append(QtCore.QPointF(0, 0))
        polygon.append(QtCore.QPointF(5, -5 + offset))
        polygon.append(QtCore.QPointF(5, -20 + offset))
        polygon.append(QtCore.QPointF(5 + box_width, -20 + offset))
        polygon.append(QtCore.QPointF(5 + box_width, -5 + offset))
        polygon.append(QtCore.QPointF(5, -5+offset))
        polygon.append(QtCore.QPointF(0, 0))
        self.label.setPos(5, -23 + offset)
        self.setPolygon(polygon)

    def updateText(self, text):
        """Set the label text and resize the outline to fit it."""
        self.label.setPlainText(text)
        self.text_width = self.label.boundingRect().width()
        self.makePolygon()

    def mouseMoveEvent(self, e):
        # Dragging a y label drags the whole parent indicator.
        self.parentItem().mouseMoveEvent(e)

    def sortkey(self):
        """Vertical position; used to sort labels for decluttering."""
        return self.pos().y()
class Indicator(QtWidgets.QGraphicsPolygonItem):
    """Draggable vertical marker line with an optional x label and per-curve y labels."""

    def __init__(self, color = QtCore.Qt.blue, text = None, parent=None, scene=None):
        super().__init__(parent=parent, scene=scene)
        # Plain-Python signal (QGraphicsItem is not a QObject); emits the
        # final x position when the user releases the drag.
        self.mouse_released = ItemSignal()
        self.setFlag(QtWidgets.QGraphicsItem.ItemIgnoresTransformations, True)
        self.setFlag(QtWidgets.QGraphicsItem.ItemIsMovable, True)
        self.setFlag(QtWidgets.QGraphicsItem.ItemIsSelectable, False)
        self.setPen(QtGui.QPen(color))
        polygon = QtGui.QPolygonF()
        self.text = text  # x-label format string (may contain a % placeholder)
        if text is None:
            # No label: draw a small filled triangle at the anchor plus the line.
            self.setBrush(color)
            polygon.append(QtCore.QPointF(0, 0))
            polygon.append(QtCore.QPointF(7, 10))
            polygon.append(QtCore.QPointF(-7, 10))
            polygon.append(QtCore.QPointF(0, 0))
            # 2160 px tall marker -- assumes views no taller than 4K; verify.
            polygon.append(QtCore.QPointF(0, -2160))
            self.setPolygon(polygon)
            self.label = None
        else:
            polygon.append(QtCore.QPointF(0, 0))
            polygon.append(QtCore.QPointF(0, -2160))
            polygon.append(QtCore.QPointF(0, 0))
            self.setPolygon(polygon)
            self.label = LabelItem(self.text, color, self, scene)
            #self.addItem(self.label)
        self.ylabels = []  # YLabelItem pool, one per attached curve

    def attach_curves(self, curves):
        """Attach the mapping of curves whose y values this indicator reports."""
        self.curves = curves

    def set_ylabel_count(self, count):
        """Grow or shrink the y-label pool to exactly *count* items."""
        for i in range(len(self.ylabels) - count):
            ylabel = self.ylabels.pop(0)
            self.scene().removeItem(ylabel)
        for i in range(count - len(self.ylabels)):
            ylabel = YLabelItem('test', parent = self)
            self.ylabels.append(ylabel)

    def updates_ylabels(self, x=None):
        """Reposition and retext every y label for scene position *x*.

        When *x* is None the indicator's current scene x position is used.
        """
        x = self.scenePos().x() if x is None else x
        view = self.scene().views()[0]
        self.set_ylabel_count(len(self.curves))
        for curve, ylabel in zip(self.curves.values(), self.ylabels):
            # Interpolate the curve at x; 0 outside the sampled range.
            yval = np.interp(x, curve.xvector, curve.yvector,0,0)
            yscale = view.scale[1]
            ypos = (yval - self.scenePos().y()) * yscale
            # Clamp the label inside the visible view height.
            if ypos > 0:
                ypos = 0
            elif ypos < (-view.height()+23):
                ypos = -view.height()+23
            ylabel.setPos(0, ypos)
            ylabel.updateText("%0.4g" % yval)
            ylabel.setPen(curve.pen())
        self.declutter_ylabels(-view.height()+22)

    def declutter_ylabels(self, ymin=-4000, ymax=0):
        """Shift overlapping y labels apart, working downward from *ymin*.

        NOTE(review): the *ymax* argument is unused -- confirm.
        """
        self.ylabels = sorted(self.ylabels, key = YLabelItem.sortkey)
        prior_bottom = ymin
        for ylabel in self.ylabels:
            ypos = ylabel.pos().y()
            # Each label box is ~21 px tall; push down until it clears the prior one.
            if (ypos - prior_bottom) < 21:
                offset = abs(21 - (ypos - prior_bottom))
            else:
                offset = 0
            ylabel.update_offset(offset)
            prior_bottom = ypos -5 + offset

    def mouseReleaseEvent(self, event):
        # Report the final x position to whoever connected to mouse_released.
        self.mouse_released.emit(self.pos().x())
        super().mouseReleaseEvent(event)

    def mouseMoveEvent(self, event):
        """Follow the mouse horizontally and refresh all labels."""
        x = event.scenePos().x()
        if (not self.label is None) and ('%' in self.text):
            self.label.updateText(self.text % x)
        self.setPos(x, 0)
        self.updates_ylabels(x)
        # Flag so the owning scene can suppress other interactions while dragging.
        self.scene().moving_indicators = True
class Grid(QtWidgets.QGraphicsItem):
    """Grid-line overlay derived from two attached rulers.

    *direction* selects the axis mapping: 0 draws x ruler thicks as vertical
    lines and y thicks as horizontal lines; 1 swaps the roles -- presumably
    for a rotated plot orientation; verify with callers.
    """

    def __init__(self, direction, parent=None, scene=None):
        super().__init__(parent=parent, scene=scene)
        # Pen per thick level (major solid, mid dashed, minor dotted),
        # repeated once for each ruler axis.
        pens = []
        pens.append(QtGui.QPen(QtGui.QColor(159,159,159), 0, QtCore.Qt.SolidLine))
        pens.append(QtGui.QPen(QtGui.QColor(191,191,191), 0, QtCore.Qt.DashLine))
        pens.append(QtGui.QPen(QtGui.QColor(223,223,223), 0, QtCore.Qt.DotLine))
        pens.append(QtGui.QPen(QtGui.QColor(159,159,159), 0, QtCore.Qt.SolidLine))
        pens.append(QtGui.QPen(QtGui.QColor(191,191,191), 0, QtCore.Qt.DashLine))
        pens.append(QtGui.QPen(QtGui.QColor(223,223,223), 0, QtCore.Qt.DotLine))
        self.pens = pens
        self.grid_items = []  # QGraphicsPathItem children, one per pen
        self.direction = direction

    def boundingRect(self):
        # Required QGraphicsItem override; the real drawing is done by children.
        return QtCore.QRectF(-2, -2, 4, 4)

    def paint(self, painter, option, widget):
        # Intentionally empty: the grid lines are child path items.
        pass
        #painter.drawRoundedRect(-10, -10, 20, 20, 5, 5)

    def attach_rulers(self, x_ruler, y_ruler):
        """Attach the rulers providing the thick positions and redraw."""
        self.x_ruler = x_ruler
        self.y_ruler = y_ruler
        self.update_grid()

    def update_grid(self):
        """Rebuild all grid line path items from the current ruler thicks."""
        for path_item in self.grid_items:
            self.scene().removeItem(path_item)
        self.grid_items = []
        pens = self.pens
        paths = []
        view = self.scene().views()[0]
        # One path per thick level for the x ruler ...
        for thicklevel in range(3):
            paths.append(QtGui.QPainterPath())
            for i in self.x_ruler.thicks[thicklevel][1]:
                if self.direction == 0:
                    paths[-1].moveTo(i, self.y_ruler.start)
                    paths[-1].lineTo(i, self.y_ruler.stop)
                else:
                    paths[-1].moveTo(self.x_ruler.start, i)
                    paths[-1].lineTo(self.x_ruler.stop, i)
        # ... and one per thick level for the y ruler.
        for thicklevel in range(3):
            paths.append(QtGui.QPainterPath())
            for i in self.y_ruler.thicks[thicklevel][1]:
                if self.direction == 0:
                    paths[-1].moveTo(self.x_ruler.start, i)
                    paths[-1].lineTo(self.x_ruler.stop, i)
                else:
                    paths[-1].moveTo(i, self.y_ruler.start)
                    paths[-1].lineTo(i, self.y_ruler.stop)
        for i in range(len(paths)):
            path_item = QtWidgets.QGraphicsPathItem(paths[i], parent=self)
            path_item.setPen(pens[i])
            path_item.setZValue(-2)  # draw behind the curves
            self.grid_items.append(path_item)
|
thocoo/gamma-desk | gdesk/live/__init__.py | <reponame>thocoo/gamma-desk
"""The is the doctring of live.
Example of nested live script
-- scripts/info.py ---------------------
def get_info():
return f'Info from {__file__}'
----------------------------------------
-- scripts/map1/hello.py ---------------
from gdesk.live import using
info = using.info
def hello_world():
print(f'Hello world')
print(info.get_info())
----------------------------------------
-- Use of using at top level -----------
from gdesk.live import manager, use
manager.append_path('scripts')
hello = use.map1.hello
hello.hello_world()
"""
import sys
import os
from .manage import LiveScriptManager, LiveScriptScan
# Shared namespace handed to every live script.
workspace = dict()
manager = LiveScriptManager(workspace)

# Same value as os.pathsep; kept as a named module constant for external use.
PATH_SEPERATOR = ';' if sys.platform == 'win32' else ':'

# Seed the script search path from the LIVEPATH environment variable.
if 'LIVEPATH' in os.environ.keys():
    for path in os.environ['LIVEPATH'].split(PATH_SEPERATOR):
        manager.append_path(path)

using = LiveScriptScan(manager, top='__main__') #Become top if current workspace name is __main__
using_sub = LiveScriptScan(manager, top=False)
use = using_top = LiveScriptScan(manager, top=True)
thocoo/gamma-desk | gdesk/live/console.py | import sys, os
import platform
import traceback
import time
import argparse
from pathlib import Path
from .interactive import interact
from . import workspace, manager, using, using_top, use
from .__version__ import VERSION, VERSION_INFO
# Package name used in the help text, e.g. 'gdesk.live' for 'gdesk.live.console'.
modname = '.'.join(globals()['__name__'].split('.')[:-1])

# Same value as os.pathsep; kept as a named constant for the help text below.
PATH_SEPERATOR = ';' if sys.platform == 'win32' else ':'

HEADER = f"""Live Scripting Environment {VERSION_INFO}
==================================="""

# --help epilog. Typos fixed: directory, within, interactive; the first
# example now refers to the 'script.py' file it actually executes.
epilog = f"""Search paths
------------
The list of absolute paths is available as {modname}.manager.path

Configuring the search paths:
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
{modname} optional argument -p PATH, --path PATH:
    PATH is '{PATH_SEPERATOR}'-separated list of directories
    If not given, environment variable LIVEPATH is used.
Use of system environment variable LIVEPATH:
    '{PATH_SEPERATOR}'-separated list of directories
    If not given, search for higher directory containing __live__.py
From within Python:
    {modname}.manager.path.append_path(some_path)

Examples
--------
{modname} -u script arg1 arg2
    Execute '__main__' function of 'script.py' script file and use
    'arg1' as first argument.
{modname} -u script.hello_world
    Execute the 'hello_world' function of 'script.py' script file.
{modname} arg1 arg2
    Execute '__main__' function of '__live__.py' script file and use
    2 arguments: 'arg1' and 'arg2'
{modname} -i -u script.hello_world
    Enter interactive mode after executing script.hello_world()
"""
def argparser():
    """Construct the command line parser for the live scripting console."""
    parser = argparse.ArgumentParser(
        description=HEADER,
        prog=f'python -m {modname}',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=epilog)

    parser.add_argument("arguments", type=str, help="Arguments", nargs='*')
    parser.add_argument(
        "-p", "--path", type=str,
        help=f"""'{PATH_SEPERATOR}'-separated list of directories prefixed to the default live search path.
If not given, environment variable LIVEPATH is used. The list of absolute paths is available as {modname}.manager.path.""")
    parser.add_argument("-u", "--using", help="Start the __main__ function from this live module", nargs=1)

    # Boolean switches.
    parser.add_argument("-l", "--list", help="List the available scripts", action='store_true')
    parser.add_argument("-i", "--interactive", help="Start Python interactive", action='store_true')
    parser.add_argument("-k", "--keycomplete", help="Enable key completer in interactive mode", action='store_true')

    return parser
def argexec():
    """Parse the command line and run the requested live script.

    Returns the workspace dict; the script's result (if any) is stored
    under '__returned_value__'.
    """
    parser = argparser()
    args = parser.parse_args()
    # if len(sys.argv) <= 1:
    # parser.print_help()
    ws = workspace
    live_paths = None
    if args.path:
        live_paths = args.path.split(PATH_SEPERATOR)
    elif 'LIVEPATH' in os.environ.keys():
        #This is automatic added by __init__
        pass
    else:
        # No explicit search path: walk up from the cwd (max 20 levels)
        # looking for a directory whose child contains a __live__.py marker.
        print('No live search paths defined')
        path = Path('.').absolute()
        depth = 20
        while True:
            try:
                for item in os.scandir(path):
                    if item.is_dir() and (path / item.path / '__live__.py').exists():
                        p = str(path / item.path)
                        print(f'Found {p} having __live__.py')
                        live_paths = [p]
                        break
            except:
                # NOTE(review): bare except; presumably guards against
                # PermissionError while scanning -- confirm and narrow.
                print(f'Could not scan {path}')
            if depth == 0 or path == path.parent:
                break
            path = path.parent
            depth -= 1
    if not live_paths is None:
        # Explicit paths replace whatever __init__ collected.
        manager.path.clear()
        for path in live_paths:
            manager.append_path(path)
    if args.list:
        print('Available top level scripts:')
        print()
        for item in dir(using_top):
            print(f' {item}')
        print()
    ws['using'] = using
    ws['use'] = use
    ws['__name__'] = '__main__'
    ws['__interactive__'] = True if args.interactive else False
    if args.using:
        use_name = args.using[0]
        if not '.' in use_name:
            # Bare module name: run its __main__ function.
            func = eval(f'using_top.{use_name}.__main__')
        else:
            func = eval(f'using_top.{use_name}')
    else:
        try:
            func = eval(f'using_top.__live__.__main__')
        except:
            # NOTE(review): bare except hides import errors inside
            # __live__.py, not only its absence -- confirm and narrow.
            func = None
    argv = args.arguments
    if func is None:
        if len(sys.argv) <= 1:
            parser.print_help()
    else:
        try:
            ws['__returned_value__'] = func(*argv)
        except:
            # Report script failures without killing the console.
            traceback.print_exc()
    if ws['__interactive__'] or func is None:
        banner = 'Type using.<TAB> to get a list of the top level scripts\n'
        banner += 'exit() to exit'
        completer = 'key' if args.keycomplete else 'standard'
        interact(ws, banner, completer=completer)
    return ws
|
thocoo/gamma-desk | gdesk/panels/html/panel.py | from qtpy.QtCore import Qt
from qtpy.QtGui import QWindow
from qtpy.QtWidgets import QWidget
from qtpy import QtWebEngineWidgets
from ..base import BasePanel, CheckMenu
# Default page shown in a fresh HtmlPanel until real content is loaded.
HTML_BANNER = '''<html>
<head>
<title>A Sample Page</title>
</head>
<body>
<h1>Hello, World!</h1>
<hr />
I have nothing to say.
</body>
</html>'''
class HtmlPanel(BasePanel):
    """Panel embedding a QWebEngineView, initially showing HTML_BANNER."""

    panelCategory = 'html'
    panelShortName = 'basic'

    def __init__(self, parent, panid):
        super().__init__(parent, panid, type(self).panelCategory)
        # Minimal File menu with just a close entry.
        self.fileMenu = CheckMenu("File", self.menuBar())
        self.addMenuItem(self.fileMenu, "Close", self.close_panel,
            statusTip = "Close this html panel",
            icon = 'cross.png')
        self.webview = QtWebEngineWidgets.QWebEngineView()
        self.webview.setHtml(HTML_BANNER)
        self.setCentralWidget(self.webview)
        self.addBaseMenu()
thocoo/gamma-desk | gdesk/panels/scriptwiz/__init__.py | from ... import config
from .proxy import ScriptWizardProxy

# Only import the Qt panel when a QApplication is configured;
# keeps the proxy importable in headless (non-GUI) processes.
if config['qapp']:
    from .panel import ScriptWizardPanel
thocoo/gamma-desk | gdesk/dicttree/widgets.py | <reponame>thocoo/gamma-desk
# -*- coding: utf-8 -*-
import sys
from qtpy import QtGui, QtCore, QtWidgets
from .node import Node
class DictionaryTreeModel(QtCore.QAbstractItemModel):
    """Data model providing a tree of an arbitrary dictionary"""

    def __init__(self, root, parent=None):
        super(DictionaryTreeModel, self).__init__(parent)
        self._rootNode = root  # Node tree built by node_structure_from_dict

    def rowCount(self, parent):
        """the number of rows is the number of children"""
        if not parent.isValid():
            parentNode = self._rootNode
        else:
            parentNode = parent.internalPointer()
        return parentNode.childCount()

    def columnCount(self, parent):
        """Number of columns is always 2 since dictionaries consist of key-value pairs"""
        return 2

    def data(self, index, role):
        """returns the data requested by the view"""
        if not index.isValid():
            return None
        node = index.internalPointer()
        if role == QtCore.Qt.DisplayRole or role == QtCore.Qt.EditRole:
            return node.data(index.column())
        # Other roles fall through and return None implicitly.

    def setData(self, index, value, role=QtCore.Qt.EditRole):
        """this method gets called when the user changes data"""
        if index.isValid():
            if role == QtCore.Qt.EditRole:
                node = index.internalPointer()
                node.setData(index.column(), value)
                return True
        return False

    def headerData(self, section, orientation, role):
        """returns the name of the requested column"""
        if role == QtCore.Qt.DisplayRole:
            if section == 0:
                return "Key"
            if section == 1:
                return "Value"

    def flags(self, index):
        """everything is editable"""
        return (QtCore.Qt.ItemIsEnabled |
                QtCore.Qt.ItemIsSelectable |
                QtCore.Qt.ItemIsEditable)

    def parent(self, index):
        """returns the parent from given index"""
        node = self.getNode(index)
        parentNode = node.parent()
        if parentNode == self._rootNode:
            # The root itself is represented by an invalid index.
            return QtCore.QModelIndex()
        return self.createIndex(parentNode.row(), 0, parentNode)

    def index(self, row, column, parent):
        """returns an index from given row, column and parent"""
        parentNode = self.getNode(parent)
        childItem = parentNode.child(row)
        if childItem:
            return self.createIndex(row, column, childItem)
        else:
            return QtCore.QModelIndex()

    def getNode(self, index):
        """returns a Node() from given index"""
        if index.isValid():
            node = index.internalPointer()
            if node:
                return node
        return self._rootNode

    def insertRows(self, position, rows, parent=QtCore.QModelIndex()):
        """insert rows from starting position and number given by rows"""
        parentNode = self.getNode(parent)
        self.beginInsertRows(parent, position, position + rows - 1)
        for row in range(rows):
            childCount = parentNode.childCount()
            childNode = Node("untitled" + str(childCount))
            success = parentNode.insertChild(position, childNode)
        self.endInsertRows()
        # NOTE(review): only the success of the LAST insert is returned -- confirm.
        return success

    def removeRows(self, position, rows, parent=QtCore.QModelIndex()):
        """remove the rows from position to position+rows"""
        parentNode = self.getNode(parent)
        self.beginRemoveRows(parent, position, position + rows - 1)
        for row in range(rows):
            success = parentNode.removeChild(position)
        self.endRemoveRows()
        # NOTE(review): only the success of the LAST removal is returned -- confirm.
        return success

    def to_dict(self):
        """Serialize the node tree back into a plain dictionary."""
        return self._rootNode.to_dict()
def node_structure_from_dict(datadict, parent=None, root_node=None):
    """Return a hierarchical Node structure (as required by the TreeModel)
    built from nested dicts/lists.

    Dict keys become node names; list elements are named by their index.
    Leaf values are stored stringified on the node's ``value``.

    Raises:
        TypeError: when *datadict* is neither a dict nor a list
            (previously this surfaced as an UnboundLocalError).
    """
    if parent is None:
        root_node = Node('Root')
        parent = root_node
    if isinstance(datadict, dict):
        iterator = datadict.items()
    elif isinstance(datadict, list):
        iterator = enumerate(datadict)  # idiomatic replacement for zip(range(len(..)), ..)
    else:
        raise TypeError(f'Expected dict or list, got {type(datadict).__name__}')
    for name, data in iterator:
        node = Node(name, parent)
        if isinstance(data, (dict, list)):
            # Recurse; the return value (the shared root) was never used.
            node_structure_from_dict(data, node, root_node)
        else:
            node.value = str(data)
    return root_node
class DictionaryTreeWidget(QtWidgets.QTreeView):
    """Tree view that displays and edits a dictionary.

    Example::

        tree = DictionaryTree(d)
        tree.edit()
        d_edited = tree.dict()

    ``d_edited`` contains the dictionary with the edited data.
    this has to be refactored...
    """
    def __init__(self, d):
        super(DictionaryTreeWidget, self).__init__()
        self.load_dictionary(d)

    def load_dictionary(self,d):
        """Load a dictionary into my tree application."""
        self._d = d  # keep the original so Cancel can restore it
        self._nodes = node_structure_from_dict(d)
        self._model = DictionaryTreeModel(self._nodes)
        self.setModel(self._model)

    def to_dict(self):
        """Return a dictionary rebuilt from the current tree data."""
        return self._model.to_dict()
class DictionaryTreeDialog(QtWidgets.QDialog):
    """guidata-motivated dialog for editing dictionaries.

    Shows a DictionaryTreeWidget with Ok/Cancel buttons; Cancel (or
    closing the window) restores the original dictionary.
    """
    def __init__(self, d):
        super(DictionaryTreeDialog, self).__init__()
        treeWidget = DictionaryTreeWidget(d)
        for c in range(treeWidget._model.columnCount(None)):
            treeWidget.resizeColumnToContents(c)
        self.treeWidget = treeWidget
        self.buttonOk = QtWidgets.QPushButton('Ok', self)
        self.buttonCancel = QtWidgets.QPushButton('Cancel', self)
        vbox = QtWidgets.QVBoxLayout()
        vbox.addWidget(self.treeWidget)
        bhbox = QtWidgets.QHBoxLayout()
        bhbox.addStretch()
        bhbox.addWidget(self.buttonOk)
        bhbox.addWidget(self.buttonCancel)
        vbox.addLayout(bhbox)
        self.setLayout(vbox)
        # Bug fix: old-style `self.connect(obj, QtCore.SIGNAL('clicked()'), slot)`
        # is not supported by the Qt5 bindings (PyQt5/PySide2); use new-style
        # signal connections instead.
        self.buttonOk.clicked.connect(self.accept)
        self.buttonCancel.clicked.connect(self.closeCancel)

    def edit(self):
        """Run the dialog modally; returns the QDialog result code."""
        return self.exec_()

    def to_dict(self):
        """Return the (possibly edited) data as a dictionary."""
        return self.treeWidget.to_dict()

    def closeCancel(self):
        """Discard edits by reloading the original dictionary, then reject."""
        d = self.treeWidget._d
        self.treeWidget.load_dictionary(d)
        self.reject()

    def closeEvent(self, event):
        # Closing the window behaves like pressing Cancel.
        self.closeCancel()
if __name__ == '__main__':
    # Demo: edit a sample dictionary in a modal tree dialog.
    # Bug fix: QApplication lives in QtWidgets under Qt5; QtGui.QApplication
    # and QtGui.qApp do not exist there. Reuse a running instance if any.
    app = QtWidgets.QApplication.instance()
    if app is None:
        app = QtWidgets.QApplication(sys.argv)
    d = {'First name': 'Maximus',
         'Last name': 'Mustermann',
         'Nickname': 'Max',
         'Address': {'Street': 'Musterstr.',
                     'House number': 13,
                     'Place': 'Orthausen',
                     'Zipcode': 76123},
         'An Object': float,
         'Great-grandpa': {
             'Grandpa': {
                 'Pa': 'Child'}}
         }
    tree = DictionaryTreeDialog(d)
    if tree.edit():
        print('Accepted:')
    else:
        print('Cancelled')
    edited_dict = tree.to_dict()
    print('\nEdited dict: {}'.format(edited_dict))
    print('\nEdited dict is the same as input dict: {}'.format(edited_dict == d))
    print('\nMy object is still of type: {}'.format(edited_dict['An Object']))
|
thocoo/gamma-desk | setup.py | <filename>setup.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Build an installable package."""
import io
import os
import sys
import glob
from pathlib import Path
from setuptools import find_packages, setup, Command, Distribution
herepath = Path(__file__).parent.absolute()
here = str(herepath)
MODULE_NAME = 'gdesk'
DISTRO_NAME = 'gamma-desk'
DESCRIPTION = 'A Python work environment image viewers & plots'
URL = 'https://github.com/thocoo/gamma-desk'
EMAIL = '<EMAIL>'
AUTHOR = '<NAME>'
modpath = herepath / 'gdesk'
# Runtime dependencies.
# Bug fix: the template engine used by gdesk (see gdesk/panels/scriptwiz,
# `from jinja2 import Template`) is jinja2 -- 'ninja2' was a typo and is not
# the intended package.
REQUIRED = [
    'numpy==1.20.3',  # numba requires numpy <= 1.20
    'imageio',
    'matplotlib',
    'scipy',
    'PySide2',
    'qtpy==1.9.0',
    'psutil',
    'numba',
    'pyzmq',
    'jinja2',
    'toml',
    'pywinpty'
]
PYTHON_REQUIRED = '>=3.6'
def get_resources():
    """Collect package-data files: the default config plus every file
    under the module's resources/ tree."""
    resources = [str(modpath / 'config' / 'defaults.json')]
    resources.extend(
        str(entry) for entry in modpath.glob('resources/**/*')
        if not entry.is_dir()
    )
    return resources
# Execute version.py in this namespace; it defines VERSION used by setup().
with open(modpath / 'version.py') as fp:
    exec(fp.read())

# Import the README and use it as the long-description.
with open(herepath / 'README.md', encoding='utf-8') as fp:
    LONG_DESCRIPTION = '\n' + fp.read()

setup(
    name=DISTRO_NAME,
    version=VERSION,  # defined by the exec of version.py above
    description=DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    long_description_content_type="text/markdown",
    author=AUTHOR,
    author_email=EMAIL,
    license='Apache License 2.0',
    url=URL,
    packages=find_packages(exclude=('tests',)),
    package_data=dict(gdesk=get_resources(),),
    entry_points={'console_scripts': [f'{MODULE_NAME} = {MODULE_NAME}.console:argexec']},
    install_requires=REQUIRED,
    include_package_data=True,
    python_requires=PYTHON_REQUIRED,
    classifiers=[
        # Trove classifiers
        # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
    ],
)
|
thocoo/gamma-desk | gdesk/panels/scriptwiz/panel.py | <gh_stars>0
import logging
from pathlib import Path
import json
from collections import OrderedDict
from qtpy import QtCore, QtGui, QtWidgets
from qtpy.QtCore import Qt
from jinja2 import Template
from ... import gui, config
from ...panels.base import BasePanel, CheckMenu
from ...dialogs.formlayout import fedit
logger = logging.getLogger(__name__)
respath = Path(config['respath'])
class FileLayout(QtWidgets.QWidget):
    """File-specialized QLineEdit layout.

    A path line edit flanked by tool buttons: browse (always), load
    (only when ``loadCall`` is given), save (only when ``saveCall`` is
    given) and open-in-editor (when ``edit`` is True).
    """
    def __init__(self, text='', loadCall=None, saveCall=None, save=False, edit=True):
        super().__init__()
        self.loadCall = loadCall  # callable(Path) invoked on load; no-op if None
        self.saveCall = saveCall  # callable(Path) invoked on save; no-op if None
        self.save = save          # True: browsing selects a target, skip auto-load
        hbox = QtWidgets.QHBoxLayout()
        hbox.setMargin(0)  # Qt4-era API; available on PySide2 (pinned), not PyQt5
        hbox.setContentsMargins(0,0,0,0)
        self.setLayout(hbox)
        self.fileExplBtn = QtWidgets.QToolButton()
        self.fileExplBtn.clicked.connect(self.fileExplore)
        self.fileExplBtn.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'folders_explorer.png')))
        hbox.addWidget(self.fileExplBtn)
        self.lineedit = QtWidgets.QLineEdit(text)
        hbox.addWidget(self.lineedit)
        if self.loadCall is None:
            # No-op placeholder so loadCall can always be invoked.
            self.loadCall = lambda filepath: None
        else:
            self.fileLoadbtn = QtWidgets.QToolButton()
            self.fileLoadbtn.clicked.connect(self.loadFile)
            self.fileLoadbtn.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'folder.png')))
            hbox.addWidget(self.fileLoadbtn)
        if self.saveCall is None:
            self.saveCall = lambda filepath: None
        else:
            self.fileSavebtn = QtWidgets.QToolButton()
            self.fileSavebtn.clicked.connect(self.savefile)
            self.fileSavebtn.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'diskette.png')))
            hbox.addWidget(self.fileSavebtn)
        if edit:
            self.editbtn = QtWidgets.QToolButton()
            self.editbtn.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'script_edit.png')))
            self.editbtn.clicked.connect(self.editfile)
            hbox.addWidget(self.editbtn)

    def fileExplore(self):
        """Browse for a file; auto-load it unless this widget is in save mode."""
        selectedFile = gui.dialog.selectfile(default_path=self.text())
        if selectedFile is None: return
        self.lineedit.setText(selectedFile)
        if not self.save:
            self.loadCall(Path(self.text()))

    def loadFile(self):
        """Invoke the load callback with the current path."""
        self.loadCall(Path(self.lineedit.text()))

    def savefile(self):
        """Invoke the save callback with the current path."""
        self.saveCall(Path(self.lineedit.text()))

    def editfile(self):
        """Open the current file in the configured source editor."""
        from gdesk import shell
        shell.edit_file(self.lineedit.text())

    def text(self):
        """Return the current path as plain text."""
        return self.lineedit.text()

    def setText(self, text):
        """Replace the current path text."""
        self.lineedit.setText(text)
class DirFileLayout(QtWidgets.QWidget):
    """File-specialized layout with separate directory and file-name edits.

    Like FileLayout, but the directory and the file name are edited in
    two separate line edits.
    """
    def __init__(self, fullFilePath='', loadCall=None, saveCall=None, save=False, edit=True):
        # NOTE(review): the `save` parameter is accepted but never stored or
        # used here (unlike FileLayout) -- confirm whether that is intended.
        super().__init__()
        self.loadCall = loadCall  # callable(Path) invoked on load; no-op if None
        self.saveCall = saveCall  # callable(Path) invoked on save; no-op if None
        hbox = QtWidgets.QHBoxLayout()
        hbox.setMargin(0)  # Qt4-era API; available on PySide2 (pinned), not PyQt5
        hbox.setContentsMargins(0,0,0,0)
        self.setLayout(hbox)
        self.dirExplBtn = QtWidgets.QToolButton()
        self.dirExplBtn.clicked.connect(self.getMap)
        self.dirExplBtn.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'folders_explorer.png')))
        hbox.addWidget(self.dirExplBtn)
        fullFilePath = Path(fullFilePath)
        self.dirEdit = QtWidgets.QLineEdit(str(fullFilePath.parent))
        hbox.addWidget(self.dirEdit, 2)
        self.fileNameEdit = QtWidgets.QLineEdit(fullFilePath.name)
        hbox.addWidget(self.fileNameEdit, 1)
        if self.loadCall is None:
            self.loadCall = lambda filepath: None
        else:
            self.fileLoadbtn = QtWidgets.QToolButton()
            self.fileLoadbtn.clicked.connect(self.loadFile)
            self.fileLoadbtn.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'folder_vertical_document.png')))
            hbox.addWidget(self.fileLoadbtn)
        if self.saveCall is None:
            self.saveCall = lambda filepath: None
        else:
            self.fileSavebtn = QtWidgets.QToolButton()
            self.fileSavebtn.clicked.connect(self.savefile)
            self.fileSavebtn.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'diskette.png')))
            hbox.addWidget(self.fileSavebtn)
        if edit:
            self.editbtn = QtWidgets.QToolButton()
            self.editbtn.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'script_edit.png')))
            self.editbtn.clicked.connect(self.editfile)
            hbox.addWidget(self.editbtn)

    @property
    def fullFileName(self):
        """Directory and file name joined as a Path."""
        return Path(self.dirEdit.text()) / self.fileNameEdit.text()

    def getMap(self):
        """Browse for a file and split its path over the two edits."""
        filepath = Path(gui.dialog.selectfile())
        self.dirEdit.setText(str(filepath.parent))
        self.fileNameEdit.setText(filepath.name)

    def loadFile(self):
        """Ask for a file when the name is empty, then invoke the load callback."""
        fileName = self.fileNameEdit.text()
        if fileName.strip() == '':
            fullFileName = Path(gui.getfilename(file=self.dirEdit.text()))
            self.dirEdit.setText(str(fullFileName.parent))
            self.fileNameEdit.setText(str(fullFileName.name))
        self.loadCall(self.fullFileName)

    def savefile(self):
        """Ask for a target when the name is empty, or when the target exists
        and the user refuses to overwrite; then invoke the save callback."""
        fileName = self.fileNameEdit.text()
        if fileName.strip() == '':
            fullFileName = Path(gui.putfilename())
            self.dirEdit.setText(str(fullFileName.parent))
            self.fileNameEdit.setText(str(fullFileName.name))
        elif self.fullFileName.exists():
            if not gui.question('File exists. Do you want to overwrite?'):
                fullFileName = Path(gui.putfilename())
                self.dirEdit.setText(str(fullFileName.parent))
                self.fileNameEdit.setText(str(fullFileName.name))
        self.saveCall(self.fullFileName)

    def editfile(self):
        """Open the current file in the configured source editor."""
        from gdesk import shell
        shell.edit_file(self.fullFileName)

    def text(self):
        """Return the full path as plain text."""
        return str(self.fullFileName)

    def setText(self, text):
        """Set both edits from one full path string."""
        fullFileName = Path(text)
        self.dirEdit.setText(str(fullFileName.parent))
        self.fileNameEdit.setText(str(fullFileName.name))
class MyTable(QtWidgets.QTableWidget):
    """Single-row table widget initialised with the given column headers."""

    def __init__(self, headers):
        self.headers = headers
        super().__init__(1, len(headers))
        self.setHorizontalHeaderLabels(self.headers)
class FormFile(FileLayout):
    """FileLayout adapter speaking the form's setContent/getContent protocol."""

    def __init__(self, cfg):
        initial_text = cfg.get('text', '')
        editable = cfg.get('edit', False)
        super().__init__(text=initial_text, edit=editable)

    def setContent(self, content):
        """Put *content* into the path edit."""
        self.setText(content)

    def getContent(self):
        """Return the current path text."""
        return self.text()
class FormLineEdit(QtWidgets.QLineEdit):
    """Single-line edit speaking the form's setContent/getContent protocol."""

    def setContent(self, content):
        """Put *content* into the edit box."""
        self.setText(content)

    def getContent(self):
        """Return the current text."""
        return self.text()
class FormTextEdit(QtWidgets.QPlainTextEdit):
    """Multi-line plain-text form widget; word wrapping is disabled."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setWordWrapMode(QtGui.QTextOption.NoWrap)

    def setContent(self, content):
        """Replace the whole text with *content*."""
        self.setPlainText(content)

    def getContent(self):
        """Return the whole text."""
        return self.toPlainText()
class FormTable(QtWidgets.QWidget):
    """Editable table form widget with add/delete/move-up/move-down buttons.

    Content is exchanged as a list of ``{header: text-or-None}`` dicts via
    setContent/getContent.
    """

    def __init__(self, cfg):
        super().__init__()
        vbox = QtWidgets.QVBoxLayout()
        vbox.setContentsMargins(0,0,0,0)
        vbox.setMargin(0)  # Qt4-era API; available on PySide2 (pinned)
        self.setLayout(vbox)
        hbox = QtWidgets.QHBoxLayout()
        vbox.addLayout(hbox)
        self.addBtn = QtWidgets.QToolButton()
        self.addBtn.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'table_row_insert.png')))
        self.addBtn.clicked.connect(self.addItem)
        hbox.addWidget(self.addBtn)
        self.deleteBtn = QtWidgets.QToolButton()
        self.deleteBtn.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'table_row_delete.png')))
        self.deleteBtn.clicked.connect(self.deleteItem)
        hbox.addWidget(self.deleteBtn)
        self.moveUpBtn = QtWidgets.QToolButton()
        self.moveUpBtn.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'table_rows_insert_above_word.png')))
        self.moveUpBtn.clicked.connect(self.moveUp)
        hbox.addWidget(self.moveUpBtn)
        self.moveDownBtn = QtWidgets.QToolButton()
        self.moveDownBtn.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'table_rows_insert_below_word.png')))
        self.moveDownBtn.clicked.connect(self.moveDown)
        hbox.addWidget(self.moveDownBtn)
        hbox.addStretch()
        self.table = MyTable(cfg.get('headers', ['None']))
        vbox.addWidget(self.table)

    def addItem(self):
        """Append one empty row."""
        self.table.setRowCount(self.table.rowCount() + 1)

    def deleteItem(self):
        """Drop the last row; no-op on an empty table (bug fix)."""
        if self.table.rowCount() > 0:
            self.table.setRowCount(self.table.rowCount() - 1)

    def moveUp(self):
        """Swap the selected row with the one above it."""
        indexes = self.table.selectedIndexes()
        if not indexes:
            return  # bug fix: no selection used to raise IndexError
        rownr = indexes[0].row()
        if rownr > 0:
            self.swapRow(rownr, rownr-1)
            self.table.selectRow(rownr-1)

    def moveDown(self):
        """Swap the selected row with the one below it."""
        indexes = self.table.selectedIndexes()
        if not indexes:
            return  # bug fix: no selection used to raise IndexError
        rownr = indexes[0].row()
        if rownr < (self.table.rowCount() - 1):
            self.swapRow(rownr, rownr+1)
            self.table.selectRow(rownr+1)

    def swapRow(self, rowNr0, rowNr1):
        """Exchange the items of two rows."""
        row0 = self.takeRow(rowNr0)
        row1 = self.takeRow(rowNr1)
        self.setRow(rowNr0, row1)
        self.setRow(rowNr1, row0)

    def takeRow(self, rownr):
        """Remove and return all items of a row."""
        row = []
        for colnr in range(self.table.columnCount()):
            row.append(self.table.takeItem(rownr, colnr))
        return row

    def setRow(self, rownr, row):
        """Place the given items into a row; returns the items."""
        for colnr, item in enumerate(row):
            self.table.setItem(rownr, colnr, item)
        return row

    def getContent(self):
        """Return the table as a list of {header: cell-text-or-None} dicts."""
        content = []
        table = self.table
        for r in range(table.rowCount()):
            item = dict()
            for c in range(table.columnCount()):
                header = table.horizontalHeaderItem(c).text()
                cell = table.item(r, c)
                if cell is not None:
                    item[header] = cell.text()
                else:
                    item[header] = None
            content.append(item)
        return content

    def setContent(self, content):
        """Fill the table from a list of {header: value} dicts."""
        table = self.table
        table.setRowCount(len(content))
        for r, item in enumerate(content):
            for key, val in item.items():
                c = table.headers.index(key)
                table.setItem(r, c, QtWidgets.QTableWidgetItem(val))
        table.resizeColumnsToContents()
class CustomForm(QtWidgets.QWidget):
    """Form widget built dynamically from a list of widget descriptions.

    Each item of *cfg* is a dict with 'key', 'caption', 'widget_type' and
    optional 'description' and 'widget_config'.
    """

    def __init__(self, cfg):
        super().__init__()
        self.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        self.flay = flay = QtWidgets.QFormLayout()
        flay.setVerticalSpacing(5)
        self.setLayout(flay)
        self.widgets = OrderedDict()  # key -> form widget
        self.widgetsMeta = dict()     # key -> {'caption', 'widget_type'}
        self.setWidgets(cfg)

    def setWidgets(self, cfg):
        """Add one form row per item in *cfg*."""
        for item in cfg:
            self.addFormItem(item['key'], item['caption'], '', item['widget_type'],
                item.get('description', None),
                item.get('widget_config', {}))

    def getWidgetsConfig(self):
        """Return a serializable description of the current rows."""
        widgetsConfig = list()
        for key, widget in self.widgets.items():
            meta = self.widgetsMeta[key]
            item = {'key': key, 'caption': meta['caption'], 'widget_type': meta['widget_type']}
            widgetsConfig.append(item)
        return widgetsConfig

    def addFormItem(self, key, caption, content=None, widgetType='FormLineEdit', description=None, widget_cfg=None):
        """Create the widget for *widgetType* and add a captioned form row.

        Raises:
            ValueError: for an unknown *widgetType* (previously this
                surfaced later as a NameError on `widget`).
        """
        if widgetType == 'FormLineEdit':
            widget = FormLineEdit()
            widget.setContent(content)
        elif widgetType == 'FormTextEdit':
            widget = FormTextEdit()
            widget.setContent(content)
        elif widgetType == 'FormTable':
            widget = FormTable(widget_cfg)
        elif widgetType == 'FormFile':
            widget = FormFile(widget_cfg)
        else:
            raise ValueError(f'Unknown widget type: {widgetType}')
        self.widgets[key] = widget
        self.widgetsMeta[key] = {'caption': caption, 'widget_type': widgetType}
        captionLabel = QtWidgets.QLabel(caption)
        if description is not None:
            captionLabel.setToolTip(description)
        self.flay.addRow(captionLabel, widget)

    def setContent(self, cfg):
        """Fill the widgets from *cfg*; keys missing from *cfg* are logged and skipped."""
        for key, widget in self.widgets.items():
            if key not in cfg.keys():
                # logger.warn is deprecated; use warning.
                logger.warning(f'{key} not found')
                continue
            widget.setContent(cfg[key])

    def getContent(self):
        """Return {key: widget content} for all rows."""
        cfg = dict()
        for key, widget in self.widgets.items():
            cfg[key] = widget.getContent()
        return cfg
class TemplateScrollForm(QtWidgets.QScrollArea):
    """Scroll area hosting the template parameter form."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setWidgetResizable(True)
        self.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
        self.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)

    def setWidgets(self, cfg):
        """Replace the hosted form with a fresh CustomForm built from *cfg*."""
        self.form = CustomForm(cfg)
        self.setWidget(self.form)
class TemplateWidget(QtWidgets.QWidget):
    """'Template' tab: fill in a parameter form and render a jinja2 code
    template into a runnable script file."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        vbox = QtWidgets.QVBoxLayout()
        vbox.setContentsMargins(5,5,5,5)
        vbox.setMargin(5)  # Qt4-era API; available on PySide2 (pinned)
        self.setLayout(vbox)
        flay = QtWidgets.QFormLayout()
        vbox.addLayout(flay)
        self.formConfig = QtWidgets.QLineEdit()
        self.formConfig.setReadOnly(True)
        flay.addRow('Form', self.formConfig)
        self.settingsFileName = DirFileLayout('',
            loadCall=self.loadValues, saveCall=self.saveValues)
        flay.addRow('Settings', self.settingsFileName)
        self.templateForm = TemplateScrollForm(self)
        vbox.addWidget(self.templateForm)
        flay = QtWidgets.QFormLayout()
        vbox.addLayout(flay)
        self.codeTemplate = QtWidgets.QLineEdit()
        self.codeTemplate.setReadOnly(True)
        flay.addRow('Script Template', self.codeTemplate)
        self.scriptFileName = FileLayout('', save=True)
        flay.addRow('Script', self.scriptFileName)
        hbox = QtWidgets.QHBoxLayout()
        hbox.setMargin(0)
        vbox.addLayout(hbox)
        self.makeBtn = QtWidgets.QPushButton('Make')
        self.makeBtn.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'script_lightning.png')))
        self.makeBtn.clicked.connect(self.make)
        hbox.addWidget(self.makeBtn)

    @property
    def form(self):
        """The CustomForm hosted by the scroll area."""
        return self.templateForm.widget()

    def loadValues(self, filepath):
        """Load parameter values and call descriptions from a .toml/.json file."""
        if filepath.suffix.lower() in ['.toml']:
            import toml
            cfg = toml.load(str(filepath))
        elif filepath.suffix.lower() in ['.json']:
            with open(filepath, 'r') as fp:
                cfg = json.load(fp)
        else:
            # Bug fix: an unsupported suffix used to raise NameError on cfg.
            raise ValueError(f'Unsupported settings file: {filepath}')
        self.form.setContent(cfg['params'])
        for call in cfg['calls']:
            self.runTab.setFormWidgets(call['call'], updateCombo=True)
            self.runTab.form.setContent(call['kwargs'])
            self.runTab.returnRef.setText(call['retref'])

    def saveValues(self, filepath):
        """Save parameter values and the current call to a .toml/.json file."""
        cfg = dict()
        cfg['params'] = self.form.getContent()
        cfg['calls'] = [{'call': self.runTab.funcName.currentText(), 'retref': self.runTab.returnRef.text(), 'kwargs': self.runTab.form.getContent()}]
        if filepath.suffix.lower() in ['.toml']:
            import toml
            with open(filepath, 'w') as fp:
                toml.dump(cfg, fp)
        elif filepath.suffix.lower() in ['.json']:
            with open(filepath, 'w') as fp:
                json.dump(cfg, fp, indent='    ')

    @property
    def runTab(self):
        # Sibling 'Run' tab, reached via the enclosing ScriptTabs widget.
        return self.parent().parent().runTab

    def make(self):
        """Render the jinja2 template with the form values and write the script."""
        templateFileName = self.codeTemplate.text()
        cfg = self.form.getContent()
        scriptFileName = self.scriptFileName.text()
        # Bug fix: close the template file (was opened without a context manager).
        with open(templateFileName, 'r') as fp:
            templ_grab = Template(fp.read())
        pycode = templ_grab.render(**cfg)
        with open(scriptFileName, 'w') as fp:
            # Bug fix: the template engine is jinja2, not 'ninja2'.
            fp.write(f'# Created by jinja2 from template {templateFileName}\n')
            fp.write(pycode)
        self.runTab.scriptFileWidget.setText(scriptFileName)
class RunScrollForm(QtWidgets.QScrollArea):
    """Scroll area hosting the run-tab argument form."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setWidgetResizable(True)
        self.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
        self.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)

    def setWidgets(self, cfg):
        """Replace the hosted form with a fresh CustomForm built from *cfg*."""
        self.form = CustomForm(cfg)
        self.setWidget(self.form)

    def setContent(self, cfg):
        """Delegate content filling to the hosted form."""
        self.form.setContent(cfg)

    def getContent(self):
        """Delegate content retrieval to the hosted form."""
        return self.form.getContent()
class RunWidget(QtWidgets.QWidget):
    """'Run' tab: pick a generated script and one of its functions, fill the
    keyword arguments via a dynamic form, and execute the call in the console."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        vbox = QtWidgets.QVBoxLayout()
        vbox.setContentsMargins(5, 5, 5, 5)
        vbox.setMargin(5)  # Qt4-era API; available on PySide2 (pinned)
        self.setLayout(vbox)
        flay = QtWidgets.QFormLayout()
        vbox.addLayout(flay)
        self.scriptFileWidget = FileLayout('')
        flay.addRow('Script', self.scriptFileWidget)
        hbox = QtWidgets.QHBoxLayout()
        self.funcName = QtWidgets.QComboBox()
        self.funcName.setEditable(False)
        self.funcName.currentIndexChanged.connect(self.funcNameIndexChanged)
        hbox.addWidget(self.funcName)
        self.locateBtn = QtWidgets.QPushButton('Locate')
        self.locateBtn.clicked.connect(self.locate)
        hbox.addWidget(self.locateBtn)
        flay.addRow('Function', hbox)
        self.callerEdit = QtWidgets.QLineEdit('')
        flay.addRow('Caller', self.callerEdit)
        self.settingsFileName = FileLayout('',
            loadCall=self.loadValues, saveCall=self.saveValues)
        flay.addRow('Settings', self.settingsFileName)
        self.runScrollForm = RunScrollForm()
        vbox.addWidget(self.runScrollForm)
        flay = QtWidgets.QFormLayout()
        vbox.addLayout(flay)
        self.returnRef = QtWidgets.QLineEdit()
        flay.addRow('Return Reference', self.returnRef)
        hbox = QtWidgets.QHBoxLayout()
        vbox.addLayout(hbox)
        self.runBtn = QtWidgets.QPushButton('Run')
        self.runBtn.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'script_go.png')))
        self.runBtn.clicked.connect(self.run)
        hbox.addWidget(self.runBtn)
        self.widgetConfigs = dict()  # funcName -> form widget configuration

    @property
    def form(self):
        """The CustomForm currently hosted in the scroll area."""
        return self.runScrollForm.widget()

    def addFormWidgetsConfig(self, funcName, cfg):
        """Register a form configuration for *funcName* and list it in the combo."""
        self.widgetConfigs[funcName] = cfg
        self.funcName.addItem(funcName)

    def setFormWidgets(self, funcName, updateCombo=False):
        """Build the argument form for *funcName*; optionally sync the combo box."""
        cfg = self.widgetConfigs[funcName]
        self.runScrollForm.setWidgets(cfg)
        if updateCombo:
            index = self.funcName.findText(funcName)
            self.funcName.setCurrentIndex(index)

    def clearFormWidgets(self):
        """Drop all registered functions and their form configurations."""
        while self.funcName.count() > 0:
            self.funcName.removeItem(0)
        self.widgetConfigs.clear()

    @property
    def scriptFileName(self):
        """Path of the script currently selected for running."""
        return self.scriptFileWidget.text()

    def funcNameIndexChanged(self, *args):
        """Rebuild the argument form when another function is picked."""
        funcName = self.funcName.currentText()
        if funcName == '':
            return
        self.setFormWidgets(funcName)

    def locate(self):
        """Resolve the script against the script manager's search path and
        fill the caller field with '<module>.<function>'."""
        from gdesk import shell, use
        scriptFileName = Path(self.scriptFileName).resolve()
        funcName = self.funcName.currentText()
        for path in use.__script_manager__.path:
            try:
                use_path = scriptFileName.relative_to(Path(path).resolve())
                break
            except ValueError:
                # Not relative to this search-path entry; try the next one.
                pass
        else:
            # Bug fix: return instead of falling through to the undefined
            # use_path (NameError); logger.warn is deprecated.
            logger.warning('Could not locate script')
            return
        self.callerEdit.setText(f'{use_path.stem}.{funcName}')

    def loadValues(self, filepath):
        """Load form values from a .toml or .json settings file."""
        if filepath.suffix.lower() in ['.toml']:
            import toml
            cfg = toml.load(str(filepath))
        elif filepath.suffix.lower() in ['.json']:
            with open(filepath, 'r') as fp:
                cfg = json.load(fp)
        else:
            # Bug fix: an unsupported suffix used to raise NameError on cfg.
            raise ValueError(f'Unsupported settings file: {filepath}')
        self.form.setContent(cfg)

    def saveValues(self, filepath):
        """Save the current form values to a .toml or .json settings file."""
        cfg = self.form.getContent()
        if filepath.suffix.lower() in ['.toml']:
            import toml
            with open(filepath, 'w') as fp:
                toml.dump(cfg, fp)
        elif filepath.suffix.lower() in ['.json']:
            with open(filepath, 'w') as fp:
                json.dump(cfg, fp, indent='    ')

    def run(self):
        """Locate the function and execute it with the form kwargs in the console."""
        from gdesk import shell, use
        cfg = self.form.getContent()
        console = gui.qapp.panels.selected('console')
        self.locate()
        module_func = self.callerEdit.text()
        parts = module_func.split('.')
        module, func = '.'.join(parts[:-1]), parts[-1]
        retref = self.returnRef.text().strip()
        console.exec_cmd('use.__script_manager__.update_now()')
        if not retref == '':
            console.exec_cmd(f'{retref} = use("{module}").{func}(**{cfg})')
        else:
            console.exec_cmd(f'use("{module}").{func}(**{cfg})')
class ScriptTabs(QtWidgets.QTabWidget):
    """Two-tab container: template rendering and script execution."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.templateTab = TemplateWidget()
        self.runTab = RunWidget()
        self.addTab(self.templateTab, 'Template')
        self.addTab(self.runTab, 'Run')

    @property
    def scriptFileName(self):
        """Path of the script configured on the template tab."""
        return self.templateTab.scriptFileName.text()
class WrapCaller(object):
    """Freeze a callable together with its arguments for later, argument-less
    invocation (e.g. from a Qt signal that passes no custom arguments)."""

    def __init__(self, caller, *args, **kwargs):
        self.caller = caller
        self.args = args
        self.kwargs = kwargs

    def __call__(self):
        """Invoke the stored callable with the frozen arguments."""
        target, positional, keywords = self.caller, self.args, self.kwargs
        target(*positional, **keywords)
class RecentMenu(QtWidgets.QMenu):
    """'Recent' submenu listing script templates previously opened by scriptwiz."""
    def __init__(self, parent=None):
        super().__init__('Recent', parent)
        self.scriptPanel = self.parent()
        #self.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'images.png')))

    def showEvent(self, event):
        # Rebuild the action list lazily, each time the menu pops up.
        self.initactions()

    def initactions(self):
        """Populate the menu from the history database (category 'scriptwiz')."""
        self.clear()
        # NOTE(review): this attribute shadows QWidget.actions(); kept as-is
        # since callers may rely on it. It also keeps the QAction objects
        # alive (they are created without a parent).
        self.actions = []
        for rowid, timestamp, path in gui.qapp.history.yield_recent_paths(category='scriptwiz'):
            action = QtWidgets.QAction(path, self)
            action.triggered.connect(WrapCaller(self.scriptPanel.openTemplate, path))
            self.addAction(action)
            self.actions.append(action)
class ScriptWizardPanel(BasePanel):
    """Panel wrapping the script wizard: template rendering plus script running."""
    panelCategory = 'scriptwiz'
    panelShortName = 'basic'
    userVisible = True
    classIconFile = str(respath / 'icons' / 'px16' / 'script_bricks.png')

    def __init__(self, parent, panid):
        super().__init__(parent, panid, type(self).panelCategory)
        self.fileMenu = CheckMenu("&File", self.menuBar())
        self.addMenuItem(self.fileMenu, 'Open Template', self.openTemplateDialog,
            statusTip="Open a script template")
        self.fileMenu.addMenu(RecentMenu(self))
        self.addMenuItem(self.fileMenu, 'Load Values', self.loadValues,
            statusTip="Load Values")
        self.addMenuItem(self.fileMenu, 'Save Values', self.saveValues,
            statusTip="Save Values")
        self.addMenuItem(self.fileMenu, 'Close', self.close_panel,
            statusTip="Close this levels panel",
            icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'cross.png')))
        self.tabs = ScriptTabs()
        self.setCentralWidget(self.tabs)
        self.addBaseMenu()
        self.statusBar().hide()

    @property
    def templateTab(self):
        """The 'Template' tab widget."""
        return self.tabs.templateTab

    @property
    def runTab(self):
        """The 'Run' tab widget."""
        return self.tabs.runTab

    def openTemplateDialog(self):
        """Ask the user for a template JSON file and open it."""
        filename = gui.getfilename('*.json', 'Open Script Template')
        self.openTemplate(filename)

    def openTemplate(self, filename):
        """Load a template description (JSON) and configure both tabs."""
        from gdesk import shell, use
        scm = use.__script_manager__
        filename = Path(filename)
        cfg = json.load(open(filename, 'r'))
        # Remember the template in the 'recent' history.
        gui.qapp.history.storepath(str(filename), category='scriptwiz')
        self.templateTab.formConfig.setText(str(filename))
        self.templateTab.templateForm.setWidgets(cfg.get('form', {}))
        self.templateTab.settingsFileName.setText(str(filename.parent / cfg.get('settings_file', '')))
        self.templateTab.codeTemplate.setText(str(filename.parent / cfg['code_template']))
        self.templateTab.scriptFileName.setText(str(Path(scm.path[0]) / cfg.get('script', '')))
        self.runTab.clearFormWidgets()
        calls = cfg.get('calls', [])
        for call in calls:
            funcName = call.get('call')
            self.runTab.addFormWidgetsConfig(funcName, call.get('form', {}))
        default_call = cfg.get('default_call', None)
        # findText returns -1 when default_call is absent; setCurrentIndex(-1)
        # simply clears the selection.
        index = self.runTab.funcName.findText(default_call)
        self.runTab.funcName.setCurrentIndex(index)

    def loadValues(self):
        """Load template-form values from a TOML file."""
        import toml
        valuesFile = gui.getfilename('*.toml')
        cfg = toml.load(valuesFile)
        self.templateTab.form.setContent(cfg)

    def saveValues(self):
        """Save template-form values to a TOML file."""
        import toml
        valuesFile = gui.putfilename('*.toml')
        cfg = self.templateTab.form.getContent()
        with open(valuesFile, 'w') as fp:
            toml.dump(cfg, fp)
|
thocoo/gamma-desk | gdesk/dialogs/main.py | <reponame>thocoo/gamma-desk
import threading
import sys, os
import ctypes
from collections import OrderedDict
import logging
import importlib
import pprint
from pathlib import Path
import numpy as np
from qtpy import QtGui, QtWidgets, QtCore
from qtpy.QtCore import Qt
from .. import config, gui, __release__, PROGNAME, DOC_HTML
from ..core import conf
from ..console import restart
from .about import AboutScreen
from ..panels.base import BasePanel
from ..ezdock.laystruct import LayoutStruct
from ..dicttree.widgets import DictionaryTreeDialog
logger = logging.getLogger(__name__)
respath = Path(config['respath'])
class NewPanelMenu(QtWidgets.QMenu):
    """'New' menu: one submenu per panel category, one action per panel class."""
    def __init__(self, parent=None, showIcon=False):
        super().__init__('New', parent)
        if showIcon:
            self.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'application_add.png')))

    @property
    def panels(self):
        # Panel registry owned by the running application instance.
        return QtWidgets.QApplication.instance().panels

    def showEvent(self, event):
        # Remember the popup position; it is passed along to newPanel.
        self.showpos = QtGui.QCursor().pos()
        self.initactions()

    def initactions(self):
        """Rebuild the category submenus from the registered panel classes."""
        self.clear()
        panelClasses = BasePanel.userPanelClasses()
        # Keep references: parentless QActions would be garbage collected.
        self.liveActions = []
        for category, catPanelClasses in panelClasses.items():
            catMenu = QtWidgets.QMenu(category)
            self.addMenu(catMenu)
            for panelClass in catPanelClasses:
                if not panelClass.userVisible: continue
                try:
                    panelShortName = panelClass.panelShortName
                except:
                    panelShortName = 'unkown'
                action = QtWidgets.QAction(f'{panelShortName} <{panelClass.__qualname__}>')
                if hasattr(panelClass, 'classIconFile'):
                    action.setIcon(QtGui.QIcon(panelClass.classIconFile))
                action.triggered.connect(CachedArgCall(self.newPanel, panelClass, self.parent().windowName, self.showpos))
                catMenu.addAction(action)
                self.liveActions.append(action)

    def newPanel(self, panelClass, windowName, showpos=None):
        """Create a new panel; plot panels are created through a pylab figure."""
        if panelClass.panelCategory == 'plot':
            import pylab
            fig = pylab.figure()
        else:
            self.panels.new_panel(panelClass, windowName)
class ShowMenu(QtWidgets.QMenu):
    """'Panel' menu: a previews dialog plus one checkable action per live panel."""
    def __init__(self, parent=None, showIcon=False):
        super().__init__('Panel', parent)
        if showIcon:
            self.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'application_get.png')))

    def showEvent(self, event):
        # Rebuild the action list lazily, each time the menu pops up.
        self.initactions()

    @property
    def panels(self):
        # Panel registry owned by the running application instance.
        return QtWidgets.QApplication.instance().panels

    def preview(self):
        """Show floating panel previews; select and raise the chosen panel.

        NOTE(review): assumes selectedPanel is always set after exec_(),
        even when the dialog is dismissed - confirm against
        PanelsFloatingPreviews.
        """
        self.previews = PanelsFloatingPreviews()
        self.previews.preview()
        self.previews.exec_()
        self.previews.selectedPanel.select()
        self.previews.selectedPanel.show_me()

    def initactions(self):
        """Rebuild the actions from the live panels, checking the selected one."""
        self.clear()
        # Keep references: parentless QActions would be garbage collected.
        self.liveActions = []
        action = QtWidgets.QAction(f'Previews...\t{config["shortcuts"]["panel preview"]}', triggered=self.preview)
        self.addAction(action)
        self.liveActions.append(action)
        self.addSeparator()
        for category in self.panels.keys():
            panels = self.panels[category]
            selected_panid = self.panels.selected(category, panel=False)
            for panid in sorted(panels.keys()):
                panel = self.panels[category][panid]
                action = QtWidgets.QAction(panel.windowTitle())
                action.triggered.connect(CachedArgCall(self.showPanel, panel))
                action.setCheckable(True)
                if panel.panid == selected_panid:
                    action.setChecked(True)
                else:
                    action.setChecked(False)
                self.addAction(action)
                self.liveActions.append(action)

    def showPanel(self, panel):
        """Raise and select the given panel."""
        panel.show_me()
        panel.select()
class WindowMenu(QtWidgets.QMenu):
    """'Window' menu: a previews dialog plus one action per open window."""
    def __init__(self, parent=None, showIcon=False):
        super().__init__('Window', parent)
        if showIcon:
            self.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'application_double.png')))

    @property
    def windows(self):
        # Window registry owned by the running application instance.
        return QtWidgets.QApplication.instance().windows

    def showEvent(self, event):
        # Rebuild the action list lazily, each time the menu pops up.
        self.initactions()

    def preview(self):
        """Show the floating window-previews dialog."""
        self.previews = WindowsFloatingPreviews()
        self.previews.preview()
        self.previews.exec_()

    def initactions(self):
        """Rebuild the menu from the currently open windows."""
        self.clear()
        # Keep references: parentless QActions would be garbage collected.
        self.liveActions = []
        action = QtWidgets.QAction(f'Previews...\t{config["shortcuts"]["window preview"]}', triggered=self.preview)
        self.addAction(action)
        self.liveActions.append(action)
        self.addSeparator()
        for window_name in self.windows.keys():
            window = self.windows[window_name]
            action = QtWidgets.QAction(window_name)
            action.triggered.connect(CachedArgCall(self.showWindow, window))
            self.addAction(action)
            self.liveActions.append(action)

    def showWindow(self, window):
        """Show and raise the given window."""
        window.show()
        window.raise_()
class CachedArgCall(object):
    """Freeze a callable together with its arguments for later, argument-less
    invocation (used as a Qt action 'triggered' handler)."""

    def __init__(self, caller, *args, **kwargs):
        self.caller = caller
        self.args = args
        self.kwargs = kwargs

    def __call__(self):
        """Invoke the stored callable with the frozen arguments."""
        target, positional, keywords = self.caller, self.args, self.kwargs
        target(*positional, **keywords)
class LayoutMenu(QtWidgets.QMenu):
    """'Layout' menu: save the current perspective or restore a stored one."""
    def __init__(self, parent=None, showIcon=False):
        super().__init__('Layout', parent)
        if showIcon:
            self.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'layout_content.png')))
        self.initactions()

    @property
    def panels(self):
        # Panel registry owned by the running application instance.
        return QtWidgets.QApplication.instance().panels

    def showEvent(self, event):
        # Rebuild lazily so newly saved layouts appear immediately.
        self.initactions()

    def initactions(self):
        """Rebuild: one save action plus one restore action per stored layout."""
        self.clear()
        action = QtWidgets.QAction('Save Layout...', self, triggered=self.saveLayout)
        self.addAction(action)
        self.addSeparator()
        self.addLayoutActions(self)

    def addLayoutActions(self, parent=None):
        """Add a restore action per configured layout, with shortcut hints."""
        # Invert the shortcut->name map so a shortcut can be looked up by name.
        shortcuts = dict((v,k) for k,v in config['shortcuts']['layout'].items())
        prefix = config['shortcuts']['layout']['prefix']
        for name, layout in config['layout'].items():
            shortcut = shortcuts.get(name, None)
            if shortcut is None:
                action = QtWidgets.QAction(name, parent)
            else:
                action = QtWidgets.QAction(f'{name}\t{prefix}{shortcut}', parent)
            caller = self.panels.restore_state_from_config
            action.triggered.connect(CachedArgCall(caller, name))
            parent.addAction(action)

    def saveLayout(self):
        """Prompt for a name and store the current perspective in the config.

        The reserved name 'base' cannot be overwritten.
        """
        layout_name = gui.dialog.getstring('Give it a name')
        if layout_name == '': return
        if layout_name == 'base':
            gui.dialog.msgbox(f"You can't overwrite {layout_name}", icon='warn')
        else:
            config['layout'][layout_name] = gui.qapp.panels.ezm.get_perspective()
class MainDialog(QtWidgets.QMainWindow):
    """Main management window: a tabbed central area holding the layout
    chooser, plus the application menu bar (Application, New, Show,
    Window, Layout, Config, Help)."""

    def __init__(self, panels):
        super().__init__()
        self.setWindowTitle(f'{PROGNAME} {__release__}')
        self.panels = panels
        self.tabs = QtWidgets.QTabWidget(self)
        self.setCentralWidget(self.tabs)
        self.panels_layout = PanelsLayout(self, panels)
        self.tabs.addTab(self.panels_layout, 'Layout')
        self.initMenu()
        # Window that invoked exec_(); cleared again in closeEvent.
        self.callerWindow = None

    @property
    def qapp(self):
        """The running QApplication instance."""
        return QtWidgets.QApplication.instance()

    @property
    def windowName(self):
        # This dialog is not a registered, named application window.
        return None

    def initMenu(self):
        """Populate the menu bar with all top-level menus and their actions."""
        self.appMenu = self.menuBar().addMenu("&Application")
        act = QtWidgets.QAction("Restart", self,
            triggered=self.restart,
            statusTip=f"Restart {PROGNAME}",
            icon=QtGui.QIcon(str(respath / 'icons' / 'px16' / 'recycle.png')))
        self.appMenu.addAction(act)
        act = QtWidgets.QAction("Exit", self, shortcut=QtGui.QKeySequence.Quit,
            statusTip=f"Exit {PROGNAME}",
            triggered=self.qapp.quit,
            icon=QtGui.QIcon(str(respath / 'icons' / 'px16' / 'door_out.png')))
        self.appMenu.addAction(act)
        self.newMenu = NewPanelMenu(self)
        self.menuBar().addMenu(self.newMenu)
        self.showMenu = ShowMenu(self)
        self.menuBar().addMenu(self.showMenu)
        self.windowMenu = WindowMenu(self)
        self.menuBar().addMenu(self.windowMenu)
        self.layoutMenu = LayoutMenu(self)
        self.menuBar().addMenu(self.layoutMenu)
        self.configMenu = self.menuBar().addMenu("Config")
        self.configMenu.addAction(QtWidgets.QAction("View Config", self, triggered=self.showConfig,
            icon=QtGui.QIcon(str(respath / 'icons' / 'px16' / 'page_gear.png'))))
        self.configMenu.addAction(QtWidgets.QAction("Save Config", self, triggered=self.saveConfig))
        #matplotlib.rcsetup.all_backends
        #'module://gdesk.matplotbe'
        self.helpMenu = self.menuBar().addMenu("Help")
        helpAct = QtWidgets.QAction("&Help", self, triggered=self.help)
        helpAct.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'help.png')))
        self.helpMenu.addAction(helpAct)
        aboutGhQtAct = QtWidgets.QAction(f"About {PROGNAME}", self, triggered=self.about)
        self.helpMenu.addAction(aboutGhQtAct)
        self.helpMenu.addAction(QtWidgets.QAction("License", self, triggered=self.license))
        infoGhQtAct = QtWidgets.QAction("Instance Info", self, triggered=self.info)
        infoGhQtAct.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'information.png')))
        self.helpMenu.addAction(infoGhQtAct)
        aboutQtAct = QtWidgets.QAction("About Qt", self, triggered=self.qapp.aboutQt)
        self.helpMenu.addAction(aboutQtAct)

    def refresh(self):
        # Delegate to the layout tab.
        self.panels_layout.refresh()

    def exec_(self, callerWindow=None):
        """Show and activate the dialog, remembering which window opened it."""
        self.callerWindow = callerWindow
        self.raise_()
        self.qapp.setActiveWindow(self)
        self.showNormal()

    def accept(self):
        # 'Accepting' the dialog just minimizes it; the instance is reused.
        self.showMinimized()

    def restart(self):
        restart()
        #os.execlp(sys.executable, 'python', '-m', 'gdesk')

    def showConfig(self):
        """Open the configuration in a tree-view dialog."""
        dt = DictionaryTreeDialog(config)
        dt.edit()

    def saveConfig(self):
        """Ask for a target file and save the configuration as JSON."""
        path = gui.putfilename('JSON (*.json)', file=config['save_config_file'])
        conf.save_config_json(path)

    def help(self):
        # Open the HTML documentation via the Windows 'start' shell command.
        print("Opening %s" % DOC_HTML)
        os.system('start "help" "%s"' % DOC_HTML)

    def about(self):
        aboutScreen = AboutScreen()
        aboutScreen.exec_()

    def license(self):
        """Print the license text to the console panel and raise it."""
        message = open(respath / 'LICENSE.txt', 'r').read()
        print(message)
        self.qapp.panels['console'][0].show_me()

    def info(self):
        """Print host/instance information to the console panel and raise it."""
        message = self.qapp.cmdserver.host_info()
        print(message)
        self.qapp.panels['console'][0].show_me()

    def closeEvent(self, event):
        """Accept the close only when every application window is hidden;
        otherwise just minimize instead of closing."""
        allHidden = True
        for window in self.qapp.windows.values():
            if window.isVisible():
                allHidden = False
                break
        if allHidden:
            event.accept()
        else:
            self.showMinimized()
            self.callerWindow = None
            event.ignore()
class PanelsFloatingPreviews(QtWidgets.QDialog):
    """Frameless, always-on-top overlay showing thumbnails of all panels."""

    def __init__(self):
        super().__init__()
        self.thumbs = []  # thumbnail buttons currently shown
        vbox = QtWidgets.QVBoxLayout()
        self.setLayout(vbox)
        # Caption in a double-size font.
        font = self.font()
        font.setPointSize(font.pointSize() * 2)
        self.caption = QtWidgets.QLabel('Panels')
        self.caption.setFont(font)
        self.caption.setAlignment(Qt.AlignHCenter | Qt.AlignVCenter)
        vbox.addWidget(self.caption)
        self.boxlay = QtWidgets.QGridLayout()
        vbox.addLayout(self.boxlay)
        self.setWindowFlag(Qt.FramelessWindowHint)
        self.setWindowFlag(Qt.WindowStaysOnTopHint)

    @property
    def panels(self):
        """The application-wide panels registry."""
        return QtWidgets.QApplication.instance().panels

    def preview(self):
        """Grab a thumbnail of every panel and lay them out in a roughly 16:9 grid."""
        total = sum(len(pans) for cat, pans in self.panels.items())
        # Bug fix: clamp to at least one column; with zero (or very few)
        # panels the original int((total*16/9)**0.5) was 0 and the modulo
        # below raised ZeroDivisionError.
        colcount = max(1, int((total * 16 / 9) ** 0.5))
        index = 0
        for cat in self.panels.keys():
            selectedId = self.panels.selected(cat, panel=False)
            for panid in sorted(self.panels[cat].keys()):
                panel = self.panels[cat][panid]
                pixmap = panel.grab().scaled(160, 160, QtCore.Qt.KeepAspectRatio, QtCore.Qt.SmoothTransformation)
                thumb = QtWidgets.QToolButton()
                thumb.setToolButtonStyle(QtCore.Qt.ToolButtonTextUnderIcon)
                thumb.setIcon(QtGui.QIcon(pixmap))
                thumb.setIconSize(pixmap.rect().size())
                thumb.setText(panel.short_title)
                if panel.panid == selectedId:
                    thumb.setDown(True)  # mark the currently selected panel
                thumb.setToolTip(panel.long_title)
                thumb.clicked.connect(CachedArgCall(self.showPanel, panel))
                self.thumbs.append(thumb)
                self.boxlay.addWidget(thumb, index // colcount, index % colcount)
                index += 1

    def showPanel(self, panel):
        """Dismiss the overlay and remember which panel was chosen."""
        for thumb in self.thumbs:
            thumb.setParent(None)
            thumb.hide()
        self.thumbs = []
        self.hide()
        self.selectedPanel = panel
class WindowsFloatingPreviews(QtWidgets.QDialog):
    """Frameless, always-on-top overlay showing thumbnails of all windows."""

    def __init__(self):
        super().__init__()
        self.thumbs = []  # thumbnail buttons currently shown
        self.setLayout(QtWidgets.QVBoxLayout())
        # Caption in a double-size font.
        font = self.font()
        font.setPointSize(font.pointSize() * 2)
        self.caption = QtWidgets.QLabel('Windows')
        self.caption.setFont(font)
        self.caption.setAlignment(Qt.AlignHCenter | Qt.AlignVCenter)
        self.layout().addWidget(self.caption)
        self.boxlay = QtWidgets.QGridLayout()
        self.layout().addLayout(self.boxlay)
        self.setWindowFlag(Qt.FramelessWindowHint)
        self.setWindowFlag(Qt.WindowStaysOnTopHint)

    @property
    def windows(self):
        """The application-wide windows registry."""
        return QtWidgets.QApplication.instance().windows

    def preview(self):
        """Grab a thumbnail of every window and lay them out in a roughly 16:9 grid."""
        total = len(self.windows)
        # Bug fix: clamp to at least one column; with zero windows the
        # original int((total*16/9)**0.5) was 0 and the modulo below
        # raised ZeroDivisionError.
        colcount = max(1, int((total * 16 / 9) ** 0.5))
        index = 0
        for window in self.windows.values():
            pixmap = window.grab().scaled(160, 160, QtCore.Qt.KeepAspectRatio, QtCore.Qt.SmoothTransformation)
            thumb = QtWidgets.QToolButton()
            thumb.setToolButtonStyle(QtCore.Qt.ToolButtonTextUnderIcon)
            thumb.setIcon(QtGui.QIcon(pixmap))
            thumb.setIconSize(pixmap.rect().size())
            # The short window name is more readable than the full title,
            # which is shown as the tooltip instead.
            thumb.setText(window.name)
            thumb.setToolTip(window.windowTitle())
            thumb.clicked.connect(CachedArgCall(self.showWindow, window))
            self.thumbs.append(thumb)
            self.boxlay.addWidget(thumb, index // colcount, index % colcount)
            index += 1

    def showWindow(self, window):
        """Restore and raise *window*, then dismiss the overlay."""
        window.showNormal()
        window.raise_()
        for thumb in self.thumbs:
            thumb.hide()
        self.thumbs = []
        self.hide()
class LayoutList(QtWidgets.QListWidget):
    """List of saved layouts; selecting an item shows a textual preview."""

    def __init__(self, parent):
        super().__init__(parent=parent)
        self.currentItemChanged.connect(self.changeItem)

    def changeItem(self, item):
        """Render a description of the layout behind *item* into the preview pane."""
        if item is None:
            return
        layout = config['layout'][item.name]
        ls = LayoutStruct()
        pieces = []
        for window in layout["windows"]:
            ls.root = window["docks"]
            pieces.append(f'window: {window["name"]}\n' + ls.describe() + '\n\n')
        self.parent().parent().preview.setPlainText(''.join(pieces))
class PanelsLayout(QtWidgets.QWidget):
    """'Layout' tab of the MainDialog: layout list, preview pane and a Load button."""

    def __init__(self, dialog, panels):
        super().__init__(parent=dialog)
        self.dialog = dialog
        self.panels = panels
        self.layout_list = LayoutList(self)
        self.preview = QtWidgets.QPlainTextEdit(self)
        console_font = QtGui.QFont('Consolas', pointSize=config['console']['fontsize'])
        self.preview.setFont(console_font)
        self.preview.setWordWrapMode(QtGui.QTextOption.NoWrap)
        self.vbox = QtWidgets.QVBoxLayout()
        self.setLayout(self.vbox)
        self.box = QtWidgets.QSplitter(QtCore.Qt.Horizontal)
        self.vbox.addWidget(self.box)
        self.box.addWidget(self.layout_list)
        self.box.addWidget(self.preview)
        self.loadBtn = QtWidgets.QPushButton('Load')
        self.loadBtn.clicked.connect(self.loadLayout)
        hbox = QtWidgets.QHBoxLayout()
        hbox.addWidget(self.loadBtn)
        self.vbox.addLayout(hbox)
        self.refresh()

    def refresh(self):
        """Rebuild the layout list from the configuration."""
        self.layout_list.clear()
        shortcuts = {v: k for k, v in config['shortcuts']['layout'].items()}
        for name, layout in config['layout'].items():
            description = layout.get('description', 'no description')
            shortcut = shortcuts.get(name)
            if shortcut:
                description += f"\n [Ctrl+F{shortcut}]"
            item = QtWidgets.QListWidgetItem(f'{name}:\n {description}')
            item.name = name
            self.layout_list.addItem(item)

    def loadLayout(self):
        """Apply the selected layout and minimize the dialog.

        Bug fix: previously raised IndexError when the Load button was
        clicked with no layout selected; now a no-op in that case.
        """
        selected = self.layout_list.selectedItems()
        if not selected:
            return
        self.panels.restore_state_from_config(selected[0].name)
        self.dialog.accept()
|
thocoo/gamma-desk | gdesk/widgets/grid.py | from qtpy import QtCore, QtGui, QtWidgets
class GridSplitter(QtWidgets.QGridLayout):
layoutChanged = QtCore.Signal()
def __init__(self, parent):
super().__init__(parent=parent)
self.setSpacing(0)
self.setContentsMargins(0,0,0,0)
self.splitters = []
def addWidget(self, widget, row, col, rowspan=1, colspan=1):
super().addWidget(widget, row*2, col*2, rowspan*2-1, colspan*2-1)
if (self.itemAtPosition(row*2-1, col*2) == None) and (row > 0):
splitter = GridSeperator(self.parent(), QtCore.Qt.Vertical)
splitter.attachGridSplitter(self, row-1, row)
self.splitters.append(splitter)
super().addWidget(splitter, row*2-1, col*2)
if (self.itemAtPosition(row*2, col*2-1) == None) and (col > 0):
splitter = GridSeperator(self.parent(), QtCore.Qt.Horizontal)
splitter.attachGridSplitter(self, col-1, col)
self.splitters.append(splitter)
super().addWidget(splitter, row*2, col*2-1)
def getRowStretches(self):
"""get the row streches from all rows, skip the row splitters"""
stretches = []
for i in range(0, self.rowCount(), 2):
stretches.append(self.cellRect(i, 0).height())
return stretches
def setRowStretches(self, stretches):
for (i, stretch) in zip(range(0, self.rowCount(), 2), stretches):
self.setRowStretch(i, stretch)
def resetRowStretches(self):
for i in range(0, self.rowCount()):
self.setRowStretch(i, 0)
def getColumnStretches(self):
"""get the columns streches from all columns, skip the column splitters"""
stretches = []
for i in range(0, self.columnCount(), 2):
stretches.append(self.cellRect(0, i).width())
return stretches
def setColumnStretches(self, stretches):
for (i, stretch) in zip(range(0, self.columnCount(), 2), stretches):
self.setColumnStretch(i, stretch)
def resetColumnStretches(self):
for i in range(0, self.rowCount()):
self.setColumnStretch(i, 0)
def setGeometry(self, arg):
self.layoutChanged.emit()
super().setGeometry(arg)
class GridSeperator(QtWidgets.QWidget):
    """Thin draggable bar between grid cells that resizes adjacent rows/columns.

    (Class name kept as-is -- including the 'Seperator' spelling -- since
    GridSplitter instantiates it by this name.)
    """

    def __init__(self, parent=None, orientation=QtCore.Qt.Vertical):
        super().__init__(parent=parent)
        self.orientation = orientation
        if orientation == QtCore.Qt.Vertical:
            # Horizontal bar dragged vertically between rows.
            self.setCursor(QtGui.QCursor(QtCore.Qt.SplitVCursor))
            self.setFixedHeight(5)
        else:
            # Vertical bar dragged horizontally between columns.
            self.setCursor(QtGui.QCursor(QtCore.Qt.SplitHCursor))
            self.setFixedWidth(5)
        self.dragStartX = 0
        self.dragStartY = 0

    def mousePressEvent(self, event):
        """On left press, record the drag origin and snapshot current stretches."""
        if event.buttons() == QtCore.Qt.LeftButton:
            pos = event.pos()
            self.dragStartX = pos.x()
            self.dragStartY = pos.y()
            self.row_stretches = self.gridSplitter.getRowStretches()
            self.column_stretches = self.gridSplitter.getColumnStretches()

    def mouseMoveEvent(self, event):
        """Translate the drag delta into new row or column stretches."""
        self.movedX = event.pos().x() - self.dragStartX
        self.movedY = event.pos().y() - self.dragStartY
        if self.orientation == QtCore.Qt.Vertical and self.movedY != 0:
            self.restretchRows(self.movedY)
        elif self.orientation == QtCore.Qt.Horizontal and self.movedX != 0:
            self.restretchColumns(self.movedX)

    def restretchRows(self, moved):
        # Grow the row on one side of the bar by *moved* pixels, shrink the other.
        stretches = self.row_stretches
        stretches[self.lower] += moved
        stretches[self.current] -= moved
        self.gridSplitter.setRowStretches(stretches)

    def restretchColumns(self, moved):
        # Grow the column on one side of the bar by *moved* pixels, shrink the other.
        stretches = self.column_stretches
        stretches[self.lower] += moved
        stretches[self.current] -= moved
        self.gridSplitter.setColumnStretches(stretches)

    def attachGridSplitter(self, gridSplitter, lower, current):
        """Bind this separator to *gridSplitter* between indices *lower* and *current*."""
        self.gridSplitter = gridSplitter
        self.lower = lower
        self.current = current
|
thocoo/gamma-desk | gdesk/panels/imgview/imgview.py | import os
import time
import collections
from pathlib import Path
import types
from collections.abc import Iterable
import queue
import logging
import numpy as np
logger = logging.getLogger(__name__)
try:
import scipy
import scipy.ndimage
has_scipy = True
except:
has_scipy = False
try:
import imageio
has_imafio= True
except:
has_imafio = False
try:
import cv2
has_cv2 = True
except:
has_cv2 = False
from ... import config, gui
if has_imafio:
if not config.get("path_imageio_freeimage_lib", None) is None:
if os.getenv("IMAGEIO_FREEIMAGE_LIB", None) is None:
os.environ["IMAGEIO_FREEIMAGE_LIB"] = config.get("path_imageio_freeimage_lib")
try:
import imageio.plugins.freeimage
imageio.plugins._freeimage.get_freeimage_lib()
except Exception as ex:
logger.warning('Could not load freeimage dll')
logger.warning(str(ex))
logger.warning('Automatic download can be a problem when using VPN')
logger.warning("Download the dll's from https://github.com/imageio/imageio-binaries/tree/master/freeimage/")
logger.warning(f'And place it in {imageio.core.appdata_dir("imageio")}/freeimage')
#You can also use a system environmental variable
#IMAGEIO_FREEIMAGE_LIB=<the location>\FreeImage-3.18.0-win64.dll
#The effective dll is refered at
#imageio.plugins.freeimage.fi.lib
#Prefer freeimage above pil
#Freeimage seems to be a lot faster then pil
imageio.formats.sort('-FI', '-PIL')
FILTERS_NAMES = collections.OrderedDict()
FILTERS_NAMES['All Formats (*)'] = None
#for fmt in imageio.formats._formats_sorted:
for fmt in imageio.formats:
filter = f'{fmt.name} - {fmt.description} (' + ' '.join(f'*{fmt}' for fmt in fmt.extensions) + ')'
FILTERS_NAMES[filter] = fmt.name
IMAFIO_QT_READ_FILTERS = ';;'.join(FILTERS_NAMES.keys())
IMAFIO_QT_WRITE_FILTERS = ';;'.join(FILTERS_NAMES.keys())
IMAFIO_QT_WRITE_FILTER_DEFAULT = "TIFF-FI - Tagged Image File Format (*.tif *.tiff)"
from qtpy import QtCore, QtGui, QtWidgets
from qtpy.QtCore import Qt, Signal, QUrl
from qtpy.QtGui import QFont, QTextCursor, QPainter, QPixmap, QCursor, QPalette, QColor, QKeySequence
from qtpy.QtWidgets import (QApplication, QAction, QMainWindow, QPlainTextEdit, QSplitter, QVBoxLayout, QHBoxLayout, QSplitterHandle,
QMessageBox, QTextEdit, QLabel, QWidget, QStyle, QStyleFactory, QLineEdit, QShortcut, QMenu, QStatusBar, QColorDialog)
from ...panels import BasePanel, thisPanel, CheckMenu
from ...panels.base import MyStatusBar, selectThisPanel
from ...dialogs.formlayout import fedit
from ...dialogs.colormap import ColorMapDialog
from ...widgets.grid import GridSplitter
from ...utils import lazyf, clip_array
from ...utils import imconvert
from ...gcore.utils import ActionArguments
if has_cv2:
from .opencv import OpenCvMenu
from .operation import OperationMenu
from .profile import ProfilerPanel
from .blueprint import make_thumbnail
from .demosaic import bayer_split
from .quantiles import get_sigma_range_for_hist
from .spectrogram import spectr_hori, spectr_vert
from .imgdata import ImageData
from .roi import SelRoiWidget
from .dialogs import RawImportDialog
ZOOM_VALUES = [
0.005, 0.0064 , 0.008,
0.01 , 0.0125 , 0.016,
0.02 , 0.025 , 0.032,
0.04 , 0.05 , 0.064,
0.08 , 0.10 , 0.125,
0.16 , 0.20 , 0.250,
0.32 , 0.40 , 0.5 ,
0.64 , 0.80 , 1.0 ,
1.25 , 1.60 , 2.0 ,
2.50 , 3.20 , 4.0 ,
5.00 , 6.40 , 8.0 ,
10.00 , 12.5 , 16.0 ,
20.00 , 25.0 , 32.0 ,
40.00 , 50.0 , 64.0 ,
80.00 ,100.0 ,125.0 ,
160.0 ,200.0 ,250.0 ,
320.0 ,400.0 ,500.0 ,
640.0 ,800.0 ,1000 ,
1250 ,1600 ,2000 ,
2500 ,3200 ,4000]
here = Path(__file__).parent.absolute()
respath = Path(config['respath'])
#sck = config['shortcuts']
channels = ['R', 'G', 'B', 'A']
class ZoomWidget(MyStatusBar):
    """Status-bar strip with zoom-out / zoom-in buttons and an editable zoom field."""

    zoomEdited = Signal(float)

    def __init__(self, parent):
        super().__init__(parent=parent)
        self.panel = parent.panel
        icon_dir = respath / 'icons' / 'px16'
        self.zoomOutBtn = QtWidgets.QPushButton(QtGui.QIcon(str(icon_dir / 'bullet_toggle_minus.png')), None, self)
        self.zoom = QLineEdit('100')
        # Intercept key presses on the line edit for arrow/enter handling.
        self.zoom.keyPressEvent = self.zoomKeyPressEvent
        self.zoomInBtn = QtWidgets.QPushButton(QtGui.QIcon(str(icon_dir / 'bullet_toggle_plus.png')), None, self)
        self.addWidget(self.zoomOutBtn)
        self.addWidget(self.zoom, 1)
        self.addWidget(self.zoomInBtn)
        self.zoomOutBtn.clicked.connect(self.panel.zoomOut)
        self.zoomInBtn.clicked.connect(self.panel.zoomIn)
        self.zoomEdited.connect(self.panel.setZoomValue)

    def set_zoom(self, value):
        """Display *value* (a scale factor) as a percentage."""
        self.zoom.setText(f'{value*100:.2f}')

    def zoomKeyPressEvent(self, event):
        """Key handler for the zoom field: arrows step the zoom, Enter applies it."""
        pressed = event.key()
        if pressed == Qt.Key_Up:
            self.panel.zoomIn()
        elif pressed == Qt.Key_Down:
            self.panel.zoomOut()
        if pressed in (Qt.Key_Return, Qt.Key_Enter):
            # Field shows percent; the signal carries a scale factor.
            self.zoomEdited.emit(float(self.zoom.text()) / 100)
        QLineEdit.keyPressEvent(self.zoom, event)
class ValuePanel(MyStatusBar):
    """Status-bar widget showing the cursor position and the pixel value.

    The value field is read-only and offers a context menu to switch its
    display format between decimal, hex and binary.
    """

    zoomEdited = Signal(float)

    def __init__(self, parent):
        super().__init__(parent=parent)
        self.panel = parent.panel
        console_font = QFont('Consolas', pointSize=config['console']['fontsize'])
        self.xy = QLabel('0,0')
        self.vallab = QLabel('val')
        self.val = QLineEdit('0')
        self.val.setFont(console_font)
        self.val.setReadOnly(True)
        self.val.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
        # Context menu on the value field to pick the number format.
        self.chooseValFormat = QMenu('Value Format', self)
        self.chooseValFormat.addAction(QAction("Decimal", self, triggered=lambda: self.set_val_format('dec')))
        self.chooseValFormat.addAction(QAction("Hex", self, triggered=lambda: self.set_val_format('hex')))
        self.chooseValFormat.addAction(QAction("Binary", self, triggered=lambda: self.set_val_format('bin')))
        self.val.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
        self.val.customContextMenuRequested.connect(lambda: self.chooseValFormat.exec_(QtGui.QCursor().pos()))
        self.addWidget(self.xy, 2)
        self.addWidget(self.vallab, 1, Qt.AlignRight)
        self.addWidget(self.val, 4)

    def set_val_format(self, fmt='dec'):
        """Switch the value display format ('dec', 'hex' or 'bin')."""
        self.panel.imviewer.set_val_item_format(fmt)

    def set_xy_val(self, x, y, val=None):
        """Show coordinates (x, y) and, when given, the pixel value *val*.

        *val* may be a scalar or an iterable (multi-channel pixel).  When
        the active format cannot render the value (e.g. hex format on a
        float), fall back to plain str().
        """
        self.xy.setText(f'xy:{x:d},{y:d} ')
        fmt = self.panel.imviewer.val_item_format
        if val is not None:
            try:
                if isinstance(val, Iterable):
                    text = '[' + ' '.join(fmt.format(v) for v in val) + ']'
                    self.val.setText(text)
                else:
                    self.val.setText(fmt.format(val))
            except Exception:
                # Bug fix: was a bare `except:`, which also swallowed
                # SystemExit/KeyboardInterrupt; the deliberate best-effort
                # fallback to str() is preserved.
                self.val.setText(str(val))
class ContrastPanel(MyStatusBar):
    """Status-bar strip for editing black point, gain, white point and gamma."""

    offsetGainEdited = Signal(str, str, str)
    blackWhiteEdited = Signal(str, str)

    def __init__(self, parent):
        super().__init__(parent=parent)
        self.panel = parent.panel
        # NOTE(review): created but never applied to any field in the
        # original implementation either.
        console_font = QFont('Consolas', pointSize=config['console']['fontsize'])

        def right_aligned_edit(initial, handler):
            # Line edit with a bound key handler and right-aligned text.
            edit = QLineEdit(initial)
            edit.keyPressEvent = types.MethodType(handler, edit)
            edit.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
            return edit

        self.offsetlab = QLabel('Black')
        self.offset = right_aligned_edit('0', offsetGainKeyPressEvent)
        self.gainlab = QLabel('gain')
        self.gain = right_aligned_edit('1', offsetGainKeyPressEvent)
        self.whitelab = QLabel('White')
        self.white = right_aligned_edit('1', blackWhitePressEvent)
        self.gammalab = QLabel('gamma')
        self.gamma = right_aligned_edit('1', offsetGainKeyPressEvent)

        for label, field in ((self.offsetlab, self.offset),
                             (self.gainlab, self.gain),
                             (self.whitelab, self.white),
                             (self.gammalab, self.gamma)):
            self.addWidget(label, 1, Qt.AlignRight)
            self.addWidget(field, 1)

        self.offsetGainEdited.connect(self.panel.changeOffsetGain)
        self.blackWhiteEdited.connect(self.panel.changeBlackWhite)

    def setOffsetGainInfo(self, offset, gain, white, gamma):
        """Refresh the four fields, skipping any the user is currently editing."""
        for field, value in ((self.offset, offset), (self.gain, gain),
                             (self.white, white), (self.gamma, gamma)):
            if not field.hasFocus():
                field.setText(f'{value:8.6g}')
def offsetGainKeyPressEvent(self, event):
    """keyPressEvent bound onto the offset/gain/gamma line edits of ContrastPanel.

    On Enter, emit the panel's offsetGainEdited signal with the current
    field texts; the event is always forwarded to QLineEdit afterwards.
    """
    if event.key() in (Qt.Key_Return, Qt.Key_Enter):
        panel = self.parent()
        panel.offsetGainEdited.emit(panel.offset.text(), panel.gain.text(), panel.gamma.text())
    QLineEdit.keyPressEvent(self, event)
def blackWhitePressEvent(self, event):
    """keyPressEvent bound onto the white line edit of ContrastPanel.

    On Enter, emit the panel's blackWhiteEdited signal with the black and
    white field texts; the event is always forwarded to QLineEdit afterwards.
    """
    if event.key() in (Qt.Key_Return, Qt.Key_Enter):
        panel = self.parent()
        panel.blackWhiteEdited.emit(panel.offset.text(), panel.white.text())
    QLineEdit.keyPressEvent(self, event)
class StatusPanel(QWidget):
    """Bottom status area of the image panel: zoom, value and contrast widgets
    inside a splitter, with a tool button to toggle their visibility."""

    def __init__(self, parent):
        super().__init__(parent=parent)
        self.panel = self.parent()

        # Menu toggling the visibility of the three sub-widgets.
        self.chooseWidgetMenu = CheckMenu('Widgets')
        self.addMenuItem(self.chooseWidgetMenu, 'Zoom',
            lambda: self.toggleWidgetVisible(self.zoomWidget), checkcall=lambda: self.zoomWidget.isVisible())
        self.addMenuItem(self.chooseWidgetMenu, 'Values',
            lambda: self.toggleWidgetVisible(self.valuePanel), checkcall=lambda: self.valuePanel.isVisible())
        self.addMenuItem(self.chooseWidgetMenu, 'Contrast',
            lambda: self.toggleWidgetVisible(self.contrastPanel), checkcall=lambda: self.contrastPanel.isVisible())

        self.chooseWidgetBtn = QtWidgets.QToolButton(self)
        self.chooseWidgetBtn.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'menubar.png')))
        self.chooseWidgetBtn.setPopupMode(QtWidgets.QToolButton.InstantPopup)
        self.chooseWidgetBtn.setMenu(self.chooseWidgetMenu)

        self.zoomWidget = ZoomWidget(self)
        self.valuePanel = ValuePanel(self)
        self.contrastPanel = ContrastPanel(self)
        self.contrastPanel.hide()  # hidden by default; toggled via the menu

        row = QtWidgets.QHBoxLayout()
        row.setContentsMargins(0, 0, 0, 0)
        self.setLayout(row)
        # Fix the height to a single text line plus a small margin.
        self.setFixedHeight(QtGui.QFontMetrics(self.font()).height() + 2)
        row.addWidget(self.chooseWidgetBtn)
        splitter = QSplitter(self)
        row.addWidget(splitter)
        for widget in (self.zoomWidget, self.valuePanel, self.contrastPanel):
            splitter.addWidget(widget)

    def addMenuItem(self, menu, text, triggered, checkcall=None, enabled=True, statusTip=None, icon=None, enablecall=None):
        """Create a QAction, attach it to *menu* and return it."""
        action = QAction(text, self, enabled=enabled, statusTip=statusTip)
        action.triggered.connect(triggered)
        if isinstance(icon, str):
            # Allow passing an icon file name relative to the px16 folder.
            icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / icon))
        if icon is not None:
            action.setIcon(icon)
        menu.addAction(action, checkcall=checkcall, enablecall=enablecall)
        return action

    def toggleWidgetVisible(self, widget):
        """Flip the visibility of *widget*."""
        widget.setVisible(not widget.isVisible())

    def set_zoom(self, value):
        # Forward to the zoom widget.
        self.zoomWidget.set_zoom(value)

    def set_val_format(self, fmt='dec'):
        # Forward to the value panel.
        self.valuePanel.set_val_format(fmt)

    def set_xy_val(self, x, y, val=None):
        # Forward to the value panel.
        self.valuePanel.set_xy_val(x, y, val)

    def setOffsetGainInfo(self, offset, gain, white, gamma):
        # Forward to the contrast panel.
        self.contrastPanel.setOffsetGainInfo(offset, gain, white, gamma)
class OpenImage(object):
    """Deferred call that opens *path* in *imgpanel* when invoked.

    Used as a slot for 'recent file' menu actions, which cannot carry
    arguments themselves.
    """

    def __init__(self, imgpanel, path):
        self.imgpanel = imgpanel  # the panel providing openImage()
        self.path = path          # image file path to open

    def __call__(self):
        self.imgpanel.openImage(self.path)
class RecentMenu(QMenu):
    """Menu listing recently opened image paths, rebuilt on every show."""

    def __init__(self, parent=None):
        super().__init__('Recent', parent)
        self.imgpanel = self.parent()
        self.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'images.png')))

    def showEvent(self, event):
        # Rebuild just before the menu becomes visible so it is always current.
        self.initactions()

    def initactions(self):
        """Clear the menu and add one action per recent path from the history."""
        self.clear()
        # Bug fix: the references were stored as `self.actions`, shadowing
        # the inherited QWidget.actions() method; keep them under a private
        # name instead.
        self._recent_actions = []
        for rowid, timestamp, path in gui.qapp.history.yield_recent_paths():
            action = QAction(path, self)
            action.triggered.connect(OpenImage(self.imgpanel, path))
            self.addAction(action)
            self._recent_actions.append(action)
class ImageViewerWidget(QWidget):
#class ImageViewerWidget(QtWidgets.QOpenGLWidget):
#Image size seems to be limitted to 8192x8182
#Tile shading limitation?
pickerPositionChanged = Signal(int, int)
zoomChanged = Signal(float)
zoomPanChanged = Signal()
pixelSelected = Signal(int, int)
def __init__(self, parent=None):
    """Create the viewer with identity zoom, zero pan offset and empty image data."""
    super().__init__(parent = parent)
    self.imgdata = ImageData()
    # Zoom/pan state: zoomValue is the user zoom, zoomPostScale an extra
    # display scale factor; dispOffsetX/Y is the image coordinate shown
    # at the widget's top-left corner.
    self.zoomValue = 1.0
    self.zoomPostScale = 1.0
    self.dispOffsetX = 0
    self.dispOffsetY = 0
    # Drag bookkeeping; None means no drag in progress.
    self.dragStartX = None
    self.dragStartY = None
    self.dispRoiStartX = self.dispOffsetX
    self.dispRoiStartY = self.dispOffsetY
    self.setBackgroundColor(*config['image background'])
    self.set_val_item_format('dec')
    self.roi = SelRoiWidget(self)
    self.pickCursor = QCursor(QPixmap(str(respath / "icons" / "pixPick256.png")), 15, 15)
    self.dragCursor = QCursor(Qt.OpenHandCursor)
    self.setCursor(self.pickCursor)
    self.setMouseTracking(True)
    # Keep the ROI overlay geometry in sync with zoom/pan changes.
    self.zoomPanChanged.connect(self.roi.recalcGeometry)
    self.refresh_title()
    self._scaledImage = None    # cached scaled rendering; None = invalid
    self.hqzoomout = config['image'].get('render_detail_hq', False)
    self.zoombind = config['image'].get('bind_zoom_absolute', False)
    # One-shot flag: when True, the next picked pixel is sent to the console.
    self.push_selected_pixel = False
    self.setAcceptDrops(True)
def setBackgroundColor(self, r, g, b):
    """Set the widget background to the RGB color (r, g, b) via the palette."""
    self.bgcolor = QColor(r, g, b)
    palette = self.palette()
    palette.setColor(self.backgroundRole(), self.bgcolor)
    self.setPalette(palette)
    # setAutoFillBackground(True) is deliberately not used: the fill color
    # did not survive runtime changes -- as soon as the parent was set to
    # None (e.g. after a relayout/distribute) it reverted to the prior
    # color, apparently because of some cached background state.
    self.qpainter = QPainter()
def set_val_item_format(self, fmt):
    """Select the pixel-value display format: 'dec', 'hex' or 'bin'.

    An unknown *fmt* leaves the format string untouched but is still
    recorded in ``val_format`` (matching the original behavior).
    """
    templates = {'dec': '{0:.5g}', 'hex': '0x{0:04X}', 'bin': '{0:_b}'}
    if fmt in templates:
        self.val_item_format = templates[fmt]
    self.val_format = fmt

@property
def vd(self):
    """Shorthand alias for the image data."""
    return self.imgdata
def getImageCoordOfMouseEvent(self, event):
    """Return the integer image pixel under the mouse event position."""
    zoom = self.zoomDisplay
    fx = event.pos().x() / zoom + self.dispOffsetX
    fy = event.pos().y() / zoom + self.dispOffsetY
    # Truncate: the pixel containing the point.
    return (int(fx), int(fy))

def getImageCoordOfDisplayCenter(self):
    """Return the (float) image coordinate currently shown at the widget center."""
    rect = self.geometry()
    zoom = self.zoomDisplay
    cx = rect.width() // 2 / zoom + self.dispOffsetX
    cy = rect.height() // 2 / zoom + self.dispOffsetY
    return (cx, cy)
def setZoomValue(self, value):
    """Store a new zoom value; any change invalidates the cached scaled image."""
    self._scaledImage = None
    self._zoomValue = value

def getZoomValue(self):
    """Return the user zoom value."""
    return self._zoomValue

def getZoomDisplay(self):
    """Return the effective on-screen zoom: user zoom times the post scale."""
    return self._zoomValue * self.zoomPostScale

zoomValue = property(getZoomValue, setZoomValue)
zoomDisplay = property(getZoomDisplay)
def setHigherZoomValue(self):
    """Snap the zoom up to the next entry in ZOOM_VALUES and return it.

    If the current zoom is already a table entry, step one index up
    (clamped at the table maximum).  Otherwise snap to the smallest table
    entry above the current zoom.

    Bug fix: a zoom beyond the table maximum previously wrapped around to
    ZOOM_VALUES[0] (the minimum); it now clamps to the maximum, consistent
    with the clamping in the exact-match branch.
    """
    if self.zoomValue in ZOOM_VALUES:
        i = ZOOM_VALUES.index(self.zoomValue)
        self.zoomValue = ZOOM_VALUES[min(i + 1, len(ZOOM_VALUES) - 1)]
        return self.zoomValue
    n = 0
    for zoomVal in ZOOM_VALUES:
        if zoomVal < self._zoomValue:
            n += 1
        else:
            # First table entry not below the current zoom.
            self.zoomValue = ZOOM_VALUES[min(n, len(ZOOM_VALUES) - 1)]
            return self._zoomValue
    # Current zoom exceeds every table entry: clamp to the maximum.
    self.zoomValue = ZOOM_VALUES[-1]
    return self.zoomValue
def setLowerZoomValue(self):
    """Snap the zoom down to the previous entry in ZOOM_VALUES and return it."""
    current = self.zoomValue
    if current in ZOOM_VALUES:
        # Exact table value: step one index down, clamped at the minimum.
        self.zoomValue = ZOOM_VALUES[max(ZOOM_VALUES.index(current) - 1, 0)]
        return self.zoomValue
    for n, zoomVal in enumerate(ZOOM_VALUES):
        if zoomVal >= current:
            # First table entry not below the current zoom: take the one before.
            self.zoomValue = ZOOM_VALUES[max(n - 1, 0)]
            return self.zoomValue
    # Current zoom exceeds every table entry: use the maximum.
    self.zoomValue = ZOOM_VALUES[-1]
    return self.zoomValue
def setClosestZoomValue(self):
    """Snap the zoom to the nearest entry in ZOOM_VALUES and return it.

    Bug fixes relative to the original:
    - An exact table match stepped one index down (copy-paste from
      setLowerZoomValue); the closest value to an exact match is the
      match itself, so the zoom is now left unchanged in that case.
    - The neighbor candidates were off by one (indices n-2/n-1 instead of
      n-1/n) and distances were measured from the table entry rather than
      from the current zoom value.
    """
    if self.zoomValue in ZOOM_VALUES:
        # Already on a table value: that is the closest by definition.
        return self.zoomValue
    n = 0
    for zoomVal in ZOOM_VALUES:
        if zoomVal < self.zoomValue:
            n += 1
        else:
            lower = ZOOM_VALUES[max(n - 1, 0)]   # last entry below the zoom
            upper = zoomVal                       # first entry at/above the zoom
            # Pick whichever neighbor is nearer to the current zoom.
            if (self.zoomValue - lower) < (upper - self.zoomValue):
                self.zoomValue = lower
            else:
                self.zoomValue = upper
            return self.zoomValue
    # Current zoom exceeds every table entry: clamp to the maximum.
    self.zoomValue = ZOOM_VALUES[-1]
    return self.zoomValue
def setZoom(self, value=1, fixPointX=-1, fixPointY=-1):
    """Set an absolute zoom value, keeping (fixPointX, fixPointY) fixed on screen."""
    self.zoom(value, fixPointX, fixPointY, step=False)

def zoomIn(self, fixPointX=-1, fixPointY=-1, fine=False):
    """Step the zoom up one table entry (or 1% when *fine*)."""
    self.zoom(1, fixPointX, fixPointY, step=True, fine=fine)

def zoomOut(self, fixPointX=-1, fixPointY=-1, fine=False):
    """Step the zoom down one table entry (or 1% when *fine*)."""
    self.zoom(-1, fixPointX, fixPointY, step=True, fine=fine)
def zoom(self, zoomValue=0, fixPointX=-1, fixPointY=-1, step=True, fine=False):
    """Change the zoom while keeping image point (fixPointX, fixPointY)
    at the same screen position.

    With step=True the zoom moves through ZOOM_VALUES in the direction of
    the sign of *zoomValue* (or by 1% per step when *fine*); with
    step=False, *zoomValue* is applied absolutely.  When no fix point is
    given, the display center is used.
    """
    # Any zoom change cancels an in-progress pan drag.
    self.dragStartX = None
    self.dragStartY = None
    if fixPointX == -1 or fixPointY == -1:
        (fixPointX, fixPointY) = self.getImageCoordOfDisplayCenter()
    # Screen position of the fix point before the zoom change.
    tmpX = (fixPointX - self.dispOffsetX) * self.zoomDisplay
    tmpY = (fixPointY - self.dispOffsetY) * self.zoomDisplay
    if step == True:
        if fine:
            # Fine stepping: 1% relative change per step.
            self.zoomValue = self.zoomValue + zoomValue * 0.01 * self.zoomValue
        else:
            if zoomValue > 0:
                self.setHigherZoomValue()
            elif zoomValue < 0:
                self.setLowerZoomValue()
    else:
        self.zoomValue = zoomValue
    # TO DO
    #Note that there is a rounding effect because the viewer will
    #Always round the top left corner to an integer
    #Start display a full pixel, not a part of a pixel
    #A lot of zoom ins and zoom outs will move the image to bottom, right
    #This effect also exists on the bigger steps but is less pronounced
    # Re-derive the pan offset so the fix point stays at screen (tmpX, tmpY).
    self.dispOffsetX = fixPointX - tmpX / self.zoomDisplay
    self.dispOffsetY = fixPointY - tmpY / self.zoomDisplay
    self.zoomPanChanged.emit()
    self.zoomChanged.emit(self.zoomValue)
    self.repaint()
    self.refresh_title()
def refresh_title(self):
    """Placeholder: window-title updates are currently disabled."""
    # Former behavior (kept for reference):
    #self.parent().setWindowTitle(f'Image Viewer {self.parent().id} - {self.zoomValue*100:.3g}%')
    pass

def set_info_xy(self):
    # NOTE(review): StatusPanel defines set_xy_val(), not set_xy(); this
    # call would raise AttributeError if ever reached -- confirm intent.
    self.parent().statuspanel.set_xy()

def zoomAuto(self):
    """Zoom to the ROI when visible, else to the full image, with fallbacks."""
    if self.roi.isVisible():
        # ROI first; fall back to the full image when nothing changed.
        self.zoomToRoi() or self.zoomFull()
    else:
        # Full image first; fall back to the snapped fit when nothing changed.
        self.zoomFull() or self.zoomFit()
def zoomFull(self):
    """Zoom to the full image and do a best fit (no zoom-value snapping)."""
    qimg = self.imgdata.qimg
    return self.zoomToRegion(0, 0, qimg.width(), qimg.height(), zoomSnap=False)

def zoomFit(self):
    """Zoom to the full image and do a best fit, snapping to a lower table zoom."""
    qimg = self.imgdata.qimg
    return self.zoomToRegion(0, 0, qimg.width(), qimg.height(), zoomSnap=True)

def zoomToRoi(self):
    """Zoom to the current region of interest and do a best fit."""
    xr = self.roi.selroi.xr
    yr = self.roi.selroi.yr
    return self.zoomToRegion(xr.start, yr.start, xr.stop - xr.start, yr.stop - yr.start)
def zoomNormalized(self, zoomRegionX, zoomRegionY, zoomRegionWidth, zoomRegionHeight, zoomSnap=True, emit=True, zoomValue=0):
    """Zoom to a region given in normalized (0..1) image coordinates.

    The region is scaled by the image dimensions and forwarded to
    zoomToRegion().  Improvements over the original: the unused local
    ``area`` was removed, and zoomToRegion()'s bool result (did the view
    actually change?) is now propagated to the caller.
    """
    w = self.imgdata.width
    h = self.imgdata.height
    return self.zoomToRegion(zoomRegionX * w, zoomRegionY * h,
                             zoomRegionWidth * w, zoomRegionHeight * h,
                             zoomSnap, emit, zoomValue)
def zoomToRegion(self, zoomRegionX, zoomRegionY, zoomRegionWidth, zoomRegionHeight, zoomSnap=True, emit=True, zoomValue=0):
    """Zoom to a certain region and do a best fit.

    The region is given in image pixel coordinates.  *zoomSnap* snaps the
    computed zoom down to a ZOOM_VALUES entry; a nonzero *zoomValue*
    forces that zoom instead of fitting.  Returns True when the view
    actually changed, False otherwise.
    """
    # A programmatic zoom cancels an in-progress pan drag.
    self.dragStartX = None
    self.dragStartY = None
    # Remember the old view state to decide whether anything changed.
    oldZoomValue = self.zoomValue
    oldDispOffsetX = self.dispOffsetX
    oldDispOffsetY = self.dispOffsetY
    xscale = self.width() / zoomRegionWidth
    yscale = self.height() / zoomRegionHeight
    if zoomValue == 0:
        # Best fit: the region must fit in both directions.
        self.zoomValue = min(xscale, yscale)
        if zoomSnap and (not self.zoomValue in ZOOM_VALUES):
            self.setLowerZoomValue()
            #self.setClosestZoomValue()
    else:
        self.zoomValue = zoomValue
    # Center the region inside the viewport.
    self.dispOffsetX = zoomRegionX - (self.width() / self.zoomDisplay - zoomRegionWidth) / 2
    self.dispOffsetY = zoomRegionY - (self.height() / self.zoomDisplay - zoomRegionHeight) / 2
    if (oldZoomValue != self.zoomValue) or \
       (oldDispOffsetX != self.dispOffsetX) or \
       (oldDispOffsetY != self.dispOffsetY):
        if emit:
            self.zoomPanChanged.emit()
            self.zoomChanged.emit(self.zoomValue)
        self.repaint()
        self.refresh_title()
        return True
    else:
        return False
def visibleRegion(self, normalized=False, clip_square=False, width=None, height=None):
    """Return the image region (x, y, w, h) currently visible.

    *width*/*height* default to the widget size; *clip_square* centers and
    crops the region to a square; *normalized* divides everything by the
    image dimensions.
    """
    view_w = width or self.width()
    view_h = height or self.height()
    zoom = self.zoomDisplay
    x, y = self.dispOffsetX, self.dispOffsetY
    w, h = view_w / zoom, view_h / zoom
    if clip_square:
        if h < w:
            # Wider than tall: trim the sides.
            x += (w - h) / 2
            w = h
        elif w < h:
            # Taller than wide: trim top and bottom.
            y += (h - w) / 2
            h = w
    if normalized:
        img_w, img_h = self.imgdata.width, self.imgdata.height
        return x / img_w, y / img_h, w / img_w, h / img_h
    return x, y, w, h
def panned(self, manhattan=False):
    """Apply the current drag motion as a pan; *manhattan* locks it to one axis.

    Returns None (with no side effects) when no drag is in progress.
    """
    if self.dragStartX is None or self.dragStartY is None:
        return None
    zoom = self.zoomDisplay
    self.shiftX = (self.dragStartX - self.dragEndX) / zoom
    self.shiftY = (self.dragStartY - self.dragEndY) / zoom
    if manhattan:
        # Constrain the pan to the dominant axis.
        if abs(self.shiftX) < abs(self.shiftY):
            self.shiftX = 0
        else:
            self.shiftY = 0
    if self.shiftX or self.shiftY:
        self.dispOffsetX = self.dispRoiStartX + self.shiftX
        self.dispOffsetY = self.dispRoiStartY + self.shiftY
        self.zoomPanChanged.emit()
        self.repaint()
def mouseDoubleClickEvent(self, event):
self.zoomAuto()
def wheelEvent(self, event):
    """Zoom in or out on the mouse wheel; Ctrl selects fine-grained steps."""
    fine = bool(event.modifiers() & QtCore.Qt.ControlModifier)
    # Zoom around the pixel under the mouse cursor.
    anchor = self.getImageCoordOfMouseEvent(event)
    if event.delta() < 0:
        self.zoomOut(*anchor, fine)
    else:
        self.zoomIn(*anchor, fine)
def mousePressEvent(self, event):
    """Start a pan drag (left/middle button) or a ROI drag (right button)."""
    if event.buttons() == Qt.LeftButton or \
            (event.buttons() == Qt.MiddleButton):
        self.dragStartX = event.pos().x()
        self.dragStartY = event.pos().y()
        #roi value at the start of the dragging
        self.dispRoiStartX = self.dispOffsetX
        self.dispRoiStartY = self.dispOffsetY
    elif event.buttons() == Qt.RightButton:
        # Anchor of the new ROI rectangle, in image coordinates.
        self.roiDragStartX, self.roiDragStartY = self.getImageCoordOfMouseEvent(event)
def mouseMoveEvent(self, event):
    """Pan on left/middle drag, grow the ROI on right drag; report the hover position."""
    if (event.buttons() == Qt.LeftButton) or \
            (event.buttons() == Qt.MiddleButton):
        self.setCursor(self.dragCursor)
        self.dragEndX = event.pos().x()
        self.dragEndY = event.pos().y()
        # Shift restricts panning to the dominant axis (manhattan mode).
        self.panned(event.modifiers() & QtCore.Qt.ShiftModifier)
    elif (event.buttons() == Qt.RightButton):
        self.roiDragEndX, self.roiDragEndY = self.getImageCoordOfMouseEvent(event)
        self.roi.createState = True
        self.roi.setStartEndPoints(self.roiDragStartX, self.roiDragStartY, \
            self.roiDragEndX, self.roiDragEndY)
        self.roi.show()
    self.pickerPositionChanged.emit(*self.getImageCoordOfMouseEvent(event))
def dragDistance(self, event):
    """Return the euclidean distance in screen pixels between the drag start
    and the event position, or None when no drag is active."""
    if self.dragStartX is None or self.dragStartY is None:
        return None
    dx = event.pos().x() - self.dragStartX
    dy = event.pos().y() - self.dragStartY
    return (dx * dx + dy * dy) ** 0.5
def mouseReleaseEvent(self, event):
    """Treat a short drag (<= 2 px) as a pixel pick; finish ROI creation."""
    drag_distance = self.dragDistance(event)
    if not drag_distance is None and drag_distance <= 2:
        pixel_position = self.getImageCoordOfMouseEvent(event)
        if self.push_selected_pixel:
            # Scripting hook: send the picked position to the console input.
            panel = gui.qapp.panels.selected('console')
            panel.task.send_input(str(pixel_position))
            self.push_selected_pixel = False
        self.pixelSelected.emit(*pixel_position)
        self.setCursor(self.pickCursor)
    if self.roi.createState:
        self.roi.release_creation()
def refresh(self):
    """Drop the cached scaled image and repaint the widget."""
    self._scaledImage = None
    self.repaint()
def paintEvent(self, event):
    """Fill the background and paint the image; always end the painter."""
    try:
        self.qpainter.begin(self)
        self.qpainter.fillRect(event.rect(), self.bgcolor)
        self.paintImage(self.qpainter)
    finally:
        # The painter must be ended even when painting raises.
        self.qpainter.end()
def scaledImage(self):
    """Return the image scaled to the current display zoom, caching the result."""
    if self._scaledImage is None:
        source = self.imgdata.qimg
        target_width = int(source.width() * self.zoomDisplay)
        self._scaledImage = source.scaledToWidth(target_width, Qt.SmoothTransformation)
    return self._scaledImage
def paintImage(self, qp, position=None):
    """Paint the image and its layers on the painter qp.

    position overrides the display offset (sx, sy) when given; otherwise
    the current pan offset is used.  At very high zoom levels the numeric
    value of every visible pixel is drawn on top.
    """
    if position is None:
        sx = self.dispOffsetX
        sy = self.dispOffsetY
    else:
        sx, sy = position
    qp.setOpacity(1.0)
    if (self.zoomDisplay < 1) and (self.hqzoomout):
        # Zoomed out in high quality: blit from the pre-scaled cached image.
        qp.scale(1, 1)
        qp.drawImage(0, 0, self.scaledImage(), int(sx * self.zoomDisplay), int(sy * self.zoomDisplay), -1, -1)
    else:
        if config["image"].get('render_detail_smooth', False) and self.zoomDisplay < 1:
            qp.setRenderHint(qp.SmoothPixmapTransform)
        # Let the painter transform do the scaling and panning.
        qp.scale(self.zoomDisplay, self.zoomDisplay)
        qp.translate(-sx, -sy)
        qp.drawImage(0, 0, self.imgdata.qimg, 0, 0, -1, -1)
        for layer in self.imgdata.layers.values():
            qp.setCompositionMode(layer['composition'])
            qp.drawImage(0, 0, layer['qimage'], 0, 0, -1, -1)
    qp.resetTransform()
    if config['image'].get('pixel_labels', True) and self.zoomDisplay >= 125:
        # At extreme zoom, draw the numeric value inside every visible pixel.
        qp.setPen(QColor(128,128,128))
        font = QFont("Consolas")
        fontSize = round(self.zoomDisplay / 10)
        font.setPixelSize(fontSize)
        font.setStyleStrategy(QFont.NoAntialias)
        if self.imgdata.statarr.dtype in ['double']:
            fmt = '{0:.5g}'
        else:
            fmt = self.val_item_format
    qp.setFont(font)
    # XOR composition keeps labels readable on any background color.
    qp.setCompositionMode(QtGui.QPainter.RasterOp_SourceXorDestination)
    qp.setRenderHint(qp.Antialiasing, False)
    x, y, w, h = self.visibleRegion()
    mh, mw = self.imgdata.statarr.shape[:2]
    # Clamp the label loop to the pixels actually on screen.
    startx, starty = max(0, round(x - 0.5)), max(0, round(y - 0.5))
    endx, endy = min(mw, round(x + w + 0.5)), min(mh, round(y + h + 0.5))
    for sx in range(startx, endx):
        for sy in range(starty, endy):
            xpos = round((sx + 0.05 - self.dispOffsetX) * self.zoomDisplay)
            ypos = round((sy + 0.95 - self.dispOffsetY) * self.zoomDisplay)
            val = self.imgdata.statarr[sy, sx]
            if isinstance(val, Iterable):
                # Multi-channel pixel: one label line per channel.
                values = list(val)
                ypos -= (len(values) - 1) * (fontSize + 1)
                for i, v in enumerate(values):
                    try:
                        label = fmt.format(v)
                    except:
                        label = 'invalid'
                    qp.drawText(xpos, ypos + i * (fontSize + 1), f'{channels[i]}: {label}')
            else:
                try:
                    label = fmt.format(val)
                except:
                    label = 'invalid'
                qp.drawText(xpos, ypos, label)
def dragEnterEvent(self, event):
    """Accept every drag so dropEvent will be delivered."""
    event.accept()
def dropEvent(self, event):
    """Open dropped files: the first in this panel, the rest in new viewers.

    Accepts either a list of URLs or plain text naming a single file.
    """
    panel = thisPanel(self)
    mimeData = event.mimeData()
    dropedInFiles = []
    if mimeData.hasUrls():
        for url in mimeData.urls():
            # Strip the file:/// scheme to get a plain path.
            filename = url.toString(QUrl.FormattingOptions(QUrl.RemoveScheme)).replace('///','')
            dropedInFiles.append(filename)
    elif mimeData.hasText():
        filename = mimeData.text()
        dropedInFiles.append(filename)
    # Bug fix: a drop with neither URLs nor text used to raise IndexError.
    if not dropedInFiles:
        return
    panel.openImage(dropedInFiles[0])
    for path in dropedInFiles[1:]:
        gui.img.open(path)
class ImageViewerBase(BasePanel):
    # Panel registration metadata used by the panel framework.
    panelCategory = 'image'
    panelShortName = 'base'
    userVisible = False

    # Signals connected to bound panels (see addBindingTo/removeBindingTo).
    contentChanged = Signal(int, bool)
    gainChanged = Signal(int)
    visibleRegionChanged = Signal(float, float, float, float, bool, bool, float)
    roiChanged = Signal(int)

    classIconFile = str(respath / 'icons' / 'px16' / 'picture.png')
def __init__(self, parent=None, panid=None, **kwargs):
    """Initialize the image panel: display mapping state, menus and status bar."""
    super().__init__(parent, panid, type(self).panelCategory)
    # Display mapping parameters: pixel value -> screen intensity.
    self.offset = 0
    self.white = 256
    self.gamma = 1
    self.colormap = config['image color map']
    # Fallback values applied by defaultOffsetGain().
    self.defaults = dict()
    self.defaults['offset'] = 0
    self.defaults['gain'] = 1
    self.defaults['gamma'] = 1
    self.createMenus()
    self.createStatusBar()
def createMenus(self):
    """Build the menu bar: File, Edit, View, Select, Canvas, Image, Process and Analyse.

    Only user-visible statusTip typos were corrected; action labels and
    connected slots are unchanged.
    """
    self.fileMenu = self.menuBar().addMenu("&File")
    #self.editMenu = self.menuBar().addMenu("&Edit")
    self.editMenu = CheckMenu("&Edit", self.menuBar())
    self.menuBar().addMenu(self.editMenu)
    self.viewMenu = CheckMenu("&View", self.menuBar())
    self.menuBar().addMenu(self.viewMenu)
    self.selectMenu = self.menuBar().addMenu("&Select")
    self.canvasMenu = self.menuBar().addMenu("&Canvas")
    #self.imageMenu = self.menuBar().addMenu("&Image")
    # NOTE(review): unlike editMenu/viewMenu, imageMenu is never passed to
    # menuBar().addMenu() here — confirm CheckMenu attaches itself.
    self.imageMenu = CheckMenu("&Image", self.menuBar())
    self.processMenu = self.menuBar().addMenu("&Process")
    self.analyseMenu = self.menuBar().addMenu("&Analyse")

    if has_cv2:
        self.openCvMenu = OpenCvMenu("Open CV", self.menuBar(), self)

    self.operationMenu = OperationMenu("Operation", self.menuBar(), self)

    ### File
    self.addMenuItem(self.fileMenu, 'New...', self.newImage,
        statusTip="Make a new image in this image viewer",
        icon = 'picture_empty.png')
    self.addMenuItem(self.fileMenu, 'Duplicate', self.duplicate,
        statusTip="Duplicate the image to a new image viewer",
        icon = 'application_double.png')
    self.addMenuItem(self.fileMenu, 'Open Image...', self.openImageDialog,
        statusTip="Open an image",
        icon = 'folder_image.png')
    self.addMenuItem(self.fileMenu, 'Import Raw Image...', self.importRawImage,
        statusTip="Import Raw Image",
        icon = 'picture_go.png')
    self.fileMenu.addMenu(RecentMenu(self))
    self.addMenuItem(self.fileMenu, 'Save Image...', self.saveImageDialog,
        statusTip="Save the image",
        icon = 'picture_save.png')
    self.addMenuItem(self.fileMenu, 'Close', self.close_panel,
        statusTip="Close this image panel",
        icon = 'cross.png')

    ### Edit
    self.addMenuItem(self.editMenu, 'Show Prior Image', self.piorImage,
        enablecall = lambda: self.imviewer.imgdata.imghist.prior_length() > 0,
        statusTip="Get the prior image from the history stack and show it",
        icon = 'undo.png')
    self.addMenuItem(self.editMenu, 'Show Next Image', self.nextImage,
        enablecall = lambda: self.imviewer.imgdata.imghist.next_length() > 0,
        statusTip="Get the next image from the history stack and show it",
        icon = 'redo.png')
    self.editMenu.addSeparator()
    self.addMenuItem(self.editMenu, 'Copy 8bit Image to clipboard', self.placeRawOnClipboard,
        statusTip="Place the 8bit image on clipboard, offset and gain applied",
        icon = 'page_copy.png')
    self.addMenuItem(self.editMenu, 'Copy Display Image to clipboard', self.placeQimgOnClipboard,
        statusTip="Place the displayed image on clipboard with display processing applied",
        icon = 'page_copy.png')
    self.addMenuItem(self.editMenu, 'Paste into New Image', self.showFromClipboard,
        statusTip="Paste content of clipboard in this image viewer",
        icon = 'picture_clipboard.png')
    self.editMenu.addSeparator()

    ### View
    self.addMenuItem(self.viewMenu, 'Refresh', self.refresh,
        statusTip="Refresh the image",
        icon = 'update.png')
    self.addMenuItem(self.viewMenu, 'Zoom In', self.zoomIn,
        statusTip="Zoom in 1 step",
        icon = 'zoom_in.png')
    self.addMenuItem(self.viewMenu, 'Zoom Out', self.zoomOut,
        statusTip="Zoom out 1 step",
        icon = 'zoom_out.png')
    zoomMenu = QMenu('Zoom')
    zoomMenu.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'zoom.png')))
    self.viewMenu.addMenu(zoomMenu)
    self.addMenuItem(zoomMenu, 'Zoom 100%', self.setZoom100,
        statusTip="Zoom to actual size (100%)",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'zoom_actual.png')))
    self.addMenuItem(zoomMenu, 'Zoom Fit', self.zoomFit,
        statusTip="Zoom to fit the image in the image viewer, snap on predefined zoom value",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'zoom_fit.png')))
    self.addMenuItem(zoomMenu, 'Zoom Full', self.zoomFull,
        statusTip="Zoom to fit the image in the image viewer",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'zoom_extend.png')))
    self.addMenuItem(zoomMenu, 'Zoom Auto', self.zoomAuto,
        statusTip="Toggle between zoom to selection and full image",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'zoom_refresh.png')))
    self.addMenuItem(zoomMenu, 'Zoom exact...', self.setZoom,
        statusTip="Zoom to a defined value",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'zoom_actual_equal.png')))
    self.viewMenu.addSeparator()
    self.addMenuItem(self.viewMenu, 'Default Offset && Gain', self.defaultOffsetGain,
        statusTip="Apply default offset, gain and gamma",
        icon=QtGui.QIcon(str(respath / 'icons' / 'px16' / 'unmark_to_download.png')))
    self.addMenuItem(self.viewMenu, 'Set Current as Default', self.setCurrentOffsetGainAsDefault,
        statusTip="Set the current offset, gain and gamma as default")
    self.addMenuItem(self.viewMenu, 'Offset && Gain...', self.offsetGainDialog,
        statusTip="Set offset and gain",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'weather_cloudy.png')))
    self.addMenuItem(self.viewMenu, 'Black && White...', self.blackWhiteDialog,
        statusTip="Set the black and white point",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'color_adjustment.png')))
    self.addMenuItem(self.viewMenu, 'Grey && Gain...', self.changeGreyGainDialog,
        statusTip="Set the mid grey level and gain",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'contrast.png')))
    self.addMenuItem(self.viewMenu, 'Gain to Min-Max', self.gainToMinMax,
        statusTip="Auto level to min and max")
    self.gainSigmaMenu = QMenu('Gain to Sigma')
    self.viewMenu.addMenu(self.gainSigmaMenu)
    self.addMenuItem(self.gainSigmaMenu, 'Gain to Sigma 1', self.gainToSigma1)
    self.addMenuItem(self.gainSigmaMenu, 'Gain to Sigma 2', self.gainToSigma2)
    self.addMenuItem(self.gainSigmaMenu, 'Gain to Sigma 3', self.gainToSigma3)
    self.viewMenu.addSeparator()
    self.addMenuItem(self.viewMenu, 'HQ Zoom Out', self.toggle_hq,
        checkcall = lambda: self.imviewer.hqzoomout,
        statusTip = "Use high quality resampling on zoom levels < 100%")
    self.bindMenu = CheckMenu("Bind", self.viewMenu)
    self.addMenuItem(self.bindMenu, 'Bind All Image Viewers', self.bindImageViewers)
    self.addMenuItem(self.bindMenu, 'Unbind All Image Viewers', self.unbindImageViewers)
    self.addMenuItem(self.bindMenu, 'Absolute Zoom Link', self.toggle_zoombind,
        checkcall = lambda: self.imviewer.zoombind,
        statusTip = "If bound to another image viewer, bind with absolute zoom value")
    self.addMenuItem(self.viewMenu, 'Colormap...', self.setColorMap,
        statusTip="Set the color map for monochrome images",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'dopplr.png')))
    self.addMenuItem(self.viewMenu, 'Background Color...', self.setBackground,
        statusTip="Set the background color...",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'document_background.png')))
    self.addMenuItem(self.viewMenu, 'Selection Color...', self.setRoiColor,
        statusTip="Set the Selection color...",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'color_swatch.png')))
    self.chooseValFormat = QMenu('Value Format')
    self.chooseValFormat.setIcon(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'pilcrow.png')))
    self.chooseValFormat.addAction(QAction("Decimal", self, triggered=lambda: self.statuspanel.set_val_format('dec')))
    self.chooseValFormat.addAction(QAction("Hex", self, triggered=lambda: self.statuspanel.set_val_format('hex')))
    self.chooseValFormat.addAction(QAction("Binary", self, triggered=lambda: self.statuspanel.set_val_format('bin')))
    self.chooseValFormat.addAction(QAction("Pixel Labels", self, triggered=self.togglePixelLabels))
    self.viewMenu.addMenu(self.chooseValFormat)

    ### Select
    self.addMenuItem(self.selectMenu, 'Select Full Image', self.selectAll,
        statusTip="Select Full Image")
    self.addMenuItem(self.selectMenu, 'Deselect', self.selectNone,
        statusTip="Deselect, select nothing")
    self.addMenuItem(self.selectMenu, 'Select dialog...', self.setRoi,
        statusTip="Select with input numbers dialog",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'region_of_interest.png')))
    self.addMenuItem(self.selectMenu, 'Jump to Coordinates', self.jumpToDialog,
        statusTip="Select 1 pixel and zoom to it",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'canvas.png')))
    self.addMenuItem(self.selectMenu, 'Mask Value...', self.maskValue,
        statusTip="Mask pixels based on value",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'find.png')))

    ### Canvas
    self.addMenuItem(self.canvasMenu, 'Flip Horizontal', self.flipHorizontal,
        statusTip="Flip the image Horizontal",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'shape_flip_horizontal.png')))
    self.addMenuItem(self.canvasMenu, 'Flip Vertical', self.flipVertical,
        statusTip="Flip the image Vertical",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'shape_flip_vertical.png')))
    self.addMenuItem(self.canvasMenu, 'Rotate Left 90', self.rotate90,
        statusTip="Rotate the image 90 degree anti clockwise",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'shape_rotate_anticlockwise.png')))
    self.addMenuItem(self.canvasMenu, 'Rotate Right 90', self.rotate270,
        statusTip="Rotate the image 90 degree clockwise",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'shape_rotate_clockwise.png')))
    self.addMenuItem(self.canvasMenu, 'Rotate 180', self.rotate180,
        statusTip="Rotate the image 180 degree")
    self.addMenuItem(self.canvasMenu, 'Rotate any Angle...', triggered=self.rotateAny, enabled=has_scipy,
        statusTip="Rotate any angle")
    self.addMenuItem(self.canvasMenu, 'Crop on Selection', self.crop,
        statusTip="Crop the image on the current rectangle selection",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'transform_crop.png')))
    self.addMenuItem(self.canvasMenu, 'Resize Canvas...', self.canvasResize,
        statusTip="Add or remove borders",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'canvas_size.png')))
    self.addMenuItem(self.canvasMenu, 'Resize Image', triggered=self.resize, enabled=has_scipy,
        statusTip="Resize the image by resampling",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'resize_picture.png')))

    ### Image
    self.addMenuItem(self.imageMenu, 'Swap RGB | BGR', self.swapRGB,
        statusTip="Swap the blue with red channel",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'color.png')))
    self.addMenuItem(self.imageMenu, 'to Monochroom', self.toMonochroom,
        statusTip="Convert an RGB image to monochrome grey",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'convert_color_to_gray.png')))
    self.addMenuItem(self.imageMenu, 'to Photometric Monochroom', self.toPhotoMonochroom,
        statusTip="Convert an RGB image to photometric monochrome grey",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'convert_color_to_gray.png')))
    self.addMenuItem(self.imageMenu, 'to 8-bit', self.to8bit,
        enablecall = self.is16bit)
    self.addMenuItem(self.imageMenu, 'to 16-bit', self.to16bit,
        enablecall = self.is8bit)
    self.addMenuItem(self.imageMenu, 'to Data Type', self.to_dtype)
    self.addMenuItem(self.imageMenu, 'Swap MSB LSB Bytes', self.swapbytes,
        enablecall = self.is16bit)
    self.addMenuItem(self.imageMenu, 'Fill...', self.fillValue,
        statusTip="Fill the image with the same value",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'paintcan.png')))
    self.addMenuItem(self.imageMenu, 'Add noise...', self.addNoise,
        statusTip="Add Gaussian noise")
    self.addMenuItem(self.imageMenu, 'Invert', self.invert,
        statusTip="Invert the image")
    self.addMenuItem(self.imageMenu, 'Adjust Lighting...', self.adjustLighting,
        statusTip="Adjust the pixel values",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'contrast.png')))
    self.addMenuItem(self.imageMenu, 'Adjust Gamma...', self.adjustGamma,
        statusTip="Adjust the gamma")

    #Process
    self.addMenuItem(self.processMenu, 'Bayer Split', self.bayer_split_tiles,
        statusTip="Split to 4 images based on the Bayer kernel",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'pictures_thumbs.png')))
    self.addMenuItem(self.processMenu, 'Colored Bayer', self.colored_bayer)
    self.addMenuItem(self.processMenu, 'Demosaic', self.demosaic, enabled=has_scipy,
        statusTip="Demosaic",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'things_digital.png')))
    self.addMenuItem(self.processMenu, 'Make Blueprint', self.makeBlueprint,
        statusTip="Make a thumbnail (8x smaller) with blown-up high frequencies",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'map_blue.png')))

    #Analyse
    self.addMenuItem(self.analyseMenu, 'Horizontal Spectrogram', self.horizontalSpectrogram,
        statusTip="Horizontal Spectrogram")
    self.addMenuItem(self.analyseMenu, 'Vertical Spectrogram', self.verticalSpectrogram,
        statusTip="Vertical Spectrogram")
    self.addMenuItem(self.analyseMenu, 'Measure Distance', self.measureDistance,
        statusTip="Measure Distance",
        icon = QtGui.QIcon(str(respath / 'icons' / 'px16' / 'geolocation_sight.png')))

    self.addBaseMenu(['levels', 'values', 'image'])
def get_select_menu(self):
    """Return the Select menu widget.

    NOTE(review): relies on the Select menu being child index 4 of the
    menu bar — fragile if createMenus changes order.
    """
    #The select menu should be index 4 from the menuBar children
    return self.menuBar().children()[4]
def createStatusBar(self):
    """Create the status panel and attach it to the Qt status bar."""
    self.statuspanel = StatusPanel(self)
    self.statusBar().addWidget(self.statuspanel)
def set_info_xy_val(self, x, y):
    """Show the position (x, y) and the pixel value at it in the status panel."""
    try:
        val = self.imviewer.imgdata.statarr[y, x]
    # Bug fix: was a bare `except:` which also swallowed KeyboardInterrupt
    # and SystemExit; the intent is only "no value available here".
    except Exception:
        val = None
    self.statuspanel.set_xy_val(x, y, val)
def addBindingTo(self, category, panid):
    """Bind this panel to another panel and connect the category-specific signals.

    Returns the target panel, or None when the binding failed.
    """
    targetPanel = super().addBindingTo(category, panid)
    if targetPanel is None: return None
    if targetPanel.category == 'image':
        self.visibleRegionChanged.connect(targetPanel.changeVisibleRegion)
    elif targetPanel.category == 'levels':
        self.contentChanged.connect(targetPanel.imageContentChanged)
        self.roiChanged.connect(targetPanel.roiChanged)
        self.gainChanged.connect(targetPanel.imageGainChanged)
    elif targetPanel.category == 'values':
        self.imviewer.pixelSelected.connect(targetPanel.pick)
    return targetPanel
def removeBindingTo(self, category, panid):
    """Unbind from a panel and disconnect the signals that addBindingTo connected.

    Returns the target panel, or None when there was no binding.
    """
    targetPanel = super().removeBindingTo(category, panid)
    if targetPanel is None: return None
    if targetPanel.category == 'image':
        self.visibleRegionChanged.disconnect(targetPanel.changeVisibleRegion)
    elif targetPanel.category == 'levels':
        self.contentChanged.disconnect(targetPanel.imageContentChanged)
        # Bug fix: roiChanged is connected in addBindingTo but was never
        # disconnected here, leaking a connection on every unbind.
        self.roiChanged.disconnect(targetPanel.roiChanged)
        self.gainChanged.disconnect(targetPanel.imageGainChanged)
    elif targetPanel.category == 'values':
        self.imviewer.pixelSelected.disconnect(targetPanel.pick)
    return targetPanel
def changeVisibleRegion(self, x, y, w, h, zoomSnap, emit, zoomValue):
    """Slot for visibleRegionChanged of a bound viewer: follow its zoom/pan."""
    self.imviewer.zoomNormalized(x, y, w, h, zoomSnap, emit, zoomValue)
    self.imviewer.roi.recalcGeometry()
############################
# File Menu Connections
def newImage(self):
    """Create a new uniform image; ask the parameters interactively when not scripted."""
    with ActionArguments(self) as args:
        args['width'] = 1920*2
        args['height'] = 1080*2
        args['channels'] = 1
        args['dtype'] = 'uint8'
        args['mean'] = 128

    if args.isNotSet():
        dtypes = ['uint8', 'int8', 'uint16', 'int16', 'uint32', 'int32', 'float32', 'float64']
        # The [1] prefix pre-selects the first dtype in the combo box.
        options_form = [('Width', args['width']),
            ('Height', args['height']),
            ('Channels', args['channels']),
            ('dtype', [1] + dtypes),
            ('mean', args['mean'])]
        result = fedit(options_form)
        if result is None: return
        args['width'], args['height'], args['channels'], dtype_ind, args['mean'] = result
        args['dtype'] = dtypes[dtype_ind-1]

    shape = [args['height'], args['width']]
    if args['channels'] > 1: shape = shape + [args['channels']]
    arr = np.ndarray(shape, args['dtype'])
    arr[:] = args['mean']
    self.show_array(arr, zoomFitHist=True)
def duplicate(self, floating=False):
    """Duplicate this panel and show the same array in the copy."""
    newPanel = super().duplicate(floating)
    newPanel.show_array(self.ndarray)
    return newPanel
def openImageDialog(self):
filepath = here / 'images' / 'default.png'
with ActionArguments(self) as args:
args['filepath'] = here / 'images' / 'default.png'
args['format'] = None
if args.isNotSet():
if has_imafio:
args['filepath'], filter = gui.getfile(filter=IMAFIO_QT_READ_FILTERS, title='Open Image File (Imafio)', file=str(args['filepath']))
if args['filepath'] == '': return
args['format'] = FILTERS_NAMES[filter]
else:
args['filepath'], filter = gui.getfile(title='Open Image File (PIL)', file=str(args['filepath']))
args['format'] = None
if args['filepath'] == '': return
self.openImage(args['filepath'], args['format'])
def openImage(self, filepath, format=None, zoom='full'):
    """Open an image file, show it and record it in the recent-file history.

    zoom: 'full' zooms to the whole image; any other value is passed to
    setZoomValue as an explicit zoom factor.
    """
    if has_imafio:
        arr = self.openImageImafio(filepath, format)
    else:
        arr = self.openImagePIL(filepath)
    if not arr is None:
        self.long_title = str(filepath)
        gui.qapp.history.storepath(str(filepath))
        self.show_array(arr, zoomFitHist=True)
        if zoom == 'full':
            self.zoomFull()
        else:
            self.setZoomValue(zoom)
def openImagePIL(self, filepath):
    """Load an image with PIL and return it as a numpy array."""
    with gui.qapp.waitCursor(f'Opening image using PIL {filepath}'):
        from PIL import Image
        logger.info(f'Using PIL library')
        image = Image.open(str(filepath))
        arr = np.array(image)
    return arr
def openImageImafio(self, filepath, format=None):
    """Load an image with imageio and return it as a numpy array."""
    with gui.qapp.waitCursor(f'Opening image using imageio {filepath} {format}'):
        # Looked up only to log which imageio format will handle this file.
        FormatClass = imageio.formats[str(filepath)]
        logger.info(f'Using FormatClass {repr(FormatClass)}')
        arr = imageio.imread(str(filepath), format=format)
    return arr
def importRawImage(self):
    """Import a raw binary image file.

    An initial guess for header size, dtype and resolution is parsed from
    the start of the file; the user confirms or corrects the values in a
    RawImportDialog before the array is constructed and shown.
    """
    import struct

    filepath = here / 'images' / 'default.png'
    filepath = gui.getfile(file=str(filepath))[0]
    if filepath == '': return

    # Bug fix: the file handle was opened without a context manager and
    # leaked when reading raised; read the whole file under `with`.
    with open(filepath, 'br') as fp:
        data = fp.read()

    # Somewhere in the header there is the resolution.
    # Image Studio layout: 128 bytes header, 4 bytes width, 4 bytes height,
    # 120 bytes unknown.
    header = 128
    dtype = 'uint16'
    width = struct.unpack('<I', data[0:4])[0]
    height = struct.unpack('<I', data[4:8])[0]
    print(f'Width x Height: {width} x {height}')

    dialog = RawImportDialog(data)
    dialog.form.offset.setText(str(header))
    dialog.form.dtype.setText(dtype)
    dialog.form.width.setText(str(width))
    dialog.form.height.setText(str(height))
    # NOTE(review): the dialog result code is ignored, so cancelling still
    # imports with whatever values are in the form — confirm intended.
    dialog.exec_()

    offset = int(dialog.form.offset.text())
    dtype = dialog.form.dtype.text()
    byteorder = dialog.form.byteorder.currentText()
    width = int(dialog.form.width.text())
    height = int(dialog.form.height.text())

    with gui.qapp.waitCursor():
        dtype = np.dtype(dtype)
        leftover = len(data) - (width * height * dtype.itemsize + offset)
        if leftover > 0:
            print('Too much data found (%d bytes too many)' % leftover)
        elif leftover < 0:
            print('Not enough data found (missing %d bytes)' % (-leftover))
        arr = np.ndarray(shape=(height, width), dtype=dtype, buffer=data[offset:])
        if byteorder == 'big endian':
            arr = arr.byteswap()
        self.show_array(arr, zoomFitHist=True)
        self.zoomFull()

    gui.qapp.history.storepath(str(filepath))
def saveImageDialog(self):
    """Ask for a target file (and a format when imafio is available) and save."""
    if has_imafio:
        filepath, filter = gui.putfile(filter=IMAFIO_QT_WRITE_FILTERS, title='Save Image using Imafio',
            defaultfilter=IMAFIO_QT_WRITE_FILTER_DEFAULT)
        if filepath == '': return
        format = FILTERS_NAMES[filter]
        self.saveImage(filepath, format)
    else:
        filepath, filter = gui.putfile(title='Save Image using PIL')
        if filepath == '': return
        self.saveImage(filepath)
def saveImage(self, filepath, format=None):
    """Save the current array to filepath and record the path in history."""
    if has_imafio:
        self.saveImageImafio(filepath, format)
    else:
        self.saveImagePIL(filepath)
    gui.qapp.history.storepath(str(filepath))
def saveImagePIL(self, filepath):
    """Save the current array to filepath using PIL."""
    with gui.qapp.waitCursor():
        from PIL import Image
        image = Image.fromarray(self.ndarray)
        image.save(str(filepath))
def saveImageImafio(self, filepath, format):
    """Save the current array with imageio, asking per-format write options.

    format None is resolved from the file extension.  JPEG/TIFF/PNG formats
    get a small options dialog; anything else is written with defaults.
    NOTE(review): the gui.fedit results are unpacked without a None check,
    so cancelling an options dialog raises — confirm intended.
    """
    if format is None:
        from imageio.core import Request
        format = imageio.formats.search_write_format(Request(filepath, 'wi')).name
    if format == 'JPEG-FI':
        (quality, progressive, optimize, baseline) = gui.fedit([('quality', 90), ('progressive', False), ('optimize', False), ('baseline', False)])
        with gui.qapp.waitCursor(f'Saving to {filepath}'):
            imageio.imwrite(filepath, self.ndarray, format,
                quality=quality, progressive=progressive,
                optimize=optimize, baseline=baseline)
    elif format == 'TIFF-FI':
        # FreeImage TIFF compression flags, selectable by name.
        compression_options = {
            'none': imageio.plugins.freeimage.IO_FLAGS.TIFF_NONE,
            'default': imageio.plugins.freeimage.IO_FLAGS.TIFF_DEFAULT,
            'packbits': imageio.plugins.freeimage.IO_FLAGS.TIFF_PACKBITS,
            'adobe': imageio.plugins.freeimage.IO_FLAGS.TIFF_ADOBE_DEFLATE,
            'lzw': imageio.plugins.freeimage.IO_FLAGS.TIFF_LZW,
            'deflate': imageio.plugins.freeimage.IO_FLAGS.TIFF_DEFLATE,
            'logluv': imageio.plugins.freeimage.IO_FLAGS.TIFF_LOGLUV}
        (compression_index,) = gui.fedit([('compression', [2] + list(compression_options.keys()))])
        compression = list(compression_options.keys())[compression_index-1]
        compression_flag = compression_options[compression]
        with gui.qapp.waitCursor(f'Saving to {filepath}'):
            imageio.imwrite(filepath, self.ndarray, format, flags=compression_flag)
    elif format == 'PNG-FI':
        compression_options = [('None', 0), ('Best Speed', 1), ('Default', 6), ('Best Compression', 9)]
        (compression_index, quantize, interlaced) = gui.fedit([('compression', [2] + [item[0] for item in compression_options]), ('quantize', 0), ('interlaced', True)])
        compression = compression_options[compression_index-1][1]
        print(f'compression: {compression}')
        with gui.qapp.waitCursor(f'Saving to {filepath}'):
            imageio.imwrite(filepath, self.ndarray, format, compression=compression, quantize=quantize, interlaced=interlaced)
    elif format == 'PNG-PIL':
        compression_options = [('None', 0), ('Best Speed', 1), ('Default', 6), ('Best Compression', 9)]
        (compression_index, quantize, optimize) = gui.fedit([('compression', [4] + [item[0] for item in compression_options]), ('quantize', 0), ('optimize', True)])
        compression = compression_options[compression_index-1][1]
        if quantize == 0: quantize = None
        print(f'compression: {compression}')
        with gui.qapp.waitCursor(f'Saving to {filepath}'):
            imageio.imwrite(filepath, self.ndarray, format, compression=compression,
                quantize=quantize, optimize=optimize, prefer_uint8=False)
    else:
        with gui.qapp.waitCursor(f'Saving to {filepath}'):
            imageio.imwrite(filepath, self.ndarray, format)
def close_panel(self):
    """Close the panel and explicitly release the image data."""
    super().close_panel()
    #Deleting self.imviewer doesn't seem to delete the imgdata
    del self.imviewer.imgdata
############################
# Edit Menu Connections
def piorImage(self):
    """Show the prior image from the undo history.

    NOTE: the misspelled name ('pior') is kept — it is referenced by the
    Edit menu and is part of the public interface.
    """
    if self.imviewer.imgdata.imghist.prior_length() > 0:
        arr = self.imviewer.imgdata.imghist.prior(self.ndarray)
        #self.imviewer.imgdata.sharray = None
        self.show_array(arr, log=False)
def nextImage(self):
    """Show the next image from the redo side of the history stack."""
    if self.imviewer.imgdata.imghist.next_length() > 0:
        arr = self.imviewer.imgdata.imghist.next(self.ndarray)
        #self.imviewer.imgdata.sharray = None
        self.show_array(arr, log=False)
#---------------------------
def placeRawOnClipboard(self):
    """Copy the raw array to the clipboard as an 8-bit QImage.

    NOTE(review): fixed values 0 and 1 are passed to the converter, so the
    panel's current offset/gain do not appear to be applied here despite
    the menu text — confirm against process_ndarray_to_qimage_8bit.
    """
    clipboard = self.qapp.clipboard()
    array = self.ndarray
    qimg = imconvert.process_ndarray_to_qimage_8bit(array, 0, 1)
    clipboard.setImage(qimg)
def placeQimgOnClipboard(self):
    """Copy the displayed (processed) QImage to the clipboard."""
    clipboard = self.qapp.clipboard()
    #If qimg is not copied, GH crashes on paste after the qimg instance has been garbaged!
    #Clipboard can only take ownership if the object is a local?
    qimg = self.imviewer.imgdata.qimg.copy()
    clipboard.setImage(qimg)
def showFromClipboard(self):
    """Paste the clipboard image into this viewer."""
    arr = gui.get_clipboard_image()
    self.show_array(arr)
############################
# View Menu Connections
def refresh(self):
    """Re-render the currently shown array without changing it."""
    with gui.qapp.waitCursor(f'Refreshing {self.short_title}'):
        # show_array(None) redisplays the existing data.
        self.show_array(None)
def get_gain(self):
    """Display gain derived from the offset..white window and the natural range."""
    natrange = self.imviewer.imgdata.get_natural_range()
    gain = natrange / (self.white - self.offset)
    return gain

def set_gain(self, gain):
    """Set the gain by moving the white point relative to the offset.

    NOTE(review): gain == 0 would divide by zero here — confirm callers
    never pass 0.
    """
    natrange = self.imviewer.imgdata.get_natural_range()
    self.white = self.offset + natrange / gain

# gain is derived state; offset and white are the stored values.
gain = property(get_gain, set_gain)
def offsetGainDialog(self):
    """Ask for offset/gain/gamma/colormap and apply them to the display."""
    with ActionArguments(self) as args:
        args['offset'] = self.offset
        args['gain'] = self.gain
        args['gamma'] = self.gamma
        args['cmap'] = self.colormap

    if args.isNotSet():
        colormaps = imconvert.colormaps
        # fedit combo boxes are 1-based: [index] + choices.
        cmapind = colormaps.index(self.colormap) + 1
        form = [('Offset', self.offset * 1.0),
            ('Gain', self.gain * 1.0),
            ('Gamma', self.gamma * 1.0),
            ('Color Map', [cmapind] + colormaps)]
        results = fedit(form)
        if results is None: return
        self.offset, self.gain, self.gamma, cmapind = results
        self.colormap = colormaps[cmapind-1]
    else:
        self.offset, self.gain = args['offset'], args['gain']
        self.gamma, self.colormap = args['gamma'], args['cmap']

    self.refresh_offset_gain()
def setCurrentOffsetGainAsDefault(self):
    """Remember the current offset, gain and gamma as the panel defaults."""
    self.defaults.update(offset=self.offset, gain=self.gain, gamma=self.gamma)
def defaultOffsetGain(self):
    """Apply the stored default offset, gain and gamma."""
    stored = self.defaults
    self.changeOffsetGain(stored['offset'], stored['gain'], stored['gamma'])
def changeOffsetGain(self, offset, gain, gamma):
    """Change the display offset, gain and gamma.

    Each argument may be a number, None (keep the current value), the
    string 'default' (use the stored default) or any other string, which
    is evaluated as a Python expression.
    """
    # Refactor: the identical resolve logic was triplicated inline.
    offset = self._resolve_display_level(offset, 'offset')
    gain = self._resolve_display_level(gain, 'gain')
    gamma = self._resolve_display_level(gamma, 'gamma')
    if not offset is None: self.offset = offset
    if not gain is None: self.gain = gain
    if not gamma is None: self.gamma = gamma
    self.refresh_offset_gain()

def _resolve_display_level(self, value, key):
    """Resolve one level argument: 'default' -> stored default, other strings -> eval, else pass through."""
    if isinstance(value, str):
        if value == 'default':
            return self.defaults[key]
        # SECURITY: eval on a caller-supplied string — only safe because
        # these values come from trusted in-process scripting.
        return eval(value)
    return value
def blackWhiteDialog(self):
    """Ask for the black/white points (and colormap) and apply them."""
    # Cleanup: an unused `gain1_range` local was removed.
    with ActionArguments(self) as args:
        args['black'] = self.offset
        args['white'] = self.white
        args['cmap'] = self.colormap

    if args.isNotSet():
        colormaps = imconvert.colormaps
        # fedit combo boxes are 1-based: [index] + choices.
        cmapind = colormaps.index(self.colormap) + 1
        black = self.offset
        form = [('Black', black),
            ('White', self.white),
            ('Color Map', [cmapind] + colormaps)]
        results = fedit(form)
        if results is None: return
        black, white, cmapind = results
        self.colormap = colormaps[cmapind-1]
    else:
        black, white = args['black'], args['white']
        self.colormap = args['cmap']

    self.changeBlackWhite(black, white)
def changeBlackWhite(self, black, white):
    """Set the display window from a black and a white point.

    String arguments are evaluated as Python expressions; a None argument
    keeps the current value of that side of the window.  Equal black and
    white fall back to a mid-grey mapping.
    """
    if isinstance(black, str):
        # SECURITY NOTE: eval on a caller-supplied string; trusted input only.
        black = eval(black)
    if isinstance(white, str):
        white = eval(white)
    if black == white:
        # A zero-width window would make the gain infinite.
        print(f'Warning: black and white are set the same ({black}). Setting to mid grey!')
        self.changeMidGrey(black)
        return
    gain1_range = self.imviewer.imgdata.get_natural_range()
    if not (black is None or white is None):
        self.offset = black
        self.gain = gain1_range / (white - black)
    elif white is None:
        white = self.white
        self.offset = black
        self.gain = gain1_range / (white - self.offset)
    elif black is None:
        self.gain = gain1_range / (white - self.offset)
    self.refresh_offset_gain()
def changeGreyGainDialog(self):
    """Ask for a mid-grey level and gain (and colormap) and apply them."""
    gain1_range = self.imviewer.imgdata.get_natural_range()
    # Current mid-grey: the center of the displayed value window.
    grey = self.offset + gain1_range / self.gain / 2

    with ActionArguments(self) as args:
        args['grey'] = grey
        args['gain'] = self.gain
        args['cmap'] = self.colormap

    if args.isNotSet():
        colormaps = imconvert.colormaps
        cmapind = colormaps.index(self.colormap) + 1
        form = [('Grey', grey),
            ('Gain', self.gain * 1.0),
            ('Color Map', [cmapind] + colormaps)]
        results = fedit(form)
        if results is None: return
        grey, gain, cmapind = results
        self.colormap = colormaps[cmapind-1]
    else:
        grey = args['grey']
        gain = args['gain']
        self.colormap = args['cmap']

    self.changeMidGrey(grey, gain)
def changeMidGrey(self, midgrey, gain=None):
    """Center the display window on midgrey, optionally setting a new gain first."""
    if gain is not None:
        self.gain = gain
    half_window = self.imviewer.imgdata.get_natural_range() / self.gain / 2
    self.offset = midgrey - half_window
    self.refresh_offset_gain()
def gainToMinMax(self):
    """Stretch the display window to the image's minimum and maximum values."""
    arr = self.ndarray
    self.changeBlackWhite(arr.min(), arr.max())
def gainToSigma1(self):
with gui.qapp.waitCursor('Gain 1 sigma'):
self.gainToSigma(1)
def gainToSigma2(self):
with gui.qapp.waitCursor('Gain 2 sigma'):
self.gainToSigma(2)
def gainToSigma3(self):
with gui.qapp.waitCursor('Gain 3 sigma'):
self.gainToSigma(3)
def gainToSigma(self, sigma=3, roi=None):
    """Set black/white to an N-sigma range derived from the channel histograms.

    :param sigma: number of sigmas to include in the display range
    :param roi: restrict statistics to the ROI; None means "use the ROI
        if it is currently visible"
    """
    chanstats = self.imviewer.imgdata.chanstats
    if roi is None:
        roi = self.imviewer.roi.isVisible()
    elif roi and not self.imviewer.roi.isVisible():
        # A ROI was requested but none is shown; fall back to the full image.
        roi = False
    # ROI statistics are stored under 'R'-prefixed channel keys.
    if roi:
        clrs = set(('RK', 'RR', 'RG', 'RB'))
    else:
        clrs = set(('K', 'R', 'G', 'B'))
    clrs = clrs.intersection(set(chanstats.keys()))
    blacks = dict()
    whites = dict()
    for clr in clrs:
        stats = chanstats[clr]
        if stats.arr2d is None: continue
        hist = stats.histogram(1)
        starts = stats.starts(1)
        blacks[clr], whites[clr] = get_sigma_range_for_hist(starts, hist, sigma)
    if not blacks:
        # No channel had usable statistics; min()/max() below would raise
        # ValueError on the empty sequences. Fall back to the defaults.
        self.defaultOffsetGain()
        return
    black = min(blacks.values())
    white = max(whites.values())
    if self.ndarray.dtype in ['uint8', 'uint16']:
        if black == white:
            self.defaultOffsetGain()
            return
        else:
            # Make the range at least one code value wide for integer images.
            white += 1
    self.changeBlackWhite(black, white)
def zoomIn(self):
    """Zoom in one step."""
    self.imviewer.zoomIn()

def zoomOut(self):
    """Zoom out one step."""
    self.imviewer.zoomOut()

def setZoom100(self):
    """Show the image at 100% (zoom factor 1)."""
    self.imviewer.setZoom(1)

def setZoom(self):
    """Ask the user for a zoom percentage and apply it."""
    with ActionArguments(self) as args:
        args['zoom'] = self.imviewer.zoomValue * 100
    if args.isNotSet():
        results = fedit([('Zoom value %', args['zoom'])])
        if results is None: return
        args['zoom'] = results[0]
    # The dialog works in percent; the viewer expects a factor.
    self.imviewer.setZoom(args['zoom'] / 100)

def setZoomValue(self, value):
    """Set the zoom to the given factor (1 = 100%)."""
    self.imviewer.setZoom(value)

def zoomFit(self):
    """Delegate: zoom so the image fits the viewport."""
    self.imviewer.zoomFit()

def zoomFull(self):
    """Delegate: zoom to the viewer's full zoom mode."""
    self.imviewer.zoomFull()

def zoomAuto(self):
    """Delegate: let the viewer choose a suitable zoom automatically."""
    self.imviewer.zoomAuto()
def setColorMap(self):
    """Choose the colormap, interactively or via ActionArguments ('cmap')."""
    with ActionArguments(self) as args:
        args['cmap'] = 'grey'
    if args.isNotSet():
        colormapdialog = ColorMapDialog()
        colormapdialog.exec_()
        self.colormap = colormapdialog.cm_name
    else:
        self.colormap = args['cmap']
    self.refresh_offset_gain()
def toggle_hq(self):
    """Toggle the viewer's high-quality zoomed-out rendering and redraw."""
    self.imviewer.hqzoomout = not self.imviewer.hqzoomout
    self.show_array(None)

def toggle_zoombind(self):
    """Toggle the viewer's zoombind flag (zoom coupling to bound viewers)."""
    self.imviewer.zoombind = not self.imviewer.zoombind
def bindImageViewers(self):
    """Bind every image panel to every other image panel."""
    panels = gui.qapp.panels['image']
    for src_id, src_panel in panels.items():
        for tgt_id in panels:
            if tgt_id == src_id:
                continue
            src_panel.addBindingTo('image', tgt_id)

def unbindImageViewers(self):
    """Remove the pairwise bindings between all image panels."""
    panels = gui.qapp.panels['image']
    for src_id, src_panel in panels.items():
        for tgt_id in panels:
            if tgt_id == src_id:
                continue
            src_panel.removeBindingTo('image', tgt_id)
def setBackground(self):
    """Pick the viewer background color, interactively or via r/g/b arguments."""
    old_color = self.imviewer.palette().window().color()
    rgb = old_color.toTuple()[:3]
    with ActionArguments(self) as args:
        args['r'] = rgb[0]
        args['g'] = rgb[1]
        args['b'] = rgb[2]
    if args.isNotSet():
        color = QColorDialog.getColor(old_color)
        try:
            rgb = color.toTuple()[:3]
        except Exception:
            # Narrowed from a bare except (which also swallowed
            # KeyboardInterrupt/SystemExit). Dialog cancelled or the
            # color object has no toTuple(); fall back to black.
            rgb = (0, 0, 0)
    else:
        rgb = (args['r'], args['g'], args['b'])
    config['image background'] = rgb
    self.imviewer.setBackgroundColor(*config['image background'])
def setRoiColor(self):
    """Pick the ROI overlay color, interactively or via r/g/b arguments."""
    old_color = QtGui.QColor(*config['roi color'])
    rgb = old_color.toTuple()[:3]
    with ActionArguments(self) as args:
        args['r'] = rgb[0]
        args['g'] = rgb[1]
        args['b'] = rgb[2]
    if args.isNotSet():
        color = QColorDialog.getColor(old_color)
        try:
            rgb = color.toTuple()[:3]
        except Exception:
            # Narrowed from a bare except (which also swallowed
            # KeyboardInterrupt/SystemExit). Fall back to black.
            rgb = (0, 0, 0)
    else:
        rgb = (args['r'], args['g'], args['b'])
    config['roi color'] = rgb
    # Rebuild the ROI widget so it picks up the new color.
    self.imviewer.roi.initUI()
def togglePixelLabels(self):
    """Toggle the 'pixel_labels' flag in the image configuration."""
    image_conf = config['image']
    image_conf['pixel_labels'] = not image_conf.get('pixel_labels', False)
############################
# Select Menu Connections
def selectAll(self):
    """Reset the selection ROI (presumably to the full image) and show it."""
    selroi = self.imviewer.imgdata.selroi
    selroi.reset()
    self.imviewer.roi.clip()
    self.imviewer.roi.show()

def selectNone(self):
    """Reset the selection ROI and hide the ROI overlay."""
    self.imviewer.imgdata.selroi.reset()
    self.imviewer.roi.clip()
    self.imviewer.roi.hide()
def setRoi(self):
    """Edit the selection ROI numerically through a form dialog."""
    selroi = self.imviewer.imgdata.selroi
    form = [('x start', selroi.xr.start),
            ('x stop', selroi.xr.stop),
            ('x step', selroi.xr.step),
            ('y start', selroi.yr.start),
            ('y stop', selroi.yr.stop),
            ('y step', selroi.yr.step)]
    r = fedit(form, title='ROI')
    if r is None: return
    selroi.xr.start = r[0]
    selroi.xr.stop = r[1]
    selroi.xr.step = r[2]
    selroi.yr.start = r[3]
    selroi.yr.stop = r[4]
    selroi.yr.step = r[5]
    # Clip the new ranges to the image bounds before showing the overlay.
    self.imviewer.roi.clip()
    self.imviewer.roi.show()
def jumpToDialog(self):
    """Ask the user for an (x, y) position and jump the ROI there."""
    selroi = self.imviewer.imgdata.selroi
    form = [('x', selroi.xr.start),
            ('y', selroi.yr.start)]
    results = fedit(form, title='Position')
    if results is None: return
    x, y = results
    self.jumpTo(x, y)

def jumpTo(self, x, y):
    """Put the selection ROI on the single pixel (x, y) and zoom to it.

    :param x: column of the target pixel
    :param y: row of the target pixel
    """
    selroi = self.imviewer.imgdata.selroi
    selroi.xr.start, selroi.yr.start = x, y
    # A 1x1 ROI with unit step selects exactly one pixel.
    selroi.xr.stop = selroi.xr.start + 1
    selroi.xr.step = 1
    selroi.yr.stop = selroi.yr.start + 1
    selroi.yr.step = 1
    self.imviewer.roi.clip()
    self.imviewer.roi.show()
    self.imviewer.zoomToRoi()
    self.roiChanged.emit(self.panid)
def maskValue(self):
    """Ask for a comparison and value(s) and mask the matching pixels.

    Cancelling the dialog clears any existing mask.
    """
    array = self.ndarray
    evalOptions = ['Equal', 'Smaller', 'Larger']
    if array.ndim == 2:
        form = [('Evaluate', [1] + evalOptions),
                ('Value', 0)]
    elif array.ndim == 3:
        form = [('Evaluate', [1] + evalOptions),
                ('Red', 0),
                ('Green', 0),
                ('Blue', 0)]
    else:
        # Unsupported dimensionality; the original code fell through
        # to an unbound 'form' here (NameError).
        return
    result = fedit(form, title='Mask')
    if result is None:
        self.imviewer.imgdata.set_mask(None)
        self.imviewer.refresh()
        return
    evalind, *values = result
    # Map the 1-based dialog index onto a comparison function, replacing
    # three near-identical copies of the masking code.
    compare = {
        1: lambda a, v: a == v,
        2: lambda a, v: a < v,
        3: lambda a, v: a > v,
    }[evalind]
    if array.ndim == 2:
        mask = compare(array, values[0])
    else:
        # All three channels must match for a pixel to be masked.
        mask = compare(array[:, :, 0], values[0])
        mask &= compare(array[:, :, 1], values[1])
        mask &= compare(array[:, :, 2], values[2])
    self.imviewer.imgdata.set_mask(mask)
    self.imviewer.refresh()
############################
# Canvas Menu Connections
def flipHorizontal(self):
    """Show the image mirrored left-right."""
    self.show_array(self.ndarray[:, ::-1])

def flipVertical(self):
    """Show the image mirrored top-bottom."""
    self.show_array(self.ndarray[::-1, :])

def rotate90(self):
    """Rotate by one quarter turn (np.rot90; counter-clockwise in array axes)."""
    # copy() materializes the rotated view into contiguous memory.
    rotated = np.rot90(self.ndarray, 1).copy()
    self.show_array(rotated)

def rotate180(self):
    """Rotate by a half turn (mirror both axes)."""
    self.show_array(self.ndarray[::-1, ::-1])

def rotate270(self):
    """Rotate by three quarter turns (np.rot90 applied three times)."""
    rotated = np.rot90(self.ndarray, 3).copy()
    self.show_array(rotated)

def rotateAny(self):
    """Rotate by an arbitrary angle in degrees using scipy.ndimage.rotate.

    :param float angle:
    """
    with ActionArguments(self) as args:
        args['angle'] = 0.0
    if args.isNotSet():
        form = [('Angle', args['angle'])]
        results = fedit(form)
        if results is None: return
        args['angle'] = results[0]
    with gui.qapp.waitCursor(f'Rotating {args["angle"]} degree'):
        # reshape=True grows the canvas so no corners are cut off.
        procarr = scipy.ndimage.rotate(self.ndarray, args['angle'], reshape=True)
        self.show_array(procarr)
def crop(self):
    """Crop to the current selection and show the copy as the new image."""
    self.select()
    # gui.vr: presumably the ROI view of the selected image -- confirm.
    croped_array = gui.vr.copy()
    gui.img.show(croped_array)
    self.selectNone()
def canvasResize(self):
    """Resize the canvas, keeping the image centered; new area is zero.

    :param int width:
    :param int height:
    """
    old_height, old_width = self.ndarray.shape[:2]
    with ActionArguments(self) as args:
        args['width'], args['height'] = old_width, old_height
    channels = self.ndarray.shape[2] if self.ndarray.ndim == 3 else 1
    if args.isNotSet():
        form = [('Width', args['width']), ('Height', args['height'])]
        results = fedit(form)
        if results is None: return
        args['width'], args['height'] = results
    new_width = args['width']
    new_height = args['height']
    if channels == 1:
        procarr = np.ndarray((new_height, new_width), dtype=self.ndarray.dtype)
    else:
        procarr = np.ndarray((new_height, new_width, channels), dtype=self.ndarray.dtype)
        #What with the alpha channel?
    procarr[:] = 0
    # Copy the overlapping, centered region from old to new canvas.
    width = min(old_width, new_width)
    height = min(old_height, new_height)
    ofow = (old_width - width) // 2    # x offset in the old canvas
    ofnw = (new_width - width) // 2    # x offset in the new canvas
    ofoh = (old_height - height) // 2  # y offset in the old canvas
    ofnh = (new_height - height) // 2  # y offset in the new canvas
    procarr[ofnh:ofnh+height, ofnw:ofnw+width, ...] = self.ndarray[ofoh:ofoh+height, ofow:ofow+width, ...]
    self.show_array(procarr)
def resize(self):
    """Rescale the image to a new width/height using spline interpolation."""
    source = self.sharray.ndarray
    shape = self.sharray.shape
    form = [("width", shape[1]), ("height", shape[0]), ("order", 1)]
    results = fedit(form)
    if results is None: return
    width, height, order = results
    factorx = width / shape[1]
    factory = height / shape[0]
    if source.ndim == 2:
        scaled = scipy.ndimage.zoom(source, (factory, factorx), order=order, mode="nearest")
    elif source.ndim == 3:
        #some bug here
        # NOTE(review): the color branch was flagged buggy by the original
        # author ("returned array dimensions are not on the expected
        # index") -- verify before relying on it.
        scaled = scipy.ndimage.zoom(source, (factory, factorx, 1.0), order=order, mode="nearest")
        #returned array dimensions are not on the expected index
    self.show_array(scaled)
############################
# Image Menu Connections
def fillValue(self):
    """Fill the whole image with a single value.

    :param float value:
    """
    with ActionArguments(self) as args:
        args['value'] = 0.0
    if args.isNotSet():
        results = fedit([('Value', args['value'])])
        if results is None:
            return
        args['value'] = results[0]
    filled = self.ndarray.copy()
    filled[:] = args['value']
    self.show_array(filled)
def addNoise(self):
    """Add gaussian noise with a user-chosen standard deviation.

    The computation runs in the selected console's task, not in the
    gui thread.
    """
    form = [('Standard Deviation', 1.0)]
    results = fedit(form)
    if results is None: return
    std = float(results[0])

    def run_in_console(std):
        # Executed in the console process; gui.vs is the selected image.
        arr = gui.vs
        shape = arr.shape
        dtype = arr.dtype
        # +0.5 rounds instead of truncating when clipping back to integers.
        procarr = clip_array(arr + np.random.randn(*shape) * std + 0.5, dtype)
        gui.show(procarr)

    panel = gui.qapp.panels.selected('console')
    panel.task.call_func(run_in_console, args=(std,))
def invert(self):
    """Show the bitwise inverse of the current image."""
    self.show_array(np.invert(self.ndarray))
def swapRGB(self):
    """Swap the red and blue channels (RGB <-> BGR)."""
    if not self.ndarray.ndim >= 3:
        gui.dialog.msgbox('The image has not 3 or more channels', icon='error')
        return
    # Copy first (keeps any extra channels), then exchange channels 0 and 2.
    swapped = self.ndarray.copy()
    swapped[:, :, 0] = self.ndarray[:, :, 2]
    swapped[:, :, 2] = self.ndarray[:, :, 0]
    self.show_array(swapped)
def toMonochroom(self):
    """Average the color channels into a single monochrome channel."""
    array = self.ndarray
    if not array.ndim == 3:
        return
    dtype = array.dtype
    # mean() promotes to float; clip_array converts back to the source dtype.
    procarr = clip_array(array.mean(2), dtype)
    self.show_array(procarr)
def toPhotoMonochroom(self):
    """Convert an RGB image to monochrome using photometric luma weights."""
    array = self.ndarray
    if not array.ndim == 3:
        return
    # ITU-R BT.601 luma coefficients. The blue weight was mistyped as
    # 0.144 before; the correct value is 0.114 (0.299+0.587+0.114 = 1).
    mono = np.dot(array, [0.299, 0.587, 0.114])
    procarr = clip_array(mono, array.dtype)
    self.show_array(procarr)
def is8bit(self):
    """Return True for 8-bit integer images."""
    return self.ndarray.dtype in ('uint8', 'int8')

def is16bit(self):
    """Return True for 16-bit integer images."""
    return self.ndarray.dtype in ('uint16', 'int16')

def to8bit(self):
    """Convert a 16-bit image to 8 bit by dropping the low byte."""
    if self.is16bit():
        self.show_array((self.ndarray >> 8).astype('uint8'))

def to16bit(self):
    """Convert an 8-bit image to 16 bit by shifting into the high byte."""
    if self.is8bit():
        self.show_array(self.ndarray.astype('uint16') << 8)
def to_dtype(self):
    """Convert the selected image to another dtype by clipping or bit shifting."""
    dtypes = ['uint8', 'uint16', 'double']
    scales = ['bit shift', 'clip']
    form = [
        ('Data Type', [1] + dtypes),
        ('Scale', [1] + scales)]
    results = fedit(form, title='Convert Data Type')
    if results is None: return
    dtype = dtypes[results[0]-1]
    scale = scales[results[1]-1]
    array = gui.vs
    if scale == 'clip' and dtype in ['uint8', 'uint16']:
        if dtype == 'uint8':
            lower, upper = 0, 255
        else:
            lower, upper = 0, 65535
        # clip() returns a new array; the source image is untouched.
        array = array.clip(lower, upper)
        array = array.astype(dtype)
    elif scale == 'bit shift' and dtype in ['uint8', 'uint16']:
        if array.dtype == 'uint8' and dtype == 'uint16':
            array = array.astype(dtype)
            array <<= 8
        elif array.dtype == 'uint16' and dtype == 'uint8':
            # Shift on a copy. The old code did `array >>= 8` directly on
            # gui.vs, mutating the displayed source image in place before
            # converting it.
            array = (array >> 8).astype(dtype)
    else:
        array = array.astype(dtype)
    gui.show(array)
def swapbytes(self):
    """Swap the byte order of the selected image and show the result."""
    gui.show(gui.vs.byteswap())
def adjustLighting(self):
    """Apply array * gain + offset to the pixel values and show the result.

    :param float offset:
    :param float gain:
    """
    with ActionArguments(self) as args:
        # Defaults derived from the current display mapping.
        args['offset'] = -self.offset * 1.0
        args['gain'] = self.gain * 1.0
    if args.isNotSet():
        form = [('Offset', args['offset']),
                ('Gain', args['gain'])]
        results = fedit(form, title='Adjust Lighting')
        if results is None: return
        offset, gain = results
    else:
        offset, gain = args['offset'], args['gain']
    #TO DO: use value mapping if possible
    array = self.ndarray
    procarr = clip_array(array * gain + offset, array.dtype)
    self.show_array(procarr)

def adjustGamma(self):
    """Apply a gamma curve, keeping the given upper value fixed.

    :param float gamma:
    :param float upper:
    """
    with ActionArguments(self) as args:
        args['gamma'] = 1.0
        args['upper'] = 255
    if args.isNotSet():
        form = [('Gamma', args['gamma']),
                ('Upper', args['upper'])]
        results = fedit(form, title='Adjust Gamma')
        if results is None: return
        gamma, upper = results
    else:
        gamma, upper = args['gamma'], args['upper']
    #TO DO: use value mapping if possible
    array = self.ndarray
    # x**gamma * upper**(1-gamma) maps `upper` onto itself for any gamma.
    procarr = clip_array(np.power(array, gamma) * upper ** (1-gamma), array.dtype)
    self.show_array(procarr)
############################
# Process Menu Connections
def bayer_split_tiles(self):
    """Regroup the four bayer phase planes into one image of four quadrants."""
    arr = self.ndarray
    blocks = []
    # One subsampled plane per (row, col) phase of the 2x2 bayer pattern.
    for y, x in [(0,0),(0,1),(1,0),(1,1)]:
        blocks.append(arr[y::2, x::2, ...])
    # Top row: phases (0,0) and (0,1); bottom row: phases (1,0) and (1,1).
    split = np.concatenate([
        np.concatenate([blocks[0], blocks[1]], axis=1),
        np.concatenate([blocks[2], blocks[3]], axis=1)])
    self.show_array(split)
def colored_bayer(self):
    """Colorize the bayer pattern of the image without demosaicing."""
    baypatns = ['RGGB', 'BGGR', 'GRBG', 'GBRG']
    form = [('Bayer Pattern', [1] + baypatns)]
    results = fedit(form, title='Demosaic')
    if results is None:
        # Dialog cancelled; the old code crashed on None[0].
        return
    baypatn = baypatns[results[0] - 1]
    procarr = bayer_split(self.ndarray, baypatn)
    self.show_array(procarr)

def demosaic(self):
    """Demosaic the image with bilinear interpolation in the console process."""
    baypatns = ['RGGB', 'BGGR', 'GRBG', 'GBRG']
    form = [('Bayer Pattern', [1] + baypatns)]
    results = fedit(form, title='Demosaic')
    if results is None:
        # Dialog cancelled; the old code crashed on None[0].
        return
    baypatn = baypatns[results[0] - 1]
    code = f"""\
from gdesk.panels.imgview.demosaic import demosaicing_CFA_Bayer_bilinear
procarr = demosaicing_CFA_Bayer_bilinear(gui.vs, '{baypatn}')
gui.show(procarr)"""
    panel = gui.qapp.panels.selected('console')
    panel.exec_cmd(code)
def makeBlueprint(self):
    """Create a blueprint thumbnail of the current image in a new viewer."""
    with gui.qapp.waitCursor('making blueprint'):
        arr = self.ndarray
        if arr.ndim == 3:
            # The blueprint works on monochrome data; average the channels.
            dtype = arr.dtype
            arr = arr.mean(2).astype(dtype)
        blueprint = make_thumbnail(arr)
        gui.img.new()
        gui.img.show(blueprint)
def externalProcessDemo(self):
    """Demo of chained asynchronous calls in a child console process.

    mirror_x runs first; its completion callback launches the high
    pass filter, whose callback only reports completion.
    """
    panel = gui.qapp.panels.select_or_new('console', None, 'child')
    panel.task.wait_process_ready()
    from .proxy import ImageGuiProxy

    def stage1_done(mode, error_code, result):
        # Called when mirror_x finished; chain the second stage.
        gui.msgbox('Mirroring done')
        panel.task.call_func(ImageGuiProxy.high_pass_current_image, callback=stage2_done)

    def stage2_done(mode, error_code, result):
        gui.msgbox('Highpass filter done')

    panel.task.call_func(ImageGuiProxy.mirror_x, callback=stage1_done)

def measureDistance(self):
    """Start an interactive distance measurement in the selected console."""
    panel = gui.qapp.panels.selected('console')
    #panel.task.wait_process_ready()
    from .proxy import ImageGuiProxy

    def stage1_done(mode, error_code, result):
        # Result is not used; the call is fire-and-forget.
        pass

    panel.task.call_func(ImageGuiProxy.get_distance, callback=stage1_done)
############################
# Analyse Menu Connections
def horizontalSpectrogram(self):
    """Compute the horizontal spectrogram of the image in the console task."""
    panel = gui.qapp.panels.selected('console')
    panel.task.call_func(spectr_hori, args=(gui.vs,))

def verticalSpectrogram(self):
    """Compute the vertical spectrogram of the image in the console task."""
    panel = gui.qapp.panels.selected('console')
    panel.task.call_func(spectr_vert, args=(gui.vs,))
#############################
def show_array(self, array, zoomFitHist=False, log=True):
    """Display a new array and notify listeners of the content change."""
    self.refresh_offset_gain(array, log=log)
    self.contentChanged.emit(self.panid, zoomFitHist)

def select(self):
    """Select this panel; emit change signals only on a fresh selection."""
    was_selected = super().select()
    if not was_selected:
        self.gainChanged.emit(self.panid)
        self.contentChanged.emit(self.panid, False)
    return was_selected

def refresh_offset_gain(self, array=None, zoomFitHist=False, log=True):
    """Re-render with the current offset/white/colormap/gamma settings.

    zoomFitHist is accepted for signature compatibility but unused here.
    """
    self.imviewer.imgdata.show_array(array, self.offset, self.white, self.colormap, self.gamma, log)
    self.statuspanel.setOffsetGainInfo(self.offset, self.gain, self.white, self.gamma)
    self.gainChanged.emit(self.panid)
    self.imviewer.refresh()

@property
def ndarray(self):
    # Raw numpy array behind the shared image buffer.
    return self.sharray.ndarray

@property
def sharray(self):
    # Shared array object held by the viewer's image data.
    return self.imviewer.imgdata.sharray
class ImageViewer(ImageViewerBase):
    """Basic image viewer panel without profile side panels."""
    #contentChanged = Signal(int)
    #gainChanged = Signal(int)
    panelShortName = 'basic'
    userVisible = True

    def __init__(self, *args, **kwargs):
        #super().__init__(parent, panid, 'image')
        super().__init__(*args, **kwargs)
        self.imviewer = ImageViewerWidget(self)
        self.imviewer.roi.roiChanged.connect(self.passRoiChanged)
        # The ROI reuses this panel's Select menu as its context menu.
        self.imviewer.roi.get_context_menu = self.get_select_menu
        self.setCentralWidget(self.imviewer)
        self.imviewer.pickerPositionChanged.connect(self.set_info_xy_val)
        self.imviewer.zoomChanged.connect(self.statuspanel.set_zoom)
        self.imviewer.zoomPanChanged.connect(self.emitVisibleRegionChanged)

    def passRoiChanged(self):
        """Re-emit the widget's roiChanged signal tagged with this panel's id."""
        self.roiChanged.emit(self.panid)

    def emitVisibleRegionChanged(self):
        """Broadcast the visible region; send the zoom only when zoombind is on."""
        if self.imviewer.zoombind:
            self.visibleRegionChanged.emit(*self.imviewer.visibleRegion(normalized=True, clip_square=True), False, False, self.imviewer.zoomValue)
        else:
            # 0.0 presumably tells listeners to keep their own zoom -- confirm.
            self.visibleRegionChanged.emit(*self.imviewer.visibleRegion(normalized=True, clip_square=True), False, False, 0.0)
class ImageProfileWidget(QWidget):
    """Image viewer widget with optional row/column mean-profile side panels."""

    def __init__(self, parent):
        super().__init__(parent=parent)
        self.imviewer = ImageViewerWidget(self)
        # Small corner button that toggles the profile panels.
        self.profBtn = QtWidgets.QPushButton(QtGui.QIcon(str(respath / 'icons' / 'px16' / 'diagramm.png')), None, self)
        self.profBtn.setFixedHeight(20)
        self.profBtn.setFixedWidth(20)
        self.profBtn.clicked.connect(self.toggleProfileVisible)
        self.rowPanel = ProfilerPanel(self, 'x', self.imviewer)
        self.colPanel = ProfilerPanel(self, 'y', self.imviewer)
        self.gridsplit = GridSplitter(None)
        # Keep the profile plots aligned with the image while zooming/panning.
        self.imviewer.zoomPanChanged.connect(self.colPanel.zoomToImage)
        self.imviewer.zoomPanChanged.connect(self.rowPanel.zoomToImage)
        # Row profile above the image, column profile to its left.
        self.gridsplit.addWidget(self.rowPanel, 0, 1)
        self.gridsplit.addWidget(self.colPanel, 1, 0)
        self.gridsplit.addWidget(self.imviewer, 1, 1)
        self.cornerLayout = QtWidgets.QGridLayout()
        self.cornerLayout.addWidget(self.profBtn, 0, 0, alignment=Qt.AlignRight | Qt.AlignBottom)
        self.gridsplit.addLayout(self.cornerLayout, 0, 0)
        self.setLayout(self.gridsplit)
        # Start with only the rulers visible (goes through the property setter).
        self.profilesVisible = False

    def toggleProfileVisible(self):
        """Flip the profile visibility; the property setter does the work."""
        self.profilesVisible = not self.profilesVisible

    def showOnlyRuler(self):
        """Collapse both profile panels down to their rulers."""
        self.rowPanel.showOnlyRuler()
        self.colPanel.showOnlyRuler()
        self._profilesVisible = False
        gui.qapp.processEvents()
        self.refresh_profile_views()

    def showProfiles(self):
        """Expand both profile panels and draw the mean profiles."""
        self.rowPanel.setMinimumHeight(20)
        self.rowPanel.setMaximumHeight(500)
        self.colPanel.setMinimumWidth(20)
        self.colPanel.setMaximumWidth(500)
        # Give the profile row/column about 1/5 of the available stretch.
        strow = self.gridsplit.getRowStretches()
        stcol = self.gridsplit.getColumnStretches()
        rowspan = strow[0]+ strow[1]
        colspan = stcol[0] + stcol[1]
        target = rowspan // 5
        self.gridsplit.setRowStretches((target,rowspan-target))
        self.gridsplit.setColumnStretches((target,colspan-target))
        self.colPanel.showAll()
        self.rowPanel.showAll()
        self._profilesVisible = True
        self.drawMeanProfile()
        gui.qapp.processEvents()
        self.refresh_profile_views()

    def drawMeanProfile(self):
        """Plot the column-mean and row-mean profiles of the current image."""
        arr = self.imviewer.imgdata.sharray.ndarray
        if arr.ndim > 2:
            # Color images: average the channels first.
            arr = arr.mean(2)
        rowProfile = arr.mean(0)
        colProfile = arr.mean(1)
        self.rowPanel.drawMeanProfile(np.arange(len(rowProfile)), rowProfile)
        self.colPanel.drawMeanProfile(np.arange(len(colProfile)), colProfile)
        self.refresh_profile_views()

    def set_profiles_visible(self, value):
        """Setter backing the profilesVisible property."""
        if value:
            self.showProfiles()
        else:
            self.showOnlyRuler()

    profilesVisible = property(lambda self: self._profilesVisible, set_profiles_visible)

    def refresh_profile_views(self):
        """Re-sync both profile views with the image zoom and refresh them."""
        self.colPanel.zoomToImage()
        self.rowPanel.zoomToImage()
        if self.colPanel.view.auto_zoom:
            self.colPanel.zoomFit()
        self.colPanel.view.refresh()
        if self.rowPanel.view.auto_zoom:
            self.rowPanel.zoomFit()
        self.rowPanel.view.refresh()
class ImageProfilePanel(ImageViewerBase):
    """Image viewer panel with row/column mean-profile plots around the image."""
    panelShortName = 'image-profile'
    userVisible = True

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.imgprof = ImageProfileWidget(self)
        self.setCentralWidget(self.imgprof)
        self.imviewer.pickerPositionChanged.connect(self.set_info_xy_val)
        self.imviewer.zoomChanged.connect(self.statuspanel.set_zoom)
        self.imviewer.zoomPanChanged.connect(self.emitVisibleRegionChanged)
        self.imviewer.roi.roiChanged.connect(self.passRoiChanged)
        self.imviewer.roi.get_context_menu = self.get_select_menu
        self.addMenuItem(self.viewMenu, 'Show/Hide Profiles' , self.showHideProfiles,
            checkcall=lambda: self.imgprof.profilesVisible,
            statusTip="Show or Hide the image column and row profiles")
        if not kwargs.get('empty', False):
            # Show a default test image unless an empty panel was requested.
            self.openImage(respath / 'images' / 'gamma_test_22.png', zoom=1)

    def emitVisibleRegionChanged(self):
        """Broadcast the visible region; send the zoom only when zoombind is on."""
        if self.imviewer.zoombind:
            self.visibleRegionChanged.emit(*self.imviewer.visibleRegion(normalized=True, clip_square=True), False, False, self.imviewer.zoomValue)
        else:
            self.visibleRegionChanged.emit(*self.imviewer.visibleRegion(normalized=True, clip_square=True), False, False, 0.0)

    def changeVisibleRegion(self, x, y, w, h, zoomSnap, emit, zoomValue):
        """Apply a visible region broadcast by a bound viewer."""
        self.imgprof.imviewer.zoomNormalized(x, y, w, h, zoomSnap, emit, zoomValue)
        self.imgprof.colPanel.zoomToImage()
        self.imgprof.rowPanel.zoomToImage()
        self.imviewer.roi.recalcGeometry()

    def passRoiChanged(self):
        """Re-emit the widget's roiChanged signal tagged with this panel's id."""
        self.roiChanged.emit(self.panid)

    def show_array(self, array, zoomFitHist=False, log=True):
        """Show a new array and update the profile plots accordingly."""
        super().show_array(array, zoomFitHist, log=log)
        if self.imgprof.profilesVisible:
            self.imgprof.drawMeanProfile()
        else:
            self.imgprof.refresh_profile_views()

    @property
    def imviewer(self):
        # The inner viewer widget lives on the profile compound widget.
        return self.imgprof.imviewer

    def showHideProfiles(self):
        """Menu handler: toggle profile visibility."""
        self.imgprof.profilesVisible = not self.imgprof.profilesVisible
|
thocoo/gamma-desk | gdesk/panels/__init__.py | <reponame>thocoo/gamma-desk<filename>gdesk/panels/__init__.py
from .. import config, gui
if config.get('qapp', False):
from .base import BasePanel, CheckMenu, thisPanel, selectThisPanel |
thocoo/gamma-desk | gdesk/panels/imgview/dialogs.py | <gh_stars>0
import logging
from pathlib import Path
from collections import OrderedDict
import numpy as np
from qtpy import QtCore, QtGui, QtWidgets
from qtpy.QtCore import Qt
from ... import gui, config
from ...panels.base import BasePanel, CheckMenu
from ...dialogs.formlayout import fedit
from ...utils import get_factors
logger = logging.getLogger(__name__)
respath = Path(config['respath'])
class RawImportForm(QtWidgets.QWidget):
    """Form to specify offset, dtype, byte order and size of a raw image file."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        vbox = QtWidgets.QVBoxLayout()
        # setContentsMargins covers what the deprecated QLayout.setMargin(5)
        # call (removed in Qt 6) duplicated right after it.
        vbox.setContentsMargins(5, 5, 5, 5)
        self.setLayout(vbox)
        flay = QtWidgets.QFormLayout()
        vbox.addLayout(flay)
        self.offset = QtWidgets.QLineEdit()
        flay.addRow('Offset', self.offset)
        self.dtype = QtWidgets.QLineEdit()
        flay.addRow('Data type', self.dtype)
        self.byteorder = QtWidgets.QComboBox()
        self.byteorder.addItem('litle endian')
        self.byteorder.addItem('big endian')
        flay.addRow('Byte Order', self.byteorder)
        self.guess = QtWidgets.QPushButton('Resolutions')
        self.guess.clicked.connect(self.guessSize)
        flay.addRow('Guess size', self.guess)
        self.width = QtWidgets.QLineEdit()
        flay.addRow('Width', self.width)
        self.height = QtWidgets.QLineEdit()
        flay.addRow('Height', self.height)

    def guessSize(self):
        """Propose width x height pairs that exactly fit the file size."""
        flatsize = len(self.parent().data)
        flatsize = flatsize - int(self.offset.text())
        # Number of pixels = remaining bytes / bytes per pixel.
        flatsize /= np.dtype(self.dtype.text()).itemsize
        factors = get_factors(flatsize)
        ratios = {}
        for width in factors:
            height = flatsize // width
            ratios[width/height] = [int(width), int(height)]
        sorted_ratios = sorted(ratios)
        resolutions = [' x '.join(str(v) for v in ratios[ratio]) for ratio in sorted_ratios]
        # Pre-select the middle (most square-like) resolution.
        options_form = [('Resolutions', [len(factors) // 2] + resolutions)]
        results = fedit(options_form)
        if results is None:
            # Dialog cancelled; the old code crashed on None[0].
            return
        choosen = results[0] - 1
        width, height = ratios[sorted_ratios[choosen]]
        #height, width = get_factors_equal(flatsize, 2)
        self.height.setText(str(height))
        self.width.setText(str(width))
class RawImportDialog(QtWidgets.QDialog):
    """Dialog wrapping RawImportForm with Ok/Cancel buttons."""

    def __init__(self, data):
        super().__init__()
        # Raw file content; RawImportForm.guessSize reads it via parent().data.
        self.data = data
        self.initUI()

    def initUI(self):
        """Build the widget tree and wire the buttons."""
        self.form = RawImportForm(self)
        self.buttonOk = QtWidgets.QPushButton('Ok', self)
        self.buttonCancel = QtWidgets.QPushButton('Cancel', self)
        vbox = QtWidgets.QVBoxLayout()
        vbox.addWidget(self.form)
        bhbox = QtWidgets.QHBoxLayout()
        bhbox.addStretch()
        bhbox.addWidget(self.buttonOk)
        bhbox.addWidget(self.buttonCancel)
        vbox.addLayout(bhbox)
        # setLayout was called twice before; once is enough.
        self.setLayout(vbox)
        # New-style signal connections. QObject.connect with SIGNAL()
        # strings is the obsolete Qt4 API and does not exist in Qt5/qtpy.
        self.buttonOk.clicked.connect(self.accept)
        self.buttonCancel.clicked.connect(self.reject)
        self.show()
|
thocoo/gamma-desk | gdesk/panels/panels.py | import threading
import sys, os
import ctypes
from collections import OrderedDict
import logging
import importlib
import pprint
from pathlib import Path
import numpy as np
from qtpy import QtGui, QtWidgets, QtCore
from qtpy.QtCore import Qt
from .. import config, gui, __release__
from ..core import conf
from .base import BasePanel
from .window import MainWindow
from ..ezdock.ezdock import DockManager
from ..ezdock.laystruct import LayoutStruct
from ..utils import new_id_using_keys
from ..gcore.utils import getMenuAction
respath = Path(config['respath'])
sck = config['shortcuts']
logger = logging.getLogger(__name__)
class Panels(object):
    """Registry of all panels, grouped by category, with docking support.

    self.panels maps category -> OrderedDict of panid -> panel; the
    ordering of both dicts encodes selection history (most recently
    selected entry last).
    """

    def __init__(self, qapp):
        self.panels = OrderedDict()
        self.ezm = DockManager(self, qapp)
        self.qapp = qapp

    def keys(self):
        """Return the registered categories."""
        return self.panels.keys()

    def __getitem__(self, category):
        return self.get(category)

    def get(self, category, default=None):
        """Return the panel dict of a category, or default if absent."""
        return self.panels.get(category, default)

    def id_exists(self, category, panid):
        """Return True if panid exists in category; creates the category dict."""
        if not category in self.keys():
            self.panels[category] = OrderedDict()
        return panid in self.panels[category].keys()

    def new_id(self, category):
        """Return a fresh, unused panel id for the category."""
        return int(new_id_using_keys(tuple(self[category].keys())))

    def items(self):
        for item in self.panels.items():
            yield item

    def move_to_end(self, widget, category=None):
        """Mark widget as most recently selected; return its (panid, category).

        If category is None, all categories are searched for the widget.
        """
        if not category is None:
            catpanels = self.panels[category]
            # Fix: this branch used to iterate an undefined name 'panels',
            # raising NameError whenever a category was given.
            panid = next((k for k, v in catpanels.items() if v is widget), None)
        else:
            for category, catpanels in self.panels.items():
                panid = next((k for k, v in catpanels.items() if v is widget), None)
                if not panid is None: break
            else:
                panid = None
                category = None
        if not panid is None:
            catpanels.move_to_end(panid)
            self.panels.move_to_end(category)
        return panid, category

    def __iter__(self):
        for category in self.keys():
            yield category, self[category]

    def get_active_panid(self, category, panidpos=-1):
        """Return the id of the active panel of a category, or None."""
        panel = self.selected(category, panidpos)
        if panel is None:
            return None
        else:
            return panel.panid

    def new(self, category, paneltype=None, windowname=None, size=None, *args, **kwargs):
        """Create a new panel of the given category and type."""
        image_classes = self.classes_of_category(category)
        if paneltype is None:
            # Default to the first registered class of the category.
            ImageClass = next(iter(image_classes.values()))
        else:
            ImageClass = image_classes[paneltype]
        panel = self.new_panel(ImageClass, windowname, None, size=size, args=args, kwargs=kwargs)
        return panel

    def select_or_new(self, category, panid=None, defaulttype='basic', parentName='main', args=(), kwargs={}):
        """
        If panid < 0, -1: select the active panel, -2: selected before that, ...
        panid > 0: select the panel if exists, otherwise a new with that number
        """
        if not panid is None and panid < 0:
            panel = self.selected(category, panid)
        elif category in self.keys():
            panel = self[category].get(panid, None)
        else:
            panel = None
        if panid is not None and panid < 0:
            panid = None
        if panel is None:
            panel = self.new(category, defaulttype, parentName, *args, **kwargs)
        panel.select()
        return panel

    def selected_category(self):
        """Return the most recently selected category."""
        return tuple(self.panels.keys())[-1]

    def selected(self, category, panidpos=-1, panel=True):
        """Return the panel (or its id) at selection-history position panidpos."""
        assert panidpos < 0
        if category in self.keys():
            panids = tuple(self[category].keys())
        else:
            return None
        if abs(panidpos) <= len(panids):
            panid = panids[panidpos]
            if panel:
                panel = self[category][panid]
                return panel
            else:
                return panid
        else:
            return None

    def reselect_all(self):
        """Re-trigger selection of the active panel of every category."""
        for category in list(self.keys()):
            panel = self.selected(category)
            if not panel is None:
                #logger.info(f'Selecting {category}: {panel.panid}')
                panel.select()

    def restore_state_from_config(self, layout_name):
        """Restore a window layout by name or by numeric shortcut."""
        if isinstance(layout_name, int):
            layout_name = config['shortcuts']['layout'][str(layout_name)]
        perspective = config['layout'][layout_name]
        self.ezm.set_perspective(perspective)

    def classes_of_category(self, category):
        """Return {panelShortName: class} for the user panel classes of a category."""
        panelClasses = BasePanel.userPanelClasses()
        panelClassesCat = panelClasses.get(category, [])
        return dict([(Cls.panelShortName, Cls) for Cls in panelClassesCat])

    def new_panel(self, PanelClass, parentName=None, panid=None, floating=False,
            position=None, size=None, args=(), kwargs={}):
        """Instantiate a panel class, dock or float it, and select it."""
        if parentName is None:
            activeWindow = self.qapp.activeWindow()
            if isinstance(activeWindow, MainWindow):
                parentName = activeWindow.name
        panel = PanelClass(None, panid, *args, **kwargs)
        if not size is None:
            panel.setGeometry(0, 0, size[0], size[1])
        #panel.show()
        if floating:
            window = panel
        else:
            window = self.ezm.new_window_on_panel(panel, parentName)
        #window.activateWindow()
        if position is None:
            position = self.place_window(window, panel.category)
        window.move(position)
        panel.select()
        return panel

    def place_window(self, window, category):
        """Choose a screen position for a new window, cascading from the prior panel."""
        screen = QtWidgets.QDesktopWidget().screenNumber(self.qapp.windows['main'])
        desktop_rect = QtWidgets.QDesktopWidget().availableGeometry(screen)
        window_rect = window.frameGeometry()
        prior_panel = self.selected(category, -2)
        if not prior_panel is None:
            # Cascade: offset 10 px from the previously selected panel.
            prior_rect = prior_panel.frameGeometry()
            topleft = prior_panel.mapToGlobal(QtCore.QPoint(0, 0))
            prior_rect.moveTopLeft(QtCore.QPoint(topleft.x()+10, topleft.y()+10))
            center = prior_rect.center()
        else:
            center = desktop_rect.center()
        window_rect.moveCenter(center)
        position = window_rect.topLeft()
        # Make sure the proposed rectangle is visible on some screen.
        visible = False
        for screen in range(QtWidgets.QDesktopWidget().screenCount()):
            if QtWidgets.QDesktopWidget().availableGeometry(screen).contains(window_rect):
                visible = True
        if not visible:
            position = desktop_rect.topLeft()
        return position

    def get_menu_action(self, category, panid, menutrace):
        """
        Trigger a menu action of a panel.

        :param str category: Example 'image'
        :param int id: Example 1
        :param list menutrace: Example ['File', 'New Image']
        """
        if category is None:
            window = self.qapp.activeWindow()
            if not isinstance(window, MainWindow):
                raise KeyError('Action not found')
            category = window.activeCategory
        if panid is None:
            panid = self.get_active_panid(category)
            if panid is None:
                return
        panel = self[category][panid]
        return getMenuAction(panel.menuBar(), menutrace)

    def removeBindingsTo(self, category, panid):
        """Remove every binding that points to the given (category, panid)."""
        for c, panels in self.items():
            for p, panel in panels.items():
                if (category, panid) in panel.bindings:
                    panel.removeBindingTo(category, panid)
|
thocoo/gamma-desk | gdesk/panels/imgview/blueprint.py | <reponame>thocoo/gamma-desk
import numpy
import time
import math
HFG = 8
USEFORDER = True
XKERN = 8
YKERN = 8
def make_thumbnail(array, max_long_side=240, hfg=HFG, bayer=False):
    """Make a blueprint thumbnail whose long side is at most max_long_side.

    :param array: 2d image; objects exposing tondarray() are converted first
    :param int max_long_side: maximum length of the longest thumbnail side
    :param float hfg: high frequency gain passed to make_blueprint
    :param bool bayer: whether the data contains a bayer pattern
    """
    if hasattr(array, 'tondarray'):
        # Fix: the old code called tondarray on an undefined name 'image'.
        array = array.tondarray('uint16')  # reference to uint16 buffer of Bindex
    height, width = array.shape
    long_side = max(height, width)
    # Integer downscale ratio, rounded up so the result never exceeds the limit.
    ratio = math.ceil(long_side / max_long_side)
    thumb = make_blueprint(array, ratio, ratio, hfg, bayer)
    return thumb
def get_blue_print(array, xkern=XKERN, ykern=YKERN, hfg=HFG, bayer=False):
    """Backward-compatible alias for make_blueprint; see its docstring."""
    return make_blueprint(array, xkern, ykern, hfg, bayer)
def make_blueprint(array, xkern=XKERN, ykern=YKERN, hfg=HFG, bayer=False):
    """
    Make a so called blueprint thumbnail.

    The 16-bit grey image is converted to a downscaled color version.
    The downscale kernel is typically 8x8, so the statistics of 64 pixels
    are used to derive three 8-bit numbers based on the average, the
    minimum and the maximum:

    - Green for the average of the kernel.
    - Blue for the difference of the minimum to the average.
    - Red for the difference of the maximum to the average.

    The min-max differences are typically gained up.
    The final result is an image of about 1/100 of the original data.
    It blows-up high frequency artifacts, making them much more visible.

    :param array: A 2d Image
    :type array: np.ndarray or PyBix
    :param int xkern: width of the kernel
    :param int ykern: height of the kernel
    :param float hfg: high frequency gain
    :param bool bayer: Does it contains a bayer pattern (color sensor?)
    :raises AttributeError: if the image has more than one channel
    :raises ValueError: if the array dtype is not uint8 or uint16
    """
    def get_blue_print_of_roi(array, xkern, ykern, hfg):
        # The shape of array is supposed to be a multiple of xkern and ykern.
        ydim, xdim = array.shape
        xdimsc = xdim // xkern
        ydimsc = ydim // ykern
        # Note that a reshape doesn't really do anything on the memory buffer,
        # it only redefines the strides.
        tmp2 = array.reshape(ydim * xdimsc, xkern)
        # Statistics of every xkern successive pixels in a row;
        # the total size of these arrays is xkern times less.
        min3 = tmp2.min(1)
        mean3 = tmp2.mean(1)
        max3 = tmp2.max(1)
        # Reshuffle the memory; the copy is needed to really remap it.
        if USEFORDER == False:
            # swap row and column in memory
            min4 = min3.reshape(ydim, xdimsc).T.copy()
            mean4 = mean3.reshape(ydim, xdimsc).T.copy()
            max4 = max3.reshape(ydim, xdimsc).T.copy()
            min5 = min4.reshape(xdimsc * ydimsc, ykern)
            mean5 = mean4.reshape(xdimsc * ydimsc, ykern)
            max5 = max4.reshape(xdimsc * ydimsc, ykern)
            stataxis = 1
        else:
            # The use of order='F' actually makes a copy,
            # so we don't gain anything with this Fortran order.
            min5 = min3.reshape(ydim, xdimsc).reshape(ykern, xdimsc * ydimsc, order='F')
            mean5 = mean3.reshape(ydim, xdimsc).reshape(ykern, xdimsc * ydimsc, order='F')
            max5 = max3.reshape(ydim, xdimsc).reshape(ykern, xdimsc * ydimsc, order='F')
            stataxis = 0
        # Statistics of every ykern successive pixels in a column;
        # the total size is now xkern*ykern times less than the original.
        min6 = min5.min(stataxis)
        mean6 = mean5.mean(stataxis)
        max6 = max5.max(stataxis)
        blueprint = numpy.ndarray((ydimsc, xdimsc, 3), 'uint8')
        if array.dtype == 'uint8':
            scale = 1
        elif array.dtype == 'uint16':
            scale = 256
        else:
            # Used to fall through and hit a NameError on `scale`;
            # fail with a clear message instead.
            raise ValueError('Only uint8 and uint16 arrays are supported')
        min7 = min6.reshape(xdimsc, ydimsc).T[:, :] // scale
        mean7 = mean6.reshape(xdimsc, ydimsc).T[:, :] // scale
        max7 = max6.reshape(xdimsc, ydimsc).T[:, :] // scale
        blueprint[:, :, 2] = (255 - (mean7 - min7) * hfg).clip(0, 255)
        blueprint[:, :, 1] = mean7
        blueprint[:, :, 0] = ((max7 - mean7) * hfg).clip(0, 255)
        return blueprint

    def blue_reduce_mono(array, kernel_width, kernel_height, hfgain):
        """
        array is supposed to be debayered
        """
        ydim, xdim = array.shape
        # round down and up to multiple of kernel widths and height
        xroundlow, xroundhigh = xdim // kernel_width * kernel_width, -(-xdim // kernel_width * kernel_width)
        yroundlow, yroundhigh = ydim // kernel_height * kernel_height, -(-ydim // kernel_height * kernel_height)
        if xroundlow == xroundhigh and yroundlow == yroundhigh:
            blue_print = get_blue_print_of_roi(array, kernel_width, kernel_height, hfgain)
        else:
            # Bug fix: the branches below used xkern/ykern from the enclosing
            # scope instead of this function's kernel parameters.
            xdimsc = xroundlow // kernel_width
            ydimsc = yroundlow // kernel_height
            if xroundlow == xroundhigh:
                blue_print = numpy.ndarray((ydimsc + 1, xdimsc, 3), 'uint8')
                blue_print[:ydimsc, :] = get_blue_print_of_roi(array[:yroundlow, :], kernel_width, kernel_height, hfgain)
                blue_print[ydimsc:, :] = get_blue_print_of_roi(array[yroundlow:, :], kernel_width, ydim - yroundlow, hfgain)
            elif yroundlow == yroundhigh:
                blue_print = numpy.ndarray((ydimsc, xdimsc + 1, 3), 'uint8')
                blue_print[:, :xdimsc] = get_blue_print_of_roi(array[:, :xroundlow], kernel_width, kernel_height, hfgain)
                blue_print[:, xdimsc:] = get_blue_print_of_roi(array[:, xroundlow:], xdim - xroundlow, kernel_height, hfgain)
            else:
                blue_print = numpy.ndarray((ydimsc + 1, xdimsc + 1, 3), 'uint8')
                blue_print[:ydimsc, :xdimsc] = get_blue_print_of_roi(array[:yroundlow, :xroundlow], kernel_width, kernel_height, hfgain)
                blue_print[:ydimsc, xdimsc:] = get_blue_print_of_roi(array[:yroundlow, xroundlow:], xdim - xroundlow, kernel_height, hfgain)
                blue_print[ydimsc:, :xdimsc] = get_blue_print_of_roi(array[yroundlow:, :xroundlow], kernel_width, ydim - yroundlow, hfgain)
                blue_print[ydimsc:, xdimsc:] = get_blue_print_of_roi(array[yroundlow:, xroundlow:], xdim - xroundlow, ydim - yroundlow, hfgain)
        return blue_print

    if hasattr(array, 'tondarray'):
        # Bug fix: this line used the undefined name `image` before.
        array = array.tondarray('uint16')  # reference to uint16 buffer of Bindex
    if not len(array.shape) == 2:
        raise AttributeError('Only images with 1 channel (mono) is supported')
    if not bayer:
        blue_print = blue_reduce_mono(array, xkern, ykern, hfg)
    else:
        # Reduce the four bayer sub-planes separately and interleave the
        # resulting blueprints again.
        blue_prints = []
        for i, j in ((0, 0), (1, 0), (0, 1), (1, 1)):
            blue_prints.append(blue_reduce_mono(array[i::2, j::2], xkern, ykern, hfg))
        blue_print_width = blue_prints[0].shape[1] + blue_prints[2].shape[1]
        blue_print_height = blue_prints[0].shape[0] + blue_prints[1].shape[0]
        blue_print = numpy.zeros((blue_print_height, blue_print_width, 3), 'uint8')
        substarts = [(0, 0), (1, 0), (0, 1), (1, 1)]
        for i, sub_blue_print in enumerate(blue_prints):
            j, k = substarts[i]
            blue_print[j::2, k::2] = sub_blue_print
    return blue_print
thocoo/gamma-desk | gdesk/panels/imgview/dimensions.py | <reponame>thocoo/gamma-desk
"""multiple dimension range and slicing
virtual sequence of numbers defined by start to stop by step
functions to
* modify the range by applying slices
* reset to full range
* get default python range object
* partial ranges for representations
start = first number; 0 by default
stop = all numbers are lower than stop, so stop itself is not included in the sequence
step = stepping of the sequence; 1 by default
maxstop = stop can never be set larger than maxstop
DimRange: single dimension
DimRanges: multiple dimensions
"""
class DimRange(object):
    """Range info of a single dimension.

    maxstop = the size of the dimension
    apply slices to a range to get a new range
    slices can be applied cumulatively
    range can be reset to the original range
    """
    def __init__(self, maxstop):
        # maxstop is the size of the dimension; stop can never exceed it
        self.maxstop = maxstop
        self.start = 0
        self.stop = maxstop
        self.step = 1
    def reset(self):
        # restore the full range (0 .. maxstop, step 1)
        self.start = 0
        self.stop = self.maxstop
        self.step = 1
    def clone(self):
        # new DimRange of the same size but with the default full range
        return DimRange(self.maxstop)
    def inherite(self, source):
        # take over start/stop/step from source (maxstop is left untouched)
        self.start = source.start
        self.stop = source.stop
        self.step = source.step
    def copy(self):
        # independent copy: same maxstop AND same current start/stop/step
        result = self.clone()
        result.inherite(self)
        return result
    def setfromslice(self, aslice, full=True):
        """slice the current range by the slice aslice

        a slice is e.g. created from item[0:10:2]
        or by slice(0,10,2)
        if full == True: don't build on the current roi
        """
        if full:
            # reset the roi before the new roi
            self.reset()
        prior_start = self.start
        prior_stop = self.stop
        prior_step = self.step
        if isinstance(aslice, int):
            # the slice is just one index
            if aslice < 0:
                # negative index counts back from the current stop
                aslice = max(prior_stop + prior_step * aslice, 0)
            aslice = slice(aslice, aslice+1, None)
        # map the new slice onto the prior range; negative bounds count
        # back from prior_stop, positive bounds step forward from prior_start
        if aslice.start is None:
            self.start = prior_start
        elif aslice.start < 0:
            self.start = max(prior_stop + prior_step * aslice.start, 0)
        else:
            self.start = min(prior_start + prior_step * aslice.start, prior_stop)
        if aslice.stop is None:
            self.stop = prior_stop
        elif aslice.stop < 0:
            self.stop = max(prior_stop + prior_step * aslice.stop, 0)
        else:
            self.stop = min(prior_start + prior_step * aslice.stop, prior_stop)
        if aslice.step is None:
            self.step = prior_step
        else:
            # steps accumulate multiplicatively
            self.step = prior_step * aslice.step
    def applyslice(self, aslice, full=False):
        """Apply aslice on top of (full=False) or instead of (full=True)
        the current range; thin wrapper around setfromslice."""
        self.setfromslice(aslice, full)
    def clip(self):
        # constrain start/stop to [0, maxstop]; out-of-window values
        # fall back to the full-range bounds
        if self.start < self.maxstop:
            self.start = max(self.start, 0)
        else:
            self.start = 0
        if self.stop > 0:
            self.stop = min(self.stop, self.maxstop)
        else:
            self.stop = self.maxstop
    def size(self):
        # number of elements in the current range
        return len(self)
    def __len__(self):
        return len(self.range)
    def isfullrange(self):
        # True when no slicing has been applied (or it has been reset)
        if (self.start == 0) and (self.stop == self.maxstop) and (self.step == 1):
            return True
        else:
            return False
    def getstartrange(self, count=7):
        # first `count // 2` values for abbreviated representations;
        # the full range when it has `count` elements or fewer
        half = count // 2
        if self.count > count:
            tmp = self.copy()
            tmp.applyslice(slice(None, half, None), False) # 2nd argument unnecessary
            return tmp.getrange()
        else:
            return self.getrange()
    def getstoprange(self, count=7):
        # last `count // 2` values for abbreviated representations;
        # None when the whole range already fits (asymmetric with
        # getstartrange — presumably by design, verify with callers)
        half = count // 2
        if self.count > count:
            tmp = self.copy()
            if (tmp.stop - tmp.start) % tmp.step != 0:
                # align stop on a step boundary so the tail slice starts
                # on an actual element of the sequence
                tmp.maxstop += tmp.step - (tmp.stop - tmp.start) % tmp.step
                tmp.stop = tmp.maxstop
            tmp.applyslice(slice(-half, None, None), False) # 2nd argument unnecessary
            return tmp.getrange()
        else:
            return None
    def __str__(self):
        return 'dim:%d slice:%d:%d:%d' % (self.maxstop, self.start, self.stop, self.step)
    def __repr__(self):
        return str(self)
    def __getitem__(self, index):
        # slicing a DimRange yields a new, further-restricted DimRange
        newrange = self.copy()
        newrange.applyslice(index, False) # 2nd argument is unnecessary
        return newrange
    def getslice(self):
        # current range as a python slice object
        return slice(self.start, self.stop, self.step)
    def getrange(self):
        # current range as a python range object
        return range(self.start, self.stop, self.step)
    def splitstarts(self, count):
        # at most `count` evenly spaced starting points within the range
        from math import ceil
        l = len(self)
        step = ceil(l / count)
        return self.range[::step]
    count = property(size)
    slice = property(getslice)
    range = property(getrange)
class SetSlices(object):
    """
    to set the roi, use the slices syntax on set
    example:
    b = pybix.ones(100,80) * 12000
    b.roi
    >>>[dim:100 slice:0:100:1, dim:80 slice:0:80:1]
    b.roi.set[20:60,::2]
    b.roi
    >>>[dim:100 slice:20:60:1, dim:80 slice:0:80:2]
    """
    # NOTE: the docstring above is returned verbatim by __repr__,
    # so it must stay as-is.

    def __init__(self, parent):
        # the owning DimRanges instance that will receive the slices
        self.parent = parent

    def __getitem__(self, indices):
        # translate subscript syntax (roi.set[...]) into a setslices() call
        self.parent.setslices(indices)

    def __repr__(self):
        return self.__doc__
class DimRanges(object):
    """Range info of multiple dimensions.

    sizes = tuple of the sizes of the dimensions
    """
    def __init__(self, sizes=None):
        # one DimRange per dimension
        self.rngs = [] if sizes is None else [DimRange(size) for size in sizes]
        # helper exposing the slice syntax: dimranges.set[...]
        self.set = SetSlices(self)
    def maketuple(self, index):
        """Wrap a single index into a 1-tuple; tuples pass through."""
        if isinstance(index, tuple):
            return index
        return (index,)
    def setslices(self, index):
        """Replace the roi of every dimension by the given slices."""
        slices = self.maketuple(index)
        for pos, rng in enumerate(self.rngs):
            rng.applyslice(slices[pos], full=True)
    def accslices(self, index):
        """Apply the given slices on top of the current roi."""
        for rng, slc in zip(self.rngs, self.maketuple(index)):
            rng.applyslice(slc, full=False)
    def getslices(self, swap_row_columns=False):
        """Return the per-dimension slice objects as a tuple."""
        slices = [rng.getslice() for rng in self.rngs]
        if swap_row_columns:
            # swap first and second index
            slices[0], slices[1] = slices[1], slices[0]
        return tuple(slices)
    def getshape(self):
        """Shape of the current roi."""
        return tuple(len(rng) for rng in self.rngs)
    shape = property(getshape)
    def getfullshape(self):
        """Shape of the unsliced dimensions."""
        return tuple(rng.maxstop for rng in self.rngs)
    fullshape = property(getfullshape)
    def reset(self):
        """Restore the full range on every dimension."""
        for rng in self.rngs:
            rng.reset()
    def clip(self):
        """Clip every dimension's range into its valid window."""
        for rng in self.rngs:
            rng.clip()
    def clone(self):
        # note: the clone is sized after the CURRENT shape, not fullshape
        return DimRanges(self.shape)
    def inherite(self, source):
        """Take over start/stop/step of all dimensions from source."""
        for own, other in zip(self.rngs, source.rngs):
            own.inherite(other)
    def copy(self):
        duplicate = self.clone()
        duplicate.inherite(self)
        return duplicate
    def isfullrange(self):
        """True when no dimension has been sliced."""
        return all(rng.isfullrange() for rng in self.rngs)
    def __len__(self):
        # total number of elements = product of the dimension lengths
        total = 1
        for rng in self.rngs:
            total *= len(rng)
        return total
    def __getitem__(self, indices):
        # slicing yields a new, further-restricted DimRanges
        duplicate = self.copy()
        duplicate.accslices(indices)
        return duplicate
    def __str__(self):
        return str(self.rngs)
    def __repr__(self):
        return str(self)
|
thocoo/gamma-desk | gdesk/panels/console/__init__.py | <reponame>thocoo/gamma-desk
from ... import config
if config.get('qapp', False):
from .consolepanel import MainThreadConsole, SubThreadConsole, ChildProcessConsole, ChildThreadConsole
from .consoleproxy import ConsoleGuiProxy |
thocoo/gamma-desk | gdesk/live/interactive.py | <filename>gdesk/live/interactive.py<gh_stars>0
import sys
import platform
import code
from pathlib import Path
def interact(workspace=None, banner=None, exitmsg=None, completer='standard'):
    """Run an interactive Python console in the given namespace.

    :param workspace: mapping used as the console namespace; when None the
        caller's globals (updated with its locals) are used
    :param banner: optional banner printed before the first prompt
    :param exitmsg: optional message printed on exit
    :param str completer: 'standard' for rlcompleter, 'key' for the local
        key completer
    :raises AttributeError: when completer is neither 'standard' nor 'key'
    """
    if completer not in ('standard', 'key'):
        # Bug fix: this raise used to sit inside the try-block below and was
        # silently swallowed by the bare except; validate up front instead.
        raise AttributeError(f'Invalid completer {completer}')

    if workspace is None:
        frame = sys._getframe(1)
        globs = frame.f_globals
        locs = frame.f_locals
        # intentionally updates the caller's globals in place
        workspace = globs
        workspace.update(locs)

    try:
        import readline

        if completer == 'standard':
            from rlcompleter import Completer
        else:  # completer == 'key'
            from .completer import Completer
            readline.set_completer_delims(' \t\n\\`@$><=;|&{(')

        readline.set_completer(Completer(workspace).complete)
        readline.parse_and_bind('tab:complete')
    except Exception:
        # Tab completion is best effort (e.g. readline missing on Windows);
        # Exception (not bare except) so KeyboardInterrupt still propagates.
        print('Could not start auto complete!')

    shell = code.InteractiveConsole(workspace)
    shell.interact(banner=banner, exitmsg=exitmsg)
thocoo/gamma-desk | gdesk/__init__.py | #-------------------------------------------------------------------------------
# Copyright 2021 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
"""Bench Desk"""
from .version import VERSION_INFO
from .core.conf import config, configure
from .core.gui_proxy import gui
from .live import use, using
# Application display name
PROGNAME = 'Gamma Desk'
# Online documentation locations
DOC_HTML = 'https://thocoo.github.io/gdesk-data/docs'
DOC_HTML_EXTRA = ['https://test.pypi.org/project/gamma-desk']
# Full release string: joins VERSION_INFO with '-' and turns the first two
# separators into dots, e.g. (0, 3, 2, 'rc1') -> '0.3.2-rc1'
__release__ = "-".join(map(str, VERSION_INFO)).replace("-", ".", 2)
# Short 'major.minor.micro' version string
__version__ = ".".join(map(str, VERSION_INFO[:3]))
# Module-level reference to the active shell; set via refer_shell_instance()
shell = None
def refer_shell_instance(shellinst):
    """Store *shellinst* in the module-level ``shell`` variable so other
    modules can reach the active shell via ``gdesk.shell``."""
    global shell
    shell = shellinst
PRHatte/Tracking-a-person-and-estimating-height-of-the-person | LogoDetecet.py | from scipy.spatial import distance as dist
import cv2
import numpy as np
# Detect a known logo in the webcam stream via SIFT features + FLANN
# matching, derive a body window from the logo position, and run Haar
# cascades for upper body / face / eyes inside that window.
detector=cv2.xfeatures2d.SIFT_create()
FLANN_INDEX_KDITREE=0
flannParam=dict(algorithm=FLANN_INDEX_KDITREE,tree=5)
flann=cv2.FlannBasedMatcher(flannParam,{})
# NOTE(review): absolute user-specific paths; these only work on the
# original author's machine.
people_cascade = cv2.CascadeClassifier("C:/Users/prh170230/Downloads/haarcascade_upperbody.xml")
face_cascade = cv2.CascadeClassifier("C:/Users/prh170230/Downloads/haarcascade_frontalface_alt.xml")
eye_cascade = cv2.CascadeClassifier("C:/Users/prh170230/Downloads/haarcascade_eye.xml")
# reference logo image, loaded as grayscale (flag 0)
LOGOImg=cv2.imread("C:/Users/prh170230/Downloads/logo.jpg",0)
trainKP,trainDesc=detector.detectAndCompute(LOGOImg,None)
# minimum number of good SIFT matches before we accept a logo detection
MIN_MATCH_COUNT=20
cam=cv2.VideoCapture(0)
while True:
    ret, QueryImgBGR=cam.read()
    QueryImg=cv2.cvtColor(QueryImgBGR,cv2.COLOR_BGR2GRAY)
    queryKP,queryDesc=detector.detectAndCompute(QueryImg,None)
    matches=flann.knnMatch(queryDesc,trainDesc,k=2)
    goodMatch=[]
    for m,n in matches:
        # Lowe's ratio test: keep only distinctive matches
        if(m.distance <0.75*n.distance):
            goodMatch.append(m)
    if(len(goodMatch) >MIN_MATCH_COUNT):
        tp=[]
        qp=[]
        for m in goodMatch:
            tp.append(trainKP[m.trainIdx].pt)
            qp.append(queryKP[m.queryIdx].pt)
        tp,qp=np.float32((tp,qp))
        # homography mapping the logo template into the camera frame
        H,status=cv2.findHomography(tp,qp,cv2.RANSAC,3.0)
        h,w=LOGOImg.shape
        trainBorder=np.float32([[[0,0],[0,h-1],[w-1,h-1],[w-1,0]]])
        queryBorder=cv2.perspectiveTransform(trainBorder,H)
        #cv2.polylines(QueryImgBGR,[np.int32(queryBorder)],True,(0,255,0),5)
        # NOTE(review): `x` here is the projected logo corners; it is
        # shadowed by the (x,y,w,h) loop variables further down.
        x=[np.int32(queryBorder)]
        cv2.rectangle(QueryImgBGR,tuple(x[0][0][0]),tuple(x[0][0][2]),(0,0,255),3)
        # logo bounding box corners: (a,b) top-left, (c,d) bottom-right
        a=x[0][0][0][0]
        b=x[0][0][0][1]
        c=x[0][0][2][0]
        d=x[0][0][2][1]
        # derive a widened search window around the logo position;
        # the exact geometry constants look empirical — verify with author
        mid=int((a+c)/2)
        wid1=int((3*a-mid)/2)
        wid2=int(2*mid-wid1)
        u=(x[0][0][1][1]*1.4-x[0][0][0][1])/0.4
        #cv2.rectangle(QueryImgBGR,(a,2*b-d),(c,int(u)),(255,0,0),3)
        cv2.rectangle(QueryImgBGR,(wid1,2*b-d),(wid2,int(u)),(255,0,0),3)
        #window = QueryImgBGR[2*b-d:int(u),wid1:wid2]
        face= face_cascade.detectMultiScale(QueryImgBGR,1.3,5)
        eyes= eye_cascade.detectMultiScale(QueryImgBGR,1.3,5)
        people=people_cascade.detectMultiScale(QueryImgBGR,1.3,5)
        for(p,q,r,s) in people:
            #cv2.rectangle(QueryImgBGR,(p,q),(p+r,q+s),(0,0,255),2)
            # keep only bodies whose horizontal center lies inside the logo box
            if c>((2*p+r)/2)>a:
                cv2.rectangle(QueryImgBGR,(p,q),(p+r,q+s),(0,0,255),2)
        for (x,y,w,h) in face:
            # keep only faces inside the widened window
            if(x > wid1 and x+w < wid2):
                cv2.rectangle(QueryImgBGR,(x,y),(x+w,y+h),(0,255,0),2)
        for (x,y,w,h) in eyes:
            if(x > wid1 and x+w < wid2):
                cv2.rectangle(QueryImgBGR,(x,y),(x+w,y+h),(255,0,0),2)
    cv2.imshow('result',QueryImgBGR)
    # quit on 'q'
    if cv2.waitKey(10)==ord('q'):
        break
cam.release()
cv2.destroyAllWindows()
PRHatte/Tracking-a-person-and-estimating-height-of-the-person | part2.py | from scipy.spatial import distance as dist
import cv2
import numpy as np
# Detect a reference object (a book) via SIFT + FLANN matching and estimate
# the height of a person from the ratio between the detected face height and
# the projected book height.
detector=cv2.xfeatures2d.SIFT_create()
FLANN_INDEX_KDITREE=0
flannParam=dict(algorithm=FLANN_INDEX_KDITREE,tree=5)
flann=cv2.FlannBasedMatcher(flannParam,{})
# Bug fix: the upperbody cascade path said 'prh070230'; every other data
# file in this project lives under 'prh170230'.
people_cascade = cv2.CascadeClassifier("C:/Users/prh170230/Downloads/haarcascade_upperbody.xml")
face_cascade = cv2.CascadeClassifier("C:/Users/prh170230/Downloads/haarcascade_frontalface_alt.xml")
eye_cascade = cv2.CascadeClassifier("C:/Users/prh170230/Downloads/haarcascade_eye.xml")
# reference book image, loaded as grayscale (flag 0)
LOGOImg=cv2.imread("C:/Users/prh170230/Downloads/book.jpg",0)
trainKP,trainDesc=detector.detectAndCompute(LOGOImg,None)
MIN_MATCH_COUNT=20
cam=cv2.VideoCapture(0)
# Bug fix: f_height stays None until a face has been measured at least once;
# previously the final computation raised a NameError when no face was seen.
f_height = None
while True:
    ret, QueryImgBGR=cam.read()
    QueryImg=cv2.cvtColor(QueryImgBGR,cv2.COLOR_BGR2GRAY)
    queryKP,queryDesc=detector.detectAndCompute(QueryImg,None)
    matches=flann.knnMatch(queryDesc,trainDesc,k=2)
    goodMatch=[]
    for m,n in matches:
        # Lowe's ratio test: keep only distinctive matches
        if(m.distance <0.75*n.distance):
            goodMatch.append(m)
    if(len(goodMatch) >MIN_MATCH_COUNT):
        tp=[]
        qp=[]
        for m in goodMatch:
            tp.append(trainKP[m.trainIdx].pt)
            qp.append(queryKP[m.queryIdx].pt)
        tp,qp=np.float32((tp,qp))
        H,status=cv2.findHomography(tp,qp,cv2.RANSAC,3.0)
        h,w=LOGOImg.shape
        trainBorder=np.float32([[[0,0],[0,h-1],[w-1,h-1],[w-1,0]]])
        queryBorder=cv2.perspectiveTransform(trainBorder,H)
        cv2.polylines(QueryImgBGR,[np.int32(queryBorder)],True,(0,255,0),5)
        face = face_cascade.detectMultiScale(QueryImgBGR,1.3,5)
        # height of the projected book in the query image (pixels)
        h_book=queryBorder[0][1][1]-queryBorder[0][0][1]
        for (x,y,w,h) in face:
            cv2.rectangle(QueryImgBGR,(x,y),(x+w,y+h),(255,0,0),2)
            # face height relative to the book; 22 is presumably the
            # real-world book height used for calibration
            r_f_b=h/h_book
            f_height=r_f_b*22
    cv2.imshow('result',QueryImgBGR)
    if cv2.waitKey(10)==ord('q'):
        break
if f_height is not None:
    # assumed face-height to person-height ratio (15/160)
    r_f_h=15/160
    person_height=f_height/r_f_h
    print(person_height)
else:
    print('No face detected; cannot estimate height')
cam.release()
cv2.destroyAllWindows()
unartig/cdo-bindings | python/cdo.py | <reponame>unartig/cdo-bindings
import os
import re
import subprocess
import tempfile
import random
import glob
import signal
from pkg_resources import parse_version
from io import StringIO
import logging as pyLog
import six
# workaround for python2/3 string handling {{{
try:
from string import strip
except ImportError:
strip = str.strip
#}}}
# Copyright 2011-2019 <NAME>, <EMAIL>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
CDO_PY_VERSION = "1.5.3rc1"
# build interactive documentation: help(cdo.sinfo) {{{
def auto_doc(tool, path2cdo):
    """Decorator factory: set the decorated function's __doc__ to the
    output of ``cdo -h <tool>``.

    use like this:
    c = cdo.Cdo()
    help(c.sinfov)"""
    def decorate(target):
        target.__doc__ = operator_doc(tool, path2cdo)
        return target
    return decorate
#}}}
def operator_doc(tool, path2cdo):
    """Return the help text for a cdo operator by running ``<cdo> -h <tool>``."""
    helpCall = subprocess.Popen('%s -h %s ' % (path2cdo, tool),
                                shell=True,
                                stderr=subprocess.PIPE,
                                stdout=subprocess.PIPE)
    stdout, _stderr = helpCall.communicate()
    return stdout.decode("utf-8")
# some helper functions without side effects {{{
def getCdoVersion(path2cdo, verbose=False):
    """Return the version string reported by ``<cdo> -V``.

    With verbose=True the complete '-V' output (stderr) is returned
    instead of just the parsed version number."""
    proc = subprocess.Popen([path2cdo, '-V'], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    _stdout, stderr = proc.communicate()
    cdo_help = stderr.decode("utf-8")
    if verbose:
        return cdo_help
    # cdo prints its banner on stderr
    match = re.search("Climate Data Operators version (\d.*) .*", cdo_help)
    return match.group(1)
def setupLogging(logFile):
    """Create and return a logger that writes INFO records to logFile.

    logFile may be a filename (string) or a writable stream object."""
    log = pyLog.getLogger(__name__)
    log.setLevel(pyLog.INFO)
    if isinstance(logFile, six.string_types):
        target = pyLog.FileHandler(logFile)
    else:
        target = pyLog.StreamHandler(stream=logFile)
    target.setFormatter(pyLog.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
    log.addHandler(target)
    return log
#}}}
# extra execptions for CDO {{{
class CDOException(Exception):
    """Raised when a cdo call exits with an unacceptable return code.

    Carries the stdout, stderr and return code of the failed call."""

    def __init__(self, stdout, stderr, returncode):
        super(CDOException, self).__init__()
        self.stdout = stdout
        self.stderr = stderr
        self.returncode = returncode
        self.msg = '(returncode:{0}) {1}'.format(returncode, stderr)

    def __str__(self):
        return self.msg
# MAIN Cdo class {{{
class Cdo(object):
# fallback operator lists {{{
NoOutputOperators = 'cdiread cmor codetab conv_cmor_table diff diffc diffn \
diffp diffv dump_cmor_table dumpmap filedes gmtcells gmtxyz gradsdes griddes \
griddes2 gridverify info infoc infon infop infos infov map ncode ndate \
ngridpoints ngrids nlevel nmon npar ntime nvar nyear output outputarr \
outputbounds outputboundscpt outputcenter outputcenter2 outputcentercpt \
outputext outputf outputfld outputint outputkey outputsrv outputtab outputtri \
outputts outputvector outputvrml outputxyz pardes partab partab2 seinfo \
seinfoc seinfon seinfop showattribute showatts showattsglob showattsvar \
showcode showdate showformat showgrid showlevel showltype showmon showname \
showparam showstdname showtime showtimestamp showunit showvar showyear sinfo \
sinfoc sinfon sinfop sinfov spartab specinfo tinfo vardes vct vct2 verifygrid \
vlist xinfon zaxisdes'.split()
TwoOutputOperators = 'trend samplegridicon mrotuv eoftime \
eofspatial eof3dtime eof3dspatial eof3d eof complextorect complextopol'.split()
MoreOutputOperators = 'distgrid eofcoeff eofcoeff3d intyear scatter splitcode \
splitday splitgrid splithour splitlevel splitmon splitname splitparam splitrec \
splitseas splitsel splittabnum splitvar splityear splityearmon splitzaxis'.split()
AliasOperators = {'seq':'for'}
#}}}
name = ''
    def __init__(self,
                 cdo='cdo',
                 returnNoneOnError=False,
                 forceOutput=True,
                 env=os.environ,
                 debug=False,
                 tempdir=tempfile.gettempdir(),
                 logging=False,
                 logFile=StringIO(),
                 cmd=[],
                 options=[]):
        """Wrap a cdo binary.

        :param cdo: path of the cdo executable; the CDO environment variable
            takes precedence when it points to an existing file
        :param returnNoneOnError: return None instead of raising CDOException
        :param forceOutput: overwrite existing output files by default
        :param env: base environment for the subprocess calls
        :param debug: verbose call tracing (also enabled by a DEBUG env var)
        :param tempdir: directory used for temporary output files
        :param logging: write an info record per call to logFile
        :param logFile: log file name or writable stream
        :param cmd: operator tokens collected so far (internal, for chaining)
        :param options: cdo options collected so far (internal, for chaining)

        NOTE(review): cmd/options are mutable default arguments; the class
        rebinds rather than mutates them (see add_option), so this appears
        benign — confirm before changing.
        """
        if 'CDO' in os.environ and os.path.isfile(os.environ['CDO']):
            self.CDO = os.environ['CDO']
        else:
            self.CDO = cdo
        self._cmd = cmd
        self._options = options
        self.operators = self.__getOperators()
        # operators that print to stdout instead of writing an output file
        self.noOutputOperators = [op for op in self.operators.keys() if 0 == self.operators[op]]
        self.returnNoneOnError = returnNoneOnError
        self.tempStore = CdoTempfileStore(dir = tempdir)
        self.forceOutput = forceOutput
        self.env = env
        self.debug = True if 'DEBUG' in os.environ else debug
        self.libs = self.getSupportedLibs()
        # optional IO libraries for additional return types {{{
        self.hasNetcdf = False
        self.hasXarray = False
        self.cdf = None
        self.xa_open = None
        self.__loadOptionalLibs()
        self.logging = logging # internal logging {{{
        self.logFile = logFile
        if (self.logging):
            self.logger = setupLogging(self.logFile) # }}}
        # handling different exits from interactive sessions {{{
        # remove tempfiles from those sessions
        signal.signal(signal.SIGINT, self.__catch__)
        signal.signal(signal.SIGTERM, self.__catch__)
        signal.signal(signal.SIGSEGV, self.__catch__)
        signal.siginterrupt(signal.SIGINT, False)
        signal.siginterrupt(signal.SIGTERM, False)
        signal.siginterrupt(signal.SIGSEGV, False)
        # other left-overs can only be handled afterwards
        # might be good to use the tempdir keyword to ease this, but deletion can
        # be triggered using cleanTempDir() }}}
    def __get__(self, instance, owner):
        # Descriptor protocol: Operator objects are installed as class
        # attributes by __getattr__; reading one from a Cdo instance returns
        # a clone whose _cmd carries an extra '-<name>' token, which is what
        # makes operator chaining work.
        if instance is None:
            return self
        name = self.name
        # CDO (version 1.9.6 and older) has an operator called 'for', which cannot
        # called with 'cdo.for()' because 'for' is a keyword in python. 'for' is
        # renamed to 'seq' in 1.9.7.
        # This workaround translates all calls of 'seq' into for in case of
        # versions prior tp 1.9.7
        if name in self.AliasOperators.keys() and \
           ( parse_version(getCdoVersion(self.CDO)) < parse_version('1.9.7') ):
            name = self.AliasOperators[name]
        return self.__class__(
            instance.CDO,
            instance.returnNoneOnError,
            instance.forceOutput,
            instance.env,
            instance.debug,
            instance.tempStore.dir,
            instance.logging,
            instance.logFile,
            instance._cmd + ['-' + name],
            instance._options)
# from 1.9.6 onwards CDO returns 1 of diff* finds a difference
def __exit_success(self,operatorName):
if ( parse_version(getCdoVersion(self.CDO)) < parse_version('1.9.6') ):
return 0
if ( 'diff' != operatorName[0:4] ):
return 0
return 1
    # retrieve the list of operators from the CDO binary plus info out number of
    # output streams
    def __getOperators(self): # {{{
        """Build a dict mapping operator name -> number of output streams.

        1 = one output file, 0 = prints to stdout, 2 = two output files,
        -1 = variable number of outputs. The discovery strategy depends on
        the cdo version."""
        operators = {}
        version = parse_version(getCdoVersion(self.CDO))
        if (version < parse_version('1.7.2')):
            # old cdo: scrape operator names from 'cdo -h' (stderr) and rely
            # on the hardcoded fallback lists for the stream counts
            proc = subprocess.Popen([self.CDO, '-h'], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
            ret = proc.communicate()
            l = ret[1].decode("utf-8").find("Operators:")
            ops = ret[1].decode("utf-8")[l:-1].split(os.linesep)[1:-1]
            endI = ops.index('')
            s = ' '.join(ops[:endI]).strip()
            s = re.sub("\s+", " ", s)
            for op in list(set(s.split(" "))):
                operators[op] = 1
                if op in self.NoOutputOperators:
                    operators[op] = 0
                if op in self.TwoOutputOperators:
                    operators[op] = 2
                if op in self.MoreOutputOperators:
                    operators[op] = -1
        elif (version < parse_version('1.8.0') or parse_version('1.9.0') == version):
            # '--operators' lists the names; stream counts still come from
            # the fallback lists
            proc = subprocess.Popen([self.CDO, '--operators'], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
            ret = proc.communicate()
            ops = list(map(lambda x: x.split(' ')[0], ret[0].decode("utf-8")[0:-1].split(os.linesep)))
            for op in ops:
                operators[op] = 1
                if op in self.NoOutputOperators:
                    operators[op] = 0
                if op in self.TwoOutputOperators:
                    operators[op] = 2
                if op in self.MoreOutputOperators:
                    operators[op] = -1
        elif (version < parse_version('1.9.3')):
            # '--operators_no_output' is additionally available
            proc = subprocess.Popen([self.CDO, '--operators'], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
            ret = proc.communicate()
            ops = list(map(lambda x: x.split(' ')[0], ret[0].decode("utf-8")[0:-1].split(os.linesep)))
            proc = subprocess.Popen([self.CDO, '--operators_no_output'], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
            ret = proc.communicate()
            opsNoOutput = list(map(lambda x: x.split(' ')[0], ret[0].decode("utf-8")[0:-1].split(os.linesep)))
            for op in ops:
                operators[op] = 1
                if op in opsNoOutput:
                    operators[op] = 0
                if op in self.TwoOutputOperators:
                    operators[op] = 2
                if op in self.MoreOutputOperators:
                    operators[op] = -1
        else:
            # modern cdo: '--operators' reports the stream counts itself;
            # the last token looks like '(i|o)' -> take the number after '|'
            proc = subprocess.Popen([self.CDO, '--operators'], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
            ret = proc.communicate()
            ops = list(map(lambda x: x.split(' ')[0], ret[0].decode("utf-8")[0:-1].split(os.linesep)))
            ios = list(map(lambda x: x.split(' ')[-1], ret[0].decode("utf-8")[0:-1].split(os.linesep)))
            for i, op in enumerate(ops):
                operators[op] = int(ios[i][1:len(ios[i]) - 1].split('|')[1])
        return operators # }}}
    # execute a single CDO command line {{{
    def __call(self, cmd, envOfCall={}):
        """Run the assembled command line in a shell subprocess.

        :param cmd: list of command line tokens, joined with spaces
        :param envOfCall: extra environment entries layered over self.env
        :return: dict with 'stdout', 'stderr' and 'returncode'
        """
        # skip logging for the implicit '-h' calls used for __doc__ creation
        if self.logging and '-h' != cmd[1]:
            self.logger.info(u' '.join(cmd))
        env = dict(self.env)
        env.update(envOfCall)
        proc = subprocess.Popen(' '.join(cmd),
                                shell=True,
                                stderr=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                env=env)
        retvals = proc.communicate()
        stdout = retvals[0].decode("utf-8")
        stderr = retvals[1].decode("utf-8")
        if self.debug: # debug printing {{{
            print('# DEBUG - start =============================================================')
            # if {} != env:
            #     for k,v in list(env.items()):
            #         print("ENV: " + k + " = " + v)
            print('CALL :' + ' '.join(cmd))
            print('STDOUT:')
            if (0 != len(stdout.strip())):
                print(stdout)
            print('STDERR:')
            if (0 != len(stderr.strip())):
                print(stderr)
            print('# DEBUG - end ===============================================================') # }}}
        return {"stdout": stdout, "stderr": stderr, "returncode": proc.returncode} # }}}
# error handling for CDO calls
def __hasError(self, method_name, cmd, retvals): # {{{
if (self.debug):
print("RETURNCODE:" + retvals["returncode"].__str__())
if ( self.__exit_success(method_name) < retvals["returncode"] ):
print("Error in calling operator " + method_name + " with:")
print(">>> " + ' '.join(cmd) + "<<<")
print('STDOUT:' + retvals["stdout"])
print('STDERR:' + retvals["stderr"])
if self.logging:
self.logger.error(cmd + " with:" + retvals["stderr"])
return True
else:
return False # }}}
    # {{{ attempt to load optional libraries: netcdf-IO + XArray
    # numpy is a dependency of both, so no need to check that
    def __loadOptionalLibs(self):
        """Try to import xarray and netCDF4 and remember what is available
        (self.hasXarray / self.hasNetcdf plus the open functions)."""
        try:
            import xarray
            self.hasXarray = True
            self.xa_open = xarray.open_dataset
        except:
            print("-->> Could not load xarray!! <<--")
        try:
            from netCDF4 import Dataset as cdf
            self.cdf = cdf
            self.hasNetcdf = True
            import numpy as np
            self.np = np
        except:
            print("-->> Could not load netCDF4! <<--") #}}}
    def infile(self, *infiles):
        """Append input files to the pending command; returns self (fluent).

        Strings are appended as-is; xarray Datasets are first dumped to a
        temporary netCDF file. Any other argument type is silently ignored
        — presumably by design, verify with callers.
        """
        for infile in infiles:
            if isinstance(infile, six.string_types):
                self._cmd.append(infile)
            elif self.hasXarray:
                import xarray #<<-- python2 workaround
                if (type(infile) == xarray.core.dataset.Dataset):
                    # create a temp nc file from input data
                    tmpfile = self.tempStore.newFile()
                    infile.to_netcdf(tmpfile)
                    self._cmd.append(tmpfile)
        return self
    def add_option(self, *options):
        """Append cdo command line options; returns self (fluent).

        Rebinds (rather than extends) _options so a list that was shared at
        construction time is never mutated in place."""
        self._options = self._options + list(options)
        return self
    def __call__(self, *args, **kwargs):
        """Assemble and execute the collected cdo command chain.

        Positional args become the parameters of the last operator.
        Recognized keyword arguments: input, output, options, force, env,
        autoSplit, compute, keep, and the return* switches (returnArray,
        returnMaArray, returnXArray, returnXDataset, returnCdf) which force
        netCDF output and return data instead of filenames.

        :return: stdout lines for printing operators, otherwise output
            filename(s) or the requested data object
        :raises CDOException: when cdo fails and returnNoneOnError is False
        """
        user_kwargs = kwargs.copy()
        try:
            method_name = self._cmd[0][1:].split(',')[0]
        except IndexError:
            method_name = ''
        operatorPrintsOut = method_name in self.noOutputOperators
        self.envByCall = {}
        # Build the cdo command
        # 0. the cdo command itself
        cmd = [self.CDO]
        # 1. OVERWRITE EXISTING FILES
        cmd.append('-O')
        cmd.extend(self._options)
        # 2. set the options
        # switch to netcdf output in case of numpy/xarray usage
        if ( None != kwargs.get('returnArray')
                or None != kwargs.get('returnMaArray')
                or None != kwargs.get('returnXArray')
                or None != kwargs.get('returnXDataset')
                or None != kwargs.get('returnCdf')):
            cmd.append('-f nc')
        if 'options' in kwargs:
            cmd += kwargs['options'].split()
        # 3. add operators
        # collect operator parameters and pad them to the operator name
        if len(args) != 0:
            self._cmd[-1] += ',' + ','.join(map(str, args))
        if self._cmd:
            cmd.extend(self._cmd)
        # 4. input files or other operators
        if 'input' in kwargs:
            if isinstance(kwargs["input"], six.string_types):
                cmd.append(kwargs["input"])
            elif type(kwargs["input"]) == list:
                cmd.append(' '.join(kwargs["input"]))
            elif self.hasXarray:
                import xarray #<<-- python2 workaround
                if (type(kwargs["input"]) == xarray.core.dataset.Dataset):
                    # create a temp nc file from input data
                    tmpfile = self.tempStore.newFile()
                    kwargs["input"].to_netcdf(tmpfile)
                    kwargs["input"] = tmpfile
                cmd.append(kwargs["input"])
            else:
                # we assume it's either a list, a tuple or any iterable.
                cmd.append(kwargs["input"])
        # 5. handle rewrite of existing output files
        if not kwargs.__contains__("force"):
            kwargs["force"] = self.forceOutput
        # 6. handle environment setup per call
        envOfCall = {}
        if kwargs.__contains__("env"):
            for k, v in kwargs["env"].items():
                envOfCall[k] = v
        # 7. output handling: use given outputs or create temporary files
        outputs = []
        # collect the given output
        if None != kwargs.get("output"):
            outputs.append(kwargs["output"])
        if not user_kwargs or not kwargs.get('compute', True):
            # nothing to execute yet: return self so chaining can continue
            return self
        elif not kwargs.get('keep', True):
            self._cmd.clear()
        if operatorPrintsOut:
            # printing operators: capture stdout and return it as lines
            retvals = self.__call(cmd, envOfCall)
            if (not self.__hasError(method_name, cmd, retvals)):
                r = list(map(strip, retvals["stdout"].split(os.linesep)))
                if "autoSplit" in kwargs:
                    splitString = kwargs["autoSplit"]
                    _output = [x.split(splitString) for x in r[:len(r) - 1]]
                    if (1 == len(_output)):
                        return _output[0]
                    else:
                        return _output
                else:
                    return r[:len(r) - 1]
            else:
                if self.returnNoneOnError:
                    return None
                else:
                    raise CDOException(**retvals)
        else:
            if kwargs["force"] or \
               (kwargs.__contains__("output") and not os.path.isfile(kwargs["output"])):
                if not kwargs.__contains__("output") or None == kwargs["output"]:
                    # create one tempfile per expected output stream
                    for i in range(0, self.operators[method_name]):
                        outputs.append(self.tempStore.newFile())
                cmd.append(' '.join(outputs))
                retvals = self.__call(cmd, envOfCall)
                if self.__hasError(method_name, cmd, retvals):
                    if self.returnNoneOnError:
                        return None
                    else:
                        raise CDOException(**retvals)
            else:
                if self.debug:
                    print(("Use existing file'" + kwargs["output"] + "'"))
        # defaults for file handles as return values
        if not kwargs.__contains__("returnCdf"):
            kwargs["returnCdf"] = False
        if not kwargs.__contains__("returnXDataset"):
            kwargs["returnXDataset"] = False
        # return data arrays
        if None != kwargs.get("returnArray"):
            return self.readArray(outputs[0], kwargs["returnArray"])
        elif None != kwargs.get("returnMaArray"):
            return self.readMaArray(outputs[0], kwargs["returnMaArray"])
        elif None != kwargs.get("returnXArray"):
            return self.readXArray(outputs[0], kwargs.get("returnXArray"))
        # return files handles (or lists of them)
        elif kwargs["returnCdf"]:
            if 1 == len(outputs):
                return self.readCdf(outputs[0])
            else:
                return [self.readCdf(file) for file in outputs]
        elif kwargs["returnXDataset"]:
            if 1 == len(outputs):
                return self.readXDataset(outputs[0])
            else:
                return [self.readXDataset(file) for file in outputs]
        # handle split-operator outputs
        elif ('split' == method_name[0:5]):
            return glob.glob(kwargs["output"] + '*')
        # default: return filename (given or tempfile)
        else:
            if (1 == len(outputs)):
                return outputs[0]
            else:
                return outputs
def __getattr__(self, method_name):  # main method-call handling for Cdo-objects {{{
    """Resolve unknown attribute names to CDO operator methods.

    Only invoked when normal attribute lookup fails.  Known operators
    (or aliases) are wrapped in a one-off ``Operator`` subclass whose
    instance is cached on the class, so subsequent lookups bypass
    ``__getattr__`` entirely.

    Raises:
        AttributeError: if *method_name* matches no operator at all.

    NOTE(review): when *method_name* only partially matches some operator
    names (the "autocompletion" branch below), the method falls through
    and implicitly returns ``None`` -- presumably intentional, but confirm.
    """
    if ((method_name in self.__dict__) or (method_name in list(self.operators.keys()))
            or (method_name in self.AliasOperators)):
        if self.debug:
            print(("Found method:" + method_name))
        # cache the method for later
        class Operator(self.__class__):
            __doc__ = operator_doc(method_name, self.CDO)
            name = method_name
        setattr(self.__class__, method_name, Operator())
        return getattr(self, method_name)
    else:
        # given method might match part of know operators: autocompletion
        if (len(list(filter(lambda x: re.search(method_name, x), list(self.operators.keys())))) == 0):
            # If the method isn't in our dictionary, act normal.
            raise AttributeError("Unknown method '" + method_name + "'!")
    # }}}
def getSupportedLibs(self, force=False):
    """Return a dict describing features and libraries of the CDO binary.

    Parses the stderr of ``cdo -V``: feature names map to ``True``,
    libraries with a reported version map to their version string.

    Args:
        force: unused; kept for interface compatibility.
    """
    proc = subprocess.Popen(self.CDO + ' -V',
                            shell=True,
                            stderr=subprocess.PIPE,
                            stdout=subprocess.PIPE)
    retvals = proc.communicate()
    # version info is printed on stderr; the 'with/Features:' line lists features
    stderr_text = retvals[1].decode("utf-8")
    withs = list(re.findall('(with|Features): (.*)', stderr_text)[0])[1].split(' ')
    # entries like 'a/b' name several features at once: flatten them
    allWiths = []
    for entry in withs:
        if '/' in entry:
            allWiths.extend(entry.split('/'))
        else:
            allWiths.append(entry)
    withs = allWiths
    # bug fix: the patterns previously used non-raw strings ('\/', '(\w+)...'),
    # i.e. invalid escape sequences that raise warnings on modern Python
    libs = re.findall(r'(\w+) library version : (\d+\.\S+) ', stderr_text)
    libraries = {}
    for w in withs:
        libraries[w.lower()] = True
    for lib in libs:
        name, version = lib
        libraries[name.lower()] = version
    return libraries
def collectLogs(self):
    """Return all collected log output as a single string.

    ``self.logFile`` is either a filename (read from disk) or an
    in-memory stream (flushed, then its buffer returned).
    """
    if isinstance(self.logFile, six.string_types):
        with open(self.logFile, 'r') as handle:
            return handle.read()
    else:
        self.logFile.flush()
        return self.logFile.getvalue()
def showLog(self):
    """Print all collected log output to stdout."""
    log_text = self.collectLogs()
    print(log_text)
# check if the current (or given) CDO binary works
def hasCdo(self, path=None):
    """Return True if *path* (default: the configured CDO binary) is usable.

    Usable means either 'cdo -V' exits with status 0 or the path points
    to an executable regular file.
    """
    if path is None:
        path = self.CDO
    cmd = [path, " -V", '>/dev/null 2>&1']
    runs_ok = (0 == self.__call(cmd)["returncode"])
    is_executable_file = os.path.isfile(path) and os.access(path, os.X_OK)
    return runs_ok or is_executable_file
# selfcheck for the current CDO binary
def check(self):
    """Return True if the current CDO binary is present and working."""
    if self.hasCdo():
        if self.debug:
            # show the full version output while debugging
            print(self.__call([self.CDO, ' -V']))
        return True
    return False
# change the CDO binary for the current object
def setCdo(self, value):
    """Point this object at another CDO binary and re-read its operators."""
    self.CDO = value
    self.operators = self.__getOperators()
# return the path to the CDO binary currently used
def getCdo(self):
    """Return the path of the CDO binary currently in use."""
    return self.CDO
def hasLib(self, lib):
    """Return True if the CDO binary was built with support for *lib*."""
    return lib in self.libs
def libsVersion(self, lib):
    """Return the version string of a supported library.

    Args:
        lib: library name, e.g. 'netcdf'.

    Returns:
        The version string, or False (after printing a notice) when the
        library is supported but no version information is available.

    Raises:
        AttributeError: if the CDO binary has no support for *lib*.
    """
    if not self.hasLib(lib):
        # bug fix: the message used Ruby-style '#{lib}' interpolation and
        # therefore never contained the actual library name
        raise AttributeError("Cdo does NOT have support for '%s'" % lib)
    if self.libs[lib] != True:
        return self.libs[lib]
    print("No version information available about '" + lib + "'")
    return False
def cleanTempDir(self):
    """Remove leftover cdo.py temp files owned by the current user."""
    self.tempStore.cleanTempDir()
# if a termination signal could be caught, remove tempfile
def __catch__(self, signum, frame):
    """Signal handler: drop temporary files before the process dies.

    *signum*/*frame* follow the ``signal`` module's handler signature.
    """
    self.tempStore.__del__()
    print("caught signal", self, signum, frame)
# make use of internal documentation structure of python
def __dir__(self):
    """Extend attribute listing with all known CDO operator names."""
    res = dir(type(self)) + list(self.__dict__.keys())
    res.extend(list(self.operators.keys()))
    return res
# ==================================================================
# Addional operators:
# ------------------------------------------------------------------
def version(self, verbose=False):
    """Return the version of the CDO binary (optionally verbose)."""
    # return CDO's version
    return getCdoVersion(self.CDO, verbose)
def boundaryLevels(self, **kwargs):
    """Compute layer boundaries from the level midpoints of a file.

    Reads the levels of ``kwargs['input']`` via ``showlevel`` and mirrors
    each midpoint around the previous boundary, starting at 0.
    """
    midpoints = [float(v) for v in self.showlevel(input=kwargs['input'])[0].split()]
    bounds = [0]
    for mid in midpoints:
        # the next boundary mirrors the midpoint around the previous boundary
        bounds.append(bounds[-1] + 2 * (mid - bounds[-1]))
    return bounds
def thicknessOfLevels(self, **kwargs):
    """Return the thickness of each layer derived from boundaryLevels."""
    bounds = self.boundaryLevels(**kwargs)
    # consecutive differences of the boundary levels
    return [upper - lower for lower, upper in zip(bounds, bounds[1:])]
def run(self, output=None):
    """Execute the queued operator chain, optionally writing to *output*."""
    call_kwargs = {"compute": True}
    if output:
        call_kwargs["output"] = output
    return self(**call_kwargs)
def readCdf(self, iFile=None):
    """Return a cdf handle created by the available cdf library"""
    if iFile is None:
        iFile = self.run()
    if not self.hasNetcdf:
        print("Could not import data from file '%s' (python-netCDF4)" % iFile)
        six.raise_from(ImportError, None)
    return self.cdf(iFile, mode='r')
def readArray(self, iFile=None, varname=None):
    """Directly return a numpy array for a given variable name.

    Args:
        iFile: input file; when None the queued operator chain is run.
        varname: name of the variable to extract (required).

    Raises:
        ValueError: if *varname* is not given.
        LookupError: if the variable is missing from the file.
    """
    if iFile is None:
        iFile = self.run()
    if varname is None:
        raise ValueError("A varname needs to be specified!")
    filehandle = self.readCdf(iFile)
    try:
        # return the data array for given variable name
        return filehandle.variables[varname][:].copy()
    except Exception:
        # bug fix: a bare 'except:' also swallowed KeyboardInterrupt/SystemExit
        print("Cannot find variable '%s'" % varname)
        six.raise_from(LookupError, None)
def readMaArray(self, iFile=None, varname=None):  # {{{
    """Create a masked array based on cdf's FillValue.

    Args:
        iFile: input file; when None the queued operator chain is run.
        varname: variable to read (required).

    Returns:
        A numpy masked array, masked where the data equals the variable's
        ``_FillValue``; with an all-False mask when no fill value exists.

    Raises:
        ValueError: if *varname* is not given.
        LookupError: if the variable is missing from the file.
    """
    if iFile is None:
        iFile = self.run()
    if varname is None:
        raise ValueError("A varname needs to be specified!")
    fileObj = self.readCdf(iFile)
    if not varname in fileObj.variables:
        print("Cannot find variables '%s'" % varname)
        six.raise_from(LookupError,None)
    else:
        data = fileObj.variables[varname][:].copy()
        if hasattr(fileObj.variables[varname], '_FillValue'):
            # return masked array
            retval = self.np.ma.array(data, mask=data == fileObj.variables[varname]._FillValue)
        else:
            # generate dummy mask which is always valid
            retval = self.np.ma.array(data, mask=data != data)
        return retval  # }}}
def readXArray(self, ifile=None, varname=None):
    """Return an xarray DataArray for a given variable name.

    Args:
        ifile: input file; when None the queued operator chain is run.
        varname: variable to select (required).

    Raises:
        ValueError: if *varname* is not given.
        ImportError: if xarray support is unavailable.
        LookupError: if the variable is missing from the dataset.
    """
    if ifile is None:
        ifile = self.run()
    if varname is None:
        raise ValueError("A varname needs to be specified!")
    if not self.hasXarray:
        print("Could not load XArray")
        six.raise_from(ImportError, None)
    dataSet = self.xa_open(ifile)
    try:
        return dataSet[varname]
    except Exception:
        # bug fix: a bare 'except:' also swallowed KeyboardInterrupt/SystemExit
        print("Cannot find variable '%s'" % varname)
        six.raise_from(LookupError, None)
def readXDataset(self, ifile=None):
    """Open *ifile* (or the result of running the chain) as an xarray Dataset."""
    target = self.run() if ifile is None else ifile
    if self.hasXarray:
        return self.xa_open(target)
    print("Could not load XArray")
    six.raise_from(ImportError, None)
# internal helper methods:
# return internal cdo.py version
def __version__(self):
    """Return the version of this cdo.py wrapper (not of the CDO binary)."""
    return CDO_PY_VERSION
def __print__(self, context=''):
    """Debug helper: dump object identity and environment to stdout.

    Args:
        context: optional label printed before the object info.
    """
    if '' != context:
        print('CDO:CONTEXT ' + context)
    print("CDO:ID = " + str(id(self)))
    print("CDO:ENV = " + str(self.env))
# }}}
# Helper module for easy temp file handling {{{
class CdoTempfileStore(object):
    """Book-keeping for temporary files created by cdo.py.

    Every created temp-file name is tracked on the class so the files can
    be removed once the last store using a given directory is deleted.
    """

    __tempfiles = []  # names of all temp files ever handed out (class-wide)
    __tempdirs = []   # directories currently in use by live stores

    def __init__(self, dir):
        self.persistent_tempfile = False
        self.fileTag = 'cdoPy'
        self.dir = dir
        if not os.path.isdir(dir):
            os.makedirs(dir)
        self.__tempdirs.append(dir)

    def __del__(self):
        # remove temporary files once no store uses this directory anymore
        try:
            self.__tempdirs.remove(self.dir)
        except ValueError:
            pass
        if self.dir not in self.__tempdirs:
            for filename in self.__class__.__tempfiles:
                if os.path.isfile(filename):
                    os.remove(filename)

    def cleanTempDir(self):
        """Remove leftover cdo.py temp files owned by the current user."""
        leftOvers = [os.path.join(self.dir, f) for f in os.listdir(self.dir)]
        # filter for cdo.py's tempfiles owned by you
        leftOvers = [f for f in leftOvers if
                     self.fileTag in f and
                     os.path.isfile(f) and
                     os.stat(f).st_uid == os.getuid()]
        # this might lead to trouble if it is used by server side computing like
        # jupyter notebooks, filtering by userid might no be enough
        for f in leftOvers:
            os.remove(f)

    def setPersist(self, value):
        # bug fix: this used to assign 'persistent_tempfiles' (plural) while
        # newFile() reads 'persistent_tempfile', so the setting was ignored
        self.persistent_tempfile = value

    def newFile(self):
        """Return a fresh file name: temporary unless persistence is enabled."""
        if not self.persistent_tempfile:
            t = tempfile.NamedTemporaryFile(delete=True, prefix=self.fileTag, dir=self.dir)
            self.__class__.__tempfiles.append(t.name)
            t.close()
            return t.name
        else:
            N = 10000000
            return "_" + random.randint(0, N).__str__()
# }}}
# vim: tabstop=2 expandtab shiftwidth=2 softtabstop=2 fdm=marker
|
Kobzol/hyperqueue | tests/benchmarks/overhead.py | <filename>tests/benchmarks/overhead.py
import multiprocessing
import pytest
from ..conftest import HqEnv, run_hq_env
def task_overhead(env: HqEnv, task_count: int):
    """Submit an array job of *task_count* trivial 'sleep 0' tasks and wait.

    Output streams are discarded so only scheduling overhead is measured.
    """
    submit_args = [
        "submit",
        "--stdout",
        "none",
        "--stderr",
        "none",
        "--wait",
        "--array",
        f"1-{task_count}",
        "--",
        "sleep",
        "0",
    ]
    env.command(submit_args)
@pytest.mark.parametrize("task_count", (100, 1000, 10000))
def test_benchmark_task_overhead(benchmark, tmp_path, task_count):
    """Benchmark task-submission overhead for several array sizes.

    Uses a release build (debug=False) and one worker with a CPU per core
    so the measurement reflects scheduler throughput, not build overhead.
    """
    with run_hq_env(tmp_path, debug=False) as hq_env:
        hq_env.start_server()
        hq_env.start_workers(1, cpus=multiprocessing.cpu_count())
        benchmark(task_overhead, hq_env, task_count)
|
Kobzol/hyperqueue | tests/utils/job.py | <reponame>Kobzol/hyperqueue<gh_stars>0
def default_task_output(job_id=1, task_id=0, type="stdout") -> str:
    """Return the default stdout/stderr path HyperQueue uses for a task."""
    return "job-{}/{}.{}".format(job_id, task_id, type)
|
Kobzol/hyperqueue | tests/utils/table.py | <reponame>Kobzol/hyperqueue
from typing import List, Optional
JOB_TABLE_ROWS = 15
# TODO: create a pandas dataframe instead?
class Table:
    """Parsed CLI table: a list of rows, each row a list of cell strings."""

    def __init__(self, rows):
        self.rows = rows

    def __getitem__(self, item):
        # slicing yields a new Table; a single index yields the raw row
        if isinstance(item, slice):
            return Table(self.rows[item])
        return self.rows[item]

    def __iter__(self):
        return iter(self.rows)

    def get_row_value(self, key: str) -> Optional[str]:
        """
        Assumes vertical table (each value has a separate row).
        """
        matches = [row for row in self.rows if row and row[0] == key]
        if not matches:
            return None
        assert len(matches) == 1
        return matches[0][1]

    def check_row_value(self, key: str, value: str):
        actual = self.get_row_value(key)
        if actual is None:
            raise Exception(f"Key `{key!r}` not found in\n{self}")
        assert_equals(actual, value)

    def get_column_value(self, key: str) -> Optional[List[str]]:
        """
        Assumes horizontal table (each value has a separate column).
        """
        header = self.rows[0]
        if key not in header:
            return None
        col = header.index(key)
        return [row[col] for row in self.rows[1:]]

    def check_column_value(self, key: str, index: int, value: str):
        values = self.get_column_value(key)
        if not values:
            raise Exception(f"Value for key `{key!r}` not found in\n{self}")
        assert_equals(values[index], value)

    def check_columns_value(self, keys: List[str], index: int, values: List[str]):
        assert len(keys) == len(values)
        for key, expected in zip(keys, values):
            self.check_column_value(key, index, expected)

    def print(self):
        for position, row in enumerate(self.rows):
            print(position, row)

    def __len__(self):
        return len(self.rows)

    def __repr__(self):
        return "\n".join(" | ".join(row) for row in self.rows)
def assert_equals(a, b):
    """
    Workaround for pytest.
    Without this it won't show the differing values if an assert error happens inside some method.
    """
    if a == b:
        return
    raise Exception(f"{a!r} does not equal {b!r}")
def parse_table(table_string: str) -> Table:
    """Parse an ASCII table (with '+---+' separator lines) into a Table.

    Lines between two separators belong to the same logical row; the
    cells of continuation lines are joined to the previous row with
    newlines.
    """
    result = []
    row_boundary = True
    for line in table_string.strip().split("\n"):
        if line.startswith("+-"):
            row_boundary = True
            continue
        cells = [cell.strip() for cell in line.split("|")[1:-1]]
        if row_boundary:
            result.append(cells)
            row_boundary = False
        else:
            # continuation line: merge non-empty cells into the previous row
            for col, cell in enumerate(cells):
                if cell:
                    result[-1][col] += "\n" + cell
    return Table(result)
|
Kobzol/hyperqueue | tests/conftest.py | import contextlib
import os
import signal
import subprocess
import time
from typing import Optional, Tuple
import pytest
from .utils import parse_table
from .utils.mock import ProgramMock
from .utils.wait import wait_until
PYTEST_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.dirname(PYTEST_DIR)
def get_hq_binary(debug=True):
    """Return the path to the built `hq` binary (debug or release target)."""
    build_kind = "debug" if debug else "release"
    return os.path.join(ROOT_DIR, "target", build_kind, "hq")
RUNNING_IN_CI = "CI" in os.environ
class Env:
    """Manages a set of spawned subprocesses for one test.

    Each process is started in its own session (setsid) so its whole
    process group can be killed at once; stdout/stderr are redirected to
    `<work_path>/<name>.out` unless catch_io is disabled.
    """

    def __init__(self, work_path):
        # list of (name, subprocess.Popen) pairs in start order
        self.processes = []
        self.work_path = work_path

    def start_process(self, name, args, env=None, catch_io=True, cwd=None):
        """Spawn *args* as a named child process and track it."""
        cwd = str(cwd or self.work_path)
        logfile = (self.work_path / name).with_suffix(".out")
        if catch_io:
            with open(logfile, "w") as out:
                p = subprocess.Popen(
                    args,
                    preexec_fn=os.setsid,
                    stdout=out,
                    stderr=subprocess.STDOUT,
                    cwd=cwd,
                    env=env,
                )
        else:
            p = subprocess.Popen(args, cwd=cwd, env=env)
        self.processes.append((name, p))
        return p

    def check_process_exited(self, process: subprocess.Popen, expected_code=0):
        """Assert *process* has exited (with *expected_code*) and untrack it.

        expected_code may be an int, None (accept any code), or the
        string "error" (any non-zero code).
        """
        for (n, p) in self.processes:
            if p is process:
                if process.poll() is None:
                    raise Exception(f"Process with pid {process.pid} is still running")
                if expected_code == "error":
                    assert process.returncode != 0
                elif expected_code is not None:
                    assert process.returncode == expected_code
                self.processes = [
                    (n, p) for (n, p) in self.processes if p is not process
                ]
                return
        raise Exception(f"Process with pid {process.pid} not found")

    def check_running_processes(self):
        """Checks that everything is still running"""
        for name, process in self.processes:
            if process.poll() is not None:
                raise Exception(
                    "Process {0} crashed (log in {1}/{0}.out)".format(
                        name, self.work_path
                    )
                )

    def kill_all(self):
        """SIGTERM every tracked process group, in subclass-defined order."""
        self.sort_processes_for_kill()
        for _, process in self.processes:
            # Kill the whole group since the process may spawn a child
            # NOTE(review): `not process.poll()` is also true for a process
            # that already exited with code 0 -- confirm this is intended
            if not process.poll():
                os.killpg(os.getpgid(process.pid), signal.SIGTERM)

    def kill_process(self, name):
        """SIGTERM the process group of the process named *name* and untrack it."""
        for i, (n, p) in enumerate(self.processes):
            if n == name:
                del self.processes[i]
                # Kill the whole group since the process may spawn a child
                if p.returncode is None and not p.poll():
                    os.killpg(os.getpgid(p.pid), signal.SIGTERM)
                return
        else:
            raise Exception("Process not found")

    def sort_processes_for_kill(self):
        # hook for subclasses: reorder self.processes before kill_all
        pass
class HqEnv(Env):
    """Test environment driving a HyperQueue server, workers and CLI calls."""

    default_listen_port = 17002

    def __init__(self, work_dir, mock: ProgramMock, debug=True):
        Env.__init__(self, work_dir)
        self.mock = mock
        self.server = None
        self.workers = {}
        # used to derive unique worker hostnames (worker1, worker2, ...)
        self.id_counter = 0
        self.do_final_check = True
        self.server_dir = None
        # debug=True runs target/debug binaries, False target/release
        self.debug = debug

    def no_final_check(self):
        self.do_final_check = False

    def make_default_env(self):
        """Environment for spawned hq processes: full logging + program mocks."""
        env = os.environ.copy()
        env["RUST_LOG"] = "tako=trace,hyperqueue=trace"
        env["RUST_BACKTRACE"] = "full"
        self.mock.update_env(env)
        return env

    @staticmethod
    def server_args(server_dir="hq-server", debug=True):
        """Command line for starting the hq server."""
        return [
            get_hq_binary(debug=debug),
            "--colors",
            "never",
            "--server-dir",
            server_dir,
            "server",
            "start",
        ]

    def start_server(self, server_dir="hq-server", args=None) -> subprocess.Popen:
        """Start the hq server process and verify it stays alive."""
        self.server_dir = os.path.join(self.work_path, server_dir)
        env = self.make_default_env()
        server_args = self.server_args(self.server_dir, debug=self.debug)
        if args:
            server_args += args
        process = self.start_process("server", server_args, env=env)
        # give the server a moment to come up before checking it survived
        time.sleep(0.2)
        self.check_running_processes()
        return process

    def start_workers(self, count, **kwargs):
        """Start *count* workers with identical settings."""
        for _ in range(count):
            self.start_worker(**kwargs)

    def start_worker(
        self,
        *,
        cpus="1",
        env=None,
        args=None,
        set_hostname=True,
        wait_for_start=True,
    ) -> subprocess.Popen:
        """Start one worker; optionally block until the server lists it."""
        self.id_counter += 1
        worker_id = self.id_counter
        worker_env = self.make_default_env()
        if env:
            worker_env.update(env)
        worker_args = [
            get_hq_binary(self.debug),
            "--server-dir",
            self.server_dir,
            "worker",
            "start",
            "--no-detect-resources",  # Ignore resources on testing machine
        ]
        hostname = f"worker{worker_id}"
        if set_hostname:
            worker_args += ["--hostname", hostname]
        if cpus is not None:
            worker_args += ["--cpus", str(cpus)]
        if args:
            worker_args += list(args)
        r = self.start_process(hostname, worker_args, env=worker_env)
        if wait_for_start:
            print(wait_for_start)
            # waiting relies on finding the hostname in `worker list`
            assert set_hostname

            def wait_for_worker():
                table = self.command(["worker", "list"], as_table=True)
                print(table)
                return hostname in table.get_column_value("Hostname")

            wait_until(wait_for_worker)
        return r

    def kill_worker(self, worker_id: int):
        """Kill the local process backing the worker registered as *worker_id*."""
        table = self.command(["worker", "info", str(worker_id)], as_table=True)
        pid = table.get_row_value("Process pid")
        process = self.find_process_by_pid(int(pid))
        if process is None:
            raise Exception(f"Worker {worker_id} not found")
        self.kill_process(process[0])

    def find_process_by_pid(self, pid: int) -> Optional[Tuple[str, subprocess.Popen]]:
        """Return the tracked (name, Popen) pair with the given pid, or None."""
        for (name, process) in self.processes:
            if process.pid == pid:
                return (name, process)
        return None

    def command(
        self,
        args,
        as_table=False,
        as_lines=False,
        cwd=None,
        wait=True,
        expect_fail=None,
    ):
        """Run an `hq` CLI command against the started server.

        Returns a parsed Table (as_table), a list of lines (as_lines), the
        raw stdout string, a Popen when wait=False, or None when the
        command failed and its output contained *expect_fail*.
        """
        if isinstance(args, str):
            args = [args]
        else:
            args = list(args)
        args = [get_hq_binary(self.debug), "--server-dir", self.server_dir] + args
        cwd = cwd or self.work_path
        try:
            if not wait:
                return subprocess.Popen(args, stderr=subprocess.STDOUT, cwd=cwd)
            output = subprocess.check_output(args, stderr=subprocess.STDOUT, cwd=cwd)
            # a command expected to fail must not succeed
            if expect_fail is not None:
                raise Exception("Command should failed")
            output = output.decode()
            if as_table:
                return parse_table(output)
            if as_lines:
                return output.rstrip().split("\n")
            return output
        except subprocess.CalledProcessError as e:
            stdout = e.stdout.decode()
            if expect_fail:
                if expect_fail not in stdout:
                    raise Exception(
                        f"Command should failed with message '{expect_fail}' but got:\n{stdout}"
                    )
                else:
                    return
            print(f"Process output: {stdout}")
            raise Exception(f"Process failed with exit-code {e.returncode}\n\n{stdout}")

    def final_check(self):
        # hook for fixtures/subclasses; nothing to verify by default
        pass

    def close(self):
        pass

    def sort_processes_for_kill(self):
        # Kill server last to avoid workers ending too soon
        self.processes.sort(key=lambda process: 1 if "server" in process[0] else 0)
@pytest.fixture(autouse=False, scope="function")
def hq_env(tmp_path):
    """Pytest fixture providing a fresh HqEnv per test function."""
    with run_hq_env(tmp_path) as env:
        yield env
@contextlib.contextmanager
def run_hq_env(tmp_path, debug=True):
    """Fixture that allows to start HQ test environment"""
    print("Working dir", tmp_path)
    os.chdir(tmp_path)
    mock = ProgramMock(tmp_path.joinpath("mock"))
    env = HqEnv(tmp_path, debug=debug, mock=mock)
    # NOTE(review): the yield is not wrapped in try/finally, so if the
    # caller's body raises, the cleanup below is skipped -- confirm intended
    yield env
    try:
        env.final_check()
        env.check_running_processes()
    finally:
        env.close()
        env.kill_all()
        # Final sleep to let server port be freed, on some slow computers
        # a new test is starter before the old server is properly cleaned
        time.sleep(0.02)
|
Kobzol/hyperqueue | tests/utils/check.py | import contextlib
import os
@contextlib.contextmanager
def check_error_log(path: str):
    """Check that the file at the given path is either missing or empty.
    If not, raise an exception with its contents"""
    yield
    if os.path.isfile(path):
        with open(path) as f:
            contents = f.read().strip()
        if contents:
            raise Exception(f"Error log at {path}\n{contents}")
|
Kobzol/hyperqueue | tests/test_job.py | <gh_stars>0
import os
import time
from datetime import datetime
from os.path import isdir, isfile
import pytest
from .conftest import HqEnv
from .utils import JOB_TABLE_ROWS, wait_for_job_state
from .utils.job import default_task_output
def test_job_submit(hq_env: HqEnv):
    """End-to-end job listing states: WAITING -> RUNNING -> FINISHED."""
    hq_env.start_server()
    # empty job list: only the header row
    table = hq_env.command("jobs", as_table=True)
    assert len(table) == 1
    assert table[0][:3] == ["Id", "Name", "State"]
    hq_env.command(["submit", "--", "bash", "-c", "echo 'hello'"])
    hq_env.command(["submit", "--", "bash", "-c", "echo 'hello2'"])
    wait_for_job_state(hq_env, [1, 2], "WAITING")
    table = hq_env.command("jobs", as_table=True)
    assert len(table) == 3
    table.check_columns_value(["Id", "Name", "State"], 0, ["1", "bash", "WAITING"])
    table.check_columns_value(["Id", "Name", "State"], 1, ["2", "bash", "WAITING"])
    # once a worker exists both jobs run to completion
    hq_env.start_worker(cpus=1)
    wait_for_job_state(hq_env, [1, 2], "FINISHED")
    table = hq_env.command("jobs", as_table=True)
    assert len(table) == 3
    table.check_columns_value(["Id", "Name", "State"], 0, ["1", "bash", "FINISHED"])
    table.check_columns_value(["Id", "Name", "State"], 1, ["2", "bash", "FINISHED"])
    hq_env.command(["submit", "--", "sleep", "1"])
    wait_for_job_state(hq_env, 3, "RUNNING", sleep_s=0.2)
    table = hq_env.command("jobs", as_table=True)
    assert len(table) == 4
    table.check_columns_value(["Id", "Name", "State"], 0, ["1", "bash", "FINISHED"])
    table.check_columns_value(["Id", "Name", "State"], 1, ["2", "bash", "FINISHED"])
    table.check_columns_value(["Id", "Name", "State"], 2, ["3", "sleep", "RUNNING"])
    wait_for_job_state(hq_env, 3, "FINISHED")
    table = hq_env.command("jobs", as_table=True)
    assert len(table) == 4
    table.check_columns_value(["Id", "Name", "State"], 0, ["1", "bash", "FINISHED"])
    table.check_columns_value(["Id", "Name", "State"], 1, ["2", "bash", "FINISHED"])
    table.check_columns_value(["Id", "Name", "State"], 2, ["3", "sleep", "FINISHED"])
def test_custom_name(hq_env: HqEnv, tmp_path):
    """--name sets the job name; whitespace or overly long names are rejected."""
    hq_env.start_server()
    hq_env.command(["submit", "sleep", "1", "--name=sleep_prog"])
    wait_for_job_state(hq_env, 1, "WAITING")
    table = hq_env.command("jobs", as_table=True)
    assert len(table) == 2
    table.check_columns_value(
        ["Id", "Name", "State"], 0, ["1", "sleep_prog", "WAITING"]
    )
    # names containing whitespace must be rejected
    with pytest.raises(Exception):
        hq_env.command(["submit", "sleep", "1", "--name=second_sleep \n"])
    with pytest.raises(Exception):
        hq_env.command(["submit", "sleep", "1", "--name=second_sleep \t"])
    # overly long names must be rejected
    with pytest.raises(Exception):
        hq_env.command(
            [
                "submit",
                "sleep",
                "1",
                "--name=sleep_sleep_sleep_sleep_sleep_sleep_sleep_sleep",
            ]
        )
    # none of the rejected submissions may have created a job
    table = hq_env.command("jobs", as_table=True)
    assert len(table) == 2
def test_custom_working_dir(hq_env: HqEnv, tmpdir):
    """--cwd runs the task in the given directory; output stays in the submit dir."""
    hq_env.start_server()
    test_string = "cwd_test_string"
    test_path = tmpdir.mkdir("test_dir")
    test_file = test_path.join("testfile")
    test_file.write(test_string)
    submit_dir = tmpdir.mkdir("submit_dir")
    cwd_submit_tbl = hq_env.command(
        ["submit", "--cwd=" + str(test_path), "--", "bash", "-c", "cat testfile"],
        as_table=True,
        cwd=submit_dir,
    )
    cwd_submit_tbl.check_row_value("Working Dir", str(test_path))
    hq_env.start_worker(cpus=1)
    wait_for_job_state(hq_env, 1, ["FINISHED"])
    # stdout lands relative to the submission directory, not the task cwd
    with open(os.path.join(tmpdir, "submit_dir", default_task_output())) as f:
        assert f.read() == test_string
def test_job_output_default(hq_env: HqEnv, tmp_path):
    """Default stdout/stderr files are created per job under job-<id>/."""
    hq_env.start_server()
    hq_env.start_worker(cpus=1)
    hq_env.command(["submit", "--", "bash", "-c", "echo 'hello'"])
    hq_env.command(["submit", "--", "ls", "/non-existent"])
    hq_env.command(["submit", "--", "/non-existent-program"])
    wait_for_job_state(hq_env, [1, 2, 3], ["FINISHED", "FAILED"])
    # job 1: successful echo -> stdout has the text, stderr empty
    with open(
        os.path.join(tmp_path, default_task_output(job_id=1, type="stdout"))
    ) as f:
        assert f.read() == "hello\n"
    with open(
        os.path.join(tmp_path, default_task_output(job_id=1, type="stderr"))
    ) as f:
        assert f.read() == ""
    # job 2: ls failure -> error message on stderr only
    with open(
        os.path.join(tmp_path, default_task_output(job_id=2, type="stdout"))
    ) as f:
        assert f.read() == ""
    with open(
        os.path.join(tmp_path, default_task_output(job_id=2, type="stderr"))
    ) as f:
        data = f.read()
        assert "No such file or directory" in data
        assert data.startswith("ls:")
    # job 3: program could not even start -> both files empty
    with open(
        os.path.join(tmp_path, default_task_output(job_id=3, type="stdout"))
    ) as f:
        assert f.read() == ""
    with open(
        os.path.join(tmp_path, default_task_output(job_id=3, type="stderr"))
    ) as f:
        assert f.read() == ""
def test_create_output_folders(hq_env: HqEnv):
    """Directories in --stdout/--stderr paths are created automatically."""
    hq_env.start_server()
    hq_env.start_worker()
    submit_args = [
        "submit",
        "--stdout",
        "foo/1/job.out",
        "--stderr",
        "foo/1/job.err",
        "--",
        "echo",
        "hi",
    ]
    hq_env.command(submit_args)
    wait_for_job_state(hq_env, 1, "FINISHED")
    assert isdir("foo/1")
    for expected in ("foo/1/job.out", "foo/1/job.err"):
        assert isfile(expected)
def test_job_output_configured(hq_env: HqEnv, tmp_path):
    """Custom --stdout/--stderr paths receive the task's output."""
    hq_env.start_server()
    hq_env.start_worker(cpus=1)
    hq_env.command(
        ["submit", "--stdout=abc", "--stderr=xyz", "--", "bash", "-c", "echo 'hello'"]
    )
    wait_for_job_state(hq_env, 1, "FINISHED")
    with open(os.path.join(tmp_path, "abc")) as stdout_file:
        assert stdout_file.read() == "hello\n"
    with open(os.path.join(tmp_path, "xyz")) as stderr_file:
        assert stderr_file.read() == ""
def test_job_output_none(hq_env: HqEnv, tmp_path):
    """--stdout=none/--stderr=none suppresses output files entirely."""
    hq_env.start_server()
    hq_env.start_worker(cpus=1)
    hq_env.command(
        ["submit", "--stdout=none", "--stderr=none", "--", "bash", "-c", "echo 'hello'"]
    )
    wait_for_job_state(hq_env, 1, "FINISHED")
    unexpected = [
        "none",
        default_task_output(job_id=1, task_id=0, type="stdout"),
        default_task_output(job_id=1, task_id=0, type="stderr"),
    ]
    for name in unexpected:
        assert not os.path.exists(os.path.join(tmp_path, name))
def test_job_filters(hq_env: HqEnv):
    """The `jobs <state>` filters list only jobs in the requested state."""
    hq_env.start_server()
    table_empty = hq_env.command(["jobs"], as_table=True)
    assert len(table_empty) == 1
    hq_env.command(["submit", "--", "bash", "-c", "echo 'to cancel'"])
    hq_env.command(["submit", "--", "bash", "-c", "echo 'bye'"])
    hq_env.command(["submit", "--", "ls", "failed"])
    wait_for_job_state(hq_env, [1, 2, 3], "WAITING")
    r = hq_env.command(["cancel", "1"])
    assert "Job 1 canceled" in r
    table = hq_env.command(["jobs"], as_table=True)
    table.check_column_value("State", 0, "CANCELED")
    table.check_column_value("State", 1, "WAITING")
    table.check_column_value("State", 2, "WAITING")
    assert len(table) == 4
    # filtered listings still include the header row, hence the +1 lengths
    table_canceled = hq_env.command(["jobs", "canceled"], as_table=True)
    assert len(table_canceled) == 2
    table_waiting = hq_env.command(["jobs", "waiting"], as_table=True)
    assert len(table_waiting) == 3
    hq_env.start_worker(cpus=1)
    hq_env.command(["submit", "--", "sleep", "1"])
    wait_for_job_state(hq_env, 4, "RUNNING")
    table_running = hq_env.command(["jobs", "running"], as_table=True)
    assert len(table_running) == 2
    table_finished = hq_env.command(["jobs", "finished"], as_table=True)
    assert len(table_finished) == 2
    table_failed = hq_env.command(["jobs", "failed"], as_table=True)
    assert len(table_failed) == 2
def test_job_fail(hq_env: HqEnv):
    """A task that cannot start marks the job FAILED and records the error."""
    hq_env.start_server()
    hq_env.start_worker(cpus=1)
    hq_env.command(["submit", "--", "/non-existent-program"])
    wait_for_job_state(hq_env, 1, "FAILED")
    table = hq_env.command("jobs", as_table=True)
    assert len(table) == 2
    table.check_column_value("Id", 0, "1")
    table.check_column_value("Name", 0, "non-existent-program")
    table.check_column_value("State", 0, "FAILED")
    table = hq_env.command(["job", "1", "--tasks"], as_table=True)
    table.check_row_value("Id", "1")
    table.check_row_value("State", "FAILED")
    # skip the job-summary rows; the remainder is the per-task table
    table = table[JOB_TABLE_ROWS:]
    table.check_column_value("Task Id", 0, "0")
    assert "No such file or directory" in table.get_column_value("Message")[0]
def test_job_invalid(hq_env: HqEnv):
    """Querying a non-existent job id reports a not-found message."""
    hq_env.start_server()
    hq_env.start_worker(cpus=1)
    output = hq_env.command(["job", "5"])
    assert "Job 5 not found" in output
def test_cancel_without_workers(hq_env: HqEnv):
    """A job can be canceled while still queued (no workers available)."""
    hq_env.start_server()
    hq_env.command(["submit", "/bin/hostname"])
    output = hq_env.command(["cancel", "1"])
    assert "Job 1 canceled" in output
    jobs = hq_env.command(["jobs"], as_table=True)
    jobs.check_column_value("State", 0, "CANCELED")
    # the job must stay canceled even after a worker appears
    hq_env.start_worker(cpus=1)
    jobs = hq_env.command(["jobs"], as_table=True)
    jobs.check_column_value("State", 0, "CANCELED")
def test_cancel_running(hq_env: HqEnv):
    """Canceling a running job stops it; a second cancel attempt fails."""
    hq_env.start_server()
    hq_env.start_worker(cpus=1)
    hq_env.command(["submit", "sleep", "10"])
    wait_for_job_state(hq_env, 1, "RUNNING")
    jobs = hq_env.command(["jobs"], as_table=True)
    jobs.check_column_value("State", 0, "RUNNING")
    output = hq_env.command(["cancel", "1"])
    assert "Job 1 canceled" in output
    jobs = hq_env.command(["jobs"], as_table=True)
    jobs.check_column_value("State", 0, "CANCELED")
    output = hq_env.command(["cancel", "1"])
    assert "Canceling job 1 failed" in output
def test_cancel_finished(hq_env: HqEnv):
    """Finished and failed jobs cannot be canceled and keep their state."""
    hq_env.start_server()
    hq_env.start_worker(cpus=1)
    hq_env.command(["submit", "hostname"])
    hq_env.command(["submit", "/invalid"])
    wait_for_job_state(hq_env, [1, 2], ["FINISHED", "FAILED"])
    for job_id in ("1", "2"):
        output = hq_env.command(["cancel", job_id])
        assert f"Canceling job {job_id} failed" in output
    jobs = hq_env.command(["jobs"], as_table=True)
    jobs.check_column_value("State", 0, "FINISHED")
    jobs.check_column_value("State", 1, "FAILED")
def test_cancel_last(hq_env: HqEnv):
    """`cancel last` targets the most recently submitted job."""
    hq_env.start_server()
    hq_env.start_worker(cpus=1)
    hq_env.command(["submit", "hostname"])
    hq_env.command(["submit", "/invalid"])
    wait_for_job_state(hq_env, [1, 2], ["FINISHED", "FAILED"])
    # job 2 already failed, so canceling "last" must be rejected
    output = hq_env.command(["cancel", "last"])
    assert "Canceling job 2 failed" in output
def test_cancel_some(hq_env: HqEnv):
    """Canceling an id range cancels existing jobs and reports missing ones."""
    hq_env.start_server()
    hq_env.start_worker(cpus=1)
    hq_env.command(["submit", "sleep", "100"])
    hq_env.command(["submit", "hostname"])
    hq_env.command(["submit", "/invalid"])
    output = hq_env.command(["cancel", "1-4"])
    # job 4 does not exist, so that part of the range fails
    assert "Canceling job 4 failed" in output
    jobs = hq_env.command(["jobs"], as_table=True)
    for row_index in range(3):
        jobs.check_column_value("State", row_index, "CANCELED")
def test_cancel_all(hq_env: HqEnv):
    """`cancel all` only touches jobs that are still cancelable."""
    hq_env.start_server()
    hq_env.start_worker(cpus=1)
    hq_env.command(["submit", "hostname"])
    hq_env.command(["submit", "/invalid"])
    wait_for_job_state(hq_env, [1, 2], ["FINISHED", "FAILED"])
    hq_env.command(["submit", "sleep", "100"])
    lines = hq_env.command(["cancel", "all"]).splitlines()
    # only the still-running job 3 gets canceled
    assert len(lines) == 1
    assert "Job 3 canceled" in lines[0]
def test_reporting_state_after_worker_lost(hq_env: HqEnv):
    """After a worker dies, its running job returns to WAITING and is rescheduled."""
    hq_env.start_server()
    hq_env.start_workers(2, cpus=1)
    hq_env.command(["submit", "sleep", "1"])
    hq_env.command(["submit", "sleep", "1"])
    wait_for_job_state(hq_env, [1, 2], "RUNNING")
    table = hq_env.command(["jobs"], as_table=True)
    assert table[1][2] == "RUNNING"
    assert table[2][2] == "RUNNING"
    hq_env.kill_worker(1)
    # give the server a moment to notice the lost worker
    time.sleep(0.25)
    table = hq_env.command(["jobs"], as_table=True)
    # exactly one of the two jobs was on the killed worker and is WAITING again
    if table[1][2] == "WAITING":
        idx, other = 1, 2
    elif table[2][2] == "WAITING":
        idx, other = 2, 1
    else:
        assert 0
    assert table[other][2] == "RUNNING"
    wait_for_job_state(hq_env, other, "FINISHED")
    table = hq_env.command(["jobs"], as_table=True)
    assert table[other][2] == "FINISHED"
    assert table[idx][2] == "RUNNING"
    wait_for_job_state(hq_env, idx, "FINISHED")
    table = hq_env.command(["jobs"], as_table=True)
    assert table[other][2] == "FINISHED"
    assert table[idx][2] == "FINISHED"
def test_set_env(hq_env: HqEnv):
    """--env variables are visible to the task and shown in job info."""
    hq_env.start_server()
    hq_env.start_workers(1)
    submit_args = [
        "submit",
        "--env",
        "FOO=BAR",
        "--env",
        "FOO2=BAR2",
        "--",
        "bash",
        "-c",
        "echo $FOO $FOO2",
    ]
    hq_env.command(submit_args)
    wait_for_job_state(hq_env, 1, "FINISHED")
    with open(os.path.join(hq_env.work_path, default_task_output())) as f:
        assert f.read().strip() == "BAR BAR2"
    job_info = hq_env.command(["job", "1"], as_table=True)
    job_info.check_row_value("Environment", "FOO=BAR\nFOO2=BAR2")
def test_max_fails_0(hq_env: HqEnv):
    """--max-fails 0: the first failing task cancels the rest of the array."""
    hq_env.start_server()
    hq_env.command(
        [
            "submit",
            "--array",
            "1-200",
            "--stdout",
            "none",
            "--stderr",
            "none",
            "--max-fails",
            "0",
            "--",
            "bash",
            "-c",
            "if [ $HQ_TASK_ID == 137 ]; then exit 1; fi",
        ]
    )
    hq_env.start_workers(1)
    wait_for_job_state(hq_env, 1, "CANCELED")
    table = hq_env.command(["job", "1"], as_table=True)
    states = table.get_row_value("State").split("\n")
    assert "FAILED (1)" in states
    # tasks finished before the failure stay FINISHED; the rest are CANCELED
    assert any(s.startswith("FINISHED") for s in states)
    assert any(s.startswith("CANCELED") for s in states)
def test_max_fails_1(hq_env: HqEnv):
    """--max-fails 1: a single failure is tolerated; all other tasks finish."""
    hq_env.start_server()
    hq_env.command(
        [
            "submit",
            "--array",
            "1-200",
            "--stdout",
            "none",
            "--stderr",
            "none",
            "--max-fails",
            "1",
            "--",
            "bash",
            "-c",
            "if [ $HQ_TASK_ID == 137 ]; then exit 1; fi",
        ]
    )
    hq_env.start_workers(1)
    wait_for_job_state(hq_env, 1, "FAILED")
    table = hq_env.command(["job", "1"], as_table=True)
    states = table.get_row_value("State").split("\n")
    assert "FAILED (1)" in states
    assert "FINISHED (199)" in states
def test_max_fails_many(hq_env: HqEnv):
    """--max-fails 3: the job is canceled after the fourth task failure."""
    hq_env.start_server()
    hq_env.command(
        [
            "submit",
            "--array",
            "1-10",
            "--stdout",
            "none",
            "--stderr",
            "none",
            "--max-fails",
            "3",
            "--",
            "bash",
            "-c",
            "sleep 1; exit 1",
        ]
    )
    hq_env.start_workers(1)
    # every task fails after ~1s; let several of them run before checking
    time.sleep(5)
    wait_for_job_state(hq_env, 1, "CANCELED")
    table = hq_env.command(["job", "1"], as_table=True)
    states = table.get_row_value("State").split("\n")
    assert "FAILED (4)" in states
    assert "CANCELED (6)" in states
def test_job_last(hq_env: HqEnv):
    """`job last` always refers to the newest submitted job."""
    hq_env.start_server()
    hq_env.start_worker()
    # no jobs yet: the command must still work
    hq_env.command(["job", "last"])
    for expected_id in (1, 2):
        hq_env.command(["submit", "ls"])
        wait_for_job_state(hq_env, expected_id, "FINISHED")
        info = hq_env.command(["job", "last"], as_table=True)
        info.check_row_value("Id", str(expected_id))
def test_job_resubmit_with_status(hq_env: HqEnv):
    """`resubmit --status=...` re-creates only tasks in the given states."""
    hq_env.start_server()
    # tasks 4, 5, 6 and 8 fail on purpose; 3, 7 and 9 succeed
    hq_env.command(
        [
            "submit",
            "--array=3-9",
            "--",
            "python3",
            "-c",
            "import os; assert os.environ['HQ_TASK_ID'] not in ['4', '5', '6', '8']",
        ]
    )
    hq_env.start_workers(2, cpus=1)
    wait_for_job_state(hq_env, 1, "FAILED")
    table = hq_env.command(["resubmit", "1", "--status=failed"], as_table=True)
    table.check_row_value("Tasks", "4; Ids: 4-6, 8")
    table = hq_env.command(["resubmit", "1", "--status=finished"], as_table=True)
    table.check_row_value("Tasks", "3; Ids: 3, 7, 9")
def test_job_resubmit_all(hq_env: HqEnv):
    """Resubmitting without a status filter re-creates every task."""
    hq_env.start_server()
    hq_env.command(["submit", "--array=2,7,9", "--", "/bin/hostname"])
    hq_env.start_workers(2, cpus=1)
    wait_for_job_state(hq_env, 1, "FINISHED")
    resubmit_info = hq_env.command(["resubmit", "1"], as_table=True)
    resubmit_info.check_row_value("Tasks", "3; Ids: 2, 7, 9")
def test_job_priority(hq_env: HqEnv, tmp_path):
    """Higher-priority jobs run first on a single-cpu worker.

    Jobs 2 and 3 (priority 3) must start before job 1 (priority 1), which in
    turn must start before job 4 (default priority). Each job writes its start
    timestamp to stdout, which is compared at the end.
    """
    hq_env.start_server()
    hq_env.command(
        [
            "submit",
            "--priority",
            "1",
            "--",
            "bash",
            "-c",
            "date --iso-8601=seconds && sleep 1",
        ]
    )
    hq_env.command(
        [
            "submit",
            "--priority",
            "3",
            "--",
            "bash",
            "-c",
            "date --iso-8601=seconds && sleep 1",
        ]
    )
    hq_env.command(
        [
            "submit",
            "--priority",
            "3",
            "--",
            "bash",
            "-c",
            "date --iso-8601=seconds && sleep 1",
        ]
    )
    hq_env.command(
        [
            "submit",
            "--",
            "bash",
            "-c",
            "date --iso-8601=seconds && sleep 1",
        ]
    )
    # The single-cpu worker is started only after all jobs are queued,
    # so the scheduler must order them purely by priority.
    hq_env.start_worker(cpus=1)
    wait_for_job_state(hq_env, 1, "FINISHED")
    wait_for_job_state(hq_env, 2, "FINISHED")
    wait_for_job_state(hq_env, 3, "FINISHED")
    wait_for_job_state(hq_env, 4, "FINISHED")
    dates = []
    for file in [default_task_output(job_id=id, type="stdout") for id in range(1, 5)]:
        with open(os.path.join(tmp_path, file)) as f:
            dates.append(datetime.fromisoformat(f.read().strip()))
    assert dates[1] < dates[0]
    assert dates[2] < dates[0]
    assert dates[0] < dates[3]
def test_job_tasks_table(hq_env: HqEnv):
    """Check the Worker column of `job --tasks` across the job lifecycle.

    Fix: each task table is now fetched *after* waiting for the target job
    state. Previously the table was captured before the wait, so the
    assertions ran against a stale snapshot and could fail intermittently
    (e.g. asserting "worker1" on a table taken before the job finished).
    """
    hq_env.start_server()

    # No worker connected yet: the task cannot have been assigned anywhere.
    hq_env.command(["submit", "echo", "test"])
    wait_for_job_state(hq_env, 1, "WAITING")
    table = hq_env.command(["job", "1", "--tasks"], as_table=True)[JOB_TABLE_ROWS:]
    table.check_column_value("Worker", 0, "")

    # Once a worker connects and the job finishes, the worker is recorded.
    hq_env.start_worker()
    wait_for_job_state(hq_env, 1, "FINISHED")
    table = hq_env.command(["job", "1", "--tasks"], as_table=True)[JOB_TABLE_ROWS:]
    table.check_column_value("Worker", 0, "worker1")

    # A failed task may or may not have been assigned a worker before failing.
    hq_env.command(["submit", "non-existent-program", "test"])
    wait_for_job_state(hq_env, 2, "FAILED")
    table = hq_env.command(["job", "2", "--tasks"], as_table=True)[JOB_TABLE_ROWS:]
    worker = table.get_column_value("Worker")[0]
    assert worker == "" or worker == "worker1"
def test_job_wait(hq_env: HqEnv):
    """`hq wait <id>` / `hq wait all` block until jobs finish and report counts."""
    hq_env.start_server()
    hq_env.start_worker()
    hq_env.command(["submit", "sleep", "1"])
    r = hq_env.command(["wait", "1"])
    assert "1 job finished" in r
    table = hq_env.command(["job", "1"], as_table=True)
    table.check_row_value("State", "FINISHED")
    r = hq_env.command(["wait", "all"])
    assert "1 job finished" in r
def test_job_submit_wait(hq_env: HqEnv):
    """`hq submit --wait` blocks until the submitted job finishes."""
    hq_env.start_server()
    hq_env.start_worker()
    r = hq_env.command(["submit", "sleep", "1", "--wait"])
    assert "1 job finished" in r
    table = hq_env.command(["job", "1"], as_table=True)
    table.check_row_value("State", "FINISHED")
def test_job_wait_failure_exit_code(hq_env: HqEnv):
    """`hq submit --wait` exits with code 1 when the job fails."""
    hq_env.start_server()
    hq_env.start_worker()
    process = hq_env.command(["submit", "--wait", "non-existent-program"], wait=False)
    assert process.wait() == 1
def test_job_wait_cancellation_exit_code(hq_env: HqEnv):
    """`hq submit --wait` exits with code 1 when the job is canceled."""
    hq_env.start_server()
    hq_env.start_worker()
    process = hq_env.command(["submit", "--wait", "sleep", "100"], wait=False)
    wait_for_job_state(hq_env, 1, "RUNNING")
    hq_env.command(["cancel", "last"])
    assert process.wait() == 1
def test_job_completion_time(hq_env: HqEnv):
    """Makespan grows while the job runs and stops changing once it finishes."""
    hq_env.start_server()
    hq_env.start_worker()
    table = hq_env.command(["submit", "sleep", "1"], as_table=True)
    # Immediately after submit no time has elapsed yet.
    table.check_row_value("Makespan", "0s")
    wait_for_job_state(hq_env, 1, "RUNNING")
    table = hq_env.command(["job", "1"], as_table=True)
    assert table.get_row_value("Makespan") != "0s"
    assert not table.get_row_value("Makespan").startswith("1s")
    wait_for_job_state(hq_env, 1, "FINISHED")
    time.sleep(
        1.2
    )  # This sleep is not redundant, we check that after finished time is not moving
    table = hq_env.command(["job", "1"], as_table=True)
    assert table.get_row_value("Makespan").startswith("1s")
    table = hq_env.command(["job", "1", "--tasks"], as_table=True)
    # The per-task makespan (column 3 of the first task row) must also be frozen.
    offset = JOB_TABLE_ROWS
    assert table[offset + 1][3].startswith("1s")
def test_job_timeout(hq_env: HqEnv):
    """`--time-limit` is reported per job and enforced for running tasks.

    Job 1 (500ms limit, 2s task) must fail with "Time limit reached";
    jobs 2 (3s limit) and 3 (no limit) must finish normally.
    """
    hq_env.start_server()
    hq_env.start_worker(cpus="3")
    hq_env.command(["submit", "--time-limit=500ms", "sleep", "2"])
    hq_env.command(["submit", "--time-limit=3s", "sleep", "2"])
    hq_env.command(["submit", "sleep", "2"])
    table = hq_env.command(["job", "1"], as_table=True)
    table.check_row_value("Task time limit", "500ms")
    table = hq_env.command(["job", "2"], as_table=True)
    table.check_row_value("Task time limit", "3s")
    table = hq_env.command(["job", "3"], as_table=True)
    table.check_row_value("Task time limit", "None")
    wait_for_job_state(hq_env, 1, "FAILED")
    table = hq_env.command(["job", "1"], as_table=True)
    table.check_row_value("Task time limit", "500ms")
    offset = JOB_TABLE_ROWS
    # The task error message (column 2 of the first task row) must name the cause.
    assert table[offset + 1][2] == "Time limit reached"
    # The job was cut off near its 500ms limit.
    assert table.get_row_value("Makespan").startswith("5")
    assert table.get_row_value("Makespan").endswith("ms")
    wait_for_job_state(hq_env, 2, "FINISHED")
    table = hq_env.command(["job", "2"], as_table=True)
    assert table.get_row_value("Makespan").startswith("2")
    wait_for_job_state(hq_env, 3, "FINISHED")
    table = hq_env.command(["job", "3"], as_table=True)
    assert table.get_row_value("Makespan").startswith("2")
|
Kobzol/hyperqueue | tests/test_autoalloc.py | import contextlib
import json
import os
import time
from os.path import dirname, join
from typing import List, Optional
from .conftest import HqEnv
from .utils.check import check_error_log
from .utils.wait import wait_until
def test_autoalloc_descriptor_list(hq_env: HqEnv):
    """`hq alloc list` shows one row per queue with its configured parameters."""
    mock = PbsMock(hq_env, qtime="Thu Aug 19 13:05:38 2021")
    with mock.activate():
        hq_env.start_server()
        # Queue with default settings and no name.
        add_queue(hq_env, name=None, backlog=5)
        table = hq_env.command(["alloc", "list"], as_table=True)
        table.check_columns_value(
            (
                "ID",
                "Backlog size",
                "Workers per alloc",
                "Timelimit",
                "Manager",
                "Name",
            ),
            0,
            ("1", "5", "1", "N/A", "PBS", ""),
        )
        # Named PBS queue with explicit time limit and worker count.
        add_queue(
            hq_env,
            manager="pbs",
            name="bar",
            backlog=1,
            workers_per_alloc=2,
            time_limit="1h",
        )
        table = hq_env.command(["alloc", "list"], as_table=True)
        table.check_columns_value(
            (
                "ID",
                "Backlog size",
                "Workers per alloc",
                "Timelimit",
                "Name",
            ),
            1,
            ("2", "1", "2", "1h", "bar"),
        )
        # SLURM queues are listed alongside PBS ones.
        add_queue(hq_env, manager="slurm", backlog=1)
        table = hq_env.command(["alloc", "list"], as_table=True)
        table.check_columns_value(("ID", "Manager"), 2, ("3", "SLURM"))
def test_add_pbs_descriptor(hq_env: HqEnv):
    """Adding a PBS queue reports success and assigns it id 1."""
    mock = PbsMock(hq_env, qtime="Thu Aug 19 13:05:38 2021")
    with mock.activate():
        hq_env.start_server(args=["--autoalloc-interval", "500ms"])
        output = add_queue(
            hq_env,
            manager="pbs",
            name="foo",
            backlog=5,
            workers_per_alloc=2,
        )
        assert "Allocation queue 1 successfully created" in output
        info = hq_env.command(["alloc", "list"], as_table=True)
        info.check_column_value("ID", 0, "1")
def test_add_slurm_descriptor(hq_env: HqEnv):
    """Adding a SLURM queue reports success and assigns it id 1."""
    hq_env.start_server(args=["--autoalloc-interval", "500ms"])
    output = add_queue(
        hq_env,
        manager="slurm",
        name="foo",
        backlog=5,
        workers_per_alloc=2,
    )
    assert "Allocation queue 1 successfully created" in output
    info = hq_env.command(["alloc", "list"], as_table=True)
    info.check_column_value("ID", 0, "1")
def test_pbs_queue_qsub_fail(hq_env: HqEnv):
    """A failing qsub binary surfaces as an 'Allocation submission failed' event."""
    qsub_code = "exit(1)"
    with hq_env.mock.mock_program("qsub", qsub_code):
        with hq_env.mock.mock_program("qstat", ""):
            hq_env.start_server(args=["--autoalloc-interval", "100ms"])
            prepare_tasks(hq_env)
            add_queue(hq_env)
            # Let at least one auto-alloc tick run (interval is 100ms).
            time.sleep(0.2)
            table = hq_env.command(["alloc", "events", "1"], as_table=True)
            table.check_column_value("Event", 0, "Allocation submission failed")
            table.check_column_value(
                "Message",
                0,
                "qsub execution failed\nCaused by:\nExit code: 1\nStderr:\nStdout:",
            )
def test_slurm_queue_sbatch_fail(hq_env: HqEnv):
    """A failing sbatch binary surfaces as an 'Allocation submission failed' event."""
    sbatch_code = "exit(1)"
    with hq_env.mock.mock_program("sbatch", sbatch_code):
        hq_env.start_server(args=["--autoalloc-interval", "100ms"])
        prepare_tasks(hq_env)
        add_queue(hq_env, manager="slurm")
        # Let at least one auto-alloc tick run (interval is 100ms).
        time.sleep(0.2)
        table = hq_env.command(["alloc", "events", "1"], as_table=True)
        table.check_column_value("Event", 0, "Allocation submission failed")
        table.check_column_value(
            "Message",
            0,
            "sbatch execution failed\nCaused by:\nExit code: 1\nStderr:\nStdout:",
        )
def program_code_store_args_json(path: str) -> str:
    """
    Creates program code that stores its cmd arguments as JSON into the specified `path`.

    Used to mock scheduler binaries (qsub/sbatch) so tests can inspect the
    exact arguments HyperQueue passed to them.
    Note: `path` is interpolated into the generated source verbatim — it must
    not contain double quotes.
    """
    return f"""
import sys
import json
with open("{path}", "w") as f:
    f.write(json.dumps(sys.argv))
"""
def extract_script_args(script: str, prefix: str) -> List[str]:
    """Return the stripped remainder of every line in `script` starting with `prefix`.

    E.g. with prefix "#PBS", the line "#PBS -q foo" yields "-q foo".
    """
    extracted = []
    skip = len(prefix)
    for line in script.splitlines(keepends=False):
        if line.startswith(prefix):
            extracted.append(line[skip:].strip())
    return extracted
def test_pbs_queue_qsub_args(hq_env: HqEnv):
    """The generated qsub script contains the expected #PBS directives."""
    path = join(hq_env.work_path, "qsub.out")
    qsub_code = program_code_store_args_json(path)
    with hq_env.mock.mock_program("qsub", qsub_code):
        with hq_env.mock.mock_program("qstat", ""):
            hq_env.start_server(args=["--autoalloc-interval", "100ms"])
            prepare_tasks(hq_env)
            add_queue(hq_env, time_limit="3m", additional_args="--foo=bar a b --baz 42")
            wait_until(lambda: os.path.exists(path))
            with open(path) as f:
                args = json.loads(f.read())
                # argv[1] of the mocked qsub is the submitted script path.
                qsub_script_path = args[1]
            with open(qsub_script_path) as f:
                data = f.read()
            pbs_args = extract_script_args(data, "#PBS")
            assert pbs_args == [
                "-l select=1",
                "-N hq-alloc-1",
                f"-o {join(dirname(qsub_script_path), 'stdout')}",
                f"-e {join(dirname(qsub_script_path), 'stderr')}",
                "-l walltime=00:03:00",
                "--foo=bar a b --baz 42",
            ]
def test_slurm_queue_sbatch_args(hq_env: HqEnv):
    """The generated sbatch script contains the expected #SBATCH directives."""
    path = join(hq_env.work_path, "sbatch.out")
    sbatch_code = program_code_store_args_json(path)
    with hq_env.mock.mock_program("sbatch", sbatch_code):
        hq_env.start_server(args=["--autoalloc-interval", "100ms"])
        prepare_tasks(hq_env)
        add_queue(
            hq_env,
            manager="slurm",
            time_limit="3m",
            additional_args="--foo=bar a b --baz 42",
        )
        wait_until(lambda: os.path.exists(path))
        with open(path) as f:
            args = json.loads(f.read())
            # argv[1] of the mocked sbatch is the submitted script path.
            sbatch_script_path = args[1]
        with open(sbatch_script_path) as f:
            data = f.read()
        pbs_args = extract_script_args(data, "#SBATCH")
        assert pbs_args == [
            "--nodes=1",
            "--job-name=hq-alloc-1",
            f"--output={join(dirname(sbatch_script_path), 'stdout')}",
            f"--error={join(dirname(sbatch_script_path), 'stderr')}",
            "--time=00:03:00",
            "--foo=bar a b --baz 42",
        ]
def test_pbs_queue_qsub_success(hq_env: HqEnv):
    """A successful qsub produces an 'Allocation queued' event with the job id."""
    qsub_code = """print("123.job")"""
    with hq_env.mock.mock_program("qsub", qsub_code):
        with hq_env.mock.mock_program("qstat", ""):
            hq_env.start_server(args=["--autoalloc-interval", "100ms"])
            prepare_tasks(hq_env)
            add_queue(hq_env)
            # Let at least one auto-alloc tick run (interval is 100ms).
            time.sleep(0.2)
            table = hq_env.command(["alloc", "events", "1"], as_table=True)
            table.check_column_value("Event", 0, "Allocation queued")
            table.check_column_value("Message", 0, "123.job")
def test_slurm_queue_sbatch_success(hq_env: HqEnv):
    """A successful sbatch produces an 'Allocation queued' event with the parsed job id."""
    sbatch_code = """print("Submitted batch job 123.job")"""
    with hq_env.mock.mock_program("sbatch", sbatch_code):
        hq_env.start_server(args=["--autoalloc-interval", "100ms"])
        prepare_tasks(hq_env)
        add_queue(hq_env, manager="slurm")
        # Let at least one auto-alloc tick run (interval is 100ms).
        time.sleep(0.2)
        table = hq_env.command(["alloc", "events", "1"], as_table=True)
        table.check_column_value("Event", 0, "Allocation queued")
        table.check_column_value("Message", 0, "123.job")
def test_pbs_queue_qsub_check_args(hq_env: HqEnv):
    """The mocked qsub asserts on its own arguments; failures are written to output_log.

    `check_error_log` fails the test if the mock wrote anything to the log,
    i.e. if any in-mock assertion tripped.
    """
    output_log = join(hq_env.work_path, "output.log")
    qsub_code = f"""
import sys
import traceback
args = sys.argv[1:]
def check_arg(key, val):
for (index, arg) in enumerate(args):
if arg == key:
assert args[index + 1] == val
return
raise Exception(f"Key `{{key}}` not found")
def check():
check_arg("-q", "queue")
assert "-lselect=1" in args
for arg in args:
assert not arg.startswith("-lwalltime")
try:
check()
except:
with open("{output_log}", "w") as f:
tb = traceback.format_exc()
f.write(tb)
f.write(" ".join(args))
"""
    with hq_env.mock.mock_program("qsub", qsub_code):
        with hq_env.mock.mock_program("qstat", ""):
            with check_error_log(output_log):
                hq_env.start_server(args=["--autoalloc-interval", "100ms"])
                add_queue(hq_env)
                # Let at least one auto-alloc tick run (interval is 100ms).
                time.sleep(0.2)
def test_pbs_events_job_lifecycle(hq_env: HqEnv):
    """Queued -> running -> finished PBS job states map to allocation events."""
    mock = PbsMock(hq_env, qtime="Thu Aug 19 13:05:38 2021")
    with mock.activate():
        mock.set_job_data("Q")
        hq_env.start_server(args=["--autoalloc-interval", "100ms"])
        prepare_tasks(hq_env)
        add_queue(hq_env)
        # Queued
        time.sleep(0.2)
        table = hq_env.command(["alloc", "events", "1"], as_table=True)
        table.check_column_value("Event", -1, "Allocation queued")
        # Started
        mock.set_job_data("R", stime="Thu Aug 19 13:05:39 2021")
        time.sleep(0.2)
        table = hq_env.command(["alloc", "events", "1"], as_table=True)
        assert "Allocation started" in table.get_column_value("Event")
        # Finished
        mock.set_job_data(
            "F",
            stime="Thu Aug 19 13:05:39 2021",
            mtime="Thu Aug 19 13:05:39 2021",
            exit_code=0,
        )
        time.sleep(0.2)
        table = hq_env.command(["alloc", "events", "1"], as_table=True)
        assert "Allocation finished" in table.get_column_value("Event")
def test_pbs_events_job_failed(hq_env: HqEnv):
    """A PBS job that ends with a non-zero exit code yields an 'Allocation failed' event."""
    mock = PbsMock(hq_env, qtime="Thu Aug 19 13:05:38 2021")
    mock.set_job_data(
        "F",
        stime="Thu Aug 19 13:05:39 2021",
        mtime="Thu Aug 19 13:05:39 2021",
        exit_code=1,
    )
    with mock.activate():
        hq_env.start_server(args=["--autoalloc-interval", "100ms"])
        prepare_tasks(hq_env)
        add_queue(hq_env)
        # Let several auto-alloc ticks run (interval is 100ms).
        time.sleep(0.5)
        table = hq_env.command(["alloc", "events", "1"], as_table=True)
        column = table.get_column_value("Event")
        assert "Allocation failed" in column
def test_pbs_allocations_job_lifecycle(hq_env: HqEnv):
    """`alloc info` reflects the PBS job state: Queued -> Running -> Finished."""
    mock = PbsMock(
        hq_env,
        qtime="Thu Aug 19 13:05:38 2021",
        stime="Thu Aug 19 13:05:39 2021",
        mtime="Thu Aug 19 13:05:39 2021",
    )
    mock.set_job_data("Q")
    with mock.activate():
        hq_env.start_server(args=["--autoalloc-interval", "100ms"])
        prepare_tasks(hq_env)
        add_queue(hq_env, name="foo")
        time.sleep(0.2)
        table = hq_env.command(["alloc", "info", "1"], as_table=True)
        table.check_columns_value(
            ("Id", "State", "Worker count"), 0, ("0", "Queued", "1")
        )
        mock.set_job_data("R")
        time.sleep(0.2)
        table = hq_env.command(["alloc", "info", "1"], as_table=True)
        table.check_column_value("State", 0, "Running")
        mock.set_job_data("F", exit_code=0)
        time.sleep(0.2)
        table = hq_env.command(["alloc", "info", "1"], as_table=True)
        table.check_column_value("State", 0, "Finished")
def test_allocations_ignore_job_changes_after_finish(hq_env: HqEnv):
    """Once an allocation is Finished, later PBS state changes are ignored."""
    mock = PbsMock(
        hq_env,
        jobs=["1", "2"],
        qtime="Thu Aug 19 13:05:38 2021",
        stime="Thu Aug 19 13:05:39 2021",
        mtime="Thu Aug 19 13:05:39 2021",
    )
    mock.set_job_data("F", exit_code=0)
    with mock.activate():
        hq_env.start_server(args=["--autoalloc-interval", "100ms"])
        prepare_tasks(hq_env)
        add_queue(hq_env)
        time.sleep(0.3)
        table = hq_env.command(["alloc", "info", "1"], as_table=True)
        table.check_column_value("State", 0, "Finished")
        # Flip the mocked PBS state back to Running; the allocation must stay Finished.
        mock.set_job_data("R")
        time.sleep(0.3)
        table = hq_env.command(["alloc", "info", "1"], as_table=True)
        table.check_column_value("State", 0, "Finished")
def test_pbs_delete_active_jobs(hq_env: HqEnv):
    """Stopping the server qdel-s all still-active PBS allocations."""
    mock = PbsMock(
        hq_env,
        jobs=["1", "2"],
        qtime="Thu Aug 19 13:05:38 2021",
        stime="Thu Aug 19 13:05:39 2021",
        mtime="Thu Aug 19 13:05:39 2021",
    )
    mock.set_job_data("R")
    with mock.activate():
        process = hq_env.start_server(args=["--autoalloc-interval", "100ms"])
        prepare_tasks(hq_env)
        add_queue(hq_env, name="foo", backlog=2, workers_per_alloc=1)
        def allocations_up():
            # Header row + 2 allocation rows == backlog fully submitted.
            table = hq_env.command(["alloc", "info", "1"], as_table=True)
            return len(table) == 3
        wait_until(allocations_up)
        hq_env.command(["server", "stop"])
        process.wait()
        hq_env.check_process_exited(process)
        wait_until(lambda: len(mock.deleted_jobs()) == 2)
        assert sorted(mock.deleted_jobs()) == ["1", "2"]
def test_remove_descriptor(hq_env: HqEnv):
    """Removing a queue deletes its row; remaining queues keep their ids."""
    mock = PbsMock(hq_env, qtime="Thu Aug 19 13:05:38 2021")
    with mock.activate():
        hq_env.start_server()
        add_queue(hq_env)
        add_queue(hq_env)
        add_queue(hq_env)
        result = remove_queue(hq_env, queue_id=2)
        assert "Allocation queue 2 successfully removed" in result
        table = hq_env.command(["alloc", "list"], as_table=True)
        table.check_columns_value(["ID"], 0, ["1"])
        table.check_columns_value(["ID"], 1, ["3"])
def test_pbs_remove_descriptor_cancel_allocations(hq_env: HqEnv):
    """Removing a queue qdel-s all of its active allocations."""
    mock = PbsMock(
        hq_env,
        jobs=["1", "2"],
        qtime="Thu Aug 19 13:05:38 2021",
        stime="Thu Aug 19 13:05:39 2021",
        mtime="Thu Aug 19 13:05:39 2021",
    )
    mock.set_job_data("R")
    with mock.activate():
        hq_env.start_server(args=["--autoalloc-interval", "100ms"])
        prepare_tasks(hq_env)
        add_queue(hq_env, backlog=2, workers_per_alloc=1)
        def allocations_up():
            # Header row + 2 allocation rows == backlog fully submitted.
            table = hq_env.command(["alloc", "info", "1"], as_table=True)
            return len(table) == 3
        wait_until(allocations_up)
        remove_queue(hq_env, 1)
        wait_until(lambda: len(hq_env.command(["alloc", "list"], as_table=True)) == 1)
        assert sorted(mock.deleted_jobs()) == ["1", "2"]
class PbsMock:
    """Mocks the PBS command-line tools (qsub, qstat, qdel) for tests.

    - qsub pops the next job id from a shared JSON file (`qsub_path`) and prints it.
    - qstat serves the job data last written by `set_job_data` (`qstat_path`).
    - qdel records each deleted job id as a file in `qdel_dir`, readable via
      `deleted_jobs()`.
    `data` holds baseline qstat fields merged into every `set_job_data` call.
    """
    def __init__(self, hq_env: HqEnv, jobs: List[str] = None, **data):
        if jobs is None:
            # Effectively unlimited pool of job ids "0".."999".
            jobs = list(str(i) for i in range(1000))
        self.hq_env = hq_env
        self.jobs = jobs
        self.qstat_path = join(self.hq_env.work_path, "pbs-qstat")
        self.qsub_path = join(self.hq_env.work_path, "pbs-qsub")
        self.qdel_dir = join(self.hq_env.work_path, "pbs-qdel")
        os.makedirs(self.qdel_dir)
        self.data = data
        with open(self.qsub_path, "w") as f:
            f.write(json.dumps(self.jobs))
        self.qsub_code = f"""
import json
with open("{self.qsub_path}") as f:
    jobs = json.loads(f.read())
if not jobs:
    raise Exception("No more jobs can be scheduled")
job = jobs.pop(0)
with open("{self.qsub_path}", "w") as f:
    f.write(json.dumps(jobs))
print(job)
"""
        self.qstat_code = f"""
import sys
import json
jobid = None
args = sys.argv[1:]
for (index, arg) in enumerate(args[:-1]):
    if arg == "-f":
        jobid = args[index + 1]
        break
assert jobid is not None
with open("{self.qstat_path}") as f:
    jobdata = json.loads(f.read())
data = {{
    "Jobs": {{
        jobid: jobdata
    }}
}}
print(json.dumps(data))
"""
        self.qdel_code = f"""
import sys
import json
import os
jobid = sys.argv[1]
with open(os.path.join("{self.qdel_dir}", jobid), "w") as f:
    f.write(jobid)
    f.flush()
"""
    @contextlib.contextmanager
    def activate(self):
        """Install all three mocked PBS binaries for the duration of the context."""
        with self.hq_env.mock.mock_program("qsub", self.qsub_code):
            with self.hq_env.mock.mock_program("qstat", self.qstat_code):
                with self.hq_env.mock.mock_program("qdel", self.qdel_code):
                    yield
    def set_job_data(
        self,
        status: str,
        qtime: str = None,
        stime: str = None,
        mtime: str = None,
        exit_code: int = None,
    ):
        """Write the qstat response for the mocked job: state plus optional timestamps/exit code."""
        jobdata = dict(self.data)
        jobdata.update(
            {
                "job_state": status,
            }
        )
        if qtime is not None:
            jobdata["qtime"] = qtime
        if stime is not None:
            jobdata["stime"] = stime
        if mtime is not None:
            jobdata["mtime"] = mtime
        if exit_code is not None:
            jobdata["Exit_status"] = exit_code
        with open(self.qstat_path, "w") as f:
            f.write(json.dumps(jobdata))
    def deleted_jobs(self) -> List[str]:
        """Return the ids of all jobs the mocked qdel was asked to delete."""
        return list(os.listdir(self.qdel_dir))
def add_queue(
    hq_env: HqEnv,
    manager="pbs",
    name: Optional[str] = "foo",
    backlog=1,
    workers_per_alloc=1,
    additional_args=None,
    time_limit=None,
) -> str:
    """Create an allocation queue via `hq alloc add` and return the CLI output.

    `additional_args` is a space-separated string forwarded verbatim to the
    manager after `--`; `name=None` omits the `--name` flag entirely.
    """
    cmd = ["alloc", "add", manager]
    if name is not None:
        cmd += ["--name", name]
    cmd += [
        "--backlog",
        str(backlog),
        "--workers-per-alloc",
        str(workers_per_alloc),
    ]
    if time_limit is not None:
        cmd += ["--time-limit", time_limit]
    if additional_args is not None:
        cmd.append("--")
        cmd += additional_args.split(" ")
    return hq_env.command(cmd)
def prepare_tasks(hq_env: HqEnv, count=1000):
    """Submit a filler array job so the auto-allocator sees pending tasks.

    Note: the array range `0-count` is inclusive, so `count + 1` tasks are created.
    """
    hq_env.command(["submit", f"--array=0-{count}", "sleep", "1"])
def remove_queue(hq_env: HqEnv, queue_id: int):
    """Remove the allocation queue `queue_id` and return the CLI output."""
    return hq_env.command(["alloc", "remove", str(queue_id)])
|
Kobzol/hyperqueue | tests/test_server.py | import json
import os
import signal
import socket
import subprocess
import pytest
from .conftest import HqEnv
from .utils import parse_table
def test_server_host(hq_env: HqEnv):
    """The server reports the host passed via --host in its startup table."""
    args = hq_env.server_args()
    args += ["--host", "abcd123"]
    p = subprocess.Popen(args, stdout=subprocess.PIPE)
    try:
        # The server keeps running; we only need the banner it prints at start.
        stdout, stderr = p.communicate(timeout=0.5)
    except subprocess.TimeoutExpired:
        p.kill()
        # stderr is None here since only stdout was piped.
        stdout, stderr = p.communicate()
    stdout = stdout.decode()
    table = parse_table(stdout)
    table.check_row_value("Host", "abcd123")
def test_version_mismatch(hq_env: HqEnv):
    """A client refuses to talk to a server recorded with a different version."""
    hq_env.start_server()
    hq_env.command("jobs", as_table=True)
    access_file = os.path.join(hq_env.server_dir, "hq-current", "access.json")
    with open(access_file) as f:
        data = json.load(f)
        version = data["version"]
        # Corrupt the recorded version to simulate a mismatch.
        data["version"] += ".1"
    # Make the file writable
    os.chmod(access_file, 0o600)
    with open(access_file, "w") as f:
        json.dump(data, f)
    with pytest.raises(
        Exception,
        match=f"Server was started with version {version}.1, but the current version is {version}",
    ):
        hq_env.command("jobs", as_table=True)
def test_server_info(hq_env: HqEnv):
    """`hq server info` reports the server dir, hostname and pid."""
    process = hq_env.start_server()
    table = hq_env.command(["server", "info"], as_table=True)
    table.check_row_value("Server directory", hq_env.server_dir)
    table.check_row_value("Host", socket.gethostname())
    table.check_row_value("Pid", str(process.pid))
    # The info table currently has exactly 7 rows.
    assert len(table) == 7
def test_server_stop(hq_env: HqEnv):
    """`hq server stop` shuts the server down with exit code 0."""
    process = hq_env.start_server()
    hq_env.command(["server", "stop"])
    process.wait()
    hq_env.check_process_exited(process, 0)
def test_delete_symlink_after_server_stop(hq_env: HqEnv):
    """Stopping the server removes the hq-current symlink but keeps the run dir."""
    process = hq_env.start_server()
    symlink_path = os.path.join(hq_env.server_dir, "hq-current")
    assert os.path.isdir(symlink_path)
    # Resolve the symlink now; the target must survive the shutdown.
    rundir_path = os.path.realpath(symlink_path)
    hq_env.command(["server", "stop"])
    process.wait()
    hq_env.check_process_exited(process, 0)
    assert not os.path.isdir(os.path.join(hq_env.server_dir, "hq-current"))
    assert os.path.isdir(rundir_path)
def test_delete_symlink_after_ctrl_c(hq_env: HqEnv):
    """SIGINT (Ctrl+C) also shuts down cleanly and removes the hq-current symlink."""
    process = hq_env.start_server()
    process.send_signal(signal.SIGINT)
    process.wait()
    hq_env.check_process_exited(process, 0)
    assert not os.path.isdir(os.path.join(hq_env.server_dir, "hq-current"))
|
ggiinnoo/awx | awx/main/models/inventory.py | <reponame>ggiinnoo/awx<gh_stars>0
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
# Python
import datetime
import time
import logging
import re
import copy
import os.path
from urllib.parse import urljoin
import yaml
import configparser
import tempfile
from io import StringIO
from distutils.version import LooseVersion as Version
# Django
from django.conf import settings
from django.db import models, connection
from django.utils.translation import ugettext_lazy as _
from django.db import transaction
from django.core.exceptions import ValidationError
from django.utils.timezone import now
from django.utils.encoding import iri_to_uri
from django.db.models import Q
# REST Framework
from rest_framework.exceptions import ParseError
# AWX
from awx.api.versioning import reverse
from awx.main.constants import CLOUD_PROVIDERS
from awx.main.consumers import emit_channel_notification
from awx.main.fields import (
ImplicitRoleField,
JSONBField,
SmartFilterField,
OrderedManyToManyField,
)
from awx.main.managers import HostManager
from awx.main.models.base import (
BaseModel,
CommonModelNameNotUnique,
VarsDictProperty,
CLOUD_INVENTORY_SOURCES,
prevent_search, accepts_json
)
from awx.main.models.events import InventoryUpdateEvent
from awx.main.models.unified_jobs import UnifiedJob, UnifiedJobTemplate
from awx.main.models.mixins import (
ResourceMixin,
TaskManagerInventoryUpdateMixin,
RelatedJobsMixin,
CustomVirtualEnvMixin,
)
from awx.main.models.notifications import (
NotificationTemplate,
JobNotificationMixin,
)
from awx.main.models.credential.injectors import _openstack_data
from awx.main.utils import _inventory_updates, region_sorting, get_licenser
from awx.main.utils.safe_yaml import sanitize_jinja
__all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate',
'CustomInventoryScript', 'SmartInventoryMembership']
logger = logging.getLogger('awx.main.models.inventory')
class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
    '''
    An inventory contains groups and hosts.

    kind='' is a regular inventory (hosts linked directly); kind='smart'
    derives its hosts from the `host_filter` expression instead.
    '''
    # M2M-style relations copied along when an inventory is duplicated.
    FIELDS_TO_PRESERVE_AT_COPY = ['hosts', 'groups', 'instance_groups']
    KIND_CHOICES = [
        ('', _('Hosts have a direct link to this inventory.')),
        ('smart', _('Hosts for inventory generated using the host_filter property.')),
    ]
    class Meta:
        app_label = 'main'
        verbose_name_plural = _('inventories')
        unique_together = [('name', 'organization')]
        ordering = ('name',)
    organization = models.ForeignKey(
        'Organization',
        related_name='inventories',
        help_text=_('Organization containing this inventory.'),
        on_delete=models.SET_NULL,
        null=True,
    )
    variables = accepts_json(models.TextField(
        blank=True,
        default='',
        help_text=_('Inventory variables in JSON or YAML format.'),
    ))
    # --- Computed counters kept in sync by update_computed_fields() ---
    has_active_failures = models.BooleanField(
        default=False,
        editable=False,
        help_text=_('This field is deprecated and will be removed in a future release. '
                    'Flag indicating whether any hosts in this inventory have failed.'),
    )
    total_hosts = models.PositiveIntegerField(
        default=0,
        editable=False,
        help_text=_('This field is deprecated and will be removed in a future release. '
                    'Total number of hosts in this inventory.'),
    )
    hosts_with_active_failures = models.PositiveIntegerField(
        default=0,
        editable=False,
        help_text=_('This field is deprecated and will be removed in a future release. '
                    'Number of hosts in this inventory with active failures.'),
    )
    total_groups = models.PositiveIntegerField(
        default=0,
        editable=False,
        help_text=_('This field is deprecated and will be removed in a future release. '
                    'Total number of groups in this inventory.'),
    )
    has_inventory_sources = models.BooleanField(
        default=False,
        editable=False,
        help_text=_('This field is deprecated and will be removed in a future release. '
                    'Flag indicating whether this inventory has any external inventory sources.'),
    )
    total_inventory_sources = models.PositiveIntegerField(
        default=0,
        editable=False,
        help_text=_('Total number of external inventory sources configured within this inventory.'),
    )
    inventory_sources_with_failures = models.PositiveIntegerField(
        default=0,
        editable=False,
        help_text=_('Number of external inventory sources in this inventory with failures.'),
    )
    kind = models.CharField(
        max_length=32,
        choices=KIND_CHOICES,
        blank=True,
        default='',
        help_text=_('Kind of inventory being represented.'),
    )
    # Only meaningful for kind='smart' inventories.
    host_filter = SmartFilterField(
        blank=True,
        null=True,
        default=None,
        help_text=_('Filter that will be applied to the hosts of this inventory.'),
    )
    instance_groups = OrderedManyToManyField(
        'InstanceGroup',
        blank=True,
        through='InventoryInstanceGroupMembership',
    )
    # --- RBAC role hierarchy (admin > update/adhoc > use > read) ---
    admin_role = ImplicitRoleField(
        parent_role='organization.inventory_admin_role',
    )
    update_role = ImplicitRoleField(
        parent_role='admin_role',
    )
    adhoc_role = ImplicitRoleField(
        parent_role='admin_role',
    )
    use_role = ImplicitRoleField(
        parent_role='adhoc_role',
    )
    read_role = ImplicitRoleField(parent_role=[
        'organization.auditor_role',
        'update_role',
        'use_role',
        'admin_role',
    ])
    insights_credential = models.ForeignKey(
        'Credential',
        related_name='insights_inventories',
        help_text=_('Credentials to be used by hosts belonging to this inventory when accessing Red Hat Insights API.'),
        on_delete=models.SET_NULL,
        blank=True,
        null=True,
        default=None,
    )
    pending_deletion = models.BooleanField(
        default=False,
        editable=False,
        help_text=_('Flag indicating the inventory is being deleted.'),
    )
    def get_absolute_url(self, request=None):
        """Return the API URL of this inventory's detail view."""
        return reverse('api:inventory_detail', kwargs={'pk': self.pk}, request=request)
    # Parsed (dict) view of the `variables` JSON/YAML text field.
    variables_dict = VarsDictProperty('variables')
def get_group_hosts_map(self):
'''
Return dictionary mapping group_id to set of child host_id's.
'''
# FIXME: Cache this mapping?
group_hosts_kw = dict(group__inventory_id=self.pk, host__inventory_id=self.pk)
group_hosts_qs = Group.hosts.through.objects.filter(**group_hosts_kw)
group_hosts_qs = group_hosts_qs.values_list('group_id', 'host_id')
group_hosts_map = {}
for group_id, host_id in group_hosts_qs:
group_host_ids = group_hosts_map.setdefault(group_id, set())
group_host_ids.add(host_id)
return group_hosts_map
    def get_group_parents_map(self):
        '''
        Return dictionary mapping group_id to set of parent group_id's.

        Only considers parent/child edges where both groups belong to this
        inventory.
        '''
        # FIXME: Cache this mapping?
        group_parents_kw = dict(from_group__inventory_id=self.pk, to_group__inventory_id=self.pk)
        group_parents_qs = Group.parents.through.objects.filter(**group_parents_kw)
        group_parents_qs = group_parents_qs.values_list('from_group_id', 'to_group_id')
        group_parents_map = {}
        for from_group_id, to_group_id in group_parents_qs:
            group_parents = group_parents_map.setdefault(from_group_id, set())
            group_parents.add(to_group_id)
        return group_parents_map
    def get_group_children_map(self):
        '''
        Return dictionary mapping group_id to set of child group_id's.

        Inverse of get_group_parents_map(): same edge query, keyed by the
        parent (to_group) instead of the child.
        '''
        # FIXME: Cache this mapping?
        group_parents_kw = dict(from_group__inventory_id=self.pk, to_group__inventory_id=self.pk)
        group_parents_qs = Group.parents.through.objects.filter(**group_parents_kw)
        group_parents_qs = group_parents_qs.values_list('from_group_id', 'to_group_id')
        group_children_map = {}
        for from_group_id, to_group_id in group_parents_qs:
            group_children = group_children_map.setdefault(to_group_id, set())
            group_children.add(from_group_id)
        return group_children_map
@staticmethod
def parse_slice_params(slice_str):
m = re.match(r"slice(?P<number>\d+)of(?P<step>\d+)", slice_str)
if not m:
raise ParseError(_('Could not parse subset as slice specification.'))
number = int(m.group('number'))
step = int(m.group('step'))
if number > step:
raise ParseError(_('Slice number must be less than total number of slices.'))
elif number < 1:
raise ParseError(_('Slice number must be 1 or higher.'))
return (number, step)
    def get_script_data(self, hostvars=False, towervars=False, show_all=False, slice_number=1, slice_count=1):
        """Build the Ansible inventory-script structure for this inventory.

        hostvars:      also include per-host variables under data['_meta']['hostvars'].
        towervars:     add remote_tower_enabled/remote_tower_id to each host's vars
                       (only meaningful together with hostvars).
        show_all:      include disabled hosts (by default only enabled ones).
        slice_number/
        slice_count:   select the (slice_number)-th of slice_count host shards by
                       striding the name-ordered host list.
        """
        hosts_kw = dict()
        if not show_all:
            hosts_kw['enabled'] = True
        fetch_fields = ['name', 'id', 'variables', 'inventory_id']
        if towervars:
            fetch_fields.append('enabled')
        hosts = self.hosts.filter(**hosts_kw).order_by('name').only(*fetch_fields)
        if slice_count > 1 and slice_number > 0:
            offset = slice_number - 1
            # Stride through the ordered host list to pick this shard.
            hosts = hosts[offset::slice_count]
        data = dict()
        all_group = data.setdefault('all', dict())
        all_hostnames = set(host.name for host in hosts)
        if self.variables_dict:
            all_group['vars'] = self.variables_dict
        if self.kind == 'smart':
            # Smart inventories have no groups: everything lives under 'all'.
            all_group['hosts'] = [host.name for host in hosts]
        else:
            # Keep track of hosts that are members of a group
            grouped_hosts = set([])
            # Build in-memory mapping of groups and their hosts.
            group_hosts_qs = Group.hosts.through.objects.filter(
                group__inventory_id=self.id,
                host__inventory_id=self.id
            ).values_list('group_id', 'host_id', 'host__name')
            group_hosts_map = {}
            for group_id, host_id, host_name in group_hosts_qs:
                if host_name not in all_hostnames:
                    continue  # host might not be in current shard
                group_hostnames = group_hosts_map.setdefault(group_id, [])
                group_hostnames.append(host_name)
                grouped_hosts.add(host_name)
            # Build in-memory mapping of groups and their children.
            group_parents_qs = Group.parents.through.objects.filter(
                from_group__inventory_id=self.id,
                to_group__inventory_id=self.id,
            ).values_list('from_group_id', 'from_group__name', 'to_group_id')
            group_children_map = {}
            for from_group_id, from_group_name, to_group_id in group_parents_qs:
                group_children = group_children_map.setdefault(to_group_id, [])
                group_children.append(from_group_name)
            # Add ungrouped hosts to all group
            all_group['hosts'] = [host.name for host in hosts if host.name not in grouped_hosts]
            # Now use in-memory maps to build up group info.
            all_group_names = []
            for group in self.groups.only('name', 'id', 'variables', 'inventory_id'):
                group_info = dict()
                if group.id in group_hosts_map:
                    group_info['hosts'] = group_hosts_map[group.id]
                if group.id in group_children_map:
                    group_info['children'] = group_children_map[group.id]
                group_vars = group.variables_dict
                if group_vars:
                    group_info['vars'] = group_vars
                if group_info:
                    data[group.name] = group_info
                all_group_names.append(group.name)
            # add all groups as children of all group, includes empty groups
            if all_group_names:
                all_group['children'] = all_group_names
        if hostvars:
            data.setdefault('_meta', dict())
            data['_meta'].setdefault('hostvars', dict())
            for host in hosts:
                data['_meta']['hostvars'][host.name] = host.variables_dict
                if towervars:
                    tower_dict = dict(remote_tower_enabled=str(host.enabled).lower(),
                                      remote_tower_id=host.id)
                    data['_meta']['hostvars'][host.name].update(tower_dict)
        return data
def update_computed_fields(self):
'''
Update model fields that are computed from database relationships.
'''
logger.debug("Going to update inventory computed fields, pk={0}".format(self.pk))
start_time = time.time()
active_hosts = self.hosts
failed_hosts = active_hosts.filter(last_job_host_summary__failed=True)
active_groups = self.groups
if self.kind == 'smart':
active_groups = active_groups.none()
if self.kind == 'smart':
active_inventory_sources = self.inventory_sources.none()
else:
active_inventory_sources = self.inventory_sources.filter(source__in=CLOUD_INVENTORY_SOURCES)
failed_inventory_sources = active_inventory_sources.filter(last_job_failed=True)
computed_fields = {
'has_active_failures': bool(failed_hosts.count()),
'total_hosts': active_hosts.count(),
'hosts_with_active_failures': failed_hosts.count(),
'total_groups': active_groups.count(),
'has_inventory_sources': bool(active_inventory_sources.count()),
'total_inventory_sources': active_inventory_sources.count(),
'inventory_sources_with_failures': failed_inventory_sources.count(),
}
# CentOS python seems to have issues clobbering the inventory on poor timing during certain operations
iobj = Inventory.objects.get(id=self.id)
for field, value in list(computed_fields.items()):
if getattr(iobj, field) != value:
setattr(iobj, field, value)
# update in-memory object
setattr(self, field, value)
else:
computed_fields.pop(field)
if computed_fields:
iobj.save(update_fields=computed_fields.keys())
logger.debug("Finished updating inventory computed fields, pk={0}, in "
"{1:.3f} seconds".format(self.pk, time.time() - start_time))
def websocket_emit_status(self, status):
connection.on_commit(lambda: emit_channel_notification(
'inventories-status_changed',
{'group_name': 'inventories', 'inventory_id': self.id, 'status': status}
))
@property
def root_groups(self):
group_pks = self.groups.values_list('pk', flat=True)
return self.groups.exclude(parents__pk__in=group_pks).distinct()
def clean_insights_credential(self):
if self.kind == 'smart' and self.insights_credential:
raise ValidationError(_("Assignment not allowed for Smart Inventory"))
if self.insights_credential and self.insights_credential.credential_type.kind != 'insights':
raise ValidationError(_("Credential kind must be 'insights'."))
return self.insights_credential
    @transaction.atomic
    def schedule_deletion(self, user_id=None):
        """
        Mark this inventory as pending deletion and queue the actual delete.

        Raises RuntimeError if a deletion is already pending.  Detaches job
        templates, records an activity-stream delete entry, notifies websocket
        listeners, then dispatches the async ``delete_inventory`` task.
        """
        # Local imports avoid circular imports at module load time.
        from awx.main.tasks import delete_inventory
        from awx.main.signals import activity_stream_delete
        if self.pending_deletion is True:
            raise RuntimeError("Inventory is already pending deletion.")
        self.pending_deletion = True
        self.save(update_fields=['pending_deletion'])
        # Detach job templates before the async delete removes the inventory.
        self.jobtemplates.clear()
        activity_stream_delete(Inventory, self, inventory_delete_flag=True)
        self.websocket_emit_status('pending_deletion')
        delete_inventory.delay(self.pk, user_id)
def _update_host_smart_inventory_memeberships(self):
if self.kind == 'smart' and settings.AWX_REBUILD_SMART_MEMBERSHIP:
def on_commit():
from awx.main.tasks import update_host_smart_inventory_memberships
update_host_smart_inventory_memberships.delay()
connection.on_commit(on_commit)
    def save(self, *args, **kwargs):
        """Save the inventory, triggering smart-membership rebuild and, for smart
        inventories whose host_filter changed, a computed-fields refresh."""
        self._update_host_smart_inventory_memeberships()
        super(Inventory, self).save(*args, **kwargs)
        # When update_fields is absent this defaults to ['host_filter'], so a full
        # save of a smart inventory also refreshes counts (skipped on sqlite).
        if (self.kind == 'smart' and 'host_filter' in kwargs.get('update_fields', ['host_filter']) and
                connection.vendor != 'sqlite'):
            # Minimal update of host_count for smart inventory host filter changes
            self.update_computed_fields()
    def delete(self, *args, **kwargs):
        """Delete the inventory, first queueing a smart-membership rebuild (post-commit)."""
        self._update_host_smart_inventory_memeberships()
        super(Inventory, self).delete(*args, **kwargs)
'''
RelatedJobsMixin
'''
def _get_related_jobs(self):
return UnifiedJob.objects.non_polymorphic().filter(
Q(job__inventory=self) |
Q(inventoryupdate__inventory=self) |
Q(adhoccommand__inventory=self)
)
class SmartInventoryMembership(BaseModel):
    '''
    A lookup table for Host membership in Smart Inventory
    '''
    class Meta:
        app_label = 'main'
        # Each host appears at most once per smart inventory.
        unique_together = (('host', 'inventory'),)
    # related_name='+' disables reverse accessors; the Host.smart_inventories
    # M2M (through this model) provides the traversal instead.
    inventory = models.ForeignKey('Inventory', related_name='+', on_delete=models.CASCADE)
    host = models.ForeignKey('Host', related_name='+', on_delete=models.CASCADE)
class Host(CommonModelNameNotUnique, RelatedJobsMixin):
    '''
    A managed node
    '''
    # Fields carried over when a host is copied via the copy API.
    FIELDS_TO_PRESERVE_AT_COPY = [
        'name', 'description', 'groups', 'inventory', 'enabled', 'instance_id', 'variables'
    ]
    class Meta:
        app_label = 'main'
        unique_together = (("name", "inventory"),) # FIXME: Add ('instance_id', 'inventory') after migration.
        ordering = ('name',)
    inventory = models.ForeignKey(
        'Inventory',
        related_name='hosts',
        on_delete=models.CASCADE,
    )
    # Smart inventories this host currently matches (maintained via the
    # SmartInventoryMembership through table).
    smart_inventories = models.ManyToManyField(
        'Inventory',
        related_name='+',
        through='SmartInventoryMembership',
    )
    enabled = models.BooleanField(
        default=True,
        help_text=_('Is this host online and available for running jobs?'),
    )
    instance_id = models.CharField(
        max_length=1024,
        blank=True,
        default='',
        help_text=_('The value used by the remote inventory source to uniquely identify the host'),
    )
    variables = accepts_json(models.TextField(
        blank=True,
        default='',
        help_text=_('Host variables in JSON or YAML format.'),
    ))
    last_job = models.ForeignKey(
        'Job',
        related_name='hosts_as_last_job+',
        null=True,
        default=None,
        editable=False,
        on_delete=models.SET_NULL,
    )
    last_job_host_summary = models.ForeignKey(
        'JobHostSummary',
        related_name='hosts_as_last_job_summary+',
        blank=True,
        null=True,
        default=None,
        editable=False,
        on_delete=models.SET_NULL,
    )
    inventory_sources = models.ManyToManyField(
        'InventorySource',
        related_name='hosts',
        editable=False,
        help_text=_('Inventory source(s) that created or modified this host.'),
    )
    ansible_facts = JSONBField(
        blank=True,
        default=dict,
        help_text=_('Arbitrary JSON structure of most recent ansible_facts, per-host.'),
    )
    ansible_facts_modified = models.DateTimeField(
        default=None,
        editable=False,
        null=True,
        help_text=_('The date and time ansible_facts was last modified.'),
    )
    insights_system_id = models.TextField(
        blank=True,
        default=None,
        null=True,
        db_index=True,
        help_text=_('Red Hat Insights host unique identifier.'),
    )
    objects = HostManager()
    def get_absolute_url(self, request=None):
        """Return the API detail URL for this host."""
        return reverse('api:host_detail', kwargs={'pk': self.pk}, request=request)
    # Parsed (dict) view of the ``variables`` text field.
    variables_dict = VarsDictProperty('variables')
    @property
    def all_groups(self):
        '''
        Return all groups of which this host is a member, avoiding infinite
        recursion in the case of cyclical group relations.
        '''
        group_parents_map = self.inventory.get_group_parents_map()
        # Start from direct memberships, then walk parents breadth-first.
        group_pks = set(self.groups.values_list('pk', flat=True))
        child_pks_to_check = set()
        child_pks_to_check.update(group_pks)
        child_pks_checked = set()
        while child_pks_to_check:
            for child_pk in list(child_pks_to_check):
                p_ids = group_parents_map.get(child_pk, set())
                group_pks.update(p_ids)
                child_pks_to_check.remove(child_pk)
                child_pks_checked.add(child_pk)
                # Only enqueue parents not yet visited (cycle protection).
                child_pks_to_check.update(p_ids - child_pks_checked)
        return Group.objects.filter(pk__in=group_pks).distinct()
    # Use .job_host_summaries.all() to get jobs affecting this host.
    # Use .job_events.all() to get events affecting this host.
    '''
    We don't use timestamp, but we may in the future.
    '''
    def update_ansible_facts(self, module, facts, timestamp=None):
        """Merge or replace collected facts for this host and save.

        Facts gathered by the 'ansible' module update the top-level dict;
        any other module's facts are stored under its own key.
        """
        if module == "ansible":
            self.ansible_facts.update(facts)
        else:
            self.ansible_facts[module] = facts
        self.save()
    def get_effective_host_name(self):
        '''
        Return the name of the host that will be used in actual ansible
        command run.
        '''
        host_name = self.name
        # ansible_host takes precedence over the legacy ansible_ssh_host.
        if 'ansible_ssh_host' in self.variables_dict:
            host_name = self.variables_dict['ansible_ssh_host']
        if 'ansible_host' in self.variables_dict:
            host_name = self.variables_dict['ansible_host']
        return host_name
    def _update_host_smart_inventory_memeberships(self):
        # NOTE: name preserves a historical misspelling ("memeberships").
        # Queues a post-commit rebuild of smart inventory memberships.
        if settings.AWX_REBUILD_SMART_MEMBERSHIP:
            def on_commit():
                from awx.main.tasks import update_host_smart_inventory_memberships
                update_host_smart_inventory_memberships.delay()
            connection.on_commit(on_commit)
    def clean_name(self):
        """Reject host names containing Jinja template syntax."""
        try:
            sanitize_jinja(self.name)
        except ValueError as e:
            raise ValidationError(str(e) + ": {}".format(self.name))
        return self.name
    def save(self, *args, **kwargs):
        """Save the host, queueing a smart-membership rebuild post-commit."""
        self._update_host_smart_inventory_memeberships()
        super(Host, self).save(*args, **kwargs)
    def delete(self, *args, **kwargs):
        """Delete the host, queueing a smart-membership rebuild post-commit."""
        self._update_host_smart_inventory_memeberships()
        super(Host, self).delete(*args, **kwargs)
    '''
    RelatedJobsMixin
    '''
    def _get_related_jobs(self):
        # A host's related jobs are simply its inventory's related jobs.
        return self.inventory._get_related_jobs()
class Group(CommonModelNameNotUnique, RelatedJobsMixin):
    '''
    A group containing managed hosts. A group or host may belong to multiple
    groups.
    '''
    # Fields carried over when a group is copied via the copy API.
    FIELDS_TO_PRESERVE_AT_COPY = [
        'name', 'description', 'inventory', 'children', 'parents', 'hosts', 'variables'
    ]
    class Meta:
        app_label = 'main'
        unique_together = (("name", "inventory"),)
        ordering = ('name',)
    inventory = models.ForeignKey(
        'Inventory',
        related_name='groups',
        on_delete=models.CASCADE,
    )
    # Can also be thought of as: parents == member_of, children == members
    parents = models.ManyToManyField(
        'self',
        symmetrical=False,
        related_name='children',
        blank=True,
    )
    variables = accepts_json(models.TextField(
        blank=True,
        default='',
        help_text=_('Group variables in JSON or YAML format.'),
    ))
    hosts = models.ManyToManyField(
        'Host',
        related_name='groups',
        blank=True,
        help_text=_('Hosts associated directly with this group.'),
    )
    inventory_sources = models.ManyToManyField(
        'InventorySource',
        related_name='groups',
        editable=False,
        help_text=_('Inventory source(s) that created or modified this group.'),
    )
    def get_absolute_url(self, request=None):
        """Return the API detail URL for this group."""
        return reverse('api:group_detail', kwargs={'pk': self.pk}, request=request)
    @transaction.atomic
    def delete_recursive(self):
        """Delete this group plus any child groups/hosts that would be orphaned.

        Builds in-memory maps of all group-host and group-parent pairings for
        the inventory, then walks the subtree: a child group is removed only
        when this deletion leaves it with no remaining parents, and a host is
        removed only when it would no longer belong to any group.
        """
        from awx.main.utils import ignore_inventory_computed_fields
        from awx.main.tasks import update_inventory_computed_fields
        from awx.main.signals import disable_activity_stream, activity_stream_delete
        def mark_actual():
            all_group_hosts = Group.hosts.through.objects.select_related("host", "group").filter(group__inventory=self.inventory)
            group_hosts = {'groups': {}, 'hosts': {}}
            all_group_parents = Group.parents.through.objects.select_related("from_group", "to_group").filter(from_group__inventory=self.inventory)
            group_children = {}
            group_parents = {}
            marked_hosts = []
            marked_groups = [self.id]
            # Index host memberships in both directions (group -> hosts, host -> groups).
            for pairing in all_group_hosts:
                if pairing.group_id not in group_hosts['groups']:
                    group_hosts['groups'][pairing.group_id] = []
                if pairing.host_id not in group_hosts['hosts']:
                    group_hosts['hosts'][pairing.host_id] = []
                group_hosts['groups'][pairing.group_id].append(pairing.host_id)
                group_hosts['hosts'][pairing.host_id].append(pairing.group_id)
            # Index the group hierarchy in both directions (parent -> children, child -> parents).
            for pairing in all_group_parents:
                if pairing.to_group_id not in group_children:
                    group_children[pairing.to_group_id] = []
                if pairing.from_group_id not in group_parents:
                    group_parents[pairing.from_group_id] = []
                group_children[pairing.to_group_id].append(pairing.from_group_id)
                group_parents[pairing.from_group_id].append(pairing.to_group_id)
            # Worklist of (parent, child) edges to sever, starting from this group.
            linked_children = [(self.id, g) for g in group_children[self.id]] if self.id in group_children else []
            # Detach this group's direct hosts; mark hosts with no other group.
            if self.id in group_hosts['groups']:
                for host in copy.copy(group_hosts['groups'][self.id]):
                    group_hosts['hosts'][host].remove(self.id)
                    group_hosts['groups'][self.id].remove(host)
                    if len(group_hosts['hosts'][host]) < 1:
                        marked_hosts.append(host)
            for subgroup in linked_children:
                parent, group = subgroup
                group_parents[group].remove(parent)
                group_children[parent].remove(group)
                # Still reachable via another parent; keep it.
                if len(group_parents[group]) > 0:
                    continue
                for host in copy.copy(group_hosts['groups'].get(group, [])):
                    group_hosts['hosts'][host].remove(group)
                    group_hosts['groups'][group].remove(host)
                    if len(group_hosts['hosts'][host]) < 1:
                        marked_hosts.append(host)
                # Extend the worklist with this group's own children.
                if group in group_children:
                    for direct_child in group_children[group]:
                        linked_children.append((group, direct_child))
                marked_groups.append(group)
            Group.objects.filter(id__in=marked_groups).delete()
            Host.objects.filter(id__in=marked_hosts).delete()
            update_inventory_computed_fields.delay(self.inventory.id)
        with ignore_inventory_computed_fields():
            with disable_activity_stream():
                mark_actual()
            activity_stream_delete(None, self)
    # Parsed (dict) view of the ``variables`` text field.
    variables_dict = VarsDictProperty('variables')
    def get_all_parents(self, except_pks=None):
        '''
        Return all parents of this group recursively. The group itself will
        be excluded unless there is a cycle leading back to it.
        '''
        group_parents_map = self.inventory.get_group_parents_map()
        child_pks_to_check = set([self.pk])
        child_pks_checked = set()
        parent_pks = set()
        # Breadth-first walk up the parent map; visited-set prevents cycles.
        while child_pks_to_check:
            for child_pk in list(child_pks_to_check):
                p_ids = group_parents_map.get(child_pk, set())
                parent_pks.update(p_ids)
                child_pks_to_check.remove(child_pk)
                child_pks_checked.add(child_pk)
                child_pks_to_check.update(p_ids - child_pks_checked)
        return Group.objects.filter(pk__in=parent_pks).distinct()
    @property
    def all_parents(self):
        return self.get_all_parents()
    def get_all_children(self, except_pks=None):
        '''
        Return all children of this group recursively. The group itself will
        be excluded unless there is a cycle leading back to it.
        '''
        group_children_map = self.inventory.get_group_children_map()
        parent_pks_to_check = set([self.pk])
        parent_pks_checked = set()
        child_pks = set()
        # Breadth-first walk down the children map; visited-set prevents cycles.
        while parent_pks_to_check:
            for parent_pk in list(parent_pks_to_check):
                c_ids = group_children_map.get(parent_pk, set())
                child_pks.update(c_ids)
                parent_pks_to_check.remove(parent_pk)
                parent_pks_checked.add(parent_pk)
                parent_pks_to_check.update(c_ids - parent_pks_checked)
        return Group.objects.filter(pk__in=child_pks).distinct()
    @property
    def all_children(self):
        return self.get_all_children()
    def get_all_hosts(self, except_group_pks=None):
        '''
        Return all hosts associated with this group or any of its children.
        '''
        group_children_map = self.inventory.get_group_children_map()
        group_hosts_map = self.inventory.get_group_hosts_map()
        parent_pks_to_check = set([self.pk])
        parent_pks_checked = set()
        host_pks = set()
        # Walk the subtree, collecting each visited group's direct hosts.
        while parent_pks_to_check:
            for parent_pk in list(parent_pks_to_check):
                c_ids = group_children_map.get(parent_pk, set())
                parent_pks_to_check.remove(parent_pk)
                parent_pks_checked.add(parent_pk)
                parent_pks_to_check.update(c_ids - parent_pks_checked)
                h_ids = group_hosts_map.get(parent_pk, set())
                host_pks.update(h_ids)
        return Host.objects.filter(pk__in=host_pks).distinct()
    @property
    def all_hosts(self):
        return self.get_all_hosts()
    @property
    def job_host_summaries(self):
        # Summaries for jobs affecting any host in this group's subtree.
        from awx.main.models.jobs import JobHostSummary
        return JobHostSummary.objects.filter(host__in=self.all_hosts)
    @property
    def job_events(self):
        # Events for jobs affecting any host in this group's subtree.
        from awx.main.models.jobs import JobEvent
        return JobEvent.objects.filter(host__in=self.all_hosts)
    @property
    def ad_hoc_commands(self):
        # Ad hoc commands run against any host in this group's subtree.
        from awx.main.models.ad_hoc_commands import AdHocCommand
        return AdHocCommand.objects.filter(hosts__in=self.all_hosts)
    '''
    RelatedJobsMixin
    '''
    def _get_related_jobs(self):
        return UnifiedJob.objects.non_polymorphic().filter(
            Q(job__inventory=self.inventory) |
            Q(inventoryupdate__inventory_source__groups=self)
        )
class InventorySourceOptions(BaseModel):
    '''
    Common fields for InventorySource and InventoryUpdate.
    '''
    # Registry of per-source credential injectors (populated elsewhere).
    injectors = dict()
    SOURCE_CHOICES = [
        ('file', _('File, Directory or Script')),
        ('scm', _('Sourced from a Project')),
        ('ec2', _('Amazon EC2')),
        ('gce', _('Google Compute Engine')),
        ('azure_rm', _('Microsoft Azure Resource Manager')),
        ('vmware', _('VMware vCenter')),
        ('satellite6', _('Red Hat Satellite 6')),
        ('cloudforms', _('Red Hat CloudForms')),
        ('openstack', _('OpenStack')),
        ('rhv', _('Red Hat Virtualization')),
        ('tower', _('Ansible Tower')),
        ('custom', _('Custom Script')),
    ]
    # From the options of the Django management base command
    INVENTORY_UPDATE_VERBOSITY_CHOICES = [
        (0, '0 (WARNING)'),
        (1, '1 (INFO)'),
        (2, '2 (DEBUG)'),
    ]
    # Use tools/scripts/get_ec2_filter_names.py to build this list.
    INSTANCE_FILTER_NAMES = [
        "architecture",
        "association.allocation-id",
        "association.association-id",
        "association.ip-owner-id",
        "association.public-ip",
        "availability-zone",
        "block-device-mapping.attach-time",
        "block-device-mapping.delete-on-termination",
        "block-device-mapping.device-name",
        "block-device-mapping.status",
        "block-device-mapping.volume-id",
        "client-token",
        "dns-name",
        "group-id",
        "group-name",
        "hypervisor",
        "iam-instance-profile.arn",
        "image-id",
        "instance-id",
        "instance-lifecycle",
        "instance-state-code",
        "instance-state-name",
        "instance-type",
        "instance.group-id",
        "instance.group-name",
        "ip-address",
        "kernel-id",
        "key-name",
        "launch-index",
        "launch-time",
        "monitoring-state",
        "network-interface-private-dns-name",
        "network-interface.addresses.association.ip-owner-id",
        "network-interface.addresses.association.public-ip",
        "network-interface.addresses.primary",
        "network-interface.addresses.private-ip-address",
        "network-interface.attachment.attach-time",
        "network-interface.attachment.attachment-id",
        "network-interface.attachment.delete-on-termination",
        "network-interface.attachment.device-index",
        "network-interface.attachment.instance-id",
        "network-interface.attachment.instance-owner-id",
        "network-interface.attachment.status",
        "network-interface.availability-zone",
        "network-interface.description",
        "network-interface.group-id",
        "network-interface.group-name",
        "network-interface.mac-address",
        "network-interface.network-interface.id",
        "network-interface.owner-id",
        "network-interface.requester-id",
        "network-interface.requester-managed",
        "network-interface.source-destination-check",
        "network-interface.status",
        "network-interface.subnet-id",
        "network-interface.vpc-id",
        "owner-id",
        "placement-group-name",
        "platform",
        "private-dns-name",
        "private-ip-address",
        "product-code",
        "product-code.type",
        "ramdisk-id",
        "reason",
        "requester-id",
        "reservation-id",
        "root-device-name",
        "root-device-type",
        "source-dest-check",
        "spot-instance-request-id",
        "state-reason-code",
        "state-reason-message",
        "subnet-id",
        "tag-key",
        "tag-value",
        "tenancy",
        "virtualization-type",
        "vpc-id"
    ]
    class Meta:
        abstract = True
    source = models.CharField(
        max_length=32,
        choices=SOURCE_CHOICES,
        blank=False,
        default=None,
    )
    source_path = models.CharField(
        max_length=1024,
        blank=True,
        default='',
    )
    source_script = models.ForeignKey(
        'CustomInventoryScript',
        null=True,
        default=None,
        blank=True,
        on_delete=models.SET_NULL,
    )
    source_vars = models.TextField(
        blank=True,
        default='',
        help_text=_('Inventory source variables in YAML or JSON format.'),
    )
    source_regions = models.CharField(
        max_length=1024,
        blank=True,
        default='',
    )
    instance_filters = models.CharField(
        max_length=1024,
        blank=True,
        default='',
        help_text=_('Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.'),
    )
    group_by = models.CharField(
        max_length=1024,
        blank=True,
        default='',
        help_text=_('Limit groups automatically created from inventory source (EC2 only).'),
    )
    overwrite = models.BooleanField(
        default=False,
        help_text=_('Overwrite local groups and hosts from remote inventory source.'),
    )
    overwrite_vars = models.BooleanField(
        default=False,
        help_text=_('Overwrite local variables from remote inventory source.'),
    )
    timeout = models.IntegerField(
        blank=True,
        default=0,
        help_text=_("The amount of time (in seconds) to run before the task is canceled."),
    )
    verbosity = models.PositiveIntegerField(
        choices=INVENTORY_UPDATE_VERBOSITY_CHOICES,
        blank=True,
        default=1,
    )
    @classmethod
    def get_ec2_region_choices(cls):
        """Return (value, label) choices for EC2 regions, prefixed with 'all'."""
        ec2_region_names = getattr(settings, 'EC2_REGION_NAMES', {})
        ec2_name_replacements = {
            'us': 'US',
            'ap': 'Asia Pacific',
            'eu': 'Europe',
            'sa': 'South America',
        }
        import boto.ec2
        regions = [('all', 'All')]
        for region in boto.ec2.regions():
            label = ec2_region_names.get(region.name, '')
            if not label:
                # Build a human-readable label from the region name parts.
                label_parts = []
                for part in region.name.split('-'):
                    part = ec2_name_replacements.get(part.lower(), part.title())
                    label_parts.append(part)
                label = ' '.join(label_parts)
            regions.append((region.name, label))
        return sorted(regions, key=region_sorting)
    @classmethod
    def get_ec2_group_by_choices(cls):
        """Return (value, label) choices for the EC2 group_by option."""
        return [
            ('ami_id', _('Image ID')),
            ('availability_zone', _('Availability Zone')),
            ('aws_account', _('Account')),
            ('instance_id', _('Instance ID')),
            ('instance_state', _('Instance State')),
            ('platform', _('Platform')),
            ('instance_type', _('Instance Type')),
            ('key_pair', _('Key Name')),
            ('region', _('Region')),
            ('security_group', _('Security Group')),
            ('tag_keys', _('Tags')),
            ('tag_none', _('Tag None')),
            ('vpc_id', _('VPC ID')),
        ]
    # NOTE: the classmethods below previously named their first parameter
    # 'self'; renamed to 'cls' per convention (no behavior change -- the
    # argument is never passed explicitly by callers).
    @classmethod
    def get_gce_region_choices(cls):
        """Return a complete list of regions in GCE, as a list of
        two-tuples.
        """
        # It's not possible to get a list of regions from GCE without
        # authenticating first. Therefore, use a list from settings.
        regions = list(getattr(settings, 'GCE_REGION_CHOICES', []))
        regions.insert(0, ('all', 'All'))
        return sorted(regions, key=region_sorting)
    @classmethod
    def get_azure_rm_region_choices(cls):
        """Return a complete list of regions in Microsoft Azure, as a list of
        two-tuples.
        """
        # It's not possible to get a list of regions from Azure without
        # authenticating first (someone reading these might think there's
        # a pattern here!). Therefore, you guessed it, use a list from
        # settings.
        regions = list(getattr(settings, 'AZURE_RM_REGION_CHOICES', []))
        regions.insert(0, ('all', 'All'))
        return sorted(regions, key=region_sorting)
    @classmethod
    def get_vmware_region_choices(cls):
        """Return a complete list of regions in VMware, as a list of two-tuples
        (but note that VMware doesn't actually have regions!).
        """
        return [('all', 'All')]
    @classmethod
    def get_openstack_region_choices(cls):
        """I don't think openstack has regions"""
        return [('all', 'All')]
    @classmethod
    def get_satellite6_region_choices(cls):
        """Red Hat Satellite 6 region choices (not implemented)"""
        return [('all', 'All')]
    @classmethod
    def get_cloudforms_region_choices(cls):
        """Red Hat CloudForms region choices (not implemented)"""
        return [('all', 'All')]
    @classmethod
    def get_rhv_region_choices(cls):
        """No region support."""
        return [('all', 'All')]
    @classmethod
    def get_tower_region_choices(cls):
        """No region support."""
        return [('all', 'All')]
    @staticmethod
    def cloud_credential_validation(source, cred):
        """Return an error message when the credential does not suit the
        source type, or None when the combination is valid."""
        if not source:
            return None
        if cred and source not in ('custom', 'scm'):
            # If a credential was provided, it's important that it matches
            # the actual inventory source being used (Amazon requires Amazon
            # credentials; Rackspace requires Rackspace credentials; etc...)
            if source.replace('ec2', 'aws') != cred.kind:
                return _('Cloud-based inventory sources (such as %s) require '
                         'credentials for the matching cloud service.') % source
        # Allow an EC2 source to omit the credential. If Tower is running on
        # an EC2 instance with an IAM Role assigned, boto will use credentials
        # from the instance metadata instead of those explicitly provided.
        elif source in CLOUD_PROVIDERS and source != 'ec2':
            return _('Credential is required for a cloud source.')
        elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
            return _(
                'Credentials of type machine, source control, insights and vault are '
                'disallowed for custom inventory sources.'
            )
        elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
            return _(
                'Credentials of type insights and vault are '
                'disallowed for scm inventory sources.'
            )
        return None
    def get_cloud_credential(self):
        """Return the credential which is directly tied to the inventory source type.
        """
        credential = None
        for cred in self.credentials.all():
            if self.source in CLOUD_PROVIDERS:
                # Cloud sources match on credential kind ('ec2' maps to 'aws').
                if cred.kind == self.source.replace('ec2', 'aws'):
                    credential = cred
                    break
            else:
                # these need to be returned in the API credential field
                if cred.credential_type.kind != 'vault':
                    credential = cred
                    break
        return credential
    def get_extra_credentials(self):
        """Return all credentials that are not used by the inventory source injector.
        These are all credentials that should run their own inject_credential logic.
        """
        special_cred = None
        if self.source in CLOUD_PROVIDERS:
            # these have special injection logic associated with them
            special_cred = self.get_cloud_credential()
        extra_creds = []
        for cred in self.credentials.all():
            if special_cred is None or cred.pk != special_cred.pk:
                extra_creds.append(cred)
        return extra_creds
    @property
    def credential(self):
        """Primary key of the cloud credential, or None when there isn't one."""
        cred = self.get_cloud_credential()
        if cred is not None:
            return cred.pk
    def clean_source_regions(self):
        """Validate the comma-separated source_regions value for cloud sources.

        Returns '' for non-cloud sources, 'all' when requested, otherwise a
        normalized comma-separated list.  Raises ValidationError for unknown
        region names.
        """
        regions = self.source_regions

        def region_transform(name):
            # Normalize a region token for comparison.
            return name.strip().lower()

        if self.source in CLOUD_PROVIDERS:
            get_regions = getattr(self, 'get_%s_region_choices' % self.source)
            valid_regions = [x[0] for x in get_regions()]
        else:
            return ''
        all_region = region_transform('all')
        valid_regions = [region_transform(x) for x in valid_regions]
        regions = [region_transform(x) for x in regions.split(',') if x.strip()]
        if all_region in regions:
            # 'all' supersedes any individual selections.
            return all_region
        invalid_regions = []
        for r in regions:
            if r not in valid_regions and r not in invalid_regions:
                invalid_regions.append(r)
        if invalid_regions:
            raise ValidationError(_('Invalid %(source)s region: %(region)s') % {
                'source': self.source, 'region': ', '.join(invalid_regions)})
        return ','.join(regions)
    # Parsed (dict) view of the ``source_vars`` text field.
    source_vars_dict = VarsDictProperty('source_vars')
    def clean_instance_filters(self):
        """Validate instance_filters: full syntax check for EC2, passthrough
        for vmware/tower, cleared for everything else."""
        instance_filters = str(self.instance_filters or '')
        if self.source == 'ec2':
            invalid_filters = []
            # Each filter must look like 'name=value' or 'tag:Key=value'.
            instance_filter_re = re.compile(r'^((tag:.+)|([a-z][a-z\.-]*[a-z]))=.*$')
            for instance_filter in instance_filters.split(','):
                instance_filter = instance_filter.strip()
                if not instance_filter:
                    continue
                if not instance_filter_re.match(instance_filter):
                    invalid_filters.append(instance_filter)
                    continue
                instance_filter_name = instance_filter.split('=', 1)[0]
                if instance_filter_name.startswith('tag:'):
                    continue
                if instance_filter_name not in self.INSTANCE_FILTER_NAMES:
                    invalid_filters.append(instance_filter)
            if invalid_filters:
                raise ValidationError(_('Invalid filter expression: %(filter)s') %
                                      {'filter': ', '.join(invalid_filters)})
            return instance_filters
        elif self.source in ('vmware', 'tower'):
            return instance_filters
        else:
            return ''
    def clean_group_by(self):
        """Validate group_by: must match known choices for EC2, passthrough
        for vmware, cleared for everything else."""
        group_by = str(self.group_by or '')
        if self.source == 'ec2':
            get_choices = getattr(self, 'get_%s_group_by_choices' % self.source)
            valid_choices = [x[0] for x in get_choices()]

            def choice_transform(choice):
                # Normalize a group_by token for comparison.
                return choice.strip().lower()

            valid_choices = [choice_transform(x) for x in valid_choices]
            choices = [choice_transform(x) for x in group_by.split(',') if x.strip()]
            invalid_choices = []
            for c in choices:
                if c not in valid_choices and c not in invalid_choices:
                    invalid_choices.append(c)
            if invalid_choices:
                raise ValidationError(_('Invalid group by choice: %(choice)s') %
                                      {'choice': ', '.join(invalid_choices)})
            return ','.join(choices)
        elif self.source == 'vmware':
            return group_by
        else:
            return ''
class InventorySource(UnifiedJobTemplate, InventorySourceOptions, CustomVirtualEnvMixin, RelatedJobsMixin):
    # Enforced at the application layer rather than the DB (see UnifiedJobTemplate).
    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'inventory')]
    class Meta:
        app_label = 'main'
        ordering = ('inventory', 'name')
    inventory = models.ForeignKey(
        'Inventory',
        related_name='inventory_sources',
        null=True,
        default=None,
        on_delete=models.CASCADE,
    )
    # Only used for source == 'scm' (inventory file lives in a project).
    source_project = models.ForeignKey(
        'Project',
        related_name='scm_inventory_sources',
        help_text=_('Project containing inventory file used as source.'),
        on_delete=models.CASCADE,
        blank=True,
        default=None,
        null=True
    )
    # Last SCM revision the inventory was imported from; cleared to force re-import.
    scm_last_revision = models.CharField(
        max_length=1024,
        blank=True,
        default='',
        editable=False,
    )
    update_on_project_update = models.BooleanField(
        default=False,
    )
    update_on_launch = models.BooleanField(
        default=False,
    )
    # Seconds during which a prior update satisfies update_on_launch (0 = always update).
    update_cache_timeout = models.PositiveIntegerField(
        default=0,
    )
    @classmethod
    def _get_unified_job_class(cls):
        """Jobs spawned from this template are InventoryUpdate instances."""
        return InventoryUpdate
@classmethod
def _get_unified_job_field_names(cls):
return set(f.name for f in InventorySourceOptions._meta.fields) | set(
['name', 'description', 'organization', 'credentials', 'inventory']
)
    def save(self, *args, **kwargs):
        """Save the inventory source, handling auto-naming, SCM revision reset,
        initial project-update scheduling and computed-field refresh."""
        # if this is a new object, inherit organization from its inventory
        if not self.pk and self.inventory and self.inventory.organization_id and not self.organization_id:
            self.organization_id = self.inventory.organization_id
        # If update_fields has been specified, add our field names to it,
        # if it hasn't been specified, then we're just doing a normal save.
        update_fields = kwargs.get('update_fields', [])
        is_new_instance = not bool(self.pk)
        # Set name automatically. Include PK (or placeholder) to make sure the names are always unique.
        replace_text = '__replace_%s__' % now()
        old_name_re = re.compile(r'^inventory_source \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.*?$')
        if not self.name or old_name_re.match(self.name) or '__replace_' in self.name:
            group_name = getattr(self, 'v1_group_name', '')
            if self.inventory and self.pk:
                self.name = '%s (%s - %s)' % (group_name, self.inventory.name, self.pk)
            elif self.inventory:
                # No PK yet; use the placeholder, swapped for the PK after the first save.
                self.name = '%s (%s - %s)' % (group_name, self.inventory.name, replace_text)
            elif not is_new_instance:
                self.name = 'inventory source (%s)' % self.pk
            else:
                self.name = 'inventory source (%s)' % replace_text
            if 'name' not in update_fields:
                update_fields.append('name')
        # Reset revision if SCM source has changed parameters
        if self.source=='scm' and not is_new_instance:
            before_is = self.__class__.objects.get(pk=self.pk)
            if before_is.source_path != self.source_path or before_is.source_project_id != self.source_project_id:
                # Reset the scm_revision if file changed to force update
                self.scm_last_revision = ''
                if 'scm_last_revision' not in update_fields:
                    update_fields.append('scm_last_revision')
        # Do the actual save.
        super(InventorySource, self).save(*args, **kwargs)
        # Add the PK to the name.
        if replace_text in self.name:
            self.name = self.name.replace(replace_text, str(self.pk))
            super(InventorySource, self).save(update_fields=['name'])
        if self.source=='scm' and is_new_instance and self.update_on_project_update:
            # Schedule a new Project update if one is not already queued
            if self.source_project and not self.source_project.project_updates.filter(
                    status__in=['new', 'pending', 'waiting']).exists():
                self.update()
        # Skip recomputation while a bulk inventory update is in progress.
        if not getattr(_inventory_updates, 'is_updating', False):
            if self.inventory is not None:
                self.inventory.update_computed_fields()
def _get_current_status(self):
if self.source:
if self.current_job and self.current_job.status:
return self.current_job.status
elif not self.last_job:
return 'never updated'
# inherit the child job status
else:
return self.last_job.status
else:
return 'none'
    def get_absolute_url(self, request=None):
        """Return the API detail URL for this inventory source."""
        return reverse('api:inventory_source_detail', kwargs={'pk': self.pk}, request=request)
def _can_update(self):
if self.source == 'custom':
return bool(self.source_script)
elif self.source == 'scm':
return bool(self.source_project)
elif self.source == 'file':
return False
elif self.source == 'ec2':
# Permit credential-less ec2 updates to allow IAM roles
return True
elif self.source == 'gce':
# These updates will hang if correct credential is not supplied
credential = self.get_cloud_credential()
return bool(credential and credential.kind == 'gce')
return True
    def create_inventory_update(self, **kwargs):
        """Convenience alias: spawn an InventoryUpdate via create_unified_job."""
        return self.create_unified_job(**kwargs)
def create_unified_job(self, **kwargs):
# Use special name, if name not already specified
if self.inventory:
if '_eager_fields' not in kwargs:
kwargs['_eager_fields'] = {}
if 'name' not in kwargs['_eager_fields']:
name = '{} - {}'.format(self.inventory.name, self.name)
name_field = self._meta.get_field('name')
if len(name) > name_field.max_length:
name = name[:name_field.max_length]
kwargs['_eager_fields']['name'] = name
return super(InventorySource, self).create_unified_job(**kwargs)
@property
def cache_timeout_blocked(self):
if not self.last_job_run:
return False
if (self.last_job_run + datetime.timedelta(seconds=self.update_cache_timeout)) > now():
return True
return False
@property
def needs_update_on_launch(self):
if self.source and self.update_on_launch:
if not self.last_job_run:
return True
if (self.last_job_run + datetime.timedelta(seconds=self.update_cache_timeout)) <= now():
return True
return False
    @property
    def notification_templates(self):
        """Notification templates to fire for this source, per event type.

        Combines templates attached directly to this template with those
        configured on the inventory's organization (deduplicated via set).
        """
        base_notification_templates = NotificationTemplate.objects
        error_notification_templates = list(base_notification_templates
                                            .filter(unifiedjobtemplate_notification_templates_for_errors__in=[self]))
        started_notification_templates = list(base_notification_templates
                                              .filter(unifiedjobtemplate_notification_templates_for_started__in=[self]))
        success_notification_templates = list(base_notification_templates
                                              .filter(unifiedjobtemplate_notification_templates_for_success__in=[self]))
        if self.inventory.organization is not None:
            # Merge in organization-level templates, dropping duplicates.
            error_notification_templates = set(error_notification_templates + list(base_notification_templates
                                               .filter(organization_notification_templates_for_errors=self.inventory.organization)))
            started_notification_templates = set(started_notification_templates + list(base_notification_templates
                                                 .filter(organization_notification_templates_for_started=self.inventory.organization)))
            success_notification_templates = set(success_notification_templates + list(base_notification_templates
                                                 .filter(organization_notification_templates_for_success=self.inventory.organization)))
        return dict(error=list(error_notification_templates),
                    started=list(started_notification_templates),
                    success=list(success_notification_templates))
def clean_update_on_project_update(self):
if self.update_on_project_update is True and \
self.source == 'scm' and \
InventorySource.objects.filter(
Q(inventory=self.inventory,
update_on_project_update=True, source='scm') &
~Q(id=self.id)).exists():
raise ValidationError(_("More than one SCM-based inventory source with update on project update per-inventory not allowed."))
return self.update_on_project_update
def clean_update_on_launch(self):
if self.update_on_project_update is True and \
self.source == 'scm' and \
self.update_on_launch is True:
raise ValidationError(_("Cannot update SCM-based inventory source on launch if set to update on project update. "
"Instead, configure the corresponding source project to update on launch."))
return self.update_on_launch
def clean_source_path(self):
if self.source != 'scm' and self.source_path:
raise ValidationError(_("Cannot set source_path if not SCM type."))
return self.source_path
    '''
    RelatedJobsMixin
    '''
    def _get_related_jobs(self):
        """Return the queryset of InventoryUpdates spawned by this source."""
        return InventoryUpdate.objects.filter(inventory_source=self)
class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin, TaskManagerInventoryUpdateMixin, CustomVirtualEnvMixin):
    '''
    Internal job for tracking inventory updates from external sources.
    '''
    class Meta:
        app_label = 'main'
        ordering = ('inventory', 'name')
    # Inventory this update ran against. on_delete=DO_NOTHING: deleting an
    # Inventory does not automatically remove its update records via this FK.
    inventory = models.ForeignKey(
        'Inventory',
        related_name='inventory_updates',
        null=True,
        default=None,
        on_delete=models.DO_NOTHING,
    )
    # Parent source; deleting the source cascades to its updates.
    inventory_source = models.ForeignKey(
        'InventorySource',
        related_name='inventory_updates',
        editable=False,
        on_delete=models.CASCADE,
    )
    # NOTE(review): presumably set by the update task when the run fails due
    # to license enforcement — confirm against the task implementation.
    license_error = models.BooleanField(
        default=False,
        editable=False,
    )
    # NOTE(review): presumably set when an organization host limit is
    # exceeded — confirm against the task implementation.
    org_host_limit_error = models.BooleanField(
        default=False,
        editable=False,
    )
    source_project_update = models.ForeignKey(
        'ProjectUpdate',
        related_name='scm_inventory_updates',
        help_text=_('Inventory files from this Project Update were used for the inventory update.'),
        on_delete=models.CASCADE,
        blank=True,
        default=None,
        null=True
    )
    def _get_parent_field_name(self):
        """Field linking this unified job back to its parent template object."""
        return 'inventory_source'
    @classmethod
    def _get_task_class(cls):
        """Task class that actually performs the inventory update."""
        from awx.main.tasks import RunInventoryUpdate
        return RunInventoryUpdate
    def _global_timeout_setting(self):
        """Name of the setting holding the global default timeout for this job type."""
        return 'DEFAULT_INVENTORY_UPDATE_TIMEOUT'
    def websocket_emit_data(self):
        """Add inventory_source_id (and inventory_id, when set) to emitted data."""
        websocket_data = super(InventoryUpdate, self).websocket_emit_data()
        websocket_data.update(dict(inventory_source_id=self.inventory_source.pk))
        if self.inventory_source.inventory is not None:
            websocket_data.update(dict(inventory_id=self.inventory_source.inventory.pk))
        return websocket_data
    def get_absolute_url(self, request=None):
        """Return the API detail-view URL for this inventory update."""
        return reverse('api:inventory_update_detail', kwargs={'pk': self.pk}, request=request)
    def get_ui_url(self):
        """Return the UI job-details URL for this inventory update."""
        return urljoin(settings.TOWER_URL_BASE, "/#/jobs/inventory/{}".format(self.pk))
    def get_actual_source_path(self):
        '''Alias to source_path that combines with project path for SCM file based sources'''
        if self.inventory_source_id is None or self.inventory_source.source_project_id is None:
            return self.source_path
        return os.path.join(
            self.inventory_source.source_project.get_project_path(check_if_exists=False),
            self.source_path)
    @property
    def event_class(self):
        """Event model class used for this job type's event records."""
        return InventoryUpdateEvent
    @property
    def task_impact(self):
        # Every inventory update consumes a single unit of capacity.
        return 1
    # InventoryUpdate credential required
    # Custom and SCM InventoryUpdate credential not required
    @property
    def can_start(self):
        """Startable only if base checks pass and the parent source can update."""
        if not super(InventoryUpdate, self).can_start:
            return False
        elif not self.inventory_source or not self.inventory_source._can_update():
            return False
        return True
    '''
    JobNotificationMixin
    '''
    def get_notification_templates(self):
        """Notification templates come from the parent inventory source."""
        return self.inventory_source.notification_templates
    def get_notification_friendly_name(self):
        """Human-readable job type name used in notifications."""
        return "Inventory Update"
    @property
    def preferred_instance_groups(self):
        """Instance groups to run on: inventory's groups, then the organization's;
        fall back to the global instance groups when neither is configured."""
        if self.inventory_source.inventory is not None and self.inventory_source.inventory.organization is not None:
            organization_groups = [x for x in self.inventory_source.inventory.organization.instance_groups.all()]
        else:
            organization_groups = []
        if self.inventory_source.inventory is not None:
            inventory_groups = [x for x in self.inventory_source.inventory.instance_groups.all()]
        else:
            inventory_groups = []
        selected_groups = inventory_groups + organization_groups
        if not selected_groups:
            return self.global_instance_groups
        return selected_groups
    @property
    def ansible_virtualenv_path(self):
        """Virtualenv used for the run: the source's custom venv, else the
        source project's, else the default Ansible venv."""
        if self.inventory_source and self.inventory_source.custom_virtualenv:
            return self.inventory_source.custom_virtualenv
        if self.inventory_source and self.inventory_source.source_project:
            project = self.inventory_source.source_project
            if project and project.custom_virtualenv:
                return project.custom_virtualenv
        return settings.ANSIBLE_VENV_PATH
    def cancel(self, job_explanation=None, is_chain=False):
        """Cancel this update, and also cancel the linked project update unless
        this update was itself launched by that project update (launch_type 'scm')."""
        res = super(InventoryUpdate, self).cancel(job_explanation=job_explanation, is_chain=is_chain)
        if res:
            if self.launch_type != 'scm' and self.source_project_update:
                self.source_project_update.cancel(job_explanation=job_explanation)
        return res
class CustomInventoryScript(CommonModelNameNotUnique, ResourceMixin):
    """Custom inventory script owned by an organization (name unique per org)."""
    class Meta:
        app_label = 'main'
        unique_together = [('name', 'organization')]
        ordering = ('name',)
    # NOTE(review): prevent_search presumably excludes the raw script contents
    # from API search/filter lookups — confirm against prevent_search().
    script = prevent_search(models.TextField(
        blank=True,
        default='',
        help_text=_('Inventory script contents'),
    ))
    # SET_NULL keeps the script row if its owning organization is deleted.
    organization = models.ForeignKey(
        'Organization',
        related_name='custom_inventory_scripts',
        help_text=_('Organization owning this inventory script'),
        blank=False,
        null=True,
        on_delete=models.SET_NULL,
    )
    # RBAC: admin inherits from the org admin role; read from auditor/member/admin.
    admin_role = ImplicitRoleField(
        parent_role='organization.admin_role',
    )
    read_role = ImplicitRoleField(
        parent_role=['organization.auditor_role', 'organization.member_role', 'admin_role'],
    )
    def get_absolute_url(self, request=None):
        """Return the API detail-view URL for this inventory script."""
        return reverse('api:inventory_script_detail', kwargs={'pk': self.pk}, request=request)
# TODO: move to awx/main/models/inventory/injectors.py
class PluginFileInjector(object):
    """Base class describing how an inventory source type is wired into an
    update run: either through a modern Ansible inventory plugin (env vars +
    a generated <plugin>.yml file) or the legacy contrib script (env vars +
    a private ini file). Subclasses override the class attributes below and
    the env/private-data hooks as needed.
    """
    # if plugin_name is not given, no inventory plugin functionality exists
    plugin_name = None # Ansible core name used to reference plugin
    # if initial_version is None, but we have plugin name, injection logic exists,
    # but it is vaporware, meaning we do not use it for some reason in Ansible core
    initial_version = None # at what version do we switch to the plugin
    ini_env_reference = None # env var name that points to old ini config file
    # base injector should be one of None, "managed", or "template"
    # this dictates which logic to borrow from playbook injectors
    base_injector = None
    # every source should have collection, but these are set here
    # so that a source without a collection will have null values
    namespace = None
    collection = None
    collection_migration = '2.9' # Starting with this version, we use collections
    def __init__(self, ansible_version):
        # Version string of the Ansible install that will run the update;
        # compared against initial_version / collection_migration via Version().
        self.ansible_version = ansible_version
    @property
    def filename(self):
        """Inventory filename for using the inventory plugin
        This is created dynamically, but the auto plugin requires this exact naming
        """
        return '{0}.yml'.format(self.plugin_name)
    @property
    def script_name(self):
        """Name of the script located in awx/plugins/inventory
        """
        return '{0}.py'.format(self.__class__.__name__)
    def inventory_as_dict(self, inventory_update, private_data_dir):
        """Default implementation of inventory plugin file contents.
        There are some valid cases when all parameters can be obtained from
        the environment variables, example "plugin: linode" is valid
        ideally, however, some options should be filled from the inventory source data
        """
        if self.plugin_name is None:
            raise NotImplementedError('At minimum the plugin name is needed for inventory plugin use.')
        # Use the fully-qualified collection plugin name once the running
        # Ansible is at/after the collection migration version.
        if self.initial_version is None or Version(self.ansible_version) >= Version(self.collection_migration):
            proper_name = f'{self.namespace}.{self.collection}.{self.plugin_name}'
        else:
            proper_name = self.plugin_name
        return {'plugin': proper_name}
    def inventory_contents(self, inventory_update, private_data_dir):
        """Returns a string that is the content for the inventory file for the inventory plugin
        """
        return yaml.safe_dump(
            self.inventory_as_dict(inventory_update, private_data_dir),
            default_flow_style=False,
            width=1000
        )
    def should_use_plugin(self):
        """True when a plugin exists and the running Ansible is new enough for it."""
        return bool(
            self.plugin_name and self.initial_version and
            Version(self.ansible_version) >= Version(self.initial_version)
        )
    def build_env(self, inventory_update, env, private_data_dir, private_data_files):
        """Merge the plugin- or script-specific environment into env and return it."""
        if self.should_use_plugin():
            injector_env = self.get_plugin_env(inventory_update, private_data_dir, private_data_files)
        else:
            injector_env = self.get_script_env(inventory_update, private_data_dir, private_data_files)
        env.update(injector_env)
        # Preserves current behavior for Ansible change in default planned for 2.10
        env['ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS'] = 'never'
        return env
    def _get_shared_env(self, inventory_update, private_data_dir, private_data_files):
        """By default, we will apply the standard managed_by_tower injectors
        for the script injection
        """
        injected_env = {}
        credential = inventory_update.get_cloud_credential()
        # some sources may have no credential, specifically ec2
        if credential is None:
            return injected_env
        if self.base_injector in ('managed', 'template'):
            injected_env['INVENTORY_UPDATE_ID'] = str(inventory_update.pk) # so injector knows this is inventory
        if self.base_injector == 'managed':
            from awx.main.models.credential import injectors as builtin_injectors
            cred_kind = inventory_update.source.replace('ec2', 'aws')
            if cred_kind in dir(builtin_injectors):
                getattr(builtin_injectors, cred_kind)(credential, injected_env, private_data_dir)
        elif self.base_injector == 'template':
            safe_env = injected_env.copy()
            args = []
            credential.credential_type.inject_credential(
                credential, injected_env, safe_env, args, private_data_dir
            )
            # NOTE: safe_env is handled externally to injector class by build_safe_env static method
            # that means that managed_by_tower injectors must only inject detectable env keys
            # enforcement of this is accomplished by tests
        return injected_env
    def get_plugin_env(self, inventory_update, private_data_dir, private_data_files):
        """Environment for plugin-based updates; points Ansible at the bundled
        collections root once the collection-based plugin name is in use."""
        env = self._get_shared_env(inventory_update, private_data_dir, private_data_files)
        if self.initial_version is None or Version(self.ansible_version) >= Version(self.collection_migration):
            env['ANSIBLE_COLLECTIONS_PATHS'] = settings.INVENTORY_COLLECTIONS_ROOT
        return env
    def get_script_env(self, inventory_update, private_data_dir, private_data_files):
        """Environment for script-based updates, including the ini-file pointer
        env var when this source type uses one."""
        injected_env = self._get_shared_env(inventory_update, private_data_dir, private_data_files)
        # Put in env var reference to private ini data files, if relevant
        if self.ini_env_reference:
            credential = inventory_update.get_cloud_credential()
            cred_data = private_data_files['credentials']
            injected_env[self.ini_env_reference] = cred_data[credential]
        return injected_env
    def build_private_data(self, inventory_update, private_data_dir):
        """Dispatch to the plugin or script private-data builder."""
        if self.should_use_plugin():
            return self.build_plugin_private_data(inventory_update, private_data_dir)
        else:
            return self.build_script_private_data(inventory_update, private_data_dir)
    def build_script_private_data(self, inventory_update, private_data_dir):
        """No private data by default; subclasses override to build ini files."""
        return None
    def build_plugin_private_data(self, inventory_update, private_data_dir):
        """No private data by default; subclasses override if the plugin needs any."""
        return None
    @staticmethod
    def dump_cp(cp, credential):
        """Dump config parser data and return it as a string.
        Helper method intended for use by build_script_private_data
        """
        if cp.sections():
            f = StringIO()
            cp.write(f)
            private_data = {'credentials': {}}
            private_data['credentials'][credential] = f.getvalue()
            return private_data
        else:
            return None
class azure_rm(PluginFileInjector):
    """Injector for Azure Resource Manager sources (azure_rm plugin / script)."""
    plugin_name = 'azure_rm'
    initial_version = '2.8' # Driven by unsafe group names issue, hostvars, host names
    ini_env_reference = 'AZURE_INI_PATH'
    base_injector = 'managed'
    namespace = 'azure'
    collection = 'azcollection'
    def get_plugin_env(self, *args, **kwargs):
        """Plugin env plus native jinja2 so tag hostvars can yield JSON null."""
        ret = super(azure_rm, self).get_plugin_env(*args, **kwargs)
        # We need native jinja2 types so that tags can give JSON null value
        ret['ANSIBLE_JINJA2_NATIVE'] = str(True)
        return ret
    def inventory_as_dict(self, inventory_update, private_data_dir):
        """Build plugin config reproducing legacy azure_rm.py script behavior:
        group_by_* toggles, host filters from resource_groups/tags source vars,
        compat hostvar aliases, and source_regions exclusions."""
        ret = super(azure_rm, self).inventory_as_dict(inventory_update, private_data_dir)
        source_vars = inventory_update.source_vars_dict
        ret['fail_on_template_errors'] = False
        group_by_hostvar = {
            'location': {'prefix': '', 'separator': '', 'key': 'location'},
            'tag': {'prefix': '', 'separator': '', 'key': 'tags.keys() | list if tags else []'},
            # Introduced with https://github.com/ansible/ansible/pull/53046
            'security_group': {'prefix': '', 'separator': '', 'key': 'security_group'},
            'resource_group': {'prefix': '', 'separator': '', 'key': 'resource_group'},
            # Note, os_family was not documented correctly in script, but defaulted to grouping by it
            'os_family': {'prefix': '', 'separator': '', 'key': 'os_disk.operating_system_type'}
        }
        # by default group by everything
        # always respect user setting, if they gave it
        group_by = [
            grouping_name for grouping_name in group_by_hostvar
            if source_vars.get('group_by_{}'.format(grouping_name), True)
        ]
        ret['keyed_groups'] = [group_by_hostvar[grouping_name] for grouping_name in group_by]
        if 'tag' in group_by:
            # Nasty syntax to reproduce "key_value" group names in addition to "key"
            ret['keyed_groups'].append({
                'prefix': '', 'separator': '',
                'key': r'dict(tags.keys() | map("regex_replace", "^(.*)$", "\1_") | list | zip(tags.values() | list)) if tags else []'
            })
        # Compatibility content
        # TODO: add proper support for instance_filters non-specific to compatibility
        # TODO: add proper support for group_by non-specific to compatibility
        # Dashes were not configurable in azure_rm.py script, we do not want unicode, so always use this
        ret['use_contrib_script_compatible_sanitization'] = True
        # use same host names as script
        ret['plain_host_names'] = True
        # By default the script did not filter hosts
        ret['default_host_filters'] = []
        # User-given host filters
        user_filters = []
        old_filterables = [
            ('resource_groups', 'resource_group'),
            ('tags', 'tags')
            # locations / location would be an entry
            # but this would conflict with source_regions
        ]
        for key, loc in old_filterables:
            value = source_vars.get(key, None)
            if value and isinstance(value, str):
                # tags can be list of key:value pairs
                # e.g. 'Creator:jmarshall, peanutbutter:jelly'
                # or tags can be a list of keys
                # e.g. 'Creator, peanutbutter'
                if key == "tags":
                    # grab each key value pair
                    for kvpair in value.split(','):
                        # split into key and value
                        kv = kvpair.split(':')
                        # filter out any host that does not have key
                        # in their tags.keys() variable
                        user_filters.append('"{}" not in tags.keys()'.format(kv[0].strip()))
                        # if a value is provided, check that the key:value pair matches
                        if len(kv) > 1:
                            user_filters.append('tags["{}"] != "{}"'.format(kv[0].strip(), kv[1].strip()))
                else:
                    user_filters.append('{} not in {}'.format(
                        loc, value.split(',')
                    ))
        if user_filters:
            ret.setdefault('exclude_host_filters', [])
            ret['exclude_host_filters'].extend(user_filters)
        ret['conditional_groups'] = {'azure': True}
        ret['hostvar_expressions'] = {
            'provisioning_state': 'provisioning_state | title',
            'computer_name': 'name',
            'type': 'resource_type',
            'private_ip': 'private_ipv4_addresses[0] if private_ipv4_addresses else None',
            'public_ip': 'public_ipv4_addresses[0] if public_ipv4_addresses else None',
            'public_ip_name': 'public_ip_name if public_ip_name is defined else None',
            'public_ip_id': 'public_ip_id if public_ip_id is defined else None',
            'tags': 'tags if tags else None'
        }
        # Special functionality from script
        if source_vars.get('use_private_ip', False):
            ret['hostvar_expressions']['ansible_host'] = 'private_ipv4_addresses[0]'
        # end compatibility content
        if inventory_update.source_regions and 'all' not in inventory_update.source_regions:
            # initialize a list for this section in inventory file
            ret.setdefault('exclude_host_filters', [])
            # make a python list of the regions we will use
            python_regions = [x.strip() for x in inventory_update.source_regions.split(',')]
            # convert that list in memory to python syntax in a string
            # now put that in jinja2 syntax operating on hostvar key "location"
            # and put that as an entry in the exclusions list
            ret['exclude_host_filters'].append("location not in {}".format(repr(python_regions)))
        return ret
    def build_script_private_data(self, inventory_update, private_data_dir):
        """Build the azure.ini file contents consumed by the legacy script."""
        cp = configparser.RawConfigParser()
        section = 'azure'
        cp.add_section(section)
        cp.set(section, 'include_powerstate', 'yes')
        cp.set(section, 'group_by_resource_group', 'yes')
        cp.set(section, 'group_by_location', 'yes')
        cp.set(section, 'group_by_tag', 'yes')
        if inventory_update.source_regions and 'all' not in inventory_update.source_regions:
            cp.set(
                section, 'locations',
                ','.join([x.strip() for x in inventory_update.source_regions.split(',')])
            )
        # Pass all user-provided source vars straight into the ini file.
        azure_rm_opts = dict(inventory_update.source_vars_dict.items())
        for k, v in azure_rm_opts.items():
            cp.set(section, k, str(v))
        return self.dump_cp(cp, inventory_update.get_cloud_credential())
class ec2(PluginFileInjector):
    """Injector for AWS EC2 sources (aws_ec2 plugin / legacy ec2.py script)."""
    plugin_name = 'aws_ec2'
    # blocked by https://github.com/ansible/ansible/issues/54059
    initial_version = '2.9' # Driven by unsafe group names issue, parent_group templating, hostvars
    ini_env_reference = 'EC2_INI_PATH'
    base_injector = 'managed'
    namespace = 'amazon'
    collection = 'aws'
    def get_plugin_env(self, *args, **kwargs):
        """Plugin env plus native jinja2 so numeric hostvars keep their type."""
        ret = super(ec2, self).get_plugin_env(*args, **kwargs)
        # We need native jinja2 types so that ec2_state_code will give integer
        ret['ANSIBLE_JINJA2_NATIVE'] = str(True)
        return ret
    def _compat_compose_vars(self):
        """Composed hostvars reproducing the names the ec2.py script returned."""
        return {
            # vars that change
            'ec2_block_devices': (
                "dict(block_device_mappings | map(attribute='device_name') | list | zip(block_device_mappings "
                "| map(attribute='ebs.volume_id') | list))"
            ),
            'ec2_dns_name': 'public_dns_name',
            'ec2_group_name': 'placement.group_name',
            'ec2_instance_profile': 'iam_instance_profile | default("")',
            'ec2_ip_address': 'public_ip_address',
            'ec2_kernel': 'kernel_id | default("")',
            'ec2_monitored': "monitoring.state in ['enabled', 'pending']",
            'ec2_monitoring_state': 'monitoring.state',
            'ec2_placement': 'placement.availability_zone',
            'ec2_ramdisk': 'ramdisk_id | default("")',
            'ec2_reason': 'state_transition_reason',
            'ec2_security_group_ids': "security_groups | map(attribute='group_id') | list | join(',')",
            'ec2_security_group_names': "security_groups | map(attribute='group_name') | list | join(',')",
            'ec2_tag_Name': 'tags.Name',
            'ec2_state': 'state.name',
            'ec2_state_code': 'state.code',
            'ec2_state_reason': 'state_reason.message if state_reason is defined else ""',
            'ec2_sourceDestCheck': 'source_dest_check | default(false) | lower | string', # snake_case syntax intended
            'ec2_account_id': 'owner_id',
            # vars that just need ec2_ prefix
            'ec2_ami_launch_index': 'ami_launch_index | string',
            'ec2_architecture': 'architecture',
            'ec2_client_token': 'client_token',
            'ec2_ebs_optimized': 'ebs_optimized',
            'ec2_hypervisor': 'hypervisor',
            'ec2_image_id': 'image_id',
            'ec2_instance_type': 'instance_type',
            'ec2_key_name': 'key_name',
            'ec2_launch_time': r'launch_time | regex_replace(" ", "T") | regex_replace("(\+)(\d\d):(\d)(\d)$", ".\g<2>\g<3>Z")',
            'ec2_platform': 'platform | default("")',
            'ec2_private_dns_name': 'private_dns_name',
            'ec2_private_ip_address': 'private_ip_address',
            'ec2_public_dns_name': 'public_dns_name',
            'ec2_region': 'placement.region',
            'ec2_root_device_name': 'root_device_name',
            'ec2_root_device_type': 'root_device_type',
            # many items need blank defaults because the script tended to keep a common schema
            'ec2_spot_instance_request_id': 'spot_instance_request_id | default("")',
            'ec2_subnet_id': 'subnet_id | default("")',
            'ec2_virtualization_type': 'virtualization_type',
            'ec2_vpc_id': 'vpc_id | default("")',
            # same as ec2_ip_address, the script provided this
            'ansible_host': 'public_ip_address',
            # new with https://github.com/ansible/ansible/pull/53645
            'ec2_eventsSet': 'events | default("")',
            'ec2_persistent': 'persistent | default(false)',
            'ec2_requester_id': 'requester_id | default("")'
        }
    def inventory_as_dict(self, inventory_update, private_data_dir):
        """Build aws_ec2 plugin config reproducing ec2.py script behavior:
        group_by choices mapped to keyed_groups, sanitization regexes matching
        the script's, compat hostvars, hostnames, filters and regions."""
        ret = super(ec2, self).inventory_as_dict(inventory_update, private_data_dir)
        keyed_groups = []
        group_by_hostvar = {
            'ami_id': {'prefix': '', 'separator': '', 'key': 'image_id', 'parent_group': 'images'},
            # 2 entries for zones for same groups to establish 2 parentage trees
            'availability_zone': {'prefix': '', 'separator': '', 'key': 'placement.availability_zone', 'parent_group': 'zones'},
            'aws_account': {'prefix': '', 'separator': '', 'key': 'ec2_account_id', 'parent_group': 'accounts'}, # composed var
            'instance_id': {'prefix': '', 'separator': '', 'key': 'instance_id', 'parent_group': 'instances'}, # normally turned off
            'instance_state': {'prefix': 'instance_state', 'key': 'ec2_state', 'parent_group': 'instance_states'}, # composed var
            # ec2_platform is a composed var, but group names do not match up to hostvar exactly
            'platform': {'prefix': 'platform', 'key': 'platform | default("undefined")', 'parent_group': 'platforms'},
            'instance_type': {'prefix': 'type', 'key': 'instance_type', 'parent_group': 'types'},
            'key_pair': {'prefix': 'key', 'key': 'key_name', 'parent_group': 'keys'},
            'region': {'prefix': '', 'separator': '', 'key': 'placement.region', 'parent_group': 'regions'},
            # Security requires some ninja jinja2 syntax, credit to s-hertel
            'security_group': {'prefix': 'security_group', 'key': 'security_groups | map(attribute="group_name")', 'parent_group': 'security_groups'},
            # tags cannot be parented in exactly the same way as the script due to
            # https://github.com/ansible/ansible/pull/53812
            'tag_keys': [
                {'prefix': 'tag', 'key': 'tags', 'parent_group': 'tags'},
                {'prefix': 'tag', 'key': 'tags.keys()', 'parent_group': 'tags'}
            ],
            # 'tag_none': None, # grouping by no tags isn't a different thing with plugin
            # naming is redundant, like vpc_id_vpc_8c412cea, but intended
            'vpc_id': {'prefix': 'vpc_id', 'key': 'vpc_id', 'parent_group': 'vpcs'},
        }
        # -- same-ish as script here --
        group_by = [x.strip().lower() for x in inventory_update.group_by.split(',') if x.strip()]
        for choice in inventory_update.get_ec2_group_by_choices():
            value = bool((group_by and choice[0] in group_by) or (not group_by and choice[0] != 'instance_id'))
            # -- end sameness to script --
            if value:
                this_keyed_group = group_by_hostvar.get(choice[0], None)
                # If a keyed group syntax does not exist, there is nothing we can do to get this group
                if this_keyed_group is not None:
                    if isinstance(this_keyed_group, list):
                        keyed_groups.extend(this_keyed_group)
                    else:
                        keyed_groups.append(this_keyed_group)
        # special case, this parentage is only added if both zones and regions are present
        if not group_by or ('region' in group_by and 'availability_zone' in group_by):
            keyed_groups.append({'prefix': '', 'separator': '', 'key': 'placement.availability_zone', 'parent_group': '{{ placement.region }}'})
        source_vars = inventory_update.source_vars_dict
        # This is a setting from the script, hopefully no one used it
        # if true, it replaces dashes, but not in region / loc names
        replace_dash = bool(source_vars.get('replace_dash_in_groups', True))
        # Compatibility content
        legacy_regex = {
            True: r"[^A-Za-z0-9\_]",
            False: r"[^A-Za-z0-9\_\-]" # do not replace dash, dash is whitelisted
        }[replace_dash]
        list_replacer = 'map("regex_replace", "{rx}", "_") | list'.format(rx=legacy_regex)
        # this option, a plugin option, will allow dashes, but not unicode
        # when set to False, unicode will be allowed, but it was not allowed by script
        # thus, we always have to use this option, and always use our custom regex
        ret['use_contrib_script_compatible_sanitization'] = True
        for grouping_data in keyed_groups:
            if grouping_data['key'] in ('placement.region', 'placement.availability_zone'):
                # us-east-2 is always us-east-2 according to ec2.py
                # no sanitization in region-ish groups for the script standards, ever ever
                continue
            if grouping_data['key'] == 'tags':
                # dict jinja2 transformation
                grouping_data['key'] = 'dict(tags.keys() | {replacer} | zip(tags.values() | {replacer}))'.format(
                    replacer=list_replacer
                )
            elif grouping_data['key'] == 'tags.keys()' or grouping_data['prefix'] == 'security_group':
                # list jinja2 transformation
                grouping_data['key'] += ' | {replacer}'.format(replacer=list_replacer)
            else:
                # string transformation
                grouping_data['key'] += ' | regex_replace("{rx}", "_")'.format(rx=legacy_regex)
        # end compatibility content
        if source_vars.get('iam_role_arn', None):
            ret['iam_role_arn'] = source_vars['iam_role_arn']
        # This was an allowed ec2.ini option, also plugin option, so pass through
        if source_vars.get('boto_profile', None):
            ret['boto_profile'] = source_vars['boto_profile']
        elif not replace_dash:
            # Using the plugin, but still want dashes whitelisted
            # NOTE(review): this elif chains off the boto_profile check above and
            # the flag was already set to True unconditionally earlier — verify
            # whether this branch is still meaningful or leftover.
            ret['use_contrib_script_compatible_sanitization'] = True
        if source_vars.get('nested_groups') is False:
            for this_keyed_group in keyed_groups:
                this_keyed_group.pop('parent_group', None)
        if keyed_groups:
            ret['keyed_groups'] = keyed_groups
        # Instance ID not part of compat vars, because of settings.EC2_INSTANCE_ID_VAR
        compose_dict = {'ec2_id': 'instance_id'}
        inst_filters = {}
        # Compatibility content
        compose_dict.update(self._compat_compose_vars())
        # plugin provides "aws_ec2", but not this which the script gave
        ret['groups'] = {'ec2': True}
        if source_vars.get('hostname_variable') is not None:
            hnames = []
            for expr in source_vars.get('hostname_variable').split(','):
                if expr == 'public_dns_name':
                    hnames.append('dns-name')
                elif not expr.startswith('tag:') and '_' in expr:
                    hnames.append(expr.replace('_', '-'))
                else:
                    hnames.append(expr)
            ret['hostnames'] = hnames
        else:
            # public_ip as hostname is non-default plugin behavior, script behavior
            ret['hostnames'] = [
                'network-interface.addresses.association.public-ip',
                'dns-name',
                'private-dns-name'
            ]
        # The script returned only running state by default, the plugin does not
        # https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-instances.html#options
        # options: pending | running | shutting-down | terminated | stopping | stopped
        inst_filters['instance-state-name'] = ['running']
        # end compatibility content
        if source_vars.get('destination_variable') or source_vars.get('vpc_destination_variable'):
            for fd in ('destination_variable', 'vpc_destination_variable'):
                if source_vars.get(fd):
                    compose_dict['ansible_host'] = source_vars.get(fd)
                    break
        if compose_dict:
            ret['compose'] = compose_dict
        if inventory_update.instance_filters:
            # logic used to live in ec2.py, now it belongs to us. Yay more code?
            filter_sets = [f for f in inventory_update.instance_filters.split(',') if f]
            for instance_filter in filter_sets:
                # AND logic not supported, unclear how to...
                instance_filter = instance_filter.strip()
                if not instance_filter or '=' not in instance_filter:
                    continue
                filter_key, filter_value = [x.strip() for x in instance_filter.split('=', 1)]
                if not filter_key:
                    continue
                inst_filters[filter_key] = filter_value
        if inst_filters:
            ret['filters'] = inst_filters
        if inventory_update.source_regions and 'all' not in inventory_update.source_regions:
            ret['regions'] = inventory_update.source_regions.split(',')
        return ret
    def build_script_private_data(self, inventory_update, private_data_dir):
        """Build the ec2.ini file contents consumed by the legacy script."""
        cp = configparser.RawConfigParser()
        # Build custom ec2.ini for ec2 inventory script to use.
        section = 'ec2'
        cp.add_section(section)
        ec2_opts = dict(inventory_update.source_vars_dict.items())
        regions = inventory_update.source_regions or 'all'
        regions = ','.join([x.strip() for x in regions.split(',')])
        regions_blacklist = ','.join(settings.EC2_REGIONS_BLACKLIST)
        ec2_opts['regions'] = regions
        ec2_opts.setdefault('regions_exclude', regions_blacklist)
        ec2_opts.setdefault('destination_variable', 'public_dns_name')
        ec2_opts.setdefault('vpc_destination_variable', 'ip_address')
        ec2_opts.setdefault('route53', 'False')
        ec2_opts.setdefault('all_instances', 'True')
        ec2_opts.setdefault('all_rds_instances', 'False')
        ec2_opts.setdefault('include_rds_clusters', 'False')
        ec2_opts.setdefault('rds', 'False')
        ec2_opts.setdefault('nested_groups', 'True')
        ec2_opts.setdefault('elasticache', 'False')
        ec2_opts.setdefault('stack_filters', 'False')
        if inventory_update.instance_filters:
            ec2_opts.setdefault('instance_filters', inventory_update.instance_filters)
        group_by = [x.strip().lower() for x in inventory_update.group_by.split(',') if x.strip()]
        for choice in inventory_update.get_ec2_group_by_choices():
            value = bool((group_by and choice[0] in group_by) or (not group_by and choice[0] != 'instance_id'))
            ec2_opts.setdefault('group_by_%s' % choice[0], str(value))
        if 'cache_path' not in ec2_opts:
            cache_path = tempfile.mkdtemp(prefix='ec2_cache', dir=private_data_dir)
            ec2_opts['cache_path'] = cache_path
        ec2_opts.setdefault('cache_max_age', '300')
        for k, v in ec2_opts.items():
            cp.set(section, k, str(v))
        return self.dump_cp(cp, inventory_update.get_cloud_credential())
class gce(PluginFileInjector):
plugin_name = 'gcp_compute'
initial_version = '2.8' # Driven by unsafe group names issue, hostvars
ini_env_reference = 'GCE_INI_PATH'
base_injector = 'managed'
namespace = 'google'
collection = 'cloud'
def get_plugin_env(self, *args, **kwargs):
ret = super(gce, self).get_plugin_env(*args, **kwargs)
# We need native jinja2 types so that ip addresses can give JSON null value
ret['ANSIBLE_JINJA2_NATIVE'] = str(True)
return ret
def get_script_env(self, inventory_update, private_data_dir, private_data_files):
env = super(gce, self).get_script_env(inventory_update, private_data_dir, private_data_files)
cred = inventory_update.get_cloud_credential()
# these environment keys are unique to the script operation, and are not
# concepts in the modern inventory plugin or gce Ansible module
# email and project are redundant with the creds file
env['GCE_EMAIL'] = cred.get_input('username', default='')
env['GCE_PROJECT'] = cred.get_input('project', default='')
env['GCE_ZONE'] = inventory_update.source_regions if inventory_update.source_regions != 'all' else '' # noqa
return env
def _compat_compose_vars(self):
# missing: gce_image, gce_uuid
# https://github.com/ansible/ansible/issues/51884
return {
'gce_description': 'description if description else None',
'gce_machine_type': 'machineType',
'gce_name': 'name',
'gce_network': 'networkInterfaces[0].network.name',
'gce_private_ip': 'networkInterfaces[0].networkIP',
'gce_public_ip': 'networkInterfaces[0].accessConfigs[0].natIP | default(None)',
'gce_status': 'status',
'gce_subnetwork': 'networkInterfaces[0].subnetwork.name',
'gce_tags': 'tags.get("items", [])',
'gce_zone': 'zone',
'gce_metadata': 'metadata.get("items", []) | items2dict(key_name="key", value_name="value")',
# NOTE: image hostvar is enabled via retrieve_image_info option
'gce_image': 'image',
# We need this as long as hostnames is non-default, otherwise hosts
# will not be addressed correctly, was returned in script
'ansible_ssh_host': 'networkInterfaces[0].accessConfigs[0].natIP | default(networkInterfaces[0].networkIP)'
}
def inventory_as_dict(self, inventory_update, private_data_dir):
    """Build the gcp_compute plugin YAML configuration.

    Replicates the legacy script's grouping, hostvars, and hostname
    behavior so that re-imports do not conflict with prior imports.
    """
    ret = super(gce, self).inventory_as_dict(inventory_update, private_data_dir)
    credential = inventory_update.get_cloud_credential()
    # auth related items
    ret['projects'] = [credential.get_input('project', default='')]
    ret['auth_kind'] = "serviceaccount"
    filters = []
    # TODO: implement gce group_by options
    # gce never processed the group_by field, if it had, we would selectively
    # apply those options here, but it did not, so all groups are added here
    keyed_groups = [
        # the jinja2 syntax is duplicated with compose
        # https://github.com/ansible/ansible/issues/51883
        {'prefix': 'network', 'key': 'gce_subnetwork'},  # composed var
        {'prefix': '', 'separator': '', 'key': 'gce_private_ip'},  # composed var
        {'prefix': '', 'separator': '', 'key': 'gce_public_ip'},  # composed var
        {'prefix': '', 'separator': '', 'key': 'machineType'},
        {'prefix': '', 'separator': '', 'key': 'zone'},
        {'prefix': 'tag', 'key': 'gce_tags'},  # composed var
        {'prefix': 'status', 'key': 'status | lower'},
        # NOTE: image hostvar is enabled via retrieve_image_info option
        {'prefix': '', 'separator': '', 'key': 'image'},
    ]
    # This will be used as the gce instance_id, must be universal, non-compat
    compose_dict = {'gce_id': 'id'}
    # Compatibility content
    # TODO: proper group_by and instance_filters support, irrelevant of compat mode
    # The gce.py script never sanitized any names in any way
    ret['use_contrib_script_compatible_sanitization'] = True
    # Perform extra API query to get the image hostvar
    ret['retrieve_image_info'] = True
    # Add in old hostvars aliases
    compose_dict.update(self._compat_compose_vars())
    # Non-default names to match script
    ret['hostnames'] = ['name', 'public_ip', 'private_ip']
    # end compatibility content
    if keyed_groups:
        ret['keyed_groups'] = keyed_groups
    if filters:
        ret['filters'] = filters
    if compose_dict:
        ret['compose'] = compose_dict
    # 'all' means no zone restriction; otherwise restrict to the given zones
    if inventory_update.source_regions and 'all' not in inventory_update.source_regions:
        ret['zones'] = inventory_update.source_regions.split(',')
    return ret
def build_script_private_data(self, inventory_update, private_data_dir):
    """Build the legacy script's .ini content (with caching disabled)."""
    cp = configparser.RawConfigParser()
    # by default, the GCE inventory source caches results on disk for
    # 5 minutes; disable this behavior
    cp.add_section('cache')
    cp.set('cache', 'cache_max_age', '0')
    return self.dump_cp(cp, inventory_update.get_cloud_credential())
class vmware(PluginFileInjector):
    """Injector for the VMware vCenter inventory source.

    Translates the legacy vmware_inventory.py script configuration into
    the vmware_vm_inventory plugin format while replicating the script's
    hostvars, hostnames, filters, and groups.
    """
    plugin_name = 'vmware_vm_inventory'
    initial_version = '2.9'
    ini_env_reference = 'VMWARE_INI_PATH'
    base_injector = 'managed'
    namespace = 'community'
    collection = 'vmware'

    @property
    def script_name(self):
        # Script file does not follow the usual class-name convention.
        return 'vmware_inventory.py'  # exception

    def inventory_as_dict(self, inventory_update, private_data_dir):
        """Build the plugin YAML config, mirroring legacy script behavior."""
        ret = super(vmware, self).inventory_as_dict(inventory_update, private_data_dir)
        ret['strict'] = False
        # Documentation of props, see
        # https://github.com/ansible/ansible/blob/devel/docs/docsite/rst/scenario_guides/vmware_scenarios/vmware_inventory_vm_attributes.rst
        UPPERCASE_PROPS = [
            "availableField",
            "configIssue",
            "configStatus",
            "customValue",  # optional
            "datastore",
            "effectiveRole",
            "guestHeartbeatStatus",  # optonal
            "layout",  # optional
            "layoutEx",  # optional
            "name",
            "network",
            "overallStatus",
            "parentVApp",  # optional
            "permission",
            "recentTask",
            "resourcePool",
            "rootSnapshot",
            "snapshot",  # optional
            "tag",
            "triggeredAlarmState",
            "value"
        ]
        NESTED_PROPS = [
            "capability",
            "config",
            "guest",
            "runtime",
            "storage",
            "summary",  # repeat of other properties
        ]
        ret['properties'] = UPPERCASE_PROPS + NESTED_PROPS
        ret['compose'] = {'ansible_host': 'guest.ipAddress'}  # default value
        ret['compose']['ansible_ssh_host'] = ret['compose']['ansible_host']
        # the ansible_uuid was unique every host, every import, from the script
        ret['compose']['ansible_uuid'] = '99999999 | random | to_uuid'
        # Expose lowercase aliases for the mixed-case top-level properties,
        # matching what the script produced.
        for prop in UPPERCASE_PROPS:
            if prop == prop.lower():
                continue
            ret['compose'][prop.lower()] = prop
        ret['with_nested_properties'] = True
        # ret['property_name_format'] = 'lower_case'  # only dacrystal/topic/vmware-inventory-plugin-property-format
        # process custom options
        vmware_opts = dict(inventory_update.source_vars_dict.items())
        # instance_filters / group_by take effect only if not already set
        # in source_vars (setdefault).
        if inventory_update.instance_filters:
            vmware_opts.setdefault('host_filters', inventory_update.instance_filters)
        if inventory_update.group_by:
            vmware_opts.setdefault('groupby_patterns', inventory_update.group_by)
        alias_pattern = vmware_opts.get('alias_pattern')
        if alias_pattern:
            ret.setdefault('hostnames', [])
            for alias in alias_pattern.split(','):  # make best effort
                striped_alias = alias.replace('{', '').replace('}', '').strip()  # make best effort
                if not striped_alias:
                    continue
                ret['hostnames'].append(striped_alias)
        host_pattern = vmware_opts.get('host_pattern')  # not working in script
        if host_pattern:
            stripped_hp = host_pattern.replace('{', '').replace('}', '').strip()  # make best effort
            ret['compose']['ansible_host'] = stripped_hp
            ret['compose']['ansible_ssh_host'] = stripped_hp
        host_filters = vmware_opts.get('host_filters')
        if host_filters:
            ret.setdefault('filters', [])
            for hf in host_filters.split(','):
                striped_hf = hf.replace('{', '').replace('}', '').strip()  # make best effort
                if not striped_hf:
                    continue
                ret['filters'].append(striped_hf)
        else:
            # default behavior filters by power state
            ret['filters'] = ['runtime.powerState == "poweredOn"']
        groupby_patterns = vmware_opts.get('groupby_patterns')
        ret.setdefault('keyed_groups', [])
        if groupby_patterns:
            for pattern in groupby_patterns.split(','):
                stripped_pattern = pattern.replace('{', '').replace('}', '').strip()  # make best effort
                ret['keyed_groups'].append({
                    'prefix': '', 'separator': '',
                    'key': stripped_pattern
                })
        else:
            # default groups from script
            for entry in ('guest.guestId', '"templates" if config.template else "guests"'):
                ret['keyed_groups'].append({
                    'prefix': '', 'separator': '',
                    'key': entry
                })
        return ret

    def build_script_private_data(self, inventory_update, private_data_dir):
        """Build the legacy script's .ini with credentials and custom options."""
        cp = configparser.RawConfigParser()
        credential = inventory_update.get_cloud_credential()
        # Allow custom options to vmware inventory script.
        section = 'vmware'
        cp.add_section(section)
        cp.set('vmware', 'cache_max_age', '0')
        cp.set('vmware', 'validate_certs', str(settings.VMWARE_VALIDATE_CERTS))
        cp.set('vmware', 'username', credential.get_input('username', default=''))
        cp.set('vmware', 'password', credential.get_input('password', default=''))
        cp.set('vmware', 'server', credential.get_input('host', default=''))
        vmware_opts = dict(inventory_update.source_vars_dict.items())
        if inventory_update.instance_filters:
            vmware_opts.setdefault('host_filters', inventory_update.instance_filters)
        if inventory_update.group_by:
            vmware_opts.setdefault('groupby_patterns', inventory_update.group_by)
        for k, v in vmware_opts.items():
            cp.set(section, k, str(v))
        return self.dump_cp(cp, credential)
class openstack(PluginFileInjector):
    """Injector for the OpenStack inventory source.

    Both the legacy script and the plugin consume a clouds.yaml-style
    file referenced by OS_CLIENT_CONFIG_FILE.
    """
    ini_env_reference = 'OS_CLIENT_CONFIG_FILE'
    plugin_name = 'openstack'
    # minimum version of 2.7.8 may be theoretically possible
    initial_version = '2.8'  # Driven by consistency with other sources
    namespace = 'openstack'
    collection = 'cloud'

    @property
    def script_name(self):
        # Script file does not follow the usual class-name convention.
        return 'openstack_inventory.py'  # exception

    def _get_clouds_dict(self, inventory_update, cred, private_data_dir, mk_cache=True):
        """Build the clouds.yaml data structure from the credential and vars.

        When mk_cache is True, a cache path is ensured (a temp dir is
        created if the user did not supply one).
        """
        openstack_data = _openstack_data(cred)
        openstack_data['clouds']['devstack']['private'] = inventory_update.source_vars_dict.get('private', True)
        if mk_cache:
            # Retrieve cache path from inventory update vars if available,
            # otherwise create a temporary cache path only for this update.
            cache = inventory_update.source_vars_dict.get('cache', {})
            if not isinstance(cache, dict):
                cache = {}
            if not cache.get('path', ''):
                cache_path = tempfile.mkdtemp(prefix='openstack_cache', dir=private_data_dir)
                cache['path'] = cache_path
            openstack_data['cache'] = cache
        ansible_variables = {
            'use_hostnames': True,
            'expand_hostvars': False,
            'fail_on_errors': True,
        }
        provided_count = 0
        for var_name in ansible_variables:
            if var_name in inventory_update.source_vars_dict:
                ansible_variables[var_name] = inventory_update.source_vars_dict[var_name]
                provided_count += 1
        if provided_count:
            # Must we provide all 3 because the user provides any 1 of these??
            # this probably results in some incorrect mangling of the defaults
            openstack_data['ansible'] = ansible_variables
        return openstack_data

    def build_script_private_data(self, inventory_update, private_data_dir, mk_cache=True):
        """Serialize the clouds dict to YAML, keyed by the credential."""
        credential = inventory_update.get_cloud_credential()
        private_data = {'credentials': {}}
        openstack_data = self._get_clouds_dict(inventory_update, credential, private_data_dir, mk_cache=mk_cache)
        private_data['credentials'][credential] = yaml.safe_dump(
            openstack_data, default_flow_style=False, allow_unicode=True
        )
        return private_data

    def build_plugin_private_data(self, inventory_update, private_data_dir):
        # Credentials can be passed in the same way as the script did
        # but do not create the tmp cache file
        return self.build_script_private_data(inventory_update, private_data_dir, mk_cache=False)

    def get_plugin_env(self, inventory_update, private_data_dir, private_data_files):
        """Plugin env is the base env plus the script env (shared file ref)."""
        env = super(openstack, self).get_plugin_env(inventory_update, private_data_dir, private_data_files)
        script_env = self.get_script_env(inventory_update, private_data_dir, private_data_files)
        env.update(script_env)
        return env

    def inventory_as_dict(self, inventory_update, private_data_dir):
        """Build the plugin YAML config with script-compatible defaults."""
        def use_host_name_for_name(a_bool_maybe):
            # Maps the script's use_hostnames flag onto the plugin's
            # inventory_hostname option.
            if not isinstance(a_bool_maybe, bool):
                # Could be specified by user via "host" or "uuid"
                return a_bool_maybe
            elif a_bool_maybe:
                return 'name'  # plugin default
            else:
                return 'uuid'
        ret = super(openstack, self).inventory_as_dict(inventory_update, private_data_dir)
        ret['fail_on_errors'] = True
        ret['expand_hostvars'] = True
        ret['inventory_hostname'] = use_host_name_for_name(False)
        # Note: mucking with defaults will break import integrity
        # For the plugin, we need to use the same defaults as the old script
        # or else imports will conflict. To find script defaults you have
        # to read source code of the script.
        #
        # Script Defaults           Plugin Defaults
        # 'use_hostnames': False,   'name' (True)
        # 'expand_hostvars': True,  'no' (False)
        # 'fail_on_errors': True,   'no' (False)
        #
        # These are, yet again, different from ansible_variables in script logic
        # but those are applied inconsistently
        source_vars = inventory_update.source_vars_dict
        for var_name in ['expand_hostvars', 'fail_on_errors']:
            if var_name in source_vars:
                ret[var_name] = source_vars[var_name]
        if 'use_hostnames' in source_vars:
            ret['inventory_hostname'] = use_host_name_for_name(source_vars['use_hostnames'])
        return ret
class rhv(PluginFileInjector):
    """ovirt uses the custom credential templating, and that is all
    """
    plugin_name = 'ovirt'
    base_injector = 'template'
    namespace = 'ovirt'
    collection = 'ovirt_collection'

    @property
    def script_name(self):
        # Script file does not follow the usual class-name convention.
        return 'ovirt4.py'  # exception
class satellite6(PluginFileInjector):
    """Injector for the Red Hat Satellite 6 (foreman) inventory source."""
    plugin_name = 'foreman'
    ini_env_reference = 'FOREMAN_INI_PATH'
    initial_version = '2.9'
    # No base injector, because this does not work in playbooks. Bug??
    namespace = 'theforeman'
    collection = 'foreman'

    @property
    def script_name(self):
        # Script file does not follow the usual class-name convention.
        return 'foreman.py'  # exception

    def build_script_private_data(self, inventory_update, private_data_dir):
        """Build the legacy foreman script .ini.

        The satellite6_* source vars are hoisted into the [ansible]
        section; everything else lands in the [foreman] section verbatim.
        """
        cp = configparser.RawConfigParser()
        credential = inventory_update.get_cloud_credential()
        section = 'foreman'
        cp.add_section(section)

        # Defaults for the [ansible] section options, overridden by
        # matching satellite6_* source vars below.
        group_patterns = '[]'
        group_prefix = 'foreman_'
        want_hostcollections = 'False'
        want_ansible_ssh_host = 'False'
        rich_params = 'False'
        want_facts = 'True'

        foreman_opts = dict(inventory_update.source_vars_dict.items())
        foreman_opts.setdefault('ssl_verify', 'False')
        for k, v in foreman_opts.items():
            if k == 'satellite6_group_patterns' and isinstance(v, str):
                group_patterns = v
            elif k == 'satellite6_group_prefix' and isinstance(v, str):
                group_prefix = v
            elif k == 'satellite6_want_hostcollections' and isinstance(v, bool):
                want_hostcollections = v
            elif k == 'satellite6_want_ansible_ssh_host' and isinstance(v, bool):
                want_ansible_ssh_host = v
            elif k == 'satellite6_rich_params' and isinstance(v, bool):
                rich_params = v
            elif k == 'satellite6_want_facts' and isinstance(v, bool):
                want_facts = v
            else:
                cp.set(section, k, str(v))

        if credential:
            cp.set(section, 'url', credential.get_input('host', default=''))
            cp.set(section, 'user', credential.get_input('username', default=''))
            cp.set(section, 'password', credential.get_input('password', default=''))

        section = 'ansible'
        cp.add_section(section)
        cp.set(section, 'group_patterns', group_patterns)
        cp.set(section, 'want_facts', str(want_facts))
        cp.set(section, 'want_hostcollections', str(want_hostcollections))
        cp.set(section, 'group_prefix', group_prefix)
        cp.set(section, 'want_ansible_ssh_host', str(want_ansible_ssh_host))
        cp.set(section, 'rich_params', str(rich_params))

        # Disable on-disk result caching.
        section = 'cache'
        cp.add_section(section)
        cp.set(section, 'path', '/tmp')
        cp.set(section, 'max_age', '0')

        return self.dump_cp(cp, credential)

    def get_plugin_env(self, inventory_update, private_data_dir, private_data_files):
        """Pass credentials to the plugin via FOREMAN_* env vars."""
        # this assumes that this is merged
        # https://github.com/ansible/ansible/pull/52693
        credential = inventory_update.get_cloud_credential()
        ret = super(satellite6, self).get_plugin_env(inventory_update, private_data_dir, private_data_files)
        if credential:
            ret['FOREMAN_SERVER'] = credential.get_input('host', default='')
            ret['FOREMAN_USER'] = credential.get_input('username', default='')
            ret['FOREMAN_PASSWORD'] = credential.get_input('password', default='')
        return ret

    def inventory_as_dict(self, inventory_update, private_data_dir):
        """Build the plugin YAML config with script-compatible groups."""
        ret = super(satellite6, self).inventory_as_dict(inventory_update, private_data_dir)
        # Compatibility content
        # Keyed groups sanitize names the same way the old script did
        # (lowercase, strip spaces, replace non-alphanumerics with '_').
        group_by_hostvar = {
            "environment": {"prefix": "foreman_environment_",
                            "separator": "",
                            "key": "foreman['environment_name'] | lower | regex_replace(' ', '') | "
                                   "regex_replace('[^A-Za-z0-9\_]', '_') | regex_replace('none', '')"},  # NOQA: W605
            "location": {"prefix": "foreman_location_",
                         "separator": "",
                         "key": "foreman['location_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_')"},
            "organization": {"prefix": "foreman_organization_",
                             "separator": "",
                             "key": "foreman['organization_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_')"},
            "lifecycle_environment": {"prefix": "foreman_lifecycle_environment_",
                                      "separator": "",
                                      "key": "foreman['content_facet_attributes']['lifecycle_environment_name'] | "
                                             "lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_')"},
            "content_view": {"prefix": "foreman_content_view_",
                             "separator": "",
                             "key": "foreman['content_facet_attributes']['content_view_name'] | "
                                    "lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_')"}
        }
        ret['keyed_groups'] = [group_by_hostvar[grouping_name] for grouping_name in group_by_hostvar]
        ret['legacy_hostvars'] = True
        ret['want_facts'] = True
        ret['want_params'] = True
        return ret
class cloudforms(PluginFileInjector):
    """Injector for the CloudForms inventory source (script-only).

    There is no corresponding Ansible inventory plugin yet, so only the
    legacy script path is implemented.
    """
    # plugin_name = 'FIXME'  # contribute inventory plugin to Ansible
    ini_env_reference = 'CLOUDFORMS_INI_PATH'
    # Also no base_injector because this does not work in playbooks
    # namespace = ''  # does not have a collection
    # collection = ''

    def build_script_private_data(self, inventory_update, private_data_dir):
        """Build the legacy cloudforms script .ini with credentials."""
        cp = configparser.RawConfigParser()
        credential = inventory_update.get_cloud_credential()
        section = 'cloudforms'
        cp.add_section(section)

        if credential:
            cp.set(section, 'url', credential.get_input('host', default=''))
            cp.set(section, 'username', credential.get_input('username', default=''))
            cp.set(section, 'password', credential.get_input('password', default=''))
            cp.set(section, 'ssl_verify', "false")

        # Only pass through the options the script understands.
        cloudforms_opts = dict(inventory_update.source_vars_dict.items())
        for opt in ['version', 'purge_actions', 'clean_group_keys', 'nest_tags', 'suffix', 'prefer_ipv4']:
            if opt in cloudforms_opts:
                cp.set(section, opt, str(cloudforms_opts[opt]))

        # Disable caching via a throwaway temp dir inside private_data_dir.
        section = 'cache'
        cp.add_section(section)
        cp.set(section, 'max_age', "0")
        cache_path = tempfile.mkdtemp(
            prefix='cloudforms_cache',
            dir=private_data_dir
        )
        cp.set(section, 'path', cache_path)

        return self.dump_cp(cp, credential)
class tower(PluginFileInjector):
    """Injector for the Tower/AWX inventory source."""
    plugin_name = 'tower'
    base_injector = 'template'
    initial_version = '2.8'  # Driven by "include_metadata" hostvars
    namespace = 'awx'
    collection = 'awx'

    def get_script_env(self, inventory_update, private_data_dir, private_data_files):
        """Add the target inventory id and license type to the script env."""
        env = super(tower, self).get_script_env(inventory_update, private_data_dir, private_data_files)
        env['TOWER_INVENTORY'] = inventory_update.instance_filters
        env['TOWER_LICENSE_TYPE'] = get_licenser().validate().get('license_type', 'unlicensed')
        return env

    def inventory_as_dict(self, inventory_update, private_data_dir):
        """Build the plugin YAML config; credentials come in via env vars."""
        ret = super(tower, self).inventory_as_dict(inventory_update, private_data_dir)
        # Credentials injected as env vars, same as script
        try:
            # plugin can take an actual int type
            identifier = int(inventory_update.instance_filters)
        except ValueError:
            # inventory_id could be a named URL
            identifier = iri_to_uri(inventory_update.instance_filters)
        ret['inventory_id'] = identifier
        ret['include_metadata'] = True  # used for license check
        return ret
# Register every injector subclass by class name so that
# InventorySourceOptions can look up the injector for a source type.
for cls in PluginFileInjector.__subclasses__():
    InventorySourceOptions.injectors[cls.__name__] = cls
|
tlamadon/pygfe | pygrpfe/helpers.py | <filename>pygrpfe/helpers.py
"""
Helper functions to run BLM2.
"""
import torch
import numpy as np
import pandas as pd
from sklearn.cluster import KMeans
from statsmodels.discrete.discrete_model import Probit
import patsy as patsy
import matplotlib.pylab as plt
from scipy.optimize import minimize
from scipy import sparse
from scipy.stats import norm
ax = np.newaxis
def group(M_itm, max_k=100, whitening=True, scale=True):
    """Assign each unit to a group via k-means on whitened micro moments.

    Parameters
    ----------
    M_itm : ndarray of shape (N, T, M)
        Micro moments, e.g. ``M_itm = np.stack([Y, X], axis=2)``.
    max_k : int
        Upper bound on the number of groups tried (exclusive).
    whitening : bool
        If False, skip the whitening transform (identity is used).
    scale : bool
        If True, standardize the moments over (i, t) before grouping.

    Returns
    -------
    tuple
        ``(ID_i, G)`` — flat array of group labels of length N, and the
        whitening matrix that was applied.
    """
    ni, nt, nm = M_itm.shape

    # Idiom fix: truthiness test instead of `== True` / `== False` (E712).
    if scale:
        M_itm = (M_itm - M_itm.mean((0, 1))) / M_itm.std((0, 1))

    # following the document on weighting moments
    H_im = M_itm.mean(axis=1)

    # computing the within noise
    R = (M_itm - H_im[:, np.newaxis, :]).reshape((nt * ni, nm))
    Vw = np.matmul(R.transpose(), R) / (ni * nt ** 2)
    Vb = np.matmul(H_im.transpose(), H_im) / ni - Vw
    G = np.matmul(Vb, np.linalg.inv(Vb + Vw))

    if not whitening:
        G = np.eye(nm)

    Mw = np.matmul(G, H_im.transpose()).transpose()
    noise_level = np.matmul(G, np.matmul(Vw, G.transpose())).sum()

    # Grow k until the within-cluster error drops below the noise level;
    # if it never does, the labels from k = max_k - 1 are used.
    for k_i in range(2, max_k):
        km = KMeans(algorithm='full', init='random', n_clusters=k_i, n_init=100, max_iter=1000)
        res = km.fit(Mw)
        sum_sqr = ((Mw - res.cluster_centers_[res.labels_, :]) ** 2).sum()
        if sum_sqr / ni < noise_level:
            break

    ID_i = res.labels_.reshape((ni, 1))
    return (ID_i.flatten(), G)
def train(model, maxiter=10000):
    """Fit *model* with Adam gradient descent on ``model.loss()``.

    BUG FIX: the ``maxiter`` parameter used to be immediately overwritten
    by a hard-coded ``maxiter = 10000``, so callers could not control the
    iteration budget. The override is removed; the default is raised to
    10000 so the no-argument behavior is unchanged.

    Parameters
    ----------
    model : object
        Must expose ``params`` (iterable of tensors with grad) and a
        ``loss()`` method returning a scalar tensor.
    maxiter : int
        Maximum number of optimizer steps (default 10000).

    Returns
    -------
    np.ndarray
        Per-iteration loss values, length ``maxiter`` (zeros beyond the
        early-stopping point).
    """
    optimizer = torch.optim.Adam(model.params, lr=0.1)
    loss_vec = np.zeros(maxiter)
    loss_last = 1e5
    for i in range(maxiter):
        optimizer.zero_grad()
        loss = model.loss()
        # gradient descent
        loss.backward(retain_graph=True)
        optimizer.step()
        # stop when the relative loss change is negligible
        loss2 = loss.item()
        dloss = np.abs(loss_last - loss2) / np.abs(loss_last)
        if (dloss < 1e-8):
            break
        loss_last = loss2
        loss_vec[i] = loss2
    return (loss_vec)
|
tlamadon/pygfe | pygrpfe/__init__.py | <reponame>tlamadon/pygfe<filename>pygrpfe/__init__.py
from .helpers import group,train
|
tlamadon/pygfe | tests/test_pygrpfe.py | <filename>tests/test_pygrpfe.py
'''
Tests for pygfe
DATE: Jan 2021
'''
import pytest
import pandas as pd
import pygrpfe as gfe
import numpy as np
ax = np.newaxis
def test_pygfe_group():
    """Smoke-test gfe.group on simulated censored-outcome panel data."""
    def dgp_simulate(ni, nt, gamma=2.0, eps_sd=1.0):
        """ simulates according to the model """
        alpha = np.random.normal(size=(ni))
        eps = np.random.normal(size=(ni, nt))
        v = np.random.normal(size=(ni, nt))
        # non-censored outcome
        W = alpha[:, ax] + eps * eps_sd
        # utility
        U = (np.exp(alpha * (1 - gamma)) - 1) / (1 - gamma)
        U = U - U.mean()
        # costs
        C1 = -1; C0 = 0;
        # binary decision
        Y = np.ones((ni, nt))
        Y[:, 0] = U.squeeze() > C1 + v[:, 0]
        for t in range(1, nt):
            # participation depends on last period's decision via the cost
            Y[:, t] = U > C1 * Y[:, t - 1] + C0 * (1 - Y[:, t - 1]) + v[:, t]
        W = W * Y
        return (W, Y)

    ni = 200
    nt = 20
    Y, D = dgp_simulate(ni, nt, 2.0)
    # stack outcome and decision into (N, T, M) micro moments
    M_itm = np.stack([Y, D], axis=2)
    G_i, _ = gfe.group(M_itm)
    # at least two groups should be detected
    assert G_i.max() > 0
|
guozequn/code-tips | Custom_Logger.py | <filename>Custom_Logger.py<gh_stars>1-10
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @Time : 2019/9/6 15:33
# @Author : ZeQun
# @File : logger.py
import os
import logbook
from logbook import Logger, RotatingFileHandler
from logbook.more import ColorizedStderrHandler
class CustomLog(object):
    """
    Custom logbook-based logger.

    A ``logs/<name>.<level>`` file is created lazily, only when a message
    of that level is first emitted.
    """

    def __init__(self, log_name, custom_path=False, debug=False):
        self.name = log_name
        self.logger = Logger(log_name)
        # Levels for which a file handler has already been installed.
        self.levels = []
        self.log_path = os.path.dirname(os.path.realpath(__file__))
        if custom_path:
            self.log_dir = os.path.join(custom_path, "logs")
        else:
            self.log_dir = os.path.join(self.log_path, "logs")
        if debug:
            # Mirror all records to stderr with colors when debugging.
            debug_handler = ColorizedStderrHandler(bubble=True)
            debug_handler.formatter = self.user_handler_log_formatter
            self.logger.handlers.append(debug_handler)

    def __getattr__(self, level):
        # Lazily install a rotating file handler the first time a level
        # (e.g. ``log.info``) is used, then delegate to the real logger.
        if level not in self.levels:
            if not os.path.isdir(self.log_dir):
                os.makedirs(self.log_dir)
            log_file = os.path.join(self.log_dir, '{}.{}'.format(self.name, level))
            file_handler = RotatingFileHandler(
                filename=log_file,
                level=getattr(logbook, level.upper()),
                max_size=1000000000
            )
            file_handler.formatter = self.user_handler_log_formatter
            self.logger.handlers.append(file_handler)
            # BUG FIX: record the level so repeated calls do not stack a
            # duplicate handler per call (which duplicated every log line).
            self.levels.append(level)
        return getattr(self.logger, level)

    @staticmethod
    def user_handler_log_formatter(record, handler):
        # BUG FIX: the format string had a literal placeholder where
        # ``{filename}`` belongs — the ``filename=`` argument below was
        # computed but never interpolated.
        formatter = "[{dt}] [{level}] [{filename}] [{func_name}] [{lineno}] {msg}".format(
            dt=record.time,
            level=record.level_name,
            filename=os.path.split(record.filename)[-1],
            func_name=record.func_name,
            lineno=record.lineno,
            msg=record.message,
        )
        return formatter
if __name__ == '__main__':
    # Smoke test: emit several records through the lazily-created
    # 'info' handler with colorized stderr output enabled.
    custom_log = CustomLog("test", debug=True)
    custom_log.info('user_info')
    custom_log.info('user_info')
    custom_log.info('user_info')
    custom_log.info('user_info')
|
kandjiabdou/turtleiutv | turtleIutv/Turtle.py | import os.path
import math
from ipywidgets import widgets
from notebook import nbextensions
from traitlets import Unicode, List, Int, Bool
from IPython.display import display
class Turtle(widgets.DOMWidget):
    """Jupyter turtle-graphics widget backed by the turtleIutvjs nbextension.

    Python records drawing actions and syncs them to JavaScript, which
    renders them on an HTML canvas.
    """
    _view_module = Unicode(
        "nbextensions/turtleIutvjs/turtlewidget").tag(sync=True)
    _view_name = Unicode('TurtleView').tag(sync=True)
    # TODO: All actions are synchronized with the turtlewidget module,
    # which allows python to pass actions and data to the JavaScript.
    # For each action, this code is used to add it and
    # manage the synchronization
    # >> self.actions = self.actions + [action]
    # Synchronization is done when self.actions changes value
    # self.actions.append(action) does not sync
    # because the action array has not changed but just increased in size.
    # Here is the data to communicate
    actions = List(sync=True)
    canvasSize = Int(sync=True)
    canvasElementSize = Int(sync=True)
    turtleShow = Bool(sync=True)
    backgroundColor = Unicode().tag(sync=True)

    def __init__(self, width, limit, color, turtle):
        '''Create a Turtle.
        Example::
            t = Turtle(canvas, 500,1000)
        '''
        super(Turtle, self).__init__()
        self.canvasElementSize = width
        self.canvasSize = limit
        self.backgroundColor = color
        self.turtleShow = turtle
        install_js()
        display(self)
        # scale that allows you to place the points
        # according to the size of the canvas and the grid
        self.scale = width/limit
        self.angle = 90
        self.filling = False
        self.color = "black"
        self.fillingColor = "black"
        self.fillingStrokeColor = "black"
        self.actions = []
        # start in the center of the canvas
        self.posX = self.canvasElementSize/2
        self.posY = self.canvasElementSize/2

    def pendown(self):
        '''Put down the pen. Turtles start with their pen down.
        Example::
            t.pendown()
        '''
        # action is a dictionary containing some informations
        action = dict(type="pen", value=True)
        self.actions = self.actions + [action]

    def penup(self):
        '''Lift up the pen.
        Example::
            t.penup()
        '''
        # When the user decides to raise or lower the pen
        # if the value is True,
        # shifting are drawn else only turtle moves
        action = dict(type="pen", value=False)
        self.actions = self.actions + [action]

    def speed(self, speed):
        '''Change the speed of the turtle (range 1-10).
        Example::
            t.speed(10) # Full speed
        '''
        # When the user decides change the turtle speed
        # with the new value "speed"
        action = dict(type="speed", value=speed)
        self.actions = self.actions + [action]

    def right(self, num):
        '''Turn the Turtle num degrees to the right.
        Example::
            t.right(90)
        '''
        # Rotate the turtle to the right with an angle num,
        # the direction is 1 (right)
        self.angle += num
        self.angle = self.angle % 360
        action = dict(type="rotation", value=num, sense=1)
        self.actions = self.actions + [action]

    def left(self, num):
        '''Turn the Turtle num degrees to the left.
        Example::
            t.left(90)
        '''
        # Rotate the turtle to the left with an angle num,
        # the direction is -1 (left)
        self.angle -= num
        self.angle = self.angle % 360
        action = dict(type="rotation", value=num, sense=-1)
        self.actions = self.actions + [action]

    def forward(self, num):
        '''Move the Turtle forward by num units.
        Example:
            t.forward(100)
        '''
        # Move the turtle by "n", first calculate
        # the coordinates (x, y) at the end of this movement.
        self.posX += num * self.scale * math.sin(math.radians(self.angle))
        self.posY -= num * self.scale * math.cos(math.radians(self.angle))
        action = dict(type="shifting", point=dict(x=self.posX, y=self.posY))
        self.actions = self.actions + [action]

    def backward(self, num):
        '''Move the Turtle backward by num units.
        Example::
            t.backward(100)
        '''
        # Same as forward
        self.posX -= num * self.scale * math.sin(math.radians(self.angle))
        self.posY += num * self.scale * math.cos(math.radians(self.angle))
        action = dict(
            type="shifting", point=dict(x=self.posX, y=self.posY))
        self.actions = self.actions + [action]

    def penColor(self, color):
        '''Change the color of the pen to color. Default is black.
        Example::
            t.penColor("red")
        '''
        # When the user decides change the turtle color
        # with the new value "color"
        action = dict(type="penColor", value=color)
        self.actions = self.actions + [action]

    def penSize(self, size):
        '''Change the size of the pen to size. Default is 3.
        Example::
            t.penSize(5)
        '''
        # When the user decides change the turtle size
        # with the new value "size"
        # Size must be between 1 and 10
        # otherwise it is readjust automatically
        action = dict(type="penSize", value=size)
        self.actions = self.actions + [action]

    def setposition(self, x, y):
        """Change the position of the turtle.
        Example::
            t.setposition(100, 100)
        """
        # toX and toY are coordinates of the arrival position
        toX = x * self.scale
        toY = y * self.scale
        # Do nothing if the position doesn't change
        if(self.posX == toX and self.posY == toY):
            return
        # First calculate the required rotation angle 'alpha'
        # before making the move
        # sensX and sensY are coordinates of the direction of the turtle
        sensX = self.posX + math.sin(math.radians(self.angle))
        sensY = self.posY - math.cos(math.radians(self.angle))
        # alpha is the angle needed to rotate,
        # it is calculated with two vectors :
        # (self.posX,self.posy)-->(toX, toY) and
        # (self.posX,self.posy)-->(sensX, sensY)
        alpha = math.degrees(
            math.atan2(toY - self.posY, toX - self.posX)
            - math.atan2(sensY - self.posY, sensX - self.posX))
        # If the angle is greater than a flat angle
        # it is brought back to an acute angle
        if(abs(alpha) > 180):
            alpha = (alpha - 360) % 360
        # Turn the turtle
        if(alpha > 0):
            self.right(alpha)
        else:
            self.left(-alpha)
        # Add the shifting to the action array
        self.posX = toX
        self.posY = toY
        action = dict(
            type="shifting", point=dict(x=self.posX, y=self.posY))
        self.actions = self.actions + [action]

    def begin_fill(self, borderColor, fillColor):
        """ Start the process of coloring a figure, if the path is closed.
        Color can be a string like :
        "red", "blue", "yellow", "brown", "black", "purple", "green"
        or hexadecimal
        Example::
            t.begin_fill("red", "blue")
        """
        self.filling = True
        self.fillingColor = fillColor
        self.fillingStrokeColor = borderColor
        action = dict(
            type="filling", value=True, fillColor=fillColor,
            strokeColor=borderColor)
        self.actions = self.actions + [action]

    def end_fill(self):
        """ Stop the process of coloring a figure.
        Example::
            t.end_fill()
        """
        self.filling = False
        action = dict(type="filling", value=False)
        self.actions = self.actions + [action]

    def circle(self, radius, extent=360):
        """Draw a circle, or part of a circle.
        From its current position, the turtle will draw
        a series of short lines, turning slightly between each.
        If radius is positive, it will turn to its left;
        a negative radius will make it turn to its right.
        Example::
            t.circle(50)
        """
        # BUG FIX: extent now defaults to 360 (a full circle) so the
        # documented single-argument call actually works; the module-level
        # wrapper already used 360 as its default.
        if(radius == 0):
            return
        # step is number of segments to make the circle or an arc
        # and w et w2 angle of rotation
        frac = abs(extent)/360
        steps = 1+int(min(11+abs(radius)/6.0, 59.0)*frac)
        w = 1.0 * extent / steps
        w2 = 0.5 * w
        num = 2.0 * radius * math.sin(w2*math.pi/180.0)
        if radius < 0:
            w, w2 = -w, -w2
        # Raise the turtle and go to a radius distance
        # before drawing the circle or arc
        self.penup()
        self.right(90)
        self.forward(radius)
        self.left(90)
        self.pendown()
        # If the coloring process was started we continue to do it
        # Because the raising of the pen stop the process of coloring
        if(self.filling):
            self.end_fill()
            self.begin_fill(self.fillingStrokeColor, self.fillingColor)
        self.left(w2)
        for i in range(steps):
            self.forward(num)
            self.left(w)
        self.left(-w2)
        # Raise the turtle and go back to its starting point
        # Because the raising of the pen stop the process of coloring
        self.penup()
        self.left(90)
        self.forward(radius)
        self.right(90)
        self.pendown()
        if(self.filling):
            self.end_fill()
            self.begin_fill(self.fillingStrokeColor, self.fillingColor)

    def home(self):
        '''Move the Turtle to its home position.
        Example::
            t.home()
        '''
        center = self.canvasSize/2
        self.setposition(center, center)
def install_js():
    """Install the bundled turtleIutvjs nbextension for the current user."""
    pkgdir = os.path.dirname(__file__)
    nbextensions.install_nbextension(
        os.path.join(pkgdir, 'turtleIutvjs'), user=True)
|
kandjiabdou/turtleiutv | turtleIutv/__init__.py | <filename>turtleIutv/__init__.py
from .Turtle import Turtle
from random import randint
__version__ = '1.1'
"""
# This file initializes the turtle with a global variable
# It uses the Turtle class to do procedural programming
# and avoid object programming,
# because beginners are not familiar with the Object.
# It's just rewritten functions of the Turtle class.
# So to understand these functions
# you have to look at the Turtle class.
"""
turtleTmp = None
def drawing(width=500, limit=1000, color="#99CCFF", turtle=True):
    """Start a drawing by creating the module-level Turtle.
    Example::
        drawing()
    """
    # All other module functions delegate to this shared instance.
    global turtleTmp
    turtleTmp = Turtle(width, limit, color, turtle)
def home():
    '''Move the Turtle to its home position (canvas center).
    Example::
        home()
    '''
    turtleTmp.home()
def forward(n):
    '''Move the Turtle forward by n units.
    Example::
        forward(100)
    '''
    turtleTmp.forward(n)
def backward(n):
    '''Move the Turtle backward by n units.
    Example::
        backward(100)
    '''
    turtleTmp.backward(n)
def left(n):
    '''Turn the Turtle n degrees to the left.
    Example::
        left(90)
    '''
    turtleTmp.left(n)
def speed(n):
    """Change the speed of the Turtle (clamped to the range 1-10).
    Example::
        speed(5)
    """
    # clamp n into [1, 10] before delegating
    turtleTmp.speed(max(1, min(n, 10)))
def right(n):
    '''Turn the Turtle n degrees to the right.
    Example::
        right(90)
    '''
    turtleTmp.right(n)
def goto(x, y):
    """Change the position of the Turtle.
    Example::
        goto(100, 100)
    """
    turtleTmp.setposition(x, y)
def up():
    """Lift up the pen (moves stop drawing).
    Example::
        up()
    """
    turtleTmp.penup()
def down():
    """Put down the pen. Turtles start with their pen down.
    Example::
        down()
    """
    turtleTmp.pendown()
def setColor(color):
    """Change the color of the pen.
    CSS Color can be a string like (16 string color) :
    "red", "blue", "yellow", "brown", "black", "purple", "green" ...
    or hexadecimal
    Example::
        setColor("#ABD011")
    """
    # remember the color so begin_fill() can default to it
    turtleTmp.color = color
    turtleTmp.penColor(color)
def setRandomColor():
    """Change the color of the pen with a random color.
    Uses setColor() with getRandomColor() as parameter.
    Example::
        setRandomColor()
    """
    setColor(getRandomColor())
def setSize(size):
"""Change the size of the pen.
Example::
setSize(5)
"""
turtleTmp.penSize(max(1, min(size, 100)))
def begin_fill(*args,**kwargs):
    """ Start the process of coloring a figure, if the path is closed.
    Color can be a string like :
    "red", "blue", "yellow", "brown", "black", "purple", "green"
    or hexadecimal
    Example::
        begin_fill("red")
    """
    # Dispatch on how the caller supplied the colors.  The underlying call is
    # always turtleTmp.begin_fill(borderColor, fillColor); a missing color
    # defaults to the turtle's current pen color.
    if(len(args)==2 and len(kwargs)==0):
        # begin_fill(border, fill)
        turtleTmp.begin_fill(args[0], args[1])
    elif (len(args)==1 and len(kwargs)==0):
        # begin_fill(border): fill defaults to the current pen color.
        turtleTmp.begin_fill(args[0], turtleTmp.color)
    elif (len(args)==0 and len(kwargs)==1):
        if("borderColor" in kwargs):
            turtleTmp.begin_fill(kwargs["borderColor"], turtleTmp.color)
        else:
            turtleTmp.begin_fill(turtleTmp.color, kwargs["fillColor"])
    elif (len(args)==1 and len(kwargs)==1):
        # NOTE(review): here the positional argument plays the role the
        # keyword does NOT name (fill when borderColor= is given, border when
        # fillColor= is given) -- inconsistent with the single-positional case
        # above, where the positional is the border.  Confirm intended.
        if("borderColor" in kwargs):
            turtleTmp.begin_fill(kwargs["borderColor"], args[0])
        else:
            turtleTmp.begin_fill(args[0], kwargs["fillColor"])
    elif(len(kwargs)==2 and len(args)==0):
        turtleTmp.begin_fill(kwargs["borderColor"], kwargs["fillColor"])
    else:
        # No usable arguments: both colors default to the current pen color.
        turtleTmp.begin_fill(turtleTmp.color, turtleTmp.color)
def end_fill():
    """Finish the fill started by begin_fill().

    Example::

        end_fill()
    """
    turtleTmp.end_fill()

def circle(radius, extent=360):
    """Draw an arc of ``extent`` degrees with the given radius.

    The turtle approximates the arc by a series of short line segments,
    turning slightly between each.  A positive radius turns the turtle to
    its left, a negative radius to its right.

    Example::

        circle(50)
    """
    turtleTmp.circle(radius, extent)
def getRandomColor():
    """Return a random CSS color string of the form ``rgb(r, g, b)``.

    Each channel is an integer drawn uniformly from 0..255.  The result is
    consumed by setRandomColor()/setColor().

    Example::
        getRandomColor()
        >> rgb(0, 255, 15)
    """
    r = randint(0, 255)
    g = randint(0, 255)
    b = randint(0, 255)
    # str.format is clearer than chained str() concatenation and keeps the
    # exact "rgb(r, g, b)" shape.
    return "rgb({}, {}, {})".format(r, g, b)
|
IfThenMaker/MutosPlan | python/youjyun.py | import json
import os
import pandas as pd
# Fixed ordering of the ten deity names used as output keys by main()/combine().
teikei = [
    "豪風命","微風命","陽照命","灯篭命",
    "厳山命","砂丘命","轟音命","閃光命","海水命","湖水命"
]
""" sclaped text to json """
def get_textdata(file):
    """Read a UTF-8 text file (resolved against the CWD) and return its contents."""
    path = os.path.join(os.getcwd(), file)
    # Bug fix: ``path`` was computed but the raw ``file`` argument was opened
    # instead; open the resolved path so this matches get_jsondata().
    with open(path,'r', encoding='utf-8') as f:
        dt = f.read()
    return dt

def get_jsondata(file):
    """Read a UTF-8 JSON file (resolved against the CWD) and return the parsed object."""
    path = os.path.join(os.getcwd(), file)
    with open(path,'r', encoding='utf-8') as f:
        dt = json.load(f)
    return dt

def to_json(dict_file, name):
    """Dump *dict_file* as pretty-printed UTF-8 JSON to ``<name>.json``."""
    name = f'{name}.json'
    with open(name, 'w', encoding='utf-8') as f:
        json.dump(dict_file, f, indent=2, ensure_ascii=False)
def main(file):
    """Parse alternating key/value lines from *file* into a per-deity record dict.

    The input alternates: even-indexed lines look like 'kashin/megurikashin',
    odd-indexed lines hold '(saikuru)'.
    """
    data = get_textdata(file).split('\n')
    # print(len(txt),txt)
    # NOTE(review): despite the names, ``odd`` yields the even indices
    # (0, 2, ...) and ``even`` the odd ones (1, 3, ...).
    odd = lambda x: range(0,len(x),2)
    even = lambda x: range(1,len(x),2)
    # Pair each even line (key) with the following odd line (value).
    res = {data[i]: data[v] for i,v in zip(odd(data),even(data))}
    # Re-key onto the fixed teikei order.  ``v`` iterates the raw keys:
    # kashin/megurikashin are sliced from the key itself, saikuru comes from
    # its paired value with the parentheses stripped.
    res = {k: {
        'kashin': v[:v.index('/')],
        'megurikashin': v[v.index('/')+1:],
        'saikuru': res[v].replace('(','').replace(')','')}
        for k,v in zip(teikei, res)}
    print(len(res), res)
    # res = teikei
    return res
    # txtdic = {k: v in z}
def combine():
    """Merge every JSON file under python/json/ into one deity-keyed mapping
    and write it to python/json/megurikashin.json."""
    import glob
    files = glob.glob('python/json/*.json')
    print(len(files), files)
    dic = {}
    for f in files:
        data = get_jsondata(f)
        # Drop the first character and the extension to recover the year-ish
        # key (e.g. 'y10.json' -> '10').
        n = os.path.basename(f)[1:].replace('.json','')
        print(n)
        dic[n] = data
    # Invert the nesting: deity -> file-key -> record.
    ndic = {k:{} for k in teikei}
    for n in dic:
        for key in dic[n]:
            ndic[key][n] = dic[n][key]
    print('ddsg', ndic)
    name = os.path.join(os.getcwd(), 'python/json/megurikashin')
    to_json(ndic, name)
# Script entry point: only combine() is active; the main() text-to-JSON
# pipeline is kept commented out for manual runs.
if __name__ == '__main__':
    na = 'y10'
    # NOTE(review): ``file`` is unused while main(file) is commented out.
    file = os.path.join(os.getcwd(),f'python/{na}.txt')
    # js = main(file)
    combine()
    # print(js)
    # name = os.path.join(os.getcwd(), f'python/json/{na}')
    # to_json(js, name)
#
|
IfThenMaker/MutosPlan | python/balance.py | import json
import os
import pandas as pd
# Per-deity balance scores along three axes:
#   kyoujyaku: presumably strong/weak, inyo: yin/yang, danjyo: male/female
#   -- TODO confirm axis semantics.
# Each axis is a permutation of the integers -4..5 (a ranking, no ties).
dic = {
    "豪風命": {
        'kyoujyaku': 3,
        'inyo': 2,
        'danjyo': -1,
    },
    "微風命": {
        'kyoujyaku': -3,
        'inyo': 0,
        'danjyo': -4,
    },
    "陽照命": {
        'kyoujyaku': 4,
        'inyo': 5,
        'danjyo': 2,
    },
    "灯篭命": {
        'kyoujyaku': -2,
        'inyo': 1,
        'danjyo': -3,
    },
    "厳山命": {
        'kyoujyaku': 2,
        'inyo': -3,
        'danjyo': 4,
    },
    "砂丘命": {
        'kyoujyaku': -1,
        'inyo': -4,
        'danjyo': 0,
    },
    "轟音命": {
        'kyoujyaku': 5,
        'inyo': 4,
        'danjyo': 5,
    },
    "閃光命": {
        'kyoujyaku': 0,
        'inyo': 3,
        'danjyo': 3,
    },
    "海水命": {
        'kyoujyaku': 1,
        'inyo': -1,
        'danjyo': 1,
    },
    "湖水命": {
        'kyoujyaku': -4,
        'inyo': -2,
        'danjyo': -2,
    },
}
""" sclaped text to json """
def get_textdata(file):
    """Read a UTF-8 text file (resolved against the CWD) and return its contents."""
    path = os.path.join(os.getcwd(), file)
    # Bug fix: ``path`` was computed but the raw ``file`` argument was opened
    # instead; open the resolved path so this matches get_jsondata().
    with open(path,'r', encoding='utf-8') as f:
        dt = f.read()
    return dt

def get_jsondata(file):
    """Read a UTF-8 JSON file (resolved against the CWD) and return the parsed object."""
    path = os.path.join(os.getcwd(), file)
    with open(path,'r', encoding='utf-8') as f:
        dt = json.load(f)
    return dt

def to_json(dict_file, name):
    """Dump *dict_file* as pretty-printed UTF-8 JSON to ``<name>.json``."""
    name = f'{name}.json'
    with open(name, 'w', encoding='utf-8') as f:
        json.dump(dict_file, f, indent=2, ensure_ascii=False)
# def main():
# Script scratchpad: the balance-table processing is commented out; the only
# live code demonstrates that 'str' is a builtin name, not a keyword.
if __name__ == '__main__':
    na = 'setuAtoSu'
    # NOTE(review): ``file`` is unused by the live code below.
    file = os.path.join(os.getcwd(),f'python/{na}.txt')
    # main()
    # na = 'setuAtoSuHead'
    # file = os.path.join(os.getcwd(),f'python/{na}.txt')
    # head(file)
    # name = os.path.join(os.getcwd(), f'python/json/{na}')
    # to_json(js, name)
    import keyword
    print(keyword.iskeyword('str'))  # prints False
#
|
IfThenMaker/MutosPlan | python/jyunsetu.py | import json
import os
import pandas as pd
""" sclaped text to json """
def get_textdata(file):
    """Read a UTF-8 text file (resolved against the CWD) and return its contents."""
    path = os.path.join(os.getcwd(), file)
    # Bug fix: ``path`` was computed but the raw ``file`` argument was opened
    # instead; open the resolved path so this matches get_jsondata().
    with open(path,'r', encoding='utf-8') as f:
        dt = f.read()
    return dt

def get_jsondata(file):
    """Read a UTF-8 JSON file (resolved against the CWD) and return the parsed object."""
    path = os.path.join(os.getcwd(), file)
    with open(path,'r', encoding='utf-8') as f:
        dt = json.load(f)
    return dt

def to_json(dict_file, name):
    """Dump *dict_file* as pretty-printed UTF-8 JSON to ``<name>.json``."""
    name = f'{name}.json'
    with open(name, 'w', encoding='utf-8') as f:
        json.dump(dict_file, f, indent=2, ensure_ascii=False)
def main(name):
    """Convert python/text/<name>.txt (tab-separated rows) into python/json/<name>.json."""
    # na = 'himeguri'
    file = os.path.join(os.getcwd(),f'python/text/{name}.txt')
    data = get_textdata(file).split('\n')
    # 1-based line number -> list of tab-separated fields; blank lines dropped.
    data = {i + 1: d.split('\t') for i,d in enumerate(data) if d}
    print(len(data), data)
    # Rows are indexed 1..12 (months); transpose so each original line becomes
    # a record keyed by month.
    df = pd.DataFrame(data, index = [i for i in range(1,13)])
    print(df)
    dic = df.T.to_dict()
    print(dic)
    to_json(dic, f'python/json/{name}')
if __name__ == '__main__':
    na = 'jyunsetuyou'
    # NOTE(review): immediately overwritten -- only 'jyunsetuin' is processed.
    na = 'jyunsetuin'
    # NOTE(review): ``file`` is unused; main() derives its own path from ``na``.
    file = os.path.join(os.getcwd(),f'python/{na}.txt')
    main(na)
    # na = 'setuAtoSuHead'
    # file = os.path.join(os.getcwd(),f'python/{na}.txt')
    # head(file)
    # name = os.path.join(os.getcwd(), f'python/json/{na}')
    # to_json(js, name)
#
|
IfThenMaker/MutosPlan | python/himeguri.py | import json
import os
import pandas as pd
""" sclaped text to json """
def get_textdata(file):
    """Read a UTF-8 text file (resolved against the CWD) and return its contents."""
    path = os.path.join(os.getcwd(), file)
    # Bug fix: ``path`` was computed but the raw ``file`` argument was opened
    # instead; open the resolved path so this matches get_jsondata().
    with open(path,'r', encoding='utf-8') as f:
        dt = f.read()
    return dt

def get_jsondata(file):
    """Read a UTF-8 JSON file (resolved against the CWD) and return the parsed object."""
    path = os.path.join(os.getcwd(), file)
    with open(path,'r', encoding='utf-8') as f:
        dt = json.load(f)
    return dt

def to_json(dict_file, name):
    """Dump *dict_file* as pretty-printed UTF-8 JSON to ``<name>.json``."""
    name = f'{name}.json'
    with open(name, 'w', encoding='utf-8') as f:
        json.dump(dict_file, f, indent=2, ensure_ascii=False)
def main():
    """Convert python/himeguri.txt (12 rows x 10 columns of ints) into a
    year -> month -> value mapping written to 2032.json."""
    na = 'himeguri'
    file = os.path.join(os.getcwd(),f'python/{na}.txt')
    data = get_textdata(file)
    data = [d.split('\t') for d in data.split('\n')]
    dic = {y: {} for y in range(2032, 2042)}
    # dic = {}
    # Rows are months 1..12; columns are years 2032..2041.
    for m, d in zip(range(1,13), data):
        for y, num in zip(range(2032,2042),d):
            dic[y][m] = int(num)
            print(y, num)
        print(m ,d)
    print(len(data), data)
    print(dic)
    to_json(dic, '2032')
if __name__ == '__main__':
    # NOTE(review): ``na``/``file`` are unused -- main() hard-codes its own
    # input path (python/himeguri.txt).
    na = 'setuAtoSu'
    file = os.path.join(os.getcwd(),f'python/{na}.txt')
    main()
    # na = 'setuAtoSuHead'
    # file = os.path.join(os.getcwd(),f'python/{na}.txt')
    # head(file)
    # name = os.path.join(os.getcwd(), f'python/json/{na}')
    # to_json(js, name)
#
|
IfThenMaker/MutosPlan | python/setu.py | import json
import os
import pandas as pd
# Katakana labels used as row indices for the seasonal tables (see __main__).
kata = ['ア', 'イ', 'ウ', 'エ', 'オ', 'カ', 'キ', 'ク', 'ケ', 'コ', 'サ', 'シ', 'ス', 'セ', 'ソ', 'タ', 'チ', 'ツ', 'テ', 'ト', 'ナ', 'ニ', 'ヌ', 'ネ', 'ノ', 'ハ']
# NOTE(review): this list is never read -- it is shadowed by ``def head(file)``
# below before any use.
head = ['4', '4', '4', '4', '4', '4', '5', '5', '5', '4', '4', '5', '5']
print(kata)
""" sclaped text to json """
def get_textdata(file):
    """Read a UTF-8 text file (resolved against the CWD) and return its contents."""
    path = os.path.join(os.getcwd(), file)
    # Bug fix: ``path`` was computed but the raw ``file`` argument was opened
    # instead; open the resolved path so this matches get_jsondata().
    with open(path,'r', encoding='utf-8') as f:
        dt = f.read()
    return dt

def get_jsondata(file):
    """Read a UTF-8 JSON file (resolved against the CWD) and return the parsed object."""
    path = os.path.join(os.getcwd(), file)
    with open(path,'r', encoding='utf-8') as f:
        dt = json.load(f)
    return dt

def to_json(dict_file, name):
    """Dump *dict_file* as pretty-printed UTF-8 JSON to ``<name>.json``."""
    name = f'{name}.json'
    with open(name, 'w', encoding='utf-8') as f:
        json.dump(dict_file, f, indent=2, ensure_ascii=False)
def main(body_text, head_text, index):
    """Build {label: {'startDate': ..., 'tuki': {...}}} from a tab-separated
    body table and a companion head file of start-date digits."""
    na = body_text
    file = os.path.join(os.getcwd(),f'python/{na}.txt')
    data = get_textdata(file).split('\n')
    bdata = [d.split('\t') for d in data if d]
    print(len(bdata),bdata)
    # index = [i for i in range(1,13)]
    print(index)
    # Columns 1..12 are months; rows are the katakana labels in ``index``.
    df = pd.DataFrame({key: val for key, val in
        zip([i for i in range(1,13)], bdata)},
        index = index)
    dic = df.T.to_dict()
    print(dic)
    na = head_text
    file = os.path.join(os.getcwd(),f'python/{na}.txt')
    data = get_textdata(file).split('\n')
    # The second-to-last character of each non-empty head line is the
    # start-date digit for the corresponding label.
    hdata = [d[-2:-1] for d in data if d]
    print(len(hdata), hdata)
    # Keep only the last character of each month cell as the 'tuki' value.
    return {key:{
        'startDate': startDate,
        'tuki': {k: dic[key][k][-1:] for k in dic[key]}
        } for key,startDate in zip(dic, hdata)}
    # setu =
    # arr = {mark:{
    #     'startDate': hdata[i],
    #     'tuki': bdata[i]
    # } for i, mark in enumerate(zip(kataArr))}
    # print(arr)
def head(file):
    """Debug helper: print the start-date digits parsed from setuSetoHaHead.txt.

    NOTE(review): the ``file`` parameter is ignored -- the path is rebuilt
    locally; confirm whether it should be used instead.
    """
    na = 'text/setuSetoHaHead'
    file = os.path.join(os.getcwd(),f'python/{na}.txt')
    data = get_textdata(file).split('\n')
    data = [d[-2:-1] for d in data if d]
    print(len(data), data)
if __name__ == '__main__':
    # head('f')
    # NOTE(review): body/head/index are reassigned three times below, so only
    # the last dataset (setuHitoMi) is actually processed; the earlier pairs
    # look like dead assignments kept for quick manual switching.
    body = 'text/setuAtoSu'
    head = 'text/setuAtoSuHead'
    # headArr = ['4', '4', '4', '4', '4', '4', '5', '5', '5', '4', '4', '5', '5']
    index = kata[:13]
    body = 'text/setuSetoHa'
    head = 'text/setuSetoHaHead'
    # headArr = ['4', '4', '4', '4', '4', '4', '5', '5', '5', '4', '4', '5', '5']
    index = kata[13:27]
    body = 'text/setuHitoMi'
    head = 'text/setuHitoMiHead'
    # headArr = ['4', '4', '4', '4', '4', '4', '5', '5', '5', '4', '4', '5', '5']
    index = ['ヒ','フ','へ','ホ','マ','ミ']
    print('ind', len(index),index)
    js = main(body, head, index)
    # body[5:] strips the leading 'text/' to name the output JSON.
    to_json(js, f'python/json/{body[5:]}')
    # na = 'setuAtoSuHead'
    # file = os.path.join(os.getcwd(),f'python/{na}.txt')
    # head(file)
    # name = os.path.join(os.getcwd(), f'python/json/{na}')
    # to_json(js, name)
#
|
IfThenMaker/MutosPlan | python/setu_bind.py | import json
import os
# Deity names; NOTE(review): unused in this module -- confirm it can be removed.
teikeimei = ['豪風命', '微風命', '陽照命', '灯篭命', '厳山命', '砂丘命', '轟音命', '閃光命', '海水命', '湖水命']
""" sclaped text to json """
def get_textdata(file):
    """Read a UTF-8 text file (resolved against the CWD) and return its contents."""
    path = os.path.join(os.getcwd(), file)
    # Bug fix: ``path`` was computed but the raw ``file`` argument was opened
    # instead; open the resolved path so this matches get_jsondata().
    with open(path,'r', encoding='utf-8') as f:
        dt = f.read()
    return dt

def get_jsondata(file):
    """Read a UTF-8 JSON file (resolved against the CWD) and return the parsed object."""
    path = os.path.join(os.getcwd(), file)
    with open(path,'r', encoding='utf-8') as f:
        dt = json.load(f)
    return dt

def to_json(dict_file, name):
    """Dump *dict_file* as pretty-printed UTF-8 JSON to ``<name>.json``."""
    name = f'{name}.json'
    with open(name, 'w', encoding='utf-8') as f:
        json.dump(dict_file, f, indent=2, ensure_ascii=False)
def main():
    """Load the three seasonal-table JSON files and merge them into one dict.

    Later files overwrite duplicate keys (same order as before: AtoSu, then
    HitoMi, then SetoHa).
    """
    merged = {}
    for part in ('python/json/setuAtoSu.json',
                 'python/json/setuHitoMi.json',
                 'python/json/setuSetoHa.json'):
        merged.update(get_jsondata(part))
    print(merged)
    return merged
if __name__ == '__main__':
    na = 'python/text/jyunkashin.txt'
    # NOTE(review): ``file`` is unused, and the join below would double the
    # 'python/' prefix and '.txt' suffix anyway -- confirm it can be removed.
    file = os.path.join(os.getcwd(),f'python/{na}.txt')
    js = main()
    # na = 'setuAtoSuHead'
    # file = os.path.join(os.getcwd(),f'python/{na}.txt')
    # head(file)
    # name = os.path.join(os.getcwd(), f'python/json/{na}')
    to_json(js, 'python/json/setu')
#
|
chriscampbell19/polygnome | functionOnKn.py | <reponame>chriscampbell19/polygnome
import vector
import tensorAlgebra
import algebra
import bimoduleMapDecorator
class functionOnKn(vector.vector):
    """
    File: functionOnKn.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: A function on Kn represented as a vector.
    """
    def __init__(self, alg, basisOfKn, images):
        # ``images`` are the vector components: images[i] is the value the
        # function takes on basisOfKn[i].
        self.codomain = self.algebra = alg
        self.basisOfKn = basisOfKn #This is a basis of the intersection space in the free algebra
        freeAlgebra = algebra.algebra()
        # Functions on Kn are A-bimodule maps from A (x) F (x) A.
        self.domain = tensorAlgebra.tensorAlgebra([alg,freeAlgebra,alg])
        vector.vector.__init__(self,images)
    def __call__(self, tensor):
        # Evaluate by extending the basis lookup bilinearly over the bimodule.
        @bimoduleMapDecorator.bimoduleMapDecorator(self.domain,self.codomain)
        def helper(PT):
            # Raises ValueError if PT is not in the stored basis.
            index = self.basisOfKn.index(PT)
            return self[index]
        return helper(tensor)
    def __add__(self,other):
        # Pointwise addition; both functions must share algebra and basis.
        assert isinstance(other, functionOnKn)
        return functionOnKn(self.algebra, self.basisOfKn, [a + b for a,b in zip(self.components, other.components)])
    def __mul__(self,other):
        # Scalar multiplication applied to every component.
        return functionOnKn(self.algebra, self.basisOfKn, [a * other for a in self.components] )
if __name__ == '__main__':
    pass
|
chriscampbell19/polygnome | tensor.py | import abstractTensor
import pureTensor
import composite
class tensor(composite.composite,abstractTensor.abstractTensor):
    """
    File: tensor.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: A composite class of pureTensors
    """
    ##############################################################################
    ###### CONSTRUCTORS
    ##############################################################################
    def __init__(self,polynomials=()):
        # Accepts 0 (the zero tensor), a single pureTensor, a sequence of
        # pureTensors, or a sequence of polynomials (expanded distributively
        # into pure tensors).
        if polynomials == 0:
            polynomials = ()
        pureTensors = ()
        if isinstance(polynomials, pureTensor.pureTensor):
            pureTensors = (polynomials,)
        # NOTE(review): this len() also runs when ``polynomials`` is a single
        # pureTensor or a lazy iterable (Python 3 ``map`` has no len()) --
        # confirm callers only pass concrete sequences here.
        if len(polynomials) >= 1:
            if isinstance(polynomials[0], pureTensor.pureTensor): # If this is a list of pureTensors, just convert to tuple and carry on
                pureTensors = tuple(polynomials)
            else: #Otherwise, we assume it is a list of polynomials and try to separate it into a list of pure tensors
                # Distribute: one pure tensor per choice of a monomial from
                # each polynomial factor (recursing over the tail).
                def pureTensorHelper(polynomials):
                    assert len(polynomials) > 0
                    # Any zero factor annihilates the whole product.
                    for i in polynomials:
                        if i.isZero():
                            return tensor()
                    pureTensors = []
                    if len(polynomials) == 1:
                        for mono in polynomials[0]:
                            pureTensors.append(pureTensor.pureTensor( (mono,) ))
                    else:
                        tempTensors = pureTensorHelper(polynomials[1:])
                        for mono in polynomials[0]:
                            for pT in tempTensors:
                                pureTensors.append(pureTensor.pureTensor( (mono,) ).tensorProduct(pT))
                    return tuple(pureTensors)
                # NOTE(review): on a zero factor the helper returns tensor(),
                # not a tuple, so the assertion below would fire -- confirm
                # zero polynomials never reach this constructor.
                pureTensors = pureTensorHelper(polynomials)
        assert isinstance(pureTensors,tuple)
        composite.composite.__init__(self,pureTensors)
        self.pureTensors = self.components
    ##############################################################################
    ###### MATHEMATICAL METHODS
    ##############################################################################
    def __mul__(self,other):
        # Right-multiply every pure tensor; the empty (zero) tensor absorbs.
        if len(self) == 0:
            return self
        else:
            return tensor( map(lambda x: x * other, self))
    def __rmul__(self,other):
        # Left multiplication, mirroring __mul__.
        if len(self) == 0:
            return self
        else:
            return tensor( map(lambda x: other * x, self))
    def __add__(self,other):
        if other == 0:
            return self
        if isinstance(other,abstractTensor.abstractTensor):
            return composite.composite.__add__(self,other)
        else:
            return NotImplemented
    def tensorProduct(self,other):
        # Distribute the tensor product over this sum of pure tensors.
        answer = tensor()
        for i in self:
            answer = answer + i.tensorProduct(other)
        return answer
|
chriscampbell19/polygnome | bimoduleMapDecorator.py | import pureTensor
class bimoduleMapDecorator(object):
    """
    File: bimoduleMapDecorator.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: A decorator that extends a function defined on a tensor algebra bilinearly with respect to some algebra.
    You must pass it the domain and codomain tensor algebras in order for it to reduce input and output for you.
    """
    def __init__(self, domain, codomain):
        self.domain = domain
        self.codomain = codomain
    def __call__(self,func):
        def wrapped_func(tens):
            if tens == 0:
                return self.codomain.zero()
            tens = tens.clean()
            tens = self.domain.reduce(tens)
            firstItem = True
            # Extend bilinearly: strip the outer algebra factors off each pure
            # tensor, apply ``func`` to the 1 (x) middle (x) 1 part, then
            # multiply the outer factors (and coefficient) back on.
            for pure in tens:
                left =pure[0]
                right = pure[-1]
                middle = pureTensor.pureTensor(1).tensorProduct(pure.subTensor(1,len(pure)-1)).tensorProduct(1)
                if firstItem:
                    answer = pure.coefficient * left * func(middle) * right
                    firstItem = False
                else:
                    answer = answer + pure.coefficient * left * func(middle) * right
            # NOTE(review): if the reduced tensor contains no pure tensors the
            # loop never runs and ``answer`` is unbound (NameError here) --
            # confirm reduce() never yields an empty, non-zero tensor.
            return self.codomain.reduce(answer)
        return wrapped_func
|
chriscampbell19/polygnome | polygnomeObject.py | from abc import ABCMeta, abstractmethod
class polygnomeObject(object):
    # Python 2-style ABC declaration; under Python 3 this attribute has no
    # effect -- TODO confirm the target interpreter version.
    __metaclass__ = ABCMeta
    # NOTE(review): because this string follows the __metaclass__ assignment it
    # is a bare expression statement, not the class docstring (__doc__ is None).
    """
    File: printableObject.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: An abstract superclass that every object must inherit from. Outlines basic methods
    every polygnome class must have.
    """
    @abstractmethod
    def __repr__(self): pass #for printing within python
    @abstractmethod
    def toLatex(self): pass #so that every object will know how to output its own latex representation
|
chriscampbell19/polygnome | monomial.py | <reponame>chriscampbell19/polygnome
import re
import abstractPolynomial
import coefficient
import polynomial
class monomial(abstractPolynomial.abstractPolynomial):
    """
    File: monomial.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: A monomial is basically a coefficient and a tuple of generators.
    """
    ##############################################################################
    ###### CONSTRUCTORS
    ##############################################################################
    # A generator is a single letter optionally followed by digits, e.g. x, y12.
    generatorRE = re.compile(r"(?P<letter>[a-zA-Z])(?P<digits>\d*)")
    def __init__(self, coeff=None, generators=() ):
        # Coerce bare numbers/strings into a coefficient object.
        if not isinstance(coeff,coefficient.coefficient):
            coeff = coefficient.coefficient(coeff)
        # A bare string is treated as a single generator.
        if type(generators) is str:
            generators = (generators,)
        generators = tuple(generators)
        for i in generators:
            assert re.match(monomial.generatorRE,i)
        self.coefficient = coeff.clean()
        self.generators = generators
    ##############################################################################
    ###### SORTING METHODS
    ##############################################################################
    def clean(self):
        # Return a copy with the coefficient simplified.
        return monomial(self.coefficient.clean(),self.generators)
    def submonomial(self,a,b):
        """Returns the monomial from position a to position b (right hand open).
        e.g. xy._submonomial(0,1) = x. Sets coefficient to one"""
        return self[a:b]
    def withCoefficientOf1(self):
        """Returns a monomial with the same list of generators but with a coefficient
        of 1"""
        return self.submonomial(0, len(self.generators))
    def __iter__(self):
        # A monomial iterates as the one-term sum containing itself, so code
        # that loops over a polynomial's terms also works on a bare monomial.
        yield self
    def __getitem__(self,index):
        """Access the generators as submonomials"""
        # Indexing/slicing always drops the coefficient (result has coefficient 1).
        if isinstance(index,slice):
            return monomial(1,self.generators[index])
        return monomial(1,[self.generators[index]])
    ##############################################################################
    ###### MATHEMATICAL METHODS
    ##############################################################################
    def degree(self):
        # Degree = number of generator factors.
        return len(self.generators)
    def isAddable(self,other):
        """Tests whether the generator tuples are equal"""
        return self.generators == other.generators
    def isZero(self):
        return self.coefficient.isZero()
    def __add__(self,other ):
        if isinstance(other,monomial):
            # Like terms merge coefficients; unlike terms form a polynomial.
            if self.isAddable(other):
                newCoefficient = self.coefficient + other.coefficient
                return monomial(newCoefficient,self.generators)
            else:
                return polynomial.polynomial((self,other))
        elif isinstance(other,coefficient.coefficient) or type(other) in [str,float,int]:
            #in this case we treat whatever we are adding as though it is a
            #coefficient lying in the underlying field
            return self + monomial(other)
        else:
            return NotImplemented
    def __mul__(self,other):
        if isinstance(other,monomial):
            # Non-commutative product: concatenate generator tuples.
            newCoeff = self.coefficient * other.coefficient
            newGenerators = self.generators + other.generators
            return monomial(newCoeff,newGenerators)
        elif isinstance(other,coefficient.coefficient) or type(other) in [float,int,str]:
            return self * monomial(other)
        else:
            return NotImplemented
    def __rmul__(self,other):
        return self * other #this case should only hit for coefficients or numbers in which case the multiplication is commutative
    ##############################################################################
    ###### PRINTING AND TYPING
    ##############################################################################
    def __repr__(self): # TODO: add the +1 -1 stuff here.
        if self.isZero():
            return "0"
        elif self.degree() == 0:
            return repr(self.coefficient)
        else:
            # Coefficients of +/-1 are printed as ''/'-'.
            if self.coefficient == 1:
                return ''.join(self.generators)
            if self.coefficient == -1:
                return '-' + ''.join(self.generators)
            return repr(self.coefficient) + ''.join(self.generators)
    def toLatex(self):
        if self.isZero():
            return "0"
        elif self.degree() == 0:
            return self.coefficient.toLatex()
        else:
            #next two lines add subscripts before numbers
            temp = [re.match(monomial.generatorRE, x) for x in self.generators]
            temp = [ x.group('letter') + '_{' + x.group('digits')+ '}' if x.group('digits') != ''
                    else x.group('letter') for x in temp]
            #next block adds superscripts for repetitions:
            #consecutive equal generators collapse into gen^{count}.
            newTemp = []
            currentCount = 1
            for index, i in enumerate(temp):
                # Skip generators already folded into a previous power.
                if currentCount > 1:
                    currentCount -= 1
                    continue
                for j in temp[index+1:]:
                    if j == i:
                        currentCount += 1
                    else:
                        break
                if currentCount == 1:
                    newTemp.append(i)
                else:
                    newTemp.append(i + '^{' + str(currentCount) + '}')
            temp = newTemp
            if self.coefficient == 1:
                return ''.join(temp)
            elif self.coefficient == -1:
                return '-' + ''.join(temp)
            else:
                return self.coefficient.toLatex() + ''.join(temp)
def generators(inString):
    # Build degree-1 monomials from a space-separated string of names.
    # NOTE(review): returns a list under Python 2 but a lazy map object under
    # Python 3 -- confirm the intended target version.
    return map(lambda x: monomial(1,x),inString.split(' '))
if __name__ == '__main__':
    pass
|
chriscampbell19/polygnome | __init__.py | from monomial import generators,monomial
from polynomial import polynomial
from coefficient import coefficient
from relation import relation
from pureTensor import pureTensor
from tensor import tensor
from algebra import algebra
from tensorAlgebra import tensorAlgebra
from vector import vector
from chainMaps import b_n,k_1,k_2,k_3,k_4,i_1,i_2,i_3,m_2,m_1,m_1Dual,m_2Dual,k_2Dual,k_3Dual,k_4Dual,GerstenhaberBracket
from functionOnKn import functionOnKn
from doublyDefined import doublyDefined
from bimoduleMapDecorator import bimoduleMapDecorator
import latexWriters
|
chriscampbell19/polygnome | abstractTensor.py | <gh_stars>0
from abc import ABCMeta, abstractmethod
import arithmeticInterface
class abstractTensor(arithmeticInterface.arithmeticInterface):
    # Python 2-style ABC declaration; no effect under Python 3 -- TODO confirm
    # the target interpreter version.
    __metaclass__ = ABCMeta
    """
    File: abstractTensor.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: An abstract class for elements of tensor product algebras.
    """
    # NOTE(review): the abstract signature omits ``self``; harmless while the
    # method is abstract and always overridden, but confirm it was intended.
    @abstractmethod
    def __iter__(): pass
|
chriscampbell19/polygnome | coefficient.py | <reponame>chriscampbell19/polygnome<gh_stars>0
import re
import arithmeticInterface
class coefficient(arithmeticInterface.arithmeticInterface):
    """
    File: coefficient.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: A coefficient is an element of a commutative ring with variables of the form <letter><number>+ (e.g. a1 + a2 could be
    stored as a coefficient but not var1).
    """
    ##############################################################################
    ###### CONSTRUCTORS
    ##############################################################################
    def __init__(self, coeffs=None):
        # ``coeffs`` maps a product-of-variables string to its numeric factor;
        # the empty-string key holds the constant term.
        if type(coeffs) in [float,int]:
            coeffs = {'' : coeffs}
        elif type(coeffs) is str:
            # A leading '-' on a variable name means -1 times that variable.
            if len(coeffs) > 0 and coeffs[0] == '-':
                coeffs = {coeffs[1:] : -1}
            else:
                coeffs = {coeffs : 1}
        elif coeffs is None:
            coeffs = {'' : 1}
        assert isinstance(coeffs,dict)
        self.coeffs = coeffs
    ##############################################################################
    ###### SORTING METHODS
    ##############################################################################
    def _sortVars(self,variables):#this helper function sorts variables by splitting into an array using the varRE regex and then sorting the array and joining it all up again
        assert isinstance(variables, str)
        if variables == "":
            return ""
        arr = []
        # The regex matches from the front of the string, but group(1) holds
        # only the LAST letter+digits unit of that match; stripping its length
        # from the END therefore peels variables off right-to-left.
        # NOTE(review): assumes the whole string consists of valid units.
        while variables != "":
            m = re.match(r"([a-zA-Z][\d]*)+",variables)
            arr.append(m.group(1))
            variables = variables[:-len(m.group(1))]
        variables = "".join(sorted(arr))
        return variables
    def clean(self): # simplifies expressions like x1x2 + x2x1 into 2*x1x2
        newCoeffs = {}
        #This assumes length one variable names plus numbers. e.g. y100,x1,x,z
        for key in self.coeffs:
            newkey = self._sortVars(key)
            if newkey in newCoeffs:
                newCoeffs[newkey] += self.coeffs[key]
            else:
                newCoeffs[newkey] = self.coeffs[key]
        # Drop cancelled terms; normalise a true zero to {'': 0}.
        newCoeffs = {key : value for key, value in newCoeffs.items() if value != 0}
        if newCoeffs == {}:
            newCoeffs = {'':0}
        return coefficient(newCoeffs)
    ##############################################################################
    ###### MATHEMATICAL METHODS
    ##############################################################################
    def isNum(self):
        """Is this coefficient just a number?"""
        # True when every non-constant term has a zero factor.
        for i in self.coeffs:
            if i != "":
                if self.coeffs[i] != 0:
                    return False
        return True
    def isZero(self):
        x = self.clean()
        for i in x.coeffs:
            if x.coeffs[i] !=0:
                return False
        return True
    def __add__(self,other ):
        if isinstance(other,coefficient):
            # Merge the two term dicts, summing factors on shared keys.
            newCoeffs = {}
            for i in other.coeffs:
                if i in self.coeffs:
                    newCoeffs[i] = self.coeffs[i]+other.coeffs[i]
                else:
                    newCoeffs[i]=other.coeffs[i]
            for i in self.coeffs:
                if i in other.coeffs:
                    continue
                else:
                    newCoeffs[i] = self.coeffs[i]
            newCoefficient = coefficient(newCoeffs)
            return newCoefficient.clean()
        elif type(other) in [float,int,str,dict]:
            return self + coefficient(other)
        else:
            return NotImplemented
    def __mul__(self,other):
        if isinstance(other,coefficient):
            # Distribute: concatenated variable strings are re-sorted by clean().
            newCoeffs = {}
            for i in self.coeffs:
                for j in other.coeffs:
                    if i+j in newCoeffs:
                        newCoeffs[i+j] += self.coeffs[i]*other.coeffs[j]
                    else:
                        newCoeffs[i+j] = self.coeffs[i]*other.coeffs[j]
            newCoefficient = coefficient(newCoeffs)
            return newCoefficient.clean()
        elif type(other) in [float,int,str,dict]:
            return self * coefficient(other)
        else:
            return NotImplemented
    def __rmul__(self,other):
        # The ring is commutative, so reflected multiplication just delegates.
        if type(other) in [float,int,str,dict]:
            return self * other
        else:
            return NotImplemented
    def __getitem__(self,index):
        return self.coeffs[index]
    def __iter__(self):
        # Iterates the variable-string keys.
        for i in self.coeffs:
            yield i
    ##############################################################################
    ###### PRINTING AND TYPING
    ##############################################################################
    def __repr__(self):
        if self.isZero():
            ret = "0"
        else:
            # Multi-term sums are wrapped in parentheses; a single term is not
            # (bracketFlag == True means "no brackets needed").
            bracketFlag = False
            if len(self.coeffs) == 1:
                bracketFlag = True
                ret = ""
            else:
                ret = "("
            # Per-term formatting: suppress explicit 1/-1 factors next to a
            # variable, keep bare 1/-1 for the constant term.
            ret += "+".join( str(self.coeffs[i])+i if (self.coeffs[i]!=1 and self.coeffs[i]!=0 and self.coeffs[i]!=-1)\
                    else i if (self.coeffs[i] == 1 and i != "")\
                    else str(1) if self.coeffs[i] == 1\
                    else "-"+i if self.coeffs[i] == -1 and i!= ''\
                    else '-1' if self.coeffs[i] == -1\
                    else "0" for i in self.coeffs)
            if not bracketFlag:
                ret += ")"
        return ret
    def toLatex(self):
        # Turn e.g. 'a12' into 'a_{12}' throughout the repr string.
        string = self.__repr__()
        varWithNumRE = re.compile(r"([a-zA-Z])(\d*)")
        return re.sub(varWithNumRE, r'\1_{\2}', string)
if __name__ == '__main__':
    pass
|
chriscampbell19/polygnome | latexWriters.py | def listOfObjectsToLatex(lst, numPerLine=5): #this prints out numPerLine vectors in a row and then prints a new line and carries on. This does add \( and \) at beginning and end.
    # NOTE(review): this module uses Python 2 print statements and will not
    # parse under Python 3.
    print '\\( '
    for inum, i in enumerate(lst):
        print i.toLatex()
        # Close the math block every numPerLine items, except after the last.
        if inum % numPerLine == numPerLine - 1:
            if not (inum == len(lst) -1):
                print '\\) \n \n \\('
    print '\\)'
def latexOpen():
    # Emit the document preamble (report class + local MyThesis package).
    print '\\documentclass[11pt, oneside]{report}'
    print '\\usepackage{MyThesis}'
    print '\\begin{document}'
def latexClose():
    # Emit the matching end of the document started by latexOpen().
    print "\\end{document}"
|
chriscampbell19/polygnome | polynomial.py | import abstractPolynomial
import monomial
import coefficient
import composite
class polynomial(composite.composite,abstractPolynomial.abstractPolynomial):
    """
    File: polynomial.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: The polynomial class is the composite class for abstractPolynomials.
    """
    ##############################################################################
    ###### CONSTRUCTORS
    ##############################################################################
    def __init__(self,monomials=()):
        # Accept either a single monomial or an iterable of monomials.
        if isinstance(monomials, monomial.monomial):
            monomials = (monomials,)
        monomials = tuple(monomials)
        composite.composite.__init__(self,monomials)
        self.monomials = self.components
    ##############################################################################
    ###### MATHEMATICAL METHODS
    ##############################################################################
    def __mul__(self, other):
        # The zero polynomial (no terms) absorbs any product.
        if len(self) == 0:
            return self
        if isinstance(other,monomial.monomial) or (type(other) in [str,float,int]) or isinstance(other,coefficient.coefficient):
            # Scalar/monomial multiplication distributes over our terms.
            newMonos = []
            for i in self:
                newMonos.append(i * other)
            return polynomial(tuple(newMonos)).clean()
        if isinstance(other,polynomial):
            if len(other.monomials) == 0:
                return other
            # Polynomial product: pairwise products of all terms.
            newMonos = []
            for mono1 in self:
                for mono2 in other.monomials:
                    newMonos.append(mono1 * mono2)
            return polynomial(tuple(newMonos)).clean()
        # Bug fix: the original fell off the end and implicitly returned None
        # for unsupported operand types.  NotImplemented lets Python try the
        # reflected operation and raise a proper TypeError otherwise.
        return NotImplemented
    def __rmul__(self,other):
        if isinstance(other,monomial.monomial):
            other = polynomial(other)
            return (other * self).clean()
        elif (type(other) in [str,float,int]) or isinstance(other,coefficient.coefficient):
            # Scalars commute with polynomial multiplication.
            return self * other
        else:
            return NotImplemented
    def __add__(self,other):
        if isinstance(other,abstractPolynomial.abstractPolynomial):
            return composite.composite.__add__(self,other)
        elif (type(other) in [str,float,int]) or isinstance(other,coefficient.coefficient):
            # Treat scalars as constant monomials.
            return self + monomial.monomial(other)
        else:
            return NotImplemented
if __name__ == '__main__':
    pass
|
chriscampbell19/polygnome | chainMaps.py | from pureTensor import pureTensor
import relation
from tensor import tensor
from monomial import monomial
from tensorAlgebra import tensorAlgebra
from algebra import algebra
from functionOnKn import functionOnKn
from bimoduleMapDecorator import bimoduleMapDecorator
"""
File: chainMaps.py
Author: <NAME>
Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
Github: https://github.com/campbellC
Description: The chain maps for the koszul and bar complexes.
"""
##############################################################################
###### Chain map definitions
##############################################################################
def b_n(tens,alg):
    """Bar-complex differential: maps a degree-n bar chain to degree n-1."""
    tens = tens.clean()
    if tens == 0:
        return 0
    else:
        # Size the (co)domain tensor algebras from the first pure tensor.
        for pure in tens:
            domain = tensorAlgebra([alg] * len(pure))
            codomain = tensorAlgebra([alg] * (len(pure) - 1))
            break
        @bimoduleMapDecorator(domain,codomain)
        def b_nInner(tens):
            assert isinstance(tens, pureTensor)
            assert len(tens) >= 2
            tens = tens.clean()
            if len(tens) == 2:
                return tens[0] * tens[1]
            else:
                # Alternating sum over multiplying adjacent factors; the tail
                # terms come from the recursive call below.
                answer = tens.subTensor(1,len(tens) )
                answer = answer - pureTensor(1).tensorProduct(tens[1]*tens[2]).tensorProduct(tens.subTensor(3,len(tens)))
                if len(tens) != 3:
                    answer = answer + tens.subTensor(0,2).tensorProduct(b_n(tens.subTensor(2,len(tens)), alg))
                return answer
        return b_nInner(tens)
def k_1(tens,alg):
    """Koszul differential K1 -> K0: a generator x maps to x(x)1 - 1(x)x."""
    freeAlgebra = algebra()
    K1 = tensorAlgebra([alg,freeAlgebra,alg])
    K0 = tensorAlgebra([alg,alg])
    @bimoduleMapDecorator(K1,K0)
    def k_1Inner(pT):
        assert isinstance(pT,pureTensor)
        generator = pT[1]
        return pureTensor([generator,1])-pureTensor([1,generator])
    return k_1Inner(tens)
def k_2(tens,alg):
    """Koszul differential K2 -> K1, splitting each term of the relation."""
    freeAlgebra = algebra()
    K1 = K2 = tensorAlgebra([alg,freeAlgebra,alg])
    @bimoduleMapDecorator(K2,K1)
    def k_2Inner(tens):
        assert isinstance(tens,pureTensor)
        answer= tensor()
        rel =tens.monomials[1]
        # Leading terms enter with a plus sign, lower-order terms with minus;
        # each degree-2 monomial is split between its two generators.
        for i in rel.leadingMonomial:
            answer = answer + i.coefficient * pureTensor((i.submonomial(0,1),i.submonomial(1,2), 1))
            answer = answer + i.coefficient * pureTensor((1,i.submonomial(0,1),i.submonomial(1,2)))
        for i in rel.lowerOrderTerms:
            answer = answer - i.coefficient * pureTensor((i.submonomial(0,1),i.submonomial(1,2), 1))
            answer = answer - i.coefficient * pureTensor((1,i.submonomial(0,1),i.submonomial(1,2)))
        return answer
    return k_2Inner(tens)
def k_3(tens,alg):
    """Koszul differential K3 -> K2, via the two representations of a
    doubly-defined element (left-hand minus right-hand)."""
    freeAlgebra = algebra()
    K3 = K2 = tensorAlgebra([alg,freeAlgebra,alg])
    @bimoduleMapDecorator(K3,K2)
    def k_3Inner(pT):
        answer= tensor()
        doublyDefined = pT[1]
        for generator, rel in doublyDefined.leftHandRepresentation:
            answer = answer + pureTensor((generator,rel,1)).clean()
        for rel, generator in doublyDefined.rightHandRepresentation:
            answer = answer - pureTensor((1,rel,generator)).clean()
        return answer
    return k_3Inner(tens)
def k_4(tens,alg):
    # NOTE(review): identical to k_3 apart from the local names -- confirm the
    # degree-4 differential really has the same formula or whether this is a
    # copy-paste placeholder.
    freeAlgebra = algebra()
    K4 = K3 = tensorAlgebra([alg,freeAlgebra,alg])
    @bimoduleMapDecorator(K4,K3)
    def k_4Inner(pT):
        answer= tensor()
        doublyDefined = pT[1]
        for generator, rel in doublyDefined.leftHandRepresentation:
            answer = answer + pureTensor((generator,rel,1)).clean()
        for rel, generator in doublyDefined.rightHandRepresentation:
            answer = answer - pureTensor((1,rel,generator)).clean()
        return answer
    return k_4Inner(tens)
def i_1(tens,alg):
    """Inclusion K1 -> B1 of the Koszul complex into the bar complex.

    In degree one the inclusion is the identity on pure tensors; only the
    decorator's source/target bookkeeping differs.
    """
    freeAlgebra = algebra()
    B1 = tensorAlgebra([alg] * 3)
    K1 = tensorAlgebra([alg,freeAlgebra,alg])
    @bimoduleMapDecorator(K1,B1)
    def i_1Inner(pT):
        return pT
    return i_1Inner(tens)
def i_2(tens,alg):
    """Inclusion K2 -> B2.

    Expands the relation in the middle slot into its words: each length-2
    term w contributes 1|w0|w1|1, with + sign for the leading monomial and
    - sign for the lower order terms.
    """
    freeAlgebra = algebra()
    B2 = tensorAlgebra([alg] * 4)
    K2 = tensorAlgebra([alg,freeAlgebra,alg])
    @bimoduleMapDecorator(K2,B2)
    def i_2Inner(pT):
        answer = tensor()
        rel = pT[1]
        for term in rel.leadingMonomial:
            answer = answer + term.coefficient * pureTensor((1,term[0],term[1],1))
        for term in rel.lowerOrderTerms:
            answer = answer - term.coefficient * pureTensor((1,term[0],term[1],1))
        return answer
    return i_2Inner(tens)
def i_3(tens,alg):
    """Inclusion K3 -> B3.

    NOTE(review): only the leftHandRepresentation of the doublyDefined
    element is consumed here (unlike k_3/k_4, which also use the
    right-hand one) - confirm this is the intended formula.
    """
    freeAlgebra = algebra()
    B3 = tensorAlgebra([alg] * 5)
    K3 = tensorAlgebra([alg,freeAlgebra,alg])
    @bimoduleMapDecorator(K3,B3)
    def i_3Inner(pT):
        answer = tensor()
        doublyDefined = pT[1]
        for generator, rel in doublyDefined.leftHandRepresentation:
            # Multiply the generator onto i_2 of the relation, then
            # re-attach an identity factor on the left.
            rightHandSide = generator * i_2(pureTensor([1,rel,1]),alg)
            answer = answer + pureTensor(1).tensorProduct(rightHandSide)
        return answer
    return i_3Inner(tens)
def m_2(abcd,alg):
    """Projection B2 -> K2 built from the reduction sequence.

    For a|b|c|d the word w = b*c is normalised step by step; each
    reduction step (reductionFunction, weight) contributes the pure tensor
    a * (left|relation|right) * d, scaled by the input coefficient and the
    step's weight.
    """
    B2 = tensorAlgebra([alg]*4)
    freeAlgebra = algebra()
    K2 = tensorAlgebra([alg,freeAlgebra,alg])
    @bimoduleMapDecorator(B2,K2)
    def m_2Inner(PT):
        assert isinstance(PT, pureTensor)
        assert len(PT) == 4
        PT = PT.clean()
        w = PT[1] * PT[2]
        answer = tensor()
        sequence = alg.makeReductionSequence(w)
        for reductionFunction, weight in sequence:
            answer += PT.coefficient * weight * PT[0] \
                    * pureTensor([reductionFunction.leftMonomial,
                                  reductionFunction.relation,
                                  reductionFunction.rightMonomial]) * PT[3]
        return answer
    return m_2Inner(abcd)
def m_1(abc,alg):
    """Projection B1 -> K1.

    Splits the middle word b at every position i into b[0:i]|b[i]|b[i+1:];
    degree-zero middles (scalars) map to the zero tensor.
    """
    K1 = B1 = tensorAlgebra([alg]*3)
    @bimoduleMapDecorator(B1,K1)
    def m_1Inner(b):
        b = b[1].clean()
        answer = tensor()
        if b.degree() != 0:
            for i in range(b.degree()):
                answer += b.coefficient * pureTensor([b[0:i],b[i],b[i+1:]])
        return answer
    return m_1Inner(abc)
##############################################################################
###### Dualised chain maps
##############################################################################
def dualMap(chainMap):
    """Dualise a chain map.

    Given `chainMap`, return a factory that turns a functional `func`
    (which carries its algebra on `func.algebra`) into the pulled-back
    functional t -> func(chainMap(t, func.algebra)).
    """
    def functionFactory(func):
        def pulledBack(tens):
            image = chainMap(tens, func.algebra)
            return func(image)
        return pulledBack
    return functionFactory
m_1Dual = dualMap(m_1)
m_2Dual = dualMap(m_2)
def koszulDualMap(chainMap):
    """Dualise a Koszul chain map.

    The returned factory evaluates func on the image of every element of
    knBasis under the chain map and packages the results as a functionOnKn.
    """
    def functionFactory(func, knBasis):
        images = []
        for basisElement in knBasis:
            images.append(func(chainMap(basisElement, func.algebra)))
        return functionOnKn(func.algebra, knBasis, images)
    return functionFactory
k_2Dual = koszulDualMap(k_2)
k_3Dual = koszulDualMap(k_3)
k_4Dual = koszulDualMap(k_4)
def i_3Dual(func, alg, basisOfK3):
    """Pull the functional `func` back along i_3 and present it as a
    functionOnKn over the given basis of K3."""
    images = [func(i_3(basisElement, alg)) for basisElement in basisOfK3]
    return functionOnKn(alg, basisOfK3, images)
##############################################################################
###### Gerstenhaber Bracket
##############################################################################
def o0(f,g,alg):
    """Partial composition at the first slot: apply g to the middle pair
    (b,c) of a|b|c|d|e, then feed a|g(..)|d|e to f."""
    B3 = tensorAlgebra([alg] * 5)
    @bimoduleMapDecorator(B3,alg)
    def localO(abcde):
        intermediate = g(pureTensor([1,abcde[1],abcde[2],1]))
        return f(pureTensor(abcde[0]).tensorProduct(intermediate).tensorProduct(abcde[3:]))
    return localO
def o1(f,g,alg):
    """Partial composition at the second slot: apply g to the pair (c,d)
    of a|b|c|d|e, then feed a|b|g(..)|e to f."""
    B3 = tensorAlgebra([alg] * 5)
    @bimoduleMapDecorator(B3,alg)
    def localO(abcde):
        intermediate = g(pureTensor([1,abcde[2],abcde[3],1]))
        return f(abcde[:2].tensorProduct(intermediate).tensorProduct(abcde[4]))
    return localO
def o(f,g,alg):
    """Circle-type composition o(f,g) = o0(f,g) - o1(f,g).

    The two decorated partial compositions are constructed once here; the
    original rebuilt both (including their tensor algebras and decorators)
    on every evaluation of the returned function.
    """
    composeAtZero = o0(f, g, alg)
    composeAtOne = o1(f, g, alg)
    def localO(abcde):
        return composeAtZero(abcde) - composeAtOne(abcde)
    return localO
def GerstenhaberBracket(f,g,basisOfK3):
    """Gerstenhaber bracket [f,g] = f o g + g o f of two 2-cochains,
    returned as a functionOnKn over the given basis of K3.

    Both circle products are built once up front; the original
    reconstructed them (decorators, tensor algebras and all) on every call
    of the inner bracket.
    """
    alg = f.algebra          # read before f is rebound below
    f = m_2Dual(f)
    g = m_2Dual(g)
    fCircleG = o(f, g, alg)
    gCircleF = o(g, f, alg)
    def localBracket(abcde):
        return fCircleG(abcde) + gCircleF(abcde)
    return i_3Dual(localBracket, alg, basisOfK3)
|
chriscampbell19/polygnome | vector.py | import arithmeticInterface
import composite
class vector(arithmeticInterface.arithmeticInterface):
    """
    File: vector.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: A vector class for vectors of abstractPolynomials.
    """
    def __init__(self,components):
        # tuple() both copies and fixes the type, so the original
        # post-conversion isinstance assert could never fail and is dropped.
        self.components = tuple(components)
    def clean(self):
        """Return a vector with every component simplified via clean()."""
        return vector(tuple(x.clean() for x in self.components))
    def __mul__(self,other):
        """Componentwise right-multiplication by a scalar/polynomial."""
        return vector([i * other for i in self.components])
    def __add__(self,other):
        """Componentwise addition; lengths must agree."""
        assert len(self) == len(other)
        newComponents = []
        for index, i in enumerate(self.components):
            newComponents.append(other.components[index] + i)
        return vector(newComponents)
    def __len__(self):
        return len(self.components)
    def __getitem__(self,index):
        # Delegate to tuple indexing. This fixes the original bounds check
        # (`index > len(self)` let index == len(self) slip through, and
        # compared slices with an int), gives correct negative-index and
        # slice behaviour, and still raises IndexError past the end so the
        # legacy sequence-iteration protocol (used by isZero) works.
        return self.components[index]
    def isZero(self):
        """True iff every component isZero()."""
        for i in self:
            if not i.isZero():
                return False
        return True
    def reduceWithRespectTo(self,alg):
        """Reduce each component by the relations of the algebra `alg`."""
        return vector([alg.reduce(poly) for poly in self])
    ##############################################################################
    ###### PRINTING AND TYPING
    ##############################################################################
    def __repr__(self):
        return '(' + ','.join([repr(x) for x in self.components]) + ')'
    def toLatex(self):
        return '\\left( \\begin{array}{c} \n' +\
                '\\\\ \n'.join([x.toLatex() for x in self.components]) +\
                '\n \\end{array}\\right)'
|
chriscampbell19/polygnome | pureTensor.py | import abstractTensor
import tensor
import coefficient
import monomial
import relation
import doublyDefined #TODO: make interface PureTensorable so we can hide all this nonsense in tensor product
class pureTensor(abstractTensor.abstractTensor):
    """
    File: pureTensor.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: A pureTensor is simply a pure tensor element in a tensor product.
    The overall coefficient is factored out: every stored component has
    coefficient one and `self.coefficient` carries the product.
    """
    ##############################################################################
    ###### CONSTRUCTORS
    ##############################################################################
    def __init__(self, monomials=(), coeff=coefficient.coefficient(1)):
        # A single component may be passed bare; numbers/coefficients are
        # promoted to monomials.  NOTE(review): the default `coeff` object
        # is created once at class definition time - safe only if
        # coefficient instances are immutable; confirm.
        if type(monomials) not in [list,tuple]:
            monomials = (monomials,)
        monomials= tuple(monomials)
        self.coefficient = coeff
        index = 0
        while index < len(monomials):
            if type(monomials[index]) in [coefficient.coefficient,int,float]:
                monomials = monomials[:index] + (monomial.monomial(monomials[index]),) + monomials[index+1:]
            index +=1
        # Pull each component's coefficient out into the global one.
        for i in monomials:
            self.coefficient = self.coefficient * i.coefficient
        self.monomials = tuple( [x.withCoefficientOf1() for x in monomials])
    ##############################################################################
    ###### MATHEMATICAL METHODS
    ##############################################################################
    def isZero(self):
        """Zero iff any component is zero or the overall coefficient is."""
        new = self.clean()
        if len(new.monomials) == 0:
            return True
        # for/else: the else runs only when no component was zero.
        for mono in new.monomials:
            if mono.isZero():
                return True
        else:
            return new.coefficient.isZero()
    def clean(self):
        """Cleaning a pure tensors simply makes all components have coefficient one
        and makes the coefficient to multiple of all components' coefficients"""
        # NOTE(review): bare `reduce` is the Python 2 builtin.
        newCoefficient = reduce(lambda x,y: x * y, [x.coefficient for x in self.monomials], self.coefficient)
        if newCoefficient.isZero():
            return pureTensor()
        newMonos = [x.clean() for x in self.monomials]
        return pureTensor(tuple(newMonos), newCoefficient)
    def __iter__(self):
        # A pure tensor iterates as the one-term sum containing itself.
        yield self
    def isAddable(self,other):
        """Addable iff the component tuples agree (coefficients may differ)."""
        return self.monomials == other.monomials
    def __add__(self,other ):
        # Addable terms collapse by summing coefficients; otherwise a
        # two-term tensor is formed. `other == 0` admits the int zero.
        if other == 0:
            return self
        new1 = self.clean()
        other = other.clean()
        if isinstance(other,pureTensor):
            if new1.isAddable(other):
                if self.isZero():
                    return other
                else:
                    newCoefficient = new1.coefficient + other.coefficient
                    return pureTensor(self.monomials,newCoefficient)
            else:
                return tensor.tensor((new1,other))
        else:
            return NotImplemented
    def __mul__(self,other):
        # Right action: multiply onto the last component only.
        if self.isZero():
            return self
        newMonos = self.monomials[:-1] + (self.monomials[-1] * other,)
        return pureTensor(newMonos,self.coefficient).clean()
    def __rmul__(self,other):
        # Left action: multiply onto the first component only.
        if self.isZero():
            return self
        newMonos = (other * self.monomials[0],) + self.monomials[1:]
        return pureTensor(newMonos,self.coefficient).clean()
    def degree(self):
        # NOTE(review): this reduces the monomials themselves with `+`,
        # not their degrees - looks suspicious next to relation.degree()
        # and doublyDefined.degree(); confirm the intended result.
        return reduce(lambda x,y: x+ y, self.monomials)
    def tensorProduct(self,other):
        """Add other as a component(s) on the right of self. e.g. (a|b).tensorProduct(c) = a|b|c"""
        if type(other) in [relation.relation,doublyDefined.doublyDefined,coefficient.coefficient, str, float, int, monomial.monomial]:
            return self.tensorProduct(pureTensor(other))
        if not isinstance(other,pureTensor):
            # `other` is a sum: distribute over its terms.
            return reduce(lambda x,y: x+y, [self.tensorProduct(z) for z in other], tensor.tensor())
        return pureTensor(self.monomials + other.monomials, self.coefficient * other.coefficient)
    def subTensor(self,a,b):
        # Components a..b-1 as a new pure tensor (coefficient dropped).
        return self[a:b]
    def __getitem__(self,index):
        # Slices wrap back into a pureTensor; single indices return the
        # bare component.
        if isinstance(index,slice):
            return pureTensor(self.monomials[index])
        return self.monomials[index]
    def __len__(self):
        return len(self.monomials)
    ##############################################################################
    ###### PRINTING AND TYPING
    ##############################################################################
    def __repr__(self): # TODO: add the +1 -1 stuff here.
        if self.isZero():
            return "0"
        else:
            if self.coefficient == -1:
                return '-' + '|'.join([repr(x) for x in self.monomials])
            elif self.coefficient == 1:
                return '|'.join([repr(x) for x in self.monomials])
            else:
                return repr(self.coefficient) + '*' + '|'.join([repr(x) for x in self.monomials])
    def toLatex(self):
        if self.isZero():
            return "0"
        else:
            coefficientJoiner = '*'
            if self.coefficient == -1:
                coefficientJoiner = ''
            return self.coefficient.toLatex() + coefficientJoiner +'('+ \
                    '|'.join([i.toLatex() for i in self.monomials])+ ')'
|
chriscampbell19/polygnome | abstractPolynomial.py | from abc import ABCMeta, abstractmethod
import arithmeticInterface
class abstractPolynomial(arithmeticInterface.arithmeticInterface):
    __metaclass__ = ABCMeta
    # NOTE(review): because the string below follows the __metaclass__
    # assignment it is a bare string statement, not the class docstring.
    """
    File: abstractPolynomial.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: An abstract superclass for polynomials and monomials.
    Most of the methods must be instantiated for yourself.
    """
    # Subclasses must be iterable over their monomial terms.
    @abstractmethod
    def __iter__(self): pass
|
chriscampbell19/polygnome | composite.py | <filename>composite.py
from abc import ABCMeta, abstractmethod
import polygnomeObject
class composite(polygnomeObject.polygnomeObject):
    __metaclass__ = ABCMeta
    """
    File: composite.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: This class abstracts the fact that tensor is to pure tensor as polynomial is to monomial.
    """
    def __init__(self, components=()):
        self.components = components
    def __iter__(self):
        for component in self.components:
            yield component
    def isZero(self):
        """A composite is zero iff cleaning leaves no components."""
        return len(self.clean().components) == 0
    def clean(self):
        """Collect addable components together and drop zero terms.

        The original used `map`/`filter` here; the nested loops below
        iterate `currentComponents` more than once, which requires a real
        list (a Python 3 `map` iterator would be consumed by the first
        pass). List comprehensions keep Python 2 behaviour identical and
        make the method Python 3 safe.
        """
        currentComponents = [x.clean() for x in self.components]
        newComponents = []
        for index, component in enumerate(currentComponents):  # iterate through components
            for j in newComponents:  # check if we've seen this before
                if component.isAddable(j):
                    break
            else:  # if we haven't seen this before, take all of the later
                   # components with the same generators and add them together
                for index2, component2 in enumerate(currentComponents):
                    if index >= index2:
                        continue
                    else:
                        if component.isAddable(component2):
                            component = component + component2
                newComponents.append(component)
        newComponents = [x for x in newComponents if not x.isZero()]
        return type(self)(tuple(newComponents))
    def __len__(self):
        return len(self.components)
    def __getitem__(self,index):
        return self.components[index]
    ##############################################################################
    ###### MATHEMATICAL METHODS
    ##############################################################################
    def __add__(self,other):
        """Concatenate the terms of both operands, then clean."""
        newComponents = [x for x in self] + [x for x in other]
        return (type(self)(newComponents)).clean()
    ##############################################################################
    ###### PRINTING AND TYPING
    ##############################################################################
    def __repr__(self):
        if self.isZero():
            return '0'
        return "+".join(repr(x) for x in self if not x.isZero())
    def toLatex(self):
        if self.isZero():
            return "0"
        return "+".join(x.toLatex() for x in self if not x.isZero())
|
chriscampbell19/polygnome | reductionFunction.py | import polygnomeObject
import coefficient
import algebra
from collections import namedtuple
class reductionFunction(namedtuple('reductionFunction', ['leftMonomial','relation','rightMonomial']), polygnomeObject.polygnomeObject):
    """
    File: reductionFunction.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: A linear map on abstractPolynomials that depends on a relation
    and two submonomials left and rightMonomial. If a monomial matches exactly these left
    and right submonomials with the left hand side of the relation then it returns
    the left and right wrapped around the right hand side of the relation.
    """
    def degree(self):
        # Total degree of the only monomial shape this reduction can act on.
        return self.leftMonomial.degree() + self.rightMonomial.degree()+self.relation.degree()
    def __call__(self,poly):
        """Apply the reduction linearly to each monomial of `poly`.

        Monomials of the wrong total degree, or whose left/middle/right
        factorisation does not match this reduction, pass through
        unchanged; a matching monomial is rewritten with the relation's
        lower order terms substituted in the middle.
        """
        answer = coefficient.coefficient(0)
        for mono in poly:
            if self.degree() != mono.degree():
                answer = answer + mono
            #TODO: check impact of this choice of order of condition checks on running time
            elif self.leftMonomial == mono.submonomial(0,self.leftMonomial.degree()) \
                    and self.rightMonomial == mono.submonomial(self.leftMonomial.degree() + self.relation.degree(),mono.degree())\
                    and self.relation.doesAct(mono.submonomial(self.leftMonomial.degree(),self.leftMonomial.degree()+self.relation.degree())):
                # Match: wrap the lower order terms in left/right monomials.
                answer = answer + mono.coefficient * self.leftMonomial * self.relation.lowerOrderTerms * self.rightMonomial
            else:
                answer = answer + mono
        return answer.clean()
    def __repr__(self):
        return repr(self.leftMonomial) +"("+repr(self.relation)+")"+repr(self.rightMonomial)
    def toLatex(self):
        return self.leftMonomial.toLatex() +"("+self.relation.toLatex()+")"+self.rightMonomial.toLatex()
|
chriscampbell19/polygnome | doublyDefined.py | <reponame>chriscampbell19/polygnome
import polygnomeObject
import coefficient
class doublyDefined(polygnomeObject.polygnomeObject):
    """
    File: doublyDefined.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: A class to encapsulate elements of K3Bar.  An element is
    stored as two representations, each a sequence of (generator, relation)
    pairs; printing shows only the left-hand one.
    """
    def __init__(self, leftHandRepresentation, rightHandRepresentation):
        self.leftHandRepresentation = leftHandRepresentation
        self.rightHandRepresentation = rightHandRepresentation
        self.coefficient = coefficient.coefficient(1)
    def __repr__(self):
        pieces = []
        for gen, rel in self.leftHandRepresentation:
            pieces.append(repr(gen) + '*' + repr(rel))
        return ''.join(pieces)
    def toLatex(self):
        pieces = []
        for gen, rel in self.leftHandRepresentation:
            pieces.append(gen.toLatex() + '*' + rel.toLatex())
        return ''.join(pieces)
    def __eq__(self,other):
        if self.leftHandRepresentation != other.leftHandRepresentation:
            return False
        return self.rightHandRepresentation == other.rightHandRepresentation
    ##############################################################################
    ###### CODE TO MAKE THIS USEABLE IN TENSOR PRODUCTS
    ##############################################################################
    def degree(self):
        # Degree of the first left-hand pair: deg(generator) + deg(relation).
        gen, rel = self.leftHandRepresentation[0]
        return gen.degree() + rel.degree()
    def clean(self):
        # Already in normal form.
        return self
    def isZero(self):
        return False
    def __iter__(self):
        # Behaves as a one-term sum of itself.
        yield self
    def withCoefficientOf1(self):
        return self
|
chriscampbell19/polygnome | relation.py | from collections import namedtuple
import polygnomeObject
import coefficient
relationClass = namedtuple('relation',['leadingMonomial','lowerOrderTerms'])
class relation(relationClass,
        polygnomeObject.polygnomeObject): # relation is a namedtuple pairing
                        # the chosen leading monomial with
                        # the lower terms in the
                        # reduction hierarchy
    """
    File: relation.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description:A relation is a tuple with the leadingMonomial and lowerOrderTerms.
    It represents the equation leadingMonomial = lowerOrderTerms, i.e. the
    polynomial leadingMonomial - lowerOrderTerms (see asPolynomial).
    """
    def __init__(self,*args,**kwargs):
        # NOTE(review): namedtuple construction happens in __new__, so this
        # delegation is effectively a no-op (the extra args are ignored by
        # object.__init__ because __new__ is overridden) - confirm it can
        # simply be dropped.
        relationClass.__init__(self,args,kwargs)
        # Relations act as coefficient-1 factors inside pure tensors.
        self.coefficient = coefficient.coefficient(1)
    def __repr__( self):
        return '(' + repr(self.asPolynomial()) + ')'
    def toLatex(self):
        return '(' + (self.asPolynomial()).toLatex() + ')'
    def doesAct(self,poly):
        """True iff `poly` is exactly this relation's leading monomial."""
        return poly == self.leadingMonomial
    def degree(self):
        return self.leadingMonomial.degree()
    def __eq__(self,other):
        return self.leadingMonomial == other.leadingMonomial and self.lowerOrderTerms == other.lowerOrderTerms
    def asPolynomial(self):
        """The relation as the polynomial leadingMonomial - lowerOrderTerms."""
        return self.leadingMonomial - self.lowerOrderTerms
    ##############################################################################
    ###### CODE TO MAKE THIS USEABLE IN TENSOR PRODUCTS
    ##############################################################################
    def clean(self):
        return self
    def isZero(self):
        return False
    def __iter__(self):
        # Behaves as a one-term sum of itself.
        yield self
    def withCoefficientOf1(self):
        return self
|
chriscampbell19/polygnome | arithmeticInterface.py | <reponame>chriscampbell19/polygnome
from abc import ABCMeta, abstractmethod
import polygnomeObject
class arithmeticInterface(polygnomeObject.polygnomeObject):
    __metaclass__ = ABCMeta
    """
    File: arithmeticInterface.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: A simple arithmetic interface that polynomials, tensors,
    vectors, and coefficients will have to implement.
    """
    ##############################################################################
    ###### SORTING METHODS
    ##############################################################################
    def clean(self): #This is the method that checks if for example we have x + x and simplifies it to 2 x.
        return self
    ##############################################################################
    ###### MATHEMATICAL METHODS
    ##############################################################################
    @abstractmethod
    def isZero(self): pass
    def __eq__(self,other):
        # Equality is "the difference cleans to zero"; return the boolean
        # directly instead of the original if/else True/False ladder.
        return (self - other).clean().isZero()
    def __ne__(self,other):
        return not self.__eq__(other)
    @abstractmethod
    def __add__(self,other): pass
    @abstractmethod
    def __mul__(self,other): pass
    def __sub__(self,other):
        # Subtraction in terms of the required + and * operations.
        return self + (other * (-1))
    def __radd__(self,other): #addition is always commutative
        return self + other
    def __neg__(self):
        return self * (-1)
    def __pow__(self,other):
        """Non-negative integer powers by repeated multiplication.

        The power 0 returns the int 1, relying on __rmul__/__mul__ to
        absorb it in surrounding arithmetic (original behaviour kept).
        """
        assert type(other) is int
        assert other >= 0
        if other == 0:
            return 1
        return self * (self ** (other - 1))
|
chriscampbell19/polygnome | algebra.py | <filename>algebra.py
import polygnomeObject
import relation
import reductionFunction
import monomial
class algebra(polygnomeObject.polygnomeObject):
    """
    File: algebra.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: An algebra stores a list of relations and can reduce words in
    the free algebra by these relations. The relations have a choice of highest monomial already.
    """
    def __init__(self,relations=()):
        """Accept a single relation, a tuple of relations, or a tuple of
        (leadingMonomial, lowerOrderTerms) pairs."""
        if isinstance(relations,relation.relation):
            relations = (relations,)
        relations = tuple(relations)
        # If you only want one relation the following saves you having to
        # define it seperately before making the algebra.
        if len(relations) > 0 and isinstance(relations[0],tuple):
            assert len(relations[0]) == 2
            # NOTE(review): Python 2 `map` returns a list; under Python 3
            # this would leave a one-shot iterator in self.relations.
            relations = map(lambda x: relation.relation( *x),relations)
        for i in relations:
            assert isinstance(i,relation.relation)
        self.relations = relations
    def zero(self):
        # The zero element of the algebra, as a monomial.
        return monomial.monomial(0)
    def __repr__(self):
        if len(self.relations) == 0:
            return 'The free algebra'
        return "Algebra subject to relations " + repr(self.relations)
    def toLatex(self):
        if len(self.relations) == 0:
            return 'The free algebra'
        return "Algebra subject to relations $" + "$,$".join([i.toLatex() for i in self.relations]) + "$"
    def __iter__(self):
        """Iterating through an algebra simply returns the relations of that algebra"""
        for i in self.relations:
            yield i
    def doesAct(self,poly):
        """Test if the polynomial has any monomial on which there is a relation that acts"""
        for mono in poly:
            monoDegree = mono.degree()
            # Degree <= 1 words cannot contain a relation's leading monomial.
            if monoDegree <= 1:
                continue
            for rel in self.relations:
                # For each relation we check if it applies to the monomial.
                # Firstly we check if the monomial has too low a degree
                relDegree = rel.degree()
                if monoDegree < relDegree:
                    continue
                # Secondly we iterate through the submonomials of length
                # relDegree and see if any of them are the leading monomial of
                # rel.
                for index in xrange(monoDegree - relDegree + 1):
                    if rel.doesAct(mono[index: index + relDegree]):
                        return True
        return False
    def makeReductionFunction(self, poly):
        """Only run this if you have already checked doesAct.

        Returns (reductionFunction, coefficient) for the first applicable
        relation found; falls off the end (returning None) when nothing
        acts - hence the doesAct precondition.
        """
        for mono in poly:
            monoDegree = mono.degree()
            if monoDegree <= 1:
                continue
            for rel in self.relations:
                # For each relation we check if it applies to the monomial.
                # Firstly we check if the monomial has too low a degree
                relDegree = rel.degree()
                if monoDegree < relDegree:
                    continue
                # Secondly we iterate through the submonomials of length
                # relDegree and see if any of them are the leading monomial of
                # rel.
                for index in xrange(monoDegree - relDegree + 1):
                    if rel.doesAct(mono[index: index + relDegree]):
                        return (reductionFunction.reductionFunction(mono[0:index],
                                                                    rel,
                                                                    mono[index + relDegree:])
                                , mono.coefficient)
    def makeReductionSequence(self,poly):
        """Eagerly build the list of (reduction, weight) steps that
        normalise `poly` (the input object itself is not mutated)."""
        sequence = []
        while self.doesAct(poly):
            reduction, weight = self.makeReductionFunction(poly)
            sequence.append((reduction,weight))
            poly = reduction(poly)
        return sequence
    def reductionSequenceGenerator(self,poly):
        """Lazy variant of makeReductionSequence."""
        while self.doesAct(poly):
            reduction, weight = self.makeReductionFunction(poly)
            yield (reduction, weight)
            poly = reduction(poly)
    def reduce(self,poly): # TODO: check running time on this, this is a slow way of doing iterable
        """Fully reduce `poly` modulo the relations (normal form)."""
        for reduction, weight in self.makeReductionSequence(poly):
            poly = reduction(poly)
        return poly
    def equivalent(self,polynomial1,polynomial2):
        """True iff the two polynomials have the same normal form."""
        return self.reduce(polynomial1) == self.reduce(polynomial2)
if __name__ == '__main__':
pass
|
chriscampbell19/polygnome | tensorAlgebra.py | import polygnomeObject
import pureTensor
import tensor
import abstractPolynomial
class tensorAlgebra(polygnomeObject.polygnomeObject):
    """
    File: tensorAlgebra.py
    Author: <NAME>
    Email: c (dot) j (dot) campbell (at) ed (dot) ac (dot) uk
    Github: https://github.com/campbellC
    Description: This class encapsulates tensor algebras so that one can reduce
    tensors of elements from different algebras.
    """
    def __init__(self, algebras=()):
        # One algebra per tensor slot, in order.
        self.algebras = tuple(algebras)
    def reduce(self,tens):
        """Reduce each slot of every pure tensor in `tens` by the matching
        algebra, then re-expand the slotwise polynomials into a tensor."""
        if len(self) == 1: #If this is just an algebra
            return self[0].reduce(tens)
        newTens = tensor.tensor()
        for pure in tens:
            # Reduce slot i by algebra i; each result may be a polynomial.
            polys = []
            for index in range(len(pure)):
                currentAlgebra = self[index]
                polys.append(currentAlgebra.reduce(pure[index]))
            def listOfPolysToTensors(ps):
                # Distribute: build all pure tensors of one monomial per slot.
                ps = iter(ps)
                pureTensors = [pureTensor.pureTensor(mono) for mono in next(ps)]
                for poly in ps:
                    newPureTensors = []
                    for mono in poly:
                        newPureTensors.extend([x.tensorProduct(mono) for x in pureTensors])
                    pureTensors = newPureTensors
                return tensor.tensor(pureTensors)
            newTens = newTens + pure.coefficient * listOfPolysToTensors(polys)
        return newTens
    def zero(self):
        # The zero pure tensor with one zero component per slot.
        return pureTensor.pureTensor([0] * len(self))
    def __getitem__(self,index):
        return self.algebras[index]
    def __len__(self):
        return len(self.algebras)
    def __repr__(self):
        return 'A tensor algebra with algebras ' + '|'.join(repr(a) for a in self.algebras)
    def toLatex(self):
        return 'A tensor algebra with algebras ' + '|'.join(a.toLatex() for a in self.algebras)
|
JuliusHen/gimli | pygimli/core/math.py | # -*- coding: utf-8 -*-
"""Collection of mathematical functions."""
import numpy as np
from .core import (angle, besselI0, besselI1, besselK0, besselK1, cos,
cot, det, dot, exp, exp10, imag, log, log10, max, median,
min, pow, rand, randn, real, rms, round, rrms, sign,
sin, sqrt, sum, toComplex, unique)
def symlog(x, tol=None, linearSpread=0):
    """Symmetric bi-logarithmic transformation (as used in matplotlib).

    Transforms signed values logarithmically while preserving the sign.
    Absolute values below the threshold are treated zero or linearly
    distributed (if linearSpread>0).

    Parameters
    ----------
    x : iterable
        array to be transformed
    tol : float [None]
        tolerance for minimum values to be treated zero (or linear);
        defaults to the smallest absolute value in x.
        NOTE(review): if x contains a zero the default makes tol == 0 and
        the division below produces inf/nan - confirm callers prefilter.
    linearSpread : float
        define how wide linear transformation is done (0-not, 1-one decade)
    """
    threshold = np.min(np.abs(x)) if tol is None else tol
    scaled = np.log10(1 + np.abs(x / threshold)) + linearSpread / 2
    return np.sign(x) * scaled
|
JuliusHen/gimli | apps/ipynbToGalleryExample.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
r"""Convert jupyter notebook to sphinx gallery notebook styled examples.
Modified from here:https://gist.github.com/chsasank/7218ca16f8d022e02a9c0deb94a310fe#file-ipynb_to_gallery-py
Usage: python ipynbToGalleryExample.py <notebook.ipynb>
Dependencies:
pypandoc: install using `pip install pypandoc`
"""
import pypandoc as pdoc
import json
def fixRST(rst):
    """Post-process pandoc rst output for use in sphinx-gallery examples."""
    # Incoming text uses \r\n pairs: drop the \n's, then turn the
    # remaining \r's back into newlines.
    rst = rst.replace('\n', '').replace('\r', '\n')
    # Turn raw-latex align environments into a plain math directive.
    rst = rst.replace(r":raw-latex:`\begin{align}", "\n.. math::\n\n\t")
    rst = rst.replace("\end{align}`", "")
    # some spocky encoding problems with '
    rst = rst.replace('’', "'")
    # Rewrite every :math:`h` role as $h$, which is valid for ipynb.
    token = ":math:`"
    tokenLength = len(token)
    start = rst.find(token)
    while start != -1:
        end = rst.find("`", start + tokenLength)
        rst = rst[:start] + '$' + rst[start + tokenLength:end] + '$' + rst[end + 1:]
        start = rst.find(token)
    return rst
def convert_ipynb_to_gallery(file_name):
    """Convert `<name>.ipynb` into a sphinx-gallery styled `<name>.py`.

    Markdown cells become rst comment blocks (the first one becomes the
    module docstring), code cells are copied verbatim, and cells starting
    with an ipython '%' magic are skipped.
    """
    outFileName = file_name.replace('.ipynb', '.py')
    print("Converting {0} -> {1}".format(file_name, outFileName))
    header = "#!/usr/bin/env python\n" +\
             "# -*- coding: utf-8 -*-\n"
    with open(file_name, encoding="utf-8") as fi:
        nb_dict = json.load(fi)
    cells = nb_dict['cells']
    first = True
    for cell in cells:
        # Skip magic cells; narrow the original bare except - an empty
        # 'source' raises IndexError, a malformed cell KeyError.
        try:
            if cell['source'][0].startswith('%'):
                continue
        except (IndexError, KeyError):
            pass
        if first:
            # Gallery format needs a leading rst docstring, so wait for
            # the first markdown cell before emitting anything.
            if cell['cell_type'] != 'markdown':
                continue
            first = False
            md_source = ''.join(cell['source'])
            rst_source = pdoc.convert_text(md_source, 'rst', 'md')
            python_file = header + 'r"""\n' + fixRST(rst_source) + '\n"""'
        else:
            if cell['cell_type'] == 'markdown':
                md_source = ''.join(cell['source'])
                rst_source = pdoc.convert_text(md_source, 'rst', 'md')
                rst_source = fixRST(rst_source)
                commented_source = '\n'.join(['# ' + x for x in
                                              rst_source.split('\n')])
                python_file = python_file + '\n\n' + '%%%' + '\n' + \
                    commented_source
            elif cell['cell_type'] == 'code':
                source = ''.join(cell['source'])
                python_file = python_file + '\n\n' + source
    # Comment out residual cell magics so the result stays importable.
    python_file = python_file.replace("\n%", "\n# %")
    # Context manager guarantees the handle is flushed and closed (the
    # original left the open file object to the garbage collector).
    with open(outFileName, 'w', encoding="utf-8") as fo:
        fo.write(python_file)
if __name__ == '__main__':
    import sys
    # Expect exactly one argument: the notebook to convert.
    if len(sys.argv) < 2:
        print('Usage: ' + sys.argv[0] + ' ipythonnotebook.ipynb')
    else:
        convert_ipynb_to_gallery(sys.argv[-1])
|
JuliusHen/gimli | pygimli/core/mesh.py | # -*- coding: utf-8 -*-
"""
Import and extensions of the core Mesh class.
"""
import numpy as np
from math import ceil
from .core import (cat, HexahedronShape, Line, RSparseMapMatrix,
Mesh, MeshEntity, Node, Boundary, RVector, RVector3,
PolygonFace, TetrahedronShape, TriangleFace)
from .logger import deprecated, error, info, warn, critical
from ..meshtools import mergePLC, exportPLC
from .base import isScalar, isArray, isPos, isR3Array, isComplex
def __Mesh_unique_dataKeys(self):
    """Group the mesh's data keys by their base name.

    Vector keys ('name_x'/'name_y'/'name_z') collapse to 'name' with only
    the '_x' entry recorded; numbered matrix keys 'name#0', 'name#1', ...
    collapse to 'name' keeping every entry. Returns {base: [raw keys]}.
    """
    grouped = {}
    for rawKey in self.dataMap().keys():
        if '_y' in rawKey or '_z' in rawKey:
            continue  # represented by their '_x' sibling
        base = rawKey
        if '_x' in base:
            base = base[:base.find('_x')]
        if '#' in base:
            base = base[:base.find('#')]
        grouped.setdefault(base, []).append(rawKey)
    return grouped
Mesh.dataKeys = __Mesh_unique_dataKeys
def __Mesh_str(self):
    """One-line summary of mesh size plus an optional data-key listing."""
    st = "Mesh: Nodes: " + str(self.nodeCount()) + " Cells: " + str(
        self.cellCount()) + " Boundaries: " + str(self.boundaryCount())
    if (self.secondaryNodeCount() > 0):
        st += " secNodes: " + str(self.secondaryNodeCount())
    if len(list(self.dataMap().keys())) > 0:
        st += "\nMesh contains data: "
        # dataKeys() groups '_x/_y/_z' and '#N' keys under one base name.
        uniqueNames = self.dataKeys()
        for d, v in uniqueNames.items():
            if len(v) > 1:
                # Multi-entry (matrix) data shows its index range.
                st += d + "[0,...,{})".format(len(v))
            else:
                st += d
            st += ', '
        st = st.rstrip(', ')
    return st
Mesh.__repr__ =__Mesh_str
def __addPLCs__(self, other):
    """Merge two PLC geometries via the '+' operator."""
    if self.isGeometry() and other.isGeometry():
        return mergePLC([self, other])
    else:
        # NOTE(review): no return value on this path - presumably `error`
        # raises or aborts; confirm callers never receive None here.
        error("Addition is only supported for PLCs, i.e. meshs without cells.")
Mesh.__add__ = __addPLCs__
def __MeshEntity_str(self):
    """Give mesh entity infos: id, marker, size and the node list."""
    s = self.__repr__()
    s += '\tID: ' + str(self.id()) + \
         ', Marker: ' + str(self.marker()) + \
         ', Size: ' + str(self.size()) + '\n'
    # Large polygon faces are summarised instead of listing every node.
    if isinstance(self, PolygonFace) and len(self.nodes()) > 5:
        s += '\t' + str(self.nodeCount()) + " Nodes.\n"
    else:
        for n in self.nodes():
            s += '\t' + str(n.id()) + " " + str(n.pos()) + "\n"
    return s
MeshEntity.__str__ =__MeshEntity_str
def __Node_str(self):
    """Compact node info: id, marker and position."""
    info = '\tID: {0}, Marker: {1}'.format(self.id(), self.marker())
    return info + '\t' + str(self.pos()) + '\n'
Node.__repr__ =__Node_str
def __Mesh_setVal(self, key, val):
    """Index access to the mesh data.

    R3 vector data is stored directly. A list of arrays or a 2-D matrix is
    split into zero-padded '#'-indexed entries ('key#0', 'key#1', ...) so
    __Mesh_getVal can reassemble it. Everything else is stored as-is.
    """
    if isR3Array(val):
        return self.addData(key, val)
    # NOTE(review): the original condition `isinstance(val, list) and
    # isinstance(val[0], ...) or val.ndim == 2` evaluated `val.ndim` even
    # for plain lists (AttributeError) and `val[0]` on empty lists
    # (IndexError); guard both cases explicitly.
    isListOfArrays = isinstance(val, list) and len(val) > 0 and \
        isinstance(val[0], (RVector, np.ndarray))
    if isListOfArrays or (hasattr(val, 'ndim') and val.ndim == 2):
        # Zero-pad the running index so keys sort lexically.
        maxDigit = ceil(np.log10(len(val)))
        for i, v in enumerate(val):
            self.addData('{}#{}'.format(key, str(i).zfill(maxDigit)),
                         np.asarray(v))
    else:
        self.addData(key, val)
Mesh.__setitem__ = __Mesh_setVal
def __Mesh_getVal(self, key):
    """Index access to the mesh data.

    Exact keys are returned directly; otherwise '_x/_y/_z' triples are
    stacked into an (N, 3) array and 'key#N' entries are collected into a
    list/matrix under their base name.
    """
    if self.haveData(key):
        return self.data(key)
    else:
        uniqueNames = {}
        for d in self.dataMap().keys():
            # '_y'/'_z' parts are fetched via their '_x' sibling below.
            if '_y' in d or '_z' in d:
                continue
            uName = d
            if '_x' in uName:
                uName = uName[0:uName.find('_x')]
                d1 = self.data(d)
                d2 = self.data(d.replace('_x', '_y'))
                d3 = self.data(d.replace('_x', '_z'))
                # Stack components columnwise -> (N, 3).
                dat = np.array([d1, d2, d3]).T
            else:
                dat = self.data(d)
            if '#' in uName:
                uName = uName[0:uName.find('#')]
            if not uName in uniqueNames:
                uniqueNames[uName] = []
            uniqueNames[uName].append(dat)
        if key in uniqueNames:
            if len(uniqueNames[key]) == 1:
                return uniqueNames[key][0]
            # NOTE(review): bare except falls back to the raw list when the
            # entries are ragged and np.array raises - confirm intended.
            try:
                return np.array(uniqueNames[key])
            except:
                return uniqueNames[key]
        critical('The mesh does not have the requested data:', key,
                 '. Available:', uniqueNames)
Mesh.__getitem__ = __Mesh_getVal
def __MeshBoundingBox__(self):
    """Return [min, max] bounding-box corners restricted to the mesh
    dimension (unused trailing coordinates are dropped)."""
    bb = self.boundingBox()
    mi = RVector3([bb.min()[i] for i in range(self.dim())])
    ma = RVector3([bb.max()[i] for i in range(self.dim())])
    return [mi, ma]
Mesh.bb = __MeshBoundingBox__
def __MeshGetCellMarker__(self):
    # Deprecated alias: forwards to cellMarkers().
    deprecated(msg='Mesh::cellMarker()', hint='Mesh::cellMarkers()')
    return self.cellMarkers()
def __MeshSetCellMarker__(self, m):
    # Deprecated alias: forwards to setCellMarkers().
    deprecated(msg='Mesh::setCellMarker()', hint='Mesh::setCellMarkers()')
    return self.setCellMarkers(m)
def __MeshHoleMarkers__(self):
    # Thin plural alias for holeMarker().
    return self.holeMarker()
Mesh.cellMarker = __MeshGetCellMarker__
Mesh.setCellMarker = __MeshSetCellMarker__
Mesh.holeMarkers = __MeshHoleMarkers__
def __createSecondaryNodes__(self, n=3, verbose=False):
    """Create `n` equally distributed secondary nodes on the mesh boundaries.

    This is useful to increase the accuracy of traveltime calculations.
    Works in place on this mesh and returns None; use
    createMeshWithSecondaryNodes to obtain a modified copy.

    Parameters
    ----------
    n : int
        Number of secondary nodes (the default is 3).
    verbose : bool
        Optionally output number of added nodes.
    """
    self.createNeighborInfos()
    # If any boundary already carries secondary nodes, do nothing.
    if self.boundary(0).nodeCount() != self.boundary(0).allNodeCount():
        warn("Mesh already contains secondary nodes. Not adding any more.")
    else:
        if self.dim() == 2:
            # 2D: distribute n nodes on each boundary edge.
            for b in self.boundaries():
                A = b.node(0).pos()
                B = b.node(1).pos()
                line = Line(A, B)
                for i in range(n):
                    sn = self.createSecondaryNode(line.at((i + 1) / (n + 1)))
                    b.addSecondaryNode(sn)
        elif self.dim() == 3:
            # 3D: distribute nodes on each boundary face; triangular faces
            # get a triangular (sx, sy) pattern, quads a full grid.
            for b in self.boundaries():
                bs = b.shape()
                for sx in range(n):
                    nMax = n
                    if isinstance(b, TriangleFace):
                        nMax = n - sx
                    for sy in range(nMax):
                        if isinstance(b, TriangleFace):
                            pos = bs.xyz([(sx + 1) / (n + 2),
                                          (sy + 1) / (n + 2)])
                        else:
                            pos = bs.xyz([(sx + 1) / (n + 1),
                                          (sy + 1) / (n + 1)])
                        sn = self.createSecondaryNode(pos)
                        b.addSecondaryNode(sn)
            for c in self.cells():
                # add secondary nodes to the edges of 3 Entities
                edges = []
                if isinstance(c.shape(), HexahedronShape):
                    #    7------6
                    #   /|     /|
                    #  4------5 |
                    #  | 3----|-2
                    #  |/     |/
                    #  0------1
                    edges.append([c.shape().node(0), c.shape().node(1)])
                    edges.append([c.shape().node(1), c.shape().node(2)])
                    edges.append([c.shape().node(2), c.shape().node(3)])
                    edges.append([c.shape().node(3), c.shape().node(0)])
                    edges.append([c.shape().node(0), c.shape().node(4)])
                    edges.append([c.shape().node(1), c.shape().node(5)])
                    edges.append([c.shape().node(2), c.shape().node(6)])
                    edges.append([c.shape().node(3), c.shape().node(7)])
                    edges.append([c.shape().node(4), c.shape().node(5)])
                    edges.append([c.shape().node(5), c.shape().node(6)])
                    edges.append([c.shape().node(6), c.shape().node(7)])
                    edges.append([c.shape().node(7), c.shape().node(4)])
                elif isinstance(c.shape(), TetrahedronShape):
                    edges.append([c.shape().node(0), c.shape().node(1)])
                    edges.append([c.shape().node(0), c.shape().node(2)])
                    edges.append([c.shape().node(0), c.shape().node(3)])
                    edges.append([c.shape().node(1), c.shape().node(2)])
                    edges.append([c.shape().node(2), c.shape().node(3)])
                    edges.append([c.shape().node(3), c.shape().node(1)])
                else:
                    print(c)
                    warn('cell type unknown')
                for e in edges:
                    line = Line(e[0].pos(), e[1].pos())
                    for i in range(n):
                        # tol avoids duplicates with nodes created on faces
                        sn = self.createSecondaryNode(line.at((i+1)/(n+1)),
                                                      tol=1e-6)
                        c.addSecondaryNode(sn)
        else:
            warn("Unknown dimension. Don't know what to do.")
    if verbose:
        info("Added %d secondary nodes." % self.secondaryNodeCount())
def __createMeshWithSecondaryNodes__(self, n=3, verbose=False):
    """Return a copy of this mesh with `n` secondary nodes per entity."""
    m = Mesh(self)
    m.createSecondaryNodes(n, verbose)
    return m
Mesh.createSecondaryNodes = __createSecondaryNodes__
Mesh.createMeshWithSecondaryNodes = __createMeshWithSecondaryNodes__
__Mesh_deform__ = Mesh.deform
def __deform__(self, eps, mag=1.0):
    """Deform the mesh nodes by a displacement `eps`, scaled by `mag`.

    Accepts several input layouts and normalizes them into the flat
    vector expected by the core Mesh.deform:

    * a flat 1D array (length dim()*nodeCount()),
    * a list/tuple of per-dimension vectors [ux, uy(, uz)],
    * an (nodeCount, dim) 2D array.

    Parameters
    ----------
    eps : array-like
        Displacement in one of the layouts above.
    mag : float
        Scaling factor applied by the core deform.
    """
    v = None
    dof = self.nodeCount()
    if hasattr(eps, 'ndim') and eps.ndim == 1:
        # already a flat vector -> pass through
        v = eps
    elif len(eps) == self.dim():
        # per-dimension vectors [ux, uy(, uz)]
        if len(eps[0]) == dof:
            if self.dim() == 2:
                v = cat(eps[0], eps[1])
            elif self.dim() == 3:
                v = cat(cat(eps[0], eps[1]), eps[2])
            else:
                v = eps[0]
        else:
            print(self)
            print(len(eps), len(eps[0]))
            error('Size of displacement does not match mesh nodes size.')
    elif len(eps) == self.nodeCount() and eps.ndim == 2:
        # node-wise (N, dim) array -> flatten column-major (x block, y block, ...)
        v = eps.reshape(self.nodeCount() * eps.shape[1], order='F')
    if v is None:
        # Fail loudly instead of silently handing None to the core deform,
        # which would otherwise produce an opaque low-level error.
        error('Cannot interpret displacement input for Mesh.deform: '
              'unexpected shape or size.')
    return __Mesh_deform__(self, v, mag)
Mesh.deform = __deform__
Mesh.exportPLC = exportPLC
# just to keep backward compatibility 20191120
# (British spelling alias plus lowercase bounding-box accessors)
Mesh.createNeighbourInfos = Mesh.createNeighborInfos
Mesh.xmin = Mesh.xMin
Mesh.ymin = Mesh.yMin
Mesh.zmin = Mesh.zMin
Mesh.xmax = Mesh.xMax
Mesh.ymax = Mesh.yMax
Mesh.zmax = Mesh.zMax
def __Boundary_outside__(self):
    """Return True if this boundary lies on the outside of the mesh.

    An outer boundary has a cell on its left side but none on its right.
    """
    has_left = self.leftCell() is not None
    has_right = self.rightCell() is not None
    return has_left and not has_right
Boundary.outside = __Boundary_outside__
def __Mesh_h__(self):
    """Return an array holding the characteristic size h of every cell."""
    sizes = [cell.shape().h() for cell in self.cells()]
    return np.array(sizes)
Mesh.h = __Mesh_h__
def __Mesh_findPaths__(self, bounds):
    """Find paths of connected boundaries.

    Builds an adjacency matrix of the boundary node graph and walks it,
    consuming edges, to collect chains of connected node ids.

    Returns
    -------
    List of list of ids of connected nodes
    """
    import pygimli as pg
    scipy = pg.optImport('scipy')
    scipy.sparse = pg.optImport('scipy.sparse')
    # Adjacency matrix: entry 1.0 for the edge direction as stored,
    # 2.0 for the reverse direction, so both lookups work.
    S = pg.core.SparseMapMatrix()
    for b in bounds:
        # S[b.shape().node(0).id(), b.shape().node(1).id()] = 1
        # S[b.shape().node(1).id(), b.shape().node(0).id()] = 1
        S.addVal(b.shape().node(1).id(), b.shape().node(0).id(), 2.0)
        S.addVal(b.shape().node(0).id(), b.shape().node(1).id(), 1.0)
    # dok_matrix gives cheap per-entry pop() while we consume edges.
    S = scipy.sparse.dok_matrix(pg.utils.toCOO(S))
    # print(S.shape)
    # print(S)
    paths = []
    def followPath(path, S, rID):
        # Walk from row rID, appending node ids to `path` and removing
        # each traversed edge (both directions) from S so it is visited
        # exactly once. Stops at a dead end or at a junction.
        # print('start', rID)
        row = S[rID]
        while 1:
            cID = list(row.keys())[0][1]
            # print('row', rID, 'col', cID)
            # print('add', cID)
            path.append(cID)
            S.pop((rID, cID))
            S.pop((cID, rID))
            # print('pop-r', (rID, cID))
            col = S[:, cID]
            if len(col) == 1:
                rID = list(col.keys())[0][0]
                path.append(rID)
                # print('add', rID)
                # print('pop-c', (rID, cID))
                S.pop((rID, cID))
                S.pop((cID, rID))
                row = S[rID]
                if len(row) != 1:
                    break
            else:
                break
    ## first look for single starting
    for i in range(S.shape[0]):
        rID = i
        row = S[rID]
        if len(row) == 1:
            #single starting
            path = []
            paths.append(path)
            # starting node
            path.append(rID)
            followPath(path, S, rID)
    ## remaining are closed
    for i in range(S.shape[0]):
        rID = i
        row = S[rID]
        if len(row) == 2:
            path = []
            paths.append(path)
            # starting node
            path.append(rID)
            followPath(path, S, rID)
    return paths
Mesh.findPaths = __Mesh_findPaths__
def __Mesh_cutBoundary__(self, marker, boundaryMarker=None):
    """Cut the mesh along a given inner boundary.

    Cut the mesh along a given boundary and convert this inner boundary to an outer. There will be new nodes to cut the connection between neighbouring cells. The new boundary can have an optional boundaryMarker.

    Restrictions
    ------------
    * 2D p1
    * one connected path at once
    * starting node need to be on an outer boundary
    * end node needs to be inside the mesh

    TODO
    ----
    * remove restrictions

    Arguments
    ---------
    mesh: :gimliapi:`GIMLI::Mesh`
        2D
    marker: int
        Marker for the boundary to be cut.
    boundaryMarker: None
        If set to None, boundaryMarker set to marker.

    Example
    -------
    >>> import pygimli as pg
    >>> import pygimli.meshtools as mt
    >>> plc = mt.createCircle(segments=24)
    >>> l = mt.createLine(start=[0, -1], end=[0, -0.1], boundaryMarker=2)
    >>> mesh = mt.createMesh([plc, l], area=0.1)
    >>> fig, axs= pg.plt.subplots(1, 2)
    >>> ax ,_ = pg.show(mesh, boundaryMarkers=True, ax=axs[0])
    >>> oldNodeCount = mesh.nodeCount()
    >>> print(mesh)
    Mesh: Nodes: 43 Cells: 60 Boundaries: 102
    >>> mesh.cutBoundary(marker=2, boundaryMarker=3)
    >>> print(mesh)
    Mesh: Nodes: 46 Cells: 60 Boundaries: 105
    >>> ## just move the new nodes little rightwards to see the cut
    >>> for n in range(oldNodeCount, mesh.nodeCount()):
    ...     mesh.node(n).setPos(mesh.node(n).pos() + [0.1, 0.0])
    >>> ax, _ = pg.show(mesh, data=range(mesh.cellCount()),
    ...                 boundaryMarkers=True, colorBar=False,
    ...                 showMesh=True, boundaryProps={'lw':2}, ax=axs[1])
    >>> for b in mesh.boundaries():
    ...     if b.marker() != 0:
    ...         c = b.center()
    ...         n = b.norm()
    ...         _ = ax.annotate('', xytext=(c[0], c[1]),
    ...                         xy=((c+n/30.)[0], (c+n/30.)[1]),
    ...                         arrowprops=dict(arrowstyle="-|>", lw=1),
    ...                         )
    """
    import pygimli as pg
    if boundaryMarker is None:
        boundaryMarker = marker
    mesh = self
    def replaceNode_(mesh, c, n1, n2, marker, lastC=None):
        # Recursively walk around node n1 through neighbouring cells and
        # replace n1 by its duplicate n2 in all touching boundaries and
        # cells, stopping at boundaries carrying the cut `marker`.
        if c is None or n1.id() not in c.ids():
            return
        # pg._y('check in cell', c.id(), n1.id(), n2.id())
        toBeReplaced = []
        for i in range(c.boundaryCount()):
            b = c.boundary(i)
            if b is not None and n1.id() in b.ids():
                # pg._y('\tbound: ', b.id(), ':',
                #       b.node(0).id(), b.node(1).id(), "ma:", b.marker())
                if b.marker() != marker:
                    lC = b.leftCell()
                    rC = b.rightCell()
                    # rcS = None
                    # if rC is not None:
                    #     rcS = rC.id()
                    # lcS = None
                    # if lC is not None:
                    #     lcS = lC.id()
                    # pg._y('\tNeigh: {0} : {1}'.format(lcS, rcS))
                    # pg._r("add:", b.id())
                    toBeReplaced.append(b)
                    if lC != c and lC is not None and lC != lastC:
                        # pg._r("follow up left:", lC.id())
                        replaceNode_(mesh, lC, n1, n2, marker, c)
                    if rC != c and rC is not None and rC != lastC:
                        # pg._r("follow up right:", rC.id())
                        replaceNode_(mesh, rC, n1, n2, marker, c)
        for b in toBeReplaced:
            nIds = [n2.id() if n == n1.id() else n for n in b.ids()]
            # pg._r('replace in boundary', b.id(), ':', n1.id(), n2.id())
            b.setNodes(mesh.nodes(nIds))
        # pg._r('replace in cell:', c.id(), ':', n1.id(), n2.id())
        nIds = [n2.id() if n == n1.id() else n for n in c.ids()]
        c.setNodes(mesh.nodes(nIds))
    paths = mesh.findPaths(mesh.findBoundaryByMarker(marker))
    # NOTE(review): pg.error presumably aborts/raises here; if it only
    # logs, the paths[0] accesses below would raise IndexError — confirm.
    if len(paths) == 0:
        pg.error("did not found path for marker: {0}".format(marker))
    newNodes = []
    if len(paths[0]) == 0:
        pg.error("did not found path for marker: {0}".format(marker))
    ## step 1 . fix direction along the path
    for i in range(len(paths[0])-1):
        nA1 = mesh.node(paths[0][i])
        nB1 = mesh.node(paths[0][i+1])
        b = pg.core.findBoundary(nA1, nB1)
        if b.node(0) != nA1:
            b.swapNorm()
        lC = b.leftCell()
        rC = b.rightCell()
        if rC is None or lC is None:
            pg.error('Path is not inside the mesh')
            return
    ## add new nodes and decouple cells along the path
    rightCells = []
    for i in range(len(paths[0])-1):
        nA1 = mesh.node(paths[0][i])
        nB1 = mesh.node(paths[0][i+1])
        b = pg.core.findBoundary(nA1, nB1)
        lC = b.leftCell()
        rC = b.rightCell()
        # pg._y(b.node(0).id(), b.node(1).id(), 'N', nA1.id(), nB1.id(), ':', lC.id(), rC.id())
        ### only if on outer boundary .. need check!!
        nA2 = mesh.createNode(nA1.pos(), nA1.marker())
        newNodes.append(nA2)
        # nA2 = newNodes[-1]
        if rC is not None:
            b.setRightCell(None)
            rightCells.append(rC)
            replaceNode_(mesh, rC, nA1, nA2, marker=b.marker())
    # The last path node is kept shared (end node stays inside the mesh).
    newNodes.append(mesh.node(paths[0][-1]))
    for i in range(len(newNodes)-1):
        b = mesh.createBoundary([newNodes[i+1].id(), newNodes[i].id()],
                                marker=boundaryMarker)
        b.setLeftCell(rightCells[i])
Mesh.cutBoundary = __Mesh_cutBoundary__
|
JuliusHen/gimli | pygimli/viewer/showmatrix.py | <gh_stars>100-1000
# -*- coding: utf-8 -*-
"""Generic matrix visualization tools."""
import matplotlib as mpl
import numpy as np
import pygimli as pg
from .mpl import createColorBar, updateColorBar
from .mpl.matrixview import drawBlockMatrix, drawSparseMatrix
def showMatrix(mat, ax=None, **kwargs):
    """Show various pyGIMLi matrices using matplotlib.

    Args
    ----
    mat: matrix
        scipy sparse matrix, pg sparse matrix or pg.matrix.BlockMatrix.
    ax: mpl.axes
        Axes to draw into; a new figure is created when None.

    Keyword Args
    ------------
    **kwargs : forwarded to mpl plotting commands

    Returns
    -------
    mpl.axes, Colorbar
    """
    if ax is None:
        # removed leftover debug print(ax)
        ax = pg.show()[0]

    # Ensure a defined colorbar even on unsupported matrix types
    # (previously UnboundLocalError if pg.error() returned).
    cBar = None

    try:
        from scipy.sparse import spmatrix
        if isinstance(mat, spmatrix):
            drawSparseMatrix(ax, mat, **kwargs)
            return ax, None
    except ImportError:
        pass

    if isinstance(mat, (pg.core.RSparseMapMatrix, pg.core.RSparseMatrix)):
        drawSparseMatrix(ax, mat, **kwargs)
    elif isinstance(mat, pg.matrix.BlockMatrix):
        gci, cBar = drawBlockMatrix(ax, mat, **kwargs)
        if cBar is None:
            # Build a categorical colorbar labelling each sub-matrix ID.
            uniqueIDs = pg.unique([e.matrixID for e in mat.entries()])
            cMap = pg.plt.cm.get_cmap("Set3", len(uniqueIDs))
            sm = pg.plt.cm.ScalarMappable(cmap=cMap)
            cBar = createColorBar(sm, ax=ax, label="Matrix ID",
                                  cMin=-0.5, cMax=len(uniqueIDs)-0.5)
            ticks = np.arange(len(uniqueIDs))
            cBar.set_ticks(ticks)
            labels = ["{:d}".format(ID) for ID in uniqueIDs]
            cBar.set_ticklabels(labels)
    else:
        pg.error("Matrix type not supported yet.")
    return ax, cBar
|
JuliusHen/gimli | pygimli/physics/ert/ertModelling.py | <gh_stars>100-1000
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""ERT modelling operator classes
* base class
* standard BERT modelling class using pygimli.core (C++) functions
* 2.5D non-optimized totalfield forward operator for ERT (reference)
"""
import numpy as np
import pygimli as pg
from pygimli.frameworks import MeshModelling
from .visualization import showERTData
from pygimli import pf
class ERTModellingBase(MeshModelling):
    """Modelling base class for ERT modelling."""

    def __init__(self, **kwargs):
        super(ERTModellingBase, self).__init__(**kwargs)

    def drawData(self, ax, data=None, **kwargs):
        """Draw data in given axe.

        Parameters
        ----------
        ax : mpl.axes
            Axes to draw into.
        data : iterable of values | None
            If iterable, treated as values plotted on self.data;
            if None, self.data with its 'rhoa' values is shown.
        """
        # default label/colormap for apparent resistivity
        kwargs['label'] = kwargs.pop('label', pg.unit('res'))
        kwargs['cMap'] = kwargs.pop('cMap', pg.utils.cMap('res'))
        if hasattr(data, '__iter__'):
            # data is a plain value vector; plot it on the stored container
            vals = data
            data = self.data
        elif data is None:
            data = self.data
            vals = kwargs.pop('vals', data['rhoa'])
        # NOTE(review): a non-iterable, non-None `data` leaves `vals`
        # undefined here — verify against callers.
        return showERTData(data, vals=vals, ax=ax, **kwargs)

    def drawModel(self, ax, model, **kwargs):
        """Draw the para domain with option model values."""
        kwargs.setdefault('label', pg.unit('res'))
        kwargs.setdefault('cMap', pg.utils.cMap('res'))
        kwargs.setdefault('logScale', True)
        return super().drawModel(ax=ax, model=model, **kwargs)
class ERTModelling(ERTModellingBase):
    """Forward operator for Electrical Resistivity Tomography.

    Note
    ----
    Convention for complex resistivity inversion:
    We want to use logarithm transformation for the imaginary part of model
    so we need the startmodel to have positive imaginary parts.
    The sign is flipped back to physical correct assumption before we call
    the response function.
    The Jacobian is calculated with negative imaginary parts and will
    be a conjugated complex block matrix for further calculations.
    """

    def __init__(self, sr=True, verbose=False):
        """Initialize with the C++ core operator.

        Parameters
        ----------
        sr : bool
            Use singularity removal (DCSRMultiElectrodeModelling) instead of
            the plain DCMultiElectrodeModelling core.
        verbose : bool
            Verbosity forwarded to the core operator.
        """
        super(ERTModelling, self).__init__()
        # don't use DC*fop or its regionmanager directly
        #
        self._core = None
        if sr:
            self._core = pg.core.DCSRMultiElectrodeModelling(verbose=verbose)
        else:
            self._core = pg.core.DCMultiElectrodeModelling(verbose=verbose)
        self._core.initJacobian()
        self.setJacobian(self._core.jacobian())
        # called from the ERTManager .. needed?
        # re-export core functionality as bound attributes
        self.solution = self._core.solution
        self.setComplex = self._core.setComplex
        self.complex = self._core.complex
        self.calculate = self._core.calculate
        self.calcGeometricFactor = self._core.calcGeometricFactor
        self.mapERTModel = self._core.mapERTModel
        self._conjImag = False  # the imaginary parts are flipped for log trans

    def setVerbose(self, v):
        """Set verbosity on both this operator and the core."""
        super().setVerbose(v)
        self._core.setVerbose(v)

    def setDefaultBackground(self):
        """Set the default background behaviour."""
        if self.complex():
            self.regionManager().addRegion(3, self._baseMesh, 2)
        regionIds = self.regionManager().regionIdxs()
        pg.info("Found {} regions.".format(len(regionIds)))
        if len(regionIds) > 1:
            # region with the smallest marker becomes the background
            bk = pg.sort(regionIds)[0]
            pg.info("Region with smallest marker ({0}) "
                    "set to background".format(bk))
            self.setRegionProperties(bk, background=True)

    def createStartModel(self, dataVals):
        """Create Starting model for ERT inversion."""
        if self.complex():
            dataC = pg.utils.toComplex(dataVals)
            nModel = self.regionManager().parameterCount() // 2
            smRe = np.ones(nModel) * np.median(np.median(dataC.real))
            smIm = np.ones(nModel) * np.median(np.median(dataC.imag))
            if min(smIm) < 0:
                # we want positive phase model
                sm = smRe - 1j * smIm
                pg.info("Model imaginary part being flipped to positive.")
                self._conjImag = True
            else:
                sm = smRe + 1j * smIm
            return pg.utils.squeezeComplex(sm)  # complex impedance
        else:
            return super(ERTModelling, self).createStartModel(dataVals)

    def flipImagPart(self, v):
        """Flip imaginary port (convention)."""
        z = pg.utils.toComplex(v)
        pg.warn('pre min/max={0} / {1} im: {2} / {3}'.format(
            pf(min(z.real)), pf(max(z.real)),
            pf(min(z.imag)), pf(max(z.imag))))
        v = pg.utils.squeezeComplex(pg.utils.toComplex(v),
                                    conj=self._conjImag)
        z = pg.utils.toComplex(v)
        pg.warn('pos min/max={0} / {1} im: {2} / {3}'.format(
            pf(min(z.real)), pf(max(z.real)),
            pf(min(z.imag)), pf(max(z.imag))))
        return v

    def response(self, mod):
        """Forward response (apparent resistivity)."""
        # ensure the mesh is initialized
        self.mesh()
        if self.complex() and self._conjImag:
            pg.warn('flip imaginary part for response calc')
            mod = self.flipImagPart(mod)
        resp = self._core.response(mod)
        if self.complex() and self._conjImag:
            pg.warn('backflip imaginary part after response calc')
            resp = self.flipImagPart(resp)
        return resp

    def createJacobian(self, mod):
        """Compute Jacobian matrix and store but not return."""
        # ensure the mesh is initialized
        self.mesh()
        if self.complex():
            if self._conjImag:
                pg.warn("Flipping imaginary part for jacobian calc")
                mod = self.flipImagPart(mod)
            self._core.createJacobian(mod)
            self._J = pg.utils.squeezeComplex(self._core.jacobian(),
                                              conj=self._conjImag
                                              )
            self.setJacobian(self._J)
            # pg._r("create Jacobian", self, self._J)
            return self._J
        return self._core.createJacobian(mod)

    def setDataPost(self, data):
        """Forward the data container to the core operator."""
        self._core.setData(data)

    def setMeshPost(self, mesh):
        """Forward the mesh to the core operator (region manager untouched)."""
        self._core.setMesh(mesh, ignoreRegionManager=True)
class ERTModellingReference(ERTModellingBase):
    """Reference implementation for 2.5D Electrical Resistivity Tomography."""

    def __init__(self, **kwargs):
        # BUG FIX: the original called super(ERTModelling, self).__init__(),
        # which raises TypeError since ERTModellingReference is not a
        # subclass of ERTModelling; also forward **kwargs to the base.
        super(ERTModellingReference, self).__init__(**kwargs)
        self.subPotentials = None   # per-wavenumber potential matrices
        self.lastResponse = None
        # only for mixed boundary hack since this need to know resistivies.
        self.resistivity = None
        # abscissa k and weight for 2.5 inverse cos-transform
        self.k = None
        self.w = None

    def response(self, model):
        """Solve forward task and return apparent resistivity for self.mesh."""
        # NOTE TODO can't be MT until mixed boundary condition depends on
        # self.resistivity
        pg.tic()
        if not self.data.allNonZero('k'):
            pg.error('Need valid geometric factors: "k".')
            pg.warn('Fallback "k" values to -sign("rhoa")')
            self.data.set('k', -pg.math.sign(self.data('rhoa')))
        mesh = self.mesh()
        nDof = mesh.nodeCount()
        elecs = self.data.sensorPositions()
        nEle = len(elecs)
        nData = self.data.size()
        self.resistivity = res = self.createMappedModel(model, -1.0)
        if self.verbose:
            print("Calculate response for model:", min(res), max(res))
        # integration range from half the smallest to twice the largest
        # electrode separation
        rMin = elecs[0].dist(elecs[1]) / 2.0
        rMax = elecs[0].dist(elecs[-1]) * 2.0
        k, w = self.getIntegrationWeights(rMin, rMax)
        self.k = k
        self.w = w
        # pg.show(mesh, res, label='res')
        # pg.wait()
        rhs = self.createRHS(mesh, elecs)
        # store all potential fields
        u = np.zeros((nEle, nDof))
        self.subPotentials = [pg.Matrix(nEle, nDof) for i in range(len(k))]
        for i, ki in enumerate(k):
            # solve one 2D Helmholtz-type problem per wavenumber ki
            uE = pg.solve(mesh, a=1./res, b=-(ki * ki)/res, f=rhs,
                          bc={'Robin': ['*', self.mixedBC]},
                          userData={'sourcePos': elecs, 'k': ki},
                          verbose=False, stats=0, debug=False)
            self.subPotentials[i] = uE
            u += w[i] * uE
        # collect potential matrix,
        # i.e., potential for all electrodes and all injections
        pM = np.zeros((nEle, nEle))
        for i in range(nEle):
            pM[i] = pg.interpolate(mesh, u[i, :], destPos=elecs)
        # collect resistivity values for all 4 pole measurements
        r = np.zeros(nData)
        for i in range(nData):
            iA = int(self.data('a')[i])
            iB = int(self.data('b')[i])
            iM = int(self.data('m')[i])
            iN = int(self.data('n')[i])
            uAB = pM[iA] - pM[iB]
            r[i] = uAB[iM] - uAB[iN]
        self.lastResponse = r * self.data('k')
        if self.verbose:
            print("Resp min/max: {0} {1} {2}s".format(min(self.lastResponse),
                                                      max(self.lastResponse),
                                                      pg.dur()))
        return self.lastResponse

    def createJacobian(self, model):
        """Compute the sensitivity matrix from the stored sub-potentials."""
        if self.subPotentials is None:
            self.response(model)
        J = self.jacobian()
        J.resize(self.data.size(), self.regionManager().parameterCount())
        cells = self.mesh().findCellByMarker(0, -1)
        Si = pg.matrix.ElementMatrix()
        St = pg.matrix.ElementMatrix()
        u = self.subPotentials
        pg.tic()
        if self.verbose:
            print("Calculate sensitivity matrix for model: ",
                  min(model), max(model))
        Jt = pg.Matrix(self.data.size(),
                       self.regionManager().parameterCount())
        for kIdx, w in enumerate(self.w):
            k = self.k[kIdx]
            Jt *= 0.
            A = pg.matrix.ElementMatrixMap()
            for i, c in enumerate(cells):
                modelIdx = c.marker()
                # 2.5D
                Si.u2(c)
                Si *= k * k
                Si += St.ux2uy2uz2(c)
                # 3D
                # Si.ux2uy2uz2(c); w = w* 2
                A.add(modelIdx, Si)
            for dataIdx in range(self.data.size()):
                a = int(self.data('a')[dataIdx])
                b = int(self.data('b')[dataIdx])
                m = int(self.data('m')[dataIdx])
                n = int(self.data('n')[dataIdx])
                # reciprocity: sensitivity from products of potentials
                Jt[dataIdx] = A.mult(u[kIdx][a] - u[kIdx][b],
                                     u[kIdx][m] - u[kIdx][n])
            J += w * Jt
        m2 = model*model
        k = self.data('k')
        for i in range(J.rows()):
            J[i] /= (m2 / k[i])
        if self.verbose:
            sumsens = np.zeros(J.rows())
            for i in range(J.rows()):
                sumsens[i] = pg.sum(J[i])
            print("sens sum: median = ", pg.math.median(sumsens),
                  " min = ", pg.min(sumsens),
                  " max = ", pg.max(sumsens))

    def calcGeometricFactor(self, data):
        """Calculate geometry factors for a given dataset."""
        # NOTE(review): elementwise vector comparison in this `if` —
        # verify intent is "all sensors at y == z" (flat layout).
        if pg.y(data.sensorPositions()) == pg.z(data.sensorPositions()):
            k = np.zeros(data.size())
            for i in range(data.size()):
                a = data.sensorPosition(data('a')[i])
                b = data.sensorPosition(data('b')[i])
                m = data.sensorPosition(data('m')[i])
                n = data.sensorPosition(data('n')[i])
                k[i] = 1./(2.*np.pi) * (1./a.dist(m) - 1./a.dist(n) -
                                        1./b.dist(m) + 1./b.dist(n))
            return k
        else:
            raise BaseException("Please use BERT for non-standard "
                                "data sets" + str(data))

    def uAnalytical(self, p, sourcePos, k):
        """
        Calculate analytical potential for homogeneous halfspace.

        For sigma = 1 [S m]
        """
        r1A = (p - sourcePos).abs()
        # Mirror on surface at depth=0
        r2A = (p - pg.RVector3(1.0, -1.0, 1.0) * sourcePos).abs()
        if r1A > 1e-12 and r2A > 1e-12:
            return (pg.math.besselK0(r1A * k) + pg.math.besselK0(r2A * k)) / \
                (2.0 * np.pi)
        else:
            return 0.

    def getIntegrationWeights(self, rMin, rMax):
        """Return wavenumbers and weights for the inverse cosine transform."""
        nGauLegendre = max(int((6.0 * np.log10(rMax / rMin))), 4)
        nGauLaguerre = 4
        k = pg.Vector()
        w = pg.Vector()
        k0 = 1.0 / (2.0 * rMin)
        # Gauss-Legendre part covers small k, Gauss-Laguerre the tail.
        pg.GaussLegendre(0.0, 1.0, nGauLegendre, k, w)
        kLeg = k0 * k * k
        wLeg = 2.0 * k0 * k * w / np.pi
        pg.GaussLaguerre(nGauLaguerre, k, w)
        kLag = k0 * (k + 1.0)
        wLag = k0 * np.exp(k) * w / np.pi
        return pg.cat(kLeg, kLag), pg.cat(wLeg, wLag)

    def mixedBC(self, boundary, userData):
        """Apply mixed boundary conditions."""
        if boundary.marker() != pg.core.MARKER_BOUND_MIXED:
            return 0
        sourcePos = pg.center(userData['sourcePos'])
        k = userData['k']
        r1 = boundary.center() - sourcePos
        # Mirror on surface at depth=0
        r2 = boundary.center() - pg.RVector3(1.0, -1.0, 1.0) * sourcePos
        r1A = r1.abs()
        r2A = r2.abs()
        rho = 1.
        if self.resistivity is not None:
            rho = self.resistivity[boundary.leftCell().id()]
        n = boundary.norm()
        if r1A > 1e-12 and r2A > 1e-12:
            # see mod-dc-2d example for robin like BC and the negative sign
            if (pg.math.besselK0(r1A * k) + pg.math.besselK0(r2A * k)) > 1e-12:
                return k / rho * (r1.dot(n) / r1A * pg.math.besselK1(r1A * k) +
                                  r2.dot(n) / r2A * pg.math.besselK1(r2A * k))\
                    / (pg.math.besselK0(r1A * k) +
                       pg.math.besselK0(r2A * k))
            else:
                return 0.
        else:
            return 0.

    def pointSource(self, cell, f, userData):
        r"""
        Define function for the current source term.

        :math:`\delta(x-pos), \int f(x) \delta(x-pos)=f(pos)=N(pos)`
        Right hand side entries will be shape functions(pos)
        """
        i = userData['i']
        sourcePos = userData['sourcePos'][i]
        if cell.shape().isInside(sourcePos):
            f.setVal(cell.N(cell.shape().rst(sourcePos)), cell.ids())

    def createRHS(self, mesh, elecs):
        """Create right-hand-side vector."""
        rhs = np.zeros((len(elecs), mesh.nodeCount()))
        for i, e in enumerate(elecs):
            c = mesh.findCell(e)
            rhs[i][c.ids()] = c.N(c.shape().rst(e))
        return rhs
if __name__ == "__main__":
    # Module is import-only; no standalone behaviour.
    pass
|
JuliusHen/gimli | doc/examples/2_seismics/plot_04_koenigsee.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
.. _ex:koenigsee:
Field data inversion ("Koenigsee")
==================================
This minimalistic example shows how the Refraction Manager can be used to invert
a field data set. Here, we consider the Koenigsee data set, which represents
classical refraction seismics data set with slightly heterogeneous overburden
and some high-velocity bedrock. The data file can be found in the `pyGIMLi
example data repository
<https://github.com/gimli-org/example-data/blob/master/traveltime/koenigsee.sgt>`_.
"""
# sphinx_gallery_thumbnail_number = 2
################################################################################
# We import pyGIMLi and the refraction manager.
import pygimli as pg
from pygimli.physics import TravelTimeManager
################################################################################
# The helper function `pg.getExampleFile` downloads the data set and saves it
# into a temporary location. Printing the data reveals that there are 714 data
# points using 63 sensors (shots and geophones) with the data columns s (shot),
# g (geophone), and t (traveltime). By default, there is also a validity flag.
data = pg.getExampleFile("traveltime/koenigsee.sgt", load=True, verbose=True)
print(data)
################################################################################
# Let's have a look at the data in the form of traveltime curves.
fig, ax = pg.plt.subplots()
pg.physics.traveltime.drawFirstPicks(ax, data)
################################################################################
# We initialize the refraction manager.
mgr = TravelTimeManager()
# Alternatively, one can plot a matrix plot of apparent velocities which is the
# more general function also making sense for crosshole data.
ax, cbar = mgr.showData(data)
################################################################################
# Finally, we call the `invert` method and plot the result.The mesh is created
# based on the sensor positions on-the-fly.
mgr.invert(data, secNodes=3, paraMaxCellSize=5.0,
zWeight=0.2, vTop=500, vBottom=5000,
verbose=1)
ax, cbar = mgr.showResult(logScale=True)
mgr.drawRayPaths(ax=ax, color="w", lw=0.3, alpha=0.5)
################################################################################
# Show result and fit of measured data and model response. You may want to save your results too.
fig = mgr.showResultAndFit()
mgr.saveResult()
################################################################################
# You can play around with the gradient starting model (`vTop` and `vBottom`
# arguments) and the regularization strength `lam`. You can also customize the
# mesh.
|
JuliusHen/gimli | doc/tutorials/3_inversion/plot_6-geostatConstraints.py | <filename>doc/tutorials/3_inversion/plot_6-geostatConstraints.py
#!/usr/bin/env python
# encoding: utf-8
r"""
Geostatistical regularization
-----------------------------
In this example we illustrate the use of geostatistical constraints on
irregular meshes as presented by :cite:`jordi2018geostatistical`, compared to
classical smoothness operators of first or second kind.
The elements of the covariance matrix :math:`\textbf{C}_{\text{M}}` are defined
by the distances H between the model cells i and j into the three directions
.. math::
\textbf{C}_{\text{M},ij}=\sigma^{2}\exp{\left(
-3\sqrt{\left(\frac{\textbf{H}^x_{ij}}{I_{x}}\right)^{2}+
\left(\frac{\textbf{H}^y_{ij}}{I_{y}}\right)^{2}+
\left(\frac{\textbf{H}^z_{ij}}{I_{z}}\right)^{2}}\right)}.
It defines the correlation between model cells as a function of correlation
lenghts (ranges) :math:`I_x`, :math:`I_y`, and :math:`I_z`. Of course, the
orientation of the coordinate axes is arbitrary and can be chosen by rotation.
Let us illustrate this by a simple mesh:
"""
# %%
# Computing covariance and constraint matrices
# --------------------------------------------
# We create a simple mesh using a box geometry
import matplotlib.pyplot as plt
import pygimli as pg
import pygimli.meshtools as mt
# We create a rectangular domain and mesh it with small triangles
rect = mt.createRectangle(start=[0, -10], end=[10, 0])
mesh = mt.createMesh(rect, quality=34.5, area=0.1)
# %%
# We compute such a covariance matrix by calling
CM = pg.utils.covarianceMatrix(mesh, I=5) # I taken for both x and y
# We search for the cell where the midpoint (5, -5) is located in
ind = mesh.findCell([5, -5]).id()
# and plot the according column using index access (numpy)
ax, cb = pg.show(mesh, CM[:, ind], cMap="magma_r")
# %%
# According to inverse theory, we use the square root of the covariance matrix
# as single-side regularization matrix C. It is computed by using an eigenvalue
# decomposition
#
# .. math::
#
# \textbf{C}_\text{M} = \textbf{Q}\textbf{D}\textbf{Q}^{T}
#
# based on LAPACK (numpy.linalg). The inverse square root is defined by
#
# .. math::
#
# \textbf{C}_\text{M}^{-0.5} = \textbf{Q}\textbf{D}^{-0.5}\textbf{Q}^{T}
#
# In order to avoid a matrix inverse (square root), a special matrix is derived
# doing the decomposition and storing the eigenvectors and eigenvalues values.
# A multiplication is done by multiplying with Q and scaling with the diagonal.
# This matrix is implemented in the :mod:`pygimli.matrix` module
# by the class :py:mod:`pg.matrix.Cm05Matrix`
Cm05 = pg.matrix.Cm05Matrix(CM)
# %%
# However, this matrix does not return a zero vector for a constant vector
out = Cm05 * pg.Vector(mesh.cellCount(), 1.0)
print("min/max value ", min(out), max(out))
# %%
# as desired for a roughness operator. Therefore, an additional matrix called
# :py:mod:`pg.matrix.GeostatisticalConstraintsMatrix`
# was implemented where this spur is corrected for.
# It is, like the correlation matrix, created by a mesh, a list of correlation
# lengths I, a dip angle that distorts the x/y plane and a strike angle
# towards the third direction.
#
C = pg.matrix.GeostatisticConstraintsMatrix(mesh=mesh, I=5)
# %%
# In order to extract a column, we generate a vector with a single 1, multiply
vec = pg.Vector(mesh.cellCount())
vec[ind] = 1.0
cor = C * vec
# %%
# and plot it using a linear or logarithmic scale
kwLin = dict(cMin=-1, cMax=1, cMap="bwr")
ax, cb = pg.show(mesh, cor, **kwLin)
kwLog = dict(cMin=1e-3, cMax=1, cMap="magma_r", logScale=True)
ax, cb = pg.show(mesh, pg.abs(cor), **kwLog)
# %%
# The constraints have a rather small footprint compared to the correlation
# if one considers values below a certain threshold as insignificant.
# %%
# Such a matrix can also be defined for different ranges and dip angles, e.g.
Cdip = pg.matrix.GeostatisticConstraintsMatrix(mesh=mesh, I=[9, 2], dip=-25)
ax, cb = pg.show(mesh, Cdip * vec, **kwLin)
ax, cb = pg.show(mesh, pg.abs(Cdip * vec), **kwLog)
# %%
# Even in the linear scale, but more in the log scale one can see the
# regularization footprint in the shape of an ellipsis.
# %%
# In order to illustrate the role of the constraints, we use a very simple
# mapping forward operator that retrieves the values in the mesh at some given
# positions. The constraints are therefore used as interpolation operators.
# Note that the mapping forward operator can also be used for defining prior
# knowledge if combined with another forward operator in a classical joint
# inversion framework.
# In the initialization, the indices are stored and a mapping matrix is created
# that projects the model vector to the forward response.
# This matrix is also the Jacobian matrix for the inversion.
class PriorFOP(pg.Modelling):
    """Forward operator that picks model values at fixed mesh positions.

    The forward response simply returns the model entries of the cells
    containing the given positions; the Jacobian is therefore a constant
    selection matrix set up once in the constructor.
    """

    def __init__(self, mesh, pos, **kwargs):
        """Init with mesh and some positions that are converted into ids."""
        super().__init__(**kwargs)
        self.setMesh(mesh)
        # cell index of every requested position
        self.ind = []
        for position in pos:
            self.ind.append(mesh.findCell(position).id())
        # selection matrix: one 1.0 per (row, picked cell)
        self.J = pg.SparseMapMatrix()
        self.J.resize(len(self.ind), mesh.cellCount())
        for row, cellId in enumerate(self.ind):
            self.J.setVal(row, cellId, 1.0)
        self.setJacobian(self.J)

    def response(self, model):
        """Return the model values at the stored cell indices."""
        return model[self.ind]

    def createJacobian(self, model):
        """Jacobian is constant (set in __init__); nothing to do."""
        pass
# %%
# Inversion with geostatistical constraints
# -----------------------------------------
# We choose some positions and initialize the forward operator
pos = [[2, -2], [8, -2], [5, -5], [2, -8], [8, -8]]
fop = PriorFOP(mesh, pos)
# For plotting the results, we create a figure and define some plotting options
fig, ax = plt.subplots(nrows=2, ncols=2, sharex=True, sharey=True)
kw = dict(
    colorBar=True,
    cMin=30,
    cMax=300,
    orientation='vertical',
    cMap='Spectral_r',
    logScale=True)
# We want to use a homogeneous starting model
tLog = pg.trans.TransLog()
vals = [30, 50, 300, 100, 200]
# We assume a 5% relative accuracy of the values
error = pg.Vector(len(vals), 0.05)
# set up data and model transformation log-scaled
inv = pg.Inversion(fop=fop)
inv.transData = tLog
inv.transModel = tLog
inv.lam = 40
startModel = pg.Vector(mesh.cellCount(), 30)
inv.startModel = startModel
# Initially, we use the first-order constraints (default)
res = inv.run(vals, error, cType=1, lam=35)
# BUG FIX: ``format((*fop(res)), ...)`` is a SyntaxError -- a parenthesized
# lone starred expression is not allowed in Python. Unpack directly instead.
# The 6 format slots take the 5 responses plus the chi-square value.
print(('Ctype=1: ' + '{:.1f} ' * 6).format(*fop(res), inv.chi2()))
pg.show(mesh, res, ax=ax[0, 0], **kw)
ax[0, 0].set_title("1st order")
# Next, we use the second order (curvature) constraint type
res = inv.run(vals, error, cType=2, lam=1000)
print(('Ctype=2: ' + '{:.1f} ' * 6).format(*fop(res), inv.chi2()))
pg.show(mesh, res, ax=ax[0, 1], **kw)
ax[0, 1].set_title("2nd order")
# Now we set the geostatistic isotropic operator with 5m correlation length
fop.setConstraints(C)
res = inv.run(vals, error, lam=25)
print(('Cg-5/5m: ' + '{:.1f} ' * 6).format(*fop(res), inv.chi2()))
pg.show(mesh, res, ax=ax[1, 0], **kw)
ax[1, 0].set_title("I=5")
# and finally we use the dipping constraint matrix
fop.setConstraints(Cdip)
res = inv.run(vals, error, lam=35)
print(('Cg-9/2m: ' + '{:.1f} ' * 6).format(*fop(res), inv.chi2()))
pg.show(mesh, res, ax=ax[1, 1], **kw)
# BUG FIX: the title said "I=[10/2], dip=25" but Cdip was built above with
# I=[9, 2] and dip=-25; make the label match the operator.
ax[1, 1].set_title("I=[9/2], dip=-25")
# plot the position of the priors
for ai in ax.flat:
    for po in pos:
        ai.plot(*po, marker='o', markersize=10, color='k', fillstyle='none')
# %%
# Note that all four regularization operators fit the data equivalently but
# the images (i.e. how the gaps between the data points are filled) are quite
# different. This is something we should have in mind using regularization.
# %%
# Generating geostatistical media
# -------------------------------
# For generating geostatistical media, one can use the function
# generateGeostatisticalModel. It computes a correlation matrix and multiplies
# it with a pseudo-random (randn) series. The arguments are the same as for the
# correlation or constraint matrices.
model = pg.utils.generateGeostatisticalModel(mesh, I=[20, 4])
ax, cb = pg.show(mesh, model)
|
JuliusHen/gimli | doc/tutorials/3_inversion/plot_0-expfit.py | #!/usr/bin/env python
# encoding: utf-8
r"""
Simple fit
==========
This tutorial shows how to do the simplest inversion case, a curve fit, by
setting up our own forward operator. The function to be fitted is
.. math::
f(x) = A * e^{-x/X}
with the two unknown coefficients A (a signal amplitude) and X (a decay rate).
Both A and X are assumed to be positive which is often the case for physical
properties. The easiest way to do this is via a logarithmic transformation of
the model vector (containing A and X) which is very easily done in pyGIMLi.
First we import the pygimli library under a short name pg and the numerics
library numpy. Additionally we load the python plotting module of the library
matplotlib. Both are contained in most python distributions and systems.
"""
import pygimli as pg
import numpy as np
import matplotlib.pyplot as plt
###############################################################################
# We set up the modelling operator, i.e. to return :math:`{\bf f}({\bf x})` for
# given model parameters A and X subsumed in a vector. In order to be able to
# use operator in inversion, we derive from the abstract modelling base class.
# The latter holds the main machinery for generating the Jacobian and administering the
# model, the regularization and so on. The only function to overwrite is
# **response()**. If no function **createJacobian** is provided, they are
# computed by brute force (forward calculations with altered parameters).
class ExpModelling(pg.Modelling):
    """Forward operator for the exponential decay f(x) = A * exp(-x / X)."""

    def __init__(self, xvec, verbose=False):
        """Keep the abscissa vector; *verbose* is accepted but unused here."""
        super().__init__()
        self.x = xvec

    def response(self, model):
        """Evaluate A * exp(-x / X) with A = model[0] and X = model[1]."""
        amplitude, decay = model[0], model[1]
        return amplitude * pg.exp(-self.x / decay)

    def createStartModel(self, dataVals):
        """Return a fixed, positive default starting guess for (A, X)."""
        return pg.Vector([1.0, 3.0])
###############################################################################
# The init function saves the x vector and defines the parameterization, i.e.
# two independent parameters (a 1D mesh with 1 cell and 2 properties).
# The response function computes the function using A=model[0] and X=model[1]
# The function startModel defines a meaningful starting vector. There are other
# methods to set the starting model as inv.setModel() but this one is a default
# one for people who use the class and forget about a starting model.
# We first create an abscissa vector using numpy (note that pygimli also
# provides an exp function and generate synthetic data with two arbitrary A and
# X values.
x = np.arange(0, 1, 1e-2)  # abscissa: 100 points in [0, 1)
data = 10.5 * np.exp(- x / 550e-3)  # synthetic data with A=10.5, X=0.55
###############################################################################
# We define an (absolute) error level and add Gaussian noise to the data.
error = 0.5
data += pg.randn(*data.shape)*error
relError = error / data  # relative error, as expected by the inversion run
###############################################################################
# Next, an instance of the forward operator is created. We could use it for
# calculating the synthetic data using f.response([10.5, 0.55]) or just
# f([10.5, 0.55]). We create a real-valued (R) inversion passing the forward
# operator, the data. A verbose boolean flag could be added to provide some
# output the inversion, another one prints more and saves files for debugging.
f = ExpModelling(x)
inv = pg.Inversion(f)
###############################################################################
# We create a real-valued logarithmic transformation and apply it to the model.
# Similar could be done for the data which are by default treated linearly.
# We then set the error level that is used for data weighting. It can be a
# float number or a vector of data length. One can also set a relative error.
# Finally, we define the inversion style as Marquardt scheme (pure local damping
# with decreasing the regularization parameter subsequently) and start with a
# relatively large regularization strength to avoid overshoot.
# Finally run yields the coefficient vector and we plot some statistics.
tLog = pg.trans.TransLog()
f.modelTrans = tLog
# NOTE(review): ``inv._inv`` pokes the underlying C++ inversion object
# directly -- confirm no public API exists for these two settings.
inv._inv.setMarquardtScheme()
inv._inv.setLambda(100)
coeff = inv.run(data, relError, verbose=True)
print(inv.relrms(), inv.chi2())
print(coeff)
###############################################################################
# We see that after 5 iterations the absolute rms value equals the noise level
# corresponding to a chi-squared misfit value of 1 as it should be the case for
# synthetic data. The relative rms (in %) is less relevant here but can be for
# other applications. Additionally the ranges for model and model response are
# given and the objective function consisting of data misfit and model
# roughness times lambda. Note that due to the local regularization the second
# term does not contribute to Phi. Set verbose to True to see the whole course
# of inversion. The values of the coefficient vector (a GIMLi real vector) are
# as expected close (i.e. equivalent) to the synthetic model.
###############################################################################
# We finally create a plotting figure and plot both data and model response.
plt.figure()
plt.plot(x, data, 'rx', x, inv.response, 'b-')
###############################################################################
# The createMesh1D automatically attributed the markers 0 and 1 to the two
# model parameters A and X, respectively. Each marker leads to a region that
# can be individually treated, e.g. the starting value, lower or upper bounds,
# or all three at the same time (setParameters). This changes the model
# transformation which can of course be region-specific.
# f.region(0).setLowerBound(0.1)
# f.region(0).setStartModel(3)
# f.region(1).setParameters(0.3, 0.01, 1.0)
###############################################################################
# If these are set before the inversion is used, they are used automatically.
# We set the model by hand using the new starting model
# inv.setVerbose(True)
# inv.setModel(f.createStartModel())
# print(inv.run())
# inv.echoStatus()
###############################################################################
# The result is pretty much the same as before but for stronger equivalence or
# smoothness-constrained regularization prior information might help a lot.
|
JuliusHen/gimli | pygimli/physics/traveltime/__init__.py | <filename>pygimli/physics/traveltime/__init__.py
# -*- coding: utf-8 -*-
"""Refraction seismics or first arrival traveltime calculations."""
from .importData import load
from .tt import simulate, DataContainerTT
from .plotting import drawFirstPicks, drawTravelTimeData, drawVA, showVA
from .utils import (createGradientModel2D, createRAData, shotReceiverDistances,
createCrossholeData)
#from .refraction import Refraction, Tomography # will be removed(201909)
from .refraction1d import RefractionNLayer, RefractionNLayerFix1stLayer
from .TravelTimeManager import TravelTimeDijkstraModelling, TravelTimeManager
# Names re-exported via ``from pygimli.physics.traveltime import *``.
# NOTE(review): the imported helpers ``load``, ``simulate``,
# ``DataContainerTT`` and ``createCrossholeData`` are not listed here --
# confirm whether their omission from the wildcard export is intentional.
__all__ = [
    'drawTravelTimeData',
    'drawVA',
    'showVA',
    'drawFirstPicks',
    'createRAData',
    'createGradientModel2D',
    'RefractionNLayer',
    'RefractionNLayerFix1stLayer',
    'shotReceiverDistances',
    'TravelTimeManager',
    'TravelTimeDijkstraModelling'
]
|
JuliusHen/gimli | doc/examples/3_dc_and_ip/plot_05_ert_4_point_sensitivities.py | <gh_stars>100-1000
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Four-point sensitivities
------------------------
In this example, we illustrate how to visualize the sensitivities of four-point
arrays. You can easily loop over the plotting command to create something like:
https://www.youtube.com/watch?v=lt1qV-2d5Ps
"""
import numpy as np
import matplotlib.pyplot as plt
import pygimli as pg
import pygimli.meshtools as mt
from pygimli.physics import ert
###############################################################################
# We start by creating a ERT data container with three four-point arrays.
scheme = pg.DataContainerERT()
nelecs = 10
pos = np.zeros((nelecs, 2))
pos[:, 0] = np.linspace(5, 25, nelecs)  # electrodes along x at the surface (z=0)
scheme.setSensorPositions(pos)
measurements = np.array((
    [0, 3, 6, 9],  # Dipole-Dipole
    [0, 9, 3, 6],  # Wenner
    [0, 9, 4, 5]   # Schlumberger
))
# assign the a/b (current) and m/n (potential) electrode indices column-wise
for i, elec in enumerate("abmn"):
    scheme[elec] = measurements[:, i]
scheme["k"] = ert.createGeometricFactors(scheme)
###############################################################################
# Now we set up a 2D mesh.
world = mt.createWorld(start=[0, 0], end=[30, -10], worldMarker=True)
# Insert nodes at the electrode positions so the mesh is refined there.
# NOTE: this loop rebinds ``pos`` (previously the sensor position array).
for pos in scheme.sensorPositions():
    world.createNode(pos)
mesh = mt.createMesh(world, area=.05, quality=33, marker=1)
###############################################################################
# As a last step we invoke the ERT manager and calculate the Jacobian for a
# homogeneous half-space.
fop = ert.ERTModelling()
fop.setData(scheme)
fop.setMesh(mesh)
model = np.ones(mesh.cellCount())
fop.createJacobian(model)
###############################################################################
# Final visualization
def getABMN(scheme, idx):
    """Return the (x, y) coordinates of the four-point configuration *idx*
    from the DataContainerERT *scheme*, keyed by electrode letter a/b/m/n."""
    def _xy(elec):
        # look up the sensor id stored for this electrode, then its position
        sensor = scheme.sensorPosition(int(scheme(elec)[idx]))
        return sensor.x(), sensor.y()

    return {elec: _xy(elec) for elec in "abmn"}
def plotABMN(ax, scheme, idx):
    """Visualize the four-point configuration *idx* of *scheme* on *ax*.

    Current electrodes (A, B) are drawn red, potential electrodes (M, N)
    blue, each with an annotated label offset above the position.
    """
    coords = getABMN(scheme, idx)
    for elec in coords:
        x, y = coords[elec]
        if elec in "ab":
            color = "red"
        else:
            color = "blue"
        ax.plot(x, y, marker=".", color=color, ms=10)
        # BUG FIX: the call passed both ``size=12`` and its alias
        # ``fontsize=10``; matplotlib rejects duplicate alias kwargs.
        ax.annotate(elec.upper(), xy=(x, y), ha="center", fontsize=10,
                    bbox=dict(boxstyle="round", fc=(0.8, 0.8, 0.8), ec=color),
                    xytext=(0, 20), textcoords='offset points',
                    arrowprops=dict(arrowstyle="wedge, tail_width=.5",
                                    fc=color, ec=color, patchA=None,
                                    alpha=0.75))
    # BUG FIX: removed a stray, incomplete ``ax.plot(coords["a"][0],)`` left
    # over from debugging; it drew a spurious artist on every axes.
labels = ["Dipole-Dipole", "Wenner", "Schlumberger"]
# One subplot row per measurement; shared axes ease visual comparison.
fig, ax = plt.subplots(scheme.size(), 1, sharex=True, figsize=(8, 8))
for i, sens in enumerate(fop.jacobian()):
    # Label in lower-left corner
    ax[i].text(.01, .15, labels[i], horizontalalignment='left',
               verticalalignment='top', transform=ax[i].transAxes, fontsize=12,
               fontweight="bold")
    # Electrode annotations
    plotABMN(ax[i], scheme, i)
    # Log-scaled and normalized sensitivity: divide by cell size to get a
    # density, damp small magnitudes with a log-drop tolerance, scale to 1.
    normsens = pg.utils.logDropTol(sens/mesh.cellSizes(), 8e-4)
    normsens /= np.max(normsens)
    pg.show(mesh, normsens, cMap="RdGy_r", ax=ax[i], orientation="vertical",
            label="Normalized\nsensitivity", nLevs=3, cMin=-1, cMax=1)
|
JuliusHen/gimli | pygimli/core/core.py | <filename>pygimli/core/core.py
# -*- coding: utf-8 -*-
"""Import the compiled pygimli core bindings, preferring an in-place build
and falling back to the separately installed ``pgcore`` package."""
import os
import sys
# BUG FIX: ``traceback`` was used in the error handler but never imported.
import traceback

# if sys.platform == 'win32':
#     os.environ['PATH'] = os.path.abspath(__file__) + ';' + os.environ['PATH']
pgcore = None

try:
    # Prefer an in-place build of the bindings.
    from . import _pygimli_
    from ._pygimli_ import *
    pgcore = _pygimli_
except ImportError:
    # BUG FIX: the original used two sibling ``except ImportError`` clauses;
    # the second could never run (an exception raised inside an except block
    # is not caught by a sibling handler), so a failing ``import pgcore``
    # crashed without the intended error message. Nest the fallback instead.
    try:
        import pgcore
        from pgcore import *
    except ImportError as e:
        print(e)
        traceback.print_exc(file=sys.stdout)
        sys.stderr.write("ERROR: cannot import the library '_pygimli_'.\n")
|
JuliusHen/gimli | pygimli/core/__init__.py | <reponame>JuliusHen/gimli
# -*- coding: utf-8 -*-
"""
Imports and extensions of the C++ bindings.
"""
import os
import sys
import traceback
import numpy as np
from .core import pgcore
from .core import *
# #######################################
# ### Global convenience functions #####
# #######################################
# Placeholder so the attribute always exists; the real ``load`` is attached
# elsewhere in the package.
pgcore.load = None
###########################
# print function for gimli stuff
############################
def __RVector_str(self, valsOnly=False):
    """Compact string form of a vector: size plus its values.

    Vectors with 101 or more entries are abbreviated to first/last value;
    ``valsOnly`` suppresses the leading size (short vectors only).
    """
    n = len(self)
    if n >= 101:
        # abbreviated form for long vectors (valsOnly is ignored here,
        # matching the historic behaviour)
        return (str(self.size()) + " [" + str(self[0]) + ",...," +
                str(self[self.size() - 1]) + "]")

    prefix = "" if valsOnly else str(self.size())
    if n == 0:
        return prefix
    body = ", ".join(str(self[i]) for i in range(n))
    return prefix + " [" + body + "]"
def __RVector3_str(self):
    # Human-readable form of a 3D position vector.
    return ("RVector3: (" + str(self.x()) + ", " + str(self.y()) + ", " + str(
        self.z()) + ")")


def __R3Vector_str(self):
    # Short vectors print their full numpy representation, long ones a summary.
    if self.size() < 20:
        return self.array().__str__()

    return "R3Vector: n=" + str(self.size())


def __Line_str(self):
    # A line is printed via its two end points.
    return "Line: " + str(self.p0()) + " " + str(self.p1())


def __BoundingBox_str(self):
    # Axis-aligned bounding box printed via its min/max corners.
    s = ''
    s += "BoundingBox [{0}, {1}]".format(self.min(), self.max())
    return s

# Install the helpers above as ``repr`` on the exposed core classes.
pgcore.RVector.__repr__ = __RVector_str
pgcore.CVector.__repr__ = __RVector_str
pgcore.BVector.__repr__ = __RVector_str
pgcore.IVector.__repr__ = __RVector_str
pgcore.IndexArray.__repr__ = __RVector_str
pgcore.RVector3.__repr__ = __RVector3_str
pgcore.R3Vector.__repr__ = __R3Vector_str
pgcore.Line.__repr__ = __Line_str
pgcore.BoundingBox.__repr__ = __BoundingBox_str
############################
# compatibility stuff
############################
def nonzero_test(self):
    """Guard against accidental truth-testing of core vectors.

    Installed as ``__bool__``/``__nonzero__`` so that using ``and``/``or``
    on a vector fails loudly instead of silently misbehaving.
    """
    msg = ("Warning! there is no 'and' and 'or' for BVector and RVector. "
           "Use binary operators '&' or '|' instead. "
           "If you looking for the nonzero test, use len(v) > 0")
    raise BaseException(msg)
def np_round__(self, r):
    # Round to ``r`` decimals via numpy (returns a numpy array copy).
    return np.round(self.array(), r)

# Truth-testing of whole vectors is ambiguous -- make it fail loudly.
pgcore.RVector.__bool__ = nonzero_test
pgcore.R3Vector.__bool__ = nonzero_test
pgcore.BVector.__bool__ = nonzero_test
pgcore.CVector.__bool__ = nonzero_test
pgcore.IVector.__bool__ = nonzero_test
pgcore.IndexArray.__bool__ = nonzero_test

# Python 2 spelling of the same protocol.
pgcore.RVector.__nonzero__ = nonzero_test
pgcore.R3Vector.__nonzero__ = nonzero_test
pgcore.BVector.__nonzero__ = nonzero_test
pgcore.CVector.__nonzero__ = nonzero_test
pgcore.IVector.__nonzero__ = nonzero_test
pgcore.IndexArray.__nonzero__ = nonzero_test

pgcore.RVector.__round__ = np_round__


def _invertBVector_(self):
    # Logical NOT for boolean vectors: ``~bv``.
    return pgcore.inv(self)

pgcore.BVector.__invert__ = _invertBVector_
pgcore.BVector.__inv__ = _invertBVector_


def _lowerThen_(self, v2):
    """Overwrite bvector = v1 < v2 since there is a wrong operator due to the
    boost binding generation
    """
    # Implemented as NOT(self >= v2); the name keeps its historic typo.
    return pgcore.inv(self >= v2)

pgcore.RVector.__lt__ = _lowerThen_
pgcore.R3Vector.__lt__ = _lowerThen_
pgcore.BVector.__lt__ = _lowerThen_
pgcore.CVector.__lt__ = _lowerThen_
pgcore.IVector.__lt__ = _lowerThen_
pgcore.IndexArray.__lt__ = _lowerThen_
######################
# special constructors
######################
# Overwrite constructor for IndexArray
# This seams ugly but necessary until we can recognize numpy array in
# custom_rvalue
__origIndexArrayInit__ = pgcore.IndexArray.__init__


def __newIndexArrayInit__(self, arr, val=None):
    """Accept numpy-like integer iterables by casting element-wise to int."""
    # print("Custom IndexArray", arr, val)
    if hasattr(arr, 'dtype') and hasattr(arr, '__iter__'):
        __origIndexArrayInit__(self, [int(a) for a in arr])
    else:
        if val:
            __origIndexArrayInit__(self, arr, val)
        else:
            __origIndexArrayInit__(self, arr)

pgcore.IndexArray.__init__ = __newIndexArrayInit__

# Overwrite constructor for BVector
# This seams ugly but necessary until we can recognize numpy array in
# custom_rvalue
__origBVectorInit__ = pgcore.BVector.__init__


def __newBVectorInit__(self, arr, val=None):
    # Accept numpy/boolean iterables by copying element-wise.
    if hasattr(arr, 'dtype') and hasattr(arr, '__iter__'):
        # this is hell slow .. better in custom_rvalue.cpp or in
        # vector.h directly from pyobject
        __origBVectorInit__(self, len(arr))
        for i, a in enumerate(arr):
            self.setVal(bool(a), i)
    else:
        if val:
            __origBVectorInit__(self, arr, val)
        else:
            __origBVectorInit__(self, arr)

pgcore.BVector.__init__ = __newBVectorInit__
######################
# special overwrites
######################
# RVector + int fails .. so we need to tweak this command
# Each wrapper promotes int operands to float before delegating to the
# original C++ operator, which otherwise misinterprets plain ints.
__oldRVectorAdd__ = pgcore.RVector.__add__


def __newRVectorAdd__(a, b):
    # complex numpy arrays need an explicit CVector conversion first
    if isinstance(b, np.ndarray) and b.dtype == complex:
        return __oldRVectorAdd__(a, CVector(b))
    if isinstance(b, int):
        return __oldRVectorAdd__(a, float(b))
    if isinstance(a, int):
        return __oldRVectorAdd__(float(a), b)
    return __oldRVectorAdd__(a, b)

pgcore.RVector.__add__ = __newRVectorAdd__

__oldRVectorSub__ = pgcore.RVector.__sub__


def __newRVectorSub__(a, b):
    if isinstance(b, int):
        return __oldRVectorSub__(a, float(b))
    if isinstance(a, int):
        return __oldRVectorSub__(float(a), b)
    return __oldRVectorSub__(a, b)

pgcore.RVector.__sub__ = __newRVectorSub__

__oldRVectorMul__ = pgcore.RVector.__mul__


def __newRVectorMul__(a, b):
    if isinstance(b, int):
        return __oldRVectorMul__(a, float(b))
    if isinstance(a, int):
        return __oldRVectorMul__(float(a), b)
    return __oldRVectorMul__(a, b)

pgcore.RVector.__mul__ = __newRVectorMul__
# Python 3 cores expose ``__truediv__``; very old (Python 2 era) cores only
# provide ``__div__``. Probe for the attribute explicitly.
# BUG FIX: the fallback used a bare ``except:`` which would also swallow
# unrelated errors (even KeyboardInterrupt); catch AttributeError only.
try:
    __oldRVectorTrueDiv__ = pgcore.RVector.__truediv__

    def __newRVectorTrueDiv__(a, b):
        # int operands are misinterpreted by the bindings; promote to float.
        if isinstance(b, int):
            return __oldRVectorTrueDiv__(a, float(b))
        if isinstance(a, int):
            return __oldRVectorTrueDiv__(float(a), b)
        return __oldRVectorTrueDiv__(a, b)

    pgcore.RVector.__truediv__ = __newRVectorTrueDiv__
except AttributeError:
    __oldRVectorTrueDiv__ = pgcore.RVector.__div__

    def __newRVectorTrueDiv__(a, b):
        if isinstance(b, int):
            return __oldRVectorTrueDiv__(a, float(b))
        if isinstance(a, int):
            return __oldRVectorTrueDiv__(float(a), b)
        return __oldRVectorTrueDiv__(a, b)

    pgcore.RVector.__div__ = __newRVectorTrueDiv__
################################################################################
# override wrong default conversion from int to IndexArray(int) for setVal #
################################################################################
# Each wrapper routes the second positional argument of ``setVal`` to the
# correct keyword (index ``i`` vs. boolean mask ``bv``); without this the
# bindings convert a plain int into an IndexArray.
__origRVectorSetVal__ = pgcore.RVector.setVal


def __newRVectorSetVal__(self, *args, **kwargs):
    # print('__newRVectorSetVal__', *args, **kwargs)
    # Only the RVector variant supports negative indices (counted from the
    # end); the other vector types below do not.
    if len(args) == 2:
        if isinstance(args[1], int):
            if args[1] < 0:
                return __origRVectorSetVal__(self, args[0],
                                             i=len(self) + args[1])
            else:
                return __origRVectorSetVal__(self, args[0], i=args[1])
        if isinstance(args[1], pgcore.BVector):
            return __origRVectorSetVal__(self, args[0], bv=args[1])
    return __origRVectorSetVal__(self, *args, **kwargs)

pgcore.RVector.setVal = __newRVectorSetVal__

__origR3VectorSetVal__ = pgcore.R3Vector.setVal


def __newR3VectorSetVal__(self, *args, **kwargs):
    # print('__newRVectorSetVal__', *args, **kwargs)
    if len(args) == 2:
        if isinstance(args[1], int):
            return __origR3VectorSetVal__(self, args[0], i=args[1])
        if isinstance(args[1], pgcore.BVector):
            return __origR3VectorSetVal__(self, args[0], bv=args[1])
    return __origR3VectorSetVal__(self, *args, **kwargs)

pgcore.R3Vector.setVal = __newR3VectorSetVal__

__origBVectorSetVal__ = pgcore.BVector.setVal


def __newBVectorSetVal__(self, *args, **kwargs):
    if len(args) == 2:
        if isinstance(args[1], int):
            return __origBVectorSetVal__(self, args[0], i=args[1])
        if isinstance(args[1], pgcore.BVector):
            return __origBVectorSetVal__(self, args[0], bv=args[1])
    return __origBVectorSetVal__(self, *args, **kwargs)

pgcore.BVector.setVal = __newBVectorSetVal__

__origCVectorSetVal__ = pgcore.CVector.setVal


def __newCVectorSetVal__(self, *args, **kwargs):
    if len(args) == 2:
        if isinstance(args[1], int):
            return __origCVectorSetVal__(self, args[0], i=args[1])
        if isinstance(args[1], pgcore.BVector):
            return __origCVectorSetVal__(self, args[0], bv=args[1])
    return __origCVectorSetVal__(self, *args, **kwargs)

pgcore.CVector.setVal = __newCVectorSetVal__

__origIVectorSetVal__ = pgcore.IVector.setVal


def __newIVectorSetVal__(self, *args, **kwargs):
    if len(args) == 2:
        if isinstance(args[1], int):
            return __origIVectorSetVal__(self, args[0], i=args[1])
        if isinstance(args[1], pgcore.BVector):
            return __origIVectorSetVal__(self, args[0], bv=args[1])
    return __origIVectorSetVal__(self, *args, **kwargs)

pgcore.IVector.setVal = __newIVectorSetVal__

__origIndexArraySetVal__ = pgcore.IndexArray.setVal


def __newIndexArraySetVal__(self, *args, **kwargs):
    if len(args) == 2:
        if isinstance(args[1], int):
            return __origIndexArraySetVal__(self, args[0], i=args[1])
        if isinstance(args[1], pgcore.BVector):
            return __origIndexArraySetVal__(self, args[0], bv=args[1])
    return __origIndexArraySetVal__(self, *args, **kwargs)

pgcore.IndexArray.setVal = __newIndexArraySetVal__
############################
# Indexing [] operator for RVector, CVector,
# RVector3, R3Vector, RMatrix, CMatrix
############################
def __getVal(self, idx):
    """Index read access (pure Python, hence slow).

    Supports boolean masks, index vectors, slices, generic iterables and
    plain ints (including -1 for the last element).
    """
    # print("getval", type(idx), idx)
    # print(dir(idx))
    if isinstance(idx, pgcore.BVector) or isinstance(
            idx, pgcore.IVector) or isinstance(idx, pgcore.IndexArray):
        # print("BVector, IVector, IndexArray", idx)
        return self.get_(idx)
    elif isinstance(idx, slice):
        s = idx.start
        e = idx.stop
        if s is None:
            s = 0
        if e is None:
            e = len(self)

        if idx.step is None:
            # contiguous slice: delegate to the fast range getter
            return self.getVal(int(s), int(e))
        else:
            # print(s, e, idx.step)
            step = idx.step
            # a pure reverse slice v[::-1] iterates from the back
            if step < 0 and idx.start is None and idx.stop is None:
                ids = range(e - 1, s - 1, idx.step)
            else:
                ids = range(s, e, idx.step)
            if len(ids):
                return self.get_(ids)
            else:
                # empty stepped slice falls back to a single-element get
                return self.get_(0)
            # raise Exception("slice invalid")
    elif isinstance(idx, list) or hasattr(idx, '__iter__'):
        if isinstance(idx[0], int):
            return self.get_(idx)
        elif hasattr(idx[0], 'dtype'):
            # print("numpy: ", idx[0].dtype.str, idx[0].dtype, type(idx[0]))
            if idx[0].dtype == 'bool':
                # boolean numpy mask: collect the indices of the True entries
                return self.get_([i for i, x in enumerate(idx) if x])
            # return self[np.nonzero(idx)[0]]
        # print("default")
        return self.get_([int(a) for a in idx])
    elif idx == -1:
        idx = len(self) - 1

    return self.getVal(int(idx))
def __setVal(self, idx, val):
    """Index write access [] for vectors and matrices (slices, tuples,
    complex values and plain indices)."""
    # print("__setVal", self, 'idx', idx, 'val:', val)
    if isinstance(idx, slice):
        if idx.step is None:
            if idx.start is None:
                self.setVal(val, 0, int(idx.stop))
            else:
                self.setVal(val, int(idx.start), int(idx.stop))
            return
        else:
            # NOTE(review): ``pg`` is not imported in this module scope --
            # confirm it is injected elsewhere before this branch can run.
            pg.critical("not yet implemented for slice:", slice)
    elif isinstance(idx, tuple):
        # print("tuple", idx, type(idx))
        if isinstance(self, pgcore.RMatrix):
            # matrix write access addressed as (row, col)
            self.rowRef(int(idx[0])).setVal(val, int(idx[1]))
            return
        else:
            pg.error("Can't set index with tuple", idx, "for", self)
            return
    # if isinstance(idx, pgcore.BVector):
    #     print("__setVal", self, idx, 'val:', val)
    #     self.setVal(val, bv=idx)
    #     return

    if isinstance(val, complex):
        # complex values use the ``id``/``ids`` keyword spelling of setVal
        if isinstance(idx, int):
            return self.setVal(val=val, id=idx)
        else:
            return self.setVal(val=val, ids=idx)

    if isinstance(self, pgcore.RMatrix):
        self.setVal(idx, val)
    else:
        self.setVal(val, idx)
def __getValMatrix(self, idx):
    """Matrix read access: slices return row references, tuples address
    (row-slice, col) or (row, cols), plain ints return a single row."""
    # print(idx, type(idx))
    if isinstance(idx, slice):
        step = idx.step
        if step is None:
            step = 1
        start = idx.start
        if start is None:
            start = 0
        stop = idx.stop
        if stop is None:
            stop = len(self)

        return [self.rowRef(i) for i in range(start, stop, step)]

    elif isinstance(idx, tuple):
        # print(idx, type(idx))
        if isinstance(idx[0], slice):
            if isinstance(idx[1], int):
                # column extraction: gather entry idx[1] from each sliced row
                tmp = self.__getitem__(idx[0])
                ret = pgcore.RVector(len(tmp))
                for i, t in enumerate(tmp):
                    ret[i] = t[idx[1]]
                return ret
        else:
            return self.row(int(idx[0])).__getitem__(idx[1])

    if idx == -1:
        idx = len(self) - 1

    return self.row(idx)
# Install the pure-Python indexing fallbacks on all core containers.
pgcore.RVector.__setitem__ = __setVal
pgcore.RVector.__getitem__ = __getVal  # very slow -- inline is better
pgcore.CVector.__setitem__ = __setVal
pgcore.CVector.__getitem__ = __getVal  # very slow -- inline is better
pgcore.BVector.__setitem__ = __setVal
pgcore.BVector.__getitem__ = __getVal  # very slow -- inline is better
pgcore.IVector.__setitem__ = __setVal
pgcore.IVector.__getitem__ = __getVal  # very slow -- inline is better
pgcore.R3Vector.__setitem__ = __setVal
pgcore.R3Vector.__getitem__ = __getVal  # very slow -- inline is better
pgcore.IndexArray.__setitem__ = __setVal
pgcore.IndexArray.__getitem__ = __getVal  # very slow -- inline is better
pgcore.RVector3.__setitem__ = __setVal
pgcore.RMatrix.__getitem__ = __getValMatrix  # very slow -- inline is better
pgcore.RMatrix.__setitem__ = __setVal
pgcore.CMatrix.__getitem__ = __getValMatrix  # very slow -- inline is better
pgcore.CMatrix.__setitem__ = __setVal
############################
# len(RVector), RMatrix
############################
_vecs = [pgcore.RVector,
         pgcore.BVector,
         pgcore.CVector,
         pgcore.IVector,
         pgcore.IndexArray]

# Give every vector type a numpy-like 1D interface (len, ndim, shape) and
# drop a spurious ``__call__`` some bindings expose.
for v in _vecs:
    v.ndim = 1
    v.__len__ = lambda self: self.size()
    v.shape = property(lambda self: (self.size(), None))

    # if hasattr(v, '__call__') and callable(getattr(v, '__call__')):
    try:
        del v.__call__
    except AttributeError:
        pass
# BUG FIX: ``np.float``, ``np.bool``, ``np.complex`` and ``np.long`` were
# deprecated in NumPy 1.20 and removed in 1.24. They were plain aliases of
# the Python builtins, so using the builtins keeps the values identical
# (``np.uint`` is a real numpy scalar type and remains valid).
pgcore.RVector.dtype = float
pgcore.BVector.dtype = bool
pgcore.CVector.dtype = complex
pgcore.IVector.dtype = int
pgcore.IndexArray.dtype = np.uint

pgcore.RVector3.dtype = float
pgcore.RVector3.__len__ = lambda self: 3
pgcore.RVector3.ndim = 1
pgcore.RVector3.shape = (3,)

pgcore.R3Vector.dtype = float
pgcore.R3Vector.__len__ = lambda self: self.size()
pgcore.R3Vector.ndim = 2
pgcore.R3Vector.shape = property(lambda self: (self.size(), 3))

# remove me
pgcore.stdVectorRVector3.ndim = 2
############################
# abs(RVector), RMatrix
############################
# ``abs()`` means magnitude: element-wise for real vectors, complex
# magnitude for CVector, Euclidean length per position for R3Vector.
pgcore.RVector.__abs__ = pgcore.fabs
pgcore.CVector.__abs__ = pgcore.mag
pgcore.R3Vector.__abs__ = pgcore.absR3

############################
# __hash__ settings
############################
# Expose the core hash functions so these objects are usable as dict keys.
pgcore.RVector.__hash__ = pgcore.RVector.hash
pgcore.CVector.__hash__ = pgcore.CVector.hash
pgcore.IVector.__hash__ = pgcore.IVector.hash
pgcore.IndexArray.__hash__ = pgcore.IndexArray.hash
pgcore.R3Vector.__hash__ = pgcore.R3Vector.hash
pgcore.RVector3.__hash__ = pgcore.RVector3.hash
pgcore.DataContainer.__hash__ = pgcore.DataContainer.hash
pgcore.DataContainerERT.__hash__ = pgcore.DataContainerERT.hash
pgcore.Mesh.__hash__ = pgcore.Mesh.hash

############################
# Iterator support for RVector allow to apply python build-ins
############################
class VectorIter:
    # Thin wrapper around the core C++ iterator exposed via beginPyIter().
    def __init__(self, vec):
        self.it = vec.beginPyIter()
        self.vec = vec

    def __iter__(self):
        return self

    # this is for python < 3
    def next(self):
        return self.it.nextForPy()

    # this is the same but for python > 3
    def __next__(self):
        return self.it.nextForPy()


def __VectorIterCall__(self):
    return VectorIter(self)
    # don't use pygimli iterators here until the reference for temporary
    # vectors are collected
    # return pgcore.RVectorIter(self.beginPyIter())

pgcore.RVector.__iter__ = __VectorIterCall__
pgcore.R3Vector.__iter__ = __VectorIterCall__
pgcore.BVector.__iter__ = __VectorIterCall__
pgcore.IVector.__iter__ = __VectorIterCall__
pgcore.IndexArray.__iter__ = __VectorIterCall__
pgcore.CVector.__iter__ = __VectorIterCall__
class DefaultContainerIter:
    """Index-based iterator for containers exposing ``len`` and ``[]``.

    Used for matrix-like types that lack a native C++ iterator.
    """

    def __init__(self, vec):
        self.vec = vec
        self.length = len(vec)
        self.pos = -1

    def __iter__(self):
        return self

    def __next__(self):
        # advance first, then test -- mirrors the historic behaviour
        self.pos += 1
        if self.pos == self.length:
            raise StopIteration()
        return self.vec[self.pos]

    # Python 2 spelling of the iterator protocol.
    next = __next__
def __MatIterCall__(self):
    # Factory installed as ``__iter__`` on the matrix classes below.
    return DefaultContainerIter(self)

pgcore.RMatrix.__iter__ = __MatIterCall__
pgcore.CMatrix.__iter__ = __MatIterCall__
class Vector3Iter:
    """Fixed-length iterator over the three components of an RVector3.

    Needed because RVector3 lacks the core ``.beginPyIter()`` helper.
    """

    def __init__(self, vec):
        self.vec = vec
        self.length = 3
        self.pos = -1

    def __iter__(self):
        return self

    def __next__(self):
        self.pos += 1
        if self.pos == self.length:
            raise StopIteration()
        return self.vec[self.pos]

    # Python 2 spelling of the iterator protocol.
    next = __next__
def __Vector3IterCall__(self):
    # Factory installed as ``RVector3.__iter__``.
    return Vector3Iter(self)

pgcore.RVector3.__iter__ = __Vector3IterCall__
# ######### c to python converter ######
# default converter from RVector3 to numpy array
def __RVector3ArrayCall__(self, dtype=None):
    # if idx:
    #     print(self)
    #     print(idx)
    #     raise Exception("we need to fix this")
    import numpy as np
    # fixed size 3: copy the components into a fresh numpy array
    return np.array([self.getVal(0), self.getVal(1), self.getVal(2)])


# default converter from RVector to numpy array
def __RVectorArrayCall__(self, dtype=None):
    # if idx and not isinstance(idx, numpy.dtype):
    #     print("self:", self)
    #     print("idx:", idx, type(idx))
    #     raise Exception("we need to fix this")
    # probably fixed!!!
    # import numpy as np
    # we need to copy the array until we can handle increasing the reference
    # counter in self.array() else it leads to strange behavior
    # test in testRValueConverter.py:testNumpyFromRVec()
    # return np.array(self.array())
    return self.array()


def __CVectorArrayCall__(self, dtype=None):
    # if idx and not isinstance(idx, numpy.dtype):
    #     print("self:", self)
    #     print("idx:", idx, type(idx))
    #     raise Exception("we need to fix this")
    # probably fixed!!! or not!!
    # import numpy as np
    # we need to copy the array until we can handle increasing the reference
    # counter in self.array() else it leads to strange behavior
    # test in testRValueConverter.py:testNumpyFromRVec()
    # return np.array(self.array())
    return self.array()


# default converter from RVector to numpy array
pgcore.RVector.__array__ = __RVectorArrayCall__
# not yet ready handmade_wrappers.py
pgcore.BVector.__array__ = __RVectorArrayCall__
# not yet ready handmade_wrappers.py
# pgcore.IndexArray.__array__ = __RVectorArrayCall__
pgcore.R3Vector.__array__ = __RVectorArrayCall__
pgcore.RVector3.__array__ = __RVector3ArrayCall__
# see bug description
pgcore.CVector.__array__ = __CVectorArrayCall__


# hackish until stdVectorRVector3 will be removed
def __stdVectorRVector3ArrayCall(self, dtype=None):
    # if idx is not None:
    #     print(self)
    #     print(idx)
    # convert through R3Vector, then reuse its numpy conversion
    return pgcore.stdVectorRVector3ToR3Vector(self).array()

pgcore.stdVectorRVector3.__array__ = __stdVectorRVector3ArrayCall
# pgcore.RVector3.__array__ = pgcore.RVector3.array
# del pgcore.RVector.__array__
##################################
# custom rvalues for special cases
##################################
def find(v):
    """Wrapper around pgcore.find.

    Iterable inputs carrying a ``dtype`` attribute (e.g. numpy boolean
    arrays) are converted to a pgcore.BVector before dispatching; all other
    inputs are passed through unchanged.
    """
    is_numpy_like = hasattr(v, 'dtype') and hasattr(v, '__iter__')
    if is_numpy_like:
        return pgcore.find(pgcore.BVector(v))
    return pgcore.find(v)
def pow(v, p):
    """Power function wrapper around pgcore.pow.

    A plain int exponent would be misinterpreted by the C++ overload
    resolution as ``pow(v, RVector(int))``, so integer exponents are
    promoted to float before delegating.
    """
    exponent = float(p) if isinstance(p, int) else p
    return pgcore.pow(v, exponent)
def __RVectorPower(self, m):
return pow(self, m)
pgcore.RVector.__pow__ = __RVectorPower
##################################
# useful aliases
##################################
Vector = pgcore.RVector
Inversion = pgcore.RInversion
Pos = pgcore.RVector3
PosVector = pgcore.R3Vector
PosList = PosVector
############################
# non-automatically exposed functions
############################
def abs(v):
    """Create abs in the sense of distance instead of just vanishing the sign.

    Create abs in the sense of distance instead of vanishing the sign. Used
    to calculate the length of coordinates, or anything that can be interpreted
    as coordinate.

    Args
    ----
    v: iterable of float, complex, or :gimliapi:`GIMLI::Pos`

    Returns
    -------
    length: iterable or scalar
        Array of lengths.

    Examples
    --------
    >>> import numpy as np
    >>> import pygimli as pg
    >>> pg.abs([1.0, 1.0, 1.0])
    1.7320508075688772
    >>> pg.abs(np.array([1.0, 1.0, 1.0]))
    1.7320508075688772
    >>> pg.abs(np.array([1.0, 1.0]))
    1.4142135623730951
    >>> pg.abs([[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]])
    2 [1.7320508075688772, 1.7320508075688772]
    >>> pg.abs(np.array([[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]]))
    2 [1.7320508075688772, 1.7320508075688772]
    >>> # Note, this will be interpreted as 3 2Dim Pos
    >>> pg.abs(np.array([[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]]).T)
    3 [1.4142135623730951, 1.4142135623730951, 1.4142135623730951]
    >>> pg.abs(pg.PosList([[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]]))
    2 [1.7320508075688772, 1.7320508075688772]
    """
    if isinstance(v, pgcore.CVector):
        # complex values: magnitude
        return pgcore.mag(v)
    elif isPos(v):
        return pgcore.RVector3(v).abs()
    elif isPosList(v):
        return pgcore.absR3(v)
    elif isinstance(v, list):
        # possible [x, y, [z]] or [pos, ...]
        try:
            return pgcore.RVector3(v).abs()
        # FIX: was a bare `except:` which also swallowed KeyboardInterrupt
        # and SystemExit; the deliberate best-effort fallback is kept.
        except Exception:
            return pgcore.absR3(np.array(v).T)
    elif isinstance(v, pgcore.R3Vector):
        return pgcore.absR3(v)
    elif isinstance(v, np.ndarray):
        if v.ndim == 1:
            return np.abs(v)
        if v.shape[0] == 2 or v.shape[0] == 3:
            # rows hold coordinates -> transpose into positions
            return pgcore.absR3(v.T)
        else:
            return pgcore.absR3(v)
    elif isinstance(v, pgcore.RMatrix):
        # FIX: the per-row loop that followed this raise was unreachable dead
        # code and has been removed; the matrix case is still unimplemented.
        raise BaseException("IMPLEMENTME")
    elif hasattr(v, 'vals'):
        return pg.abs(v.vals)
    elif hasattr(v, 'values'):
        return pg.abs(v.values)
    return pgcore.fabs(v)
# default BVector operator == (RVector, int) will be casted to
# BVector operator == (RVector, RVector(int)) and fails
# this needs a monkey patch for BVector operator == (RVector, int)
# keep a handle to the original __eq__ so the patch can delegate to it
pgcore.__EQ_RVector__ = pgcore.RVector.__eq__
def __EQ_RVector__(self, val):
    """Patched RVector.__eq__: promote int to float before comparing."""
    if isinstance(val, int):
        val = float(val)
    return pgcore.__EQ_RVector__(self, val)
pgcore.RVector.__eq__ = __EQ_RVector__
############################
# useful stuff
############################
def toIVector(v):
    """Deprecated: copy an iterable into a pgcore.IVector.

    Use numpy ndarrays directly instead; this shim will be removed.
    """
    print("do not use toIVector(v) use ndarray directly .. "
          "this method will be removed soon")
    result = pgcore.IVector(len(v), 0)
    for pos, value in enumerate(v):
        result[pos] = int(value)
    return result
#__catOrig__ = pgcore.cat
#def __cat__(v1, v2):
#print("mycat")
#if isinstance(v1, ndarray) and isinstance(v2, ndarray):
#return cat(RVector(v1), v2)
#else:
#return __catOrig__(v1, v2)
#pgcore.cat = __cat__
# DEPRECATED for backward compatibility should be removed
def asvector(array):
    """Deprecated: wrap *array* in a pgcore.RVector.

    Use numpy ndarrays directly instead; this shim will be removed.
    """
    message = ("do not use asvector(ndarray) use ndarray directly .. "
               "this method will be removed soon")
    print(message)
    return pgcore.RVector(array)
# ##########################
# We want ModellingBase with multi threading jacobian brute force
# ##########################
def __GLOBAL__response_mt_shm_(fop, model, shm, i):
resp = fop.response_mt(model, i)
for j in range(len(resp)):
shm[j] = resp[j]
def __ModellingBase__createJacobian_mt__(self, model, resp):
    """Brute-force Jacobian via multiprocessing.

    Each model parameter i is perturbed by 5 % (``fak``) and the forward
    response computed in a separate process; column i of the Jacobian is the
    finite difference (response(perturbed) - resp) / dModel[i].

    Parameters
    ----------
    model: iterable of float
        Current model vector.
    resp: iterable of float
        Forward response for the unperturbed model.
    """
    from math import ceil
    from multiprocessing import Process, Array
    import numpy as np
    nModel = len(model)
    nData = len(resp)  # NOTE(review): unused below; shm sizing uses len(resp)
    fak = 1.05  # relative perturbation factor (5 %)
    dModel = pgcore.RVector(len(model))
    nProcs = self.multiThreadJacobian()
    # NOTE(review): relies on a module-level `import sys` outside this view.
    if sys.platform == 'win32':
        # strange pickle problem: see python test_PhysicsManagers.py ves
        from .logger import warn
        warn('Multiprocess jacobian currently unavailable for win build')
        nProcs = 1
    if nProcs == 1:
        # serial fallback: use the core implementation directly
        self.createJacobian(model, resp)
        return
    shm = []  # one shared-memory result array per model parameter
    oldBertThread = self.threadCount()
    # avoid nested parallelism: one core thread per worker process
    self.setThreadCount(1)
    # print("Model/Data/nProcs", nModel, nData, nProcs, int(ceil(float(nModel)/nProcs)))
    for pCount in range(int(ceil(float(nModel) / nProcs))):
        procs = []
        #if self.verbose():
        # tic()
        if self.verbose():
            print("Jacobian MT:(", pCount * nProcs, "--",
                  (pCount + 1) * nProcs, ") /", nModel, '... ')
        for i in range(int(pCount * nProcs), int((pCount + 1) * nProcs)):
            if i < nModel:
                modelChange = pgcore.RVector(model)
                modelChange[i] *= fak
                dModel[i] = modelChange[i] - model[i]
                shm.append(Array('d', len(resp)))
                procs.append(
                    Process(target=__GLOBAL__response_mt_shm_,
                            args=(self, modelChange, shm[i], i)))
        # run this batch of workers and wait for completion
        for i, p in enumerate(procs):
            p.start()
        for i, p in enumerate(procs):
            p.join()
        # if self.verbose():
        # print(dur(), 's')
    self.setThreadCount(oldBertThread)
    for i in range(nModel):
        # finite-difference column: (perturbed response - resp) / step
        dData = np.array(shm[i]) - resp
        self._J.setCol(i, dData / dModel[i])
def __ModellingBase__responses_mt__(self, models, respos):
    """Compute forward responses for many models in parallel (in place).

    Parameters
    ----------
    models: 2D array-like (N, nModel)
        One model per row.
    respos: 2D array-like (N, nData)
        Output buffer; row i receives the response of models[i].
    """
    nModel = len(models)
    nProcs = self.multiThreadJacobian()
    if nProcs == 1:
        # serial path: evaluate each model directly
        for i, m in enumerate(models):
            respos[i] = self.response_mt(m, i)
        return
    from math import ceil
    from multiprocessing import Process, Array
    import numpy as np
    if models.ndim != 2:
        raise BaseException("models need to be a matrix(N, nModel):" +
                            str(models.shape))
    if respos.ndim != 2:
        raise BaseException("respos need to be a matrix(N, nData):" +
                            str(respos.shape))
    nData = len(respos[0])
    shm = []  # one shared-memory result array per model
    oldBertThread = self.threadCount()
    # avoid nested parallelism: one core thread per worker process
    self.setThreadCount(1)
    # print("*"*100)
    # print(nModel, nProcs)
    # print("*"*100)
    for pCount in range(int(ceil(nModel / nProcs))):
        procs = []
        if self.verbose():
            print(pCount * nProcs, "/", nModel)
        for i in range(int(pCount * nProcs), int((pCount + 1) * nProcs)):
            if i < nModel:
                shm.append(Array('d', nData))
                procs.append(
                    Process(target=__GLOBAL__response_mt_shm_,
                            args=(self, models[i], shm[i], i)))
        # run this batch of workers and wait for completion
        for i, p in enumerate(procs):
            p.start()
        for i, p in enumerate(procs):
            p.join()
    self.setThreadCount(oldBertThread)
    for i in range(nModel):
        # copy results back from shared memory into the caller's buffer
        resp = np.array(shm[i])
        respos[i] = resp
class ModellingBaseMT__(pgcore.ModellingBase):
    """ModellingBase subclass that owns its Jacobian matrix.

    Dispatches to the matching pgcore.ModellingBase constructor overload
    depending on which of mesh/dataContainer are given, then installs a
    Python-owned RMatrix as Jacobian so it stays alive with the instance.
    """
    def __init__(self, mesh=None, dataContainer=None, verbose=False):
        # NOTE(review): branch order matters here -- the first test relies on
        # the truthiness of both arguments, the second on the concrete type.
        if mesh and dataContainer:
            pgcore.ModellingBase.__init__(
                self, mesh=mesh, dataContainer=dataContainer, verbose=verbose)
        elif isinstance(mesh, pgcore.Mesh):
            pgcore.ModellingBase.__init__(self, mesh=mesh, verbose=verbose)
        elif dataContainer:
            pgcore.ModellingBase.__init__(self, dataContainer=dataContainer,
                                          verbose=verbose)
        else:
            pgcore.ModellingBase.__init__(self, verbose=verbose)
        # keep a reference so the Jacobian is not garbage collected
        self._J = pgcore.RMatrix()
        self.setJacobian(self._J)
# Attach the multiprocessing implementations and export the class under the
# historic name ModellingBase.
ModellingBaseMT__.createJacobian_mt = __ModellingBase__createJacobian_mt__
ModellingBaseMT__.responses = __ModellingBase__responses_mt__
ModellingBase = ModellingBaseMT__
###########################
# unsorted stuff
###########################
# DEPRECATED
# pgcore.interpolate = pgcore.interpolate_GILsave__
############################
# some backward compatibility
############################
def __getCoords(coord, dim, ent):
    """Dispatch helper returning the coordinate component(s) of *ent*.

    Parameters
    ----------
    coord: str
        Name of the pgcore extractor function ('x', 'y' or 'z').
    dim: int
        Coordinate index (0, 1 or 2) for entities addressed by index.
    ent: object
        Position-bearing entity: core vector types, DataContainer, Mesh,
        Node (or lists thereof), or a 2D array-like.

    Raises
    ------
    Exception
        If the entity type is not recognized.
    """
    if isinstance(ent, R3Vector) or isinstance(ent, stdVectorRVector3):
        return getattr(pgcore, coord)(ent)
    if isinstance(ent, list) and isinstance(ent[0], RVector3):
        return getattr(pgcore, coord)(ent)
    if isinstance(ent, list) and isPos(ent[0]):
        return getattr(pgcore, coord)(ent)
    if isinstance(ent, DataContainer):
        return getattr(pgcore, coord)(ent.sensorPositions())
    if isinstance(ent, Mesh):
        return getattr(pgcore, coord)(ent.positions())
    if isinstance(ent, pgcore.stdVectorNodes):
        return np.array([n.pos()[dim] for n in ent])
    if isinstance(ent, pgcore.Node):
        return ent.pos()[dim]
    if isinstance(ent, pgcore.RVector3):
        return ent[dim]
    if isinstance(ent, list) and isinstance(ent[0], pgcore.Node):
        return [n.pos()[dim] for n in ent]
    # BUG FIX: was `len(ent[0] > dim)` -- the length of a boolean comparison
    # result, which is truthy for any non-empty row. The intent is to check
    # that the rows have enough columns for the requested dimension.
    if hasattr(ent, 'ndim') and ent.ndim == 2 and len(ent[0]) > dim:
        return ent[:, dim]
    # use logger here
    raise Exception(
        "Don't know how to find the " + coord + "-coordinates of entity:", ent)
def x(instance):
    """Syntactic sugar to find all x-coordinates of a given class instance.

    Convenience function to return all associated x-coordinates
    of a given class instance.

    Parameters
    ----------
    instance : DataContainer, Mesh, R3Vector, np.array, list(RVector3)
        Return the associated coordinate positions for the given class instance.

    Returns
    -------
    iterable or scalar
        The x-component(s) of the instance's position(s).

    Examples
    --------
    >>> import numpy as np
    >>> import pygimli as pg
    >>> pg.x([[1.0, 1.0, 1.0]])
    1 [1.0]
    >>> pg.x([[0, 0], [1, 0]])
    2 [0.0, 1.0]
    """
    return __getCoords('x', 0, instance)
def y(instance):
    """Syntactic sugar to find all y-coordinates of a given class instance.

    Convenience function to return all associated y-coordinates
    of a given class instance.

    Parameters
    ----------
    instance : DataContainer, Mesh, R3Vector, np.array, list(RVector3)
        Return the associated coordinate positions for the given class instance.

    Returns
    -------
    iterable or scalar
        The y-component(s) of the instance's position(s).
    """
    return __getCoords('y', 1, instance)
def z(instance):
    """Syntactic sugar to find all z-coordinates of a given class instance.

    Convenience function to return all associated z-coordinates
    of a given class instance.

    Parameters
    ----------
    instance : DataContainer, Mesh, R3Vector, np.array, list(RVector3)
        Return the associated coordinate positions for the given class instance.

    Returns
    -------
    iterable or scalar
        The z-component(s) of the instance's position(s).
    """
    return __getCoords('z', 2, instance)
def search(what):
    """Utility function to search docstrings for string `what`.

    Parameters
    ----------
    what: str
        Search term looked up in the pygimli docstrings.
    """
    try:
        np.lookfor(what, module="pygimli", import_modules=False)
    except AttributeError:
        # np.lookfor was removed in NumPy 2.0; degrade gracefully.
        print("numpy.lookfor is not available in this numpy version; "
              "please search the pygimli API documentation directly.")
from .base import (isScalar, isArray, isPos, isR3Array,
isPosList, isComplex, isMatrix)
# Import from submodules at the end
from .mesh import Mesh, MeshEntity, Node
from .datacontainer import DataContainer, DataContainerERT
from .trans import * # why do we need that?
# from .matrix import (Cm05Matrix, LMultRMatrix, LRMultRMatrix, MultLeftMatrix,
# MultLeftRightMatrix, MultRightMatrix, RMultRMatrix)
from .matrix import (BlockMatrix, SparseMatrix, SparseMapMatrix, IdentityMatrix,
Matrix)
|
JuliusHen/gimli | pygimli/physics/traveltime/tt.py | <filename>pygimli/physics/traveltime/tt.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Plotting functions for traveltime."""
# general purpose
import pygimli as pg
from .TravelTimeManager import TravelTimeManager
def simulate(mesh, scheme, slowness=None, vel=None, **kwargs):
    """Simulate traveltime data."""
    mgr = TravelTimeManager()
    return mgr.simulate(mesh, scheme, slowness=slowness, vel=vel,
                        **kwargs)
simulate.__doc__ = TravelTimeManager.__doc__
class DataContainerTT(pg.DataContainer):
    """Data Container for traveltime."""

    def __init__(self, data=None, **kwargs):
        """Initialize empty data container, load or copy existing."""
        if isinstance(data, pg.DataContainer):
            # copy-construct from an existing container
            super().__init__(data, **kwargs)
            for token in ("s", "g"):
                self.registerSensorIndex(token)
            self.setSensorIndexOnFileFromOne(True)
        else:
            super().__init__(**kwargs)
            for token in ("s", "g"):
                self.registerSensorIndex(token)
            if isinstance(data, str):
                # treat a string argument as a file name to load
                self.load(data)
|
JuliusHen/gimli | pygimli/viewer/mpl/matrixview.py | <reponame>JuliusHen/gimli
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functions to draw various pygimli matrices with matplotlib."""
import numpy as np
import matplotlib.pyplot as plt
import pygimli as pg
def drawSparseMatrix(ax, mat, **kwargs):
    """Draw a view of a matrix into the axes.

    Parameters
    ----------
    ax : mpl axis instance, optional
        Axis instance where the matrix will be plotted.
    mat: pg.matrix.SparseMatrix or pg.matrix.SparseMapMatrix

    Returns
    -------
    mpl.lines.line2d

    Examples
    --------
    >>> import numpy as np
    >>> import pygimli as pg
    >>> from pygimli.viewer.mpl import drawSparseMatrix
    >>> A = pg.randn((10,10), seed=0)
    >>> SM = pg.core.SparseMapMatrix()
    >>> for i in range(10):
    ...     SM.setVal(i, i, 5.0)
    >>> fig, (ax1, ax2) = pg.plt.subplots(1, 2, sharey=True, sharex=True)
    >>> _ = drawSparseMatrix(ax1, A, colOffset=5, rowOffset=5, color='blue')
    >>> _ = drawSparseMatrix(ax2, SM, color='green')
    """
    rowOffset = kwargs.pop('rowOffset', 0)
    colOffset = kwargs.pop('colOffset', 0)
    color = kwargs.pop('color', None)
    # convert any supported sparse type to scipy COO and shift in place
    coo = pg.utils.sparseMatrix2coo(mat)
    coo.row += rowOffset
    coo.col += colOffset
    gci = ax.spy(coo, color=color)
    ax.autoscale(enable=True, axis='both', tight=True)
    return gci
def drawBlockMatrix(ax, mat, **kwargs):
    """Draw a view of a matrix into the axes.

    Arguments
    ---------
    ax : mpl axis instance, optional
        Axis instance where the matrix will be plotted.
    mat: pg.Matrix.BlockMatrix

    Keyword Arguments
    -----------------
    spy: bool [False]
        Draw all matrix entries instead of colored blocks

    Returns
    -------
    ax:

    Examples
    --------
    >>> import numpy as np
    >>> import pygimli as pg
    >>> I = pg.matrix.IdentityMatrix(10)
    >>> SM = pg.matrix.SparseMapMatrix()
    >>> for i in range(10):
    ...     SM.setVal(i, 10 - i, 5.0)
    ...     SM.setVal(i, i, 5.0)
    >>> B = pg.matrix.BlockMatrix()
    >>> B.add(I, 0, 0)
    0
    >>> B.add(SM, 10, 10)
    1
    >>> print(B)
    pg.matrix.BlockMatrix of size 20 x 21 consisting of 2 submatrices.
    >>> fig, (ax1, ax2) = pg.plt.subplots(1, 2, sharey=True)
    >>> _ = pg.show(B, ax=ax1)
    >>> _ = pg.show(B, spy=True, ax=ax2)
    """
    if kwargs.pop('spy', False):
        gci = []
        ids = pg.unique([e.matrixID for e in mat.entries()])
        # FIX: plt.cm.get_cmap was deprecated in matplotlib 3.7 and removed
        # in 3.9; pyplot.get_cmap is the supported spelling, fall back for
        # very old matplotlib versions.
        try:
            cMap = pg.plt.get_cmap("Set3", len(ids))
        except AttributeError:
            cMap = pg.plt.cm.get_cmap("Set3", len(ids))
        for e in mat.entries():
            mid = e.matrixID
            mati = mat.mat(mid)
            if isinstance(mati, pg.core.IdentityMatrix):
                # IdentityMatrix has no sparse view; expand to a dense eye
                mati = np.eye(mati.size())
            gci.append(drawSparseMatrix(ax, mati,
                                        rowOffset=e.rowStart,
                                        colOffset=e.colStart,
                                        color=cMap(mid)))
        return gci, None
    else:
        plcs = []
        for e in mat.entries():
            mid = e.matrixID
            # shrink blocks slightly so adjacent non-matrix regions are not
            # connected in the plot
            widthy = mat.mat(mid).rows() - 0.1
            widthx = mat.mat(mid).cols() - 0.1
            plc = pg.meshtools.createRectangle(
                [e.colStart, e.rowStart],
                [e.colStart + widthx, e.rowStart + widthy],
                marker=mid)
            plcs.append(plc)
        bm = pg.meshtools.mergePLC(plcs)
        gci, cBar = pg.viewer.mpl.drawPLC(ax, bm, fitView=False)
        ax.invert_yaxis()
        ax.xaxis.tick_top()
        cBar.set_label("Matrix ID")
        if len(mat.entries()) > 10:
            gci.set_cmap("viridis")
        return gci, cBar
|
JuliusHen/gimli | pygimli/physics/ert/ert.py | <filename>pygimli/physics/ert/ert.py<gh_stars>100-1000
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Electrical resistivity tomography"""
import numpy as np
import pygimli as pg
from .ertModelling import ERTModelling
from .ertScheme import createData
createERTData = createData # backward compatibility
def simulate(mesh, scheme, res, **kwargs):
    """Simulate an ERT measurement.

    Perform the forward task for a given mesh, resistivity distribution &
    measuring scheme and return data (apparent resistivity) or potentials.

    For complex resistivity, the apparent resistivities is complex as well.

    The forward operator itself only calculates potential values for the
    electrodes in the given data scheme.
    To calculate apparent resistivities, geometric factors (k) are needed.
    If there are no values k in the DataContainerERT scheme, the function
    tries to calculate them, either analytically or numerically by using a
    p2-refined version of the given mesh.

    TODO
    ----
    * 2D + Complex + SR

    Args
    ----
    mesh : :gimliapi:`GIMLI::Mesh`
        2D or 3D Mesh to calculate for.
    res : float, array(mesh.cellCount()) | array(N, mesh.cellCount()) |
        list
        Resistivity distribution for the given mesh cells can be:
        . float for homogeneous resistivity (e.g. 1.0)
        . single array of length mesh.cellCount()
        . matrix of N resistivity distributions of length mesh.cellCount()
        . resistivity map as [[regionMarker0, res0],
                              [regionMarker0, res1], ...]
    scheme : :gimliapi:`GIMLI::DataContainerERT`
        Data measurement scheme.

    Keyword Args
    ------------
    verbose: bool[False]
        Be verbose. Will override class settings.
    calcOnly: bool [False]
        Use fop.calculate instead of fop.response. Useful if you want
        to force the calculation of impedances for homogeneous models.
        No noise handling. Solution is put as token 'u' in the returned
        DataContainerERT.
    noiseLevel: float [0.0]
        add normally distributed noise based on
        scheme['err'] or on noiseLevel if error>0 is not contained
    noiseAbs: float [0.0]
        Absolute voltage error in V
    returnArray: bool [False]
        Returns an array of apparent resistivities instead of
        a DataContainerERT
    returnFields: bool [False]
        Returns a matrix of all potential values (per mesh nodes)
        for each injection electrodes.

    Returns
    -------
    DataContainerERT | array(data.size()) | array(N, data.size()) |
    array(N, mesh.nodeCount()):
        Data container with resulting apparent resistivity data and
        errors (if noiseLevel or noiseAbs is set).
        Optional returns a Matrix of rhoa values
        (for returnArray==True forces noiseLevel=0).
        In case of a complex valued resistivity model, phase values are
        returned in the DataContainerERT (see example below), or as an
        additionally returned array.

    Examples
    --------
    # >>> from pygimli.physics import ert
    # >>> import pygimli as pg
    # >>> import pygimli.meshtools as mt
    # >>> world = mt.createWorld(start=[-50, 0], end=[50, -50],
    # ...                        layers=[-1, -5], worldMarker=True)
    # >>> scheme = ert.createData(
    # ...                     elecs=pg.utils.grange(start=-10, end=10, n=21),
    # ...                     schemeName='dd')
    # >>> for pos in scheme.sensorPositions():
    # ...     _= world.createNode(pos)
    # ...     _= world.createNode(pos + [0.0, -0.1])
    # >>> mesh = mt.createMesh(world, quality=34)
    # >>> rhomap = [
    # ...    [1, 100. + 0j],
    # ...    [2, 50. + 0j],
    # ...    [3, 10.+ 0j],
    # ... ]
    # >>> ert = pg.ERTManager()
    # >>> data = ert.simulate(mesh, res=rhomap, scheme=scheme, verbose=True)
    # >>> rhoa = data.get('rhoa').array()
    # >>> phia = data.get('phia').array()
    """
    # NOTE(review): default True here although the docstring says [False];
    # kept as-is since callers may rely on the verbose default.
    verbose = kwargs.pop('verbose', True)
    calcOnly = kwargs.pop('calcOnly', False)
    returnFields = kwargs.pop("returnFields", False)
    returnArray = kwargs.pop('returnArray', False)
    noiseLevel = kwargs.pop('noiseLevel', 0.0)
    noiseAbs = kwargs.pop('noiseAbs', 1e-4)
    seed = kwargs.pop('seed', None)
    sr = kwargs.pop('sr', True)  # self.sr)

    # segfaults with self.fop (test & fix)
    fop = ERTModelling(sr=sr, verbose=verbose)
    fop.data = scheme
    fop.setMesh(mesh, ignoreRegionManager=True)

    rhoa = None
    phia = None
    isArrayData = False

    # parse the given res into a mesh-cell-sized array
    if isinstance(res, (int, float)):
        res = np.ones(mesh.cellCount()) * float(res)
    elif isinstance(res, complex):
        res = np.ones(mesh.cellCount()) * res
    elif hasattr(res[0], '__iter__'):  # ndim == 2
        if len(res[0]) == 2:  # res seems to be a res map
            # check if there are markers in the mesh that are not defined in
            # the rhomap. better signal here before it results in errors
            meshMarkers = list(set(mesh.cellMarkers()))
            mapMarkers = [m[0] for m in res]
            if any([mark not in mapMarkers for mark in meshMarkers]):
                left = [m for m in meshMarkers if m not in mapMarkers]
                pg.critical("Mesh contains markers without assigned "
                            "resistivities {}. Please fix given "
                            "rhomap.".format(left))
            res = pg.solver.parseArgToArray(res, mesh.cellCount(), mesh)
        else:  # probably nData x nCells array
            # better check for array data here
            isArrayData = True

    # BUG FIX: np.complex (an alias of the builtin `complex`) was removed in
    # numpy 1.24/2.0; use the builtin type the alias pointed to.
    if isinstance(res[0], complex) or isinstance(res, pg.CVector):
        pg.info("Complex resistivity values found.")
        fop.setComplex(True)
    else:
        fop.setComplex(False)

    if not scheme.allNonZero('k') and not calcOnly:
        if verbose:
            pg.info('Calculate geometric factors.')
        scheme.set('k', fop.calcGeometricFactor(scheme))

    ret = pg.DataContainerERT(scheme)
    # just to be sure that we don't work with artifacts
    ret['u'] *= 0.0
    ret['i'] *= 0.0
    ret['r'] *= 0.0

    if isArrayData:
        # N resistivity distributions -> N rows of apparent resistivities
        rhoa = np.zeros((len(res), scheme.size()))
        for i, r in enumerate(res):
            rhoa[i] = fop.response(r)
            if verbose:
                print(i, "/", len(res), " : ", pg.dur(), "s",
                      "min r:", min(r), "max r:", max(r),
                      "min r_a:", min(rhoa[i]), "max r_a:", max(rhoa[i]))
    else:  # res is single resistivity array
        if len(res) == mesh.cellCount():
            if calcOnly:
                fop.mapERTModel(res, 0)
                dMap = pg.core.DataMap()
                fop.calculate(dMap)
                if fop.complex():
                    pg.critical('Implement me')
                else:
                    ret["u"] = dMap.data(scheme)
                    ret["i"] = np.ones(ret.size())
                if returnFields:
                    return pg.Matrix(fop.solution())
                return ret
            else:
                if fop.complex():
                    res = pg.utils.squeezeComplex(res)
                resp = fop.response(res)
                if fop.complex():
                    rhoa, phia = pg.utils.toPolar(resp)
                else:
                    rhoa = resp
        else:
            print(mesh)
            print("res: ", res)
            raise BaseException(
                "Simulate called with wrong resistivity array.")

    if not isArrayData:
        ret['rhoa'] = rhoa
        if phia is not None:
            ret.set('phia', phia)
    else:
        ret.set('rhoa', rhoa[0])
        if phia is not None:
            ret.set('phia', phia[0])

    if returnFields:
        return pg.Matrix(fop.solution())

    if noiseLevel > 0:  # if errors in data noiseLevel=1 just triggers
        if not ret.allNonZero('err'):
            # 1A and #100µV
            ret.set('err', estimateError(ret,
                                         relativeError=noiseLevel,
                                         absoluteUError=noiseAbs,
                                         absoluteCurrent=1))
            print("Data error estimate (min:max) ",
                  min(ret('err')), ":", max(ret('err')))

        rhoa *= 1. + pg.randn(ret.size(), seed=seed) * ret('err')
        ret.set('rhoa', rhoa)

        ipError = None
        if phia is not None:
            if scheme.allNonZero('iperr'):
                ipError = scheme('iperr')
            else:
                # np.abs(self.data("phia") +TOLERANCE) * 1e-4absoluteError
                if noiseLevel > 0.5:
                    noiseLevel /= 100.
                if 'phiErr' in kwargs:
                    ipError = np.ones(ret.size()) * kwargs.pop('phiErr') / 1000
                else:
                    ipError = abs(ret["phia"]) * noiseLevel
                if verbose:
                    print("Data IP abs error estimate (min:max) ",
                          min(ipError), ":", max(ipError))
            phia += pg.randn(ret.size(), seed=seed) * ipError
            ret['iperr'] = ipError
            ret['phia'] = phia

    # check what needs to be setup and returned
    if returnArray:
        if phia is not None:
            return rhoa, phia
        else:
            return rhoa

    return ret
def simulateOld(mesh, scheme, res, sr=True, useBert=True,
                verbose=False, **kwargs):
    """ERT forward calculation.

    Convenience function to use the ERT modelling operator
    if you like static functions.

    See :py:mod:`pygimli.ert.ERTManager.simulate` for description
    of the arguments.

    Parameters
    ----------
    mesh: :gimliapi:`GIMLI::Mesh` | str
        Modelling domain. Mesh can be a file name here.
    scheme: :gimliapi:`GIMLI::DataContainerERT` | str
        Data configuration. Scheme can be a file name here.
    res: see :py:mod:`pygimli.ert.ERTManager.simulate`
        Resistivity distribution.
    sr: bool [True]
        Use singularity removal technique.
    useBert: bool [True]
        Use Bert forward operator instead of the reference implementation.
    **kwargs:
        Forwarded to :py:mod:`pygimli.ert.ERTManager.simulate`
    """
    from .ertManager import ERTManager
    mgr = ERTManager(useBert=useBert, sr=sr, verbose=verbose)
    # allow file names for both mesh and scheme
    if isinstance(mesh, str):
        mesh = pg.load(mesh)
    if isinstance(scheme, str):
        scheme = pg.physics.ert.load(scheme)
    return mgr.simulate(mesh=mesh, res=res, scheme=scheme,
                        verbose=verbose, **kwargs)
@pg.cache
def createGeometricFactors(scheme, numerical=None, mesh=None, dim=3,
                           h2=True, p2=True, verbose=False):
    """Create geometric factors for a given data scheme.

    Create geometric factors for a data scheme with and without topography.
    Calculation will be done analytical (only for half space geometry)
    or numerical.

    This function caches the result depending on scheme, mesh and pg.version()

    Parameters
    ----------
    scheme: :gimliapi:`GIMLI::DataContainerERT`
        Datacontainer of the scheme.
    numerical: bool | None [False]
        If numerical is None, False is assumed, we try to guess topography
        and warn if we think we found them.
        If set to True or False, numerical calculation will used or not.
    mesh: :gimliapi:`GIMLI::Mesh` | str
        Mesh for numerical calculation. If not given, analytical geometric
        factors for halfspace earth are guessed or a default mesh will be
        created (and h/p refined according to h2/p2). If given topo is set to
        True. If the numerical effort is to high or the accuracy to low
        you should consider calculating the factors manually.
    h2: bool [True]
        Default spatial refinement to achieve high accuracy
    p2: bool [True]
        Default polynomial refinement to achieve high accuracy
    verbose: bool
        Give some output.
    """
    if numerical is None:
        numerical = False
        # heuristic: differing sensor z-coordinates suggest topography
        if min(pg.z(scheme)) != max(pg.z(scheme)):
            verbose = True
            pg.warn('Sensor z-coordinates not equal. Is there topography?')

    if numerical is False and mesh is None:
        if verbose:
            pg.info('Calculate analytical flat earth geometric factors.')
        return pg.core.geometricFactors(scheme, forceFlatEarth=True, dim=dim)

    if mesh is None:
        mesh = createInversionMesh(scheme)

    if verbose:
        pg.info('mesh', mesh)

    # BUG FIX: `m` was previously only assigned inside the h2 branch, so
    # h2=False with p2=True raised NameError. Start from the base mesh.
    m = mesh
    if h2 is True:
        m = mesh.createH2()
        if verbose:
            pg.info('h2 refine', m)

    if p2 is True:
        m = m.createP2()
        if verbose:
            pg.info('p2 refine', m)

    if verbose:
        pg.info('Calculate numerical geometric factors.')
    # unit resistivity forward run; k = 1/u for unit current
    d = simulate(m, res=1.0, scheme=scheme, sr=False, useBert=True,
                 calcOnly=True, verbose=True)
    return 1./d['u']
def createInversionMesh(data, **kwargs):
    """Create default mesh for ERT inversion.

    Parameters
    ----------
    data: :gimliapi:`GIMLI::DataContainerERT`
        Data Container needs at least sensors to define the geometry of the
        mesh.

    Other Parameters
    ----------------
    Forwarded to :py:mod:`pygimli.meshtools.createParaMesh`

    Returns
    -------
    mesh: :gimliapi:`GIMLI::Mesh`
        Inversion mesh with default marker (1 for background,
        2 parametric domain)
    """
    return pg.meshtools.createParaMesh(data.sensors(), **kwargs)
def createERTDataNotUsedAnymore(elecs, schemeName='none', **kwargs):
    """Create data scheme for compatibility (advanced version in BERT).

    Only the dipole-dipole ('dd') scheme is built locally; any other scheme
    name is forwarded to createData.

    Parameters
    ----------
    elecs: array-like
        Electrode positions.
    schemeName: str ['none']
        Scheme identifier; only 'dd' is handled here.
    sounding : bool [False]
        Create a 1D VES Schlumberger configuration.
        elecs need to be an array with elecs[0] = mn/2 and elecs[1:] = ab/2.
    closed: bool [False]
        Wrap electrode indices around (closed layout) for the 'dd' scheme.
    """
    if kwargs.pop('sounding', False):
        data = pg.DataContainerERT()
        # mirror the electrodes to get symmetric AB around the MN midpoint
        data.setSensors(pg.cat(-elecs[::-1], elecs))
        nElecs = len(elecs)
        for i in range(nElecs-1):
            data.createFourPointData(i, i, 2*nElecs-i-1, nElecs-1, nElecs)
        return data
    if schemeName != "dd":
        return createData(elecs, schemeName, **kwargs)
    isClosed = kwargs.pop('closed', False)
    data = pg.DataContainerERT()
    data.setSensors(elecs)
    nElecs = len(elecs)
    a = []
    b = []
    m = []
    n = []
    eb = 0
    # NOTE(review): the inner range start uses `eb` carried over from the
    # previous iteration rather than being derived from `i` -- looks like it
    # skips already-built dipoles, but verify against the BERT reference
    # before touching this deprecated function.
    for i in range(nElecs):
        for j in range(eb + 2, nElecs):
            ea = i
            eb = ea + 1
            em = j
            en = em + 1
            if isClosed:
                en = en % nElecs
            if en < nElecs and en != ea:
                a.append(ea)
                b.append(eb)
                m.append(em)
                n.append(en)
    data.resize(len(a))
    data.add('a', a)
    data.add('b', b)
    data.add('m', m)
    data.add('n', n)
    data.set('valid', np.ones(len(a)))
    return data
def estimateError(data, absoluteError=0.001, relativeError=0.03,
                  absoluteUError=None, absoluteCurrent=0.1):
    """Estimate error composed of an absolute and a relative part.

    Parameters
    ----------
    data: DataContainerERT
        Data values; needs 'rhoa' (and optionally 'u', 'i', 'r', 'k').
    absoluteError : float [0.001]
        Absolute data error in Ohm m. Need 'rhoa' values in data.
    relativeError : float [0.03]
        relative error level in %/100. Values >= 0.5 are interpreted as
        percentages and divided by 100.
    absoluteUError : float [None]
        Absolute potential error in V. Need 'u' values in data. Or
        calculate them from 'rhoa', 'k' and absoluteCurrent if no 'i'
        is given
    absoluteCurrent : float [0.1]
        Current level in A for reconstruction for absolute potential V

    Returns
    -------
    error : Array
        Estimated relative error per datum.
    """
    if relativeError >= 0.5:
        print("relativeError set to a value > 0.5 .. assuming this "
              "is a percentage Error level dividing them by 100")
        relativeError /= 100.0
    if absoluteUError is None:
        # resistivity-based estimate: relative part + abs error in Ohm m
        if not data.allNonZero('rhoa'):
            pg.critical("We need apparent resistivity values "
                        "(rhoa) in the data to estimate a "
                        "data error.")
        error = relativeError + pg.abs(absoluteError / data('rhoa'))
    else:
        # potential-based estimate: reconstruct u if not stored directly
        u = None
        i = absoluteCurrent
        if data.haveData("i"):
            i = data('i')
        if data.haveData("u"):
            u = data('u')
        else:
            if data.haveData("r"):
                u = data('r') * i
            elif data.haveData("rhoa"):
                if data.haveData("k"):
                    u = data('rhoa') / data('k') * i
                else:
                    pg.critical("We need (rhoa) and (k) in the"
                                "data to estimate data error.")
            else:
                pg.critical("We need apparent resistivity values "
                            "(rhoa) or impedances (r) "
                            "in the data to estimate data error.")
        error = pg.abs(absoluteUError / u) + relativeError
    return error
if __name__ == "__main__":
pass
|
JuliusHen/gimli | pygimli/solver/__init__.py | <filename>pygimli/solver/__init__.py
# -*- coding: utf-8 -*-
"""General physics independent solver interface."""
from .utils import (anisotropyMatrix, constitutiveMatrix,
createAnisotropyMatrix, createConstitutiveMatrix)
from .green import greenDiffusion1D
from .solver import *
from .solver import cellValues
from .solverFiniteVolume import *
__all__ = []
class WorkSpace:
    """Plain empty namespace container.

    NOTE(review): presumably used as a mutable scratch object by solver
    routines to stash state -- confirm against callers before extending.
    """
    pass
|
JuliusHen/gimli | doc/examples/3_dc_and_ip/plot_modTank3d.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
3D modeling in a closed geometry
================================
This is a synthetic model of an experimental tank with a highly heterogeneous
resistivity, motivated by the BAM Berlin.
Geometry: 0.99m x 0.5m x 1.0m
Data: 48 Electrodes and 588 Measurements defined in modeltank.shm
Each 24 electrodes are located at two opposite sides of the tank.
We use the pygimli meshtools to create a PLC of the tank and an inhomogeneity.
The needed mesh is created by calling tetgen.
"""
import numpy as np
import pygimli as pg
import pygimli.meshtools as mt
from pygimli.physics import ert
###############################################################################
# In contrast to field measurements, experimental tanks have well-defined
# spatial dimensions and need different boundary conditions (BC).
#
# As there is no current flow through the tanks boundary at all, homogeneous
# (Neumann) BC are defined for the whole boundary.
# Neumann BC are natural (intrinsic) for the finite element simulations.
# \link{tutorial:fem:bc}, so we just need to define a cube geometry including
# region markers.
plc = mt.createCube(size=[0.99, 0.5, 1.0], pos=[0.495, 0.25], boundaryMarker=1)
###############################################################################
# We first read the measuring scheme file and add the electrodes as nodes with
# the marker -99 to the geometry.
filename = pg.getExampleFile("ert/modeltank.shm")
shm = pg.DataContainerERT(filename)
for s in shm.sensors():
plc.createNode(s, marker=-99)
###############################################################################
# There are two small problems to overcome for simulating Neumann bodies.
#
# First, we always need dipole current injection since there can be no current
# flow out of the closed boundaries of our experimental tank.
# (Note that by default single poles are simulated and superpositioned.)
# Therefore we define a reference electrode position inside the PLC, with a
# marker -999, somewhere away from the electrodes.
plc.createNode([0.5, 0.5, -0.5], marker=-999)
###############################################################################
# The second problem for pure Neumann domains is the non-uniqueness of
# the partial differential equation (there are only partial derivatives of the
# electric potential so an arbitrary value might be added, i.e. calibrated).
#
# Therefore we add calibration node with marker -1000 where the potential is
# fixed , somewhere on the boundary and far from the electrodes.
plc.createNode([0.75, 0.25, 0.5], marker=-1000)
###############################################################################
# For sufficient numerical accuracy it is generally a good idea to refine the
# mesh in the vicinity of the electrodes positions.
# We force the local mesh refinement by an additional node at 1/2 mm
# distance in -z-direction.
for s in plc.positions(pg.find(plc.nodeMarkers() == -99)):
plc.createNode(s - [0.0, 0.0, 1e-3/2])
# Also refine the reference node
plc.createNode([0.5, 0.5, -0.5 - 1e-3/2])
###############################################################################
# Create the tetrahedron mesh (calling the tetgen mesh generator)
mesh = mt.createMesh(plc)
###############################################################################
# First we want to simulate our ERT response for a homogeneous resistivity
# of 1 :math:`\Omega`m. Usually, simulate will calculate apparent resistivities
# (rhoa) and put them into the returned DataContainerERT.
# However, for the calculation of rhoa, geometric factors (k) are expected in
# the data container.
# If simulate does not find any k in the scheme file, it tries to determine
# them itself, either analytically (halfspace) or numerically (topography).
# Note that the automatic numerical calculating can only be a fallback mode.
# You usually want to keep full control about this calculation because you want
# the accuracy as high as possible by providing a special mesh with
# higher quality, lower max cell area, finer local mesh refinement,
# or quadratic base functions (p2).
#
# We don't want the automatic k generation here because we want also to
# demonstrate how you can solve this task yourself.
# The argument 'calcOnly=True' omits the check for valid k factors
# and will add the simulated voltages (u) in the returned DataContainerERT.
# The simulation current (i) is 1 A by default. In addition, the flag
# 'sr=False' omits the singularity removal technique (default) which is not
# applicable in the absence of an (analytic) primary potential.
hom = ert.simulate(mesh, res=1.0, scheme=shm, sr=False,
calcOnly=True, verbose=True)
hom.save('homogeneous.ohm', 'a b m n u')
###############################################################################
# We now create an inhomogeneity (cube) and merge it with the above PLC.
# marker=2 ensures all cells of the cube being associated with a cell marker 2.
cube = mt.createCube(size=[0.3, 0.2, 0.8], pos=[0.7, 0.2], marker=2)
plc += cube
mesh = mt.createMesh(plc)
print(mesh)
###############################################################################
# Until we find a way (TODO) showing 3D structures in a 2D documentation,
# it is advisable to control the geometry in a 3D viewer. We recommend Paraview
# https://www.paraview.org/ and will export the geometry and in the
# vtk file format.
# plc.exportVTK('plc')
# mesh.exportVTK('mesh')
###############################################################################
# Now that we have a mesh with different regions and cell markers, we can define
# a relationship between region marker and resistivity by defining an appropriate
# list. We also set sr=False here because singularity removal would need highly
# accurate primary potentials which can also only be calculated numerically.
res = [[1, 10.0], [2, 100.0]] # map markers 1 and 2 to 10 and 100 Ohmm, resp.
het = ert.simulate(mesh, res=res, scheme=shm, sr=False,
calcOnly=True, verbose=True)
pg.show(mesh, notebook=True)
###############################################################################
# The apparent resistivity for a homogeneous model of 1 Ohmm should be 1 Ohmm.
# Therefore we can take the inverse of the modeled resistances for the
# homogeneous model and use it as geometric factors to find the apparent
# resistivities for the inhomogeneous model.
het.set('k', 1.0/ (hom('u') / hom('i')))
het.set('rhoa', het('k') * het('u') / het('i'))
het.save('simulated.dat', 'a b m n rhoa k u i')
np.testing.assert_approx_equal(het('rhoa')[0], 9.95, 2)
# np.testing.assert_approx_equal(het('k')[0], 0.820615269548)
pg.wait()
###############################################################################
# For this kind of simulation, the homogeneous part should be highly accurate
# because it is usually needed once after storing the geometric factors.
# Note, do not forget to add some noise if you plan to invert such simulated
# data.
#
# see also:
#
# TODO
# * inversion example
#
# checks:
# TODO:
# * any idea for a Figure here?
# * Alternatively, we can create a real cavity by changing the marker
# in isHole flag for createCube (check)
|
JuliusHen/gimli | pygimli/physics/ert/ves.py | # -*- coding: utf-8 -*-
"""
Vertical electrical sounding (VES) manager class.
"""
import numpy as np
import pygimli as pg
# from pygimli.frameworks import Modelling, Block1DModelling
from pygimli.frameworks import Block1DModelling, MethodManager1d
class VESModelling(Block1DModelling):
"""Vertical Electrical Sounding (VES) forward operator.
Attributes
----------
am :
Part of data basis. Distances between A and M electrodes.
A is first power, M is first potential electrode.
bm :
Part of data basis. Distances between B and M electrodes.
B is second power, M is first potential electrode.
an :
Part of data basis. Distances between A and N electrodes.
A is first power, N is second potential electrode.
bn :
Part of data basis. Distances between B and N electrodes.
B is second power, N is second potential electrode.
ab2 :
Half distance between A and B.
mn2 :
        Half distance between M and N.
Only used for input (feeding am etc.).
"""
def __init__(self, ab2=None, mn2=None, **kwargs):
r"""Constructor
"""
self.am = None
self.bm = None
self.an = None
self.bn = None
self.ab2 = None
self.mn2 = None
super(VESModelling, self).__init__(**kwargs)
if 'dataContainerERT' in kwargs:
data = kwargs['dataContainerERT']
if isinstance(data, pg.DataContainerERT):
kwargs['am'] = [data.sensorPosition(data('a')[i]).distance(
data('m')[i]) for i in range(data.size())]
kwargs['an'] = [data.sensorPosition(data('a')[i]).distance(
data('n')[i]) for i in range(data.size())]
kwargs['bm'] = [data.sensorPosition(data('b')[i]).distance(
data('m')[i]) for i in range(data.size())]
kwargs['bn'] = [data.sensorPosition(data('b')[i]).distance(
data('n')[i]) for i in range(data.size())]
self.setDataSpace(ab2=ab2, mn2=mn2, **kwargs)
def createStartModel(self, rhoa):
r"""
"""
if self.nLayers == 0:
pg.critical("Model space is not been initialized.")
startThicks = np.logspace(np.log10(min(self.mn2)/2),
np.log10(max(self.ab2)/5),
self.nLayers - 1)
startThicks = pg.utils.diff(pg.cat([0.0], startThicks))
# layer thickness properties
self.setRegionProperties(0, startModel=startThicks, trans='log')
# resistivity properties
self.setRegionProperties(1, startModel=np.median(rhoa), trans='log')
return super(VESModelling, self).createStartModel()
def setDataSpace(self, ab2=None, mn2=None,
am=None, bm=None, an=None, bn=None,
**kwargs):
"""Set data basis, i.e., arrays for all am, an, bm, bn distances.
Parameters
----------
"""
# Sometimes you don't have AB2/MN2 but provide am etc.
self.am = am
self.an = an
self.bm = bm
self.bn = bn
if ab2 is not None and mn2 is not None: # overrides am etc.
if isinstance(mn2, float):
mn2 = np.ones(len(ab2))*mn2
if len(ab2) != len(mn2):
print("ab2", ab2)
print("mn2", mn2)
raise Exception("length of ab2 is unequal length of mn2")
self.am = ab2 - mn2
self.an = ab2 + mn2
self.bm = ab2 + mn2
self.bn = ab2 - mn2
elif (am is not None and bm is not None and an is not None
and bn is not None):
self.am = am
self.bm = bm
self.an = an
self.bn = bn
if self.am is not None and self.bm is not None:
self.ab2 = (self.am + self.bm) / 2
self.mn2 = abs(self.am - self.an) / 2
self.k = (2.0 * np.pi) / (1.0 / self.am - 1.0 / self.an -
1.0 / self.bm + 1.0 / self.bn)
def response(self, par):
return self.response_mt(par, 0)
def response_mt(self, par, i=0):
if self.am is not None and self.bm is not None:
nLayers = (len(par)+1) // 2
fop = pg.core.DC1dModelling(nLayers,
self.am, self.bm, self.an, self.bn)
else:
pg.critical("No data space defined don't know what to calculate.")
return fop.response(par)
def drawModel(self, ax, model, **kwargs):
pg.viewer.mpl.drawModel1D(ax=ax,
model=model,
plot=kwargs.pop('plot', 'loglog'),
xlabel=r'Resistivity ($\Omega$m)', **kwargs)
ax.set_ylabel('Depth in (m)')
def drawData(self, ax, data, error=None, label=None, **kwargs):
r"""Draw modeled apparent resistivity data.
Parameters
----------
ax: axes
Matplotlib axes object to draw into.
data: iterable
Apparent resistivity values to draw.
error: iterable [None]
Adds an error bar if you have error values.
label: str ['$\varrho_a$']
Set legend label for the amplitude.
Other parameters
----------------
ab2: iterable
Override ab2 that fits data size.
mn2: iterable
Override mn2 that fits data size.
plot: function name
Matplotlib plot function, e.g., plot, loglog, semilogx or semilogy
"""
ab2 = kwargs.pop('ab2', self.ab2)
# mn2 = kwargs.pop('mn2', self.mn2)
plot = kwargs.pop('plot', 'loglog')
ra = data
raE = error
style = dict(pg.frameworks.modelling.DEFAULT_STYLES.get(
label, pg.frameworks.modelling.DEFAULT_STYLES['Default']))
style.update(kwargs)
a1 = ax
plot = getattr(a1, plot)
if label is None:
label = r'$\varrho_a$'
del style["linestyle"] # to remove mpl warning
plot(ra, ab2, 'x-', label=label, **style)
if raE is not None:
raErr = np.array(ra * raE)
if pg.isArray(raErr, len(ra)):
a1.errorbar(ra, ab2,
xerr=raErr, barsabove=True,
**pg.frameworks.modelling.DEFAULT_STYLES.get('Error',
pg.frameworks.modelling.DEFAULT_STYLES['Default']),
label='_nolegend_')
a1.set_ylim(max(ab2), min(ab2))
a1.set_xlabel(r'Apparent resistivity ($\Omega$m)')
a1.set_ylabel(r'AB/2 (m)')
a1.grid(True)
a1.legend()
class VESCModelling(VESModelling):
"""Vertical Electrical Sounding (VES) forward operator. (complex)
Vertical Electrical Sounding (VES) forward operator for complex
resistivity values. see: :py:mod:`pygimli.physics.ert.VESModelling`
"""
def __init__(self, **kwargs):
super(VESCModelling, self).__init__(nPara=2, **kwargs)
self.phiAxe = None
def phaseModel(self, model):
"""Return the current phase model values."""
nLay = (len(model) + 1) // 3
return pg.cat(model[0:nLay-1], 1000. * model[nLay*2-1::])
def resModel(self, model):
"""Return the resistivity model values."""
nLay = (len(model) + 1) // 3
return model[0:nLay*2-1]
def createStartModel(self, rhoa):
startThicks = np.logspace(np.log10(min(self.mn2)/2),
np.log10(max(self.ab2)/5),
self._nLayers-1)
startThicks = pg.utils.diff(pg.cat([0.0], startThicks))
# layer thickness properties
self.setRegionProperties(0, startModel=startThicks,
trans='log')
# resistivity properties
self.setRegionProperties(1, startModel=np.median(rhoa),
trans='log')
self.setRegionProperties(2, startModel=np.median(rhoa[len(rhoa)//2::]),
trans='log')
sm = self.regionManager().createStartModel()
return sm
def response_mt(self, par, i=0):
""" Multithread response for parametrization.
Returns [|rhoa|, +phi(rad)] for [thicks, res, phi(rad)]
"""
if self.am is not None and self.bm is not None:
nLayers = (len(par) + 1) // 3
fop = pg.core.DC1dModellingC(nLayers,
self.am, self.bm, self.an, self.bn)
else:
pg.critical("No data basis known.")
return fop.response(par)
def drawModel(self, ax, model, **kwargs):
"""Draw 1D VESC Modell."""
a1 = ax
a2 = pg.viewer.mpl.createTwinY(ax)
super(VESCModelling, self).drawModel(a1,
model=self.resModel(model),
**kwargs)
plot = kwargs.pop('plot', 'semilogy')
if plot == 'loglog':
plot = 'semilogy'
elif plot == 'semilogx':
plot = 'plot'
pg.viewer.mpl.drawModel1D(ax=a2,
model=self.phaseModel(model),
plot=plot,
color='C2',
xlabel='Phase (mrad)',
**kwargs)
a2.set_xlabel('neg. phase (mRad)', color='C2')
def drawData(self, ax, data, error=None, labels=None, ab2=None, mn2=None,
**kwargs):
r"""Draw modeled apparent resistivity and apparent phase data.
Parameters
----------
ax: axes
Matplotlib axes object to draw into.
data: iterable
Apparent resistivity values to draw. [rhoa phia].
error: iterable [None]
Rhoa in Ohm m and phia in radiand.
Adds an error bar if you have error values. [err_rhoas err_phia]
The error of amplitudes are assumed to be relative and the error
of the phases is assumed to be absolute in mrad.
labels: str [r'$\varrho_a$', r'$\varphi_a$']
Set legend labels for amplitude and phase.
Other parameters:
-----------------
ab2: iterable
Override ab2 that fits data size.
mn2: iterable
Override mn2 that fits data size.
plot: function name
Matplotlib plot function, e.g., plot, loglog, semilogx or semilogy
"""
a1 = None
a2 = None
if hasattr(ax, '__iter__'):
if len(ax) == 2:
a1 = ax[0]
a2 = ax[1]
else:
a1 = ax
a2 = pg.viewer.mpl.createTwinY(ax)
if ab2 is not None and mn2 is not None:
self.setDataSpace(ab2=ab2, mn2=mn2)
ra = data[0:len(data)//2]
phi = data[len(data)//2::] * 1000. # mRad
phiE = None # abs err
raE = None # rel err
if error is not None:
if type(error) is float:
raE = np.ones(len(data)//2) * error
phiE = np.ones(len(data)//2) * error
else:
raE = error[0:len(data)//2]
phiE = error[len(data)//2::]
if labels is None:
labels = [r'$\varrho_a$', r'$\varphi_a$']
label = kwargs.pop('label', 'Data')
style = dict(pg.frameworks.modelling.DEFAULT_STYLES.get(
label, pg.frameworks.modelling.DEFAULT_STYLES['Default']))
style.update(kwargs)
super(VESCModelling, self).drawData(a1, ra, error=raE,
label=labels[0], **style)
style['Color'] = 'C2'
a2.semilogy(phi, self.ab2, label=labels[1], **style)
if phiE is not None:
a2.errorbar(phi, self.ab2,
xerr=phiE,
**pg.frameworks.modelling.DEFAULT_STYLES.get('Error',
pg.frameworks.modelling.DEFAULT_STYLES['Default']),
barsabove=True,
label='_nolegend_'
)
a2.set_ylim(max(self.ab2), min(self.ab2))
a2.set_xlabel('Apparent neg. phase (mRad)', color='C2')
a2.set_ylabel('AB/2 in (m)')
a2.legend()
a2.grid(True)
class VESManager(MethodManager1d):
r"""Vertical electrical sounding (VES) manager class.
Examples
--------
>>> import numpy as np
>>> import pygimli as pg
>>> from pygimli.physics import VESManager
>>> ab2 = np.logspace(np.log10(1.5), np.log10(100), 32)
>>> mn2 = 1.0
>>> # 3 layer with 100, 500 and 20 Ohmm
>>> # and layer thickness of 4, 6, 10 m
>>> # over a Halfspace of 800 Ohmm
>>> synthModel = pg.cat([4., 6., 10.], [100., 5., 20., 800.])
>>> ves = VESManager()
>>> ra, err = ves.simulate(synthModel, ab2=ab2, mn2=mn2, noiseLevel=0.01)
>>> ax = ves.showData(ra, error=err)
>>> # _= ves.invert(ra, err, nLayer=4, showProgress=0, verbose=0)
>>> # ax = ves.showModel(synthModel)
>>> # ax = ves.showResult(ax=ax)
>>> pg.wait()
"""
def __init__(self, **kwargs):
"""Constructor
Parameters
----------
complex : bool
Accept complex resistivities.
Attributes
----------
complex : bool
Accept complex resistivities.
"""
self._complex = kwargs.pop('complex', False)
super(VESManager, self).__init__(**kwargs)
self.inv.setDeltaChiStop(1)
self.dataTrans = None
self.rhoaTrans = pg.trans.TransLog()
self.phiaTrans = pg.trans.TransLin()
@property
def complex(self):
return self._complex
@complex.setter
def complex(self, c):
self._complex = c
self.reinitForwardOperator()
def createForwardOperator(self, **kwargs):
"""Create Forward Operator.
Create Forward Operator based on complex attribute.
"""
if self.complex:
return VESCModelling(**kwargs)
else:
return VESModelling(**kwargs)
def simulate(self, model, ab2=None, mn2=None, **kwargs):
"""Simulate measurement data."""
if ab2 is not None and mn2 is not None:
self._fw.fop.setDataSpace(ab2=ab2, mn2=mn2)
return super(VESManager, self).simulate(model, **kwargs)
def preErrorCheck(self, err, dataVals=None):
"""Called before the validity check of the error values."""
err = np.atleast_1d(err)
if self.complex:
if len(err) == 2:
nData = len(dataVals) // 2
err = pg.cat(np.ones(nData)*err[0],
np.abs(err[1] / dataVals[nData:]))
else:
if len(err) == 1:
err = np.ones(nData)*err[0]
return err
def invert(self, data=None, err=None, ab2=None, mn2=None, **kwargs):
"""Invert measured data.
Parameters
----------
Keyword Arguments
----------------
**kwargs
Additional kwargs inherited from %(MethodManager1d.invert) and
%(Inversion.run)
Returns
-------
model : pg.Vector
inversion result
"""
if ab2 is not None and mn2 is not None:
self.fop.setDataSpace(ab2=ab2, mn2=mn2)
if data is not None:
if self.complex:
nData = len(data)//2
self.dataTrans = pg.trans.TransCumulative()
self.dataTrans.add(self.rhoaTrans, nData)
self.dataTrans.add(self.phiaTrans, nData)
else:
self.dataTrans = pg.trans.TransLog()
self.inv.dataTrans = self.dataTrans
if 'layerLimits' not in kwargs:
kwargs['layerLimits'] = [min(self.fop.mn2)/5,
max(self.fop.ab2)/2]
if 'paraLimits' in kwargs and self.complex:
pL = kwargs['paraLimits'][1]
kwargs['paraLimits'][1] = [pL[0]/1000, pL[1]/1000]
return super(VESManager, self).invert(data=data, err=err, **kwargs)
def loadData(self, fileName, **kwargs):
""" Load simple data matrix
"""
mat = np.loadtxt(fileName)
if len(mat[0]) == 4:
self.fop.setDataSpace(ab2=mat[:, 0], mn2=mat[:, 1])
return mat.T
if len(mat[0]) == 6:
self.complex = True
self.fop.setDataSpace(ab2=mat[:, 0], mn2=mat[:, 1])
return (mat[:, 0], mat[:, 1],
np.array(pg.cat(mat[:, 2], mat[:, 4])),
np.array(pg.cat(mat[:, 3], mat[:, 5])))
def exportData(self, fileName, data=None, error=None):
"""Export data into simple ascii matrix.
Usefull?
"""
mn2 = np.abs((self.fop.am - self.fop.an) / 2.)
ab2 = (self.fop.am + self.fop.bm) / 2.
mat = None
if data is None:
data = self.inv.dataVals
if error is None:
error = self.inv.errorVals
if self.complex:
nData = len(data)//2
mat = np.array([ab2, mn2,
data[:nData], error[:nData],
data[nData:], error[nData:]
]).T
np.savetxt(fileName, mat,
header=r'ab/2\tmn/2\trhoa\terr\tphia\terrphi')
else:
mat = np.array([ab2, mn2, data, error]).T
np.savetxt(fileName, mat, header=r'ab/2\tmn/2\trhoa\terr')
def VESManagerApp():
    """Call VESManager as console app.

    Parses command-line arguments (data file name, maximum iterations,
    regularization strength), loads the sounding data, shows it, runs the
    inversion and displays result and fit.
    """
    parser = VESManager.createArgParser(dataSuffix='ves')
    options = parser.parse_args()
    verbose = not options.quiet
    if verbose:
        print("VES Manager console application.")
        print(options._get_kwargs())
    # debug flag is inherited from the global pygimli debug state
    mgr = VESManager(verbose=verbose, debug=pg.debug())
    ab2, mn2, ra, err = mgr.loadData(options.dataFileName)
    mgr.showData(ra, err)
    mgr.invert(ra, err, ab2, mn2,
               maxIter=options.maxIter,
               lam=options.lam,
               )
    mgr.showResultAndFit()
    pg.wait()
if __name__ == '__main__':
VESManagerApp()
|
JuliusHen/gimli | pygimli/core/trans.py | <reponame>JuliusHen/gimli
# -*- coding: utf-8 -*-
"""Some specialization to the trans functions."""
from .core import pgcore
import numpy as np
__TransCumulative_addForGC__ = pgcore.RTransCumulative.add
def __TransCumulative_addForGC_MP__(self, T, *args):
    """Don't use directly.

    Monkey patch to keep the GC happy until redesign.

    Stores a Python-side reference to the added transformation ``T`` on the
    cumulative-trans instance so it is not garbage collected while the C++
    core still holds a raw pointer to it, then delegates to the original
    ``RTransCumulative.add``.
    """
    if not hasattr(self, '__trans__'):
        self.__trans__ = []
    self.__trans__.append(T)
    if len(args) == 1:
        # be sure avoid auto conversion from int to IndexArray
        if isinstance(args[0], int):
            return __TransCumulative_addForGC__(self, T, size=args[0])
    return __TransCumulative_addForGC__(self, T, *args)
pgcore.RTransCumulative.add = __TransCumulative_addForGC_MP__
# Aliases
Trans = pgcore.RTrans
TransLinear = pgcore.RTransLinear
TransLin = pgcore.RTransLin
TransPower = pgcore.RTransPower
TransLog = pgcore.RTransLog
TransLogLU = pgcore.RTransLogLU
TransCotLU = pgcore.RTransCotLU
TransCumulative = pgcore.RTransCumulative
class TransSymLog(pgcore.RTrans):
    """Transformation using a bilogarithmic (symmetric log) scaling.

    Behaves logarithmically for |x| >> tol while remaining defined and
    sign-preserving around zero.
    """
    def __init__(self, tol=1e-12):
        """Initialize with tolerance marking the linear-to-log crossover."""
        super().__init__()
        self.tol = tol
    def trans(self, x):
        """Forward transformation: sign(x) * log(1 + |x|/tol)."""
        scaled = np.abs(x) / self.tol
        return np.sign(x) * pgcore.log(1 + scaled)
    def invTrans(self, y):
        """Inverse transformation: sign(y) * (exp(|y|) - 1) * tol."""
        magnitude = pgcore.exp(np.abs(y)) - 1.
        return np.sign(y) * magnitude * self.tol
    def deriv(self, x):
        """Derivative of the forward transformation."""
        scaled = np.abs(x) / self.tol
        return 1. / (scaled + 1) / self.tol
|
JuliusHen/gimli | pygimli/testing/test_Logger.py | <filename>pygimli/testing/test_Logger.py<gh_stars>100-1000
import pygimli as pg
#log = logging.getLogger('pyGIMLi')
#logging.basicConfig(level=logging.DEBUG,
#format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
#datefmt='%m/%d/%Y %H:%M:%S',
##filename='example.log'
#)
pg.version()
# test pygimli log
pg.info("Start numeric log test." + str(pg.log(pg.Vector(1, 1.))))
pg.setVerbose(True)
pg.verbose("some verbose notes")
pg.warn("Start warning test.")
def testTraceback1():
def testTraceback2():
pg.error("Start error test.: int", 1, " vec", pg.Vector(2))
testTraceback2()
testTraceback1()
@pg.v
def testVerboseDecorator1():
pg.verbose('testVerboseDecorator1 should be seen even if verbose is set to false')
def testVerboseDecorator2():
pg.verbose('testVerboseDecorator2 should be only seen even if verbose is set to true')
pg.setVerbose(False)
testVerboseDecorator1()
pg.setVerbose(False)
testVerboseDecorator2()
pg.setVerbose(True)
testVerboseDecorator2()
@pg.d
def testDebugDecorator():
pg.debug('testDebugDecorator should always seen even if debug is set to false')
testDebugDecorator()
pg.d(testDebugDecorator())
pg.debug("debug 0")
pg.setDebug(1)
pg.debug("debug ON")
pg.setThreadCount(2)
pg.debug("debug with a trace", withTrace=True)
# should not printed out
pg.setDebug(0)
pg.debug("debug OFF")
pg.setThreadCount(2)
# test core log (should not be used outside the core)
pg.log(pg.core.Info, "core log ")
pg.log(pg.core.Warning, "core log ")
pg.log(pg.core.Error, "core log ")
try:
pg.log(pg.core.Critical, "core log ")
except BaseException as e:
print("cought core exception:", e)
#pg.exception("Exception")
def testMethod(**kwargs):
pg.core.logger.warnNonEmptyArgs(kwargs)
testMethod(a=1, b='foo')
#
# teste colored output
print(pg._('Green', c='g'), pg._('Red', c='r'), pg._('Yellow', c='y'))
#print(pg._g('Green'), pg._g('Red'), pg._y('Yellow'))
print(pg._('more', 'then', 'one', c='6;30;42'))
|
JuliusHen/gimli | doc/examples/5_misc/plot_1_dcem.py | #!/usr/bin/env python
# encoding: utf-8
"""
DC-EM Joint inversion
---------------------
This is an old script from early pyGIMLi jointly inverting direct current (DC)
and electromagnetic (EM) soundings on the modelling abstraction level.
Note that this is not recommended as a basis for programming, because there
is a dedicated framework for classical joint inversion. However, it explains
what happens under the hood in the much simpler script that follows."""
###############################################################################
# The case has been documented by :cite:`Guenther2013NSG`.
import numpy as np
import matplotlib.pyplot as plt
import pygimli as pg
from pygimli.viewer.mpl import drawModel1D
###############################################################################
# First, we define a modelling class that calls two other classes and pastes
# their results to one vector.
class DCEM1dModelling(pg.core.ModellingBase):
    """Joint 1D forward operator combining DC resistivity and FDEM.

    Wraps a ``DC1dModelling`` and an ``FDEM1dModelling`` operator over the
    same layered block model and concatenates their responses.
    """
    def __init__(self, nlay, ab2, mn2, freq, coilspacing, verbose=False):
        """Init number of layers, AB/2, MN/2, frequencies & coil spacing."""
        pg.core.ModellingBase.__init__(self, verbose)
        self.nlay_ = nlay
        self.fDC_ = pg.core.DC1dModelling(nlay, ab2, mn2, verbose)
        self.fEM_ = pg.core.FDEM1dModelling(nlay, freq, coilspacing, verbose)
        # 1D block mesh: nlay-1 thicknesses plus nlay parameter values
        self.mesh_ = pg.meshtools.createMesh1DBlock(nlay)
        self.setMesh(self.mesh_)
    def response(self, model):
        """Return concatenated response of DC and EM forward operators."""
        return pg.cat(self.fDC_(model), self.fEM_(model))
###############################################################################
# The actual script starts here. There are some options to play with
noiseEM = 1. # absolute (per cent of primary signal)
noiseDC = 3. # in per cent
lamEM, lamDC, lamDCEM = 300., 500., 500. # regularization strength
verbose = False
###############################################################################
# First we create a synthetic model.
nlay = 3 # number of layers
thk = pg.Vector(nlay - 1, 15.0) # 15m thickness each
res = pg.Vector(nlay, 200.0) # 200 Ohmm
res[1] = 10.
res[2] = 50.
model = pg.cat(thk, res) # paste together to one model
###############################################################################
# We first set up EM forward operator and generate synthetic data with noise
coilspacing = 50.
nf = 10
freq = pg.Vector(nf, 110.)
for i in range(nf-1):
freq[i+1] = freq[i] * 2.
fEM = pg.core.FDEM1dModelling(nlay, freq, coilspacing)
dataEM = fEM(model)
dataEM += pg.randn(len(dataEM), seed=1234) * noiseEM
###############################################################################
# We define model transformations: logarithms and log with upper+lower bounds
transRhoa = pg.trans.TransLog()
transThk = pg.trans.TransLog()
transRes = pg.trans.TransLogLU(1., 1000.)
transEM = pg.trans.Trans()
fEM.region(0).setTransModel(transThk)
fEM.region(1).setTransModel(transRes)
###############################################################################
# We set up the independent EM inversion and run the model.
invEM = pg.core.Inversion(dataEM, fEM, transEM, True, True)
modelEM = pg.Vector(nlay * 2 - 1, 50.)
invEM.setModel(modelEM)
invEM.setAbsoluteError(noiseEM)
invEM.setLambda(lamEM)
invEM.setMarquardtScheme(0.9)
modelEM = invEM.run()
respEM = invEM.response()
###############################################################################
# Next we set up the DC forward operator and generate synthetic data with noise
ab2 = pg.Vector(20, 3.)
na = len(ab2)
mn2 = pg.Vector(na, 1.0)
for i in range(na-1):
ab2[i+1] = ab2[i] * 1.3
fDC = pg.core.DC1dModelling(nlay, ab2, mn2)
dataDC = fDC(model)
dataDC *= 1. + pg.randn(len(dataDC), seed=1234) * noiseDC / 100.
fDC.region(0).setTransModel(transThk)
fDC.region(1).setTransModel(transRes)
# We set up the independent DC inversion and let it run.
invDC = pg.core.Inversion(dataDC, fDC, transRhoa, verbose)
modelDC = pg.Vector(nlay*2-1, 20.)
invDC.setModel(modelDC)
invDC.setRelativeError(noiseDC/100.)
invDC.setLambda(lamDC)
invDC.setMarquardtScheme(0.9)
modelDC = invDC.run()
respDC = invDC.response()
###############################################################################
# Next we create a the joint forward operator (see class above).
fDCEM = DCEM1dModelling(nlay, ab2, mn2, freq, coilspacing)
fDCEM.region(0).setTransModel(transThk)
fDCEM.region(1).setTransModel(transRes)
###############################################################################
# We setup the joint inversion combining, transformations, data and errors.
transData = pg.trans.TransCumulative()
transData.add(transRhoa, na)
transData.add(transEM, nf*2)
invDCEM = pg.core.Inversion(pg.cat(dataDC, dataEM), fDCEM, transData, verbose)
modelDCEM = pg.Vector(nlay * 2 - 1, 20.)
invDCEM.setModel(modelDCEM)
err = pg.cat(dataDC * noiseDC / 100., pg.Vector(len(dataEM), noiseEM))
invDCEM.setAbsoluteError(err)
invDCEM.setLambda(lamDCEM)
invDCEM.setMarquardtScheme(0.9)
modelDCEM = invDCEM.run()
respDCEM = invDCEM.response()
###############################################################################
# The results of the inversion are plotted for comparison.
for inv in [invEM, invDC, invDCEM]:
inv.echoStatus()
print([invEM.chi2(), invDC.chi2(), invDCEM.chi2()]) # chi-square values
###############################################################################
# %% We finally plot the results
fig = plt.figure(1, figsize=(10, 5))
ax1 = fig.add_subplot(131)
drawModel1D(ax1, thk, res, plot='semilogx', color='blue')
drawModel1D(ax1, modelEM[0:nlay-1], modelEM[nlay-1:nlay*2-1], color='green')
drawModel1D(ax1, modelDC[0:nlay-1], modelDC[nlay-1:nlay*2-1], color='cyan')
drawModel1D(ax1, modelDCEM[0:nlay-1], modelDCEM[nlay-1:nlay*2-1],
color='red')
ax1.legend(('syn', 'EM', 'DC', 'JI'))
ax1.set_xlim((10., 1000.))
ax1.set_ylim((40., 0.))
ax1.grid(which='both')
ax2 = fig.add_subplot(132)
ax2.semilogy(dataEM[0:nf], freq, 'bx', label='syn IP')
ax2.semilogy(dataEM[nf:nf*2], freq, 'bo', label='syn OP')
ax2.semilogy(respEM[0:nf], freq, 'g--', label='EM')
ax2.semilogy(respEM[nf:nf*2], freq, 'g--')
ax2.semilogy(respDCEM[na:na+nf], freq, 'r:', label='DCEM')
ax2.semilogy(respDCEM[na+nf:na+nf*2], freq, 'r:')
ax2.set_ylim((min(freq), max(freq)))
ax2.set_xlabel("IP/OP in %")
ax2.set_ylabel("$f$ in Hz")
ax2.yaxis.set_label_position("right")
ax2.grid(which='both')
ax2.legend(loc="best")
ax3 = fig.add_subplot(133)
ax3.loglog(dataDC, ab2, 'bx-', label='syn')
ax3.loglog(respDC, ab2, 'c-', label='DC')
ax3.loglog(respDCEM[0:na], ab2, 'r:', label='DCEM')
# ax3.axis('tight')
ax3.set_ylim((max(ab2), min(ab2)))
ax3.grid(which='both')
ax3.set_xlabel(r"$\rho_a$ in $\Omega$m")
ax3.set_ylabel("AB/2 in m")
ax3.yaxis.set_ticks_position("right")
ax3.yaxis.set_label_position("right")
ax3.legend(loc="best")
pg.wait()
###############################################################################
# <NAME>. (2013): On Inversion of Frequency Domain Electromagnetic Data in
# Salt Water Problems - Sensitivity and Resolution. Ext. Abstr., 19th European
# Meeting of Environmental and Engineering Geophysics, Bochum, Germany.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.