| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
import unicornhat as uh
uh.clear()
uh.show()
| hekim13/unicorn-app | src/unicorn-scripts/off.py | Python | mit | 46 |
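The off.py row above simply blanks the Unicorn HAT. As a point of comparison, here is a minimal sketch of the same library's draw-then-clear cycle; it assumes the Pimoroni unicornhat API (brightness/set_pixel/show/clear) and is not part of the dataset row.

import unicornhat as uh

uh.brightness(0.5)               # dim the LEDs (assumed unicornhat call)
uh.set_pixel(0, 0, 255, 0, 0)    # draw one red pixel into the buffer
uh.show()                        # push the buffer to the hardware
uh.clear()                       # blank the buffer again, as off.py does
uh.show()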
import numpy as np
import hyperspy.api as hs
from hyperspy_gui_ipywidgets.tests.utils import KWARGS
def test_span_roi():
roi = hs.roi.SpanROI(left=0, right=10)
wd = roi.gui(**KWARGS)["ipywidgets"]["wdict"]
assert wd["left"].value == 0
assert wd["right"].value == 10
wd["left"].value = -10
wd["right"].value = 0
assert roi.left == -10
assert roi.right == 0
def test_point_1d_roi():
roi = hs.roi.Point1DROI(value=5.5)
wd = roi.gui(**KWARGS)["ipywidgets"]["wdict"]
assert wd["value"].value == 5.5
wd["value"].value = 0
assert roi.value == 0
def test_point2d():
roi = hs.roi.Point2DROI(x=0, y=10)
wd = roi.gui(**KWARGS)["ipywidgets"]["wdict"]
assert wd["x"].value == 0
assert wd["y"].value == 10
wd["x"].value = -10
wd["y"].value = 0
assert roi.x == -10
assert roi.y == 0
def test_rectangular_roi():
roi = hs.roi.RectangularROI(left=0, right=10, top=-10, bottom=0)
wd = roi.gui(**KWARGS)["ipywidgets"]["wdict"]
assert wd["left"].value == 0
assert wd["right"].value == 10
assert wd["top"].value == -10
assert wd["bottom"].value == 0
wd["left"].value = -10
wd["right"].value = 0
wd["bottom"].value = 1.2
wd["top"].value = 1.1
assert roi.left == -10
assert roi.right == 0
assert roi.top == 1.1
assert roi.bottom == 1.2
def test_circle_roi():
roi = hs.roi.CircleROI(cx=0, cy=0, r=1, r_inner=0.5)
wd = roi.gui(**KWARGS)["ipywidgets"]["wdict"]
assert wd["cx"].value == 0
assert wd["cy"].value == 0
assert wd["radius"].value == 1
assert wd["inner_radius"].value == 0.5
wd["cx"].value = 1
wd["cy"].value = 2
wd["radius"].value = 4
wd["inner_radius"].value = 1.5
assert roi.cx == 1
assert roi.cy == 2
assert roi.r == 4
assert roi.r_inner == 1.5
def test_line2d_roi():
roi = hs.roi.Line2DROI(x1=0, x2=10, y1=0, y2=10, linewidth=2)
wd = roi.gui(**KWARGS)["ipywidgets"]["wdict"]
assert wd["x1"].value == 0
assert wd["x2"].value == 10
assert wd["y1"].value == 0
assert wd["y2"].value == 10
assert wd["linewidth"].value == 2
wd["x1"].value = 12
wd["x2"].value = 23
wd["y1"].value = -12
wd["y2"].value = -23
wd["linewidth"].value = 100
assert roi.x1 == 12
assert roi.x2 == 23
assert roi.y1 == -12
assert roi.y2 == -23
assert roi.linewidth == 100
| hyperspy/hyperspy_gui_ipywidgets | hyperspy_gui_ipywidgets/tests/test_roi.py | Python | gpl-3.0 | 2,405 |
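The test_roi.py row above repeatedly checks one direction of the ipywidgets binding: editing a widget value updates the ROI. A minimal sketch of that round trip, assuming hyperspy and hyperspy_gui_ipywidgets are installed and reusing the tests' KWARGS helper:

import hyperspy.api as hs
from hyperspy_gui_ipywidgets.tests.utils import KWARGS

roi = hs.roi.SpanROI(left=0, right=10)
wdict = roi.gui(**KWARGS)["ipywidgets"]["wdict"]
wdict["left"].value = 2.5   # editing the widget propagates to the ROI
assert roi.left == 2.5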
#!/usr/bin/env python2
from configobj import ConfigObj
from validate import Validator
import os, sys, subprocess, signal, tempfile, shutil, pipes, time, locale
from datetime import datetime
from PyQt4 import QtGui, QtCore
try:
from gi.repository import Notify
except Exception:
Notify = None
dirname = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(dirname, os.path.pardir))
from lib import wireutils
wireutils.color_printing_config.name = "Perdyshot"
wireutils.color_printing_config.color = wireutils.ansi_colors.DARKCYAN
cwd = os.getcwd()
ICON = os.path.join(dirname, os.path.pardir, "icon_glow.png")
LOGO = os.path.join(dirname, os.path.pardir, "icon_plain.png")
VERSION = 'Perdyshot ' + open(os.path.join(dirname, os.path.pardir, '.version'), 'r').read()
URL = "https://github.com/Locercus/Perdyshot"
DATE = os.path.getmtime(os.path.join(dirname, os.path.pardir, ".version"))
DATE = datetime.fromtimestamp(DATE).strftime(locale.nl_langinfo(locale.D_T_FMT))
if Notify:
Notify.init("Perdyshot")
config = ConfigObj(os.path.join(dirname, os.path.pardir, 'perdyshot.conf'), encoding = 'UTF8', configspec = os.path.join(dirname, os.path.pardir, 'perdyshot.conf.spec'))
validator = Validator()
if not config.validate(validator):
wireutils.color_print("Invalid configuration file", color = wireutils.ansi_colors.DARKRED)
sys.exit(1)
settings = {}
settings['modes'] = config['GUI']['CaptureModes']
app = QtGui.QApplication(sys.argv)
# Create about dialog
class AboutDialog(QtGui.QWidget):
def __init__(self):
QtGui.QWidget.__init__(self)
self.setFixedSize(450, 240)
self.setWindowTitle("About Perdyshot")
image = QtGui.QLabel(self)
image.setPixmap(QtGui.QPixmap(LOGO))
image.move((450 - image.sizeHint().width()) / 2, 10)
text = QtGui.QLabel(self)
text.setText("<center><b>%s © 2015 Jonatan Nordentoft. MIT License</b><br/>Released on %s</center>" % (VERSION, DATE))
text.move((450 - text.sizeHint().width()) / 2, image.sizeHint().height() + 30)
website = QtGui.QPushButton("Website", self)
website.move((450 - website.sizeHint().width()) / 2, image.sizeHint().height() + text.sizeHint().height() + 45)
website.clicked.connect(self.openWebsite)
def closeEvent(self, event):
event.ignore()
self.hide()
def openWebsite(self):
subprocess.call(["xdg-open", URL])
# Create tray icon
# http://stackoverflow.com/a/895721/1248084
class SystemTrayIcon(QtGui.QSystemTrayIcon):
def __init__(self, icon, parent = None):
QtGui.QSystemTrayIcon.__init__(self, icon, parent)
menu = QtGui.QMenu(parent)
self.menu = menu
# Add the options
for key in settings['modes']:
action = menu.addAction(key)
action.triggered.connect(
lambda x, key = key: self.onCapture(key)
)
menu.addSeparator()
menu.addAction("About", self.onAbout)
menu.addAction("Quit", self.onQuit)
self.setContextMenu(menu)
def onQuit(self):
sys.exit(0)
def onAbout(self):
aboutDialog.show()
def onCapture(self, item):
options = settings['modes'][item]
if options['type'] == 'script':
subprocess.call([ options['file'] ])
elif options['type'] == 'simple':
filename = os.path.join(tempfile.gettempdir(), 'perdygui.png')
if not os.path.exists(filename):
open(filename, "w").close()
args = [
'/usr/bin/env', 'python2', os.path.join(dirname, os.path.pardir, 'cli', options['mode'] + '.py'),
'-f', filename
]
if options['mode'] == 'window':
args.extend(['--delay', '0'])
# Capture the screenshot
subprocess.call(args)
# Okay, the screenshot has been captured. What do we do now?
if options['file'] != None:
newFilename = time.strftime(options['file'])
shutil.copyfile(filename, newFilename)
filename = newFilename
if options['program'] != None:
subprocess.call(options['program'] % filename, shell = True)
if options['copy']:
subprocess.call('xclip -i -sel clip < ' + pipes.quote(filename), shell = True)
if options['notification'] and Notify:
notification = Notify.Notification.new(
options['notificationTitle'],
options['notificationDescription'],
options['notificationImage'] if options['notificationImage'] != None else ICON
)
notification.show()
aboutDialog = AboutDialog()
iconWidget = QtGui.QWidget()
trayIcon = SystemTrayIcon(QtGui.QIcon(ICON), iconWidget)
trayIcon.show()
signal.signal(signal.SIGINT, signal.SIG_DFL)
app.exec_()
| yrsegal/Perdyshot | gui/perdyshot.py | Python | mit | 4,988 |
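One detail in SystemTrayIcon above is the `lambda x, key=key: self.onCapture(key)` connection: the `key=key` default argument freezes the loop variable for each menu entry. A small standalone sketch of why that is needed (plain Python, no Qt involved):

callbacks_late = [lambda: key for key in ("window", "screen", "area")]
callbacks_bound = [lambda key=key: key for key in ("window", "screen", "area")]
print([f() for f in callbacks_late])    # ['area', 'area', 'area'] -- late binding
print([f() for f in callbacks_bound])   # ['window', 'screen', 'area']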
from __future__ import annotations
# standard libraries
import asyncio
import typing
# third party libraries
import numpy.typing
# local libraries
from nion.data import Image
from nion.swift import MimeTypes
from nion.swift import Thumbnails
from nion.swift.model import DisplayItem
from nion.swift.model import DocumentModel
from nion.ui import CanvasItem
from nion.ui import UserInterface
from nion.ui import Widgets
from nion.utils import Geometry
if typing.TYPE_CHECKING:
from nion.swift.model import Persistence
from nion.ui import DrawingContext
from nion.ui import Window
from nion.utils import Binding
from nion.utils import Event
_ImageDataType = Image._ImageDataType
_NDArray = numpy.typing.NDArray[typing.Any]
class AbstractThumbnailSource:
def __init__(self) -> None:
self.on_thumbnail_data_changed: typing.Optional[typing.Callable[[typing.Optional[_NDArray]], None]] = None
self.__thumbnail_data: typing.Optional[_NDArray] = None
self.overlay_canvas_item: CanvasItem.AbstractCanvasItem = CanvasItem.EmptyCanvasItem()
def close(self) -> None:
self.on_thumbnail_data_changed = None
@property
def thumbnail_data(self) -> typing.Optional[_NDArray]:
return self.__thumbnail_data
def _set_thumbnail_data(self, thumbnail_data: typing.Optional[_NDArray]) -> None:
self.__thumbnail_data = thumbnail_data
def populate_mime_data_for_drag(self, mime_data: UserInterface.MimeData, size: Geometry.IntSize) -> typing.Tuple[bool, typing.Optional[_NDArray]]:
return False, None
class BitmapOverlayCanvasItem(CanvasItem.CanvasItemComposition):
def __init__(self) -> None:
super().__init__()
self.focusable = True
self.__dropping = False
self.__focused = False
self.wants_drag_events = True
self.wants_mouse_events = True
self.__drag_start: typing.Optional[Geometry.IntPoint] = None
self.on_drop_mime_data: typing.Optional[typing.Callable[[UserInterface.MimeData, int, int], str]] = None
self.on_delete: typing.Optional[typing.Callable[[], None]] = None
self.on_drag_pressed: typing.Optional[typing.Callable[[int, int, UserInterface.KeyboardModifiers], None]] = None
self.active = False
def close(self) -> None:
self.on_drop_mime_data = None
self.on_delete = None
self.on_drag_pressed = None
super().close()
@property
def focused(self) -> bool:
return self.__focused
def _set_focused(self, focused: bool) -> None:
if self.__focused != focused:
self.__focused = focused
self.update()
def _repaint(self, drawing_context: DrawingContext.DrawingContext) -> None:
super()._repaint(drawing_context)
# canvas size
canvas_size = self.canvas_size
if canvas_size:
focused_style = "#3876D6" # TODO: platform dependent
if self.active:
with drawing_context.saver():
drawing_context.begin_path()
drawing_context.round_rect(2, 2, 6, 6, 3)
drawing_context.fill_style = "rgba(0, 255, 0, 0.80)"
drawing_context.fill()
if self.__dropping:
with drawing_context.saver():
drawing_context.begin_path()
drawing_context.rect(0, 0, canvas_size.width, canvas_size.height)
drawing_context.fill_style = "rgba(255, 0, 0, 0.10)"
drawing_context.fill()
if self.focused:
stroke_style = focused_style
drawing_context.begin_path()
drawing_context.rect(2, 2, canvas_size.width - 4, canvas_size.height - 4)
drawing_context.line_join = "miter"
drawing_context.stroke_style = stroke_style
drawing_context.line_width = 4.0
drawing_context.stroke()
def drag_enter(self, mime_data: UserInterface.MimeData) -> str:
self.__dropping = True
self.update()
return "ignore"
def drag_leave(self) -> str:
self.__dropping = False
self.update()
return "ignore"
def drop(self, mime_data: UserInterface.MimeData, x: int, y: int) -> str:
if callable(self.on_drop_mime_data):
result = self.on_drop_mime_data(mime_data, x, y)
if result:
return result
return super().drop(mime_data, x, y)
def key_pressed(self, key: UserInterface.Key) -> bool:
if key.is_delete:
on_delete = self.on_delete
if callable(on_delete):
on_delete()
return True
return super().key_pressed(key)
def mouse_pressed(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
self.__drag_start = Geometry.IntPoint(x=x, y=y)
return True
def mouse_released(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
self.__drag_start = None
return True
def mouse_position_changed(self, x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> bool:
if self.__drag_start is not None and Geometry.distance(Geometry.FloatPoint(y, x), self.__drag_start.to_float_point()) > 2:
self.__drag_start = None
on_drag_pressed = self.on_drag_pressed
if on_drag_pressed:
on_drag_pressed(x, y, modifiers)
return True
return False
class ThumbnailCanvasItem(CanvasItem.CanvasItemComposition):
def __init__(self, ui: UserInterface.UserInterface, thumbnail_source: AbstractThumbnailSource, size: typing.Optional[Geometry.IntSize] = None) -> None:
super().__init__()
bitmap_overlay_canvas_item = BitmapOverlayCanvasItem()
bitmap_canvas_item = CanvasItem.BitmapCanvasItem(background_color="#CCC", border_color="#444")
bitmap_overlay_canvas_item.add_canvas_item(bitmap_canvas_item)
if size is not None:
bitmap_canvas_item.update_sizing(bitmap_canvas_item.sizing.with_fixed_size(size))
thumbnail_source.overlay_canvas_item.update_sizing(thumbnail_source.overlay_canvas_item.sizing.with_fixed_size(size))
bitmap_overlay_canvas_item.add_canvas_item(thumbnail_source.overlay_canvas_item)
self.__thumbnail_source = thumbnail_source
self.on_drag: typing.Optional[typing.Callable[[UserInterface.MimeData, typing.Optional[_ImageDataType], int, int], None]] = None
self.on_drop_mime_data: typing.Optional[typing.Callable[[UserInterface.MimeData, int, int], str]] = None
self.on_delete: typing.Optional[typing.Callable[[], None]] = None
def drag_pressed(x: int, y: int, modifiers: UserInterface.KeyboardModifiers) -> None:
on_drag = self.on_drag
if callable(on_drag):
mime_data = ui.create_mime_data()
valid, thumbnail = thumbnail_source.populate_mime_data_for_drag(mime_data, Geometry.IntSize(width=80, height=80))
if valid:
on_drag(mime_data, thumbnail, x, y)
def drop_mime_data(mime_data: UserInterface.MimeData, x: int, y: int) -> str:
if callable(self.on_drop_mime_data):
return self.on_drop_mime_data(mime_data, x, y)
return "ignore"
def delete() -> None:
on_delete = self.on_delete
if callable(on_delete):
on_delete()
bitmap_overlay_canvas_item.on_drag_pressed = drag_pressed
bitmap_overlay_canvas_item.on_drop_mime_data = drop_mime_data
bitmap_overlay_canvas_item.on_delete = delete
def thumbnail_data_changed(thumbnail_data: typing.Optional[_NDArray]) -> None:
bitmap_canvas_item.rgba_bitmap_data = thumbnail_data
self.__thumbnail_source.on_thumbnail_data_changed = thumbnail_data_changed
bitmap_canvas_item.rgba_bitmap_data = self.__thumbnail_source.thumbnail_data
self.add_canvas_item(bitmap_overlay_canvas_item)
def close(self) -> None:
self.__thumbnail_source.close()
self.__thumbnail_source = typing.cast(typing.Any, None)
self.on_drag = None
self.on_drop_mime_data = None
self.on_delete = None
super().close()
class SquareCanvasItemLayout(CanvasItem.CanvasItemLayout):
def layout(self, canvas_origin: Geometry.IntPoint, canvas_size: Geometry.IntSize,
canvas_items: typing.Sequence[CanvasItem.AbstractCanvasItem], *, immediate: bool = False) -> None:
r = Geometry.IntRect(origin=canvas_origin, size=canvas_size)
if canvas_size.width > canvas_size.height:
r = Geometry.fit_to_size(r, Geometry.IntSize(w=canvas_size.height, h=canvas_size.height)).to_int_rect()
super().layout(canvas_origin, r.size, canvas_items, immediate=immediate)
else:
r = Geometry.fit_to_size(r, Geometry.IntSize(w=canvas_size.width, h=canvas_size.width)).to_int_rect()
super().layout(canvas_origin, r.size, canvas_items, immediate=immediate)
class ThumbnailWidget(Widgets.CompositeWidgetBase):
# when this widget is placed within a container, it will have no intrinsic size unless size is passed as a
# parameter. for the case where the size parameter is unspecified, setting the size policy to expanding (in both
# directions) tells the container that this widget would like to use all available space. however, in order for this
# to take effect, the container hierarchy cannot utilize unbound stretches or else this widget will not expand. the
# minimum size is present so that it always uses at least 32x32 pixels. the square canvas layout ensures that the
# thumbnail area is always square and aligned to the top-left of the container.
def __init__(self, ui: UserInterface.UserInterface, thumbnail_source: AbstractThumbnailSource,
size: typing.Optional[Geometry.IntSize] = None,
properties: typing.Optional[Persistence.PersistentDictType] = None,
is_expanding: bool = False) -> None:
content_widget = ui.create_column_widget(properties={"size-policy-horizontal": "expanding",
"size-policy-vertical": "expanding"} if is_expanding else None)
super().__init__(content_widget)
if not is_expanding:
size = size or Geometry.IntSize(width=80, height=80)
thumbnail_canvas_item = ThumbnailCanvasItem(ui, thumbnail_source, size)
properties = properties or ({"height": size.height, "width": size.width} if size else dict())
bitmap_canvas_widget = ui.create_canvas_widget(properties=properties)
thumbnail_square = CanvasItem.CanvasItemComposition()
thumbnail_square.layout = SquareCanvasItemLayout()
thumbnail_square.add_canvas_item(thumbnail_canvas_item)
bitmap_canvas_widget.canvas_item.add_canvas_item(thumbnail_square)
content_widget.add(bitmap_canvas_widget)
self.on_drop_mime_data: typing.Optional[typing.Callable[[UserInterface.MimeData, int, int], str]] = None
self.on_drag: typing.Optional[typing.Callable[[UserInterface.MimeData, typing.Optional[_ImageDataType], int, int], None]] = None
self.on_delete: typing.Optional[typing.Callable[[], None]] = None
def drop_mime_data(mime_data: UserInterface.MimeData, x: int, y: int) -> str:
if callable(self.on_drop_mime_data):
return self.on_drop_mime_data(mime_data, x, y)
return "ignore"
def drag(mime_data: UserInterface.MimeData, thumbnail: typing.Optional[_NDArray], x: int, y: int) -> None:
on_drag = self.on_drag
if callable(on_drag):
on_drag(mime_data, thumbnail, x, y)
def delete() -> None:
on_delete = self.on_delete
if callable(on_delete):
on_delete()
thumbnail_canvas_item.on_drop_mime_data = drop_mime_data
thumbnail_canvas_item.on_drag = drag
thumbnail_canvas_item.on_delete = delete
def close(self) -> None:
self.on_drop_mime_data = None
self.on_drag = None
self.on_delete = None
super().close()
class DataItemBitmapOverlayCanvasItem(CanvasItem.AbstractCanvasItem):
def __init__(self) -> None:
super().__init__()
self.__active = False
@property
def active(self) -> bool:
return self.__active
@active.setter
def active(self, value: bool) -> None:
if value != self.__active:
self.__active = value
self.update()
def _repaint(self, drawing_context: DrawingContext.DrawingContext) -> None:
super()._repaint(drawing_context)
if self.active:
with drawing_context.saver():
drawing_context.begin_path()
drawing_context.round_rect(2, 2, 6, 6, 3)
drawing_context.fill_style = "rgba(0, 255, 0, 0.80)"
drawing_context.fill()
class DataItemThumbnailSource(AbstractThumbnailSource):
def __init__(self, ui: UserInterface.UserInterface, *,
display_item: typing.Optional[DisplayItem.DisplayItem] = None,
window: typing.Optional[Window.Window] = None) -> None:
super().__init__()
self.ui = ui
self.__display_item: typing.Optional[DisplayItem.DisplayItem] = None
self.__window = window
self.__display_item_binding: typing.Optional[Binding.Binding] = None
self.__thumbnail_source: typing.Optional[Thumbnails.ThumbnailSource] = None
self.__thumbnail_updated_event_listener: typing.Optional[Event.EventListener] = None
self.overlay_canvas_item = DataItemBitmapOverlayCanvasItem()
if display_item:
self.set_display_item(display_item)
self.__update_display_item_task: typing.Optional[asyncio.Task[None]] = None
def close(self) -> None:
self.__detach_listeners()
if self.__display_item_binding:
self.__display_item_binding.close()
self.__display_item_binding = None
if self.__update_display_item_task:
self.__update_display_item_task.cancel()
self.__update_display_item_task = None
super().close()
def __detach_listeners(self) -> None:
if self.__thumbnail_updated_event_listener:
self.__thumbnail_updated_event_listener.close()
self.__thumbnail_updated_event_listener = None
if self.__thumbnail_source:
self.__thumbnail_source.remove_ref()
self.__thumbnail_source = None
def __update_thumbnail(self) -> None:
if self.__display_item:
self._set_thumbnail_data(Thumbnails.ThumbnailManager().thumbnail_data_for_display_item(self.__display_item))
setattr(self.overlay_canvas_item, "active", self.__display_item.is_live)
else:
self._set_thumbnail_data(None)
setattr(self.overlay_canvas_item, "active", False)
if callable(self.on_thumbnail_data_changed):
self.on_thumbnail_data_changed(self.thumbnail_data)
def set_display_item(self, display_item: typing.Optional[DisplayItem.DisplayItem]) -> None:
if self.__display_item != display_item:
self.__detach_listeners()
self.__display_item = display_item
if display_item:
self.__thumbnail_source = Thumbnails.ThumbnailManager().thumbnail_source_for_display_item(self.ui, display_item).add_ref()
self.__thumbnail_updated_event_listener = self.__thumbnail_source.thumbnail_updated_event.listen(self.__update_thumbnail)
self.__update_thumbnail()
if self.__display_item_binding:
self.__display_item_binding.update_source(display_item)
def populate_mime_data_for_drag(self, mime_data: UserInterface.MimeData, size: Geometry.IntSize) -> typing.Tuple[bool, typing.Optional[_NDArray]]:
if self.__display_item:
MimeTypes.mime_data_put_display_item(mime_data, self.__display_item)
rgba_image_data = self.__thumbnail_source.thumbnail_data if self.__thumbnail_source else None
thumbnail = Image.get_rgba_data_from_rgba(Image.scaled(Image.get_rgba_view_from_rgba_data(rgba_image_data), (80, 80))) if rgba_image_data is not None else None
return True, thumbnail
return False, None
@property
def display_item(self) -> typing.Optional[DisplayItem.DisplayItem]:
return self.__display_item
@display_item.setter
def display_item(self, value: typing.Optional[DisplayItem.DisplayItem]) -> None:
self.set_display_item(value)
def bind_display_item(self, binding: Binding.Binding) -> None:
if self.__display_item_binding:
self.__display_item_binding.close()
self.__display_item_binding = None
self.display_item = binding.get_target_value()
self.__display_item_binding = binding
def update_display_item(display_item: DisplayItem.DisplayItem) -> None:
if self.__window:
async def update_display_item_() -> None:
self.display_item = display_item
self.__update_display_item_task = None
self.__update_display_item_task = self.__window.event_loop.create_task(update_display_item_())
self.__display_item_binding.target_setter = update_display_item
def unbind_display_item(self) -> None:
if self.__display_item_binding:
self.__display_item_binding.close()
self.__display_item_binding = None
class DataItemReferenceThumbnailSource(DataItemThumbnailSource):
"""Used to track a data item referenced by a data item reference.
Useful, for instance, for displaying a live update thumbnail that can be dragged to other locations."""
def __init__(self, ui: UserInterface.UserInterface, document_model: DocumentModel.DocumentModel, data_item_reference: DocumentModel.DocumentModel.DataItemReference) -> None:
data_item = data_item_reference.data_item
display_item = document_model.get_display_item_for_data_item(data_item) if data_item else None
super().__init__(ui, display_item=display_item)
def data_item_changed() -> None:
data_item = data_item_reference.data_item
display_item = document_model.get_display_item_for_data_item(data_item) if data_item else None
self.set_display_item(display_item)
self.__data_item_reference_changed_event_listener = data_item_reference.data_item_reference_changed_event.listen(
data_item_changed)
def close(self) -> None:
self.__data_item_reference_changed_event_listener.close()
self.__data_item_reference_changed_event_listener = typing.cast(typing.Any, None)
super().close()
DataItemThumbnailWidget = ThumbnailWidget
| nion-software/nionswift | nion/swift/DataItemThumbnailWidget.py | Python | gpl-3.0 | 19,101 |
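The DataItemThumbnailWidget.py row above is organized around AbstractThumbnailSource's callback protocol: a source stores the latest thumbnail array and notifies a single on_thumbnail_data_changed listener when it changes. A reduced sketch of that protocol with illustrative names (MiniThumbnailSource and push are not part of nionswift):

import typing
import numpy as np

class MiniThumbnailSource:
    def __init__(self) -> None:
        # single listener, mirroring on_thumbnail_data_changed above
        self.on_thumbnail_data_changed: typing.Optional[typing.Callable[[typing.Optional[np.ndarray]], None]] = None
        self.__data: typing.Optional[np.ndarray] = None

    @property
    def thumbnail_data(self) -> typing.Optional[np.ndarray]:
        return self.__data

    def push(self, data: typing.Optional[np.ndarray]) -> None:
        # store the new thumbnail and notify the listener, if any
        self.__data = data
        if callable(self.on_thumbnail_data_changed):
            self.on_thumbnail_data_changed(data)

source = MiniThumbnailSource()
source.on_thumbnail_data_changed = lambda d: print("thumbnail:", None if d is None else d.shape)
source.push(np.zeros((80, 80, 4), dtype=np.uint8))   # prints "thumbnail: (80, 80, 4)"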
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le masque <groupe>."""
import re
from primaires.interpreteur.masque.masque import Masque
from primaires.interpreteur.masque.fonctions import *
from primaires.interpreteur.masque.exceptions.erreur_validation \
import ErreurValidation
NOM_VALIDE = r"^[A-Za-z]{4,}$"
class NvGroupe(Masque):
"""Masque <groupe>.
On attend un nom de groupe en paramètre.
"""
nom = "nv_groupe"
nom_complet = "nom du nouveau groupe"
def init(self):
"""Initialisation des attributs"""
self.nom_groupe = ""
def repartir(self, personnage, masques, commande):
"""Répartition du masque."""
nom_groupe = liste_vers_chaine(commande)
if not nom_groupe:
raise ErreurValidation(
"Précisez un nom pour le nouveau groupe.")
self.a_interpreter = nom_groupe
commande[:] = []
masques.append(self)
return True
def valider(self, personnage, dic_masques):
"""Validation du masque"""
Masque.valider(self, personnage, dic_masques)
nom_groupe = self.a_interpreter
if not re.match(NOM_VALIDE, nom_groupe):
raise ErreurValidation(
"|err|Ce nom de groupe est invalide.|ff|")
noms_groupes = [groupe.nom for groupe in \
type(self).importeur.interpreteur.groupes._groupes.values()]
if nom_groupe in noms_groupes:
raise ErreurValidation(
"|err|Ce nom de groupe est déjà utilisé.|ff|")
self.nom_groupe = nom_groupe.lower()
return True
| vlegoff/tsunami | src/primaires/joueur/masques/nv_groupe/__init__.py | Python | bsd-3-clause | 3,211 |
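The nv_groupe masque above accepts a group name only if it matches NOM_VALIDE (at least four plain letters) and is not already taken. A standalone sketch of just the regex part of valider():

import re

NOM_VALIDE = r"^[A-Za-z]{4,}$"
for name in ("admin", "abc", "héros", "joueur1"):
    print(name, bool(re.match(NOM_VALIDE, name)))
# admin True; abc False (too short); héros False (accented letter); joueur1 False (digit)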
"""
Tests for RunCaseVersionResource api.
"""
from tests import case
from datetime import datetime
class RunCaseVersionResourceTest(case.api.ApiTestCase):
@property
def factory(self):
"""The model factory for this object."""
return self.F.RunCaseVersionFactory
@property
def resource_name(self):
return "runcaseversion"
def test_runcaseversion_list(self):
"""Get a list of existing runcaseversions"""
r1 = self.F.RunFactory.create(name="RunA")
c1 = self.F.CaseVersionFactory.create(name="Case1", description="ab")
c2 = self.F.CaseVersionFactory.create(name="Case2", description="cd")
rcv1 = self.factory.create(caseversion=c1, run=r1)
rcv2 = self.factory.create(caseversion=c2, run=r1)
res = self.get_list()
self.assertEqual(res.status_int, 200)
act = res.json
act_meta = act["meta"]
exp_meta = {
"limit": 20,
"next": None,
"offset": 0,
"previous": None,
"total_count": 2,
}
self.assertEquals(act_meta, exp_meta)
act_objects = act["objects"]
exp_objects = []
for rcv in [rcv1, rcv2]:
cv = rcv.caseversion
exp_objects.append({
u"caseversion": {
u"case": unicode(self.get_detail_url("case", cv.case.id)),
u"created_by": None,
u"description": unicode(cv.description),
u"environments": [],
u"id": cv.id,
u"modified_by": None,
u"modified_on": unicode(cv.modified_on.strftime("%Y-%m-%dT%H:%M:%S")),
u"name": unicode(cv.name),
u"priority": unicode(None),
u"productversion": unicode(self.get_detail_url(
"productversion",
cv.productversion.id)),
u"productversion_name": unicode(cv.productversion.name),
u"resource_uri": unicode(self.get_detail_url(
"caseversion",
cv.id,
)),
u'steps': [],
u'tags': [],
u'status': unicode(cv.status),
},
u"id": rcv.id,
u"run": unicode(self.get_detail_url("run", rcv.run.id)),
u"resource_uri": unicode(self.get_detail_url(
"runcaseversion",
rcv.id,
)),
})
self.maxDiff = None
self.assertEqual(exp_objects, act_objects)
| mozilla/moztrap | tests/model/execution/api/test_runcaseversion.py | Python | bsd-2-clause | 2,717 |
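The expected payload in the test above serializes modified_on with a fixed strftime pattern. A tiny sketch of that formatting in isolation (the datetime value here is made up):

from datetime import datetime

modified_on = datetime(2013, 5, 1, 12, 30, 45, 123456)
print(modified_on.strftime("%Y-%m-%dT%H:%M:%S"))   # 2013-05-01T12:30:45 (microseconds dropped)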
#! coding=UTF-8
from tbparser.grammar import Rule, tokenNode as tnode, \
connector, sequence, zeroToOne, Switch, oneToMany, zeroToMany
from tbparser.parser import AstNode
from gobjcreator2.input.grammar.tokens import *
from gobjcreator2.input.grammar.type_name import TypeName
from gobjcreator2.input.grammar.misc_rules import ComposedId, \
Visibility, Inheritance, Scope, Type
import gobjcreator2.input.grammar.util as util
class Method(Rule):
def __init__(self, ident=''):
Rule.__init__(self, 'method', ident)
def expand(self, start, end, context):
start_ = connector()
branches = {}
branches[PARAMETER] = Parameter('parameter')
branches[RESULT] = Result('result')
if not context.getEnvVar('INTERFACE'):
branches[VISI] = Visibility('visi')
branches[INHERITANCE] = Inheritance('inh')
branches[SCOPE] = Scope('scope')
start\
.connect(tnode(METHOD))\
.connect(tnode(ID, 'name'))\
.connect(tnode(BRACE_OPEN))\
.connect(start_)
start_.connect(Switch(branches)).connect(start_)
start_\
.connect(zeroToOne(sequence(tnode(FURTHER_PARAMETERS, 'further_params'), tnode(SEMICOLON))))\
.connect(tnode(BRACE_CLOSE))\
.connect(end)
def transform(self, astNode):
res = AstNode(self.getName())
nameNode = AstNode('name', astNode.getChildById('name').getText())
res.addChild(nameNode)
util.addOptionalChildren(astNode, res, 'parameter')
util.addOptionalChild(astNode, res, 'result')
util.addOptionalChild(astNode, res, 'visi')
util.addOptionalChild(astNode, res, 'inh')
util.addOptionalChild(astNode, res, 'scope')
node = astNode.getChildById('further_params')
if node:
res.addChild(AstNode('further_params'))
return res
class Signal(Rule):
def __init__(self, ident=''):
Rule.__init__(self, 'signal', ident)
def expand(self, start, end, context):
branches = {
PARAMETER: Parameter('parameter'),
RESULT: Result('result')
}
start\
.connect(tnode(SIGNAL))\
.connect(ComposedId('name'))\
.connect(tnode(BRACE_OPEN))\
.connect(zeroToMany(Switch(branches)))\
.connect(tnode(BRACE_CLOSE))\
.connect(end)
def transform(self, astNode):
res = AstNode(self.getName())
nameNode = AstNode('name', astNode.getChildById('name').getText())
res.addChild(nameNode)
util.addOptionalChild(astNode, res, 'result')
util.addOptionalChildren(astNode, res, 'parameter')
return res
class Parameter(Rule):
def __init__(self, ident=''):
Rule.__init__(self, 'parameter', ident)
def expand(self, start, end, context):
start_ = connector()
end_ = connector()
start\
.connect(tnode(PARAMETER))\
.connect(tnode(ID, 'name'))\
.connect(tnode(BRACE_OPEN))\
.connect(Type('type'))\
.connect(start_)
end_.connect(tnode(BRACE_CLOSE)).connect(end)
start_.connect(end_)
start_\
.connect(tnode(MODIFIERS, 'modifiers'))\
.connect(tnode(COLON))\
.connect(tnode(CONST, 'const'))\
.connect(tnode(SEMICOLON))\
.connect(start_)
if context.getEnvVar('CONSTRUCTOR'):
start_\
.connect(tnode(BIND_PROPERTY, 'bindProperty'))\
.connect(tnode(COLON))\
.connect(ComposedId('propertyId'))\
.connect(tnode(SEMICOLON))\
.connect(start_)
def transform(self, astNode):
res = AstNode(self.getName())
res.addChild(AstNode('name', astNode.getChildById('name').getText()))
typeNode = astNode.getChildById('type')
typeNode.setId('')
res.addChild(typeNode)
modifiersNode = astNode.getChildById('modifiers')
if modifiersNode:
res.addChild(AstNode('const'))
bindNode = astNode.getChildById('bindProperty')
if bindNode:
propIdNode = astNode.getChildById('propertyId')
res.addChild(AstNode('bindProperty', propIdNode.getText()))
return res
class Result(Rule):
def __init__(self, ident=''):
Rule.__init__(self, 'result', ident)
def expand(self, start, end, context):
modifiers = sequence(tnode(MODIFIERS, 'modifiers'),
tnode(COLON),
tnode(CONST, 'const'),
tnode(SEMICOLON)
)
start\
.connect(tnode(RESULT))\
.connect(tnode(BRACE_OPEN))\
.connect(Type('type'))\
.connect(zeroToOne(modifiers))\
.connect(tnode(BRACE_CLOSE))\
.connect(end)
def transform(self, astNode):
res = AstNode(self.getName())
typeNode = astNode.getChildById('type')
typeNode.setId('')
res.addChild(typeNode)
modifiersNode = astNode.getChildById('modifiers')
if modifiersNode:
res.addChild(AstNode('const'))
return res
| ThomasBollmeier/GObjectCreator2 | src/gobjcreator2/input/grammar/method.py | Python | gpl-3.0 | 5,664 |
# Demo of a robust regression model with multivariate-t distributed noise
import numpy as np
import numpy.random as npr
np.random.seed(0)
import matplotlib.pyplot as plt
import seaborn as sns
sns.set_style("white")
from pybasicbayes.util.text import progprint_xrange
from pybasicbayes.distributions import Regression, RobustRegression
D_out = 1
D_in = 2
N = 100
# Make a regression model and simulate data
A = npr.randn(D_out, D_in)
b = npr.randn(D_out)
Sigma = 0.1 * np.eye(D_out)
true_reg = Regression(A=np.column_stack((A, b)), sigma=Sigma, affine=True)
X = npr.randn(N, D_in)
y = true_reg.rvs(x=X, return_xy=False)
# Corrupt a fraction of the data
inds = npr.rand(N) < 0.1
y[inds] = 3 * npr.randn(inds.sum(), D_out)
# Make a test regression and fit it
std_reg = Regression(nu_0=D_out + 2,
S_0=np.eye(D_out),
M_0=np.zeros((D_out, D_in+1)),
K_0=np.eye(D_in+1),
affine=True)
robust_reg = RobustRegression(nu_0=D_out+2,
S_0=np.eye(D_out),
M_0=np.zeros((D_out, D_in+1)),
K_0=np.eye(D_in+1),
affine=True)
def _collect(r):
ll = r.log_likelihood((X, y))[~inds].sum()
err = ((y - r.predict(X))**2).sum(1)
mse = np.mean(err[~inds])
return r.A.copy(), ll, mse
def _update(r):
r.resample([(X,y)])
return _collect(r)
# Fit the standard regression
smpls = [_collect(std_reg)]
for _ in progprint_xrange(100):
smpls.append(_update(std_reg))
smpls = zip(*smpls)
std_As, std_lls, std_mses = tuple(map(np.array, smpls))
# Fit the robust regression
smpls = [_collect(robust_reg)]
for _ in progprint_xrange(100):
smpls.append(_update(robust_reg))
smpls = zip(*smpls)
robust_As, robust_lls, robust_mses = tuple(map(np.array, smpls))
# Plot the inferred regression function
plt.figure(figsize=(8, 4))
xlim = (-3, 3)
ylim = abs(y).max()
npts = 50
x1, x2 = np.meshgrid(np.linspace(*xlim, npts), np.linspace(*xlim, npts))
plt.subplot(131)
mu = true_reg.predict(np.column_stack((x1.ravel(), x2.ravel())))
plt.imshow(mu.reshape((npts, npts)),
cmap="RdBu", vmin=-ylim, vmax=ylim,
alpha=0.8,
extent=xlim + tuple(reversed(xlim)))
plt.scatter(X[~inds,0], X[~inds,1], c=y[~inds, 0], cmap="RdBu", vmin=-ylim, vmax=ylim, edgecolors='gray')
plt.scatter(X[inds,0], X[inds,1], c=y[inds, 0], cmap="RdBu", vmin=-ylim, vmax=ylim, edgecolors='k', linewidths=1)
plt.xlim(xlim)
plt.ylim(xlim)
plt.title("True")
plt.subplot(132)
mu = std_reg.predict(np.column_stack((x1.ravel(), x2.ravel())))
plt.imshow(mu.reshape((npts, npts)),
cmap="RdBu", vmin=-ylim, vmax=ylim,
alpha=0.8,
extent=xlim + tuple(reversed(xlim)))
plt.scatter(X[~inds,0], X[~inds,1], c=y[~inds, 0], cmap="RdBu", vmin=-ylim, vmax=ylim, edgecolors='gray')
plt.scatter(X[inds,0], X[inds,1], c=y[inds, 0], cmap="RdBu", vmin=-ylim, vmax=ylim, edgecolors='k', linewidths=1)
plt.xlim(xlim)
plt.ylim(xlim)
plt.title("Standard Regression")
plt.subplot(133)
mu = robust_reg.predict(np.column_stack((x1.ravel(), x2.ravel())))
plt.imshow(mu.reshape((npts, npts)),
cmap="RdBu", vmin=-ylim, vmax=ylim,
alpha=0.8,
extent=xlim + tuple(reversed(xlim)))
plt.scatter(X[~inds,0], X[~inds,1], c=y[~inds, 0], cmap="RdBu", vmin=-ylim, vmax=ylim, edgecolors='gray')
plt.scatter(X[inds,0], X[inds,1], c=y[inds, 0], cmap="RdBu", vmin=-ylim, vmax=ylim, edgecolors='k', linewidths=1)
plt.xlim(xlim)
plt.ylim(xlim)
plt.title("Robust Regression")
print("True A: {}".format(true_reg.A))
print("Std A: {}".format(std_As.mean(0)))
print("Robust A: {}".format(robust_As.mean(0)))
# Plot the log likelihoods and mean squared errors
plt.figure(figsize=(8, 4))
plt.subplot(121)
plt.plot(std_lls)
plt.plot(robust_lls)
plt.xlabel("Iteration")
plt.ylabel("Log Likelihood")
plt.subplot(122)
plt.plot(std_mses, label="Standard")
plt.plot(robust_mses, label="Robust")
plt.legend(loc="upper right")
plt.xlabel("Iteration")
plt.ylabel("Mean Squared Error")
plt.show()
| mattjj/pybasicbayes | examples/robust_regression.py | Python | mit | 4,079 |
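The robust_regression.py demo above corrupts roughly 10% of the targets and shows that the Student-t noise model shrugs the outliers off while the standard regression does not. As a rough standalone illustration of the same effect (numpy only, using crude residual trimming rather than pybasicbayes' multivariate-t model):

import numpy as np

rng = np.random.default_rng(0)
X = rng.normal(size=(100, 2))
w_true = np.array([1.0, -2.0])
y = X @ w_true + 0.1 * rng.normal(size=100)
bad = rng.random(100) < 0.1
y[bad] = 3 * rng.normal(size=bad.sum())            # corrupt ~10% of the targets

w_ols = np.linalg.lstsq(X, y, rcond=None)[0]       # pulled toward the outliers
resid = np.abs(y - X @ w_ols)
keep = resid < np.quantile(resid, 0.9)             # drop the worst 10% of residuals
w_trim = np.linalg.lstsq(X[keep], y[keep], rcond=None)[0]
print("true:", w_true)
print("least squares:", w_ols)
print("trimmed refit:", w_trim)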
# -*- coding: utf-8 -*-
"""AST nodes generated by the parser for the compiler. Also provides
some node tree helper functions used by the parser and compiler in order
to normalize nodes.
"""
import operator
from collections import deque
from markupsafe import Markup
from ._compat import izip
from ._compat import PY2
from ._compat import text_type
from ._compat import with_metaclass
_binop_to_func = {
"*": operator.mul,
"/": operator.truediv,
"//": operator.floordiv,
"**": operator.pow,
"%": operator.mod,
"+": operator.add,
"-": operator.sub,
}
_uaop_to_func = {"not": operator.not_, "+": operator.pos, "-": operator.neg}
_cmpop_to_func = {
"eq": operator.eq,
"ne": operator.ne,
"gt": operator.gt,
"gteq": operator.ge,
"lt": operator.lt,
"lteq": operator.le,
"in": lambda a, b: a in b,
"notin": lambda a, b: a not in b,
}
class Impossible(Exception):
"""Raised if the node could not perform a requested action."""
class NodeType(type):
"""A metaclass for nodes that handles the field and attribute
inheritance. fields and attributes from the parent class are
automatically forwarded to the child."""
def __new__(mcs, name, bases, d):
for attr in "fields", "attributes":
storage = []
storage.extend(getattr(bases[0], attr, ()))
storage.extend(d.get(attr, ()))
assert len(bases) == 1, "multiple inheritance not allowed"
assert len(storage) == len(set(storage)), "layout conflict"
d[attr] = tuple(storage)
d.setdefault("abstract", False)
return type.__new__(mcs, name, bases, d)
class EvalContext(object):
"""Holds evaluation time information. Custom attributes can be attached
to it in extensions.
"""
def __init__(self, environment, template_name=None):
self.environment = environment
if callable(environment.autoescape):
self.autoescape = environment.autoescape(template_name)
else:
self.autoescape = environment.autoescape
self.volatile = False
def save(self):
return self.__dict__.copy()
def revert(self, old):
self.__dict__.clear()
self.__dict__.update(old)
def get_eval_context(node, ctx):
if ctx is None:
if node.environment is None:
raise RuntimeError(
"if no eval context is passed, the "
"node must have an attached "
"environment."
)
return EvalContext(node.environment)
return ctx
class Node(with_metaclass(NodeType, object)):
"""Baseclass for all Jinja nodes. There are a number of nodes available
of different types. There are four major types:
- :class:`Stmt`: statements
- :class:`Expr`: expressions
- :class:`Helper`: helper nodes
- :class:`Template`: the outermost wrapper node
All nodes have fields and attributes. Fields may be other nodes, lists,
or arbitrary values. Fields are passed to the constructor as regular
positional arguments, attributes as keyword arguments. Each node has
two attributes: `lineno` (the line number of the node) and `environment`.
The `environment` attribute is set at the end of the parsing process for
all nodes automatically.
"""
fields = ()
attributes = ("lineno", "environment")
abstract = True
def __init__(self, *fields, **attributes):
if self.abstract:
raise TypeError("abstract nodes are not instantiable")
if fields:
if len(fields) != len(self.fields):
if not self.fields:
raise TypeError("%r takes 0 arguments" % self.__class__.__name__)
raise TypeError(
"%r takes 0 or %d argument%s"
% (
self.__class__.__name__,
len(self.fields),
len(self.fields) != 1 and "s" or "",
)
)
for name, arg in izip(self.fields, fields):
setattr(self, name, arg)
for attr in self.attributes:
setattr(self, attr, attributes.pop(attr, None))
if attributes:
raise TypeError("unknown attribute %r" % next(iter(attributes)))
def iter_fields(self, exclude=None, only=None):
"""This method iterates over all fields that are defined and yields
``(key, value)`` tuples. Per default all fields are returned, but
it's possible to limit that to some fields by providing the `only`
parameter or to exclude some using the `exclude` parameter. Both
should be sets or tuples of field names.
"""
for name in self.fields:
if (
(exclude is only is None)
or (exclude is not None and name not in exclude)
or (only is not None and name in only)
):
try:
yield name, getattr(self, name)
except AttributeError:
pass
def iter_child_nodes(self, exclude=None, only=None):
"""Iterates over all direct child nodes of the node. This iterates
over all fields and yields the values if they are nodes. If the value
of a field is a list all the nodes in that list are returned.
"""
for _, item in self.iter_fields(exclude, only):
if isinstance(item, list):
for n in item:
if isinstance(n, Node):
yield n
elif isinstance(item, Node):
yield item
def find(self, node_type):
"""Find the first node of a given type. If no such node exists the
return value is `None`.
"""
for result in self.find_all(node_type):
return result
def find_all(self, node_type):
"""Find all the nodes of a given type. If the type is a tuple,
the check is performed for any of the tuple items.
"""
for child in self.iter_child_nodes():
if isinstance(child, node_type):
yield child
for result in child.find_all(node_type):
yield result
def set_ctx(self, ctx):
"""Reset the context of a node and all child nodes. Per default the
parser will all generate nodes that have a 'load' context as it's the
most common one. This method is used in the parser to set assignment
targets and other nodes to a store context.
"""
todo = deque([self])
while todo:
node = todo.popleft()
if "ctx" in node.fields:
node.ctx = ctx
todo.extend(node.iter_child_nodes())
return self
def set_lineno(self, lineno, override=False):
"""Set the line numbers of the node and children."""
todo = deque([self])
while todo:
node = todo.popleft()
if "lineno" in node.attributes:
if node.lineno is None or override:
node.lineno = lineno
todo.extend(node.iter_child_nodes())
return self
def set_environment(self, environment):
"""Set the environment for all nodes."""
todo = deque([self])
while todo:
node = todo.popleft()
node.environment = environment
todo.extend(node.iter_child_nodes())
return self
def __eq__(self, other):
return type(self) is type(other) and tuple(self.iter_fields()) == tuple(
other.iter_fields()
)
def __ne__(self, other):
return not self.__eq__(other)
# Restore Python 2 hashing behavior on Python 3
__hash__ = object.__hash__
def __repr__(self):
return "%s(%s)" % (
self.__class__.__name__,
", ".join("%s=%r" % (arg, getattr(self, arg, None)) for arg in self.fields),
)
def dump(self):
def _dump(node):
if not isinstance(node, Node):
buf.append(repr(node))
return
buf.append("nodes.%s(" % node.__class__.__name__)
if not node.fields:
buf.append(")")
return
for idx, field in enumerate(node.fields):
if idx:
buf.append(", ")
value = getattr(node, field)
if isinstance(value, list):
buf.append("[")
for idx, item in enumerate(value):
if idx:
buf.append(", ")
_dump(item)
buf.append("]")
else:
_dump(value)
buf.append(")")
buf = []
_dump(self)
return "".join(buf)
class Stmt(Node):
"""Base node for all statements."""
abstract = True
class Helper(Node):
"""Nodes that exist in a specific context only."""
abstract = True
class Template(Node):
"""Node that represents a template. This must be the outermost node that
is passed to the compiler.
"""
fields = ("body",)
class Output(Stmt):
"""A node that holds multiple expressions which are then printed out.
This is used both for the `print` statement and the regular template data.
"""
fields = ("nodes",)
class Extends(Stmt):
"""Represents an extends statement."""
fields = ("template",)
class For(Stmt):
"""The for loop. `target` is the target for the iteration (usually a
:class:`Name` or :class:`Tuple`), `iter` the iterable. `body` is a list
of nodes that are used as loop-body, and `else_` a list of nodes for the
`else` block. If no else node exists it has to be an empty list.
For filtered nodes an expression can be stored as `test`, otherwise `None`.
"""
fields = ("target", "iter", "body", "else_", "test", "recursive")
class If(Stmt):
"""If `test` is true, `body` is rendered, else `else_`."""
fields = ("test", "body", "elif_", "else_")
class Macro(Stmt):
"""A macro definition. `name` is the name of the macro, `args` a list of
arguments and `defaults` a list of defaults if there are any. `body` is
a list of nodes for the macro body.
"""
fields = ("name", "args", "defaults", "body")
class CallBlock(Stmt):
"""Like a macro without a name but a call instead. `call` is called with
the unnamed macro as `caller` argument this node holds.
"""
fields = ("call", "args", "defaults", "body")
class FilterBlock(Stmt):
"""Node for filter sections."""
fields = ("body", "filter")
class With(Stmt):
"""Specific node for with statements. In older versions of Jinja the
with statement was implemented on the base of the `Scope` node instead.
.. versionadded:: 2.9.3
"""
fields = ("targets", "values", "body")
class Block(Stmt):
"""A node that represents a block."""
fields = ("name", "body", "scoped")
class Include(Stmt):
"""A node that represents the include tag."""
fields = ("template", "with_context", "ignore_missing")
class Import(Stmt):
"""A node that represents the import tag."""
fields = ("template", "target", "with_context")
class FromImport(Stmt):
"""A node that represents the from import tag. It's important to not
pass unsafe names to the name attribute. The compiler translates the
attribute lookups directly into getattr calls and does *not* use the
subscript callback of the interface. As exported variables may not
start with double underscores (which the parser asserts) this is not a
problem for regular Jinja code, but if this node is used in an extension
extra care must be taken.
The list of names may contain tuples if aliases are wanted.
"""
fields = ("template", "names", "with_context")
class ExprStmt(Stmt):
"""A statement that evaluates an expression and discards the result."""
fields = ("node",)
class Assign(Stmt):
"""Assigns an expression to a target."""
fields = ("target", "node")
class AssignBlock(Stmt):
"""Assigns a block to a target."""
fields = ("target", "filter", "body")
class Expr(Node):
"""Baseclass for all expressions."""
abstract = True
def as_const(self, eval_ctx=None):
"""Return the value of the expression as constant or raise
:exc:`Impossible` if this was not possible.
An :class:`EvalContext` can be provided, if none is given
a default context is created which requires the nodes to have
an attached environment.
.. versionchanged:: 2.4
the `eval_ctx` parameter was added.
"""
raise Impossible()
def can_assign(self):
"""Check if it's possible to assign something to this node."""
return False
class BinExpr(Expr):
"""Baseclass for all binary expressions."""
fields = ("left", "right")
operator = None
abstract = True
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
# intercepted operators cannot be folded at compile time
if (
self.environment.sandboxed
and self.operator in self.environment.intercepted_binops
):
raise Impossible()
f = _binop_to_func[self.operator]
try:
return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
except Exception:
raise Impossible()
class UnaryExpr(Expr):
"""Baseclass for all unary expressions."""
fields = ("node",)
operator = None
abstract = True
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
# intercepted operators cannot be folded at compile time
if (
self.environment.sandboxed
and self.operator in self.environment.intercepted_unops
):
raise Impossible()
f = _uaop_to_func[self.operator]
try:
return f(self.node.as_const(eval_ctx))
except Exception:
raise Impossible()
class Name(Expr):
"""Looks up a name or stores a value in a name.
The `ctx` of the node can be one of the following values:
- `store`: store a value in the name
- `load`: load that name
- `param`: like `store` but if the name was defined as function parameter.
"""
fields = ("name", "ctx")
def can_assign(self):
return self.name not in ("true", "false", "none", "True", "False", "None")
class NSRef(Expr):
"""Reference to a namespace value assignment"""
fields = ("name", "attr")
def can_assign(self):
# We don't need any special checks here; NSRef assignments have a
# runtime check to ensure the target is a namespace object which will
# have been checked already as it is created using a normal assignment
# which goes through a `Name` node.
return True
class Literal(Expr):
"""Baseclass for literals."""
abstract = True
class Const(Literal):
"""All constant values. The parser will return this node for simple
constants such as ``42`` or ``"foo"`` but it can be used to store more
complex values such as lists too. Only constants with a safe
representation (objects where ``eval(repr(x)) == x`` is true) are supported.
"""
fields = ("value",)
def as_const(self, eval_ctx=None):
rv = self.value
if (
PY2
and type(rv) is text_type
and self.environment.policies["compiler.ascii_str"]
):
try:
rv = rv.encode("ascii")
except UnicodeError:
pass
return rv
@classmethod
def from_untrusted(cls, value, lineno=None, environment=None):
"""Return a const object if the value is representable as
constant value in the generated code, otherwise it will raise
an `Impossible` exception.
"""
from .compiler import has_safe_repr
if not has_safe_repr(value):
raise Impossible()
return cls(value, lineno=lineno, environment=environment)
class TemplateData(Literal):
"""A constant template string."""
fields = ("data",)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
if eval_ctx.autoescape:
return Markup(self.data)
return self.data
class Tuple(Literal):
"""For loop unpacking and some other things like multiple arguments
for subscripts. Like for :class:`Name` `ctx` specifies if the tuple
is used for loading the names or storing.
"""
fields = ("items", "ctx")
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return tuple(x.as_const(eval_ctx) for x in self.items)
def can_assign(self):
for item in self.items:
if not item.can_assign():
return False
return True
class List(Literal):
"""Any list literal such as ``[1, 2, 3]``"""
fields = ("items",)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return [x.as_const(eval_ctx) for x in self.items]
class Dict(Literal):
"""Any dict literal such as ``{1: 2, 3: 4}``. The items must be a list of
:class:`Pair` nodes.
"""
fields = ("items",)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return dict(x.as_const(eval_ctx) for x in self.items)
class Pair(Helper):
"""A key, value pair for dicts."""
fields = ("key", "value")
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
class Keyword(Helper):
"""A key, value pair for keyword arguments where key is a string."""
fields = ("key", "value")
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.key, self.value.as_const(eval_ctx)
class CondExpr(Expr):
"""A conditional expression (inline if expression). (``{{
foo if bar else baz }}``)
"""
fields = ("test", "expr1", "expr2")
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if self.test.as_const(eval_ctx):
return self.expr1.as_const(eval_ctx)
# if we evaluate to an undefined object, we better do that at runtime
if self.expr2 is None:
raise Impossible()
return self.expr2.as_const(eval_ctx)
def args_as_const(node, eval_ctx):
args = [x.as_const(eval_ctx) for x in node.args]
kwargs = dict(x.as_const(eval_ctx) for x in node.kwargs)
if node.dyn_args is not None:
try:
args.extend(node.dyn_args.as_const(eval_ctx))
except Exception:
raise Impossible()
if node.dyn_kwargs is not None:
try:
kwargs.update(node.dyn_kwargs.as_const(eval_ctx))
except Exception:
raise Impossible()
return args, kwargs
class Filter(Expr):
"""This node applies a filter on an expression. `name` is the name of
the filter, the rest of the fields are the same as for :class:`Call`.
If the `node` of a filter is `None` the contents of the last buffer are
filtered. Buffers are created by macros and filter blocks.
"""
fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile or self.node is None:
raise Impossible()
# we have to be careful here because we call filter_ below.
# if this variable would be called filter, 2to3 would wrap the
# call in a list because it is assuming we are talking about the
# builtin filter function here which no longer returns a list in
# python 3. because of that, do not rename filter_ to filter!
filter_ = self.environment.filters.get(self.name)
if filter_ is None or getattr(filter_, "contextfilter", False) is True:
raise Impossible()
# We cannot constant handle async filters, so we need to make sure
# to not go down this path.
if eval_ctx.environment.is_async and getattr(
filter_, "asyncfiltervariant", False
):
raise Impossible()
args, kwargs = args_as_const(self, eval_ctx)
args.insert(0, self.node.as_const(eval_ctx))
if getattr(filter_, "evalcontextfilter", False) is True:
args.insert(0, eval_ctx)
elif getattr(filter_, "environmentfilter", False) is True:
args.insert(0, self.environment)
try:
return filter_(*args, **kwargs)
except Exception:
raise Impossible()
class Test(Expr):
"""Applies a test on an expression. `name` is the name of the test, the
rest of the fields are the same as for :class:`Call`.
"""
fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")
def as_const(self, eval_ctx=None):
test = self.environment.tests.get(self.name)
if test is None:
raise Impossible()
eval_ctx = get_eval_context(self, eval_ctx)
args, kwargs = args_as_const(self, eval_ctx)
args.insert(0, self.node.as_const(eval_ctx))
try:
return test(*args, **kwargs)
except Exception:
raise Impossible()
class Call(Expr):
"""Calls an expression. `args` is a list of arguments, `kwargs` a list
of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
and `dyn_kwargs` has to be either `None` or a node that is used as
node for dynamic positional (``*args``) or keyword (``**kwargs``)
arguments.
"""
fields = ("node", "args", "kwargs", "dyn_args", "dyn_kwargs")
class Getitem(Expr):
"""Get an attribute or item from an expression and prefer the item."""
fields = ("node", "arg", "ctx")
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if self.ctx != "load":
raise Impossible()
try:
return self.environment.getitem(
self.node.as_const(eval_ctx), self.arg.as_const(eval_ctx)
)
except Exception:
raise Impossible()
def can_assign(self):
return False
class Getattr(Expr):
"""Get an attribute or item from an expression that is a ascii-only
bytestring and prefer the attribute.
"""
fields = ("node", "attr", "ctx")
def as_const(self, eval_ctx=None):
if self.ctx != "load":
raise Impossible()
try:
eval_ctx = get_eval_context(self, eval_ctx)
return self.environment.getattr(self.node.as_const(eval_ctx), self.attr)
except Exception:
raise Impossible()
def can_assign(self):
return False
class Slice(Expr):
"""Represents a slice object. This must only be used as argument for
:class:`Subscript`.
"""
fields = ("start", "stop", "step")
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
def const(obj):
if obj is None:
return None
return obj.as_const(eval_ctx)
return slice(const(self.start), const(self.stop), const(self.step))
class Concat(Expr):
"""Concatenates the list of expressions provided after converting them to
unicode.
"""
fields = ("nodes",)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return "".join(text_type(x.as_const(eval_ctx)) for x in self.nodes)
class Compare(Expr):
"""Compares an expression with some other expressions. `ops` must be a
list of :class:`Operand`\\s.
"""
fields = ("expr", "ops")
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
result = value = self.expr.as_const(eval_ctx)
try:
for op in self.ops:
new_value = op.expr.as_const(eval_ctx)
result = _cmpop_to_func[op.op](value, new_value)
if not result:
return False
value = new_value
except Exception:
raise Impossible()
return result
class Operand(Helper):
"""Holds an operator and an expression."""
fields = ("op", "expr")
if __debug__:
Operand.__doc__ += "\nThe following operators are available: " + ", ".join(
sorted(
"``%s``" % x
for x in set(_binop_to_func) | set(_uaop_to_func) | set(_cmpop_to_func)
)
)
class Mul(BinExpr):
"""Multiplies the left with the right node."""
operator = "*"
class Div(BinExpr):
"""Divides the left by the right node."""
operator = "/"
class FloorDiv(BinExpr):
"""Divides the left by the right node and truncates conver the
result into an integer by truncating.
"""
operator = "//"
class Add(BinExpr):
"""Add the left to the right node."""
operator = "+"
class Sub(BinExpr):
"""Subtract the right from the left node."""
operator = "-"
class Mod(BinExpr):
"""Left modulo right."""
operator = "%"
class Pow(BinExpr):
"""Left to the power of right."""
operator = "**"
class And(BinExpr):
"""Short circuited AND."""
operator = "and"
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)
class Or(BinExpr):
"""Short circuited OR."""
operator = "or"
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)
class Not(UnaryExpr):
"""Negate the expression."""
operator = "not"
class Neg(UnaryExpr):
"""Make the expression negative."""
operator = "-"
class Pos(UnaryExpr):
"""Make the expression positive (noop for most expressions)"""
operator = "+"
# Helpers for extensions
class EnvironmentAttribute(Expr):
"""Loads an attribute from the environment object. This is useful for
extensions that want to call a callback stored on the environment.
"""
fields = ("name",)
class ExtensionAttribute(Expr):
"""Returns the attribute of an extension bound to the environment.
The identifier is the identifier of the :class:`Extension`.
This node is usually constructed by calling the
:meth:`~jinja2.ext.Extension.attr` method on an extension.
"""
fields = ("identifier", "name")
class ImportedName(Expr):
"""If created with an import name the import name is returned on node
access. For example ``ImportedName('cgi.escape')`` returns the `escape`
function from the cgi module on evaluation. Imports are optimized by the
compiler so there is no need to assign them to local variables.
"""
fields = ("importname",)
class InternalName(Expr):
"""An internal name in the compiler. You cannot create these nodes
yourself but the parser provides a
:meth:`~jinja2.parser.Parser.free_identifier` method that creates
a new identifier for you. This identifier is not available from the
    template and is not treated specially by the compiler.
"""
fields = ("name",)
def __init__(self):
raise TypeError(
"Can't create internal names. Use the "
"`free_identifier` method on a parser."
)
class MarkSafe(Expr):
"""Mark the wrapped expression as safe (wrap it as `Markup`)."""
fields = ("expr",)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return Markup(self.expr.as_const(eval_ctx))
class MarkSafeIfAutoescape(Expr):
"""Mark the wrapped expression as safe (wrap it as `Markup`) but
only if autoescaping is active.
.. versionadded:: 2.5
"""
fields = ("expr",)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
expr = self.expr.as_const(eval_ctx)
if eval_ctx.autoescape:
return Markup(expr)
return expr
class ContextReference(Expr):
"""Returns the current template context. It can be used like a
:class:`Name` node, with a ``'load'`` ctx and will return the
current :class:`~jinja2.runtime.Context` object.
    Here is an example that assigns the current template name to a
variable named `foo`::
Assign(Name('foo', ctx='store'),
Getattr(ContextReference(), 'name'))
This is basically equivalent to using the
:func:`~jinja2.contextfunction` decorator when using the
high-level API, which causes a reference to the context to be passed
as the first argument to a function.
"""
class DerivedContextReference(Expr):
"""Return the current template context including locals. Behaves
exactly like :class:`ContextReference`, but includes local
variables, such as from a ``for`` loop.
.. versionadded:: 2.11
"""
class Continue(Stmt):
"""Continue a loop."""
class Break(Stmt):
"""Break a loop."""
class Scope(Stmt):
"""An artificial scope."""
fields = ("body",)
class OverlayScope(Stmt):
"""An overlay scope for extensions. This is a largely unoptimized scope
that however can be used to introduce completely arbitrary variables into
a sub scope from a dictionary or dictionary like object. The `context`
field has to evaluate to a dictionary object.
Example usage::
OverlayScope(context=self.call_method('get_context'),
body=[...])
.. versionadded:: 2.10
"""
fields = ("context", "body")
class EvalContextModifier(Stmt):
"""Modifies the eval context. For each option that should be modified,
a :class:`Keyword` has to be added to the :attr:`options` list.
Example to change the `autoescape` setting::
EvalContextModifier(options=[Keyword('autoescape', Const(True))])
"""
fields = ("options",)
class ScopedEvalContextModifier(EvalContextModifier):
"""Modifies the eval context and reverts it later. Works exactly like
:class:`EvalContextModifier` but will only modify the
:class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
"""
fields = ("body",)
# make sure nobody creates custom nodes
def _failing_new(*args, **kwargs):
raise TypeError("can't create custom node types")
NodeType.__new__ = staticmethod(_failing_new)
del _failing_new
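# --- Usage sketch (not part of the original module) --------------------------
# A minimal, hedged illustration of how these node classes fit together:
# parse a template with a standard Jinja2 Environment and constant-fold one of
# its expression nodes through ``as_const``.  Guarded so it never runs on
# import; names prefixed with ``_`` are illustrative only.
if __name__ == "__main__":
    from jinja2 import Environment
    _env = Environment()
    _ast = _env.parse("{{ 1 + 2 }}")
    # The template body is an Output node wrapping an Add expression node.
    _add = _ast.body[0].nodes[0]
    print(type(_add).__name__)  # "Add"
    print(_add.as_const())      # 3 -- folded via BinExpr.as_const()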
|
sserrot/champion_relationships
|
venv/Lib/site-packages/jinja2/nodes.py
|
Python
|
mit
| 31,095
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2011 Nathanael C. Fritz
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
import sys
import logging
import getpass
import threading
from optparse import OptionParser
import sleekxmpp
from sleekxmpp.exceptions import IqError, IqTimeout
# Python versions before 3.0 do not use UTF-8 encoding
# by default. To ensure that Unicode is handled properly
# throughout SleekXMPP, we will set the default encoding
# ourselves to UTF-8.
if sys.version_info < (3, 0):
from sleekxmpp.util.misc_ops import setdefaultencoding
setdefaultencoding('utf8')
else:
raw_input = input
class RosterBrowser(sleekxmpp.ClientXMPP):
"""
A basic script for dumping a client's roster to
the command line.
"""
def __init__(self, jid, password):
sleekxmpp.ClientXMPP.__init__(self, jid, password)
# The session_start event will be triggered when
# the bot establishes its connection with the server
# and the XML streams are ready for use. We want to
        # listen for this event so that we can initialize
# our roster. We need threaded=True so that the
# session_start handler doesn't block event processing
# while we wait for presence stanzas to arrive.
self.add_event_handler("session_start", self.start, threaded=True)
self.add_event_handler("changed_status", self.wait_for_presences)
self.received = set()
self.presences_received = threading.Event()
def start(self, event):
"""
Process the session_start event.
Typical actions for the session_start event are
requesting the roster and broadcasting an initial
presence stanza.
Arguments:
event -- An empty dictionary. The session_start
event does not provide any additional
data.
"""
try:
self.get_roster()
except IqError as err:
            print('Error: %s' % err.iq['error']['condition'])
except IqTimeout:
print('Error: Request timed out')
self.send_presence()
print('Waiting for presence updates...\n')
self.presences_received.wait(5)
print('Roster for %s' % self.boundjid.bare)
groups = self.client_roster.groups()
for group in groups:
print('\n%s' % group)
print('-' * 72)
for jid in groups[group]:
sub = self.client_roster[jid]['subscription']
name = self.client_roster[jid]['name']
if self.client_roster[jid]['name']:
print(' %s (%s) [%s]' % (name, jid, sub))
else:
print(' %s [%s]' % (jid, sub))
connections = self.client_roster.presence(jid)
for res, pres in connections.items():
show = 'available'
if pres['show']:
show = pres['show']
print(' - %s (%s)' % (res, show))
if pres['status']:
print(' %s' % pres['status'])
self.disconnect()
def wait_for_presences(self, pres):
"""
Track how many roster entries have received presence updates.
"""
self.received.add(pres['from'].bare)
if len(self.received) >= len(self.client_roster.keys()):
self.presences_received.set()
else:
self.presences_received.clear()
if __name__ == '__main__':
# Setup the command line arguments.
optp = OptionParser()
optp.add_option('-q','--quiet', help='set logging to ERROR',
action='store_const',
dest='loglevel',
const=logging.ERROR,
default=logging.ERROR)
optp.add_option('-d','--debug', help='set logging to DEBUG',
action='store_const',
dest='loglevel',
const=logging.DEBUG,
default=logging.ERROR)
optp.add_option('-v','--verbose', help='set logging to COMM',
action='store_const',
dest='loglevel',
const=5,
default=logging.ERROR)
# JID and password options.
optp.add_option("-j", "--jid", dest="jid",
help="JID to use")
optp.add_option("-p", "--password", dest="password",
help="password to use")
opts,args = optp.parse_args()
# Setup logging.
logging.basicConfig(level=opts.loglevel,
format='%(levelname)-8s %(message)s')
if opts.jid is None:
opts.jid = raw_input("Username: ")
if opts.password is None:
opts.password = getpass.getpass("Password: ")
xmpp = RosterBrowser(opts.jid, opts.password)
# If you are working with an OpenFire server, you may need
# to adjust the SSL version used:
# xmpp.ssl_version = ssl.PROTOCOL_SSLv3
# If you want to verify the SSL certificates offered by a server:
# xmpp.ca_certs = "path/to/ca/cert"
# Connect to the XMPP server and start processing XMPP stanzas.
if xmpp.connect():
# If you do not have the dnspython library installed, you will need
# to manually specify the name of the server if it does not match
# the one in the JID. For example, to use Google Talk you would
# need to use:
#
# if xmpp.connect(('talk.google.com', 5222)):
# ...
xmpp.process(block=True)
else:
print("Unable to connect.")
|
emesene/emesene
|
emesene/e3/xmpp/SleekXMPP/examples/roster_browser.py
|
Python
|
gpl-3.0
| 5,744
|
from django.contrib import admin
from import_export.admin import ImportExportModelAdmin
from .models import Concept
class ConceptAdmin(ImportExportModelAdmin):
list_display = ('name',)
search_fields = ('name',)
ordering = ('name',)
admin.site.register(Concept, ConceptAdmin)
|
effa/flocs
|
concepts/admin.py
|
Python
|
gpl-2.0
| 289
|
# coding=utf-8
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import sys
from builtins import map, object
import pkg_resources
from pants.base.build_environment import pants_version
from pants.base.exceptions import BuildConfigurationError
from pants.goal.goal import Goal
from pants.init.extension_loader import load_backends_and_plugins
from pants.init.global_subsystems import GlobalSubsystems
from pants.init.plugin_resolver import PluginResolver
from pants.option.global_options import GlobalOptionsRegistrar
from pants.subsystem.subsystem import Subsystem
logger = logging.getLogger(__name__)
class BuildConfigInitializer(object):
"""Initializes a BuildConfiguration object.
This class uses a class-level cache for the internally generated `BuildConfiguration` object,
which permits multiple invocations in the same runtime context without re-incurring backend &
plugin loading, which can be expensive and cause issues (double task registration, etc).
"""
_cached_build_config = None
@classmethod
def get(cls, options_bootstrapper):
if cls._cached_build_config is None:
cls._cached_build_config = cls(options_bootstrapper).setup()
return cls._cached_build_config
@classmethod
def reset(cls):
cls._cached_build_config = None
def __init__(self, options_bootstrapper):
self._options_bootstrapper = options_bootstrapper
self._bootstrap_options = options_bootstrapper.get_bootstrap_options().for_global_scope()
self._working_set = PluginResolver(self._options_bootstrapper).resolve()
def _load_plugins(self, working_set, python_paths, plugins, backend_packages):
# Add any extra paths to python path (e.g., for loading extra source backends).
for path in python_paths:
if path not in sys.path:
sys.path.append(path)
pkg_resources.fixup_namespace_packages(path)
# Load plugins and backends.
return load_backends_and_plugins(plugins, working_set, backend_packages)
def setup(self):
"""Load backends and plugins.
:returns: A `BuildConfiguration` object constructed during backend/plugin loading.
"""
return self._load_plugins(
self._working_set,
self._bootstrap_options.pythonpath,
self._bootstrap_options.plugins,
self._bootstrap_options.backend_packages
)
class OptionsInitializer(object):
"""Initializes options."""
@staticmethod
def _construct_options(options_bootstrapper, build_configuration):
"""Parse and register options.
:returns: An Options object representing the full set of runtime options.
"""
# Now that plugins and backends are loaded, we can gather the known scopes.
# Gather the optionables that are not scoped to any other. All known scopes are reachable
# via these optionables' known_scope_infos() methods.
top_level_optionables = (
{GlobalOptionsRegistrar} |
GlobalSubsystems.get() |
build_configuration.optionables() |
set(Goal.get_optionables())
)
# Now that we have the known scopes we can get the full options. `get_full_options` will
# sort and de-duplicate these for us.
known_scope_infos = [si
for optionable in top_level_optionables
for si in optionable.known_scope_infos()]
return options_bootstrapper.get_full_options(known_scope_infos)
@classmethod
def create(cls, options_bootstrapper, build_configuration, init_subsystems=True):
global_bootstrap_options = options_bootstrapper.get_bootstrap_options().for_global_scope()
if global_bootstrap_options.pants_version != pants_version():
raise BuildConfigurationError(
'Version mismatch: Requested version was {}, our version is {}.'
.format(global_bootstrap_options.pants_version, pants_version())
)
pants_runtime_python_version = global_bootstrap_options.pants_runtime_python_version
current_python_version = '.'.join(map(str, sys.version_info[0:2]))
if pants_runtime_python_version and pants_runtime_python_version != current_python_version:
raise BuildConfigurationError(
'Running Pants with a different Python interpreter version than requested. '
'You requested {}, but are running with {}.\n\n'
'Note that Pants cannot use the value you give for `--pants-runtime-python-version` to '
'dynamically change the interpreter it uses, as it is too late for it to change once the program '
'is already running. Instead, your setup script (e.g. `./pants`) must configure which Python '
'interpreter and virtualenv to use. For example, the setup script we distribute '
'at https://www.pantsbuild.org/install.html#recommended-installation will read the '
'`pants_runtime_python_version` defined in your pants.ini to determine which Python '
'version to run with.'.format(pants_runtime_python_version, current_python_version)
)
# Parse and register options.
options = cls._construct_options(options_bootstrapper, build_configuration)
GlobalOptionsRegistrar.validate_instance(options.for_global_scope())
if init_subsystems:
Subsystem.set_options(options)
return options
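# --- Standalone sketch (not part of Pants) ------------------------------------
# Illustrates the class-level caching pattern BuildConfigInitializer relies on:
# the expensive setup() runs once per process and later get() calls reuse the
# cached result until reset() is called.  All names here are illustrative only.
class _CachedSetupExample(object):
  _cached = None
  @classmethod
  def get(cls, key):
    if cls._cached is None:
      cls._cached = cls(key).setup()
    return cls._cached
  @classmethod
  def reset(cls):
    cls._cached = None
  def __init__(self, key):
    self._key = key
  def setup(self):
    # Stand-in for expensive backend/plugin loading.
    return 'configured-%s' % self._key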
|
twitter/pants
|
src/python/pants/init/options_initializer.py
|
Python
|
apache-2.0
| 5,384
|
from flask import Blueprint, current_app as app
from flask_api import exceptions
from ._settings import CONTRIBUTING_URL
blueprint = Blueprint('fonts', __name__, url_prefix="/api/fonts/")
@blueprint.route("")
def get():
"""Get a list of all available fonts."""
return sorted(app.font_service.all())
@blueprint.route("", methods=['POST'])
def create_font():
raise exceptions.PermissionDenied(CONTRIBUTING_URL)
|
DanLindeman/memegen
|
memegen/routes/api_fonts.py
|
Python
|
mit
| 428
|
"""This module contains code from
Think Python by Allen B. Downey
http://thinkpython.com
Copyright 2012 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
import math
try:
# see if Swampy is installed as a package
from swampy.TurtleWorld import *
except ImportError:
# otherwise see if the modules are on the PYTHONPATH
from TurtleWorld import *
def draw_pie(t, n, r):
"""Draws a pie, then moves into position to the right.
t: Turtle
n: number of segments
r: length of the radial spokes
"""
polypie(t, n, r)
pu(t)
fd(t, r*2 + 10)
pd(t)
def polypie(t, n, r):
"""Draws a pie divided into radial segments.
t: Turtle
n: number of segments
r: length of the radial spokes
"""
angle = 360.0 / n
for i in range(n):
isosceles(t, r, angle/2)
lt(t, angle)
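# For example (illustrative only): polypie(bob, 6, 40) draws six 60-degree
# wedges, since angle = 360.0 / 6, each an isosceles triangle with legs of
# length 40.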
def isosceles(t, r, angle):
"""Draws an icosceles triangle.
The turtle starts and ends at the peak, facing the middle of the base.
t: Turtle
r: length of the equal legs
angle: peak angle in degrees
"""
y = r * math.sin(angle * math.pi / 180)
rt(t, angle)
fd(t, r)
lt(t, 90+angle)
fd(t, 2*y)
lt(t, 90+angle)
fd(t, r)
lt(t, 180-angle)
# create the world and bob
world = TurtleWorld()
bob = Turtle()
bob.delay = 0
pu(bob)
bk(bob, 130)
pd(bob)
# draw polypies with various number of sides
size = 40
draw_pie(bob, 5, size)
draw_pie(bob, 6, size)
draw_pie(bob, 7, size)
draw_pie(bob, 8, size)
die(bob)
# dump the contents of the canvas to the file canvas.eps
world.canvas.dump()
wait_for_user()
|
simontakite/sysadmin
|
pythonscripts/thinkpython/pie.py
|
Python
|
gpl-2.0
| 1,636
|
# stopanalyzer package
|
romanchyla/pylucene-trunk
|
samples/LuceneInAction/lia/analysis/stopanalyzer/__init__.py
|
Python
|
apache-2.0
| 23
|
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for manipulating Conductors via the DB API"""
import datetime
import mock
from oslo_utils import timeutils
from ironic.common import exception
from ironic.tests.unit.db import base
from ironic.tests.unit.db import utils
class DbConductorTestCase(base.DbTestCase):
def test_register_conductor_existing_fails(self):
c = utils.get_test_conductor()
self.dbapi.register_conductor(c)
self.assertRaises(
exception.ConductorAlreadyRegistered,
self.dbapi.register_conductor,
c)
def test_register_conductor_override(self):
c = utils.get_test_conductor()
self.dbapi.register_conductor(c)
self.dbapi.register_conductor(c, update_existing=True)
def _create_test_cdr(self, **kwargs):
c = utils.get_test_conductor(**kwargs)
return self.dbapi.register_conductor(c)
def test_get_conductor(self):
c1 = self._create_test_cdr()
c2 = self.dbapi.get_conductor(c1.hostname)
self.assertEqual(c1.id, c2.id)
def test_get_conductor_not_found(self):
self._create_test_cdr()
self.assertRaises(
exception.ConductorNotFound,
self.dbapi.get_conductor,
'bad-hostname')
def test_unregister_conductor(self):
c = self._create_test_cdr()
self.dbapi.unregister_conductor(c.hostname)
self.assertRaises(
exception.ConductorNotFound,
self.dbapi.unregister_conductor,
c.hostname)
@mock.patch.object(timeutils, 'utcnow', autospec=True)
def test_touch_conductor(self, mock_utcnow):
test_time = datetime.datetime(2000, 1, 1, 0, 0)
mock_utcnow.return_value = test_time
c = self._create_test_cdr()
self.assertEqual(test_time, timeutils.normalize_time(c.updated_at))
test_time = datetime.datetime(2000, 1, 1, 0, 1)
mock_utcnow.return_value = test_time
self.dbapi.touch_conductor(c.hostname)
c = self.dbapi.get_conductor(c.hostname)
self.assertEqual(test_time, timeutils.normalize_time(c.updated_at))
def test_touch_conductor_not_found(self):
# A conductor's heartbeat will not create a new record,
# it will only update existing ones
self._create_test_cdr()
self.assertRaises(
exception.ConductorNotFound,
self.dbapi.touch_conductor,
'bad-hostname')
def test_touch_offline_conductor(self):
# Ensure that a conductor's periodic heartbeat task can make the
# conductor visible again, even if it was spuriously marked offline
c = self._create_test_cdr()
self.dbapi.unregister_conductor(c.hostname)
self.assertRaises(
exception.ConductorNotFound,
self.dbapi.get_conductor,
c.hostname)
self.dbapi.touch_conductor(c.hostname)
self.dbapi.get_conductor(c.hostname)
def test_clear_node_reservations_for_conductor(self):
node1 = self.dbapi.create_node({'reservation': 'hostname1'})
node2 = self.dbapi.create_node({'reservation': 'hostname2'})
node3 = self.dbapi.create_node({'reservation': None})
self.dbapi.clear_node_reservations_for_conductor('hostname1')
node1 = self.dbapi.get_node_by_id(node1.id)
node2 = self.dbapi.get_node_by_id(node2.id)
node3 = self.dbapi.get_node_by_id(node3.id)
self.assertIsNone(node1.reservation)
self.assertEqual('hostname2', node2.reservation)
self.assertIsNone(node3.reservation)
@mock.patch.object(timeutils, 'utcnow', autospec=True)
def test_get_active_driver_dict_one_host_no_driver(self, mock_utcnow):
h = 'fake-host'
expected = {}
mock_utcnow.return_value = datetime.datetime.utcnow()
self._create_test_cdr(hostname=h, drivers=[])
result = self.dbapi.get_active_driver_dict()
self.assertEqual(expected, result)
@mock.patch.object(timeutils, 'utcnow', autospec=True)
def test_get_active_driver_dict_one_host_one_driver(self, mock_utcnow):
h = 'fake-host'
d = 'fake-driver'
expected = {d: set([h])}
mock_utcnow.return_value = datetime.datetime.utcnow()
self._create_test_cdr(hostname=h, drivers=[d])
result = self.dbapi.get_active_driver_dict()
self.assertEqual(expected, result)
@mock.patch.object(timeutils, 'utcnow', autospec=True)
def test_get_active_driver_dict_one_host_many_drivers(self, mock_utcnow):
h = 'fake-host'
d1 = 'driver-one'
d2 = 'driver-two'
expected = {d1: set([h]), d2: set([h])}
mock_utcnow.return_value = datetime.datetime.utcnow()
self._create_test_cdr(hostname=h, drivers=[d1, d2])
result = self.dbapi.get_active_driver_dict()
self.assertEqual(expected, result)
@mock.patch.object(timeutils, 'utcnow', autospec=True)
def test_get_active_driver_dict_many_hosts_one_driver(self, mock_utcnow):
h1 = 'host-one'
h2 = 'host-two'
d = 'fake-driver'
expected = {d: set([h1, h2])}
mock_utcnow.return_value = datetime.datetime.utcnow()
self._create_test_cdr(id=1, hostname=h1, drivers=[d])
self._create_test_cdr(id=2, hostname=h2, drivers=[d])
result = self.dbapi.get_active_driver_dict()
self.assertEqual(expected, result)
@mock.patch.object(timeutils, 'utcnow', autospec=True)
def test_get_active_driver_dict_many_hosts_and_drivers(self, mock_utcnow):
h1 = 'host-one'
h2 = 'host-two'
h3 = 'host-three'
d1 = 'driver-one'
d2 = 'driver-two'
expected = {d1: set([h1, h2]), d2: set([h2, h3])}
mock_utcnow.return_value = datetime.datetime.utcnow()
self._create_test_cdr(id=1, hostname=h1, drivers=[d1])
self._create_test_cdr(id=2, hostname=h2, drivers=[d1, d2])
self._create_test_cdr(id=3, hostname=h3, drivers=[d2])
result = self.dbapi.get_active_driver_dict()
self.assertEqual(expected, result)
@mock.patch.object(timeutils, 'utcnow', autospec=True)
def test_get_active_driver_dict_with_old_conductor(self, mock_utcnow):
past = datetime.datetime(2000, 1, 1, 0, 0)
present = past + datetime.timedelta(minutes=2)
d = 'common-driver'
h1 = 'old-host'
d1 = 'old-driver'
mock_utcnow.return_value = past
self._create_test_cdr(id=1, hostname=h1, drivers=[d, d1])
h2 = 'new-host'
d2 = 'new-driver'
mock_utcnow.return_value = present
self._create_test_cdr(id=2, hostname=h2, drivers=[d, d2])
# verify that old-host does not show up in current list
one_minute = 60
expected = {d: set([h2]), d2: set([h2])}
result = self.dbapi.get_active_driver_dict(interval=one_minute)
self.assertEqual(expected, result)
# change the interval, and verify that old-host appears
two_minute = one_minute * 2
expected = {d: set([h1, h2]), d1: set([h1]), d2: set([h2])}
result = self.dbapi.get_active_driver_dict(interval=two_minute)
self.assertEqual(expected, result)
@mock.patch.object(timeutils, 'utcnow', autospec=True)
def test_get_offline_conductors(self, mock_utcnow):
self.config(heartbeat_timeout=60, group='conductor')
time_ = datetime.datetime(2000, 1, 1, 0, 0)
mock_utcnow.return_value = time_
c = self._create_test_cdr()
# Only 30 seconds passed since last heartbeat, it's still
# considered alive
mock_utcnow.return_value = time_ + datetime.timedelta(seconds=30)
self.assertEqual([], self.dbapi.get_offline_conductors())
# 61 seconds passed since last heartbeat, it's dead
mock_utcnow.return_value = time_ + datetime.timedelta(seconds=61)
self.assertEqual([c.hostname], self.dbapi.get_offline_conductors())
|
devananda/ironic
|
ironic/tests/unit/db/test_conductor.py
|
Python
|
apache-2.0
| 8,622
|
"""
tree2tree.py - manipulate trees
===============================
:Author: Andreas Heger
:Release: $Id$
:Date: |today|
:Tags: Python
Purpose
-------
This script reads a collection of trees from stdin and outputs them again on stdout
after manipulating them. Manipulations include
* renaming taxa
* normalizing branch lengths
The complete list of methods is:
normalize
+++++++++
divide-by-tree
++++++++++++++
rename
++++++
set-uniform-branch-length
+++++++++++++++++++++++++
extract-with-pattern
++++++++++++++++++++
build-map
++++++++++++++++++++
remove-pattern
++++++++++++++++++++
unroot
++++++++++++++++++++
midpoint-root
++++++++++++++++++++
balanced-root
++++++++++++++++++++
add-node-names
++++++++++++++++++++
Usage
-----
Example::
python <script_name>.py --help
Type::
python <script_name>.py --help
for command line help.
Command line options
--------------------
"""
import sys
import re
import CGAT.Experiment as E
import CGAT.TreeTools as TreeTools
import CGAT.IOTools as IOTools
import CGAT.WrapperPhylip as WrapperPhylip
def Process(lines, other_trees, options, map_old2new, ntree):
nexus = TreeTools.Newick2Nexus(map(lambda x: x[:-1], lines))
if options.loglevel >= 1:
options.stdlog.write("# read %i trees.\n" % len(nexus.trees))
nskipped = 0
ntotal = len(nexus.trees)
extract_pattern = None
species2remove = None
write_map = False
phylip_executable = None
phylip_options = None
index = 0
# default: do not output internal node names
write_all_taxa = False
for tree in nexus.trees:
if options.outgroup:
tree.root_with_outgroup(options.outgroup)
for method in options.methods:
if options.loglevel >= 3:
options.stdlog.write(
"# applying method %s to tree %i.\n" % (method, index))
if method == "midpoint-root":
tree.root_midpoint()
elif method == "balanced-root":
tree.root_balanced()
elif method == "unroot":
TreeTools.Unroot(tree)
elif method == "phylip":
if not phylip_executable:
phylip_executable = options.parameters[0]
del options.parameters[0]
phylip_options = re.split("@", options.parameters[0])
del options.parameters[0]
phylip = WrapperPhylip.Phylip()
phylip.setProgram(phylip_executable)
phylip.setOptions(phylip_options)
phylip.setTree(tree)
result = phylip.run()
nexus.trees[index] = result.mNexus.trees[0]
elif method == "normalize":
if options.value == 0:
v = 0
for n in tree.chain.keys():
v = max(v, tree.node(n).data.branchlength)
else:
v = options.value
for n in tree.chain.keys():
                    tree.node(n).data.branchlength /= float(v)
elif method == "divide-by-tree":
if len(other_trees) > 1:
other_tree = other_trees[ntree]
else:
other_tree = other_trees[0]
# the trees have to be exactly the same!!
if options.loglevel >= 2:
print tree.display()
print other_tree.display()
if not tree.is_identical(other_tree):
nskipped += 1
continue
# even if the trees are the same (in topology), the
# node numbering might not be the same. Thus build a
# map of node ids.
map_a2b = TreeTools.GetNodeMap(tree, other_tree)
for n in tree.chain.keys():
try:
tree.node(
n).data.branchlength /= float(
other_tree.node(map_a2b[n]).data.branchlength)
except ZeroDivisionError:
options.stdlog.write(
"# Warning: branch for nodes %i and %i in "
"tree-pair %i: divide by zero\n" %
(n, map_a2b[n], ntree))
continue
elif method == "rename":
if not map_old2new:
map_old2new = IOTools.ReadMap(
open(options.parameters[0], "r"), columns=(0, 1))
if options.invert_map:
map_old2new = IOTools.getInvertedDictionary(
map_old2new, make_unique=True)
del options.parameters[0]
unknown = []
for n, node in tree.chain.items():
if node.data.taxon:
try:
node.data.taxon = map_old2new[node.data.taxon]
except KeyError:
unknown.append(node.data.taxon)
for taxon in unknown:
tree.prune(taxon)
# reformat terminals
elif method == "extract-with-pattern":
if not extract_pattern:
extract_pattern = re.compile(options.parameters[0])
del options.parameters[0]
for n in tree.get_terminals():
node = tree.node(n)
node.data.taxon = extract_pattern.search(
node.data.taxon).groups()[0]
elif method == "set-uniform-branchlength":
for n in tree.chain.keys():
tree.node(n).data.branchlength = options.value
elif method == "build-map":
# build a map of identifiers
options.write_map = True
for n in tree.get_terminals():
node = tree.node(n)
if node.data.taxon not in map_old2new:
new = options.template_identifier % (
len(map_old2new) + 1)
map_old2new[node.data.taxon] = new
node.data.taxon = map_old2new[node.data.taxon]
elif method == "remove-pattern":
if species2remove is None:
species2remove = re.compile(options.parameters[0])
                    del options.parameters[0]
taxa = []
for n in tree.get_terminals():
t = tree.node(n).data.taxon
skip = False
if species2remove.search(t):
continue
if not skip:
taxa.append(t)
TreeTools.PruneTree(tree, taxa)
elif method == "add-node-names":
inode = 0
write_all_taxa = True
for n, node in tree.chain.items():
if not node.data.taxon:
node.data.taxon = "inode%i" % inode
inode += 1
elif method == "newick2nhx":
# convert names to species names
for n in tree.get_terminals():
t = tree.node(n).data.taxon
d = t.split("|")
if len(d) >= 2:
tree.node(n).data.species = d[0]
index += 1
ntree += 1
if options.output_format == "nh":
options.stdout.write(TreeTools.Nexus2Newick(
nexus, write_all_taxa=True,
with_branchlengths=options.with_branchlengths) + "\n")
else:
for tree in nexus.trees:
tree.writeToFile(options.stdout, format=options.output_format)
return ntotal, nskipped, ntree
def main(argv=None):
"""script main.
parses command line options in sys.argv, unless *argv* is given.
"""
if argv is None:
argv = sys.argv
parser = E.OptionParser(version="%prog version: $Id: tree2tree.py 2782 2009-09-10 11:40:29Z andreas $",
usage=globals()["__doc__"])
parser.add_option("-d", "--value", dest="value", type="float",
help="normalizing value.")
parser.add_option(
"-m", "--method", dest="methods", type="string",
help="""methods to apply [normalize|divide-by-tree|divide-by-tree|rename|set-uniform-branch-length|extract-with-pattern|build-map|remove-pattern|unroot|midpoint-root|balanced-root|add-node-names""")
parser.add_option(
"-2", "--filename-tree2", dest="filename_tree2", type="string",
help="filename with second tree.")
parser.add_option("-o", "--outgroup", dest="outgroup", type="string",
help="reroot with outgroup before processing.")
parser.add_option(
"-p", "--parameters", dest="parameters", type="string",
help="parameters for methods.")
parser.add_option(
"-e", "--template-identifier", dest="template_identifier", type="string",
help="""template identifier [%default]. A %i is replaced by the position
of the sequence in the file.""" )
parser.add_option(
"-i", "--invert-map", dest="invert_map", action="store_true",
help="""invert map.""")
parser.add_option(
"-f", "--method=filter --filter-method", dest="filter", type="choice",
choices=("max-branch-length",),
help="filter trees")
parser.add_option(
"--output-format", dest="output_format", type="choice",
choices=("nh", "nhx"),
help=("output format for trees."))
parser.add_option(
"-b", "--no-branch-lengths", dest="with_branchlengths",
action="store_false",
help="do not write branchlengths. Per default, 0 branch "
"lengths are added.")
parser.set_defaults(
value=0,
methods="",
filename_tree2=None,
outgroup=None,
parameters="",
template_identifier="ID%06i",
write_map=False,
invert_map=False,
filter=None,
output_format="nh",
with_branchlengths=True,
)
(options, args) = E.Start(parser, add_pipe_options=True)
options.methods = options.methods.split(",")
options.parameters = options.parameters.split(",")
other_trees = []
# read other trees
if options.filename_tree2:
other_nexus = TreeTools.Newick2Nexus(open(options.filename_tree2, "r"))
if len(other_nexus.trees) > 0:
other_trees = other_nexus.trees
else:
other_tree = other_nexus.trees[0]
other_trees = [other_tree]
lines = sys.stdin.readlines()
ntotal, nskipped, ntree = 0, 0, 0
if options.filter:
nexus = TreeTools.Newick2Nexus(lines)
new_trees = []
value = float(options.parameters[0])
del options.parameters[0]
# decision functions: return true, if tree
# is to be skipped
if options.filter == "max-branch-length":
f = lambda x: x >= value
for tree in nexus.trees:
ntotal += 1
for id, node in tree.chain.items():
if f(node.data.branchlength):
nskipped += 1
break
else:
new_trees.append(tree)
ntree += 1
nexus.trees = new_trees
options.stdout.write(
TreeTools.Nexus2Newick(nexus, with_names=True) + "\n")
else:
# iterate over chunks
chunks = filter(lambda x: lines[x][0] == ">", range(len(lines)))
map_old2new = {}
if chunks:
for c in range(len(chunks) - 1):
a, b = chunks[c], chunks[c + 1]
options.stdout.write(lines[a])
a += 1
Process(lines[a:b], other_trees, options, map_old2new, ntree)
options.stdout.write(lines[chunks[-1]])
t, s, ntree = Process(
lines[chunks[-1] + 1:], other_trees,
options, map_old2new, ntree)
ntotal += t
nskipped += s
else:
ntotal, nskipped, ntree = Process(
lines, other_trees, options, map_old2new, ntree)
if options.write_map:
p = options.parameters[0]
if p:
outfile = open(p, "w")
else:
outfile = options.stdout
outfile.write("old\tnew\n")
for old_id, new_id in map_old2new.items():
outfile.write("%s\t%s\n" % (old_id, new_id))
if p:
outfile.close()
if options.loglevel >= 1:
options.stdlog.write("# ntotal=%i, nskipped=%i\n" % (ntotal, nskipped))
E.Stop()
if __name__ == "__main__":
sys.exit(main(sys.argv))
|
CGATOxford/Optic
|
scripts/tree2tree.py
|
Python
|
mit
| 13,003
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd.
# License: GNU General Public License v3. See license.txt
import webnotes
from webnotes.utils import cint
def get_website_settings(context):
post_login = []
cart_enabled = cint(webnotes.conn.get_default("shopping_cart_enabled"))
if cart_enabled:
post_login += [{"label": "Cart", "url": "cart", "icon": "icon-shopping-cart", "class": "cart-count"},
{"class": "divider"}]
post_login += [
{"label": "Profile", "url": "profile", "icon": "icon-user"},
{"label": "Addresses", "url": "addresses", "icon": "icon-map-marker"},
{"label": "My Orders", "url": "orders", "icon": "icon-list"},
{"label": "My Tickets", "url": "tickets", "icon": "icon-tags"},
{"label": "Invoices", "url": "invoices", "icon": "icon-file-text"},
{"label": "Shipments", "url": "shipments", "icon": "icon-truck"},
{"class": "divider"}
]
context.update({
"shopping_cart_enabled": cart_enabled,
"post_login": post_login + context.get("post_login", [])
})
|
Yellowen/Owrang
|
startup/webutils.py
|
Python
|
agpl-3.0
| 1,022
|
#!/usr/bin/env python
#
# Electrum - Lightweight Bitcoin Client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from functools import partial
from threading import Thread
import re
from decimal import Decimal
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from electrum_gui.qt.util import *
from electrum_gui.qt.qrcodewidget import QRCodeWidget
from electrum_gui.qt.amountedit import AmountEdit
from electrum_gui.qt.main_window import StatusBarButton
from electrum.i18n import _
from electrum.plugins import hook
from electrum import wizard
from trustedcoin import TrustedCoinPlugin, DISCLAIMER, server
def need_server(wallet, tx):
from electrum.account import BIP32_Account
# Detect if the server is needed
long_id, short_id = wallet.get_user_id()
xpub3 = wallet.master_public_keys['x3/']
for x in tx.inputs_to_sign():
if x[0:2] == 'ff':
xpub, sequence = BIP32_Account.parse_xpubkey(x)
if xpub == xpub3:
return True
return False
class Plugin(TrustedCoinPlugin):
@hook
def on_new_window(self, window):
wallet = window.wallet
if not isinstance(wallet, self.wallet_class):
return
if wallet.can_sign_without_server():
msg = ' '.join([
                _('This wallet was restored from seed, and it contains two master private keys.'),
_('Therefore, two-factor authentication is disabled.')
])
action = lambda: window.show_message(msg)
else:
action = partial(self.settings_dialog, window)
button = StatusBarButton(QIcon(":icons/trustedcoin.png"),
_("TrustedCoin"), action)
window.statusBar().addPermanentWidget(button)
t = Thread(target=self.request_billing_info, args=(wallet,))
t.setDaemon(True)
t.start()
def auth_dialog(self, window):
d = WindowModalDialog(window, _("Authorization"))
vbox = QVBoxLayout(d)
pw = AmountEdit(None, is_int = True)
msg = _('Please enter your Google Authenticator code')
vbox.addWidget(QLabel(msg))
grid = QGridLayout()
grid.setSpacing(8)
grid.addWidget(QLabel(_('Code')), 1, 0)
grid.addWidget(pw, 1, 1)
vbox.addLayout(grid)
vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
if not d.exec_():
return
return pw.get_amount()
@hook
def sign_tx(self, window, tx):
wallet = window.wallet
if not isinstance(wallet, self.wallet_class):
return
if not wallet.can_sign_without_server():
self.print_error("twofactor:sign_tx")
auth_code = None
if need_server(wallet, tx):
auth_code = self.auth_dialog(window)
else:
self.print_error("twofactor: xpub3 not needed")
window.wallet.auth_code = auth_code
def waiting_dialog(self, window, on_finished=None):
task = partial(self.request_billing_info, window.wallet)
return WaitingDialog(window, 'Getting billing information...', task,
on_finished)
def confirm(self, window, msg):
vbox = QVBoxLayout()
vbox.addWidget(WWLabel(msg))
window.set_main_layout(vbox)
def show_disclaimer(self, wallet, window):
window.set_icon(':icons/trustedcoin.png')
self.confirm(window, '\n\n'.join(DISCLAIMER))
self.set_enabled(wallet, True)
@hook
def abort_send(self, window):
wallet = window.wallet
if not isinstance(wallet, self.wallet_class):
return
if not wallet.can_sign_without_server():
if wallet.billing_info is None:
# request billing info before forming the transaction
                self.waiting_dialog(window).wait()
if wallet.billing_info is None:
window.show_message('Could not contact server')
return True
return False
def settings_dialog(self, window):
self.waiting_dialog(window, partial(self.show_settings_dialog, window))
def show_settings_dialog(self, window, success):
if not success:
window.show_message(_('Server not reachable.'))
return
wallet = window.wallet
d = WindowModalDialog(window, _("TrustedCoin Information"))
d.setMinimumSize(500, 200)
vbox = QVBoxLayout(d)
hbox = QHBoxLayout()
logo = QLabel()
logo.setPixmap(QPixmap(":icons/trustedcoin.png"))
msg = _('This wallet is protected by TrustedCoin\'s two-factor authentication.') + '<br/>'\
+ _("For more information, visit") + " <a href=\"https://api.trustedcoin.com/#/electrum-help\">https://api.trustedcoin.com/#/electrum-help</a>"
label = QLabel(msg)
label.setOpenExternalLinks(1)
hbox.addStretch(10)
hbox.addWidget(logo)
hbox.addStretch(10)
hbox.addWidget(label)
hbox.addStretch(10)
vbox.addLayout(hbox)
vbox.addStretch(10)
        msg = _('TrustedCoin charges a fee per co-signed transaction. You may pay on each transaction (an extra output will be added to your transaction), or you may purchase prepaid transactions using this dialog.') + '<br/>'
label = QLabel(msg)
label.setWordWrap(1)
vbox.addWidget(label)
vbox.addStretch(10)
grid = QGridLayout()
vbox.addLayout(grid)
price_per_tx = wallet.price_per_tx
v = price_per_tx.get(1)
grid.addWidget(QLabel(_("Price per transaction (not prepaid):")), 0, 0)
grid.addWidget(QLabel(window.format_amount(v) + ' ' + window.base_unit()), 0, 1)
i = 1
if 10 not in price_per_tx:
price_per_tx[10] = 10 * price_per_tx.get(1)
for k, v in sorted(price_per_tx.items()):
if k == 1:
continue
grid.addWidget(QLabel("Price for %d prepaid transactions:"%k), i, 0)
grid.addWidget(QLabel("%d x "%k + window.format_amount(v/k) + ' ' + window.base_unit()), i, 1)
b = QPushButton(_("Buy"))
b.clicked.connect(lambda b, k=k, v=v: self.on_buy(window, k, v, d))
grid.addWidget(b, i, 2)
i += 1
n = wallet.billing_info.get('tx_remaining', 0)
grid.addWidget(QLabel(_("Your wallet has %d prepaid transactions.")%n), i, 0)
        # transfer button
#def on_transfer():
# server.transfer_credit(self.user_id, recipient, otp, signature_callback)
# pass
#b = QPushButton(_("Transfer"))
#b.clicked.connect(on_transfer)
#grid.addWidget(b, 1, 2)
#grid.addWidget(QLabel(_("Next Billing Address:")), i, 0)
#grid.addWidget(QLabel(self.billing_info['billing_address']), i, 1)
vbox.addLayout(Buttons(CloseButton(d)))
d.exec_()
def on_buy(self, window, k, v, d):
d.close()
if window.pluginsdialog:
window.pluginsdialog.close()
wallet = window.wallet
uri = "bitcoin:" + wallet.billing_info['billing_address'] + "?message=TrustedCoin %d Prepaid Transactions&amount="%k + str(Decimal(v)/100000000)
wallet.is_billing = True
window.pay_to_URI(uri)
window.payto_e.setFrozen(True)
window.message_e.setFrozen(True)
window.amount_e.setFrozen(True)
def accept_terms_of_use(self, window):
vbox = QVBoxLayout()
vbox.addWidget(QLabel(_("Terms of Service")))
tos_e = QTextEdit()
tos_e.setReadOnly(True)
vbox.addWidget(tos_e)
vbox.addWidget(QLabel(_("Please enter your e-mail address")))
email_e = QLineEdit()
vbox.addWidget(email_e)
next_button = window.next_button
prior_button_text = next_button.text()
next_button.setText(_('Accept'))
def request_TOS():
tos = server.get_terms_of_service()
self.TOS = tos
window.emit(SIGNAL('twofactor:TOS'))
def on_result():
tos_e.setText(self.TOS)
def set_enabled():
next_button.setEnabled(re.match(regexp,email_e.text()) is not None)
window.connect(window, SIGNAL('twofactor:TOS'), on_result)
t = Thread(target=request_TOS)
t.setDaemon(True)
t.start()
regexp = r"[^@]+@[^@]+\.[^@]+"
email_e.textChanged.connect(set_enabled)
email_e.setFocus(True)
window.set_main_layout(vbox, next_enabled=False)
next_button.setText(prior_button_text)
return str(email_e.text())
def setup_google_auth(self, window, _id, otp_secret):
vbox = QVBoxLayout()
if otp_secret is not None:
uri = "otpauth://totp/%s?secret=%s"%('trustedcoin.com', otp_secret)
l = QLabel("Please scan the following QR code in Google Authenticator. You may as well use the following key: %s"%otp_secret)
l.setWordWrap(True)
vbox.addWidget(l)
qrw = QRCodeWidget(uri)
vbox.addWidget(qrw, 1)
msg = _('Then, enter your Google Authenticator code:')
else:
label = QLabel("This wallet is already registered, but it was never authenticated. To finalize your registration, please enter your Google Authenticator Code. If you do not have this code, delete the wallet file and start a new registration")
label.setWordWrap(1)
vbox.addWidget(label)
msg = _('Google Authenticator code:')
hbox = QHBoxLayout()
hbox.addWidget(WWLabel(msg))
pw = AmountEdit(None, is_int = True)
pw.setFocus(True)
pw.setMaximumWidth(50)
hbox.addWidget(pw)
vbox.addLayout(hbox)
def set_enabled():
window.next_button.setEnabled(len(pw.text()) == 6)
pw.textChanged.connect(set_enabled)
while True:
if not window.set_main_layout(vbox, next_enabled=False,
raise_on_cancel=False):
return False
otp = pw.get_amount()
try:
server.auth(_id, otp)
return True
except:
window.show_message(_('Incorrect password'))
pw.setText('')
|
cryptapus/electrum-uno
|
plugins/trustedcoin/qt.py
|
Python
|
mit
| 11,440
|
import json
def response_to_str(response):
content = response.content
try:
# A bytes message, decode it as str
if isinstance(content, bytes):
content = content.decode()
if response.headers.get("content-type") == "application/json":
# Errors from Artifactory looks like:
# {"errors" : [ {"status" : 400, "message" : "Bla bla bla"}]}
try:
data = json.loads(content)["errors"][0]
content = "{}: {}".format(data["status"], data["message"])
except Exception:
pass
return content
except Exception:
return response.content
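# --- Illustration (not part of Conan) -----------------------------------------
# Shows how response_to_str unpacks an Artifactory-style JSON error body.
# ``_FakeResponse`` is a hypothetical stand-in for a ``requests`` response
# object; the snippet is guarded so it never runs on import.
if __name__ == "__main__":
    class _FakeResponse(object):
        headers = {"content-type": "application/json"}
        content = b'{"errors": [{"status": 400, "message": "Bad request"}]}'
    print(response_to_str(_FakeResponse()))  # -> "400: Bad request"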
|
memsharded/conan
|
conans/client/rest/__init__.py
|
Python
|
mit
| 681
|
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from open_municipio.locations.models import Location
class LocationAdmin(admin.ModelAdmin):
list_display = ('name', 'count')
admin.site.register(Location, LocationAdmin)
|
openpolis/open_municipio
|
open_municipio/locations/admin.py
|
Python
|
agpl-3.0
| 267
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: vyos_config
version_added: "2.2"
author: "Nathaniel Case (@qalthos)"
short_description: Manage VyOS configuration on remote device
description:
- This module provides configuration file management of VyOS
devices. It provides arguments for managing both the
configuration file and state of the active configuration. All
configuration statements are based on `set` and `delete` commands
in the device configuration.
extends_documentation_fragment: vyos
notes:
- Tested against VYOS 1.1.7
- Abbreviated commands are NOT idempotent, see
L(Network FAQ,../network/user_guide/faq.html#why-do-the-config-modules-always-return-changed-true-with-abbreviated-commands).
options:
lines:
description:
- The ordered set of configuration lines to be managed and
compared with the existing configuration on the remote
device.
src:
description:
- The C(src) argument specifies the path to the source config
file to load. The source config file can either be in
bracket format or set format. The source file can include
Jinja2 template variables.
match:
description:
- The C(match) argument controls the method used to match
against the current active configuration. By default, the
desired config is matched against the active config and the
deltas are loaded. If the C(match) argument is set to C(none)
the active configuration is ignored and the configuration is
always loaded.
default: line
choices: ['line', 'none']
backup:
description:
- The C(backup) argument will backup the current devices active
configuration to the Ansible control host prior to making any
changes. The backup file will be located in the backup folder
in the playbook root directory or role root directory, if
playbook is part of an ansible role. If the directory does not
exist, it is created.
type: bool
default: 'no'
comment:
description:
- Allows a commit description to be specified to be included
when the configuration is committed. If the configuration is
not changed or committed, this argument is ignored.
default: 'configured by vyos_config'
config:
description:
- The C(config) argument specifies the base configuration to use
to compare against the desired configuration. If this value
is not specified, the module will automatically retrieve the
current active configuration from the remote device.
save:
description:
- The C(save) argument controls whether or not changes made
to the active configuration are saved to disk. This is
independent of committing the config. When set to True, the
active configuration is saved.
type: bool
default: 'no'
"""
EXAMPLES = """
- name: configure the remote device
vyos_config:
lines:
- set system host-name {{ inventory_hostname }}
- set service lldp
- delete service dhcp-server
- name: backup and load from file
vyos_config:
src: vyos.cfg
backup: yes
- name: render a Jinja2 template onto the VyOS router
vyos_config:
src: vyos_template.j2
- name: for idempotency, use full-form commands
vyos_config:
lines:
# - set int eth eth2 description 'OUTSIDE'
- set interface ethernet eth2 description 'OUTSIDE'
"""
RETURN = """
commands:
description: The list of configuration commands sent to the device
returned: always
type: list
sample: ['...', '...']
filtered:
description: The list of configuration commands removed to avoid a load failure
returned: always
type: list
sample: ['...', '...']
backup_path:
description: The full path to the backup file
returned: when backup is yes
type: string
sample: /playbooks/ansible/backup/vyos_config.2016-07-16@22:28:34
"""
import re
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import ConnectionError
from ansible.module_utils.network.vyos.vyos import load_config, get_config, run_commands
from ansible.module_utils.network.vyos.vyos import vyos_argument_spec, get_connection
DEFAULT_COMMENT = 'configured by vyos_config'
CONFIG_FILTERS = [
re.compile(r'set system login user \S+ authentication encrypted-password')
]
def get_candidate(module):
contents = module.params['src'] or module.params['lines']
if module.params['src']:
contents = format_commands(contents.splitlines())
contents = '\n'.join(contents)
return contents
def format_commands(commands):
return [line for line in commands if len(line.strip()) > 0]
def diff_config(commands, config):
config = [str(c).replace("'", '') for c in config.splitlines()]
updates = list()
visited = set()
for line in commands:
item = str(line).replace("'", '')
if not item.startswith('set') and not item.startswith('delete'):
raise ValueError('line must start with either `set` or `delete`')
elif item.startswith('set') and item not in config:
updates.append(line)
elif item.startswith('delete'):
if not config:
updates.append(line)
else:
item = re.sub(r'delete', 'set', item)
for entry in config:
if entry.startswith(item) and line not in visited:
updates.append(line)
visited.add(line)
return list(updates)
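# Quick illustration (not part of the module; values are hypothetical) of what
# diff_config() returns:
#
#   commands = ["set system host-name r1", "delete service dhcp-server"]
#   config = ("set system host-name r1\n"
#             "set service dhcp-server shared-network-name 'LAN'")
#   diff_config(commands, config)  ->  ["delete service dhcp-server"]
#
# The "set" line is dropped because it already exists in the running config;
# the "delete" line is kept because a matching "set service dhcp-server ..."
# entry is present.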
def sanitize_config(config, result):
result['filtered'] = list()
index_to_filter = list()
for regex in CONFIG_FILTERS:
for index, line in enumerate(list(config)):
if regex.search(line):
result['filtered'].append(line)
index_to_filter.append(index)
# Delete all filtered configs
for filter_index in sorted(index_to_filter, reverse=True):
del config[filter_index]
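# Quick illustration (not part of the module; values are hypothetical) of what
# sanitize_config() does: encrypted-password lines are moved out of the command
# list and into result['filtered'].
#
#   cfg = ["set system host-name r1",
#          "set system login user admin authentication encrypted-password 'x'"]
#   res = {}
#   sanitize_config(cfg, res)
#   cfg             ->  ["set system host-name r1"]
#   res['filtered'] ->  ["set system login user admin authentication encrypted-password 'x'"]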
def run(module, result):
# get the current active config from the node or passed in via
# the config param
config = module.params['config'] or get_config(module)
# create the candidate config object from the arguments
candidate = get_candidate(module)
# create loadable config that includes only the configuration updates
connection = get_connection(module)
try:
response = connection.get_diff(candidate=candidate, running=config, diff_match=module.params['match'])
except ConnectionError as exc:
module.fail_json(msg=to_text(exc, errors='surrogate_then_replace'))
commands = response.get('config_diff')
sanitize_config(commands, result)
result['commands'] = commands
commit = not module.check_mode
comment = module.params['comment']
diff = None
if commands:
diff = load_config(module, commands, commit=commit, comment=comment)
if result.get('filtered'):
result['warnings'].append('Some configuration commands were '
'removed, please see the filtered key')
result['changed'] = True
if module._diff:
result['diff'] = {'prepared': diff}
def main():
argument_spec = dict(
src=dict(type='path'),
lines=dict(type='list'),
match=dict(default='line', choices=['line', 'none']),
comment=dict(default=DEFAULT_COMMENT),
config=dict(),
backup=dict(type='bool', default=False),
save=dict(type='bool', default=False),
)
argument_spec.update(vyos_argument_spec)
mutually_exclusive = [('lines', 'src')]
module = AnsibleModule(
argument_spec=argument_spec,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True
)
warnings = list()
result = dict(changed=False, warnings=warnings)
if module.params['backup']:
result['__backup__'] = get_config(module=module)
if any((module.params['src'], module.params['lines'])):
run(module, result)
if module.params['save']:
diff = run_commands(module, commands=['configure', 'compare saved'])[1]
if diff != '[edit]':
run_commands(module, commands=['save'])
result['changed'] = True
run_commands(module, commands=['exit'])
module.exit_json(**result)
if __name__ == '__main__':
main()
|
sgerhart/ansible
|
lib/ansible/modules/network/vyos/vyos_config.py
|
Python
|
mit
| 9,276
|
# -*- coding: utf-8 -*-
{
'!langcode!': 'bg',
'!langname!': 'Български',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN',
'"User Exception" debug mode. ': '"User Exception" debug mode. ',
'%s': '%s',
'%s %%{row} deleted': '%s записите бяха изтрити',
'%s %%{row} updated': '%s записите бяха обновени',
'%s selected': '%s selected',
'%s students registered': '%s students registered',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'(requires internet access)': '(requires internet access)',
'(requires internet access, experimental)': '(requires internet access, experimental)',
'(something like "it-it")': '(something like "it-it")',
'(version %s)': '(version %s)',
'?': '?',
'@markmin\x01Searching: **%s** %%{file}': 'Searching: **%s** files',
'A new version of web2py is available': 'A new version of web2py is available',
'A new version of web2py is available: %s': 'A new version of web2py is available: %s',
'Abort': 'Abort',
'About': 'about',
'About application': 'About application',
'Accept Terms': 'Accept Terms',
'Add breakpoint': 'Add breakpoint',
'additional code for your application': 'additional code for your application',
'Additional code for your application': 'Additional code for your application',
'Admin design page': 'Admin design page',
'admin disabled because no admin password': 'admin disabled because no admin password',
'admin disabled because not supported on google app engine': 'admin disabled because not supported on google apps engine',
'admin disabled because too many invalid login attempts': 'admin disabled because too many invalid login attempts',
'admin disabled because unable to access password file': 'admin disabled because unable to access password file',
'Admin is disabled because insecure channel': 'Admin is disabled because insecure channel',
'Admin is disabled because unsecure channel': 'Admin is disabled because unsecure channel',
'Admin language': 'Admin language',
'Admin versioning page': 'Admin versioning page',
'administrative interface': 'administrative interface',
'Administrator Password:': 'Administrator Password:',
'and rename it (required):': 'and rename it (required):',
'and rename it:': 'and rename it:',
'App does not exist or you are not authorized': 'App does not exist or you are not authorized',
'appadmin': 'appadmin',
'appadmin is disabled because insecure channel': 'appadmin is disabled because insecure channel',
'Application': 'Application',
'application "%s" uninstalled': 'application "%s" uninstalled',
'Application cannot be generated in demo mode': 'Application cannot be generated in demo mode',
'application compiled': 'application compiled',
'Application exists already': 'Application exists already',
'application is compiled and cannot be designed': 'application is compiled and cannot be designed',
'Application name:': 'Application name:',
'Application updated via git pull': 'Application updated via git pull',
'are not used': 'are not used',
'are not used yet': 'are not used yet',
'Are you sure you want to delete file "%s"?': 'Are you sure you want to delete file "%s"?',
'Are you sure you want to delete plugin "%s"?': 'Are you sure you want to delete plugin "%s"?',
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
'Are you sure you want to uninstall application "%s"': 'Are you sure you want to uninstall application "%s"',
'Are you sure you want to uninstall application "%s"?': 'Are you sure you want to uninstall application "%s"?',
'Are you sure you want to upgrade web2py now?': 'Are you sure you want to upgrade web2py now?',
'Are you sure?': 'Are you sure?',
'arguments': 'arguments',
'at char %s': 'at char %s',
'at line %s': 'at line %s',
'ATTENTION:': 'ATTENTION:',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.',
'ATTENTION: you cannot edit the running application!': 'ATTENTION: you cannot edit the running application!',
'Autocomplete Python Code': 'Autocomplete Python Code',
'Available databases and tables': 'Available databases and tables',
'Available Databases and Tables': 'Available Databases and Tables',
'back': 'back',
'Back to the plugins list': 'Back to the plugins list',
'Back to wizard': 'Back to wizard',
'Basics': 'Basics',
'Begin': 'Begin',
'breakpoint': 'breakpoint',
'Breakpoints': 'Breakpoints',
'breakpoints': 'breakpoints',
'Bulk Register': 'Bulk Register',
'Bulk Student Registration': 'Bulk Student Registration',
'Cache': 'Cache',
'cache': 'cache',
'Cache Cleared': 'Cache Cleared',
'Cache Keys': 'Cache Keys',
'cache, errors and sessions cleaned': 'cache, errors and sessions cleaned',
'can be a git repo': 'can be a git repo',
'Cancel': 'Cancel',
'Cannot be empty': 'Cannot be empty',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'Cannot compile: there are errors in your app. Debug it, correct errors and try again.',
'Cannot compile: there are errors in your app:': 'Cannot compile: there are errors in your app:',
'cannot create file': 'cannot create file',
'cannot upload file "%(filename)s"': 'cannot upload file "%(filename)s"',
'Change Admin Password': 'Change Admin Password',
'Change admin password': 'change admin password',
'change editor settings': 'change editor settings',
'Changelog': 'Changelog',
'check all': 'check all',
'Check for upgrades': 'check for upgrades',
'Check to delete': 'Check to delete',
'Checking for upgrades...': 'Checking for upgrades...',
'Clean': 'clean',
'Clear': 'Clear',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'click here for online examples': 'щракни тук за онлайн примери',
'click here for the administrative interface': 'щракни тук за административния интерфейс',
'Click row to expand traceback': 'Click row to expand traceback',
'Click row to view a ticket': 'Click row to view a ticket',
'click to check for upgrades': 'click to check for upgrades',
'code': 'code',
'Code listing': 'Code listing',
'collapse/expand all': 'collapse/expand all',
'Command': 'Command',
'Comment:': 'Comment:',
'Commit': 'Commit',
'Commit form': 'Commit form',
'Committed files': 'Committed files',
'Compile': 'compile',
'Compile (all or nothing)': 'Compile (all or nothing)',
'Compile (skip failed views)': 'Compile (skip failed views)',
'compiled application removed': 'compiled application removed',
'Condition': 'Condition',
'continue': 'continue',
'Controllers': 'Controllers',
'controllers': 'controllers',
'Count': 'Count',
'Create': 'create',
'create file with filename:': 'create file with filename:',
'create new application:': 'create new application:',
'Create new simple application': 'Create new simple application',
'Create/Upload': 'Create/Upload',
'created by': 'created by',
'Created by:': 'Created by:',
'Created On': 'Created On',
'Created on:': 'Created on:',
'crontab': 'crontab',
'Current request': 'Current request',
'Current response': 'Current response',
'Current session': 'Current session',
'currently running': 'currently running',
'currently saved or': 'currently saved or',
'data uploaded': 'данните бяха качени',
'Database': 'Database',
'database': 'database',
'Database %s select': 'Database %s select',
'database %s select': 'database %s select',
'Database administration': 'Database administration',
'database administration': 'database administration',
'Database Administration (appadmin)': 'Database Administration (appadmin)',
'Date and Time': 'Date and Time',
'db': 'дб',
'Debug': 'Debug',
'defines tables': 'defines tables',
'Delete': 'Delete',
'delete': 'delete',
'delete all checked': 'delete all checked',
'delete plugin': 'delete plugin',
'Delete this file (you will be asked to confirm deletion)': 'Delete this file (you will be asked to confirm deletion)',
'Delete:': 'Delete:',
'deleted after first hit': 'deleted after first hit',
'Demo': 'Demo',
'Deploy': 'deploy',
'Deploy on Google App Engine': 'Deploy on Google App Engine',
'Deploy to OpenShift': 'Deploy to OpenShift',
'Deploy to pythonanywhere': 'Deploy to pythonanywhere',
'Deploy to PythonAnywhere': 'Deploy to PythonAnywhere',
'Deployment form': 'Deployment form',
'Deployment Interface': 'Deployment Interface',
'Description:': 'Description:',
'design': 'дизайн',
'DESIGN': 'DESIGN',
'Design for': 'Design for',
'Detailed traceback description': 'Detailed traceback description',
'details': 'details',
'direction: ltr': 'direction: ltr',
'directory not found': 'directory not found',
'Disable': 'Disable',
'Disabled': 'Disabled',
'disabled in demo mode': 'disabled in demo mode',
'disabled in GAE mode': 'disabled in GAE mode',
'disabled in multi user mode': 'disabled in multi user mode',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'Display line numbers': 'Display line numbers',
'DO NOT use the "Pack compiled" feature.': 'DO NOT use the "Pack compiled" feature.',
'docs': 'docs',
'Docs': 'Docs',
'done!': 'готово!',
'Downgrade': 'Downgrade',
'Download .w2p': 'Download .w2p',
'Download as .exe': 'Download as .exe',
'download layouts': 'download layouts',
'Download layouts from repository': 'Download layouts from repository',
'download plugins': 'download plugins',
'Download plugins from repository': 'Download plugins from repository',
'EDIT': 'EDIT',
'Edit': 'edit',
'edit all': 'edit all',
'Edit application': 'Edit application',
'edit controller': 'edit controller',
'edit controller:': 'edit controller:',
'Edit current record': 'Edit current record',
'edit views:': 'edit views:',
'Editing %s': 'Editing %s',
'Editing file': 'Editing file',
'Editing file "%s"': 'Editing file "%s"',
'Editing Language file': 'Editing Language file',
'Editing Plural Forms File': 'Editing Plural Forms File',
'Editor': 'Editor',
'Email Address': 'Email Address',
'Enable': 'Enable',
'Enable Close-Tag': 'Enable Close-Tag',
'Enable Code Folding': 'Enable Code Folding',
'Enterprise Web Framework': 'Enterprise Web Framework',
'Error': 'Error',
'Error logs for "%(app)s"': 'Error logs for "%(app)s"',
'Error snapshot': 'Error snapshot',
'Error ticket': 'Error ticket',
'Errors': 'errors',
'Exception %(extype)s: %(exvalue)s': 'Exception %(extype)s: %(exvalue)s',
'Exception %s': 'Exception %s',
'Exception instance attributes': 'Exception instance attributes',
'Exit Fullscreen': 'Exit Fullscreen',
'Expand Abbreviation (html files only)': 'Expand Abbreviation (html files only)',
'export as csv file': 'export as csv file',
'Exports:': 'Exports:',
'exposes': 'exposes',
'exposes:': 'exposes:',
'extends': 'extends',
'failed to compile file because:': 'failed to compile file because:',
'failed to reload module': 'failed to reload module',
'failed to reload module because:': 'failed to reload module because:',
'File': 'File',
'file "%(filename)s" created': 'file "%(filename)s" created',
'file "%(filename)s" deleted': 'file "%(filename)s" deleted',
'file "%(filename)s" uploaded': 'file "%(filename)s" uploaded',
'file "%(filename)s" was not deleted': 'file "%(filename)s" was not deleted',
'file "%s" of %s restored': 'file "%s" of %s restored',
'file changed on disk': 'file changed on disk',
'file does not exist': 'file does not exist',
'file not found': 'file not found',
'file saved on %(time)s': 'file saved on %(time)s',
'file saved on %s': 'file saved on %s',
'filename': 'filename',
'Filename': 'Filename',
'Files added': 'Files added',
'filter': 'filter',
'Find Next': 'Find Next',
'Find Previous': 'Find Previous',
'Form has errors': 'Form has errors',
'Frames': 'Frames',
'Functions with no doctests will result in [passed] tests.': 'Functions with no doctests will result in [passed] tests.',
'GAE Email': 'GAE Email',
'GAE Output': 'GAE Output',
'GAE Password': 'GAE Password',
'Generate': 'Generate',
'Get from URL:': 'Get from URL:',
'Git Pull': 'Git Pull',
'Git Push': 'Git Push',
'Globals##debug': 'Globals##debug',
'go!': 'go!',
'Google App Engine Deployment Interface': 'Google App Engine Deployment Interface',
'Google Application Id': 'Google Application Id',
'Goto': 'Goto',
'graph model': 'graph model',
'Graph Model': 'Graph Model',
'Hello World': 'Здравей, свят',
'Help': 'help',
'here': 'here',
'Hide/Show Translated strings': 'Hide/Show Translated strings',
'Highlight current line': 'Highlight current line',
'Hits': 'Hits',
'Home': 'Home',
'honored only if the expression evaluates to true': 'honored only if the expression evaluates to true',
'htmledit': 'htmledit',
'If start the downgrade, be patient, it may take a while to rollback': 'If start the downgrade, be patient, it may take a while to rollback',
'If start the upgrade, be patient, it may take a while to download': 'If start the upgrade, be patient, it may take a while to download',
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\n\t\tA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\n\t\tA green title indicates that all tests (if defined) passed. In this case test results are not shown.',
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.',
'if your application uses a database other than sqlite you will then have to configure its DAL in pythonanywhere.': 'if your application uses a database other than sqlite you will then have to configure its DAL in pythonanywhere.',
'import': 'import',
'Import/Export': 'Import/Export',
'In development, use the default Rocket webserver that is currently supported by this debugger.': 'In development, use the default Rocket webserver that is currently supported by this debugger.',
'includes': 'includes',
'Indent with tabs': 'Indent with tabs',
'insert new': 'insert new',
'insert new %s': 'insert new %s',
'inspect attributes': 'inspect attributes',
'Install': 'install',
'Installation of %(plugin)s for %(app)s': 'Installation of %(plugin)s for %(app)s',
'Installed applications': 'Installed applications',
'Interaction at %s line %s': 'Interaction at %s line %s',
'Interactive console': 'Interactive console',
'internal error': 'internal error',
'internal error: %s': 'internal error: %s',
'Internal State': 'Internal State',
'Invalid action': 'Invalid action',
'Invalid application name': 'Invalid application name',
'invalid circular reference': 'invalid circular reference',
'Invalid git repository specified.': 'Invalid git repository specified.',
'invalid password': 'invalid password',
'invalid password.': 'invalid password.',
'Invalid Query': 'Невалидна заявка',
'invalid request': 'невалидна заявка',
'Invalid request': 'Invalid request',
'invalid table names (auth_* tables already defined)': 'invalid table names (auth_* tables already defined)',
'invalid ticket': 'invalid ticket',
'Key': 'Key',
'Keyboard shortcuts': 'Keyboard shortcuts',
'kill process': 'kill process',
'language file "%(filename)s" created/updated': 'language file "%(filename)s" created/updated',
'Language files (static strings) updated': 'Language files (static strings) updated',
'languages': 'languages',
'Languages': 'Languages',
'languages updated': 'languages updated',
'Last Revision': 'Last Revision',
'Last saved on:': 'Last saved on:',
'License for': 'License for',
'License:': 'License:',
'Line Nr': 'Line Nr',
'Line number': 'Line number',
'lists by exception': 'lists by exception',
'lists by ticket': 'lists by ticket',
'Loading...': 'Loading...',
'loading...': 'loading...',
'Local Apps': 'Local Apps',
'locals': 'locals',
'Locals##debug': 'Locals##debug',
'Login': 'Login',
'login': 'login',
'Login successful': 'Login successful',
'Login to the Administrative Interface': 'Login to the Administrative Interface',
'Login/Register': 'Login/Register',
'Logout': 'logout',
'lost password': 'lost password',
'Main Menu': 'Main Menu',
'Manage': 'Manage',
'Manage %(action)s': 'Manage %(action)s',
'Manage Access Control': 'Manage Access Control',
'Manage Admin Users/Students': 'Manage Admin Users/Students',
'Manage Cache': 'Manage Cache',
'Manage Students': 'Manage Students',
'Memberships': 'Memberships',
'merge': 'merge',
'Models': 'Models',
'models': 'models',
'Modified On': 'Modified On',
'Modules': 'Modules',
'modules': 'modules',
'Multi User Mode': 'Multi User Mode',
'new application "%s" created': 'new application "%s" created',
'new application "%s" imported': 'new application "%s" imported',
'New Application Wizard': 'New Application Wizard',
'New application wizard': 'New application wizard',
'new plugin installed': 'new plugin installed',
'New plugin installed: %s': 'New plugin installed: %s',
'New Record': 'New Record',
'new record inserted': 'новият запис беше добавен',
'New simple application': 'New simple application',
'next': 'next',
'next %s rows': 'next %s rows',
'next 100 rows': 'next 100 rows',
'NO': 'NO',
'no changes': 'no changes',
'No databases in this application': 'No databases in this application',
'No Interaction yet': 'No Interaction yet',
'no match': 'no match',
'no package selected': 'no package selected',
'no permission to uninstall "%s"': 'no permission to uninstall "%s"',
'Node:': 'Node:',
'Not Authorized': 'Not Authorized',
'Not supported': 'Not supported',
'Note: If you receive an error with github status code of 128, ensure the system and account you are deploying from has a cooresponding ssh key configured in the openshift account.': 'Note: If you receive an error with github status code of 128, ensure the system and account you are deploying from has a corresponding ssh key configured in the openshift account.',
"On production, you'll have to configure your webserver to use one process and multiple threads to use this debugger.": "On production, you'll have to configure your webserver to use one process and multiple threads to use this debugger.",
'Open new app in new window': 'Open new app in new window',
'OpenShift Deployment Interface': 'OpenShift Deployment Interface',
'OpenShift Output': 'OpenShift Output',
'or alternatively': 'or alternatively',
'Or Get from URL:': 'Or Get from URL:',
'or import from csv file': 'or import from csv file',
'or provide app url:': 'or provide app url:',
'or provide application url:': 'or provide application url:',
'Original/Translation': 'Original/Translation',
'Overview': 'Overview',
'Overwrite installed app': 'overwrite installed app',
'Pack all': 'pack all',
'Pack compiled': 'pack compiled',
'Pack custom': 'Pack custom',
'pack plugin': 'pack plugin',
'PAM authenticated user, cannot change password here': 'PAM authenticated user, cannot change password here',
'password changed': 'password changed',
'Past revisions': 'Past revisions',
'Path to appcfg.py': 'Path to appcfg.py',
'Path to local openshift repo root.': 'Path to local openshift repo root.',
'Peeking at file': 'Peeking at file',
'Permission': 'Permission',
'Permissions': 'Permissions',
'Please': 'Please',
'Please wait, giving pythonanywhere a moment...': 'Please wait, giving pythonanywhere a moment...',
'plugin "%(plugin)s" deleted': 'plugin "%(plugin)s" deleted',
'Plugin "%s" in application': 'Plugin "%s" in application',
'plugin not specified': 'plugin not specified',
'Plugin page': 'Plugin page',
'plugins': 'plugins',
'Plugins': 'Plugins',
'Plural Form #%s': 'Plural Form #%s',
'Plural-Forms:': 'Plural-Forms:',
'Powered by': 'Powered by',
'Preferences saved correctly': 'Preferences saved correctly',
'Preferences saved on session only': 'Preferences saved on session only',
'previous %s rows': 'previous %s rows',
'previous 100 rows': 'previous 100 rows',
'Private files': 'Private files',
'private files': 'private files',
'Project Progress': 'Project Progress',
'Pull': 'Pull',
'Pull failed, certain files could not be checked out. Check logs for details.': 'Pull failed, certain files could not be checked out. Check logs for details.',
'Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.': 'Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.',
'Push': 'Push',
'Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.': 'Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.',
'pygraphviz library not found': 'pygraphviz library not found',
'PythonAnywhere Apps': 'PythonAnywhere Apps',
'PythonAnywhere Password': 'PythonAnywhere Password',
'Query:': 'Query:',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Rapid Search': 'Rapid Search',
'Record': 'Record',
'record': 'record',
'record does not exist': 'записът не съществува',
'record id': 'record id',
'Record id': 'Record id',
'refresh': 'refresh',
'register': 'register',
'Reload routes': 'Reload routes',
'Remove compiled': 'remove compiled',
'Removed Breakpoint on %s at line %s': 'Removed Breakpoint on %s at line %s',
'Replace': 'Replace',
'Replace All': 'Replace All',
'Repository (%s)': 'Repository (%s)',
'request': 'request',
'requires distutils, but not installed': 'requires distutils, but not installed',
'requires python-git, but not installed': 'requires python-git, but not installed',
'Resolve Conflict file': 'Resolve Conflict file',
'response': 'response',
'restart': 'restart',
'restore': 'restore',
'return': 'return',
'Revert': 'Revert',
'revert': 'revert',
'reverted to revision %s': 'reverted to revision %s',
'Revision %s': 'Revision %s',
'Revision:': 'Revision:',
'Role': 'Role',
'Roles': 'Roles',
'Rows in table': 'Rows in table',
'Rows in Table': 'Rows in Table',
'Rows selected': 'Rows selected',
'rules are not defined': 'rules are not defined',
'Run tests': 'Run tests',
'Run tests in this file': 'Run tests in this file',
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Run tests in this file (to run all files, you may also use the button labelled 'test')",
'Running on %s': 'Running on %s',
'Save': 'Save',
'save': 'save',
'Save file:': 'Save file:',
'Save file: %s': 'Save file: %s',
'Save model as...': 'Save model as...',
'Save via Ajax': 'Save via Ajax',
'Saved file hash:': 'Saved file hash:',
'Screenshot %s': 'Screenshot %s',
'Search': 'Search',
'Select Files to Package': 'Select Files to Package',
'selected': 'selected',
'session': 'session',
'session expired': 'session expired',
'Session saved correctly': 'Session saved correctly',
'Session saved on session only': 'Session saved on session only',
'Set Breakpoint on %s at line %s: %s': 'Set Breakpoint on %s at line %s: %s',
'shell': 'shell',
'Showing %s to %s of %s %s found': 'Showing %s to %s of %s %s found',
'Singular Form': 'Singular Form',
'Site': 'site',
'Size of cache:': 'Size of cache:',
'skip to generate': 'skip to generate',
'some files could not be removed': 'some files could not be removed',
'Something went wrong please wait a few minutes before retrying': 'Something went wrong please wait a few minutes before retrying',
'Sorry, could not find mercurial installed': 'Sorry, could not find mercurial installed',
'source : db': 'source : db',
'source : filesystem': 'source : filesystem',
'Start a new app': 'Start a new app',
'Start searching': 'Start searching',
'Start wizard': 'start wizard',
'state': 'състояние',
'Static': 'Static',
'static': 'static',
'Static files': 'Static files',
'Statistics': 'Statistics',
'Step': 'Step',
'step': 'step',
'stop': 'stop',
'submit': 'submit',
'Submit': 'Submit',
'successful': 'successful',
'Sure you want to delete this object?': 'Сигурен ли си, че искаш да изтриеш този обект?',
'switch to : db': 'switch to : db',
'switch to : filesystem': 'switch to : filesystem',
'Tab width (# characters)': 'Tab width (# characters)',
'table': 'table',
'Table': 'Table',
'Temporary': 'Temporary',
'test': 'test',
'Testing application': 'Testing application',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.',
'The app exists, was created by wizard, continue to overwrite!': 'The app exists, was created by wizard, continue to overwrite!',
'The app exists, was NOT created by wizard, continue to overwrite!': 'The app exists, was NOT created by wizard, continue to overwrite!',
'the application logic, each URL path is mapped in one exposed function in the controller': 'the application logic, each URL path is mapped in one exposed function in the controller',
'The application logic, each URL path is mapped in one exposed function in the controller': 'The application logic, each URL path is mapped in one exposed function in the controller',
'the data representation, define database tables and sets': 'the data representation, define database tables and sets',
'The data representation, define database tables and sets': 'The data representation, define database tables and sets',
'The presentations layer, views are also known as templates': 'The presentations layer, views are also known as templates',
'the presentations layer, views are also known as templates': 'the presentations layer, views are also known as templates',
'Theme': 'Theme',
'There are no controllers': 'There are no controllers',
'There are no models': 'There are no models',
'There are no modules': 'There are no modules',
'There are no plugins': 'There are no plugins',
'There are no private files': 'There are no private files',
'There are no static files': 'There are no static files',
'There are no translators': 'There are no translators',
'There are no translators, only default language is supported': 'There are no translators, only default language is supported',
'There are no views': 'There are no views',
'These files are not served, they are only available from within your app': 'These files are not served, they are only available from within your app',
'These files are served without processing, your images go here': 'These files are served without processing, your images go here',
'these files are served without processing, your images go here': 'these files are served without processing, your images go here',
"This debugger may not work properly if you don't have a threaded webserver or you're using multiple daemon processes.": "This debugger may not work properly if you don't have a threaded webserver or you're using multiple daemon processes.",
'This is an experimental feature and it needs more testing. If you decide to downgrade you do it at your own risk': 'This is an experimental feature and it needs more testing. If you decide to downgrade you do it at your own risk',
'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk': 'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk',
'This is the %(filename)s template': 'This is the %(filename)s template',
"This page can commit your changes to an openshift app repo and push them to your cloud instance. This assumes that you've already created the application instance using the web2py skeleton and have that repo somewhere on a filesystem that this web2py instance can access. This functionality requires GitPython installed and on the python path of the runtime that web2py is operating in.": "This page can commit your changes to an openshift app repo and push them to your cloud instance. This assumes that you've already created the application instance using the web2py skeleton and have that repo somewhere on a filesystem that this web2py instance can access. This functionality requires GitPython installed and on the python path of the runtime that web2py is operating in.",
'This page can upload your application to the Google App Engine computing cloud. Mind that you must first create indexes locally and this is done by installing the Google appserver and running the app locally with it once, or there will be errors when selecting records. Attention: deployment may take long time, depending on the network speed. Attention: it will overwrite your app.yaml. DO NOT SUBMIT TWICE.': 'This page can upload your application to the Google App Engine computing cloud. Mind that you must first create indexes locally and this is done by installing the Google appserver and running the app locally with it once, or there will be errors when selecting records. Attention: deployment may take long time, depending on the network speed. Attention: it will overwrite your app.yaml. DO NOT SUBMIT TWICE.',
'this page to see if a breakpoint was hit and debug interaction is required.': 'this page to see if a breakpoint was hit and debug interaction is required.',
'This will pull changes from the remote repo for application "%s"?': 'This will pull changes from the remote repo for application "%s"?',
'This will push changes to the remote repo for application "%s".': 'This will push changes to the remote repo for application "%s".',
'Ticket': 'Ticket',
'Ticket ID': 'Ticket ID',
'Ticket Missing': 'Ticket Missing',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'TM': 'TM',
'to previous version.': 'to previous version.',
'To create a plugin, name a file/folder plugin_[name]': 'To create a plugin, name a file/folder plugin_[name]',
'To emulate a breakpoint programatically, write:': 'To emulate a breakpoint programatically, write:',
'to use the debugger!': 'to use the debugger!',
'toggle breakpoint': 'toggle breakpoint',
'Toggle comment': 'Toggle comment',
'Toggle Fullscreen': 'Toggle Fullscreen',
'Traceback': 'Traceback',
'translation strings for the application': 'translation strings for the application',
'Translation strings for the application': 'Translation strings for the application',
'try': 'try',
'try something like': 'try something like',
'Try the mobile interface': 'Try the mobile interface',
'try view': 'try view',
'Type PDB debugger command in here and hit Return (Enter) to execute it.': 'Type PDB debugger command in here and hit Return (Enter) to execute it.',
'Type some Python code in here and hit Return (Enter) to execute it.': 'Type some Python code in here and hit Return (Enter) to execute it.',
'Unable to check for upgrades': 'Unable to check for upgrades',
'unable to create application "%s"': 'unable to create application "%s"',
'unable to delete file "%(filename)s"': 'unable to delete file "%(filename)s"',
'unable to delete file plugin "%(plugin)s"': 'unable to delete file plugin "%(plugin)s"',
'Unable to determine the line number!': 'Unable to determine the line number!',
'Unable to download': 'Unable to download',
'Unable to download app because:': 'Unable to download app because:',
'Unable to download because': 'Unable to download because',
'unable to download layout': 'unable to download layout',
'unable to download plugin: %s': 'unable to download plugin: %s',
'Unable to download the list of plugins': 'Unable to download the list of plugins',
'unable to install plugin "%s"': 'unable to install plugin "%s"',
'unable to parse csv file': 'не е възможна обработката на csv файла',
'unable to uninstall "%s"': 'unable to uninstall "%s"',
'unable to upgrade because "%s"': 'unable to upgrade because "%s"',
'uncheck all': 'uncheck all',
'Uninstall': 'uninstall',
'Unsupported webserver working mode: %s': 'Unsupported webserver working mode: %s',
'update': 'update',
'update all languages': 'update all languages',
'Update:': 'Update:',
'Upgrade': 'Upgrade',
'upgrade now to %s': 'upgrade now to %s',
'upgrade web2py now': 'upgrade web2py now',
'upload': 'upload',
'Upload': 'Upload',
'Upload & install packed application': 'Upload & install packed application',
'Upload a package:': 'Upload a package:',
'Upload and install packed application': 'Upload and install packed application',
'upload application:': 'upload application:',
'Upload existing application': 'Upload existing application',
'upload file:': 'upload file:',
'upload plugin file:': 'upload plugin file:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.',
'Use an url:': 'Use an url:',
'User': 'User',
'Username': 'Username',
'Users': 'Users',
'Using the shell may lock the database to other users of this app.': 'Using the shell may lock the database to other users of this app.',
'variables': 'variables',
'Version': 'Version',
'Versioning': 'Versioning',
'versioning': 'versioning',
'view': 'view',
'Views': 'Views',
'views': 'views',
'Warning!': 'Warning!',
'WARNING:': 'WARNING:',
'WARNING: The following views could not be compiled:': 'WARNING: The following views could not be compiled:',
'Web Framework': 'Web Framework',
'web2py Admin Password': 'web2py Admin Password',
'web2py apps to deploy': 'web2py apps to deploy',
'web2py Debugger': 'web2py Debugger',
'web2py downgrade': 'web2py downgrade',
'web2py is up to date': 'web2py is up to date',
'web2py online debugger': 'web2py online debugger',
'web2py Recent Tweets': 'web2py Recent Tweets',
'web2py upgrade': 'web2py upgrade',
'web2py upgraded; please restart it': 'web2py upgraded; please restart it',
'Welcome to web2py': 'Добре дошъл в web2py',
'Working...': 'Working...',
'WSGI reference name': 'WSGI reference name',
'YES': 'YES',
'Yes': 'Yes',
'You can also set and remove breakpoint in the edit window, using the Toggle Breakpoint button': 'You can also set and remove breakpoint in the edit window, using the Toggle Breakpoint button',
'You can inspect variables using the console below': 'You can inspect variables using the console below',
'You have one more login attempt before you are locked out': 'You have one more login attempt before you are locked out',
'You need to set up and reach a': 'You need to set up and reach a',
'You only need these if you have already registered': 'You only need these if you have already registered',
'Your application will be blocked until you click an action button (next, step, continue, etc.)': 'Your application will be blocked until you click an action button (next, step, continue, etc.)',
}
|
xiang12835/python_web
|
py2_web2py/web2py/applications/admin/languages/bg.py
|
Python
|
apache-2.0
| 35,738
|
from datetime import datetime
from app import db
from app.exceptions import ValidationError
from flask import current_app, url_for
from flask_login import UserMixin
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from werkzeug.security import generate_password_hash, check_password_hash
from . import lm
ROLE_APPLICANT = 0
ROLE_ADVISER = 1
ROLE_ADMIN = 2
HOUSE = 0
CONDO = 1
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
nickname = db.Column(db.String(64), unique=True)
firstname = db.Column(db.String(100))
lastname = db.Column(db.String(100))
email = db.Column(db.String(120), index=True, unique=True)
pwdhash = db.Column(db.String(54))
phone = db.Column(db.Integer)
address = db.Column(db.String(64))
confirmed = db.Column(db.Boolean, default=False)
role = db.Column(db.SmallInteger, default=ROLE_APPLICANT)
comments = db.relationship('Comment', backref='author', lazy='dynamic')
posts = db.relationship('Post', order_by="Post.timestamp", backref='author',
lazy='dynamic', cascade="all, delete, delete-orphan")
about_me = db.Column(db.Text())
last_seen = db.Column(db.DateTime, default=datetime.utcnow)
member_since = db.Column(db.DateTime(), default=datetime.utcnow)
portrait = db.Column(db.String(140))
pref = db.relationship('Preference', uselist=False, backref='author')
fav = db.relationship('Favourite', backref='user', lazy='dynamic')
active = db.Column(db.Boolean, default=False)
@staticmethod
def make_unique_nickname(nickname):
if User.query.filter_by(nickname=nickname).first() is None:
return nickname
version = 2
while True:
new_nickname = nickname + str(version)
if User.query.filter_by(nickname=new_nickname).first() is None:
break
version += 1
return new_nickname
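# Illustrative behaviour: make_unique_nickname('Alice') returns 'Alice' if that
# nickname is free, otherwise 'Alice2', 'Alice3', ... until an unused one is found.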
def __init__(self, nickname, firstname, lastname, email, password, role):
self.nickname = nickname.title()
self.firstname = firstname.title()
self.lastname = lastname.title()
self.email = email.lower()
self.set_password(password)
self.role = role
def ping(self):
self.last_seen = datetime.utcnow()
db.session.add(self)
def set_password(self, password):
self.pwdhash = generate_password_hash(password)
def check_password(self, password):
return check_password_hash(self.pwdhash, password)
def is_authenticated(self):
return True
def generate_confirmation_token(self, expiration=3600):
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'confirm': self.id})
def confirm(self, token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except Exception:
return False
if data.get('confirm') != self.id:
return False
self.confirmed = True
db.session.add(self)
return True
def to_json(self):
json_user = {
'url': url_for('api.get_post', id=self.id, _external=True),
'nickname': self.nickname,
'member_since': self.member_since,
'last_seen': self.last_seen,
'posts': url_for('api.get_user_posts', id=self.id, _external=True),
'post_count': self.posts.count(),
}
return json_user
def generate_reset_token(self, expiration=3600):
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'reset': self.id})
def generate_auth_token(self, expiration):
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'id': self.id})
@staticmethod
def verify_auth_token(token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except Exception:
return None
return User.query.get(data['id'])
def is_active(self):
return self.active is True
def is_anonymous(self):
return False
def get_id(self):
return unicode(self.id)
def __repr__(self):
return '<User %r>' % self.nickname
class Post(db.Model):
__tablename__ = 'posts'
__searchable__ = ['body']
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(140))
body = db.Column(db.String(1400))
img = db.Column(db.String(140))
timestamp = db.Column(db.DateTime)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
location = db.Column(db.String(140))
price = db.Column(db.Integer)
interested_user = db.relationship('Favourite', backref='author', lazy='dynamic',
cascade="all, delete, delete-orphan")
comments = db.relationship('Comment', backref='post', lazy='dynamic')
style = db.Column(db.String(10), default="house")
bedroom_no = db.Column(db.Integer, default=1)
bathroom_no = db.Column(db.Integer, default=1)
garage_no = db.Column(db.Integer, default=0)
address = db.Column(db.String(100))
coordinate = db.Column(db.String(50))
def __repr__(self):
return '<Post %r>' % (self.body)
def to_json(self):
json_post = {
'url': url_for('api.get_post', id=self.id, _external=True),
'title': self.title,
'body': self.body,
'author': url_for('api.get_user', id=self.user_id, _external=True),
'location': self.location,
'timestamp': self.timestamp,
'price': self.price,
'style': self.style,
'bedroom_no': self.bedroom_no,
'bathroom_no': self.bathroom_no,
'garage_no': self.garage_no,
'address': self.address,
'comments': url_for('api.get_post_comments', id=self.id, _external=True),
'comment_count': self.comments.count()
}
return json_post
@staticmethod
def from_json(json_post):
body = json_post.get('body')
if body is None or body == '':
raise ValidationError('post does not have a body')
return Post(body=body)
class Favourite(db.Model):
__tablename__ = 'favourites'
id = db.Column(db.String(10), primary_key=True, unique=True)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
post_id = db.Column(db.Integer, db.ForeignKey('posts.id'))
def __init__(self, user_id, post_id):
self.id = str(user_id) + ':' + str(post_id)
self.user_id = user_id
self.post_id = post_id
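# The composite id "user_id:post_id" (together with the primary key/unique
# constraint) ensures a user can favourite a given post at most once.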
class Preference(db.Model):
__tablename__ = 'preferences'
id = db.Column(db.Integer, primary_key=True)
style = db.Column(db.String(10), default="house")
bedroom_no = db.Column(db.Integer)
bathroom_no = db.Column(db.Integer)
garage_no = db.Column(db.Integer)
location = db.Column(db.String(140))
price = db.Column(db.Integer)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
notify = db.Column(db.SmallInteger, default=1)
class Comment(db.Model):
__tablename__ = 'comments'
id = db.Column(db.Integer, primary_key=True)
body = db.Column(db.Text)
timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
author_id = db.Column(db.Integer, db.ForeignKey('users.id'))
post_id = db.Column(db.Integer, db.ForeignKey('posts.id'))
def to_json(self):
json_comment = {
'url': url_for('api.get_comment', id=self.id, _external=True),
'post': url_for('api.get_post', id=self.post_id, _external=True),
'body': self.body,
'timestamp': self.timestamp,
'author': url_for('api.get_user', id=self.author_id, _external=True),
}
return json_comment
@staticmethod
def from_json(json_comment):
body = json_comment.get('body')
if body is None or body == '':
raise ValidationError('comment does not have a body')
return Comment(body=body)
|
AlvinCJin/RealEstateApp
|
app/models.py
|
Python
|
bsd-3-clause
| 8,303
|
#!/usr/bin/env python3
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Build a database from git commit histories. Can be used to audit git vs. jira. For usage,
# see README.md.
"""An application to assist Release Managers with ensuring that histories in Git and fixVersions in
JIRA are in agreement. See README.md for a detailed explanation.
"""
import argparse
import csv
import enum
import logging
import pathlib
import re
import sqlite3
import time
import enlighten
import git
import jira
class _DB:
"""Manages an instance of Sqlite on behalf of the application.
Args:
db_path (str): Path to the Sqlite database file. ':memory:' for an ephemeral database.
**_kwargs: Convenience for CLI argument parsing. Ignored.
Attributes:
conn (:obj:`sqlite3.db2api.Connection`): The underlying connection object.
"""
class Action(enum.Enum):
"""Describes an action to be taken against the database."""
ADD = 'ADD'
REVERT = 'REVERT'
SKIP = 'SKIP'
def __init__(self, db_path, initialize_db, **_kwargs):
self._conn = sqlite3.connect(db_path)
if initialize_db:
for table in 'git_commits', 'jira_versions':
self._conn.execute("DROP TABLE IF EXISTS %s" % table)
self._conn.execute("""
CREATE TABLE IF NOT EXISTS "git_commits"(
jira_id TEXT NOT NULL,
branch TEXT NOT NULL,
git_sha TEXT NOT NULL,
git_tag TEXT,
CONSTRAINT pk PRIMARY KEY (jira_id, branch, git_sha)
);""")
self._conn.execute("""
CREATE TABLE IF NOT EXISTS "jira_versions"(
jira_id TEXT NOT NULL,
fix_version TEXT NOT NULL,
CONSTRAINT pk PRIMARY KEY (jira_id, fix_version)
);""")
self._conn.commit()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._conn.close()
@property
def conn(self):
""":obj:`sqlite3.db2api.Connection`: Underlying database handle."""
return self._conn
def apply_commit(self, action, jira_id, branch, git_sha):
"""Apply an edit to the commits database.
Args:
action (:obj:`_DB.Action`): The action to execute.
jira_id (str): The applicable Issue ID from JIRA.
branch (str): The name of the git branch from which the commit originates.
git_sha (str): The commit's SHA.
"""
if action == _DB.Action.ADD:
self.conn.execute(
"INSERT INTO git_commits(jira_id, branch, git_sha) VALUES (upper(?),?,?)",
(jira_id, branch, git_sha))
elif action == _DB.Action.REVERT:
self.conn.execute("""
DELETE FROM git_commits WHERE
jira_id=upper(?)
AND branch=?
""", (jira_id.upper(), branch))
def flush_commits(self):
"""Commit any pending changes to the database."""
self.conn.commit()
def apply_git_tag(self, branch, git_sha, git_tag):
"""Annotate a commit in the commits database as being a part of the specified release.
Args:
branch (str): The name of the git branch from which the commit originates.
git_sha (str): The commit's SHA.
git_tag (str): The first release tag following the commit.
"""
self.conn.execute("UPDATE git_commits SET git_tag = ? WHERE branch = ? AND git_sha = ?",
(git_tag, branch, git_sha))
def apply_fix_version(self, jira_id, fix_version):
"""Annotate a Jira issue in the jira database as being part of the specified release
version.
Args:
jira_id (str): The applicable Issue ID from JIRA.
fix_version (str): The annotated `fixVersion` as seen in JIRA.
"""
self.conn.execute("INSERT INTO jira_versions(jira_id, fix_version) VALUES (upper(?),?)",
(jira_id, fix_version))
def unique_jira_ids_from_git(self):
"""Query the commits database for the population of Jira Issue IDs."""
results = self.conn.execute("SELECT distinct jira_id FROM git_commits").fetchall()
return [x[0] for x in results]
def backup(self, target):
"""Write a copy of the database to the `target` destination.
Args:
target (str): The backup target, a filesystem path.
"""
dst = sqlite3.connect(target)
with dst:
self._conn.backup(dst)
dst.close()
class _RepoReader:
"""This class interacts with the git repo, and encapsulates actions specific to HBase's git
history.
Args:
db (:obj:`_DB`): A handle to the database manager.
fallback_actions_path (str): Path to the file containing sha-specific actions
(see README.md).
remote_name (str): The name of the remote to query for branches and histories
(i.e., "origin").
development_branch (str): The name of the branch on which active development occurs
(i.e., "master").
release_line_regexp (str): Filter criteria used to select "release line" branches (such
as "branch-1," "branch-2," &c.).
**_kwargs: Convenience for CLI argument parsing. Ignored.
"""
_extract_release_tag_pattern = re.compile(r'^rel/(\d+\.\d+\.\d+)(\^0)?$', re.IGNORECASE)
_skip_patterns = [
re.compile(r'^preparing development version.+', re.IGNORECASE),
re.compile(r'^preparing hbase release.+', re.IGNORECASE),
re.compile(r'^\s*updated? pom.xml version (for|to) .+', re.IGNORECASE),
re.compile(r'^\s*updated? chang', re.IGNORECASE),
re.compile(r'^\s*updated? (book|docs|documentation)', re.IGNORECASE),
re.compile(r'^\s*updating (docs|changes).+', re.IGNORECASE),
re.compile(r'^\s*bump (pom )?versions?', re.IGNORECASE),
re.compile(r'^\s*updated? (version|poms|changes).+', re.IGNORECASE),
]
_identify_leading_jira_id_pattern = re.compile(r'^[\s\[]*(hbase-\d+)', re.IGNORECASE)
_identify_backport_jira_id_patterns = [
re.compile(r'^backport "(.+)".*', re.IGNORECASE),
re.compile(r'^backport (.+)', re.IGNORECASE),
]
_identify_revert_jira_id_pattern = re.compile(r'^revert:? "(.+)"', re.IGNORECASE)
_identify_revert_revert_jira_id_pattern = re.compile(
'^revert "revert "(.+)"\\.?"\\.?', re.IGNORECASE)
_identify_amend_jira_id_pattern = re.compile(r'^amend (.+)', re.IGNORECASE)
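# Illustrative (hypothetical) commit summaries and how the patterns above
# classify them in _action_jira_id_for below:
#   "HBASE-12345 Fix flaky test"                    -> leading id       -> ADD
#   'Backport "HBASE-12345 Fix flaky test"'         -> backport         -> ADD
#   'Revert "HBASE-12345 Fix flaky test"'           -> revert           -> REVERT
#   'Revert "Revert "HBASE-12345 Fix flaky test""'  -> revert of revert -> ADD
#   "Amend HBASE-12345 Fix flaky test"              -> amend            -> ADD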
def __init__(self, db, fallback_actions_path, remote_name, development_branch,
release_line_regexp, parse_release_tags, **_kwargs):
self._db = db
self._repo = _RepoReader._open_repo()
self._fallback_actions = _RepoReader._load_fallback_actions(fallback_actions_path)
self._remote_name = remote_name
self._development_branch = development_branch
self._release_line_regexp = release_line_regexp
self._parse_release_tags = parse_release_tags
@property
def repo(self):
""":obj:`git.repo.base.Repo`: Underlying Repo handle."""
return self._repo
@property
def remote_name(self):
"""str: The name of the remote used for querying branches and histories."""
return self._remote_name
@property
def development_branch_ref(self):
""":obj:`git.refs.reference.Reference`: The git branch where active development occurs."""
refs = self.repo.remote(self._remote_name).refs
return [ref for ref in refs
if ref.name == '%s/%s' % (self._remote_name, self._development_branch)][0]
@property
def release_line_refs(self):
""":obj:`list` of :obj:`git.refs.reference.Reference`: The git branches identified as
"release lines", i.e., "branch-2"."""
refs = self.repo.remote(self._remote_name).refs
pattern = re.compile('%s/%s' % (self._remote_name, self._release_line_regexp))
return [ref for ref in refs if pattern.match(ref.name)]
@property
def release_branch_refs(self):
""":obj:`list` of :obj:`git.refs.reference.Reference`: The git branches identified as
"release branches", i.e., "branch-2.2"."""
refs = self.repo.remote(self._remote_name).refs
release_line_refs = self.release_line_refs
return [ref for ref in refs
if any([ref.name.startswith(release_line.name + '.')
for release_line in release_line_refs])]
@staticmethod
def _open_repo():
return git.Repo(pathlib.Path(__file__).parent.absolute(), search_parent_directories=True)
def identify_least_common_commit(self, ref_a, ref_b):
"""Given a pair of references, attempt to identify the commit that they have in common,
i.e., the commit at which a "release branch" originates from a "release line" branch.
"""
commits = self._repo.merge_base(ref_a, ref_b, "--all")
if commits:
return commits[0]
raise Exception("could not identify merge base between %s, %s" % (ref_a, ref_b))
@staticmethod
def _skip(summary):
return any([p.match(summary) for p in _RepoReader._skip_patterns])
@staticmethod
def _identify_leading_jira_id(summary):
match = _RepoReader._identify_leading_jira_id_pattern.match(summary)
if match:
return match.groups()[0]
return None
@staticmethod
def _identify_backport_jira_id(summary):
for pattern in _RepoReader._identify_backport_jira_id_patterns:
match = pattern.match(summary)
if match:
return _RepoReader._identify_leading_jira_id(match.groups()[0])
return None
@staticmethod
def _identify_revert_jira_id(summary):
match = _RepoReader._identify_revert_jira_id_pattern.match(summary)
if match:
return _RepoReader._identify_leading_jira_id(match.groups()[0])
return None
@staticmethod
def _identify_revert_revert_jira_id(summary):
match = _RepoReader._identify_revert_revert_jira_id_pattern.match(summary)
if match:
return _RepoReader._identify_leading_jira_id(match.groups()[0])
return None
@staticmethod
def _identify_amend_jira_id(summary):
match = _RepoReader._identify_amend_jira_id_pattern.match(summary)
if match:
return _RepoReader._identify_leading_jira_id(match.groups()[0])
return None
@staticmethod
def _action_jira_id_for(summary):
jira_id = _RepoReader._identify_leading_jira_id(summary)
if jira_id:
return _DB.Action.ADD, jira_id
jira_id = _RepoReader._identify_backport_jira_id(summary)
if jira_id:
return _DB.Action.ADD, jira_id
jira_id = _RepoReader._identify_revert_jira_id(summary)
if jira_id:
return _DB.Action.REVERT, jira_id
jira_id = _RepoReader._identify_revert_revert_jira_id(summary)
if jira_id:
return _DB.Action.ADD, jira_id
jira_id = _RepoReader._identify_amend_jira_id(summary)
if jira_id:
return _DB.Action.ADD, jira_id
return None
def _extract_release_tag(self, commit):
"""works for extracting the tag, but need a way to retro-actively tag
commits we've already seen."""
names = self._repo.git.name_rev(commit, tags=True, refs='rel/*')
for name in names.split(' '):
match = _RepoReader._extract_release_tag_pattern.match(name)
if match:
return match.groups()[0]
return None
def _set_release_tag(self, branch, tag, shas):
cnt = 0
for sha in shas:
self._db.apply_git_tag(branch, sha, tag)
cnt += 1
if cnt % 50 == 0:
self._db.flush_commits()
self._db.flush_commits()
def _resolve_ambiguity(self, commit):
if commit.hexsha not in self._fallback_actions:
logging.warning('Unable to resolve action for %s: %s', commit.hexsha, commit.summary)
return _DB.Action.SKIP, None
action, jira_id = self._fallback_actions[commit.hexsha]
if not jira_id:
jira_id = None
return _DB.Action[action], jira_id
def _row_generator(self, branch, commit):
if _RepoReader._skip(commit.summary):
return None
result = _RepoReader._action_jira_id_for(commit.summary)
if not result:
result = self._resolve_ambiguity(commit)
if not result:
raise Exception('Cannot resolve action for %s: %s' % (commit.hexsha, commit.summary))
action, jira_id = result
return action, jira_id, branch, commit.hexsha
def populate_db_release_branch(self, origin_commit, release_branch):
"""List all commits on `release_branch` since `origin_commit`, recording them as
observations in the commits database.
Args:
origin_commit (:obj:`git.objects.commit.Commit`): The sha of the first commit to
consider.
release_branch (str): The name of the ref whose history is to be parsed.
"""
global MANAGER
commits = list(self._repo.iter_commits(
"%s...%s" % (origin_commit.hexsha, release_branch), reverse=True))
logging.info("%s has %d commits since its origin at %s.", release_branch, len(commits),
origin_commit)
counter = MANAGER.counter(total=len(commits), desc=release_branch, unit='commit')
commits_since_release = list()
cnt = 0
for commit in counter(commits):
row = self._row_generator(release_branch, commit)
if row:
self._db.apply_commit(*row)
cnt += 1
if cnt % 50 == 0:
self._db.flush_commits()
commits_since_release.append(commit.hexsha)
if self._parse_release_tags:
tag = self._extract_release_tag(commit)
if tag:
self._set_release_tag(release_branch, tag, commits_since_release)
commits_since_release = list()
self._db.flush_commits()
@staticmethod
def _load_fallback_actions(file):
result = dict()
if pathlib.Path(file).exists():
with open(file, 'r') as handle:
reader = csv.DictReader(filter(lambda line: line[0] != '#', handle))
result = dict()
for row in reader:
result[row['hexsha']] = (row['action'], row['jira_id'])
return result
class _JiraReader:
"""This class interacts with the Jira instance.
Args:
db (:obj:`_DB`): A handle to the database manager.
jira_url (str): URL of the Jira instance to query.
**_kwargs: Convenience for CLI argument parsing. Ignored.
"""
def __init__(self, db, jira_url, **_kwargs):
self._db = db
self.client = jira.JIRA(jira_url)
self.throttle_time_in_sec = 1
def populate_db(self):
"""Query Jira for issue IDs found in the commits database, writing them to the jira
database."""
global MANAGER
jira_ids = self._db.unique_jira_ids_from_git()
logging.info("retrieving %s jira_ids from the issue tracker", len(jira_ids))
counter = MANAGER.counter(total=len(jira_ids), desc='fetch from Jira', unit='issue')
chunk_size = 50
chunks = [jira_ids[i:i + chunk_size] for i in range(0, len(jira_ids), chunk_size)]
cnt = 0
for chunk in chunks:
query = "key in (" + ",".join([("'" + jira_id + "'") for jira_id in chunk]) + ")"
results = self.client.search_issues(jql_str=query, maxResults=chunk_size,
fields='fixVersions')
for result in results:
jira_id = result.key
fix_versions = [version.name for version in result.fields.fixVersions]
for fix_version in fix_versions:
self._db.apply_fix_version(jira_id, fix_version)
cnt += 1
if cnt % 50 == 0:
self._db.flush_commits()
counter.update(incr=len(chunk))
time.sleep(5)
self._db.flush_commits()
def fetch_issues(self, jira_ids):
"""Retrieve the specified jira Ids."""
global MANAGER
logging.info("retrieving %s jira_ids from the issue tracker", len(jira_ids))
counter = MANAGER.counter(total=len(jira_ids), desc='fetch from Jira', unit='issue')
chunk_size = 50
chunks = [jira_ids[i:i + chunk_size] for i in range(0, len(jira_ids), chunk_size)]
ret = list()
for chunk in chunks:
query = "key IN (" + ",".join([("'" + jira_id + "'") for jira_id in chunk]) + ")"\
+ " ORDER BY issuetype ASC, priority DESC, key ASC"
results = self.client.search_issues(
jql_str=query, maxResults=chunk_size,
fields='summary,issuetype,priority,resolution,components')
for result in results:
val = dict()
val['key'] = result.key
val['summary'] = result.fields.summary.strip()
val['priority'] = result.fields.priority.name.strip()
val['issue_type'] = result.fields.issuetype.name.strip() \
if result.fields.issuetype else None
val['resolution'] = result.fields.resolution.name.strip() \
if result.fields.resolution else None
val['components'] = [x.name.strip() for x in result.fields.components if x] \
if result.fields.components else []
ret.append(val)
counter.update(incr=len(chunk))
return ret
class Auditor:
"""This class builds databases from git and Jira, making it possible to audit the two for
discrepancies. At some point, it will provide pre-canned audit queries against those databases.
It is the entrypoint to this application.
Args:
repo_reader (:obj:`_RepoReader`): An instance of the `_RepoReader`.
jira_reader (:obj:`_JiraReader`): An instance of the `JiraReader`.
db (:obj:`_DB`): A handle to the database manager.
**_kwargs: Convenience for CLI argument parsing. Ignored.
"""
def __init__(self, repo_reader, jira_reader, db, **_kwargs):
self._repo_reader = repo_reader
self._jira_reader = jira_reader
self._db = db
self._release_line_fix_versions = dict()
for k, v in _kwargs.items():
if k.endswith('_fix_version'):
release_line = k[:-len('_fix_version')]
self._release_line_fix_versions[release_line] = v
def populate_db_from_git(self):
"""Process the git repository, populating the commits database."""
for release_line in self._repo_reader.release_line_refs:
branch_origin = self._repo_reader.identify_least_common_commit(
self._repo_reader.development_branch_ref.name, release_line.name)
self._repo_reader.populate_db_release_branch(branch_origin, release_line.name)
for release_branch in self._repo_reader.release_branch_refs:
if not release_branch.name.startswith(release_line.name):
continue
self._repo_reader.populate_db_release_branch(branch_origin, release_branch.name)
def populate_db_from_jira(self):
"""Process the Jira issues identified by the commits database, populating the jira
database."""
self._jira_reader.populate_db()
@staticmethod
def _write_report(filename, issues):
with open(filename, 'w') as file:
fieldnames = ['key', 'issue_type', 'priority', 'summary', 'resolution', 'components']
writer = csv.DictWriter(file, fieldnames=fieldnames)
writer.writeheader()
for issue in issues:
writer.writerow(issue)
logging.info('generated report at %s', filename)
def report_new_for_release_line(self, release_line):
"""Builds a report of the Jira issues that are new on the target release line, not present
on any of the associated release branches. (i.e., on branch-2 but not
branch-{2.0,2.1,...})"""
matches = [x for x in self._repo_reader.release_line_refs
if x.name == release_line or x.name.endswith('/%s' % release_line)]
release_line_ref = next(iter(matches), None)
if not release_line_ref:
logging.error('release line %s not found. available options are %s.',
release_line, [x.name for x in self._repo_reader.release_line_refs])
return
cursor = self._db.conn.execute("""
SELECT distinct jira_id FROM git_commits
WHERE branch = ?
EXCEPT SELECT distinct jira_id FROM git_commits
WHERE branch LIKE ?
""", (release_line_ref.name, '%s.%%' % release_line_ref.name))
jira_ids = [x[0] for x in cursor.fetchall()]
issues = self._jira_reader.fetch_issues(jira_ids)
filename = 'new_for_%s.csv' % release_line.replace('/', '-')
Auditor._write_report(filename, issues)
@staticmethod
def _str_to_bool(val):
if not val:
return False
return val.lower() in ['true', 't', 'yes', 'y']
@staticmethod
def _build_first_pass_parser():
parser = argparse.ArgumentParser(add_help=False)
building_group = parser.add_argument_group(title='Building the audit database')
building_group.add_argument(
'--populate-from-git',
help='When true, populate the audit database from the Git repository.',
type=Auditor._str_to_bool,
default=True)
building_group.add_argument(
'--populate-from-jira',
help='When true, populate the audit database from Jira.',
type=Auditor._str_to_bool,
default=True)
building_group.add_argument(
'--db-path',
help='Path to the database file, or leave unspecified for a transient db.',
default=':memory:')
building_group.add_argument(
'--initialize-db',
help='When true, initialize the database tables. This is destructive to the contents'
+ ' of an existing database.',
type=Auditor._str_to_bool,
default=False)
report_group = parser.add_argument_group('Generating reports')
report_group.add_argument(
'--report-new-for-release-line',
help=Auditor.report_new_for_release_line.__doc__,
type=str,
default=None)
git_repo_group = parser.add_argument_group('Interactions with the Git repo')
git_repo_group.add_argument(
'--git-repo-path',
help='Path to the git repo, or leave unspecified to infer from the current'
+ ' file\'s path.',
default=__file__)
git_repo_group.add_argument(
'--remote-name',
help='The name of the git remote to use when identifying branches.'
+ ' Default: \'origin\'',
default='origin')
git_repo_group.add_argument(
'--development-branch',
help='The name of the branch from which all release lines originate.'
+ ' Default: \'master\'',
default='master')
git_repo_group.add_argument(
'--development-branch-fix-version',
help='The Jira fixVersion used to indicate an issue is committed to the development'
+ ' branch. Default: \'3.0.0\'',
default='3.0.0')
git_repo_group.add_argument(
'--release-line-regexp',
help='A regexp used to identify release lines.',
default=r'branch-\d+$')
git_repo_group.add_argument(
'--parse-release-tags',
help='When true, look for release tags and annotate commits according to their release'
            + ' version. An expensive calculation, disabled by default.',
type=Auditor._str_to_bool,
default=False)
git_repo_group.add_argument(
'--fallback-actions-path',
help='Path to a file containing _DB.Actions applicable to specific git shas.',
default='fallback_actions.csv')
jira_group = parser.add_argument_group('Interactions with Jira')
jira_group.add_argument(
'--jira-url',
help='A URL locating the target JIRA instance.',
default='https://issues.apache.org/jira')
return parser, git_repo_group
@staticmethod
def _build_second_pass_parser(repo_reader, parent_parser, git_repo_group):
for release_line in repo_reader.release_line_refs:
name = release_line.name
git_repo_group.add_argument(
'--%s-fix-version' % name[len(repo_reader.remote_name) + 1:],
help='The Jira fixVersion used to indicate an issue is committed to the specified '
+ 'release line branch',
required=True)
return argparse.ArgumentParser(parents=[parent_parser])
MANAGER = None
def main():
global MANAGER
first_pass_parser, git_repo_group = Auditor._build_first_pass_parser()
first_pass_args, extras = first_pass_parser.parse_known_args()
first_pass_args_dict = vars(first_pass_args)
with _DB(**first_pass_args_dict) as db:
logging.basicConfig(level=logging.INFO)
repo_reader = _RepoReader(db, **first_pass_args_dict)
jira_reader = _JiraReader(db, **first_pass_args_dict)
second_pass_parser = Auditor._build_second_pass_parser(
repo_reader, first_pass_parser, git_repo_group)
second_pass_args = second_pass_parser.parse_args(extras, first_pass_args)
second_pass_args_dict = vars(second_pass_args)
auditor = Auditor(repo_reader, jira_reader, db, **second_pass_args_dict)
with enlighten.get_manager() as MANAGER:
if second_pass_args.populate_from_git:
auditor.populate_db_from_git()
if second_pass_args.populate_from_jira:
auditor.populate_db_from_jira()
if second_pass_args.report_new_for_release_line:
release_line = second_pass_args.report_new_for_release_line
auditor.report_new_for_release_line(release_line)
if __name__ == '__main__':
main()
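# Usage sketch (illustrative only, not from the original script): assuming the
# repository's release lines are 'branch-1' and 'branch-2', the second parsing
# pass adds a required --<release-line>-fix-version flag for each of them, so
# an invocation could look like this (fix-version values are hypothetical):
#
#   python3 git_jira_release_audit.py \
#       --db-path=audit.db --initialize-db=true \
#       --branch-1-fix-version=1.7.0 --branch-2-fix-version=2.5.0 \
#       --report-new-for-release-line=branch-2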
|
francisliu/hbase
|
dev-support/git-jira-release-audit/git_jira_release_audit.py
|
Python
|
apache-2.0
| 27,558
|
import mock
from unittest import TestCase
from shade_janitor.resources import NoCloudException
from shade_janitor.resources import Resources
class TestResourcesCloud(TestCase):
def test_resources_fails_no_cloud(self):
with self.assertRaises(NoCloudException):
Resources(None)
def test_passes_with_something(self):
Resources(mock.Mock())
|
yazug/shade_janitor
|
shade_janitor/tests/unit/resources/test_cloud.py
|
Python
|
gpl-3.0
| 378
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
__doctest_skip__ = ['quantity_support']
def quantity_support(format='latex_inline'):
"""
Enable support for plotting `astropy.units.Quantity` instances in
matplotlib.
May be (optionally) used with a ``with`` statement.
>>> import matplotlib.pyplot as plt
>>> from astropy import units as u
>>> from astropy import visualization
>>> with visualization.quantity_support():
... plt.figure()
... plt.plot([1, 2, 3] * u.m)
[...]
... plt.plot([101, 125, 150] * u.cm)
[...]
... plt.draw()
Parameters
----------
format : `astropy.units.format.Base` instance or str
The name of a format or a formatter object. If not
provided, defaults to ``latex_inline``.
"""
from .. import units as u
from matplotlib import units
from matplotlib import ticker
def rad_fn(x, pos=None):
n = int((x / np.pi) * 2.0 + 0.25)
if n == 0:
return '0'
elif n == 1:
return 'π/2'
elif n == 2:
return 'π'
elif n % 2 == 0:
return '{0}π'.format(n / 2)
else:
return '{0}π/2'.format(n)
class MplQuantityConverter(units.ConversionInterface):
def __init__(self):
if u.Quantity not in units.registry:
units.registry[u.Quantity] = self
self._remove = True
else:
self._remove = False
@staticmethod
def axisinfo(unit, axis):
if unit == u.radian:
return units.AxisInfo(
majloc=ticker.MultipleLocator(base=np.pi/2),
majfmt=ticker.FuncFormatter(rad_fn),
label=unit.to_string(),
)
elif unit == u.degree:
return units.AxisInfo(
majloc=ticker.AutoLocator(),
majfmt=ticker.FormatStrFormatter('%i°'),
label=unit.to_string(),
)
elif unit is not None:
return units.AxisInfo(label=unit.to_string(format))
return None
@staticmethod
def convert(val, unit, axis):
if isinstance(val, u.Quantity):
return val.to_value(unit)
else:
return val
@staticmethod
def default_units(x, axis):
if hasattr(x, 'unit'):
return x.unit
return None
def __enter__(self):
return self
def __exit__(self, type, value, tb):
if self._remove:
del units.registry[u.Quantity]
return MplQuantityConverter()
|
kelle/astropy
|
astropy/visualization/units.py
|
Python
|
bsd-3-clause
| 2,941
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
import functools
class AboutDecoratingWithClasses(Koan):
def maximum(self, a, b):
if a > b:
return a
else:
return b
def test_partial_that_wrappers_no_args(self):
"""
Before we can understand this type of decorator we need to consider
the partial.
"""
max = functools.partial(self.maximum)
self.assertEqual(23, max(7, 23))
self.assertEqual(10, max(10, -10))
def test_partial_that_wrappers_first_arg(self):
max0 = functools.partial(self.maximum, 0)
self.assertEqual(0, max0(-4))
self.assertEqual(5, max0(5))
def test_partial_that_wrappers_all_args(self):
always99 = functools.partial(self.maximum, 99, 20)
always20 = functools.partial(self.maximum, 9, 20)
self.assertEqual(99, always99())
self.assertEqual(20, always20())
# ------------------------------------------------------------------
class doubleit(object):
def __init__(self, fn):
self.fn = fn
def __call__(self, *args):
return self.fn(*args) + ', ' + self.fn(*args)
def __get__(self, obj, cls=None):
if not obj:
# Decorating an unbound function
return self
else:
# Decorating a bound method
return functools.partial(self, obj)
@doubleit
def foo(self):
return "foo"
@doubleit
def parrot(self, text):
return text.upper()
def test_decorator_with_no_arguments(self):
# To clarify: the decorator above the function has no arguments, even
# if the decorated function does
self.assertEqual('foo, foo', self.foo())
self.assertEqual('PIECES OF EIGHT, PIECES OF EIGHT', self.parrot('pieces of eight'))
# ------------------------------------------------------------------
def sound_check(self):
#Note: no decorator
return "Testing..."
def test_what_a_decorator_is_doing_to_a_function(self):
#wrap the function with the decorator
self.sound_check = self.doubleit(self.sound_check)
self.assertEqual('Testing..., Testing...', self.sound_check())
# ------------------------------------------------------------------
class documenter(object):
def __init__(self, *args):
self.fn_doc = args[0]
def __call__(self, fn):
def decorated_function(*args):
return fn(*args)
if fn.__doc__:
decorated_function.__doc__ = fn.__doc__ + ": " + self.fn_doc
else:
decorated_function.__doc__ = self.fn_doc
return decorated_function
@documenter("Increments a value by one. Kind of.")
def count_badly(self, num):
num += 1
if num == 3:
return 5
else:
return num
@documenter("Does nothing")
def idler(self, num):
"Idler"
pass
def test_decorator_with_an_argument(self):
self.assertEqual(5, self.count_badly(2))
self.assertEqual('Increments a value by one. Kind of.', self.count_badly.__doc__)
def test_documentor_which_already_has_a_docstring(self):
self.assertEqual('Idler: Does nothing', self.idler.__doc__)
# ------------------------------------------------------------------
@documenter("DOH!")
@doubleit
@doubleit
def homer(self):
return "D'oh"
def test_we_can_chain_decorators(self):
self.assertEqual("D'oh, D'oh, D'oh, D'oh", self.homer())
self.assertEqual('DOH!', self.homer.__doc__)
|
aishraj/pykons_solution
|
python2/koans/about_decorating_with_classes.py
|
Python
|
mit
| 3,738
|
class VeppyFeatureException(Exception):
pass
class StopEffectPrediction(Exception):
pass
class VeppyFileException(Exception):
pass
class FeatureFileException(VeppyFileException):
pass
class FastaFileException(VeppyFileException):
pass
|
solvebio/veppy
|
veppy/errors.py
|
Python
|
mit
| 263
|
"""
Set of various HTML parsers.
"""
from bs4 import BeautifulSoup
def apply_linebreaks(text):
"""
Convert python-style linebreaks to a html-style ones.
:param text: text with python-style linebreaks.
:return: text with html-style linebreaks.
"""
line_break = "<br>"
return text.replace("\n", line_break)
def apply_paragraphs(text):
"""
Apply html-style paragraphs to the text.
:param text: text with python-style or no paragraphs.
:return: text with html-style paragraphs.
"""
paragraph_start = "<p>"
paragraph_end = "</p>"
text = paragraph_start + text + paragraph_end
    text = text.replace("\n\n", paragraph_end + paragraph_start)
return text
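# Illustrative example (not part of the original module):
#   apply_paragraphs("first\n\nsecond") -> "<p>first</p><p>second</p>"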
def embed_images(content, image_urls):
"""
Embed img tag with image url into the html content.
:param content: html text.
:param image_urls: url to the image or list of urls.
:return: content with img tag embedded.
"""
line_break = "<br>"
separator = 2 * line_break
if isinstance(image_urls, str):
image_urls = [image_urls]
image_urls = ["<img src='{}' >".format(image_url) for image_url in image_urls]
return content + separator + separator.join(image_urls)
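# Illustrative example (not part of the original module; the URL is made up):
#   embed_images("<p>hi</p>", "http://example.com/a.png")
#   -> "<p>hi</p><br><br><img src='http://example.com/a.png' >"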
def delete_images(content):
"""
Remove all the img tags.
:param content: html text.
:return: content with img tags removed.
"""
parser = BeautifulSoup(content)
for img in parser.find_all("img"):
img.replace_with("")
return parser.prettify()
def list_images(content):
"""
Extract and list all images in a html text.
:param content: html text.
:return: list of image urls.
"""
return [img.get("src") for img in
BeautifulSoup(content).find_all("img")]
def extract_text(content):
"""
Get all plain text from a html document.
:param content: html text.
:return: plain text.
"""
parser = BeautifulSoup(content)
convert_linebreaks(parser)
return parser.get_text().strip()
def convert_linebreaks(parser):
"""
Converts all html-style linebreaks to the python-style ones.
:param parser: beautiful soup parser.
"""
for break_line in parser.find_all("br"):
break_line.replace_with("\n")
|
BrainTech/pisak
|
pisak/blog/html_parsers.py
|
Python
|
gpl-3.0
| 2,262
|
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os.path
import platform
import re
import textwrap
from ansible import __version__
from ansible.module_utils._text import to_text
from ansible.module_utils.six import string_types
def system(vagrant_version=None):
# Get most recent Trellis CHANGELOG entry
changelog_msg = ''
ansible_config_path = os.getenv('ANSIBLE_CONFIG')
ansible_path = os.path.dirname(ansible_config_path) if ansible_config_path else os.getcwd()
changelog = os.path.join(ansible_path, 'CHANGELOG.md')
if os.path.isfile(changelog):
with open(changelog) as f:
str = f.read(200)
# Retrieve release number if it is most recent entry
release = re.search(r'^###\s((?!HEAD).*)', str)
if release is not None:
changelog_msg = '\n Trellis {0}'.format(release.group(1))
# Retrieve most recent changelog entry
else:
change = re.search(r'^\*\s?(\[BREAKING\])?([^\(\n\[]+)', str, re.M|re.I)
if change is not None:
changelog_msg = '\n Trellis version (per changelog): "{0}"'.format(change.group(2).strip())
# Vagrant info, if available
vagrant = ' Vagrant {0};'.format(vagrant_version) if vagrant_version else ''
# Assemble components and return
return 'System info:\n Ansible {0};{1} {2}{3}'.format(__version__, vagrant, platform.system(), changelog_msg)
def reset_task_info(obj, task=None):
obj.action = None if task is None else task._get_parent_attribute('action')
obj.first_host = True
obj.first_item = True
obj.task_failed = False
# Display dict key only, instead of full json dump
def replace_item_with_key(obj, result):
item = '_ansible_item_label' if '_ansible_item_label' in result._result else 'item'
should_replace = (
not obj._display.verbosity
and 'label' not in result._task._ds.get('loop_control', {})
and item in result._result
)
if should_replace:
if 'key' in result._result[item]:
result._result[item] = result._result[item]['key']
elif type(result._result[item]) is dict:
subitem = '_ansible_item_label' if '_ansible_item_label' in result._result[item] else 'item'
if 'key' in result._result[item].get(subitem, {}):
result._result[item] = result._result[item][subitem]['key']
elif '_ansible_item_label' in result._result[item]:
result._result[item] = result._result[item]['_ansible_item_label']
def display(obj, result):
msg = ''
result = result._result
display = obj._display.display
wrap_width = 77
first = obj.first_host and obj.first_item
# Only display msg if debug module or if failed (some modules have undesired 'msg' on 'ok')
if 'msg' in result and (obj.task_failed or obj.action == 'debug'):
msg = result.pop('msg', '')
# Disable Ansible's verbose setting for debug module to avoid the CallbackBase._dump_results()
if '_ansible_verbose_always' in result:
del result['_ansible_verbose_always']
# Display additional info when failed
if obj.task_failed:
items = (item for item in ['reason', 'module_stderr', 'module_stdout', 'stderr'] if item in result and to_text(result[item]) != '')
for item in items:
msg = result[item] if msg == '' else '\n'.join([msg, result.pop(item, '')])
# Add blank line between this fail message and the json dump Ansible displays next
msg = '\n'.join([msg, ''])
# Must pass unicode strings to Display.display() to prevent UnicodeError tracebacks
if isinstance(msg, list):
msg = '\n'.join([to_text(x) for x in msg])
elif not isinstance(msg, string_types):
msg = to_text(msg)
# Wrap text
msg = '\n'.join([textwrap.fill(line, wrap_width, replace_whitespace=False)
for line in msg.splitlines()])
# Display system info and msg, with horizontal rule between hosts/items
hr = '-' * int(wrap_width*.67)
if obj.task_failed and first:
display(system(obj.vagrant_version), 'bright gray')
display(hr, 'bright gray')
if msg == '':
if obj.task_failed and not first:
display(hr, 'bright gray')
else:
return
else:
if not first:
display(hr, 'bright gray')
display(msg, 'red' if obj.task_failed else 'bright purple')
def display_host(obj, result):
if 'results' not in result._result:
display(obj, result)
obj.first_host = False
def display_item(obj, result):
display(obj, result)
obj.first_item = False
|
roots/bedrock-ansible
|
lib/trellis/utils/output.py
|
Python
|
mit
| 4,757
|
# Copyright 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
import re
import stat
import time
import warnings
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_utils import excutils
from oslo_utils import imageutils
from oslo_utils import units
import tenacity
from ironic_lib.common.i18n import _
from ironic_lib import disk_partitioner
from ironic_lib import exception
from ironic_lib import utils
opts = [
cfg.IntOpt('efi_system_partition_size',
default=200,
help='Size of EFI system partition in MiB when configuring '
'UEFI systems for local boot.'),
cfg.IntOpt('bios_boot_partition_size',
default=1,
help='Size of BIOS Boot partition in MiB when configuring '
'GPT partitioned systems for local boot in BIOS.'),
cfg.StrOpt('dd_block_size',
default='1M',
help='Block size to use when writing to the nodes disk.'),
cfg.IntOpt('partition_detection_attempts',
default=3,
min=1,
help='Maximum attempts to detect a newly created partition.'),
cfg.IntOpt('partprobe_attempts',
default=10,
help='Maximum number of attempts to try to read the '
'partition.'),
cfg.IntOpt('image_convert_memory_limit',
default=2048,
help='Memory limit for "qemu-img convert" in MiB. Implemented '
'via the address space resource limit.'),
cfg.IntOpt('image_convert_attempts',
default=3,
help='Number of attempts to convert an image.'),
]
CONF = cfg.CONF
CONF.register_opts(opts, group='disk_utils')
LOG = logging.getLogger(__name__)
_PARTED_PRINT_RE = re.compile(r"^(\d+):([\d\.]+)MiB:"
r"([\d\.]+)MiB:([\d\.]+)MiB:(\w*):(.*):(.*);")
_PARTED_TABLE_TYPE_RE = re.compile(r'^.*partition\s+table\s*:\s*(gpt|msdos)',
re.IGNORECASE | re.MULTILINE)
CONFIGDRIVE_LABEL = "config-2"
MAX_CONFIG_DRIVE_SIZE_MB = 64
GPT_SIZE_SECTORS = 33
# Maximum disk size supported by MBR is 2TB (2 * 1024 * 1024 MB)
MAX_DISK_SIZE_MB_SUPPORTED_BY_MBR = 2097152
# Limit the memory address space to 1 GiB when running qemu-img
QEMU_IMG_LIMITS = None
def _qemu_img_limits():
global QEMU_IMG_LIMITS
if QEMU_IMG_LIMITS is None:
QEMU_IMG_LIMITS = processutils.ProcessLimits(
address_space=CONF.disk_utils.image_convert_memory_limit
* units.Mi)
return QEMU_IMG_LIMITS
def list_partitions(device):
"""Get partitions information from given device.
:param device: The device path.
:returns: list of dictionaries (one per partition) with keys:
number, start, end, size (in MiB), filesystem, partition_name,
flags, path.
"""
output = utils.execute(
'parted', '-s', '-m', device, 'unit', 'MiB', 'print',
use_standard_locale=True, run_as_root=True)[0]
if isinstance(output, bytes):
output = output.decode("utf-8")
lines = [line for line in output.split('\n') if line.strip()][2:]
# Example of line: 1:1.00MiB:501MiB:500MiB:ext4::boot
fields = ('number', 'start', 'end', 'size', 'filesystem', 'partition_name',
'flags')
result = []
for line in lines:
match = _PARTED_PRINT_RE.match(line)
if match is None:
LOG.warning("Partition information from parted for device "
"%(device)s does not match "
"expected format: %(line)s",
dict(device=device, line=line))
continue
# Cast int fields to ints (some are floats and we round them down)
groups = [int(float(x)) if i < 4 else x
for i, x in enumerate(match.groups())]
item = dict(zip(fields, groups))
item['path'] = partition_index_to_path(device, item['number'])
result.append(item)
return result
def count_mbr_partitions(device):
"""Count the number of primary and logical partitions on a MBR
:param device: The device path.
:returns: A tuple with the number of primary partitions and logical
partitions.
:raise: ValueError if the device does not have a valid MBR partition
table.
"""
# -d do not update the kernel table
# -s print a summary of the partition table
output, err = utils.execute('partprobe', '-d', '-s', device,
run_as_root=True, use_standard_locale=True)
if 'msdos' not in output:
raise ValueError('The device %s does not have a valid MBR '
'partition table' % device)
# Sample output: /dev/vdb: msdos partitions 1 2 3 <5 6 7>
# The partitions with number > 4 (and inside <>) are logical partitions
output = output.replace('<', '').replace('>', '')
partitions = [int(s) for s in output.split() if s.isdigit()]
    return (sum(i < 5 for i in partitions), sum(i > 4 for i in partitions))
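# Illustrative example (not part of the original module): for the sample output
# shown above, partitions == [1, 2, 3, 5, 6, 7], so the function returns (3, 3):
# three primary partitions (numbers < 5) and three logical ones (numbers > 4).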
def get_disk_identifier(dev):
"""Get the disk identifier from the disk being exposed by the ramdisk.
This disk identifier is appended to the pxe config which will then be
used by chain.c32 to detect the correct disk to chainload. This is helpful
in deployments to nodes with multiple disks.
http://www.syslinux.org/wiki/index.php/Comboot/chain.c32#mbr:
:param dev: Path for the already populated disk device.
:raises OSError: When the hexdump binary is unavailable.
:returns: The Disk Identifier.
"""
disk_identifier = utils.execute('hexdump', '-s', '440', '-n', '4',
'-e', '''\"0x%08x\"''',
dev, run_as_root=True,
attempts=5, delay_on_retry=True)
return disk_identifier[0]
def get_partition_table_type(device):
"""Get partition table type, msdos or gpt.
:param device: the name of the device
:return: dos, gpt or None
"""
out = utils.execute('parted', '--script', device, '--', 'print',
run_as_root=True, use_standard_locale=True)[0]
m = _PARTED_TABLE_TYPE_RE.search(out)
if m:
return m.group(1)
LOG.warning("Unable to get partition table type for device %s", device)
return 'unknown'
def _blkid(device, probe=False, fields=None):
args = []
if probe:
args.append('-p')
if fields:
args += sum((['-s', field] for field in fields), [])
output, err = utils.execute('blkid', device, *args,
use_standard_locale=True, run_as_root=True)
if output.strip():
return output.split(': ', 1)[1]
else:
return ""
def _lsblk(device, deps=True, fields=None):
args = ['--pairs', '--bytes', '--ascii']
if not deps:
args.append('--nodeps')
if fields:
args.extend(['--output', ','.join(fields)])
else:
args.append('--output-all')
output, err = utils.execute('lsblk', device, *args,
use_standard_locale=True, run_as_root=True)
return output.strip()
def get_device_information(device, probe=False, fields=None):
"""Get information about a device using blkid.
Can be applied to all block devices: disks, RAID, partitions.
:param device: Device name.
:param probe: DEPRECATED, do not use.
:param fields: A list of fields to request (all by default).
:return: A dictionary with requested fields as keys.
:raises: ProcessExecutionError
"""
if probe:
output = _blkid(device, probe=True, fields=fields)
else:
output = _lsblk(device, fields=fields, deps=False)
if output:
return next(utils.parse_device_tags(output))
else:
return {}
def find_efi_partition(device):
"""Looks for the EFI partition on a given device.
A boot partition on a GPT disk is assumed to be an EFI partition as well.
:param device: the name of the device
:return: the EFI partition record from `list_partitions` or None
"""
is_gpt = get_partition_table_type(device) == 'gpt'
for part in list_partitions(device):
flags = {x.strip() for x in part['flags'].split(',')}
if 'esp' in flags or ('boot' in flags and is_gpt):
LOG.debug("Found EFI partition %s on device %s", part, device)
return part
else:
LOG.debug("No efi partition found on device %s", device)
def get_uefi_disk_identifier(dev):
"""Get the uuid from the disk being exposed by the ramdisk.
DEPRECATED: use find_efi_partition with get_device_information instead.
:param dev: Path for the already populated disk device.
:raises InstanceDeployFailure: Image is not UEFI bootable.
:returns: The UUID of the partition.
"""
warnings.warn("get_uefi_disk_identifier is deprecated, use "
"find_efi_partition and get_partition_information instead",
DeprecationWarning)
partition_id = None
try:
report, _ = utils.execute('fdisk', '-l', dev, run_as_root=True)
except processutils.ProcessExecutionError as e:
msg = _('Failed to find the partition on the disk %s ') % e
LOG.error(msg)
raise exception.InstanceDeployFailure(msg)
for line in report.splitlines():
if line.startswith(dev) and 'EFI System' in line:
vals = line.split()
partition_id = vals[0]
try:
lsblk_output = _lsblk(partition_id, fields=['UUID'])
disk_identifier = lsblk_output.split("=")[1].strip()
disk_identifier = disk_identifier.strip('"')
except processutils.ProcessExecutionError as e:
raise exception.InstanceDeployFailure("Image is not UEFI bootable. "
"Error: %s " % e)
return disk_identifier
_ISCSI_PREFIX = "iqn.2008-10.org.openstack:"
# TODO(dtantsur): deprecate node_uuid here, it's not overly useful (any iSCSI
# device should get the same treatment).
def is_iscsi_device(dev, node_uuid=None):
"""Check whether the device path belongs to an iSCSI device.
If node UUID is provided, checks that the device belongs to this UUID.
"""
if node_uuid:
return (_ISCSI_PREFIX + node_uuid) in dev
else:
return _ISCSI_PREFIX in dev
def is_last_char_digit(dev):
"""check whether device name ends with a digit"""
if len(dev) >= 1:
return dev[-1].isdigit()
return False
def partition_index_to_path(device, index):
"""Guess a partition path based on its device and index.
:param device: Device path.
:param index: Partition index.
"""
# the actual device names in the baremetal are like /dev/sda, /dev/sdb etc.
# While for the iSCSI device, the naming convention has a format which has
# iqn also embedded in it.
# When this function is called by ironic-conductor, the iSCSI device name
# should be appended by "part%d". While on the baremetal, it should name
# the device partitions as /dev/sda1 and not /dev/sda-part1.
if is_iscsi_device(device):
part_template = '%s-part%d'
elif is_last_char_digit(device):
part_template = '%sp%d'
else:
part_template = '%s%d'
return part_template % (device, index)
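# Illustrative examples (not part of the original module):
#   partition_index_to_path('/dev/sda', 1)     -> '/dev/sda1'
#   partition_index_to_path('/dev/nvme0n1', 2) -> '/dev/nvme0n1p2'
# An iSCSI device path containing the iqn prefix would instead get a
# '-part<index>' suffix.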
def make_partitions(dev, root_mb, swap_mb, ephemeral_mb,
configdrive_mb, node_uuid, commit=True,
boot_option="netboot", boot_mode="bios",
disk_label=None, cpu_arch=""):
"""Partition the disk device.
Create partitions for root, swap, ephemeral and configdrive on a
disk device.
:param dev: Path for the device to work on.
:param root_mb: Size of the root partition in mebibytes (MiB).
:param swap_mb: Size of the swap partition in mebibytes (MiB). If 0,
no partition will be created.
:param ephemeral_mb: Size of the ephemeral partition in mebibytes (MiB).
If 0, no partition will be created.
:param configdrive_mb: Size of the configdrive partition in
mebibytes (MiB). If 0, no partition will be created.
:param commit: True/False. Default for this setting is True. If False
partitions will not be written to disk.
:param boot_option: Can be "local" or "netboot". "netboot" by default.
:param boot_mode: Can be "bios" or "uefi". "bios" by default.
:param node_uuid: Node's uuid. Used for logging.
:param disk_label: The disk label to be used when creating the
partition table. Valid values are: "msdos", "gpt" or None; If None
Ironic will figure it out according to the boot_mode parameter.
:param cpu_arch: Architecture of the node the disk device belongs to.
When using the default value of None, no architecture specific
steps will be taken. This default should be used for x86_64. When
set to ppc64*, architecture specific steps are taken for booting a
partition image locally.
:returns: A dictionary containing the partition type as Key and partition
path as Value for the partitions created by this method.
"""
LOG.debug("Starting to partition the disk device: %(dev)s "
"for node %(node)s",
{'dev': dev, 'node': node_uuid})
part_dict = {}
if disk_label is None:
disk_label = 'gpt' if boot_mode == 'uefi' else 'msdos'
dp = disk_partitioner.DiskPartitioner(dev, disk_label=disk_label)
# For uefi localboot, switch partition table to gpt and create the efi
# system partition as the first partition.
if boot_mode == "uefi" and boot_option == "local":
part_num = dp.add_partition(CONF.disk_utils.efi_system_partition_size,
fs_type='fat32',
boot_flag='boot')
part_dict['efi system partition'] = partition_index_to_path(
dev, part_num)
if (boot_mode == "bios" and boot_option == "local" and disk_label == "gpt"
and not cpu_arch.startswith('ppc64')):
part_num = dp.add_partition(CONF.disk_utils.bios_boot_partition_size,
boot_flag='bios_grub')
part_dict['BIOS Boot partition'] = partition_index_to_path(
dev, part_num)
# NOTE(mjturek): With ppc64* nodes, partition images are expected to have
# a PrEP partition at the start of the disk. This is an 8 MiB partition
# with the boot and prep flags set. The bootloader should be installed
# here.
if (cpu_arch.startswith("ppc64") and boot_mode == "bios"
and boot_option == "local"):
LOG.debug("Add PReP boot partition (8 MB) to device: "
"%(dev)s for node %(node)s",
{'dev': dev, 'node': node_uuid})
boot_flag = 'boot' if disk_label == 'msdos' else None
part_num = dp.add_partition(8, part_type='primary',
boot_flag=boot_flag, extra_flags=['prep'])
part_dict['PReP Boot partition'] = partition_index_to_path(
dev, part_num)
if ephemeral_mb:
LOG.debug("Add ephemeral partition (%(size)d MB) to device: %(dev)s "
"for node %(node)s",
{'dev': dev, 'size': ephemeral_mb, 'node': node_uuid})
part_num = dp.add_partition(ephemeral_mb)
part_dict['ephemeral'] = partition_index_to_path(dev, part_num)
if swap_mb:
LOG.debug("Add Swap partition (%(size)d MB) to device: %(dev)s "
"for node %(node)s",
{'dev': dev, 'size': swap_mb, 'node': node_uuid})
part_num = dp.add_partition(swap_mb, fs_type='linux-swap')
part_dict['swap'] = partition_index_to_path(dev, part_num)
if configdrive_mb:
LOG.debug("Add config drive partition (%(size)d MB) to device: "
"%(dev)s for node %(node)s",
{'dev': dev, 'size': configdrive_mb, 'node': node_uuid})
part_num = dp.add_partition(configdrive_mb)
part_dict['configdrive'] = partition_index_to_path(dev, part_num)
# NOTE(lucasagomes): Make the root partition the last partition. This
# enables tools like cloud-init's growroot utility to expand the root
# partition until the end of the disk.
LOG.debug("Add root partition (%(size)d MB) to device: %(dev)s "
"for node %(node)s",
{'dev': dev, 'size': root_mb, 'node': node_uuid})
boot_val = 'boot' if (not cpu_arch.startswith("ppc64")
and boot_mode == "bios"
and boot_option == "local"
and disk_label == "msdos") else None
part_num = dp.add_partition(root_mb, boot_flag=boot_val)
part_dict['root'] = partition_index_to_path(dev, part_num)
if commit:
# write to the disk
dp.commit()
trigger_device_rescan(dev)
return part_dict
def is_block_device(dev):
"""Check whether a device is block or not."""
attempts = CONF.disk_utils.partition_detection_attempts
for attempt in range(attempts):
try:
s = os.stat(dev)
except OSError as e:
LOG.debug("Unable to stat device %(dev)s. Attempt %(attempt)d "
"out of %(total)d. Error: %(err)s",
{"dev": dev, "attempt": attempt + 1,
"total": attempts, "err": e})
time.sleep(1)
else:
return stat.S_ISBLK(s.st_mode)
msg = _("Unable to stat device %(dev)s after attempting to verify "
"%(attempts)d times.") % {'dev': dev, 'attempts': attempts}
LOG.error(msg)
raise exception.InstanceDeployFailure(msg)
def dd(src, dst, conv_flags=None):
"""Execute dd from src to dst."""
if conv_flags:
extra_args = ['conv=%s' % conv_flags]
else:
extra_args = []
utils.dd(src, dst, 'bs=%s' % CONF.disk_utils.dd_block_size, 'oflag=direct',
*extra_args)
def qemu_img_info(path):
"""Return an object containing the parsed output from qemu-img info."""
if not os.path.exists(path):
return imageutils.QemuImgInfo()
out, err = utils.execute('env', 'LC_ALL=C', 'LANG=C',
'qemu-img', 'info', path,
'--output=json',
prlimit=_qemu_img_limits())
return imageutils.QemuImgInfo(out, format='json')
def _retry_on_res_temp_unavailable(exc):
if (isinstance(exc, processutils.ProcessExecutionError)
and ('Resource temporarily unavailable' in exc.stderr
or 'Cannot allocate memory' in exc.stderr)):
return True
return False
@tenacity.retry(
retry=tenacity.retry_if_exception(_retry_on_res_temp_unavailable),
stop=tenacity.stop_after_attempt(CONF.disk_utils.image_convert_attempts),
reraise=True)
def convert_image(source, dest, out_format, run_as_root=False, cache=None,
out_of_order=False, sparse_size=None):
"""Convert image to other format."""
cmd = ['qemu-img', 'convert', '-O', out_format]
if cache is not None:
cmd += ['-t', cache]
if sparse_size is not None:
cmd += ['-S', sparse_size]
if out_of_order:
cmd.append('-W')
cmd += [source, dest]
    # NOTE(TheJulia): Statically set the MALLOC_ARENA_MAX to prevent leaking
# and the creation of new malloc arenas which will consume the system
# memory. If limited to 1, qemu-img consumes ~250 MB of RAM, but when
# another thread tries to access a locked section of memory in use with
# another thread, then by default a new malloc arena is created,
# which essentially balloons the memory requirement of the machine.
# Default for qemu-img is 8 * nCPU * ~250MB (based on defaults +
    # thread/code/process/library overhead). In other words, 64 GB. Limiting
# this to 3 keeps the memory utilization in happy cases below the overall
# threshold which is in place in case a malicious image is attempted to
# be passed through qemu-img.
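    # Worked example (added for clarity, figures assume a 32-core host): the
    # default of 8 arenas per core would allow up to 8 * 32 * ~250 MB ~= 64 GB,
    # whereas MALLOC_ARENA_MAX=3 keeps qemu-img at roughly 3 * ~250 MB ~= 750 MB,
    # well under the 2048 MiB address-space limit applied via prlimit.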
env_vars = {'MALLOC_ARENA_MAX': '3'}
try:
utils.execute(*cmd, run_as_root=run_as_root,
prlimit=_qemu_img_limits(),
use_standard_locale=True,
env_variables=env_vars)
except processutils.ProcessExecutionError as e:
if ('Resource temporarily unavailable' in e.stderr
or 'Cannot allocate memory' in e.stderr):
LOG.debug('Failed to convert image, retrying. Error: %s', e)
# Sync disk caches before the next attempt
utils.execute('sync')
raise
def populate_image(src, dst, conv_flags=None):
data = qemu_img_info(src)
if data.file_format == 'raw':
dd(src, dst, conv_flags=conv_flags)
else:
convert_image(src, dst, 'raw', True, sparse_size='0')
def block_uuid(dev):
"""Get UUID of a block device.
Try to fetch the UUID, if that fails, try to fetch the PARTUUID.
"""
info = get_device_information(dev, fields=['UUID', 'PARTUUID'])
if info.get('UUID'):
return info['UUID']
else:
LOG.debug('Falling back to partition UUID as the block device UUID '
'was not found while examining %(device)s',
{'device': dev})
return info.get('PARTUUID', '')
def get_image_mb(image_path, virtual_size=True):
"""Get size of an image in Megabyte."""
mb = 1024 * 1024
if not virtual_size:
image_byte = os.path.getsize(image_path)
else:
data = qemu_img_info(image_path)
image_byte = data.virtual_size
# round up size to MB
image_mb = int((image_byte + mb - 1) / mb)
return image_mb
def get_dev_block_size(dev):
"""Get the device size in 512 byte sectors."""
block_sz, cmderr = utils.execute('blockdev', '--getsz', dev,
run_as_root=True)
return int(block_sz)
def destroy_disk_metadata(dev, node_uuid):
"""Destroy metadata structures on node's disk.
Ensure that node's disk magic strings are wiped without zeroing the
entire drive. To do this we use the wipefs tool from util-linux.
:param dev: Path for the device to work on.
:param node_uuid: Node's uuid. Used for logging.
"""
# NOTE(NobodyCam): This is needed to work around bug:
# https://bugs.launchpad.net/ironic/+bug/1317647
LOG.debug("Start destroy disk metadata for node %(node)s.",
{'node': node_uuid})
try:
utils.execute('wipefs', '--force', '--all', dev,
run_as_root=True,
use_standard_locale=True)
except processutils.ProcessExecutionError as e:
with excutils.save_and_reraise_exception() as ctxt:
# NOTE(zhenguo): Check if --force option is supported for wipefs,
# if not, we should try without it.
if '--force' in str(e):
ctxt.reraise = False
utils.execute('wipefs', '--all', dev,
run_as_root=True,
use_standard_locale=True)
# NOTE(TheJulia): sgdisk attempts to load and make sense of the
# partition tables in advance of wiping the partition data.
# This means when a CRC error is found, sgdisk fails before
# erasing partition data.
# This is the same bug as
# https://bugs.launchpad.net/ironic-python-agent/+bug/1737556
# Overwrite the Primary GPT, catch very small partitions (like EBRs)
dd_device = 'of=%s' % dev
dd_count = 'count=%s' % GPT_SIZE_SECTORS
dev_size = get_dev_block_size(dev)
if dev_size < GPT_SIZE_SECTORS:
dd_count = 'count=%s' % dev_size
utils.execute('dd', 'bs=512', 'if=/dev/zero', dd_device, dd_count,
run_as_root=True, use_standard_locale=True)
# Overwrite the Secondary GPT, do this only if there could be one
if dev_size > GPT_SIZE_SECTORS:
gpt_backup = dev_size - GPT_SIZE_SECTORS
dd_seek = 'seek=%i' % gpt_backup
dd_count = 'count=%s' % GPT_SIZE_SECTORS
utils.execute('dd', 'bs=512', 'if=/dev/zero', dd_device, dd_count,
dd_seek, run_as_root=True, use_standard_locale=True)
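    # Worked example (added for clarity, device size is hypothetical): for a
    # 1,000,000-sector device, the primary GPT wipe above runs with count=33
    # and the secondary GPT wipe runs with seek=999967 (1,000,000 - 33) and
    # count=33.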
# Go ahead and let sgdisk run as well.
utils.execute('sgdisk', '-Z', dev, run_as_root=True,
use_standard_locale=True)
try:
utils.wait_for_disk_to_become_available(dev)
except exception.IronicException as e:
raise exception.InstanceDeployFailure(
_('Destroying metadata failed on device %(device)s. '
'Error: %(error)s')
% {'device': dev, 'error': e})
LOG.info("Disk metadata on %(dev)s successfully destroyed for node "
"%(node)s", {'dev': dev, 'node': node_uuid})
def list_opts():
"""Entry point for oslo-config-generator."""
return [('disk_utils', opts)]
def _fix_gpt_structs(device, node_uuid):
"""Checks backup GPT data structures and moves them to end of the device
:param device: The device path.
:param node_uuid: UUID of the Node. Used for logging.
:raises: InstanceDeployFailure, if any disk partitioning related
commands fail.
"""
try:
output, _err = utils.execute('sgdisk', '-v', device, run_as_root=True)
search_str = "it doesn't reside\nat the end of the disk"
if search_str in output:
utils.execute('sgdisk', '-e', device, run_as_root=True)
except (processutils.UnknownArgumentError,
processutils.ProcessExecutionError, OSError) as e:
msg = (_('Failed to fix GPT data structures on disk %(disk)s '
'for node %(node)s. Error: %(error)s') %
{'disk': device, 'node': node_uuid, 'error': e})
LOG.error(msg)
raise exception.InstanceDeployFailure(msg)
def fix_gpt_partition(device, node_uuid):
"""Fix GPT partition
Fix GPT table information when image is written to a disk which
    has a bigger extent (e.g. a 30 GB image written on a 60 GB physical disk).
:param device: The device path.
:param node_uuid: UUID of the Node.
:raises: InstanceDeployFailure if exception is caught.
"""
try:
disk_is_gpt_partitioned = (get_partition_table_type(device) == 'gpt')
if disk_is_gpt_partitioned:
_fix_gpt_structs(device, node_uuid)
except Exception as e:
msg = (_('Failed to fix GPT partition on disk %(disk)s '
'for node %(node)s. Error: %(error)s') %
{'disk': device, 'node': node_uuid, 'error': e})
LOG.error(msg)
raise exception.InstanceDeployFailure(msg)
def udev_settle():
"""Wait for the udev event queue to settle.
Wait for the udev event queue to settle to make sure all devices
are detected once the machine boots up.
:return: True on success, False otherwise.
"""
LOG.debug('Waiting until udev event queue is empty')
try:
utils.execute('udevadm', 'settle')
except processutils.ProcessExecutionError as e:
LOG.warning('Something went wrong when waiting for udev '
'to settle. Error: %s', e)
return False
else:
return True
def partprobe(device, attempts=None):
"""Probe partitions on the given device.
    :param device: The block device containing the partitions that are being
                   updated.
:param attempts: Number of attempts to run partprobe, the default is read
from the configuration.
:return: True on success, False otherwise.
"""
if attempts is None:
attempts = CONF.disk_utils.partprobe_attempts
try:
utils.execute('partprobe', device, run_as_root=True, attempts=attempts)
except (processutils.UnknownArgumentError,
processutils.ProcessExecutionError, OSError) as e:
LOG.warning("Unable to probe for partitions on device %(device)s, "
"the partitioning table may be broken. Error: %(error)s",
{'device': device, 'error': e})
return False
else:
return True
def trigger_device_rescan(device, attempts=None):
"""Sync and trigger device rescan.
    Disk partitioning performed via parted on a ramdisk does not have to
    honor the fsync mechanism. In essence, fsync is used
on the file representing the block device, which falls to the kernel
filesystem layer to trigger a sync event. On a ramdisk using ramfs,
this is an explicit non-operation.
As a result of this, we need to trigger a system wide sync operation
which will trigger cache to flush to disk, after which partition changes
should be visible upon re-scan.
When ramdisks are not in use, this also helps ensure that data has
been safely flushed across the wire, such as on iscsi connections.
    :param device: The block device containing the partitions that are being
                   updated.
:param attempts: Number of attempts to run partprobe, the default is read
from the configuration.
:return: True on success, False otherwise.
"""
LOG.debug('Explicitly calling sync to force buffer/cache flush')
utils.execute('sync')
# Make sure any additions to the partitioning are reflected in the
# kernel.
udev_settle()
partprobe(device, attempts=attempts)
try:
# Also verify that the partitioning is correct now.
utils.execute('sgdisk', '-v', device, run_as_root=True)
except processutils.ProcessExecutionError as exc:
LOG.warning('Failed to verify partition tables on device %(dev)s: '
'%(err)s', {'dev': device, 'err': exc})
return False
else:
return True
|
openstack/ironic-lib
|
ironic_lib/disk_utils.py
|
Python
|
apache-2.0
| 30,540
|
# -*- coding: utf-8 -*-
#
# gPodder - A media aggregator and podcast client
# Copyright (c) 2005-2010 Thomas Perl and the gPodder Team
#
# gPodder is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# gPodder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import platform
import cgi
import gtk
import gtk.gdk
import gobject
import pango
import sys
import shutil
import subprocess
import glob
import time
import urllib
import urllib2
import tempfile
import collections
import threading
from xml.sax import saxutils
import gpodder
try:
import dbus
import dbus.service
import dbus.mainloop
import dbus.glib
except ImportError:
# Mock the required D-Bus interfaces with no-ops (ugly? maybe.)
class dbus:
class SessionBus:
def __init__(self, *args, **kwargs):
pass
class glib:
class DBusGMainLoop:
pass
class service:
@staticmethod
def method(*args, **kwargs):
return lambda x: x
class BusName:
def __init__(self, *args, **kwargs):
pass
class Object:
def __init__(self, *args, **kwargs):
pass
from gpodder import feedcore
from gpodder import util
from gpodder import opml
from gpodder import download
from gpodder import my
from gpodder import youtube
from gpodder import player
from gpodder.liblogger import log
_ = gpodder.gettext
N_ = gpodder.ngettext
from gpodder.model import PodcastChannel
from gpodder.model import PodcastEpisode
from gpodder.dbsqlite import Database
from gpodder.gtkui.model import PodcastListModel
from gpodder.gtkui.model import EpisodeListModel
from gpodder.gtkui.config import UIConfig
from gpodder.gtkui.services import CoverDownloader
from gpodder.gtkui.widgets import SimpleMessageArea
from gpodder.gtkui.desktopfile import UserAppsReader
from gpodder.gtkui.draw import draw_text_box_centered
from gpodder.gtkui.interface.common import BuilderWidget
from gpodder.gtkui.interface.common import TreeViewHelper
from gpodder.gtkui.interface.addpodcast import gPodderAddPodcast
from gpodder.gtkui.mygpodder import MygPodderSettings
if gpodder.ui.desktop:
from gpodder.gtkui.download import DownloadStatusModel
from gpodder.gtkui.desktop.sync import gPodderSyncUI
from gpodder.gtkui.desktop.channel import gPodderChannel
from gpodder.gtkui.desktop.preferences import gPodderPreferences
from gpodder.gtkui.desktop.shownotes import gPodderShownotes
from gpodder.gtkui.desktop.episodeselector import gPodderEpisodeSelector
from gpodder.gtkui.desktop.podcastdirectory import gPodderPodcastDirectory
from gpodder.gtkui.desktop.dependencymanager import gPodderDependencyManager
try:
from gpodder.gtkui.desktop.trayicon import GPodderStatusIcon
have_trayicon = True
except Exception, exc:
log('Warning: Could not import gpodder.trayicon.', traceback=True)
log('Warning: This probably means your PyGTK installation is too old!')
have_trayicon = False
elif gpodder.ui.diablo:
from gpodder.gtkui.download import DownloadStatusModel
from gpodder.gtkui.maemo.channel import gPodderChannel
from gpodder.gtkui.maemo.preferences import gPodderPreferences
from gpodder.gtkui.maemo.shownotes import gPodderShownotes
from gpodder.gtkui.maemo.episodeselector import gPodderEpisodeSelector
from gpodder.gtkui.maemo.podcastdirectory import gPodderPodcastDirectory
have_trayicon = False
elif gpodder.ui.fremantle:
from gpodder.gtkui.frmntl.model import DownloadStatusModel
from gpodder.gtkui.frmntl.model import EpisodeListModel
from gpodder.gtkui.frmntl.model import PodcastListModel
from gpodder.gtkui.maemo.channel import gPodderChannel
from gpodder.gtkui.frmntl.preferences import gPodderPreferences
from gpodder.gtkui.frmntl.shownotes import gPodderShownotes
from gpodder.gtkui.frmntl.episodeselector import gPodderEpisodeSelector
from gpodder.gtkui.frmntl.podcastdirectory import gPodderPodcastDirectory
from gpodder.gtkui.frmntl.episodes import gPodderEpisodes
from gpodder.gtkui.frmntl.downloads import gPodderDownloads
have_trayicon = False
from gpodder.gtkui.frmntl.portrait import FremantleRotation
from gpodder.gtkui.interface.common import Orientation
from gpodder.gtkui.interface.welcome import gPodderWelcome
from gpodder.gtkui.interface.progress import ProgressIndicator
if gpodder.ui.maemo:
import hildon
from gpodder.dbusproxy import DBusPodcastsProxy
class gPodder(BuilderWidget, dbus.service.Object):
finger_friendly_widgets = ['btnCleanUpDownloads', 'button_search_episodes_clear']
ICON_GENERAL_ADD = 'general_add'
ICON_GENERAL_REFRESH = 'general_refresh'
ICON_GENERAL_CLOSE = 'general_close'
def __init__(self, bus_name, config):
dbus.service.Object.__init__(self, object_path=gpodder.dbus_gui_object_path, bus_name=bus_name)
self.podcasts_proxy = DBusPodcastsProxy(lambda: self.channels, \
self.on_itemUpdate_activate, \
self.playback_episodes, \
self.download_episode_list, \
bus_name)
self.db = Database(gpodder.database_file)
self.config = config
BuilderWidget.__init__(self, None)
def new(self):
if gpodder.ui.diablo:
import hildon
self.app = hildon.Program()
self.app.add_window(self.main_window)
self.main_window.add_toolbar(self.toolbar)
menu = gtk.Menu()
for child in self.main_menu.get_children():
child.reparent(menu)
self.main_window.set_menu(self.set_finger_friendly(menu))
self.bluetooth_available = False
elif gpodder.ui.fremantle:
import hildon
self.app = hildon.Program()
self.app.add_window(self.main_window)
appmenu = hildon.AppMenu()
for filter in (self.item_view_podcasts_all, \
self.item_view_podcasts_downloaded, \
self.item_view_podcasts_unplayed):
button = gtk.ToggleButton()
filter.connect_proxy(button)
appmenu.add_filter(button)
for action in (self.itemPreferences, \
self.item_downloads, \
self.itemRemoveOldEpisodes, \
self.item_unsubscribe, \
self.itemAbout):
button = hildon.Button(gtk.HILDON_SIZE_AUTO,\
hildon.BUTTON_ARRANGEMENT_HORIZONTAL)
action.connect_proxy(button)
if action == self.item_downloads:
button.set_title(_('Downloads'))
button.set_value(_('Idle'))
self.button_downloads = button
appmenu.append(button)
appmenu.show_all()
self.main_window.set_app_menu(appmenu)
# Initialize portrait mode / rotation manager
self._fremantle_rotation = FremantleRotation('gPodder', \
self.main_window, \
gpodder.__version__, \
self.config.rotation_mode)
if self.config.rotation_mode == FremantleRotation.ALWAYS:
util.idle_add(self.on_window_orientation_changed, \
Orientation.PORTRAIT)
self._last_orientation = Orientation.PORTRAIT
else:
self._last_orientation = Orientation.LANDSCAPE
self.bluetooth_available = False
else:
self._last_orientation = Orientation.LANDSCAPE
self.bluetooth_available = util.bluetooth_available()
self.toolbar.set_property('visible', self.config.show_toolbar)
self.config.connect_gtk_window(self.gPodder, 'main_window')
if not gpodder.ui.fremantle:
self.config.connect_gtk_paned('paned_position', self.channelPaned)
self.main_window.show()
self.player_receiver = player.MediaPlayerDBusReceiver(self.on_played)
self.gPodder.connect('key-press-event', self.on_key_press)
self.preferences_dialog = None
self.config.add_observer(self.on_config_changed)
self.tray_icon = None
self.episode_shownotes_window = None
self.new_episodes_window = None
if gpodder.ui.desktop:
# Mac OS X-specific UI tweaks: Native main menu integration
# http://sourceforge.net/apps/trac/gtk-osx/wiki/Integrate
if getattr(gtk.gdk, 'WINDOWING', 'x11') == 'quartz':
try:
import igemacintegration as igemi
# Move the menu bar from the window to the Mac menu bar
self.mainMenu.hide()
igemi.ige_mac_menu_set_menu_bar(self.mainMenu)
# Reparent some items to the "Application" menu
for widget in ('/mainMenu/menuHelp/itemAbout', \
'/mainMenu/menuPodcasts/itemPreferences'):
item = self.uimanager1.get_widget(widget)
group = igemi.ige_mac_menu_add_app_menu_group()
igemi.ige_mac_menu_add_app_menu_item(group, item, None)
quit_widget = '/mainMenu/menuPodcasts/itemQuit'
quit_item = self.uimanager1.get_widget(quit_widget)
igemi.ige_mac_menu_set_quit_menu_item(quit_item)
except ImportError:
print >>sys.stderr, """
Warning: ige-mac-integration not found - no native menus.
"""
self.sync_ui = gPodderSyncUI(self.config, self.notification, \
self.main_window, self.show_confirmation, \
self.update_episode_list_icons, \
self.update_podcast_list_model, self.toolPreferences, \
gPodderEpisodeSelector, \
self.commit_changes_to_database)
else:
self.sync_ui = None
self.download_status_model = DownloadStatusModel()
self.download_queue_manager = download.DownloadQueueManager(self.config)
if gpodder.ui.desktop:
self.show_hide_tray_icon()
self.itemShowAllEpisodes.set_active(self.config.podcast_list_view_all)
self.itemShowToolbar.set_active(self.config.show_toolbar)
self.itemShowDescription.set_active(self.config.episode_list_descriptions)
if not gpodder.ui.fremantle:
self.config.connect_gtk_spinbutton('max_downloads', self.spinMaxDownloads)
self.config.connect_gtk_togglebutton('max_downloads_enabled', self.cbMaxDownloads)
self.config.connect_gtk_spinbutton('limit_rate_value', self.spinLimitDownloads)
self.config.connect_gtk_togglebutton('limit_rate', self.cbLimitDownloads)
# When the amount of maximum downloads changes, notify the queue manager
changed_cb = lambda spinbutton: self.download_queue_manager.spawn_threads()
self.spinMaxDownloads.connect('value-changed', changed_cb)
self.default_title = 'gPodder'
if gpodder.__version__.rfind('git') != -1:
self.set_title('gPodder %s' % gpodder.__version__)
else:
title = self.gPodder.get_title()
if title is not None:
self.set_title(title)
else:
self.set_title(_('gPodder'))
self.cover_downloader = CoverDownloader()
# Generate list models for podcasts and their episodes
self.podcast_list_model = PodcastListModel(self.cover_downloader)
self.cover_downloader.register('cover-available', self.cover_download_finished)
self.cover_downloader.register('cover-removed', self.cover_file_removed)
if gpodder.ui.fremantle:
# Work around Maemo bug #4718
self.button_refresh.set_name('HildonButton-finger')
self.button_subscribe.set_name('HildonButton-finger')
self.button_refresh.set_sensitive(False)
self.button_subscribe.set_sensitive(False)
self.button_subscribe.set_image(gtk.image_new_from_icon_name(\
self.ICON_GENERAL_ADD, gtk.ICON_SIZE_BUTTON))
self.button_refresh.set_image(gtk.image_new_from_icon_name(\
self.ICON_GENERAL_REFRESH, gtk.ICON_SIZE_BUTTON))
# Make the button scroll together with the TreeView contents
action_area_box = self.treeChannels.get_action_area_box()
for child in self.buttonbox:
child.reparent(action_area_box)
self.vbox.remove(self.buttonbox)
action_area_box.set_spacing(2)
action_area_box.set_border_width(3)
self.treeChannels.set_action_area_visible(True)
from gpodder.gtkui.frmntl import style
sub_font = style.get_font_desc('SmallSystemFont')
sub_color = style.get_color('SecondaryTextColor')
sub = (sub_font.to_string(), sub_color.to_string())
sub = '<span font_desc="%s" foreground="%s">%%s</span>' % sub
self.label_footer.set_markup(sub % gpodder.__copyright__)
hildon.hildon_gtk_window_set_progress_indicator(self.main_window, True)
while gtk.events_pending():
gtk.main_iteration(False)
try:
# Try to get the real package version from dpkg
p = subprocess.Popen(['dpkg-query', '-W', '-f=${Version}', 'gpodder'], stdout=subprocess.PIPE)
version, _stderr = p.communicate()
del _stderr
del p
except:
version = gpodder.__version__
self.label_footer.set_markup(sub % ('v %s' % version))
self.label_footer.hide()
self.episodes_window = gPodderEpisodes(self.main_window, \
on_treeview_expose_event=self.on_treeview_expose_event, \
show_episode_shownotes=self.show_episode_shownotes, \
update_podcast_list_model=self.update_podcast_list_model, \
on_itemRemoveChannel_activate=self.on_itemRemoveChannel_activate, \
item_view_episodes_all=self.item_view_episodes_all, \
item_view_episodes_unplayed=self.item_view_episodes_unplayed, \
item_view_episodes_downloaded=self.item_view_episodes_downloaded, \
item_view_episodes_undeleted=self.item_view_episodes_undeleted, \
on_entry_search_episodes_changed=self.on_entry_search_episodes_changed, \
on_entry_search_episodes_key_press=self.on_entry_search_episodes_key_press, \
hide_episode_search=self.hide_episode_search, \
on_itemUpdateChannel_activate=self.on_itemUpdateChannel_activate, \
playback_episodes=self.playback_episodes, \
delete_episode_list=self.delete_episode_list, \
episode_list_status_changed=self.episode_list_status_changed, \
download_episode_list=self.download_episode_list, \
episode_is_downloading=self.episode_is_downloading, \
show_episode_in_download_manager=self.show_episode_in_download_manager, \
add_download_task_monitor=self.add_download_task_monitor, \
remove_download_task_monitor=self.remove_download_task_monitor, \
for_each_episode_set_task_status=self.for_each_episode_set_task_status, \
on_delete_episodes_button_clicked=self.on_itemRemoveOldEpisodes_activate, \
on_itemUpdate_activate=self.on_itemUpdate_activate)
# Expose objects for episode list type-ahead find
self.hbox_search_episodes = self.episodes_window.hbox_search_episodes
self.entry_search_episodes = self.episodes_window.entry_search_episodes
self.button_search_episodes_clear = self.episodes_window.button_search_episodes_clear
self.downloads_window = gPodderDownloads(self.main_window, \
on_treeview_expose_event=self.on_treeview_expose_event, \
on_btnCleanUpDownloads_clicked=self.on_btnCleanUpDownloads_clicked, \
_for_each_task_set_status=self._for_each_task_set_status, \
downloads_list_get_selection=self.downloads_list_get_selection, \
_config=self.config)
self.treeAvailable = self.episodes_window.treeview
self.treeDownloads = self.downloads_window.treeview
# Init the treeviews that we use
self.init_podcast_list_treeview()
self.init_episode_list_treeview()
self.init_download_list_treeview()
if self.config.podcast_list_hide_boring:
self.item_view_hide_boring_podcasts.set_active(True)
self.currently_updating = False
if gpodder.ui.maemo:
self.context_menu_mouse_button = 1
else:
self.context_menu_mouse_button = 3
if self.config.start_iconified:
self.iconify_main_window()
self.download_tasks_seen = set()
self.download_list_update_enabled = False
self.last_download_count = 0
self.download_task_monitors = set()
# Subscribed channels
self.active_channel = None
self.channels = PodcastChannel.load_from_db(self.db, self.config.download_dir)
self.channel_list_changed = True
self.update_podcasts_tab()
# load list of user applications for audio playback
self.user_apps_reader = UserAppsReader(['audio', 'video'])
threading.Thread(target=self.user_apps_reader.read).start()
# Set the "Device" menu item for the first time
if gpodder.ui.desktop:
self.update_item_device()
# Set up the first instance of MygPoClient
self.mygpo_client = my.MygPoClient(self.config)
# Now, update the feed cache, when everything's in place
if not gpodder.ui.fremantle:
self.btnUpdateFeeds.show()
self.updating_feed_cache = False
self.feed_cache_update_cancelled = False
self.update_feed_cache(force_update=self.config.update_on_startup)
self.message_area = None
def find_partial_downloads():
# Look for partial file downloads
partial_files = glob.glob(os.path.join(self.config.download_dir, '*', '*.partial'))
count = len(partial_files)
resumable_episodes = []
if count:
if not gpodder.ui.fremantle:
util.idle_add(self.wNotebook.set_current_page, 1)
indicator = ProgressIndicator(_('Loading incomplete downloads'), \
_('Some episodes have not finished downloading in a previous session.'), \
False, self.main_window)
indicator.on_message(N_('%d partial file', '%d partial files', count) % count)
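                # Strip the '.partial' suffix to get the final filenames we are looking for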
candidates = [f[:-len('.partial')] for f in partial_files]
found = 0
for c in self.channels:
for e in c.get_all_episodes():
filename = e.local_filename(create=False, check_only=True)
if filename in candidates:
log('Found episode: %s', e.title, sender=self)
found += 1
indicator.on_message(e.title)
indicator.on_progress(float(found)/count)
candidates.remove(filename)
partial_files.remove(filename+'.partial')
resumable_episodes.append(e)
if not candidates:
break
if not candidates:
break
for f in partial_files:
log('Partial file without episode: %s', f, sender=self)
util.delete_file(f)
util.idle_add(indicator.on_finished)
if len(resumable_episodes):
def offer_resuming():
self.download_episode_list_paused(resumable_episodes)
if not gpodder.ui.fremantle:
resume_all = gtk.Button(_('Resume all'))
#resume_all.set_border_width(0)
def on_resume_all(button):
selection = self.treeDownloads.get_selection()
selection.select_all()
selected_tasks, can_queue, can_cancel, can_pause, can_remove, can_force = self.downloads_list_get_selection()
selection.unselect_all()
self._for_each_task_set_status(selected_tasks, download.DownloadTask.QUEUED)
self.message_area.hide()
resume_all.connect('clicked', on_resume_all)
self.message_area = SimpleMessageArea(_('Incomplete downloads from a previous session were found.'), (resume_all,))
self.vboxDownloadStatusWidgets.pack_start(self.message_area, expand=False)
self.vboxDownloadStatusWidgets.reorder_child(self.message_area, 0)
self.message_area.show_all()
self.clean_up_downloads(delete_partial=False)
util.idle_add(offer_resuming)
elif not gpodder.ui.fremantle:
util.idle_add(self.wNotebook.set_current_page, 0)
else:
util.idle_add(self.clean_up_downloads, True)
threading.Thread(target=find_partial_downloads).start()
# Start the auto-update procedure
self._auto_update_timer_source_id = None
if self.config.auto_update_feeds:
self.restart_auto_update_timer()
# Delete old episodes if the user wishes to
if self.config.auto_remove_played_episodes and \
self.config.episode_old_age > 0:
old_episodes = list(self.get_expired_episodes())
if len(old_episodes) > 0:
self.delete_episode_list(old_episodes, confirm=False)
self.update_podcast_list_model(set(e.channel.url for e in old_episodes))
if gpodder.ui.fremantle:
hildon.hildon_gtk_window_set_progress_indicator(self.main_window, False)
self.button_refresh.set_sensitive(True)
self.button_subscribe.set_sensitive(True)
self.main_window.set_title(_('gPodder'))
hildon.hildon_gtk_window_take_screenshot(self.main_window, True)
# Do the initial sync with the web service
util.idle_add(self.mygpo_client.flush, True)
# First-time users should be asked if they want to see the OPML
if not self.channels and not gpodder.ui.fremantle:
util.idle_add(self.on_itemUpdate_activate)
def on_played(self, start, end, total, file_uri):
"""Handle the "played" signal from a media player"""
log('Received play action: %s (%d, %d, %d)', file_uri, start, end, total, sender=self)
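        # Strip the 'file://' prefix to get the local filename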
filename = file_uri[len('file://'):]
# FIXME: Optimize this by querying the database more directly
for channel in self.channels:
for episode in channel.get_all_episodes():
fn = episode.local_filename(create=False, check_only=True)
if fn == filename:
file_type = episode.file_type()
# Automatically enable D-Bus played status mode
if file_type == 'audio':
self.config.audio_played_dbus = True
elif file_type == 'video':
self.config.video_played_dbus = True
now = time.time()
if total > 0:
episode.total_time = total
if episode.current_position_updated is None or \
now > episode.current_position_updated:
episode.current_position = end
episode.current_position_updated = now
episode.mark(is_played=True)
episode.save()
self.db.commit()
self.update_episode_list_icons([episode.url])
self.update_podcast_list_model([episode.channel.url])
# Submit this action to the webservice
self.mygpo_client.on_playback_full(episode, \
start, end, total)
return
def on_add_remove_podcasts_mygpo(self):
actions = self.mygpo_client.get_received_actions()
if not actions:
return False
existing_urls = [c.url for c in self.channels]
# Columns for the episode selector window - just one...
columns = (
('description', None, None, _('Action')),
)
# A list of actions that have to be chosen from
changes = []
# Actions that are ignored (already carried out)
ignored = []
for action in actions:
if action.is_add and action.url not in existing_urls:
changes.append(my.Change(action))
elif action.is_remove and action.url in existing_urls:
podcast_object = None
for podcast in self.channels:
if podcast.url == action.url:
podcast_object = podcast
break
changes.append(my.Change(action, podcast_object))
else:
log('Ignoring action: %s', action, sender=self)
ignored.append(action)
# Confirm all ignored changes
self.mygpo_client.confirm_received_actions(ignored)
def execute_podcast_actions(selected):
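            # Split the accepted changes into feed URLs to add and podcast objects to remove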
add_list = [c.action.url for c in selected if c.action.is_add]
remove_list = [c.podcast for c in selected if c.action.is_remove]
# Apply the accepted changes locally
self.add_podcast_list(add_list)
self.remove_podcast_list(remove_list, confirm=False)
# All selected items are now confirmed
self.mygpo_client.confirm_received_actions(c.action for c in selected)
# Revert the changes on the server
rejected = [c.action for c in changes if c not in selected]
self.mygpo_client.reject_received_actions(rejected)
def ask():
# We're abusing the Episode Selector again ;) -- thp
gPodderEpisodeSelector(self.main_window, \
title=_('Confirm changes from gpodder.net'), \
instructions=_('Select the actions you want to carry out.'), \
episodes=changes, \
columns=columns, \
size_attribute=None, \
stock_ok_button=gtk.STOCK_APPLY, \
callback=execute_podcast_actions, \
_config=self.config)
# There are some actions that need the user's attention
if changes:
util.idle_add(ask)
return True
# We have no remaining actions - no selection happens
return False
def rewrite_urls_mygpo(self):
# Check if we have to rewrite URLs since the last add
rewritten_urls = self.mygpo_client.get_rewritten_urls()
for rewritten_url in rewritten_urls:
if not rewritten_url.new_url:
continue
for channel in self.channels:
if channel.url == rewritten_url.old_url:
log('Updating URL of %s to %s', channel, \
rewritten_url.new_url, sender=self)
channel.url = rewritten_url.new_url
channel.save()
self.channel_list_changed = True
util.idle_add(self.update_episode_list_model)
break
def on_send_full_subscriptions(self):
# Send the full subscription list to the gpodder.net client
# (this will overwrite the subscription list on the server)
indicator = ProgressIndicator(_('Uploading subscriptions'), \
_('Your subscriptions are being uploaded to the server.'), \
False, self.main_window)
try:
self.mygpo_client.set_subscriptions([c.url for c in self.channels])
util.idle_add(self.show_message, _('List uploaded successfully.'))
except Exception, e:
def show_error(e):
message = str(e)
if not message:
message = e.__class__.__name__
self.show_message(message, \
_('Error while uploading'), \
important=True)
util.idle_add(show_error, e)
util.idle_add(indicator.on_finished)
def on_podcast_selected(self, treeview, path, column):
# for Maemo 5's UI
model = treeview.get_model()
channel = model.get_value(model.get_iter(path), \
PodcastListModel.C_CHANNEL)
self.active_channel = channel
self.update_episode_list_model()
self.episodes_window.channel = self.active_channel
self.episodes_window.show()
def on_button_subscribe_clicked(self, button):
self.on_itemImportChannels_activate(button)
def on_button_downloads_clicked(self, widget):
self.downloads_window.show()
def show_episode_in_download_manager(self, episode):
self.downloads_window.show()
model = self.treeDownloads.get_model()
selection = self.treeDownloads.get_selection()
selection.unselect_all()
it = model.get_iter_first()
while it is not None:
task = model.get_value(it, DownloadStatusModel.C_TASK)
if task.episode.url == episode.url:
selection.select_iter(it)
# FIXME: Scroll to selection in pannable area
break
it = model.iter_next(it)
def for_each_episode_set_task_status(self, episodes, status):
episode_urls = set(episode.url for episode in episodes)
model = self.treeDownloads.get_model()
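        # Collect (row reference, task) pairs for all download tasks that belong to the given episodes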
selected_tasks = [(gtk.TreeRowReference(model, row.path), \
model.get_value(row.iter, \
DownloadStatusModel.C_TASK)) for row in model \
if model.get_value(row.iter, DownloadStatusModel.C_TASK).url \
in episode_urls]
self._for_each_task_set_status(selected_tasks, status)
def on_window_orientation_changed(self, orientation):
self._last_orientation = orientation
if self.preferences_dialog is not None:
self.preferences_dialog.on_window_orientation_changed(orientation)
treeview = self.treeChannels
if orientation == Orientation.PORTRAIT:
treeview.set_action_area_orientation(gtk.ORIENTATION_VERTICAL)
# Work around Maemo bug #4718
self.button_subscribe.set_name('HildonButton-thumb')
self.button_refresh.set_name('HildonButton-thumb')
else:
treeview.set_action_area_orientation(gtk.ORIENTATION_HORIZONTAL)
# Work around Maemo bug #4718
self.button_subscribe.set_name('HildonButton-finger')
self.button_refresh.set_name('HildonButton-finger')
def on_treeview_podcasts_selection_changed(self, selection):
model, iter = selection.get_selected()
if iter is None:
self.active_channel = None
self.episode_list_model.clear()
def on_treeview_button_pressed(self, treeview, event):
if event.window != treeview.get_bin_window():
return False
TreeViewHelper.save_button_press_event(treeview, event)
if getattr(treeview, TreeViewHelper.ROLE) == \
TreeViewHelper.ROLE_PODCASTS:
return self.currently_updating
return event.button == self.context_menu_mouse_button and \
gpodder.ui.desktop
def on_treeview_podcasts_button_released(self, treeview, event):
if event.window != treeview.get_bin_window():
return False
if gpodder.ui.maemo:
return self.treeview_channels_handle_gestures(treeview, event)
return self.treeview_channels_show_context_menu(treeview, event)
def on_treeview_episodes_button_released(self, treeview, event):
if event.window != treeview.get_bin_window():
return False
if gpodder.ui.maemo:
if self.config.enable_fingerscroll or self.config.maemo_enable_gestures:
return self.treeview_available_handle_gestures(treeview, event)
return self.treeview_available_show_context_menu(treeview, event)
def on_treeview_downloads_button_released(self, treeview, event):
if event.window != treeview.get_bin_window():
return False
return self.treeview_downloads_show_context_menu(treeview, event)
def on_entry_search_podcasts_changed(self, editable):
if self.hbox_search_podcasts.get_property('visible'):
self.podcast_list_model.set_search_term(editable.get_chars(0, -1))
def on_entry_search_podcasts_key_press(self, editable, event):
if event.keyval == gtk.keysyms.Escape:
self.hide_podcast_search()
return True
def hide_podcast_search(self, *args):
self.hbox_search_podcasts.hide()
self.entry_search_podcasts.set_text('')
self.podcast_list_model.set_search_term(None)
self.treeChannels.grab_focus()
def show_podcast_search(self, input_char):
self.hbox_search_podcasts.show()
self.entry_search_podcasts.insert_text(input_char, -1)
self.entry_search_podcasts.grab_focus()
self.entry_search_podcasts.set_position(-1)
def init_podcast_list_treeview(self):
# Set up podcast channel tree view widget
if gpodder.ui.fremantle:
if self.config.podcast_list_view_mode == EpisodeListModel.VIEW_DOWNLOADED:
self.item_view_podcasts_downloaded.set_active(True)
elif self.config.podcast_list_view_mode == EpisodeListModel.VIEW_UNPLAYED:
self.item_view_podcasts_unplayed.set_active(True)
else:
self.item_view_podcasts_all.set_active(True)
self.podcast_list_model.set_view_mode(self.config.podcast_list_view_mode)
iconcolumn = gtk.TreeViewColumn('')
iconcell = gtk.CellRendererPixbuf()
iconcolumn.pack_start(iconcell, False)
iconcolumn.add_attribute(iconcell, 'pixbuf', PodcastListModel.C_COVER)
self.treeChannels.append_column(iconcolumn)
namecolumn = gtk.TreeViewColumn('')
namecell = gtk.CellRendererText()
namecell.set_property('ellipsize', pango.ELLIPSIZE_END)
namecolumn.pack_start(namecell, True)
namecolumn.add_attribute(namecell, 'markup', PodcastListModel.C_DESCRIPTION)
iconcell = gtk.CellRendererPixbuf()
iconcell.set_property('xalign', 1.0)
namecolumn.pack_start(iconcell, False)
namecolumn.add_attribute(iconcell, 'pixbuf', PodcastListModel.C_PILL)
namecolumn.add_attribute(iconcell, 'visible', PodcastListModel.C_PILL_VISIBLE)
self.treeChannels.append_column(namecolumn)
self.treeChannels.set_model(self.podcast_list_model.get_filtered_model())
# When no podcast is selected, clear the episode list model
selection = self.treeChannels.get_selection()
selection.connect('changed', self.on_treeview_podcasts_selection_changed)
# Set up type-ahead find for the podcast list
def on_key_press(treeview, event):
if event.keyval == gtk.keysyms.Escape:
self.hide_podcast_search()
elif gpodder.ui.fremantle and event.keyval == gtk.keysyms.BackSpace:
self.hide_podcast_search()
elif event.state & gtk.gdk.CONTROL_MASK:
# Don't handle type-ahead when control is pressed (so shortcuts
# with the Ctrl key still work, e.g. Ctrl+A, ...)
                return False

else:
unicode_char_id = gtk.gdk.keyval_to_unicode(event.keyval)
if unicode_char_id == 0:
return False
input_char = unichr(unicode_char_id)
self.show_podcast_search(input_char)
return True
self.treeChannels.connect('key-press-event', on_key_press)
# Enable separators to the podcast list to separate special podcasts
# from others (this is used for the "all episodes" view)
self.treeChannels.set_row_separator_func(PodcastListModel.row_separator_func)
TreeViewHelper.set(self.treeChannels, TreeViewHelper.ROLE_PODCASTS)
def on_entry_search_episodes_changed(self, editable):
if self.hbox_search_episodes.get_property('visible'):
self.episode_list_model.set_search_term(editable.get_chars(0, -1))
def on_entry_search_episodes_key_press(self, editable, event):
if event.keyval == gtk.keysyms.Escape:
self.hide_episode_search()
return True
def hide_episode_search(self, *args):
self.hbox_search_episodes.hide()
self.entry_search_episodes.set_text('')
self.episode_list_model.set_search_term(None)
self.treeAvailable.grab_focus()
def show_episode_search(self, input_char):
self.hbox_search_episodes.show()
self.entry_search_episodes.insert_text(input_char, -1)
self.entry_search_episodes.grab_focus()
self.entry_search_episodes.set_position(-1)
def init_episode_list_treeview(self):
# For loading the list model
self.empty_episode_list_model = EpisodeListModel()
self.episode_list_model = EpisodeListModel()
if self.config.episode_list_view_mode == EpisodeListModel.VIEW_UNDELETED:
self.item_view_episodes_undeleted.set_active(True)
elif self.config.episode_list_view_mode == EpisodeListModel.VIEW_DOWNLOADED:
self.item_view_episodes_downloaded.set_active(True)
elif self.config.episode_list_view_mode == EpisodeListModel.VIEW_UNPLAYED:
self.item_view_episodes_unplayed.set_active(True)
else:
self.item_view_episodes_all.set_active(True)
self.episode_list_model.set_view_mode(self.config.episode_list_view_mode)
self.treeAvailable.set_model(self.episode_list_model.get_filtered_model())
TreeViewHelper.set(self.treeAvailable, TreeViewHelper.ROLE_EPISODES)
iconcell = gtk.CellRendererPixbuf()
if gpodder.ui.maemo:
iconcell.set_fixed_size(50, 50)
status_column_label = ''
else:
status_column_label = _('Status')
iconcolumn = gtk.TreeViewColumn(status_column_label, iconcell, pixbuf=EpisodeListModel.C_STATUS_ICON)
namecell = gtk.CellRendererText()
namecell.set_property('ellipsize', pango.ELLIPSIZE_END)
namecolumn = gtk.TreeViewColumn(_('Episode'), namecell, markup=EpisodeListModel.C_DESCRIPTION)
namecolumn.set_sizing(gtk.TREE_VIEW_COLUMN_AUTOSIZE)
namecolumn.set_resizable(True)
namecolumn.set_expand(True)
sizecell = gtk.CellRendererText()
sizecolumn = gtk.TreeViewColumn(_('Size'), sizecell, text=EpisodeListModel.C_FILESIZE_TEXT)
releasecell = gtk.CellRendererText()
releasecolumn = gtk.TreeViewColumn(_('Released'), releasecell, text=EpisodeListModel.C_PUBLISHED_TEXT)
for itemcolumn in (iconcolumn, namecolumn, sizecolumn, releasecolumn):
itemcolumn.set_reorderable(True)
self.treeAvailable.append_column(itemcolumn)
if gpodder.ui.maemo:
sizecolumn.set_visible(False)
releasecolumn.set_visible(False)
# Set up type-ahead find for the episode list
def on_key_press(treeview, event):
if event.keyval == gtk.keysyms.Escape:
self.hide_episode_search()
elif gpodder.ui.fremantle and event.keyval == gtk.keysyms.BackSpace:
self.hide_episode_search()
elif event.state & gtk.gdk.CONTROL_MASK:
# Don't handle type-ahead when control is pressed (so shortcuts
# with the Ctrl key still work, e.g. Ctrl+A, ...)
return False
else:
unicode_char_id = gtk.gdk.keyval_to_unicode(event.keyval)
if unicode_char_id == 0:
return False
input_char = unichr(unicode_char_id)
self.show_episode_search(input_char)
return True
self.treeAvailable.connect('key-press-event', on_key_press)
if gpodder.ui.desktop:
self.treeAvailable.enable_model_drag_source(gtk.gdk.BUTTON1_MASK, \
(('text/uri-list', 0, 0),), gtk.gdk.ACTION_COPY)
def drag_data_get(tree, context, selection_data, info, timestamp):
if self.config.on_drag_mark_played:
for episode in self.get_selected_episodes():
episode.mark(is_played=True)
self.on_selected_episodes_status_changed()
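                # Build file:// URIs for the downloaded episodes in the current selection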
uris = ['file://'+e.local_filename(create=False) \
for e in self.get_selected_episodes() \
if e.was_downloaded(and_exists=True)]
uris.append('') # for the trailing '\r\n'
selection_data.set(selection_data.target, 8, '\r\n'.join(uris))
self.treeAvailable.connect('drag-data-get', drag_data_get)
selection = self.treeAvailable.get_selection()
if gpodder.ui.diablo:
if self.config.maemo_enable_gestures or self.config.enable_fingerscroll:
selection.set_mode(gtk.SELECTION_SINGLE)
else:
selection.set_mode(gtk.SELECTION_MULTIPLE)
elif gpodder.ui.fremantle:
selection.set_mode(gtk.SELECTION_SINGLE)
else:
selection.set_mode(gtk.SELECTION_MULTIPLE)
# Update the sensitivity of the toolbar buttons on the Desktop
selection.connect('changed', lambda s: self.play_or_download())
if gpodder.ui.diablo:
# Set up the tap-and-hold context menu for podcasts
menu = gtk.Menu()
menu.append(self.itemUpdateChannel.create_menu_item())
menu.append(self.itemEditChannel.create_menu_item())
menu.append(gtk.SeparatorMenuItem())
menu.append(self.itemRemoveChannel.create_menu_item())
menu.append(gtk.SeparatorMenuItem())
item = gtk.ImageMenuItem(_('Close this menu'))
item.set_image(gtk.image_new_from_stock(gtk.STOCK_CLOSE, \
gtk.ICON_SIZE_MENU))
menu.append(item)
menu.show_all()
menu = self.set_finger_friendly(menu)
self.treeChannels.tap_and_hold_setup(menu)
def init_download_list_treeview(self):
# enable multiple selection support
self.treeDownloads.get_selection().set_mode(gtk.SELECTION_MULTIPLE)
self.treeDownloads.set_search_equal_func(TreeViewHelper.make_search_equal_func(DownloadStatusModel))
# columns and renderers for "download progress" tab
# First column: [ICON] Episodename
column = gtk.TreeViewColumn(_('Episode'))
cell = gtk.CellRendererPixbuf()
if gpodder.ui.maemo:
cell.set_fixed_size(50, 50)
cell.set_property('stock-size', gtk.ICON_SIZE_MENU)
column.pack_start(cell, expand=False)
column.add_attribute(cell, 'stock-id', \
DownloadStatusModel.C_ICON_NAME)
cell = gtk.CellRendererText()
cell.set_property('ellipsize', pango.ELLIPSIZE_END)
column.pack_start(cell, expand=True)
column.add_attribute(cell, 'markup', DownloadStatusModel.C_NAME)
column.set_sizing(gtk.TREE_VIEW_COLUMN_AUTOSIZE)
column.set_expand(True)
self.treeDownloads.append_column(column)
# Second column: Progress
cell = gtk.CellRendererProgress()
cell.set_property('yalign', .5)
cell.set_property('ypad', 6)
column = gtk.TreeViewColumn(_('Progress'), cell,
value=DownloadStatusModel.C_PROGRESS, \
text=DownloadStatusModel.C_PROGRESS_TEXT)
column.set_sizing(gtk.TREE_VIEW_COLUMN_AUTOSIZE)
column.set_expand(False)
self.treeDownloads.append_column(column)
column.set_property('min-width', 150)
column.set_property('max-width', 150)
self.treeDownloads.set_model(self.download_status_model)
TreeViewHelper.set(self.treeDownloads, TreeViewHelper.ROLE_DOWNLOADS)
def on_treeview_expose_event(self, treeview, event):
if event.window == treeview.get_bin_window():
model = treeview.get_model()
if (model is not None and model.get_iter_first() is not None):
return False
role = getattr(treeview, TreeViewHelper.ROLE)
ctx = event.window.cairo_create()
ctx.rectangle(event.area.x, event.area.y,
event.area.width, event.area.height)
ctx.clip()
x, y, width, height, depth = event.window.get_geometry()
progress = None
if role == TreeViewHelper.ROLE_EPISODES:
if self.currently_updating:
text = _('Loading episodes')
progress = self.episode_list_model.get_update_progress()
elif self.config.episode_list_view_mode != \
EpisodeListModel.VIEW_ALL:
text = _('No episodes in current view')
else:
text = _('No episodes available')
elif role == TreeViewHelper.ROLE_PODCASTS:
if self.config.episode_list_view_mode != \
EpisodeListModel.VIEW_ALL and \
self.config.podcast_list_hide_boring and \
len(self.channels) > 0:
text = _('No podcasts in this view')
else:
text = _('No subscriptions')
elif role == TreeViewHelper.ROLE_DOWNLOADS:
text = _('No active downloads')
else:
raise Exception('on_treeview_expose_event: unknown role')
if gpodder.ui.fremantle:
from gpodder.gtkui.frmntl import style
font_desc = style.get_font_desc('LargeSystemFont')
else:
font_desc = None
draw_text_box_centered(ctx, treeview, width, height, text, font_desc, progress)
return False
def enable_download_list_update(self):
if not self.download_list_update_enabled:
gobject.timeout_add(1500, self.update_downloads_list)
self.download_list_update_enabled = True
def on_btnCleanUpDownloads_clicked(self, button=None):
model = self.download_status_model
all_tasks = [(gtk.TreeRowReference(model, row.path), row[0]) for row in model]
changed_episode_urls = set()
for row_reference, task in all_tasks:
if task.status in (task.DONE, task.CANCELLED):
model.remove(model.get_iter(row_reference.get_path()))
try:
# We don't "see" this task anymore - remove it;
# this is needed, so update_episode_list_icons()
# below gets the correct list of "seen" tasks
self.download_tasks_seen.remove(task)
except KeyError, key_error:
log('Cannot remove task from "seen" list: %s', task, sender=self)
changed_episode_urls.add(task.url)
# Tell the task that it has been removed (so it can clean up)
task.removed_from_list()
# Tell the podcasts tab to update icons for our removed podcasts
self.update_episode_list_icons(changed_episode_urls)
# Tell the shownotes window that we have removed the episode
if self.episode_shownotes_window is not None and \
self.episode_shownotes_window.episode is not None and \
self.episode_shownotes_window.episode.url in changed_episode_urls:
self.episode_shownotes_window._download_status_changed(None)
# Update the tab title and downloads list
self.update_downloads_list()
def on_tool_downloads_toggled(self, toolbutton):
if toolbutton.get_active():
self.wNotebook.set_current_page(1)
else:
self.wNotebook.set_current_page(0)
def add_download_task_monitor(self, monitor):
self.download_task_monitors.add(monitor)
model = self.download_status_model
if model is None:
model = ()
for row in model:
task = row[self.download_status_model.C_TASK]
monitor.task_updated(task)
def remove_download_task_monitor(self, monitor):
self.download_task_monitors.remove(monitor)
def update_downloads_list(self):
try:
model = self.download_status_model
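            # Counters for the different download task states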
downloading, failed, finished, queued, paused, others = 0, 0, 0, 0, 0, 0
total_speed, total_size, done_size = 0, 0, 0
# Keep a list of all download tasks that we've seen
download_tasks_seen = set()
# Remember the DownloadTask object for the episode that
# has been opened in the episode shownotes dialog (if any)
if self.episode_shownotes_window is not None:
shownotes_episode = self.episode_shownotes_window.episode
shownotes_task = None
else:
shownotes_episode = None
shownotes_task = None
            # Do not go through the list if the model is not (yet) available
if model is None:
model = ()
failed_downloads = []
for row in model:
self.download_status_model.request_update(row.iter)
task = row[self.download_status_model.C_TASK]
speed, size, status, progress = task.speed, task.total_size, task.status, task.progress
# Let the download task monitors know of changes
for monitor in self.download_task_monitors:
monitor.task_updated(task)
total_size += size
done_size += size*progress
if shownotes_episode is not None and \
shownotes_episode.url == task.episode.url:
shownotes_task = task
download_tasks_seen.add(task)
if status == download.DownloadTask.DOWNLOADING:
downloading += 1
total_speed += speed
elif status == download.DownloadTask.FAILED:
failed_downloads.append(task)
failed += 1
elif status == download.DownloadTask.DONE:
finished += 1
elif status == download.DownloadTask.QUEUED:
queued += 1
elif status == download.DownloadTask.PAUSED:
paused += 1
else:
others += 1
# Remember which tasks we have seen after this run
self.download_tasks_seen = download_tasks_seen
if gpodder.ui.desktop:
text = [_('Downloads')]
if downloading + failed + queued > 0:
s = []
if downloading > 0:
s.append(N_('%d active', '%d active', downloading) % downloading)
if failed > 0:
s.append(N_('%d failed', '%d failed', failed) % failed)
if queued > 0:
s.append(N_('%d queued', '%d queued', queued) % queued)
text.append(' (' + ', '.join(s)+')')
self.labelDownloads.set_text(''.join(text))
elif gpodder.ui.diablo:
                total = downloading + failed + finished + queued + paused + others
                if total:
                    self.tool_downloads.set_label(_('Downloads (%d)') % total)
                else:
                    self.tool_downloads.set_label(_('Downloads'))
elif gpodder.ui.fremantle:
if downloading + queued > 0:
self.button_downloads.set_value(N_('%d active', '%d active', downloading+queued) % (downloading+queued))
elif failed > 0:
self.button_downloads.set_value(N_('%d failed', '%d failed', failed) % failed)
elif paused > 0:
self.button_downloads.set_value(N_('%d paused', '%d paused', paused) % paused)
else:
self.button_downloads.set_value(_('Idle'))
title = [self.default_title]
# We have to update all episodes/channels for which the status has
# changed. Accessing task.status_changed has the side effect of
# re-setting the changed flag, so we need to get the "changed" list
# of tuples first and split it into two lists afterwards
changed = [(task.url, task.podcast_url) for task in \
self.download_tasks_seen if task.status_changed]
episode_urls = [episode_url for episode_url, channel_url in changed]
channel_urls = [channel_url for episode_url, channel_url in changed]
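            # While downloads are active, append the overall progress to the title and update the tray icon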
count = downloading + queued
if count > 0:
title.append(N_('downloading %d file', 'downloading %d files', count) % count)
if total_size > 0:
percentage = 100.0*done_size/total_size
else:
percentage = 0.0
total_speed = util.format_filesize(total_speed)
title[1] += ' (%d%%, %s/s)' % (percentage, total_speed)
if self.tray_icon is not None:
# Update the tray icon status and progress bar
self.tray_icon.set_status(self.tray_icon.STATUS_DOWNLOAD_IN_PROGRESS, title[1])
self.tray_icon.draw_progress_bar(percentage/100.)
elif self.last_download_count > 0:
if self.tray_icon is not None:
# Update the tray icon status
self.tray_icon.set_status()
if gpodder.ui.desktop:
self.downloads_finished(self.download_tasks_seen)
if gpodder.ui.diablo:
hildon.hildon_banner_show_information(self.gPodder, '', 'gPodder: %s' % _('All downloads finished'))
log('All downloads have finished.', sender=self)
if self.config.cmd_all_downloads_complete:
util.run_external_command(self.config.cmd_all_downloads_complete)
if gpodder.ui.fremantle and failed:
message = '\n'.join(['%s: %s' % (str(task), \
task.error_message) for task in failed_downloads])
self.show_message(message, _('Downloads failed'), important=True)
self.last_download_count = count
if not gpodder.ui.fremantle:
self.gPodder.set_title(' - '.join(title))
self.update_episode_list_icons(episode_urls)
if self.episode_shownotes_window is not None:
if (shownotes_task and shownotes_task.url in episode_urls) or \
shownotes_task != self.episode_shownotes_window.task:
self.episode_shownotes_window._download_status_changed(shownotes_task)
self.episode_shownotes_window._download_status_progress()
self.play_or_download()
if channel_urls:
self.update_podcast_list_model(channel_urls)
if not self.download_queue_manager.are_queued_or_active_tasks():
self.download_list_update_enabled = False
return self.download_list_update_enabled
except Exception, e:
log('Exception happened while updating download list.', sender=self, traceback=True)
self.show_message('%s\n\n%s' % (_('Please report this problem and restart gPodder:'), str(e)), _('Unhandled exception'), important=True)
# We return False here, so the update loop won't be called again,
# that's why we require the restart of gPodder in the message.
return False
def on_config_changed(self, *args):
util.idle_add(self._on_config_changed, *args)
def _on_config_changed(self, name, old_value, new_value):
if name == 'show_toolbar' and gpodder.ui.desktop:
self.toolbar.set_property('visible', new_value)
elif name == 'videoplayer':
self.config.video_played_dbus = False
elif name == 'player':
self.config.audio_played_dbus = False
elif name == 'episode_list_descriptions':
self.update_episode_list_model()
elif name == 'episode_list_thumbnails':
self.update_episode_list_icons(all=True)
elif name == 'rotation_mode':
self._fremantle_rotation.set_mode(new_value)
elif name in ('auto_update_feeds', 'auto_update_frequency'):
self.restart_auto_update_timer()
elif name == 'podcast_list_view_all':
# Force a update of the podcast list model
self.channel_list_changed = True
if gpodder.ui.fremantle:
hildon.hildon_gtk_window_set_progress_indicator(self.main_window, True)
while gtk.events_pending():
gtk.main_iteration(False)
self.update_podcast_list_model()
if gpodder.ui.fremantle:
hildon.hildon_gtk_window_set_progress_indicator(self.main_window, False)
def on_treeview_query_tooltip(self, treeview, x, y, keyboard_tooltip, tooltip):
# With get_bin_window, we get the window that contains the rows without
# the header. The Y coordinate of this window will be the height of the
# treeview header. This is the amount we have to subtract from the
# event's Y coordinate to get the coordinate to pass to get_path_at_pos
(x_bin, y_bin) = treeview.get_bin_window().get_position()
        x -= x_bin
        y -= y_bin
        (path, column, rx, ry) = treeview.get_path_at_pos(x, y) or (None,)*4
if not getattr(treeview, TreeViewHelper.CAN_TOOLTIP) or (column is not None and column != treeview.get_columns()[0]):
setattr(treeview, TreeViewHelper.LAST_TOOLTIP, None)
return False
if path is not None:
model = treeview.get_model()
iter = model.get_iter(path)
role = getattr(treeview, TreeViewHelper.ROLE)
if role == TreeViewHelper.ROLE_EPISODES:
id = model.get_value(iter, EpisodeListModel.C_URL)
elif role == TreeViewHelper.ROLE_PODCASTS:
id = model.get_value(iter, PodcastListModel.C_URL)
last_tooltip = getattr(treeview, TreeViewHelper.LAST_TOOLTIP)
if last_tooltip is not None and last_tooltip != id:
setattr(treeview, TreeViewHelper.LAST_TOOLTIP, None)
return False
setattr(treeview, TreeViewHelper.LAST_TOOLTIP, id)
if role == TreeViewHelper.ROLE_EPISODES:
description = model.get_value(iter, EpisodeListModel.C_TOOLTIP)
if description:
tooltip.set_text(description)
else:
return False
elif role == TreeViewHelper.ROLE_PODCASTS:
channel = model.get_value(iter, PodcastListModel.C_CHANNEL)
if channel is None:
return False
channel.request_save_dir_size()
diskspace_str = util.format_filesize(channel.save_dir_size, 0)
error_str = model.get_value(iter, PodcastListModel.C_ERROR)
if error_str:
error_str = _('Feedparser error: %s') % saxutils.escape(error_str.strip())
error_str = '<span foreground="#ff0000">%s</span>' % error_str
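                # Build a small table with the podcast title, disk usage and description for the custom tooltip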
table = gtk.Table(rows=3, columns=3)
table.set_row_spacings(5)
table.set_col_spacings(5)
table.set_border_width(5)
heading = gtk.Label()
heading.set_alignment(0, 1)
heading.set_markup('<b><big>%s</big></b>\n<small>%s</small>' % (saxutils.escape(channel.title), saxutils.escape(channel.url)))
table.attach(heading, 0, 1, 0, 1)
size_info = gtk.Label()
size_info.set_alignment(1, 1)
size_info.set_justify(gtk.JUSTIFY_RIGHT)
size_info.set_markup('<b>%s</b>\n<small>%s</small>' % (diskspace_str, _('disk usage')))
table.attach(size_info, 2, 3, 0, 1)
table.attach(gtk.HSeparator(), 0, 3, 1, 2)
if len(channel.description) < 500:
description = channel.description
else:
pos = channel.description.find('\n\n')
if pos == -1 or pos > 500:
description = channel.description[:498]+'[...]'
else:
description = channel.description[:pos]
description = gtk.Label(description)
if error_str:
description.set_markup(error_str)
description.set_alignment(0, 0)
description.set_line_wrap(True)
table.attach(description, 0, 3, 2, 3)
table.show_all()
tooltip.set_custom(table)
return True
setattr(treeview, TreeViewHelper.LAST_TOOLTIP, None)
return False
def treeview_allow_tooltips(self, treeview, allow):
setattr(treeview, TreeViewHelper.CAN_TOOLTIP, allow)
def update_m3u_playlist_clicked(self, widget):
if self.active_channel is not None:
self.active_channel.update_m3u_playlist()
self.show_message(_('Updated M3U playlist in download folder.'), _('Updated playlist'), widget=self.treeChannels)
def treeview_handle_context_menu_click(self, treeview, event):
x, y = int(event.x), int(event.y)
path, column, rx, ry = treeview.get_path_at_pos(x, y) or (None,)*4
selection = treeview.get_selection()
model, paths = selection.get_selected_rows()
if path is None or (path not in paths and \
event.button == self.context_menu_mouse_button):
            # We have right-clicked, but not on the current selection;
            # assume we don't want to operate on the selection
paths = []
if path is not None and not paths and \
event.button == self.context_menu_mouse_button:
# No selection or clicked outside selection;
# select the single item where we clicked
treeview.grab_focus()
treeview.set_cursor(path, column, 0)
paths = [path]
if not paths:
# Unselect any remaining items (clicked elsewhere)
if hasattr(treeview, 'is_rubber_banding_active'):
if not treeview.is_rubber_banding_active():
selection.unselect_all()
else:
selection.unselect_all()
return model, paths
def downloads_list_get_selection(self, model=None, paths=None):
if model is None and paths is None:
selection = self.treeDownloads.get_selection()
model, paths = selection.get_selected_rows()
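        # Assume every action is possible, then disable the flags based on each selected task's status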
can_queue, can_cancel, can_pause, can_remove, can_force = (True,)*5
selected_tasks = [(gtk.TreeRowReference(model, path), \
model.get_value(model.get_iter(path), \
DownloadStatusModel.C_TASK)) for path in paths]
for row_reference, task in selected_tasks:
if task.status != download.DownloadTask.QUEUED:
can_force = False
if task.status not in (download.DownloadTask.PAUSED, \
download.DownloadTask.FAILED, \
download.DownloadTask.CANCELLED):
can_queue = False
if task.status not in (download.DownloadTask.PAUSED, \
download.DownloadTask.QUEUED, \
download.DownloadTask.DOWNLOADING):
can_cancel = False
if task.status not in (download.DownloadTask.QUEUED, \
download.DownloadTask.DOWNLOADING):
can_pause = False
if task.status not in (download.DownloadTask.CANCELLED, \
download.DownloadTask.FAILED, \
download.DownloadTask.DONE):
can_remove = False
return selected_tasks, can_queue, can_cancel, can_pause, can_remove, can_force
def downloads_finished(self, download_tasks_seen):
# FIXME: Filter all tasks that have already been reported
finished_downloads = [str(task) for task in download_tasks_seen if task.status == task.DONE]
failed_downloads = [str(task)+' ('+task.error_message+')' for task in download_tasks_seen if task.status == task.FAILED]
if finished_downloads and failed_downloads:
message = self.format_episode_list(finished_downloads, 5)
message += '\n\n<i>%s</i>\n' % _('These downloads failed:')
message += self.format_episode_list(failed_downloads, 5)
self.show_message(message, _('Downloads finished'), True, widget=self.labelDownloads)
elif finished_downloads:
message = self.format_episode_list(finished_downloads)
self.show_message(message, _('Downloads finished'), widget=self.labelDownloads)
elif failed_downloads:
message = self.format_episode_list(failed_downloads)
self.show_message(message, _('Downloads failed'), True, widget=self.labelDownloads)
def format_episode_list(self, episode_list, max_episodes=10):
"""
Format a list of episode names for notifications
        Will truncate long episode names and limit the number
        of episodes displayed (max_episodes=10).
The episode_list parameter should be a list of strings.
"""
MAX_TITLE_LENGTH = 100
result = []
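        # Truncate overly long titles, keeping the beginning and the end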
for title in episode_list[:min(len(episode_list), max_episodes)]:
if len(title) > MAX_TITLE_LENGTH:
middle = (MAX_TITLE_LENGTH/2)-2
title = '%s...%s' % (title[0:middle], title[-middle:])
result.append(saxutils.escape(title))
result.append('\n')
more_episodes = len(episode_list) - max_episodes
if more_episodes > 0:
result.append('(...')
result.append(N_('%d more episode', '%d more episodes', more_episodes) % more_episodes)
result.append('...)')
return (''.join(result)).strip()
def _for_each_task_set_status(self, tasks, status, force_start=False):
episode_urls = set()
model = self.treeDownloads.get_model()
for row_reference, task in tasks:
if status == download.DownloadTask.QUEUED:
                # Only queue the task when it's paused/failed/cancelled (or forced)
if task.status in (task.PAUSED, task.FAILED, task.CANCELLED) or force_start:
self.download_queue_manager.add_task(task, force_start)
self.enable_download_list_update()
elif status == download.DownloadTask.CANCELLED:
# Cancelling a download allowed when downloading/queued
if task.status in (task.QUEUED, task.DOWNLOADING):
task.status = status
# Cancelling paused downloads requires a call to .run()
elif task.status == task.PAUSED:
task.status = status
# Call run, so the partial file gets deleted
task.run()
elif status == download.DownloadTask.PAUSED:
# Pausing a download only when queued/downloading
if task.status in (task.DOWNLOADING, task.QUEUED):
task.status = status
elif status is None:
# Remove the selected task - cancel downloading/queued tasks
if task.status in (task.QUEUED, task.DOWNLOADING):
task.status = task.CANCELLED
model.remove(model.get_iter(row_reference.get_path()))
# Remember the URL, so we can tell the UI to update
try:
# We don't "see" this task anymore - remove it;
# this is needed, so update_episode_list_icons()
# below gets the correct list of "seen" tasks
self.download_tasks_seen.remove(task)
except KeyError, key_error:
log('Cannot remove task from "seen" list: %s', task, sender=self)
episode_urls.add(task.url)
# Tell the task that it has been removed (so it can clean up)
task.removed_from_list()
else:
# We can (hopefully) simply set the task status here
task.status = status
# Tell the podcasts tab to update icons for our removed podcasts
self.update_episode_list_icons(episode_urls)
# Update the tab title and downloads list
self.update_downloads_list()
def treeview_downloads_show_context_menu(self, treeview, event):
model, paths = self.treeview_handle_context_menu_click(treeview, event)
if not paths:
if not hasattr(treeview, 'is_rubber_banding_active'):
return True
else:
return not treeview.is_rubber_banding_active()
if event.button == self.context_menu_mouse_button:
selected_tasks, can_queue, can_cancel, can_pause, can_remove, can_force = \
self.downloads_list_get_selection(model, paths)
def make_menu_item(label, stock_id, tasks, status, sensitive, force_start=False):
# This creates a menu item for selection-wide actions
item = gtk.ImageMenuItem(label)
item.set_image(gtk.image_new_from_stock(stock_id, gtk.ICON_SIZE_MENU))
item.connect('activate', lambda item: self._for_each_task_set_status(tasks, status, force_start))
item.set_sensitive(sensitive)
return self.set_finger_friendly(item)
menu = gtk.Menu()
item = gtk.ImageMenuItem(_('Episode details'))
item.set_image(gtk.image_new_from_stock(gtk.STOCK_INFO, gtk.ICON_SIZE_MENU))
if len(selected_tasks) == 1:
row_reference, task = selected_tasks[0]
episode = task.episode
item.connect('activate', lambda item: self.show_episode_shownotes(episode))
else:
item.set_sensitive(False)
menu.append(self.set_finger_friendly(item))
menu.append(gtk.SeparatorMenuItem())
if can_force:
menu.append(make_menu_item(_('Start download now'), gtk.STOCK_GO_DOWN, selected_tasks, download.DownloadTask.QUEUED, True, True))
else:
menu.append(make_menu_item(_('Download'), gtk.STOCK_GO_DOWN, selected_tasks, download.DownloadTask.QUEUED, can_queue, False))
menu.append(make_menu_item(_('Cancel'), gtk.STOCK_CANCEL, selected_tasks, download.DownloadTask.CANCELLED, can_cancel))
menu.append(make_menu_item(_('Pause'), gtk.STOCK_MEDIA_PAUSE, selected_tasks, download.DownloadTask.PAUSED, can_pause))
menu.append(gtk.SeparatorMenuItem())
menu.append(make_menu_item(_('Remove from list'), gtk.STOCK_REMOVE, selected_tasks, None, can_remove))
if gpodder.ui.maemo:
# Because we open the popup on left-click for Maemo,
# we also include a non-action to close the menu
menu.append(gtk.SeparatorMenuItem())
item = gtk.ImageMenuItem(_('Close this menu'))
item.set_image(gtk.image_new_from_stock(gtk.STOCK_CLOSE, gtk.ICON_SIZE_MENU))
menu.append(self.set_finger_friendly(item))
menu.show_all()
menu.popup(None, None, None, event.button, event.time)
return True
def treeview_channels_show_context_menu(self, treeview, event):
model, paths = self.treeview_handle_context_menu_click(treeview, event)
if not paths:
return True
# Check for valid channel id, if there's no id then
# assume that it is a proxy channel or equivalent
# and cannot be operated with right click
if self.active_channel.id is None:
return True
if event.button == 3:
menu = gtk.Menu()
ICON = lambda x: x
item = gtk.ImageMenuItem( _('Open download folder'))
item.set_image( gtk.image_new_from_icon_name(ICON('folder-open'), gtk.ICON_SIZE_MENU))
item.connect('activate', lambda x: util.gui_open(self.active_channel.save_dir))
menu.append( item)
item = gtk.ImageMenuItem( _('Update Feed'))
item.set_image(gtk.image_new_from_stock(gtk.STOCK_REFRESH, gtk.ICON_SIZE_MENU))
item.connect('activate', self.on_itemUpdateChannel_activate )
item.set_sensitive( not self.updating_feed_cache )
menu.append( item)
item = gtk.ImageMenuItem(_('Update M3U playlist'))
item.set_image(gtk.image_new_from_stock(gtk.STOCK_REFRESH, gtk.ICON_SIZE_MENU))
item.connect('activate', self.update_m3u_playlist_clicked)
menu.append(item)
if self.active_channel.link:
item = gtk.ImageMenuItem(_('Visit website'))
item.set_image(gtk.image_new_from_icon_name(ICON('web-browser'), gtk.ICON_SIZE_MENU))
item.connect('activate', lambda w: util.open_website(self.active_channel.link))
menu.append(item)
if self.active_channel.channel_is_locked:
item = gtk.ImageMenuItem(_('Allow deletion of all episodes'))
item.set_image(gtk.image_new_from_stock(gtk.STOCK_DIALOG_AUTHENTICATION, gtk.ICON_SIZE_MENU))
item.connect('activate', self.on_channel_toggle_lock_activate)
menu.append(self.set_finger_friendly(item))
else:
item = gtk.ImageMenuItem(_('Prohibit deletion of all episodes'))
item.set_image(gtk.image_new_from_stock(gtk.STOCK_DIALOG_AUTHENTICATION, gtk.ICON_SIZE_MENU))
item.connect('activate', self.on_channel_toggle_lock_activate)
menu.append(self.set_finger_friendly(item))
menu.append( gtk.SeparatorMenuItem())
item = gtk.ImageMenuItem(gtk.STOCK_EDIT)
item.connect( 'activate', self.on_itemEditChannel_activate)
menu.append( item)
item = gtk.ImageMenuItem(gtk.STOCK_DELETE)
item.connect( 'activate', self.on_itemRemoveChannel_activate)
menu.append( item)
menu.show_all()
# Disable tooltips while we are showing the menu, so
# the tooltip will not appear over the menu
self.treeview_allow_tooltips(self.treeChannels, False)
menu.connect('deactivate', lambda menushell: self.treeview_allow_tooltips(self.treeChannels, True))
menu.popup( None, None, None, event.button, event.time)
return True
def on_itemClose_activate(self, widget):
if self.tray_icon is not None:
self.iconify_main_window()
else:
self.on_gPodder_delete_event(widget)
def cover_file_removed(self, channel_url):
"""
The Cover Downloader calls this when a previously-
available cover has been removed from the disk. We
have to update our model to reflect this change.
"""
self.podcast_list_model.delete_cover_by_url(channel_url)
def cover_download_finished(self, channel_url, pixbuf):
"""
The Cover Downloader calls this when it has finished
downloading (or registering, if already downloaded)
a new channel cover, which is ready for displaying.
"""
self.podcast_list_model.add_cover_by_url(channel_url, pixbuf)
def save_episodes_as_file(self, episodes):
for episode in episodes:
self.save_episode_as_file(episode)
def save_episode_as_file(self, episode):
PRIVATE_FOLDER_ATTRIBUTE = '_save_episodes_as_file_folder'
if episode.was_downloaded(and_exists=True):
folder = getattr(self, PRIVATE_FOLDER_ATTRIBUTE, None)
copy_from = episode.local_filename(create=False)
assert copy_from is not None
copy_to = episode.sync_filename(self.config.custom_sync_name_enabled, self.config.custom_sync_name)
(result, folder) = self.show_copy_dialog(src_filename=copy_from, dst_filename=copy_to, dst_directory=folder)
setattr(self, PRIVATE_FOLDER_ATTRIBUTE, folder)
def copy_episodes_bluetooth(self, episodes):
episodes_to_copy = [e for e in episodes if e.was_downloaded(and_exists=True)]
        def convert_and_send_thread(episodes):
for episode in episodes:
filename = episode.local_filename(create=False)
assert filename is not None
destfile = os.path.join(tempfile.gettempdir(), \
util.sanitize_filename(episode.sync_filename(self.config.custom_sync_name_enabled, self.config.custom_sync_name)))
(base, ext) = os.path.splitext(filename)
if not destfile.endswith(ext):
destfile += ext
try:
shutil.copyfile(filename, destfile)
util.bluetooth_send_file(destfile)
except:
log('Cannot copy "%s" to "%s".', filename, destfile, sender=self)
self.notification(_('Error converting file.'), _('Bluetooth file transfer'), important=True)
util.delete_file(destfile)
threading.Thread(target=convert_and_send_thread, args=[episodes_to_copy]).start()
def get_device_name(self):
if self.config.device_type == 'ipod':
return _('iPod')
elif self.config.device_type in ('filesystem', 'mtp'):
return _('MP3 player')
else:
return '(unknown device)'
def _treeview_button_released(self, treeview, event):
xpos, ypos = TreeViewHelper.get_button_press_event(treeview)
dy = int(abs(event.y-ypos))
dx = int(event.x-xpos)
selection = treeview.get_selection()
path = treeview.get_path_at_pos(int(event.x), int(event.y))
if path is None or dy > 30:
return (False, dx, dy)
path, column, x, y = path
selection.select_path(path)
treeview.set_cursor(path)
treeview.grab_focus()
return (True, dx, dy)
def treeview_channels_handle_gestures(self, treeview, event):
if self.currently_updating:
return False
selected, dx, dy = self._treeview_button_released(treeview, event)
if selected:
if self.config.maemo_enable_gestures:
if dx > 70:
self.on_itemUpdateChannel_activate()
elif dx < -70:
self.on_itemEditChannel_activate(treeview)
return False
def treeview_available_handle_gestures(self, treeview, event):
selected, dx, dy = self._treeview_button_released(treeview, event)
if selected:
if self.config.maemo_enable_gestures:
if dx > 70:
self.on_playback_selected_episodes(None)
return True
elif dx < -70:
self.on_shownotes_selected_episodes(None)
return True
# Pass the event to the context menu handler for treeAvailable
self.treeview_available_show_context_menu(treeview, event)
return True
def treeview_available_show_context_menu(self, treeview, event):
model, paths = self.treeview_handle_context_menu_click(treeview, event)
if not paths:
if not hasattr(treeview, 'is_rubber_banding_active'):
return True
else:
return not treeview.is_rubber_banding_active()
if event.button == self.context_menu_mouse_button:
episodes = self.get_selected_episodes()
any_locked = any(e.is_locked for e in episodes)
any_played = any(e.is_played for e in episodes)
one_is_new = any(e.state == gpodder.STATE_NORMAL and not e.is_played for e in episodes)
downloaded = all(e.was_downloaded(and_exists=True) for e in episodes)
downloading = any(self.episode_is_downloading(e) for e in episodes)
menu = gtk.Menu()
(can_play, can_download, can_transfer, can_cancel, can_delete, open_instead_of_play) = self.play_or_download()
if open_instead_of_play:
item = gtk.ImageMenuItem(gtk.STOCK_OPEN)
elif downloaded:
item = gtk.ImageMenuItem(gtk.STOCK_MEDIA_PLAY)
else:
item = gtk.ImageMenuItem(_('Stream'))
item.set_image(gtk.image_new_from_stock(gtk.STOCK_MEDIA_PLAY, gtk.ICON_SIZE_MENU))
item.set_sensitive(can_play and not downloading)
item.connect('activate', self.on_playback_selected_episodes)
menu.append(self.set_finger_friendly(item))
if not can_cancel:
item = gtk.ImageMenuItem(_('Download'))
item.set_image(gtk.image_new_from_stock(gtk.STOCK_GO_DOWN, gtk.ICON_SIZE_MENU))
item.set_sensitive(can_download)
item.connect('activate', self.on_download_selected_episodes)
menu.append(self.set_finger_friendly(item))
else:
item = gtk.ImageMenuItem(gtk.STOCK_CANCEL)
item.connect('activate', self.on_item_cancel_download_activate)
menu.append(self.set_finger_friendly(item))
item = gtk.ImageMenuItem(gtk.STOCK_DELETE)
item.set_sensitive(can_delete)
item.connect('activate', self.on_btnDownloadedDelete_clicked)
menu.append(self.set_finger_friendly(item))
ICON = lambda x: x
# Ok, this probably makes sense to only display for downloaded files
if downloaded:
menu.append(gtk.SeparatorMenuItem())
share_item = gtk.MenuItem(_('Send to'))
menu.append(share_item)
share_menu = gtk.Menu()
item = gtk.ImageMenuItem(_('Local folder'))
item.set_image(gtk.image_new_from_stock(gtk.STOCK_DIRECTORY, gtk.ICON_SIZE_MENU))
item.connect('activate', lambda w, ee: self.save_episodes_as_file(ee), episodes)
share_menu.append(self.set_finger_friendly(item))
if self.bluetooth_available:
item = gtk.ImageMenuItem(_('Bluetooth device'))
item.set_image(gtk.image_new_from_icon_name(ICON('bluetooth'), gtk.ICON_SIZE_MENU))
item.connect('activate', lambda w, ee: self.copy_episodes_bluetooth(ee), episodes)
share_menu.append(self.set_finger_friendly(item))
if can_transfer:
item = gtk.ImageMenuItem(self.get_device_name())
item.set_image(gtk.image_new_from_icon_name(ICON('multimedia-player'), gtk.ICON_SIZE_MENU))
item.connect('activate', lambda w, ee: self.on_sync_to_ipod_activate(w, ee), episodes)
share_menu.append(self.set_finger_friendly(item))
share_item.set_submenu(share_menu)
if (downloaded or one_is_new or can_download) and not downloading:
menu.append(gtk.SeparatorMenuItem())
if one_is_new:
item = gtk.CheckMenuItem(_('New'))
item.set_active(True)
item.connect('activate', lambda w: self.mark_selected_episodes_old())
menu.append(self.set_finger_friendly(item))
elif can_download:
item = gtk.CheckMenuItem(_('New'))
item.set_active(False)
item.connect('activate', lambda w: self.mark_selected_episodes_new())
menu.append(self.set_finger_friendly(item))
if downloaded:
item = gtk.CheckMenuItem(_('Played'))
item.set_active(any_played)
item.connect( 'activate', lambda w: self.on_item_toggle_played_activate( w, False, not any_played))
menu.append(self.set_finger_friendly(item))
item = gtk.CheckMenuItem(_('Keep episode'))
item.set_active(any_locked)
item.connect('activate', lambda w: self.on_item_toggle_lock_activate( w, False, not any_locked))
menu.append(self.set_finger_friendly(item))
menu.append(gtk.SeparatorMenuItem())
# Single item, add episode information menu item
item = gtk.ImageMenuItem(_('Episode details'))
item.set_image(gtk.image_new_from_stock( gtk.STOCK_INFO, gtk.ICON_SIZE_MENU))
item.connect('activate', lambda w: self.show_episode_shownotes(episodes[0]))
menu.append(self.set_finger_friendly(item))
if gpodder.ui.maemo:
# Because we open the popup on left-click for Maemo,
# we also include a non-action to close the menu
menu.append(gtk.SeparatorMenuItem())
item = gtk.ImageMenuItem(_('Close this menu'))
item.set_image(gtk.image_new_from_stock(gtk.STOCK_CLOSE, gtk.ICON_SIZE_MENU))
menu.append(self.set_finger_friendly(item))
menu.show_all()
# Disable tooltips while we are showing the menu, so
# the tooltip will not appear over the menu
self.treeview_allow_tooltips(self.treeAvailable, False)
menu.connect('deactivate', lambda menushell: self.treeview_allow_tooltips(self.treeAvailable, True))
menu.popup( None, None, None, event.button, event.time)
return True
def set_title(self, new_title):
if not gpodder.ui.fremantle:
self.default_title = new_title
self.gPodder.set_title(new_title)
def update_episode_list_icons(self, urls=None, selected=False, all=False):
"""
Updates the status icons in the episode list.
If urls is given, it should be a list of URLs
of episodes that should be updated.
If urls is None, set ONE OF selected, all to
True (the former updates just the selected
episodes and the latter updates all episodes).
"""
additional_args = (self.episode_is_downloading, \
self.config.episode_list_descriptions and gpodder.ui.desktop, \
self.config.episode_list_thumbnails and gpodder.ui.desktop)
if urls is not None:
# We have a list of URLs to walk through
self.episode_list_model.update_by_urls(urls, *additional_args)
elif selected and not all:
# We should update all selected episodes
selection = self.treeAvailable.get_selection()
model, paths = selection.get_selected_rows()
for path in reversed(paths):
iter = model.get_iter(path)
self.episode_list_model.update_by_filter_iter(iter, \
*additional_args)
elif all and not selected:
# We update all (even the filter-hidden) episodes
self.episode_list_model.update_all(*additional_args)
else:
# Wrong/invalid call - have to specify at least one parameter
raise ValueError('Invalid call to update_episode_list_icons')
def episode_list_status_changed(self, episodes):
self.update_episode_list_icons(set(e.url for e in episodes))
self.update_podcast_list_model(set(e.channel.url for e in episodes))
self.db.commit()
def clean_up_downloads(self, delete_partial=False):
# Clean up temporary files left behind by old gPodder versions
temporary_files = glob.glob('%s/*/.tmp-*' % self.config.download_dir)
if delete_partial:
temporary_files += glob.glob('%s/*/*.partial' % self.config.download_dir)
        for temporary_file in temporary_files:
            util.delete_file(temporary_file)
# Clean up empty download folders and abandoned download folders
download_dirs = glob.glob(os.path.join(self.config.download_dir, '*'))
for ddir in download_dirs:
if os.path.isdir(ddir) and False: # FIXME not db.channel_foldername_exists(os.path.basename(ddir)):
globr = glob.glob(os.path.join(ddir, '*'))
if len(globr) == 0 or (len(globr) == 1 and globr[0].endswith('/cover')):
log('Stale download directory found: %s', os.path.basename(ddir), sender=self)
shutil.rmtree(ddir, ignore_errors=True)
def streaming_possible(self):
if gpodder.ui.desktop:
# User has to have a media player set on the Desktop, or else we
# would probably open the browser when giving a URL to xdg-open..
return (self.config.player and self.config.player != 'default')
elif gpodder.ui.maemo:
# On Maemo, the default is to use the Nokia Media Player, which is
# already able to deal with HTTP URLs the right way, so we
# unconditionally enable streaming always on Maemo
return True
return False
def playback_episodes_for_real(self, episodes):
groups = collections.defaultdict(list)
for episode in episodes:
file_type = episode.file_type()
if file_type == 'video' and self.config.videoplayer and \
self.config.videoplayer != 'default':
player = self.config.videoplayer
if gpodder.ui.diablo:
# Use the wrapper script if it's installed to crop 3GP YouTube
# videos to fit the screen (looks much nicer than w/ black border)
if player == 'mplayer' and util.find_command('gpodder-mplayer'):
player = 'gpodder-mplayer'
elif gpodder.ui.fremantle and player == 'mplayer':
player = 'mplayer -fs %F'
elif file_type == 'audio' and self.config.player and \
self.config.player != 'default':
player = self.config.player
else:
player = 'default'
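            # If we will not get a D-Bus 'played' notification for this file type, mark the episode as played now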
if file_type not in ('audio', 'video') or \
(file_type == 'audio' and not self.config.audio_played_dbus) or \
(file_type == 'video' and not self.config.video_played_dbus):
# Mark episode as played in the database
episode.mark(is_played=True)
self.mygpo_client.on_playback([episode])
filename = episode.local_filename(create=False)
if filename is None or not os.path.exists(filename):
filename = episode.url
if youtube.is_video_link(filename):
fmt_id = self.config.youtube_preferred_fmt_id
if gpodder.ui.fremantle:
fmt_id = 5
filename = youtube.get_real_download_url(filename, fmt_id)
groups[player].append(filename)
# Open episodes with system default player
if 'default' in groups:
for filename in groups['default']:
log('Opening with system default: %s', filename, sender=self)
util.gui_open(filename)
del groups['default']
elif gpodder.ui.maemo:
# When on Maemo and not opening with default, show a notification
# (no startup notification for Panucci / MPlayer yet...)
if len(episodes) == 1:
text = _('Opening %s') % episodes[0].title
else:
count = len(episodes)
text = N_('Opening %d episode', 'Opening %d episodes', count) % count
banner = hildon.hildon_banner_show_animation(self.gPodder, '', text)
def destroy_banner_later(banner):
banner.destroy()
return False
gobject.timeout_add(5000, destroy_banner_later, banner)
# For each type now, go and create play commands
for group in groups:
for command in util.format_desktop_command(group, groups[group]):
log('Executing: %s', repr(command), sender=self)
subprocess.Popen(command)
# Persist episode status changes to the database
self.db.commit()
# Flush updated episode status
self.mygpo_client.flush()
def playback_episodes(self, episodes):
# We need to create a list, because we run through it more than once
episodes = list(PodcastEpisode.sort_by_pubdate(e for e in episodes if \
e.was_downloaded(and_exists=True) or self.streaming_possible()))
try:
self.playback_episodes_for_real(episodes)
except Exception, e:
log('Error in playback!', sender=self, traceback=True)
if gpodder.ui.desktop:
self.show_message(_('Please check your media player settings in the preferences dialog.'), \
_('Error opening player'), widget=self.toolPreferences)
else:
self.show_message(_('Please check your media player settings in the preferences dialog.'))
channel_urls = set()
episode_urls = set()
for episode in episodes:
channel_urls.add(episode.channel.url)
episode_urls.add(episode.url)
self.update_episode_list_icons(episode_urls)
self.update_podcast_list_model(channel_urls)
def play_or_download(self):
if not gpodder.ui.fremantle:
if self.wNotebook.get_current_page() > 0:
if gpodder.ui.desktop:
self.toolCancel.set_sensitive(True)
return
if self.currently_updating:
return (False, False, False, False, False, False)
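        # Determine which actions apply to the current selection; these flags
        # drive the sensitivity of the toolbar/menu items below and are also
        # returned to callers as a 6-tuple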
( can_play, can_download, can_transfer, can_cancel, can_delete ) = (False,)*5
( is_played, is_locked ) = (False,)*2
open_instead_of_play = False
selection = self.treeAvailable.get_selection()
if selection.count_selected_rows() > 0:
(model, paths) = selection.get_selected_rows()
for path in paths:
episode = model.get_value(model.get_iter(path), EpisodeListModel.C_EPISODE)
if episode.file_type() not in ('audio', 'video'):
open_instead_of_play = True
if episode.was_downloaded():
can_play = episode.was_downloaded(and_exists=True)
is_played = episode.is_played
is_locked = episode.is_locked
if not can_play:
can_download = True
else:
if self.episode_is_downloading(episode):
can_cancel = True
else:
can_download = True
can_download = can_download and not can_cancel
can_play = self.streaming_possible() or (can_play and not can_cancel and not can_download)
can_transfer = can_play and self.config.device_type != 'none' and not can_cancel and not can_download and not open_instead_of_play
can_delete = not can_cancel
if gpodder.ui.desktop:
if open_instead_of_play:
self.toolPlay.set_stock_id(gtk.STOCK_OPEN)
else:
self.toolPlay.set_stock_id(gtk.STOCK_MEDIA_PLAY)
self.toolPlay.set_sensitive( can_play)
self.toolDownload.set_sensitive( can_download)
self.toolTransfer.set_sensitive( can_transfer)
self.toolCancel.set_sensitive( can_cancel)
if not gpodder.ui.fremantle:
self.item_cancel_download.set_sensitive(can_cancel)
self.itemDownloadSelected.set_sensitive(can_download)
self.itemOpenSelected.set_sensitive(can_play)
self.itemPlaySelected.set_sensitive(can_play)
self.itemDeleteSelected.set_sensitive(can_delete)
self.item_toggle_played.set_sensitive(can_play)
self.item_toggle_lock.set_sensitive(can_play)
self.itemOpenSelected.set_visible(open_instead_of_play)
self.itemPlaySelected.set_visible(not open_instead_of_play)
return (can_play, can_download, can_transfer, can_cancel, can_delete, open_instead_of_play)
def on_cbMaxDownloads_toggled(self, widget, *args):
self.spinMaxDownloads.set_sensitive(self.cbMaxDownloads.get_active())
def on_cbLimitDownloads_toggled(self, widget, *args):
self.spinLimitDownloads.set_sensitive(self.cbLimitDownloads.get_active())
def episode_new_status_changed(self, urls):
self.update_podcast_list_model()
self.update_episode_list_icons(urls)
def update_podcast_list_model(self, urls=None, selected=False, select_url=None):
"""Update the podcast list treeview model
If urls is given, it should list the URLs of each
podcast that has to be updated in the list.
If selected is True, only update the model contents
for the currently-selected podcast - nothing more.
The caller can optionally specify "select_url",
which is the URL of the podcast that is to be
selected in the list after the update is complete.
This only works if the podcast list has to be
        reloaded (i.e. something has been added or removed
        since the last update of the podcast list).
"""
selection = self.treeChannels.get_selection()
model, iter = selection.get_selected()
if self.config.podcast_list_view_all and not self.channel_list_changed:
# Update "all episodes" view in any case (if enabled)
self.podcast_list_model.update_first_row()
if selected:
# very cheap! only update selected channel
if iter is not None:
# If we have selected the "all episodes" view, we have
# to update all channels for selected episodes:
if self.config.podcast_list_view_all and \
self.podcast_list_model.iter_is_first_row(iter):
urls = self.get_podcast_urls_from_selected_episodes()
self.podcast_list_model.update_by_urls(urls)
else:
# Otherwise just update the selected row (a podcast)
self.podcast_list_model.update_by_filter_iter(iter)
elif not self.channel_list_changed:
# we can keep the model, but have to update some
if urls is None:
# still cheaper than reloading the whole list
self.podcast_list_model.update_all()
else:
# ok, we got a bunch of urls to update
self.podcast_list_model.update_by_urls(urls)
else:
if model and iter and select_url is None:
# Get the URL of the currently-selected podcast
select_url = model.get_value(iter, PodcastListModel.C_URL)
# Update the podcast list model with new channels
self.podcast_list_model.set_channels(self.db, self.config, self.channels)
try:
selected_iter = model.get_iter_first()
# Find the previously-selected URL in the new
# model if we have an URL (else select first)
if select_url is not None:
pos = model.get_iter_first()
while pos is not None:
url = model.get_value(pos, PodcastListModel.C_URL)
if url == select_url:
selected_iter = pos
break
pos = model.iter_next(pos)
if not gpodder.ui.fremantle:
if selected_iter is not None:
selection.select_iter(selected_iter)
self.on_treeChannels_cursor_changed(self.treeChannels)
except:
log('Cannot select podcast in list', traceback=True, sender=self)
self.channel_list_changed = False
def episode_is_downloading(self, episode):
"""Returns True if the given episode is being downloaded at the moment"""
if episode is None:
return False
return episode.url in (task.url for task in self.download_tasks_seen if task.status in (task.DOWNLOADING, task.QUEUED, task.PAUSED))
def update_episode_list_model(self):
if self.channels and self.active_channel is not None:
if gpodder.ui.fremantle:
hildon.hildon_gtk_window_set_progress_indicator(self.episodes_window.main_window, True)
self.currently_updating = True
self.episode_list_model.clear()
self.episode_list_model.reset_update_progress()
self.treeAvailable.set_model(self.empty_episode_list_model)
def do_update_episode_list_model():
additional_args = (self.episode_is_downloading, \
self.config.episode_list_descriptions and gpodder.ui.desktop, \
self.config.episode_list_thumbnails and gpodder.ui.desktop, \
self.treeAvailable)
self.episode_list_model.add_from_channel(self.active_channel, *additional_args)
def on_episode_list_model_updated():
if gpodder.ui.fremantle:
hildon.hildon_gtk_window_set_progress_indicator(self.episodes_window.main_window, False)
self.treeAvailable.set_model(self.episode_list_model.get_filtered_model())
self.treeAvailable.columns_autosize()
self.currently_updating = False
self.play_or_download()
util.idle_add(on_episode_list_model_updated)
threading.Thread(target=do_update_episode_list_model).start()
else:
self.episode_list_model.clear()
def offer_new_episodes(self, channels=None):
new_episodes = self.get_new_episodes(channels)
if new_episodes:
self.new_episodes_show(new_episodes)
return True
return False
def add_podcast_list(self, urls, auth_tokens=None):
"""Subscribe to a list of podcast given their URLs
If auth_tokens is given, it should be a dictionary
mapping URLs to (username, password) tuples."""
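        # Illustrative example (the URL is made up):
        #   self.add_podcast_list(['http://example.com/feed.xml'],
        #       auth_tokens={'http://example.com/feed.xml': ('user', 'secret')})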
if auth_tokens is None:
auth_tokens = {}
# Sort and split the URL list into five buckets
queued, failed, existing, worked, authreq = [], [], [], [], []
for input_url in urls:
url = util.normalize_feed_url(input_url)
if url is None:
# Fail this one because the URL is not valid
failed.append(input_url)
elif self.podcast_list_model.get_filter_path_from_url(url) is not None:
# A podcast already exists in the list for this URL
existing.append(url)
else:
# This URL has survived the first round - queue for add
queued.append(url)
if url != input_url and input_url in auth_tokens:
auth_tokens[url] = auth_tokens[input_url]
error_messages = {}
redirections = {}
progress = ProgressIndicator(_('Adding podcasts'), \
_('Please wait while episode information is downloaded.'), \
parent=self.main_window)
def on_after_update():
progress.on_finished()
# Report already-existing subscriptions to the user
if existing:
title = _('Existing subscriptions skipped')
message = _('You are already subscribed to these podcasts:') \
+ '\n\n' + '\n'.join(saxutils.escape(url) for url in existing)
self.show_message(message, title, widget=self.treeChannels)
# Report subscriptions that require authentication
if authreq:
retry_podcasts = {}
for url in authreq:
title = _('Podcast requires authentication')
message = _('Please login to %s:') % (saxutils.escape(url),)
success, auth_tokens = self.show_login_dialog(title, message)
if success:
retry_podcasts[url] = auth_tokens
else:
# Stop asking the user for more login data
retry_podcasts = {}
for url in authreq:
error_messages[url] = _('Authentication failed')
failed.append(url)
break
# If we have authentication data to retry, do so here
if retry_podcasts:
self.add_podcast_list(retry_podcasts.keys(), retry_podcasts)
# Report website redirections
for url in redirections:
title = _('Website redirection detected')
message = _('The URL %(url)s redirects to %(target)s.') \
+ '\n\n' + _('Do you want to visit the website now?')
message = message % {'url': url, 'target': redirections[url]}
if self.show_confirmation(message, title):
util.open_website(url)
else:
break
# Report failed subscriptions to the user
if failed:
title = _('Could not add some podcasts')
message = _('Some podcasts could not be added to your list:') \
+ '\n\n' + '\n'.join(saxutils.escape('%s: %s' % (url, \
error_messages.get(url, _('Unknown')))) for url in failed)
self.show_message(message, title, important=True)
# Upload subscription changes to gpodder.net
self.mygpo_client.on_subscribe(worked)
# If at least one podcast has been added, save and update all
if self.channel_list_changed:
# Fix URLs if mygpo has rewritten them
self.rewrite_urls_mygpo()
self.save_channels_opml()
# If only one podcast was added, select it after the update
if len(worked) == 1:
url = worked[0]
else:
url = None
# Update the list of subscribed podcasts
self.update_feed_cache(force_update=False, select_url_afterwards=url)
self.update_podcasts_tab()
# Offer to download new episodes
self.offer_new_episodes(channels=[c for c in self.channels if c.url in worked])
def thread_proc():
# After the initial sorting and splitting, try all queued podcasts
length = len(queued)
for index, url in enumerate(queued):
progress.on_progress(float(index)/float(length))
progress.on_message(url)
log('QUEUE RUNNER: %s', url, sender=self)
try:
# The URL is valid and does not exist already - subscribe!
channel = PodcastChannel.load(self.db, url=url, create=True, \
authentication_tokens=auth_tokens.get(url, None), \
max_episodes=self.config.max_episodes_per_feed, \
download_dir=self.config.download_dir, \
allow_empty_feeds=self.config.allow_empty_feeds)
try:
username, password = util.username_password_from_url(url)
except ValueError, ve:
username, password = (None, None)
if username is not None and channel.username is None and \
password is not None and channel.password is None:
channel.username = username
channel.password = password
channel.save()
self._update_cover(channel)
except feedcore.AuthenticationRequired:
if url in auth_tokens:
# Fail for wrong authentication data
error_messages[url] = _('Authentication failed')
failed.append(url)
else:
# Queue for login dialog later
authreq.append(url)
continue
except feedcore.WifiLogin, error:
redirections[url] = error.data
failed.append(url)
error_messages[url] = _('Redirection detected')
continue
except Exception, e:
log('Subscription error: %s', e, traceback=True, sender=self)
error_messages[url] = str(e)
failed.append(url)
continue
assert channel is not None
worked.append(channel.url)
self.channels.append(channel)
self.channel_list_changed = True
util.idle_add(on_after_update)
threading.Thread(target=thread_proc).start()
def save_channels_opml(self):
exporter = opml.Exporter(gpodder.subscription_file)
return exporter.write(self.channels)
def update_feed_cache_finish_callback(self, updated_urls=None, select_url_afterwards=None):
self.db.commit()
self.updating_feed_cache = False
self.channels = PodcastChannel.load_from_db(self.db, self.config.download_dir)
self.channel_list_changed = True
self.update_podcast_list_model(select_url=select_url_afterwards)
# Only search for new episodes in podcasts that have been
# updated, not in other podcasts (for single-feed updates)
episodes = self.get_new_episodes([c for c in self.channels if c.url in updated_urls])
if gpodder.ui.fremantle:
self.button_subscribe.set_sensitive(True)
self.button_refresh.set_image(gtk.image_new_from_icon_name(\
self.ICON_GENERAL_REFRESH, gtk.ICON_SIZE_BUTTON))
hildon.hildon_gtk_window_set_progress_indicator(self.main_window, False)
self.update_podcasts_tab()
if self.feed_cache_update_cancelled:
return
if episodes:
if self.config.auto_download == 'quiet' and not self.config.auto_update_feeds:
# New episodes found, but we should do nothing
self.show_message(_('New episodes are available.'))
elif self.config.auto_download == 'always':
count = len(episodes)
title = N_('Downloading %d new episode.', 'Downloading %d new episodes.', count) % count
self.show_message(title)
self.download_episode_list(episodes)
elif self.config.auto_download == 'queue':
self.show_message(_('New episodes have been added to the download list.'))
self.download_episode_list_paused(episodes)
else:
self.new_episodes_show(episodes)
elif not self.config.auto_update_feeds:
self.show_message(_('No new episodes. Please check for new episodes later.'))
return
if self.tray_icon:
self.tray_icon.set_status()
if self.feed_cache_update_cancelled:
# The user decided to abort the feed update
self.show_update_feeds_buttons()
elif not episodes:
# Nothing new here - but inform the user
self.pbFeedUpdate.set_fraction(1.0)
self.pbFeedUpdate.set_text(_('No new episodes'))
self.feed_cache_update_cancelled = True
self.btnCancelFeedUpdate.show()
self.btnCancelFeedUpdate.set_sensitive(True)
if gpodder.ui.maemo:
# btnCancelFeedUpdate is a ToolButton on Maemo
self.btnCancelFeedUpdate.set_stock_id(gtk.STOCK_APPLY)
else:
# btnCancelFeedUpdate is a normal gtk.Button
self.btnCancelFeedUpdate.set_image(gtk.image_new_from_stock(gtk.STOCK_APPLY, gtk.ICON_SIZE_BUTTON))
else:
count = len(episodes)
# New episodes are available
self.pbFeedUpdate.set_fraction(1.0)
# Are we minimized and should we auto download?
if (self.is_iconified() and (self.config.auto_download == 'minimized')) or (self.config.auto_download == 'always'):
self.download_episode_list(episodes)
title = N_('Downloading %d new episode.', 'Downloading %d new episodes.', count) % count
self.show_message(title, _('New episodes available'), widget=self.labelDownloads)
self.show_update_feeds_buttons()
elif self.config.auto_download == 'queue':
self.download_episode_list_paused(episodes)
title = N_('%d new episode added to download list.', '%d new episodes added to download list.', count) % count
self.show_message(title, _('New episodes available'), widget=self.labelDownloads)
self.show_update_feeds_buttons()
else:
self.show_update_feeds_buttons()
# New episodes are available and we are not minimized
if not self.config.do_not_show_new_episodes_dialog:
self.new_episodes_show(episodes, notification=True)
else:
message = N_('%d new episode available', '%d new episodes available', count) % count
self.pbFeedUpdate.set_text(message)
def _update_cover(self, channel):
if channel is not None and not os.path.exists(channel.cover_file) and channel.image:
self.cover_downloader.request_cover(channel)
def update_feed_cache_proc(self, channels, select_url_afterwards):
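        # Runs in a background thread (started from update_feed_cache); all UI
        # updates are marshalled back to the GTK main loop via util.idle_add()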
total = len(channels)
for updated, channel in enumerate(channels):
if not self.feed_cache_update_cancelled:
try:
# Update if timeout is not reached or we update a single podcast or skipping is disabled
if channel.query_automatic_update() or total == 1 or not self.config.feed_update_skipping:
channel.update(max_episodes=self.config.max_episodes_per_feed)
else:
log('Skipping update of %s (see feed_update_skipping)', channel.title, sender=self)
self._update_cover(channel)
except Exception, e:
d = {'url': saxutils.escape(channel.url), 'message': saxutils.escape(str(e))}
if d['message']:
message = _('Error while updating %(url)s: %(message)s')
else:
message = _('The feed at %(url)s could not be updated.')
self.notification(message % d, _('Error while updating feed'), widget=self.treeChannels)
log('Error: %s', str(e), sender=self, traceback=True)
if self.feed_cache_update_cancelled:
break
if gpodder.ui.fremantle:
util.idle_add(self.button_refresh.set_title, \
_('%(position)d/%(total)d updated') % {'position': updated, 'total': total})
continue
# By the time we get here the update may have already been cancelled
if not self.feed_cache_update_cancelled:
def update_progress():
d = {'podcast': channel.title, 'position': updated, 'total': total}
progression = _('Updated %(podcast)s (%(position)d/%(total)d)') % d
self.pbFeedUpdate.set_text(progression)
if self.tray_icon:
self.tray_icon.set_status(self.tray_icon.STATUS_UPDATING_FEED_CACHE, progression)
self.pbFeedUpdate.set_fraction(float(updated)/float(total))
util.idle_add(update_progress)
updated_urls = [c.url for c in channels]
util.idle_add(self.update_feed_cache_finish_callback, updated_urls, select_url_afterwards)
def show_update_feeds_buttons(self):
# Make sure that the buttons for updating feeds
# appear - this should happen after a feed update
if gpodder.ui.maemo:
self.btnUpdateSelectedFeed.show()
self.toolFeedUpdateProgress.hide()
self.btnCancelFeedUpdate.hide()
self.btnCancelFeedUpdate.set_is_important(False)
self.btnCancelFeedUpdate.set_stock_id(gtk.STOCK_CLOSE)
self.toolbarSpacer.set_expand(True)
self.toolbarSpacer.set_draw(False)
else:
self.hboxUpdateFeeds.hide()
self.btnUpdateFeeds.show()
self.itemUpdate.set_sensitive(True)
self.itemUpdateChannel.set_sensitive(True)
def on_btnCancelFeedUpdate_clicked(self, widget):
if not self.feed_cache_update_cancelled:
self.pbFeedUpdate.set_text(_('Cancelling...'))
self.feed_cache_update_cancelled = True
self.btnCancelFeedUpdate.set_sensitive(False)
else:
self.show_update_feeds_buttons()
def update_feed_cache(self, channels=None, force_update=True, select_url_afterwards=None):
if self.updating_feed_cache:
if gpodder.ui.fremantle:
self.feed_cache_update_cancelled = True
return
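        # Without force_update, just reload the channel list from the database
        # and refresh the podcast list model (no network access happens here)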
if not force_update:
self.channels = PodcastChannel.load_from_db(self.db, self.config.download_dir)
self.channel_list_changed = True
self.update_podcast_list_model(select_url=select_url_afterwards)
return
# Fix URLs if mygpo has rewritten them
self.rewrite_urls_mygpo()
self.updating_feed_cache = True
if channels is None:
channels = self.channels
if gpodder.ui.fremantle:
hildon.hildon_gtk_window_set_progress_indicator(self.main_window, True)
self.button_refresh.set_title(_('Updating...'))
self.button_subscribe.set_sensitive(False)
self.button_refresh.set_image(gtk.image_new_from_icon_name(\
self.ICON_GENERAL_CLOSE, gtk.ICON_SIZE_BUTTON))
self.feed_cache_update_cancelled = False
else:
self.itemUpdate.set_sensitive(False)
self.itemUpdateChannel.set_sensitive(False)
if self.tray_icon:
self.tray_icon.set_status(self.tray_icon.STATUS_UPDATING_FEED_CACHE)
if len(channels) == 1:
text = _('Updating "%s"...') % channels[0].title
else:
count = len(channels)
text = N_('Updating %d feed...', 'Updating %d feeds...', count) % count
self.pbFeedUpdate.set_text(text)
self.pbFeedUpdate.set_fraction(0)
self.feed_cache_update_cancelled = False
self.btnCancelFeedUpdate.show()
self.btnCancelFeedUpdate.set_sensitive(True)
if gpodder.ui.maemo:
self.toolbarSpacer.set_expand(False)
self.toolbarSpacer.set_draw(True)
self.btnUpdateSelectedFeed.hide()
self.toolFeedUpdateProgress.show_all()
else:
self.btnCancelFeedUpdate.set_image(gtk.image_new_from_stock(gtk.STOCK_STOP, gtk.ICON_SIZE_BUTTON))
self.hboxUpdateFeeds.show_all()
self.btnUpdateFeeds.hide()
args = (channels, select_url_afterwards)
threading.Thread(target=self.update_feed_cache_proc, args=args).start()
def on_gPodder_delete_event(self, widget, *args):
"""Called when the GUI wants to close the window
Displays a confirmation dialog (and closes/hides gPodder)
"""
downloading = self.download_status_model.are_downloads_in_progress()
# Only iconify if we are using the window's "X" button,
# but not when we are using "Quit" in the menu or toolbar
if self.config.on_quit_systray and self.tray_icon and widget.get_name() not in ('toolQuit', 'itemQuit'):
self.iconify_main_window()
elif self.config.on_quit_ask or downloading:
if gpodder.ui.fremantle:
self.close_gpodder()
elif gpodder.ui.diablo:
result = self.show_confirmation(_('Do you really want to quit gPodder now?'))
if result:
self.close_gpodder()
else:
return True
dialog = gtk.MessageDialog(self.gPodder, gtk.DIALOG_MODAL, gtk.MESSAGE_QUESTION, gtk.BUTTONS_NONE)
dialog.add_button(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
quit_button = dialog.add_button(gtk.STOCK_QUIT, gtk.RESPONSE_CLOSE)
title = _('Quit gPodder')
if downloading:
message = _('You are downloading episodes. You can resume downloads the next time you start gPodder. Do you want to quit now?')
else:
message = _('Do you really want to quit gPodder now?')
dialog.set_title(title)
dialog.set_markup('<span weight="bold" size="larger">%s</span>\n\n%s'%(title, message))
if not downloading:
cb_ask = gtk.CheckButton(_("Don't ask me again"))
dialog.vbox.pack_start(cb_ask)
cb_ask.show_all()
quit_button.grab_focus()
result = dialog.run()
dialog.destroy()
if result == gtk.RESPONSE_CLOSE:
if not downloading and cb_ask.get_active() == True:
self.config.on_quit_ask = False
self.close_gpodder()
else:
self.close_gpodder()
return True
def close_gpodder(self):
""" clean everything and exit properly
"""
if self.channels:
if self.save_channels_opml():
pass # FIXME: Add mygpo synchronization here
else:
self.show_message(_('Please check your permissions and free disk space.'), _('Error saving podcast list'), important=True)
self.gPodder.hide()
if self.tray_icon is not None:
self.tray_icon.set_visible(False)
        # Notify all tasks to carry out any clean-up actions
self.download_status_model.tell_all_tasks_to_quit()
while gtk.events_pending():
gtk.main_iteration(False)
self.db.close()
self.quit()
sys.exit(0)
def get_expired_episodes(self):
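        # Generator yielding downloaded episodes that are old enough to be
        # cleaned up, honouring the lock flag and the auto-remove settings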
for channel in self.channels:
for episode in channel.get_downloaded_episodes():
# Never consider locked episodes as old
if episode.is_locked:
continue
# Never consider fresh episodes as old
if episode.age_in_days() < self.config.episode_old_age:
continue
# Do not delete played episodes (except if configured)
if episode.is_played:
if not self.config.auto_remove_played_episodes:
continue
# Do not delete unplayed episodes (except if configured)
if not episode.is_played:
if not self.config.auto_remove_unplayed_episodes:
continue
yield episode
def delete_episode_list(self, episodes, confirm=True, skip_locked=True):
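        # Returns True when the background deletion has been started, False if
        # nothing is deleted (empty selection, locked episodes or cancelled dialog)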
if not episodes:
return False
if skip_locked:
episodes = [e for e in episodes if not e.is_locked]
if not episodes:
title = _('Episodes are locked')
message = _('The selected episodes are locked. Please unlock the episodes that you want to delete before trying to delete them.')
self.notification(message, title, widget=self.treeAvailable)
return False
count = len(episodes)
title = N_('Delete %d episode?', 'Delete %d episodes?', count) % count
message = _('Deleting episodes removes downloaded files.')
if gpodder.ui.fremantle:
message = '\n'.join([title, message])
if confirm and not self.show_confirmation(message, title):
return False
progress = ProgressIndicator(_('Deleting episodes'), \
_('Please wait while episodes are deleted'), \
parent=self.main_window)
def finish_deletion(episode_urls, channel_urls):
progress.on_finished()
# Episodes have been deleted - persist the database
self.db.commit()
self.update_episode_list_icons(episode_urls)
self.update_podcast_list_model(channel_urls)
self.play_or_download()
def thread_proc():
episode_urls = set()
channel_urls = set()
episodes_status_update = []
for idx, episode in enumerate(episodes):
progress.on_progress(float(idx)/float(len(episodes)))
if episode.is_locked:
log('Not deleting episode (is locked): %s', episode.title)
else:
log('Deleting episode: %s', episode.title)
progress.on_message(episode.title)
episode.delete_from_disk()
episode_urls.add(episode.url)
channel_urls.add(episode.channel.url)
episodes_status_update.append(episode)
# Tell the shownotes window that we have removed the episode
if self.episode_shownotes_window is not None and \
self.episode_shownotes_window.episode is not None and \
self.episode_shownotes_window.episode.url == episode.url:
util.idle_add(self.episode_shownotes_window._download_status_changed, None)
# Notify the web service about the status update + upload
self.mygpo_client.on_delete(episodes_status_update)
self.mygpo_client.flush()
util.idle_add(finish_deletion, episode_urls, channel_urls)
threading.Thread(target=thread_proc).start()
return True
def on_itemRemoveOldEpisodes_activate( self, widget):
if gpodder.ui.maemo:
columns = (
('maemo_remove_markup', None, None, _('Episode')),
)
else:
columns = (
('title_markup', None, None, _('Episode')),
('filesize_prop', 'length', gobject.TYPE_INT, _('Size')),
('pubdate_prop', 'pubDate', gobject.TYPE_INT, _('Released')),
('played_prop', None, None, _('Status')),
('age_prop', None, None, _('Downloaded')),
)
msg_older_than = N_('Select older than %d day', 'Select older than %d days', self.config.episode_old_age)
selection_buttons = {
_('Select played'): lambda episode: episode.is_played,
msg_older_than % self.config.episode_old_age: lambda episode: episode.age_in_days() > self.config.episode_old_age,
}
instructions = _('Select the episodes you want to delete:')
episodes = []
selected = []
for channel in self.channels:
for episode in channel.get_downloaded_episodes():
# Disallow deletion of locked episodes that still exist
if not episode.is_locked or not episode.file_exists():
episodes.append(episode)
# Automatically select played and file-less episodes
selected.append(episode.is_played or \
not episode.file_exists())
gPodderEpisodeSelector(self.gPodder, title = _('Delete episodes'), instructions = instructions, \
episodes = episodes, selected = selected, columns = columns, \
stock_ok_button = gtk.STOCK_DELETE, callback = self.delete_episode_list, \
selection_buttons = selection_buttons, _config=self.config, \
show_episode_shownotes=self.show_episode_shownotes)
def on_selected_episodes_status_changed(self):
self.update_episode_list_icons(selected=True)
self.update_podcast_list_model(selected=True)
self.db.commit()
def mark_selected_episodes_new(self):
for episode in self.get_selected_episodes():
episode.mark_new()
self.on_selected_episodes_status_changed()
def mark_selected_episodes_old(self):
for episode in self.get_selected_episodes():
episode.mark_old()
self.on_selected_episodes_status_changed()
def on_item_toggle_played_activate( self, widget, toggle = True, new_value = False):
for episode in self.get_selected_episodes():
if toggle:
episode.mark(is_played=not episode.is_played)
else:
episode.mark(is_played=new_value)
self.on_selected_episodes_status_changed()
def on_item_toggle_lock_activate(self, widget, toggle=True, new_value=False):
for episode in self.get_selected_episodes():
if toggle:
episode.mark(is_locked=not episode.is_locked)
else:
episode.mark(is_locked=new_value)
self.on_selected_episodes_status_changed()
def on_channel_toggle_lock_activate(self, widget, toggle=True, new_value=False):
if self.active_channel is None:
return
self.active_channel.channel_is_locked = not self.active_channel.channel_is_locked
self.active_channel.update_channel_lock()
for episode in self.active_channel.get_all_episodes():
episode.mark(is_locked=self.active_channel.channel_is_locked)
self.update_podcast_list_model(selected=True)
self.update_episode_list_icons(all=True)
def on_itemUpdateChannel_activate(self, widget=None):
if self.active_channel is None:
title = _('No podcast selected')
message = _('Please select a podcast in the podcasts list to update.')
self.show_message( message, title, widget=self.treeChannels)
return
self.update_feed_cache(channels=[self.active_channel])
def on_itemUpdate_activate(self, widget=None):
# Check if we have outstanding subscribe/unsubscribe actions
if self.on_add_remove_podcasts_mygpo():
log('Update cancelled (received server changes)', sender=self)
return
if self.channels:
self.update_feed_cache()
else:
gPodderWelcome(self.gPodder,
center_on_widget=self.gPodder,
show_example_podcasts_callback=self.on_itemImportChannels_activate,
setup_my_gpodder_callback=self.on_mygpo_settings_activate)
def download_episode_list_paused(self, episodes):
self.download_episode_list(episodes, True)
def download_episode_list(self, episodes, add_paused=False, force_start=False):
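        # add_paused=True creates new tasks in the PAUSED state instead of
        # queueing them right away; force_start is forwarded to the queue manager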
for episode in episodes:
log('Downloading episode: %s', episode.title, sender = self)
if not episode.was_downloaded(and_exists=True):
task_exists = False
for task in self.download_tasks_seen:
if episode.url == task.url and task.status not in (task.DOWNLOADING, task.QUEUED):
self.download_queue_manager.add_task(task, force_start)
self.enable_download_list_update()
task_exists = True
continue
if task_exists:
continue
try:
task = download.DownloadTask(episode, self.config)
except Exception, e:
d = {'episode': episode.title, 'message': str(e)}
message = _('Download error while downloading %(episode)s: %(message)s')
self.show_message(message % d, _('Download error'), important=True)
log('Download error while downloading %s', episode.title, sender=self, traceback=True)
continue
if add_paused:
task.status = task.PAUSED
else:
self.mygpo_client.on_download([task.episode])
self.download_queue_manager.add_task(task, force_start)
self.download_status_model.register_task(task)
self.enable_download_list_update()
# Flush updated episode status
self.mygpo_client.flush()
def cancel_task_list(self, tasks):
if not tasks:
return
for task in tasks:
if task.status in (task.QUEUED, task.DOWNLOADING):
task.status = task.CANCELLED
elif task.status == task.PAUSED:
task.status = task.CANCELLED
# Call run, so the partial file gets deleted
task.run()
self.update_episode_list_icons([task.url for task in tasks])
self.play_or_download()
# Update the tab title and downloads list
self.update_downloads_list()
def new_episodes_show(self, episodes, notification=False):
if gpodder.ui.maemo:
columns = (
('maemo_markup', None, None, _('Episode')),
)
show_notification = notification
else:
columns = (
('title_markup', None, None, _('Episode')),
('filesize_prop', 'length', gobject.TYPE_INT, _('Size')),
('pubdate_prop', 'pubDate', gobject.TYPE_INT, _('Released')),
)
show_notification = False
instructions = _('Select the episodes you want to download:')
if self.new_episodes_window is not None:
self.new_episodes_window.main_window.destroy()
self.new_episodes_window = None
def download_episodes_callback(episodes):
self.new_episodes_window = None
self.download_episode_list(episodes)
self.new_episodes_window = gPodderEpisodeSelector(self.gPodder, \
title=_('New episodes available'), \
instructions=instructions, \
episodes=episodes, \
columns=columns, \
selected_default=True, \
stock_ok_button = 'gpodder-download', \
callback=download_episodes_callback, \
remove_callback=lambda e: e.mark_old(), \
remove_action=_('Mark as old'), \
remove_finished=self.episode_new_status_changed, \
_config=self.config, \
show_notification=show_notification, \
show_episode_shownotes=self.show_episode_shownotes)
def on_itemDownloadAllNew_activate(self, widget, *args):
if not self.offer_new_episodes():
self.show_message(_('Please check for new episodes later.'), \
_('No new episodes available'), widget=self.btnUpdateFeeds)
def get_new_episodes(self, channels=None):
if channels is None:
channels = self.channels
episodes = []
for channel in channels:
for episode in channel.get_new_episodes(downloading=self.episode_is_downloading):
episodes.append(episode)
return episodes
def on_sync_to_ipod_activate(self, widget, episodes=None):
self.sync_ui.on_synchronize_episodes(self.channels, episodes)
def commit_changes_to_database(self):
"""This will be called after the sync process is finished"""
self.db.commit()
def on_cleanup_ipod_activate(self, widget, *args):
self.sync_ui.on_cleanup_device()
def on_manage_device_playlist(self, widget):
self.sync_ui.on_manage_device_playlist()
def show_hide_tray_icon(self):
if self.config.display_tray_icon and have_trayicon and self.tray_icon is None:
self.tray_icon = GPodderStatusIcon(self, gpodder.icon_file, self.config)
elif not self.config.display_tray_icon and self.tray_icon is not None:
self.tray_icon.set_visible(False)
del self.tray_icon
self.tray_icon = None
if self.config.minimize_to_tray and self.tray_icon:
self.tray_icon.set_visible(self.is_iconified())
elif self.tray_icon:
self.tray_icon.set_visible(True)
def on_itemShowAllEpisodes_activate(self, widget):
self.config.podcast_list_view_all = widget.get_active()
def on_itemShowToolbar_activate(self, widget):
self.config.show_toolbar = self.itemShowToolbar.get_active()
def on_itemShowDescription_activate(self, widget):
self.config.episode_list_descriptions = self.itemShowDescription.get_active()
def on_item_view_hide_boring_podcasts_toggled(self, toggleaction):
self.config.podcast_list_hide_boring = toggleaction.get_active()
if self.config.podcast_list_hide_boring:
self.podcast_list_model.set_view_mode(self.config.episode_list_view_mode)
else:
self.podcast_list_model.set_view_mode(-1)
def on_item_view_podcasts_changed(self, radioaction, current):
# Only on Fremantle
if current == self.item_view_podcasts_all:
self.podcast_list_model.set_view_mode(-1)
elif current == self.item_view_podcasts_downloaded:
self.podcast_list_model.set_view_mode(EpisodeListModel.VIEW_DOWNLOADED)
elif current == self.item_view_podcasts_unplayed:
self.podcast_list_model.set_view_mode(EpisodeListModel.VIEW_UNPLAYED)
self.config.podcast_list_view_mode = self.podcast_list_model.get_view_mode()
def on_item_view_episodes_changed(self, radioaction, current):
if current == self.item_view_episodes_all:
self.episode_list_model.set_view_mode(EpisodeListModel.VIEW_ALL)
elif current == self.item_view_episodes_undeleted:
self.episode_list_model.set_view_mode(EpisodeListModel.VIEW_UNDELETED)
elif current == self.item_view_episodes_downloaded:
self.episode_list_model.set_view_mode(EpisodeListModel.VIEW_DOWNLOADED)
elif current == self.item_view_episodes_unplayed:
self.episode_list_model.set_view_mode(EpisodeListModel.VIEW_UNPLAYED)
self.config.episode_list_view_mode = self.episode_list_model.get_view_mode()
if self.config.podcast_list_hide_boring and not gpodder.ui.fremantle:
self.podcast_list_model.set_view_mode(self.config.episode_list_view_mode)
def update_item_device( self):
if not gpodder.ui.fremantle:
if self.config.device_type != 'none':
self.itemDevice.set_visible(True)
self.itemDevice.label = self.get_device_name()
else:
self.itemDevice.set_visible(False)
def properties_closed( self):
self.preferences_dialog = None
self.show_hide_tray_icon()
self.update_item_device()
if gpodder.ui.maemo:
selection = self.treeAvailable.get_selection()
if self.config.maemo_enable_gestures or \
self.config.enable_fingerscroll:
selection.set_mode(gtk.SELECTION_SINGLE)
else:
selection.set_mode(gtk.SELECTION_MULTIPLE)
def on_itemPreferences_activate(self, widget, *args):
self.preferences_dialog = gPodderPreferences(self.main_window, \
_config=self.config, \
callback_finished=self.properties_closed, \
user_apps_reader=self.user_apps_reader, \
mygpo_login=self.on_mygpo_settings_activate, \
parent_window=self.main_window, \
mygpo_client=self.mygpo_client, \
on_send_full_subscriptions=self.on_send_full_subscriptions)
        # Initial message to relayout the window (in case it's opened in portrait mode)
self.preferences_dialog.on_window_orientation_changed(self._last_orientation)
def on_itemDependencies_activate(self, widget):
gPodderDependencyManager(self.gPodder)
def on_goto_mygpo(self, widget):
self.mygpo_client.open_website()
def on_mygpo_settings_activate(self, action=None):
settings = MygPodderSettings(self.main_window, \
config=self.config, \
mygpo_client=self.mygpo_client, \
on_send_full_subscriptions=self.on_send_full_subscriptions)
def on_itemAddChannel_activate(self, widget=None):
gPodderAddPodcast(self.gPodder, \
add_urls_callback=self.add_podcast_list)
def on_itemEditChannel_activate(self, widget, *args):
if self.active_channel is None:
title = _('No podcast selected')
message = _('Please select a podcast in the podcasts list to edit.')
self.show_message( message, title, widget=self.treeChannels)
return
callback_closed = lambda: self.update_podcast_list_model(selected=True)
gPodderChannel(self.main_window, \
channel=self.active_channel, \
callback_closed=callback_closed, \
cover_downloader=self.cover_downloader)
def on_itemMassUnsubscribe_activate(self, item=None):
columns = (
('title', None, None, _('Podcast')),
)
# We're abusing the Episode Selector for selecting Podcasts here,
# but it works and looks good, so why not? -- thp
gPodderEpisodeSelector(self.main_window, \
title=_('Remove podcasts'), \
                instructions=_('Select the podcasts you want to remove.'), \
episodes=self.channels, \
columns=columns, \
size_attribute=None, \
stock_ok_button=gtk.STOCK_DELETE, \
callback=self.remove_podcast_list, \
_config=self.config)
def remove_podcast_list(self, channels, confirm=True):
if not channels:
log('No podcasts selected for deletion', sender=self)
return
if len(channels) == 1:
title = _('Removing podcast')
info = _('Please wait while the podcast is removed')
message = _('Do you really want to remove this podcast and its episodes?')
else:
title = _('Removing podcasts')
info = _('Please wait while the podcasts are removed')
message = _('Do you really want to remove the selected podcasts and their episodes?')
if confirm and not self.show_confirmation(message, title):
return
progress = ProgressIndicator(title, info, parent=self.main_window)
def finish_deletion(select_url):
# Upload subscription list changes to the web service
self.mygpo_client.on_unsubscribe([c.url for c in channels])
# Re-load the channels and select the desired new channel
self.update_feed_cache(force_update=False, select_url_afterwards=select_url)
progress.on_finished()
self.update_podcasts_tab()
def thread_proc():
select_url = None
for idx, channel in enumerate(channels):
# Update the UI for correct status messages
progress.on_progress(float(idx)/float(len(channels)))
progress.on_message(channel.title)
# Delete downloaded episodes
channel.remove_downloaded()
# cancel any active downloads from this channel
for episode in channel.get_all_episodes():
util.idle_add(self.download_status_model.cancel_by_url,
episode.url)
if len(channels) == 1:
# get the URL of the podcast we want to select next
if channel in self.channels:
position = self.channels.index(channel)
else:
position = -1
if position == len(self.channels)-1:
# this is the last podcast, so select the URL
# of the item before this one (i.e. the "new last")
select_url = self.channels[position-1].url
else:
# there is a podcast after the deleted one, so
# we simply select the one that comes after it
select_url = self.channels[position+1].url
# Remove the channel and clean the database entries
channel.delete()
self.channels.remove(channel)
# Clean up downloads and download directories
self.clean_up_downloads()
self.channel_list_changed = True
self.save_channels_opml()
# The remaining stuff is to be done in the GTK main thread
util.idle_add(finish_deletion, select_url)
threading.Thread(target=thread_proc).start()
def on_itemRemoveChannel_activate(self, widget, *args):
if self.active_channel is None:
title = _('No podcast selected')
message = _('Please select a podcast in the podcasts list to remove.')
self.show_message( message, title, widget=self.treeChannels)
return
self.remove_podcast_list([self.active_channel])
def get_opml_filter(self):
filter = gtk.FileFilter()
filter.add_pattern('*.opml')
filter.add_pattern('*.xml')
filter.set_name(_('OPML files')+' (*.opml, *.xml)')
return filter
def on_item_import_from_file_activate(self, widget, filename=None):
if filename is None:
if gpodder.ui.desktop or gpodder.ui.fremantle:
# FIXME: Hildonization on Fremantle
dlg = gtk.FileChooserDialog(title=_('Import from OPML'), parent=None, action=gtk.FILE_CHOOSER_ACTION_OPEN)
dlg.add_button(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
dlg.add_button(gtk.STOCK_OPEN, gtk.RESPONSE_OK)
elif gpodder.ui.diablo:
dlg = hildon.FileChooserDialog(self.gPodder, gtk.FILE_CHOOSER_ACTION_OPEN)
dlg.set_filter(self.get_opml_filter())
response = dlg.run()
filename = None
if response == gtk.RESPONSE_OK:
filename = dlg.get_filename()
dlg.destroy()
if filename is not None:
dir = gPodderPodcastDirectory(self.gPodder, _config=self.config, \
custom_title=_('Import podcasts from OPML file'), \
add_urls_callback=self.add_podcast_list, \
hide_url_entry=True)
dir.download_opml_file(filename)
def on_itemExportChannels_activate(self, widget, *args):
if not self.channels:
title = _('Nothing to export')
message = _('Your list of podcast subscriptions is empty. Please subscribe to some podcasts first before trying to export your subscription list.')
self.show_message(message, title, widget=self.treeChannels)
return
if gpodder.ui.desktop or gpodder.ui.fremantle:
# FIXME: Hildonization on Fremantle
dlg = gtk.FileChooserDialog(title=_('Export to OPML'), parent=self.gPodder, action=gtk.FILE_CHOOSER_ACTION_SAVE)
dlg.add_button(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
dlg.add_button(gtk.STOCK_SAVE, gtk.RESPONSE_OK)
elif gpodder.ui.diablo:
dlg = hildon.FileChooserDialog(self.gPodder, gtk.FILE_CHOOSER_ACTION_SAVE)
dlg.set_filter(self.get_opml_filter())
response = dlg.run()
if response == gtk.RESPONSE_OK:
filename = dlg.get_filename()
dlg.destroy()
exporter = opml.Exporter( filename)
if exporter.write(self.channels):
count = len(self.channels)
title = N_('%d subscription exported', '%d subscriptions exported', count) % count
self.show_message(_('Your podcast list has been successfully exported.'), title, widget=self.treeChannels)
else:
self.show_message( _('Could not export OPML to file. Please check your permissions.'), _('OPML export failed'), important=True)
else:
dlg.destroy()
def on_itemImportChannels_activate(self, widget, *args):
if gpodder.ui.fremantle:
gPodderPodcastDirectory.show_add_podcast_picker(self.main_window, \
self.config.toplist_url, \
self.config.opml_url, \
self.add_podcast_list, \
self.on_itemAddChannel_activate, \
self.on_mygpo_settings_activate, \
self.show_text_edit_dialog)
else:
dir = gPodderPodcastDirectory(self.main_window, _config=self.config, \
add_urls_callback=self.add_podcast_list)
util.idle_add(dir.download_opml_file, self.config.opml_url)
def on_homepage_activate(self, widget, *args):
util.open_website(gpodder.__url__)
def on_wiki_activate(self, widget, *args):
util.open_website('http://gpodder.org/wiki/User_Manual')
def on_bug_tracker_activate(self, widget, *args):
if gpodder.ui.maemo:
util.open_website('http://bugs.maemo.org/enter_bug.cgi?product=gPodder')
else:
util.open_website('https://bugs.gpodder.org/enter_bug.cgi?product=gPodder')
def on_item_support_activate(self, widget):
util.open_website('http://gpodder.org/donate')
def on_itemAbout_activate(self, widget, *args):
if gpodder.ui.fremantle:
from gpodder.gtkui.frmntl.about import HeAboutDialog
HeAboutDialog.present(self.main_window,
'gPodder',
'gpodder',
gpodder.__version__,
_('A podcast client with focus on usability'),
gpodder.__copyright__,
gpodder.__url__,
'http://bugs.maemo.org/enter_bug.cgi?product=gPodder',
'http://gpodder.org/donate')
return
dlg = gtk.AboutDialog()
dlg.set_transient_for(self.main_window)
dlg.set_name('gPodder')
dlg.set_version(gpodder.__version__)
dlg.set_copyright(gpodder.__copyright__)
dlg.set_comments(_('A podcast client with focus on usability'))
dlg.set_website(gpodder.__url__)
dlg.set_translator_credits( _('translator-credits'))
dlg.connect( 'response', lambda dlg, response: dlg.destroy())
if gpodder.ui.desktop:
# For the "GUI" version, we add some more
# items to the about dialog (credits and logo)
app_authors = [
_('Maintainer:'),
'Thomas Perl <thpinfo.com>',
]
if os.path.exists(gpodder.credits_file):
credits = open(gpodder.credits_file).read().strip().split('\n')
app_authors += ['', _('Patches, bug reports and donations by:')]
app_authors += credits
dlg.set_authors(app_authors)
try:
dlg.set_logo(gtk.gdk.pixbuf_new_from_file(gpodder.icon_file))
except:
dlg.set_logo_icon_name('gpodder')
dlg.run()
def on_wNotebook_switch_page(self, widget, *args):
page_num = args[1]
if gpodder.ui.maemo:
self.tool_downloads.set_active(page_num == 1)
page = self.wNotebook.get_nth_page(page_num)
tab_label = self.wNotebook.get_tab_label(page).get_text()
if page_num == 0 and self.active_channel is not None:
self.set_title(self.active_channel.title)
else:
self.set_title(tab_label)
if page_num == 0:
self.play_or_download()
self.menuChannels.set_sensitive(True)
self.menuSubscriptions.set_sensitive(True)
# The message area in the downloads tab should be hidden
# when the user switches away from the downloads tab
if self.message_area is not None:
self.message_area.hide()
self.message_area = None
else:
# Remove finished episodes
if self.config.auto_cleanup_downloads:
self.on_btnCleanUpDownloads_clicked()
self.menuChannels.set_sensitive(False)
self.menuSubscriptions.set_sensitive(False)
if gpodder.ui.desktop:
self.toolDownload.set_sensitive(False)
self.toolPlay.set_sensitive(False)
self.toolTransfer.set_sensitive(False)
self.toolCancel.set_sensitive(False)
def on_treeChannels_row_activated(self, widget, path, *args):
# double-click action of the podcast list or enter
self.treeChannels.set_cursor(path)
def on_treeChannels_cursor_changed(self, widget, *args):
( model, iter ) = self.treeChannels.get_selection().get_selected()
if model is not None and iter is not None:
old_active_channel = self.active_channel
self.active_channel = model.get_value(iter, PodcastListModel.C_CHANNEL)
if self.active_channel == old_active_channel:
return
if gpodder.ui.maemo:
self.set_title(self.active_channel.title)
self.itemEditChannel.set_visible(True)
self.itemRemoveChannel.set_visible(True)
else:
self.active_channel = None
self.itemEditChannel.set_visible(False)
self.itemRemoveChannel.set_visible(False)
self.update_episode_list_model()
def on_btnEditChannel_clicked(self, widget, *args):
self.on_itemEditChannel_activate( widget, args)
def get_podcast_urls_from_selected_episodes(self):
"""Get a set of podcast URLs based on the selected episodes"""
return set(episode.channel.url for episode in \
self.get_selected_episodes())
def get_selected_episodes(self):
"""Get a list of selected episodes from treeAvailable"""
selection = self.treeAvailable.get_selection()
model, paths = selection.get_selected_rows()
episodes = [model.get_value(model.get_iter(path), EpisodeListModel.C_EPISODE) for path in paths]
return episodes
def on_transfer_selected_episodes(self, widget):
self.on_sync_to_ipod_activate(widget, self.get_selected_episodes())
def on_playback_selected_episodes(self, widget):
self.playback_episodes(self.get_selected_episodes())
def on_shownotes_selected_episodes(self, widget):
episodes = self.get_selected_episodes()
if episodes:
episode = episodes.pop(0)
self.show_episode_shownotes(episode)
else:
self.show_message(_('Please select an episode from the episode list to display shownotes.'), _('No episode selected'), widget=self.treeAvailable)
def on_download_selected_episodes(self, widget):
episodes = self.get_selected_episodes()
self.download_episode_list(episodes)
self.update_episode_list_icons([episode.url for episode in episodes])
self.play_or_download()
def on_treeAvailable_row_activated(self, widget, path, view_column):
"""Double-click/enter action handler for treeAvailable"""
        # We should only have one episode selected, as it was double-clicked
e = self.get_selected_episodes()[0]
if (self.config.double_click_episode_action == 'download'):
# If the episode has already been downloaded and exists then play it
if e.was_downloaded(and_exists=True):
self.playback_episodes(self.get_selected_episodes())
# else download it if it is not already downloading
elif not self.episode_is_downloading(e):
self.download_episode_list([e])
self.update_episode_list_icons([e.url])
self.play_or_download()
elif (self.config.double_click_episode_action == 'stream'):
# If we happen to have downloaded this episode simple play it
if e.was_downloaded(and_exists=True):
self.playback_episodes(self.get_selected_episodes())
# else if streaming is possible stream it
elif self.streaming_possible():
self.playback_episodes(self.get_selected_episodes())
else:
log('Unable to stream episode - default media player selected!', sender=self, traceback=True)
self.show_message(_('Please check your media player settings in the preferences dialog.'), _('Unable to stream episode'), widget=self.toolPreferences)
else:
# default action is to display show notes
self.on_shownotes_selected_episodes(widget)
def show_episode_shownotes(self, episode):
if self.episode_shownotes_window is None:
log('First-time use of episode window --- creating', sender=self)
self.episode_shownotes_window = gPodderShownotes(self.gPodder, _config=self.config, \
_download_episode_list=self.download_episode_list, \
_playback_episodes=self.playback_episodes, \
_delete_episode_list=self.delete_episode_list, \
_episode_list_status_changed=self.episode_list_status_changed, \
_cancel_task_list=self.cancel_task_list, \
_episode_is_downloading=self.episode_is_downloading, \
_streaming_possible=self.streaming_possible())
self.episode_shownotes_window.show(episode)
if self.episode_is_downloading(episode):
self.update_downloads_list()
def restart_auto_update_timer(self):
if self._auto_update_timer_source_id is not None:
log('Removing existing auto update timer.', sender=self)
gobject.source_remove(self._auto_update_timer_source_id)
self._auto_update_timer_source_id = None
if self.config.auto_update_feeds and \
self.config.auto_update_frequency:
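            # auto_update_frequency is given in minutes, while
            # gobject.timeout_add() expects milliseconds, hence the 60*1000 below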
interval = 60*1000*self.config.auto_update_frequency
log('Setting up auto update timer with interval %d.', \
self.config.auto_update_frequency, sender=self)
self._auto_update_timer_source_id = gobject.timeout_add(\
interval, self._on_auto_update_timer)
def _on_auto_update_timer(self):
log('Auto update timer fired.', sender=self)
self.update_feed_cache(force_update=True)
# Ask web service for sub changes (if enabled)
self.mygpo_client.flush()
return True
def on_treeDownloads_row_activated(self, widget, *args):
# Use the standard way of working on the treeview
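        # Activating a row toggles the task: running/queued tasks are paused,
        # paused/cancelled/failed tasks are re-queued, finished tasks are removed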
selection = self.treeDownloads.get_selection()
(model, paths) = selection.get_selected_rows()
selected_tasks = [(gtk.TreeRowReference(model, path), model.get_value(model.get_iter(path), 0)) for path in paths]
for tree_row_reference, task in selected_tasks:
if task.status in (task.DOWNLOADING, task.QUEUED):
task.status = task.PAUSED
elif task.status in (task.CANCELLED, task.PAUSED, task.FAILED):
self.download_queue_manager.add_task(task)
self.enable_download_list_update()
elif task.status == task.DONE:
model.remove(model.get_iter(tree_row_reference.get_path()))
self.play_or_download()
# Update the tab title and downloads list
self.update_downloads_list()
def on_item_cancel_download_activate(self, widget):
if self.wNotebook.get_current_page() == 0:
selection = self.treeAvailable.get_selection()
(model, paths) = selection.get_selected_rows()
urls = [model.get_value(model.get_iter(path), \
self.episode_list_model.C_URL) for path in paths]
selected_tasks = [task for task in self.download_tasks_seen \
if task.url in urls]
else:
selection = self.treeDownloads.get_selection()
(model, paths) = selection.get_selected_rows()
selected_tasks = [model.get_value(model.get_iter(path), \
self.download_status_model.C_TASK) for path in paths]
self.cancel_task_list(selected_tasks)
def on_btnCancelAll_clicked(self, widget, *args):
self.cancel_task_list(self.download_tasks_seen)
def on_btnDownloadedDelete_clicked(self, widget, *args):
episodes = self.get_selected_episodes()
if len(episodes) == 1:
self.delete_episode_list(episodes, skip_locked=False)
else:
self.delete_episode_list(episodes)
def on_key_press(self, widget, event):
# Allow tab switching with Ctrl + PgUp/PgDown
if event.state & gtk.gdk.CONTROL_MASK:
if event.keyval == gtk.keysyms.Page_Up:
self.wNotebook.prev_page()
return True
elif event.keyval == gtk.keysyms.Page_Down:
self.wNotebook.next_page()
return True
# After this code we only handle Maemo hardware keys,
# so if we are not a Maemo app, we don't do anything
if not gpodder.ui.maemo:
return False
diff = 0
if event.keyval == gtk.keysyms.F7: #plus
diff = 1
elif event.keyval == gtk.keysyms.F8: #minus
diff = -1
if diff != 0 and not self.currently_updating:
selection = self.treeChannels.get_selection()
(model, iter) = selection.get_selected()
new_path = ((model.get_path(iter)[0]+diff)%len(model),)
selection.select_path(new_path)
self.treeChannels.set_cursor(new_path)
return True
return False
def on_iconify(self):
if self.tray_icon:
self.gPodder.set_skip_taskbar_hint(True)
if self.config.minimize_to_tray:
self.tray_icon.set_visible(True)
else:
self.gPodder.set_skip_taskbar_hint(False)
def on_uniconify(self):
if self.tray_icon:
self.gPodder.set_skip_taskbar_hint(False)
if self.config.minimize_to_tray:
self.tray_icon.set_visible(False)
else:
self.gPodder.set_skip_taskbar_hint(False)
def uniconify_main_window(self):
if self.is_iconified():
self.gPodder.present()
def iconify_main_window(self):
if not self.is_iconified():
self.gPodder.iconify()
def update_podcasts_tab(self):
if len(self.channels):
if gpodder.ui.fremantle:
self.button_refresh.set_title(_('Check for new episodes'))
self.button_refresh.show()
else:
self.label2.set_text(_('Podcasts (%d)') % len(self.channels))
else:
if gpodder.ui.fremantle:
self.button_refresh.hide()
else:
self.label2.set_text(_('Podcasts'))
@dbus.service.method(gpodder.dbus_interface)
def show_gui_window(self):
self.gPodder.present()
@dbus.service.method(gpodder.dbus_interface)
def subscribe_to_url(self, url):
gPodderAddPodcast(self.gPodder,
add_urls_callback=self.add_podcast_list,
preset_url=url)
@dbus.service.method(gpodder.dbus_interface)
def mark_episode_played(self, filename):
if filename is None:
return False
for channel in self.channels:
for episode in channel.get_all_episodes():
fn = episode.local_filename(create=False, check_only=True)
if fn == filename:
episode.mark(is_played=True)
self.db.commit()
self.update_episode_list_icons([episode.url])
self.update_podcast_list_model([episode.channel.url])
return True
return False
def main(options=None):
gobject.threads_init()
gobject.set_application_name('gPodder')
if gpodder.ui.maemo:
# Try to enable the custom icon theme for gPodder on Maemo
settings = gtk.settings_get_default()
settings.set_string_property('gtk-icon-theme-name', \
'gpodder', __file__)
# Extend the search path for the optified icon theme (Maemo 5)
icon_theme = gtk.icon_theme_get_default()
icon_theme.prepend_search_path('/opt/gpodder-icon-theme/')
gtk.window_set_default_icon_name('gpodder')
gtk.about_dialog_set_url_hook(lambda dlg, link, data: util.open_website(link), None)
try:
session_bus = dbus.SessionBus(mainloop=dbus.glib.DBusGMainLoop())
bus_name = dbus.service.BusName(gpodder.dbus_bus_name, bus=session_bus)
except dbus.exceptions.DBusException, dbe:
log('Warning: Cannot get "on the bus".', traceback=True)
dlg = gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_ERROR, \
gtk.BUTTONS_CLOSE, _('Cannot start gPodder'))
dlg.format_secondary_markup(_('D-Bus error: %s') % (str(dbe),))
dlg.set_title('gPodder')
dlg.run()
dlg.destroy()
sys.exit(0)
util.make_directory(gpodder.home)
gpodder.load_plugins()
config = UIConfig(gpodder.config_file)
if gpodder.ui.diablo:
# Detect changing of SD cards between mmc1/mmc2 if a gpodder
# folder exists there (allow moving "gpodder" between SD cards or USB)
# Also allow moving "gpodder" to home folder (e.g. rootfs on SD)
if not os.path.exists(config.download_dir):
log('Downloads might have been moved. Trying to locate them...')
for basedir in ['/media/mmc1', '/media/mmc2']+glob.glob('/media/usb/*')+['/home/user/MyDocs']:
dir = os.path.join(basedir, 'gpodder')
if os.path.exists(dir):
log('Downloads found in: %s', dir)
config.download_dir = dir
break
else:
log('Downloads NOT FOUND in %s', dir)
if config.enable_fingerscroll:
BuilderWidget.use_fingerscroll = True
elif gpodder.ui.fremantle:
config.on_quit_ask = False
config.feed_update_skipping = False
config.mygpo_device_type = util.detect_device_type()
gp = gPodder(bus_name, config)
# Handle options
if options.subscribe:
util.idle_add(gp.subscribe_to_url, options.subscribe)
# Mac OS X: handle "subscribe to podcast" events from Firefox
if platform.system() == 'Darwin':
from gpodder import gpodderosx
gpodderosx.register_handlers(gp)
# end Mac OS X stuff
gp.run()
|
Hillshum/gPodder-tagging
|
src/gpodder/gui.py
|
Python
|
gpl-3.0
| 171,344
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Volume manager manages creating, attaching, and detaching of persistent storage.
Persistent storage volumes keep their state independent of instances. You can
attach to an instance, terminate the instance, spawn a new instance (even
one from a different image) and re-attach the volume with the same data
intact.
**Related Flags**
:volume_topic: What :mod:`rpc` topic to listen to (default: `cinder-volume`).
:volume_manager: The module name of a class derived from
:class:`manager.Manager` (default:
:class:`cinder.volume.manager.Manager`).
:volume_driver: Used by :class:`Manager`. Defaults to
:class:`cinder.volume.drivers.lvm.LVMISCSIDriver`.
:volume_group: Name of the group that will contain exported volumes (default:
`cinder-volumes`)
:num_shell_tries: Number of times to attempt to run commands (default: 3)
"""
import time
from oslo.config import cfg
from oslo import messaging
from osprofiler import profiler
from cinder import compute
from cinder import context
from cinder import exception
from cinder import flow_utils
from cinder.i18n import _, _LE, _LI, _LW
from cinder.image import glance
from cinder import manager
from cinder.openstack.common import excutils
from cinder.openstack.common import importutils
from cinder.openstack.common import jsonutils
from cinder.openstack.common import log as logging
from cinder.openstack.common import periodic_task
from cinder.openstack.common import timeutils
from cinder.openstack.common import uuidutils
from cinder import quota
from cinder import utils
from cinder.volume.configuration import Configuration
from cinder.volume.flows.manager import create_volume
from cinder.volume.flows.manager import manage_existing
from cinder.volume import rpcapi as volume_rpcapi
from cinder.volume import utils as vol_utils
from cinder.volume import volume_types
from eventlet.greenpool import GreenPool
LOG = logging.getLogger(__name__)
QUOTAS = quota.QUOTAS
CGQUOTAS = quota.CGQUOTAS
volume_manager_opts = [
cfg.StrOpt('volume_driver',
default='cinder.volume.drivers.lvm.LVMISCSIDriver',
help='Driver to use for volume creation'),
cfg.IntOpt('migration_create_volume_timeout_secs',
default=300,
help='Timeout for creating the volume to migrate to '
'when performing volume migration (seconds)'),
cfg.BoolOpt('volume_service_inithost_offload',
default=False,
help='Offload pending volume delete during '
'volume service startup'),
cfg.StrOpt('zoning_mode',
default='none',
help='FC Zoning mode configured'),
cfg.StrOpt('extra_capabilities',
default='{}',
help='User defined capabilities, a JSON formatted string '
'specifying key/value pairs.'),
]
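# The options registered above are normally set in cinder.conf, either in
# [DEFAULT] or in a per-backend section when multi-backend is enabled. A
# minimal illustrative snippet -- the values shown are just the defaults
# declared above, not recommendations:
#
#     [DEFAULT]
#     volume_driver = cinder.volume.drivers.lvm.LVMISCSIDriver
#     migration_create_volume_timeout_secs = 300
#     volume_service_inithost_offload = false
#     zoning_mode = none
#     extra_capabilities = {}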
CONF = cfg.CONF
CONF.register_opts(volume_manager_opts)
MAPPING = {
'cinder.volume.drivers.storwize_svc.StorwizeSVCDriver':
'cinder.volume.drivers.ibm.storwize_svc.StorwizeSVCDriver',
'cinder.volume.drivers.xiv_ds8k.XIVDS8KDriver':
'cinder.volume.drivers.ibm.xiv_ds8k.XIVDS8KDriver',
'cinder.volume.drivers.san.hp_lefthand.HpSanISCSIDriver':
'cinder.volume.drivers.san.hp.hp_lefthand_iscsi.HPLeftHandISCSIDriver',
'cinder.volume.drivers.gpfs.GPFSDriver':
'cinder.volume.drivers.ibm.gpfs.GPFSDriver', }
def locked_volume_operation(f):
"""Lock decorator for volume operations.
Takes a named lock prior to executing the operation. The lock is named with
the operation executed and the id of the volume. This lock can then be used
by other operations to avoid operation conflicts on shared volumes.
Example use:
If a volume operation uses this decorator, it will block until the named
lock is free. This is used to protect concurrent operations on the same
volume e.g. delete VolA while create volume VolB from VolA is in progress.
"""
def lvo_inner1(inst, context, volume_id, **kwargs):
@utils.synchronized("%s-%s" % (volume_id, f.__name__), external=True)
def lvo_inner2(*_args, **_kwargs):
return f(*_args, **_kwargs)
return lvo_inner2(inst, context, volume_id, **kwargs)
return lvo_inner1
def locked_snapshot_operation(f):
"""Lock decorator for snapshot operations.
Takes a named lock prior to executing the operation. The lock is named with
the operation executed and the id of the snapshot. This lock can then be
used by other operations to avoid operation conflicts on shared snapshots.
Example use:
If a snapshot operation uses this decorator, it will block until the named
lock is free. This is used to protect concurrent operations on the same
snapshot e.g. delete SnapA while create volume VolA from SnapA is in
progress.
"""
def lso_inner1(inst, context, snapshot_id, **kwargs):
@utils.synchronized("%s-%s" % (snapshot_id, f.__name__), external=True)
def lso_inner2(*_args, **_kwargs):
return f(*_args, **_kwargs)
return lso_inner2(inst, context, snapshot_id, **kwargs)
return lso_inner1
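# Minimal usage sketch for the two lock decorators above. The decorated
# method below is hypothetical; it only illustrates that the external lock
# name is built from the resource id plus the function name (here
# "<volume_id>-delete_volume"), so any other caller synchronizing on the
# same name blocks until this operation finishes.
#
#     @locked_volume_operation
#     def delete_volume(self, context, volume_id, unmanage_only=False):
#         # waits for the "<volume_id>-delete_volume" lock before running
#         ...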
class VolumeManager(manager.SchedulerDependentManager):
"""Manages attachable block storage devices."""
RPC_API_VERSION = '1.18'
target = messaging.Target(version=RPC_API_VERSION)
def __init__(self, volume_driver=None, service_name=None,
*args, **kwargs):
"""Load the driver from the one specified in args, or from flags."""
# update_service_capabilities needs service_name to be volume
super(VolumeManager, self).__init__(service_name='volume',
*args, **kwargs)
self.configuration = Configuration(volume_manager_opts,
config_group=service_name)
self._tp = GreenPool()
self.stats = {}
if not volume_driver:
# Get from configuration, which will get the default
# if its not using the multi backend
volume_driver = self.configuration.volume_driver
if volume_driver in MAPPING:
LOG.warn(_("Driver path %s is deprecated, update your "
"configuration to the new path."), volume_driver)
volume_driver = MAPPING[volume_driver]
self.driver = importutils.import_object(
volume_driver,
configuration=self.configuration,
db=self.db,
host=self.host)
self.driver = profiler.trace_cls("driver")(self.driver)
try:
self.extra_capabilities = jsonutils.loads(
self.driver.configuration.extra_capabilities)
except AttributeError:
self.extra_capabilities = {}
except Exception:
with excutils.save_and_reraise_exception():
LOG.error("Invalid JSON: %s" %
self.driver.configuration.extra_capabilities)
def _add_to_threadpool(self, func, *args, **kwargs):
self._tp.spawn_n(func, *args, **kwargs)
def _count_allocated_capacity(self, ctxt, volume):
pool = vol_utils.extract_host(volume['host'], 'pool')
if pool is None:
# No pool name encoded in the host, so this is a legacy
# volume created before pools were introduced. Ask the
# driver to provide pool info if it has such knowledge,
# and update the DB.
try:
pool = self.driver.get_pool(volume)
except Exception as err:
LOG.error(_('Failed to fetch pool name for volume: %s'),
volume['id'])
LOG.exception(err)
return
if pool:
new_host = vol_utils.append_host(volume['host'],
pool)
self.db.volume_update(ctxt, volume['id'],
{'host': new_host})
else:
# Otherwise, put it into a special fixed pool with
# volume_backend_name as the pool name; if
# volume_backend_name is None, use the default pool name.
# This is only for counting purposes and doesn't update the DB.
pool = (self.driver.configuration.safe_get(
'volume_backend_name') or vol_utils.extract_host(
volume['host'], 'pool', True))
try:
pool_stat = self.stats['pools'][pool]
except KeyError:
# First volume in the pool
self.stats['pools'][pool] = dict(
allocated_capacity_gb=0)
pool_stat = self.stats['pools'][pool]
pool_sum = pool_stat['allocated_capacity_gb']
pool_sum += volume['size']
self.stats['pools'][pool]['allocated_capacity_gb'] = pool_sum
self.stats['allocated_capacity_gb'] += volume['size']
def init_host(self):
"""Do any initialization that needs to be run if this is a
standalone service.
"""
ctxt = context.get_admin_context()
LOG.info(_("Starting volume driver %(driver_name)s (%(version)s)") %
{'driver_name': self.driver.__class__.__name__,
'version': self.driver.get_version()})
try:
self.driver.do_setup(ctxt)
self.driver.check_for_setup_error()
except Exception as ex:
LOG.error(_("Error encountered during "
"initialization of driver: %(name)s") %
{'name': self.driver.__class__.__name__})
LOG.exception(ex)
# we don't want to continue since we failed
# to initialize the driver correctly.
return
volumes = self.db.volume_get_all_by_host(ctxt, self.host)
# FIXME volume count for exporting is wrong
LOG.debug("Re-exporting %s volumes" % len(volumes))
try:
self.stats['pools'] = {}
self.stats.update({'allocated_capacity_gb': 0})
for volume in volumes:
# Available volumes should also be counted as allocated
if volume['status'] in ['in-use', 'available']:
# calculate allocated capacity for driver
self._count_allocated_capacity(ctxt, volume)
try:
if volume['status'] in ['in-use']:
self.driver.ensure_export(ctxt, volume)
except Exception as export_ex:
LOG.error(_("Failed to re-export volume %s: "
"setting to error state"), volume['id'])
LOG.exception(export_ex)
self.db.volume_update(ctxt,
volume['id'],
{'status': 'error'})
elif volume['status'] == 'downloading':
LOG.info(_("volume %s stuck in a downloading state"),
volume['id'])
self.driver.clear_download(ctxt, volume)
self.db.volume_update(ctxt,
volume['id'],
{'status': 'error'})
else:
LOG.info(_("volume %s: skipping export"), volume['id'])
except Exception as ex:
LOG.error(_("Error encountered during "
"re-exporting phase of driver initialization: "
" %(name)s") %
{'name': self.driver.__class__.__name__})
LOG.exception(ex)
return
# at this point the driver is considered initialized.
self.driver.set_initialized()
LOG.debug('Resuming any in progress delete operations')
for volume in volumes:
if volume['status'] == 'deleting':
LOG.info(_('Resuming delete on volume: %s') % volume['id'])
if CONF.volume_service_inithost_offload:
# Offload all the pending volume delete operations to the
# threadpool to prevent the main volume service thread
# from being blocked.
self._add_to_threadpool(self.delete_volume, ctxt,
volume['id'])
else:
# By default, delete volumes sequentially
self.delete_volume(ctxt, volume['id'])
# collect and publish service capabilities
self.publish_service_capabilities(ctxt)
def create_volume(self, context, volume_id, request_spec=None,
filter_properties=None, allow_reschedule=True,
snapshot_id=None, image_id=None, source_volid=None,
source_replicaid=None, consistencygroup_id=None):
"""Creates the volume."""
context_saved = context.deepcopy()
context = context.elevated()
if filter_properties is None:
filter_properties = {}
try:
# NOTE(flaper87): Driver initialization is
# verified by the task itself.
flow_engine = create_volume.get_flow(
context,
self.db,
self.driver,
self.scheduler_rpcapi,
self.host,
volume_id,
snapshot_id=snapshot_id,
image_id=image_id,
source_volid=source_volid,
source_replicaid=source_replicaid,
consistencygroup_id=consistencygroup_id,
allow_reschedule=allow_reschedule,
reschedule_context=context_saved,
request_spec=request_spec,
filter_properties=filter_properties)
except Exception:
LOG.exception(_("Failed to create manager volume flow"))
raise exception.CinderException(
_("Failed to create manager volume flow."))
if snapshot_id is not None:
# Make sure the snapshot is not deleted until we are done with it.
locked_action = "%s-%s" % (snapshot_id, 'delete_snapshot')
elif source_volid is not None:
# Make sure the volume is not deleted until we are done with it.
locked_action = "%s-%s" % (source_volid, 'delete_volume')
elif source_replicaid is not None:
# Make sure the volume is not deleted until we are done with it.
locked_action = "%s-%s" % (source_replicaid, 'delete_volume')
else:
locked_action = None
def _run_flow():
# This code executes create volume flow. If something goes wrong,
# flow reverts all job that was done and reraises an exception.
# Otherwise, all data that was generated by flow becomes available
# in flow engine's storage.
with flow_utils.DynamicLogListener(flow_engine, logger=LOG):
flow_engine.run()
@utils.synchronized(locked_action, external=True)
def _run_flow_locked():
_run_flow()
if locked_action is None:
_run_flow()
else:
_run_flow_locked()
# Fetch created volume from storage
vol_ref = flow_engine.storage.fetch('volume')
# Update volume stats
pool = vol_utils.extract_host(vol_ref['host'], 'pool')
if pool is None:
# Legacy volume; put it into the default pool
pool = self.driver.configuration.safe_get(
'volume_backend_name') or vol_utils.extract_host(
vol_ref['host'], 'pool', True)
try:
self.stats['pools'][pool]['allocated_capacity_gb'] \
+= vol_ref['size']
except KeyError:
self.stats['pools'][pool] = dict(
allocated_capacity_gb=vol_ref['size'])
return vol_ref['id']
@locked_volume_operation
def delete_volume(self, context, volume_id, unmanage_only=False):
"""Deletes and unexports volume.
1. Delete a volume (normal case)
Delete a volume and update quotas.
2. Delete a migration source volume
If deleting the source volume in a migration, we want to skip
quotas. We also want to skip other database updates for the source
volume because those updates will be handled properly in
migrate_volume_completion.
3. Delete a migration destination volume
If deleting the destination volume in a migration, we want to
skip quotas but we need database updates for the volume.
"""
context = context.elevated()
try:
volume_ref = self.db.volume_get(context, volume_id)
except exception.VolumeNotFound:
# NOTE(thingee): It could be possible for a volume to
# be deleted when resuming deletes from init_host().
LOG.info(_("Tried to delete volume %s, but it no longer exists, "
"moving on") % (volume_id))
return True
if context.project_id != volume_ref['project_id']:
project_id = volume_ref['project_id']
else:
project_id = context.project_id
LOG.info(_("volume %s: deleting"), volume_ref['id'])
if volume_ref['attach_status'] == "attached":
# Volume is still attached, need to detach first
raise exception.VolumeAttached(volume_id=volume_id)
if (vol_utils.extract_host(volume_ref['host']) != self.host):
raise exception.InvalidVolume(
reason=_("volume is not local to this node"))
is_migrating = volume_ref['migration_status'] is not None
is_migrating_dest = (is_migrating and
volume_ref['migration_status'].startswith(
'target:'))
self._notify_about_volume_usage(context, volume_ref, "delete.start")
try:
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught
# and the volume status updated.
utils.require_driver_initialized(self.driver)
LOG.debug("volume %s: removing export", volume_ref['id'])
self.driver.remove_export(context, volume_ref)
LOG.debug("volume %s: deleting", volume_ref['id'])
if unmanage_only:
self.driver.unmanage(volume_ref)
else:
self.driver.delete_volume(volume_ref)
except exception.VolumeIsBusy:
LOG.error(_("Cannot delete volume %s: volume is busy"),
volume_ref['id'])
# If this is a destination volume, we have to clear the database
# record to avoid user confusion.
self._clear_db(context, is_migrating_dest, volume_ref,
'available')
return True
except Exception:
with excutils.save_and_reraise_exception():
# If this is a destination volume, we have to clear the
# database record to avoid user confusion.
self._clear_db(context, is_migrating_dest, volume_ref,
'error_deleting')
# If deleting source/destination volume in a migration, we should
# skip quotas.
if not is_migrating:
# Get reservations
try:
reserve_opts = {'volumes': -1,
'gigabytes': -volume_ref['size']}
QUOTAS.add_volume_type_opts(context,
reserve_opts,
volume_ref.get('volume_type_id'))
reservations = QUOTAS.reserve(context,
project_id=project_id,
**reserve_opts)
except Exception:
reservations = None
LOG.exception(_LE("Failed to update usages deleting volume"))
# If deleting the source volume in a migration, we should skip database
# update here. In other cases, continue to update database entries.
if not is_migrating or is_migrating_dest:
# Delete glance metadata if it exists
self.db.volume_glance_metadata_delete_by_volume(context, volume_id)
self.db.volume_destroy(context, volume_id)
LOG.info(_LI("volume %s: deleted successfully"), volume_ref['id'])
# If deleting source/destination volume in a migration, we should
# skip quotas.
if not is_migrating:
self._notify_about_volume_usage(context, volume_ref, "delete.end")
# Commit the reservations
if reservations:
QUOTAS.commit(context, reservations, project_id=project_id)
pool = vol_utils.extract_host(volume_ref['host'], 'pool')
if pool is None:
# Legacy volume; put it into the default pool
pool = self.driver.configuration.safe_get(
'volume_backend_name') or vol_utils.extract_host(
volume_ref['host'], 'pool', True)
size = volume_ref['size']
try:
self.stats['pools'][pool]['allocated_capacity_gb'] -= size
except KeyError:
self.stats['pools'][pool] = dict(
allocated_capacity_gb=-size)
self.publish_service_capabilities(context)
return True
def _clear_db(self, context, is_migrating_dest, volume_ref, status):
# This method is called when driver.unmanage() or
# driver.delete_volume() fails in delete_volume(), so it is already
# in the exception handling part.
if is_migrating_dest:
self.db.volume_destroy(context, volume_ref['id'])
LOG.error(_LE("Unable to delete the destination volume %s "
"during volume migration, but the database "
"record needs to be deleted."),
volume_ref['id'])
else:
self.db.volume_update(context,
volume_ref['id'],
{'status': status})
def create_snapshot(self, context, volume_id, snapshot_id):
"""Creates and exports the snapshot."""
caller_context = context
context = context.elevated()
snapshot_ref = self.db.snapshot_get(context, snapshot_id)
LOG.info(_("snapshot %s: creating"), snapshot_ref['id'])
self._notify_about_snapshot_usage(
context, snapshot_ref, "create.start")
try:
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught
# and the snapshot status updated.
utils.require_driver_initialized(self.driver)
LOG.debug("snapshot %(snap_id)s: creating",
{'snap_id': snapshot_ref['id']})
# Pass context so that drivers that want to use it, can,
# but it is not a requirement for all drivers.
snapshot_ref['context'] = caller_context
model_update = self.driver.create_snapshot(snapshot_ref)
if model_update:
self.db.snapshot_update(context, snapshot_ref['id'],
model_update)
except Exception:
with excutils.save_and_reraise_exception():
self.db.snapshot_update(context,
snapshot_ref['id'],
{'status': 'error'})
vol_ref = self.db.volume_get(context, volume_id)
if vol_ref.bootable:
try:
self.db.volume_glance_metadata_copy_to_snapshot(
context, snapshot_ref['id'], volume_id)
except exception.CinderException as ex:
LOG.exception(_("Failed updating %(snapshot_id)s"
" metadata using the provided volumes"
" %(volume_id)s metadata") %
{'volume_id': volume_id,
'snapshot_id': snapshot_id})
self.db.snapshot_update(context,
snapshot_ref['id'],
{'status': 'error'})
raise exception.MetadataCopyFailure(reason=ex)
snapshot_ref = self.db.snapshot_update(context,
snapshot_ref['id'],
{'status': 'available',
'progress': '100%'})
LOG.info(_("snapshot %s: created successfully"), snapshot_ref['id'])
self._notify_about_snapshot_usage(context, snapshot_ref, "create.end")
return snapshot_id
@locked_snapshot_operation
def delete_snapshot(self, context, snapshot_id):
"""Deletes and unexports snapshot."""
caller_context = context
context = context.elevated()
snapshot_ref = self.db.snapshot_get(context, snapshot_id)
project_id = snapshot_ref['project_id']
LOG.info(_("snapshot %s: deleting"), snapshot_ref['id'])
self._notify_about_snapshot_usage(
context, snapshot_ref, "delete.start")
try:
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught
# and the snapshot status updated.
utils.require_driver_initialized(self.driver)
LOG.debug("snapshot %s: deleting", snapshot_ref['id'])
# Pass context so that drivers that want to use it, can,
# but it is not a requirement for all drivers.
snapshot_ref['context'] = caller_context
self.driver.delete_snapshot(snapshot_ref)
except exception.SnapshotIsBusy:
LOG.error(_("Cannot delete snapshot %s: snapshot is busy"),
snapshot_ref['id'])
self.db.snapshot_update(context,
snapshot_ref['id'],
{'status': 'available'})
return True
except Exception:
with excutils.save_and_reraise_exception():
self.db.snapshot_update(context,
snapshot_ref['id'],
{'status': 'error_deleting'})
# Get reservations
try:
if CONF.no_snapshot_gb_quota:
reserve_opts = {'snapshots': -1}
else:
reserve_opts = {
'snapshots': -1,
'gigabytes': -snapshot_ref['volume_size'],
}
volume_ref = self.db.volume_get(context, snapshot_ref['volume_id'])
QUOTAS.add_volume_type_opts(context,
reserve_opts,
volume_ref.get('volume_type_id'))
reservations = QUOTAS.reserve(context,
project_id=project_id,
**reserve_opts)
except Exception:
reservations = None
LOG.exception(_("Failed to update usages deleting snapshot"))
self.db.volume_glance_metadata_delete_by_snapshot(context, snapshot_id)
self.db.snapshot_destroy(context, snapshot_id)
LOG.info(_("snapshot %s: deleted successfully"), snapshot_ref['id'])
self._notify_about_snapshot_usage(context, snapshot_ref, "delete.end")
# Commit the reservations
if reservations:
QUOTAS.commit(context, reservations, project_id=project_id)
return True
def attach_volume(self, context, volume_id, instance_uuid, host_name,
mountpoint, mode):
"""Updates db to show volume is attached."""
@utils.synchronized(volume_id, external=True)
def do_attach():
# check the volume status before attaching
volume = self.db.volume_get(context, volume_id)
volume_metadata = self.db.volume_admin_metadata_get(
context.elevated(), volume_id)
if volume['status'] == 'attaching':
if (volume['instance_uuid'] and volume['instance_uuid'] !=
instance_uuid):
msg = _("being attached by another instance")
raise exception.InvalidVolume(reason=msg)
if (volume['attached_host'] and volume['attached_host'] !=
host_name):
msg = _("being attached by another host")
raise exception.InvalidVolume(reason=msg)
if (volume_metadata.get('attached_mode') and
volume_metadata.get('attached_mode') != mode):
msg = _("being attached by different mode")
raise exception.InvalidVolume(reason=msg)
elif (not volume['migration_status'] and
volume['status'] != "available"):
msg = _("status must be available or attaching")
raise exception.InvalidVolume(reason=msg)
# TODO(jdg): attach_time column is currently varchar
# we should update this to a date-time object
# also consider adding detach_time?
self._notify_about_volume_usage(context, volume,
"attach.start")
self.db.volume_update(context, volume_id,
{"instance_uuid": instance_uuid,
"attached_host": host_name,
"status": "attaching",
"attach_time": timeutils.strtime()})
self.db.volume_admin_metadata_update(context.elevated(),
volume_id,
{"attached_mode": mode},
False)
if instance_uuid and not uuidutils.is_uuid_like(instance_uuid):
self.db.volume_update(context, volume_id,
{'status': 'error_attaching'})
raise exception.InvalidUUID(uuid=instance_uuid)
host_name_sanitized = utils.sanitize_hostname(
host_name) if host_name else None
volume = self.db.volume_get(context, volume_id)
if volume_metadata.get('readonly') == 'True' and mode != 'ro':
self.db.volume_update(context, volume_id,
{'status': 'error_attaching'})
raise exception.InvalidVolumeAttachMode(mode=mode,
volume_id=volume_id)
try:
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught
# and the volume status updated.
utils.require_driver_initialized(self.driver)
self.driver.attach_volume(context,
volume,
instance_uuid,
host_name_sanitized,
mountpoint)
except Exception:
with excutils.save_and_reraise_exception():
self.db.volume_update(context, volume_id,
{'status': 'error_attaching'})
volume = self.db.volume_attached(context.elevated(),
volume_id,
instance_uuid,
host_name_sanitized,
mountpoint)
if volume['migration_status']:
self.db.volume_update(context, volume_id,
{'migration_status': None})
self._notify_about_volume_usage(context, volume, "attach.end")
return do_attach()
@locked_volume_operation
def detach_volume(self, context, volume_id):
"""Updates db to show volume is detached."""
# TODO(vish): refactor this into a more general "unreserve"
volume = self.db.volume_get(context, volume_id)
self._notify_about_volume_usage(context, volume, "detach.start")
try:
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught
# and the volume status updated.
utils.require_driver_initialized(self.driver)
self.driver.detach_volume(context, volume)
except Exception:
with excutils.save_and_reraise_exception():
self.db.volume_update(context,
volume_id,
{'status': 'error_detaching'})
self.db.volume_detached(context.elevated(), volume_id)
self.db.volume_admin_metadata_delete(context.elevated(), volume_id,
'attached_mode')
# NOTE(jdg): We used to do an ensure export here to
# catch upgrades while volumes were attached (E->F)
# this was necessary to convert in-use volumes from
# int ID's to UUID's. Don't need this any longer
# We're going to remove the export here
# (delete the iscsi target)
volume = self.db.volume_get(context, volume_id)
try:
utils.require_driver_initialized(self.driver)
LOG.debug("volume %s: removing export", volume_id)
self.driver.remove_export(context.elevated(), volume)
except exception.DriverNotInitialized:
with excutils.save_and_reraise_exception():
LOG.exception(_("Error detaching volume %(volume)s, "
"due to uninitialized driver."),
{"volume": volume_id})
except Exception as ex:
LOG.exception(_("Error detaching volume %(volume)s, "
"due to remove export failure."),
{"volume": volume_id})
raise exception.RemoveExportException(volume=volume_id, reason=ex)
self._notify_about_volume_usage(context, volume, "detach.end")
def copy_volume_to_image(self, context, volume_id, image_meta):
"""Uploads the specified volume to Glance.
image_meta is a dictionary containing the following keys:
'id', 'container_format', 'disk_format'
"""
payload = {'volume_id': volume_id, 'image_id': image_meta['id']}
image_service = None
try:
volume = self.db.volume_get(context, volume_id)
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught
# and the volume status updated.
utils.require_driver_initialized(self.driver)
image_service, image_id = \
glance.get_remote_image_service(context, image_meta['id'])
self.driver.copy_volume_to_image(context, volume, image_service,
image_meta)
LOG.debug("Uploaded volume %(volume_id)s to "
"image (%(image_id)s) successfully",
{'volume_id': volume_id, 'image_id': image_id})
except Exception as error:
LOG.error(_("Error occurred while uploading volume %(volume_id)s "
"to image %(image_id)s."),
{'volume_id': volume_id, 'image_id': image_meta['id']})
if image_service is not None:
# Deletes the image if it is in queued or saving state
self._delete_image(context, image_meta['id'], image_service)
with excutils.save_and_reraise_exception():
payload['message'] = unicode(error)
finally:
if (volume['instance_uuid'] is None and
volume['attached_host'] is None):
self.db.volume_update(context, volume_id,
{'status': 'available'})
else:
self.db.volume_update(context, volume_id,
{'status': 'in-use'})
def _delete_image(self, context, image_id, image_service):
"""Deletes an image stuck in queued or saving state."""
try:
image_meta = image_service.show(context, image_id)
image_status = image_meta.get('status')
if image_status == 'queued' or image_status == 'saving':
LOG.warn("Deleting image %(image_id)s in %(image_status)s "
"state.",
{'image_id': image_id,
'image_status': image_status})
image_service.delete(context, image_id)
except Exception:
LOG.warn(_("Error occurred while deleting image %s."),
image_id, exc_info=True)
def initialize_connection(self, context, volume_id, connector):
"""Prepare volume for connection from host represented by connector.
This method calls the driver initialize_connection and returns
it to the caller. The connector parameter is a dictionary with
information about the host that will connect to the volume in the
following format::
{
'ip': ip,
'initiator': initiator,
}
ip: the ip address of the connecting machine
initiator: the iscsi initiator name of the connecting machine.
This can be None if the connecting machine does not support iscsi
connections.
The driver is responsible for doing any necessary security setup and
returning a connection_info dictionary in the following format::
{
'driver_volume_type': driver_volume_type,
'data': data,
}
driver_volume_type: a string to identify the type of volume. This
can be used by the calling code to determine the
strategy for connecting to the volume. This could
be 'iscsi', 'rbd', 'sheepdog', etc.
data: this is the data that the calling code will use to connect
to the volume. Keep in mind that this will be serialized to
json in various places, so it should not contain any non-json
data types.
"""
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught
# and the volume status updated.
utils.require_driver_initialized(self.driver)
try:
self.driver.validate_connector(connector)
except Exception as err:
err_msg = (_('Unable to fetch connection information from '
'backend: %(err)s') % {'err': err})
LOG.error(err_msg)
raise exception.VolumeBackendAPIException(data=err_msg)
volume = self.db.volume_get(context, volume_id)
model_update = None
try:
LOG.debug("Volume %s: creating export", volume_id)
model_update = self.driver.create_export(context.elevated(),
volume)
except exception.CinderException:
err_msg = (_('Unable to create export for volume %(volume_id)s') %
{'volume_id': volume_id})
LOG.exception(err_msg)
raise exception.VolumeBackendAPIException(data=err_msg)
try:
if model_update:
volume = self.db.volume_update(context,
volume_id,
model_update)
except exception.CinderException as ex:
LOG.exception(_("Failed updating model of volume %(volume_id)s"
" with driver provided model %(model)s") %
{'volume_id': volume_id, 'model': model_update})
raise exception.ExportFailure(reason=ex)
try:
conn_info = self.driver.initialize_connection(volume, connector)
except Exception as err:
err_msg = (_('Unable to fetch connection information from '
'backend: %(err)s') % {'err': err})
LOG.error(err_msg)
self.driver.remove_export(context.elevated(), volume)
raise exception.VolumeBackendAPIException(data=err_msg)
# Add qos_specs to connection info
typeid = volume['volume_type_id']
specs = None
if typeid:
res = volume_types.get_volume_type_qos_specs(typeid)
qos = res['qos_specs']
# Only pass qos_specs that are designated to be consumed by
# the front-end, or by both front-end and back-end.
if qos and qos.get('consumer') in ['front-end', 'both']:
specs = qos.get('specs')
qos_spec = dict(qos_specs=specs)
conn_info['data'].update(qos_spec)
# Add access_mode to connection info
volume_metadata = self.db.volume_admin_metadata_get(context.elevated(),
volume_id)
if conn_info['data'].get('access_mode') is None:
access_mode = volume_metadata.get('attached_mode')
if access_mode is None:
# NOTE(zhiyan): client didn't call 'os-attach' before
access_mode = ('ro'
if volume_metadata.get('readonly') == 'True'
else 'rw')
conn_info['data']['access_mode'] = access_mode
return conn_info
def terminate_connection(self, context, volume_id, connector, force=False):
"""Cleanup connection from host represented by connector.
The format of connector is the same as for initialize_connection.
"""
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught
# and the volume status updated.
utils.require_driver_initialized(self.driver)
volume_ref = self.db.volume_get(context, volume_id)
try:
self.driver.terminate_connection(volume_ref, connector,
force=force)
except Exception as err:
err_msg = (_('Unable to terminate volume connection: %(err)s')
% {'err': err})
LOG.error(err_msg)
raise exception.VolumeBackendAPIException(data=err_msg)
def accept_transfer(self, context, volume_id, new_user, new_project):
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught
# and the volume status updated.
utils.require_driver_initialized(self.driver)
# NOTE(jdg): need elevated context as we haven't "given" the vol
# yet
volume_ref = self.db.volume_get(context.elevated(), volume_id)
# NOTE(jdg): Some drivers tie provider info (CHAP) to the tenant;
# for those that do, allow them to return updated model info
model_update = self.driver.accept_transfer(context,
volume_ref,
new_user,
new_project)
if model_update:
try:
self.db.volume_update(context.elevated(),
volume_id,
model_update)
except exception.CinderException:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed updating model of "
"volume %(volume_id)s "
"with drivers update %(model)s "
"during xfr.") %
{'volume_id': volume_id,
'model': model_update})
self.db.volume_update(context.elevated(),
volume_id,
{'status': 'error'})
return model_update
def _migrate_volume_generic(self, ctxt, volume, host, new_type_id):
rpcapi = volume_rpcapi.VolumeAPI()
# Create new volume on remote host
new_vol_values = {}
for k, v in volume.iteritems():
new_vol_values[k] = v
del new_vol_values['id']
del new_vol_values['_name_id']
# We don't copy volume_type because the db sets that according to
# volume_type_id, which we do copy
del new_vol_values['volume_type']
if new_type_id:
new_vol_values['volume_type_id'] = new_type_id
new_vol_values['host'] = host['host']
new_vol_values['status'] = 'creating'
new_vol_values['migration_status'] = 'target:%s' % volume['id']
new_vol_values['attach_status'] = 'detached'
new_volume = self.db.volume_create(ctxt, new_vol_values)
rpcapi.create_volume(ctxt, new_volume, host['host'],
None, None, allow_reschedule=False)
# Wait for new_volume to become ready
starttime = time.time()
deadline = starttime + CONF.migration_create_volume_timeout_secs
new_volume = self.db.volume_get(ctxt, new_volume['id'])
tries = 0
while new_volume['status'] != 'available':
tries += 1
now = time.time()
if new_volume['status'] == 'error':
msg = _("failed to create new_volume on destination host")
self._clean_temporary_volume(ctxt, volume['id'],
new_volume['id'],
clean_db_only=True)
raise exception.VolumeMigrationFailed(reason=msg)
elif now > deadline:
msg = _("timeout creating new_volume on destination host")
self._clean_temporary_volume(ctxt, volume['id'],
new_volume['id'],
clean_db_only=True)
raise exception.VolumeMigrationFailed(reason=msg)
else:
time.sleep(tries ** 2)
new_volume = self.db.volume_get(ctxt, new_volume['id'])
# Copy the source volume to the destination volume
try:
if (volume['instance_uuid'] is None and
volume['attached_host'] is None):
self.driver.copy_volume_data(ctxt, volume, new_volume,
remote='dest')
# The above call is synchronous so we complete the migration
self.migrate_volume_completion(ctxt, volume['id'],
new_volume['id'], error=False)
else:
nova_api = compute.API()
# This is an async call to Nova, which will call the completion
# when it's done
nova_api.update_server_volume(ctxt, volume['instance_uuid'],
volume['id'], new_volume['id'])
except Exception:
with excutils.save_and_reraise_exception():
msg = _LE("Failed to copy volume %(vol1)s to %(vol2)s")
LOG.error(msg, {'vol1': volume['id'],
'vol2': new_volume['id']})
self._clean_temporary_volume(ctxt, volume['id'],
new_volume['id'])
def _get_original_status(self, volume):
if (volume['instance_uuid'] is None and
volume['attached_host'] is None):
return 'available'
else:
return 'in-use'
def _clean_temporary_volume(self, ctxt, volume_id, new_volume_id,
clean_db_only=False):
volume = self.db.volume_get(ctxt, volume_id)
# If we're in the migrating phase, we need to clean up the
# destination volume because the source volume remains
if volume['migration_status'] == 'migrating':
try:
if clean_db_only:
# The temporary volume is not created, only DB data
# is created
self.db.volume_destroy(ctxt, new_volume_id)
else:
# The temporary volume is already created
rpcapi = volume_rpcapi.VolumeAPI()
volume = self.db.volume_get(ctxt, new_volume_id)
rpcapi.delete_volume(ctxt, volume)
except exception.VolumeNotFound:
LOG.info(_LI("Couldn't find the temporary volume "
"%(vol)s in the database. There is no need "
"to clean up this volume."),
{'vol': new_volume_id})
else:
# If we're in the completing phase, don't delete the
# destination because we may have already deleted the
# source! But the migration_status in the database should
# be cleared so the volume can be handled after a migration failure.
try:
updates = {'migration_status': None}
self.db.volume_update(ctxt, new_volume_id, updates)
except exception.VolumeNotFound:
LOG.info(_LI("Couldn't find destination volume "
"%(vol)s in the database. The entry might be "
"successfully deleted during migration "
"completion phase."),
{'vol': new_volume_id})
LOG.warning(_LW("Failed to migrate volume. The destination "
"volume %(vol)s is not deleted since the "
"source volume may have been deleted."),
{'vol': new_volume_id})
def migrate_volume_completion(self, ctxt, volume_id, new_volume_id,
error=False):
try:
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught
# and the migration status updated.
utils.require_driver_initialized(self.driver)
except exception.DriverNotInitialized:
with excutils.save_and_reraise_exception():
self.db.volume_update(ctxt, volume_id,
{'migration_status': 'error'})
msg = _("migrate_volume_completion: completing migration for "
"volume %(vol1)s (temporary volume %(vol2)s")
LOG.debug(msg % {'vol1': volume_id, 'vol2': new_volume_id})
volume = self.db.volume_get(ctxt, volume_id)
new_volume = self.db.volume_get(ctxt, new_volume_id)
rpcapi = volume_rpcapi.VolumeAPI()
orig_volume_status = self._get_original_status(volume)
if error:
msg = _("migrate_volume_completion is cleaning up an error "
"for volume %(vol1)s (temporary volume %(vol2)s")
LOG.info(msg % {'vol1': volume['id'],
'vol2': new_volume['id']})
rpcapi.delete_volume(ctxt, new_volume)
updates = {'migration_status': None, 'status': orig_volume_status}
self.db.volume_update(ctxt, volume_id, updates)
return volume_id
self.db.volume_update(ctxt, volume_id,
{'migration_status': 'completing'})
# Delete the source volume (if it fails, don't fail the migration)
try:
if orig_volume_status == 'in-use':
self.detach_volume(ctxt, volume_id)
self.delete_volume(ctxt, volume_id)
except Exception as ex:
msg = _("Failed to delete migration source vol %(vol)s: %(err)s")
LOG.error(msg % {'vol': volume_id, 'err': ex})
self.db.finish_volume_migration(ctxt, volume_id, new_volume_id)
self.db.volume_destroy(ctxt, new_volume_id)
if orig_volume_status == 'in-use':
updates = {'migration_status': 'completing',
'status': orig_volume_status}
else:
updates = {'migration_status': None}
self.db.volume_update(ctxt, volume_id, updates)
if orig_volume_status == 'in-use':
rpcapi.attach_volume(ctxt,
volume,
volume['instance_uuid'],
volume['attached_host'],
volume['mountpoint'],
'rw')
return volume['id']
def migrate_volume(self, ctxt, volume_id, host, force_host_copy=False,
new_type_id=None):
"""Migrate the volume to the specified host (called on source host)."""
try:
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught
# and the migration status updated.
utils.require_driver_initialized(self.driver)
except exception.DriverNotInitialized:
with excutils.save_and_reraise_exception():
self.db.volume_update(ctxt, volume_id,
{'migration_status': 'error'})
volume_ref = self.db.volume_get(ctxt, volume_id)
model_update = None
moved = False
status_update = None
if volume_ref['status'] == 'retyping':
status_update = {'status': self._get_original_status(volume_ref)}
self.db.volume_update(ctxt, volume_ref['id'],
{'migration_status': 'migrating'})
if not force_host_copy and new_type_id is None:
try:
LOG.debug("volume %s: calling driver migrate_volume",
volume_ref['id'])
moved, model_update = self.driver.migrate_volume(ctxt,
volume_ref,
host)
if moved:
updates = {'host': host['host'],
'migration_status': None}
if status_update:
updates.update(status_update)
if model_update:
updates.update(model_update)
volume_ref = self.db.volume_update(ctxt,
volume_ref['id'],
updates)
except Exception:
with excutils.save_and_reraise_exception():
updates = {'migration_status': None}
if status_update:
updates.update(status_update)
try:
model_update = self.driver.create_export(ctxt,
volume_ref)
if model_update:
updates.update(model_update)
except Exception:
LOG.exception(_LE("Failed to create export for "
"volume: %s"), volume_ref['id'])
finally:
self.db.volume_update(ctxt, volume_ref['id'], updates)
if not moved:
try:
self._migrate_volume_generic(ctxt, volume_ref, host,
new_type_id)
except Exception:
with excutils.save_and_reraise_exception():
updates = {'migration_status': None}
if status_update:
updates.update(status_update)
try:
model_update = self.driver.create_export(ctxt,
volume_ref)
if model_update:
updates.update(model_update)
except Exception:
LOG.exception(_LE("Failed to create export for "
"volume: %s"), volume_ref['id'])
finally:
self.db.volume_update(ctxt, volume_ref['id'], updates)
@periodic_task.periodic_task
def _report_driver_status(self, context):
LOG.info(_("Updating volume status"))
if not self.driver.initialized:
if self.driver.configuration.config_group is None:
config_group = ''
else:
config_group = ('(config name %s)' %
self.driver.configuration.config_group)
LOG.warning(_('Unable to update stats, %(driver_name)s '
'-%(driver_version)s '
'%(config_group)s driver is uninitialized.') %
{'driver_name': self.driver.__class__.__name__,
'driver_version': self.driver.get_version(),
'config_group': config_group})
else:
volume_stats = self.driver.get_volume_stats(refresh=True)
if self.extra_capabilities:
volume_stats.update(self.extra_capabilities)
if volume_stats:
# Append volume stats with 'allocated_capacity_gb'
self._append_volume_stats(volume_stats)
# queue it to be sent to the Schedulers.
self.update_service_capabilities(volume_stats)
def _append_volume_stats(self, vol_stats):
pools = vol_stats.get('pools', None)
if pools and isinstance(pools, list):
for pool in pools:
pool_name = pool['pool_name']
try:
pool_stats = self.stats['pools'][pool_name]
except KeyError:
# Pool not found in volume manager
pool_stats = dict(allocated_capacity_gb=0)
pool.update(pool_stats)
def publish_service_capabilities(self, context):
"""Collect driver status and then publish."""
self._report_driver_status(context)
self._publish_service_capabilities(context)
def notification(self, context, event):
LOG.info(_("Notification {%s} received"), event)
def _notify_about_volume_usage(self,
context,
volume,
event_suffix,
extra_usage_info=None):
vol_utils.notify_about_volume_usage(
context, volume, event_suffix,
extra_usage_info=extra_usage_info, host=self.host)
def _notify_about_snapshot_usage(self,
context,
snapshot,
event_suffix,
extra_usage_info=None):
vol_utils.notify_about_snapshot_usage(
context, snapshot, event_suffix,
extra_usage_info=extra_usage_info, host=self.host)
def _notify_about_consistencygroup_usage(self,
context,
group,
event_suffix,
extra_usage_info=None):
vol_utils.notify_about_consistencygroup_usage(
context, group, event_suffix,
extra_usage_info=extra_usage_info, host=self.host)
volumes = self.db.volume_get_all_by_group(context, group['id'])
if volumes:
for volume in volumes:
vol_utils.notify_about_volume_usage(
context, volume, event_suffix,
extra_usage_info=extra_usage_info, host=self.host)
def _notify_about_cgsnapshot_usage(self,
context,
cgsnapshot,
event_suffix,
extra_usage_info=None):
vol_utils.notify_about_cgsnapshot_usage(
context, cgsnapshot, event_suffix,
extra_usage_info=extra_usage_info, host=self.host)
snapshots = self.db.snapshot_get_all_for_cgsnapshot(context,
cgsnapshot['id'])
if snapshots:
for snapshot in snapshots:
vol_utils.notify_about_snapshot_usage(
context, snapshot, event_suffix,
extra_usage_info=extra_usage_info, host=self.host)
def extend_volume(self, context, volume_id, new_size, reservations):
try:
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught
# and the volume status updated.
utils.require_driver_initialized(self.driver)
except exception.DriverNotInitialized:
with excutils.save_and_reraise_exception():
self.db.volume_update(context, volume_id,
{'status': 'error_extending'})
volume = self.db.volume_get(context, volume_id)
size_increase = (int(new_size)) - volume['size']
self._notify_about_volume_usage(context, volume, "resize.start")
try:
LOG.info(_("volume %s: extending"), volume['id'])
self.driver.extend_volume(volume, new_size)
LOG.info(_("volume %s: extended successfully"), volume['id'])
except Exception:
LOG.exception(_("volume %s: Error trying to extend volume"),
volume_id)
try:
self.db.volume_update(context, volume['id'],
{'status': 'error_extending'})
raise exception.CinderException(_("Volume %s: Error trying "
"to extend volume") %
volume_id)
finally:
QUOTAS.rollback(context, reservations)
return
QUOTAS.commit(context, reservations)
volume = self.db.volume_update(context,
volume['id'],
{'size': int(new_size),
'status': 'available'})
pool = vol_utils.extract_host(volume['host'], 'pool')
if pool is None:
# Legacy volume; put it into the default pool
pool = self.driver.configuration.safe_get(
'volume_backend_name') or vol_utils.extract_host(
volume['host'], 'pool', True)
try:
self.stats['pools'][pool]['allocated_capacity_gb'] += size_increase
except KeyError:
self.stats['pools'][pool] = dict(
allocated_capacity_gb=size_increase)
self._notify_about_volume_usage(
context, volume, "resize.end",
extra_usage_info={'size': int(new_size)})
def retype(self, ctxt, volume_id, new_type_id, host,
migration_policy='never', reservations=None):
def _retype_error(context, volume_id, old_reservations,
new_reservations, status_update):
try:
self.db.volume_update(context, volume_id, status_update)
finally:
QUOTAS.rollback(context, old_reservations)
QUOTAS.rollback(context, new_reservations)
context = ctxt.elevated()
volume_ref = self.db.volume_get(ctxt, volume_id)
status_update = {'status': self._get_original_status(volume_ref)}
if context.project_id != volume_ref['project_id']:
project_id = volume_ref['project_id']
else:
project_id = context.project_id
try:
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught
# and the volume status updated.
utils.require_driver_initialized(self.driver)
except exception.DriverNotInitialized:
with excutils.save_and_reraise_exception():
# NOTE(flaper87): Other exceptions in this method don't
# set the volume status to error. Should that be done
# here? Setting the volume back to its original status
# for now.
self.db.volume_update(context, volume_id, status_update)
# Get old reservations
try:
reserve_opts = {'volumes': -1, 'gigabytes': -volume_ref['size']}
QUOTAS.add_volume_type_opts(context,
reserve_opts,
volume_ref.get('volume_type_id'))
old_reservations = QUOTAS.reserve(context,
project_id=project_id,
**reserve_opts)
except Exception:
old_reservations = None
self.db.volume_update(context, volume_id, status_update)
LOG.exception(_("Failed to update usages while retyping volume."))
raise exception.CinderException(_("Failed to get old volume type"
" quota reservations"))
# We already got the new reservations
new_reservations = reservations
# If volume types have the same contents, no need to do anything
retyped = False
diff, all_equal = volume_types.volume_types_diff(
context, volume_ref.get('volume_type_id'), new_type_id)
if all_equal:
retyped = True
# Call driver to try and change the type
retype_model_update = None
if not retyped:
try:
new_type = volume_types.get_volume_type(context, new_type_id)
ret = self.driver.retype(context,
volume_ref,
new_type,
diff,
host)
# Check if the driver retype provided a model update or
# just a retype indication
if type(ret) == tuple:
retyped, retype_model_update = ret
else:
retyped = ret
if retyped:
LOG.info(_("Volume %s: retyped successfully"), volume_id)
except Exception as ex:
retyped = False
LOG.error(_("Volume %s: driver error when trying to retype, "
"falling back to generic mechanism."),
volume_ref['id'])
LOG.exception(ex)
# We could not change the type, so we need to migrate the volume, where
# the destination volume will be of the new type
if not retyped:
if migration_policy == 'never':
_retype_error(context, volume_id, old_reservations,
new_reservations, status_update)
msg = _("Retype requires migration but is not allowed.")
raise exception.VolumeMigrationFailed(reason=msg)
snaps = self.db.snapshot_get_all_for_volume(context,
volume_ref['id'])
if snaps:
_retype_error(context, volume_id, old_reservations,
new_reservations, status_update)
msg = _("Volume must not have snapshots.")
LOG.error(msg)
raise exception.InvalidVolume(reason=msg)
# Don't allow volume with replicas to be migrated
rep_status = volume_ref['replication_status']
if rep_status is not None and rep_status != 'disabled':
_retype_error(context, volume_id, old_reservations,
new_reservations, status_update)
msg = _("Volume must not be replicated.")
LOG.error(msg)
raise exception.InvalidVolume(reason=msg)
self.db.volume_update(context, volume_ref['id'],
{'migration_status': 'starting'})
try:
self.migrate_volume(context, volume_id, host,
new_type_id=new_type_id)
except Exception:
with excutils.save_and_reraise_exception():
_retype_error(context, volume_id, old_reservations,
new_reservations, status_update)
else:
model_update = {'volume_type_id': new_type_id,
'host': host['host'],
'status': status_update['status']}
if retype_model_update:
model_update.update(retype_model_update)
self.db.volume_update(context, volume_id, model_update)
if old_reservations:
QUOTAS.commit(context, old_reservations, project_id=project_id)
if new_reservations:
QUOTAS.commit(context, new_reservations, project_id=project_id)
self.publish_service_capabilities(context)
def manage_existing(self, ctxt, volume_id, ref=None):
LOG.debug('manage_existing: managing %s.' % ref)
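# 'ref' is a driver-specific reference to the existing backend storage
# object. A common shape (an assumption here -- the accepted keys vary by
# driver) looks like:
#
#     ref = {'source-name': 'existing-lv-or-share-name'}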
try:
flow_engine = manage_existing.get_flow(
ctxt,
self.db,
self.driver,
self.host,
volume_id,
ref)
except Exception:
LOG.exception(_("Failed to create manage_existing flow."))
raise exception.CinderException(
_("Failed to create manage existing flow."))
with flow_utils.DynamicLogListener(flow_engine, logger=LOG):
flow_engine.run()
# Fetch created volume from storage
vol_ref = flow_engine.storage.fetch('volume')
# Update volume stats
pool = vol_utils.extract_host(vol_ref['host'], 'pool')
if pool is None:
# Legacy volume; put it into the default pool
pool = self.driver.configuration.safe_get(
'volume_backend_name') or vol_utils.extract_host(
vol_ref['host'], 'pool', True)
try:
self.stats['pools'][pool]['allocated_capacity_gb'] \
+= vol_ref['size']
except KeyError:
self.stats['pools'][pool] = dict(
allocated_capacity_gb=vol_ref['size'])
return vol_ref['id']
def promote_replica(self, ctxt, volume_id):
"""Promote volume replica secondary to be the primary volume."""
try:
utils.require_driver_initialized(self.driver)
except exception.DriverNotInitialized:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to promote replica for volume %(id)s.")
% {'id': volume_id})
volume = self.db.volume_get(ctxt, volume_id)
model_update = None
try:
LOG.debug("Volume %s: promote replica.", volume_id)
model_update = self.driver.promote_replica(ctxt, volume)
except exception.CinderException:
err_msg = (_('Error promoting secondary volume to primary'))
raise exception.ReplicationError(reason=err_msg,
volume_id=volume_id)
try:
if model_update:
volume = self.db.volume_update(ctxt,
volume_id,
model_update)
except exception.CinderException:
err_msg = (_("Failed updating model"
" with driver provided model %(model)s") %
{'model': model_update})
raise exception.ReplicationError(reason=err_msg,
volume_id=volume_id)
def reenable_replication(self, ctxt, volume_id):
"""Re-enable replication of secondary volume with primary volumes."""
try:
utils.require_driver_initialized(self.driver)
except exception.DriverNotInitialized:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to sync replica for volume %(id)s.")
% {'id': volume_id})
volume = self.db.volume_get(ctxt, volume_id)
model_update = None
try:
LOG.debug("Volume %s: sync replica.", volume_id)
model_update = self.driver.reenable_replication(ctxt, volume)
except exception.CinderException:
err_msg = (_('Error synchronizing secondary volume to primary'))
raise exception.ReplicationError(reason=err_msg,
volume_id=volume_id)
try:
if model_update:
volume = self.db.volume_update(ctxt,
volume_id,
model_update)
except exception.CinderException:
err_msg = (_("Failed updating model"
" with driver provided model %(model)s") %
{'model': model_update})
raise exception.ReplicationError(reason=err_msg,
volume_id=volume_id)
@periodic_task.periodic_task
def _update_replication_relationship_status(self, ctxt):
LOG.info(_('Updating volume replication status.'))
if not self.driver.initialized:
if self.driver.configuration.config_group is None:
config_group = ''
else:
config_group = ('(config name %s)' %
self.driver.configuration.config_group)
LOG.warning(_('Unable to update volume replication status, '
'%(driver_name)s -%(driver_version)s '
'%(config_group)s driver is uninitialized.') %
{'driver_name': self.driver.__class__.__name__,
'driver_version': self.driver.get_version(),
'config_group': config_group})
else:
volumes = self.db.volume_get_all_by_host(ctxt, self.host)
for vol in volumes:
model_update = None
try:
model_update = self.driver.get_replication_status(
ctxt, vol)
if model_update:
self.db.volume_update(ctxt,
vol['id'],
model_update)
except Exception:
LOG.exception(_("Error checking replication status for "
"volume %s") % vol['id'])
def create_consistencygroup(self, context, group_id):
"""Creates the consistency group."""
context = context.elevated()
group_ref = self.db.consistencygroup_get(context, group_id)
group_ref['host'] = self.host
status = 'available'
model_update = False
self._notify_about_consistencygroup_usage(
context, group_ref, "create.start")
try:
utils.require_driver_initialized(self.driver)
LOG.info(_("Consistency group %s: creating"), group_ref['name'])
model_update = self.driver.create_consistencygroup(context,
group_ref)
if model_update:
group_ref = self.db.consistencygroup_update(
context, group_ref['id'], model_update)
except Exception:
with excutils.save_and_reraise_exception():
self.db.consistencygroup_update(
context,
group_ref['id'],
{'status': 'error'})
LOG.error(_("Consistency group %s: create failed"),
group_ref['name'])
now = timeutils.utcnow()
self.db.consistencygroup_update(context,
group_ref['id'],
{'status': status,
'created_at': now})
LOG.info(_("Consistency group %s: created successfully"),
group_ref['name'])
self._notify_about_consistencygroup_usage(
context, group_ref, "create.end")
return group_ref['id']
def delete_consistencygroup(self, context, group_id):
"""Deletes consistency group and the volumes in the group."""
context = context.elevated()
group_ref = self.db.consistencygroup_get(context, group_id)
        # Both branches below would resolve to the same value, since the else
        # branch only runs when the two project ids already match.
        project_id = group_ref['project_id']
LOG.info(_("Consistency group %s: deleting"), group_ref['id'])
volumes = self.db.volume_get_all_by_group(context, group_id)
for volume_ref in volumes:
if volume_ref['attach_status'] == "attached":
# Volume is still attached, need to detach first
raise exception.VolumeAttached(volume_id=volume_ref['id'])
# self.host is 'host@backend'
# volume_ref['host'] is 'host@backend#pool'
# Extract host before doing comparison
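            # Illustrative example (hypothetical values, not from the original
            # code): extract_host('node1@lvmdriver#pool0') would yield
            # 'node1@lvmdriver', which is then compared against self.host.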
new_host = vol_utils.extract_host(volume_ref['host'])
if new_host != self.host:
raise exception.InvalidVolume(
reason=_("Volume is not local to this node"))
self._notify_about_consistencygroup_usage(
context, group_ref, "delete.start")
try:
utils.require_driver_initialized(self.driver)
LOG.debug("Consistency group %(group_id)s: deleting",
{'group_id': group_id})
model_update, volumes = self.driver.delete_consistencygroup(
context, group_ref)
if volumes:
for volume in volumes:
update = {'status': volume['status']}
self.db.volume_update(context, volume['id'],
update)
# If we failed to delete a volume, make sure the status
# for the cg is set to error as well
if (volume['status'] in ['error_deleting', 'error'] and
model_update['status'] not in
['error_deleting', 'error']):
model_update['status'] = volume['status']
if model_update:
if model_update['status'] in ['error_deleting', 'error']:
msg = (_('Error occurred when deleting consistency group '
'%s.') % group_ref['id'])
LOG.exception(msg)
raise exception.VolumeDriverException(message=msg)
else:
self.db.consistencygroup_update(context, group_ref['id'],
model_update)
except Exception:
with excutils.save_and_reraise_exception():
self.db.consistencygroup_update(
context,
group_ref['id'],
{'status': 'error_deleting'})
# Get reservations for group
try:
reserve_opts = {'consistencygroups': -1}
cgreservations = CGQUOTAS.reserve(context,
project_id=project_id,
**reserve_opts)
except Exception:
cgreservations = None
LOG.exception(_("Failed to update usages deleting "
"consistency groups."))
for volume_ref in volumes:
# Get reservations for volume
try:
volume_id = volume_ref['id']
reserve_opts = {'volumes': -1,
'gigabytes': -volume_ref['size']}
QUOTAS.add_volume_type_opts(context,
reserve_opts,
volume_ref.get('volume_type_id'))
reservations = QUOTAS.reserve(context,
project_id=project_id,
**reserve_opts)
except Exception:
reservations = None
LOG.exception(_("Failed to update usages deleting volume."))
# Delete glance metadata if it exists
self.db.volume_glance_metadata_delete_by_volume(context, volume_id)
self.db.volume_destroy(context, volume_id)
# Commit the reservations
if reservations:
QUOTAS.commit(context, reservations, project_id=project_id)
self.stats['allocated_capacity_gb'] -= volume_ref['size']
if cgreservations:
CGQUOTAS.commit(context, cgreservations,
project_id=project_id)
self.db.consistencygroup_destroy(context, group_id)
LOG.info(_("Consistency group %s: deleted successfully."),
group_id)
self._notify_about_consistencygroup_usage(
context, group_ref, "delete.end")
self.publish_service_capabilities(context)
return True
def create_cgsnapshot(self, context, group_id, cgsnapshot_id):
"""Creates the cgsnapshot."""
caller_context = context
context = context.elevated()
cgsnapshot_ref = self.db.cgsnapshot_get(context, cgsnapshot_id)
LOG.info(_("Cgsnapshot %s: creating."), cgsnapshot_ref['id'])
snapshots = self.db.snapshot_get_all_for_cgsnapshot(context,
cgsnapshot_id)
self._notify_about_cgsnapshot_usage(
context, cgsnapshot_ref, "create.start")
try:
utils.require_driver_initialized(self.driver)
LOG.debug("Cgsnapshot %(cgsnap_id)s: creating.",
{'cgsnap_id': cgsnapshot_id})
# Pass context so that drivers that want to use it, can,
# but it is not a requirement for all drivers.
cgsnapshot_ref['context'] = caller_context
for snapshot in snapshots:
snapshot['context'] = caller_context
model_update, snapshots = \
self.driver.create_cgsnapshot(context, cgsnapshot_ref)
if snapshots:
for snapshot in snapshots:
# Update db if status is error
if snapshot['status'] == 'error':
update = {'status': snapshot['status']}
self.db.snapshot_update(context, snapshot['id'],
update)
# If status for one snapshot is error, make sure
# the status for the cgsnapshot is also error
if model_update['status'] != 'error':
model_update['status'] = snapshot['status']
if model_update:
if model_update['status'] == 'error':
msg = (_('Error occurred when creating cgsnapshot '
'%s.') % cgsnapshot_ref['id'])
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
except Exception:
with excutils.save_and_reraise_exception():
self.db.cgsnapshot_update(context,
cgsnapshot_ref['id'],
{'status': 'error'})
for snapshot in snapshots:
volume_id = snapshot['volume_id']
snapshot_id = snapshot['id']
vol_ref = self.db.volume_get(context, volume_id)
if vol_ref.bootable:
try:
self.db.volume_glance_metadata_copy_to_snapshot(
context, snapshot['id'], volume_id)
except exception.CinderException as ex:
LOG.error(_("Failed updating %(snapshot_id)s"
" metadata using the provided volumes"
" %(volume_id)s metadata") %
{'volume_id': volume_id,
'snapshot_id': snapshot_id})
self.db.snapshot_update(context,
snapshot['id'],
{'status': 'error'})
raise exception.MetadataCopyFailure(reason=ex)
self.db.snapshot_update(context,
snapshot['id'], {'status': 'available',
'progress': '100%'})
self.db.cgsnapshot_update(context,
cgsnapshot_ref['id'],
{'status': 'available'})
LOG.info(_("cgsnapshot %s: created successfully"),
cgsnapshot_ref['id'])
self._notify_about_cgsnapshot_usage(
context, cgsnapshot_ref, "create.end")
return cgsnapshot_id
def delete_cgsnapshot(self, context, cgsnapshot_id):
"""Deletes cgsnapshot."""
caller_context = context
context = context.elevated()
cgsnapshot_ref = self.db.cgsnapshot_get(context, cgsnapshot_id)
project_id = cgsnapshot_ref['project_id']
LOG.info(_("cgsnapshot %s: deleting"), cgsnapshot_ref['id'])
snapshots = self.db.snapshot_get_all_for_cgsnapshot(context,
cgsnapshot_id)
self._notify_about_cgsnapshot_usage(
context, cgsnapshot_ref, "delete.start")
try:
utils.require_driver_initialized(self.driver)
LOG.debug("cgsnapshot %(cgsnap_id)s: deleting",
{'cgsnap_id': cgsnapshot_id})
# Pass context so that drivers that want to use it, can,
# but it is not a requirement for all drivers.
cgsnapshot_ref['context'] = caller_context
for snapshot in snapshots:
snapshot['context'] = caller_context
model_update, snapshots = \
self.driver.delete_cgsnapshot(context, cgsnapshot_ref)
if snapshots:
for snapshot in snapshots:
update = {'status': snapshot['status']}
self.db.snapshot_update(context, snapshot['id'],
update)
if snapshot['status'] in ['error_deleting', 'error'] and \
model_update['status'] not in \
['error_deleting', 'error']:
model_update['status'] = snapshot['status']
if model_update:
if model_update['status'] in ['error_deleting', 'error']:
msg = (_('Error occurred when deleting cgsnapshot '
'%s.') % cgsnapshot_ref['id'])
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
else:
self.db.cgsnapshot_update(context, cgsnapshot_ref['id'],
model_update)
except Exception:
with excutils.save_and_reraise_exception():
self.db.cgsnapshot_update(context,
cgsnapshot_ref['id'],
{'status': 'error_deleting'})
for snapshot in snapshots:
# Get reservations
try:
if CONF.no_snapshot_gb_quota:
reserve_opts = {'snapshots': -1}
else:
reserve_opts = {
'snapshots': -1,
'gigabytes': -snapshot['volume_size'],
}
volume_ref = self.db.volume_get(context, snapshot['volume_id'])
QUOTAS.add_volume_type_opts(context,
reserve_opts,
volume_ref.get('volume_type_id'))
reservations = QUOTAS.reserve(context,
project_id=project_id,
**reserve_opts)
except Exception:
reservations = None
LOG.exception(_("Failed to update usages deleting snapshot"))
self.db.volume_glance_metadata_delete_by_snapshot(context,
snapshot['id'])
self.db.snapshot_destroy(context, snapshot['id'])
# Commit the reservations
if reservations:
QUOTAS.commit(context, reservations, project_id=project_id)
self.db.cgsnapshot_destroy(context, cgsnapshot_id)
LOG.info(_("cgsnapshot %s: deleted successfully"),
cgsnapshot_ref['id'])
self._notify_about_cgsnapshot_usage(
context, cgsnapshot_ref, "delete.end")
return True
|
redhat-openstack/cinder
|
cinder/volume/manager.py
|
Python
|
apache-2.0
| 91,945
|
# Copyright (c) 2006 by Aurelien Foret <orelien@chez.com>
# Copyright (c) 2006-2022 Pacman Development Team <pacman-dev@lists.archlinux.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import stat
import tap
import util
class pmrule(object):
"""Rule object
"""
def __init__(self, rule):
self.rule = rule
self.false = 0
self.result = 0
def __str__(self):
return self.rule
def snapshots_needed(self):
(testname, args) = self.rule.split("=")
if testname == "FILE_MODIFIED" or testname == "!FILE_MODIFIED":
return [args]
return []
def check(self, test):
"""
"""
success = 1
[testname, args] = self.rule.split("=")
if testname[0] == "!":
self.false = 1
testname = testname[1:]
[kind, case] = testname.split("_")
if "|" in args:
[key, value] = args.split("|", 1)
else:
[key, value] = [args, None]
if kind == "PACMAN":
if case == "RETCODE":
if test.retcode != int(key):
success = 0
elif case == "OUTPUT":
logfile = os.path.join(test.root, util.LOGFILE)
if not os.access(logfile, os.F_OK):
tap.diag("LOGFILE not found, cannot validate 'OUTPUT' rule")
success = 0
elif not util.grep(logfile, key):
success = 0
else:
tap.diag("PACMAN rule '%s' not found" % case)
success = -1
elif kind == "PKG":
localdb = test.db["local"]
newpkg = localdb.db_read(key)
if not newpkg:
success = 0
else:
if case == "EXIST":
success = 1
elif case == "VERSION":
if value != newpkg.version:
success = 0
elif case == "DESC":
if value != newpkg.desc:
success = 0
elif case == "GROUPS":
if not value in newpkg.groups:
success = 0
elif case == "PROVIDES":
if not value in newpkg.provides:
success = 0
elif case == "DEPENDS":
if not value in newpkg.depends:
success = 0
elif case == "OPTDEPENDS":
success = 0
for optdep in newpkg.optdepends:
if value == optdep.split(':', 1)[0]:
success = 1
break
elif case == "REASON":
if newpkg.reason != int(value):
success = 0
elif case == "FILES":
if not value in newpkg.files:
success = 0
elif case == "BACKUP":
success = 0
for f in newpkg.backup:
if f.startswith(value + "\t"):
success = 1
                            break
else:
tap.diag("PKG rule '%s' not found" % case)
success = -1
elif kind == "FILE":
filename = os.path.join(test.root, key)
if case == "EXIST":
if not os.path.isfile(filename):
success = 0
elif case == "EMPTY":
if not (os.path.isfile(filename)
and os.path.getsize(filename) == 0):
success = 0
elif case == "CONTENTS":
try:
with open(filename, 'r') as f:
success = f.read() == value
except:
success = 0
elif case == "MODIFIED":
for f in test.files:
if f.name == key:
if not f.ismodified():
success = 0
break
elif case == "MODE":
if not os.path.isfile(filename):
success = 0
else:
mode = os.lstat(filename)[stat.ST_MODE]
if int(value, 8) != stat.S_IMODE(mode):
success = 0
elif case == "TYPE":
if value == "dir":
if not os.path.isdir(filename):
success = 0
elif value == "file":
if not os.path.isfile(filename):
success = 0
elif value == "link":
if not os.path.islink(filename):
success = 0
elif case == "PACNEW":
if not os.path.isfile("%s.pacnew" % filename):
success = 0
elif case == "PACSAVE":
if not os.path.isfile("%s.pacsave" % filename):
success = 0
else:
tap.diag("FILE rule '%s' not found" % case)
success = -1
elif kind == "DIR":
filename = os.path.join(test.root, key)
if case == "EXIST":
if not os.path.isdir(filename):
success = 0
else:
tap.diag("DIR rule '%s' not found" % case)
success = -1
elif kind == "LINK":
filename = os.path.join(test.root, key)
if case == "EXIST":
if not os.path.islink(filename):
success = 0
else:
tap.diag("LINK rule '%s' not found" % case)
success = -1
elif kind == "CACHE":
cachedir = os.path.join(test.root, util.PM_CACHEDIR)
if case == "EXISTS":
pkg = test.findpkg(key, value, allow_local=True)
if not pkg or not os.path.isfile(
os.path.join(cachedir, pkg.filename())):
success = 0
elif case == "FEXISTS":
if not os.path.isfile(os.path.join(cachedir, key)):
success = 0
elif case == "FCONTENTS":
filename = os.path.join(cachedir, key)
try:
with open(filename, 'r') as f:
success = f.read() == value
except:
success = 0
else:
tap.diag("Rule kind '%s' not found" % kind)
success = -1
if self.false and success != -1:
success = not success
self.result = success
return success
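# Illustrative rule strings, inferred from the cases handled in check() above
# (the package name, version and paths are hypothetical examples, not part of
# the original test suite):
#   PACMAN_RETCODE=0                  pacman exited successfully
#   PKG_VERSION=dummy|1.0-2           local db lists dummy at version 1.0-2
#   FILE_EXIST=etc/dummy.conf         file exists under the test root
#   !FILE_MODIFIED=etc/dummy.conf     file was not modified by the test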
|
eworm-de/pacman
|
test/pacman/pmrule.py
|
Python
|
gpl-2.0
| 7,425
|
######################################
# Last update: 27 November 2018, Jan Dreyling-Eschweiler
######################################
# Sensor Name
sensor_name = "104"
# Middlepoints in DAC
IVDREF2 = 100
IVDREF1A = 65
IVDREF1B = 80
IVDREF1C = 135
IVDREF1D = 145
# Thermal noise: TN
THN_matA = 0.9318
THN_matB = 0.8521
THN_matC = 0.8079
THN_matD = 0.8679
# Fixed pattern noise: FPN
FPN_matA = 0.4042
FPN_matB = 0.4578
FPN_matC = 0.5161
FPN_matD = 0.5197
# Offset
OFF_matA = -0.4058
OFF_matB = -0.0084
OFF_matC = 0.2322
OFF_matD = 0.1061
# Disable columns/discriminators
DIS_col = []
######################################
# Generate using Jtag class
from generate_jtag import Jtag
# Constructor
chip = Jtag(sensor_name, IVDREF2, IVDREF1A, IVDREF1B, IVDREF1C, IVDREF1D, THN_matA, THN_matB, THN_matC, THN_matD, FPN_matA, FPN_matB, FPN_matC, FPN_matD, OFF_matA, OFF_matB, OFF_matC, OFF_matD, DIS_col)
# Print values
chip.values()
# Generate txt-files from threshold minimum to maximum
chip.generate(3, 12)
|
eudaq/eudaq-configuration
|
jtag_generation/sensors/chip104/chip104.py
|
Python
|
lgpl-3.0
| 1,011
|
from django.core.management.base import BaseCommand
from build.management.commands.base_build import Command as BaseBuild
from django.db import transaction
from contactnetwork.models import *
from residue.models import Residue
import contactnetwork.interaction as ci
import logging
import datetime
from contactnetwork.cube import compute_interactions
from django.contrib.contenttypes.models import ContentType
class Command(BaseBuild):
help = 'Compute interactions for all available crystals.'
logger = logging.getLogger(__name__)
def add_arguments(self, parser):
parser.add_argument('-p', '--proc',
type=int,
action='store',
dest='proc',
default=1,
help='Number of processes to run')
def handle(self, *args, **options):
self.delete_all()
self.structures = Structure.objects.all().exclude(refined=True)
self.prepare_input(options['proc'], self.structures)
self.logger.info('Finished building crystal interaction data for all PDBs!')
def delete_all(self):
VanDerWaalsInteraction.objects.all().delete()
HydrophobicInteraction.objects.all().delete()
PolarBackboneSidechainInteraction.objects.all().delete()
PolarSidechainSidechainInteraction.objects.all().delete()
FaceToFaceInteraction.objects.all().delete()
FaceToEdgeInteraction.objects.all().delete()
PiCationInteraction.objects.all().delete()
InteractingResiduePair.objects.all().delete()
self.logger.info('Deleted crystal interactions data all PDBs...')
# @transaction.atomic
def main_func(self, positions, iteration,count,lock):
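        # count and lock appear to be shared multiprocessing primitives handed
        # in by the base build command: each worker takes the lock, claims the
        # next structure index, bumps the counter, and then processes that
        # structure outside the critical section.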
while count.value<len(self.structures):
with lock:
s = self.structures[count.value]
pdb_code = s.protein_conformation.protein.entry_name
count.value +=1
self.logger.info('Generating crystal interactions data for PDB \'{}\'... ({} out of {})'.format(pdb_code, count.value, len(self.structures)))
try:
interacting_pairs = compute_interactions(pdb_code)
except:
self.logger.error('Error with computing interactions (%s)' % (pdb_code))
continue
for p in interacting_pairs:
# Create the pair
res1_seq_num = p.get_residue_1().id[1]
res2_seq_num = p.get_residue_2().id[1]
conformation = s.protein_conformation
# Get the residues
try:
res1 = Residue.objects.get(sequence_number=res1_seq_num, protein_conformation=conformation)
res2 = Residue.objects.get(sequence_number=res2_seq_num, protein_conformation=conformation)
except Residue.DoesNotExist:
self.logger.warning('Error with pair between %s and %s (%s)' % (res1_seq_num,res2_seq_num,conformation))
continue
# Save the pair
pair = InteractingResiduePair()
pair.res1 = res1
pair.res2 = res2
pair.referenced_structure = s
pair.save()
# Add the interactions to the pair
for i in p.get_interactions():
if type(i) is ci.VanDerWaalsInteraction:
ni = VanDerWaalsInteraction()
ni.interacting_pair = pair
ni.save()
elif type(i) is ci.HydrophobicInteraction:
ni = HydrophobicInteraction()
ni.interacting_pair = pair
ni.save()
elif type(i) is ci.PolarSidechainSidechainInteraction:
ni = PolarSidechainSidechainInteraction()
ni.interacting_pair = pair
ni.is_charged_res1 = i.is_charged_res1
ni.is_charged_res2 = i.is_charged_res2
ni.save()
elif type(i) is ci.PolarBackboneSidechainInteraction:
ni = PolarBackboneSidechainInteraction()
ni.interacting_pair = pair
ni.is_charged_res1 = i.is_charged_res1
ni.is_charged_res2 = i.is_charged_res2
ni.res1_is_sidechain = False
ni.save()
elif type(i) is ci.PolarSideChainBackboneInteraction:
ni = PolarBackboneSidechainInteraction()
ni.interacting_pair = pair
ni.is_charged_res1 = i.is_charged_res1
ni.is_charged_res2 = i.is_charged_res2
ni.res1_is_sidechain = True
ni.save()
elif type(i) is ci.FaceToFaceInteraction:
ni = FaceToFaceInteraction()
ni.interacting_pair = pair
ni.save()
elif type(i) is ci.FaceToEdgeInteraction:
ni = FaceToEdgeInteraction()
ni.interacting_pair = pair
ni.res1_has_face = True
ni.save()
elif type(i) is ci.EdgeToFaceInteraction:
ni = FaceToEdgeInteraction()
ni.interacting_pair = pair
ni.res1_has_face = False
ni.save()
elif type(i) is ci.PiCationInteraction:
ni = PiCationInteraction()
ni.interacting_pair = pair
ni.res1_has_pi = True
ni.save()
elif type(i) is ci.CationPiInteraction:
ni = PiCationInteraction()
ni.interacting_pair = pair
ni.res1_has_pi = False
ni.save()
self.logger.info('Generated crystal interactions data for PDB \'{}\'...'.format(pdb_code))
|
cmunk/protwis
|
build/management/commands/build_crystal_interactions.py
|
Python
|
apache-2.0
| 6,177
|
from setuptools import setup, find_packages
import os
from io import open
import re
# this setup.py is set up in a specific way to keep the azure* and azure-mgmt-* namespaces WORKING all the way
# up from python 2.7. Reference here: https://github.com/Azure/azure-sdk-for-python/wiki/Azure-packaging
PACKAGE_NAME = "azure-mixedreality-remoterendering"
PACKAGE_PPRINT_NAME = "Azure Remote Rendering"
# a-b-c => a/b/c
package_folder_path = PACKAGE_NAME.replace('-', '/')
# a-b-c => a.b.c
namespace_name = PACKAGE_NAME.replace('-', '.')
# Version extraction inspired from 'requests'
with open(os.path.join(package_folder_path, '_version.py'), 'r') as fd:
version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.md', encoding='utf-8') as f:
long_description = f.read()
with open("CHANGELOG.md", encoding="utf-8") as f:
long_description += f.read()
setup(
name=PACKAGE_NAME,
version=version,
description='Microsoft Azure {} Client Library for Python'.format(
PACKAGE_PPRINT_NAME),
# ensure that these are updated to reflect the package owners' information
long_description=long_description,
url='https://github.com/Azure/azure-sdk-for-python',
author='Microsoft Corporation',
author_email='azuresdkengsysadmins@microsoft.com',
license='MIT License',
# ensure that the development status reflects the status of your package
classifiers=[
"Development Status :: 4 - Beta",
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'License :: OSI Approved :: MIT License',
],
packages=find_packages(exclude=[
'tests',
# Exclude packages that will be covered by PEP420 or nspkg
'azure',
'azure.mixedreality'
]),
install_requires=[
'azure-core<2.0.0,>=1.6.0',
'azure-mixedreality-authentication>=1.0.0b1',
'msrest>=0.6.21'
],
extras_require={
":python_version<'3.0'": ['futures', 'azure-mixedreality-nspkg'],
":python_version<'3.4'": ['enum34>=1.0.4'],
":python_version<'3.5'": ["typing"]
},
project_urls={
'Bug Reports': 'https://github.com/Azure/azure-sdk-for-python/issues',
'Source': 'https://github.com/Azure/azure-sdk-python',
}
)
|
Azure/azure-sdk-for-python
|
sdk/remoterendering/azure-mixedreality-remoterendering/setup.py
|
Python
|
mit
| 2,825
|
# -*- coding: utf-8 -*-
#
# User interface module of Dashboard.
#
# (C) 2013 Internet Initiative Japan Inc.
# All rights reserved.
#
# Created on 2013/05/20
# @author: yosinobu@iij.ad.jp
from genshi.builder import tag
from pkg_resources import resource_filename
from trac.core import *
from trac.perm import IPermissionRequestor
from trac.web.api import IRequestHandler
from trac.web.chrome import INavigationContributor, ITemplateProvider, \
add_script, add_stylesheet, add_script_data
from tracportal.api import IProjectListProvider
from tracportal.i18n import _
from tracportal.project_list.api import IProjectInfoProvider
class DashboardModule(Component):
implements(INavigationContributor, IPermissionRequestor, IRequestHandler, ITemplateProvider)
project_list_providers = ExtensionPoint(IProjectListProvider)
project_info_providers = ExtensionPoint(IProjectInfoProvider)
# INavigationContributor methods
def get_active_navigation_item(self, req):
return 'dashboard'
def get_navigation_items(self, req):
try:
import tracrpc
except ImportError:
self.log.info('TracPortalPlugin\'s dashboard feature requires TracXMLRPC plugin.')
return
if 'PORTAL_DASHBOARD_VIEW' in req.perm:
yield ('mainnav', 'dashboard',
tag.a(_('Dashboard'), href=req.href('/dashboard'), accesskey=6))
# IPermissionRequestor methods
def get_permission_actions(self):
return ['PORTAL_DASHBOARD_VIEW']
# IRequestHandler methods
def match_request(self, req):
return req.path_info and req.path_info.startswith('/dashboard')
def process_request(self, req):
req.perm.require('PORTAL_DASHBOARD_VIEW')
data = {}
# Add scripts/styles
add_stylesheet(req, 'tracportal/css/dashboard.css')
add_stylesheet(req, 'tracportal/css/smoothness/jquery-ui-1.10.3.custom.min.css')
add_stylesheet(req, 'common/css/report.css')
if req.locale is not None:
add_script(req, 'tracportal/js/messages/%s.js' % req.locale)
add_script(req, 'tracportal/js/dashboard.js')
add_script(req, 'tracportal/js/jquery-1.9.1.js')
add_script(req, 'tracportal/js/jquery-ui-1.10.3.custom.min.js')
# data['_'] = _
add_script_data(req, {
'tracportal': {
'authname': req.authname
}
})
return "dashboard.html", data, None
# ITemplateProvider methods
def get_htdocs_dirs(self):
return [('tracportal', resource_filename('tracportal', 'htdocs'))]
def get_templates_dirs(self):
return [resource_filename('tracportal.dashboard', 'templates')]
|
iij/TracPortalPlugin
|
tracportal/dashboard/web_ui.py
|
Python
|
mit
| 2,727
|
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
import re
from waflib.Tools import ccroot
from waflib import Utils
from waflib.Logs import debug
c_compiler={'win32':['msvc','gcc','clang'],'cygwin':['gcc'],'darwin':['clang','gcc'],'aix':['xlc','gcc','clang'],'linux':['gcc','clang','icc'],'sunos':['suncc','gcc'],'irix':['gcc','irixcc'],'hpux':['gcc'],'osf1V':['gcc'],'gnu':['gcc','clang'],'java':['gcc','msvc','clang','icc'],'default':['gcc','clang'],}
def default_compilers():
build_platform=Utils.unversioned_sys_platform()
possible_compiler_list=c_compiler.get(build_platform,c_compiler['default'])
return' '.join(possible_compiler_list)
def configure(conf):
try:test_for_compiler=conf.options.check_c_compiler or default_compilers()
except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_c')")
for compiler in re.split('[ ,]+',test_for_compiler):
conf.env.stash()
conf.start_msg('Checking for %r (C compiler)'%compiler)
try:
conf.load(compiler)
except conf.errors.ConfigurationError ,e:
conf.env.revert()
conf.end_msg(False)
debug('compiler_c: %r'%e)
else:
if conf.env['CC']:
conf.end_msg(conf.env.get_flat('CC'))
conf.env['COMPILER_CC']=compiler
break
conf.end_msg(False)
else:
conf.fatal('could not configure a C compiler!')
def options(opt):
test_for_compiler=default_compilers()
opt.load_special_tools('c_*.py',ban=['c_dumbpreproc.py'])
cc_compiler_opts=opt.add_option_group('Configuration options')
cc_compiler_opts.add_option('--check-c-compiler',default=None,help='list of C compilers to try [%s]'%test_for_compiler,dest="check_c_compiler")
for x in test_for_compiler.split():
opt.load('%s'%x)
|
softDi/clusim
|
ns3/ns-3.26/.waf-1.8.19-b1fc8f7baef51bd2db4c2971909a568d/waflib/Tools/compiler_c.py
|
Python
|
apache-2.0
| 1,750
|
""" This file contains the baseline functions for detecting anomalies from
sensor data using the ellipsoid boundary modeling techniques outlined by
Dr. Suthaharan et al. They are accessible via the IPython notebook
at the root of the repository and can be interchanged with custom functions
for exploratory analysis of the algorithm(s)
These baseline functions can be replaced with other ellipsoid boundary
modeling techniques to work with the respective IPython notebook.
"""
import math
import numpy
import random
"""Begin data input functions"""
def read_ibrl_data(data_file):
"""Reads IBRL data from file and returns dict mapping
temp./humidity sensor data to the node that collected them
:param data_file: string representing path to ibrl dataset
:return: dictionary mapping sensor node to list of tuples containing sensor data
"""
with open(data_file, 'r') as fp:
row_count = 0
bad_count = 0
input_readings = {}
for line in fp:
row_count = row_count + 1
line = line.strip() # remove edge whitespace
tokens = line.split(',') # segregate each section
try:
                if len(tokens) == 5:
temp_reading = tokens[0]
humidity_reading = tokens[1]
sensor_id = tokens[3]
# if sensor id is in the input_readings dict
if sensor_id in input_readings:
# add the temp and humidity values
input_readings[sensor_id]['temp_readings'].append(temp_reading)
input_readings[sensor_id]['humidity_readings'].append(humidity_reading)
else:
# add the sensor id and new temp and humidity values
input_readings[sensor_id] = {
'temp_readings': [tokens[0]],
'humidity_readings': [tokens[1]]
}
else: # Note and dump bad data
bad_count = bad_count + 1
except Exception as e:
raise e
# Convert data points to numpy arrays
measurements = {sensor: numpy.array([readings['temp_readings'], readings['humidity_readings']], float)
for (sensor, readings) in input_readings.iteritems()}
print "Total rows: %s" % row_count
print "Total incomplete rows: %s" % bad_count
return measurements
"""Begin data transformation functions"""
def randomize_readings(sensors):
""" Pseudo randomly shuffles location of each pair of temperature and
humidity observations
:param sensors: Dictionary of sensors containing temp. and humidity readings
:return: Dictionary of sensors containing shuffled temp. and humid. readings
"""
for sensor in sensors:
tuples = [(sensors[sensor][0][i],sensors[sensor][1][i])
for i in range(len(sensors[sensor][0]))]
random.shuffle(tuples)
sensors[sensor] = numpy.array([[reading[0] for reading in tuples], [reading[1] for reading in tuples]], float)
return sensors
def generate_differences(sensors):
"""Generates a dictionary mapping sensors to a 2D array containing
the successive differences of temp. and humidity measurements as well as
a look up table mapping the resulting differences to their original
measurements.
    :param sensors: Dictionary mapping sensors to original arrays of temp.
and humidity readings
:return: tuple containing dictionary mapping sensors to successive
differences and look up table mapping the results to their original
measurements
"""
differences = {}
lookup_table = {}
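    # Note: the lookup table is created but never populated here, so callers
    # currently receive an empty dict alongside the differences.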
for sensor in sensors:
differences[sensor] = calc_succ_diff(sensors[sensor])
return (differences, lookup_table)
def calc_succ_diff(sensor):
""" Calculates the successive differences of for a given sensor
:param sensor: Sensor to be operated on
:return: numpy array of successive differences
"""
sensor = [
successive_diff(sensor[0]),
successive_diff(sensor[1])
]
return numpy.array(sensor, float)
def successive_diff(array):
""" Calculates the successive differences for an array
:param array: Array to be operated on
:return: Array of resulting successive differences
"""
return [array[i+1] - array[i] for i in range(len(array) - 1)]
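# For example, successive_diff([20.0, 20.5, 21.5]) returns [0.5, 1.0]; the
# result is always one element shorter than the input.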
"""Begin ellipsoid modeling functions"""
def generate_regional_ellipsoid_parameters(sensors_ellipsoid_parameters):
""" Generates the aggregate ellipsoid parameters from a list of ellipsoids
within a region
    :param sensors_ellipsoid_parameters: dict mapping each sensor to its
    ellipsoid parameters
    :return: tuple (a, b, theta) of averaged ellipsoid parameters for the
    given region
"""
num_of_ellipsoids = len(sensors_ellipsoid_parameters)
ave_a = sum([sensors_ellipsoid_parameters[ellipsoid]['a'] for ellipsoid in sensors_ellipsoid_parameters]) / num_of_ellipsoids
ave_b = sum([sensors_ellipsoid_parameters[ellipsoid]['b'] for ellipsoid in sensors_ellipsoid_parameters]) / num_of_ellipsoids
ave_theta = sum([sensors_ellipsoid_parameters[ellipsoid]['theta'] for ellipsoid in sensors_ellipsoid_parameters]) / num_of_ellipsoids
return (ave_a, ave_b, ave_theta)
def generate_ellipsoid(sensor, a, b, theta=None):
""" Calculates points representing an ellipsoid for a given a and b
over from sensor readings.
:param sensor: sensor mapped to a 2D array of temp. and humidity readings
:param a: a parameter used in calculating ellipsoid parameters
:param b: b parameter used in calculating ellipsoid parameters
:param theta: optional hardcoded theta value
    :return: ellipsoid_parameters: dictionary containing the parameters used
    to build the ellipsoid as well as the resulting boundary points
"""
if theta is None:
theta = calculate_ellipsoid_orientation(sensor)
A = calc_A(a, b, theta) # A is independent of the temperatures
ellipsoid_parameters = {
'a': a,
'b': b,
'theta': theta,
'original_sensor_readings': sensor,
'ellipsoid_points': []
}
for temp_reading in sensor[0]:
B = calc_B(a, b, temp_reading, theta)
C = calc_C(a, b, temp_reading, theta)
hi1 = calc_hi1(A, B, C)
ellipsoid_parameters['ellipsoid_points'].append((temp_reading, hi1))
hi2 = calc_hi2(A, B, C)
ellipsoid_parameters['ellipsoid_points'].append((temp_reading, hi2))
return ellipsoid_parameters
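# For each temperature reading t, generate_ellipsoid solves the rotated-ellipse
# relation A*h**2 + B*h + C = 0 for the humidity h, so calc_hi1 and calc_hi2
# below are simply the two quadratic-formula roots giving the upper and lower
# boundary points at that temperature.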
def calculate_ellipsoid_orientation(sensor):
""" Calculates the orientation of raw sensor data points
:param sensor: sensor mapped to a 2D array of temp. and humidity readings
:return: float, theta of ellipsoid orientation
"""
n = len(sensor[0])
temperature_readings = sensor[0]
humidity_readings = sensor[1]
    #FIXME(hrybacki): Come up with a better way of breaking these components down
# part_one
part_one_multiplicands = [temperature_readings[i]*humidity_readings[i] for i in range(n)]
part_one_value = n * sum(part_one_multiplicands)
# part two
part_two_value = sum(temperature_readings) * sum(humidity_readings)
# part three
part_three_value = n * sum([math.pow(temp, 2) for temp in temperature_readings])
# part four
part_four_value = math.pow(sum(temperature_readings), 2)
# arctan(theta)
tan_theta = (part_one_value - part_two_value) / (part_three_value - part_four_value)
#return math.atan(tan_theta)
    # @FIXME(hrybacki): Dr. Shan wants this to be absolute value. Do we need that? Why?
#return math.fabs(math.atan(tan_theta))
return math.atan(tan_theta)
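# The orientation above is the arctangent of the ordinary least-squares slope
# of humidity against temperature:
#   tan(theta) = (n*sum(t_i*h_i) - sum(t_i)*sum(h_i))
#                / (n*sum(t_i**2) - (sum(t_i))**2)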
def calc_A(a, b, theta):
""" Returns the A value used in ellipsoid boundary modeling
:param a: represents the major axis of the ellipsoid
    :param b: represents the minor axis of the ellipsoid
:param theta: represents the orientation of the raw measurements
:return: A value used in ellipsoid boundary modeling
"""
A = (math.pow(math.sin(theta), 2) / math.pow(a, 2)) + (math.pow(math.cos(theta), 2) / math.pow(b, 2))
return A
def calc_B(a, b, ti, theta):
""" Returns the B value used in ellipsoid boundary modeling
:param a: represents the major axis of the ellipsoid
    :param b: represents the minor axis of the ellipsoid
:param ti: temperature (independent variable) used in calculation
:param theta: represents the orientation of the raw measurements
:return: B value used in ellipsoid boundary modeling
"""
B = ((1/math.pow(a, 2)) - (1/math.pow(b, 2))) * ti * math.sin(2*theta)
return B
def calc_C(a, b, ti, theta):
""" Returns the C value used in ellipsoid boundary modeling
:param a: represents the major axis of the ellipsoid
    :param b: represents the minor axis of the ellipsoid
:param ti: temperature (independent variable) used in calculation
:param theta: represents the orientation of the raw measurements
:return: C value used in ellipsoid boundary modeling
"""
C = ((math.pow(ti, 2) * math.pow(math.cos(theta), 2)) / math.pow(a, 2)) + \
((math.pow(ti, 2) * math.pow(math.sin(theta), 2)) / math.pow(b, 2)) - 1
return C
def calc_hi1(A, B, C):
""" Calculates the upper point for a given temp modeling an ellipsoid
:param A: A value used in ellipsoid boundary modeling
:param B: B value used in ellipsoid boundary modeling
:param C: C value used in ellipsoid boundary modeling
:return: Upper point for given temperature
"""
try:
return (-B + math.sqrt(math.pow(B, 2) - (4*A*C))) / (2*A)
except ValueError:
pass # skip domain errors
def calc_hi2(A, B, C):
""" Calculates the lower point for a given temp modeling an ellipsoid
:param A: A value used in ellipsoid boundary modeling
:param B: B value used in ellipsoid boundary modeling
:param C: C value used in ellipsoid boundary modeling
:return: Lower point for given temperature
"""
try:
return (-B - math.sqrt(math.pow(B, 2) - (4*A*C))) / (2*A)
except ValueError:
pass # ignore domain errors
"""Begin incomplete functions"""
def inverse_transformation(lookup_table, aggregate_ellipsoid):
""" Generates a tuple of two dicts mapping sensors to anomalies and true measurements
:param lookup_table: dictionary mapping difference readings to their raw measurements
:param aggregate_ellipsoid: 3-tuple containing aggregate ellipsoid parameters
:return: tuple containing two dicts, one of true measurements and another of anomalies
each mapped to their original sensors
"""
pass
def is_anomaly(reading, aggregate_ellipsoid):
""" Determines if reading is anomaly with respect to an ellipsoid
:param reading: temperature and humidity readings
:param aggregate_ellipsoid: parameters for aggregate ellipsoid
:return: True if an anomaly, else False
"""
pass
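# Illustrative sketch only (not part of the original baseline): one way to
# finish is_anomaly() that is consistent with calc_A/calc_B/calc_C above is to
# evaluate the rotated-ellipse quadratic form directly; a point (t, h) would be
# flagged as an anomaly when it falls outside the aggregate ellipsoid.
#
# def is_anomaly(reading, aggregate_ellipsoid):
#     a, b, theta = aggregate_ellipsoid
#     t, h = reading
#     u = math.pow(t * math.cos(theta) + h * math.sin(theta), 2) / math.pow(a, 2)
#     v = math.pow(t * math.sin(theta) - h * math.cos(theta), 2) / math.pow(b, 2)
#     return (u + v) > 1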
|
HarryRybacki/SensorDataResearchReproduction
|
baseline.py
|
Python
|
apache-2.0
| 11,136
|
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Oppia resource handling (e.g. templates, images)."""
from __future__ import annotations
import os
from core import feconf
from core import utils
from core.constants import constants
from core.domain import exp_services
from core.domain import fs_domain
from core.domain import fs_services
from core.domain import rights_manager
from core.domain import skill_services
from core.domain import story_services
from core.domain import topic_domain
from core.domain import topic_fetchers
from core.domain import user_services
from core.tests import test_utils
class AssetDevHandlerImageTests(test_utils.GenericTestBase):
ASSET_HANDLER_URL_PREFIX = '/assetsdevhandler'
def _get_image_url(self, entity_type, entity_id, filename):
"""Gets the image URL."""
return '%s/%s/%s/assets/image/%s' % (
self.ASSET_HANDLER_URL_PREFIX, entity_type, entity_id, filename)
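    # For example (values here are only for illustration), entity_type
    # 'exploration', entity_id '0' and filename 'test.png' map to
    # '/assetsdevhandler/exploration/0/assets/image/test.png'.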
def setUp(self):
"""Load a demo exploration and register self.EDITOR_EMAIL."""
super(AssetDevHandlerImageTests, self).setUp()
exp_services.delete_demo('0')
self.system_user = user_services.get_system_user()
exp_services.load_demo('0')
rights_manager.release_ownership_of_exploration(
self.system_user, '0')
self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
def test_image_upload_with_no_filename_raises_error(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'),
'rb', encoding=None
) as f:
raw_image = f.read()
response_dict = self.post_json(
'%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX, {},
csrf_token=csrf_token,
upload_files=(('image', 'unused_filename', raw_image),),
expected_status_int=400)
self.assertEqual(
response_dict['error'], 'Missing key in handler args: filename.')
self.logout()
def test_get_image_with_invalid_page_context_raises_error(self):
self.login(self.EDITOR_EMAIL)
# Only 404 is raised here due to the try - except block in the
# controller.
self.get_json(
self._get_image_url('invalid_context', '0', 'filename'),
expected_status_int=404)
self.logout()
def test_image_upload_with_invalid_filename_raises_error(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'),
'rb', encoding=None
) as f:
raw_image = f.read()
response_dict = self.post_json(
'%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX,
{'filename': '.png'},
csrf_token=csrf_token,
upload_files=(('image', 'unused_filename', raw_image),),
expected_status_int=400)
error_msg = (
'Schema validation for \'filename\' failed: Validation'
' failed: is_regex_matched ({\'regex_pattern\': '
'\'\\\\w+[.]\\\\w+\'}) for object .png'
)
self.assertEqual(response_dict['error'], error_msg)
self.logout()
def test_cannot_upload_duplicate_image(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'),
'rb', encoding=None
) as f:
raw_image = f.read()
response_dict = self.post_json(
'%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX,
{'filename': 'test.png'},
csrf_token=csrf_token,
upload_files=(('image', 'unused_filename', raw_image),))
filename = response_dict['filename']
response = self.get_custom_response(
self._get_image_url('exploration', '0', filename), 'image/png')
self.assertEqual(response.body, raw_image)
response_dict = self.post_json(
'%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX,
{'filename': 'test.png'},
csrf_token=csrf_token,
upload_files=(('image', 'unused_filename', raw_image),),
expected_status_int=400)
self.assertEqual(
response_dict['error'],
'A file with the name test.png already exists. Please choose a '
'different name.')
def test_image_upload_and_download(self):
"""Test image uploading and downloading."""
self.signup(self.CURRICULUM_ADMIN_EMAIL, self.CURRICULUM_ADMIN_USERNAME)
admin_id = self.get_user_id_from_email(self.CURRICULUM_ADMIN_EMAIL)
self.set_curriculum_admins([self.CURRICULUM_ADMIN_USERNAME])
subtopic = topic_domain.Subtopic.create_default_subtopic(
1, 'Subtopic Title')
story_id = story_services.get_new_story_id()
topic_id = topic_fetchers.get_new_topic_id()
skill_id = skill_services.get_new_skill_id()
self.save_new_story(story_id, admin_id, topic_id)
self.save_new_topic(
topic_id, admin_id, name='Name',
description='Description', canonical_story_ids=[story_id],
additional_story_ids=[], uncategorized_skill_ids=[],
subtopics=[subtopic], next_subtopic_id=2)
self.save_new_skill(skill_id, admin_id, description='Description')
# Page context: Exploration.
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb', encoding=None
) as f:
raw_image = f.read()
response_dict = self.post_json(
'%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX,
{'filename': 'test.png'},
csrf_token=csrf_token,
upload_files=(('image', 'unused_filename', raw_image),)
)
filename = response_dict['filename']
self.logout()
response = self.get_custom_response(
self._get_image_url('exploration', '0', filename), 'image/png')
self.assertEqual(response.body, raw_image)
# Page context: Topic.
self.login(self.CURRICULUM_ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb', encoding=None
) as f:
raw_image = f.read()
response_dict = self.post_json(
'%s/topic/%s' % (
feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX, topic_id),
{'filename': 'test.png'},
csrf_token=csrf_token,
upload_files=(('image', 'unused_filename', raw_image),)
)
filename = response_dict['filename']
self.logout()
response = self.get_custom_response(
self._get_image_url('topic', topic_id, filename), 'image/png')
self.assertEqual(response.body, raw_image)
# Page context: Story.
self.login(self.CURRICULUM_ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb',
encoding=None
) as f:
raw_image = f.read()
response_dict = self.post_json(
'%s/story/%s' % (
feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX, story_id),
{'filename': 'test.png'},
csrf_token=csrf_token,
upload_files=(('image', 'unused_filename', raw_image),)
)
filename = response_dict['filename']
self.logout()
response = self.get_custom_response(
self._get_image_url('story', story_id, filename), 'image/png')
self.assertEqual(response.body, raw_image)
# Page context: Skill.
self.login(self.CURRICULUM_ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb',
encoding=None
) as f:
raw_image = f.read()
response_dict = self.post_json(
'%s/skill/%s' % (
feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX, skill_id),
{'filename': 'test.png'},
csrf_token=csrf_token,
upload_files=(('image', 'unused_filename', raw_image),)
)
filename = response_dict['filename']
self.logout()
response = self.get_custom_response(
self._get_image_url('skill', skill_id, filename), 'image/png')
self.assertEqual(response.body, raw_image)
# Image context: Question Suggestions.
self.login(self.CURRICULUM_ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb',
encoding=None
) as f:
raw_image = f.read()
response_dict = self.post_json(
'%s/question_suggestions/%s' % (
feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX,
skill_id
),
{'filename': 'test.png'},
csrf_token=csrf_token,
upload_files=(('image', 'unused_filename', raw_image),)
)
filename = response_dict['filename']
self.logout()
response = self.get_custom_response(
self._get_image_url('skill', skill_id, filename), 'image/png')
self.assertEqual(response.body, raw_image)
def test_non_matching_extensions_are_detected(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
filename_without_extension = 'test'
supplied_filename = ('%s.jpg' % filename_without_extension)
filename_with_correct_extension = (
'%s.png' % filename_without_extension)
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'),
'rb', encoding=None
) as f:
raw_image = f.read()
# Pass JPG extension even though raw_image data is PNG.
# This test verifies that, when the filename extension differs from what
# the raw data 'appears' to be, the image is rejected.
response_dict = self.post_json(
'%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX,
{'filename': supplied_filename},
csrf_token=csrf_token,
expected_status_int=400,
upload_files=(('image', 'unused_filename', raw_image),)
)
self.assertEqual(response_dict['status_code'], 400)
self.assertEqual(
response_dict['error'],
'Expected a filename ending in .png, received test.jpg')
self.logout()
# Test that neither form of the image is stored.
self.get_json(
self._get_image_url('exploration', '0', supplied_filename),
expected_status_int=404)
self.get_json(
self._get_image_url(
'exploration', '0', filename_with_correct_extension),
expected_status_int=404)
def test_upload_empty_image(self):
"""Test upload of an empty image."""
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
# Upload an empty image.
response_dict = self.post_json(
'%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX,
{'filename': 'test.png'},
csrf_token=csrf_token,
expected_status_int=400,
upload_files=(('image', 'unused_filename', ''),)
)
self.assertEqual(response_dict['status_code'], 400)
self.assertEqual(response_dict['error'], 'No image supplied')
self.logout()
def test_upload_bad_image(self):
"""Test upload of a malformed image."""
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
# Upload an invalid image.
response_dict = self.post_json(
'%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX,
{'filename': 'test.png'},
csrf_token=csrf_token,
expected_status_int=400,
upload_files=(('image', 'unused_filename', 'non_image_data'),)
)
self.assertEqual(response_dict['status_code'], 400)
self.assertEqual(response_dict['error'], 'Image not recognized')
self.logout()
def test_upload_an_invalid_svg_image(self):
"""Test upload of an invalid SVG image."""
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
# Upload an invalid SVG image.
response_dict = self.post_json(
'%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX,
{'filename': 'test.svg'},
csrf_token=csrf_token,
expected_status_int=400,
upload_files=(('image', 'unused_filename', '<badsvg></badsvg>'),)
)
self.assertEqual(response_dict['status_code'], 400)
self.assertEqual(
response_dict['error'],
'Unsupported tags/attributes found in the SVG:\ntags: '
'[\'badsvg\']\n')
self.logout()
def test_upload_a_large_svg(self):
"""Test upload of an SVG image that exceeds the 100 KB size limit."""
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
# Upload an SVG image that exceeds the file size limit of 100 KB.
response_dict = self.post_json(
'%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX,
{'filename': 'test.svg'},
csrf_token=csrf_token,
expected_status_int=400,
upload_files=((
'image',
'unused_filename',
'<svg><path d="%s" /></svg>' % (
'M150 0 L75 200 L225 200 Z ' * 4000)),)
)
self.assertEqual(response_dict['status_code'], 400)
self.assertEqual(
response_dict['error'], 'Image exceeds file size limit of 100 KB.')
self.logout()
def test_get_invalid_image(self):
"""Test retrieval of invalid images."""
self.get_json(
self._get_image_url('exploration', '0', 'bad_image'),
expected_status_int=404)
def test_bad_filenames_are_detected(self):
# TODO(sll): Add more tests here.
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'),
'rb', encoding=None
) as f:
raw_image = f.read()
response_dict = self.post_json(
'%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX,
{'filename': 'test/a.png'},
csrf_token=csrf_token,
expected_status_int=400,
upload_files=(('image', 'unused_filename', raw_image),),
)
self.assertEqual(response_dict['status_code'], 400)
error_msg = (
'Schema validation for \'filename\' failed: Validation failed: '
'is_regex_matched ({\'regex_pattern\': \'\\\\w+[.]\\\\w+\'}) '
'for object test/a.png')
self.assertIn(error_msg, response_dict['error'])
self.logout()
def test_missing_extensions_are_detected(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'),
'rb', encoding=None
) as f:
raw_image = f.read()
response_dict = self.post_json(
'%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX,
{'filename': 'test'},
csrf_token=csrf_token,
expected_status_int=400,
upload_files=(('image', 'unused_filename', raw_image),),
)
self.assertEqual(response_dict['status_code'], 400)
error_msg = (
'Schema validation for \'filename\' failed: Validation failed: '
'is_regex_matched ({\'regex_pattern\': \'\\\\w+[.]\\\\w+\'}) '
'for object test')
self.assertIn(error_msg, response_dict['error'])
self.logout()
def test_bad_extensions_are_detected(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'),
'rb', encoding=None
) as f:
raw_image = f.read()
response_dict = self.post_json(
'%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX,
{'filename': 'test.pdf'},
csrf_token=csrf_token,
expected_status_int=400,
upload_files=(('image', 'unused_filename', raw_image),),
)
self.assertEqual(response_dict['status_code'], 400)
self.assertIn(
'Expected a filename ending in .png, received test.pdf',
response_dict['error'])
self.logout()
def test_request_invalid_asset_type(self):
"""Test that requests for invalid asset type is rejected with a 404."""
self.login(self.EDITOR_EMAIL)
self.get_html_response(
'/assetsdevhandler/exploration/0/assets/unknowntype/myfile',
expected_status_int=404)
self.logout()
def test_get_response_with_dev_mode_disabled_returns_404_status(self):
self.login(self.EDITOR_EMAIL)
with self.swap(constants, 'EMULATOR_MODE', False):
self.get_json(
'/assetsdevhandler/exploration/0/assets/image/myfile',
expected_status_int=404)
self.logout()
class AssetDevHandlerAudioTest(test_utils.GenericTestBase):
"""Test the upload of audio files to GCS."""
TEST_AUDIO_FILE_MP3 = 'cafe.mp3'
TEST_AUDIO_FILE_FLAC = 'cafe.flac'
TEST_AUDIO_FILE_OVER_MAX_LENGTH = 'cafe-over-five-minutes.mp3'
TEST_AUDIO_FILE_MPEG_CONTAINER = 'test-mpeg-container.mp3'
AUDIO_UPLOAD_URL_PREFIX = '/createhandler/audioupload'
def setUp(self):
super(AssetDevHandlerAudioTest, self).setUp()
exp_services.delete_demo('0')
self.system_user = user_services.get_system_user()
exp_services.load_demo('0')
rights_manager.release_ownership_of_exploration(
self.system_user, '0')
self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
mock_accepted_audio_extensions = {
'mp3': ['audio/mp3'],
'flac': ['audio/flac']
}
self.accepted_audio_extensions_swap = self.swap(
feconf, 'ACCEPTED_AUDIO_EXTENSIONS',
mock_accepted_audio_extensions)
def test_guest_can_not_upload(self):
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_MP3),
'rb', encoding=None
) as f:
raw_audio = f.read()
response = self.post_json(
'%s/0' % (self.AUDIO_UPLOAD_URL_PREFIX),
{'filename': self.TEST_AUDIO_FILE_MP3},
csrf_token=csrf_token,
upload_files=(('raw_audio_file', 'unused_filename', raw_audio),),
expected_status_int=401
)
self.assertEqual(
response['error'],
'You must be logged in to access this resource.')
def test_cannot_upload_audio_with_invalid_exp_id(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_MP3),
'rb', encoding=None
) as f:
raw_audio = f.read()
self.post_json(
'%s/invalid_exp_id' % (self.AUDIO_UPLOAD_URL_PREFIX),
{'filename': self.TEST_AUDIO_FILE_MP3},
csrf_token=csrf_token,
upload_files=(('raw_audio_file', 'unused_filename', raw_audio),),
expected_status_int=404
)
self.logout()
def test_audio_upload(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_MP3),
'rb', encoding=None
) as f:
raw_audio = f.read()
self.post_json(
'%s/0' % (self.AUDIO_UPLOAD_URL_PREFIX),
{'filename': self.TEST_AUDIO_FILE_MP3},
csrf_token=csrf_token,
upload_files=(
('raw_audio_file', self.TEST_AUDIO_FILE_MP3, raw_audio),)
)
self.logout()
def test_audio_upload_with_non_mp3_file(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
file_system_class = fs_services.get_entity_file_system_class()
fs = fs_domain.AbstractFileSystem(file_system_class(
feconf.ENTITY_TYPE_EXPLORATION, '0'))
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_FLAC),
'rb', encoding=None
) as f:
raw_audio = f.read()
self.assertFalse(fs.isfile('audio/%s' % self.TEST_AUDIO_FILE_FLAC))
with self.accepted_audio_extensions_swap:
self.post_json(
'%s/0' % self.AUDIO_UPLOAD_URL_PREFIX,
{'filename': self.TEST_AUDIO_FILE_FLAC},
csrf_token=csrf_token,
upload_files=[
('raw_audio_file', self.TEST_AUDIO_FILE_FLAC, raw_audio)]
)
self.assertTrue(fs.isfile('audio/%s' % self.TEST_AUDIO_FILE_FLAC))
self.logout()
def test_detect_non_matching_extensions(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
# Use an accepted audio extension in mismatched_filename
# that differs from the uploaded file's audio type.
mismatched_filename = 'test.flac'
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_MP3),
'rb', encoding=None
) as f:
raw_audio = f.read()
with self.accepted_audio_extensions_swap:
response_dict = self.post_json(
'%s/0' % self.AUDIO_UPLOAD_URL_PREFIX,
{'filename': mismatched_filename},
csrf_token=csrf_token,
expected_status_int=400,
upload_files=[
('raw_audio_file', mismatched_filename, raw_audio)]
)
self.logout()
self.assertIn(
'Although the filename extension indicates the file is a flac '
'file, it was not recognized as one. Found mime types:',
response_dict['error'])
def test_detect_non_audio_file(self):
"""Test that filenames with extensions that don't match the audio are
detected.
"""
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'),
'rb', encoding=None
) as f:
raw_audio = f.read()
with self.accepted_audio_extensions_swap:
response_dict = self.post_json(
'%s/0' % self.AUDIO_UPLOAD_URL_PREFIX,
{'filename': self.TEST_AUDIO_FILE_FLAC},
csrf_token=csrf_token,
expected_status_int=400,
upload_files=(('raw_audio_file', 'unused_filename', raw_audio),)
)
self.logout()
self.assertEqual(
response_dict['error'], 'Audio not recognized as a flac file')
def test_audio_upload_mpeg_container(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(
feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_MPEG_CONTAINER),
'rb', encoding=None
) as f:
raw_audio = f.read()
self.post_json(
'%s/0' % (self.AUDIO_UPLOAD_URL_PREFIX),
{'filename': self.TEST_AUDIO_FILE_MPEG_CONTAINER},
csrf_token=csrf_token,
upload_files=(('raw_audio_file', 'unused_filename', raw_audio),)
)
self.logout()
def test_invalid_extension_is_detected(self):
"""Test that invalid extensions are caught."""
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
filename_without_extension = 'test'
invalid_extension = 'wav'
supplied_filename = (
'%s.%s' % (filename_without_extension, invalid_extension))
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_MP3),
'rb', encoding=None
) as f:
raw_audio = f.read()
response_dict = self.post_json(
'%s/0' % (self.AUDIO_UPLOAD_URL_PREFIX),
{'filename': supplied_filename},
csrf_token=csrf_token,
expected_status_int=400,
upload_files=(('raw_audio_file', 'unused_filename', raw_audio),)
)
self.logout()
self.assertEqual(response_dict['status_code'], 400)
self.assertEqual(
response_dict['error'],
'Invalid filename extension: it should have '
'one of the following extensions: %s'
% list(feconf.ACCEPTED_AUDIO_EXTENSIONS.keys()))
def test_upload_empty_audio(self):
"""Test upload of empty audio."""
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
# Upload empty audio.
response_dict = self.post_json(
'%s/0' % self.AUDIO_UPLOAD_URL_PREFIX,
{'filename': 'test.mp3'},
csrf_token=csrf_token,
expected_status_int=400,
upload_files=(('raw_audio_file', 'unused_filename', ''),)
)
self.logout()
self.assertEqual(response_dict['status_code'], 400)
self.assertEqual(response_dict['error'], 'No audio supplied')
def test_upload_bad_audio(self):
"""Test upload of malformed audio."""
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
response_dict = self.post_json(
'%s/0' % self.AUDIO_UPLOAD_URL_PREFIX,
{'filename': 'test.mp3'},
csrf_token=csrf_token,
expected_status_int=400,
upload_files=(
('raw_audio_file', 'unused_filename', 'non_audio_data'),)
)
self.logout()
self.assertEqual(response_dict['status_code'], 400)
self.assertEqual(
response_dict['error'], 'Audio not recognized as a mp3 file')
def test_missing_extensions_are_detected(self):
"""Test upload of filenames with no extensions are caught."""
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
missing_extension_filename = 'test'
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_MP3),
'rb', encoding=None
) as f:
raw_audio = f.read()
response_dict = self.post_json(
'%s/0' % (self.AUDIO_UPLOAD_URL_PREFIX),
{'filename': missing_extension_filename},
csrf_token=csrf_token,
expected_status_int=400,
upload_files=(('raw_audio_file', 'unused_filename', raw_audio),)
)
self.logout()
self.assertEqual(response_dict['status_code'], 400)
self.assertEqual(
response_dict['error'],
'No filename extension: it should have '
'one of the following extensions: '
'%s' % list(feconf.ACCEPTED_AUDIO_EXTENSIONS.keys()))
def test_exceed_max_length_detected(self):
"""Test that audio file is less than max playback length."""
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(
feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_OVER_MAX_LENGTH),
'rb', encoding=None
) as f:
raw_audio = f.read()
response_dict = self.post_json(
'%s/0' % self.AUDIO_UPLOAD_URL_PREFIX,
{'filename': 'test.mp3'},
csrf_token=csrf_token,
expected_status_int=400,
upload_files=(('raw_audio_file', 'unused_filename', raw_audio),)
)
self.logout()
self.assertEqual(response_dict['status_code'], 400)
self.assertIn(
'Audio files must be under %s seconds in length'
% feconf.MAX_AUDIO_FILE_LENGTH_SEC, response_dict['error'])
def test_non_matching_extensions_are_detected(self):
"""Test that filenames with extensions that don't match the audio are
detected.
"""
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
# Use an accepted audio extension in mismatched_filename
# that differs from the uploaded file's audio type.
mismatched_filename = 'test.mp3'
with utils.open_file(
os.path.join(
feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_FLAC),
'rb', encoding=None
) as f:
raw_audio = f.read()
response_dict = self.post_json(
'%s/0' % self.AUDIO_UPLOAD_URL_PREFIX,
{'filename': mismatched_filename},
csrf_token=csrf_token,
expected_status_int=400,
upload_files=(('raw_audio_file', 'unused_filename', raw_audio),)
)
self.logout()
self.assertEqual(response_dict['status_code'], 400)
self.assertEqual(
response_dict['error'], 'Audio not recognized as a mp3 file')
def test_upload_check_for_duration_sec_as_response(self):
"""Tests the file upload and trying to confirm the
audio file duration_secs is accurate.
"""
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_MP3),
'rb', encoding=None
) as f:
raw_audio = f.read()
response_dict = self.post_json(
'%s/0' % self.AUDIO_UPLOAD_URL_PREFIX,
{'filename': self.TEST_AUDIO_FILE_MP3},
csrf_token=csrf_token,
expected_status_int=200,
upload_files=(('raw_audio_file', 'unused_filename', raw_audio),)
)
self.logout()
expected_value = ({
'filename': self.TEST_AUDIO_FILE_MP3,
'duration_secs': 15.255510204081633})
self.assertEqual(response_dict, expected_value)
class PromoBarHandlerTest(test_utils.GenericTestBase):
"""Test for the PromoBarHandler."""
def setUp(self):
super(PromoBarHandlerTest, self).setUp()
self.signup(self.CURRICULUM_ADMIN_EMAIL, self.CURRICULUM_ADMIN_USERNAME)
self.signup(
self.RELEASE_COORDINATOR_EMAIL, self.RELEASE_COORDINATOR_USERNAME)
self.add_user_role(
self.RELEASE_COORDINATOR_USERNAME,
feconf.ROLE_ID_RELEASE_COORDINATOR)
def test_get_promo_bar_data(self):
response = self.get_json('/promo_bar_handler')
self.assertEqual(
response, {
'promo_bar_enabled': False,
'promo_bar_message': ''
})
def test_release_coordinator_able_to_update_promo_bar_config(self):
self.login(self.RELEASE_COORDINATOR_EMAIL)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/promo_bar_handler', {
'promo_bar_enabled': True,
'promo_bar_message': 'New promo bar message.'
}, csrf_token=csrf_token)
self.assertEqual(response, {})
response = self.get_json('/promo_bar_handler')
self.assertEqual(
response, {
'promo_bar_enabled': True,
'promo_bar_message': 'New promo bar message.'
})
self.logout()
class ValueGeneratorHandlerTests(test_utils.GenericTestBase):
def test_value_generated_error(self):
dummy_id = 'someID'
response = self.get_json(
'/value_generator_handler/%s' % dummy_id,
expected_status_int=404
)
error_message = 'Could not find the page http://localhost/{}{}.'.format(
'value_generator_handler/', dummy_id
)
self.assertEqual(response['error'], error_message)
def test_html_response(self):
copier_id = 'Copier'
response = self.get_html_response(
'/value_generator_handler/' + copier_id
)
self.assertIn(b'<object-editor obj-type="<[objType]>"', response.body)
|
oppia/oppia
|
core/controllers/resources_test.py
|
Python
|
apache-2.0
| 33,854
|
import pytest
from .._util import LocalProtocolError
from .._events import *
from .._state import *
from .._state import ConnectionState, _SWITCH_UPGRADE, _SWITCH_CONNECT
def test_ConnectionState():
cs = ConnectionState()
# Basic event-triggered transitions
assert cs.states == {CLIENT: IDLE, SERVER: IDLE}
cs.process_event(CLIENT, Request)
# The SERVER-Request special case:
assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE}
# Illegal transitions raise an error and nothing happens
with pytest.raises(LocalProtocolError):
cs.process_event(CLIENT, Request)
assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE}
cs.process_event(SERVER, InformationalResponse)
assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE}
cs.process_event(SERVER, Response)
assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY}
cs.process_event(CLIENT, EndOfMessage)
cs.process_event(SERVER, EndOfMessage)
assert cs.states == {CLIENT: DONE, SERVER: DONE}
# State-triggered transition
cs.process_event(SERVER, ConnectionClosed)
assert cs.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED}
def test_ConnectionState_keep_alive():
# keep_alive = False
cs = ConnectionState()
cs.process_event(CLIENT, Request)
cs.process_keep_alive_disabled()
cs.process_event(CLIENT, EndOfMessage)
assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_RESPONSE}
cs.process_event(SERVER, Response)
cs.process_event(SERVER, EndOfMessage)
assert cs.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE}
def test_ConnectionState_keep_alive_in_DONE():
# Check that if keep_alive is disabled when the CLIENT is already in DONE,
# then this is sufficient to immediately trigger the DONE -> MUST_CLOSE
# transition
cs = ConnectionState()
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, EndOfMessage)
assert cs.states[CLIENT] is DONE
cs.process_keep_alive_disabled()
assert cs.states[CLIENT] is MUST_CLOSE
def test_ConnectionState_switch_denied():
for switch_type in (_SWITCH_CONNECT, _SWITCH_UPGRADE):
for deny_early in (True, False):
cs = ConnectionState()
cs.process_client_switch_proposal(switch_type)
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, Data)
assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE}
assert switch_type in cs.pending_switch_proposals
if deny_early:
# before client reaches DONE
cs.process_event(SERVER, Response)
assert not cs.pending_switch_proposals
cs.process_event(CLIENT, EndOfMessage)
if deny_early:
assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY}
else:
assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL,
SERVER: SEND_RESPONSE}
cs.process_event(SERVER, InformationalResponse)
assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL,
SERVER: SEND_RESPONSE}
cs.process_event(SERVER, Response)
assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY}
assert not cs.pending_switch_proposals
_response_type_for_switch = {
_SWITCH_UPGRADE: InformationalResponse,
_SWITCH_CONNECT: Response,
None: Response,
}
def test_ConnectionState_protocol_switch_accepted():
for switch_event in [_SWITCH_UPGRADE, _SWITCH_CONNECT]:
cs = ConnectionState()
cs.process_client_switch_proposal(switch_event)
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, Data)
assert cs.states == {CLIENT: SEND_BODY,
SERVER: SEND_RESPONSE}
cs.process_event(CLIENT, EndOfMessage)
assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL,
SERVER: SEND_RESPONSE}
cs.process_event(SERVER, InformationalResponse)
assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL,
SERVER: SEND_RESPONSE}
cs.process_event(SERVER,
_response_type_for_switch[switch_event],
switch_event)
assert cs.states == {CLIENT: SWITCHED_PROTOCOL,
SERVER: SWITCHED_PROTOCOL}
def test_ConnectionState_double_protocol_switch():
# CONNECT + Upgrade is legal! Very silly, but legal. So we support
# it. Because sometimes doing the silly thing is easier than not.
for server_switch in [None, _SWITCH_UPGRADE, _SWITCH_CONNECT]:
cs = ConnectionState()
cs.process_client_switch_proposal(_SWITCH_UPGRADE)
cs.process_client_switch_proposal(_SWITCH_CONNECT)
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, EndOfMessage)
assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL,
SERVER: SEND_RESPONSE}
cs.process_event(SERVER,
_response_type_for_switch[server_switch],
server_switch)
if server_switch is None:
assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY}
else:
assert cs.states == {CLIENT: SWITCHED_PROTOCOL,
SERVER: SWITCHED_PROTOCOL}
def test_ConnectionState_inconsistent_protocol_switch():
for client_switches, server_switch in [
([], _SWITCH_CONNECT),
([], _SWITCH_UPGRADE),
([_SWITCH_UPGRADE], _SWITCH_CONNECT),
([_SWITCH_CONNECT], _SWITCH_UPGRADE),
]:
cs = ConnectionState()
for client_switch in client_switches:
cs.process_client_switch_proposal(client_switch)
cs.process_event(CLIENT, Request)
with pytest.raises(LocalProtocolError):
cs.process_event(SERVER, Response, server_switch)
def test_ConnectionState_keepalive_protocol_switch_interaction():
# keep_alive=False + pending_switch_proposals
cs = ConnectionState()
cs.process_client_switch_proposal(_SWITCH_UPGRADE)
cs.process_event(CLIENT, Request)
cs.process_keep_alive_disabled()
cs.process_event(CLIENT, Data)
assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE}
# the protocol switch "wins"
cs.process_event(CLIENT, EndOfMessage)
assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE}
# but when the server denies the request, keep_alive comes back into play
cs.process_event(SERVER, Response)
assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_BODY}
def test_ConnectionState_reuse():
cs = ConnectionState()
with pytest.raises(LocalProtocolError):
cs.start_next_cycle()
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, EndOfMessage)
with pytest.raises(LocalProtocolError):
cs.start_next_cycle()
cs.process_event(SERVER, Response)
cs.process_event(SERVER, EndOfMessage)
cs.start_next_cycle()
assert cs.states == {CLIENT: IDLE, SERVER: IDLE}
# No keepalive
cs.process_event(CLIENT, Request)
cs.process_keep_alive_disabled()
cs.process_event(CLIENT, EndOfMessage)
cs.process_event(SERVER, Response)
cs.process_event(SERVER, EndOfMessage)
with pytest.raises(LocalProtocolError):
cs.start_next_cycle()
# One side closed
cs = ConnectionState()
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, EndOfMessage)
cs.process_event(CLIENT, ConnectionClosed)
cs.process_event(SERVER, Response)
cs.process_event(SERVER, EndOfMessage)
with pytest.raises(LocalProtocolError):
cs.start_next_cycle()
    # Successful protocol switch
cs = ConnectionState()
cs.process_client_switch_proposal(_SWITCH_UPGRADE)
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, EndOfMessage)
cs.process_event(SERVER, InformationalResponse, _SWITCH_UPGRADE)
with pytest.raises(LocalProtocolError):
cs.start_next_cycle()
# Failed protocol switch
cs = ConnectionState()
cs.process_client_switch_proposal(_SWITCH_UPGRADE)
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, EndOfMessage)
cs.process_event(SERVER, Response)
cs.process_event(SERVER, EndOfMessage)
cs.start_next_cycle()
assert cs.states == {CLIENT: IDLE, SERVER: IDLE}
def test_server_request_is_illegal():
# There used to be a bug in how we handled the Request special case that
# made this allowed...
cs = ConnectionState()
with pytest.raises(LocalProtocolError):
cs.process_event(SERVER, Request)
|
njsmith/h11
|
h11/tests/test_state.py
|
Python
|
mit
| 8,778
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinder import flags
FLAGS = flags.FLAGS
flags.DECLARE('iscsi_num_targets', 'cinder.volume.driver')
flags.DECLARE('policy_file', 'cinder.policy')
flags.DECLARE('volume_driver', 'cinder.volume.manager')
flags.DECLARE('xiv_proxy', 'cinder.volume.xiv')
def_vol_type = 'fake_vol_type'
def set_defaults(conf):
conf.set_default('default_volume_type', def_vol_type)
conf.set_default('volume_driver', 'cinder.volume.driver.FakeISCSIDriver')
conf.set_default('connection_type', 'fake')
conf.set_default('fake_rabbit', True)
conf.set_default('rpc_backend', 'cinder.openstack.common.rpc.impl_fake')
conf.set_default('iscsi_num_targets', 8)
conf.set_default('verbose', True)
conf.set_default('sql_connection', "sqlite://")
conf.set_default('sqlite_synchronous', False)
conf.set_default('policy_file', 'cinder/tests/policy.json')
conf.set_default('xiv_proxy', 'cinder.tests.test_xiv.XIVFakeProxyDriver')
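# Hedged usage sketch (an assumption, not part of the original module): a test
# harness would typically apply these defaults to the global FLAGS object
# before tests run, e.g.:
#
#     from cinder.tests import fake_flags
#     fake_flags.set_defaults(flags.FLAGS)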
|
NewpTone/stacklab-cinder
|
cinder/tests/fake_flags.py
|
Python
|
apache-2.0
| 1,721
|
import os
import shutil
import sys
import tempfile
import pytest
from mock import Mock, patch, mock_open
from pip.exceptions import (
PreviousBuildDirError, InvalidWheelFilename, UnsupportedWheel,
)
from pip.download import PipSession
from pip._vendor import pkg_resources
from pip.index import PackageFinder
from pip.req import (InstallRequirement, RequirementSet,
Requirements, parse_requirements)
from pip.req.req_install import parse_editable
from pip.utils import read_text_file
from tests.lib import assert_raises_regexp
class TestRequirementSet(object):
"""RequirementSet tests"""
def setup(self):
self.tempdir = tempfile.mkdtemp()
def teardown(self):
shutil.rmtree(self.tempdir, ignore_errors=True)
def basic_reqset(self):
return RequirementSet(
build_dir=os.path.join(self.tempdir, 'build'),
src_dir=os.path.join(self.tempdir, 'src'),
download_dir=None,
session=PipSession(),
)
def test_no_reuse_existing_build_dir(self, data):
"""Test prepare_files raise exception with previous build dir"""
build_dir = os.path.join(self.tempdir, 'build', 'simple')
os.makedirs(build_dir)
open(os.path.join(build_dir, "setup.py"), 'w')
reqset = self.basic_reqset()
req = InstallRequirement.from_line('simple')
reqset.add_requirement(req)
finder = PackageFinder([data.find_links], [], session=PipSession())
assert_raises_regexp(
PreviousBuildDirError,
"pip can't proceed with [\s\S]*%s[\s\S]*%s" %
(req, build_dir.replace('\\', '\\\\')),
reqset.prepare_files,
finder,
)
@pytest.mark.parametrize(('file_contents', 'expected'), [
(b'\xf6\x80', b'\xc3\xb6\xe2\x82\xac'), # cp1252
(b'\xc3\xb6\xe2\x82\xac', b'\xc3\xb6\xe2\x82\xac'), # utf-8
(b'\xc3\xb6\xe2', b'\xc3\x83\xc2\xb6\xc3\xa2'), # Garbage
])
def test_egg_info_data(file_contents, expected):
om = mock_open(read_data=file_contents)
em = Mock()
em.return_value = 'cp1252'
with patch('pip.utils.open', om, create=True):
with patch('locale.getpreferredencoding', em):
ret = read_text_file('foo')
assert ret == expected.decode('utf-8')
class TestInstallRequirement(object):
def test_url_with_query(self):
"""InstallRequirement should strip the fragment, but not the query."""
url = 'http://foo.com/?p=bar.git;a=snapshot;h=v0.1;sf=tgz'
fragment = '#egg=bar'
req = InstallRequirement.from_line(url + fragment)
assert req.url == url + fragment, req.url
def test_unsupported_wheel_requirement_raises(self):
with pytest.raises(UnsupportedWheel):
InstallRequirement.from_line(
'peppercorn-0.4-py2.py3-bogus-any.whl',
)
def test_invalid_wheel_requirement_raises(self):
with pytest.raises(InvalidWheelFilename):
InstallRequirement.from_line('invalid.whl')
def test_wheel_requirement_sets_req_attribute(self):
req = InstallRequirement.from_line('simple-0.1-py2.py3-none-any.whl')
assert req.req == pkg_resources.Requirement.parse('simple==0.1')
def test_url_preserved_line_req(self):
"""Confirm the url is preserved in a non-editable requirement"""
url = 'git+http://foo.com@ref#egg=foo'
req = InstallRequirement.from_line(url)
assert req.url == url
def test_url_preserved_editable_req(self):
"""Confirm the url is preserved in a editable requirement"""
url = 'git+http://foo.com@ref#egg=foo'
req = InstallRequirement.from_editable(url)
assert req.url == url
def test_markers(self):
for line in (
            # recommended syntax
'mock3; python_version >= "3"',
# with more spaces
'mock3 ; python_version >= "3" ',
# without spaces
'mock3;python_version >= "3"',
):
req = InstallRequirement.from_line(line)
assert req.req.project_name == 'mock3'
assert req.req.specs == []
assert req.markers == 'python_version >= "3"'
def test_markers_semicolon(self):
# check that the markers can contain a semicolon
req = InstallRequirement.from_line('semicolon; os_name == "a; b"')
assert req.req.project_name == 'semicolon'
assert req.req.specs == []
assert req.markers == 'os_name == "a; b"'
def test_markers_url(self):
# test "URL; markers" syntax
url = 'http://foo.com/?p=bar.git;a=snapshot;h=v0.1;sf=tgz'
line = '%s; python_version >= "3"' % url
req = InstallRequirement.from_line(line)
assert req.url == url, req.url
assert req.markers == 'python_version >= "3"'
# without space, markers are part of the URL
url = 'http://foo.com/?p=bar.git;a=snapshot;h=v0.1;sf=tgz'
line = '%s;python_version >= "3"' % url
req = InstallRequirement.from_line(line)
assert req.url == line, req.url
assert req.markers is None
def test_markers_match(self):
# match
for markers in (
'python_version >= "1.0"',
'sys_platform == %r' % sys.platform,
):
line = 'name; ' + markers
req = InstallRequirement.from_line(line)
assert req.markers == markers
assert req.match_markers()
# don't match
for markers in (
'python_version >= "5.0"',
'sys_platform != %r' % sys.platform,
):
line = 'name; ' + markers
req = InstallRequirement.from_line(line)
assert req.markers == markers
assert not req.match_markers()
def test_requirements_data_structure_keeps_order():
requirements = Requirements()
requirements['pip'] = 'pip'
requirements['nose'] = 'nose'
requirements['coverage'] = 'coverage'
assert ['pip', 'nose', 'coverage'] == list(requirements.values())
assert ['pip', 'nose', 'coverage'] == list(requirements.keys())
def test_requirements_data_structure_implements__repr__():
requirements = Requirements()
requirements['pip'] = 'pip'
requirements['nose'] = 'nose'
assert "Requirements({'pip': 'pip', 'nose': 'nose'})" == repr(requirements)
def test_requirements_data_structure_implements__contains__():
requirements = Requirements()
requirements['pip'] = 'pip'
assert 'pip' in requirements
assert 'nose' not in requirements
@patch('os.path.normcase')
@patch('pip.req.req_install.os.getcwd')
@patch('pip.req.req_install.os.path.exists')
@patch('pip.req.req_install.os.path.isdir')
def test_parse_editable_local(
isdir_mock, exists_mock, getcwd_mock, normcase_mock):
exists_mock.return_value = isdir_mock.return_value = True
# mocks needed to support path operations on windows tests
normcase_mock.return_value = getcwd_mock.return_value = "/some/path"
assert parse_editable('.', 'git') == (None, 'file:///some/path', None)
normcase_mock.return_value = "/some/path/foo"
assert parse_editable('foo', 'git') == (
None, 'file:///some/path/foo', None,
)
def test_parse_editable_default_vcs():
assert parse_editable('https://foo#egg=foo', 'git') == (
'foo',
'git+https://foo#egg=foo',
{'egg': 'foo'},
)
def test_parse_editable_explicit_vcs():
assert parse_editable('svn+https://foo#egg=foo', 'git') == (
'foo',
'svn+https://foo#egg=foo',
{'egg': 'foo'},
)
def test_parse_editable_vcs_extras():
assert parse_editable('svn+https://foo#egg=foo[extras]', 'git') == (
'foo[extras]',
'svn+https://foo#egg=foo[extras]',
{'egg': 'foo[extras]'},
)
@patch('os.path.normcase')
@patch('pip.req.req_install.os.getcwd')
@patch('pip.req.req_install.os.path.exists')
@patch('pip.req.req_install.os.path.isdir')
def test_parse_editable_local_extras(
isdir_mock, exists_mock, getcwd_mock, normcase_mock):
exists_mock.return_value = isdir_mock.return_value = True
normcase_mock.return_value = getcwd_mock.return_value = "/some/path"
assert parse_editable('.[extras]', 'git') == (
None, 'file://' + "/some/path", ('extras',),
)
normcase_mock.return_value = "/some/path/foo"
assert parse_editable('foo[bar,baz]', 'git') == (
None, 'file:///some/path/foo', ('bar', 'baz'),
)
def test_remote_reqs_parse():
"""
Test parsing a simple remote requirements file
"""
# this requirements file just contains a comment
# previously this has failed in py3: https://github.com/pypa/pip/issues/760
for req in parse_requirements(
'https://raw.githubusercontent.com/pypa/pip-test-package/master/'
'tests/req_just_comment.txt', session=PipSession()):
pass
def test_req_file_parse_no_use_wheel(data):
"""
Test parsing --no-use-wheel from a req file
"""
finder = PackageFinder([], [], session=PipSession())
for req in parse_requirements(
data.reqfiles.join("supported_options.txt"), finder,
session=PipSession()):
pass
assert not finder.use_wheel
def test_req_file_parse_comment_start_of_line(tmpdir):
"""
Test parsing comments in a requirements file
"""
with open(tmpdir.join("req1.txt"), "w") as fp:
fp.write("# Comment ")
finder = PackageFinder([], [], session=PipSession())
reqs = list(parse_requirements(tmpdir.join("req1.txt"), finder,
session=PipSession()))
assert not reqs
def test_req_file_parse_comment_end_of_line_with_url(tmpdir):
"""
Test parsing comments in a requirements file
"""
with open(tmpdir.join("req1.txt"), "w") as fp:
fp.write("https://example.com/foo.tar.gz # Comment ")
finder = PackageFinder([], [], session=PipSession())
reqs = list(parse_requirements(tmpdir.join("req1.txt"), finder,
session=PipSession()))
assert len(reqs) == 1
assert reqs[0].url == "https://example.com/foo.tar.gz"
def test_req_file_parse_egginfo_end_of_line_with_url(tmpdir):
"""
    Test parsing an egg fragment at the end of a URL in a requirements file
"""
with open(tmpdir.join("req1.txt"), "w") as fp:
fp.write("https://example.com/foo.tar.gz#egg=wat")
finder = PackageFinder([], [], session=PipSession())
reqs = list(parse_requirements(tmpdir.join("req1.txt"), finder,
session=PipSession()))
assert len(reqs) == 1
assert reqs[0].name == "wat"
def test_req_file_no_finder(tmpdir):
"""
Test parsing a requirements file without a finder
"""
with open(tmpdir.join("req.txt"), "w") as fp:
fp.write("""
--find-links https://example.com/
--index-url https://example.com/
--extra-index-url https://two.example.com/
--no-use-wheel
--no-index
--allow-external foo
--allow-all-external
--allow-insecure foo
--allow-unverified foo
""")
parse_requirements(tmpdir.join("req.txt"), session=PipSession())
|
Carreau/pip
|
tests/unit/test_req.py
|
Python
|
mit
| 11,138
|
'''
Created on Aug 24, 2016
@author: Rykath
Package: Utilities
Usage: various functions and classes
'''
def output(typ,message):
if typ == "error":
print("Error: "+message)
elif typ == "warning":
print("Warning: "+message)
elif typ in ["console","debug","test"]:
print(message)
def intToBaseX(n,b):
if n == 0:
d = [0]
else:
digits = []
while n:
digits.append(int(n % b))
n //= b
d = digits[::-1]
keys = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ+/'
if b > len(keys):
return d
else:
s = ''
for i in d:
s += keys[i]
return s
def baseXToInt(n,b):
keys = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ+/'
I = 0
for i in range(len(n)):
I += keys.index(n[i])*b**(len(n)-(i+1))
return I
def baseXToBaseY(n,bX,bY):
return intToBaseX(baseXToInt(n,bX),bY)
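# Hedged usage sketch (not part of the original module): a self-contained
# round trip through the base-conversion helpers above.
if __name__ == '__main__':
    assert intToBaseX(255, 16) == 'ff'
    assert baseXToInt('ff', 16) == 255
    assert baseXToBaseY('ff', 16, 2) == '11111111'
    output("test", "base conversion round trip OK")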
|
Rykath/RM-GoL
|
GameOfLife/Main-Py/Utilities/misc.py
|
Python
|
gpl-3.0
| 974
|
#!/usr/bin/env python
"""
Copyright 2012 GroupDocs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class SignatureFormsResult:
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually."""
def __init__(self):
self.swaggerTypes = {
'count': 'int',
'forms': 'list[SignatureFormInfo]'
}
self.count = None # int
self.forms = None # list[SignatureFormInfo]
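# Hedged usage sketch (an assumption, not part of the generated model): the
# container is normally populated by the GroupDocs API client, but it can be
# filled by hand, e.g. when stubbing API responses in tests.
if __name__ == '__main__':
    result = SignatureFormsResult()
    result.count = 0
    result.forms = []  # would hold SignatureFormInfo instances
    print(result.count, result.forms)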
|
liosha2007/temporary-groupdocs-python3-sdk
|
groupdocs/models/SignatureFormsResult.py
|
Python
|
apache-2.0
| 1,012
|
import numpy as np
import equidistant as eq
selector = eq.PythonEquidistantSelector()
completePoints = np.array([1, 2, 4, 7])
sampledPoints = np.array([[1, 5], [3, 7], [6, 10]])
selector.setSampledPoints(sampledPoints)
selector.setCompletePoints(completePoints)
while(selector.hasNextInput()):
nextInp = selector.getNextInput()
print 'Next input ... ', nextInp
print '------- second interface equiv to java ------'
selector = eq.PythonEquidistantSelector()
completePoints = [1, 2, 4, 7]
sampledInputs = [1, 3, 6]
sampledOutputs = [5, 7, 10]
selector.setCompletePoints(completePoints)
selector.setInitialPoints(sampledInputs, sampledOutputs)
selector.inform(7, 11)
print selector.sampledPoints
while(selector.hasNextInput()):
nextInp = selector.getNextInput()
print 'Next input ... ', nextInp
|
Alexander-Schiendorfer/active-learning-collectives
|
CSP Model Abstraction/python/equidistant/testEquidistant.py
|
Python
|
mit
| 809
|
#!/usr/bin/env python
from PyQt4.QtCore import *
from PyQt4.QtGui import *
class NumberFormatDlg(QDialog):
def __init__(self, format, parent=None):
super(NumberFormatDlg, self).__init__(parent)
thousandsLabel = QLabel('&Thousands separator')
self.thousandsEdit = QLineEdit(format['thousandsseparator'])
thousandsLabel.setBuddy(self.thousandsEdit)
decimalMarkerLabel = QLabel('Decimal &marker')
self.decimalMarkerEdit = QLineEdit(format['decimalmarker'])
decimalMarkerLabel.setBuddy(self.decimalMarkerEdit)
decimalPlacesLabel = QLabel('&Decimal places')
self.decimalPlacesSpinBox = QSpinBox()
decimalPlacesLabel.setBuddy(self.decimalPlacesSpinBox)
self.decimalPlacesSpinBox.setRange(0, 6)
self.decimalPlacesSpinBox.setValue(format['decimalplaces'])
self.redNegativesCheckBox = QCheckBox('&Red negative numbers')
self.redNegativesCheckBox.setChecked(format['rednegatives'])
buttonBox = QDialogButtonBox(QDialogButtonBox.Ok|
QDialogButtonBox.Cancel)
|
opensvn/python
|
numberformatdlg1.py
|
Python
|
gpl-2.0
| 1,085
|
from mediadrop.forms.admin.settings import *
|
jobsafran/mediadrop
|
mediacore/forms/admin/settings.py
|
Python
|
gpl-3.0
| 45
|
"""
WSGI config for backbonejs_todos_with_Django project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "backbonejs_todos_with_Django.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
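# Hedged usage note (an assumption, not part of the original file): a WSGI
# server such as gunicorn would be pointed at this module, e.g.
#     gunicorn backbonejs_todos_with_Django.wsgi:application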
|
ladder1984/backbonejs_todos_with_Django
|
backbonejs_todos_with_Django/wsgi.py
|
Python
|
mit
| 431
|
# Import the Python Imaging Library if it is available. On error, ignore
# the problem and continue. PIL being absent should only affect the
# graphic lump loading/saving methods, and a user who is not interested in
# the graphics functionality should not have to install PIL just to get
# past this line.
try:
from PIL import Image, ImageDraw, ImageOps
except:
pass
import os
import omg.palette
from omg.util import *
from omg import six
class Lump(object):
"""Basic lump class. Instances of Lump (and its subclasses)
always have the following:
.data -- a bytes object holding the lump's data
.from_file -- load the data to a file
.to_file -- save the data to a file
The default Lump class merely copies the raw data when
loading/saving to files, but subclasses may convert data
appropriately (for example, Graphic supports various image
formats)."""
def __init__(self, data=None, from_file=None):
"""Create a new instance. The `data` parameter may be a string
representing data for the lump. The `source` parameter may be
a path to a file or a file-like object to load from."""
self.data = bytes()
if issubclass(type(data), Lump):
self.data = data.data
elif data is not None:
self.data = data or bytes()
if from_file:
self.from_file(from_file)
def from_file(self, source):
"""Load data from a file. Source may be a path name string
or a file-like object (with a `write` method)."""
self.data = readfile(source)
def to_file(self, target):
"""Write data to a file. Target may be a path name string
or a file-like object (with a `write` method)."""
writefile(target, self.data)
def copy(self):
return deepcopy(self)
class Music(Lump):
"""Subclass of Lump, for music lumps. Not yet implemented."""
pass
class Sound(Lump):
"""Subclass of Lump, for sound lumps. Not yet implemented."""
pass
class Graphic(Lump):
"""Subclass of Lump, for Doom format graphics. Supports
conversion from/to RAWs (sequences of bytes) and PIL
Image objects, as well as saving to/loading from various
file formats (via PIL).
Useful attributes:
.dimensions -- (width, height)
.width -- width of the image
.height -- height of the image
.x_offset -- x offset
.y_offset -- y offset
"""
def __init__(self, data=None, from_file=None, palette=None):
self.palette = palette or omg.palette.default
Lump.__init__(self, data, from_file)
def get_offsets(self):
"""Retrieve the (x, y) offsets of the graphic."""
return unpack('<hh', self.data[4:8])
def set_offsets(self, xy):
"""Set the (x, y) offsets of the graphic."""
self.data = self.data[0:4] + pack('<hh', *xy) + self.data[8:]
def get_dimensions(self):
"""Retrieve the (width, height) dimensions of the graphic."""
return unpack('<hh', self.data[0:4])
offsets = property(get_offsets, set_offsets)
x_offset = property(lambda self: self.offsets[0],
lambda self, x: self.set_offsets((x, self.y_offset)))
y_offset = property(lambda self: self.offsets[1],
lambda self, y: self.set_offsets((self.x_offset, y)))
dimensions = property(get_dimensions)
width = property(lambda self: self.dimensions[0])
height = property(lambda self: self.dimensions[1])
def from_raw(self, data, width, height, x_offset=0, y_offset=0, pal=None):
"""Load a raw 8-bpp image, converting to the Doom picture format
(used by all graphics except flats)"""
pal = pal or omg.palette.default
# First pass: extract pixel data in column+post format
columns_in = [data[n:width*height:width] for n in range(width)]
columns_out = []
for column in columns_in:
# Split into chunks of continuous non-transparent pixels
postdata = filter(None, column.split(six.int2byte(pal.tran_index)))
# Find the y position where each chunk starts
start_rows = []
in_trans = True
for y in range(height):
if six.indexbytes(column, y) == pal.tran_index:
in_trans = True
elif in_trans:
start_rows.append(y)
in_trans = False
columns_out.append(zip(start_rows, postdata))
# Second pass: compile column+post data, adding pointers
data = []
columnptrs = []
pointer = 4*width + 8
for column in columns_out:
columnptrs.append(pack('<i', pointer))
for row, pixels in column:
data.append(b"%c%c\x00%s\x00" % (row, len(pixels), pixels))
pointer += 4 + len(pixels)
data.append(b'\xff')
pointer += 1
# Merge everything together
self.data = bytes().join([pack('4h', width, height, x_offset, y_offset),
bytes().join(columnptrs), bytes().join(data)])
def to_raw(self, tran_index=None):
"""Returns self converted to a raw (8-bpp) image.
`tran_index` specifies the palette index to use for
transparent pixels. The value defaults to that of the
Graphic object's palette instance."""
data = self.data
width, height = self.dimensions
tran_index = tran_index or self.palette.tran_index
output = bytearray([tran_index] * (width*height))
pointers = unpack('<%il'%width, data[8 : 8 + width*4])
for x in range(width):
pointer = pointers[x]
while six.indexbytes(data, pointer) != 0xff:
post_length = six.indexbytes(data, pointer+1)
op = six.indexbytes(data, pointer)*width + x
for p in range(pointer + 3, pointer + post_length + 3):
output[op] = six.indexbytes(data, p)
op += width
pointer += post_length + 4
return bytes(output)
def to_Image(self):
"""Convert to a PIL Image instance"""
im = Image.new('P', self.dimensions, None)
if isinstance(self, Flat):
im.frombytes(self.data)
else:
im.frombytes(self.to_raw())
im.putpalette(self.palette.save_bytes)
return im
def from_Image(self, im, translate=False):
"""Load from a PIL Image instance
If the input image is 24-bit, the colors will be looked up
in the current palette.
If the input image is 8-bit, indices will simply be copied
from the input image. To properly translate colors between
palettes, set the `translate` parameter."""
pixels = im.tobytes()
width, height = im.size
# High resolution graphics not supported yet, so truncate
height = min(254, height)
xoff, yoff = (width // 2)-1, height-5
if im.mode == "RGB":
pixels = join([chr(self.palette.match(unpack('BBB', \
pixels[i*3:(i+1)*3]))) for i in range(width*height)])
elif im.mode == 'P':
srcpal = im.palette.tobytes()
if im.palette.mode == "RGB":
palsize = 3
elif im.palette.mode == "RGBA":
palsize = 4
else:
raise TypeError("palette mode must be 'RGB' or 'RGBA'")
if translate:
R = [c for c in six.iterbytes(srcpal[0::palsize])]
G = [c for c in six.iterbytes(srcpal[1::palsize])]
B = [c for c in six.iterbytes(srcpal[2::palsize])]
srcpal = zip(R, G, B)
lexicon = [six.int2byte(self.palette.match(c)) for c in srcpal]
pixels = join([lexicon[b] for b in six.iterbytes(pixels)])
else:
# Simply copy pixels. However, make sure to translate
# all colors matching the transparency color to the
# right index. This is necessary because programs
# aren't consistent in choice of position for the
# transparent entry.
packed_color = pack("BBB", *self.palette.tran_color)
ri = 0
while ri != -1:
ri = srcpal.find(packed_color, ri+palsize)
if not ri % palsize and ri//palsize != self.palette.tran_index:
pixels = pixels.replace(six.int2byte(ri//palsize),
six.int2byte(self.palette.tran_index))
else:
raise TypeError("image mode must be 'P' or 'RGB'")
self.from_raw(pixels, width, height, xoff, yoff, self.palette)
def from_file(self, filename, translate=False):
"""Load graphic from an image file."""
if filename[-4:].lower() == '.lmp':
self.data = readfile(filename)
else:
im = Image.open(filename)
self.from_Image(im, translate)
def to_file(self, filename, mode='P'):
"""Save the graphic to an image file.
The output format is selected based on the filename extension.
For example, "file.jpg" saves to JPEG format. If the file has
no extension, PNG format is used.
Special cases: ".lmp" saves the raw lump data, and ".raw" saves
the raw pixel data.
        `mode` may be 'P' or 'RGB' for palette or 24 bit output,
respectively. However, .raw ignores this parameter and always
writes in palette mode."""
format = os.path.splitext(filename)[1][1:].upper()
if format == 'LMP': writefile(filename, self.data)
elif format == 'RAW': writefile(filename, self.to_raw())
else:
im = self.to_Image()
om = im.convert(mode)
if format:
om.save(filename)
else:
om.save(filename, "PNG")
def translate(self, pal):
"""Translate (in-place) the graphic to another palette."""
lexicon = [chr(pal.match(self.palette.colors[i])) for i in range(256)]
lexicon[self.palette.tran_index] = chr(pal.tran_index)
if isinstance(self, Flat):
self.data = join([lexicon[b] for b in self.data])
else:
raw = self.to_raw()
#raw = raw.replace(chr(self.palette.tran_index), chr(pal.tran_index))
            self.from_raw(join([lexicon[b] for b in raw]),
self.width, self.height,
self.x_offset, self.y_offset)
class Flat(Graphic):
"""Subclass of Graphic, for flat graphics"""
def get_dimensions(self):
sz = len(self.data)
if sz == 4096: return (64, 64)
if sz == 4160: return (64, 65)
root = int(sz**0.5)
if root**2 != sz:
raise TypeError("unable to determine size: not a square number")
return (root, root)
dimensions = property(get_dimensions)
width = property(lambda self: self.dimensions[0])
height = property(lambda self: self.dimensions[1])
def load_raw(self, data, *unused):
self.data = data
def to_raw(self):
return self.data
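# Hedged usage sketch (not part of the original omgifol module): the basic
# load/save round trip described in the Lump docstring; the file name below
# is hypothetical.
if __name__ == '__main__':
    lump = Lump(b'RAWDATA')
    lump.to_file('example.lmp')            # write the raw bytes to disk
    again = Lump(from_file='example.lmp')  # read them back
    assert again.data == lump.data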
|
jmickle66666666/omgifol
|
lump.py
|
Python
|
mit
| 11,325
|
#SPDX-License-Identifier: MIT
"""
Metrics that provide data about repository dependencies
"""
import sqlalchemy as s
import pandas as pd
from augur.util import register_metric
@register_metric()
def deps(self, repo_group_id, repo_id=None):
depsSQL = s.sql.text("""
SELECT * FROM augur_data.dependencies
""")
results = pd.read_sql(depsSQL, self.database)
return results
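# Hedged usage note (an assumption, not part of the original module): metrics
# decorated with @register_metric are exposed through Augur's metrics API and,
# called directly on a metrics instance, behave like ordinary methods, e.g.
#     metrics.deps(repo_group_id=1)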
|
OSSHealth/ghdata
|
augur/metrics/deps.py
|
Python
|
mit
| 395
|
# coding=utf-8
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, unicode_literals, division
try:
from typing import Dict, Text, Deque, List, Tuple
except ImportError:
pass
from contextlib import contextmanager
from threading import Lock
from flask_restful import abort
from zmq import green as zmq
import copy
from jormungandr.exceptions import TechnicalError
from navitiacommon import response_pb2, request_pb2, type_pb2
from navitiacommon.default_values import get_value_or_default
from jormungandr.timezone import set_request_instance_timezone
import logging
from jormungandr.exceptions import DeadSocketException
from navitiacommon import models
from importlib import import_module
from jormungandr import cache, memory_cache, app, global_autocomplete
from shapely import wkt, geometry
from shapely.geos import PredicateError, ReadingError, TopologicalError
from flask import g
import flask
import pybreaker
from jormungandr import georef, planner, schedule, realtime_schedule, ptref, street_network, fallback_modes
from jormungandr.scenarios.ridesharing import ridesharing_service
import six
import time
from collections import deque
from datetime import datetime, timedelta
from navitiacommon import default_values
from jormungandr.equipments import EquipmentProviderManager
type_to_pttype = {
"stop_area": request_pb2.PlaceCodeRequest.StopArea, # type: ignore
"network": request_pb2.PlaceCodeRequest.Network, # type: ignore
"company": request_pb2.PlaceCodeRequest.Company, # type: ignore
"line": request_pb2.PlaceCodeRequest.Line, # type: ignore
"route": request_pb2.PlaceCodeRequest.Route, # type: ignore
"vehicle_journey": request_pb2.PlaceCodeRequest.VehicleJourney, # type: ignore
"stop_point": request_pb2.PlaceCodeRequest.StopPoint, # type: ignore
"calendar": request_pb2.PlaceCodeRequest.Calendar, # type: ignore
}
@app.before_request
def _init_g():
g.instances_model = {}
# TODO: use this helper function for all properties if possible
# Warning: it breaks static type deduction
def _make_property_getter(attr_name):
"""
    A helper function that returns a property getter for an Instance attribute.
:param attr_name:
:return:
"""
def _getter(self):
return get_value_or_default(attr_name, self.get_models(), self.name)
return property(_getter)
class Instance(object):
name = None # type: Text
_sockets = None # type: Deque[Tuple[zmq.Socket, float]]
def __init__(
self,
context, # type: zmq.Context
name, # type: Text
zmq_socket, # type: Text
street_network_configurations,
ridesharing_configurations,
realtime_proxies_configuration,
zmq_socket_type,
autocomplete_type,
instance_equipment_providers, # type: List[Text]
streetnetwork_backend_manager,
):
self.geom = None
self.geojson = None
self._sockets = deque()
self.socket_path = zmq_socket
self._scenario = None
self._scenario_name = None
self.lock = Lock()
self.context = context
self.name = name
self.timezone = None # timezone will be fetched from the kraken
self.publication_date = -1
        self.is_initialized = False  # kraken hasn't been called yet, so we don't have geom or timezone
self.breaker = pybreaker.CircuitBreaker(
fail_max=app.config.get(str('CIRCUIT_BREAKER_MAX_INSTANCE_FAIL'), 5),
reset_timeout=app.config.get(str('CIRCUIT_BREAKER_INSTANCE_TIMEOUT_S'), 60),
)
self.georef = georef.Kraken(self)
self.planner = planner.Kraken(self)
self._streetnetwork_backend_manager = streetnetwork_backend_manager
if app.config[str('DISABLE_DATABASE')]:
self._streetnetwork_backend_manager.init_streetnetwork_backends_legacy(
self, street_network_configurations
)
self.ridesharing_services = [] # type: List[ridesharing_service.AbstractRidesharingService]
if ridesharing_configurations is not None:
self.ridesharing_services = ridesharing_service.Ridesharing.get_ridesharing_services(
self, ridesharing_configurations
)
self.ptref = ptref.PtRef(self)
self.schedule = schedule.MixedSchedule(self)
self.realtime_proxy_manager = realtime_schedule.RealtimeProxyManager(
realtime_proxies_configuration, self
)
self._autocomplete_type = autocomplete_type
if self._autocomplete_type is not None and self._autocomplete_type not in global_autocomplete:
raise RuntimeError(
'impossible to find autocomplete system {} '
'cannot initialize instance {}'.format(autocomplete_type, name)
)
self.zmq_socket_type = zmq_socket_type
if app.config[str('DISABLE_DATABASE')]:
self.equipment_provider_manager = EquipmentProviderManager(
app.config[str('EQUIPMENT_DETAILS_PROVIDERS')]
)
else:
self.equipment_provider_manager = EquipmentProviderManager(
app.config[str('EQUIPMENT_DETAILS_PROVIDERS')], self.get_providers_from_db
)
# Init equipment providers from config
self.equipment_provider_manager.init_providers(instance_equipment_providers)
def get_providers_from_db(self):
"""
:return: a callable query of equipment providers associated to the current instance in db
"""
return self._get_models().equipment_details_providers
@property
def autocomplete(self):
if self._autocomplete_type:
# retrocompat: we need to continue to read configuration from file
# while we migrate to database configuration
return global_autocomplete.get(self._autocomplete_type)
backend = global_autocomplete.get(self.autocomplete_backend)
if backend is None:
raise RuntimeError(
'impossible to find autocomplete {} for instance {}'.format(self.autocomplete_backend, self.name)
)
return backend
def stop_point_fallbacks(self):
return [a for a in global_autocomplete.values() if a.is_handling_stop_points()]
def get_models(self):
if self.name not in g.instances_model:
g.instances_model[self.name] = self._get_models()
return g.instances_model[self.name]
def __repr__(self):
return 'instance.{}'.format(self.name)
@memory_cache.memoize(app.config[str('MEMORY_CACHE_CONFIGURATION')].get(str('TIMEOUT_PARAMS'), 30))
@cache.memoize(app.config[str('CACHE_CONFIGURATION')].get(str('TIMEOUT_PARAMS'), 300))
def _get_models(self):
if app.config['DISABLE_DATABASE']:
return None
return models.Instance.get_by_name(self.name)
def scenario(self, override_scenario=None):
"""
once a scenario has been chosen for a request for an instance (coverage), we cannot change it
"""
if hasattr(g, 'scenario') and g.scenario.get(self.name):
return g.scenario[self.name]
def replace_experimental_scenario(s):
return 'distributed' if s == 'experimental' else s
if override_scenario:
logger = logging.getLogger(__name__)
logger.debug('overriding the scenario for %s with %s', self.name, override_scenario)
try:
# for the sake of backwards compatibility... some users may still be using experimental...
override_scenario = replace_experimental_scenario(override_scenario)
module = import_module('jormungandr.scenarios.{}'.format(override_scenario))
except ImportError:
logger.exception('scenario not found')
abort(404, message='invalid scenario: {}'.format(override_scenario))
scenario = module.Scenario()
# Save scenario_name and scenario
self._scenario_name = override_scenario
self._scenario = scenario
if not hasattr(g, 'scenario'):
g.scenario = {}
g.scenario[self.name] = scenario
return scenario
instance_db = self.get_models()
scenario_name = instance_db.scenario if instance_db else 'new_default'
# for the sake of backwards compatibility... some users may still be using experimental...
scenario_name = replace_experimental_scenario(scenario_name)
if not self._scenario or scenario_name != self._scenario_name:
logger = logging.getLogger(__name__)
logger.info('loading of scenario %s for instance %s', scenario_name, self.name)
self._scenario_name = scenario_name
module = import_module('jormungandr.scenarios.{}'.format(scenario_name))
self._scenario = module.Scenario()
# we save the used scenario for future use
if not hasattr(g, 'scenario'):
g.scenario = {}
g.scenario[self.name] = self._scenario
return self._scenario
@property
def journey_order(self):
# type: () -> Text
instance_db = self.get_models()
return get_value_or_default('journey_order', instance_db, self.name)
@property
def autocomplete_backend(self):
# type: () -> Text
instance_db = self.get_models()
return get_value_or_default('autocomplete_backend', instance_db, self.name)
@property
def max_walking_duration_to_pt(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('max_walking_duration_to_pt', instance_db, self.name)
@property
def max_bss_duration_to_pt(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('max_bss_duration_to_pt', instance_db, self.name)
@property
def max_bike_duration_to_pt(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('max_bike_duration_to_pt', instance_db, self.name)
@property
def max_car_duration_to_pt(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('max_car_duration_to_pt', instance_db, self.name)
@property
def max_car_no_park_duration_to_pt(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('max_car_no_park_duration_to_pt', instance_db, self.name)
@property
def max_ridesharing_duration_to_pt(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('max_ridesharing_duration_to_pt', instance_db, self.name)
@property
def max_taxi_duration_to_pt(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('max_taxi_duration_to_pt', instance_db, self.name)
@property
def walking_speed(self):
# type: () -> float
instance_db = self.get_models()
return get_value_or_default('walking_speed', instance_db, self.name)
@property
def bss_speed(self):
# type: () -> float
instance_db = self.get_models()
return get_value_or_default('bss_speed', instance_db, self.name)
@property
def bike_speed(self):
# type: () -> float
instance_db = self.get_models()
return get_value_or_default('bike_speed', instance_db, self.name)
@property
def car_speed(self):
# type: () -> float
instance_db = self.get_models()
return get_value_or_default('car_speed', instance_db, self.name)
@property
def car_no_park_speed(self):
# type: () -> float
instance_db = self.get_models()
return get_value_or_default('car_no_park_speed', instance_db, self.name)
@property
def ridesharing_speed(self):
# type: () -> float
instance_db = self.get_models()
return get_value_or_default('ridesharing_speed', instance_db, self.name)
@property
def max_nb_transfers(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('max_nb_transfers', instance_db, self.name)
@property
def min_bike(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('min_bike', instance_db, self.name)
@property
def min_bss(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('min_bss', instance_db, self.name)
@property
def min_car(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('min_car', instance_db, self.name)
@property
def min_taxi(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('min_taxi', instance_db, self.name)
@property
def successive_physical_mode_to_limit_id(self):
# type: () -> Text
instance_db = self.get_models()
return get_value_or_default('successive_physical_mode_to_limit_id', instance_db, self.name)
@property
def priority(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('priority', instance_db, self.name)
@property
def bss_provider(self):
# type: () -> bool
instance_db = self.get_models()
return get_value_or_default('bss_provider', instance_db, self.name)
@property
def car_park_provider(self):
# type: () -> bool
instance_db = self.get_models()
return get_value_or_default('car_park_provider', instance_db, self.name)
@property
def max_additional_connections(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('max_additional_connections', instance_db, self.name)
@property
def is_free(self):
# type: () -> bool
instance_db = self.get_models()
if not instance_db:
return False
else:
return instance_db.is_free
@property
def is_open_data(self):
# type: () -> bool
instance_db = self.get_models()
if not instance_db:
return False
else:
return instance_db.is_open_data
@property
def max_duration(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('max_duration', instance_db, self.name)
@property
def walking_transfer_penalty(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('walking_transfer_penalty', instance_db, self.name)
@property
def night_bus_filter_max_factor(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('night_bus_filter_max_factor', instance_db, self.name)
@property
def night_bus_filter_base_factor(self):
# type: () -> float
instance_db = self.get_models()
return get_value_or_default('night_bus_filter_base_factor', instance_db, self.name)
@property
def realtime_pool_size(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('realtime_pool_size', instance_db, self.name)
@property
def min_nb_journeys(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('min_nb_journeys', instance_db, self.name)
@property
def max_nb_journeys(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('max_nb_journeys', instance_db, self.name)
@property
def min_journeys_calls(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('min_journeys_calls', instance_db, self.name)
@property
def max_successive_physical_mode(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('max_successive_physical_mode', instance_db, self.name)
@property
def final_line_filter(self):
instance_db = self.get_models()
return get_value_or_default('final_line_filter', instance_db, self.name)
@property
def max_extra_second_pass(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('max_extra_second_pass', instance_db, self.name)
@property
def asynchronous_ridesharing(self):
# type: () -> bool
instance_db = self.get_models()
return get_value_or_default('asynchronous_ridesharing', instance_db, self.name)
@property
def max_nb_crowfly_by_mode(self):
# type: () -> Dict[Text, int]
instance_db = self.get_models()
# the value by default is a dict...
d = copy.deepcopy(get_value_or_default('max_nb_crowfly_by_mode', instance_db, self.name))
        # In case we add a new max_nb_crowfly for another mode than
        # the ones already present in the database.
for mode, duration in default_values.max_nb_crowfly_by_mode.items():
if mode not in d:
d[mode] = duration
return d
@property
def poi_dataset(self):
# type: () -> Text
instance_db = self.get_models()
return instance_db.poi_dataset if instance_db else None
@property
def max_car_no_park_direct_path_duration(self):
# type: () -> int
instance_db = self.get_models()
return get_value_or_default('max_car_no_park_direct_path_duration', instance_db, self.name)
# TODO: refactorise all properties
taxi_speed = _make_property_getter('taxi_speed')
additional_time_after_first_section_taxi = _make_property_getter('additional_time_after_first_section_taxi')
additional_time_before_last_section_taxi = _make_property_getter('additional_time_before_last_section_taxi')
max_walking_direct_path_duration = _make_property_getter('max_walking_direct_path_duration')
max_bike_direct_path_duration = _make_property_getter('max_bike_direct_path_duration')
max_bss_direct_path_duration = _make_property_getter('max_bss_direct_path_duration')
max_car_direct_path_duration = _make_property_getter('max_car_direct_path_duration')
max_taxi_direct_path_duration = _make_property_getter('max_taxi_direct_path_duration')
max_ridesharing_direct_path_duration = _make_property_getter('max_ridesharing_direct_path_duration')
street_network_car = _make_property_getter('street_network_car')
street_network_car_no_park = _make_property_getter('street_network_car_no_park')
street_network_walking = _make_property_getter('street_network_walking')
street_network_bike = _make_property_getter('street_network_bike')
street_network_bss = _make_property_getter('street_network_bss')
street_network_ridesharing = _make_property_getter('street_network_ridesharing')
street_network_taxi = _make_property_getter('street_network_taxi')
stop_points_nearby_duration = _make_property_getter('stop_points_nearby_duration')
def reap_socket(self, ttl):
# type: (int) -> None
if self.zmq_socket_type != 'transient':
return
logger = logging.getLogger(__name__)
now = time.time()
while True:
try:
socket, t = self._sockets.popleft()
if now - t > ttl:
logger.debug("closing one socket for %s", self.name)
socket.setsockopt(zmq.LINGER, 0)
socket.close()
else:
self._sockets.appendleft((socket, t))
                    break  # remaining sockets are still in "keep alive" state
except IndexError:
break
@contextmanager
def socket(self, context):
socket = None
try:
socket, _ = self._sockets.pop()
        except IndexError:  # there is no socket available: let's create one
socket = context.socket(zmq.REQ)
socket.connect(self.socket_path)
try:
yield socket
finally:
if not socket.closed:
self._sockets.append((socket, time.time()))
def send_and_receive(self, *args, **kwargs):
"""
        encapsulate all calls to kraken in a circuit breaker; this way we don't lose time calling a dead instance
"""
try:
return self.breaker.call(self._send_and_receive, *args, **kwargs)
except pybreaker.CircuitBreakerError as e:
raise DeadSocketException(self.name, self.socket_path)
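    # Illustrative sketch (not part of the original code): the pattern used by
    # send_and_receive() above can be reproduced with pybreaker alone; the names
    # and thresholds below are hypothetical:
    #
    #   breaker = pybreaker.CircuitBreaker(fail_max=5, reset_timeout=60)
    #   try:
    #       resp = breaker.call(do_zmq_request, request)
    #   except pybreaker.CircuitBreakerError:
    #       ...  # the kraken is considered dead, fail fast without waiting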
def _send_and_receive(
self, request, timeout=app.config.get('INSTANCE_TIMEOUT', 10000), quiet=False, **kwargs
):
logger = logging.getLogger(__name__)
deadline = datetime.utcnow() + timedelta(milliseconds=timeout)
request.deadline = deadline.strftime('%Y%m%dT%H%M%S,%f')
with self.socket(self.context) as socket:
if 'request_id' in kwargs and kwargs['request_id']:
request.request_id = kwargs['request_id']
else:
try:
request.request_id = flask.request.id
except RuntimeError:
# we aren't in a flask context, so there is no request
if 'flask_request_id' in kwargs and kwargs['flask_request_id']:
request.request_id = kwargs['flask_request_id']
socket.send(request.SerializeToString())
if socket.poll(timeout=timeout) > 0:
pb = socket.recv()
resp = response_pb2.Response()
resp.ParseFromString(pb)
self.update_property(resp) # we update the timezone and geom of the instances at each request
return resp
else:
socket.setsockopt(zmq.LINGER, 0)
socket.close()
if not quiet:
logger.error('request on %s failed: %s', self.socket_path, six.text_type(request))
raise DeadSocketException(self.name, self.socket_path)
def get_id(self, id_):
"""
        Get the pt_object that has the given id
"""
req = request_pb2.Request()
req.requested_api = type_pb2.place_uri
req.place_uri.uri = id_
return self.send_and_receive(req, timeout=app.config.get('INSTANCE_FAST_TIMEOUT', 1000))
def has_id(self, id_):
"""
        Does this instance have this id
"""
try:
return len(self.get_id(id_).places) > 0
except DeadSocketException:
return False
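    # Illustrative usage (the uri below is hypothetical): get_id()/has_id()
    # resolve a pt_object on this instance, e.g.
    #
    #   if instance.has_id("stop_area:XXX:SA:123"):
    #       place = instance.get_id("stop_area:XXX:SA:123").places[0]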
def has_coord(self, lon, lat):
return self.has_point(geometry.Point(lon, lat))
def has_point(self, p):
try:
return self.geom and self.geom.contains(p)
except DeadSocketException:
return False
except PredicateError:
logging.getLogger(__name__).exception("has_coord failed")
return False
except TopologicalError:
logging.getLogger(__name__).exception("check_topology failed")
return False
def get_external_codes(self, type_, id_):
"""
        Get all pt_objects with the given id
"""
req = request_pb2.Request()
req.requested_api = type_pb2.place_code
if type_ not in type_to_pttype:
raise ValueError("Can't call pt_code API with type: {}".format(type_))
req.place_code.type = type_to_pttype[type_]
req.place_code.type_code = "external_code"
req.place_code.code = id_
# we set the timeout to 1s
return self.send_and_receive(req, timeout=app.config.get('INSTANCE_FAST_TIMEOUT', 1000))
def has_external_code(self, type_, id_):
"""
        Does this instance have the given id?
        Returns None if it doesn't, the kraken uri otherwise
"""
try:
res = self.get_external_codes(type_, id_)
except DeadSocketException:
return False
if len(res.places) > 0:
return res.places[0].uri
return None
def update_property(self, response):
"""
        update the properties of an instance from a response if the metadatas field is present
"""
# after a successful call we consider the instance initialised even if no data were loaded
self.is_initialized = True
if response.HasField(str("metadatas")) and response.publication_date != self.publication_date:
logging.getLogger(__name__).debug('updating metadata for %s', self.name)
with self.lock as lock:
self.publication_date = response.publication_date
if response.metadatas.shape and response.metadatas.shape != "":
try:
self.geom = wkt.loads(response.metadatas.shape)
except ReadingError:
self.geom = None
else:
self.geom = None
self.timezone = response.metadatas.timezone
self._update_geojson()
set_request_instance_timezone(self)
def _update_geojson(self):
"""construct the geojson object from the shape"""
if not self.geom or not self.geom.is_valid:
self.geojson = None
return
# simplify the geom to prevent slow query on bragi
geom = self.geom.simplify(tolerance=0.01)
self.geojson = geometry.mapping(geom)
def init(self):
"""
Get and store variables of the instance.
Returns True if we need to clear the cache, False otherwise.
"""
pub_date = self.publication_date
req = request_pb2.Request()
req.requested_api = type_pb2.METADATAS
request_id = "instance_init"
try:
# we use _send_and_receive to avoid the circuit breaker, we don't want fast fail on init :)
resp = self._send_and_receive(req, request_id=request_id, timeout=1000, quiet=True)
# the instance is automatically updated on a call
if self.publication_date != pub_date:
return True
except DeadSocketException:
# we don't do anything on error, a new session will be established to an available kraken on
# the next request. We don't want to purge all our cache for a small error.
logging.getLogger(__name__).debug('timeout on init for %s', self.name)
return False
def _get_street_network(self, mode, request):
if app.config[str('DISABLE_DATABASE')]:
return self._streetnetwork_backend_manager.get_street_network_legacy(self, mode, request)
else:
# We get the name of the column in the database corresponding to the mode used in the request
# And we get the value of this column for this instance
column_in_db = "street_network_{}".format(mode)
streetnetwork_backend_conf = getattr(self, column_in_db)
return self._streetnetwork_backend_manager.get_street_network_db(self, streetnetwork_backend_conf)
def get_street_network(self, mode, request):
if mode != fallback_modes.FallbackModes.car.name:
return self._get_street_network(mode, request)
walking_service = self._get_street_network(fallback_modes.FallbackModes.walking.name, request)
car_service = self._get_street_network(fallback_modes.FallbackModes.car.name, request)
return street_network.CarWithPark(
instance=self, walking_service=walking_service, car_service=car_service
)
def get_all_street_networks(self):
if app.config[str('DISABLE_DATABASE')]:
return self._streetnetwork_backend_manager.get_all_street_networks_legacy(self)
else:
return self._streetnetwork_backend_manager.get_all_street_networks_db(self)
def get_autocomplete(self, requested_autocomplete):
if not requested_autocomplete:
return self.autocomplete
autocomplete = global_autocomplete.get(requested_autocomplete)
if not autocomplete:
raise TechnicalError('autocomplete {} not available'.format(requested_autocomplete))
return autocomplete
def get_ridesharing_journeys_with_feed_publishers(self, from_coord, to_coord, period_extremity, limit=None):
res = []
fps = set()
for service in self.ridesharing_services:
rsjs, fp = service.request_journeys_with_feed_publisher(
from_coord, to_coord, period_extremity, limit
)
res.extend(rsjs)
fps.add(fp)
return res, fps
| xlqian/navitia | source/jormungandr/jormungandr/instance.py | Python | agpl-3.0 | 30,251 |
import os
import zipfile
import simplejson as json
from flask import Blueprint, Response, current_app, request
from models.repository import Repository
from formatters.repository_formatter import RepositoryFormatter
from datetime import datetime
index_blueprint = Blueprint('index', __name__)
@index_blueprint.route("/")
def index():
output = []
for repository in Repository.all(current_app.config.get("REPOS_DIR_PATH")):
output.append(RepositoryFormatter.json_format(repository))
return Response(json.dumps(output, sort_keys=True), mimetype="application/json")
@index_blueprint.route("/<repository_name>")
def view(repository_name):
repository = Repository.find(current_app.config.get("REPOS_DIR_PATH"), repository_name)
if repository:
changes = {}
changes["added_documents"] = [change.a_path for change in repository.added_files()]
changes["deleted_documents"] = [change.a_path for change in repository.deleted_files()]
changes["modified_documents"] = [change.a_path for change in repository.modified_files()]
return Response(json.dumps(changes, sort_keys=True), mimetype="application/json")
else:
error = { "error": "Not found!" }
return Response(json.dumps(error, sort_keys=True), mimetype="application/json"), 404
@index_blueprint.route("/<repository_name>/files")
def view_files(repository_name):
repository = Repository.find(current_app.config.get("REPOS_DIR_PATH"), repository_name)
if repository:
search_term = request.args.get("search_term")
if search_term:
return Response(json.dumps(repository.search(search_term), sort_keys=True), mimetype="application/json")
else:
return Response(json.dumps(repository.list_files(), sort_keys=True), mimetype="application/json")
else:
error = { "error": "Not found!" }
return Response(json.dumps(error, sort_keys=True), mimetype="application/json"), 404
@index_blueprint.route("/<repository_name>/files/<file_name>")
def view_file(repository_name, file_name):
steps = request.args.get("steps")
repository = Repository.find(current_app.config.get("REPOS_DIR_PATH"), repository_name)
if repository:
if steps:
if repository.file_exists_in_commit(file_name, steps):
file_content = repository.get_file_content_from_commit(file_name, steps)
return render_file_content(file_content)
else:
if repository.file_exists(file_name):
file_content = repository.get_file_content(file_name)
return render_file_content(file_content)
error = { "error": "Not found!" }
return Response(json.dumps(error, sort_keys=True), mimetype="application/json"), 404
@index_blueprint.route("/<repository_name>/update", methods=["post"])
def updated_repository(repository_name):
repository_path = os.path.join(current_app.config.get("REPOS_DIR_PATH"), repository_name)
repository = Repository.find_or_create(current_app.config.get("REPOS_DIR_PATH"), repository_name)
zip_file = zipfile.ZipFile(request.files["file"])
for name in [name for name in os.listdir(repository_path) if name != ".git"]:
os.remove(os.path.join(repository_path, name))
zip_file.extractall(repository_path)
repository.repo.git.add(A=True)
try:
repository.repo.git.commit(m=datetime.today().strftime("%s"))
except:
return "No changes where made"
return "updated"
@index_blueprint.route("/<repository_name>/files/<file_name>/diff")
def view_File(repository_name, file_name):
repository = Repository.find(current_app.config.get("REPOS_DIR_PATH"), repository_name)
steps = request.args.get("steps")
if repository:
if repository.file_exists(file_name):
if steps:
return Response(repository.get_file_diff(file_name, steps), mimetype="text/plain")
else:
return Response(repository.get_file_diff(file_name, 1), mimetype="text/plain")
error = { "error": "Not found!" }
return Response(json.dumps(error, sort_keys=True), mimetype="application/json"), 404
@index_blueprint.route("/<repository_name>/commits")
def list_commits(repository_name):
amount = request.args.get("amount") or 10
repository = Repository.find(current_app.config.get("REPOS_DIR_PATH"), repository_name)
formatted_commits = []
for index, commit in enumerate(repository.get_commits(amount)):
commit_object = {}
commit_object["time"] = datetime.fromtimestamp(commit.committed_date).isoformat()
commit_object["step"] = index
formatted_commits.append(commit_object)
return Response(json.dumps(formatted_commits, sort_keys=True), mimetype="application/json")
def render_file_content(content):
if isinstance(content, str):
return Response("{}", mimetype="application/json")
else:
return Response(content.decode("utf-8"), mimetype="application/json")
| c0d3m0nkey/json-diff-api | app/controllers/index_controller.py | Python | bsd-2-clause | 5,021 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class Channel(models.Model):
_inherit = 'slide.channel'
nbr_certification = fields.Integer("Number of Certifications", compute='_compute_slides_statistics', store=True)
| ddico/odoo | addons/website_slides_survey/models/slide_channel.py | Python | agpl-3.0 | 312 |
#
# network.py - network configuration install data
#
# Copyright (C) 2008, 2009 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author(s): David Cantrell <dcantrell@redhat.com>
#
import dbus
# This code was taken from anaconda, and slightly adapted to not rely
# on isys.
NM_SERVICE = "org.freedesktop.NetworkManager"
NM_MANAGER_PATH = "/org/freedesktop/NetworkManager"
NM_STATE_CONNECTED = 3
DBUS_PROPS_IFACE = "org.freedesktop.DBus.Properties"
def hasActiveNetDev():
"""Does the system have an enabled network interface?"""
try:
bus = dbus.SystemBus()
nm = bus.get_object(NM_SERVICE, NM_MANAGER_PATH)
props = dbus.Interface(nm, DBUS_PROPS_IFACE)
state = props.Get(NM_SERVICE, "State")
if int(state) == NM_STATE_CONNECTED:
return True
else:
return False
except:
return False
| vojtechtrefny/python-meh | meh/network.py | Python | gpl-2.0 | 1,481 |
#!/usr/bin/env python
def main():
    pass
if __name__ == "__main__":
    main()
| prodicus/dabble | sqlite3/prac.py | Python | mit | 72 |
"""... automodule::"""
from autoencoder import Autoencoder
from backproptrainer import BackPropTrainer, SparseBackPropTrainer
| thomlake/EbmLib | ebmlib/autoencoder/__init__.py | Python | gpl-3.0 | 126 |
# stdlib
import importlib
import sys
from types import ModuleType
from typing import Any
from typing import Any as TypeAny
from typing import Dict as TypeDict
from typing import Iterable
from typing import List as TypeList
from typing import Optional
from typing import Set as TypeSet
from typing import Tuple as TypeTuple
from typing import Union as TypeUnion
import warnings
# third party
from cachetools import cached
from cachetools.keys import hashkey
from packaging import version
# relative
from ..ast.globals import Globals
from ..core.adp import create_adp_ast
from ..core.node.abstract.node import AbstractNodeClient
from ..core.node.common import create_client_ast
from ..core.smpc import create_smpc_ast
from ..core.tensor import create_tensor_ast
from ..logger import critical
from ..logger import traceback_and_raise
from ..logger import warning
from .misc import create_union_ast
from .numpy import create_ast
from .python import create_python_ast
from .torch import create_torch_ast
class VendorLibraryImportException(Exception):
pass
def vendor_requirements_available(vendor_requirements: TypeDict[str, TypeAny]) -> bool:
"""
    Check whether the python and torch versions required by the vendor library are supported
Args:
vendor_requirements: dictionary containing version of python or torch to be supported
Returns:
True if system supports all vendor requirements
"""
# see if python version is supported
if "python" in vendor_requirements:
python_reqs = vendor_requirements["python"]
PYTHON_VERSION = sys.version_info
min_version = python_reqs.get("min_version", None)
if min_version is not None:
if PYTHON_VERSION < min_version:
traceback_and_raise(
VendorLibraryImportException(
f"Unable to load {vendor_requirements['lib']}."
+ f"Python: {PYTHON_VERSION} < {min_version}"
)
)
max_version = python_reqs.get("max_version", None)
if max_version is not None:
if PYTHON_VERSION > max_version:
traceback_and_raise(
VendorLibraryImportException(
f"Unable to load {vendor_requirements['lib']}."
+ f"Python: {PYTHON_VERSION} > {max_version}"
)
)
# see if torch version is supported
if "torch" in vendor_requirements:
torch_reqs = vendor_requirements["torch"]
# third party
import torch
TORCH_VERSION = version.parse(torch.__version__.split("+")[0])
min_version = torch_reqs.get("min_version", None)
if min_version is not None:
if TORCH_VERSION < version.parse(min_version):
traceback_and_raise(
VendorLibraryImportException(
f"Unable to load {vendor_requirements['lib']}."
+ f"Torch: {TORCH_VERSION} < {min_version}"
)
)
max_version = torch_reqs.get("max_version", None)
if max_version is not None:
if TORCH_VERSION > version.parse(max_version):
traceback_and_raise(
VendorLibraryImportException(
f"Unable to load {vendor_requirements['lib']}."
+ f"Torch: {TORCH_VERSION} > {max_version}"
)
)
return True
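# Illustrative example (values are hypothetical): a vendor_requirements dict for
# a library needing at least Python 3.7 and torch 1.8 would look like
#
#   example_requirements = {
#       "lib": "example_lib",
#       "python": {"min_version": (3, 7)},
#       "torch": {"min_version": "1.8.0"},
#   }
#   vendor_requirements_available(vendor_requirements=example_requirements)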
def _add_lib(
*, vendor_ast: ModuleType, ast_or_client: TypeUnion[Globals, AbstractNodeClient]
) -> None:
update_ast = getattr(vendor_ast, "update_ast", None)
if update_ast is not None:
update_ast(ast_or_client=ast_or_client)
def _regenerate_unions(*, lib_ast: Globals, client: TypeAny = None) -> None:
union_misc_ast = getattr(
getattr(create_union_ast(lib_ast=lib_ast, client=client), "syft", None),
"lib",
None,
)
if union_misc_ast is None:
return
elif client is not None:
client.syft.lib.add_attr(attr_name="misc", attr=union_misc_ast.attrs["misc"])
else:
lib_ast.syft.lib.add_attr(attr_name="misc", attr=union_misc_ast.attrs["misc"])
def get_cache() -> TypeDict:
return dict()
@cached(get_cache(), lambda *, lib, options=None: hashkey(lib))
def _load_lib(*, lib: str, options: Optional[TypeDict[str, TypeAny]] = None) -> None:
"""
Load and Update Node with given library module
Args:
lib: name of library to load and update Node with
options: external requirements for loading library successfully
"""
global lib_ast
_options = {} if options is None else options
_ = importlib.import_module(lib)
vendor_ast = importlib.import_module(f"syft.lib.{lib}")
PACKAGE_SUPPORT = getattr(vendor_ast, "PACKAGE_SUPPORT", None)
if PACKAGE_SUPPORT is None:
raise Exception(f"Unable to load package: {lib}. Missing PACKAGE_SUPPORT.")
PACKAGE_SUPPORT.update(_options)
if PACKAGE_SUPPORT is not None and vendor_requirements_available(
vendor_requirements=PACKAGE_SUPPORT
):
_add_lib(vendor_ast=vendor_ast, ast_or_client=lib_ast)
# cache the constructor for future created clients
update_ast_func = getattr(vendor_ast, "update_ast", None)
if update_ast_func is None:
raise Exception(f"Unable to load package: {lib}. Missing update_ast func")
lib_ast.loaded_lib_constructors[lib] = update_ast_func
_regenerate_unions(lib_ast=lib_ast)
for _, client in lib_ast.registered_clients.items():
_add_lib(vendor_ast=vendor_ast, ast_or_client=client)
_regenerate_unions(lib_ast=lib_ast, client=client)
def load(
*libs: TypeUnion[TypeList[str], TypeTuple[str], TypeSet[str], str],
options: Optional[TypeDict[str, TypeAny]] = None,
ignore_warning: bool = False,
**kwargs: str,
) -> None:
"""
Load and Update Node with given library module
Args:
*libs: names of libraries to load and update Node with (can be variadic, tuple, list, set)
options: external requirements for loading library successfully
**kwargs: for backward compatibility with calls like `syft.load(lib = "opacus")`
"""
# For backward compatibility with calls like `syft.load(lib = "opacus")`
# Note: syft.load(lib = "opacus") doesnot work as it iterates the string, syft.load('opacus') works
options = options if options is not None else {}
if not ignore_warning:
msg = "sy.load() is deprecated and not needed anymore"
warning(msg, print=True)
warnings.warn(msg, DeprecationWarning)
if "lib" in kwargs.keys():
libs += tuple(kwargs["lib"])
if isinstance(libs[0], Iterable):
if not isinstance(libs[0], str):
libs = tuple(libs[0])
for lib in libs:
if isinstance(lib, str):
try:
_load_lib(lib=str(lib), options=options)
except VendorLibraryImportException as e:
warning(e)
except Exception as e:
warning(f"Unable to load package support for: {lib}. {e}")
else:
warning(
f"Unable to load package support for: {lib}. Pass lib name as string object."
)
else:
critical(
"Unable to load package support for any library. Iterable object not found."
)
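# Illustrative calls (library names are hypothetical): load() accepts a variadic
# list, a tuple/list/set, or a single string, e.g.
#
#   sy.load("opacus")
#   sy.load("opacus", "pandas")
#   sy.load(["opacus", "pandas"])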
def load_lib(lib: str, options: Optional[TypeDict[str, TypeAny]] = None) -> None:
"""
Load and Update Node with given library module
    _load_lib() is deprecated; please use load() instead
Args:
lib: name of library to load and update Node with
options: external requirements for loading library successfully
"""
options = options if options is not None else {}
msg = "sy._load_lib() is deprecated and not needed anymore"
warning(msg, print=True)
warnings.warn(msg, DeprecationWarning)
load(lib=lib, options=options)
# now we need to load the relevant frameworks onto the node
def create_lib_ast(client: Optional[Any] = None) -> Globals:
"""
Create AST and load the relevant frameworks onto the node
Args:
client: VM client onto whom the frameworks need to be loaded
Returns:
AST for client of type Globals
"""
python_ast = create_python_ast(client=client)
torch_ast = create_torch_ast(client=client)
numpy_ast = create_ast(client=client)
adp_ast = create_adp_ast(client=client)
client_ast = create_client_ast(client=client)
tensor_ast = create_tensor_ast(client=client)
smpc_ast = create_smpc_ast(client=client)
lib_ast = Globals(client=client)
lib_ast.add_attr(attr_name="syft", attr=python_ast.attrs["syft"])
lib_ast.add_attr(attr_name="torch", attr=torch_ast.attrs["torch"])
lib_ast.add_attr(attr_name="numpy", attr=numpy_ast.attrs["numpy"])
lib_ast.syft.add_attr("core", attr=adp_ast.syft.core)
lib_ast.syft.core.add_attr("node", attr=client_ast.syft.core.node)
lib_ast.syft.core.add_attr("common", attr=client_ast.syft.core.common)
lib_ast.syft.core.add_attr("tensor", attr=tensor_ast.syft.core.tensor)
lib_ast.syft.core.add_attr("smpc", attr=smpc_ast.syft.core.smpc)
# let the misc creation be always the last, as it needs the full ast solved
# to properly generated unions
union_misc_ast = getattr(
getattr(create_union_ast(lib_ast, client), "syft", None), "lib", None
)
if union_misc_ast:
lib_ast.syft.lib.add_attr(attr_name="misc", attr=union_misc_ast.attrs["misc"])
return lib_ast
lib_ast = create_lib_ast(None)
# @wrapt.when_imported("numpy")
# def post_import_hook_third_party(module: TypeAny) -> None:
# """
# Note: This needs to be after `lib_ast` because code above uses lib-ast
# """
# # msg = f"inside post_import_hook_third_party module_name {module.__name__}"
# # warning(msg, print=True)
# # warnings.warn(msg, DeprecationWarning)
# load(module.__name__, ignore_warning=True)
| OpenMined/PySyft | packages/syft/src/syft/lib/__init__.py | Python | apache-2.0 | 10,154 |
import logging
from datetime import date
from util import parse_date
from programtitles import ProgramTitles
from programeventdetails import ProgramEventDetails
from programdescriptionlist import ProgramDescriptionList
from programmetadata import ProgramMetadata
from programcast import ProgramCast
from programcrew import ProgramCrew
from programcontentrating import ProgramContentRating
from programrecommendation import ProgramRecommendation
from programmovie import ProgramMovie
from programkeywords import ProgramKeywords
from image import Image
from programaward import ProgramAward
class Program(object):
def __init__(self):
self.program_id = None # type: unicode
self.md5 = None # type: unicode
self.titles = None # type: ProgramTitles
self.event_details = None # type: ProgramEventDetails
self.descriptions = None # type: ProgramDescriptionList
self.original_air_date = None # type: date
self.genres = [] # type: List[unicode]
self.episode_title = None # type: unicode
self.metadata = None # type: ProgramMetadata
self.cast = [] # type: List[ProgramCast]
self.crew = [] # type: List[ProgramCrew]
self.show_type = None # type: unicode
self.has_image_artwork = False # type: bool
self.content_ratings = [] # type: List[ProgramContentRating]
self.content_advisories = [] # type: List[unicode]
self.recommendations = [] # type: List[ProgramRecommendation]
self.movie = None # type: ProgramMovie
self.episode_num = None # type: int
self.animation = None # type: unicode
self.audience = None # type: unicode
self.holiday = None # type: unicode
self.keywords = None # type: ProgramKeywords
self.official_url = None # type: unicode
self.entity_type = None # type: unicode
self.resource_id = None # type: unicode
self.episode_image = None # type: Image
self.duration = None # type: int
self.awards = None # type: List[ProgramAward]
@property
def artwork_id(self): # type: () -> unicode
if not self.has_image_artwork:
return None
return self.program_id[0:10]
@property
def is_show_entity(self): # type: () -> bool
return self.entity_type == u"Show"
@property
def is_episode_entity(self): # type: () -> bool
return self.entity_type == u"Episode"
@property
def is_movie_entity(self): # type: () -> bool
return self.entity_type == u"Movie"
@property
def is_sports_entity(self): # type: () -> bool
return self.entity_type == u"Sports"
def __unicode__(self): # type: () -> unicode
return u"{0.program_id} '{1.title120}'".format(self, self.titles)
def __str__(self):
return unicode(self).encode("utf-8")
def get_content_rating(self, body):
return next((content_rating for content_rating in self.content_ratings if content_rating.body == body), None)
def get_cast(self, in_roles): # type: (List[unicode]) -> List[ProgramCast]
return [cast for cast in self.cast if cast.role in in_roles]
def get_crew(self, in_roles): # type: (List[unicode]) -> List[ProgramCrew]
return [crew for crew in self.crew if crew.role in in_roles]
@staticmethod
def from_dict(dct): # type: (dict) -> Program
"""
:param dct:
:return:
"""
if "programID" not in dct or "md5" not in dct:
return dct
program = Program()
program.program_id = dct.pop("programID")
if program.program_id[:2] == "EP":
program.episode_num = int(program.program_id[-4:])
program.titles = ProgramTitles.from_iterable(dct.pop("titles"))
program.md5 = dct.pop("md5")
if "eventDetails" in dct:
program.event_details = ProgramEventDetails.from_dict(dct.pop("eventDetails"))
if "descriptions" in dct:
program.descriptions = ProgramDescriptionList.from_dict(dct.pop("descriptions"))
if "originalAirDate" in dct:
program.original_air_date = parse_date(dct.pop("originalAirDate"))
if "genres" in dct:
program.genres = dct.pop("genres")
if "episodeTitle150" in dct:
program.episode_title = dct.pop("episodeTitle150")
if "metadata" in dct:
program.metadata = ProgramMetadata.from_iterable(dct.pop("metadata"))
if "cast" in dct:
program.cast = ProgramCast.from_iterable(dct.pop("cast"))
if "crew" in dct:
program.crew = ProgramCrew.from_iterable(dct.pop("crew"))
if "showType" in dct:
program.show_type = dct.pop("showType")
if "hasImageArtwork" in dct:
program.has_image_artwork = dct.pop("hasImageArtwork")
if "contentRating" in dct:
program.content_ratings = ProgramContentRating.from_iterable(dct.pop("contentRating"))
if "contentAdvisory" in dct:
program.content_advisories = dct.pop("contentAdvisory")
if "recommendations" in dct:
program.recommendations = ProgramRecommendation.from_iterable(dct.pop("recommendations"))
if "movie" in dct:
program.movie = ProgramMovie.from_dict(dct.pop("movie"))
if "animation" in dct:
program.animation = dct.pop("animation")
if "audience" in dct:
program.audience = dct.pop("audience")
if "holiday" in dct:
program.holiday = dct.pop("holiday")
if "keyWords" in dct:
program.keywords = ProgramKeywords.from_dict(dct.pop("keyWords"))
if "officialURL" in dct:
program.official_url = dct.pop("officialURL")
if "entityType" in dct:
program.entity_type = dct.pop("entityType")
if "resourceID" in dct:
program.resource_id = dct.pop("resourceID")
if "episodeImage" in dct:
program.episode_image = Image.from_dict(dct.pop("episodeImage"))
if "duration" in dct:
program.duration = dct.pop("duration")
if "awards" in dct:
program.awards = ProgramAward.from_iterable(dct.pop("awards"))
if len(dct) != 0:
            logging.warning("Key(s) not processed for Program: %s", ", ".join(dct.keys()))
return program
| astrilchuk/sd2xmltv | libschedulesdirect/common/program.py | Python | mit | 6,469 |
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import re
import os.path
from io import open
from setuptools import find_packages, setup
# Change the PACKAGE_NAME only to change folder and different name
PACKAGE_NAME = "azure-mgmt-storagepool"
PACKAGE_PPRINT_NAME = "Storage Pool Management"
# a-b-c => a/b/c
package_folder_path = PACKAGE_NAME.replace('-', '/')
# a-b-c => a.b.c
namespace_name = PACKAGE_NAME.replace('-', '.')
# Version extraction inspired from 'requests'
with open(os.path.join(package_folder_path, 'version.py')
if os.path.exists(os.path.join(package_folder_path, 'version.py'))
else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.md', encoding='utf-8') as f:
readme = f.read()
with open('CHANGELOG.md', encoding='utf-8') as f:
changelog = f.read()
setup(
name=PACKAGE_NAME,
version=version,
description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
long_description=readme + '\n\n' + changelog,
long_description_content_type='text/markdown',
license='MIT License',
author='Microsoft Corporation',
author_email='azpysdkhelp@microsoft.com',
url='https://github.com/Azure/azure-sdk-for-python',
keywords="azure, azure sdk", # update with search keywords relevant to the azure service / product
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'License :: OSI Approved :: MIT License',
],
zip_safe=False,
packages=find_packages(exclude=[
'tests',
# Exclude packages that will be covered by PEP420 or nspkg
'azure',
'azure.mgmt',
]),
install_requires=[
'msrest>=0.6.21',
'azure-common~=1.1',
'azure-mgmt-core>=1.3.0,<2.0.0',
],
python_requires=">=3.6"
)
| Azure/azure-sdk-for-python | sdk/storagepool/azure-mgmt-storagepool/setup.py | Python | mit | 2,679 |
from SimpleWebSocketServer import SimpleWebSocketServer, WebSocket
#from websocket import create_connection
import os
import fnmatch
import threading
import time
clients = []
def searchFiles():
filelist = []
listOfFiles = os.listdir()
listOfFiles.sort()
pattern = 'file*'
for entry in listOfFiles:
if fnmatch.fnmatch(entry, pattern):
filelist.append(entry)
print(filelist)
return filelist
def authentication(data):
mdata = data.split()
name = mdata[0]
password = mdata[1]
if name == '123' and password == '456':
return 'yes'
else :
return 'no'
class SimpleChat(WebSocket):
def handleMessage(self):
counter = 0
fileli = ['file1.zip','file2.zip']
msg = self.data
if(msg!='file'):
reply = authentication(msg)
else:
if len(fileli) > 0 and len(fileli) > counter:
reply = fileli[counter]
print(reply)
counter = counter + 1
for client in clients:
if client == self:
client.sendMessage(reply)
print(self.data)
def handleConnected(self):
print(self.address, 'connected')
for client in clients:
if client == self:
client.sendMessage(self.address[0] + u' - connected')
clients.append(self)
def handleClose(self):
clients.remove(self)
print(self.address, 'closed')
# for client in clients:
# client.sendMessage(self.address[0] + u' - disconnected')
def main():
# threading.Thread(target=start_server,args=()).start
server = SimpleWebSocketServer('0.0.0.0', 9999, SimpleChat)
server.serveforever()
if __name__=="__main__":
main()
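# Illustrative client session (hypothetical; requires the `websocket-client`
# package, matching the commented-out import above):
#
#   from websocket import create_connection
#   ws = create_connection("ws://localhost:9999")
#   ws.send("123 456")   # credentials checked by authentication() -> 'yes'/'no'
#   print(ws.recv())
#   ws.send("file")      # ask the server for the next file name
#   print(ws.recv())
#   ws.close()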
| Bowenislandsong/Distributivecom | Archive-Dec-8/ServerCode/wsserver.py | Python | gpl-3.0 | 1,500 |
# -*- coding: utf-8 -*-
"""djwechat URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^portfolio/', include('portfolio.urls')),
url(r'^weixin/', include('weixin.urls')),
url(r'^rss/', include('rss.urls')),
url(r'^wallpaper/', include('wallpaper.urls')),
]
| stornado/djwechat | djwechat/djwechat/urls.py | Python | apache-2.0 | 1,016 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
import frappe
import frappe.share
import unittest
class TestDocShare(unittest.TestCase):
def setUp(self):
self.user = "test@example.com"
self.event = frappe.get_doc({"doctype": "Event",
"subject": "test share event",
"starts_on": "2015-01-01 10:00:00",
"event_type": "Private"}).insert()
def tearDown(self):
frappe.set_user("Administrator")
self.event.delete()
def test_add(self):
# user not shared
self.assertTrue(self.event.name not in frappe.share.get_shared("Event", self.user))
frappe.share.add("Event", self.event.name, self.user)
self.assertTrue(self.event.name in frappe.share.get_shared("Event", self.user))
def test_doc_permission(self):
frappe.set_user(self.user)
self.assertFalse(self.event.has_permission())
frappe.set_user("Administrator")
frappe.share.add("Event", self.event.name, self.user)
frappe.set_user(self.user)
self.assertTrue(self.event.has_permission())
def test_share_permission(self):
frappe.share.add("Event", self.event.name, self.user, write=1, share=1)
frappe.set_user(self.user)
self.assertTrue(self.event.has_permission("share"))
# test cascade
self.assertTrue(self.event.has_permission("read"))
self.assertTrue(self.event.has_permission("write"))
def test_set_permission(self):
frappe.share.add("Event", self.event.name, self.user)
frappe.set_user(self.user)
self.assertFalse(self.event.has_permission("share"))
frappe.set_user("Administrator")
frappe.share.set_permission("Event", self.event.name, self.user, "share")
frappe.set_user(self.user)
self.assertTrue(self.event.has_permission("share"))
def test_permission_to_share(self):
frappe.set_user(self.user)
self.assertRaises(frappe.PermissionError, frappe.share.add, "Event", self.event.name, self.user)
frappe.set_user("Administrator")
frappe.share.add("Event", self.event.name, self.user, write=1, share=1)
# test not raises
frappe.set_user(self.user)
frappe.share.add("Event", self.event.name, "test1@example.com", write=1, share=1)
def test_remove_share(self):
frappe.share.add("Event", self.event.name, self.user, write=1, share=1)
frappe.set_user(self.user)
self.assertTrue(self.event.has_permission("share"))
frappe.set_user("Administrator")
frappe.share.remove("Event", self.event.name, self.user)
frappe.set_user(self.user)
self.assertFalse(self.event.has_permission("share"))
def test_share_with_everyone(self):
self.assertTrue(self.event.name not in frappe.share.get_shared("Event", self.user))
frappe.share.set_permission("Event", self.event.name, None, "read", everyone=1)
self.assertTrue(self.event.name in frappe.share.get_shared("Event", self.user))
self.assertTrue(self.event.name in frappe.share.get_shared("Event", "test1@example.com"))
self.assertTrue(self.event.name not in frappe.share.get_shared("Event", "Guest"))
frappe.share.set_permission("Event", self.event.name, None, "read", value=0, everyone=1)
self.assertTrue(self.event.name not in frappe.share.get_shared("Event", self.user))
self.assertTrue(self.event.name not in frappe.share.get_shared("Event", "test1@example.com"))
self.assertTrue(self.event.name not in frappe.share.get_shared("Event", "Guest"))
| indautgrp/frappe | frappe/core/doctype/docshare/test_docshare.py | Python | mit | 3,305 |
from aiosparkapi.baseresponse import BaseResponse
from aiosparkapi.async_generator import AsyncGenerator
class Person(BaseResponse):
def __init__(self, result):
self._result = result
@property
def id(self):
return self._result['id']
@property
def emails(self):
return self._result['emails']
@property
def displayName(self):
return self._result['displayName']
@property
def nickName(self):
return self._result.get('nickName')
@property
def firstName(self):
return self._result['firstName']
@property
def lastName(self):
return self._result['lastName']
@property
def avatar(self):
return self._result['avatar']
@property
def orgId(self):
return self._result['orgId']
@property
def roles(self):
return self._result.get('roles')
@property
def licenses(self):
return self._result.get('licenses')
@property
def timezone(self):
return self._result.get('timezone')
@property
def lastActivity(self):
return self._result['lastActivity']
@property
def status(self):
return self._result['status']
@property
def invitePending(self):
return self._result.get('invitePending')
@property
def loginEnabled(self):
return self._result.get('loginEnabled')
@property
def type(self):
return self._result['type']
@property
def created(self):
return self._result['created']
class People:
def __init__(self, requests):
self._requests = requests
async def list(self, **kwargs):
result = await self._requests.list('people', kwargs)
return AsyncGenerator(result, Person)
async def get(self, person_id):
return Person(await self._requests.get('people', person_id))
async def me(self):
return await self.get('me')
async def create(self, **kwargs):
assert 'emails' in kwargs and type(kwargs['emails']) in [list, tuple]
return Person(await self._requests.create('people', kwargs))
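# Illustrative usage (the `requests` transport object and the address are
# hypothetical), run inside a coroutine:
#
#   people = People(requests)
#   me = await people.me()
#   created = await people.create(emails=["user@example.com"], displayName="User")
#   listing = await people.list(email="user@example.com")  # yields Person objects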
| martiert/aiosparkapi | aiosparkapi/api/people.py | Python | mit | 2,135 |
# coding: utf-8
#
# Copyright 2015 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Domain objects for a collection and its constituents.
Domain objects capture domain-specific logic and are agnostic of how the
objects they represent are stored. All methods and properties in this file
should therefore be independent of the specific storage models used.
"""
import copy
import feconf
import utils
# Do not modify the values of these constants. This is to preserve backwards
# compatibility with previous change dicts.
COLLECTION_NODE_PROPERTY_PREREQUISITE_SKILLS = 'prerequisite_skills'
COLLECTION_NODE_PROPERTY_ACQUIRED_SKILLS = 'acquired_skills'
# This takes an additional 'exploration_id' parameter.
CMD_ADD_COLLECTION_NODE = 'add_collection_node'
# This takes an additional 'exploration_id' parameter.
CMD_DELETE_COLLECTION_NODE = 'delete_collection_node'
# This takes additional 'property_name' and 'new_value' parameters and,
# optionally, 'old_value'.
CMD_EDIT_COLLECTION_NODE_PROPERTY = 'edit_collection_node_property'
# This takes additional 'property_name' and 'new_value' parameters and,
# optionally, 'old_value'.
CMD_EDIT_COLLECTION_PROPERTY = 'edit_collection_property'
# This takes additional 'from_version' and 'to_version' parameters for logging.
CMD_MIGRATE_SCHEMA_TO_LATEST_VERSION = 'migrate_schema_to_latest_version'
class CollectionChange(object):
"""Domain object class for a change to a collection.
IMPORTANT: Ensure that all changes to this class (and how these cmds are
interpreted in general) preserve backward-compatibility with the
collection snapshots in the datastore. Do not modify the definitions of
cmd keys that already exist.
"""
COLLECTION_NODE_PROPERTIES = (
COLLECTION_NODE_PROPERTY_PREREQUISITE_SKILLS,
COLLECTION_NODE_PROPERTY_ACQUIRED_SKILLS)
COLLECTION_PROPERTIES = ('title', 'category', 'objective')
def __init__(self, change_dict):
        Initializes a CollectionChange object from a dict.
change_dict represents a command. It should have a 'cmd' key, and one
or more other keys. The keys depend on what the value for 'cmd' is.
The possible values for 'cmd' are listed below, together with the other
keys in the dict:
- 'add_collection_node' (with exploration_id)
- 'delete_collection_node' (with exploration_id)
- 'edit_collection_node_property' (with exploration_id,
property_name, new_value and, optionally, old_value)
- 'edit_collection_property' (with property_name, new_value and,
optionally, old_value)
        - 'migrate_schema_to_latest_version' (with from_version and to_version)
For a collection node, property_name must be one of
COLLECTION_NODE_PROPERTIES. For a collection, property_name must be
one of COLLECTION_PROPERTIES.
"""
if 'cmd' not in change_dict:
raise Exception('Invalid change_dict: %s' % change_dict)
self.cmd = change_dict['cmd']
if self.cmd == CMD_ADD_COLLECTION_NODE:
self.exploration_id = change_dict['exploration_id']
elif self.cmd == CMD_DELETE_COLLECTION_NODE:
self.exploration_id = change_dict['exploration_id']
elif self.cmd == CMD_EDIT_COLLECTION_NODE_PROPERTY:
if (change_dict['property_name'] not in
self.COLLECTION_NODE_PROPERTIES):
raise Exception('Invalid change_dict: %s' % change_dict)
self.exploration_id = change_dict['exploration_id']
self.property_name = change_dict['property_name']
self.new_value = change_dict['new_value']
self.old_value = change_dict.get('old_value')
elif self.cmd == CMD_EDIT_COLLECTION_PROPERTY:
if (change_dict['property_name'] not in
self.COLLECTION_PROPERTIES):
raise Exception('Invalid change_dict: %s' % change_dict)
self.property_name = change_dict['property_name']
self.new_value = change_dict['new_value']
self.old_value = change_dict.get('old_value')
elif self.cmd == CMD_MIGRATE_SCHEMA_TO_LATEST_VERSION:
self.from_version = change_dict['from_version']
self.to_version = change_dict['to_version']
else:
raise Exception('Invalid change_dict: %s' % change_dict)
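    # Illustrative change_dict (values are hypothetical) accepted by __init__ above:
    #
    #   {'cmd': 'edit_collection_property',
    #    'property_name': 'title',
    #    'new_value': 'Fractions 101',
    #    'old_value': 'Fractions'}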
class CollectionCommitLogEntry(object):
"""Value object representing a commit to an collection."""
def __init__(
self, created_on, last_updated, user_id, username, collection_id,
commit_type, commit_message, commit_cmds, version,
post_commit_status, post_commit_community_owned,
post_commit_is_private):
self.created_on = created_on
self.last_updated = last_updated
self.user_id = user_id
self.username = username
self.collection_id = collection_id
self.commit_type = commit_type
self.commit_message = commit_message
self.commit_cmds = commit_cmds
self.version = version
self.post_commit_status = post_commit_status
self.post_commit_community_owned = post_commit_community_owned
self.post_commit_is_private = post_commit_is_private
def to_dict(self):
"""This omits created_on, user_id and (for now) commit_cmds."""
return {
'last_updated': utils.get_time_in_millisecs(self.last_updated),
'username': self.username,
'collection_id': self.collection_id,
'commit_type': self.commit_type,
'commit_message': self.commit_message,
'version': self.version,
'post_commit_status': self.post_commit_status,
'post_commit_community_owned': self.post_commit_community_owned,
'post_commit_is_private': self.post_commit_is_private,
}
class CollectionNode(object):
"""Domain object describing a node in the exploration graph of a
collection. The node contains various information, including a reference to
an exploration (its ID), prerequisite skills in order to be qualified to
play the exploration, and acquired skills attained once the exploration is
completed.
"""
def __init__(self, exploration_id, prerequisite_skills, acquired_skills):
"""Constructs a new CollectionNode object.
Args:
- exploration_id: A valid ID of an exploration referenced by this node.
- prerequisite_skills: A list of skills (strings).
- acquired_skills: A list of skills (strings).
"""
self.exploration_id = exploration_id
self.prerequisite_skills = prerequisite_skills
self.acquired_skills = acquired_skills
def to_dict(self):
return {
'exploration_id': self.exploration_id,
'prerequisite_skills': self.prerequisite_skills,
'acquired_skills': self.acquired_skills
}
@classmethod
def from_dict(cls, node_dict):
return cls(
copy.deepcopy(node_dict['exploration_id']),
copy.deepcopy(node_dict['prerequisite_skills']),
copy.deepcopy(node_dict['acquired_skills']))
@property
def skills(self):
"""Returns a set of skills where each prerequisite and acquired skill
in this collection node is represented at most once.
"""
return set(self.prerequisite_skills) | set(self.acquired_skills)
def update_prerequisite_skills(self, prerequisite_skills):
self.prerequisite_skills = copy.deepcopy(prerequisite_skills)
def update_acquired_skills(self, acquired_skills):
self.acquired_skills = copy.deepcopy(acquired_skills)
def validate(self):
"""Validates various properties of the collection node."""
if not isinstance(self.exploration_id, basestring):
raise utils.ValidationError(
'Expected exploration ID to be a string, received %s' %
self.exploration_id)
if not isinstance(self.prerequisite_skills, list):
raise utils.ValidationError(
'Expected prerequisite_skills to be a list, received %s' %
self.prerequisite_skills)
if len(set(self.prerequisite_skills)) != len(self.prerequisite_skills):
raise utils.ValidationError(
'The prerequisite_skills list has duplicate entries: %s' %
self.prerequisite_skills)
for prerequisite_skill in self.prerequisite_skills:
if not isinstance(prerequisite_skill, basestring):
raise utils.ValidationError(
'Expected all prerequisite skills to be strings, '
'received %s' % prerequisite_skill)
if not isinstance(self.acquired_skills, list):
raise utils.ValidationError(
'Expected acquired_skills to be a list, received %s' %
self.acquired_skills)
if len(set(self.acquired_skills)) != len(self.acquired_skills):
raise utils.ValidationError(
'The acquired_skills list has duplicate entries: %s' %
self.acquired_skills)
for acquired_skill in self.acquired_skills:
if not isinstance(acquired_skill, basestring):
raise utils.ValidationError(
'Expected all acquired skills to be strings, received %s' %
acquired_skill)
redundant_skills = (
set(self.prerequisite_skills) & set(self.acquired_skills))
if redundant_skills:
raise utils.ValidationError(
'There are some skills which are both required for '
'exploration %s and acquired after playing it: %s' %
(self.exploration_id, redundant_skills))
@classmethod
def create_default_node(cls, exploration_id):
return cls(exploration_id, [], [])
class Collection(object):
"""Domain object for an Oppia collection."""
def __init__(self, collection_id, title, category, objective,
schema_version, nodes, version, created_on=None,
last_updated=None):
"""Constructs a new collection given all the information necessary to
represent a collection.
Note: The schema_version represents the version of any underlying
dictionary or list structures stored within the collection. In
particular, the schema for CollectionNodes is represented by this
version. If the schema for CollectionNode changes, then a migration
function will need to be added to this class to convert from the
current schema version to the new one. This function should be called
in both from_yaml in this class and
collection_services._migrate_collection_to_latest_schema.
feconf.CURRENT_COLLECTION_SCHEMA_VERSION should be incremented and the
new value should be saved in the collection after the migration
process, ensuring it represents the latest schema version.
"""
self.id = collection_id
self.title = title
self.category = category
self.objective = objective
self.schema_version = schema_version
self.nodes = nodes
self.version = version
self.created_on = created_on
self.last_updated = last_updated
def to_dict(self):
return {
'id': self.id,
'title': self.title,
'category': self.category,
'objective': self.objective,
'schema_version': self.schema_version,
'nodes': [
node.to_dict() for node in self.nodes
]
}
@classmethod
def create_default_collection(
cls, collection_id, title, category, objective):
return cls(
collection_id, title, category, objective,
feconf.CURRENT_COLLECTION_SCHEMA_VERSION, [], 0)
@classmethod
def from_dict(
cls, collection_dict, collection_version=0,
collection_created_on=None, collection_last_updated=None):
collection = cls(
collection_dict['id'], collection_dict['title'],
collection_dict['category'], collection_dict['objective'],
collection_dict['schema_version'], [], collection_version,
collection_created_on, collection_last_updated)
for node_dict in collection_dict['nodes']:
collection.nodes.append(
CollectionNode.from_dict(node_dict))
return collection
def to_yaml(self):
collection_dict = self.to_dict()
# The ID is the only property which should not be stored within the
# YAML representation.
del collection_dict['id']
return utils.yaml_from_dict(collection_dict)
@classmethod
def from_yaml(cls, collection_id, yaml_content):
try:
collection_dict = utils.dict_from_yaml(yaml_content)
except Exception as e:
raise Exception(
'Please ensure that you are uploading a YAML text file, not '
'a zip file. The YAML parser returned the following error: %s'
% e)
collection_dict['id'] = collection_id
return Collection.from_dict(collection_dict)
@property
def skills(self):
"""The skills of a collection are made up of all prerequisite and
acquired skills of each exploration that is part of this collection.
This returns a sorted list of all the skills of the collection.
"""
unique_skills = set()
for node in self.nodes:
unique_skills.update(node.skills)
return sorted(unique_skills)
@property
def exploration_ids(self):
"""Returns a list of all the exploration IDs that are part of this
collection.
"""
return [
node.exploration_id for node in self.nodes]
@property
def init_exploration_ids(self):
"""Returns a list of exploration IDs that are starting points for this
collection (ie, they require no prior skills to complete). The order
of these IDs is given by the order each respective exploration was
added to the collection.
"""
init_exp_ids = []
for node in self.nodes:
if not node.prerequisite_skills:
init_exp_ids.append(node.exploration_id)
return init_exp_ids
def get_next_exploration_ids(self, completed_exploration_ids):
"""Returns a list of exploration IDs for which the prerequisite skills
are satisfied. These are the next explorations to complete for a user.
If the list returned is empty and the collection is valid, then all
skills have been acquired and the collection is completed. If the input
list is empty, then only explorations with no prerequisite skills are
returned. The order of the exploration IDs is given by the order in
which each exploration was added to the collection.
"""
acquired_skills = set()
for completed_exp_id in completed_exploration_ids:
acquired_skills.update(
self.get_node(completed_exp_id).acquired_skills)
next_exp_ids = []
for node in self.nodes:
if node.exploration_id in completed_exploration_ids:
continue
prereq_skills = set(node.prerequisite_skills)
if prereq_skills <= acquired_skills:
next_exp_ids.append(node.exploration_id)
return next_exp_ids
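    # Worked example (IDs and skills are hypothetical) for the method above: with
    #   exp_a: prerequisite_skills [],            acquired_skills ['fractions']
    #   exp_b: prerequisite_skills ['fractions'], acquired_skills ['ratios']
    # get_next_exploration_ids([]) returns ['exp_a'], and
    # get_next_exploration_ids(['exp_a']) returns ['exp_b'].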
@classmethod
def is_demo_collection_id(cls, collection_id):
"""Whether the collection id is that of a demo collection."""
return collection_id in feconf.DEMO_COLLECTIONS
@property
def is_demo(self):
"""Whether the collection is one of the demo collections."""
return self.is_demo_collection_id(self.id)
def update_title(self, title):
self.title = title
def update_category(self, category):
self.category = category
def update_objective(self, objective):
self.objective = objective
def _find_node(self, exploration_id):
for ind, node in enumerate(self.nodes):
if node.exploration_id == exploration_id:
return ind
return None
def get_node(self, exploration_id):
"""Retrieves a collection node from the collection based on an
exploration ID.
"""
for node in self.nodes:
if node.exploration_id == exploration_id:
return node
return None
def add_node(self, exploration_id):
if self.get_node(exploration_id) is not None:
raise ValueError(
'Exploration is already part of this collection: %s' %
exploration_id)
self.nodes.append(CollectionNode.create_default_node(exploration_id))
def delete_node(self, exploration_id):
node_index = self._find_node(exploration_id)
if node_index is None:
raise ValueError(
'Exploration is not part of this collection: %s' %
exploration_id)
del self.nodes[node_index]
def validate(self, strict=True):
"""Validates all properties of this collection and its constituents."""
if not isinstance(self.title, basestring):
raise utils.ValidationError(
'Expected title to be a string, received %s' % self.title)
utils.require_valid_name(self.title, 'the collection title')
if not isinstance(self.category, basestring):
raise utils.ValidationError(
'Expected category to be a string, received %s'
% self.category)
utils.require_valid_name(self.category, 'the collection category')
if not isinstance(self.objective, basestring):
raise utils.ValidationError(
'Expected objective to be a string, received %s' %
self.objective)
if not self.objective:
raise utils.ValidationError(
'An objective must be specified (in the \'Settings\' tab).')
if not isinstance(self.schema_version, int):
raise utils.ValidationError(
'Expected schema version to be an integer, received %s' %
self.schema_version)
if self.schema_version != feconf.CURRENT_COLLECTION_SCHEMA_VERSION:
raise utils.ValidationError(
'Expected schema version to be %s, received %s' % (
feconf.CURRENT_COLLECTION_SCHEMA_VERSION,
self.schema_version))
if not isinstance(self.nodes, list):
raise utils.ValidationError(
'Expected nodes to be a list, received %s' % self.nodes)
all_exp_ids = self.exploration_ids
if len(set(all_exp_ids)) != len(all_exp_ids):
raise utils.ValidationError(
'There are explorations referenced in the collection more '
'than once.')
# Validate all collection nodes.
for node in self.nodes:
node.validate()
if strict:
if not self.nodes:
raise utils.ValidationError(
'Expected to have at least 1 exploration in the '
'collection.')
# Ensure the collection may be started.
if not self.init_exploration_ids:
raise utils.ValidationError(
'Expected to have at least 1 exploration with no '
'prerequisite skills.')
# Ensure the collection can be completed. This is done in two
# steps: first, no exploration may grant a skill that it
# simultaneously lists as a prerequisite. Second, every exploration
# in the collection must be reachable when starting from the
# explorations with no prerequisite skills and playing through all
# subsequent explorations provided by get_next_exploration_ids.
completed_exp_ids = set(self.init_exploration_ids)
next_exp_ids = self.get_next_exploration_ids(
list(completed_exp_ids))
while next_exp_ids:
completed_exp_ids.update(set(next_exp_ids))
next_exp_ids = self.get_next_exploration_ids(
list(completed_exp_ids))
if len(completed_exp_ids) != len(self.nodes):
unreachable_ids = set(all_exp_ids) - completed_exp_ids
raise utils.ValidationError(
'Some explorations are unreachable from the initial '
'explorations: %s' % unreachable_ids)
class CollectionSummary(object):
"""Domain object for an Oppia collection summary."""
def __init__(self, collection_id, title, category, objective,
status, community_owned, owner_ids, editor_ids,
viewer_ids, contributor_ids, contributors_summary, version,
collection_model_created_on, collection_model_last_updated):
self.id = collection_id
self.title = title
self.category = category
self.objective = objective
self.status = status
self.community_owned = community_owned
self.owner_ids = owner_ids
self.editor_ids = editor_ids
self.viewer_ids = viewer_ids
self.contributor_ids = contributor_ids
self.contributors_summary = contributors_summary
self.version = version
self.collection_model_created_on = collection_model_created_on
self.collection_model_last_updated = collection_model_last_updated
def to_dict(self):
return {
'id': self.id,
'title': self.title,
'category': self.category,
'objective': self.objective,
'status': self.status,
'community_owned': self.community_owned,
'owner_ids': self.owner_ids,
'editor_ids': self.editor_ids,
'viewer_ids': self.viewer_ids,
'contributor_ids': self.contributor_ids,
'contributors_summary': self.contributors_summary,
'version': self.version,
'collection_model_created_on': self.collection_model_created_on,
'collection_model_last_updated': self.collection_model_last_updated
}
| DewarM/oppia | core/domain/collection_domain.py | Python | apache-2.0 | 23,086 |
# -*- coding: utf-8 -*-
# Copyright(C) 2011 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
from datetime import datetime, timedelta
import re
import sys
from random import choice
from collections import OrderedDict
from weboob.capabilities.content import CapContent
from weboob.tools.application.repl import ReplApplication
from weboob.tools.compat import urlsplit
__all__ = ['Boobathon']
class Task(object):
STATUS_NONE = 0
STATUS_PROGRESS = 1
STATUS_DONE = 2
def __init__(self, backend, capability):
self.backend = backend
self.capability = capability
self.status = self.STATUS_NONE
self.date = None
self.branch = u''
def __repr__(self):
return '<Task (%s,%s)>' % (self.backend, self.capability)
class Member(object):
def __init__(self, id, name):
self.name = name
self.id = id
self.tasks = []
self.availabilities = u''
self.repository = None
self.hardware = u''
self.is_me = False
def shortname(self):
name = self.name
if len(name) > 20:
name = '%s..' % name[:18]
return name
class Event(object):
def __init__(self, name, backend):
self.my_id = backend.browser.get_userid()
self.name = 'wiki/weboob/%s' % name
self.description = None
self.date = None
self.begin = None
self.end = None
self.location = None
self.winner = None
self.backend = backend
self.members = OrderedDict()
self.load()
def get_me(self):
return self.members.get(self.backend.browser.get_userid(), None)
def currently_in_event(self):
if not self.date or not self.begin or not self.end:
return False
return self.begin < datetime.now() < self.end
def is_closed(self):
return self.end < datetime.now()
def format_duration(self):
if not self.begin or not self.end:
return None
delta = self.end - self.begin
        return '%02d:%02d' % (delta.seconds/3600, delta.seconds%3600/60)
def check_time_coherence(self):
"""
        Check whether the end time falls before the begin time, in
        case the event stops on the next day (e.g. 15h -> 1h).
        If so, add a day.
"""
if self.begin > self.end:
self.end = self.end + timedelta(1)
def load(self):
self.content = self.backend.get_content(self.name)
self.members.clear()
member = None
for line in self.content.content.split('\n'):
line = line.strip()
if line.startswith('h1. '):
self.title = line[4:]
elif line.startswith('h3=. '):
m = re.match('h3=. Event finished. Winner is "(.*)":/users/(\d+)\!', line)
if not m:
                    print('Unable to parse h3=: %s' % line, file=sys.stderr)
continue
self.winner = Member(int(m.group(2)), m.group(1))
elif line.startswith('h2. '):
continue
elif line.startswith('h3. '):
m = re.match('h3. "(.*)":/users/(\d+)', line)
if not m:
                    print('Unable to parse user "%s"' % line, file=sys.stderr)
continue
member = Member(int(m.group(2)), m.group(1))
if member.id == self.my_id:
member.is_me = True
if self.winner is not None and member.id == self.winner.id:
self.winner = member
self.members[member.id] = member
elif self.description is None and len(line) > 0 and line != '{{TOC}}':
self.description = line
elif line.startswith('* '):
m = re.match('\* \*(\w+)\*: (.*)', line)
if not m:
continue
key, value = m.group(1), m.group(2)
if member is None:
if key == 'Date':
self.date = self.parse_date(value)
elif key == 'Start' or key == 'Begin':
self.begin = self.parse_time(value)
elif key == 'End':
self.end = self.parse_time(value)
self.check_time_coherence()
elif key == 'Location':
self.location = value
else:
if key == 'Repository':
m = re.match('"(.*.git)":.*', value)
if m:
member.repository = m.group(1)
else:
member.repository = value
elif key == 'Hardware':
member.hardware = value
elif key == 'Availabilities':
member.availabilities = value
elif line.startswith('[['):
m = re.match('\[\[(\w+)\]\]\|\[\[(\w+)\]\]\|(.*)\|', line)
if not m:
                    print('Unable to parse task: "%s"' % line, file=sys.stderr)
continue
task = Task(m.group(1), m.group(2))
member.tasks.append(task)
if m.group(3) == '!/img/weboob/_progress.png!':
task.status = task.STATUS_PROGRESS
continue
mm = re.match('!/img/weboob/_done.png! (\d+):(\d+) (\w+)', m.group(3))
if mm and self.date:
task.status = task.STATUS_DONE
task.date = datetime(self.date.year,
self.date.month,
self.date.day,
int(mm.group(1)),
int(mm.group(2)))
task.branch = mm.group(3)
def parse_date(self, value):
try:
return datetime.strptime(value, '%Y-%m-%d')
except ValueError:
return None
def parse_time(self, value):
m = re.match('(\d+):(\d+)', value)
if not m:
return
try:
return self.date.replace(hour=int(m.group(1)),
minute=int(m.group(2)))
except ValueError:
return None
def save(self, message):
if self.winner:
finished = u'\nh3=. Event finished. Winner is "%s":/users/%d!\n' % (self.winner.name,
self.winner.id)
else:
finished = u''
s = u"""h1. %s
{{TOC}}
%s
h2. Event
%s
* *Date*: %s
* *Begin*: %s
* *End*: %s
* *Duration*: %s
* *Location*: %s
h2. Attendees
""" % (self.title,
finished,
self.description,
self.date.strftime('%Y-%m-%d') if self.date else '_Unknown_',
self.begin.strftime('%H:%M') if self.begin else '_Unknown_',
self.end.strftime('%H:%M') if self.end else '_Unknown_',
self.format_duration() or '_Unknown_',
self.location or '_Unknown_')
for member in self.members.values():
if self.date:
availabilities = ''
else:
availabilities = '* *Availabilities*: %s' % member.availabilities
if member.repository is None:
repository = '_Unknown_'
elif member.repository.endswith('.git'):
repository = '"%s":git://git.symlink.me/pub/%s ("http":http://git.symlink.me/?p=%s;a=summary)'
repository = repository.replace('%s', member.repository)
else:
repository = member.repository
s += u"""h3. "%s":/users/%d
* *Repository*: %s
* *Hardware*: %s
%s
|_.Backend|_.Capabilities|_.Status|""" % (member.name,
member.id,
repository,
member.hardware,
availabilities)
for task in member.tasks:
if task.status == task.STATUS_DONE:
status = '!/img/weboob/_done.png! %02d:%02d %s' % (task.date.hour,
task.date.minute,
task.branch)
elif task.status == task.STATUS_PROGRESS:
status = '!/img/weboob/_progress.png!'
else:
status = ' '
s += u"""
|=.!/img/weboob/%s.png!:/projects/weboob/wiki/%s
[[%s]]|[[%s]]|%s|""" % (task.backend.lower(), task.backend, task.backend, task.capability, status)
s += '\n\n'
self.content.content = s
self.backend.push_content(self.content, message)
class Boobathon(ReplApplication):
APPNAME = 'boobathon'
VERSION = '2.1'
COPYRIGHT = 'Copyright(C) 2011-YEAR Romain Bignon'
    DESCRIPTION = 'Console application to participate in a Boobathon.'
SHORT_DESCRIPTION = "participate in a Boobathon"
CAPS = CapContent
SYNOPSIS = 'Usage: %prog [-dqv] [-b backends] [-cnfs] boobathon\n'
SYNOPSIS += ' %prog [--help] [--version]'
radios = []
def __init__(self, *args, **kwargs):
super(Boobathon, self).__init__(*args, **kwargs)
def main(self, argv):
if len(argv) < 2:
print('Please give the name of the boobathon', file=self.stderr)
return 1
self.event = Event(argv[1], choice(list(self.weboob.backend_instances.values())))
if self.event.description is None:
if not self.ask("This event doesn't seem to exist. Do you want to create it?", default=True):
return 1
self.edit_event()
self.save_event('Event created')
return ReplApplication.main(self, [argv[0]])
def save_event(self, message):
if self.ask("Do you confirm your changes?", default=True):
self.event.save(message)
return True
return False
def edit_event(self):
self.event.title = self.ask('Enter a title', default=self.event.title)
self.event.description = self.ask('Enter a description', default=self.event.description)
self.event.date = self.ask('Enter a date (yyyy-mm-dd)',
default=self.event.date.strftime('%Y-%m-%d') if self.event.date else '',
regexp='^(\d{4}-\d{2}-\d{2})?$')
if self.event.date:
self.event.date = datetime.strptime(self.event.date, '%Y-%m-%d')
s = self.ask('Begin at (HH:MM)',
default=self.event.begin.strftime('%H:%M') if self.event.begin else '',
regexp='^(\d{2}:\d{2})?$')
if s:
h, m = s.split(':')
self.event.begin = self.event.date.replace(hour=int(h), minute=int(m))
s = self.ask('End at (HH:MM)',
default=self.event.end.strftime('%H:%M') if self.event.end else '',
regexp='^(\d{2}:\d{2})?$')
if s:
h, m = s.split(':')
self.event.end = self.event.date.replace(hour=int(h), minute=int(m))
self.event.check_time_coherence()
self.event.location = self.ask('Enter a location', default=self.event.location)
def edit_member(self, member):
if member.name is None:
firstname = self.ask('Enter your firstname')
lastname = self.ask('Enter your lastname')
member.name = '%s %s' % (firstname, lastname)
else:
member.name = self.ask('Enter your name', default=member.name)
if self.event.date is None:
member.availabilities = self.ask('Enter availabilities', default=member.availabilities)
member.repository = self.ask('Enter your repository (ex. romain/weboob.git)', default=member.repository)
member.hardware = self.ask('Enter your hardware', default=member.hardware)
def do_progress(self, line):
"""
progress
Display progress of members.
"""
self.event.load()
for member in self.event.members.values():
if member.is_me and member is self.event.winner:
status = '\o/ ->'
elif member.is_me:
status = ' ->'
elif member is self.event.winner:
status = ' \o/'
else:
status = ' '
s = u' %s%20s %s|' % (status, member.shortname(), self.BOLD)
for task in member.tasks:
if task.status == task.STATUS_DONE:
s += '##'
elif task.status == task.STATUS_PROGRESS:
s += u'=>'
else:
s += ' '
s += '|%s' % self.NC
print(s)
print('')
now = datetime.now()
if self.event.begin > now:
d = self.event.begin - now
msg = 'The event will start in %d days, %02d:%02d:%02d'
elif self.event.end < now:
d = now - self.event.end
            msg = 'The event has been finished for %d days, %02d:%02d:%02d'
else:
tot = (self.event.end - self.event.begin).seconds
cur = (datetime.now() - self.event.begin).seconds
pct = cur*20/tot
progress = ''
for i in range(20):
if i < pct:
progress += '='
elif i == pct:
progress += '>'
else:
progress += ' '
print('Event started: %s |%s| %s' % (self.event.begin.strftime('%H:%M'),
progress,
self.event.end.strftime('%H:%M')))
d = self.event.end - now
msg = 'The event will be finished in %d days, %02d:%02d:%02d'
print(msg % (d.days, d.seconds/3600, d.seconds%3600/60, d.seconds%60))
def do_tasks(self, line):
"""
tasks
Display all tasks of members.
"""
self.event.load()
stop = False
i = -2
while not stop:
if i >= 0 and not i%2:
self.stdout.write(' #%-2d' % (i/2))
else:
self.stdout.write(' ')
if i >= 0 and i%2:
# second line of task, see if we'll stop
stop = True
for mem in self.event.members.values():
if len(mem.tasks) > (i/2+1):
# there are more tasks, don't stop now
stop = False
if i == -2:
self.stdout.write(' %s%-20s%s' % (self.BOLD, mem.shortname().encode('utf-8'), self.NC))
elif i == -1:
self.stdout.write(' %s%-20s%s' % (self.BOLD, '-' * len(mem.shortname()), self.NC))
elif len(mem.tasks) <= (i/2):
self.stdout.write(' ' * (20+1))
else:
task = mem.tasks[i/2]
if task.status == task.STATUS_DONE:
status = u'#'
elif task.status == task.STATUS_PROGRESS:
if not i%2:
status = u'|' #1st line
else:
status = u'v' #2nd line
else:
status = u' '
if not i%2: #1st line
line = u'%s %s' % (status, task.backend)
else: #2nd line
line = u'%s `-%s' % (status, task.capability[3:])
self.stdout.write((u' %-20s' % line).encode('utf-8'))
self.stdout.write('\n')
i += 1
def complete_close(self, text, line, *ignored):
args = line.split(' ')
if len(args) == 2:
self.event.load()
return [member.name for member in self.event.members.values()]
def do_close(self, name):
"""
close WINNER
Close the event and set the winner.
"""
self.event.load()
for member in self.event.members.values():
if member.name == name:
self.event.winner = member
if self.save_event('Close event'):
print('Event is now closed. Winner is %s!' % self.event.winner.name)
return
print('"%s" not found' % name, file=self.stderr)
return 3
def complete_edit(self, text, line, *ignored):
args = line.split(' ')
if len(args) == 2:
return ['event', 'me']
def do_edit(self, line):
"""
edit [event | me]
Edit information about you or about event.
"""
if not line:
print('Syntax: edit [event | me]', file=self.stderr)
return 2
self.event.load()
if line == 'event':
self.edit_event()
self.save_event('Event edited')
elif line == 'me':
mem = self.event.get_me()
if not mem:
print('You haven\'t joined the event.', file=self.stderr)
return 1
self.edit_member(mem)
self.save_event('Member edited')
else:
print('Unable to edit "%s"' % line, file=self.stderr)
return 1
def do_info(self, line):
"""
info
Display information about this event.
"""
self.event.load()
print(self.event.title)
print('-' * len(self.event.title))
print(self.event.description)
print('')
print('Date:', self.event.date.strftime('%Y-%m-%d') if self.event.date else 'Unknown')
print('Begin:', self.event.begin.strftime('%H:%M') if self.event.begin else 'Unknown')
print('End:', self.event.end.strftime('%H:%M') if self.event.end else 'Unknown')
print('Duration:', self.event.format_duration() or 'Unknown')
print('Location:', self.event.location or 'Unknown')
print('')
print('There are %d members, use the "members" command to list them' % len(self.event.members))
if self.event.get_me() is None:
print('To join this event, use the command "join".')
def do_members(self, line):
"""
members
Display members information.
"""
self.event.load()
for member in self.event.members.values():
print(member.name)
print('-' * len(member.name))
print('Repository:', member.repository)
if self.event.date is None:
print('Availabilities:', member.availabilities)
print('Hardware:', member.hardware)
accompl = 0
for task in member.tasks:
if task.status == task.STATUS_DONE:
accompl += 1
print('%d tasks (%d accomplished)' % (len(member.tasks), accompl))
if member is self.event.winner:
print('=== %s is the winner!' % member.name)
print('')
print('Use the "tasks" command to display all tasks')
def do_join(self, line):
"""
join
Join this event.
"""
self.event.load()
if self.event.backend.browser.get_userid() in self.event.members:
print('You have already joined this event.', file=self.stderr)
return 1
if self.event.is_closed():
print("Boobathon is closed.", file=self.stderr)
return 1
m = Member(self.event.backend.browser.get_userid(), None)
self.edit_member(m)
self.event.members[m.id] = m
self.save_event('Joined the event')
def do_leave(self, line):
"""
leave
Leave this event.
"""
self.event.load()
if self.event.currently_in_event():
print('Unable to leave during the event, loser!', file=self.stderr)
return 1
if self.event.is_closed():
print("Boobathon is closed.", file=self.stderr)
return 1
try:
self.event.members.pop(self.event.backend.browser.get_userid())
except KeyError:
print("You have not joined this event.", file=self.stderr)
return 1
else:
self.save_event('Left the event')
def do_remtask(self, line):
"""
remtask TASK_ID
Remove a task.
"""
self.event.load()
mem = self.event.get_me()
if not mem:
print("You have not joined this event.", file=self.stderr)
return 1
if self.event.is_closed():
print("Boobathon is closed.", file=self.stderr)
return 1
try:
task_id = int(line)
except ValueError:
print('The task ID should be a number', file=self.stderr)
return 2
try:
task = mem.tasks.pop(task_id)
except IndexError:
print('Unable to find task #%d' % task_id, file=self.stderr)
return 1
else:
print('Removing task #%d (%s,%s).' % (task_id, task.backend, task.capability))
self.save_event('Remove task')
def do_addtask(self, line):
"""
addtask BACKEND CAPABILITY
Add a new task.
"""
self.event.load()
mem = self.event.get_me()
if not mem:
print("You have not joined this event.", file=self.stderr)
return 1
if self.event.is_closed():
print("Boobathon is closed.", file=self.stderr)
return 1
backend, capability = self.parse_command_args(line, 2, 2)
if not backend[0].isupper():
print('The backend name "%s" needs to start with a capital.' % backend, file=self.stderr)
return 2
if not capability.startswith('Cap') or not capability[3].isupper():
print('"%s" is not a proper capability name (must start with Cap).' % capability, file=self.stderr)
return 2
for task in mem.tasks:
if (task.backend,task.capability) == (backend,capability):
print("A task already exists for that.", file=self.stderr)
return 1
task = Task(backend, capability)
mem.tasks.append(task)
self.save_event('New task')
def do_start(self, line):
"""
start [TASK_ID]
Start a task. If you don't give a task ID, the first available
task will be taken.
"""
self.event.load()
mem = self.event.get_me()
if not mem:
print("You have not joined this event.", file=self.stderr)
return 1
if len(mem.tasks) == 0:
print("You don't have any task to do.", file=self.stderr)
return 1
if not self.event.currently_in_event():
print("You can't start a task, we are not in event.", file=self.stderr)
return 1
if line.isdigit():
task_id = int(line)
else:
task_id = -1
last_done = -1
for i, task in enumerate(mem.tasks):
if task.status == task.STATUS_DONE:
last_done = i
elif task.status == task.STATUS_PROGRESS:
task.status = task.STATUS_NONE
print('Task #%s (%s,%s) canceled.' % (i, task.backend, task.capability))
if (i == task_id or task_id < 0) and task.status == task.STATUS_NONE:
break
else:
print('Task not found.', file=self.stderr)
return 3
if task.status == task.STATUS_DONE:
print('Task is already done.', file=self.stderr)
return 1
task.status = task.STATUS_PROGRESS
mem.tasks.remove(task)
mem.tasks.insert(last_done + 1, task)
self.save_event('Started a task')
def do_done(self, line):
"""
done
Set the current task as done.
"""
self.event.load()
mem = self.event.get_me()
if not mem:
print("You have not joined this event.", file=self.stderr)
return 1
if self.event.is_closed():
print("Boobathon is closed.", file=self.stderr)
return 1
for i, task in enumerate(mem.tasks):
if task.status == task.STATUS_PROGRESS:
print('Task (%s,%s) done! (%d%%)' % (task.backend, task.capability, (i+1)*100/len(mem.tasks)))
if self.event.currently_in_event():
task.status = task.STATUS_DONE
task.date = datetime.now()
task.branch = self.ask('Enter name of branch')
self.save_event('Task accomplished')
else:
task.status = task.STATUS_NONE
print('Oops, you are out of event. Canceling the task...', file=self.stderr)
self.save_event('Cancel task')
return 1
return
print("There isn't any task in progress.", file=self.stderr)
return 1
def do_cancel(self, line):
"""
cancel
Cancel the current task.
"""
self.event.load()
mem = self.event.get_me()
if not mem:
print("You have not joined this event.", file=self.stderr)
return 1
if self.event.is_closed():
print("Boobathon is closed.", file=self.stderr)
return 1
for task in mem.tasks:
if task.status == task.STATUS_PROGRESS:
print('Task (%s,%s) canceled.' % (task.backend, task.capability))
task.status = task.STATUS_NONE
self.save_event('Cancel task')
return
print("There isn't any task in progress.", file=self.stderr)
return 1
def load_default_backends(self):
"""
        Override an Application method.
"""
for backend_name, module_name, params in self.weboob.backends_config.iter_backends():
if module_name != 'redmine':
continue
v = urlsplit(params['url'])
if v.netloc == 'symlink.me':
self.load_backends(names=[backend_name])
return
if not self.check_loaded_backends({'url': 'https://symlink.me'}):
print("Ok, so leave now.")
sys.exit(0)
def is_module_loadable(self, module):
"""
        Override a ConsoleApplication method.
"""
return module.name == 'redmine'
|
laurentb/weboob
|
weboob/applications/boobathon/boobathon.py
|
Python
|
lgpl-3.0
| 27,634
|
"""
Copyright (c) 2016- by Dietmar W Weiss
This is free software; you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation; either version 3.0 of
the License, or (at your option) any later version.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this software; if not, write to the Free
Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
02110-1301 USA, or see the FSF site: http://www.fsf.org.
Version:
2018-03-13 DWW
"""
from Base import Base
from Forward import Forward
from Sensitivity import Sensitivity
from Optimum import Optimum
from Inverse import Inverse
class Operation(Base):
"""
    Convenience class providing access to all operations on models (Forward,
Sensitivity, Optimum, Inverse).
"""
def __init__(self, identifier='Operation'):
super().__init__(identifier)
self._validOperations = [Forward, Sensitivity, Optimum, Inverse]
# initial value of self._operation is an instance of class Forward
self._operation = self._validOperations[0]()
def op2str(self, value):
"""
Converts operation class to string
Args:
value (None, operations class or instance of it):
instance of an operation class (see self._validOperations)
Returns:
(string):
name of operation class in lower case
"""
        if isinstance(value, tuple(self._validOperations)):
s = type(value).__name__.lower()
elif value in self._validOperations:
s = value.__name__.lower()
elif value is None:
s = str(None)
else:
self.warning("invalid value: '", value, "' is not valid operation")
s = None
return s
@property
def operation(self):
"""
Returns:
(an operation class):
Actual instance of an operation class
"""
return self._operation
@operation.setter
def operation(self, value):
"""
Sets actual instance of an operation class
Args:
value (None, string, class or instance of an operation class):
new operation
Side effects:
if self.operation is not None, it will be deleted and set to None
"""
if self._operation is not None:
del self._operation
self._operation = None
if value is None:
return
if isinstance(value, str):
for x in self._validOperations:
if value.lower().startswith(x.__name__.lower()[:3]):
self._operation = x()
return
assert 0, "operation.setter, value:'" + str(value) + "'"
elif value in self._validOperations:
self._operation = value()
elif type(value) in self._validOperations:
self._operation = value
else:
assert 0, "operation.setter, else: invalid type:'" + \
str(type(value)) + "' value: '" + str(value) + "'"
def pre(self, **kwargs):
super().pre(**kwargs)
assert self.operation is not None
        assert isinstance(self.operation, tuple(self._validOperations))
assert self.operation.model is not None, 'model is not assigned'
if not self.operation.model.ready():
# trains model
XY = kwargs.get('XY', None)
if XY is not None:
self.model.train(XY=XY, **self.kwargsDel(kwargs, ['XY']))
else:
X, Y = kwargs.get('X', None), kwargs.get('Y', None)
if X is not None and Y is not None:
assert X.shape[0] == Y.shape[0], \
str(X.shape[0]) + ' ' + str(Y.shape[0])
self.model.train(X=X, Y=Y,
**self.kwargsDel(kwargs, ['X', 'Y']))
def task(self, **kwargs):
super().task(**kwargs)
x = kwargs.get('x', None)
if x is None:
return None, None
x, y = self.operation(x=x, **self.kwargsDel(kwargs, 'x'))
return x, y
# Examples ####################################################################
if __name__ == '__main__':
ALL = 1
import numpy as np
from Theoretical import Theoretical
from plotArrays import plot_X_Y_Yref
# user defined method
def f(self, x, c0=1, c1=1, c2=1, c3=1, c4=1, c5=1, c6=1, c7=1):
x0, x1 = x[0], x[1]
y0 = c0 * np.sin(c1 * x0) + c2 * (x1 - 1.5)**2 + c3
return [y0]
if 0 or ALL:
s = 'Check operation.setter'
print('-' * len(s) + '\n' + s + '\n' + '-' * len(s))
foo = Operation()
for op in ['inv', 'Inverse', Optimum(), Sensitivity, None,
# 1.234,
# 'wrong'
]:
foo.operation = op
print('operation:', type(foo.operation).__name__,
type(foo.operation))
print()
for op in foo._validOperations:
foo.operation = op
print('operation:', type(foo.operation).__name__,
type(foo.operation))
if 1 or ALL:
s = 'All operation types'
print('-' * len(s) + '\n' + s + '\n' + '-' * len(s))
model = Theoretical(f=f)
noise = 0.5
n = 20
X0 = np.linspace(-1, 5, n)
X1 = np.linspace(0, 3, X0.size)
X0, X1 = np.meshgrid(X0, X1)
X = np.asfarray([X0.ravel(), X1.ravel()]).T # X.shape = (nPoint, 2)
Y_exa = np.array([np.array(model.f(x)) for x in X])
Y_noise = Y_exa + (1 - 2 * np.random.rand(Y_exa.shape[0],
Y_exa.shape[1])) * noise
plot_X_Y_Yref(X, Y_noise, Y_exa, ['X', 'Y_{nos}', 'Y_{exa}'])
Y_fit = model(X=X, Y=Y_noise, x=X)
plot_X_Y_Yref(X, Y_fit, Y_exa, ['X', 'Y_{fit}', 'Y_{exa}'])
operations = Operation()
for op in operations._validOperations:
print('=', op.__name__, '=' * 50)
foo = op(model=model)
x, y = foo(X=X, Y=Y_noise, x=[(0, 0.5), (1, 3), (1, 2)], y=(0.5))
if type(foo) in (Optimum, Inverse):
foo.plot()
print("+++ Operation:'" + type(foo).__name__ + "'x:", x, 'y:', y)
|
dwweiss/pmLib
|
src/Operation.py
|
Python
|
lgpl-3.0
| 6,899
|
"""
Core methods
------------
.. autofunction:: cache_toolbox.core.get_instance
.. autofunction:: cache_toolbox.core.delete_instance
.. autofunction:: cache_toolbox.core.instance_key
"""
from django.core.cache import cache
from django.db import DEFAULT_DB_ALIAS
from . import app_settings
def get_instance(model, instance_or_pk, timeout=None, using=None):
"""
Returns the ``model`` instance with a primary key of ``instance_or_pk``.
If the data is cached it will be returned from there, otherwise the regular
Django ORM is queried for this instance and the data stored in the cache.
If omitted, the timeout value defaults to
``settings.CACHE_TOOLBOX_DEFAULT_TIMEOUT`` instead of 0 (zero).
Example::
>>> get_instance(User, 1) # Cache miss
<User: lamby>
>>> get_instance(User, 1) # Cache hit
<User: lamby>
>>> User.objects.get(pk=1) == get_instance(User, 1)
True
"""
pk = getattr(instance_or_pk, 'pk', instance_or_pk)
key = instance_key(model, instance_or_pk)
data = cache.get(key)
if data is not None:
try:
# Try and construct instance from dictionary
instance = model(pk=pk, **data)
# Ensure instance knows that it already exists in the database,
# otherwise we will fail any uniqueness checks when saving the
# instance.
instance._state.adding = False
# Specify database so that instance is setup correctly. We don't
# namespace cached objects by their origin database, however.
instance._state.db = using or DEFAULT_DB_ALIAS
return instance
except:
# Error when deserialising - remove from the cache; we will
# fallback and return the underlying instance
cache.delete(key)
# Use the default manager so we are never filtered by a .get_query_set()
instance = model._default_manager.using(using).get(pk=pk)
data = {}
for field in instance._meta.fields:
        # Harmless to save, but it saves space in the dictionary - we already
        # know the primary key when we look it up
if field.primary_key:
continue
if field.get_internal_type() == 'FileField':
# Prevent problems with DNImageField by not serialising it.
continue
data[field.attname] = getattr(instance, field.attname)
if timeout is None:
timeout = app_settings.CACHE_TOOLBOX_DEFAULT_TIMEOUT
cache.set(key, data, timeout)
return instance
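# Usage sketch (hypothetical model and primary key, assuming a configured
# Django cache backend):
#
#     user = get_instance(User, 42)    # first call: ORM query, result cached
#     user = get_instance(User, 42)    # later calls: rebuilt from the cache
#     delete_instance(User, 42)        # drop the cached copy after an update
#
# Note that the cached dictionary deliberately skips FileFields and the primary
# key, so the reconstructed instance mirrors exactly what cache.set() stored.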
def delete_instance(model, *instance_or_pk):
"""
Purges the cache keys for the instances of this model.
"""
cache.delete_many([instance_key(model, x) for x in instance_or_pk])
def instance_key(model, instance_or_pk):
"""
Returns the cache key for this (model, instance) pair.
"""
return '%s.%s:%d' % (
model._meta.app_label,
model._meta.module_name,
getattr(instance_or_pk, 'pk', instance_or_pk),
)
|
lamby/live-studio
|
contrib/cache_toolbox/core.py
|
Python
|
agpl-3.0
| 3,040
|
def mapper( keyword ):
result = ""
keywordFile = open("all_agu_keywords.csv", "r")
for line in keywordFile:
parts = line.split(",")
k = int(parts[1].strip())
name = parts[2].strip()
if ( keyword == k ):
result = name
return result
# the range of keywords we're interested in
# for instance, 1600 to 1699 if we want to limit the results to Global Change
# 0 to 9999 if we want everything
start = int(input("Enter starting keyword value: "))
end = int(input("Enter ending keyword value: "))
# setup output JSON
outFile = open("keyword_timeseries.json", "w")
outFile.write("{ ")
counter = 0
# input data file
inFile = open("all_agu_keyword_counts_all_years.csv", "r")
# loop over all the lines in the file
for line in inFile:
# get the keyword, first value
parts = line.split(",")
keyword = int(parts[0].strip())
# check if it's in our range
if ( (keyword >= start) and (keyword <= end) ):
if counter == 0:
outline = ' "' + mapper(keyword) + '": ['
else:
outline = outline + ' "' + mapper(keyword) + '": ['
# skip the 1st value, we already got it
for p in parts[1:]:
outline = outline + str(p).strip() + ','
outline = outline[:-1] # remove last character, in this case unneeded ,
outline = outline + "],\n"
counter = counter + 1
outline = outline.strip()
outline = outline[:-1] # remove last character, in this case unneeded ,
outFile.write(outline)
outFile.write("\n}")
outFile.close()
|
narock/agu_analytics
|
obsolete/keyword_sort_by_topic.py
|
Python
|
gpl-3.0
| 1,743
|
from __future__ import annotations
import os
import shutil
import time
import gc
import threading
from typing import Optional
from utils.utilfuncs import safeprint
def DummyAsyncFileWrite(fn, writestr, access='a'):
safeprint('Called HB file write before init {} {} {}'.format(fn, writestr, access))
AsyncFileWrite = DummyAsyncFileWrite # set from log support to avoid circular imports
DevPrint = None
# import topper
WatchGC = False # set True to see garbage collection info
Buffers = {}
HBdir = ''
GCBuf: Optional[HistoryBuffer] = None
bufdumpseq = 0
HBNet = None
def SetupHistoryBuffers(dirnm, maxlogs):
global HBdir, GCBuf
r = [k for k in os.listdir('.') if '.HistoryBuffer' in k]
if ".HistoryBuffer." + str(maxlogs) in r:
shutil.rmtree(".HistoryBuffer." + str(maxlogs))
for i in range(maxlogs - 1, 0, -1):
if ".HistoryBuffer." + str(i) in r:
os.rename('.HistoryBuffer.' + str(i), ".HistoryBuffer." + str(i + 1))
# noinspection PyBroadException
try:
os.rename('.HistoryBuffer', '.HistoryBuffer.1')
except:
pass
os.mkdir('.HistoryBuffer')
HBdir = dirnm + '/.HistoryBuffer/'
if WatchGC:
gc.callbacks.append(NoteGCs)
GCBuf = HistoryBuffer(50, 'GC')
def NoteGCs(phase, info):
if GCBuf is not None:
GCBuf.Entry('GC Call' + phase + repr(info))
def DumpAll(idline, entrytime):
global bufdumpseq
if HBdir == '': # logs not yet set up
safeprint(time.strftime('%m-%d-%y %H:%M:%S') + ' Suppressing History Buffer Dump for {}'.format(idline))
return
fn = HBdir + str(bufdumpseq) + '-' + entrytime
try:
#topper.mvtops(str(bufdumpseq) + '-' + entrytime)
bufdumpseq += 1
t = {}
curfirst = {}
curtime = {}
initial = {}
now = time.time()
more = True
for nm, HB in Buffers.items():
t[nm] = HB.content()
try:
curfirst[nm] = next(t[nm])
curtime[nm] = curfirst[nm][1]
except StopIteration:
if nm in curfirst: del curfirst[nm]
if nm in curtime: del curtime[nm]
initial[nm] = '*'
if curfirst == {} or curtime == {}:
more = False
prevtime = 0
AsyncFileWrite(fn, '{} ({}): '.format(entrytime, now) + idline + '\n', 'w')
while more:
nextup = min(curtime, key=curtime.get)
if curtime[nextup] > prevtime:
prevtime = curtime[nextup]
else:
AsyncFileWrite(fn, 'seq error:' + str(prevtime) + ' ' + str(curtime[nextup]) + '\n')
prevtime = 0
if now - curfirst[nextup][1] < 300: # limit history dump to 5 minutes worth
AsyncFileWrite(fn,
'{:1s}{:10s}:({:3d}) {:.5f}: [{}] {}\n'.format(initial[nextup], nextup,
curfirst[nextup][0],
now - curfirst[nextup][1],
curfirst[nextup][3],
curfirst[nextup][2]))
initial[nextup] = ' '
try:
curfirst[nextup] = next(t[nextup])
curtime[nextup] = curfirst[nextup][1]
except StopIteration:
del curfirst[nextup]
del curtime[nextup]
if curfirst == {} or curtime == {}: more = False
except Exception as E:
AsyncFileWrite(fn, 'Error dumping buffer for: ' + entrytime + ': ' + idline + '\n')
AsyncFileWrite(fn, 'Exception was: ' + repr(E) + '\n')
class EntryItem(object):
def __init__(self):
self.timeofentry = 0
self.entry = ""
self.thread = ""
class HistoryBuffer(object):
def __init__(self, size, name):
self.buf = []
for i in range(size):
self.buf.append(EntryItem())
self.current = 0
self.size = size
self.name = name
Buffers[name] = self
def Entry(self, entry):
self.buf[self.current].entry = entry
self.buf[self.current].timeofentry = time.time()
self.buf[self.current].thread = threading.current_thread().name
self.current = (self.current + 1) % self.size
def content(self):
# freeze for dump and reset empty
        # This is subject to races from other threads doing entry reports.
        # Sequence: build a new buffer offline, then swap it in for the current
        # one so that one list or the other is always a valid buffer, and only
        # then reset current to 0.
        # At worst this loses a few events recorded between grabbing the current
        # buffer and replacing it with the new one.
tempbuf = []
for i in range(self.size):
tempbuf.append(EntryItem())
cur = self.buf
curind = self.current
self.buf = tempbuf
self.current = 0
#DevPrint('Enter HB content for: {} index {}'.format(self.name, curind))
for i in range(self.size):
j = (i + curind) % self.size
if cur[j].timeofentry != 0:
# DevPrint('Item from {}: {}/{}/{}/{}'.format(self.name, i, j, cur[j].timeofentry, cur[j].entry))
yield j, cur[j].timeofentry, cur[j].entry, cur[j].thread
#DevPrint('Content exit: {}/{}'.format(self.name, j))
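# Minimal usage sketch (illustrative only; the buffer name and entries below
# are hypothetical):
#
#     SetupHistoryBuffers(os.getcwd(), maxlogs=5)  # rotate old .HistoryBuffer dirs
#     hb = HistoryBuffer(100, 'Network')           # ring buffer, registered in Buffers
#     hb.Entry('connection opened')
#     hb.Entry('first packet received')
#     DumpAll('debug snapshot', time.strftime('%H-%M-%S'))
#
# DumpAll() merges every registered buffer by timestamp, which is why content()
# yields (index, time, entry, thread) tuples per buffer.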
|
kevinkahn/softconsole
|
historybuffer.py
|
Python
|
apache-2.0
| 4,574
|
import sublime
import sublime_plugin
from isort.isort import SortImports
class PysortCommand(sublime_plugin.TextCommand):
def run(self, edit):
old_content = self.view.substr(sublime.Region(0, self.view.size()))
new_content = SortImports(file_contents=old_content).output
self.view.replace(edit, sublime.Region(0, self.view.size()), new_content)
sublime.status_message("Python sort import complete.")
sublime.run_command('sub_notify', {'title': 'ISort', 'msg': 'Python sort import complete.', 'sound': False})
|
turbidsoul/isort
|
sort.py
|
Python
|
mit
| 556
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from neutron_lib import constants as n_const
from neutron_lib import context
from neutron_lib import exceptions
from neutron_lib.services.logapi import constants as log_const
from neutron.common.ovn import constants as ovn_const
from neutron.common.ovn import utils
from neutron.services.logapi.common import exceptions as log_exc
from neutron.tests.functional import base as functional_base
class LogApiTestCaseBase(functional_base.TestOVNFunctionalBase):
def setUp(self):
super().setUp()
self.log_driver = self.mech_driver.log_driver
self._check_is_supported()
self.ctxt = context.Context('admin', 'fake_tenant')
def _check_is_supported(self):
if not self.log_driver.network_logging_supported(self.nb_api):
self.skipTest("The current OVN version does not offer support "
"for neutron network log functionality.")
self.assertIsNotNone(self.log_plugin)
def _log_data(self, sg_id=None, port_id=None, enabled=True):
log_data = {'project_id': self.ctxt.project_id,
'resource_type': 'security_group',
'description': 'test net log',
'name': 'logme',
'enabled': enabled,
'event': log_const.ALL_EVENT}
if sg_id:
log_data['resource_id'] = sg_id
if port_id:
log_data['target_id'] = port_id
return {'log': log_data}
class LogApiTestCaseSimple(LogApiTestCaseBase):
def test_basic_get(self):
log_obj = self.log_plugin.create_log(self.ctxt, self._log_data())
self.assertIsNotNone(log_obj)
log_obj_get = self.log_plugin.get_log(self.ctxt, log_obj['id'])
self.assertEqual(log_obj, log_obj_get)
log_obj2 = self.log_plugin.create_log(self.ctxt, self._log_data())
self.assertIsNotNone(log_obj2)
log_objs_get = self.log_plugin.get_logs(self.ctxt)
log_objs_ids = {x['id'] for x in log_objs_get}
self.assertEqual({log_obj['id'], log_obj2['id']}, log_objs_ids)
def test_log_ovn_unsupported(self):
with mock.patch.object(self.log_driver, 'network_logging_supported',
return_value=False) as supported_mock:
log_data = {'log': {'resource_type': 'security_group',
'enabled': True}}
self.assertRaises(exceptions.DriverCallError,
self.log_plugin.create_log,
self.ctxt, log_data)
supported_mock.assert_called_once()
class LogApiTestCaseComplex(LogApiTestCaseBase):
def setUp(self):
super().setUp()
self._prepare_env()
def _prepare_env(self):
self.net = self._create_network(
self.fmt, 'private', admin_state_up=True).json['network']['id']
self.subnet = self._create_subnet(
self.fmt, self.net, '10.0.0.0/24', enable_dhcp=False).json[
'subnet']['id']
self.sg1 = self._create_security_group('test_sg1_ssh')
self.sg2 = self._create_security_group('test_sg2_http')
self.sg3 = self._create_security_group('test_sg3_telnet_ssh')
self.sg1rs = [self._create_security_group_rule(self.sg1, 22)]
self.sg2rs = [self._create_security_group_rule(self.sg2, 80)]
self.sg3rs = [self._create_security_group_rule(self.sg3, 23),
self._create_security_group_rule(self.sg3, 22)]
self.sgs = [self.sg1, self.sg2, self.sg3]
self.sgrs = self.sg1rs + self.sg2rs + self.sg3rs
self.port1_sgs = [self.sg1]
self.port1_sgrs = self.sg1rs
self.port1 = self._create_port(self.fmt, self.net,
security_groups=self.port1_sgs)
self.port2_sgs = [self.sg2, self.sg3]
self.port2_sgrs = self.sg2rs + self.sg3rs
self.port2 = self._create_port(self.fmt, self.net,
security_groups=self.port2_sgs)
self.port3_sgs = [self.sg1, self.sg3]
self.port3_sgrs = self.sg1rs + self.sg3rs
self.port3 = self._create_port(self.fmt, self.net,
security_groups=self.port3_sgs)
def _create_port(self, name, net_id, security_groups):
data = {'port': {'name': name,
'tenant_id': self.ctxt.project_id,
'network_id': net_id,
'security_groups': security_groups}}
req = self.new_create_request('ports', data, self.fmt)
res = req.get_response(self.api)
return self.deserialize(self.fmt, res)['port']['id']
def _create_security_group(self, name):
data = {'security_group': {'name': name,
'tenant_id': self.ctxt.project_id}}
req = self.new_create_request('security-groups', data, self.fmt)
res = req.get_response(self.api)
return self.deserialize(self.fmt, res)['security_group']['id']
def _create_security_group_rule(self, sg_id, tcp_port):
data = {'security_group_rule': {'security_group_id': sg_id,
'direction': 'ingress',
'protocol': n_const.PROTO_NAME_TCP,
'ethertype': n_const.IPv4,
'port_range_min': tcp_port,
'port_range_max': tcp_port,
'tenant_id': self.ctxt.project_id}}
req = self.new_create_request('security-group-rules', data, self.fmt)
res = req.get_response(self.api)
return self.deserialize(self.fmt, res)['security_group_rule']['id']
def _find_security_group_row_by_id(self, sg_id):
for row in self.nb_api._tables['Port_Group'].rows.values():
if row.name == utils.ovn_port_group_name(sg_id):
return row
def _find_security_group_rule_row_by_id(self, sgr_id):
for row in self.nb_api._tables['ACL'].rows.values():
if (row.external_ids.get(
ovn_const.OVN_SG_RULE_EXT_ID_KEY) == sgr_id):
return row
def _check_acl_log(self, sgr, is_enabled=True):
acl = self._find_security_group_rule_row_by_id(sgr)
self.assertIsNotNone(acl)
self.assertEqual(is_enabled, acl.log)
return acl
def _check_acl_log_drop(self, is_enabled=True):
acls = self.nb_api.get_port_group(
ovn_const.OVN_DROP_PORT_GROUP_NAME).acls
self.assertTrue(acls)
for acl in acls:
self.assertEqual(is_enabled, acl.log)
return acls
def _check_sgrs(self, sgrs=None, is_enabled=True):
if not sgrs:
sgrs = self.sgrs
for sgr in sgrs:
self._check_acl_log(sgr, is_enabled)
def test_add_and_remove(self):
self._check_sgrs(is_enabled=False)
self.assertEqual([],
self.nb_api.meter_list().execute(check_error=True))
log_obj = self.log_plugin.create_log(self.ctxt, self._log_data())
for sgr in self.sgrs:
acl = self._check_acl_log(sgr)
self.assertEqual(utils.ovn_name(log_obj['id']), acl.name[0])
meter = self.nb_api.meter_get(acl.meter[0]).execute(
check_error=True)
self.assertEqual([True], meter.fair)
self.assertEqual('pktps', meter.unit)
self.assertEqual(1, len(meter.bands))
self.assertEqual({ovn_const.OVN_DEVICE_OWNER_EXT_ID_KEY:
log_const.LOGGING_PLUGIN}, meter.external_ids)
self.log_plugin.delete_log(self.ctxt, log_obj['id'])
self._check_sgrs(is_enabled=False)
self.assertEqual([],
self.nb_api.meter_list().execute(check_error=True))
log_objs = []
for sg in self.sgs:
log_data = self._log_data(sg_id=sg)
log_objs.append(self.log_plugin.create_log(self.ctxt, log_data))
self.assertEqual(len(log_objs),
len(self.log_plugin.get_logs(self.ctxt)))
self._check_sgrs(is_enabled=True)
# Attempt to delete non-existing row
self.assertRaises(log_exc.LogResourceNotFound,
self.log_plugin.delete_log,
self.ctxt, log_obj['id'])
self.log_plugin.delete_log(self.ctxt, log_objs[1]['id'])
self._check_sgrs(sgrs=self.sg1rs, is_enabled=True)
self._check_sgrs(sgrs=self.sg2rs, is_enabled=False)
self._check_sgrs(sgrs=self.sg3rs, is_enabled=True)
self.log_plugin.delete_log(self.ctxt, log_objs[2]['id'])
self._check_sgrs(sgrs=self.sg1rs, is_enabled=True)
self._check_sgrs(sgrs=self.sg2rs, is_enabled=False)
self._check_sgrs(sgrs=self.sg3rs, is_enabled=False)
self.log_plugin.delete_log(self.ctxt, log_objs[0]['id'])
self.assertEqual([], self.log_plugin.get_logs(self.ctxt))
self._check_sgrs(is_enabled=False)
# Attempt to delete from empty table
self.assertRaises(log_exc.LogResourceNotFound,
self.log_plugin.delete_log,
self.ctxt, log_objs[0]['id'])
def test_update_all(self):
# Note: only these fields are supported for update:
# openstack network log set [-h] [--description <description>]
# [--enable | --disable] [--name <name>] <network-log>
log_data = self._log_data()
log_obj = self.log_plugin.create_log(self.ctxt, log_data)
self._check_sgrs()
log_data['log']['name'] = 'logme-nay'
log_data['log']['enabled'] = False
self.log_plugin.update_log(self.ctxt, log_obj['id'], log_data)
self._check_sgrs(is_enabled=False)
log_data['log']['name'] = 'logme-yay'
log_data['log']['description'] = 'logs are a beautiful thing'
log_data['log']['enabled'] = True
self.log_plugin.update_log(self.ctxt, log_obj['id'], log_data)
self._check_sgrs()
def test_update_one_sg(self):
log_data = self._log_data(sg_id=self.sg2, enabled=False)
log_obj = self.log_plugin.create_log(self.ctxt, log_data)
self._check_sgrs(is_enabled=False)
log_data['log']['enabled'] = True
self.log_plugin.update_log(self.ctxt, log_obj['id'], log_data)
self._check_sgrs(sgrs=self.sg1rs, is_enabled=False)
self._check_sgrs(sgrs=self.sg2rs, is_enabled=True)
self._check_sgrs(sgrs=self.sg3rs, is_enabled=False)
def test_overlap_net_logs(self):
log_data1 = self._log_data(sg_id=self.sg3, port_id=self.port3)
log_obj1 = self.log_plugin.create_log(self.ctxt, log_data1)
self._check_sgrs(sgrs=self.sg1rs, is_enabled=False)
self._check_sgrs(sgrs=self.sg2rs, is_enabled=False)
self._check_sgrs(sgrs=self.sg3rs, is_enabled=True)
log_data2 = self._log_data(port_id=self.port2)
log_obj2 = self.log_plugin.create_log(self.ctxt, log_data2)
self._check_sgrs(sgrs=self.sg1rs, is_enabled=False)
        # Port 2 uses sg2 and sg3. However, sg3 is in use by log_obj1,
        # so only the ACLs for sg2 would be associated with log_obj2.
for sgr in self.sg2rs:
acl = self._check_acl_log(sgr)
self.assertEqual(utils.ovn_name(log_obj2['id']), acl.name[0])
for sgr in self.sg3rs:
acl = self._check_acl_log(sgr)
self.assertEqual(utils.ovn_name(log_obj1['id']), acl.name[0])
        # Next, delete log_obj1 and make sure that log_obj2 gets to
        # claim what it could not use before
self.log_plugin.delete_log(self.ctxt, log_obj1['id'])
self._check_sgrs(sgrs=self.sg1rs, is_enabled=False)
for sgr in self.sg2rs + self.sg3rs:
acl = self._check_acl_log(sgr)
self.assertEqual(utils.ovn_name(log_obj2['id']), acl.name[0])
# Delete log_obj2 and ensure that logs are off and meter is no
# longer used
self.log_plugin.delete_log(self.ctxt, log_obj2['id'])
self._check_sgrs(is_enabled=False)
self.assertEqual([],
self.nb_api.meter_list().execute(check_error=True))
def _add_logs_then_remove(self, event1, event2, sg=None, sgrs=None):
        # Events were previously not correctly applied on ACLs. This test
        # ensures that each event logs only the necessary ACLs.
drop_true_events = (log_const.DROP_EVENT, log_const.ALL_EVENT)
accept_true_events = (log_const.ALL_EVENT, log_const.ACCEPT_EVENT)
# Check there are no acls with their logging active
self._check_sgrs(sgrs=sgrs, is_enabled=False)
self._check_acl_log_drop(is_enabled=False)
# Add first log object
log_data1 = self._log_data(sg_id=sg)
log_data1['log']['event'] = event1
log_obj1 = self.log_plugin.create_log(self.ctxt, log_data1)
self._check_acl_log_drop(is_enabled=event1 in drop_true_events)
self._check_sgrs(sgrs=sgrs, is_enabled=event1 in accept_true_events)
# Add second log object
log_data2 = self._log_data(sg_id=sg)
log_data2['log']['event'] = event2
log_obj2 = self.log_plugin.create_log(self.ctxt, log_data2)
self._check_acl_log_drop(is_enabled=(event1 in drop_true_events or
event2 in drop_true_events))
self._check_sgrs(sgrs=sgrs, is_enabled=(event1 in accept_true_events or
event2 in accept_true_events))
# Delete second log object
self.log_plugin.delete_log(self.ctxt, log_obj2['id'])
self._check_acl_log_drop(is_enabled=event1 in drop_true_events)
self._check_sgrs(sgrs=sgrs, is_enabled=event1 in accept_true_events)
# Delete first log object
self.log_plugin.delete_log(self.ctxt, log_obj1['id'])
self._check_sgrs(sgrs=sgrs, is_enabled=False)
self._check_acl_log_drop(is_enabled=False)
def test_events_all_sg(self):
self._add_logs_then_remove(log_const.DROP_EVENT, log_const.ALL_EVENT)
self._add_logs_then_remove(
log_const.ACCEPT_EVENT, log_const.DROP_EVENT)
self._add_logs_then_remove(
log_const.DROP_EVENT, log_const.ACCEPT_EVENT)
def test_events_one_sg(self):
self._add_logs_then_remove(log_const.DROP_EVENT, log_const.ALL_EVENT,
sg=self.sg1, sgrs=self.sg1rs)
self._add_logs_then_remove(
log_const.ACCEPT_EVENT, log_const.DROP_EVENT, sg=self.sg2,
sgrs=self.sg2rs)
self._add_logs_then_remove(
log_const.DROP_EVENT, log_const.ACCEPT_EVENT, sg=self.sg3,
sgrs=self.sg3rs)
|
mahak/neutron
|
neutron/tests/functional/services/logapi/drivers/ovn/test_driver.py
|
Python
|
apache-2.0
| 15,392
|
"""
RenderPipeline
Copyright (c) 2014-2016 tobspr <tobias.springer1@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from __future__ import print_function
import time
import hashlib
from panda3d.core import PStatCollector, Mat4, Point4, Vec3
from rpcore.globals import Globals
def rgb_from_string(text, min_brightness=0.6):
""" Creates a rgb color from a given string """
ohash = hashlib.md5(text[::-1].encode("ascii")).hexdigest()
r, g, b = int(ohash[0:2], 16), int(ohash[2:4], 16), int(ohash[4:6], 16)
neg_inf = 1.0 - min_brightness
return (min_brightness + r / 255.0 * neg_inf,
min_brightness + g / 255.0 * neg_inf,
min_brightness + b / 255.0 * neg_inf)
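# Usage sketch: derive a stable debug colour per name. The result is
# deterministic for a given string and every channel is at least
# min_brightness:
#
#     col = rgb_from_string("ShadowStage")                 # hypothetical label
#     dim = rgb_from_string("ShadowStage", min_brightness=0.3)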
def profile(func):
""" Handy decorator which can be used to profile a function with pstats """
collector_name = "Debug:%s" % func.__name__
global_showbase = Globals.base
# Insert the collector to a custom dictionary attached to the base
if hasattr(global_showbase, 'custom_collectors'):
if collector_name in global_showbase.custom_collectors.keys():
pstat = global_showbase.custom_collectors[collector_name]
else:
global_showbase.custom_collectors[collector_name] = \
PStatCollector(collector_name)
pstat = global_showbase.custom_collectors[collector_name]
else:
pstat = PStatCollector(collector_name)
global_showbase.custom_collectors = {}
global_showbase.custom_collectors[collector_name] = pstat
def do_pstat(*args, **kargs):
pstat.start()
returned = func(*args, **kargs)
pstat.stop()
return returned
do_pstat.__name__ = func.__name__
do_pstat.__dict__ = func.__dict__
do_pstat.__doc__ = func.__doc__
return do_pstat
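# Usage sketch (assumes a running ShowBase is available through Globals.base):
#
#     @profile
#     def update_shadow_sources():   # hypothetical render task
#         ...
#
# Every call is then bracketed by the "Debug:update_shadow_sources"
# PStatCollector, so its cost shows up in the PStats viewer.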
class profile_cpu(object): # noqa # pylint: disable=invalid-name,too-few-public-methods
"""
Context manager for profiling CPU duration. This is useful for timing
loading of files or other CPU-heavy operations. Example usage:
with profile_cpu("Some Task"):
some_slow_operation()
    Duration of the process will be printed to the console afterwards.
"""
def __init__(self, name):
self.name = name
def __enter__(self):
self.start_time = time.clock()
def __exit__(self, *args):
duration = (time.clock() - self.start_time) * 1000.0
print(self.name, "took", round(duration, 2), "ms ")
def snap_shadow_map(mvp, cam_node, resolution):
""" 'Snaps' a shadow map to make sure it always is on full texel centers.
This ensures no flickering occurs while moving the shadow map.
This works by projecting the Point (0,0,0) to light space, compute the
texcoord differences and offset the light world space position by that. """
mvp = Mat4(mvp)
base_point = mvp.xform(Point4(0, 0, 0, 1)) * 0.5 + 0.5
texel_size = 1.0 / float(resolution)
offset_x = base_point.x % texel_size
offset_y = base_point.y % texel_size
mvp.invert_in_place()
new_base = mvp.xform(Point4(
(base_point.x - offset_x) * 2.0 - 1.0,
(base_point.y - offset_y) * 2.0 - 1.0,
(base_point.z) * 2.0 - 1.0, 1))
cam_node.set_pos(cam_node.get_pos() - Vec3(new_base.x, new_base.y, new_base.z))
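# Worked illustration of the snapping above (numbers are made up): with a
# 512-texel shadow map the texel size is 1/512 ~= 0.00195. If the origin
# projects to texcoord x = 0.30142, the offset is 0.30142 % 0.00195 ~= 0.00064,
# so the camera is shifted by the world-space equivalent of that fraction and
# the map stays locked to texel centers while the light moves.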
|
eswartz/RenderPipeline
|
rpcore/util/generic.py
|
Python
|
mit
| 4,285
|
# -*- coding: utf-8 -*-
from flask import g, flash, render_template, url_for, request
from coaster.views import load_model
from baseframe import _
from baseframe.forms import render_form, render_redirect, render_delete_sqla
from lastuser_core.models import db, UserEmail, UserEmailClaim, UserPhone, UserPhoneClaim
from lastuser_core.signals import user_data_changed
from lastuser_oauth.mailclient import send_email_verify_link
from lastuser_oauth.views.helpers import requires_login
from lastuser_oauth.forms import PasswordResetForm, PasswordChangeForm
from .. import lastuser_ui
from ..forms import NewEmailAddressForm, NewPhoneForm, VerifyPhoneForm
from .sms import send_phone_verify_code
@lastuser_ui.route('/profile')
@requires_login
def profile():
return render_template('profile.html')
@lastuser_ui.route('/profile/password', methods=['GET', 'POST'])
@requires_login
def change_password():
if not g.user.pw_hash:
form = PasswordResetForm()
form.edit_user = g.user
del form.username
else:
form = PasswordChangeForm()
form.edit_user = g.user
if form.validate_on_submit():
g.user.password = form.password.data
db.session.commit()
flash(_("Your new password has been saved"), category='success')
return render_redirect(url_for('.profile'), code=303)
return render_form(form=form, title=_("Change password"), formid='changepassword',
submit=_("Change password"), ajax=True)
@lastuser_ui.route('/profile/email/new', methods=['GET', 'POST'])
@requires_login
def add_email():
form = NewEmailAddressForm()
if form.validate_on_submit():
useremail = UserEmailClaim.get(user=g.user, email=form.email.data)
if useremail is None:
useremail = UserEmailClaim(user=g.user, email=form.email.data, type=form.type.data)
db.session.add(useremail)
db.session.commit()
send_email_verify_link(useremail)
flash(_("We sent you an email to confirm your address"), 'success')
user_data_changed.send(g.user, changes=['email-claim'])
return render_redirect(url_for('.profile'), code=303)
return render_form(form=form, title=_("Add an email address"), formid='email_add',
submit=_("Add email"), ajax=True)
@lastuser_ui.route('/profile/email/<md5sum>/remove', methods=['GET', 'POST'])
@requires_login
def remove_email(md5sum):
useremail = UserEmail.query.filter_by(md5sum=md5sum, user=g.user).first()
if not useremail:
useremail = UserEmailClaim.query.filter_by(md5sum=md5sum, user=g.user).first_or_404()
if isinstance(useremail, UserEmail) and useremail.primary:
flash(_("You cannot remove your primary email address"), 'error')
return render_redirect(url_for('.profile'), code=303)
if request.method == 'POST':
# FIXME: Confirm validation success
user_data_changed.send(g.user, changes=['email-delete'])
return render_delete_sqla(useremail, db, title=_(u"Confirm removal"),
message=_(u"Remove email address {email}?").format(
email=useremail.email),
success=_(u"You have removed your email address {email}").format(email=useremail.email),
next=url_for('.profile'))
@lastuser_ui.route('/profile/phone/new', methods=['GET', 'POST'])
@requires_login
def add_phone():
form = NewPhoneForm()
if form.validate_on_submit():
userphone = UserPhoneClaim.get(user=g.user, phone=form.phone.data)
if userphone is None:
userphone = UserPhoneClaim(user=g.user, phone=form.phone.data, type=form.type.data)
db.session.add(userphone)
try:
send_phone_verify_code(userphone)
db.session.commit() # Commit after sending because send_phone_verify_code saves the message sent
flash(_("We sent a verification code to your phone number"), 'success')
user_data_changed.send(g.user, changes=['phone-claim'])
return render_redirect(url_for('.verify_phone', number=userphone.phone), code=303)
except ValueError as e:
db.session.rollback()
form.phone.errors.append(unicode(e))
return render_form(form=form, title=_("Add a phone number"), formid='phone_add',
submit=_("Add phone"), ajax=True)
@lastuser_ui.route('/profile/phone/<number>/remove', methods=['GET', 'POST'])
@requires_login
def remove_phone(number):
userphone = UserPhone.query.filter_by(phone=number, user=g.user).first()
if userphone is None:
userphone = UserPhoneClaim.query.filter_by(phone=number, user=g.user).first_or_404()
if request.method == 'POST':
# FIXME: Confirm validation success
user_data_changed.send(g.user, changes=['phone-delete'])
return render_delete_sqla(userphone, db, title=_(u"Confirm removal"),
message=_(u"Remove phone number {phone}?").format(
phone=userphone.phone),
success=_(u"You have removed your number {phone}").format(phone=userphone.phone),
next=url_for('.profile'))
@lastuser_ui.route('/profile/phone/<number>/verify', methods=['GET', 'POST'])
@requires_login
@load_model(UserPhoneClaim, {'phone': 'number'}, 'phoneclaim', permission='verify')
def verify_phone(phoneclaim):
form = VerifyPhoneForm()
form.phoneclaim = phoneclaim
if form.validate_on_submit():
if UserPhone.get(phoneclaim.phone) is None:
if not g.user.phones:
primary = True
else:
primary = False
userphone = UserPhone(user=g.user, phone=phoneclaim.phone, gets_text=True, primary=primary)
db.session.add(userphone)
db.session.delete(phoneclaim)
db.session.commit()
flash(_("Your phone number has been verified"), 'success')
user_data_changed.send(g.user, changes=['phone'])
return render_redirect(url_for('.profile'), code=303)
else:
db.session.delete(phoneclaim)
db.session.commit()
flash(_("This phone number has already been claimed by another user"), 'danger')
return render_form(form=form, title=_("Verify phone number"), formid='phone_verify',
submit=_("Verify"), ajax=True)
|
sindhus/lastuser
|
lastuser_ui/views/profile.py
|
Python
|
bsd-2-clause
| 6,260
|
###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Tavendo GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from __future__ import absolute_import
__all__ = (
'Publication',
'Subscription',
'Handler',
'Registration',
'Endpoint',
'PublishRequest',
'SubscribeRequest',
'UnsubscribeRequest',
'CallRequest',
'InvocationRequest',
'RegisterRequest',
'UnregisterRequest',
)
class Publication(object):
"""
Object representing a publication (feedback from publishing an event when doing
an acknowledged publish).
"""
__slots__ = ('id', 'was_encrypted')
def __init__(self, publication_id, was_encrypted):
self.id = publication_id
self.was_encrypted = was_encrypted
def __str__(self):
return "Publication(id={0}, was_encrypted={1})".format(self.id, self.was_encrypted)
class Subscription(object):
"""
Object representing a handler subscription.
"""
__slots__ = ('id', 'topic', 'active', 'session', 'handler')
def __init__(self, subscription_id, topic, session, handler):
"""
"""
self.id = subscription_id
self.topic = topic
self.active = True
self.session = session
self.handler = handler
def unsubscribe(self):
"""
"""
if self.active:
return self.session._unsubscribe(self)
else:
raise Exception("subscription no longer active")
def __str__(self):
return "Subscription(id={0}, is_active={1})".format(self.id, self.active)
class Handler(object):
"""
Object representing an event handler attached to a subscription.
"""
__slots__ = ('fn', 'obj', 'details_arg')
def __init__(self, fn, obj=None, details_arg=None):
"""
:param fn: The event handler function to be called.
:type fn: callable
:param obj: The (optional) object upon which to call the function.
:type obj: obj or None
:param details_arg: The keyword argument under which event details should be provided.
:type details_arg: str or None
"""
self.fn = fn
self.obj = obj
self.details_arg = details_arg
class Registration(object):
"""
Object representing a registration.
"""
__slots__ = ('id', 'active', 'session', 'procedure', 'endpoint')
def __init__(self, session, registration_id, procedure, endpoint):
self.id = registration_id
self.active = True
self.session = session
self.procedure = procedure
self.endpoint = endpoint
def unregister(self):
"""
"""
if self.active:
return self.session._unregister(self)
else:
raise Exception("registration no longer active")
class Endpoint(object):
"""
    Object representing a procedure endpoint attached to a registration.
"""
__slots__ = ('fn', 'obj', 'details_arg')
def __init__(self, fn, obj=None, details_arg=None):
"""
:param fn: The endpoint procedure to be called.
:type fn: callable
:param obj: The (optional) object upon which to call the function.
:type obj: obj or None
:param details_arg: The keyword argument under which call details should be provided.
:type details_arg: str or None
"""
self.fn = fn
self.obj = obj
self.details_arg = details_arg
class Request(object):
"""
Object representing an outstanding request, such as for subscribe/unsubscribe,
register/unregister or call/publish.
"""
__slots__ = ('request_id', 'on_reply')
def __init__(self, request_id, on_reply):
"""
:param request_id: The WAMP request ID.
:type request_id: int
:param on_reply: The Deferred/Future to be fired when the request returns.
:type on_reply: Deferred/Future
"""
self.request_id = request_id
self.on_reply = on_reply
class PublishRequest(Request):
"""
Object representing an outstanding request to publish (acknowledged) an event.
"""
    __slots__ = ('was_encrypted',)
def __init__(self, request_id, on_reply, was_encrypted):
Request.__init__(self, request_id, on_reply)
self.was_encrypted = was_encrypted
class SubscribeRequest(Request):
"""
Object representing an outstanding request to subscribe to a topic.
"""
__slots__ = ('handler', 'topic')
def __init__(self, request_id, topic, on_reply, handler):
"""
:param request_id: The WAMP request ID.
:type request_id: int
:param topic: The topic URI being subscribed to.
:type topic: unicode
:param on_reply: The Deferred/Future to be fired when the request returns.
:type on_reply: Deferred/Future
        :param handler: The event handler to be called when an event is received on the topic.
        :type handler: callable
"""
Request.__init__(self, request_id, on_reply)
self.topic = topic
self.handler = handler
class UnsubscribeRequest(Request):
"""
Object representing an outstanding request to unsubscribe a subscription.
"""
def __init__(self, request_id, on_reply, subscription_id):
Request.__init__(self, request_id, on_reply)
self.subscription_id = subscription_id
class CallRequest(Request):
"""
Object representing an outstanding request to call a procedure.
"""
__slots__ = ('procedure', 'options',)
def __init__(self, request_id, procedure, on_reply, options):
"""
:param request_id: The WAMP request ID.
:type request_id: int
:param on_reply: The Deferred/Future to be fired when the request returns.
:type on_reply: Deferred/Future
:param options: WAMP call options that are in use for this call.
:type options: dict
"""
Request.__init__(self, request_id, on_reply)
self.procedure = procedure
self.options = options
class InvocationRequest(Request):
"""
Object representing an outstanding request to invoke an endpoint.
"""
class RegisterRequest(Request):
"""
Object representing an outstanding request to register a procedure.
"""
def __init__(self, request_id, on_reply, procedure, endpoint):
Request.__init__(self, request_id, on_reply)
self.procedure = procedure
self.endpoint = endpoint
class UnregisterRequest(Request):
"""
Object representing an outstanding request to unregister a registration.
"""
def __init__(self, request_id, on_reply, registration_id):
Request.__init__(self, request_id, on_reply)
self.registration_id = registration_id
|
meejah/AutobahnPython
|
autobahn/wamp/request.py
|
Python
|
mit
| 7,949
|
import math
def swap(arr, i, j):
arr[i], arr[j] = arr[j], arr[i]
def selection_sort(a, lo, hi):
for i in range(lo, hi):
m = i
for j in range(i, hi):
if a[j] < a[m]:
m = j
swap(a, i, m)
def bubble_sort(a, lo, hi):
    for i in range(lo, hi):
        # after each pass the largest remaining element settles at the end of the range
        for j in range(lo + 1, hi - (i - lo)):
            if a[j] < a[j - 1]:
                swap(a, j - 1, j)
def insertion_sort(a, lo, hi):
    for i in range(lo + 1, hi):
        j = i
        while j > lo and a[j] < a[j - 1]:
            swap(a, j, j - 1)
            j -= 1
def partition(seq, lo, hi, pivot_index):
p = lo # index of first elem not less than pivot
swap(seq, pivot_index, hi - 1) # move pivot to the end
for i in range(lo, hi):
if seq[i] < seq[hi - 1]:
swap(seq, p, i)
p += 1
swap(seq, p, hi - 1)
return p
def partition_three_way(seq, lo, hi, pivot_value):
    """Partitions the slice seq[lo:hi] into three parts:
    less than pivot_value, equal to pivot_value, and greater than pivot_value
    Returns
    -----------
    k1 : int
    index of the beginning of the "equal" part, i.e. all elements of seq[lo:k1]
    are less than pivot_value
    k2 : int
    index of the first element of the "greater" part: all elements of seq[k2:hi] are
    greater than pivot_value
    """
i = lo
k1 = lo
k2 = hi
while i < k2:
if seq[i] < pivot_value:
swap(seq, i, k1)
k1 += 1
i += 1
elif seq[i] == pivot_value:
i += 1
else: # seq[i] >= value
swap(seq, i, k2 - 1)
k2 -= 1
return k1, k2
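# Usage sketch (illustrative, not part of the original module): partition the
# whole list around the value 3; the "equal" block ends up in seq[k1:k2].
#
#     >>> seq = [3, 1, 5, 3, 0, 7, 3]
#     >>> k1, k2 = partition_three_way(seq, 0, len(seq), 3)
#     >>> seq[k1:k2]
#     [3, 3, 3]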
def pivot_median(seq, lo, hi):
"""Returns index to the median of seq[lo], seq[mid], seq[hi - 1]"""
m = lo + (hi - lo) // 2 # middle element
if seq[lo] < seq[m]:
if seq[m] < seq[hi - 1]:
return m
elif seq[hi - 1] < seq[lo]:
return lo
else:
if seq[hi - 1] < seq[m]:
return m
elif seq[lo] < seq[hi - 1]:
return lo
return hi - 1
def quick_sort(seq, start, end):
"""quick sort with median of left, middle, right as pivot element"""
if end - start >= 2:
pivot_index = pivot_median(seq, start, end)
k1, k2 = partition_three_way(seq, start, end, seq[pivot_index])
quick_sort(seq, start, k1)
quick_sort(seq, k2, end)
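# Usage sketch (illustrative, not part of the original module): quick_sort sorts
# seq[start:end] in place.
#
#     >>> data = [5, 2, 9, 1, 5, 6]
#     >>> quick_sort(data, 0, len(data))
#     >>> data
#     [1, 2, 5, 5, 6, 9]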
def quick_select(seq, k, start=0, end=None):
    """Partitions the array such that seq[k] contains the k-th order statistic, seq[:k] contains elements less than seq[k],
    and seq[k+1:] contains elements greater than seq[k]"""
# todo: not tested
if end is None:
end = len(seq)
if end - start >= 2:
pivot_index = pivot_median(seq, start, end)
k1, k2 = partition_three_way(seq, start, end, seq[pivot_index])
if k1 <= k < k2:
return
if k < k1:
quick_select(seq, k, start, k1)
else:
quick_select(seq, k, k2, end)
def intro_sort(seq, start, end):
MAX_INSERTION_SORT_SIZE = 4
    max_recursion_level = math.ceil(math.log(max(end - start, 1))) + 1
def quick_sort_step(start, end, depth):
if end - start <= 1:
return
if end - start <= MAX_INSERTION_SORT_SIZE:
insertion_sort(seq, start, end)
return
        if depth >= max_recursion_level:
            # recursion limit hit: fall back to heap sort for this range
            heap_sort(seq, start, end)
            return
# do a quick sort step
pivot_index = pivot_median(seq, start, end)
k1, k2 = partition_three_way(seq, start, end, seq[pivot_index])
quick_sort_step(start, k1, depth + 1)
quick_sort_step(k2, end, depth + 1)
quick_sort_step(start, end, 0)
def heap_sort(a, lo, hi):
    """HeapSort algorithm
    A binary max-heap is the structure with the following properties
    (indices below are relative to lo):
        a[k] >= a[2*k + 1] and a[k] >= a[2*k + 2]
        the root holds the largest element
    -Tree is balanced: all nodes have depth of k or k-1
    -level of depth k - 1 is completely full
    -level of depth k is being filled left to right
    -all child nodes are less or equal to parent node
    """
    n = hi - lo
    def siftdown(i_, size):
        # i_ and size are heap indices relative to lo
        while i_ * 2 + 1 < size:
            if i_ * 2 + 2 < size and a[lo + i_ * 2 + 2] > a[lo + i_ * 2 + 1]:
                j = i_ * 2 + 2
            else:
                j = i_ * 2 + 1
            if a[lo + i_] < a[lo + j]:
                swap(a, lo + i_, lo + j)
                i_ = j
            else:
                break
    # heapify
    for i in reversed(range(n // 2)):
        siftdown(i, n)
    # popmax: repeatedly move the current maximum to the end of the shrinking heap
    for size in reversed(range(1, n)):
        swap(a, lo, lo + size)
        siftdown(0, size)
def merge(a, lo, mid, hi, buf):
q, p = lo, mid
for i in range(lo, hi):
# either second array is exhausted
# or next element in the left array is lt. in one in the right
if p >= hi or q < mid and a[q] < a[p]:
buf[i] = a[q]
q += 1
else:
buf[i] = a[p]
p += 1
def merge_inplace(a, lo, mid, hi):
# TODO: inplace merge w/o buffer. Currently works as merge function adapter
buf = [0] * (hi - lo)
merge(a, lo, mid, hi, buf)
a[lo:hi] = buf
def merge_n(a, run):
    """
    merge all runs into one array
    e.g [1,2,3] + [10,20,80] + [5,7,8,9]
    :param a: array of sorted runs
    :param run: run[i] - beginning of the i-th run (sorted subsequence);
                run[-1] == index one past the rightmost element in the range
    Assumes run[0] == 0, i.e. the runs start at the beginning of a.
    """
    # number of elements in the range
    n = run[-1] - run[0]
    # temporary array
    b = [0] * n
    # number of runs
    nrun = len(run) - 1
    # TODO: Smart temporary array creation (get rid of last copy)
    # merge adjacent pairs of runs until only one run is left
    while nrun > 1:
        prev_nrun = nrun
        new_nrun = 0
        k = 1
        while k <= prev_nrun:
            if k == prev_nrun:
                # odd run without a partner: copy it through unchanged
                lo, hi = run[k - 1], run[k]
                b[lo:hi] = a[lo:hi]
            else:
                lo, mid, hi = run[k - 1 : k + 2]  # bounds
                p, q = lo, mid  # pointers to the next elements of each run
                for i in range(lo, hi):
                    # take from the left run unless it is exhausted or the
                    # right element is smaller (<= keeps the merge stable)
                    if q >= hi or (p < mid and a[p] <= a[q]):
                        b[i] = a[p]
                        p += 1
                    else:
                        b[i] = a[q]
                        q += 1
            new_nrun += 1
            run[new_nrun] = hi
            k += 2
        nrun = new_nrun
        a, b = b, a
    b[:n] = a[:n]
def merge_lists(xs, ys):
res = xs + ys
merge_inplace(res, 0, len(xs), len(res))
return res
def merge_n_lists(lsts):
k = 0
runs = []
res = []
for l in lsts:
res.extend(l)
runs.append(k)
k += len(l)
    runs.append(k)
merge_n(res, runs)
return res
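# Usage sketch (illustrative, not part of the original module):
#
#     >>> merge_lists([1, 4, 9], [2, 3, 10])
#     [1, 2, 3, 4, 9, 10]
#     >>> merge_n_lists([[1, 2], [0, 5], [3, 4]])
#     [0, 1, 2, 3, 4, 5]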
MIN_MERGE = 8
def merge_sort(arr, lo, hi):
    buf = arr[:]
    swapped = False
    m = MIN_MERGE  # size of minimal sorted subarray
    # optional step. Also works when m = 1
    for k in range(lo, hi, MIN_MERGE):
        insertion_sort(arr, k, min(hi, k + MIN_MERGE))
    while m < hi - lo:
        # merge adjacent pairs of sorted chunks of size m into buf
        for k in range(lo, hi, 2 * m):
            merge(arr, k, min(k + m, hi), min(k + 2 * m, hi), buf)
        arr, buf = buf, arr
        swapped = not swapped
        m *= 2
    if swapped:
        # the sorted data ended up in the temporary buffer: copy it back
        buf[lo:hi] = arr[lo:hi]
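# Usage sketch (illustrative, not part of the original module): merge_sort sorts
# arr[lo:hi] in place, using insertion sort for the initial MIN_MERGE-sized chunks.
#
#     >>> data = [9, 3, 7, 1, 8, 2, 5, 4, 6, 0]
#     >>> merge_sort(data, 0, len(data))
#     >>> data
#     [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]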
def counting_sort(seq, start, end, max_elem=None, min_elem=None):
    # todo: not tested
    if max_elem is None:
        max_elem = max(seq[start:end])
    if min_elem is None:
        min_elem = min(seq[start:end])
    num_bins = max_elem - min_elem + 1
    counts = [0] * num_bins
    for i in range(start, end):
        counts[seq[i] - min_elem] += 1
    for i in range(1, num_bins):
        counts[i] += counts[i - 1]
    # copy sorted elements to their positions:
    # counts[i] now holds the number of elements <= min_elem + i
    for i in range(num_bins):
        bin_start = start + (counts[i - 1] if i > 0 else 0)
        for pos in range(bin_start, start + counts[i]):
            seq[pos] = min_elem + i
def radix_sort(seq, start, end, radix=10):
    # todo: not tested
    """LSD radix sort"""
    def get_digit(number, exp):
        return (number // exp) % radix
    def stable_counting_sort(exp):
        counts = [0] * radix
        for i in range(start, end):
            counts[get_digit(seq[i], exp)] += 1
        for i in range(1, radix):
            counts[i] += counts[i - 1]
        # walk backwards so that equal digits keep their relative order (stability)
        for i in reversed(range(start, end)):
            digit = get_digit(seq[i], exp)
            pos = counts[digit] - 1
            buffer[pos] = seq[i]
            counts[digit] -= 1
        seq[start:end] = buffer
    buffer = [0] * (end - start)
    max_element = max(seq[start:end])
    exp = 1
    while max_element // exp > 0:
        # while there are digits in the largest number left
        stable_counting_sort(exp)
        exp *= radix
|
vadimadr/python-algorithms
|
algorithms/sorting.py
|
Python
|
mit
| 8,501
|
"""Program to find the credit grade of a person.
usage:
firefly credit_grade.find_credit_grade
"""
import zlib
import random
def find_credit_grade(email):
"""Returns the credit grade of the person identified by the given email address.
The credit grade is generated randomly using the email as the seed to the random
number generator.
The credit grade can be either A, B, C, D, E, F or G.
"""
    # Since we need to give the same grade every time the function is called
    # with the same email, the checksum of the string is used as the random
    # seed so the same email always produces the same result.
seed = zlib.adler32(email.encode("utf-8"))
r = random.Random(seed)
return r.choice(["A", "B", "C", "D", "E", "F", "G"])
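# Illustrative check (not part of the original module): the grade is
# deterministic because the random seed is derived from the email, so the
# address used below is only an example.
#
#     >>> find_credit_grade("alice@example.com") == find_credit_grade("alice@example.com")
#     True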
|
amitkaps/full-stack-data-science
|
credit-risk-deploy/credit_grade.py
|
Python
|
mit
| 789
|
import pathlib
import pytest
import salt.modules.runit as runit
from tests.support.mock import patch
pytestmark = [pytest.mark.skip_on_windows]
@pytest.fixture
def configure_loader_modules():
return {runit: {}}
@pytest.fixture
def service_dir(tmp_path):
dirname = tmp_path / "services"
dirname.mkdir(exist_ok=True, parents=True)
return str(dirname)
def test__get_svc_path_on_non_symlinked_service(service_dir):
service = pathlib.Path(service_dir, "service")
service.mkdir(exist_ok=True, parents=True)
service_runfile = service / "run"
service_runfile.touch()
with patch.object(runit, "SERVICE_DIR", service_dir):
with patch("os.access", return_value=True):
path_list = runit._get_svc_path(str(service), "ENABLED")
assert path_list
assert len(path_list) == 1
assert path_list[0] == str(service)
def test__get_svc_path_on_symlinked_service(service_dir, tmp_path):
sym_dir = tmp_path / "sym_dir"
sym_dir.mkdir(exist_ok=True, parents=True)
service_runfile = sym_dir / "run"
service_runfile.touch()
# Create the symlink
service = pathlib.Path(service_dir, "service")
service.symlink_to(sym_dir)
with patch.object(runit, "SERVICE_DIR", service_dir):
with patch("os.access", return_value=True):
path_list = runit._get_svc_path(str(service), "ENABLED")
assert path_list
assert len(path_list) == 1
assert path_list[0] == str(sym_dir)
|
saltstack/salt
|
tests/pytests/functional/modules/test_runit.py
|
Python
|
apache-2.0
| 1,518
|
# -*- coding: utf-8 -*-
"""
netvisor.auth
~~~~~~~~~~~~~
:copyright: (c) 2013-2016 by Fast Monkeys Oy.
:license: MIT, see LICENSE for more details.
"""
from __future__ import absolute_import
import datetime
import hashlib
import uuid
from requests.auth import AuthBase
from ._compat import text_type
class NetvisorAuth(AuthBase):
"""
Implements the custom authentication mechanism used by Netvisor.
"""
VALID_LANGUAGES = ('EN', 'FI', 'SE')
def __init__(
self, sender, partner_id, partner_key, customer_id, customer_key,
organization_id, language='FI'
):
self.sender = sender
self.partner_id = partner_id
self.partner_key = partner_key
self.customer_id = customer_id
self.customer_key = customer_key
self.organization_id = organization_id
self.language = language
@property
def language(self):
"""
The language the API uses for the error messages.
The language must be in ISO-3166 format.
.. seealso:: :const:`VALID_LANGUAGES` for a list of accepted
languages.
"""
return self._language
@language.setter
def language(self, value):
if value not in self.VALID_LANGUAGES:
msg = 'language must be one of {}'.format(self.VALID_LANGUAGES)
raise ValueError(msg)
self._language = value
@staticmethod
def make_transaction_id():
"""
Make a unique identifier for a Netvisor API request.
        Each request sent by the partner must use a unique identifier.
Otherwise Netvisor API will raise :exc:`RequestNotUnique` error.
"""
return uuid.uuid4().hex
@staticmethod
def make_timestamp():
"""
Make a timestamp for a Netvisor API request.
The timestamp is the current time in UTC as string in ANSI
format.
Example::
>>> NetvisorAuth.make_timestamp()
2008-07-24 15:49:12.221
"""
now = datetime.datetime.utcnow()
return now.isoformat(' ')[:-3]
def make_mac(self, url, timestamp, transaction_id):
"""
Make a MAC code to authenticate a Netvisor API request.
:param url:
the URL where the request is sent to
:param timestamp:
a timestamp returned by :meth:`make_timestamp`
:param transaction_id:
a unique identifier returned by :meth:`make_transaction_id`
"""
parameters = [
url,
self.sender,
self.customer_id,
timestamp,
self.language,
self.organization_id,
transaction_id,
self.customer_key,
self.partner_key,
]
joined_parameters = b'&'.join(
p.encode('utf-8') if isinstance(p, text_type) else p
for p in parameters
)
return hashlib.md5(joined_parameters).hexdigest()
def __call__(self, r):
timestamp = self.make_timestamp()
transaction_id = self.make_transaction_id()
mac = self.make_mac(r.url, timestamp, transaction_id)
r.headers['X-Netvisor-Authentication-CustomerId'] = self.customer_id
r.headers['X-Netvisor-Authentication-MAC'] = mac
r.headers['X-Netvisor-Authentication-PartnerId'] = self.partner_id
r.headers['X-Netvisor-Authentication-Sender'] = self.sender
r.headers['X-Netvisor-Authentication-Timestamp'] = timestamp
r.headers['X-Netvisor-Authentication-TransactionId'] = transaction_id
r.headers['X-Netvisor-Interface-Language'] = self.language
r.headers['X-Netvisor-Organisation-ID'] = self.organization_id
return r
|
fastmonkeys/netvisor.py
|
netvisor/auth.py
|
Python
|
mit
| 3,762
|
__author__ = 'ahmetdal'
urlpatterns = [
]
|
mstzn36/django-river
|
test_urls.py
|
Python
|
gpl-3.0
| 44
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
BasicStatistics.py
---------------------
Date : November 2016
Copyright : (C) 2016 by Nyall Dawson
Email : nyall dot dawson at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Nyall Dawson'
__date__ = 'November 2016'
__copyright__ = '(C) 2016, Nyall Dawson'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
import codecs
from qgis.PyQt.QtCore import QVariant
from qgis.PyQt.QtGui import QIcon
from qgis.core import (QgsStatisticalSummary,
QgsStringStatisticalSummary,
QgsDateTimeStatisticalSummary,
QgsFeatureRequest,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterField,
QgsProcessingParameterFileDestination,
QgsProcessingOutputHtml,
QgsProcessingOutputNumber)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class BasicStatisticsForField(QgisAlgorithm):
INPUT_LAYER = 'INPUT_LAYER'
FIELD_NAME = 'FIELD_NAME'
OUTPUT_HTML_FILE = 'OUTPUT_HTML_FILE'
MIN = 'MIN'
MAX = 'MAX'
COUNT = 'COUNT'
UNIQUE = 'UNIQUE'
EMPTY = 'EMPTY'
FILLED = 'FILLED'
MIN_LENGTH = 'MIN_LENGTH'
MAX_LENGTH = 'MAX_LENGTH'
MEAN_LENGTH = 'MEAN_LENGTH'
CV = 'CV'
SUM = 'SUM'
MEAN = 'MEAN'
STD_DEV = 'STD_DEV'
RANGE = 'RANGE'
MEDIAN = 'MEDIAN'
MINORITY = 'MINORITY'
MAJORITY = 'MAJORITY'
FIRSTQUARTILE = 'FIRSTQUARTILE'
THIRDQUARTILE = 'THIRDQUARTILE'
IQR = 'IQR'
def icon(self):
return QIcon(os.path.join(pluginPath, 'images', 'ftools', 'basic_statistics.png'))
def tags(self):
return self.tr('stats,statistics,date,time,datetime,string,number,text,table,layer,maximum,minimum,mean,average,standard,deviation,'
'count,distinct,unique,variance,median,quartile,range,majority,minority').split(',')
def group(self):
return self.tr('Vector analysis')
def groupId(self):
return 'vectoranalysis'
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT_LAYER,
self.tr('Input layer')))
self.addParameter(QgsProcessingParameterField(self.FIELD_NAME,
self.tr('Field to calculate statistics on'),
None, self.INPUT_LAYER, QgsProcessingParameterField.Any))
self.addParameter(QgsProcessingParameterFileDestination(self.OUTPUT_HTML_FILE, self.tr('Statistics'), self.tr('HTML files (*.html)'), None, True))
self.addOutput(QgsProcessingOutputHtml(self.OUTPUT_HTML_FILE, self.tr('Statistics')))
self.addOutput(QgsProcessingOutputNumber(self.COUNT, self.tr('Count')))
self.addOutput(QgsProcessingOutputNumber(self.UNIQUE, self.tr('Number of unique values')))
self.addOutput(QgsProcessingOutputNumber(self.EMPTY, self.tr('Number of empty (null) values')))
self.addOutput(QgsProcessingOutputNumber(self.FILLED, self.tr('Number of non-empty values')))
self.addOutput(QgsProcessingOutputNumber(self.MIN, self.tr('Minimum value')))
self.addOutput(QgsProcessingOutputNumber(self.MAX, self.tr('Maximum value')))
self.addOutput(QgsProcessingOutputNumber(self.MIN_LENGTH, self.tr('Minimum length')))
self.addOutput(QgsProcessingOutputNumber(self.MAX_LENGTH, self.tr('Maximum length')))
self.addOutput(QgsProcessingOutputNumber(self.MEAN_LENGTH, self.tr('Mean length')))
self.addOutput(QgsProcessingOutputNumber(self.CV, self.tr('Coefficient of Variation')))
self.addOutput(QgsProcessingOutputNumber(self.SUM, self.tr('Sum')))
self.addOutput(QgsProcessingOutputNumber(self.MEAN, self.tr('Mean value')))
self.addOutput(QgsProcessingOutputNumber(self.STD_DEV, self.tr('Standard deviation')))
self.addOutput(QgsProcessingOutputNumber(self.RANGE, self.tr('Range')))
self.addOutput(QgsProcessingOutputNumber(self.MEDIAN, self.tr('Median')))
self.addOutput(QgsProcessingOutputNumber(self.MINORITY, self.tr('Minority (rarest occurring value)')))
self.addOutput(QgsProcessingOutputNumber(self.MAJORITY, self.tr('Majority (most frequently occurring value)')))
self.addOutput(QgsProcessingOutputNumber(self.FIRSTQUARTILE, self.tr('First quartile')))
self.addOutput(QgsProcessingOutputNumber(self.THIRDQUARTILE, self.tr('Third quartile')))
self.addOutput(QgsProcessingOutputNumber(self.IQR, self.tr('Interquartile Range (IQR)')))
def name(self):
return 'basicstatisticsforfields'
def displayName(self):
return self.tr('Basic statistics for fields')
def processAlgorithm(self, parameters, context, feedback):
source = self.parameterAsSource(parameters, self.INPUT_LAYER, context)
field_name = self.parameterAsString(parameters, self.FIELD_NAME, context)
field = source.fields().at(source.fields().lookupField(field_name))
output_file = self.parameterAsFileOutput(parameters, self.OUTPUT_HTML_FILE, context)
request = QgsFeatureRequest().setFlags(QgsFeatureRequest.NoGeometry).setSubsetOfAttributes([field_name], source.fields())
features = source.getFeatures(request)
count = source.featureCount()
data = []
data.append(self.tr('Analyzed field: {}').format(field_name))
results = {}
if field.isNumeric():
d, results = self.calcNumericStats(features, feedback, field, count)
data.extend(d)
elif field.type() in (QVariant.Date, QVariant.Time, QVariant.DateTime):
d, results = self.calcDateTimeStats(features, feedback, field, count)
data.extend(d)
else:
d, results = self.calcStringStats(features, feedback, field, count)
data.extend(d)
if output_file:
self.createHTML(output_file, data)
results[self.OUTPUT_HTML_FILE] = output_file
return results
def calcNumericStats(self, features, feedback, field, count):
total = 100.0 / count if count else 0
stat = QgsStatisticalSummary()
for current, ft in enumerate(features):
if feedback.isCanceled():
break
stat.addVariant(ft[field.name()])
feedback.setProgress(int(current * total))
stat.finalize()
cv = stat.stDev() / stat.mean() if stat.mean() != 0 else 0
results = {self.COUNT: stat.count(),
self.UNIQUE: stat.variety(),
self.EMPTY: stat.countMissing(),
self.FILLED: count - stat.countMissing(),
self.MIN: stat.min(),
self.MAX: stat.max(),
self.RANGE: stat.range(),
self.SUM: stat.sum(),
self.MEAN: stat.mean(),
self.MEDIAN: stat.median(),
self.STD_DEV: stat.stDev(),
self.CV: cv,
self.MINORITY: stat.minority(),
self.MAJORITY: stat.majority(),
self.FIRSTQUARTILE: stat.firstQuartile(),
self.THIRDQUARTILE: stat.thirdQuartile(),
self.IQR: stat.interQuartileRange()}
data = []
data.append(self.tr('Count: {}').format(stat.count()))
data.append(self.tr('Unique values: {}').format(stat.variety()))
data.append(self.tr('NULL (missing) values: {}').format(stat.countMissing()))
data.append(self.tr('Minimum value: {}').format(stat.min()))
data.append(self.tr('Maximum value: {}').format(stat.max()))
data.append(self.tr('Range: {}').format(stat.range()))
data.append(self.tr('Sum: {}').format(stat.sum()))
data.append(self.tr('Mean value: {}').format(stat.mean()))
data.append(self.tr('Median value: {}').format(stat.median()))
data.append(self.tr('Standard deviation: {}').format(stat.stDev()))
data.append(self.tr('Coefficient of Variation: {}').format(cv))
data.append(self.tr('Minority (rarest occurring value): {}').format(stat.minority()))
data.append(self.tr('Majority (most frequently occurring value): {}').format(stat.majority()))
data.append(self.tr('First quartile: {}').format(stat.firstQuartile()))
data.append(self.tr('Third quartile: {}').format(stat.thirdQuartile()))
data.append(self.tr('Interquartile Range (IQR): {}').format(stat.interQuartileRange()))
return data, results
def calcStringStats(self, features, feedback, field, count):
total = 100.0 / count if count else 1
stat = QgsStringStatisticalSummary()
for current, ft in enumerate(features):
if feedback.isCanceled():
break
stat.addValue(ft[field.name()])
feedback.setProgress(int(current * total))
stat.finalize()
results = {self.COUNT: stat.count(),
self.UNIQUE: stat.countDistinct(),
self.EMPTY: stat.countMissing(),
self.FILLED: stat.count() - stat.countMissing(),
self.MIN: stat.min(),
self.MAX: stat.max(),
self.MIN_LENGTH: stat.minLength(),
self.MAX_LENGTH: stat.maxLength(),
self.MEAN_LENGTH: stat.meanLength()}
data = []
data.append(self.tr('Count: {}').format(count))
data.append(self.tr('Unique values: {}').format(stat.countDistinct()))
data.append(self.tr('NULL (missing) values: {}').format(stat.countMissing()))
data.append(self.tr('Minimum value: {}').format(stat.min()))
data.append(self.tr('Maximum value: {}').format(stat.max()))
data.append(self.tr('Minimum length: {}').format(stat.minLength()))
data.append(self.tr('Maximum length: {}').format(stat.maxLength()))
data.append(self.tr('Mean length: {}').format(stat.meanLength()))
return data, results
def calcDateTimeStats(self, features, feedback, field, count):
total = 100.0 / count if count else 1
stat = QgsDateTimeStatisticalSummary()
for current, ft in enumerate(features):
if feedback.isCanceled():
break
stat.addValue(ft[field.name()])
feedback.setProgress(int(current * total))
stat.finalize()
results = {self.COUNT: stat.count(),
self.UNIQUE: stat.countDistinct(),
self.EMPTY: stat.countMissing(),
self.FILLED: stat.count() - stat.countMissing(),
self.MIN: stat.statistic(QgsDateTimeStatisticalSummary.Min),
self.MAX: stat.statistic(QgsDateTimeStatisticalSummary.Max)}
data = []
data.append(self.tr('Count: {}').format(count))
data.append(self.tr('Unique values: {}').format(stat.countDistinct()))
data.append(self.tr('NULL (missing) values: {}').format(stat.countMissing()))
data.append(self.tr('Minimum value: {}').format(field.displayString(stat.statistic(QgsDateTimeStatisticalSummary.Min))))
data.append(self.tr('Maximum value: {}').format(field.displayString(stat.statistic(QgsDateTimeStatisticalSummary.Max))))
return data, results
def createHTML(self, outputFile, algData):
with codecs.open(outputFile, 'w', encoding='utf-8') as f:
f.write('<html><head>\n')
f.write('<meta http-equiv="Content-Type" content="text/html; \
charset=utf-8" /></head><body>\n')
for s in algData:
f.write('<p>' + str(s) + '</p>\n')
f.write('</body></html>\n')
|
stevenmizuno/QGIS
|
python/plugins/processing/algs/qgis/BasicStatistics.py
|
Python
|
gpl-2.0
| 12,810
|
# -*- coding: utf-8 -*-
import typing
import urllib.parse
import telegram.ext
import telegram.utils.helpers
import analytics
import constants
def check_admin(bot: telegram.Bot, context: telegram.ext.CallbackContext, message: telegram.Message, analytics_handler: analytics.AnalyticsHandler, admin_user_id: int) -> bool:
user = message.from_user
if user is None:
return False
analytics_handler.track(context, analytics.AnalyticsType.COMMAND, user, message.text)
if user.id != admin_user_id:
bot.send_message(message.chat_id, 'You are not allowed to use this command')
return False
return True
def get_no_results_message(query: str) -> str:
url = constants.DEX_SEARCH_URL_FORMAT.format(urllib.parse.quote(query))
url_text = escape_v2_markdown_text_link(
text='aici',
url=url
)
first_phrase = f'Niciun rezultat găsit pentru "{query}".'
second_phrase = 'Incearcă o căutare in tot textul definițiilor'
return (
f'{escape_v2_markdown_text(first_phrase)} '
f'{escape_v2_markdown_text(second_phrase)} '
f'{url_text}{ESCAPED_FULL_STOP}'
)
def send_no_results_message(bot: telegram.Bot, chat_id: int, message_id: int, query: str) -> None:
bot.send_message(
chat_id=chat_id,
text=get_no_results_message(query),
parse_mode=telegram.ParseMode.MARKDOWN_V2,
disable_web_page_preview=True,
reply_to_message_id=message_id
)
def send_subscription_update_message(bot: telegram.Bot, chat_id: int, text: str) -> None:
bot.send_message(
chat_id=chat_id,
text=text,
parse_mode=telegram.ParseMode.MARKDOWN_V2
)
def escape_v2_markdown_text(text: str, entity_type: typing.Optional[str] = None) -> str:
return telegram.utils.helpers.escape_markdown(
text=text,
version=2,
entity_type=entity_type
)
def escape_v2_markdown_text_link(text: str, url: str) -> str:
escaped_text = escape_v2_markdown_text(text)
escaped_url = escape_v2_markdown_text(
text=url,
entity_type=telegram.MessageEntity.TEXT_LINK
)
return f'[{escaped_text}]({escaped_url})'
ESCAPED_FULL_STOP = escape_v2_markdown_text('.')
ESCAPED_VERTICAL_LINE = escape_v2_markdown_text('|')
|
revolter/DexRoBot
|
src/telegram_utils.py
|
Python
|
gpl-3.0
| 2,302
|
# -*- coding: cp1252 -*-
#-------------------------------------------------------------------------------
# Name: Cumpleaños
#
# Author: Carlos Chesta
#-------------------------------------------------------------------------------
n = {
'Pepito': (1990, 10, 20),
'Yayita': (1992, 3, 3),
'Panchito': (1989, 10, 20),
'Perica': (1989, 12, 8),
'Fulanita': (1991, 2, 14),
}
def mismo_dia(fecha1,fecha2):
_,_,dia1 = fecha1
_,_,dia2 = fecha2
if dia1 == dia2:
return True
else:
return False
def mas_viejo(n):
viejo = (999999,9999999,999999)
viejo_nombre = ''
for nombre,fecha in n.items():
if fecha < viejo:
viejo = fecha
viejo_nombre = nombre
return viejo_nombre
def primer_cumple(n):
primero = (9999,9999)
for nombre, fecha in n.items():
_,mes,dia = fecha
if (mes,dia) < primero:
primero = (mes,dia)
nombre_primero = nombre
return nombre_primero
|
xbash/LabUNAB
|
15_estructuras/cumpleaños.py
|
Python
|
gpl-3.0
| 1,052
|
import cx_Freeze
import sys
import os
os.environ['TCL_LIBRARY'] = "C:\\LOCAL_TO_PYTHON\\Python35-32\\tcl\\tcl8.6"
os.environ['TK_LIBRARY'] = "C:\\LOCAL_TO_PYTHON\\Python35-32\\tcl\\tk8.6"
base = None
if sys.platform == 'win32':
base = 'Win32GUI'
executables = [cx_Freeze.Executable("test.py", base=None)]
cx_Freeze.setup(
name="FDDS",
options = {"build_exe": {"packages":{"numpy"}}},
version = "0.01",
description = "FDDS",
executables = executables
)
# from cx_Freeze import setup, Executable
#
# setup(
# name = "FDDS",
# version = "1.0",
# description = "FDDS",
# executables = [Executable("index.py", base = "Win32GUI")])
|
AmilaViduranga/FDDS
|
setup.py
|
Python
|
mit
| 670
|
"""Config flow for BSB-Lan integration."""
import logging
from typing import Any, Dict, Optional
from bsblan import BSBLan, BSBLanError, Info
import voluptuous as vol
from homeassistant.config_entries import CONN_CLASS_LOCAL_POLL, ConfigFlow
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
from .const import ( # pylint:disable=unused-import
CONF_DEVICE_IDENT,
CONF_PASSKEY,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
class BSBLanFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a BSBLan config flow."""
VERSION = 1
CONNECTION_CLASS = CONN_CLASS_LOCAL_POLL
async def async_step_user(
self, user_input: Optional[ConfigType] = None
) -> Dict[str, Any]:
"""Handle a flow initiated by the user."""
if user_input is None:
return self._show_setup_form()
try:
info = await self._get_bsblan_info(
host=user_input[CONF_HOST],
port=user_input[CONF_PORT],
passkey=user_input.get(CONF_PASSKEY),
)
except BSBLanError:
return self._show_setup_form({"base": "cannot_connect"})
# Check if already configured
await self.async_set_unique_id(info.device_identification)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=info.device_identification,
data={
CONF_HOST: user_input[CONF_HOST],
CONF_PORT: user_input[CONF_PORT],
CONF_PASSKEY: user_input.get(CONF_PASSKEY),
CONF_DEVICE_IDENT: info.device_identification,
},
)
def _show_setup_form(self, errors: Optional[Dict] = None) -> Dict[str, Any]:
"""Show the setup form to the user."""
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Optional(CONF_PORT, default=80): int,
vol.Optional(CONF_PASSKEY): str,
}
),
errors=errors or {},
)
async def _get_bsblan_info(
self, host: str, passkey: Optional[str], port: int
    ) -> Info:
        """Get device information from a BSBLan device."""
session = async_get_clientsession(self.hass)
_LOGGER.debug("request bsblan.info:")
bsblan = BSBLan(
host, passkey=passkey, port=port, session=session, loop=self.hass.loop
)
return await bsblan.info()
|
GenericStudent/home-assistant
|
homeassistant/components/bsblan/config_flow.py
|
Python
|
apache-2.0
| 2,690
|
"""this extension try to enhance the function of the calculator"""
from lib import basic_cal
import math
class MathExpressionsCal(basic_cal.Calculator):
def __init__(self):
super().__init__()
self.math_operator_list = {"sin": -1, "cos": -1, "tan": -1, "log": -1, ',': -1, "ln": -1}
self.math_operate = {"sin": "_sin_it", "cos": "_cos_it", "tan": "_tan_it", ",": "_pass_it",
"ln": "_log_it"}
self.binary_math_operate = {"log": "_log_it"}
self.operator_list.update(self.math_operator_list)
self.operate.update(self.binary_math_operate)
self.PI = math.pi
def _sin_it(self, x):
return math.sin(x*self.PI/180)
def _cos_it(self, x):
return math.cos(x*self.PI/180)
def _tan_it(self, x):
return math.tan(x*self.PI/180)
    @staticmethod
    def _log_it(a=math.e, b=None):
        # binary use ("log"): _log_it(base, x) -> logarithm of x in the given base
        # unary use ("ln"): _log_it(x) -> natural logarithm of x
        if b is None:
            a, b = math.e, a
        try:
            return math.log(b, a)
        except ZeroDivisionError as error:
            print(error)
@staticmethod
def _pass_it(x):
return x
def load_expressions(self, expressions):
super().load_expressions(expressions)
def parse_expressions(self):
super().parse_expressions()
def calculate_expressions(self):
for element in self.expressions:
if element not in self.operator_list:
self.stack.append(element)
elif element in self.math_operate:
x = float(self.stack.pop())
calculate = getattr(self, self.math_operate[element])
self.stack.append(calculate(x))
else:
a = float(self.stack.pop())
b = float(self.stack.pop())
calculate = getattr(self, self.operate[element])
self.stack.append(calculate(b, a))
return self.stack.pop()
def clear_stack(self):
super().clear_stack()
def execute_it(self, expressions):
self.load_expressions(expressions)
self.check_invalid_expressions()
self.parse_expressions()
answer = self.calculate_expressions()
self.previous_answer = answer
print("answer = %s" % answer)
self.clear_stack()
return answer
if __name__ == "__main__":
math_cal = MathExpressionsCal()
math_cal.execute_it("log(10,100)")
|
DaivdZhang/pyCalculator
|
src/lib/math_cal.py
|
Python
|
mit
| 2,432
|
from threading import Thread
import zmq
from protobuf.cta_event_pb2 import CTAEvent
class ReadProtoBuf(Thread, object):
'''reads cta data from protobuf and pushes the data into the tk window '''
def __init__(self, ip, port, queue, stop_event):
Thread.__init__(self)
self.stop_event = stop_event
self.context = zmq.Context()
self.socket = self.context.socket(zmq.SUB)
self.socket.connect('tcp://{ip}:{port}'.format(ip=ip, port=port))
self.socket.setsockopt(zmq.SUBSCRIBE, '1000')
self.poller = zmq.Poller()
self.poller.register(self.socket, zmq.POLLIN)
self.queue = queue
def run(self):
while not self.stop_event.is_set():
self.read()
def read(self):
mesg = self.poller.poll(500)
if mesg:
event = CTAEvent()
rec = self.socket.recv()
binary_data = rec[5:]
event.ParseFromString(binary_data)
self.queue.append(event)
|
MaxNoe/cta_event_viewer
|
read/__init__.py
|
Python
|
mit
| 1,003
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2019-01-09 13:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('crm', '0006_auto_20181001_0631'),
]
operations = [
migrations.AddField(
model_name='organization',
name='billing_type',
field=models.CharField(choices=[(b'normal', b'Normal billing'), (b'consortia', b'Billed via Associate Consortia'), (b'custom', b'Custom Agreement'), (b'waiver', b'Fee waiver')], default=b'normal', max_length=128),
),
]
|
ocwc/ocwc-members
|
members/crm/migrations/0007_organization_billing_type.py
|
Python
|
mit
| 633
|
# -*- coding: utf-8 -*-
# Copyright(C) 2012 Gilles-Alexandre Quenot
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
# python2.5 compatibility
from __future__ import with_statement
from weboob.capabilities.bank import ICapBank, AccountNotFound
from weboob.tools.backend import BaseBackend, BackendConfig
from weboob.tools.value import ValueBackendPassword
from .browser import Fortuneo
__all__ = ['FortuneoBackend']
class FortuneoBackend(BaseBackend, ICapBank):
NAME = 'fortuneo'
MAINTAINER = u'Gilles-Alexandre Quenot'
EMAIL = 'gilles.quenot@gmail.com'
VERSION = '0.f'
LICENSE = 'AGPLv3+'
DESCRIPTION = u'Fortuneo French bank website'
CONFIG = BackendConfig(
ValueBackendPassword(
'login',
label='Account ID',
masked=False,
required=True
),
ValueBackendPassword(
'password',
label='Password',
required=True
)
)
BROWSER = Fortuneo
def create_default_browser(self):
return self.create_browser(
self.config['login'].get(),
self.config['password'].get()
)
def iter_accounts(self):
"""Iter accounts"""
for account in self.browser.get_accounts_list():
yield account
def get_account(self, _id):
with self.browser:
account = self.browser.get_account(_id)
if account:
return account
else:
raise AccountNotFound()
def iter_history(self, account):
"""Iter history of transactions on a specific account"""
with self.browser:
for history in self.browser.get_history(account):
yield history
# vim:ts=4:sw=4
|
franek/weboob
|
modules/fortuneo/backend.py
|
Python
|
agpl-3.0
| 2,495
|
"""This module contains functionality expected to be used by all request handlers."""
import tor_async_util
class RequestHandler(tor_async_util.RequestHandler):
    """Abstract base class for all request handlers."""
@property
def config(self):
"""motivated by desire to make code easier to read and make it super
clear to all request handlers from where they should get their config.
"""
return tor_async_util.Config.instance
|
simonsdave/cloudfeaster_infrastructure
|
cloudfeaster_services/request_handlers.py
|
Python
|
mit
| 470
|
files = [ "strobe_gen.vhd" ]
|
lnls-dig/dsp-cores
|
hdl/modules/strobe_gen/Manifest.py
|
Python
|
lgpl-3.0
| 30
|
# -*- coding: utf-8 -*-
# This coding header is significant for tests, as the debug view is parsing
# files to search for such a header to decode the source file content
from __future__ import absolute_import, unicode_literals
import inspect
import os
import sys
from django.conf import settings
from django.core import mail
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from django.test import TestCase, RequestFactory
from django.test.utils import (override_settings, setup_test_template_loader,
restore_template_loaders)
from django.views.debug import ExceptionReporter
from .. import BrokenException, except_args
from ..views import (sensitive_view, non_sensitive_view, paranoid_view,
custom_exception_reporter_filter_view, sensitive_method_view)
@override_settings(DEBUG=True, TEMPLATE_DEBUG=True)
class DebugViewTests(TestCase):
urls = "regressiontests.views.urls"
def test_files(self):
response = self.client.get('/raises/')
self.assertEqual(response.status_code, 500)
data = {
'file_data.txt': SimpleUploadedFile('file_data.txt', b'haha'),
}
response = self.client.post('/raises/', data)
self.assertContains(response, 'file_data.txt', status_code=500)
self.assertNotContains(response, 'haha', status_code=500)
def test_403(self):
# Ensure no 403.html template exists to test the default case.
setup_test_template_loader({})
try:
response = self.client.get('/views/raises403/')
self.assertContains(response, '<h1>403 Forbidden</h1>', status_code=403)
finally:
restore_template_loaders()
def test_403_template(self):
# Set up a test 403.html template.
setup_test_template_loader(
{'403.html': 'This is a test template for a 403 Forbidden error.'}
)
try:
response = self.client.get('/views/raises403/')
self.assertContains(response, 'test template', status_code=403)
finally:
restore_template_loaders()
def test_404(self):
response = self.client.get('/views/raises404/')
self.assertEqual(response.status_code, 404)
def test_view_exceptions(self):
for n in range(len(except_args)):
self.assertRaises(BrokenException, self.client.get,
reverse('view_exception', args=(n,)))
def test_template_exceptions(self):
for n in range(len(except_args)):
try:
self.client.get(reverse('template_exception', args=(n,)))
except Exception:
raising_loc = inspect.trace()[-1][-2][0].strip()
self.assertFalse(raising_loc.find('raise BrokenException') == -1,
"Failed to find 'raise BrokenException' in last frame of traceback, instead found: %s" %
raising_loc)
def test_template_loader_postmortem(self):
response = self.client.get(reverse('raises_template_does_not_exist'))
template_path = os.path.join('templates', 'i_dont_exist.html')
self.assertContains(response, template_path, status_code=500)
class ExceptionReporterTests(TestCase):
rf = RequestFactory()
def test_request_and_exception(self):
"A simple exception report can be generated"
try:
request = self.rf.get('/test_view/')
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('<h1>ValueError at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">Can't find my keys</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertIn('<th>Exception Type:</th>', html)
self.assertIn('<th>Exception Value:</th>', html)
self.assertIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
def test_no_request(self):
"An exception report can be generated without request"
try:
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('<h1>ValueError</h1>', html)
self.assertIn('<pre class="exception_value">Can't find my keys</pre>', html)
self.assertNotIn('<th>Request Method:</th>', html)
self.assertNotIn('<th>Request URL:</th>', html)
self.assertIn('<th>Exception Type:</th>', html)
self.assertIn('<th>Exception Value:</th>', html)
self.assertIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertIn('<p>Request data not supplied</p>', html)
def test_no_exception(self):
"An exception report can be generated for just a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertIn('<h1>Report at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">No exception supplied</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
def test_request_and_message(self):
"A message can be provided in addition to a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, "I'm a little teapot", None)
html = reporter.get_traceback_html()
self.assertIn('<h1>Report at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">I'm a little teapot</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
def test_message_only(self):
reporter = ExceptionReporter(None, None, "I'm a little teapot", None)
html = reporter.get_traceback_html()
self.assertIn('<h1>Report</h1>', html)
self.assertIn('<pre class="exception_value">I'm a little teapot</pre>', html)
self.assertNotIn('<th>Request Method:</th>', html)
self.assertNotIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertIn('<p>Request data not supplied</p>', html)
class PlainTextReportTests(TestCase):
rf = RequestFactory()
def test_request_and_exception(self):
"A simple exception report can be generated"
try:
request = self.rf.get('/test_view/')
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
self.assertIn('ValueError at /test_view/', text)
self.assertIn("Can't find my keys", text)
self.assertIn('Request Method:', text)
self.assertIn('Request URL:', text)
self.assertIn('Exception Type:', text)
self.assertIn('Exception Value:', text)
self.assertIn('Traceback:', text)
self.assertIn('Request information:', text)
self.assertNotIn('Request data not supplied', text)
def test_no_request(self):
"An exception report can be generated without request"
try:
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
self.assertIn('ValueError', text)
self.assertIn("Can't find my keys", text)
self.assertNotIn('Request Method:', text)
self.assertNotIn('Request URL:', text)
self.assertIn('Exception Type:', text)
self.assertIn('Exception Value:', text)
self.assertIn('Traceback:', text)
self.assertIn('Request data not supplied', text)
def test_no_exception(self):
"An exception report can be generated for just a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
def test_request_and_message(self):
"A message can be provided in addition to a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, "I'm a little teapot", None)
text = reporter.get_traceback_text()
def test_message_only(self):
reporter = ExceptionReporter(None, None, "I'm a little teapot", None)
text = reporter.get_traceback_text()
class ExceptionReportTestMixin(object):
# Mixin used in the ExceptionReporterFilterTests and
# AjaxResponseExceptionReporterFilter tests below
breakfast_data = {'sausage-key': 'sausage-value',
'baked-beans-key': 'baked-beans-value',
'hash-brown-key': 'hash-brown-value',
'bacon-key': 'bacon-value',}
def verify_unsafe_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
Asserts that potentially sensitive info are displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# All variables are shown.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertContains(response, 'scrambled', status_code=500)
self.assertContains(response, 'sauce', status_code=500)
self.assertContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters are shown.
self.assertContains(response, k, status_code=500)
self.assertContains(response, v, status_code=500)
def verify_safe_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
Asserts that certain sensitive info are not displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# Non-sensitive variable's name and value are shown.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertContains(response, 'scrambled', status_code=500)
# Sensitive variable's name is shown but not its value.
self.assertContains(response, 'sauce', status_code=500)
self.assertNotContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertContains(response, k, status_code=500)
# Non-sensitive POST parameters' values are shown.
self.assertContains(response, 'baked-beans-value', status_code=500)
self.assertContains(response, 'hash-brown-value', status_code=500)
# Sensitive POST parameters' values are not shown.
self.assertNotContains(response, 'sausage-value', status_code=500)
self.assertNotContains(response, 'bacon-value', status_code=500)
def verify_paranoid_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
Asserts that no variables or POST parameters are displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# Show variable names but not their values.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertNotContains(response, 'scrambled', status_code=500)
self.assertContains(response, 'sauce', status_code=500)
self.assertNotContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertContains(response, k, status_code=500)
# No POST parameters' values are shown.
self.assertNotContains(response, v, status_code=500)
def verify_unsafe_email(self, view, check_for_POST_params=True):
"""
Asserts that potentially sensitive info are displayed in the email report.
"""
with self.settings(ADMINS=(('Admin', 'admin@fattie-breakie.com'),)):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
self.assertNotIn('cooked_eggs', email.body)
self.assertNotIn('scrambled', email.body)
self.assertNotIn('sauce', email.body)
self.assertNotIn('worcestershire', email.body)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters are shown.
self.assertIn(k, email.body)
self.assertIn(v, email.body)
def verify_safe_email(self, view, check_for_POST_params=True):
"""
Asserts that certain sensitive info are not displayed in the email report.
"""
with self.settings(ADMINS=(('Admin', 'admin@fattie-breakie.com'),)):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
self.assertNotIn('cooked_eggs', email.body)
self.assertNotIn('scrambled', email.body)
self.assertNotIn('sauce', email.body)
self.assertNotIn('worcestershire', email.body)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertIn(k, email.body)
# Non-sensitive POST parameters' values are shown.
self.assertIn('baked-beans-value', email.body)
self.assertIn('hash-brown-value', email.body)
# Sensitive POST parameters' values are not shown.
self.assertNotIn('sausage-value', email.body)
self.assertNotIn('bacon-value', email.body)
def verify_paranoid_email(self, view):
"""
Asserts that no variables or POST parameters are displayed in the email report.
"""
with self.settings(ADMINS=(('Admin', 'admin@fattie-breakie.com'),)):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
self.assertNotIn('cooked_eggs', email.body)
self.assertNotIn('scrambled', email.body)
self.assertNotIn('sauce', email.body)
self.assertNotIn('worcestershire', email.body)
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertIn(k, email.body)
# No POST parameters' values are shown.
self.assertNotIn(v, email.body)
class ExceptionReporterFilterTests(TestCase, ExceptionReportTestMixin):
"""
Ensure that sensitive information can be filtered out of error reports.
Refs #14614.
"""
rf = RequestFactory()
def test_non_sensitive_request(self):
"""
        Ensure that everything (request info and frame variables) can be seen
in the default error reports for non-sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(non_sensitive_view)
self.verify_unsafe_email(non_sensitive_view)
with self.settings(DEBUG=False):
self.verify_unsafe_response(non_sensitive_view)
self.verify_unsafe_email(non_sensitive_view)
def test_sensitive_request(self):
"""
Ensure that sensitive POST parameters and frame variables cannot be
seen in the default error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_view)
self.verify_unsafe_email(sensitive_view)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_view)
self.verify_safe_email(sensitive_view)
def test_paranoid_request(self):
"""
Ensure that no POST parameters and frame variables can be seen in the
default error reports for "paranoid" requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(paranoid_view)
self.verify_unsafe_email(paranoid_view)
with self.settings(DEBUG=False):
self.verify_paranoid_response(paranoid_view)
self.verify_paranoid_email(paranoid_view)
def test_custom_exception_reporter_filter(self):
"""
Ensure that it's possible to assign an exception reporter filter to
the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(custom_exception_reporter_filter_view)
self.verify_unsafe_email(custom_exception_reporter_filter_view)
with self.settings(DEBUG=False):
self.verify_unsafe_response(custom_exception_reporter_filter_view)
self.verify_unsafe_email(custom_exception_reporter_filter_view)
def test_sensitive_method(self):
"""
Ensure that the sensitive_variables decorator works with object
methods.
Refs #18379.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_method_view,
check_for_POST_params=False)
self.verify_unsafe_email(sensitive_method_view,
check_for_POST_params=False)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_method_view,
check_for_POST_params=False)
self.verify_safe_email(sensitive_method_view,
check_for_POST_params=False)
class AjaxResponseExceptionReporterFilter(TestCase, ExceptionReportTestMixin):
"""
Ensure that sensitive information can be filtered out of error reports.
    Here we specifically test the plain text 500 debug-only error page served
    when the request is detected as having been sent by JS code. We don't
    check for the (non)existence of frame vars in the traceback information
    section of the response content because we don't include them in these
    error pages.
Refs #14614.
"""
rf = RequestFactory(HTTP_X_REQUESTED_WITH='XMLHttpRequest')
def test_non_sensitive_request(self):
"""
        Ensure that request info can be seen in the default error reports for
non-sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(non_sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_unsafe_response(non_sensitive_view, check_for_vars=False)
def test_sensitive_request(self):
"""
Ensure that sensitive POST parameters cannot be seen in the default
error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_view, check_for_vars=False)
def test_paranoid_request(self):
"""
Ensure that no POST parameters can be seen in the default error reports
for "paranoid" requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(paranoid_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_paranoid_response(paranoid_view, check_for_vars=False)
def test_custom_exception_reporter_filter(self):
"""
Ensure that it's possible to assign an exception reporter filter to
the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(custom_exception_reporter_filter_view,
check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_unsafe_response(custom_exception_reporter_filter_view,
check_for_vars=False)
|
Proggie02/TestRepo
|
tests/regressiontests/views/tests/debug.py
|
Python
|
bsd-3-clause
| 22,519
|
"""
Constants used in ops classes
"""
HYBRID_VM = 'hybridvm'
VM_POWER_ON_STATUS = 4
VM_POWER_OFF_STATUS = 8
|
HybridF5/hybrid-jacket
|
nova_jacket/virt/jacket/vcloud/constants.py
|
Python
|
apache-2.0
| 110
|
########################################################################
#
# File Name: HTMLScriptElement
#
# Documentation: http://docs.4suite.com/4DOM/HTMLScriptElement.html
#
### This file is automatically generated by GenerateHtml.py.
### DO NOT EDIT!
"""
WWW: http://4suite.com/4DOM e-mail: support@4suite.com
Copyright (c) 2000 Fourthought Inc, USA. All Rights Reserved.
See http://4suite.com/COPYRIGHT for license and copyright information
"""
import string
from xml.dom import Node
from xml.dom.html.HTMLElement import HTMLElement
class HTMLScriptElement(HTMLElement):
def __init__(self, ownerDocument, nodeName="SCRIPT"):
HTMLElement.__init__(self, ownerDocument, nodeName)
### Attribute Methods ###
def _get_charset(self):
return self.getAttribute("CHARSET")
def _set_charset(self, value):
self.setAttribute("CHARSET", value)
def _get_defer(self):
return self.hasAttribute("DEFER")
def _set_defer(self, value):
if value:
self.setAttribute("DEFER", "DEFER")
else:
self.removeAttribute("DEFER")
def _get_event(self):
return self.getAttribute("EVENT")
def _set_event(self, value):
self.setAttribute("EVENT", value)
def _get_htmlFor(self):
return self.getAttribute("FOR")
def _set_htmlFor(self, value):
self.setAttribute("FOR", value)
def _get_src(self):
return self.getAttribute("SRC")
def _set_src(self, value):
self.setAttribute("SRC", value)
def _get_text(self):
if not self.firstChild:
return
if self.firstChild == self.lastChild:
return self.firstChild.data
self.normalize()
text = filter(lambda x: x.nodeType == Node.TEXT_NODE, self.childNodes)
return text[0].data
def _set_text(self, value):
text = None
for node in self.childNodes:
if not text and node.nodeType == Node.TEXT_NODE:
text = node
else:
self.removeChild(node)
if text:
text.data = value
else:
text = self.ownerDocument.createTextNode(value)
self.appendChild(text)
def _get_type(self):
return self.getAttribute("TYPE")
def _set_type(self, value):
self.setAttribute("TYPE", value)
### Attribute Access Mappings ###
_readComputedAttrs = HTMLElement._readComputedAttrs.copy()
_readComputedAttrs.update({
"charset" : _get_charset,
"defer" : _get_defer,
"event" : _get_event,
"htmlFor" : _get_htmlFor,
"src" : _get_src,
"text" : _get_text,
"type" : _get_type
})
_writeComputedAttrs = HTMLElement._writeComputedAttrs.copy()
_writeComputedAttrs.update({
"charset" : _set_charset,
"defer" : _set_defer,
"event" : _set_event,
"htmlFor" : _set_htmlFor,
"src" : _set_src,
"text" : _set_text,
"type" : _set_type
})
_readOnlyAttrs = filter(lambda k,m=_writeComputedAttrs: not m.has_key(k),
HTMLElement._readOnlyAttrs + _readComputedAttrs.keys())
|
carvalhomb/tsmells
|
guess/src/Lib/xml/dom/html/HTMLScriptElement.py
|
Python
|
gpl-2.0
| 3,340
|
import numpy as np
import warnings
import operator
from heapq import merge
class intervals(object):
r"""
This class implements methods for intervals or union of two unbounded
intervals, when all these sets have a point in their intersection
"""
def __init__(self, I = None):
"""
        Create an intervals object, with some unbounded and bounded intervals
Parameters
----------
I : tuple
I is a tuple (inf, sup), the interval created
Returns
-------
interv : intervals
The intervals object
Warning : sup has to be larger than inf. If not, it raises a
ValueError exception
        If sup == inf, it creates an empty interval, and raises a Warning
>>> I = intervals()
>>> I2 = intervals((-1, 1))
"""
        if I is None:
self._U = []
else:
## Check that the interval is correct
(inf, sup) = I
if sup < inf:
raise ValueError("The given tuple " + \
"does not represent an interval : " + repr(I))
# elif inf == sup:
# self._U = []
else:
self._U = [I]
def __call__(self, x):
"""
Check if x is in the intersection of the intervals
Parameters
----------
x : float
The point you want to know if it is in the intervals
Returns
-------
is_in : bool
True if x is in the intersection, False if it's not
Examples
--------
>>> I = intervals()
>>> I(2)
False
>>> I = intervals.intersection(intervals((-1, 6)), \
intervals(( 0, 7)), \
~intervals((1, 4)))
>>> x1, x2, x3, x4, x5 = 0.5, 1.5, 5, 6.5, 8
>>> I(x1), I(x2), I(x3), I(x4), I(x5)
(True, False, True, False, False)
"""
return any( a <= x and x <= b for (a, b) in self )
def __len__(self):
"""
        Return the number of connected intervals composing this instance
>>> I = intervals.intersection(intervals((-1, 6)), \
intervals(( 0, 7)), \
~intervals((1, 4)))
>>> len(I)
2
"""
return len(self._U)
def __invert__(self):
"""
Return the complement of the interval in the reals
>>> I = intervals.intersection(intervals((-1, 6)), \
intervals(( 0, 7)), \
~intervals((1, 4)))
>>> print(~I)
[(-inf, 0), (1, 4), (6, inf)]
"""
if len(self) == 0:
return intervals((-np.inf, np.inf))
inverse = intervals()
a, _ = self._U[0]
if a > -np.inf:
inverse._U.append((-np.inf, a))
for (a1, b1), (a2, b2) in zip(self._U[:-1], self._U[1:]):
inverse._U.append((b1, a2))
_, b = self._U[-1]
if b < np.inf:
inverse._U.append((b, np.inf))
return inverse
def __repr__(self):
return repr(self._U)
def __iter__(self):
return iter(self._U)
def __getitem__(self,index):
return self._U[index]
@staticmethod
def union(*interv):
"""
Return the union of all the given intervals
Parameters
----------
interv1, ... : interv
intervals instance
Returns
-------
union, a new intervals instance, representing the union of interv1, ...
>>> I = intervals.union(intervals((-np.inf, 0)), \
intervals((-1, 1)), \
intervals((3, 6)))
>>> print(I)
[(-inf, 1), (3, 6)]
"""
## Define the union of an empty family as an empty set
union = intervals()
        if len(interv) == 0:
            return union
interv_merged_gen = merge(*interv)
old_a, old_b = None, None
for new_a, new_b in interv_merged_gen:
if old_b is not None and new_a < old_b: # check to see if union of (old_a, old_b) and
# (new_a, new_b) is (old_a, new_b)
old_b = max(old_b, new_b)
elif old_b is None: # first interval
old_a, old_b = new_a, new_b
else:
union._U.append((old_a, old_b))
old_a, old_b = new_a, new_b
union._U.append((old_a, old_b))
return union
@staticmethod
def intersection(*interv):
"""
Return the intersection of all the given intervals
Parameters
----------
interv1, ... : interv
intervals instance
Returns
-------
intersection, a new intervals instance, representing the intersection
of interv1, ...
>>> I = intervals.intersection(intervals((-1, 6)), \
intervals(( 0, 7)), \
~intervals((1, 4)))
>>> print(I)
[(0, 1), (4, 6)]
"""
if len(interv) == 0:
I = intervals()
return ~I
return ~(intervals.union(*(~I for I in interv)))
def __add__(self, offset):
"""
Add an offset to the intervals
Parameters
----------
off : float
The offset added
Returns
-------
interv : intervals
a new instance, self + offset
Examples
--------
>>> I = intervals.intersection(intervals((-1, 6)), \
intervals(( 0, 7)), \
~intervals((1, 4)))
>>> J = I+2
>>> print(J)
[(2, 3), (6, 8)]
"""
interv = intervals()
interv._U = [(a+offset, b+offset) for (a, b) in self._U]
return interv
if __name__ == "__main__":
import doctest
doctest.testmod()
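# Illustrative sketch (not part of the original module): combining the set
# operations defined above. The numbers are made up for demonstration and the
# block only runs when this file is executed directly.
if __name__ == "__main__":
    demo = intervals.intersection(intervals((-2, 3)), ~intervals((0, 1)))
    print(demo)       # [(-2, 0), (1, 3)]
    print(demo + 10)  # [(8, 10), (11, 13)]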
|
selective-inference/selective-inference
|
selectinf/constraints/intervals.py
|
Python
|
bsd-3-clause
| 6,279
|
"""
Unit tests for trust-region optimization routines.
To run it in its simplest form::
    nosetests test_trustregion.py
"""
import pytest
import numpy as np
from numpy.testing import assert_, assert_equal, assert_allclose
from scipy.optimize import (minimize, rosen, rosen_der, rosen_hess,
rosen_hess_prod)
class Accumulator:
""" This is for testing callbacks."""
def __init__(self):
self.count = 0
self.accum = None
def __call__(self, x):
self.count += 1
if self.accum is None:
self.accum = np.array(x)
else:
self.accum += x
class TestTrustRegionSolvers:
def setup_method(self):
self.x_opt = [1.0, 1.0]
self.easy_guess = [2.0, 2.0]
self.hard_guess = [-1.2, 1.0]
def test_dogleg_accuracy(self):
# test the accuracy and the return_all option
x0 = self.hard_guess
r = minimize(rosen, x0, jac=rosen_der, hess=rosen_hess, tol=1e-8,
method='dogleg', options={'return_all': True},)
assert_allclose(x0, r['allvecs'][0])
assert_allclose(r['x'], r['allvecs'][-1])
assert_allclose(r['x'], self.x_opt)
def test_dogleg_callback(self):
# test the callback mechanism and the maxiter and return_all options
accumulator = Accumulator()
maxiter = 5
r = minimize(rosen, self.hard_guess, jac=rosen_der, hess=rosen_hess,
callback=accumulator, method='dogleg',
options={'return_all': True, 'maxiter': maxiter},)
assert_equal(accumulator.count, maxiter)
assert_equal(len(r['allvecs']), maxiter+1)
assert_allclose(r['x'], r['allvecs'][-1])
assert_allclose(sum(r['allvecs'][1:]), accumulator.accum)
def test_dogleg_user_warning(self):
with pytest.warns(RuntimeWarning,
match=r'Maximum number of iterations'):
minimize(rosen, self.hard_guess, jac=rosen_der,
hess=rosen_hess, method='dogleg',
options={'disp': True, 'maxiter': 1}, )
def test_solver_concordance(self):
# Assert that dogleg uses fewer iterations than ncg on the Rosenbrock
# test function, although this does not necessarily mean
# that dogleg is faster or better than ncg even for this function
# and especially not for other test functions.
f = rosen
g = rosen_der
h = rosen_hess
for x0 in (self.easy_guess, self.hard_guess):
r_dogleg = minimize(f, x0, jac=g, hess=h, tol=1e-8,
method='dogleg', options={'return_all': True})
r_trust_ncg = minimize(f, x0, jac=g, hess=h, tol=1e-8,
method='trust-ncg',
options={'return_all': True})
r_trust_krylov = minimize(f, x0, jac=g, hess=h, tol=1e-8,
method='trust-krylov',
options={'return_all': True})
r_ncg = minimize(f, x0, jac=g, hess=h, tol=1e-8,
method='newton-cg', options={'return_all': True})
r_iterative = minimize(f, x0, jac=g, hess=h, tol=1e-8,
method='trust-exact',
options={'return_all': True})
assert_allclose(self.x_opt, r_dogleg['x'])
assert_allclose(self.x_opt, r_trust_ncg['x'])
assert_allclose(self.x_opt, r_trust_krylov['x'])
assert_allclose(self.x_opt, r_ncg['x'])
assert_allclose(self.x_opt, r_iterative['x'])
assert_(len(r_dogleg['allvecs']) < len(r_ncg['allvecs']))
def test_trust_ncg_hessp(self):
for x0 in (self.easy_guess, self.hard_guess, self.x_opt):
r = minimize(rosen, x0, jac=rosen_der, hessp=rosen_hess_prod,
tol=1e-8, method='trust-ncg')
assert_allclose(self.x_opt, r['x'])
def test_trust_ncg_start_in_optimum(self):
r = minimize(rosen, x0=self.x_opt, jac=rosen_der, hess=rosen_hess,
tol=1e-8, method='trust-ncg')
assert_allclose(self.x_opt, r['x'])
def test_trust_krylov_start_in_optimum(self):
r = minimize(rosen, x0=self.x_opt, jac=rosen_der, hess=rosen_hess,
tol=1e-8, method='trust-krylov')
assert_allclose(self.x_opt, r['x'])
def test_trust_exact_start_in_optimum(self):
r = minimize(rosen, x0=self.x_opt, jac=rosen_der, hess=rosen_hess,
tol=1e-8, method='trust-exact')
assert_allclose(self.x_opt, r['x'])
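# Illustrative sketch (not part of the original test file): the minimal call
# pattern these tests exercise, using the Rosenbrock helpers imported above.
# Guarded so it only runs when this file is executed directly.
if __name__ == "__main__":
    result = minimize(rosen, [-1.2, 1.0], jac=rosen_der, hess=rosen_hess,
                      tol=1e-8, method='dogleg')
    print(result.x)  # expected to be close to [1.0, 1.0]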
|
scipy/scipy
|
scipy/optimize/tests/test_trustregion.py
|
Python
|
bsd-3-clause
| 4,701
|
import time
import os
import pickle
from softwarecenter.paths import SOFTWARE_CENTER_CACHE_DIR
# decorator to add a fake network delay if set
# in FakeReviewSettings.fake_network_delay
def network_delay(fn):
def slp(self, *args, **kwargs):
fake_settings = FakeReviewSettings()
delay = fake_settings.get_setting('fake_network_delay')
if delay:
time.sleep(delay)
return fn(self, *args, **kwargs)
return slp
class FakeReviewSettings(object):
'''An object that simply holds settings which are used by
RatingsAndReviewsAPI in the rnrclient_fake module. Using this module
allows a developer to test the reviews functionality without any
interaction with a reviews server. Each setting here provides complete
control over how the 'server' will respond. Changes to these settings
should be made to the class attributes directly without creating an
instance of this class.
The intended usage is for unit tests where a predictable response is
required and where the application should THINK it has spoken to a
server.
The unit test would make changes to settings in this class before
running the unit test.
'''
_FAKE_SETTINGS = {}
#general settings
#*****************************
#delay (in seconds) before returning from any of the fake rnr methods
#useful for emulating real network timings (use None for no delays)
_FAKE_SETTINGS['fake_network_delay'] = 2
#server status
#*****************************
#raises APIError if True
_FAKE_SETTINGS['server_response_error'] = False
#review stats
#*****************************
#raises APIError if True
_FAKE_SETTINGS['review_stats_error'] = False
#the following has no effect if review_stats_error = True
#determines the number of package stats (i.e. ReviewStats list size) to
#return max 15 packages (any number higher than 15 will still return 15)
_FAKE_SETTINGS['packages_returned'] = 10
#get reviews
#*****************************
#raises APIError if True
_FAKE_SETTINGS['get_reviews_error'] = False
#number of pages of 10 reviews to return before returning the number
# specified in the reviews_returned value below
_FAKE_SETTINGS['review_pages'] = 1
#the following has no effect if get_reviews_error = True
#determines number of reviews to return
# (Accepts 0 to n but should really be between 1 and 10)
_FAKE_SETTINGS['reviews_returned'] = 3
#get review
#*****************************
#raises APIError if True
_FAKE_SETTINGS['get_review_error'] = False
#submit review
#*****************************
#raises APIError if True
_FAKE_SETTINGS['submit_review_error'] = False
#fake username(str) and review_id(int) to give back with a successful
# review
#leave as None to generate a random username and review_id
_FAKE_SETTINGS['reviewer_username'] = None
_FAKE_SETTINGS['submit_review_id'] = None
#flag review
#*****************************
#raises APIError if True
_FAKE_SETTINGS['flag_review_error'] = False
#fake username(str) to give back as 'flagger'
_FAKE_SETTINGS['flagger_username'] = None
#fake package name (str) to give back as flagged app
_FAKE_SETTINGS['flag_package_name'] = None
#submit usefulness
#*****************************
#raises APIError if True
_FAKE_SETTINGS['submit_usefulness_error'] = False
#the following has no effect if submit_usefulness_error = True
#which string to pretend the server returned
#choices are "Created", "Updated", "Not modified"
_FAKE_SETTINGS['usefulness_response_string'] = "Created"
#get usefulness
#*****************************
#raises APIError if True
_FAKE_SETTINGS['get_usefulness_error'] = False
#the following has no effect if get_usefulness_error = True
#how many usefulness votes to return
_FAKE_SETTINGS['votes_returned'] = 5
#pre-configured review ids to return in the result
#if you don't complete this or enter less review ids than votes_returned
#above, it will be random
_FAKE_SETTINGS['required_review_ids'] = [3, 6, 15]
#THE FOLLOWING SETTINGS RELATE TO LOGIN SSO FUNCTIONALITY
# LoginBackendDbusSSO
# login()
#***********************
# what to fake the login response as
# choices (strings): "successful", "failed", "denied"
_FAKE_SETTINGS['login_response'] = "successful"
# UbuntuSSOAPI
# whoami()
#***********************
# what to fake whoami response as
# choices (strings): "whoami", "error"
_FAKE_SETTINGS['whoami_response'] = "whoami"
#this only has effect if whoami_response = 'whoami'
#determines the username to return in a successful whoami
#expects a string or None (for a random username)
_FAKE_SETTINGS['whoami_username'] = None
def __init__(self, defaults=False):
'''Initialises the object and loads the settings into the
        _FAKE_SETTINGS dict. If defaults is passed as True, any existing
settings in the cache file are ignored and the cache file is
overwritten with the defaults set in the class. This is useful if
you don't want previously used settings from the cache file being
used again'''
fname = 'fake_review_settings.p'
self.LOCATION = os.path.join(SOFTWARE_CENTER_CACHE_DIR, fname)
if defaults:
self._save_settings()
else:
self._update_from_file()
def update_setting(self, key_name, new_value):
'''Takes a string (key_name) which corresponds to a setting in this
object and updates it with the value passed in (new_value).
Raises a NameError if the setting name doesn't exist'''
if not key_name in self._FAKE_SETTINGS:
raise NameError('Setting key name %s does not exist' % key_name)
else:
self._FAKE_SETTINGS[key_name] = new_value
self._save_settings()
return
def update_multiple(self, settings):
'''Takes a dict (settings) of key,value pairs to perform multiple
updates in one action, then saves. Dict being passed should contain
only keys that match settings in this object or a NameError will be
raised'''
for key, value in settings.items():
if not key in self._FAKE_SETTINGS:
raise NameError('Setting key name %s does not exist' % key)
for key, value in settings.items():
self._FAKE_SETTINGS[key] = value
self._save_settings()
return
def get_setting(self, key_name):
'''Takes a string (key_name) which corresponds to a setting in this
object, gets the latest copy of it from the file and returns the
setting. Raises a NameError if the setting name doesn't exist'''
if not key_name in self._FAKE_SETTINGS:
raise NameError('Setting %s does not exist' % key_name)
else:
self._update_from_file()
return self._FAKE_SETTINGS[key_name]
def _update_from_file(self):
'''Loads existing settings from cache file into _FAKE_SETTINGS dict'''
if os.path.exists(self.LOCATION):
try:
self._FAKE_SETTINGS = pickle.load(open(self.LOCATION))
except:
os.rename(self.LOCATION, self.LOCATION + ".fail")
return
def _save_settings(self):
"""write the dict out to cache file"""
try:
if not os.path.exists(SOFTWARE_CENTER_CACHE_DIR):
os.makedirs(SOFTWARE_CENTER_CACHE_DIR)
pickle.dump(self._FAKE_SETTINGS, open(self.LOCATION, "w"))
return True
except:
return False
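# Illustrative usage sketch (not part of the original module): how a unit test
# might prime the fake review server before exercising review code. Assumes
# the softwarecenter package is importable; the setting values are made up.
if __name__ == "__main__":
    fake = FakeReviewSettings(defaults=True)
    fake.update_multiple({
        'fake_network_delay': None,  # disable the artificial latency
        'reviews_returned': 5,       # pretend the server sends five reviews
    })
    print(fake.get_setting('reviews_returned'))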
|
sti-lyneos/shop
|
softwarecenter/backend/fake_review_settings.py
|
Python
|
lgpl-3.0
| 7,873
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
A Deterministic acyclic finite state automaton (DAFSA) is a compact
representation of an unordered word list (dictionary).
https://en.wikipedia.org/wiki/Deterministic_acyclic_finite_state_automaton
This Python program converts a list of strings to a byte array in C++:
it fetches strings and return values from a gperf file and generates a
C++ file with a byte array representing a graph that can be used as a
memory-efficient replacement for the perfect hash table.
The input strings are assumed to consist of printable 7-bit ASCII characters
and the return values are assumed to be one digit integers.
In this program a DAFSA is a diamond shaped graph starting at a common
source node and ending at a common sink node. All internal nodes contain
a label and each word is represented by the labels in one path from
the source node to the sink node.
The following Python representation is used for nodes:
Source node: [ children ]
Internal node: (label, [ children ])
Sink node: None
The graph is first compressed by prefixes like a trie. In the next step
suffixes are compressed so that the graph gets diamond shaped. Finally
one to one linked nodes are replaced by nodes with the labels joined.
The order of the operations is crucial since lookups will be performed
starting from the source with no backtracking. Thus a node must have at
most one child with a label starting by the same character. The output
is also arranged so that all jumps are to increasing addresses, thus forward
in memory.
The generated output has suffix-free decoding so that the sign of the leading
bits in a link (a reference to a child node) indicates if it has a size of one,
two or three bytes and if it is the last outgoing link from the actual node.
A node label is terminated by a byte with the leading bit set.
The generated byte array can be described by the following BNF:
<byte> ::= < 8-bit value in range [0x00-0xFF] >
<char> ::= < printable 7-bit ASCII character, byte in range [0x20-0x7F] >
<end_char> ::= < char + 0x80, byte in range [0xA0-0xFF] >
<return value> ::= < value + 0x80, byte in range [0x80-0x8F] >
<offset1> ::= < byte in range [0x00-0x3F] >
<offset2> ::= < byte in range [0x40-0x5F] >
<offset3> ::= < byte in range [0x60-0x7F] >
<end_offset1> ::= < byte in range [0x80-0xBF] >
<end_offset2> ::= < byte in range [0xC0-0xDF] >
<end_offset3> ::= < byte in range [0xE0-0xFF] >
<prefix> ::= <char>
<label> ::= <end_char>
| <char> <label>
<end_label> ::= <return_value>
| <char> <end_label>
<offset> ::= <offset1>
| <offset2> <byte>
| <offset3> <byte> <byte>
<end_offset> ::= <end_offset1>
| <end_offset2> <byte>
| <end_offset3> <byte> <byte>
<offsets> ::= <end_offset>
| <offset> <offsets>
<source> ::= <offsets>
<node> ::= <label> <offsets>
| <prefix> <node>
| <end_label>
<dafsa> ::= <source>
| <dafsa> <node>
Decoding:
<char> -> printable 7-bit ASCII character
<end_char> & 0x7F -> printable 7-bit ASCII character
<return value> & 0x0F -> integer
<offset1 & 0x3F> -> integer
((<offset2> & 0x1F>) << 8) + <byte> -> integer
((<offset3> & 0x1F>) << 16) + (<byte> << 8) + <byte> -> integer
end_offset1, end_offset2 and end_offset3 are decoded the same as offset1,
offset2 and offset3 respectively.
The first offset in a list of offsets is the distance in bytes between the
offset itself and the first child node. Subsequent offsets are the distance
between previous child node and next child node. Thus each offset links a node
to a child node. The distance is always counted between start addresses, i.e.
first byte in decoded offset or first byte in child node.
Example 1:
%%
aa, 1
a, 2
%%
The input is first parsed to a list of words:
["aa1", "a2"]
A fully expanded graph is created from the words:
source = [node1, node4]
node1 = ("a", [node2])
node2 = ("a", [node3])
node3 = ("\x01", [sink])
node4 = ("a", [node5])
node5 = ("\x02", [sink])
sink = None
Compression results in the following graph:
source = [node1]
node1 = ("a", [node2, node3])
node2 = ("\x02", [sink])
node3 = ("a\x01", [sink])
sink = None
A C++ representation of the compressed graph is generated:
const unsigned char dafsa[7] = {
0x81, 0xE1, 0x02, 0x81, 0x82, 0x61, 0x81,
};
The bytes in the generated array have the following meaning:
0: 0x81 <end_offset1> child at position 0 + (0x81 & 0x3F) -> jump to 1
1: 0xE1 <end_char> label character (0xE1 & 0x7F) -> match "a"
2: 0x02 <offset1> child at position 2 + (0x02 & 0x3F) -> jump to 4
3: 0x81 <end_offset1> child at position 4 + (0x81 & 0x3F) -> jump to 5
4: 0x82 <return_value> 0x82 & 0x0F -> return 2
5: 0x61 <char> label character 0x61 -> match "a"
6: 0x81 <return_value> 0x81 & 0x0F -> return 1
Example 2:
%%
aa, 1
bbb, 2
baa, 1
%%
The input is first parsed to a list of words:
["aa1", "bbb2", "baa1"]
Compression results in the following graph:
source = [node1, node2]
node1 = ("b", [node2, node3])
node2 = ("aa\x01", [sink])
node3 = ("bb\x02", [sink])
sink = None
A C++ representation of the compressed graph is generated:
const unsigned char dafsa[11] = {
0x02, 0x83, 0xE2, 0x02, 0x83, 0x61, 0x61, 0x81, 0x62, 0x62, 0x82,
};
The bytes in the generated array have the following meaning:
0: 0x02 <offset1> child at position 0 + (0x02 & 0x3F) -> jump to 2
1: 0x83 <end_offset1> child at position 2 + (0x83 & 0x3F) -> jump to 5
2: 0xE2 <end_char> label character (0xE2 & 0x7F) -> match "b"
3: 0x02 <offset1> child at position 3 + (0x02 & 0x3F) -> jump to 5
4: 0x83 <end_offset1> child at position 5 + (0x83 & 0x3F) -> jump to 8
5: 0x61 <char> label character 0x61 -> match "a"
6: 0x61 <char> label character 0x61 -> match "a"
7: 0x81 <return_value> 0x81 & 0x0F -> return 1
8: 0x62 <char> label character 0x62 -> match "b"
9: 0x62 <char> label character 0x62 -> match "b"
10: 0x82 <return_value> 0x82 & 0x0F -> return 2
"""
import argparse
import sys
class InputError(Exception):
"""Exception raised for errors in the input file."""
def to_dafsa(words):
"""Generates a DAFSA from a word list and returns the source node.
Each word is split into characters so that each character is represented by
a unique node. It is assumed the word list is not empty.
"""
if not words:
raise InputError('The domain list must not be empty')
def ToNodes(word):
"""Split words into characters"""
if not 0x1F < ord(word[0]) < 0x80:
raise InputError('Domain names must be printable 7-bit ASCII')
if len(word) == 1:
return chr(ord(word[0]) & 0x0F), [None]
return word[0], [ToNodes(word[1:])]
return [ToNodes(word) for word in words]
def to_words(node):
"""Generates a word list from all paths starting from an internal node."""
if not node:
return ['']
return [(node[0] + word) for child in node[1] for word in to_words(child)]
def reverse(dafsa):
"""Generates a new DAFSA that is reversed, so that the old sink node becomes
the new source node.
"""
sink = []
nodemap = {}
def dfs(node, parent):
"""Creates reverse nodes.
A new reverse node will be created for each old node. The new node will
get a reversed label and the parents of the old node as children.
"""
if not node:
sink.append(parent)
elif id(node) not in nodemap:
nodemap[id(node)] = (node[0][::-1], [parent])
for child in node[1]:
dfs(child, nodemap[id(node)])
else:
nodemap[id(node)][1].append(parent)
for node in dafsa:
dfs(node, None)
return sink
def join_labels(dafsa):
"""Generates a new DAFSA where internal nodes are merged if there is a one to
one connection.
"""
parentcount = { id(None): 2 }
nodemap = { id(None): None }
def count_parents(node):
"""Count incoming references"""
if id(node) in parentcount:
parentcount[id(node)] += 1
else:
parentcount[id(node)] = 1
for child in node[1]:
count_parents(child)
def join(node):
"""Create new nodes"""
if id(node) not in nodemap:
children = [join(child) for child in node[1]]
if len(children) == 1 and parentcount[id(node[1][0])] == 1:
child = children[0]
nodemap[id(node)] = (node[0] + child[0], child[1])
else:
nodemap[id(node)] = (node[0], children)
return nodemap[id(node)]
for node in dafsa:
count_parents(node)
return [join(node) for node in dafsa]
def join_suffixes(dafsa):
"""Generates a new DAFSA where nodes that represent the same word lists
towards the sink are merged.
"""
nodemap = { frozenset(('',)): None }
def join(node):
"""Returns a macthing node. A new node is created if no matching node
exists. The graph is accessed in dfs order.
"""
suffixes = frozenset(to_words(node))
if suffixes not in nodemap:
nodemap[suffixes] = (node[0], [join(child) for child in node[1]])
return nodemap[suffixes]
return [join(node) for node in dafsa]
def top_sort(dafsa):
"""Generates list of nodes in topological sort order."""
incoming = {}
def count_incoming(node):
"""Counts incoming references."""
if node:
if id(node) not in incoming:
incoming[id(node)] = 1
for child in node[1]:
count_incoming(child)
else:
incoming[id(node)] += 1
for node in dafsa:
count_incoming(node)
for node in dafsa:
incoming[id(node)] -= 1
waiting = [node for node in dafsa if incoming[id(node)] == 0]
nodes = []
while waiting:
node = waiting.pop()
assert incoming[id(node)] == 0
nodes.append(node)
for child in node[1]:
if child:
incoming[id(child)] -= 1
if incoming[id(child)] == 0:
waiting.append(child)
return nodes
def encode_links(children, offsets, current):
"""Encodes a list of children as one, two or three byte offsets."""
if not children[0]:
# This is an <end_label> node and no links follow such nodes
assert len(children) == 1
return []
guess = 3 * len(children)
assert children
children = sorted(children, key = lambda x: -offsets[id(x)])
while True:
offset = current + guess
buf = []
for child in children:
last = len(buf)
distance = offset - offsets[id(child)]
assert distance > 0 and distance < (1 << 21)
if distance < (1 << 6):
# A 6-bit offset: "s0xxxxxx"
buf.append(distance)
elif distance < (1 << 13):
# A 13-bit offset: "s10xxxxxxxxxxxxx"
buf.append(0x40 | (distance >> 8))
buf.append(distance & 0xFF)
else:
# A 21-bit offset: "s11xxxxxxxxxxxxxxxxxxxxx"
buf.append(0x60 | (distance >> 16))
buf.append((distance >> 8) & 0xFF)
buf.append(distance & 0xFF)
# Distance in first link is relative to following record.
# Distance in other links are relative to previous link.
offset -= distance
if len(buf) == guess:
break
guess = len(buf)
# Set most significant bit to mark end of links in this node.
buf[last] |= (1 << 7)
buf.reverse()
return buf
def encode_prefix(label):
"""Encodes a node label as a list of bytes without a trailing high byte.
This method encodes a node if there is exactly one child and the
  child follows immediately after so that no jump is needed. This label
will then be a prefix to the label in the child node.
"""
assert label
return [ord(c) for c in reversed(label)]
def encode_label(label):
"""Encodes a node label as a list of bytes with a trailing high byte >0x80.
"""
buf = encode_prefix(label)
# Set most significant bit to mark end of label in this node.
buf[0] |= (1 << 7)
return buf
def encode(dafsa):
"""Encodes a DAFSA to a list of bytes"""
output = []
offsets = {}
for node in reversed(top_sort(dafsa)):
if (len(node[1]) == 1 and node[1][0] and
(offsets[id(node[1][0])] == len(output))):
output.extend(encode_prefix(node[0]))
else:
output.extend(encode_links(node[1], offsets, len(output)))
output.extend(encode_label(node[0]))
offsets[id(node)] = len(output)
output.extend(encode_links(dafsa, offsets, len(output)))
output.reverse()
return output
def to_cxx(data):
"""Generates C++ code from a list of encoded bytes."""
text = '/* This file is generated. DO NOT EDIT!\n\n'
text += 'The byte array encodes effective tld names. See make_dafsa.py for'
text += ' documentation.'
text += '*/\n\n'
text += 'const unsigned char kDafsa[%s] = {\n' % len(data)
for i in range(0, len(data), 12):
text += ' '
text += ', '.join('0x%02x' % byte for byte in data[i:i + 12])
text += ',\n'
text += '};\n'
return text
def words_to_cxx(words):
"""Generates C++ code from a word list"""
dafsa = to_dafsa(words)
for fun in (reverse, join_suffixes, reverse, join_suffixes, join_labels):
dafsa = fun(dafsa)
return to_cxx(encode(dafsa))
def parse_gperf(infile, reverse):
"""Parses gperf file and extract strings and return code"""
lines = [line.strip() for line in infile]
# Extract strings after the first '%%' and before the second '%%'.
begin = lines.index('%%') + 1
end = lines.index('%%', begin)
lines = lines[begin:end]
for line in lines:
if line[-3:-1] != ', ':
raise InputError('Expected "domainname, <digit>", found "%s"' % line)
# Technically the DAFSA format can support return values in the range
# [0-31], but only the first three bits have any defined meaning.
if not line.endswith(('0', '1', '2', '3', '4', '5', '6', '7')):
raise InputError('Expected value to be in the range of 0-7, found "%s"' %
line[-1])
if reverse:
return [line[-4::-1] + line[-1] for line in lines]
else:
return [line[:-3] + line[-1] for line in lines]
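# Illustrative sketch (not part of the original tool): a hypothetical helper
# that runs a tiny gperf-style word list through parse_gperf() and
# words_to_cxx() without touching the command line. parse_gperf() only
# iterates over its input line by line, so a plain list of strings can stand
# in for a file object. The domain names and return values below are made up.
def _demo_words_to_cxx():
  fake_gperf = ['%%', 'aa, 1', 'bbb, 2', 'baa, 1', '%%']
  return words_to_cxx(parse_gperf(fake_gperf, reverse=False))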
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--reverse', action='store_const', const=True,
default=False)
parser.add_argument('infile', nargs='?', type=argparse.FileType('r'),
default=sys.stdin)
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w'),
default=sys.stdout)
args = parser.parse_args()
args.outfile.write(words_to_cxx(parse_gperf(args.infile, args.reverse)))
return 0
if __name__ == '__main__':
sys.exit(main())
|
nwjs/chromium.src
|
net/tools/dafsa/make_dafsa.py
|
Python
|
bsd-3-clause
| 14,621
|