| repo_name (stringlengths 5-100) | ref (stringlengths 12-67) | path (stringlengths 4-244) | copies (stringlengths 1-8) | content (stringlengths 0-1.05M) |
|---|---|---|---|---|
SM-G920P/TeamSPR_Kernel
|
refs/heads/master
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/EventClass.py
|
4653
|
# EventClass.py
#
# This is a library defining some event type classes, which can be
# used by other scripts for analyzing perf samples.
#
# Currently there are just a few classes defined as examples:
# PerfEvent is the base class for all perf event samples, PebsEvent
# is a HW-based Intel x86 PEBS event, and users can add more SW/HW
# event classes based on their requirements.
import struct
# Event types; users can add more here
EVTYPE_GENERIC = 0
EVTYPE_PEBS = 1 # Basic PEBS event
EVTYPE_PEBS_LL = 2 # PEBS event with load latency info
EVTYPE_IBS = 3
#
# Currently we don't have a good way to tell the event type other than
# by the size of the raw buffer: a raw PEBS event with load latency data
# is 176 bytes, while a pure PEBS event is 144 bytes.
#
def create_event(name, comm, dso, symbol, raw_buf):
if (len(raw_buf) == 144):
event = PebsEvent(name, comm, dso, symbol, raw_buf)
elif (len(raw_buf) == 176):
event = PebsNHM(name, comm, dso, symbol, raw_buf)
else:
event = PerfEvent(name, comm, dso, symbol, raw_buf)
return event
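# A minimal usage sketch (hypothetical: the surrounding handler and its
# arguments follow the perf-script convention and are not defined here):
#
#   event = create_event(name, comm, dso, symbol, raw_buf)
#   if event.ev_type == EVTYPE_PEBS_LL:
#       print "load latency: %d cycles" % event.lat
#   event.show()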
class PerfEvent(object):
event_num = 0
def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_GENERIC):
self.name = name
self.comm = comm
self.dso = dso
self.symbol = symbol
self.raw_buf = raw_buf
self.ev_type = ev_type
PerfEvent.event_num += 1
def show(self):
print "PMU event: name=%12s, symbol=%24s, comm=%8s, dso=%12s" % (self.name, self.symbol, self.comm, self.dso)
#
# Basic Intel PEBS (Precise Event-based Sampling) event, whose raw buffer
# contains the context info when that event happened: the EFLAGS and
# linear IP info, as well as all the registers.
#
class PebsEvent(PerfEvent):
pebs_num = 0
def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS):
tmp_buf=raw_buf[0:80]
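        # The first 80 bytes hold ten native-order unsigned 64-bit fields
        # ('Q' * 10): EFLAGS, the linear IP, then the general-purpose
        # registers, matching the unpack below.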
flags, ip, ax, bx, cx, dx, si, di, bp, sp = struct.unpack('QQQQQQQQQQ', tmp_buf)
self.flags = flags
self.ip = ip
self.ax = ax
self.bx = bx
self.cx = cx
self.dx = dx
self.si = si
self.di = di
self.bp = bp
self.sp = sp
PerfEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
PebsEvent.pebs_num += 1
del tmp_buf
#
# Intel Nehalem and Westmere support PEBS plus Load Latency info, which
# lies in the four 64-bit words written after the PEBS data:
# Status: records the IA32_PERF_GLOBAL_STATUS register value
# DLA: Data Linear Address (EIP)
# DSE: Data Source Encoding, where the latency happens, hit or miss
# in L1/L2/L3 or IO operations
# LAT: the actual latency in cycles
#
class PebsNHM(PebsEvent):
pebs_nhm_num = 0
def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS_LL):
tmp_buf=raw_buf[144:176]
status, dla, dse, lat = struct.unpack('QQQQ', tmp_buf)
self.status = status
self.dla = dla
self.dse = dse
self.lat = lat
PebsEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
PebsNHM.pebs_nhm_num += 1
del tmp_buf
|
jeffery-do/Vizdoombot
|
refs/heads/master
|
doom/lib/python3.5/site-packages/wheel/test/extension.dist/setup.py
|
92
|
from setuptools import setup, Extension
try:
    # Python 2: the built-in name 'unicode' exists; escape-decode the
    # bytes literal before re-encoding it as UTF-8.
    unicode
    def u8(s):
        return s.decode('unicode-escape').encode('utf-8')
except NameError:
    # Python 3: str is already text, so just encode it as UTF-8.
    def u8(s):
        return s.encode('utf-8')
setup(name='extension.dist',
version='0.1',
description=u8('A testing distribution \N{SNOWMAN}'),
ext_modules=[
Extension(name='extension',
sources=['extension.c'],
py_limited_api=True)
],
)
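# py_limited_api=True compiles the extension against CPython's stable ABI
# (PEP 384), so the built wheel can presumably carry the abi3 tag and be
# reused across Python 3 versions.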
|
pkill-nine/qutebrowser
|
refs/heads/qutebrowser-custom
|
qutebrowser/browser/webkit/webkittab.py
|
1
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2017 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Wrapper over our (QtWebKit) WebView."""
import sys
import functools
import xml.etree.ElementTree
import sip
from PyQt5.QtCore import (pyqtSlot, Qt, QEvent, QUrl, QPoint, QTimer, QSizeF,
QSize)
from PyQt5.QtGui import QKeyEvent
from PyQt5.QtWidgets import QApplication
from PyQt5.QtWebKitWidgets import QWebPage, QWebFrame
from PyQt5.QtWebKit import QWebSettings
from PyQt5.QtPrintSupport import QPrinter
from qutebrowser.browser import browsertab
from qutebrowser.browser.webkit import webview, tabhistory, webkitelem
from qutebrowser.browser.webkit.network import webkitqutescheme
from qutebrowser.utils import qtutils, objreg, usertypes, utils, log, debug
def init():
"""Initialize QtWebKit-specific modules."""
qapp = QApplication.instance()
log.init.debug("Initializing js-bridge...")
js_bridge = webkitqutescheme.JSBridge(qapp)
objreg.register('js-bridge', js_bridge)
class WebKitAction(browsertab.AbstractAction):
"""QtWebKit implementations related to web actions."""
action_class = QWebPage
action_base = QWebPage.WebAction
def exit_fullscreen(self):
raise browsertab.UnsupportedOperationError
def save_page(self):
"""Save the current page."""
raise browsertab.UnsupportedOperationError
class WebKitPrinting(browsertab.AbstractPrinting):
"""QtWebKit implementations related to printing."""
def _do_check(self):
if not qtutils.check_print_compat():
# WORKAROUND (remove this when we bump the requirements to 5.3.0)
raise browsertab.WebTabError(
"Printing on Qt < 5.3.0 on Windows is broken, please upgrade!")
def check_pdf_support(self):
self._do_check()
def check_printer_support(self):
self._do_check()
def check_preview_support(self):
self._do_check()
def to_pdf(self, filename):
printer = QPrinter()
printer.setOutputFileName(filename)
self.to_printer(printer)
def to_printer(self, printer, callback=None):
self._widget.print(printer)
# Can't find out whether there was an error...
if callback is not None:
callback(True)
class WebKitSearch(browsertab.AbstractSearch):
"""QtWebKit implementations related to searching on the page."""
def __init__(self, parent=None):
super().__init__(parent)
self._flags = QWebPage.FindFlags(0)
def _call_cb(self, callback, found, text, flags, caller):
"""Call the given callback if it's non-None.
Delays the call via a QTimer so the website is re-rendered in between.
Args:
callback: What to call
found: If the text was found
text: The text searched for
flags: The flags searched with
caller: Name of the caller.
"""
found_text = 'found' if found else "didn't find"
# Removing FindWrapsAroundDocument to get the same logging as with
# QtWebEngine
debug_flags = debug.qflags_key(
QWebPage, flags & ~QWebPage.FindWrapsAroundDocument,
klass=QWebPage.FindFlag)
if debug_flags != '0x0000':
flag_text = 'with flags {}'.format(debug_flags)
else:
flag_text = ''
log.webview.debug(' '.join([caller, found_text, text, flag_text])
.strip())
if callback is not None:
QTimer.singleShot(0, functools.partial(callback, found))
def clear(self):
self.search_displayed = False
# We first clear the marked text, then the highlights
self._widget.findText('')
self._widget.findText('', QWebPage.HighlightAllOccurrences)
def search(self, text, *, ignore_case=False, reverse=False,
result_cb=None):
self.search_displayed = True
flags = QWebPage.FindWrapsAroundDocument
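        # 'smart' here means smartcase: match case-sensitively only when
        # the search text contains an uppercase character.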
if ignore_case == 'smart':
if not text.islower():
flags |= QWebPage.FindCaseSensitively
elif not ignore_case:
flags |= QWebPage.FindCaseSensitively
if reverse:
flags |= QWebPage.FindBackward
# We actually search *twice* - once to highlight everything, then again
# to get a mark so we can navigate.
found = self._widget.findText(text, flags)
self._widget.findText(text, flags | QWebPage.HighlightAllOccurrences)
self.text = text
self._flags = flags
self._call_cb(result_cb, found, text, flags, 'search')
def next_result(self, *, result_cb=None):
self.search_displayed = True
found = self._widget.findText(self.text, self._flags)
self._call_cb(result_cb, found, self.text, self._flags, 'next_result')
def prev_result(self, *, result_cb=None):
self.search_displayed = True
# The int() here makes sure we get a copy of the flags.
flags = QWebPage.FindFlags(int(self._flags))
if flags & QWebPage.FindBackward:
flags &= ~QWebPage.FindBackward
else:
flags |= QWebPage.FindBackward
found = self._widget.findText(self.text, flags)
self._call_cb(result_cb, found, self.text, flags, 'prev_result')
class WebKitCaret(browsertab.AbstractCaret):
"""QtWebKit implementations related to moving the cursor/selection."""
@pyqtSlot(usertypes.KeyMode)
def _on_mode_entered(self, mode):
if mode != usertypes.KeyMode.caret:
return
settings = self._widget.settings()
settings.setAttribute(QWebSettings.CaretBrowsingEnabled, True)
self.selection_enabled = bool(self.selection())
if self._widget.isVisible():
# Sometimes the caret isn't immediately visible, but unfocusing
# and refocusing it fixes that.
self._widget.clearFocus()
self._widget.setFocus(Qt.OtherFocusReason)
# Move the caret to the first element in the viewport if there
# isn't any text which is already selected.
#
# Note: We can't use hasSelection() here, as that's always
# true in caret mode.
if not self.selection():
self._widget.page().currentFrame().evaluateJavaScript(
utils.read_file('javascript/position_caret.js'))
@pyqtSlot()
def _on_mode_left(self):
settings = self._widget.settings()
if settings.testAttribute(QWebSettings.CaretBrowsingEnabled):
if self.selection_enabled and self._widget.hasSelection():
# Remove selection if it exists
self._widget.triggerPageAction(QWebPage.MoveToNextChar)
settings.setAttribute(QWebSettings.CaretBrowsingEnabled, False)
self.selection_enabled = False
def move_to_next_line(self, count=1):
if not self.selection_enabled:
act = QWebPage.MoveToNextLine
else:
act = QWebPage.SelectNextLine
for _ in range(count):
self._widget.triggerPageAction(act)
def move_to_prev_line(self, count=1):
if not self.selection_enabled:
act = QWebPage.MoveToPreviousLine
else:
act = QWebPage.SelectPreviousLine
for _ in range(count):
self._widget.triggerPageAction(act)
def move_to_next_char(self, count=1):
if not self.selection_enabled:
act = QWebPage.MoveToNextChar
else:
act = QWebPage.SelectNextChar
for _ in range(count):
self._widget.triggerPageAction(act)
def move_to_prev_char(self, count=1):
if not self.selection_enabled:
act = QWebPage.MoveToPreviousChar
else:
act = QWebPage.SelectPreviousChar
for _ in range(count):
self._widget.triggerPageAction(act)
def move_to_end_of_word(self, count=1):
if not self.selection_enabled:
act = [QWebPage.MoveToNextWord]
if sys.platform == 'win32': # pragma: no cover
act.append(QWebPage.MoveToPreviousChar)
else:
act = [QWebPage.SelectNextWord]
if sys.platform == 'win32': # pragma: no cover
act.append(QWebPage.SelectPreviousChar)
for _ in range(count):
for a in act:
self._widget.triggerPageAction(a)
def move_to_next_word(self, count=1):
if not self.selection_enabled:
act = [QWebPage.MoveToNextWord]
if sys.platform != 'win32': # pragma: no branch
act.append(QWebPage.MoveToNextChar)
else:
act = [QWebPage.SelectNextWord]
if sys.platform != 'win32': # pragma: no branch
act.append(QWebPage.SelectNextChar)
for _ in range(count):
for a in act:
self._widget.triggerPageAction(a)
def move_to_prev_word(self, count=1):
if not self.selection_enabled:
act = QWebPage.MoveToPreviousWord
else:
act = QWebPage.SelectPreviousWord
for _ in range(count):
self._widget.triggerPageAction(act)
def move_to_start_of_line(self):
if not self.selection_enabled:
act = QWebPage.MoveToStartOfLine
else:
act = QWebPage.SelectStartOfLine
self._widget.triggerPageAction(act)
def move_to_end_of_line(self):
if not self.selection_enabled:
act = QWebPage.MoveToEndOfLine
else:
act = QWebPage.SelectEndOfLine
self._widget.triggerPageAction(act)
def move_to_start_of_next_block(self, count=1):
if not self.selection_enabled:
act = [QWebPage.MoveToNextLine,
QWebPage.MoveToStartOfBlock]
else:
act = [QWebPage.SelectNextLine,
QWebPage.SelectStartOfBlock]
for _ in range(count):
for a in act:
self._widget.triggerPageAction(a)
def move_to_start_of_prev_block(self, count=1):
if not self.selection_enabled:
act = [QWebPage.MoveToPreviousLine,
QWebPage.MoveToStartOfBlock]
else:
act = [QWebPage.SelectPreviousLine,
QWebPage.SelectStartOfBlock]
for _ in range(count):
for a in act:
self._widget.triggerPageAction(a)
def move_to_end_of_next_block(self, count=1):
if not self.selection_enabled:
act = [QWebPage.MoveToNextLine,
QWebPage.MoveToEndOfBlock]
else:
act = [QWebPage.SelectNextLine,
QWebPage.SelectEndOfBlock]
for _ in range(count):
for a in act:
self._widget.triggerPageAction(a)
def move_to_end_of_prev_block(self, count=1):
if not self.selection_enabled:
act = [QWebPage.MoveToPreviousLine, QWebPage.MoveToEndOfBlock]
else:
act = [QWebPage.SelectPreviousLine, QWebPage.SelectEndOfBlock]
for _ in range(count):
for a in act:
self._widget.triggerPageAction(a)
def move_to_start_of_document(self):
if not self.selection_enabled:
act = QWebPage.MoveToStartOfDocument
else:
act = QWebPage.SelectStartOfDocument
self._widget.triggerPageAction(act)
def move_to_end_of_document(self):
if not self.selection_enabled:
act = QWebPage.MoveToEndOfDocument
else:
act = QWebPage.SelectEndOfDocument
self._widget.triggerPageAction(act)
def toggle_selection(self):
self.selection_enabled = not self.selection_enabled
mainwindow = objreg.get('main-window', scope='window',
window=self._win_id)
mainwindow.status.set_mode_active(usertypes.KeyMode.caret, True)
def drop_selection(self):
self._widget.triggerPageAction(QWebPage.MoveToNextChar)
def has_selection(self):
return self._widget.hasSelection()
def selection(self, html=False):
if html:
return self._widget.selectedHtml()
return self._widget.selectedText()
def follow_selected(self, *, tab=False):
if not self.has_selection():
return
if QWebSettings.globalSettings().testAttribute(
QWebSettings.JavascriptEnabled):
if tab:
self._tab.data.override_target = usertypes.ClickTarget.tab
self._tab.run_js_async(
'window.getSelection().anchorNode.parentNode.click()')
else:
selection = self.selection(html=True)
try:
selected_element = xml.etree.ElementTree.fromstring(
'<html>{}</html>'.format(selection)).find('a')
except xml.etree.ElementTree.ParseError:
raise browsertab.WebTabError('Could not parse selected '
'element!')
if selected_element is not None:
try:
url = selected_element.attrib['href']
except KeyError:
raise browsertab.WebTabError('Anchor element without '
'href!')
url = self._tab.url().resolved(QUrl(url))
if tab:
self._tab.new_tab_requested.emit(url)
else:
self._tab.openurl(url)
class WebKitZoom(browsertab.AbstractZoom):
"""QtWebKit implementations related to zooming."""
def _set_factor_internal(self, factor):
self._widget.setZoomFactor(factor)
def factor(self):
return self._widget.zoomFactor()
class WebKitScroller(browsertab.AbstractScroller):
"""QtWebKit implementations related to scrolling."""
# FIXME:qtwebengine When to use the main frame, when the current one?
def pos_px(self):
return self._widget.page().mainFrame().scrollPosition()
def pos_perc(self):
return self._widget.scroll_pos
def to_point(self, point):
self._widget.page().mainFrame().setScrollPosition(point)
def delta(self, x=0, y=0):
qtutils.check_overflow(x, 'int')
qtutils.check_overflow(y, 'int')
self._widget.page().mainFrame().scroll(x, y)
def delta_page(self, x=0.0, y=0.0):
if y.is_integer():
y = int(y)
if y == 0:
pass
elif y < 0:
self.page_up(count=-y)
elif y > 0:
self.page_down(count=y)
y = 0
if x == 0 and y == 0:
return
size = self._widget.page().mainFrame().geometry()
self.delta(x * size.width(), y * size.height())
def to_perc(self, x=None, y=None):
if x is None and y == 0:
self.top()
elif x is None and y == 100:
self.bottom()
else:
for val, orientation in [(x, Qt.Horizontal), (y, Qt.Vertical)]:
if val is not None:
val = qtutils.check_overflow(val, 'int', fatal=False)
frame = self._widget.page().mainFrame()
m = frame.scrollBarMaximum(orientation)
if m == 0:
continue
frame.setScrollBarValue(orientation, int(m * val / 100))
def _key_press(self, key, count=1, getter_name=None, direction=None):
frame = self._widget.page().mainFrame()
getter = None if getter_name is None else getattr(frame, getter_name)
# FIXME:qtwebengine needed?
# self._widget.setFocus()
for _ in range(min(count, 5000)):
# Abort scrolling if the minimum/maximum was reached.
if (getter is not None and
frame.scrollBarValue(direction) == getter(direction)):
return
self._tab.key_press(key)
def up(self, count=1):
self._key_press(Qt.Key_Up, count, 'scrollBarMinimum', Qt.Vertical)
def down(self, count=1):
self._key_press(Qt.Key_Down, count, 'scrollBarMaximum', Qt.Vertical)
def left(self, count=1):
self._key_press(Qt.Key_Left, count, 'scrollBarMinimum', Qt.Horizontal)
def right(self, count=1):
self._key_press(Qt.Key_Right, count, 'scrollBarMaximum', Qt.Horizontal)
def top(self):
self._key_press(Qt.Key_Home)
def bottom(self):
self._key_press(Qt.Key_End)
def page_up(self, count=1):
self._key_press(Qt.Key_PageUp, count, 'scrollBarMinimum', Qt.Vertical)
def page_down(self, count=1):
self._key_press(Qt.Key_PageDown, count, 'scrollBarMaximum',
Qt.Vertical)
def at_top(self):
return self.pos_px().y() == 0
def at_bottom(self):
frame = self._widget.page().currentFrame()
return self.pos_px().y() >= frame.scrollBarMaximum(Qt.Vertical)
class WebKitHistory(browsertab.AbstractHistory):
"""QtWebKit implementations related to page history."""
def current_idx(self):
return self._history.currentItemIndex()
def can_go_back(self):
return self._history.canGoBack()
def can_go_forward(self):
return self._history.canGoForward()
def _item_at(self, i):
return self._history.itemAt(i)
def _go_to_item(self, item):
return self._history.goToItem(item)
def serialize(self):
return qtutils.serialize(self._history)
def deserialize(self, data):
return qtutils.deserialize(data, self._history)
def load_items(self, items):
stream, _data, user_data = tabhistory.serialize(items)
qtutils.deserialize_stream(stream, self._history)
for i, data in enumerate(user_data):
self._history.itemAt(i).setUserData(data)
cur_data = self._history.currentItem().userData()
if cur_data is not None:
if 'zoom' in cur_data:
self._tab.zoom.set_factor(cur_data['zoom'])
if ('scroll-pos' in cur_data and
self._tab.scroller.pos_px() == QPoint(0, 0)):
QTimer.singleShot(0, functools.partial(
self._tab.scroller.to_point, cur_data['scroll-pos']))
class WebKitElements(browsertab.AbstractElements):
"""QtWebKit implemementations related to elements on the page."""
def find_css(self, selector, callback, *, only_visible=False):
mainframe = self._widget.page().mainFrame()
if mainframe is None:
raise browsertab.WebTabError("No frame focused!")
elems = []
frames = webkitelem.get_child_frames(mainframe)
for f in frames:
for elem in f.findAllElements(selector):
elems.append(webkitelem.WebKitElement(elem, tab=self._tab))
if only_visible:
# pylint: disable=protected-access
elems = [e for e in elems if e._is_visible(mainframe)]
# pylint: enable=protected-access
callback(elems)
def find_id(self, elem_id, callback):
def find_id_cb(elems):
if not elems:
callback(None)
else:
callback(elems[0])
self.find_css('#' + elem_id, find_id_cb)
def find_focused(self, callback):
frame = self._widget.page().currentFrame()
if frame is None:
callback(None)
return
elem = frame.findFirstElement('*:focus')
if elem.isNull():
callback(None)
else:
callback(webkitelem.WebKitElement(elem, tab=self._tab))
def find_at_pos(self, pos, callback):
assert pos.x() >= 0
assert pos.y() >= 0
frame = self._widget.page().frameAt(pos)
if frame is None:
# This happens when we click inside the webview, but not actually
# on the QWebPage - for example when clicking the scrollbar
# sometimes.
log.webview.debug("Hit test at {} but frame is None!".format(pos))
callback(None)
return
# You'd think we have to subtract frame.geometry().topLeft() from the
# position, but it seems QWebFrame::hitTestContent wants a position
# relative to the QWebView, not to the frame. This makes no sense to
# me, but it works this way.
hitresult = frame.hitTestContent(pos)
if hitresult.isNull():
# For some reason, the whole hit result can be null sometimes (e.g.
# on doodle menu links).
log.webview.debug("Hit test result is null!")
callback(None)
return
try:
elem = webkitelem.WebKitElement(hitresult.element(), tab=self._tab)
except webkitelem.IsNullError:
# For some reason, the hit result element can be a null element
# sometimes (e.g. when clicking the timetable fields on
# http://www.sbb.ch/ ).
log.webview.debug("Hit test result element is null!")
callback(None)
return
callback(elem)
class WebKitTab(browsertab.AbstractTab):
"""A QtWebKit tab in the browser."""
def __init__(self, *, win_id, mode_manager, private, parent=None):
super().__init__(win_id=win_id, mode_manager=mode_manager,
private=private, parent=parent)
widget = webview.WebView(win_id=win_id, tab_id=self.tab_id,
private=private, tab=self)
if private:
self._make_private(widget)
self.history = WebKitHistory(self)
self.scroller = WebKitScroller(self, parent=self)
self.caret = WebKitCaret(win_id=win_id, mode_manager=mode_manager,
tab=self, parent=self)
self.zoom = WebKitZoom(win_id=win_id, parent=self)
self.search = WebKitSearch(parent=self)
self.printing = WebKitPrinting()
self.elements = WebKitElements(self)
self.action = WebKitAction()
self._set_widget(widget)
self._connect_signals()
self.backend = usertypes.Backend.QtWebKit
def _install_event_filter(self):
self._widget.installEventFilter(self._mouse_event_filter)
def _make_private(self, widget):
settings = widget.settings()
settings.setAttribute(QWebSettings.PrivateBrowsingEnabled, True)
def openurl(self, url):
self._openurl_prepare(url)
self._widget.openurl(url)
def url(self, requested=False):
frame = self._widget.page().mainFrame()
if requested:
return frame.requestedUrl()
else:
return frame.url()
def dump_async(self, callback, *, plain=False):
frame = self._widget.page().mainFrame()
if plain:
callback(frame.toPlainText())
else:
callback(frame.toHtml())
def run_js_async(self, code, callback=None, *, world=None):
if world is not None and world != usertypes.JsWorld.jseval:
log.webview.warning("Ignoring world ID {}".format(world))
document_element = self._widget.page().mainFrame().documentElement()
result = document_element.evaluateJavaScript(code)
if callback is not None:
callback(result)
def icon(self):
return self._widget.icon()
def shutdown(self):
self._widget.shutdown()
def reload(self, *, force=False):
if force:
action = QWebPage.ReloadAndBypassCache
else:
action = QWebPage.Reload
self._widget.triggerPageAction(action)
def stop(self):
self._widget.stop()
def title(self):
return self._widget.title()
def clear_ssl_errors(self):
self.networkaccessmanager().clear_all_ssl_errors()
def key_press(self, key, modifier=Qt.NoModifier):
press_evt = QKeyEvent(QEvent.KeyPress, key, modifier, 0, 0, 0)
release_evt = QKeyEvent(QEvent.KeyRelease, key, modifier,
0, 0, 0)
self.send_event(press_evt)
self.send_event(release_evt)
@pyqtSlot()
def _on_history_trigger(self):
url = self.url()
requested_url = self.url(requested=True)
self.add_history_item.emit(url, requested_url, self.title())
def set_html(self, html, base_url=QUrl()):
self._widget.setHtml(html, base_url)
def networkaccessmanager(self):
return self._widget.page().networkAccessManager()
def user_agent(self):
page = self._widget.page()
return page.userAgentForUrl(self.url())
@pyqtSlot()
def _on_frame_load_finished(self):
"""Make sure we emit an appropriate status when loading finished.
While Qt has a bool "ok" attribute for loadFinished, it always is True
when using error pages... See
https://github.com/qutebrowser/qutebrowser/issues/84
"""
self._on_load_finished(not self._widget.page().error_occurred)
@pyqtSlot()
def _on_webkit_icon_changed(self):
"""Emit iconChanged with a QIcon like QWebEngineView does."""
if sip.isdeleted(self._widget):
log.webview.debug("Got _on_webkit_icon_changed for deleted view!")
return
self.icon_changed.emit(self._widget.icon())
@pyqtSlot(QWebFrame)
def _on_frame_created(self, frame):
"""Connect the contentsSizeChanged signal of each frame."""
# FIXME:qtwebengine those could theoretically regress:
# https://github.com/qutebrowser/qutebrowser/issues/152
# https://github.com/qutebrowser/qutebrowser/issues/263
frame.contentsSizeChanged.connect(self._on_contents_size_changed)
@pyqtSlot(QSize)
def _on_contents_size_changed(self, size):
self.contents_size_changed.emit(QSizeF(size))
def _connect_signals(self):
view = self._widget
page = view.page()
frame = page.mainFrame()
page.windowCloseRequested.connect(self.window_close_requested)
page.linkHovered.connect(self.link_hovered)
page.loadProgress.connect(self._on_load_progress)
frame.loadStarted.connect(self._on_load_started)
view.scroll_pos_changed.connect(self.scroller.perc_changed)
view.titleChanged.connect(self.title_changed)
view.urlChanged.connect(self._on_url_changed)
view.shutting_down.connect(self.shutting_down)
page.networkAccessManager().sslErrors.connect(self._on_ssl_errors)
frame.loadFinished.connect(self._on_frame_load_finished)
view.iconChanged.connect(self._on_webkit_icon_changed)
page.frameCreated.connect(self._on_frame_created)
frame.contentsSizeChanged.connect(self._on_contents_size_changed)
frame.initialLayoutCompleted.connect(self._on_history_trigger)
def event_target(self):
return self._widget
|
gigq/flasktodo
|
refs/heads/master
|
jinja2/filters.py
|
199
|
# -*- coding: utf-8 -*-
"""
jinja2.filters
~~~~~~~~~~~~~~
Bundled jinja filters.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import re
import math
from random import choice
from operator import itemgetter
from itertools import imap, groupby
from jinja2.utils import Markup, escape, pformat, urlize, soft_unicode
from jinja2.runtime import Undefined
from jinja2.exceptions import FilterArgumentError, SecurityError
_word_re = re.compile(r'\w+(?u)')
def contextfilter(f):
"""Decorator for marking context dependent filters. The current
:class:`Context` will be passed as first argument.
"""
f.contextfilter = True
return f
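# A minimal sketch of a context-dependent filter (hypothetical example,
# not a filter defined in this module); the active :class:`Context`
# arrives as the first argument:
#
#   @contextfilter
#   def resolve(context, name):
#       return context.resolve(name)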
def evalcontextfilter(f):
"""Decorator for marking eval-context dependent filters. An eval
context object is passed as first argument. For more information
about the eval context, see :ref:`eval-context`.
.. versionadded:: 2.4
"""
f.evalcontextfilter = True
return f
def environmentfilter(f):
"""Decorator for marking evironment dependent filters. The current
:class:`Environment` is passed to the filter as first argument.
"""
f.environmentfilter = True
return f
def do_forceescape(value):
"""Enforce HTML escaping. This will probably double escape variables."""
if hasattr(value, '__html__'):
value = value.__html__()
return escape(unicode(value))
@evalcontextfilter
def do_replace(eval_ctx, s, old, new, count=None):
"""Return a copy of the value with all occurrences of a substring
replaced with a new one. The first argument is the substring
that should be replaced, the second is the replacement string.
If the optional third argument ``count`` is given, only the first
``count`` occurrences are replaced:
.. sourcecode:: jinja
{{ "Hello World"|replace("Hello", "Goodbye") }}
-> Goodbye World
{{ "aaaaargh"|replace("a", "d'oh, ", 2) }}
-> d'oh, d'oh, aaargh
"""
if count is None:
count = -1
if not eval_ctx.autoescape:
return unicode(s).replace(unicode(old), unicode(new), count)
    if (hasattr(old, '__html__') or hasattr(new, '__html__')) and \
            not hasattr(s, '__html__'):
s = escape(s)
else:
s = soft_unicode(s)
return s.replace(soft_unicode(old), soft_unicode(new), count)
def do_upper(s):
"""Convert a value to uppercase."""
return soft_unicode(s).upper()
def do_lower(s):
"""Convert a value to lowercase."""
return soft_unicode(s).lower()
@evalcontextfilter
def do_xmlattr(_eval_ctx, d, autospace=True):
"""Create an SGML/XML attribute string based on the items in a dict.
All values that are neither `none` nor `undefined` are automatically
escaped:
.. sourcecode:: html+jinja
<ul{{ {'class': 'my_list', 'missing': none,
'id': 'list-%d'|format(variable)}|xmlattr }}>
...
</ul>
Results in something like this:
.. sourcecode:: html
<ul class="my_list" id="list-42">
...
</ul>
    As you can see it automatically prepends a space in front of the item
    if the filter returned something, unless the second parameter is false.
"""
rv = u' '.join(
u'%s="%s"' % (escape(key), escape(value))
for key, value in d.iteritems()
if value is not None and not isinstance(value, Undefined)
)
if autospace and rv:
rv = u' ' + rv
if _eval_ctx.autoescape:
rv = Markup(rv)
return rv
def do_capitalize(s):
"""Capitalize a value. The first character will be uppercase, all others
lowercase.
"""
return soft_unicode(s).capitalize()
def do_title(s):
"""Return a titlecased version of the value. I.e. words will start with
uppercase letters, all remaining characters are lowercase.
"""
return soft_unicode(s).title()
def do_dictsort(value, case_sensitive=False, by='key'):
"""Sort a dict and yield (key, value) pairs. Because python dicts are
unsorted you may want to use this function to order them by either
key or value:
.. sourcecode:: jinja
{% for item in mydict|dictsort %}
sort the dict by key, case insensitive
        {% for item in mydict|dictsort(true) %}
sort the dict by key, case sensitive
        {% for item in mydict|dictsort(false, 'value') %}
            sort the dict by value, case insensitive
"""
if by == 'key':
pos = 0
elif by == 'value':
pos = 1
else:
raise FilterArgumentError('You can only sort by either '
'"key" or "value"')
def sort_func(item):
value = item[pos]
if isinstance(value, basestring) and not case_sensitive:
value = value.lower()
return value
return sorted(value.items(), key=sort_func)
def do_sort(value, reverse=False, case_sensitive=False):
    """Sort an iterable. Per default it sorts ascending; if you pass it
    true as the first argument it will reverse the sorting. If the
    iterable is made of strings the second parameter can be used to
    control the case sensitivity of the comparison, which is disabled
    by default.

    .. sourcecode:: jinja

        {% for item in iterable|sort %}
            ...
        {% endfor %}
    """
    if not case_sensitive:
        def sort_func(item):
            if isinstance(item, basestring):
                item = item.lower()
            return item
    else:
        sort_func = None
    return sorted(value, key=sort_func, reverse=reverse)
def do_default(value, default_value=u'', boolean=False):
"""If the value is undefined it will return the passed default value,
otherwise the value of the variable:
.. sourcecode:: jinja
{{ my_variable|default('my_variable is not defined') }}
This will output the value of ``my_variable`` if the variable was
defined, otherwise ``'my_variable is not defined'``. If you want
to use default with variables that evaluate to false you have to
set the second parameter to `true`:
.. sourcecode:: jinja
{{ ''|default('the string was empty', true) }}
"""
if (boolean and not value) or isinstance(value, Undefined):
return default_value
return value
@evalcontextfilter
def do_join(eval_ctx, value, d=u''):
"""Return a string which is the concatenation of the strings in the
sequence. The separator between elements is an empty string per
default, you can define it with the optional parameter:
.. sourcecode:: jinja
{{ [1, 2, 3]|join('|') }}
-> 1|2|3
{{ [1, 2, 3]|join }}
-> 123
"""
    # no automatic escaping? joining is a lot easier then
if not eval_ctx.autoescape:
return unicode(d).join(imap(unicode, value))
# if the delimiter doesn't have an html representation we check
# if any of the items has. If yes we do a coercion to Markup
if not hasattr(d, '__html__'):
value = list(value)
do_escape = False
for idx, item in enumerate(value):
if hasattr(item, '__html__'):
do_escape = True
else:
value[idx] = unicode(item)
if do_escape:
d = escape(d)
else:
d = unicode(d)
return d.join(value)
    # no html involved, do normal joining
return soft_unicode(d).join(imap(soft_unicode, value))
def do_center(value, width=80):
"""Centers the value in a field of a given width."""
return unicode(value).center(width)
@environmentfilter
def do_first(environment, seq):
"""Return the first item of a sequence."""
try:
return iter(seq).next()
except StopIteration:
return environment.undefined('No first item, sequence was empty.')
@environmentfilter
def do_last(environment, seq):
"""Return the last item of a sequence."""
try:
return iter(reversed(seq)).next()
except StopIteration:
return environment.undefined('No last item, sequence was empty.')
@environmentfilter
def do_random(environment, seq):
"""Return a random item from the sequence."""
try:
return choice(seq)
except IndexError:
return environment.undefined('No random item, sequence was empty.')
def do_filesizeformat(value, binary=False):
"""Format the value like a 'human-readable' file size (i.e. 13 KB,
4.1 MB, 102 bytes, etc). Per default decimal prefixes are used (mega,
giga, etc.), if the second parameter is set to `True` the binary
prefixes are used (mebi, gibi).
"""
bytes = float(value)
base = binary and 1024 or 1000
middle = binary and 'i' or ''
if bytes < base:
return "%d Byte%s" % (bytes, bytes != 1 and 's' or '')
elif bytes < base * base:
return "%.1f K%sB" % (bytes / base, middle)
elif bytes < base * base * base:
return "%.1f M%sB" % (bytes / (base * base), middle)
return "%.1f G%sB" % (bytes / (base * base * base), middle)
def do_pprint(value, verbose=False):
"""Pretty print a variable. Useful for debugging.
With Jinja 1.2 onwards you can pass it a parameter. If this parameter
is truthy the output will be more verbose (this requires `pretty`)
"""
return pformat(value, verbose=verbose)
@evalcontextfilter
def do_urlize(eval_ctx, value, trim_url_limit=None, nofollow=False):
"""Converts URLs in plain text into clickable links.
If you pass the filter an additional integer it will shorten the urls
to that number. Also a third argument exists that makes the urls
"nofollow":
.. sourcecode:: jinja
{{ mytext|urlize(40, true) }}
links are shortened to 40 chars and defined with rel="nofollow"
"""
rv = urlize(value, trim_url_limit, nofollow)
if eval_ctx.autoescape:
rv = Markup(rv)
return rv
def do_indent(s, width=4, indentfirst=False):
"""Return a copy of the passed string, each line indented by
4 spaces. The first line is not indented. If you want to
change the number of spaces or indent the first line too
you can pass additional parameters to the filter:
.. sourcecode:: jinja
{{ mytext|indent(2, true) }}
indent by two spaces and indent the first line too.
"""
indention = u' ' * width
rv = (u'\n' + indention).join(s.splitlines())
if indentfirst:
rv = indention + rv
return rv
def do_truncate(s, length=255, killwords=False, end='...'):
"""Return a truncated copy of the string. The length is specified
with the first parameter which defaults to ``255``. If the second
parameter is ``true`` the filter will cut the text at length. Otherwise
it will try to save the last word. If the text was in fact
truncated it will append an ellipsis sign (``"..."``). If you want a
different ellipsis sign than ``"..."`` you can specify it using the
third parameter.
    .. sourcecode:: jinja
{{ mytext|truncate(300, false, '»') }}
truncate mytext to 300 chars, don't split up words, use a
right pointing double arrow as ellipsis sign.
"""
if len(s) <= length:
return s
elif killwords:
return s[:length] + end
words = s.split(' ')
result = []
m = 0
for word in words:
m += len(word) + 1
if m > length:
break
result.append(word)
result.append(end)
return u' '.join(result)
def do_wordwrap(s, width=79, break_long_words=True):
"""
Return a copy of the string passed to the filter wrapped after
``79`` characters. You can override this default using the first
parameter. If you set the second parameter to `false` Jinja will not
split words apart if they are longer than `width`.
"""
import textwrap
return u'\n'.join(textwrap.wrap(s, width=width, expand_tabs=False,
replace_whitespace=False,
break_long_words=break_long_words))
def do_wordcount(s):
"""Count the words in that string."""
return len(_word_re.findall(s))
def do_int(value, default=0):
"""Convert the value into an integer. If the
conversion doesn't work it will return ``0``. You can
override this default using the first parameter.
"""
try:
return int(value)
except (TypeError, ValueError):
# this quirk is necessary so that "42.23"|int gives 42.
try:
return int(float(value))
except (TypeError, ValueError):
return default
def do_float(value, default=0.0):
"""Convert the value into a floating point number. If the
conversion doesn't work it will return ``0.0``. You can
override this default using the first parameter.
"""
try:
return float(value)
except (TypeError, ValueError):
return default
def do_format(value, *args, **kwargs):
"""
Apply python string formatting on an object:
.. sourcecode:: jinja
{{ "%s - %s"|format("Hello?", "Foo!") }}
-> Hello? - Foo!
"""
if args and kwargs:
raise FilterArgumentError('can\'t handle positional and keyword '
'arguments at the same time')
return soft_unicode(value) % (kwargs or args)
def do_trim(value):
"""Strip leading and trailing whitespace."""
return soft_unicode(value).strip()
def do_striptags(value):
"""Strip SGML/XML tags and replace adjacent whitespace by one space.
"""
if hasattr(value, '__html__'):
value = value.__html__()
return Markup(unicode(value)).striptags()
def do_slice(value, slices, fill_with=None):
"""Slice an iterator and return a list of lists containing
those items. Useful if you want to create a div containing
three ul tags that represent columns:
.. sourcecode:: html+jinja
<div class="columwrapper">
{%- for column in items|slice(3) %}
<ul class="column-{{ loop.index }}">
{%- for item in column %}
<li>{{ item }}</li>
{%- endfor %}
</ul>
{%- endfor %}
</div>
If you pass it a second argument it's used to fill missing
values on the last iteration.
"""
seq = list(value)
length = len(seq)
items_per_slice = length // slices
slices_with_extra = length % slices
offset = 0
for slice_number in xrange(slices):
start = offset + slice_number * items_per_slice
if slice_number < slices_with_extra:
offset += 1
end = offset + (slice_number + 1) * items_per_slice
tmp = seq[start:end]
if fill_with is not None and slice_number >= slices_with_extra:
tmp.append(fill_with)
yield tmp
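# For example (illustrative): list(do_slice(range(10), 3)) yields three
# lists of 4, 3 and 3 items; the extra items go to the leading slices.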
def do_batch(value, linecount, fill_with=None):
"""
A filter that batches items. It works pretty much like `slice`
just the other way round. It returns a list of lists with the
given number of items. If you provide a second parameter this
is used to fill missing items. See this example:
.. sourcecode:: html+jinja
<table>
{%- for row in items|batch(3, ' ') %}
<tr>
{%- for column in row %}
<td>{{ column }}</td>
{%- endfor %}
</tr>
{%- endfor %}
</table>
"""
result = []
tmp = []
for item in value:
if len(tmp) == linecount:
yield tmp
tmp = []
tmp.append(item)
if tmp:
if fill_with is not None and len(tmp) < linecount:
tmp += [fill_with] * (linecount - len(tmp))
yield tmp
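# For example (illustrative): list(do_batch(range(10), 3, fill_with=0))
# yields [0, 1, 2], [3, 4, 5], [6, 7, 8] and the padded batch [9, 0, 0].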
def do_round(value, precision=0, method='common'):
"""Round the number to a given precision. The first
parameter specifies the precision (default is ``0``), the
second the rounding method:
- ``'common'`` rounds either up or down
- ``'ceil'`` always rounds up
- ``'floor'`` always rounds down
If you don't specify a method ``'common'`` is used.
.. sourcecode:: jinja
{{ 42.55|round }}
-> 43.0
{{ 42.55|round(1, 'floor') }}
-> 42.5
Note that even if rounded to 0 precision, a float is returned. If
you need a real integer, pipe it through `int`:
.. sourcecode:: jinja
{{ 42.55|round|int }}
-> 43
"""
if not method in ('common', 'ceil', 'floor'):
raise FilterArgumentError('method must be common, ceil or floor')
if precision < 0:
        raise FilterArgumentError('precision must be a positive integer '
'or zero.')
if method == 'common':
return round(value, precision)
func = getattr(math, method)
if precision:
        return func(value * (10 ** precision)) / (10 ** precision)
else:
return func(value)
@environmentfilter
def do_groupby(environment, value, attribute):
"""Group a sequence of objects by a common attribute.
If you for example have a list of dicts or objects that represent persons
with `gender`, `first_name` and `last_name` attributes and you want to
group all users by genders you can do something like the following
snippet:
.. sourcecode:: html+jinja
<ul>
{% for group in persons|groupby('gender') %}
<li>{{ group.grouper }}<ul>
{% for person in group.list %}
<li>{{ person.first_name }} {{ person.last_name }}</li>
{% endfor %}</ul></li>
{% endfor %}
</ul>
Additionally it's possible to use tuple unpacking for the grouper and
list:
.. sourcecode:: html+jinja
<ul>
{% for grouper, list in persons|groupby('gender') %}
...
{% endfor %}
</ul>
As you can see the item we're grouping by is stored in the `grouper`
attribute and the `list` contains all the objects that have this grouper
in common.
"""
expr = lambda x: environment.getitem(x, attribute)
return sorted(map(_GroupTuple, groupby(sorted(value, key=expr), expr)))
class _GroupTuple(tuple):
__slots__ = ()
grouper = property(itemgetter(0))
list = property(itemgetter(1))
def __new__(cls, (key, value)):
return tuple.__new__(cls, (key, list(value)))
def do_list(value):
"""Convert the value into a list. If it was a string the returned list
will be a list of characters.
"""
return list(value)
def do_mark_safe(value):
"""Mark the value as safe which means that in an environment with automatic
escaping enabled this variable will not be escaped.
"""
return Markup(value)
def do_mark_unsafe(value):
"""Mark a value as unsafe. This is the reverse operation for :func:`safe`."""
return unicode(value)
def do_reverse(value):
"""Reverse the object or return an iterator the iterates over it the other
way round.
"""
if isinstance(value, basestring):
return value[::-1]
try:
return reversed(value)
except TypeError:
try:
rv = list(value)
rv.reverse()
return rv
except TypeError:
raise FilterArgumentError('argument must be iterable')
@environmentfilter
def do_attr(environment, obj, name):
"""Get an attribute of an object. ``foo|attr("bar")`` works like
``foo["bar"]`` just that always an attribute is returned and items are not
looked up.
See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
"""
try:
name = str(name)
except UnicodeError:
pass
else:
try:
value = getattr(obj, name)
except AttributeError:
pass
else:
if environment.sandboxed and not \
environment.is_safe_attribute(obj, name, value):
return environment.unsafe_undefined(obj, name)
return value
return environment.undefined(obj=obj, name=name)
FILTERS = {
'attr': do_attr,
'replace': do_replace,
'upper': do_upper,
'lower': do_lower,
'escape': escape,
'e': escape,
'forceescape': do_forceescape,
'capitalize': do_capitalize,
'title': do_title,
'default': do_default,
'd': do_default,
'join': do_join,
'count': len,
'dictsort': do_dictsort,
'sort': do_sort,
'length': len,
'reverse': do_reverse,
'center': do_center,
'indent': do_indent,
'first': do_first,
'last': do_last,
'random': do_random,
'filesizeformat': do_filesizeformat,
'pprint': do_pprint,
'truncate': do_truncate,
'wordwrap': do_wordwrap,
'wordcount': do_wordcount,
'int': do_int,
'float': do_float,
'string': soft_unicode,
'list': do_list,
'urlize': do_urlize,
'format': do_format,
'trim': do_trim,
'striptags': do_striptags,
'slice': do_slice,
'batch': do_batch,
'sum': sum,
'abs': abs,
'round': do_round,
'groupby': do_groupby,
'safe': do_mark_safe,
'xmlattr': do_xmlattr
}
|
arbitrahj/django-timepiece
|
refs/heads/develop
|
timepiece/entries/tests/test_timesheet.py
|
2
|
import datetime
from dateutil.relativedelta import relativedelta
from decimal import Decimal
import random
from six.moves.urllib.parse import urlencode
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.utils import timezone
from django.test import TestCase
from timepiece import utils
from timepiece.tests.base import ViewTestMixin, LogTimeMixin
from timepiece.tests import factories
from timepiece.crm.utils import grouped_totals
from timepiece.entries.models import Activity, Entry
from timepiece.entries.forms import ClockInForm
class EditableTest(TestCase):
def setUp(self):
super(EditableTest, self).setUp()
self.user = factories.User()
self.project = factories.Project(
type__enable_timetracking=True, status__enable_timetracking=True)
self.entry = factories.Entry(**{
'user': self.user,
'project': self.project,
'start_time': timezone.now() - relativedelta(days=6),
'end_time': timezone.now() - relativedelta(days=6),
'seconds_paused': 0,
'status': Entry.VERIFIED,
})
self.entry2 = factories.Entry(**{
'user': self.user,
'project': self.project,
'start_time': timezone.now() - relativedelta(days=2),
'end_time': timezone.now() - relativedelta(days=2),
'seconds_paused': 0,
'status': Entry.UNVERIFIED,
})
def testUnEditable(self):
self.assertFalse(self.entry.is_editable)
def testEditable(self):
self.assertTrue(self.entry2.is_editable)
class MyLedgerTest(ViewTestMixin, LogTimeMixin, TestCase):
def setUp(self):
super(MyLedgerTest, self).setUp()
self.user = factories.User()
self.user2 = factories.User()
self.superuser = factories.Superuser()
self.devl_activity = factories.Activity(billable=True)
self.activity = factories.Activity()
self.url = reverse('view_user_timesheet', args=(self.user.pk,))
def login_with_permissions(self):
view_entry_summary = Permission.objects.get(
codename='view_entry_summary')
user = factories.User()
user.user_permissions.add(view_entry_summary)
user.save()
self.login_user(user)
def test_timesheet_view_permission(self):
"""A user with the correct permissions should see the menu"""
self.login_with_permissions()
response = self.client.get(self.url)
self.assertEquals(response.status_code, 200)
self.assertTrue('user' in response.context['year_month_form'].fields)
def test_timesheet_view_no_permission(self):
"""A regular user should not see the user menu"""
self.login_user(self.user)
response = self.client.get(self.url)
        self.assertEquals(response.status_code, 200)
self.assertFalse('user' in response.context['year_month_form'].fields)
def testEmptyTimeSheet(self):
self.login_user(self.user)
response = self.client.get(self.url)
self.assertEquals(response.status_code, 200)
self.assertEquals(list(response.context['entries']), [])
def testEmptyHourlySummary(self):
self.login_user(self.user)
now = timezone.now()
empty_month = now + relativedelta(months=1)
data = {
'year': empty_month.year,
'month': empty_month.month,
}
url = reverse('view_user_timesheet', args=[self.user.pk])
response = self.client.get(url, data)
self.assertEquals(response.status_code, 200)
self.assertEquals(response.context['grouped_totals'], '')
def testNotMyLedger(self):
self.login_user(self.user2)
response = self.client.get(self.url)
self.assertEquals(response.status_code, 403)
def testNoLedger(self):
self.login_user(self.user2)
self.url = reverse('dashboard')
try:
self.client.get(self.url)
except Exception as e:
self.fail(e)
def make_entries(self):
self.p1 = factories.BillableProject(name='1')
self.p2 = factories.NonbillableProject(name='2')
self.p4 = factories.BillableProject(name='4')
self.p3 = factories.NonbillableProject(name='1')
days = [
utils.add_timezone(datetime.datetime(2011, 1, 1)),
utils.add_timezone(datetime.datetime(2011, 1, 28)),
utils.add_timezone(datetime.datetime(2011, 1, 31)),
utils.add_timezone(datetime.datetime(2011, 2, 1)),
timezone.now(),
]
self.log_time(project=self.p1, start=days[0], delta=(1, 0))
self.log_time(project=self.p2, start=days[0], delta=(1, 0))
self.log_time(project=self.p4, start=days[0], delta=(1, 0))
self.log_time(project=self.p1, start=days[1], delta=(1, 0))
self.log_time(project=self.p3, start=days[1], delta=(1, 0))
self.log_time(project=self.p4, start=days[1], delta=(1, 0))
self.log_time(project=self.p1, start=days[2], delta=(1, 0))
self.log_time(project=self.p2, start=days[2], delta=(1, 0))
self.log_time(project=self.p4, start=days[2], delta=(1, 0))
self.log_time(project=self.p1, start=days[3], delta=(1, 0))
self.log_time(project=self.p3, start=days[3], delta=(1, 0))
self.log_time(project=self.p4, start=days[3], delta=(1, 0))
self.log_time(project=self.p1, start=days[4], delta=(1, 0))
self.log_time(project=self.p3, start=days[4], delta=(1, 0))
self.log_time(project=self.p4, start=days[4], delta=(1, 0))
def testCurrentTimeSheet(self):
self.login_user(self.user)
self.make_entries()
response = self.client.get(self.url)
self.assertEquals(response.status_code, 200)
self.assertEqual(len(response.context['entries']), 3)
self.assertEqual(response.context['summary']['total'], Decimal(3))
def testOldTimeSheet(self):
self.login_user(self.user)
self.make_entries()
data = {
'month': 1,
'year': 2011,
}
response = self.client.get(self.url, data)
self.assertEquals(response.status_code, 200)
self.assertEqual(len(response.context['entries']), 9)
self.assertEqual(response.context['summary']['total'], Decimal(9))
class ClockInTest(ViewTestMixin, TestCase):
def setUp(self):
super(ClockInTest, self).setUp()
self.user = factories.User()
self.user2 = factories.User()
self.superuser = factories.Superuser()
permissions = Permission.objects.filter(
content_type=ContentType.objects.get_for_model(Entry),
codename__in=('can_clock_in', 'can_clock_out',
'can_pause', 'change_entry')
)
self.user.user_permissions = permissions
self.user2.user_permissions = permissions
self.user.save()
self.user2.save()
self.activity = factories.Activity(code='WRK', name='Work')
self.devl_activity = factories.Activity(
code='devl', name='development', billable=True)
self.sick_activity = factories.Activity(
code="sick", name="sick/personal", billable=False)
self.activity_group_all = factories.ActivityGroup(name='All')
self.activity_group_work = factories.ActivityGroup(name='Client work')
activities = Activity.objects.all()
for activity in activities:
activity.activity_group.add(self.activity_group_all)
if activity != self.sick_activity:
activity.activity_group.add(self.activity_group_work)
self.business = factories.Business()
status = factories.StatusAttribute(
label='Current', enable_timetracking=True)
type_ = factories.TypeAttribute(
label='Web Sites', enable_timetracking=True)
self.project = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user, activity_group=self.activity_group_work)
self.project2 = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user2, activity_group=self.activity_group_all)
factories.ProjectRelationship(user=self.user, project=self.project)
self.location = factories.Location()
self.url = reverse('clock_in')
self.now = timezone.now()
self.ten_min_ago = self.now - relativedelta(minutes=10)
self.clock_in_form = {
'project': self.project.pk,
'location': self.location.pk,
'activity': self.devl_activity.pk,
'start_time_0': self.ten_min_ago.strftime('%m/%d/%Y'),
'start_time_1': self.ten_min_ago.strftime('%H:%M:%S'),
}
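        # NOTE: start_time_0/start_time_1 are the date and time halves of a
        # split start_time field (the SplitDateTimeWidget convention, which
        # ClockInForm presumably follows).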
def testClockIn(self):
"""Test the simplest clock in scenario"""
self.login_user(self.user)
data = self.clock_in_form
response = self.client.post(self.url, data, follow=True)
# Clock in form submission leads to the dashboard page
# with one active entry
self.assertRedirects(response, reverse('dashboard'),
status_code=302, target_status_code=200)
entries = Entry.objects.filter(
end_time__isnull=True, user=self.user
)
self.assertEqual(entries.count(), 1)
def testClockInAutoOut(self):
"""
Clocking in during an active entry automatically clocks out the current
entry one second before the new entry.
"""
self.login_user(self.user)
factories.Entry(user=self.user, start_time=self.ten_min_ago)
data = self.clock_in_form
data.update({
'start_time_0': self.now.strftime('%m/%d/%Y'),
'start_time_1': self.now.strftime('%H:%M:%S'),
})
self.client.post(self.url, data)
entries = Entry.objects.all()
# These clock in times do not overlap
for entry in entries:
if entry.is_overlapping():
self.fail('Overlapping Times')
# There is one closed entry and open current entry
closed_entry = entries.get(end_time__isnull=False)
current_entry = entries.get(end_time__isnull=True)
# The current start time is one second after the closed entry's end time
self.assertEqual(closed_entry.end_time + relativedelta(seconds=1),
current_entry.start_time)
def testClockInManyActive(self):
"""
There should never be more than one active entry. If this happens,
a 500 error should be raised so that we are notified of the situation.
"""
self.login_user(self.user)
entry1 = factories.Entry(**{
'user': self.user,
'start_time': self.ten_min_ago,
})
entry2 = factories.Entry(**{
'user': self.user,
'start_time': self.now - relativedelta(minutes=20),
})
data = self.clock_in_form
data.update({
'start_time_0': self.now.strftime('%m/%d/%Y'),
'start_time_1': self.now.strftime('%H:%M:%S'),
})
try:
self.client.post(self.url, data)
except utils.ActiveEntryError as e:
self.assertEqual(str(e), "Only one active entry is allowed.")
else:
self.fail("Only one active entry should be allowed.")
self.assertEqual(Entry.objects.count(), 2)
self.assertEqual(Entry.objects.get(pk=entry1.pk), entry1)
self.assertEqual(Entry.objects.get(pk=entry2.pk), entry2)
def testClockInCurrentStatus(self):
"""Verify the status of the current entry shows what is expected"""
self.login_user(self.user)
entry1 = factories.Entry(**{
'user': self.user,
'start_time': self.ten_min_ago,
})
data = self.clock_in_form
data.update({
'start_time_0': self.now.strftime('%m/%d/%Y'),
'start_time_1': self.now.strftime('%H:%M:%S'),
})
response = self.client.get(self.url, data)
self.assertEqual(response.context['active'], entry1)
def testClockInPause(self):
"""
Test that the user can clock in while the current entry is paused.
The current entry will be clocked out.
"""
self.login_user(self.user)
entry1 = factories.Entry(**{
'user': self.user,
'start_time': self.ten_min_ago,
})
e_id = Entry.objects.get(pk=entry1.id)
e_id.pause()
data = self.clock_in_form
data.update({
'start_time_0': self.now.strftime('%m/%d/%Y'),
'start_time_1': self.now.strftime('%H:%M:%S'),
'active_comment': 'test comment',
})
self.client.post(self.url, data, follow=True)
# obtain entry1 now that it is closed. The hours should be recorded
e_id = Entry.objects.get(pk=entry1.id)
self.assertTrue(e_id.is_closed)
self.assertTrue(e_id.hours)
self.assertEqual(e_id.comments, 'test comment')
def testClockInBlock(self):
"""
The user cannot clock in to a time that is already logged
"""
self.login_user(self.user)
entry1_data = {
'user': self.user,
'project': self.project,
'activity': self.devl_activity,
'start_time': self.ten_min_ago,
'end_time': self.now,
}
entry1 = factories.Entry(**entry1_data)
entry1_data.update({
'st_str': self.ten_min_ago.strftime('%H:%M:%S'),
'end_str': self.now.strftime('%H:%M:%S'),
})
blocked_start_time = entry1.start_time + relativedelta(minutes=5)
data = self.clock_in_form
data.update({
'start_time_0': blocked_start_time.strftime('%m/%d/%Y'),
'start_time_1': blocked_start_time.strftime('%H:%M:%S'),
})
# This clock in attempt should be blocked by entry1
response = self.client.post(self.url, data)
form = response.context['form']
self.assertEquals(len(form.errors), 1, form.errors)
self.assertTrue('__all__' in form.errors, form.errors.keys())
def testClockInSameTime(self):
"""
Test that the user cannot clock in with the same start time as the
active entry
"""
self.login_user(self.user)
entry1_data = {
'user': self.user,
'start_time': self.now,
'project': self.project,
'activity': self.devl_activity,
}
entry1 = factories.Entry(**entry1_data)
entry1_data.update({
'st_str': self.now.strftime('%H:%M:%S')
})
data = self.clock_in_form
data.update({
'start_time_0': entry1.start_time.strftime('%m/%d/%Y'),
'start_time_1': entry1.start_time.strftime('%H:%M:%S'),
})
# This clock in attempt should be blocked by entry1 (same start time)
response = self.client.post(self.url, data)
self.assertFormError(response, 'form', None,
'Please enter a valid start time')
self.assertFormError(
response, 'form', 'start_time',
'The start time is on or before the current entry: ' +
'%(project)s - %(activity)s starting at %(st_str)s' % entry1_data)
def testClockInBeforeCurrent(self):
"""
Test that the user cannot clock in with a start time before the active
entry
"""
self.login_user(self.user)
entry1_data = {
'user': self.user,
'project': self.project,
'activity': self.devl_activity,
'start_time': self.ten_min_ago,
}
entry1 = factories.Entry(**entry1_data)
entry1_data.update({
'st_str': self.ten_min_ago.strftime('%H:%M:%S')
})
before_entry1 = entry1.start_time - relativedelta(minutes=5)
data = self.clock_in_form
data.update({
'start_time_0': before_entry1.strftime('%m/%d/%Y'),
'start_time_1': before_entry1.strftime('%H:%M:%S'),
})
# This clock in attempt should be blocked by entry1
# (It is before the start time of the current entry)
response = self.client.post(self.url, data)
form = response.context['form']
self.assertEquals(len(form.errors), 2, form.errors)
        self.assertTrue('start_time' in form.errors, form.errors.keys())
        self.assertTrue('__all__' in form.errors, form.errors.keys())
def testClockInActiveTooLong(self):
"""
Test that if the active entry is too long, the clock in form will
invalidate
"""
self.login_user(self.user)
entry1 = factories.Entry(**{
'user': self.user,
'start_time': self.now - relativedelta(hours=13),
})
end_time = self.now - relativedelta(seconds=1)
data = self.clock_in_form
data.update({
'start_time_0': self.now.strftime('%m/%d/%Y'),
'start_time_1': self.now.strftime('%H:%M:%S'),
})
response = self.client.post(self.url, data)
err_msg = 'Ending time exceeds starting time by 12 hours ' \
'or more for {0} on {1} at {2} to {3} at {4}.'.format(
entry1.project,
entry1.start_time.strftime('%m/%d/%Y'),
entry1.start_time.strftime('%H:%M:%S'),
end_time.strftime('%m/%d/%Y'),
end_time.strftime('%H:%M:%S')
)
self.assertFormError(response, 'form', None, err_msg)
def test_clockin_error_active_entry(self):
"""
If you have an active entry and clock in to another,
you should not be clocked out of the current active entry
if the clock in form contains errors
"""
self.login_user(self.user)
# Create a valid entry and follow the redirect to the homepage
response = self.client.post(self.url, self.clock_in_form, follow=True)
self.assertEquals(response.status_code, 200)
self.assertTrue(response.context['messages'])
data = self.clock_in_form
data.update({'start_time_0': None})
response = self.client.post(self.url, data)
msg = 'Enter a valid date/time.'
self.assertFormError(response, 'form', 'start_time', msg)
active = Entry.objects.get()
self.assertIsNone(active.end_time)
def test_clockin_correct_active_entry(self):
"""
        If you clock in with an active entry, that entry
should be clocked out
"""
self.login_user(self.user)
# Create a valid entry and follow the redirect to the homepage
response = self.client.post(self.url, self.clock_in_form, follow=True)
self.assertEquals(response.status_code, 200)
self.assertTrue(response.context['messages'])
active = Entry.objects.get()
data = self.clock_in_form
start_time = self.now + relativedelta(seconds=10)
data.update({
'start_time_0': start_time.strftime('%m/%d/%Y'),
'start_time_1': start_time.strftime('%H:%M:%S')
})
response = self.client.post(self.url, data)
active = Entry.objects.get(pk=active.pk)
self.assertIsNotNone(active.end_time)
def testProjectListFiltered(self):
self.login_user(self.user)
response = self.client.get(self.url)
self.assertEquals(response.status_code, 200)
projects = list(response.context['form'].fields['project'].queryset)
self.assertTrue(self.project in projects)
self.assertFalse(self.project2 in projects)
self.project.status.enable_timetracking = False
self.project.status.save()
response = self.client.get(self.url)
projects = list(response.context['form'].fields['project'].queryset)
self.assertTrue(self.project not in projects)
def testClockInLogin(self):
response = self.client.get(self.url)
self.assertEquals(response.status_code, 302)
self.login_user(self.user)
response = self.client.get(self.url)
self.assertEquals(response.status_code, 200)
def testClockInUnauthorizedProject(self):
self.login_user(self.user)
data = self.clock_in_form
data.update({'project': self.project2.id})
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 200)
self.assertTrue(response.context['form'].errors)
err_msg = 'Select a valid choice. That choice is not one of the ' + \
'available choices.'
self.assertFormError(response, 'form', 'project', err_msg)
def testClockInBadActivity(self):
self.login_user(self.user)
data = self.clock_in_form
data.update({
'project': self.project.id,
'activity': self.sick_activity.id,
})
response = self.client.post(self.url, data)
err_msg = 'sick/personal is not allowed for this project. Please '
err_msg += 'choose among development, and Work'
self.assertFormError(response, 'form', None, err_msg)
def test_clock_in_active_comments(self):
"""
Comments left from editing the current active entry should appear
if you are clocking in
"""
entry = factories.Entry(**{
'user': self.user,
'start_time': self.ten_min_ago
})
entry.comments = 'Some comments'
entry.save()
self.login_user(self.user)
response = self.client.get(self.url)
self.assertContains(response, 'Some comments')
class AutoActivityTest(ViewTestMixin, LogTimeMixin, TestCase):
"""Test the initial value chosen for activity on clock in form"""
def setUp(self):
super(AutoActivityTest, self).setUp()
self.user = factories.User()
self.user2 = factories.User()
self.superuser = factories.Superuser()
permissions = Permission.objects.filter(
content_type=ContentType.objects.get_for_model(Entry),
codename__in=('can_clock_in', 'can_clock_out',
'can_pause', 'change_entry')
)
self.user.user_permissions = permissions
self.user2.user_permissions = permissions
self.user.save()
self.user2.save()
self.activity = factories.Activity(code='WRK', name='Work')
self.devl_activity = factories.Activity(
code='devl', name='development', billable=True)
self.sick_activity = factories.Activity(
code="sick", name="sick/personal", billable=False)
self.activity_group_all = factories.ActivityGroup(name='All')
self.activity_group_work = factories.ActivityGroup(name='Client work')
activities = Activity.objects.all()
for activity in activities:
activity.activity_group.add(self.activity_group_all)
if activity != self.sick_activity:
activity.activity_group.add(self.activity_group_work)
self.business = factories.Business()
status = factories.StatusAttribute(
label='Current', enable_timetracking=True)
type_ = factories.TypeAttribute(
label='Web Sites', enable_timetracking=True)
self.project = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user, activity_group=self.activity_group_work)
self.project2 = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user2, activity_group=self.activity_group_all)
factories.ProjectRelationship(user=self.user, project=self.project)
self.location = factories.Location()
def get_activity(self, project=None):
if not project:
project = self.project
initial = {'project': project.id}
form = ClockInForm(user=self.user, initial=initial)
return form.initial['activity']
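    # Note for readers: get_activity builds a ClockInForm the way the view
    # presumably does -- passing initial={'project': ...} lets the form pick
    # the activity from the user's most recent entry on that project, falling
    # back to None when there is no history. The tests below pin that down.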
def testNewWorker(self):
"""The worker has 0 entries on this project. Activity should = None"""
self.login_user(self.user)
self.assertEqual(self.get_activity(), None)
def testLastWorkedOneEntry(self):
"""The worker has one previous entry on the project"""
self.login_user(self.user)
self.log_time(project=self.project, activity=self.devl_activity)
self.assertEqual(self.get_activity(), self.devl_activity.id)
def testLastWorkedSeveralEntries(self):
"""The worker has several entries on a project. Use the most recent"""
self.login_user(self.user)
for day in range(0, 10):
this_day = utils.add_timezone(datetime.datetime(2011, 1, 1))
this_day += relativedelta(days=day)
activity = self.activity if day == 9 else self.devl_activity
self.log_time(start=this_day, project=self.project,
activity=activity)
self.assertEqual(self.get_activity(), self.activity.id)
def testLastWorkedSeveralProjects(self):
"""
        Obtain activities contingent on the project when the worker is on several
"""
self.login_user(self.user)
project1 = self.project
project2 = self.project2
for day in range(0, 10):
this_day = utils.add_timezone(datetime.datetime(2011, 1, 1))
this_day += relativedelta(days=day)
# Cycle through projects and activities
project = project1 if day % 2 == 0 else project2
activity = self.devl_activity if day % 3 == 0 else self.activity
self.log_time(start=this_day, project=project, activity=activity)
self.assertEqual(self.get_activity(project1), self.activity.id)
self.assertEqual(self.get_activity(project2), self.devl_activity.id)
class ClockOutTest(ViewTestMixin, TestCase):
def setUp(self):
super(ClockOutTest, self).setUp()
self.user = factories.User()
self.user2 = factories.User()
self.superuser = factories.Superuser()
permissions = Permission.objects.filter(
content_type=ContentType.objects.get_for_model(Entry),
codename__in=('can_clock_in', 'can_clock_out',
'can_pause', 'change_entry')
)
self.user.user_permissions = permissions
self.user2.user_permissions = permissions
self.user.save()
self.user2.save()
self.activity = factories.Activity(code='WRK', name='Work')
self.devl_activity = factories.Activity(
code='devl', name='development', billable=True)
self.sick_activity = factories.Activity(
code="sick", name="sick/personal", billable=False)
self.activity_group_all = factories.ActivityGroup(name='All')
self.activity_group_work = factories.ActivityGroup(name='Client work')
activities = Activity.objects.all()
for activity in activities:
activity.activity_group.add(self.activity_group_all)
if activity != self.sick_activity:
activity.activity_group.add(self.activity_group_work)
self.business = factories.Business()
status = factories.StatusAttribute(
label='Current', enable_timetracking=True)
type_ = factories.TypeAttribute(
label='Web Sites', enable_timetracking=True)
self.project = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user, activity_group=self.activity_group_work)
self.project2 = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user2, activity_group=self.activity_group_all)
factories.ProjectRelationship(user=self.user, project=self.project)
self.location = factories.Location()
self.url = reverse('clock_out')
self.login_user(self.user)
# Create an active entry, so that clock out tests don't have to.
self.default_end_time = timezone.now()
back = timezone.now() - relativedelta(hours=5)
self.entry = factories.Entry(**{
'user': self.user,
'start_time': back,
'project': self.project,
'activity': self.devl_activity,
})
def testBasicClockOut(self):
data = {
'start_time_0': self.entry.start_time.strftime('%m/%d/%Y'),
'start_time_1': self.entry.start_time.strftime('%H:%M:%S'),
'end_time_0': self.default_end_time.strftime('%m/%d/%Y'),
'end_time_1': self.default_end_time.strftime('%H:%M:%S'),
'location': self.location.pk,
}
self.client.post(self.url, data, follow=True)
closed_entry = Entry.objects.get(pk=self.entry.pk)
self.assertTrue(closed_entry.is_closed)
def testClockOutWithSecondsPaused(self):
"""
Test that clocking out of an unpaused entry with previous pause time
calculates the correct amount of unpaused time.
"""
entry_with_pause = self.entry
# paused for a total of 1 hour
entry_with_pause.seconds_paused = 3600
entry_with_pause.save()
data = {
'start_time_0': entry_with_pause.start_time.strftime('%m/%d/%Y'),
'start_time_1': entry_with_pause.start_time.strftime('%H:%M:%S'),
'end_time_0': self.default_end_time.strftime('%m/%d/%Y'),
'end_time_1': self.default_end_time.strftime('%H:%M:%S'),
'location': self.location.pk,
}
self.client.post(reverse('clock_out'), data)
entry_with_pause = Entry.objects.get(pk=entry_with_pause.pk)
self.assertAlmostEqual(entry_with_pause.hours, 4)
def testClockOutWhilePaused(self):
"""
Test that clocking out of a paused entry calculates the correct time
"""
paused_entry = self.entry
paused_entry.pause_time = self.entry.start_time \
+ relativedelta(hours=1)
paused_entry.save()
data = {
'start_time_0': paused_entry.start_time.strftime('%m/%d/%Y'),
'start_time_1': paused_entry.start_time.strftime('%H:%M:%S'),
'end_time_0': self.default_end_time.strftime('%m/%d/%Y'),
'end_time_1': self.default_end_time.strftime('%H:%M:%S'),
'location': self.location.pk,
}
self.client.post(reverse('clock_out'), data)
paused_entry = Entry.objects.get(pk=paused_entry.pk)
self.assertAlmostEqual(paused_entry.hours, 1)
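    # The two tests above exercise the model's two pause mechanisms: a
    # completed pause (seconds_paused) is subtracted from the 5-hour span
    # (5h - 1h == 4h), while an open pause (pause_time) caps the counted time
    # at the moment the pause began (1h after start), no matter when the
    # clock-out actually happens.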
def testClockOutReverse(self):
"""
Test that the user can't clock out at a time prior to the starting time
"""
backward_entry = self.entry
backward_entry.save()
# reverse the times
data = {
'start_time_0': self.default_end_time.strftime('%m/%d/%Y'),
'start_time_1': self.default_end_time.strftime('%H:%M:%S'),
'end_time_0': self.entry.start_time.strftime('%m/%d/%Y'),
'end_time_1': self.entry.start_time.strftime('%H:%M:%S'),
'location': self.location.pk,
}
response = self.client.post(reverse('clock_out'), data)
self.assertFormError(response, 'form', None,
'Ending time must exceed the starting time')
def testClockOutTooLong(self):
end_time = self.entry.start_time + relativedelta(hours=13)
data = {
'start_time_0': self.entry.start_time.strftime('%m/%d/%Y'),
'start_time_1': self.entry.start_time.strftime('%H:%M:%S'),
'end_time_0': end_time.strftime('%m/%d/%Y'),
'end_time_1': end_time.strftime('%H:%M:%S'),
'location': self.location.pk,
}
response = self.client.post(self.url, data)
err_msg = 'Ending time exceeds starting time by 12 hours ' \
'or more for {0} on {1} at {2} to {3} at {4}.'.format(
self.entry.project,
self.entry.start_time.strftime('%m/%d/%Y'),
self.entry.start_time.strftime('%H:%M:%S'),
end_time.strftime('%m/%d/%Y'),
end_time.strftime('%H:%M:%S')
)
self.assertFormError(response, 'form', None, err_msg)
def testClockOutPauseTooLong(self):
paused_entry = self.entry
paused_entry.seconds_paused = 60 * 60 * 13
paused_entry.save()
data = {
'start_time_0': paused_entry.start_time.strftime('%m/%d/%Y'),
'start_time_1': paused_entry.start_time.strftime('%H:%M:%S'),
'end_time_0': self.default_end_time.strftime('%m/%d/%Y'),
'end_time_1': self.default_end_time.strftime('%H:%M:%S'),
'location': self.location.pk,
}
response = self.client.post(reverse('clock_out'), data)
err_msg = 'Ending time exceeds starting time by 12 hours ' \
'or more for {0} on {1} at {2} to {3} at {4}.'.format(
self.entry.project,
paused_entry.start_time.strftime('%m/%d/%Y'),
paused_entry.start_time.strftime('%H:%M:%S'),
self.default_end_time.strftime('%m/%d/%Y'),
self.default_end_time.strftime('%H:%M:%S')
)
self.assertFormError(response, 'form', None, err_msg)
def testClockOutOverlap(self):
"""
Test that the user cannot clock out if the times overlap with an
existing entry
"""
# Create a closed and valid entry
now = timezone.now() - relativedelta(hours=5)
entry1_data = {
'user': self.user,
'project': self.project,
'activity': self.devl_activity,
'start_time': now,
'end_time': self.default_end_time
}
entry1 = factories.Entry(**entry1_data)
entry1_data.update({
'st_str': entry1.start_time.strftime('%H:%M:%S'),
'end_str': entry1.end_time.strftime('%H:%M:%S'),
})
# Create a form with times that overlap with entry1
bad_start = entry1.start_time - relativedelta(hours=1)
bad_end = entry1.end_time + relativedelta(hours=1)
factories.Entry(user=self.user, start_time=bad_start, end_time=bad_end)
data = {
'start_time_0': bad_start.strftime('%m/%d/%Y'),
'start_time_1': bad_start.strftime('%H:%M:%S'),
'end_time_0': bad_end.strftime('%m/%d/%Y'),
'end_time_1': bad_end.strftime('%H:%M:%S'),
'location': self.location.pk,
}
# With entry1 on either side, a post with the bad_entry data should
# fail
response = self.client.post(reverse('clock_out'), data)
form = response.context['form']
        self.assertEquals(len(form.errors), 1, form.errors.keys())
self.assertTrue('__all__' in form.errors, form.errors)
def test_clocking_out_inactive(self):
# If clock out when not active, redirect to dashboard
# (e.g. double-clicked clock out button or clicked it on an old page)
# setUp clocked us in, so clock out again
data = {
'start_time_0': self.entry.start_time.strftime('%m/%d/%Y'),
'start_time_1': self.entry.start_time.strftime('%H:%M:%S'),
'end_time_0': self.default_end_time.strftime('%m/%d/%Y'),
'end_time_1': self.default_end_time.strftime('%H:%M:%S'),
'location': self.location.pk,
}
response = self.client.post(
self.url, data,
follow=True,
)
# Do it again - make sure we redirect to the dashboard
response = self.client.post(
self.url, data,
follow=False,
)
self.assertRedirects(response, reverse('dashboard'),
status_code=302, target_status_code=200)
class CheckOverlap(ViewTestMixin, LogTimeMixin, TestCase):
"""
    When entries overlap, the entry.check_overlap method should return True.
    For valid (non-overlapping) entries, check_overlap should return False.
"""
def setUp(self):
super(CheckOverlap, self).setUp()
self.user = factories.User()
self.user2 = factories.User()
self.superuser = factories.Superuser()
permissions = Permission.objects.filter(
content_type=ContentType.objects.get_for_model(Entry),
codename__in=('can_clock_in', 'can_clock_out',
'can_pause', 'change_entry')
)
self.user.user_permissions = permissions
self.user2.user_permissions = permissions
self.user.save()
self.user2.save()
self.activity = factories.Activity(
code='WRK', name='Work')
self.devl_activity = factories.Activity(
code='devl', name='development', billable=True)
self.sick_activity = factories.Activity(
code="sick", name="sick/personal", billable=False)
self.activity_group_all = factories.ActivityGroup(name='All')
self.activity_group_work = factories.ActivityGroup(name='Client work')
activities = Activity.objects.all()
for activity in activities:
activity.activity_group.add(self.activity_group_all)
if activity != self.sick_activity:
activity.activity_group.add(self.activity_group_work)
self.business = factories.Business()
status = factories.StatusAttribute(
label='Current', enable_timetracking=True)
type_ = factories.TypeAttribute(
label='Web Sites', enable_timetracking=True)
self.project = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user, activity_group=self.activity_group_work)
self.project2 = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user2, activity_group=self.activity_group_all)
factories.ProjectRelationship(user=self.user, project=self.project)
self.location = factories.Location()
self.login_user(self.user)
self.now = timezone.now()
# define start and end times to create valid entries
self.start = self.now - relativedelta(days=0, hours=8)
self.end = self.now - relativedelta(days=0)
# Create a valid entry for the tests to overlap with
self.log_time(start=self.start, end=self.end)
# define bad start times relative to the valid one (just in/outside)
self.start_before = self.start - relativedelta(minutes=2)
self.start_inside = self.start + relativedelta(minutes=2)
self.end_inside = self.end - relativedelta(minutes=2)
self.end_after = self.end + relativedelta(minutes=2)
# helper functions
def use_checkoverlap(self, entries):
"""
        Uses entry.check_overlap; given a list of entries, returns the total
        number of overlapping pairs
"""
user_total_overlaps = 0
for index_a, entry_a in enumerate(entries):
for index_b in range(index_a, len(entries)):
entry_b = entries[index_b]
if entry_a.check_overlap(entry_b):
user_total_overlaps += 1
return user_total_overlaps
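    # A hedged sketch of the rule these tests exercise (the name and helper
    # are illustrative; Entry.check_overlap is the real implementation):
    @staticmethod
    def overlap_sketch(a_start, a_end, b_start, b_end, pause_seconds=0):
        """
        Two intervals overlap unless the pause absorbs all of the shared
        time. Judging by use_checkoverlap comparing each entry with itself
        without inflating the totals, the real method must also ignore
        self-comparisons, which this sketch does not model.
        """
        # Negative shared time means the intervals do not intersect at all.
        shared = min(a_end, b_end) - max(a_start, b_start)
        return shared.total_seconds() > pause_seconds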
def get_entries(self):
return Entry.objects.filter(user=self.user)
# Invalid entries to test against
def testBeforeAndIn(self):
self.log_time(start=self.start_before, end=self.end_inside)
user_total_overlaps = self.use_checkoverlap(self.get_entries())
self.assertEqual(user_total_overlaps, 1)
def testAfterAndIn(self):
self.log_time(start=self.start_inside, end=self.end_after)
user_total_overlaps = self.use_checkoverlap(self.get_entries())
self.assertEqual(user_total_overlaps, 1)
def testInside(self):
self.log_time(start=self.start_inside, end=self.end_inside)
user_total_overlaps = self.use_checkoverlap(self.get_entries())
self.assertEqual(user_total_overlaps, 1)
def testOutside(self):
self.log_time(start=self.start_before, end=self.end_after)
user_total_overlaps = self.use_checkoverlap(self.get_entries())
self.assertEqual(user_total_overlaps, 1)
def testOverlapWithPause(self):
"""Overlaps by two minutes. Passes because it has 2 min. of pause"""
self.log_time(start=self.start_before, end=self.start_inside,
pause=120)
user_total_overlaps = self.use_checkoverlap(self.get_entries())
self.assertEqual(user_total_overlaps, 0)
def testOverlapWithoutEnoughPause(self):
"""Overlaps by two minutes, but only has 119 seconds of pause"""
self.log_time(start=self.start_before, end=self.start_inside,
pause=119)
user_total_overlaps = self.use_checkoverlap(self.get_entries())
self.assertEqual(user_total_overlaps, 1)
class CreateEditEntry(ViewTestMixin, TestCase):
def setUp(self):
super(CreateEditEntry, self).setUp()
self.user = factories.User()
self.user2 = factories.User()
self.superuser = factories.Superuser()
permissions = Permission.objects.filter(
content_type=ContentType.objects.get_for_model(Entry),
codename__in=('can_clock_in', 'can_clock_out',
'can_pause', 'change_entry')
)
self.user.user_permissions = permissions
self.user2.user_permissions = permissions
self.user.save()
self.user2.save()
self.activity = factories.Activity(code='WRK', name='Work')
self.devl_activity = factories.Activity(
code='devl', name='development', billable=True)
self.sick_activity = factories.Activity(
code="sick", name="sick/personal", billable=False)
self.activity_group_all = factories.ActivityGroup(name='All')
self.activity_group_work = factories.ActivityGroup(name='Client work')
activities = Activity.objects.all()
for activity in activities:
activity.activity_group.add(self.activity_group_all)
if activity != self.sick_activity:
activity.activity_group.add(self.activity_group_work)
self.business = factories.Business()
status = factories.StatusAttribute(
label='Current', enable_timetracking=True)
type_ = factories.TypeAttribute(
label='Web Sites', enable_timetracking=True)
self.project = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user, activity_group=self.activity_group_work)
self.project2 = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user2, activity_group=self.activity_group_all)
factories.ProjectRelationship(user=self.user, project=self.project)
self.location = factories.Location()
self.login_user(self.user)
self.now = timezone.now()
valid_start = self.now - relativedelta(days=1)
valid_end = valid_start + relativedelta(hours=1)
self.ten_min_ago = self.now - relativedelta(minutes=10)
self.two_hour_ago = self.now - relativedelta(hours=2)
self.one_hour_ago = self.now - relativedelta(hours=1)
# establish data, entries, urls for all tests
self.default_data = {
'project': self.project.pk,
'location': self.location.pk,
'activity': self.devl_activity.pk,
'seconds_paused': 0,
'start_time_0': valid_start.strftime('%m/%d/%Y'),
'start_time_1': valid_start.strftime('%H:%M:%S'),
'end_time_0': valid_end.strftime('%m/%d/%Y'),
'end_time_1': valid_end.strftime('%H:%M:%S'),
}
self.closed_entry_data = {
'user': self.user,
'project': self.project,
'activity': self.devl_activity,
'start_time': self.two_hour_ago,
'end_time': self.one_hour_ago,
}
self.current_entry_data = {
'user': self.user,
'project': self.project,
'activity': self.devl_activity,
'start_time': self.ten_min_ago,
}
self.closed_entry = factories.Entry(**self.closed_entry_data)
self.current_entry = factories.Entry(**self.current_entry_data)
self.closed_entry_data.update({
'st_str': self.two_hour_ago.strftime('%H:%M:%S'),
'end_str': self.one_hour_ago.strftime('%H:%M:%S'),
})
self.current_entry_data.update({
'st_str': self.ten_min_ago.strftime('%H:%M:%S'),
})
self.create_url = reverse('create_entry')
self.edit_closed_url = reverse('edit_entry', args=[self.closed_entry.pk])
self.edit_current_url = reverse('edit_entry', args=[self.current_entry.pk])
def testCreateEntry(self):
"""
Test the ability to create a valid new entry
"""
response = self.client.post(self.create_url, self.default_data,
follow=True)
self.assertRedirects(response, reverse('dashboard'),
status_code=302, target_status_code=200)
self.assertContains(
response, 'The entry has been created successfully', count=1)
def testEditClosed(self):
"""
Test the ability to edit a closed entry, using valid values
"""
response = self.client.post(self.edit_closed_url, self.default_data,
follow=True)
self.assertRedirects(response, reverse('dashboard'),
status_code=302, target_status_code=200)
self.assertContains(
response, 'The entry has been updated successfully', count=1)
def testEditCurrentSameTime(self):
"""
Test the ability to edit a current entry, not changing the values
"""
data = self.default_data
data.update({
'start_time_0': self.current_entry_data['start_time'].strftime(
'%m/%d/%Y'),
'start_time_1': self.current_entry_data['start_time'].strftime(
'%H:%M:%S'),
})
response = self.client.post(self.edit_current_url, data, follow=True)
# This post should redirect to the dashboard, with the correct message
# and 1 active entry, because we updated the current entry from setUp
self.assertRedirects(response, reverse('dashboard'),
status_code=302, target_status_code=200)
self.assertContains(
response, 'The entry has been updated successfully', count=1)
entries = Entry.objects.filter(
user=self.user, end_time__isnull=True)
self.assertEquals(entries.count(), 1)
def testEditCurrentDiffTime(self):
"""
Test the ability to edit a current entry, using valid new values
"""
data = self.default_data
new_start = self.current_entry_data['start_time'] + \
relativedelta(minutes=5)
data.update({
'start_time_0': new_start.strftime('%m/%d/%Y'),
'start_time_1': new_start.strftime('%H:%M:%S'),
})
response = self.client.post(self.edit_current_url, data, follow=True)
# This post should redirect to the dashboard, with the correct message
# and 1 active entry, because we updated the current entry from setUp
self.assertRedirects(response, reverse('dashboard'),
status_code=302, target_status_code=200)
entries = Entry.objects.filter(user=self.user, end_time__isnull=True)
self.assertEquals(entries.count(), 1)
def testCreateBlockByClosed(self):
"""
Test that the entry is blocked by closed entries that overlap
"""
overlap_entry = self.default_data
overlap_entry.update({
'start_time_0': self.closed_entry.start_time.strftime('%m/%d/%Y'),
'start_time_1': self.closed_entry.start_time.strftime('%H:%M:%S'),
'end_time_0': self.closed_entry.end_time.strftime('%m/%d/%Y'),
'end_time_1': self.closed_entry.end_time.strftime('%H:%M:%S'),
})
response = self.client.post(self.create_url, overlap_entry, follow=True)
form = response.context['form']
self.assertEquals(len(form.errors), 1, form.errors)
self.assertTrue('__all__' in form.errors, form.errors.keys())
def testCreateBlockByCurrent(self):
"""
Test that the entry is blocked by the current entry when appropriate
"""
overlap_entry = self.default_data
overlap_entry.update({
'start_time_0': self.current_entry.start_time.strftime('%m/%d/%Y'),
'start_time_1': self.current_entry.start_time.strftime('%H:%M:%S'),
'end_time_0': self.now.strftime('%m/%d/%Y'),
'end_time_1': self.now.strftime('%H:%M:%S'),
})
response = self.client.post(self.create_url, overlap_entry, follow=True)
form = response.context['form']
self.assertEquals(len(form.errors), 1, form.errors)
self.assertTrue('__all__' in form.errors, form.errors.keys())
def testCreateTooLongEntry(self):
"""
Test that the entry is blocked if the duration is too long.
"""
long_entry = self.default_data
end_time = self.now + relativedelta(hours=13)
long_entry.update({
'start_time_0': self.now.strftime('%m/%d/%Y'),
'start_time_1': self.now.strftime('%H:%M:%S'),
'end_time_0': end_time.strftime('%m/%d/%Y'),
'end_time_1': end_time.strftime('%H:%M:%S'),
})
response = self.client.post(self.create_url, long_entry, follow=True)
err_msg = 'Ending time exceeds starting time by 12 hours ' \
'or more for {0} on {1} at {2} to {3} at {4}.'.format(
self.project,
self.now.strftime('%m/%d/%Y'),
self.now.strftime('%H:%M:%S'),
end_time.strftime('%m/%d/%Y'),
end_time.strftime('%H:%M:%S')
)
self.assertFormError(response, 'form', None, err_msg)
def testCreateLongPauseEntry(self):
"""
        Test submitting an entry whose paused time is very long (13 hours).
"""
long_pause = self.default_data
long_pause['seconds_paused'] = 60 * 60 * 13
self.client.post(self.create_url, long_pause, follow=True)
def testProjectList(self):
"""
Make sure the list of available projects conforms to user associations
"""
response = self.client.get(reverse('create_entry'))
self.assertEqual(response.status_code, 200)
projects = list(response.context['form'].fields['project'].queryset)
self.assertTrue(self.project in projects)
self.assertTrue(self.project2 not in projects)
self.project.status.enable_timetracking = False
self.project.status.save()
response = self.client.get(reverse('create_entry'))
projects = list(response.context['form'].fields['project'].queryset)
self.assertTrue(self.project not in projects)
def testBadActivity(self):
"""
Make sure the user cannot add an entry for an activity that is not in
the project's activity group
"""
data = self.default_data
data.update({'activity': self.sick_activity.id})
response = self.client.post(self.create_url, data)
err_msg = 'sick/personal is not allowed for this project. Please '
err_msg += 'choose among development, and Work'
self.assertFormError(response, 'form', None, err_msg)
def add_entry_test_helper(self):
self.login_user(self.user)
response = self.client.post(self.create_url, data=self.default_data, follow=True)
self.assertEqual(response.status_code, 200)
msg = ('You cannot add/edit entries after a timesheet has been '
'approved or invoiced. Please correct the start and end times.')
self.assertEqual([msg], response.context['form'].non_field_errors())
def test_add_approved_entries(self):
"""
If your entries have been verified and then approved, you should
not be able to add entries for that time period
"""
entry = factories.Entry(**{
'user': self.user,
'start_time': self.ten_min_ago,
'end_time': self.ten_min_ago + relativedelta(minutes=1)
})
        entry.status = Entry.APPROVED
entry.save()
self.add_entry_test_helper()
def test_add_invoiced_entries(self):
"""
If your entries have been verified, approved, and invoiced, you
should not be able to add entries for that time period
"""
entry = factories.Entry(**{
'user': self.user,
'start_time': self.ten_min_ago,
'end_time': self.ten_min_ago + relativedelta(minutes=1)
})
entry.status = Entry.INVOICED
entry.save()
self.add_entry_test_helper()
def edit_entry_helper(self, status='approved'):
"""Helper function for editing approved entries"""
entry = factories.Entry(**{
'user': self.user,
'project': self.project,
'start_time': self.now - relativedelta(hours=6),
'end_time': self.now - relativedelta(hours=5),
'status': status
})
url = reverse('edit_entry', args=(entry.pk,))
data = self.default_data
data.update({
'start_time_0': entry.start_time.strftime('%m/%d/%Y'),
'start_time_1': entry.start_time.strftime('%H:%M:%S'),
'end_time_0': entry.end_time.strftime('%m/%d/%Y'),
'end_time_1': entry.end_time.strftime('%H:%M:%S'),
})
return url, entry, data
def test_admin_edit_approved_entry(self):
"""
An administrator (or anyone with view_payroll_summary perm) should
        be able to edit an entry even if it has been approved
"""
self.client.logout()
self.login_user(self.superuser)
url, entry, data = self.edit_entry_helper()
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
response = self.client.post(url, data=data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'The entry has been updated successfully.')
self.assertEqual(self.user, entry.user)
def test_user_edit_approved_entry(self):
"""A regular user shouldnt be able to edit an approved entry"""
url, entry, data = self.edit_entry_helper()
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
response = self.client.post(url, data=data)
self.assertEqual(response.status_code, 404)
def test_edit_invoiced_entry(self):
"""You shouldnt be able to edit an invoiced entry"""
self.client.logout()
self.login_user(self.superuser)
url, entry, data = self.edit_entry_helper(Entry.INVOICED)
response = self.client.post(url, data=data, follow=True)
self.assertEqual(response.status_code, 200)
msg = ('You cannot add/edit entries after a timesheet has been '
'approved or invoiced. Please correct the start and end times.')
self.assertContains(response, msg)
class StatusTest(ViewTestMixin, TestCase):
def setUp(self):
super(StatusTest, self).setUp()
self.user = factories.User()
self.user2 = factories.User()
self.superuser = factories.Superuser()
permissions = Permission.objects.filter(
content_type=ContentType.objects.get_for_model(Entry),
codename__in=('can_clock_in', 'can_clock_out',
'can_pause', 'change_entry')
)
self.user.user_permissions = permissions
self.user2.user_permissions = permissions
self.user.save()
self.user2.save()
self.activity = factories.Activity(code='WRK', name='Work')
self.devl_activity = factories.Activity(
code='devl', name='development', billable=True)
self.sick_activity = factories.Activity(
code="sick", name="sick/personal", billable=False)
self.activity_group_all = factories.ActivityGroup(name='All')
self.activity_group_work = factories.ActivityGroup(name='Client work')
activities = Activity.objects.all()
for activity in activities:
activity.activity_group.add(self.activity_group_all)
if activity != self.sick_activity:
activity.activity_group.add(self.activity_group_work)
self.business = factories.Business()
status = factories.StatusAttribute(
label='Current', enable_timetracking=True)
type_ = factories.TypeAttribute(
label='Web Sites', enable_timetracking=True)
self.project = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user, activity_group=self.activity_group_work)
self.project2 = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user2, activity_group=self.activity_group_all)
factories.ProjectRelationship(user=self.user, project=self.project)
self.location = factories.Location()
self.login_user(self.user)
self.now = timezone.now()
self.from_date = utils.get_month_start(self.now)
self.sheet_url = reverse('view_user_timesheet', args=[self.user.pk])
def verify_url(self, user=None, from_date=None):
user = user or self.user
from_date = from_date or self.from_date
base_url = reverse('change_user_timesheet', args=(user.pk, 'verify'))
params = {'from_date': from_date.strftime('%Y-%m-%d')}
params = urlencode(params)
return '{0}?{1}'.format(base_url, params)
def approve_url(self, user=None, from_date=None):
user = user or self.user
from_date = from_date or self.from_date
base_url = reverse('change_user_timesheet', args=(user.pk, 'approve'))
params = {'from_date': from_date.strftime('%Y-%m-%d')}
params = urlencode(params)
return '{0}?{1}'.format(base_url, params)
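    # For reference, verify_url()/approve_url() produce URLs shaped like
    #   <change_user_timesheet path>?from_date=2013-06-01
    # where the path portion depends on the project's URLconf; only the
    # query-string format is fixed by the helpers above.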
def get_reject_url(self, entry_id):
"Helper for the reject entry view"
return reverse('reject_entry', args=[entry_id])
def login_as_admin(self):
"Helper to login as an admin user"
self.admin = factories.Superuser()
self.login_user(self.admin)
def login_with_permissions(self, *codenames):
"""Helper to login as a user with correct permissions"""
perms = Permission.objects.filter(codename__in=codenames)
self.perm_user = factories.User()
self.perm_user.user_permissions.add(*perms)
self.perm_user.save()
self.login_user(self.perm_user)
def test_verify_link(self):
factories.Entry(
user=self.user,
start_time=self.now - relativedelta(hours=1),
end_time=self.now
)
response = self.client.get(self.sheet_url)
        self.assertEqual(response.status_code, 200)
self.assertTrue(response.context['show_verify'])
self.assertFalse(response.context['show_approve'])
def test_approve_link_no_permission(self):
"""Permission is required to see approve timesheet link."""
factories.Entry(
user=self.user,
start_time=self.now - relativedelta(hours=1),
end_time=self.now,
status=Entry.VERIFIED
)
response = self.client.get(self.sheet_url)
self.assertFalse(response.context['show_approve'])
def test_approve_link(self):
self.login_with_permissions('view_entry_summary', 'approve_timesheet')
factories.Entry(
user=self.user,
start_time=self.now - relativedelta(hours=1),
end_time=self.now,
status=Entry.VERIFIED,
)
response = self.client.get(self.sheet_url)
self.assertEquals(response.status_code, 200)
self.assertTrue(response.context['show_approve'])
self.assertFalse(response.context['show_verify'])
def test_no_hours_verify(self):
response = self.client.get(self.verify_url(), follow=True)
self.assertEquals(response.status_code, 200)
msg = 'You cannot verify/approve a timesheet with no hours'
messages = response.context['messages']
self.assertEquals(messages._loaded_messages[0].message, msg)
response = self.client.post(self.verify_url(), follow=True)
self.assertEquals(messages._loaded_messages[0].message, msg)
def test_no_hours_approve(self):
self.login_with_permissions('approve_timesheet', 'view_entry_summary')
response = self.client.get(self.approve_url(), follow=True)
self.assertEquals(response.status_code, 200)
msg = 'You cannot verify/approve a timesheet with no hours'
messages = response.context['messages']
self.assertEquals(messages._loaded_messages[0].message, msg)
response = self.client.post(self.approve_url(), follow=True)
self.assertEquals(messages._loaded_messages[0].message, msg)
def test_verify_other_user(self):
"""A user should not be able to verify another's timesheet"""
entry = factories.Entry(**{
'user': self.user2,
'start_time': self.now - relativedelta(hours=1),
'end_time': self.now,
})
url = self.verify_url(self.user2)
response = self.client.get(url)
self.assertEquals(response.status_code, 403)
self.assertEquals(entry.status, Entry.UNVERIFIED)
response = self.client.post(url, {'do_action': 'Yes'})
self.assertEquals(response.status_code, 403)
self.assertEquals(entry.status, Entry.UNVERIFIED)
def test_approve_user(self):
"""A regular user should not be able to approve their timesheet"""
entry = factories.Entry(**{
'user': self.user,
'start_time': self.now - relativedelta(hours=1),
'end_time': self.now
})
response = self.client.get(self.approve_url())
self.assertEquals(response.status_code, 403)
response = self.client.post(self.approve_url(), {'do_action': 'Yes'})
self.assertEquals(response.status_code, 403)
self.assertNotEquals(entry.status, Entry.APPROVED)
self.assertContains(
response,
'Forbidden: You cannot approve this timesheet',
status_code=403
)
def test_approve_other_user(self):
"""A regular user should not be able to approve another's timesheet"""
entry = factories.Entry(**{
'user': self.user2,
'start_time': self.now - relativedelta(hours=1),
'end_time': self.now
})
response = self.client.get(self.approve_url())
self.assertEquals(response.status_code, 403)
response = self.client.post(self.approve_url(), {'do_action': 'Yes'})
self.assertEquals(response.status_code, 403)
self.assertNotEquals(entry.status, Entry.APPROVED)
self.assertContains(
response,
'Forbidden: You cannot approve this timesheet',
status_code=403
)
def test_verify_active_entry(self):
"""
        A user shouldn't be able to verify a timesheet if it contains
        an active entry, and should be redirected back to the ledger
"""
self.login_as_admin()
entry1 = factories.Entry(**{
'user': self.user,
'start_time': self.now - relativedelta(hours=5),
'end_time': self.now - relativedelta(hours=4),
'status': Entry.UNVERIFIED
})
entry2 = factories.Entry(**{
'user': self.user,
'start_time': self.now - relativedelta(hours=1),
'status': Entry.UNVERIFIED
})
response = self.client.get(self.verify_url(), follow=True)
self.assertEquals(response.status_code, 200)
messages = response.context['messages']
msg = 'You cannot verify/approve this timesheet while the user {0} ' \
'has an active entry. Please have them close any active ' \
'entries.'.format(self.user.get_name_or_username())
self.assertEquals(messages._loaded_messages[0].message, msg)
self.assertEquals(entry1.status, Entry.UNVERIFIED)
self.assertEquals(entry2.status, Entry.UNVERIFIED)
response = self.client.post(self.verify_url(), follow=True)
self.assertEquals(response.status_code, 200)
messages = response.context['messages']
self.assertEquals(messages._loaded_messages[0].message, msg)
self.assertEquals(entry1.status, Entry.UNVERIFIED)
self.assertEquals(entry2.status, Entry.UNVERIFIED)
def testVerifyButton(self):
response = self.client.get(self.sheet_url)
self.assertNotContains(response, self.verify_url())
entry = factories.Entry(**{
'user': self.user,
'start_time': timezone.now() - relativedelta(hours=1),
'end_time': timezone.now(),
})
response = self.client.get(self.sheet_url)
self.assertTrue(response.context['show_verify'])
entry.status = Entry.VERIFIED
entry.save()
response = self.client.get(self.sheet_url)
self.assertFalse(response.context['show_verify'])
def testApproveButton(self):
self.login_as_admin()
response = self.client.get(self.sheet_url)
self.assertFalse(response.context['show_approve'])
entry = factories.Entry(**{
'user': self.user,
'start_time': timezone.now() - relativedelta(hours=1),
'end_time': timezone.now(),
})
response = self.client.get(self.sheet_url)
self.assertFalse(response.context['show_approve'])
entry.status = Entry.VERIFIED
entry.save()
response = self.client.get(self.sheet_url)
self.assertTrue(response.context['show_approve'])
entry.status = Entry.APPROVED
entry.save()
response = self.client.get(self.sheet_url)
self.assertFalse(response.context['show_approve'])
def testVerifyPage(self):
factories.Entry(
user=self.user,
start_time=timezone.now() - relativedelta(hours=1),
end_time=timezone.now(),
)
self.client.get(self.verify_url())
entries = self.user.timepiece_entries.all()
self.assertEquals(entries[0].status, Entry.UNVERIFIED)
self.client.post(self.verify_url(), {'do_action': 'Yes'})
self.assertEquals(entries[0].status, Entry.VERIFIED)
def testApprovePage(self):
self.login_with_permissions('approve_timesheet', 'view_entry_summary')
entry = factories.Entry(
user=self.user,
start_time=timezone.now() - relativedelta(hours=1),
end_time=timezone.now(),
)
self.assertEquals(entry.status, Entry.UNVERIFIED)
entry.status = Entry.VERIFIED
entry.save()
self.client.get(self.approve_url(),)
self.assertEquals(entry.status, Entry.VERIFIED)
self.client.post(self.approve_url(), {'do_action': 'Yes'})
entry = Entry.objects.get(pk=entry.pk)
self.assertEquals(entry.status, Entry.APPROVED)
def test_reject_user(self):
"""A regular user should not be able to reject an entry"""
self.login_user(self.user)
now = timezone.now()
entry = factories.Entry(**{
'user': self.user,
'start_time': now - relativedelta(hours=1),
'end_time': now,
'status': Entry.VERIFIED
})
url = self.get_reject_url(entry.pk)
self.client.post(url, {'Yes': 'yes'})
self.assertEquals(entry.status, Entry.VERIFIED)
def test_reject_other_user(self):
"""
A regular user should not be able to reject
        another user's entry
"""
self.login_user(self.user2)
now = timezone.now()
entry = factories.Entry(**{
'user': self.user,
'start_time': now - relativedelta(hours=1),
'end_time': now,
'status': Entry.VERIFIED
})
url = self.get_reject_url(entry.pk)
self.client.post(url, {'Yes': 'yes'})
self.assertEquals(entry.status, Entry.VERIFIED)
def testRejectPage(self):
self.login_as_admin()
entry = factories.Entry(**{
'user': self.user,
'start_time': timezone.now() - relativedelta(hours=1),
'end_time': timezone.now(),
})
reject_url = self.get_reject_url(entry.id)
def check_entry_against_code(status, status_code):
entry.status = status
entry.save()
response = self.client.get(reject_url)
self.assertEqual(response.status_code, status_code)
check_entry_against_code(Entry.UNVERIFIED, 302)
check_entry_against_code(Entry.INVOICED, 302)
check_entry_against_code(Entry.APPROVED, 200)
check_entry_against_code(Entry.VERIFIED, 200)
response = self.client.post(reject_url, {'Yes': 'yes'})
        self.assertEqual(response.status_code, 302)
entry = Entry.objects.get(user=self.user)
self.assertEqual(entry.status, Entry.UNVERIFIED)
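    # Summary of the status checks above: only VERIFIED and APPROVED entries
    # reach the reject confirmation page (200); UNVERIFIED and INVOICED ones
    # redirect away (302), and a successful reject resets the entry to
    # UNVERIFIED.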
def testNotAllowedToRejectTimesheet(self):
entry = factories.Entry(**{
'user': self.user,
'start_time': timezone.now() - relativedelta(hours=1),
'end_time': timezone.now(),
})
reject_url = self.get_reject_url(entry.id)
response = self.client.get(reject_url)
        self.assertEqual(response.status_code, 403)
def testNotAllowedToApproveTimesheet(self):
response = self.client.get(self.approve_url(),)
        self.assertEqual(response.status_code, 403)
def testNotAllowedToVerifyTimesheet(self):
self.login_user(self.user2)
response = self.client.get(self.verify_url(),)
        self.assertEqual(response.status_code, 403)
class TestTotals(ViewTestMixin, LogTimeMixin, TestCase):
def setUp(self):
super(TestTotals, self).setUp()
self.user = factories.User()
self.user2 = factories.User()
self.superuser = factories.Superuser()
permissions = Permission.objects.filter(
content_type=ContentType.objects.get_for_model(Entry),
codename__in=('can_clock_in', 'can_clock_out',
'can_pause', 'change_entry')
)
self.user.user_permissions = permissions
self.user2.user_permissions = permissions
self.user.save()
self.user2.save()
self.activity = factories.Activity(code='WRK', name='Work')
self.devl_activity = factories.Activity(
code='devl', name='development', billable=True)
self.sick_activity = factories.Activity(
code="sick", name="sick/personal", billable=False)
self.activity_group_all = factories.ActivityGroup(name='All')
self.activity_group_work = factories.ActivityGroup(name='Client work')
activities = Activity.objects.all()
for activity in activities:
activity.activity_group.add(self.activity_group_all)
if activity != self.sick_activity:
activity.activity_group.add(self.activity_group_work)
self.business = factories.Business()
status = factories.StatusAttribute(
label='Current', enable_timetracking=True)
type_ = factories.TypeAttribute(
label='Web Sites', enable_timetracking=True)
self.project = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user, activity_group=self.activity_group_work)
self.project2 = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user2, activity_group=self.activity_group_all)
factories.ProjectRelationship(user=self.user, project=self.project)
self.location = factories.Location()
self.p1 = factories.BillableProject(name='1')
self.p2 = factories.NonbillableProject(name='2')
self.p4 = factories.BillableProject(name='4')
# For use with daily totals (Same project, non-billable activity)
self.p3 = factories.NonbillableProject(name='1')
def testGroupedTotals(self):
self.login_user(self.user)
days = [
utils.add_timezone(datetime.datetime(2010, 12, 20)),
utils.add_timezone(datetime.datetime(2010, 12, 27)),
utils.add_timezone(datetime.datetime(2010, 12, 28)),
utils.add_timezone(datetime.datetime(2011, 1, 3)),
utils.add_timezone(datetime.datetime(2011, 1, 4)),
utils.add_timezone(datetime.datetime(2011, 1, 10)),
utils.add_timezone(datetime.datetime(2011, 1, 16)),
utils.add_timezone(datetime.datetime(2011, 1, 17)),
utils.add_timezone(datetime.datetime(2011, 1, 18)),
utils.add_timezone(datetime.datetime(2011, 2, 2))
]
        # Each week has two days of entries, except 12-20 and 2-2, but those
        # are excluded by the timespan queryset
for day in days:
self.log_time(project=self.p1, start=day, delta=(1, 0))
self.log_time(project=self.p4, start=day, delta=(1, 0))
if random.choice([True, False]):
self.log_time(project=self.p2, start=day, delta=(1, 0))
else:
self.log_time(project=self.p3, start=day, delta=(1, 0))
date = utils.add_timezone(datetime.datetime(2011, 1, 19))
from_date = utils.get_month_start(date)
to_date = from_date + relativedelta(months=1)
first_week = utils.get_week_start(from_date)
entries = Entry.objects.timespan(first_week, to_date=to_date)
totals = grouped_totals(entries)
for week, week_totals, days in totals:
            # Jan. 3rd is a Monday; each week key should fall on a Monday
if week.month == 1:
self.assertEqual(week.day % 7, 3)
self.assertEqual(week_totals['billable'], 4)
self.assertEqual(week_totals['non_billable'], 2)
self.assertEqual(week_totals['total'], 6)
for day, projects in days:
for project, totals in projects[1].items():
self.assertEqual(projects[0]['billable'], 2)
self.assertEqual(projects[0]['non_billable'], 1)
self.assertEqual(projects[0]['total'], 3)
if project == self.p1:
self.assertEqual(totals['billable'], 1)
self.assertEqual(totals['total'], 1)
if project == self.p2:
self.assertEqual(totals['non_billable'], 1)
self.assertEqual(totals['total'], 1)
if project == self.p3:
self.assertEqual(totals['billable'], 1)
self.assertEqual(totals['non_billable'], 1)
self.assertEqual(totals['total'], 2)
if project == self.p4:
self.assertEqual(totals['billable'], 1)
self.assertEqual(totals['total'], 1)
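    # As the loops above imply, grouped_totals(entries) yields
    # (week, week_totals, days) tuples, where week_totals is a dict with
    # 'billable'/'non_billable'/'total' keys and each element of days is
    # (day, (day_totals, {project: project_totals})).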
class HourlySummaryTest(ViewTestMixin, TestCase):
def setUp(self):
super(HourlySummaryTest, self).setUp()
self.user = factories.User()
self.user2 = factories.User()
self.superuser = factories.Superuser()
permissions = Permission.objects.filter(
content_type=ContentType.objects.get_for_model(Entry),
codename__in=('can_clock_in', 'can_clock_out',
'can_pause', 'change_entry')
)
self.user.user_permissions = permissions
self.user2.user_permissions = permissions
self.user.save()
self.user2.save()
self.activity = factories.Activity(code='WRK', name='Work')
self.devl_activity = factories.Activity(
code='devl', name='development', billable=True)
self.sick_activity = factories.Activity(
code="sick", name="sick/personal", billable=False)
self.activity_group_all = factories.ActivityGroup(name='All')
self.activity_group_work = factories.ActivityGroup(name='Client work')
activities = Activity.objects.all()
for activity in activities:
activity.activity_group.add(self.activity_group_all)
if activity != self.sick_activity:
activity.activity_group.add(self.activity_group_work)
self.business = factories.Business()
status = factories.StatusAttribute(
label='Current', enable_timetracking=True)
type_ = factories.TypeAttribute(
label='Web Sites', enable_timetracking=True)
self.project = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user, activity_group=self.activity_group_work)
self.project2 = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user2, activity_group=self.activity_group_all)
factories.ProjectRelationship(user=self.user, project=self.project)
self.location = factories.Location()
self.now = timezone.now()
self.month = self.now.replace(day=1)
self.url = reverse('view_user_timesheet', args=(self.user.pk,))
self.login_user(self.user)
def create_month_entries(self):
"""Create four entries, one for each week of the month"""
factories.Entry(
user=self.user,
start_time=self.month,
end_time=self.month + relativedelta(hours=1)
)
factories.Entry(
user=self.user,
start_time=self.month + relativedelta(weeks=1),
end_time=self.month + relativedelta(weeks=1, hours=1)
)
factories.Entry(
user=self.user,
start_time=self.month + relativedelta(weeks=2),
end_time=self.month + relativedelta(weeks=2, hours=1)
)
factories.Entry(
user=self.user,
start_time=self.month + relativedelta(weeks=3),
end_time=self.month + relativedelta(weeks=3, hours=1)
)
def test_start_of_week(self):
"""Test that the entries start being labeled on the first week, ISO"""
self.create_month_entries()
response = self.client.get(self.url)
self.assertEquals(response.status_code, 200)
start_date = utils.get_week_start(self.month)
        # Week of {{ week|date:'M j, Y' }} -- the template's 'j' day format
        # has no zero padding, hence the replace() stripping strftime's zero
        msg = 'Week of {0}'.format(start_date.strftime('%b %d, %Y')).replace(" 0", " ")
self.assertContains(response, msg)
def test_contains_only_current_entries(self):
"""
Only entries from the current month should be displayed
using default data from create_month_entries()
"""
self.create_month_entries()
old_entry = factories.Entry(**{
'user': self.user,
'start_time': self.month - relativedelta(days=1, hours=1),
'end_time': self.month - relativedelta(days=1)
})
response = self.client.get(self.url)
self.assertEquals(response.status_code, 200)
self.assertFalse(old_entry in response.context['entries'])
def test_single_entry_in_week(self):
"""
When there is a single entry at the end of an ISO week,
the view should show the entries from that entire week
even though they belong in the previous month.
This occurs in April 2012, so we are using that month
        as the basis for our test case
"""
april = utils.add_timezone(
datetime.datetime(month=4, day=1, year=2012)
)
march = utils.add_timezone(
datetime.datetime(month=3, day=26, year=2012)
)
factories.Entry(**{
'user': self.user,
'start_time': april,
'end_time': april + relativedelta(hours=1)
})
factories.Entry(**{
'user': self.user,
'start_time': april + relativedelta(weeks=1),
'end_time': april + relativedelta(weeks=1, hours=1)
})
factories.Entry(**{
'user': self.user,
'start_time': march,
'end_time': march + relativedelta(hours=1)
})
response = self.client.get(self.url + '?{0}'.format(
urlencode({'year': 2012, 'month': 4})
))
self.assertEquals(response.status_code, 200)
# entries context object is a ValuesQuerySet
extra_values = ('start_time', 'end_time', 'comments', 'seconds_paused',
'id', 'location__name', 'project__name',
'activity__name', 'status')
entries = Entry.objects.timespan(april, span='month').date_trunc('month', extra_values)
self.assertEquals(list(entries), list(response.context['entries']))
class MonthlyRejectTestCase(ViewTestMixin, TestCase):
def setUp(self):
super(MonthlyRejectTestCase, self).setUp()
self.user = factories.User()
self.user2 = factories.User()
self.superuser = factories.Superuser()
permissions = Permission.objects.filter(
content_type=ContentType.objects.get_for_model(Entry),
codename__in=('can_clock_in', 'can_clock_out',
'can_pause', 'change_entry')
)
self.user.user_permissions = permissions
self.user2.user_permissions = permissions
self.user.save()
self.user2.save()
self.activity = factories.Activity(code='WRK', name='Work')
self.devl_activity = factories.Activity(
code='devl', name='development', billable=True)
self.sick_activity = factories.Activity(
code="sick", name="sick/personal", billable=False)
self.activity_group_all = factories.ActivityGroup(name='All')
self.activity_group_work = factories.ActivityGroup(name='Client work')
activities = Activity.objects.all()
for activity in activities:
activity.activity_group.add(self.activity_group_all)
if activity != self.sick_activity:
activity.activity_group.add(self.activity_group_work)
self.business = factories.Business()
status = factories.StatusAttribute(
label='Current', enable_timetracking=True)
type_ = factories.TypeAttribute(
label='Web Sites', enable_timetracking=True)
self.project = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user, activity_group=self.activity_group_work)
self.project2 = factories.Project(
type=type_, status=status, business=self.business,
point_person=self.user2, activity_group=self.activity_group_all)
factories.ProjectRelationship(user=self.user, project=self.project)
self.location = factories.Location()
self.now = timezone.now()
self.data = {
'month': self.now.month,
'year': self.now.year,
'yes': 'Yes'
}
self.url = reverse('reject_user_timesheet', args=(self.user.pk,))
def create_entries(self, date, status):
"""Create entries using a date and with a given status"""
factories.Entry(**{
'user': self.user,
'start_time': date,
'end_time': date + relativedelta(hours=1),
'status': status
})
factories.Entry(**{
'user': self.user,
'start_time': date + relativedelta(hours=2),
'end_time': date + relativedelta(hours=3),
'status': status
})
def test_page_permissions(self):
"""
An admin should have the permission to reject a users entries
and unverify them
"""
self.login_user(self.superuser)
self.create_entries(self.now, Entry.VERIFIED)
response = self.client.get(self.url, data=self.data)
self.assertEqual(response.status_code, 200)
response = self.client.post(self.url, data=self.data)
entries = Entry.no_join.filter(status=Entry.VERIFIED)
self.assertEquals(entries.count(), 0)
def test_page_no_permissions(self):
"""
A regular user should not have the permissions to
get or post to the page
"""
self.login_user(self.user)
self.create_entries(timezone.now(), Entry.VERIFIED)
response = self.client.get(self.url, data=self.data)
self.assertEqual(response.status_code, 302)
response = self.client.post(self.url, data=self.data)
entries = Entry.no_join.filter(status=Entry.VERIFIED)
self.assertEquals(entries.count(), 2)
def test_reject_entries_no_date(self):
"""
If you are missing the month/year used to filter the entries
then the reject page should not show
"""
self.login_user(self.superuser)
self.create_entries(timezone.now(), Entry.VERIFIED)
data = {
'month': self.now.month
}
response = self.client.get(self.url, data=data)
self.assertEqual(response.status_code, 302)
data = {
'year': self.now.year
}
response = self.client.get(self.url, data=data)
self.assertEqual(response.status_code, 302)
def test_reject_entries_no_confirm(self):
"""
If a post request contains the month/year but is missing the key
'yes', then the entries are not rejected
"""
self.login_user(self.superuser)
self.create_entries(timezone.now(), Entry.VERIFIED)
data = self.data
data.pop('yes')
self.client.post(self.url, data=data)
entries = Entry.no_join.filter(status=Entry.VERIFIED)
        self.assertEqual(entries.count(), 2)
def test_reject_approved_invoiced_entries(self):
"""Entries that are approved invoiced should not be rejected"""
self.login_user(self.superuser)
self.create_entries(timezone.now(), Entry.APPROVED)
self.create_entries(timezone.now(), Entry.INVOICED)
self.client.post(self.url, data=self.data)
entries = Entry.no_join.filter(status=Entry.UNVERIFIED)
        self.assertEqual(entries.count(), 0)
|
Juniper/ansible
|
refs/heads/juniper-pr
|
plugins/inventory/apache-libcloud.py
|
89
|
#!/usr/bin/env python
# (c) 2013, Sebastien Goasguen <runseb@gmail.com>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
######################################################################
'''
Apache Libcloud generic external inventory script
=================================================
Generates inventory that Ansible can understand by making API request to
Cloud providers using the Apache libcloud library.
This script also assumes there is a libcloud.ini file alongside it
'''
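# A minimal libcloud.ini sketch (illustrative only; the section and option
# names mirror what read_settings() below actually looks up -- the values
# are placeholders, not working credentials):
#
#   [driver]
#   provider = EC2
#   key = your-access-key
#   secret = your-secret-key
#   secure = True
#
#   [cache]
#   cache_path = /tmp
#   cache_max_age = 300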
import sys
import os
import argparse
import re
from time import time
import ConfigParser
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
import libcloud.security as sec
try:
import json
except ImportError:
import simplejson as json
class LibcloudInventory(object):
def __init__(self):
''' Main execution path '''
# Inventory grouped by instance IDs, tags, security groups, regions,
# and availability zones
self.inventory = {}
# Index of hostname (address) to instance ID
self.index = {}
# Read settings and parse CLI arguments
self.read_settings()
self.parse_cli_args()
# Cache
if self.args.refresh_cache:
self.do_api_calls_update_cache()
elif not self.is_cache_valid():
self.do_api_calls_update_cache()
# Data to print
if self.args.host:
data_to_print = self.get_host_info()
elif self.args.list:
# Display list of instances for inventory
if len(self.inventory) == 0:
data_to_print = self.get_inventory_from_cache()
else:
data_to_print = self.json_format_dict(self.inventory, True)
print data_to_print
def is_cache_valid(self):
        ''' Determines whether the cache file has expired or is still valid '''
if os.path.isfile(self.cache_path_cache):
mod_time = os.path.getmtime(self.cache_path_cache)
current_time = time()
if (mod_time + self.cache_max_age) > current_time:
if os.path.isfile(self.cache_path_index):
return True
return False
def read_settings(self):
''' Reads the settings from the libcloud.ini file '''
config = ConfigParser.SafeConfigParser()
libcloud_default_ini_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'libcloud.ini')
libcloud_ini_path = os.environ.get('LIBCLOUD_INI_PATH', libcloud_default_ini_path)
config.read(libcloud_ini_path)
if not config.has_section('driver'):
raise ValueError('libcloud.ini file must contain a [driver] section')
if config.has_option('driver', 'provider'):
self.provider = config.get('driver','provider')
else:
raise ValueError('libcloud.ini does not have a provider defined')
if config.has_option('driver', 'key'):
self.key = config.get('driver','key')
else:
raise ValueError('libcloud.ini does not have a key defined')
if config.has_option('driver', 'secret'):
self.secret = config.get('driver','secret')
else:
raise ValueError('libcloud.ini does not have a secret defined')
if config.has_option('driver', 'host'):
self.host = config.get('driver', 'host')
if config.has_option('driver', 'secure'):
self.secure = config.get('driver', 'secure')
if config.has_option('driver', 'verify_ssl_cert'):
self.verify_ssl_cert = config.get('driver', 'verify_ssl_cert')
if config.has_option('driver', 'port'):
self.port = config.get('driver', 'port')
if config.has_option('driver', 'path'):
self.path = config.get('driver', 'path')
if config.has_option('driver', 'api_version'):
self.api_version = config.get('driver', 'api_version')
Driver = get_driver(getattr(Provider, self.provider))
self.conn = Driver(key=self.key, secret=self.secret, secure=self.secure,
host=self.host, path=self.path)
# Cache related
cache_path = config.get('cache', 'cache_path')
self.cache_path_cache = cache_path + "/ansible-libcloud.cache"
self.cache_path_index = cache_path + "/ansible-libcloud.index"
self.cache_max_age = config.getint('cache', 'cache_max_age')
def parse_cli_args(self):
'''
Command line argument processing
'''
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on libcloud supported providers')
parser.add_argument('--list', action='store_true', default=True,
help='List instances (default: True)')
parser.add_argument('--host', action='store',
help='Get all the variables about a specific instance')
parser.add_argument('--refresh-cache', action='store_true', default=False,
help='Force refresh of cache by making API requests to libcloud supported providers (default: False - use cache files)')
self.args = parser.parse_args()
def do_api_calls_update_cache(self):
'''
Do API calls to a location, and save data in cache files
'''
self.get_nodes()
self.write_to_cache(self.inventory, self.cache_path_cache)
self.write_to_cache(self.index, self.cache_path_index)
def get_nodes(self):
'''
Gets the list of all nodes
'''
for node in self.conn.list_nodes():
self.add_node(node)
def get_node(self, node_id):
'''
Gets details about a specific node
'''
return [node for node in self.conn.list_nodes() if node.id == node_id][0]
def add_node(self, node):
'''
Adds a node to the inventory and index, as long as it is
addressable
'''
# Only want running instances
if node.state != 0:
return
        # Select the best destination address
        if node.public_ips:
            dest = node.public_ips[0]
        else:
            dest = None
        if not dest:
            # Skip instances we cannot address (e.g. private VPC subnet)
            return
# Add to index
self.index[dest] = node.name
# Inventory: Group by instance ID (always a group of 1)
self.inventory[node.name] = [dest]
'''
# Inventory: Group by region
self.push(self.inventory, region, dest)
# Inventory: Group by availability zone
self.push(self.inventory, node.placement, dest)
# Inventory: Group by instance type
self.push(self.inventory, self.to_safe('type_' + node.instance_type), dest)
'''
# Inventory: Group by key pair
if node.extra['keyname']:
self.push(self.inventory, self.to_safe('key_' + node.extra['keyname']), dest)
# Inventory: Group by security group, quick thing to handle single sg
if node.extra['securitygroup']:
self.push(self.inventory, self.to_safe('sg_' + node.extra['securitygroup'][0]), dest)
def get_host_info(self):
'''
Get variables about a specific host
'''
if len(self.index) == 0:
# Need to load index from cache
self.load_index_from_cache()
        if self.args.host not in self.index:
            # try updating the cache
            self.do_api_calls_update_cache()
        if self.args.host not in self.index:
            # host might not exist anymore
            return self.json_format_dict({}, True)
node_id = self.index[self.args.host]
node = self.get_node(node_id)
instance_vars = {}
        for key in vars(node):
            value = getattr(node, key)
key = self.to_safe('ec2_' + key)
# Handle complex types
if type(value) in [int, bool]:
instance_vars[key] = value
elif type(value) in [str, unicode]:
instance_vars[key] = value.strip()
elif type(value) == type(None):
instance_vars[key] = ''
elif key == 'ec2_region':
instance_vars[key] = value.name
elif key == 'ec2_tags':
for k, v in value.iteritems():
key = self.to_safe('ec2_tag_' + k)
instance_vars[key] = v
elif key == 'ec2_groups':
group_ids = []
group_names = []
for group in value:
group_ids.append(group.id)
group_names.append(group.name)
instance_vars["ec2_security_group_ids"] = ','.join(group_ids)
instance_vars["ec2_security_group_names"] = ','.join(group_names)
else:
pass
# TODO Product codes if someone finds them useful
#print key
#print type(value)
#print value
return self.json_format_dict(instance_vars, True)
def push(self, my_dict, key, element):
        '''
        Pushes an element onto a list that may not have been defined in
        the dict
        '''
        if key in my_dict:
            my_dict[key].append(element)
        else:
            my_dict[key] = [element]
def get_inventory_from_cache(self):
'''
Reads the inventory from the cache file and returns it as a JSON
object
'''
        with open(self.cache_path_cache, 'r') as cache:
            json_inventory = cache.read()
        return json_inventory
def load_index_from_cache(self):
'''
        Reads the index from the cache file and sets self.index
'''
        with open(self.cache_path_index, 'r') as cache:
            json_index = cache.read()
        self.index = json.loads(json_index)
def write_to_cache(self, data, filename):
'''
Writes data in JSON format to a file
'''
json_data = self.json_format_dict(data, True)
cache = open(filename, 'w')
cache.write(json_data)
cache.close()
def to_safe(self, word):
'''
Converts 'bad' characters in a string to underscores so they can be
used as Ansible groups
'''
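        # For example (hypothetical input), to_safe('sg_default group')
        # returns 'sg_default_group'.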
        return re.sub(r"[^A-Za-z0-9\-]", "_", word)
def json_format_dict(self, data, pretty=False):
'''
Converts a dict to a JSON object and dumps it as a formatted
string
'''
if pretty:
return json.dumps(data, sort_keys=True, indent=2)
else:
return json.dumps(data)
def main():
LibcloudInventory()
if __name__ == '__main__':
main()
|
openstack/trove
|
refs/heads/master
|
trove/tests/unittests/taskmanager/test_manager.py
|
1
|
# Copyright 2014 eBay Software Foundation
# Copyright [2015] Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest.mock import MagicMock, Mock, patch, PropertyMock
from proboscis.asserts import assert_equal
from trove.backup.models import Backup
from trove.common.exception import TroveError, ReplicationSlaveAttachError
from trove.common import server_group as srv_grp
from trove.instance.tasks import InstanceTasks
from trove.taskmanager.manager import Manager
from trove.taskmanager import models
from trove.taskmanager import service
from trove.tests.unittests import trove_testtools
class TestManager(trove_testtools.TestCase):
def setUp(self):
super(TestManager, self).setUp()
self.manager = Manager()
self.context = trove_testtools.TroveTestContext(self)
self.mock_slave1 = Mock()
self.mock_slave2 = Mock()
type(self.mock_slave1).id = PropertyMock(return_value='some-inst-id')
type(self.mock_slave2).id = PropertyMock(return_value='inst1')
self.mock_old_master = Mock()
type(self.mock_old_master).slaves = PropertyMock(
return_value=[self.mock_slave1, self.mock_slave2])
self.mock_master = Mock()
type(self.mock_master).slaves = PropertyMock(
return_value=[self.mock_slave1, self.mock_slave2])
def tearDown(self):
super(TestManager, self).tearDown()
self.manager = None
def test_getattr_lookup(self):
self.assertTrue(callable(self.manager.delete_cluster))
self.assertTrue(callable(self.manager.mongodb_add_shard_cluster))
def test_most_current_replica(self):
master = Mock()
master.id = 32
def test_case(txn_list, selected_master):
with patch.object(self.manager, '_get_replica_txns',
return_value=txn_list):
result = self.manager._most_current_replica(master, None)
assert_equal(result, selected_master)
with self.assertRaisesRegex(TroveError,
'not all replicating from same'):
test_case([['a', '2a99e-32bf', 2], ['b', '2a', 1]], None)
test_case([['a', '2a99e-32bf', 2]], 'a')
test_case([['a', '2a', 1], ['b', '2a', 2]], 'b')
test_case([['a', '2a', 2], ['b', '2a', 1]], 'a')
test_case([['a', '2a', 1], ['b', '2a', 1]], 'a')
test_case([['a', None, 0]], 'a')
test_case([['a', None, 0], ['b', '2a', 1]], 'b')
def test_detach_replica(self):
slave = Mock()
master = Mock()
with patch.object(models.BuiltInstanceTasks, 'load',
side_effect=[slave, master]):
self.manager.detach_replica(self.context, 'some-inst-id')
slave.detach_replica.assert_called_with(master)
@patch.object(Manager, '_set_task_status')
def test_promote_to_replica_source(self, mock_set_task_status):
with patch.object(models.BuiltInstanceTasks, 'load',
side_effect=[self.mock_slave1,
self.mock_old_master,
self.mock_slave2]):
self.manager.promote_to_replica_source(
self.context, 'some-inst-id')
self.mock_slave1.detach_replica.assert_called_with(
self.mock_old_master, for_failover=True)
self.mock_old_master.attach_replica.assert_called_with(
self.mock_slave1, restart=False)
self.mock_slave1.make_read_only.assert_called_with(False)
self.mock_slave2.detach_replica.assert_called_with(
self.mock_old_master, for_failover=True)
self.mock_slave2.attach_replica.assert_called_with(self.mock_slave1,
restart=True)
self.mock_old_master.demote_replication_master.assert_any_call()
mock_set_task_status.assert_called_with(([self.mock_old_master] +
[self.mock_slave1,
self.mock_slave2]),
InstanceTasks.NONE)
@patch.object(Manager, '_set_task_status')
@patch.object(Manager, '_most_current_replica')
def test_eject_replica_source(self, mock_most_current_replica,
mock_set_task_status):
with patch.object(models.BuiltInstanceTasks, 'load',
side_effect=[self.mock_master, self.mock_slave1,
self.mock_slave2]):
self.manager.eject_replica_source(self.context, 'some-inst-id')
mock_most_current_replica.assert_called_with(self.mock_master,
[self.mock_slave1,
self.mock_slave2])
mock_set_task_status.assert_called_with(([self.mock_master] +
[self.mock_slave1,
self.mock_slave2]),
InstanceTasks.NONE)
@patch.object(Manager, '_set_task_status')
@patch('trove.taskmanager.manager.LOG')
def test_exception_TroveError_promote_to_replica_source(self, *args):
self.mock_slave2.detach_replica = Mock(side_effect=TroveError)
with patch.object(models.BuiltInstanceTasks, 'load',
side_effect=[self.mock_slave1, self.mock_old_master,
self.mock_slave2]):
self.assertRaises(ReplicationSlaveAttachError,
self.manager.promote_to_replica_source,
self.context, 'some-inst-id')
@patch.object(Manager, '_set_task_status')
@patch.object(Manager, '_most_current_replica')
@patch('trove.taskmanager.manager.LOG')
def test_exception_TroveError_eject_replica_source(
self, mock_logging, mock_most_current_replica,
mock_set_task_status):
self.mock_slave2.detach_replica = Mock(side_effect=TroveError)
mock_most_current_replica.return_value = self.mock_slave1
with patch.object(models.BuiltInstanceTasks, 'load',
side_effect=[self.mock_master, self.mock_slave1,
self.mock_slave2]):
self.assertRaises(ReplicationSlaveAttachError,
self.manager.eject_replica_source,
self.context, 'some-inst-id')
@patch.object(Manager, '_set_task_status')
def test_error_promote_to_replica_source(self, *args):
self.mock_slave2.detach_replica = Mock(
side_effect=RuntimeError('Error'))
with patch.object(models.BuiltInstanceTasks, 'load',
side_effect=[self.mock_slave1, self.mock_old_master,
self.mock_slave2]):
self.assertRaisesRegex(RuntimeError, 'Error',
self.manager.promote_to_replica_source,
self.context, 'some-inst-id')
@patch('trove.taskmanager.manager.LOG')
def test_error_demote_replication_master_promote_to_replica_source(
self, mock_logging):
self.mock_old_master.demote_replication_master = Mock(
side_effect=RuntimeError('Error'))
with patch.object(models.BuiltInstanceTasks, 'load',
side_effect=[self.mock_slave1, self.mock_old_master,
self.mock_slave2]):
self.assertRaises(ReplicationSlaveAttachError,
self.manager.promote_to_replica_source,
self.context, 'some-inst-id')
@patch.object(Manager, '_set_task_status')
@patch.object(Manager, '_most_current_replica')
def test_error_eject_replica_source(self, mock_most_current_replica,
mock_set_task_status):
self.mock_slave2.detach_replica = Mock(
side_effect=RuntimeError('Error'))
mock_most_current_replica.return_value = self.mock_slave1
with patch.object(models.BuiltInstanceTasks, 'load',
side_effect=[self.mock_master, self.mock_slave1,
self.mock_slave2]):
self.assertRaisesRegex(RuntimeError, 'Error',
self.manager.eject_replica_source,
self.context, 'some-inst-id')
@patch.object(Backup, 'delete')
@patch.object(models.BuiltInstanceTasks, 'load')
def test_create_replication_slave(self, mock_load, mock_backup_delete):
mock_tasks = Mock()
mock_snapshot = {'dataset': {'snapshot_id': 'test-id'}}
mock_tasks.get_replication_master_snapshot = Mock(
return_value=mock_snapshot)
mock_flavor = Mock()
with patch.object(models.FreshInstanceTasks, 'load',
return_value=mock_tasks):
self.manager.create_instance(self.context, ['id1'], Mock(),
mock_flavor, Mock(), None, None,
'mysql', 'mysql-server', 2,
'temp-backup-id', None,
'some_password', None, Mock(),
'some-master-id', None, None,
None, None)
mock_tasks.get_replication_master_snapshot.assert_called_with(
self.context, 'some-master-id', mock_flavor,
parent_backup_id='temp-backup-id')
mock_backup_delete.assert_called_with(self.context, 'test-id')
@patch.object(models.FreshInstanceTasks, 'load')
@patch.object(Backup, 'delete')
@patch.object(models.BuiltInstanceTasks, 'load')
@patch('trove.taskmanager.manager.LOG')
def test_exception_create_replication_slave(self, mock_logging, mock_tasks,
mock_delete, mock_load):
mock_load.return_value.create_instance = Mock(side_effect=TroveError)
self.assertRaises(TroveError, self.manager.create_instance,
self.context, ['id1', 'id2'], Mock(), Mock(),
Mock(), None, None, 'mysql', 'mysql-server', 2,
'temp-backup-id', None, 'some_password', None,
Mock(), 'some-master-id', None, None, None, None)
def test_AttributeError_create_instance(self):
self.assertRaisesRegex(
AttributeError, 'Cannot create multiple non-replica instances.',
self.manager.create_instance, self.context, ['id1', 'id2'],
Mock(), Mock(), Mock(), None, None, 'mysql', 'mysql-server', 2,
'temp-backup-id', None, 'some_password', None, Mock(), None, None,
None, None, None)
def test_create_instance(self):
mock_tasks = Mock()
mock_flavor = Mock()
mock_override = Mock()
mock_csg = Mock()
type(mock_csg.return_value).id = PropertyMock(
return_value='sg-id')
with patch.object(models.FreshInstanceTasks, 'load',
return_value=mock_tasks):
with patch.object(srv_grp.ServerGroup, 'create', mock_csg):
self.manager.create_instance(
self.context, 'id1', 'inst1', mock_flavor,
'mysql-image-id', None, None, 'mysql', 'mysql-server', 2,
'temp-backup-id', None, 'password', None, mock_override,
None, None, None, None, 'affinity')
mock_tasks.create_instance.assert_called_with(
mock_flavor,
'mysql-image-id', None,
None, 'mysql',
'mysql-server', 2,
'temp-backup-id', None,
'password', None,
mock_override,
None, None, None, None,
{'group': 'sg-id'},
access=None, ds_version=None)
mock_tasks.wait_for_instance.assert_called_with(3600, mock_flavor)
def test_create_cluster(self):
mock_tasks = Mock()
with patch.object(models, 'load_cluster_tasks',
return_value=mock_tasks):
self.manager.create_cluster(self.context, 'some-cluster-id')
mock_tasks.create_cluster.assert_called_with(self.context,
'some-cluster-id')
def test_delete_cluster(self):
mock_tasks = Mock()
with patch.object(models, 'load_cluster_tasks',
return_value=mock_tasks):
self.manager.delete_cluster(self.context, 'some-cluster-id')
mock_tasks.delete_cluster.assert_called_with(self.context,
'some-cluster-id')
def test_shrink_cluster_with_success(self):
self._assert_shrink_cluster(True)
def test_shrink_cluster_with_error(self):
self._assert_shrink_cluster(False)
@patch('trove.taskmanager.manager.EndNotification')
@patch('trove.taskmanager.manager.models.load_cluster_tasks')
def _assert_shrink_cluster(self, success, mock_load, mock_notification):
if success:
mock_load.side_effect = Mock()
else:
mock_load.side_effect = Exception
end_notification = MagicMock()
mock_notification.return_value = end_notification
context = Mock()
cluster_id = Mock()
instance_ids = Mock()
try:
self.manager.shrink_cluster(context, cluster_id, instance_ids)
self.assertTrue(success)
except Exception:
self.assertFalse(success)
mock_load.assert_called_once_with(context, cluster_id)
mock_notification.assert_called_once_with(context,
cluster_id=cluster_id,
instance_ids=instance_ids)
exit_error_type = end_notification.__exit__.call_args_list[0][0][0]
if success:
self.assertFalse(exit_error_type)
else:
self.assertTrue(exit_error_type)
class TestTaskManagerService(trove_testtools.TestCase):
def test_app_factory(self):
test_service = service.app_factory(Mock())
self.assertIsInstance(test_service, service.TaskService)
|
dednal/chromium.src
|
refs/heads/nw12
|
net/tools/quic/benchmark/run_client.py
|
165
|
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Start a client to fetch web pages either using wget or using quic_client.
If --use_wget is set, it uses wget.
Usage: This invocation
  run_client.py --quic_binary_dir=../../../../out/Debug \
      --address=127.0.0.1 --port=5000 --infile=test_urls.json \
      --delay_file=delay.csv --packets_file=packets.csv
fetches pages listed in test_urls.json from a quic server running at
127.0.0.1 on port 5000 using quic binary ../../../../out/Debug/quic_client
and stores the delay in delay.csv and the max received packet number (for
QUIC) in packets.csv.
If --use_wget is present, it will fetch the URLs using wget and ignores
the flags --address, --port, --quic_binary_dir, etc.
"""
import csv
import datetime
import json
import os
import shlex
import subprocess
import sys
from optparse import OptionParser
def Timestamp(datetm=None):
"""Get the timestamp in microseconds.
Args:
datetm: the date and time to be converted to timestamp.
If not set, use the current UTC time.
Returns:
The timestamp in microseconds.
"""
datetm = datetm or datetime.datetime.utcnow()
diff = datetm - datetime.datetime.utcfromtimestamp(0)
timestamp = (diff.days * 86400 + diff.seconds) * 1000000 + diff.microseconds
return timestamp
class PageloadExperiment:
def __init__(self, use_wget, quic_binary_dir, quic_server_address,
quic_server_port):
"""Initialize PageloadExperiment.
Args:
use_wget: Whether to use wget.
quic_binary_dir: Directory for quic_binary.
quic_server_address: IP address of quic server.
quic_server_port: Port of the quic server.
"""
self.use_wget = use_wget
self.quic_binary_dir = quic_binary_dir
self.quic_server_address = quic_server_address
self.quic_server_port = quic_server_port
if not use_wget and not os.path.isfile(quic_binary_dir + '/quic_client'):
raise IOError('There is no quic_client in the given dir: %s.'
% quic_binary_dir)
@classmethod
def ReadPages(cls, json_file):
"""Return the list of URLs from the json_file.
One entry of the list may contain a html link and multiple resources.
"""
page_list = []
with open(json_file) as f:
data = json.load(f)
for page in data['pages']:
url = page['url']
if 'resources' in page:
resources = page['resources']
else:
resources = None
if not resources:
page_list.append([url])
else:
urls = [url]
# For url http://x.com/z/y.html, url_dir is http://x.com/z
url_dir = url.rsplit('/', 1)[0]
for resource in resources:
urls.append(url_dir + '/' + resource)
page_list.append(urls)
return page_list
def DownloadOnePage(self, urls):
"""Download a page emulated by a list of urls.
Args:
urls: list of URLs to fetch.
Returns:
A tuple (page download time, max packet number).
"""
if self.use_wget:
cmd = 'wget -O -'
else:
cmd = '%s/quic_client --port=%s --address=%s' % (
self.quic_binary_dir, self.quic_server_port, self.quic_server_address)
cmd_in_list = shlex.split(cmd)
cmd_in_list.extend(urls)
start_time = Timestamp()
ps_proc = subprocess.Popen(cmd_in_list,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
_std_out, std_err = ps_proc.communicate()
end_time = Timestamp()
delta_time = end_time - start_time
max_packets = 0
if not self.use_wget:
for line in std_err.splitlines():
if line.find('Client: Got packet') >= 0:
elems = line.split()
packet_num = int(elems[4])
max_packets = max(max_packets, packet_num)
return delta_time, max_packets
def RunExperiment(self, infile, delay_file, packets_file=None, num_it=1):
"""Run the pageload experiment.
Args:
infile: Input json file describing the page list.
delay_file: Output file storing delay in csv format.
packets_file: Output file storing max packet number in csv format.
num_it: Number of iterations to run in this experiment.
"""
page_list = self.ReadPages(infile)
header = [urls[0].rsplit('/', 1)[1] for urls in page_list]
header0 = 'wget' if self.use_wget else 'quic'
header = [header0] + header
plt_list = []
packets_list = []
for i in range(num_it):
plt_one_row = [str(i)]
packets_one_row = [str(i)]
for urls in page_list:
time_micros, num_packets = self.DownloadOnePage(urls)
time_secs = time_micros / 1000000.0
plt_one_row.append('%6.3f' % time_secs)
packets_one_row.append('%5d' % num_packets)
plt_list.append(plt_one_row)
packets_list.append(packets_one_row)
with open(delay_file, 'w') as f:
csv_writer = csv.writer(f, delimiter=',')
csv_writer.writerow(header)
for one_row in plt_list:
csv_writer.writerow(one_row)
if packets_file:
with open(packets_file, 'w') as f:
csv_writer = csv.writer(f, delimiter=',')
csv_writer.writerow(header)
for one_row in packets_list:
csv_writer.writerow(one_row)
def main():
parser = OptionParser()
parser.add_option('--use_wget', dest='use_wget', action='store_true',
default=False)
# Note that only debug version generates the log containing packets
# information.
parser.add_option('--quic_binary_dir', dest='quic_binary_dir',
default='../../../../out/Debug')
# For whatever server address you specify, you need to run the
# quic_server on that machine and populate it with the cache containing
# the URLs requested in the --infile.
parser.add_option('--address', dest='quic_server_address',
default='127.0.0.1')
parser.add_option('--port', dest='quic_server_port',
default='5002')
parser.add_option('--delay_file', dest='delay_file', default='delay.csv')
parser.add_option('--packets_file', dest='packets_file',
default='packets.csv')
parser.add_option('--infile', dest='infile', default='test_urls.json')
(options, _) = parser.parse_args()
exp = PageloadExperiment(options.use_wget, options.quic_binary_dir,
options.quic_server_address,
options.quic_server_port)
exp.RunExperiment(options.infile, options.delay_file, options.packets_file)
if __name__ == '__main__':
sys.exit(main())
|
justanotherbrain/HebbLearn
|
refs/heads/master
|
multilayer-demo.py
|
1
|
import sys
import os.path
import HebbLearn as hl
import numpy as np
import matplotlib.pyplot as plt
try:
    import h5py
except ImportError:
    print('h5py cannot be loaded - may cause an error when loading raw data')
if os.path.isfile('processed_data.npy'):
print('==> Load previously saved (preprocessed) data')
unlabeled = np.load('processed_data.npy')
else:
print('==> Loading data')
f = h5py.File('/scratch/mad573/stl10/unlabeled.mat')
u = f['X'][()]
temp = np.reshape(u, (3,96,96,100000))
temp = np.swapaxes(temp,0,2)
unlabeled = np.zeros((96,96,100000))
print('==> Preprocessing data')
for i in range(100000):
unlabeled[:,:,i] = hl.rgb2gray(temp[:,:,:,i])
if np.max(unlabeled[:,:,i])>1:
unlabeled[:,:,i] = unlabeled[:,:,i]/255
np.save('processed_data.npy',unlabeled)
print('==> mean centering data')
pop_mean = np.mean(unlabeled)
unlabeled = unlabeled - pop_mean
pop_std = np.std(unlabeled)
unlabeled = unlabeled/pop_std
#plt.imshow(unlabeled[:,:,0], cmap=plt.get_cmap('gray'))
#plt.show()
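# Command-line usage sketch, based on the argv parsing below (all five
# values must be supplied together, in this order):
#   python multilayer-demo.py <filter_size> <step_size> <out_dim> <LR> <n_samples>
# Note the CLI path parses single scalars while the defaults below are
# per-layer lists; the scalar form assumes MultilayerGHA accepts both.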
if len(sys.argv)>1:
fs = int(sys.argv[1])
ss = int(sys.argv[2])
od = int(sys.argv[3])
lr = float(sys.argv[4])
n_samples = int(sys.argv[5])
else:
fs = [6, 4]
ss = [3, 2]
od = [10, 8]
lr = [1, 1]
n_samples = 500
nl = [hl.TANH, hl.TANH]
ml = hl.MultilayerGHA(num_layers=2, filter_size=fs, step_size=ss, out_dim=od, LR=lr, nonlinearity=nl)
print('==> Training')
layers, output = ml.Train(unlabeled[:,:,:n_samples])
np.save('multi-layers-small.npy',layers)
#print('==> Generating Output of Network')
#output = ml.ImageReconstruction(unlabeled, layers)
plt.imshow(output[:,:,112], cmap=plt.get_cmap('gray'))
plt.show()
|
whip112/Whip112
|
refs/heads/master
|
vendor/packages/pygments/lexers/actionscript.py
|
72
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.actionscript
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for ActionScript and MXML.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, bygroups, using, this, words, default
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
__all__ = ['ActionScriptLexer', 'ActionScript3Lexer', 'MxmlLexer']
class ActionScriptLexer(RegexLexer):
"""
For ActionScript source code.
.. versionadded:: 0.9
"""
name = 'ActionScript'
aliases = ['as', 'actionscript']
filenames = ['*.as']
mimetypes = ['application/x-actionscript', 'text/x-actionscript',
'text/actionscript']
flags = re.DOTALL
tokens = {
'root': [
(r'\s+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex),
(r'[~^*!%&<>|+=:;,/?\\-]+', Operator),
(r'[{}\[\]();.]+', Punctuation),
(words((
'case', 'default', 'for', 'each', 'in', 'while', 'do', 'break',
'return', 'continue', 'if', 'else', 'throw', 'try', 'catch',
'var', 'with', 'new', 'typeof', 'arguments', 'instanceof', 'this',
'switch'), suffix=r'\b'),
Keyword),
(words((
'class', 'public', 'final', 'internal', 'native', 'override', 'private',
'protected', 'static', 'import', 'extends', 'implements', 'interface',
'intrinsic', 'return', 'super', 'dynamic', 'function', 'const', 'get',
'namespace', 'package', 'set'), suffix=r'\b'),
Keyword.Declaration),
(r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
Keyword.Constant),
(words((
'Accessibility', 'AccessibilityProperties', 'ActionScriptVersion',
'ActivityEvent', 'AntiAliasType', 'ApplicationDomain', 'AsBroadcaster', 'Array',
'AsyncErrorEvent', 'AVM1Movie', 'BevelFilter', 'Bitmap', 'BitmapData',
'BitmapDataChannel', 'BitmapFilter', 'BitmapFilterQuality', 'BitmapFilterType',
'BlendMode', 'BlurFilter', 'Boolean', 'ByteArray', 'Camera', 'Capabilities', 'CapsStyle',
'Class', 'Color', 'ColorMatrixFilter', 'ColorTransform', 'ContextMenu',
'ContextMenuBuiltInItems', 'ContextMenuEvent', 'ContextMenuItem',
                'ConvolutionFilter', 'CSMSettings', 'DataEvent', 'Date', 'DefinitionError',
                'DeleteObjectSample', 'Dictionary', 'DisplacementMapFilter', 'DisplayObject',
                'DisplacementMapFilterMode', 'DisplayObjectContainer', 'DropShadowFilter',
'Endian', 'EOFError', 'Error', 'ErrorEvent', 'EvalError', 'Event', 'EventDispatcher',
'EventPhase', 'ExternalInterface', 'FileFilter', 'FileReference',
'FileReferenceList', 'FocusDirection', 'FocusEvent', 'Font', 'FontStyle', 'FontType',
'FrameLabel', 'FullScreenEvent', 'Function', 'GlowFilter', 'GradientBevelFilter',
'GradientGlowFilter', 'GradientType', 'Graphics', 'GridFitType', 'HTTPStatusEvent',
                'IBitmapDrawable', 'ID3Info', 'IDataInput', 'IDataOutput', 'IDynamicPropertyOutput',
                'IDynamicPropertyWriter', 'IEventDispatcher', 'IExternalizable',
'IllegalOperationError', 'IME', 'IMEConversionMode', 'IMEEvent', 'int',
'InteractiveObject', 'InterpolationMethod', 'InvalidSWFError', 'InvokeEvent',
'IOError', 'IOErrorEvent', 'JointStyle', 'Key', 'Keyboard', 'KeyboardEvent', 'KeyLocation',
'LineScaleMode', 'Loader', 'LoaderContext', 'LoaderInfo', 'LoadVars', 'LocalConnection',
'Locale', 'Math', 'Matrix', 'MemoryError', 'Microphone', 'MorphShape', 'Mouse', 'MouseEvent',
'MovieClip', 'MovieClipLoader', 'Namespace', 'NetConnection', 'NetStatusEvent',
'NetStream', 'NewObjectSample', 'Number', 'Object', 'ObjectEncoding', 'PixelSnapping',
'Point', 'PrintJob', 'PrintJobOptions', 'PrintJobOrientation', 'ProgressEvent', 'Proxy',
'QName', 'RangeError', 'Rectangle', 'ReferenceError', 'RegExp', 'Responder', 'Sample',
'Scene', 'ScriptTimeoutError', 'Security', 'SecurityDomain', 'SecurityError',
'SecurityErrorEvent', 'SecurityPanel', 'Selection', 'Shape', 'SharedObject',
'SharedObjectFlushStatus', 'SimpleButton', 'Socket', 'Sound', 'SoundChannel',
'SoundLoaderContext', 'SoundMixer', 'SoundTransform', 'SpreadMethod', 'Sprite',
'StackFrame', 'StackOverflowError', 'Stage', 'StageAlign', 'StageDisplayState',
'StageQuality', 'StageScaleMode', 'StaticText', 'StatusEvent', 'String', 'StyleSheet',
'SWFVersion', 'SyncEvent', 'SyntaxError', 'System', 'TextColorType', 'TextField',
'TextFieldAutoSize', 'TextFieldType', 'TextFormat', 'TextFormatAlign',
'TextLineMetrics', 'TextRenderer', 'TextSnapshot', 'Timer', 'TimerEvent', 'Transform',
'TypeError', 'uint', 'URIError', 'URLLoader', 'URLLoaderDataFormat', 'URLRequest',
                'URLRequestHeader', 'URLRequestMethod', 'URLStream', 'URLVariables', 'VerifyError',
'Video', 'XML', 'XMLDocument', 'XMLList', 'XMLNode', 'XMLNodeType', 'XMLSocket',
'XMLUI'), suffix=r'\b'),
Name.Builtin),
(words((
'decodeURI', 'decodeURIComponent', 'encodeURI', 'escape', 'eval', 'isFinite', 'isNaN',
'isXMLName', 'clearInterval', 'fscommand', 'getTimer', 'getURL', 'getVersion',
'parseFloat', 'parseInt', 'setInterval', 'trace', 'updateAfterEvent',
'unescape'), suffix=r'\b'),
Name.Function),
(r'[$a-zA-Z_]\w*', Name.Other),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-f]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
]
}
class ActionScript3Lexer(RegexLexer):
"""
For ActionScript 3 source code.
.. versionadded:: 0.11
"""
name = 'ActionScript 3'
aliases = ['as3', 'actionscript3']
filenames = ['*.as']
mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
'text/actionscript3']
identifier = r'[$a-zA-Z_]\w*'
    typeidentifier = identifier + r'(?:\.<\w+>)?'
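    # typeidentifier also matches parameterized Vector types,
    # e.g. 'Vector.<int>', in addition to a plain identifier.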
flags = re.DOTALL | re.MULTILINE
tokens = {
'root': [
(r'\s+', Text),
(r'(function\s+)(' + identifier + r')(\s*)(\()',
bygroups(Keyword.Declaration, Name.Function, Text, Operator),
'funcparams'),
(r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' +
typeidentifier + r')',
bygroups(Keyword.Declaration, Text, Name, Text, Punctuation, Text,
Keyword.Type)),
(r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)',
bygroups(Keyword, Text, Name.Namespace, Text)),
(r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()',
bygroups(Keyword, Text, Keyword.Type, Text, Operator)),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex),
(r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)),
(r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
r'throw|try|catch|with|new|typeof|arguments|instanceof|this|'
r'switch|import|include|as|is)\b',
Keyword),
(r'(class|public|final|internal|native|override|private|protected|'
r'static|import|extends|implements|interface|intrinsic|return|super|'
r'dynamic|function|const|get|namespace|package|set)\b',
Keyword.Declaration),
(r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b',
Keyword.Constant),
(r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
r'unescape)\b', Name.Function),
(identifier, Name),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-f]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r'[~^*!%&<>|+=:;,/?\\{}\[\]().-]+', Operator),
],
'funcparams': [
(r'\s+', Text),
(r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' +
typeidentifier + r'|\*)(\s*)',
bygroups(Text, Punctuation, Name, Text, Operator, Text,
Keyword.Type, Text), 'defval'),
(r'\)', Operator, 'type')
],
'type': [
(r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)',
bygroups(Text, Operator, Text, Keyword.Type), '#pop:2'),
(r'\s+', Text, '#pop:2'),
default('#pop:2')
],
'defval': [
(r'(=)(\s*)([^(),]+)(\s*)(,?)',
bygroups(Operator, Text, using(this), Text, Operator), '#pop'),
(r',', Operator, '#pop'),
default('#pop')
]
}
def analyse_text(text):
if re.match(r'\w+\s*:\s*\w', text):
return 0.3
return 0
class MxmlLexer(RegexLexer):
"""
For MXML markup.
Nested AS3 in <script> tags is highlighted by the appropriate lexer.
.. versionadded:: 1.1
"""
flags = re.MULTILINE | re.DOTALL
name = 'MXML'
aliases = ['mxml']
filenames = ['*.mxml']
    mimetypes = ['text/xml', 'application/xml']
tokens = {
'root': [
('[^<&]+', Text),
(r'&\S*?;', Name.Entity),
(r'(\<\!\[CDATA\[)(.*?)(\]\]\>)',
bygroups(String, using(ActionScript3Lexer), String)),
('<!--', Comment, 'comment'),
(r'<\?.*?\?>', Comment.Preproc),
('<![^>]*>', Comment.Preproc),
(r'<\s*[\w:.-]+', Name.Tag, 'tag'),
(r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
],
'comment': [
('[^-]+', Comment),
('-->', Comment, '#pop'),
('-', Comment),
],
'tag': [
(r'\s+', Text),
(r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
(r'/?\s*>', Name.Tag, '#pop'),
],
'attr': [
            (r'\s+', Text),
('".*?"', String, '#pop'),
("'.*?'", String, '#pop'),
(r'[^\s>]+', String, '#pop'),
],
}
|
FireWRT/OpenWrt-Firefly-Libraries
|
refs/heads/master
|
staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python2.7/bdb.py
|
144
|
"""Debugger basics"""
import fnmatch
import sys
import os
import types
__all__ = ["BdbQuit","Bdb","Breakpoint"]
class BdbQuit(Exception):
"""Exception to give up completely"""
class Bdb:
"""Generic Python debugger base class.
This class takes care of details of the trace facility;
a derived class should implement user interaction.
The standard debugger class (pdb.Pdb) is an example.
"""
def __init__(self, skip=None):
self.skip = set(skip) if skip else None
self.breaks = {}
self.fncache = {}
self.frame_returning = None
def canonic(self, filename):
if filename == "<" + filename[1:-1] + ">":
return filename
canonic = self.fncache.get(filename)
if not canonic:
canonic = os.path.abspath(filename)
canonic = os.path.normcase(canonic)
self.fncache[filename] = canonic
return canonic
def reset(self):
import linecache
linecache.checkcache()
self.botframe = None
self._set_stopinfo(None, None)
def trace_dispatch(self, frame, event, arg):
if self.quitting:
return # None
if event == 'line':
return self.dispatch_line(frame)
if event == 'call':
return self.dispatch_call(frame, arg)
if event == 'return':
return self.dispatch_return(frame, arg)
if event == 'exception':
return self.dispatch_exception(frame, arg)
if event == 'c_call':
return self.trace_dispatch
if event == 'c_exception':
return self.trace_dispatch
if event == 'c_return':
return self.trace_dispatch
print 'bdb.Bdb.dispatch: unknown debugging event:', repr(event)
return self.trace_dispatch
def dispatch_line(self, frame):
if self.stop_here(frame) or self.break_here(frame):
self.user_line(frame)
if self.quitting: raise BdbQuit
return self.trace_dispatch
def dispatch_call(self, frame, arg):
# XXX 'arg' is no longer used
if self.botframe is None:
# First call of dispatch since reset()
self.botframe = frame.f_back # (CT) Note that this may also be None!
return self.trace_dispatch
if not (self.stop_here(frame) or self.break_anywhere(frame)):
# No need to trace this function
return # None
self.user_call(frame, arg)
if self.quitting: raise BdbQuit
return self.trace_dispatch
def dispatch_return(self, frame, arg):
if self.stop_here(frame) or frame == self.returnframe:
try:
self.frame_returning = frame
self.user_return(frame, arg)
finally:
self.frame_returning = None
if self.quitting: raise BdbQuit
return self.trace_dispatch
def dispatch_exception(self, frame, arg):
if self.stop_here(frame):
self.user_exception(frame, arg)
if self.quitting: raise BdbQuit
return self.trace_dispatch
# Normally derived classes don't override the following
# methods, but they may if they want to redefine the
# definition of stopping and breakpoints.
def is_skipped_module(self, module_name):
for pattern in self.skip:
if fnmatch.fnmatch(module_name, pattern):
return True
return False
def stop_here(self, frame):
# (CT) stopframe may now also be None, see dispatch_call.
# (CT) the former test for None is therefore removed from here.
if self.skip and \
self.is_skipped_module(frame.f_globals.get('__name__')):
return False
if frame is self.stopframe:
if self.stoplineno == -1:
return False
return frame.f_lineno >= self.stoplineno
while frame is not None and frame is not self.stopframe:
if frame is self.botframe:
return True
frame = frame.f_back
return False
def break_here(self, frame):
filename = self.canonic(frame.f_code.co_filename)
if not filename in self.breaks:
return False
lineno = frame.f_lineno
if not lineno in self.breaks[filename]:
# The line itself has no breakpoint, but maybe the line is the
# first line of a function with breakpoint set by function name.
lineno = frame.f_code.co_firstlineno
if not lineno in self.breaks[filename]:
return False
# flag says ok to delete temp. bp
(bp, flag) = effective(filename, lineno, frame)
if bp:
self.currentbp = bp.number
if (flag and bp.temporary):
self.do_clear(str(bp.number))
return True
else:
return False
def do_clear(self, arg):
raise NotImplementedError, "subclass of bdb must implement do_clear()"
def break_anywhere(self, frame):
return self.canonic(frame.f_code.co_filename) in self.breaks
# Derived classes should override the user_* methods
# to gain control.
def user_call(self, frame, argument_list):
"""This method is called when there is the remote possibility
that we ever need to stop in this function."""
pass
def user_line(self, frame):
"""This method is called when we stop or break at this line."""
pass
def user_return(self, frame, return_value):
"""This method is called when a return trap is set here."""
pass
    def user_exception(self, frame, exc_info):
        """This method is called if an exception occurs,
        but only if we are to stop at or just below this level."""
        exc_type, exc_value, exc_traceback = exc_info
def _set_stopinfo(self, stopframe, returnframe, stoplineno=0):
self.stopframe = stopframe
self.returnframe = returnframe
self.quitting = 0
# stoplineno >= 0 means: stop at line >= the stoplineno
# stoplineno -1 means: don't stop at all
self.stoplineno = stoplineno
# Derived classes and clients can call the following methods
# to affect the stepping state.
def set_until(self, frame): #the name "until" is borrowed from gdb
"""Stop when the line with the line no greater than the current one is
reached or when returning from current frame"""
self._set_stopinfo(frame, frame, frame.f_lineno+1)
def set_step(self):
"""Stop after one line of code."""
# Issue #13183: pdb skips frames after hitting a breakpoint and running
# step commands.
# Restore the trace function in the caller (that may not have been set
# for performance reasons) when returning from the current frame.
if self.frame_returning:
caller_frame = self.frame_returning.f_back
if caller_frame and not caller_frame.f_trace:
caller_frame.f_trace = self.trace_dispatch
self._set_stopinfo(None, None)
def set_next(self, frame):
"""Stop on the next line in or below the given frame."""
self._set_stopinfo(frame, None)
def set_return(self, frame):
"""Stop when returning from the given frame."""
self._set_stopinfo(frame.f_back, frame)
def set_trace(self, frame=None):
"""Start debugging from `frame`.
If frame is not specified, debugging starts from caller's frame.
"""
if frame is None:
frame = sys._getframe().f_back
self.reset()
while frame:
frame.f_trace = self.trace_dispatch
self.botframe = frame
frame = frame.f_back
self.set_step()
sys.settrace(self.trace_dispatch)
def set_continue(self):
# Don't stop except at breakpoints or when finished
self._set_stopinfo(self.botframe, None, -1)
if not self.breaks:
# no breakpoints; run without debugger overhead
sys.settrace(None)
frame = sys._getframe().f_back
while frame and frame is not self.botframe:
del frame.f_trace
frame = frame.f_back
def set_quit(self):
self.stopframe = self.botframe
self.returnframe = None
self.quitting = 1
sys.settrace(None)
# Derived classes and clients can call the following methods
# to manipulate breakpoints. These methods return an
    # error message if something went wrong, None if all is well.
# Set_break prints out the breakpoint line and file:lineno.
# Call self.get_*break*() to see the breakpoints or better
# for bp in Breakpoint.bpbynumber: if bp: bp.bpprint().
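    # A hypothetical session for illustration (path and line are
    # placeholders):
    #     db = Bdb()
    #     err = db.set_break('/path/to/spam.py', 3)
    #     if err: print err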
def set_break(self, filename, lineno, temporary=0, cond = None,
funcname=None):
filename = self.canonic(filename)
import linecache # Import as late as possible
line = linecache.getline(filename, lineno)
if not line:
return 'Line %s:%d does not exist' % (filename,
lineno)
if not filename in self.breaks:
self.breaks[filename] = []
list = self.breaks[filename]
if not lineno in list:
list.append(lineno)
bp = Breakpoint(filename, lineno, temporary, cond, funcname)
def _prune_breaks(self, filename, lineno):
if (filename, lineno) not in Breakpoint.bplist:
self.breaks[filename].remove(lineno)
if not self.breaks[filename]:
del self.breaks[filename]
def clear_break(self, filename, lineno):
filename = self.canonic(filename)
if not filename in self.breaks:
return 'There are no breakpoints in %s' % filename
if lineno not in self.breaks[filename]:
return 'There is no breakpoint at %s:%d' % (filename,
lineno)
# If there's only one bp in the list for that file,line
# pair, then remove the breaks entry
for bp in Breakpoint.bplist[filename, lineno][:]:
bp.deleteMe()
self._prune_breaks(filename, lineno)
def clear_bpbynumber(self, arg):
try:
number = int(arg)
        except ValueError:
return 'Non-numeric breakpoint number (%s)' % arg
try:
bp = Breakpoint.bpbynumber[number]
except IndexError:
return 'Breakpoint number (%d) out of range' % number
if not bp:
return 'Breakpoint (%d) already deleted' % number
bp.deleteMe()
self._prune_breaks(bp.file, bp.line)
def clear_all_file_breaks(self, filename):
filename = self.canonic(filename)
if not filename in self.breaks:
return 'There are no breakpoints in %s' % filename
for line in self.breaks[filename]:
blist = Breakpoint.bplist[filename, line]
for bp in blist:
bp.deleteMe()
del self.breaks[filename]
def clear_all_breaks(self):
if not self.breaks:
return 'There are no breakpoints'
for bp in Breakpoint.bpbynumber:
if bp:
bp.deleteMe()
self.breaks = {}
def get_break(self, filename, lineno):
filename = self.canonic(filename)
return filename in self.breaks and \
lineno in self.breaks[filename]
def get_breaks(self, filename, lineno):
filename = self.canonic(filename)
return filename in self.breaks and \
lineno in self.breaks[filename] and \
Breakpoint.bplist[filename, lineno] or []
def get_file_breaks(self, filename):
filename = self.canonic(filename)
if filename in self.breaks:
return self.breaks[filename]
else:
return []
def get_all_breaks(self):
return self.breaks
# Derived classes and clients can call the following method
# to get a data structure representing a stack trace.
def get_stack(self, f, t):
stack = []
if t and t.tb_frame is f:
t = t.tb_next
while f is not None:
stack.append((f, f.f_lineno))
if f is self.botframe:
break
f = f.f_back
stack.reverse()
i = max(0, len(stack) - 1)
while t is not None:
stack.append((t.tb_frame, t.tb_lineno))
t = t.tb_next
if f is None:
i = max(0, len(stack) - 1)
return stack, i
#
def format_stack_entry(self, frame_lineno, lprefix=': '):
import linecache, repr
frame, lineno = frame_lineno
filename = self.canonic(frame.f_code.co_filename)
s = '%s(%r)' % (filename, lineno)
if frame.f_code.co_name:
s = s + frame.f_code.co_name
else:
s = s + "<lambda>"
if '__args__' in frame.f_locals:
args = frame.f_locals['__args__']
else:
args = None
if args:
s = s + repr.repr(args)
else:
s = s + '()'
if '__return__' in frame.f_locals:
rv = frame.f_locals['__return__']
s = s + '->'
s = s + repr.repr(rv)
line = linecache.getline(filename, lineno, frame.f_globals)
if line: s = s + lprefix + line.strip()
return s
# The following two methods can be called by clients to use
# a debugger to debug a statement, given as a string.
def run(self, cmd, globals=None, locals=None):
if globals is None:
import __main__
globals = __main__.__dict__
if locals is None:
locals = globals
self.reset()
sys.settrace(self.trace_dispatch)
if not isinstance(cmd, types.CodeType):
cmd = cmd+'\n'
try:
exec cmd in globals, locals
except BdbQuit:
pass
finally:
self.quitting = 1
sys.settrace(None)
def runeval(self, expr, globals=None, locals=None):
if globals is None:
import __main__
globals = __main__.__dict__
if locals is None:
locals = globals
self.reset()
sys.settrace(self.trace_dispatch)
if not isinstance(expr, types.CodeType):
expr = expr+'\n'
try:
return eval(expr, globals, locals)
except BdbQuit:
pass
finally:
self.quitting = 1
sys.settrace(None)
def runctx(self, cmd, globals, locals):
# B/W compatibility
self.run(cmd, globals, locals)
# This method is more useful to debug a single function call.
def runcall(self, func, *args, **kwds):
self.reset()
sys.settrace(self.trace_dispatch)
res = None
try:
res = func(*args, **kwds)
except BdbQuit:
pass
finally:
self.quitting = 1
sys.settrace(None)
return res
def set_trace():
Bdb().set_trace()
class Breakpoint:
"""Breakpoint class
Implements temporary breakpoints, ignore counts, disabling and
(re)-enabling, and conditionals.
Breakpoints are indexed by number through bpbynumber and by
the file,line tuple using bplist. The former points to a
single instance of class Breakpoint. The latter points to a
list of such instances since there may be more than one
breakpoint per line.
"""
# XXX Keeping state in the class is a mistake -- this means
# you cannot have more than one active Bdb instance.
next = 1 # Next bp to be assigned
bplist = {} # indexed by (file, lineno) tuple
bpbynumber = [None] # Each entry is None or an instance of Bpt
# index 0 is unused, except for marking an
# effective break .... see effective()
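    # Illustrative shape of the shared state after two breakpoints are set
    # on the same line (hypothetical file/line):
    #   Breakpoint.bpbynumber -> [None, <bp 1>, <bp 2>]
    #   Breakpoint.bplist     -> {('spam.py', 3): [<bp 1>, <bp 2>]}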
def __init__(self, file, line, temporary=0, cond=None, funcname=None):
self.funcname = funcname
# Needed if funcname is not None.
self.func_first_executable_line = None
self.file = file # This better be in canonical form!
self.line = line
self.temporary = temporary
self.cond = cond
self.enabled = 1
self.ignore = 0
self.hits = 0
self.number = Breakpoint.next
Breakpoint.next = Breakpoint.next + 1
# Build the two lists
self.bpbynumber.append(self)
if (file, line) in self.bplist:
self.bplist[file, line].append(self)
else:
self.bplist[file, line] = [self]
def deleteMe(self):
index = (self.file, self.line)
self.bpbynumber[self.number] = None # No longer in list
self.bplist[index].remove(self)
if not self.bplist[index]:
# No more bp for this f:l combo
del self.bplist[index]
def enable(self):
self.enabled = 1
def disable(self):
self.enabled = 0
def bpprint(self, out=None):
if out is None:
out = sys.stdout
if self.temporary:
disp = 'del '
else:
disp = 'keep '
if self.enabled:
disp = disp + 'yes '
else:
disp = disp + 'no '
print >>out, '%-4dbreakpoint %s at %s:%d' % (self.number, disp,
self.file, self.line)
if self.cond:
print >>out, '\tstop only if %s' % (self.cond,)
if self.ignore:
print >>out, '\tignore next %d hits' % (self.ignore)
if (self.hits):
if (self.hits > 1): ss = 's'
else: ss = ''
print >>out, ('\tbreakpoint already hit %d time%s' %
(self.hits, ss))
# -----------end of Breakpoint class----------
def checkfuncname(b, frame):
"""Check whether we should break here because of `b.funcname`."""
if not b.funcname:
# Breakpoint was set via line number.
if b.line != frame.f_lineno:
# Breakpoint was set at a line with a def statement and the function
# defined is called: don't break.
return False
return True
# Breakpoint set via function name.
if frame.f_code.co_name != b.funcname:
# It's not a function call, but rather execution of def statement.
return False
# We are in the right frame.
if not b.func_first_executable_line:
# The function is entered for the 1st time.
b.func_first_executable_line = frame.f_lineno
if b.func_first_executable_line != frame.f_lineno:
# But we are not at the first line number: don't break.
return False
return True
# Determines if there is an effective (active) breakpoint at this
# line of code. Returns breakpoint number or 0 if none
def effective(file, line, frame):
"""Determine which breakpoint for this file:line is to be acted upon.
Called only if we know there is a bpt at this
location. Returns breakpoint that was triggered and a flag
that indicates if it is ok to delete a temporary bp.
"""
possibles = Breakpoint.bplist[file,line]
for i in range(0, len(possibles)):
b = possibles[i]
if b.enabled == 0:
continue
if not checkfuncname(b, frame):
continue
# Count every hit when bp is enabled
b.hits = b.hits + 1
if not b.cond:
# If unconditional, and ignoring,
# go on to next, else break
if b.ignore > 0:
b.ignore = b.ignore -1
continue
else:
# breakpoint and marker that's ok
# to delete if temporary
return (b,1)
else:
# Conditional bp.
# Ignore count applies only to those bpt hits where the
# condition evaluates to true.
try:
val = eval(b.cond, frame.f_globals,
frame.f_locals)
if val:
if b.ignore > 0:
b.ignore = b.ignore -1
# continue
else:
return (b,1)
# else:
# continue
except:
# if eval fails, most conservative
# thing is to stop on breakpoint
# regardless of ignore count.
# Don't delete temporary,
# as another hint to user.
return (b,0)
return (None, None)
# -------------------- testing --------------------
class Tdb(Bdb):
def user_call(self, frame, args):
name = frame.f_code.co_name
if not name: name = '???'
print '+++ call', name, args
def user_line(self, frame):
import linecache
name = frame.f_code.co_name
if not name: name = '???'
fn = self.canonic(frame.f_code.co_filename)
line = linecache.getline(fn, frame.f_lineno, frame.f_globals)
print '+++', fn, frame.f_lineno, name, ':', line.strip()
def user_return(self, frame, retval):
print '+++ return', retval
def user_exception(self, frame, exc_stuff):
print '+++ exception', exc_stuff
self.set_continue()
def foo(n):
print 'foo(', n, ')'
x = bar(n*10)
print 'bar returned', x
def bar(a):
print 'bar(', a, ')'
return a/2
def test():
t = Tdb()
t.run('import bdb; bdb.foo(10)')
# end
|
i-maravic/ns-3
|
refs/heads/master
|
examples/energy/examples-to-run.py
|
196
|
#! /usr/bin/env python
## -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
# A list of C++ examples to run in order to ensure that they remain
# buildable and runnable over time. Each tuple in the list contains
#
# (example_name, do_run, do_valgrind_run).
#
# See test.py for more information.
cpp_examples = [
("energy-model-example", "True", "True"),
]
# A list of Python examples to run in order to ensure that they remain
# runnable over time. Each tuple in the list contains
#
# (example_name, do_run).
#
# See test.py for more information.
python_examples = []
|
dursk/django
|
refs/heads/master
|
tests/i18n/sampleproject/update_catalogs.py
|
344
|
#!/usr/bin/env python
"""
Helper script to update sampleproject's translation catalogs.
When a bug has been identified related to i18n, this helps capture the issue
by using catalogs created from management commands.
Example:
The string "Two %% Three %%%" renders differently using trans and blocktrans.
This issue is difficult to debug; it could be a problem with extraction,
interpolation, or both.
How this script helps:
* Add {% trans "Two %% Three %%%" %} and blocktrans equivalent to templates.
* Run this script.
* Test extraction - verify the new msgid in sampleproject's django.po.
* Add a translation to sampleproject's django.po.
* Run this script.
* Test interpolation - verify templatetag rendering, test each in a template
that is rendered using an activated language from sampleproject's locale.
* Tests should fail, issue captured.
* Fix issue.
* Run this script.
* Tests all pass.
"""
import os
import re
import sys
proj_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.abspath(os.path.join(proj_dir, '..', '..', '..')))
def update_translation_catalogs():
"""Run makemessages and compilemessages in sampleproject."""
from django.core.management import call_command
prev_cwd = os.getcwd()
os.chdir(proj_dir)
call_command('makemessages')
call_command('compilemessages')
# keep the diff friendly - remove 'POT-Creation-Date'
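    # e.g. the regex below drops a header line such as:
    #   "POT-Creation-Date: 2015-01-01 00:00+0000\n"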
pofile = os.path.join(proj_dir, 'locale', 'fr', 'LC_MESSAGES', 'django.po')
with open(pofile) as f:
content = f.read()
content = re.sub(r'^"POT-Creation-Date.+$\s', '', content, flags=re.MULTILINE)
with open(pofile, 'w') as f:
f.write(content)
os.chdir(prev_cwd)
if __name__ == "__main__":
update_translation_catalogs()
|
ftl-toolbox/lib_openshift
|
refs/heads/master
|
test/test_v1_config_map_key_selector.py
|
2
|
# coding: utf-8
"""
OpenAPI spec version:
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import os
import sys
import unittest
import lib_openshift
from lib_openshift.rest import ApiException
from lib_openshift.models.v1_config_map_key_selector import V1ConfigMapKeySelector
class TestV1ConfigMapKeySelector(unittest.TestCase):
""" V1ConfigMapKeySelector unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1ConfigMapKeySelector(self):
"""
Test V1ConfigMapKeySelector
"""
model = lib_openshift.models.v1_config_map_key_selector.V1ConfigMapKeySelector()
if __name__ == '__main__':
unittest.main()
|
erickt/hue
|
refs/heads/master
|
desktop/core/ext-py/tablib-develop/tablib/packages/odf/svg.py
|
91
|
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2007 Sรธren Roug, European Environment Agency
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#
from namespaces import SVGNS
from element import Element
from draw import DrawElement
# Autogenerated
def DefinitionSrc(**args):
return Element(qname = (SVGNS,'definition-src'), **args)
def Desc(**args):
return Element(qname = (SVGNS,'desc'), **args)
def FontFaceFormat(**args):
return Element(qname = (SVGNS,'font-face-format'), **args)
def FontFaceName(**args):
return Element(qname = (SVGNS,'font-face-name'), **args)
def FontFaceSrc(**args):
return Element(qname = (SVGNS,'font-face-src'), **args)
def FontFaceUri(**args):
return Element(qname = (SVGNS,'font-face-uri'), **args)
def Lineargradient(**args):
return DrawElement(qname = (SVGNS,'linearGradient'), **args)
def Radialgradient(**args):
return DrawElement(qname = (SVGNS,'radialGradient'), **args)
def Stop(**args):
return Element(qname = (SVGNS,'stop'), **args)
def Title(**args):
return Element(qname = (SVGNS,'title'), **args)
|
cooperra/antlr4
|
refs/heads/master
|
runtime/Python3/src/antlr4/IntervalSet.py
|
17
|
from io import StringIO
import unittest
from antlr4.Token import Token
# need forward declarations
IntervalSet = None
class IntervalSet(object):
def __init__(self):
self.intervals = None
self.readOnly = False
def __iter__(self):
if self.intervals is not None:
for i in self.intervals:
for c in i:
yield c
def __getitem__(self, item):
i = 0
for k in self:
if i==item:
return k
else:
i += 1
return Token.INVALID_TYPE
def addOne(self, v:int):
self.addRange(range(v, v+1))
def addRange(self, v:range):
if self.intervals is None:
self.intervals = list()
self.intervals.append(v)
else:
# find insert pos
k = 0
for i in self.intervals:
# distinct range -> insert
if v.stop<i.start:
self.intervals.insert(k, v)
return
# contiguous range -> adjust
elif v.stop==i.start:
self.intervals[k] = range(v.start, i.stop)
return
# overlapping range -> adjust and reduce
elif v.start<=i.stop:
self.intervals[k] = range(min(i.start,v.start), max(i.stop,v.stop))
self.reduce(k)
return
k += 1
# greater than any existing
self.intervals.append(v)
def addSet(self, other:IntervalSet):
if other.intervals is not None:
for i in other.intervals:
self.addRange(i)
return self
def reduce(self, k:int):
# only need to reduce if k is not the last
if k<len(self.intervals)-1:
l = self.intervals[k]
r = self.intervals[k+1]
# if r contained in l
if l.stop >= r.stop:
self.intervals.pop(k+1)
self.reduce(k)
elif l.stop >= r.start:
self.intervals[k] = range(l.start, r.stop)
self.intervals.pop(k+1)
def complement(self, start, stop):
result = IntervalSet()
result.addRange(range(start,stop+1))
for i in self.intervals:
result.removeRange(i)
return result
def __contains__(self, item):
if self.intervals is None:
return False
else:
for i in self.intervals:
if item in i:
return True
return False
def __len__(self):
xlen = 0
for i in self.intervals:
xlen += len(i)
return xlen
def removeRange(self, v):
if v.start==v.stop-1:
self.removeOne(v.start)
elif self.intervals is not None:
k = 0
for i in self.intervals:
# intervals are ordered
if v.stop<=i.start:
return
# check for including range, split it
elif v.start>i.start and v.stop<i.stop:
self.intervals[k] = range(i.start, v.start)
x = range(v.stop, i.stop)
                    # insert the upper remainder after the lower remainder
                    # so the intervals list stays sorted
                    self.intervals.insert(k + 1, x)
return
# check for included range, remove it
elif v.start<=i.start and v.stop>=i.stop:
self.intervals.pop(k)
k = k - 1 # need another pass
# check for lower boundary
elif v.start<i.stop:
self.intervals[k] = range(i.start, v.start)
# check for upper boundary
elif v.stop<i.stop:
self.intervals[k] = range(v.stop, i.stop)
k += 1
def removeOne(self, v):
if self.intervals is not None:
k = 0
for i in self.intervals:
# intervals is ordered
if v<i.start:
return
# check for single value range
elif v==i.start and v==i.stop-1:
self.intervals.pop(k)
return
# check for lower boundary
elif v==i.start:
self.intervals[k] = range(i.start+1, i.stop)
return
# check for upper boundary
elif v==i.stop-1:
self.intervals[k] = range(i.start, i.stop-1)
return
# split existing range
elif v<i.stop-1:
                    x = range(i.start, v)
                    # range objects are immutable, so replace the interval
                    # instead of assigning to i.start
                    self.intervals[k] = range(v + 1, i.stop)
                    self.intervals.insert(k, x)
return
k += 1
def toString(self, literalNames:list, symbolicNames:list):
if self.intervals is None:
return "{}"
with StringIO() as buf:
if len(self)>1:
buf.write("{")
first = True
for i in self.intervals:
for j in i:
if not first:
buf.write(", ")
buf.write(self.elementName(literalNames, symbolicNames, j))
first = False
if len(self)>1:
buf.write("}")
return buf.getvalue()
def elementName(self, literalNames:list, symbolicNames:list, a:int):
if a==Token.EOF:
return "<EOF>"
elif a==Token.EPSILON:
return "<EPSILON>"
else:
if a<len(literalNames):
return literalNames[a]
if a<len(symbolicNames):
return symbolicNames[a]
return "<UNKNOWN>"
class TestIntervalSet(unittest.TestCase):
def testEmpty(self):
s = IntervalSet()
self.assertIsNone(s.intervals)
self.assertFalse(30 in s)
def testOne(self):
s = IntervalSet()
s.addOne(30)
self.assertTrue(30 in s)
self.assertFalse(29 in s)
self.assertFalse(31 in s)
def testTwo(self):
s = IntervalSet()
s.addOne(30)
s.addOne(40)
self.assertTrue(30 in s)
self.assertTrue(40 in s)
self.assertFalse(35 in s)
def testRange(self):
s = IntervalSet()
s.addRange(range(30,41))
self.assertTrue(30 in s)
self.assertTrue(40 in s)
self.assertTrue(35 in s)
def testDistinct1(self):
s = IntervalSet()
s.addRange(range(30,32))
s.addRange(range(40,42))
self.assertEquals(2,len(s.intervals))
self.assertTrue(30 in s)
self.assertTrue(40 in s)
self.assertFalse(35 in s)
def testDistinct2(self):
s = IntervalSet()
s.addRange(range(40,42))
s.addRange(range(30,32))
self.assertEquals(2,len(s.intervals))
self.assertTrue(30 in s)
self.assertTrue(40 in s)
self.assertFalse(35 in s)
def testContiguous1(self):
s = IntervalSet()
s.addRange(range(30,36))
s.addRange(range(36,41))
self.assertEquals(1,len(s.intervals))
self.assertTrue(30 in s)
self.assertTrue(40 in s)
self.assertTrue(35 in s)
def testContiguous2(self):
s = IntervalSet()
s.addRange(range(36,41))
s.addRange(range(30,36))
self.assertEquals(1,len(s.intervals))
self.assertTrue(30 in s)
self.assertTrue(40 in s)
def testOverlapping1(self):
s = IntervalSet()
s.addRange(range(30,40))
s.addRange(range(35,45))
self.assertEquals(1,len(s.intervals))
self.assertTrue(30 in s)
self.assertTrue(44 in s)
def testOverlapping2(self):
s = IntervalSet()
s.addRange(range(35,45))
s.addRange(range(30,40))
self.assertEquals(1,len(s.intervals))
self.assertTrue(30 in s)
self.assertTrue(44 in s)
def testOverlapping3(self):
s = IntervalSet()
s.addRange(range(30,32))
s.addRange(range(40,42))
s.addRange(range(50,52))
s.addRange(range(20,61))
self.assertEquals(1,len(s.intervals))
self.assertTrue(20 in s)
self.assertTrue(60 in s)
def testComplement(self):
s = IntervalSet()
s.addRange(range(10,21))
c = s.complement(1,100)
self.assertTrue(1 in c)
self.assertTrue(100 in c)
self.assertTrue(10 not in c)
self.assertTrue(20 not in c)
|
brandonium21/snowflake
|
refs/heads/master
|
snowflakeEnv/lib/python2.7/site-packages/sqlalchemy/dialects/firebird/kinterbasdb.py
|
33
|
# firebird/kinterbasdb.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: firebird+kinterbasdb
:name: kinterbasdb
:dbapi: kinterbasdb
:connectstring: firebird+kinterbasdb://user:password@host:port/path/to/db\
[?key=value&key=value...]
:url: http://firebirdsql.org/index.php?op=devel&sub=python
Arguments
----------
The Kinterbasdb backend accepts the ``enable_rowcount`` and ``retaining``
arguments accepted by the :mod:`sqlalchemy.dialects.firebird.fdb` dialect.
In addition, it also accepts the following:
* ``type_conv`` - select the kind of mapping done on the types: by default
SQLAlchemy uses 200 with Unicode, datetime and decimal support. See
the linked documents below for further information.
* ``concurrency_level`` - set the backend policy with regards to threading
issues: by default SQLAlchemy uses policy 1. See the linked documents
below for further information.
.. seealso::
http://sourceforge.net/projects/kinterbasdb
http://kinterbasdb.sourceforge.net/dist_docs/usage.html#adv_param_conv_dynamic_type_translation
http://kinterbasdb.sourceforge.net/dist_docs/usage.html#special_issue_concurrency
"""
from .base import FBDialect, FBExecutionContext
from ... import util, types as sqltypes
from re import match
import decimal
class _kinterbasdb_numeric(object):
def bind_processor(self, dialect):
def process(value):
if isinstance(value, decimal.Decimal):
return str(value)
else:
return value
return process
class _FBNumeric_kinterbasdb(_kinterbasdb_numeric, sqltypes.Numeric):
pass
class _FBFloat_kinterbasdb(_kinterbasdb_numeric, sqltypes.Float):
pass
class FBExecutionContext_kinterbasdb(FBExecutionContext):
@property
def rowcount(self):
if self.execution_options.get('enable_rowcount',
self.dialect.enable_rowcount):
return self.cursor.rowcount
else:
return -1
class FBDialect_kinterbasdb(FBDialect):
driver = 'kinterbasdb'
supports_sane_rowcount = False
supports_sane_multi_rowcount = False
execution_ctx_cls = FBExecutionContext_kinterbasdb
supports_native_decimal = True
colspecs = util.update_copy(
FBDialect.colspecs,
{
sqltypes.Numeric: _FBNumeric_kinterbasdb,
sqltypes.Float: _FBFloat_kinterbasdb,
}
)
def __init__(self, type_conv=200, concurrency_level=1,
enable_rowcount=True,
retaining=False, **kwargs):
super(FBDialect_kinterbasdb, self).__init__(**kwargs)
self.enable_rowcount = enable_rowcount
self.type_conv = type_conv
self.concurrency_level = concurrency_level
self.retaining = retaining
if enable_rowcount:
self.supports_sane_rowcount = True
@classmethod
def dbapi(cls):
return __import__('kinterbasdb')
def do_execute(self, cursor, statement, parameters, context=None):
# kinterbase does not accept a None, but wants an empty list
# when there are no arguments.
cursor.execute(statement, parameters or [])
def do_rollback(self, dbapi_connection):
dbapi_connection.rollback(self.retaining)
def do_commit(self, dbapi_connection):
dbapi_connection.commit(self.retaining)
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
if opts.get('port'):
opts['host'] = "%s/%s" % (opts['host'], opts['port'])
del opts['port']
opts.update(url.query)
util.coerce_kw_type(opts, 'type_conv', int)
type_conv = opts.pop('type_conv', self.type_conv)
concurrency_level = opts.pop('concurrency_level',
self.concurrency_level)
if self.dbapi is not None:
initialized = getattr(self.dbapi, 'initialized', None)
if initialized is None:
# CVS rev 1.96 changed the name of the attribute:
# http://kinterbasdb.cvs.sourceforge.net/viewvc/kinterbasdb/
# Kinterbasdb-3.0/__init__.py?r1=1.95&r2=1.96
initialized = getattr(self.dbapi, '_initialized', False)
if not initialized:
self.dbapi.init(type_conv=type_conv,
concurrency_level=concurrency_level)
return ([], opts)
def _get_server_version_info(self, connection):
"""Get the version of the Firebird server used by a connection.
Returns a tuple of (`major`, `minor`, `build`), three integers
representing the version of the attached server.
"""
# This is the simpler approach (the other uses the services api),
        # which for backward compatibility reasons returns a string like
# LI-V6.3.3.12981 Firebird 2.0
# where the first version is a fake one resembling the old
# Interbase signature.
fbconn = connection.connection
version = fbconn.server_version
return self._parse_version_info(version)
def _parse_version_info(self, version):
        m = match(
            r'\w+-V(\d+)\.(\d+)\.(\d+)\.(\d+)( \w+ (\d+)\.(\d+))?', version)
if not m:
raise AssertionError(
"Could not determine version from string '%s'" % version)
        if m.group(5) is not None:
return tuple([int(x) for x in m.group(6, 7, 4)] + ['firebird'])
else:
return tuple([int(x) for x in m.group(1, 2, 3)] + ['interbase'])
def is_disconnect(self, e, connection, cursor):
if isinstance(e, (self.dbapi.OperationalError,
self.dbapi.ProgrammingError)):
msg = str(e)
return ('Unable to complete network request to host' in msg or
'Invalid connection state' in msg or
'Invalid cursor state' in msg or
'connection shutdown' in msg)
else:
return False
dialect = FBDialect_kinterbasdb
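# --- Hedged usage sketch (added for illustration; not part of the original
# SQLAlchemy module). The credentials, host, and database path below are
# hypothetical, and running this requires the kinterbasdb package to be
# installed. type_conv and concurrency_level are the URL query arguments
# consumed in create_connect_args() above.
if __name__ == '__main__':
    from sqlalchemy import create_engine
    engine = create_engine(
        'firebird+kinterbasdb://sysdba:masterkey@localhost:3050'
        '/tmp/test.fdb?type_conv=200&concurrency_level=1')
    print(engine.dialect.driver)  # -> 'kinterbasdb'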
|
IllusionRom-deprecated/android_platform_tools_idea
|
refs/heads/master
|
python/lib/Lib/types.py
|
81
|
"""Define names for all type symbols known in the standard interpreter.
Types that are part of optional modules (e.g. array) are not listed.
"""
import sys
# Iterators in Python aren't a matter of type but of protocol. A large
# and changing number of builtin types implement *some* flavor of
# iterator. Don't check the type! Use hasattr to check for both
# "__iter__" and "next" attributes instead.
NoneType = type(None)
TypeType = type
ObjectType = object
IntType = int
LongType = long
FloatType = float
BooleanType = bool
try:
ComplexType = complex
except NameError:
pass
StringType = str
# StringTypes is already outdated. Instead of writing "type(x) in
# types.StringTypes", you should use "isinstance(x, basestring)". But
# we keep it around for compatibility with Python 2.2.
try:
UnicodeType = unicode
StringTypes = (StringType, UnicodeType)
except NameError:
StringTypes = (StringType,)
# XXX: no buffer in jython
#BufferType = buffer
TupleType = tuple
ListType = list
DictType = DictionaryType = dict
def _f(): pass
FunctionType = type(_f)
LambdaType = type(lambda: None) # Same as FunctionType
try:
CodeType = type(_f.func_code)
except RuntimeError:
# Execution in restricted environment
pass
def g():
yield 1
GeneratorType = type(g())
del g
class _C:
def _m(self): pass
ClassType = type(_C)
UnboundMethodType = type(_C._m) # Same as MethodType
_x = _C()
InstanceType = type(_x)
MethodType = type(_x._m)
BuiltinFunctionType = type(len)
BuiltinMethodType = type([].append) # Same as BuiltinFunctionType
# XXX: Jython sys is not a real module
#ModuleType = type(sys)
ModuleType = type(sys.modules[__name__])
FileType = file
XRangeType = xrange
try:
raise TypeError
except TypeError:
try:
tb = sys.exc_info()[2]
TracebackType = type(tb)
FrameType = type(tb.tb_frame)
except AttributeError:
# In the restricted environment, exc_info returns (None, None,
# None) Then, tb.tb_frame gives an attribute error
pass
tb = None; del tb
SliceType = slice
EllipsisType = type(Ellipsis)
DictProxyType = type(TypeType.__dict__)
NotImplementedType = type(NotImplemented)
del sys, _f, _C, _x # Not for export
|
akatsoulas/mozillians
|
refs/heads/master
|
mozillians/groups/middleware.py
|
2
|
import re
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from mozillians.common.middleware import safe_query_string
from mozillians.groups.models import Group
class OldGroupRedirectionMiddleware(object):
"""
Redirect requests for groups from /group/<id>-<url> to
/group/<url> to avoid breaking group urls with the new url
schema.
"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
response = self.get_response(request)
group_url = re.match(r'^/group/(?P<id>\d+)-(?P<url>[-\w]+)/$',
request.path_info)
if (response.status_code == 404
and group_url and Group.objects.filter(url=group_url.group('url')).exists()):
newurl = reverse('groups:show_group',
kwargs={'url': group_url.group('url')})
if request.GET:
with safe_query_string(request):
newurl += '?' + request.META['QUERY_STRING']
return HttpResponseRedirect(newurl)
return response
|
michath/ConMonkey
|
refs/heads/master
|
media/webrtc/trunk/tools/gyp/test/win/gyptest-link-debug-info.py
|
344
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure debug info setting is extracted properly.
"""
import TestGyp
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('debug-info.gyp', chdir=CHDIR)
test.build('debug-info.gyp', test.ALL, chdir=CHDIR)
suffix = '.exe.pdb' if test.format == 'ninja' else '.pdb'
test.built_file_must_not_exist('test_debug_off%s' % suffix, chdir=CHDIR)
test.built_file_must_exist('test_debug_on%s' % suffix, chdir=CHDIR)
test.pass_test()
|
anhstudios/swganh
|
refs/heads/develop
|
data/scripts/templates/object/draft_schematic/bio_engineer/dna_template/shared_dna_template_gurreck.py
|
2
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Intangible()
result.template = "object/draft_schematic/bio_engineer/dna_template/shared_dna_template_gurreck.iff"
result.attribute_template_id = -1
result.stfName("string_id_table","")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
gmimano/commcaretest
|
refs/heads/master
|
corehq/apps/data_interfaces/views.py
|
1
|
import csv
import io
import uuid
from couchdbkit import ResourceNotFound
from django.contrib import messages
from django.core.cache import cache
from dimagi.utils.excel import WorkbookJSONReader, JSONReaderError
from django.utils.decorators import method_decorator
from openpyxl.shared.exc import InvalidFileException
from casexml.apps.case.models import CommCareCaseGroup
from corehq import CaseReassignmentInterface
from corehq.apps.data_interfaces.tasks import bulk_upload_cases_to_group
from corehq.apps.data_interfaces.forms import (AddCaseGroupForm, UpdateCaseGroupForm, AddCaseToGroupForm,
UploadBulkCaseGroupForm)
from corehq.apps.domain.decorators import login_and_domain_required
from corehq.apps.domain.views import BaseDomainView
from corehq.apps.hqcase.utils import get_case_by_identifier
from corehq.apps.hqwebapp.views import CRUDPaginatedViewMixin, PaginatedItemException
from corehq.apps.reports.standard.export import ExcelExportReport
from corehq.apps.data_interfaces.dispatcher import (DataInterfaceDispatcher, EditDataInterfaceDispatcher,
require_can_edit_data)
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, Http404
from dimagi.utils.decorators.memoized import memoized
from django.utils.translation import ugettext as _, ugettext_noop, ugettext_lazy
@login_and_domain_required
def default(request, domain):
if not request.project or request.project.is_snapshot:
raise Http404()
if request.couch_user.can_view_reports():
return HttpResponseRedirect(reverse(DataInterfaceDispatcher.name(),
args=[domain, ExcelExportReport.slug]))
exportable_reports = request.couch_user.get_exportable_reports(domain)
if exportable_reports:
return HttpResponseRedirect(reverse(DataInterfaceDispatcher.name(),
args=[domain, exportable_reports[0]]))
if request.couch_user.can_edit_data():
return HttpResponseRedirect(reverse(EditDataInterfaceDispatcher.name(),
args=[domain, CaseReassignmentInterface.slug]))
raise Http404()
class BulkUploadCasesException(Exception):
pass
class DataInterfaceSection(BaseDomainView):
section_name = ugettext_noop("Data")
@method_decorator(require_can_edit_data)
def dispatch(self, request, *args, **kwargs):
return super(DataInterfaceSection, self).dispatch(request, *args, **kwargs)
@property
def section_url(self):
return reverse("data_interfaces_default", args=[self.domain])
class CaseGroupListView(DataInterfaceSection, CRUDPaginatedViewMixin):
template_name = "data_interfaces/list_case_groups.html"
urlname = 'case_group_list'
page_title = ugettext_lazy("Case Groups")
limit_text = ugettext_lazy("groups per page")
empty_notification = ugettext_lazy("You have no case groups. Please create one!")
loading_message = ugettext_lazy("Loading groups...")
deleted_items_header = ugettext_lazy("Deleted Groups:")
new_items_header = ugettext_lazy("New Groups:")
@property
def page_url(self):
return reverse(self.urlname, args=[self.domain])
@property
def parameters(self):
return self.request.POST if self.request.method == 'POST' else self.request.GET
@property
@memoized
def total(self):
return CommCareCaseGroup.get_total(self.domain)
@property
def column_names(self):
return [
_("Group Name"),
_("Number of Cases"),
_("Actions"),
]
@property
def page_context(self):
return self.pagination_context
@property
def paginated_list(self):
for group in CommCareCaseGroup.get_all(
self.domain,
limit=self.limit,
skip=self.skip
):
item_data = self._get_item_data(group)
item_data['updateForm'] = self.get_update_form_response(
self.get_update_form(initial_data={
'item_id': group._id,
'name': group.name,
})
)
yield {
'itemData': item_data,
'template': 'existing-group-template',
}
def _get_item_data(self, case_group):
return {
'id': case_group._id,
'name': case_group.name,
'numCases': len(case_group.cases),
'editUrl': reverse(CaseGroupCaseManagementView.urlname, args=[self.domain, case_group._id])
}
def post(self, *args, **kwargs):
return self.paginate_crud_response
def get_create_form(self, is_blank=False):
if self.request.method == 'POST' and not is_blank:
return AddCaseGroupForm(self.request.POST)
return AddCaseGroupForm()
def get_update_form(self, initial_data=None):
if self.request.method == 'POST' and self.action == 'update':
return UpdateCaseGroupForm(self.request.POST)
return UpdateCaseGroupForm(initial=initial_data)
def get_create_item_data(self, create_form):
case_group = create_form.create_group(self.domain)
return {
'itemData': self._get_item_data(case_group),
'template': 'new-group-template',
}
def get_deleted_item_data(self, item_id):
case_group = CommCareCaseGroup.get(item_id)
item_data = self._get_item_data(case_group)
case_group.delete()
return {
'itemData': item_data,
'template': 'deleted-group-template',
}
class CaseGroupCaseManagementView(DataInterfaceSection, CRUDPaginatedViewMixin):
template_name = 'data_interfaces/manage_case_groups.html'
urlname = 'manage_case_groups'
page_title = ugettext_noop("Manage Case Group")
limit_text = ugettext_noop("cases per page")
empty_notification = ugettext_noop("You have no cases in your group.")
loading_message = ugettext_noop("Loading cases...")
deleted_items_header = ugettext_noop("Removed Cases:")
new_items_header = ugettext_noop("Added Cases:")
@property
def group_id(self):
return self.kwargs.get('group_id')
@property
@memoized
def case_group(self):
try:
return CommCareCaseGroup.get(self.group_id)
except ResourceNotFound:
raise Http404()
@property
def parent_pages(self):
return [{
'title': CaseGroupListView.page_title,
'url': reverse(CaseGroupListView.urlname, args=[self.domain])
}]
@property
def page_name(self):
return _("Manage Group '%s'" % self.case_group.name)
@property
def page_url(self):
return reverse(self.urlname, args=[self.domain, self.group_id])
@property
def page_context(self):
context = self.pagination_context
context.update({
'bulk_upload_from': UploadBulkCaseGroupForm(),
'bulk_upload_id': self.bulk_upload_id,
'update_case_group_form': self.update_case_group_form,
'group_name': self.case_group.name,
})
return context
@property
@memoized
def update_case_group_form(self):
initial = {
'name': self.case_group.name,
'item_id': self.case_group._id,
}
if self.is_case_group_update:
return UpdateCaseGroupForm(self.request.POST, initial=initial)
return UpdateCaseGroupForm(initial=initial)
@property
def parameters(self):
return self.request.POST if self.request.method == 'POST' else self.request.GET
@property
@memoized
def total(self):
return self.case_group.get_total_cases()
@property
def column_names(self):
return [
_("Case Name"),
_("Phone Number"),
_("External ID"),
_("Action"),
]
@property
def paginated_list(self):
for case in self.case_group.get_cases(limit=self.limit, skip=self.skip):
yield {
'itemData': self._get_item_data(case),
'template': 'existing-case-template',
}
@property
def allowed_actions(self):
actions = super(CaseGroupCaseManagementView, self).allowed_actions
actions.append('bulk')
return actions
@property
def bulk_response(self):
return cache.get(self.request.POST['upload_id'])
@property
def is_bulk_upload(self):
return self.request.method == 'POST' and self.request.POST.get('action') == 'bulk_upload'
@property
def is_case_group_update(self):
return self.request.method == 'POST' and self.request.POST.get('action') == 'update_case_group'
@property
def bulk_upload_id(self):
if not self.is_bulk_upload:
return None
try:
if self.uploaded_file:
upload_id = uuid.uuid4().hex
bulk_upload_cases_to_group.delay(
upload_id,
self.domain,
self.group_id,
list(self.uploaded_file.get_worksheet())
)
messages.success(self.request, _("We received your file and are processing it..."))
return upload_id
except BulkUploadCasesException as e:
messages.error(self.request, e.message)
return None
@property
@memoized
def uploaded_file(self):
bulk_file = self.request.FILES['bulk_file']
try:
return WorkbookJSONReader(bulk_file)
except InvalidFileException:
try:
csv.DictReader(io.StringIO(bulk_file.read().decode('ascii'),
newline=None))
raise BulkUploadCasesException(_("CommCare HQ no longer supports CSV upload. "
"Please convert to Excel 2007 or higher (.xlsx) "
"and try again."))
except UnicodeDecodeError:
raise BulkUploadCasesException(_("Unrecognized format"))
except JSONReaderError as e:
raise BulkUploadCasesException(_('Your upload was unsuccessful. %s') % e.message)
def _get_item_data(self, case):
return {
'id': case._id,
'detailsUrl': reverse('case_details', args=[self.domain, case._id]),
'name': case.name,
'externalId': case.external_id if case.external_id else '--',
'phoneNumber': getattr(case, 'contact_phone_number', '--'),
}
def get_create_form(self, is_blank=False):
if self.request.method == 'POST' and not is_blank:
return AddCaseToGroupForm(self.request.POST)
return AddCaseToGroupForm()
def get_create_item_data(self, create_form):
case_identifier = create_form.cleaned_data['case_identifier']
case = get_case_by_identifier(self.domain, case_identifier)
if case is None:
return {
'itemData': {
'id': case_identifier.replace(' ', '_'),
'identifier': case_identifier,
                    'message': _('Sorry, we could not find a case that '
                                 'matched the identifier you provided.'),
},
'rowClass': 'warning',
'template': 'case-message-template',
}
item_data = self._get_item_data(case)
if case._id in self.case_group.cases:
message = '<span class="label label-important">%s</span>' % _("Case already in group")
elif case.doc_type != 'CommCareCase':
message = '<span class="label label-important">%s</span>' % _("It looks like this case was deleted.")
else:
message = '<span class="label label-success">%s</span>' % _("Case added")
self.case_group.cases.append(case._id)
self.case_group.save()
item_data['message'] = message
return {
'itemData': item_data,
'template': 'new-case-template',
}
def get_deleted_item_data(self, item_id):
if not item_id:
raise PaginatedItemException("The case's ID was blank.")
current_cases = set(self.case_group.cases)
self.case_group.cases = list(current_cases.difference([item_id]))
self.case_group.save()
return {
'template': 'removed-case-template',
}
def post(self, request, *args, **kwargs):
if self.is_bulk_upload or self.is_case_group_update:
if self.is_case_group_update and self.update_case_group_form.is_valid():
self.update_case_group_form.update_group()
return HttpResponseRedirect(self.page_url)
return self.get(request, *args, **kwargs)
return self.paginate_crud_response
|
Jgarcia-IAS/Fidelizacion_odoo
|
refs/heads/master
|
openerp/addons/hw_posbox_homepage/__openerp__.py
|
313
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'PosBox Homepage',
'version': '1.0',
'category': 'Hardware Drivers',
'sequence': 6,
'website': 'https://www.odoo.com/page/point-of-sale',
'summary': 'A homepage for the PosBox',
'description': """
PosBox Homepage
===============
This module overrides the openerp web interface to display a simple
homepage that explains what the posbox is, shows its status,
and where to find documentation.
If you activate this module, you won't be able to access the
regular openerp interface anymore.
""",
'author': 'OpenERP SA',
'depends': ['hw_proxy'],
'installable': False,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
perezg/infoxchange
|
refs/heads/master
|
BASE/lib/python2.7/site-packages/django/utils/encoding.py
|
80
|
from __future__ import unicode_literals
import codecs
import datetime
from decimal import Decimal
import locale
try:
from urllib.parse import quote
except ImportError: # Python 2
from urllib import quote
import warnings
from django.utils.functional import Promise
from django.utils import six
class DjangoUnicodeDecodeError(UnicodeDecodeError):
def __init__(self, obj, *args):
self.obj = obj
UnicodeDecodeError.__init__(self, *args)
def __str__(self):
original = UnicodeDecodeError.__str__(self)
return '%s. You passed in %r (%s)' % (original, self.obj,
type(self.obj))
class StrAndUnicode(object):
"""
A class that derives __str__ from __unicode__.
On Python 2, __str__ returns the output of __unicode__ encoded as a UTF-8
bytestring. On Python 3, __str__ returns the output of __unicode__.
Useful as a mix-in. If you support Python 2 and 3 with a single code base,
you can inherit this mix-in and just define __unicode__.
"""
def __init__(self, *args, **kwargs):
warnings.warn("StrAndUnicode is deprecated. Define a __str__ method "
"and apply the @python_2_unicode_compatible decorator "
"instead.", PendingDeprecationWarning, stacklevel=2)
super(StrAndUnicode, self).__init__(*args, **kwargs)
if six.PY3:
def __str__(self):
return self.__unicode__()
else:
def __str__(self):
return self.__unicode__().encode('utf-8')
def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if not six.PY3:
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
return klass
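# A minimal, hedged sketch of the decorator's intended use (added for
# illustration; the class below is hypothetical):
#
# @python_2_unicode_compatible
# class Greeting(object):
#     def __str__(self):
#         return 'hello'  # text on both Python 2 and Python 3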
def smart_text(s, encoding='utf-8', strings_only=False, errors='strict'):
"""
Returns a text object representing 's' -- unicode on Python 2 and str on
Python 3. Treats bytestrings using the 'encoding' codec.
If strings_only is True, don't convert (some) non-string-like objects.
"""
if isinstance(s, Promise):
# The input is the result of a gettext_lazy() call.
return s
return force_text(s, encoding, strings_only, errors)
def is_protected_type(obj):
"""Determine if the object instance is of a protected type.
Objects of protected types are preserved as-is when passed to
force_text(strings_only=True).
"""
return isinstance(obj, six.integer_types + (type(None), float, Decimal,
datetime.datetime, datetime.date, datetime.time))
def force_text(s, encoding='utf-8', strings_only=False, errors='strict'):
"""
Similar to smart_text, except that lazy instances are resolved to
strings, rather than kept as lazy objects.
If strings_only is True, don't convert (some) non-string-like objects.
"""
# Handle the common case first, saves 30-40% when s is an instance of
# six.text_type. This function gets called often in that setting.
if isinstance(s, six.text_type):
return s
if strings_only and is_protected_type(s):
return s
try:
if not isinstance(s, six.string_types):
if hasattr(s, '__unicode__'):
s = s.__unicode__()
else:
if six.PY3:
if isinstance(s, bytes):
s = six.text_type(s, encoding, errors)
else:
s = six.text_type(s)
else:
s = six.text_type(bytes(s), encoding, errors)
else:
# Note: We use .decode() here, instead of six.text_type(s, encoding,
# errors), so that if s is a SafeBytes, it ends up being a
# SafeText at the end.
s = s.decode(encoding, errors)
except UnicodeDecodeError as e:
if not isinstance(s, Exception):
raise DjangoUnicodeDecodeError(s, *e.args)
else:
# If we get to here, the caller has passed in an Exception
# subclass populated with non-ASCII bytestring data without a
# working unicode method. Try to handle this without raising a
# further exception by individually forcing the exception args
# to unicode.
s = ' '.join([force_text(arg, encoding, strings_only,
errors) for arg in s])
return s
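# Hedged illustration (added; not part of the original module):
#   force_text(b'caf\xc3\xa9') -> u'caf\xe9'
#   force_text(42)             -> u'42'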
def smart_bytes(s, encoding='utf-8', strings_only=False, errors='strict'):
"""
Returns a bytestring version of 's', encoded as specified in 'encoding'.
If strings_only is True, don't convert (some) non-string-like objects.
"""
if isinstance(s, Promise):
# The input is the result of a gettext_lazy() call.
return s
return force_bytes(s, encoding, strings_only, errors)
def force_bytes(s, encoding='utf-8', strings_only=False, errors='strict'):
"""
Similar to smart_bytes, except that lazy instances are resolved to
strings, rather than kept as lazy objects.
If strings_only is True, don't convert (some) non-string-like objects.
"""
if isinstance(s, bytes):
if encoding == 'utf-8':
return s
else:
return s.decode('utf-8', errors).encode(encoding, errors)
if strings_only and (s is None or isinstance(s, int)):
return s
if isinstance(s, Promise):
return six.text_type(s).encode(encoding, errors)
if not isinstance(s, six.string_types):
try:
if six.PY3:
return six.text_type(s).encode(encoding)
else:
return bytes(s)
except UnicodeEncodeError:
if isinstance(s, Exception):
# An Exception subclass containing non-ASCII data that doesn't
# know how to print itself properly. We shouldn't raise a
# further exception.
return b' '.join([force_bytes(arg, encoding, strings_only,
errors) for arg in s])
return six.text_type(s).encode(encoding, errors)
else:
return s.encode(encoding, errors)
if six.PY3:
smart_str = smart_text
force_str = force_text
else:
smart_str = smart_bytes
force_str = force_bytes
# backwards compatibility for Python 2
smart_unicode = smart_text
force_unicode = force_text
smart_str.__doc__ = """\
Apply smart_text in Python 3 and smart_bytes in Python 2.
This is suitable for writing to sys.stdout (for instance).
"""
force_str.__doc__ = """\
Apply force_text in Python 3 and force_bytes in Python 2.
"""
def iri_to_uri(iri):
"""
Convert an Internationalized Resource Identifier (IRI) portion to a URI
portion that is suitable for inclusion in a URL.
This is the algorithm from section 3.1 of RFC 3987. However, since we are
assuming input is either UTF-8 or unicode already, we can simplify things a
little from the full method.
Returns an ASCII string containing the encoded result.
"""
# The list of safe characters here is constructed from the "reserved" and
# "unreserved" characters specified in sections 2.2 and 2.3 of RFC 3986:
# reserved = gen-delims / sub-delims
# gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@"
# sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
# / "*" / "+" / "," / ";" / "="
# unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
# Of the unreserved characters, urllib.quote already considers all but
# the ~ safe.
# The % character is also added to the list of safe characters here, as the
# end of section 3.1 of RFC 3987 specifically mentions that % must not be
# converted.
if iri is None:
return iri
return quote(force_bytes(iri), safe=b"/#%[]=:;$&()+,!?*@'~")
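# Hedged illustration (added; not part of the original module):
#   iri_to_uri(u'/I \u2665 Django/') -> '/I%20%E2%99%A5%20Django/'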
def filepath_to_uri(path):
"""Convert a file system path to a URI portion that is suitable for
inclusion in a URL.
We are assuming input is either UTF-8 or unicode already.
This method will encode certain chars that would normally be recognized as
special chars for URIs. Note that this method does not encode the '
character, as it is a valid character within URIs. See
encodeURIComponent() JavaScript function for more details.
Returns an ASCII string containing the encoded result.
"""
if path is None:
return path
# I know about `os.sep` and `os.altsep` but I want to leave
# some flexibility for hardcoding separators.
return quote(force_bytes(path).replace(b"\\", b"/"), safe=b"/~!*()'")
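# Hedged illustration (added; not part of the original module):
#   filepath_to_uri(u'C:\\docs\\my file.txt') -> 'C%3A/docs/my%20file.txt'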
# The encoding of the default system locale; falls back to 'ascii' if the
# encoding is unsupported by python or could not be determined. See
# tickets #10335 and #5846
try:
DEFAULT_LOCALE_ENCODING = locale.getdefaultlocale()[1] or 'ascii'
codecs.lookup(DEFAULT_LOCALE_ENCODING)
except:
DEFAULT_LOCALE_ENCODING = 'ascii'
|
jjhelmus/wradlib
|
refs/heads/master
|
examples/clutter_gabella_example.py
|
1
|
#!/usr/bin/env python
# -------------------------------------------------------------------------------
# Name: clutter_gabella_example
# Purpose:
#
# Author: Thomas Pfaff
#
# Created: 26.10.2011
# Copyright: (c) Thomas Pfaff 2011
# Licence: The MIT License
# -------------------------------------------------------------------------------
import matplotlib.pyplot as pl
# just making sure that the plots immediately pop up
#pl.interactive(True)
import wradlib.vis as vis
import wradlib.clutter as clutter
import os
def ex_clutter_gabella():
# load the example data
import numpy as np
# Todo: link right data set
testdata = np.loadtxt(os.path.dirname(__file__) + '/' + 'data/polar_dBZ_fbg.gz')
# calculate the clutter map
clmap = clutter.filter_gabella(testdata,
wsize=5,
thrsnorain=0.,
tr1=6.,
n_p=8,
tr2=1.3)
# visualize the result
ax, pm = vis.plot_ppi(clmap)
ax.set_title('cluttermap')
pl.show()
if __name__ == '__main__':
ex_clutter_gabella()
|
optikfluffel/lagesonum
|
refs/heads/master
|
lagesonum/__main__.py
|
1
|
# coding: utf-8
# File for local testing; PythonAnywhere uses bottle_app.py directly
from bottle import run, debug
from bottle_app import application
debug(True)
run(application, host='localhost', port=8080, reloader=True)
|
nickjj/deploy-web-apps-with-docker
|
refs/heads/master
|
chapter-4/website/app.py
|
6
|
from flask import Flask
from redis import StrictRedis
app = Flask(__name__)
redis = StrictRedis(host='redis')
@app.route('/')
def hello_world():
hits = redis.incr('hits')
return 'You visited {0} times!'.format(hits)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8000, debug=True)
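# Hedged usage note (added for illustration): with the app running and a
# Redis host resolvable as 'redis', each request increments the counter:
#   $ curl http://localhost:8000/
#   You visited 1 times!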
|
shahalimurtaza/Pynet
|
refs/heads/master
|
Week1/Exercise6.py
|
1
|
import yaml
import json
exercise6 = [ 1, 2, 3, 4, 5, { 'key1': 'lock1', 'key2': 2 } , 'some string']
with open ("yaml-exercise6", "w") as f:
f.write(yaml.dump(exercise6, default_flow_style=False))
with open ("json-exercise6", "w") as f:
json.dump(exercise6, f)
|
Chilledheart/chromium
|
refs/heads/master
|
tools/telemetry/telemetry/internal/actions/page_action.py
|
29
|
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
from telemetry import decorators
GESTURE_SOURCE_DEFAULT = 'DEFAULT'
GESTURE_SOURCE_MOUSE = 'MOUSE'
GESTURE_SOURCE_TOUCH = 'TOUCH'
SUPPORTED_GESTURE_SOURCES = (GESTURE_SOURCE_DEFAULT,
GESTURE_SOURCE_MOUSE,
GESTURE_SOURCE_TOUCH)
class PageActionNotSupported(Exception):
pass
class PageActionFailed(Exception):
pass
class PageAction(object):
"""Represents an action that a user might try to perform to a page."""
def WillRunAction(self, tab):
"""Override to do action-specific setup before
Test.WillRunAction is called."""
pass
def RunAction(self, tab):
raise NotImplementedError()
def CleanUp(self, tab):
pass
def EvaluateCallbackWithElement(
tab, callback_js, selector=None, text=None, element_function=None,
wait=False, timeout_in_seconds=60):
"""Evaluates the JavaScript callback with the given element.
The element may be selected via selector, text, or element_function.
  Exactly one of these arguments must be specified.
Returns:
The callback's return value, if any. The return value must be
convertible to JSON.
Args:
tab: A telemetry.core.Tab object.
callback_js: The JavaScript callback to call (as string).
        The callback receives 2 parameters: the element, and an information
        string about which method was used to retrieve the element.
Example: '''
function(element, info) {
if (!element) {
throw Error('Can not find element: ' + info);
}
element.click()
}'''
selector: A CSS selector describing the element.
    text: The element must contain this exact text.
element_function: A JavaScript function (as string) that is used
to retrieve the element. For example:
'(function() { return foo.element; })()'.
wait: Whether to wait for the return value to be true.
timeout_in_seconds: The timeout for wait (if waiting).
"""
count = 0
info_msg = ''
if element_function is not None:
count = count + 1
info_msg = 'using element_function "%s"' % re.escape(element_function)
if selector is not None:
count = count + 1
info_msg = 'using selector "%s"' % _EscapeSelector(selector)
element_function = 'document.querySelector(\'%s\')' % _EscapeSelector(
selector)
if text is not None:
count = count + 1
info_msg = 'using exact text match "%s"' % re.escape(text)
element_function = '''
(function() {
function _findElement(element, text) {
if (element.innerHTML == text) {
return element;
}
var childNodes = element.childNodes;
for (var i = 0, len = childNodes.length; i < len; ++i) {
var found = _findElement(childNodes[i], text);
if (found) {
return found;
}
}
return null;
}
return _findElement(document, '%s');
})()''' % text
if count != 1:
raise PageActionFailed(
'Must specify 1 way to retrieve element, but %s was specified.' % count)
code = '''
(function() {
var element = %s;
var callback = %s;
return callback(element, '%s');
})()''' % (element_function, callback_js, info_msg)
if wait:
tab.WaitForJavaScriptExpression(code, timeout_in_seconds)
return True
else:
return tab.EvaluateJavaScript(code)
def _EscapeSelector(selector):
return selector.replace('\'', '\\\'')
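# Hedged usage sketch (added for illustration; 'tab' stands for the
# telemetry.core.Tab instance the framework supplies):
#
# EvaluateCallbackWithElement(
#     tab, 'function(el, info) { el.click(); }', selector='#submit')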
@decorators.Cache
def IsGestureSourceTypeSupported(tab, gesture_source_type):
# TODO(dominikg): remove once support for
# 'chrome.gpuBenchmarking.gestureSourceTypeSupported' has
# been rolled into reference build.
if tab.EvaluateJavaScript("""
typeof chrome.gpuBenchmarking.gestureSourceTypeSupported ===
'undefined'"""):
return (tab.browser.platform.GetOSName() != 'mac' or
gesture_source_type.lower() != 'touch')
return tab.EvaluateJavaScript("""
chrome.gpuBenchmarking.gestureSourceTypeSupported(
chrome.gpuBenchmarking.%s_INPUT)"""
% (gesture_source_type.upper()))
|
MihaiMoldovanu/ansible
|
refs/heads/devel
|
lib/ansible/modules/database/proxysql/proxysql_global_variables.py
|
30
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: proxysql_global_variables
version_added: "2.3"
author: "Ben Mildren (@bmildren)"
short_description: Gets or sets the proxysql global variables.
description:
- The M(proxysql_global_variables) module gets or sets the proxysql global
variables.
options:
variable:
description:
- Defines which variable should be returned, or if I(value) is specified
which variable should be updated.
required: True
value:
description:
- Defines a value the variable specified using I(variable) should be set
to.
save_to_disk:
description:
      - Save variable config to sqlite db on disk to persist the
        configuration.
default: True
load_to_runtime:
description:
      - Dynamically load variable config to runtime memory.
default: True
login_user:
description:
- The username used to authenticate to ProxySQL admin interface.
default: None
login_password:
description:
- The password used to authenticate to ProxySQL admin interface.
default: None
login_host:
description:
- The host used to connect to ProxySQL admin interface.
default: '127.0.0.1'
login_port:
description:
- The port used to connect to ProxySQL admin interface.
default: 6032
config_file:
description:
- Specify a config file from which login_user and login_password are to
be read.
default: ''
'''
EXAMPLES = '''
---
# This example sets the value of a variable, saves the mysql admin variables
# config to disk, and dynamically loads the mysql admin variables config to
# runtime. It uses supplied credentials to connect to the proxysql admin
# interface.
- proxysql_global_variables:
login_user: 'admin'
login_password: 'admin'
variable: 'mysql-max_connections'
value: 4096
# This example gets the value of a variable. It uses credentials in a
# supplied config file to connect to the proxysql admin interface.
- proxysql_global_variables:
config_file: '~/proxysql.cnf'
variable: 'mysql-default_query_delay'
'''
RETURN = '''
stdout:
    description: Returns the mysql variable supplied with its associated value.
    returned: Returns the current variable and value, or the newly set value
        for the variable supplied.
type: dict
"sample": {
"changed": false,
"msg": "The variable is already been set to the supplied value",
"var": {
"variable_name": "mysql-poll_timeout",
"variable_value": "3000"
}
}
'''
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.mysql import mysql_connect
from ansible.module_utils._text import to_native
try:
import MySQLdb
import MySQLdb.cursors
except ImportError:
MYSQLDB_FOUND = False
else:
MYSQLDB_FOUND = True
# ===========================================
# proxysql module specific support methods.
#
def perform_checks(module):
if module.params["login_port"] < 0 \
or module.params["login_port"] > 65535:
module.fail_json(
msg="login_port must be a valid unix port number (0-65535)"
)
if not MYSQLDB_FOUND:
module.fail_json(
msg="the python mysqldb module is required"
)
def save_config_to_disk(variable, cursor):
if variable.startswith("admin"):
cursor.execute("SAVE ADMIN VARIABLES TO DISK")
else:
cursor.execute("SAVE MYSQL VARIABLES TO DISK")
return True
def load_config_to_runtime(variable, cursor):
if variable.startswith("admin"):
cursor.execute("LOAD ADMIN VARIABLES TO RUNTIME")
else:
cursor.execute("LOAD MYSQL VARIABLES TO RUNTIME")
return True
def check_config(variable, value, cursor):
query_string = \
"""SELECT count(*) AS `variable_count`
FROM global_variables
WHERE variable_name = %s and variable_value = %s"""
query_data = \
[variable, value]
cursor.execute(query_string, query_data)
check_count = cursor.fetchone()
return (int(check_count['variable_count']) > 0)
def get_config(variable, cursor):
query_string = \
"""SELECT *
FROM global_variables
WHERE variable_name = %s"""
query_data = \
[variable, ]
cursor.execute(query_string, query_data)
row_count = cursor.rowcount
resultset = cursor.fetchone()
if row_count > 0:
return resultset
else:
return False
def set_config(variable, value, cursor):
query_string = \
"""UPDATE global_variables
SET variable_value = %s
WHERE variable_name = %s"""
query_data = \
[value, variable]
cursor.execute(query_string, query_data)
return True
def manage_config(variable, save_to_disk, load_to_runtime, cursor, state):
if state:
if save_to_disk:
save_config_to_disk(variable, cursor)
if load_to_runtime:
load_config_to_runtime(variable, cursor)
# ===========================================
# Module execution.
#
def main():
module = AnsibleModule(
argument_spec=dict(
login_user=dict(default=None, type='str'),
login_password=dict(default=None, no_log=True, type='str'),
login_host=dict(default="127.0.0.1"),
login_unix_socket=dict(default=None),
login_port=dict(default=6032, type='int'),
config_file=dict(default="", type='path'),
variable=dict(required=True, type='str'),
value=dict(),
save_to_disk=dict(default=True, type='bool'),
load_to_runtime=dict(default=True, type='bool')
),
supports_check_mode=True
)
perform_checks(module)
login_user = module.params["login_user"]
login_password = module.params["login_password"]
config_file = module.params["config_file"]
variable = module.params["variable"]
value = module.params["value"]
save_to_disk = module.params["save_to_disk"]
load_to_runtime = module.params["load_to_runtime"]
cursor = None
try:
cursor = mysql_connect(module,
login_user,
login_password,
config_file,
cursor_class=MySQLdb.cursors.DictCursor)
except MySQLdb.Error as e:
module.fail_json(
msg="unable to connect to ProxySQL Admin Module.. %s" % to_native(e)
)
result = {}
if not value:
try:
if get_config(variable, cursor):
result['changed'] = False
result['msg'] = \
"Returned the variable and it's current value"
result['var'] = get_config(variable, cursor)
else:
module.fail_json(
msg="The variable \"%s\" was not found" % variable
)
except MySQLdb.Error as e:
module.fail_json(
msg="unable to get config.. %s" % to_native(e)
)
else:
try:
if get_config(variable, cursor):
if not check_config(variable, value, cursor):
if not module.check_mode:
result['changed'] = set_config(variable, value, cursor)
result['msg'] = \
"Set the variable to the supplied value"
result['var'] = get_config(variable, cursor)
manage_config(variable,
save_to_disk,
load_to_runtime,
cursor,
result['changed'])
else:
result['changed'] = True
result['msg'] = ("Variable would have been set to" +
" the supplied value, however" +
" check_mode is enabled.")
else:
result['changed'] = False
result['msg'] = ("The variable is already been set to" +
" the supplied value")
result['var'] = get_config(variable, cursor)
else:
module.fail_json(
msg="The variable \"%s\" was not found" % variable
)
except MySQLdb.Error as e:
module.fail_json(
msg="unable to set config.. %s" % to_native(e)
)
module.exit_json(**result)
if __name__ == '__main__':
main()
|
floringrigoriu/Algorthitms
|
refs/heads/master
|
Leetcode2021/Monthly/January/jan27.py
|
1
|
# https://leetcode.com/explore/challenge/card/january-leetcoding-challenge-2021/582/week-4-january-22nd-january-28th/3618/
# Concatenation of Consecutive Binary Numbers
# Given an integer n, return the decimal value of the binary string formed by concatenating the binary representations of 1 to n in order, modulo 10^9 + 7.
# Example 1:
# Input: n = 1
# Output: 1
# Explanation: "1" in binary corresponds to the decimal value 1.
# Example 2:
# Input: n = 3
# Output: 27
# Explanation: In binary, 1, 2, and 3 correspond to "1", "10", and "11".
# After concatenating them, we have "11011", which corresponds to the decimal value 27.
class Solution:
def concatenatedBinary(self, n: int) -> int:
        modulo = 1000000007
        result = 0
        offset = 0  # bit width of the current number i
        for i in range(1, n + 1):
            # widen the shift whenever i needs one more bit
            while (1 << offset) <= i:
                offset += 1
            # shift previous digits left and append i's binary digits
            result = (result << offset) + i
            result %= modulo
        return result
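# Hedged cross-check (added for illustration; not part of the original
# solution): a direct string-concatenation reference, fine for small n.
def concatenated_binary_reference(n: int) -> int:
    bits = ''.join(bin(i)[2:] for i in range(1, n + 1))
    return int(bits, 2) % 1000000007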
s = Solution()
for n in [1,3,12,32]:
print(n, s.concatenatedBinary(n))
|
horstjens/ThePythonGameBook
|
refs/heads/master
|
en/pygame/003_static_blit_pretty2.py
|
1
|
# -*- coding: utf-8 -*-
"""
003_static_blit_pretty.py
static blitting and drawing (pretty version)
url: http://thepythongamebook.com/en:part2:pygame:step003
author: horst.jens@spielend-programmieren.at
licence: gpl, see http://www.gnu.org/licenses/gpl.html
works with python3.4 and python2.7
Blitting a surface on a static position
Drawing a filled circle into ballsurface.
Blitting this surface once.
introducing pygame draw methods
The ball's rectangular surface is black because the background
color of the ball's surface was never defined nor filled."""
import pygame
import random
class PygView(object):
def __init__(self, width=640, height=400, fps=30):
"""Initialize pygame, window, background, font,...
default arguments
"""
pygame.init()
pygame.display.set_caption("Press ESC to quit")
self.width = width
self.height = height
self.screen = pygame.display.set_mode((self.width, self.height), pygame.DOUBLEBUF)
self.background = pygame.Surface(self.screen.get_size()).convert()
self.background.fill((255,255,255)) # fill background white
self.clock = pygame.time.Clock()
self.fps = fps
self.playtime = 0.0
self.font = pygame.font.SysFont('mono', 24, bold=True)
self.newflash()
def newflash(self):
self.points = []
richtung = random.choice(("n","ne", "e", "se","s","sw", "w", "nw"))
for x in range(0, self.width//2, self.width//16):
if richtung == "n":
self.points.append([self.width//2, self.height//2-x])
elif richtung == "s":
self.points.append([self.width//2, self.height//2+x])
elif richtung == "w":
self.points.append([self.width//2-x, self.height//2])
elif richtung == "e":
self.points.append([self.width//2+x, self.height//2])
elif richtung == "ne":
self.points.append([self.width//2+x, self.height//2-x])
elif richtung == "se":
self.points.append([self.width//2+x, self.height//2+x])
elif richtung == "nw":
self.points.append([self.width//2-x, self.height//2-x])
elif richtung == "sw":
self.points.append([self.width//2-x, self.height//2+x])
#print(self.points)
def flash(self):
f = random.randint(0,255)
        farbe = (f,f,255) # between blue and white
dicke = random.randint(2,5)
if random.random() < 0.28:
# 5% chance fรผr y รnderung
i = random.choice(self.points)
i[1] += random.randint(-35,35)
if random.random() < 0.28:
# 5% chance fรผr x รnderung
i = random.choice(self.points)
i[0] += random.randint(-35,35)
        # --- draw the lightning bolt ---
start = (self.width//2,self.height//2)
for p in self.points:
pygame.draw.line(self.screen, farbe, start, p, dicke)
start = p
if random.random() < 0.035:
            # 3.5% chance of a completely new bolt
self.newflash()
def paint(self):
"""painting on the surface"""
#------- try out some pygame draw functions --------
# pygame.draw.line(Surface, color, start, end, width)
#pygame.draw.line(self.background, (0,255,0), (10,10), (50,100))
# pygame.draw.rect(Surface, color, Rect, width=0): return Rect
#pygame.draw.rect(self.background, (0,255,0), (50,50,100,25)) # rect: (x1, y1, width, height)
# pygame.draw.circle(Surface, color, pos, radius, width=0): return Rect
#pygame.draw.circle(self.background, (0,200,0), (200,50), 55, 0)
# pygame.draw.polygon(Surface, color, pointlist, width=0): return Rect
#pygame.draw.polygon(self.background, (0,180,0), ((250,100),(300,0),(350,50)))
# pygame.draw.arc(Surface, color, Rect, start_angle, stop_angle, width=1): return Rect
#pygame.draw.arc(self.background, (0,150,0),(400,10,150,100), 0, 3.14) # radiant instead of grad
# ------------------- blitting a Ball --------------
#myball = Ball() # creating the Ball object
#myball.blit(self.background) # blitting it
for radius in range(320, 4, -10):
pygame.draw.circle(self.screen,
(radius%255, 0, radius%255),
(320, 200), radius)
def run(self):
"""The mainloop
"""
self.paint()
running = True
while running:
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
running = False
milliseconds = self.clock.tick(self.fps)
self.playtime += milliseconds / 1000.0
self.draw_text("FPS: {:6.3}{}PLAYTIME: {:6.3} SECONDS".format(
self.clock.get_fps(), " "*5, self.playtime))
            # ---- draw circles ----
pressed = pygame.key.get_pressed()
if pressed[pygame.K_k]:
self.paint()
            # ----- draw lightning bolts ----
if pressed[pygame.K_b]:
self.flash()
pygame.display.flip()
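            # blit the background after flipping so the next frame starts
            # from a clean white screen (flashes persist for one frame only)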
self.screen.blit(self.background, (0, 0))
pygame.quit()
def draw_text(self, text):
"""Center text in window
"""
fw, fh = self.font.size(text)
surface = self.font.render(text, True, (0, 0, 0))
        self.screen.blit(surface, ((self.width - fw) // 2, (self.height - fh) // 2))
class Ball(object):
"""this is not a native pygame sprite but instead a pygame surface"""
def __init__(self, radius = 50, color=(0,0,255), x=320, y=240):
"""create a (black) surface and paint a blue ball on it"""
self.radius = radius
self.color = color
self.x = x
self.y = y
# create a rectangular surface for the ball 50x50
self.surface = pygame.Surface((2*self.radius,2*self.radius))
# pygame.draw.circle(Surface, color, pos, radius, width=0) # from pygame documentation
pygame.draw.circle(self.surface, color, (radius, radius), radius) # draw blue filled circle on ball surface
self.surface = self.surface.convert() # for faster blitting.
# to avoid the black background, make black the transparent color:
# self.surface.set_colorkey((0,0,0))
# self.surface = self.surface.convert_alpha() # faster blitting with transparent color
def blit(self, background):
"""blit the Ball on the background"""
background.blit(self.surface, ( self.x, self.y))
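# Usage sketch (illustrative, not executed by this script): draw a ball
# once onto an existing view's background surface.
#
#   view = PygView()
#   ball = Ball(radius=30, color=(0, 0, 255), x=100, y=100)
#   ball.blit(view.background)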
####
if __name__ == '__main__':
# call with width of window and fps
PygView(800,600).run()
|
BenevolentAI/guacamol
|
refs/heads/master
|
tests/test_distribution_learning_benchmarks.py
|
1
|
from guacamol.distribution_learning_benchmark import ValidityBenchmark, UniquenessBenchmark, NoveltyBenchmark, \
KLDivBenchmark
from guacamol.assess_distribution_learning import _assess_distribution_learning
from .mock_generator import MockGenerator
import numpy as np
import tempfile
from os.path import join
def test_validity_does_not_penalize_duplicates():
generator = MockGenerator(['CCC', 'CCC'])
benchmark = ValidityBenchmark(number_samples=2)
assert benchmark.assess_model(generator).score == 1.0
def test_validity_score_is_proportion_of_valid_molecules():
generator = MockGenerator(['CCC', 'CC(CC)C', 'invalidMolecule'])
benchmark = ValidityBenchmark(number_samples=3)
assert benchmark.assess_model(generator).score == 2.0 / 3.0
def test_uniqueness_penalizes_duplicates():
generator = MockGenerator(['CCC', 'CCC', 'CCC'])
benchmark = UniquenessBenchmark(number_samples=3)
assert benchmark.assess_model(generator).score == 1.0 / 3.0
def test_uniqueness_penalizes_duplicates_with_different_smiles_strings():
generator = MockGenerator(['C(O)C', 'CCO', 'OCC'])
benchmark = UniquenessBenchmark(number_samples=3)
assert benchmark.assess_model(generator).score == 1.0 / 3.0
def test_uniqueness_does_not_penalize_invalid_molecules():
generator = MockGenerator(['C(O)C', 'invalid1', 'invalid2', 'CCC', 'NCCN'])
benchmark = UniquenessBenchmark(number_samples=3)
assert benchmark.assess_model(generator).score == 1.0
def test_novelty_score_is_zero_if_no_molecule_is_new():
molecules = ['CCOCC', 'NNNNONNN', 'C=CC=C']
generator = MockGenerator(molecules)
benchmark = NoveltyBenchmark(number_samples=3, training_set=molecules)
assert benchmark.assess_model(generator).score == 0.0
def test_novelty_score_is_one_if_all_molecules_are_new():
generator = MockGenerator(['CCOCC', 'NNNNONNN', 'C=CC=C'])
benchmark = NoveltyBenchmark(number_samples=3, training_set=['CO', 'CC'])
assert benchmark.assess_model(generator).score == 1.0
def test_novelty_score_does_not_penalize_duplicates():
generator = MockGenerator(['CCOCC', 'O(CC)CC', 'C=CC=C', 'CC'])
benchmark = NoveltyBenchmark(number_samples=3, training_set=['CO', 'CC'])
# Gets 2 out of 3: one of the duplicated molecules is ignored, so the sampled molecules are
# ['CCOCC', 'C=CC=C', 'CC'], and 'CC' is not novel
assert benchmark.assess_model(generator).score == 2.0 / 3.0
def test_novelty_score_penalizes_invalid_molecules():
generator = MockGenerator(['CCOCC', 'invalid1', 'invalid2', 'CCCC', 'CC'])
benchmark = NoveltyBenchmark(number_samples=3, training_set=['CO', 'CC'])
assert benchmark.assess_model(generator).score == 2.0 / 3.0
def test_KLdiv_benchmark_same_dist():
generator = MockGenerator(['CCOCC', 'NNNNONNN', 'C=CC=C'])
benchmark = KLDivBenchmark(number_samples=3, training_set=['CCOCC', 'NNNNONNN', 'C=CC=C'])
result = benchmark.assess_model(generator)
print(result.metadata)
    assert np.isclose(result.score, 1.0)
def test_KLdiv_benchmark_different_dist():
generator = MockGenerator(['CCOCC', 'NNNNONNN', 'C=CC=C'])
benchmark = KLDivBenchmark(number_samples=3, training_set=['FCCOCC', 'CC(CC)CCCCNONNN', 'C=CC=O'])
result = benchmark.assess_model(generator)
print(result.metadata)
assert result.metadata['number_samples'] == 3
assert result.metadata.get('kl_divs') is not None
assert result.metadata['kl_divs'].get('BertzCT') > 0
assert result.metadata['kl_divs'].get('MolLogP', None) > 0
assert result.metadata['kl_divs'].get('MolWt', None) > 0
assert result.metadata['kl_divs'].get('TPSA', None) > 0
assert result.metadata['kl_divs'].get('NumHAcceptors', None) > 0
assert result.metadata['kl_divs'].get('NumHDonors', None) > 0
assert result.metadata['kl_divs'].get('NumRotatableBonds', None) > 0
assert result.score < 1.0
def test_distribution_learning_suite_v1():
generator = MockGenerator(
['CCl', 'CCOCCCl', 'ClCCF', 'CCCOCCOCCCO', 'CF', 'CCOCC', 'CCF', 'CCCOCC', 'NNNNONNN', 'C=CC=C'] * 10)
mock_chembl = ['FCCOCC', 'C=CC=O', 'CCl', 'CCOCCCl', 'ClCCF', 'CCCOCCOCCCO', 'CF', 'CCOCC',
'CCF']
temp_dir = tempfile.mkdtemp()
smiles_path = join(temp_dir, 'mock.smiles')
    with open(smiles_path, 'w') as f:
        for i in mock_chembl:
            f.write(f'{i}\n')
json_path = join(temp_dir, 'output.json')
_assess_distribution_learning(model=generator,
chembl_training_file=smiles_path,
json_output_file=json_path,
benchmark_version='v1',
number_samples=4)
with open(json_path, 'r') as f:
print(f.read())
|
Temeez/wagtail-simple-gallery
|
refs/heads/master
|
wagtail_simple_gallery/models.py
|
1
|
from django.conf import settings
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.admin.edit_handlers import FieldPanel
from wagtail.core.fields import RichTextField
from wagtail.core.models import Page
from wagtail.images import get_image_model
from django.shortcuts import render, redirect
from wagtail.contrib.routable_page.models import RoutablePageMixin, route
from taggit.models import Tag
IMAGE_ORDER_TYPES = (
(1, 'Image title'),
(2, 'Newest image first'),
)
class SimpleGalleryIndex(RoutablePageMixin, Page):
intro_title = models.CharField(
verbose_name=_('Intro title'),
max_length=250,
blank=True,
help_text=_('Optional H1 title for the gallery page.')
)
intro_text = RichTextField(
blank=True,
verbose_name=_('Intro text'),
help_text=_('Optional text to go with the intro text.')
)
collection = models.ForeignKey(
'wagtailcore.Collection',
verbose_name=_('Collection'),
null=True,
blank=False,
on_delete=models.SET_NULL,
related_name='+',
help_text=_('Show images in this collection in the gallery view.')
)
images_per_page = models.IntegerField(
default=8,
verbose_name=_('Images per page'),
help_text=_('How many images there should be on one page.')
)
use_lightbox = models.BooleanField(
verbose_name=_('Use lightbox'),
default=True,
help_text=_('Use lightbox to view larger images when clicking the thumbnail.')
)
order_images_by = models.IntegerField(choices=IMAGE_ORDER_TYPES, default=1)
content_panels = Page.content_panels + [
FieldPanel('intro_title', classname='full title'),
FieldPanel('intro_text', classname='full title'),
FieldPanel('collection'),
FieldPanel('images_per_page', classname='full title'),
FieldPanel('use_lightbox'),
FieldPanel('order_images_by'),
]
@property
    def images(self):
return get_gallery_images(self.collection.name, self)
@property
def tags(self):
return self.get_gallery_tags()
def get_context(self, request):
images = self.images
tags = self.tags
context = super(SimpleGalleryIndex, self).get_context(request)
page = request.GET.get('page')
paginator = Paginator(images, self.images_per_page)
try:
images = paginator.page(page)
except PageNotAnInteger:
images = paginator.page(1)
except EmptyPage:
images = paginator.page(paginator.num_pages)
context['gallery_images'] = images
context['gallery_tags'] = tags
return context
    def get_gallery_tags(self, tags=None):
        # avoid a mutable default argument: the tag list would otherwise
        # accumulate across calls
        tags = list(tags) if tags is not None else []
        images = get_gallery_images(self.collection.name, self, tags=tags)
for img in images:
tags += img.tags.all()
tags = sorted(set(tags))
return tags
    @route(r'^tags/$', name='tag_archive')
    @route(r'^tags/([\w-]+)/$', name='tag_archive')
def tag_archive(self, request, tag=None):
try:
tag = Tag.objects.get(slug=tag)
except Tag.DoesNotExist:
return redirect(self.url)
        taglist = [tag]
images = get_gallery_images(self.collection.name, self, tags=taglist)
tags = self.get_gallery_tags(tags=taglist)
paginator = Paginator(images, self.images_per_page)
page = request.GET.get('page')
try:
images = paginator.page(page)
except PageNotAnInteger:
images = paginator.page(1)
except EmptyPage:
images = paginator.page(paginator.num_pages)
context = self.get_context(request)
context['gallery_images'] = images
context['gallery_tags'] = tags
context['current_tag'] = tag
return render(request, 'wagtail_simple_gallery/simple_gallery_index.html', context)
class Meta:
verbose_name = _('Gallery index')
verbose_name_plural = _('Gallery indices')
template = getattr(settings, 'SIMPLE_GALLERY_TEMPLATE', 'wagtail_simple_gallery/simple_gallery_index.html')
def get_gallery_images(collection, page=None, tags=None):
# Tags must be a list of tag names like ["Hasthag", "Kawabonga", "Winter is coming"]
images = None
try:
images = get_image_model().objects.filter(collection__name=collection).prefetch_related("tags")
if page:
if page.order_images_by == 1:
images = images.order_by('title')
elif page.order_images_by == 2:
images = images.order_by('-created_at')
    except Exception:
        # collection lookup failed; fall through and return None
        pass
if images and tags:
images = images.filter(tags__name__in=tags).prefetch_related("tags").distinct()
return images
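# Illustrative usage (not part of the module); assumes a Wagtail collection
# named 'Gallery' with tagged images exists:
#
#   images = get_gallery_images('Gallery', tags=['Winter is coming'])
#   for image in images:
#       print(image.title)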
|
cecep-edu/refactory
|
refs/heads/staging
|
requirements/PyChart-1.39/demos/tocssingle.py
|
6
|
#
# Copyright (C) 2000-2005 by Yasushi Saito (yasushi.saito@gmail.com)
#
# Pychart is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2, or (at your option) any
# later version.
#
# Pychart is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
from pychart import *
import tocslib
data = chart_data.read_str(",",
"/8No\nReplication,23,105",
"/8Replication,9,38",
"/8Replication\nwith NVRAM,13,48")
ar = area.T(y_range=(0,150),
size=(tocslib.width*1.3, tocslib.height),
x_coord=category_coord.T(data, 0),
y_axis = axis.Y(label="/bMessages//second", tic_interval=50),
x_axis = axis.X(label=None),
legend=legend.T(loc=(70,50)))
ar.add_plot(bar_plot.T(label="With one disk//node",
cluster = (0,2), data = data, width=15,
fill_style=fill_style.white),
bar_plot.T(label="With three disks//node",
cluster = (1,2), data = data, width=15, hcol=2))
ar.draw()
|
ossdemura/django-miniblog
|
refs/heads/dev
|
src/Lib/encodings/iso8859_10.py
|
272
|
""" Python Character Mapping Codec iso8859_10 generated from 'MAPPINGS/ISO8859/8859-10.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-10',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\x80' # 0x80 -> <control>
'\x81' # 0x81 -> <control>
'\x82' # 0x82 -> <control>
'\x83' # 0x83 -> <control>
'\x84' # 0x84 -> <control>
'\x85' # 0x85 -> <control>
'\x86' # 0x86 -> <control>
'\x87' # 0x87 -> <control>
'\x88' # 0x88 -> <control>
'\x89' # 0x89 -> <control>
'\x8a' # 0x8A -> <control>
'\x8b' # 0x8B -> <control>
'\x8c' # 0x8C -> <control>
'\x8d' # 0x8D -> <control>
'\x8e' # 0x8E -> <control>
'\x8f' # 0x8F -> <control>
'\x90' # 0x90 -> <control>
'\x91' # 0x91 -> <control>
'\x92' # 0x92 -> <control>
'\x93' # 0x93 -> <control>
'\x94' # 0x94 -> <control>
'\x95' # 0x95 -> <control>
'\x96' # 0x96 -> <control>
'\x97' # 0x97 -> <control>
'\x98' # 0x98 -> <control>
'\x99' # 0x99 -> <control>
'\x9a' # 0x9A -> <control>
'\x9b' # 0x9B -> <control>
'\x9c' # 0x9C -> <control>
'\x9d' # 0x9D -> <control>
'\x9e' # 0x9E -> <control>
'\x9f' # 0x9F -> <control>
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\u0104' # 0xA1 -> LATIN CAPITAL LETTER A WITH OGONEK
'\u0112' # 0xA2 -> LATIN CAPITAL LETTER E WITH MACRON
'\u0122' # 0xA3 -> LATIN CAPITAL LETTER G WITH CEDILLA
'\u012a' # 0xA4 -> LATIN CAPITAL LETTER I WITH MACRON
'\u0128' # 0xA5 -> LATIN CAPITAL LETTER I WITH TILDE
'\u0136' # 0xA6 -> LATIN CAPITAL LETTER K WITH CEDILLA
'\xa7' # 0xA7 -> SECTION SIGN
'\u013b' # 0xA8 -> LATIN CAPITAL LETTER L WITH CEDILLA
'\u0110' # 0xA9 -> LATIN CAPITAL LETTER D WITH STROKE
'\u0160' # 0xAA -> LATIN CAPITAL LETTER S WITH CARON
'\u0166' # 0xAB -> LATIN CAPITAL LETTER T WITH STROKE
'\u017d' # 0xAC -> LATIN CAPITAL LETTER Z WITH CARON
'\xad' # 0xAD -> SOFT HYPHEN
'\u016a' # 0xAE -> LATIN CAPITAL LETTER U WITH MACRON
'\u014a' # 0xAF -> LATIN CAPITAL LETTER ENG
'\xb0' # 0xB0 -> DEGREE SIGN
'\u0105' # 0xB1 -> LATIN SMALL LETTER A WITH OGONEK
'\u0113' # 0xB2 -> LATIN SMALL LETTER E WITH MACRON
'\u0123' # 0xB3 -> LATIN SMALL LETTER G WITH CEDILLA
'\u012b' # 0xB4 -> LATIN SMALL LETTER I WITH MACRON
'\u0129' # 0xB5 -> LATIN SMALL LETTER I WITH TILDE
'\u0137' # 0xB6 -> LATIN SMALL LETTER K WITH CEDILLA
'\xb7' # 0xB7 -> MIDDLE DOT
'\u013c' # 0xB8 -> LATIN SMALL LETTER L WITH CEDILLA
'\u0111' # 0xB9 -> LATIN SMALL LETTER D WITH STROKE
'\u0161' # 0xBA -> LATIN SMALL LETTER S WITH CARON
'\u0167' # 0xBB -> LATIN SMALL LETTER T WITH STROKE
'\u017e' # 0xBC -> LATIN SMALL LETTER Z WITH CARON
'\u2015' # 0xBD -> HORIZONTAL BAR
'\u016b' # 0xBE -> LATIN SMALL LETTER U WITH MACRON
'\u014b' # 0xBF -> LATIN SMALL LETTER ENG
'\u0100' # 0xC0 -> LATIN CAPITAL LETTER A WITH MACRON
'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
'\u012e' # 0xC7 -> LATIN CAPITAL LETTER I WITH OGONEK
'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON
'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
'\u0118' # 0xCA -> LATIN CAPITAL LETTER E WITH OGONEK
'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\u0116' # 0xCC -> LATIN CAPITAL LETTER E WITH DOT ABOVE
'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
'\xd0' # 0xD0 -> LATIN CAPITAL LETTER ETH (Icelandic)
'\u0145' # 0xD1 -> LATIN CAPITAL LETTER N WITH CEDILLA
'\u014c' # 0xD2 -> LATIN CAPITAL LETTER O WITH MACRON
'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\u0168' # 0xD7 -> LATIN CAPITAL LETTER U WITH TILDE
'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
'\u0172' # 0xD9 -> LATIN CAPITAL LETTER U WITH OGONEK
'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\xdd' # 0xDD -> LATIN CAPITAL LETTER Y WITH ACUTE
'\xde' # 0xDE -> LATIN CAPITAL LETTER THORN (Icelandic)
'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S (German)
'\u0101' # 0xE0 -> LATIN SMALL LETTER A WITH MACRON
'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
'\u012f' # 0xE7 -> LATIN SMALL LETTER I WITH OGONEK
'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON
'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
'\u0119' # 0xEA -> LATIN SMALL LETTER E WITH OGONEK
'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
'\u0117' # 0xEC -> LATIN SMALL LETTER E WITH DOT ABOVE
'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
'\xf0' # 0xF0 -> LATIN SMALL LETTER ETH (Icelandic)
'\u0146' # 0xF1 -> LATIN SMALL LETTER N WITH CEDILLA
'\u014d' # 0xF2 -> LATIN SMALL LETTER O WITH MACRON
'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
'\u0169' # 0xF7 -> LATIN SMALL LETTER U WITH TILDE
'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
'\u0173' # 0xF9 -> LATIN SMALL LETTER U WITH OGONEK
'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
'\xfd' # 0xFD -> LATIN SMALL LETTER Y WITH ACUTE
'\xfe' # 0xFE -> LATIN SMALL LETTER THORN (Icelandic)
'\u0138' # 0xFF -> LATIN SMALL LETTER KRA
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
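# Usage sketch (illustrative): once this module is registered by the
# encodings package, the tables above map byte 0xA1 to U+0104 and back:
#
#   assert b'\xa1'.decode('iso8859-10') == '\u0104'
#   assert '\u0104'.encode('iso8859-10') == b'\xa1'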
|
andrewmoses/ssquiz
|
refs/heads/master
|
flask/lib/python2.7/encodings/mac_croatian.py
|
593
|
""" Python Character Mapping Codec mac_croatian generated from 'MAPPINGS/VENDORS/APPLE/CROATIAN.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='mac-croatian',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> CONTROL CHARACTER
u'\x01' # 0x01 -> CONTROL CHARACTER
u'\x02' # 0x02 -> CONTROL CHARACTER
u'\x03' # 0x03 -> CONTROL CHARACTER
u'\x04' # 0x04 -> CONTROL CHARACTER
u'\x05' # 0x05 -> CONTROL CHARACTER
u'\x06' # 0x06 -> CONTROL CHARACTER
u'\x07' # 0x07 -> CONTROL CHARACTER
u'\x08' # 0x08 -> CONTROL CHARACTER
u'\t' # 0x09 -> CONTROL CHARACTER
u'\n' # 0x0A -> CONTROL CHARACTER
u'\x0b' # 0x0B -> CONTROL CHARACTER
u'\x0c' # 0x0C -> CONTROL CHARACTER
u'\r' # 0x0D -> CONTROL CHARACTER
u'\x0e' # 0x0E -> CONTROL CHARACTER
u'\x0f' # 0x0F -> CONTROL CHARACTER
u'\x10' # 0x10 -> CONTROL CHARACTER
u'\x11' # 0x11 -> CONTROL CHARACTER
u'\x12' # 0x12 -> CONTROL CHARACTER
u'\x13' # 0x13 -> CONTROL CHARACTER
u'\x14' # 0x14 -> CONTROL CHARACTER
u'\x15' # 0x15 -> CONTROL CHARACTER
u'\x16' # 0x16 -> CONTROL CHARACTER
u'\x17' # 0x17 -> CONTROL CHARACTER
u'\x18' # 0x18 -> CONTROL CHARACTER
u'\x19' # 0x19 -> CONTROL CHARACTER
u'\x1a' # 0x1A -> CONTROL CHARACTER
u'\x1b' # 0x1B -> CONTROL CHARACTER
u'\x1c' # 0x1C -> CONTROL CHARACTER
u'\x1d' # 0x1D -> CONTROL CHARACTER
u'\x1e' # 0x1E -> CONTROL CHARACTER
u'\x1f' # 0x1F -> CONTROL CHARACTER
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> CONTROL CHARACTER
u'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0x81 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc7' # 0x82 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xd1' # 0x84 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xe1' # 0x87 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe0' # 0x88 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe2' # 0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe3' # 0x8B -> LATIN SMALL LETTER A WITH TILDE
u'\xe5' # 0x8C -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x8D -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE
u'\xe8' # 0x8F -> LATIN SMALL LETTER E WITH GRAVE
u'\xea' # 0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x91 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xed' # 0x92 -> LATIN SMALL LETTER I WITH ACUTE
u'\xec' # 0x93 -> LATIN SMALL LETTER I WITH GRAVE
u'\xee' # 0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x95 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xf1' # 0x96 -> LATIN SMALL LETTER N WITH TILDE
u'\xf3' # 0x97 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf2' # 0x98 -> LATIN SMALL LETTER O WITH GRAVE
u'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf5' # 0x9B -> LATIN SMALL LETTER O WITH TILDE
u'\xfa' # 0x9C -> LATIN SMALL LETTER U WITH ACUTE
u'\xf9' # 0x9D -> LATIN SMALL LETTER U WITH GRAVE
u'\xfb' # 0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u2020' # 0xA0 -> DAGGER
u'\xb0' # 0xA1 -> DEGREE SIGN
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa7' # 0xA4 -> SECTION SIGN
u'\u2022' # 0xA5 -> BULLET
u'\xb6' # 0xA6 -> PILCROW SIGN
u'\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S
u'\xae' # 0xA8 -> REGISTERED SIGN
u'\u0160' # 0xA9 -> LATIN CAPITAL LETTER S WITH CARON
u'\u2122' # 0xAA -> TRADE MARK SIGN
u'\xb4' # 0xAB -> ACUTE ACCENT
u'\xa8' # 0xAC -> DIAERESIS
u'\u2260' # 0xAD -> NOT EQUAL TO
u'\u017d' # 0xAE -> LATIN CAPITAL LETTER Z WITH CARON
u'\xd8' # 0xAF -> LATIN CAPITAL LETTER O WITH STROKE
u'\u221e' # 0xB0 -> INFINITY
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO
u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO
u'\u2206' # 0xB4 -> INCREMENT
u'\xb5' # 0xB5 -> MICRO SIGN
u'\u2202' # 0xB6 -> PARTIAL DIFFERENTIAL
u'\u2211' # 0xB7 -> N-ARY SUMMATION
u'\u220f' # 0xB8 -> N-ARY PRODUCT
u'\u0161' # 0xB9 -> LATIN SMALL LETTER S WITH CARON
u'\u222b' # 0xBA -> INTEGRAL
u'\xaa' # 0xBB -> FEMININE ORDINAL INDICATOR
u'\xba' # 0xBC -> MASCULINE ORDINAL INDICATOR
u'\u03a9' # 0xBD -> GREEK CAPITAL LETTER OMEGA
u'\u017e' # 0xBE -> LATIN SMALL LETTER Z WITH CARON
u'\xf8' # 0xBF -> LATIN SMALL LETTER O WITH STROKE
u'\xbf' # 0xC0 -> INVERTED QUESTION MARK
u'\xa1' # 0xC1 -> INVERTED EXCLAMATION MARK
u'\xac' # 0xC2 -> NOT SIGN
u'\u221a' # 0xC3 -> SQUARE ROOT
u'\u0192' # 0xC4 -> LATIN SMALL LETTER F WITH HOOK
u'\u2248' # 0xC5 -> ALMOST EQUAL TO
u'\u0106' # 0xC6 -> LATIN CAPITAL LETTER C WITH ACUTE
u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON
u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS
u'\xa0' # 0xCA -> NO-BREAK SPACE
u'\xc0' # 0xCB -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc3' # 0xCC -> LATIN CAPITAL LETTER A WITH TILDE
u'\xd5' # 0xCD -> LATIN CAPITAL LETTER O WITH TILDE
u'\u0152' # 0xCE -> LATIN CAPITAL LIGATURE OE
u'\u0153' # 0xCF -> LATIN SMALL LIGATURE OE
u'\u0110' # 0xD0 -> LATIN CAPITAL LETTER D WITH STROKE
u'\u2014' # 0xD1 -> EM DASH
u'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK
u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK
u'\xf7' # 0xD6 -> DIVISION SIGN
u'\u25ca' # 0xD7 -> LOZENGE
u'\uf8ff' # 0xD8 -> Apple logo
u'\xa9' # 0xD9 -> COPYRIGHT SIGN
u'\u2044' # 0xDA -> FRACTION SLASH
u'\u20ac' # 0xDB -> EURO SIGN
u'\u2039' # 0xDC -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
u'\u203a' # 0xDD -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
u'\xc6' # 0xDE -> LATIN CAPITAL LETTER AE
u'\xbb' # 0xDF -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2013' # 0xE0 -> EN DASH
u'\xb7' # 0xE1 -> MIDDLE DOT
u'\u201a' # 0xE2 -> SINGLE LOW-9 QUOTATION MARK
u'\u201e' # 0xE3 -> DOUBLE LOW-9 QUOTATION MARK
u'\u2030' # 0xE4 -> PER MILLE SIGN
u'\xc2' # 0xE5 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\u0107' # 0xE6 -> LATIN SMALL LETTER C WITH ACUTE
u'\xc1' # 0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON
u'\xc8' # 0xE9 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xcd' # 0xEA -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xEB -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xEC -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xcc' # 0xED -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\u0111' # 0xF0 -> LATIN SMALL LETTER D WITH STROKE
u'\xd2' # 0xF1 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xda' # 0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xF3 -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xd9' # 0xF4 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\u0131' # 0xF5 -> LATIN SMALL LETTER DOTLESS I
u'\u02c6' # 0xF6 -> MODIFIER LETTER CIRCUMFLEX ACCENT
u'\u02dc' # 0xF7 -> SMALL TILDE
u'\xaf' # 0xF8 -> MACRON
u'\u03c0' # 0xF9 -> GREEK SMALL LETTER PI
u'\xcb' # 0xFA -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\u02da' # 0xFB -> RING ABOVE
u'\xb8' # 0xFC -> CEDILLA
u'\xca' # 0xFD -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xe6' # 0xFE -> LATIN SMALL LETTER AE
u'\u02c7' # 0xFF -> CARON
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
|
jeeftor/alfredToday
|
refs/heads/master
|
src/lib/pytz/lazy.py
|
514
|
from threading import RLock
try:
from UserDict import DictMixin
except ImportError:
from collections import Mapping as DictMixin
# With lazy loading, we might end up with multiple threads triggering
# it at the same time. We need a lock.
_fill_lock = RLock()
class LazyDict(DictMixin):
"""Dictionary populated on first use."""
data = None
def __getitem__(self, key):
if self.data is None:
_fill_lock.acquire()
try:
if self.data is None:
self._fill()
finally:
_fill_lock.release()
return self.data[key.upper()]
def __contains__(self, key):
if self.data is None:
_fill_lock.acquire()
try:
if self.data is None:
self._fill()
finally:
_fill_lock.release()
return key in self.data
def __iter__(self):
if self.data is None:
_fill_lock.acquire()
try:
if self.data is None:
self._fill()
finally:
_fill_lock.release()
return iter(self.data)
def __len__(self):
if self.data is None:
_fill_lock.acquire()
try:
if self.data is None:
self._fill()
finally:
_fill_lock.release()
return len(self.data)
def keys(self):
if self.data is None:
_fill_lock.acquire()
try:
if self.data is None:
self._fill()
finally:
_fill_lock.release()
return self.data.keys()
class LazyList(list):
"""List populated on first use."""
_props = [
'__str__', '__repr__', '__unicode__',
'__hash__', '__sizeof__', '__cmp__',
'__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
'append', 'count', 'index', 'extend', 'insert', 'pop', 'remove',
'reverse', 'sort', '__add__', '__radd__', '__iadd__', '__mul__',
'__rmul__', '__imul__', '__contains__', '__len__', '__nonzero__',
'__getitem__', '__setitem__', '__delitem__', '__iter__',
'__reversed__', '__getslice__', '__setslice__', '__delslice__']
def __new__(cls, fill_iter=None):
if fill_iter is None:
return list()
# We need a new class as we will be dynamically messing with its
# methods.
class LazyList(list):
pass
fill_iter = [fill_iter]
def lazy(name):
def _lazy(self, *args, **kw):
_fill_lock.acquire()
try:
if len(fill_iter) > 0:
list.extend(self, fill_iter.pop())
for method_name in cls._props:
delattr(LazyList, method_name)
finally:
_fill_lock.release()
return getattr(list, name)(self, *args, **kw)
return _lazy
for name in cls._props:
setattr(LazyList, name, lazy(name))
new_list = LazyList()
return new_list
# Not all versions of Python declare the same magic methods.
# Filter out properties that don't exist in this version of Python
# from the list.
LazyList._props = [prop for prop in LazyList._props if hasattr(list, prop)]
class LazySet(set):
"""Set populated on first use."""
_props = (
'__str__', '__repr__', '__unicode__',
'__hash__', '__sizeof__', '__cmp__',
'__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
'__contains__', '__len__', '__nonzero__',
'__getitem__', '__setitem__', '__delitem__', '__iter__',
'__sub__', '__and__', '__xor__', '__or__',
'__rsub__', '__rand__', '__rxor__', '__ror__',
'__isub__', '__iand__', '__ixor__', '__ior__',
'add', 'clear', 'copy', 'difference', 'difference_update',
'discard', 'intersection', 'intersection_update', 'isdisjoint',
'issubset', 'issuperset', 'pop', 'remove',
'symmetric_difference', 'symmetric_difference_update',
'union', 'update')
def __new__(cls, fill_iter=None):
if fill_iter is None:
return set()
class LazySet(set):
pass
fill_iter = [fill_iter]
def lazy(name):
def _lazy(self, *args, **kw):
_fill_lock.acquire()
try:
if len(fill_iter) > 0:
for i in fill_iter.pop():
set.add(self, i)
for method_name in cls._props:
delattr(LazySet, method_name)
finally:
_fill_lock.release()
return getattr(set, name)(self, *args, **kw)
return _lazy
for name in cls._props:
setattr(LazySet, name, lazy(name))
new_set = LazySet()
return new_set
# Not all versions of Python declare the same magic methods.
# Filter out properties that don't exist in this version of Python
# from the list.
LazySet._props = [prop for prop in LazySet._props if hasattr(set, prop)]
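# Minimal usage sketch (illustrative, not part of pytz): a LazyDict
# subclass only needs to provide _fill(), which must assign a dict to
# self.data. Note that __getitem__ upper-cases the key before lookup.
#
#   class _Demo(LazyDict):
#       def _fill(self):
#           self.data = {'UTC': 'utc-info'}   # hypothetical payload
#
#   d = _Demo()
#   d['utc']   # triggers _fill() under the lock, then returns 'utc-info'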
|
prtx/What-I-learned-in-college
|
refs/heads/master
|
AI/mc2.py
|
1
|
#!/usr/bin/python
from copy import deepcopy
class Tree_Node:
def __init__(self, item = None, parent = None):
self.item = item
self.parent = parent
self.children = []
    def set_item(self, item):
        self.item = item
def set_parent(self, node):
self.parent = node
def set_child(self, item):
tree_node = Tree_Node(item, self)
self.children.append(tree_node)
class missionary_cannibals:
def __init__(self):
#cannibal,missionary,canoe
print "Missionary Cannibal Problem\n"
self.state = [3, 3, True]
self.success_state = [0, 0, False]
self.state_tree = Tree_Node(self.state)
self.state_history = [self.state]
def valid_state(self,left_state):
right_state = [3-x for x in left_state]
if left_state[0]>left_state[1]>0 or right_state[0]>right_state[1]>0:
return False
return True
def possible_states(self, current_state):
states = []
k = -1 if current_state[2] else 1
for i in range(3):
for j in range(3):
if i + j in [1,2]:
state = deepcopy(current_state)
state[2] = not state[2]
state[0] += i*k
state[1] += j*k
if 3 >= state[0] >= 0 and 3 >= state[1] >= 0 and self.valid_state(state[:2]) and state not in self.state_history:
states.append(state)
self.state_history.append(state)
return states
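    # Example: from the start state [3, 3, True], ferrying one cannibal and
    # one missionary across leaves [2, 2, False]; valid_state() rejects any
    # bank where cannibals outnumber missionaries while at least one
    # missionary is present.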
def solution(self):
self.depth_first_search(self.state_tree, 0)
def breadth_first_search(self, current_node, level):
for state in self.possible_states(current_node.item):
current_node.set_child(state)
for node in current_node.children:
if node.item == self.success_state:
success_node = deepcopy(node)
print success_node.item
while success_node.parent:
success_node = success_node.parent
print success_node.item
self.breadth_first_search(node, level + 1)
def depth_first_search(self, current_node, level):
for state in self.possible_states(current_node.item):
current_node.set_child(state)
for node in current_node.children:
            if node.item == self.success_state:
                success_node = deepcopy(node)
                print success_node.item
                while success_node.parent:
                    success_node = success_node.parent
                    print success_node.item
self.depth_first_search(node, level + 1)
a = missionary_cannibals()
a.solution()
|
gboudreau/CouchPotato
|
refs/heads/master
|
library/hachoir_parser/video/mov.py
|
10
|
"""
Apple Quicktime Movie (file extension ".mov") parser.
Documents:
- Parsing and Writing QuickTime Files in Java (by Chris Adamson, 02/19/2003)
http://www.onjava.com/pub/a/onjava/2003/02/19/qt_file_format.html
- QuickTime File Format (official technical reference)
http://developer.apple.com/documentation/QuickTime/QTFF/qtff.pdf
- Apple QuickTime:
http://wiki.multimedia.cx/index.php?title=Apple_QuickTime
- File type (ftyp):
http://www.ftyps.com/
Author: Victor Stinner
Creation: 2 august 2006
"""
from hachoir_parser import Parser
from hachoir_core.field import (ParserError, FieldSet, MissingField,
UInt8, Int16, UInt16, UInt32, TimestampMac32,
String, PascalString8, CString,
RawBytes, PaddingBytes)
from hachoir_core.endian import BIG_ENDIAN
from hachoir_core.text_handler import textHandler, hexadecimal
class QTFloat32(FieldSet):
static_size = 32
def createFields(self):
yield Int16(self, "int_part")
yield UInt16(self, "float_part")
def createValue(self):
return self["int_part"].value + float(self["float_part"].value) / 65535
def createDescription(self):
return str(self.value)
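# Example (illustrative): the big-endian bytes 00 01 80 00 decode to
# int_part=1 and float_part=0x8000, i.e. 1 + 32768/65535 ~= 1.5 --
# QuickTime's 16.16-style fixed-point representation.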
class AtomList(FieldSet):
def createFields(self):
while not self.eof:
yield Atom(self, "atom[]")
class TrackHeader(FieldSet):
def createFields(self):
yield textHandler(UInt8(self, "version"), hexadecimal)
# TODO: sum of :
# TrackEnabled = 1;
# TrackInMovie = 2;
# TrackInPreview = 4;
# TrackInPoster = 8
yield RawBytes(self, "flags", 3)
yield TimestampMac32(self, "creation_date")
yield TimestampMac32(self, "lastmod_date")
yield UInt32(self, "track_id")
yield PaddingBytes(self, "reserved[]", 8)
yield UInt32(self, "duration")
yield PaddingBytes(self, "reserved[]", 8)
yield Int16(self, "video_layer", "Middle is 0, negative in front")
yield PaddingBytes(self, "other", 2)
yield QTFloat32(self, "geom_a", "Width scale")
yield QTFloat32(self, "geom_b", "Width rotate")
yield QTFloat32(self, "geom_u", "Width angle")
yield QTFloat32(self, "geom_c", "Height rotate")
yield QTFloat32(self, "geom_d", "Height scale")
yield QTFloat32(self, "geom_v", "Height angle")
yield QTFloat32(self, "geom_x", "Position X")
yield QTFloat32(self, "geom_y", "Position Y")
yield QTFloat32(self, "geom_w", "Divider scale")
yield QTFloat32(self, "frame_size_width")
yield QTFloat32(self, "frame_size_height")
class HDLR(FieldSet):
def createFields(self):
yield textHandler(UInt8(self, "version"), hexadecimal)
yield RawBytes(self, "flags", 3)
yield String(self, "subtype", 8)
yield String(self, "manufacturer", 4)
yield UInt32(self, "res_flags")
yield UInt32(self, "res_flags_mask")
if self.root.is_mpeg4:
yield CString(self, "name")
else:
yield PascalString8(self, "name")
class MediaHeader(FieldSet):
def createFields(self):
yield textHandler(UInt8(self, "version"), hexadecimal)
yield RawBytes(self, "flags", 3)
yield TimestampMac32(self, "creation_date")
yield TimestampMac32(self, "lastmod_date")
yield UInt32(self, "time_scale")
yield UInt32(self, "duration")
yield UInt16(self, "mac_lang")
yield Int16(self, "quality")
class ELST(FieldSet):
def createFields(self):
yield textHandler(UInt8(self, "version"), hexadecimal)
yield RawBytes(self, "flags", 3)
yield UInt32(self, "nb_edits")
yield UInt32(self, "length")
yield UInt32(self, "start")
yield QTFloat32(self, "playback_speed")
class Load(FieldSet):
def createFields(self):
yield UInt32(self, "start")
yield UInt32(self, "length")
yield UInt32(self, "flags") # PreloadAlways = 1 or TrackEnabledPreload = 2
yield UInt32(self, "hints") # KeepInBuffer = 0x00000004; HighQuality = 0x00000100; SingleFieldVideo = 0x00100000
class MovieHeader(FieldSet):
def createFields(self):
yield textHandler(UInt8(self, "version"), hexadecimal)
yield RawBytes(self, "flags", 3)
yield TimestampMac32(self, "creation_date")
yield TimestampMac32(self, "lastmod_date")
yield UInt32(self, "time_scale")
yield UInt32(self, "duration")
yield QTFloat32(self, "play_speed")
yield UInt16(self, "volume")
yield PaddingBytes(self, "reserved[]", 10)
yield QTFloat32(self, "geom_a", "Width scale")
yield QTFloat32(self, "geom_b", "Width rotate")
yield QTFloat32(self, "geom_u", "Width angle")
yield QTFloat32(self, "geom_c", "Height rotate")
yield QTFloat32(self, "geom_d", "Height scale")
yield QTFloat32(self, "geom_v", "Height angle")
yield QTFloat32(self, "geom_x", "Position X")
yield QTFloat32(self, "geom_y", "Position Y")
yield QTFloat32(self, "geom_w", "Divider scale")
yield UInt32(self, "preview_start")
yield UInt32(self, "preview_length")
yield UInt32(self, "still_poster")
yield UInt32(self, "sel_start")
yield UInt32(self, "sel_length")
yield UInt32(self, "current_time")
yield UInt32(self, "next_track")
class FileType(FieldSet):
def createFields(self):
yield String(self, "brand", 4, "Major brand")
yield UInt32(self, "version", "Version")
while not self.eof:
yield String(self, "compat_brand[]", 4, "Compatible brand")
class META(FieldSet):
def createFields(self):
yield UInt32(self, "unk")
yield AtomList(self, "tags")
class STCO(FieldSet):
def createFields(self):
yield textHandler(UInt8(self, "version"), hexadecimal)
yield RawBytes(self, "flags", 3)
yield UInt32(self, "count", description="Total entries in offset table")
for i in xrange(self['count'].value):
yield UInt32(self, "chunk_offset[]")
class SampleDescription(FieldSet):
def createFields(self):
yield UInt32(self, "size", "Sample Description Size")
yield RawBytes(self, "format", 4, "Data Format (codec)")
yield RawBytes(self, "reserved", 6, "Reserved")
yield UInt16(self, "index", "Data Reference Index")
yield UInt16(self, "version")
yield UInt16(self, "revision_level")
yield RawBytes(self, "vendor_id", 4)
yield UInt32(self, "temporal_quality")
yield UInt32(self, "spatial_quality")
yield UInt16(self, "width", "Width (pixels)")
yield UInt16(self, "height", "Height (pixels)")
yield UInt32(self, "horizontal_resolution")
yield UInt32(self, "vertical resolution")
yield UInt32(self, "data_size")
yield UInt16(self, "frame_count")
size = self['size'].value - self.current_size//8
if size > 0:
yield RawBytes(self, "extra_data", size)
class STSD(FieldSet):
def createFields(self):
yield textHandler(UInt8(self, "version"), hexadecimal)
yield RawBytes(self, "flags", 3)
yield UInt32(self, "count", description="Total entries in table")
for i in xrange(self['count'].value):
yield SampleDescription(self, "sample_description[]")
class STSS(FieldSet):
def createFields(self):
yield textHandler(UInt8(self, "version"), hexadecimal)
yield RawBytes(self, "flags", 3)
yield UInt32(self, "count", description="Number of sync samples")
for i in xrange(self['count'].value):
yield UInt32(self, "sync_sample[]")
class STSZ(FieldSet):
def createFields(self):
yield textHandler(UInt8(self, "version"), hexadecimal)
yield RawBytes(self, "flags", 3)
yield UInt32(self, "uniform_size", description="Uniform size of each sample (0 if non-uniform)")
yield UInt32(self, "count", description="Number of samples")
if self['uniform_size'].value == 0:
for i in xrange(self['count'].value):
yield UInt32(self, "sample_size[]")
class Atom(FieldSet):
tag_info = {
# TODO: Use dictionary of dictionaries, like Matroska parser does
# "elst" is a child of "edts", but not of "moov" for example
"moov": (AtomList, "movie", "Movie"),
"trak": (AtomList, "track", "Track"),
"mdia": (AtomList, "media", "Media"),
"edts": (AtomList, "edts", ""),
"minf": (AtomList, "minf", ""),
"stbl": (AtomList, "stbl", "Sample Table"),
"stco": (STCO, "stsd", "Sample Table Chunk Offset"),
"stsd": (STSD, "stsd", "Sample Table Sample Description"),
"stss": (STSS, "stss", "Sample Table Sync Samples"),
"stsz": (STSZ, "stsz", "Sample Table Sizes"),
"dinf": (AtomList, "dinf", ""),
"udta": (AtomList, "udta", ""),
"ilst": (AtomList, "ilst", ""),
"trkn": (AtomList, "trkn", "Metadata: Track number"),
"disk": (AtomList, "disk", "Metadata: Disk number"),
"tmpo": (AtomList, "tempo", "Metadata: Tempo"),
"cpil": (AtomList, "cpil", "Metadata: Compilation"),
"gnre": (AtomList, "gnre", "Metadata: Genre"),
"\xa9alb": (AtomList, "album", "Metadata: Album name"),
"\xa9ART": (AtomList, "artist", "Metadata: Artist name"),
"\xa9cmt": (AtomList, "comment", "Metadata: Comment"),
"\xa9nam": (AtomList, "name", "Metadata: Track name"),
"\xa9too": (AtomList, "tool", "Metadata: Creator program"),
"\xa9wrt": (AtomList, "composer", "Metadata: Composer name"),
"\xa9day": (AtomList, "date", "Metadata: Date of creation"),
"covr": (AtomList, "cover", "Metadata: Cover art"),
"----": (AtomList, "misc", "Metadata: Miscellaneous"),
"meta": (META, "meta", "File metadata"),
"elst": (ELST, "edts", ""),
"tkhd": (TrackHeader, "track_hdr", "Track header"),
"hdlr": (HDLR, "hdlr", ""),
"mdhd": (MediaHeader, "media_hdr", "Media header"),
"load": (Load, "load", ""),
"mvhd": (MovieHeader, "movie_hdr", "Movie header"),
"ftyp": (FileType, "file_type", "File type"),
}
    tag_handler = [ item[0] for item in tag_info.values() ]
    tag_desc = [ item[2] for item in tag_info.values() ]
def createFields(self):
yield UInt32(self, "size")
yield RawBytes(self, "tag", 4)
size = self["size"].value
if size == 1:
raise ParserError("Extended size is not supported!")
#yield UInt64(self, "size64")
size = self["size64"].value
elif size == 0:
#size = (self.root.size - self.root.current_size - self.current_size) / 8
if self._size is None:
size = (self.parent.size - self.current_size) / 8 - 8
else:
size = (self.size - self.current_size) / 8
else:
size = size - 8
if 0 < size:
tag = self["tag"].value
if tag in self.tag_info:
handler, name, desc = self.tag_info[tag]
yield handler(self, name, desc, size=size*8)
else:
yield RawBytes(self, "data", size)
def createDescription(self):
return "Atom: %s" % self["tag"].value
class MovFile(Parser):
PARSER_TAGS = {
"id": "mov",
"category": "video",
"file_ext": ("mov", "qt", "mp4", "m4v", "m4a", "m4p", "m4b"),
"mime": (u"video/quicktime", u'video/mp4'),
"min_size": 8*8,
"magic": (("moov", 4*8),),
"description": "Apple QuickTime movie"
}
BRANDS = {
# File type brand => MIME type
'mp41': u'video/mp4',
'mp42': u'video/mp4',
}
endian = BIG_ENDIAN
def __init__(self, *args, **kw):
Parser.__init__(self, *args, **kw)
self.is_mpeg4 = False
def validate(self):
# TODO: Write better code, erk!
size = self.stream.readBits(0, 32, self.endian)
if size < 8:
return "Invalid first atom size"
tag = self.stream.readBytes(4*8, 4)
return tag in ("ftyp", "moov", "free")
def createFields(self):
while not self.eof:
yield Atom(self, "atom[]")
def createMimeType(self):
first = self[0]
try:
# Read brands in the file type
if first['tag'].value != "ftyp":
return None
file_type = first["file_type"]
brand = file_type["brand"].value
if brand in self.BRANDS:
return self.BRANDS[brand]
for field in file_type.array("compat_brand"):
brand = field.value
if brand in self.BRANDS:
return self.BRANDS[brand]
except MissingField:
pass
return None
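# Usage sketch (illustrative; the file name is hypothetical):
#
#   from hachoir_parser import createParser
#   parser = createParser(u"movie.mov")   # returns a MovFile for .mov input
#   for atom in parser.array("atom"):
#       print atom["tag"].value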
|
liangwang/m5
|
refs/heads/master
|
configs/example/memtest.py
|
1
|
# Copyright (c) 2006-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ron Dreslinski
import optparse
import sys
import m5
from m5.objects import *
parser = optparse.OptionParser()
parser.add_option("-a", "--atomic", action="store_true",
help="Use atomic (non-timing) mode")
parser.add_option("-b", "--blocking", action="store_true",
help="Use blocking caches")
parser.add_option("-l", "--maxloads", metavar="N", default=0,
help="Stop after N loads")
parser.add_option("-m", "--maxtick", type="int", default=m5.MaxTick,
metavar="T",
help="Stop after T ticks")
#
# The "tree" specification is a colon-separated list of one or more
# integers. The first integer is the number of caches/testers
# connected directly to main memory. The last integer in the list is
# the number of testers associated with the uppermost level of memory
# (L1 cache, if there are caches, or main memory if no caches). Thus
# if there is only one integer, there are no caches, and the integer
# specifies the number of testers connected directly to main memory.
# The other integers (if any) specify the number of caches at each
# level of the hierarchy between.
#
# Examples:
#
# "2:1" Two caches connected to memory with a single tester behind each
# (single-level hierarchy, two testers total)
#
# "2:2:1" Two-level hierarchy, 2 L1s behind each of 2 L2s, 4 testers total
#
parser.add_option("-t", "--treespec", type="string", default="8:1",
help="Colon-separated multilevel tree specification, "
"see script comments for details "
"[default: %default]")
parser.add_option("--force-bus", action="store_true",
help="Use bus between levels even with single cache")
parser.add_option("-f", "--functional", type="int", default=0,
metavar="PCT",
help="Target percentage of functional accesses "
"[default: %default]")
parser.add_option("-u", "--uncacheable", type="int", default=0,
metavar="PCT",
help="Target percentage of uncacheable accesses "
"[default: %default]")
parser.add_option("--progress", type="int", default=1000,
metavar="NLOADS",
help="Progress message interval "
"[default: %default]")
(options, args) = parser.parse_args()
if args:
print "Error: script doesn't take any positional arguments"
sys.exit(1)
block_size = 64
try:
treespec = [int(x) for x in options.treespec.split(':')]
numtesters = reduce(lambda x,y: x*y, treespec)
except:
print "Error parsing treespec option"
sys.exit(1)
if numtesters > block_size:
print "Error: Number of testers limited to %s because of false sharing" \
% (block_size)
sys.exit(1)
if len(treespec) < 1:
print "Error parsing treespec"
sys.exit(1)
# define prototype L1 cache
proto_l1 = BaseCache(size = '32kB', assoc = 4, block_size = block_size,
latency = '1ns', tgts_per_mshr = 8)
if options.blocking:
proto_l1.mshrs = 1
else:
proto_l1.mshrs = 8
# build a list of prototypes, one for each level of treespec, starting
# at the end (last entry is tester objects)
prototypes = [ MemTest(atomic=options.atomic, max_loads=options.maxloads,
percent_functional=options.functional,
percent_uncacheable=options.uncacheable,
progress_interval=options.progress) ]
# next comes L1 cache, if any
if len(treespec) > 1:
prototypes.insert(0, proto_l1)
# now add additional cache levels (if any) by scaling L1 params
while len(prototypes) < len(treespec):
# clone previous level and update params
prev = prototypes[0]
next = prev()
next.size = prev.size * 4
next.latency = prev.latency * 10
next.assoc = prev.assoc * 2
prototypes.insert(0, next)
# system simulated
system = System(funcmem = PhysicalMemory(),
physmem = PhysicalMemory(latency = "100ns"))
def make_level(spec, prototypes, attach_obj, attach_port):
fanout = spec[0]
parent = attach_obj # use attach obj as config parent too
if len(spec) > 1 and (fanout > 1 or options.force_bus):
new_bus = Bus(clock="500MHz", width=16)
new_bus.port = getattr(attach_obj, attach_port)
parent.cpu_side_bus = new_bus
attach_obj = new_bus
attach_port = "port"
objs = [prototypes[0]() for i in xrange(fanout)]
if len(spec) > 1:
# we just built caches, more levels to go
parent.cache = objs
for cache in objs:
cache.mem_side = getattr(attach_obj, attach_port)
make_level(spec[1:], prototypes[1:], cache, "cpu_side")
else:
# we just built the MemTest objects
parent.cpu = objs
for t in objs:
t.test = getattr(attach_obj, attach_port)
t.functional = system.funcmem.port
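# Illustration (an added sketch, not part of the original script): with the
# default "8:1" treespec, make_level() puts a bus below physmem, hangs eight
# L1 caches off that bus, and attaches one MemTest directly behind each cache
# (no second bus, since the per-cache fanout is 1).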
make_level(treespec, prototypes, system.physmem, "port")
# -----------------------
# run simulation
# -----------------------
root = Root( system = system )
if options.atomic:
root.system.mem_mode = 'atomic'
else:
root.system.mem_mode = 'timing'
# Not much point in this being higher than the L1 latency
m5.ticks.setGlobalFrequency('1ns')
# instantiate configuration
m5.instantiate(root)
# simulate until program terminates
exit_event = m5.simulate(options.maxtick)
print 'Exiting @ tick', m5.curTick(), 'because', exit_event.getCause()
|
mtndesign/myVim
|
refs/heads/master
|
myvim/bundle/ropevim/ftplugin/python/libs/rope/refactor/occurrences.py
|
91
|
import re
import rope.base.pynames
from rope.base import pynames, pyobjects, codeanalyze, evaluate, exceptions, utils, worder
class Finder(object):
"""For finding occurrences of a name
The constructor takes a `filters` argument. It should be a list
of functions that take a single argument. For each possible
occurrence, these functions are called in order with the an
instance of `Occurrence`:
* If it returns `None` other filters are tried.
* If it returns `True`, the occurrence will be a match.
* If it returns `False`, the occurrence will be skipped.
* If all of the filters return `None`, it is skipped also.
"""
def __init__(self, pycore, name, filters=[lambda o: True], docs=False):
self.pycore = pycore
self.name = name
self.docs = docs
self.filters = filters
self._textual_finder = _TextualFinder(name, docs=docs)
def find_occurrences(self, resource=None, pymodule=None):
"""Generate `Occurrence` instances"""
tools = _OccurrenceToolsCreator(self.pycore, resource=resource,
pymodule=pymodule, docs=self.docs)
for offset in self._textual_finder.find_offsets(tools.source_code):
occurrence = Occurrence(tools, offset)
for filter in self.filters:
result = filter(occurrence)
if result is None:
continue
if result:
yield occurrence
break
def create_finder(pycore, name, pyname, only_calls=False, imports=True,
unsure=None, docs=False, instance=None, in_hierarchy=False):
"""A factory for `Finder`
Based on the arguments it creates a list of filters. `instance`
argument is needed only when you want implicit interfaces to be
considered.
"""
pynames = set([pyname])
filters = []
if only_calls:
filters.append(CallsFilter())
if not imports:
filters.append(NoImportsFilter())
if isinstance(instance, rope.base.pynames.ParameterName):
for pyobject in instance.get_objects():
try:
pynames.add(pyobject[name])
except exceptions.AttributeNotFoundError:
pass
for pyname in pynames:
filters.append(PyNameFilter(pyname))
if in_hierarchy:
filters.append(InHierarchyFilter(pyname))
if unsure:
filters.append(UnsureFilter(unsure))
return Finder(pycore, name, filters=filters, docs=docs)
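# Illustrative usage (an added sketch; `project`, `pyname` and `resource` are
# placeholders for objects from an existing rope project):
#
#     finder = create_finder(project.pycore, 'foo', pyname, only_calls=True)
#     for occurrence in finder.find_occurrences(resource=resource):
#         print occurrence.get_word_range()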
class Occurrence(object):
def __init__(self, tools, offset):
self.tools = tools
self.offset = offset
self.resource = tools.resource
@utils.saveit
def get_word_range(self):
return self.tools.word_finder.get_word_range(self.offset)
@utils.saveit
def get_primary_range(self):
return self.tools.word_finder.get_primary_range(self.offset)
@utils.saveit
def get_pyname(self):
try:
return self.tools.name_finder.get_pyname_at(self.offset)
except exceptions.BadIdentifierError:
pass
@utils.saveit
def get_primary_and_pyname(self):
try:
return self.tools.name_finder.get_primary_and_pyname_at(self.offset)
except exceptions.BadIdentifierError:
pass
@utils.saveit
def is_in_import_statement(self):
return (self.tools.word_finder.is_from_statement(self.offset) or
self.tools.word_finder.is_import_statement(self.offset))
def is_called(self):
return self.tools.word_finder.is_a_function_being_called(self.offset)
def is_defined(self):
return self.tools.word_finder.is_a_class_or_function_name_in_header(self.offset)
def is_a_fixed_primary(self):
return self.tools.word_finder.is_a_class_or_function_name_in_header(self.offset) or \
self.tools.word_finder.is_a_name_after_from_import(self.offset)
def is_written(self):
return self.tools.word_finder.is_assigned_here(self.offset)
def is_unsure(self):
return unsure_pyname(self.get_pyname())
@property
@utils.saveit
def lineno(self):
offset = self.get_word_range()[0]
return self.tools.pymodule.lines.get_line_number(offset)
def same_pyname(expected, pyname):
"""Check whether `expected` and `pyname` are the same"""
if expected is None or pyname is None:
return False
if expected == pyname:
return True
if type(expected) not in (pynames.ImportedModule, pynames.ImportedName) and \
type(pyname) not in (pynames.ImportedModule, pynames.ImportedName):
return False
return expected.get_definition_location() == pyname.get_definition_location() and \
expected.get_object() == pyname.get_object()
def unsure_pyname(pyname, unbound=True):
"""Return `True` if we don't know what this name references"""
if pyname is None:
return True
if unbound and not isinstance(pyname, pynames.UnboundName):
return False
if pyname.get_object() == pyobjects.get_unknown():
return True
class PyNameFilter(object):
"""For finding occurrences of a name"""
def __init__(self, pyname):
self.pyname = pyname
def __call__(self, occurrence):
if same_pyname(self.pyname, occurrence.get_pyname()):
return True
class InHierarchyFilter(object):
"""For finding occurrences of a name"""
def __init__(self, pyname, implementations_only=False):
self.pyname = pyname
self.impl_only = implementations_only
self.pyclass = self._get_containing_class(pyname)
if self.pyclass is not None:
self.name = pyname.get_object().get_name()
self.roots = self._get_root_classes(self.pyclass, self.name)
else:
self.roots = None
def __call__(self, occurrence):
if self.roots is None:
return
pyclass = self._get_containing_class(occurrence.get_pyname())
if pyclass is not None:
roots = self._get_root_classes(pyclass, self.name)
if self.roots.intersection(roots):
return True
def _get_containing_class(self, pyname):
if isinstance(pyname, pynames.DefinedName):
scope = pyname.get_object().get_scope()
parent = scope.parent
if parent is not None and parent.get_kind() == 'Class':
return parent.pyobject
def _get_root_classes(self, pyclass, name):
if self.impl_only and pyclass == self.pyclass:
return set([pyclass])
result = set()
for superclass in pyclass.get_superclasses():
if name in superclass:
result.update(self._get_root_classes(superclass, name))
if not result:
return set([pyclass])
return result
class UnsureFilter(object):
def __init__(self, unsure):
self.unsure = unsure
def __call__(self, occurrence):
if occurrence.is_unsure() and self.unsure(occurrence):
return True
class NoImportsFilter(object):
def __call__(self, occurrence):
if occurrence.is_in_import_statement():
return False
class CallsFilter(object):
def __call__(self, occurrence):
if not occurrence.is_called():
return False
class _TextualFinder(object):
def __init__(self, name, docs=False):
self.name = name
self.docs = docs
self.comment_pattern = _TextualFinder.any('comment', [r'#[^\n]*'])
self.string_pattern = _TextualFinder.any(
'string', [codeanalyze.get_string_pattern()])
self.pattern = self._get_occurrence_pattern(self.name)
def find_offsets(self, source):
if not self._fast_file_query(source):
return
if self.docs:
searcher = self._normal_search
else:
searcher = self._re_search
for matched in searcher(source):
yield matched
def _re_search(self, source):
for match in self.pattern.finditer(source):
for key, value in match.groupdict().items():
if value and key == 'occurrence':
yield match.start(key)
def _normal_search(self, source):
current = 0
while True:
try:
found = source.index(self.name, current)
current = found + len(self.name)
if (found == 0 or not self._is_id_char(source[found - 1])) and \
(current == len(source) or not self._is_id_char(source[current])):
yield found
except ValueError:
break
def _is_id_char(self, c):
return c.isalnum() or c == '_'
def _fast_file_query(self, source):
try:
source.index(self.name)
return True
except ValueError:
return False
def _get_source(self, resource, pymodule):
if resource is not None:
return resource.read()
else:
return pymodule.source_code
def _get_occurrence_pattern(self, name):
occurrence_pattern = _TextualFinder.any('occurrence',
['\\b' + name + '\\b'])
pattern = re.compile(occurrence_pattern + '|' + self.comment_pattern +
'|' + self.string_pattern)
return pattern
@staticmethod
def any(name, list_):
return '(?P<%s>' % name + '|'.join(list_) + ')'
class _OccurrenceToolsCreator(object):
def __init__(self, pycore, resource=None, pymodule=None, docs=False):
self.pycore = pycore
self.__resource = resource
self.__pymodule = pymodule
self.docs = docs
@property
@utils.saveit
def name_finder(self):
return evaluate.ScopeNameFinder(self.pymodule)
@property
@utils.saveit
def source_code(self):
if self.__resource is not None:
return self.resource.read()
else:
return self.pymodule.source_code
@property
@utils.saveit
def word_finder(self):
return worder.Worder(self.source_code, self.docs)
@property
@utils.saveit
def resource(self):
if self.__resource is not None:
return self.__resource
if self.__pymodule is not None:
return self.__pymodule.resource
@property
@utils.saveit
def pymodule(self):
if self.__pymodule is not None:
return self.__pymodule
return self.pycore.resource_to_pyobject(self.resource)
|
sjhewitt/graphene
|
refs/heads/master
|
graphene/types/argument.py
|
1
|
from collections import OrderedDict
from itertools import chain
from ..utils.orderedtype import OrderedType
from .structures import NonNull
from .dynamic import Dynamic
class Argument(OrderedType):
def __init__(self, type, default_value=None, description=None, name=None, required=False, _creation_counter=None):
super(Argument, self).__init__(_creation_counter=_creation_counter)
if required:
type = NonNull(type)
self.name = name
self.type = type
self.default_value = default_value
self.description = description
    def __eq__(self, other):
        # Join the comparisons with ``and``; wrapping them in a tuple would
        # make the result always truthy and silently break equality checks.
        return isinstance(other, Argument) and (
            self.name == other.name and
            self.type == other.type and
            self.default_value == other.default_value and
            self.description == other.description
        )
def to_arguments(args, extra_args):
from .unmountedtype import UnmountedType
extra_args = sorted(extra_args.items(), key=lambda f: f[1])
iter_arguments = chain(args.items(), extra_args)
arguments = OrderedDict()
for default_name, arg in iter_arguments:
if isinstance(arg, Dynamic):
arg = arg.get_type()
if arg is None:
# If the Dynamic type returned None
# then we skip the Argument
continue
if isinstance(arg, UnmountedType):
arg = arg.Argument()
if not isinstance(arg, Argument):
raise ValueError('Unknown argument "{}".'.format(default_name))
arg_name = default_name or arg.name
        assert arg_name not in arguments, 'More than one Argument has the same name "{}".'.format(arg_name)
arguments[arg_name] = arg
return arguments
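# Illustrative usage (an added sketch; ``SomeScalar`` is a hypothetical
# mounted type, not part of this module):
#
#     args = to_arguments(OrderedDict(), {'name': Argument(SomeScalar)})
#     # -> OrderedDict([('name', Argument(SomeScalar))])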
|
mircealungu/Zeeguu-Core
|
refs/heads/master
|
zeeguu_core_test/test_article.py
|
1
|
from unittest import TestCase
from zeeguu_core_test.model_test_mixin import ModelTestMixIn
import zeeguu_core
from zeeguu_core_test.rules.article_rule import ArticleRule
from zeeguu_core_test.rules.language_rule import LanguageRule
from zeeguu_core.model import Topic, Article
from zeeguu_core_test.test_data.mocking_the_web import url_plane_crashes, url_formation_professionnelle
session = zeeguu_core.db.session
class ArticleTest(ModelTestMixIn, TestCase):
def setUp(self):
super().setUp()
self.article1 = ArticleRule().article
self.article2 = ArticleRule().article
self.language = LanguageRule.get_or_create_language("en")
def test_articles_are_different(self):
assert (self.article1.title != self.article2.title)
def test_article_representation_does_not_error(self):
assert self.article1.article_info()
def test_add_topic(self):
health = Topic("health")
sports = Topic("sports")
self.article1.add_topic(health)
self.article1.add_topic(sports)
assert len(self.article1.topics) == 2
def test_find_or_create(self):
self.new_art = Article.find_or_create(session, url_formation_professionnelle)
assert (self.new_art.fk_difficulty)
def test_load_article_without_language_information(self):
art = Article.find_or_create(session, url_plane_crashes)
assert (art)
|
boomsbloom/dtm-fmri
|
refs/heads/master
|
DTM/for_gensim/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/jpcntx.py
|
1776
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .compat import wrap_ord
NUM_OF_CATEGORY = 6
DONT_KNOW = -1
ENOUGH_REL_THRESHOLD = 100
MAX_REL_THRESHOLD = 1000
MINIMUM_DATA_THRESHOLD = 4
# This is the hiragana 2-char sequence table; the number in each cell
# represents the frequency category of that sequence.
jp2CharContext = (
(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
)
class JapaneseContextAnalysis:
def __init__(self):
self.reset()
def reset(self):
        self._mTotalRel = 0  # total sequences received
        # category counters; each integer counts sequences in its category
self._mRelSample = [0] * NUM_OF_CATEGORY
# if last byte in current buffer is not the last byte of a character,
# we need to know how many bytes to skip in next buffer
self._mNeedToSkipCharNum = 0
self._mLastCharOrder = -1 # The order of previous char
# If this flag is set to True, detection is done and conclusion has
# been made
self._mDone = False
def feed(self, aBuf, aLen):
if self._mDone:
return
        # The buffer we got is byte oriented, and a character may span more
        # than one buffer. In case the last one or two bytes of the previous
        # buffer were not a complete character, we recorded how many bytes
        # are needed to complete it, and we skip those bytes here. We could
        # record those bytes and analyse the character once it is complete,
        # but since a single character makes little difference, simply
        # skipping it simplifies our logic and improves performance
        # (see the illustration just below).
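        # Illustration (an added sketch, not part of the original module):
        # if a two-byte character starts at the final byte of this buffer,
        # get_order() reports charLen == 2, i overshoots aLen by one, and
        # _mNeedToSkipCharNum = 1 tells the next feed() call to skip that
        # leftover trailing byte.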
i = self._mNeedToSkipCharNum
while i < aLen:
order, charLen = self.get_order(aBuf[i:i + 2])
i += charLen
if i > aLen:
self._mNeedToSkipCharNum = i - aLen
self._mLastCharOrder = -1
else:
if (order != -1) and (self._mLastCharOrder != -1):
self._mTotalRel += 1
if self._mTotalRel > MAX_REL_THRESHOLD:
self._mDone = True
break
self._mRelSample[jp2CharContext[self._mLastCharOrder][order]] += 1
self._mLastCharOrder = order
def got_enough_data(self):
return self._mTotalRel > ENOUGH_REL_THRESHOLD
def get_confidence(self):
# This is just one way to calculate confidence. It works well for me.
if self._mTotalRel > MINIMUM_DATA_THRESHOLD:
return (self._mTotalRel - self._mRelSample[0]) / self._mTotalRel
else:
return DONT_KNOW
def get_order(self, aBuf):
return -1, 1
class SJISContextAnalysis(JapaneseContextAnalysis):
def __init__(self):
self.charset_name = "SHIFT_JIS"
def get_charset_name(self):
return self.charset_name
def get_order(self, aBuf):
if not aBuf:
return -1, 1
# find out current char's byte length
first_char = wrap_ord(aBuf[0])
if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)):
charLen = 2
if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
self.charset_name = "CP932"
else:
charLen = 1
# return its order if it is hiragana
if len(aBuf) > 1:
second_char = wrap_ord(aBuf[1])
if (first_char == 202) and (0x9F <= second_char <= 0xF1):
return second_char - 0x9F, charLen
return -1, charLen
class EUCJPContextAnalysis(JapaneseContextAnalysis):
def get_order(self, aBuf):
if not aBuf:
return -1, 1
# find out current char's byte length
first_char = wrap_ord(aBuf[0])
if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
charLen = 2
elif first_char == 0x8F:
charLen = 3
else:
charLen = 1
# return its order if it is hiragana
if len(aBuf) > 1:
second_char = wrap_ord(aBuf[1])
if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
return second_char - 0xA1, charLen
return -1, charLen
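# Illustration (an added sketch): in EUC-JP the hiragana あ (U+3042) is the
# byte pair 0xA4 0xA2, so get_order() returns (0xA2 - 0xA1, 2) == (1, 2);
# a non-hiragana character yields order -1 together with its byte length.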
# flake8: noqa
|
magvugr/AT
|
refs/heads/master
|
EntVirtual/lib/python2.7/site-packages/django/conf/locale/fy/formats.py
|
852
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# DATE_FORMAT =
# TIME_FORMAT =
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
# MONTH_DAY_FORMAT =
# SHORT_DATE_FORMAT =
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
# DECIMAL_SEPARATOR =
# THOUSAND_SEPARATOR =
# NUMBER_GROUPING =
|
msabramo/pip
|
refs/heads/develop
|
pip/_vendor/requests/packages/urllib3/packages/six.py
|
2374
|
"""Utilities for writing code that runs on Python 2 and 3"""
#Copyright (c) 2010-2011 Benjamin Peterson
#Permission is hereby granted, free of charge, to any person obtaining a copy of
#this software and associated documentation files (the "Software"), to deal in
#the Software without restriction, including without limitation the rights to
#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
#the Software, and to permit persons to whom the Software is furnished to do so,
#subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
#FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
#COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
#IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
#CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.2.0" # Revision 41c74fef2ded
# True if we are running on Python 3.
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
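        # Illustration (an added note): len() must fit in a C Py_ssize_t, so
        # on a 32-bit build the fake __len__ above makes len(X()) raise
        # OverflowError and MAXSIZE falls back to 2**31 - 1; on 64-bit builds
        # len(X()) succeeds and MAXSIZE becomes 2**63 - 1.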
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result)
# This is a bit ugly, but it avoids running this again.
delattr(tp, self.name)
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _MovedItems(types.ModuleType):
"""Lazy loading of moved objects"""
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
del attr
moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_iterkeys = "keys"
_itervalues = "values"
_iteritems = "items"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_code = "func_code"
_func_defaults = "func_defaults"
_iterkeys = "iterkeys"
_itervalues = "itervalues"
_iteritems = "iteritems"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
if PY3:
def get_unbound_function(unbound):
return unbound
Iterator = object
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
else:
def get_unbound_function(unbound):
return unbound.im_func
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
def iterkeys(d):
"""Return an iterator over the keys of a dictionary."""
return iter(getattr(d, _iterkeys)())
def itervalues(d):
"""Return an iterator over the values of a dictionary."""
return iter(getattr(d, _itervalues)())
def iteritems(d):
"""Return an iterator over the (key, value) pairs of a dictionary."""
return iter(getattr(d, _iteritems)())
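# Illustrative usage (an added sketch):
#     for key, value in iteritems({'a': 1}):
#         ...
# iterates lazily on both Python 2 and Python 3, without materialising an
# intermediate list of items on Python 2.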
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
if sys.version_info[1] <= 1:
def int2byte(i):
return bytes((i,))
else:
# This is about 2x faster than the implementation above on 3.2+
int2byte = operator.methodcaller("to_bytes", 1, "big")
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
else:
def b(s):
return s
def u(s):
return unicode(s, "unicode_escape")
int2byte = chr
import StringIO
StringIO = BytesIO = StringIO.StringIO
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
if PY3:
import builtins
exec_ = getattr(builtins, "exec")
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
print_ = getattr(builtins, "print")
del builtins
else:
def exec_(code, globs=None, locs=None):
"""Execute code in a namespace."""
if globs is None:
frame = sys._getframe(1)
globs = frame.f_globals
if locs is None:
locs = frame.f_locals
del frame
elif locs is None:
locs = globs
exec("""exec code in globs, locs""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
def print_(*args, **kwargs):
"""The new-style print function."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
_add_doc(reraise, """Reraise an exception.""")
def with_metaclass(meta, base=object):
"""Create a base class with a metaclass."""
return meta("NewBase", (base,), {})
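# Illustrative usage (an added sketch):
#     class MyClass(with_metaclass(Meta, Base)):
#         pass
# creates MyClass with metaclass ``Meta`` under both Python 2 and Python 3.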
|
ya7lelkom/linguist
|
refs/heads/master
|
samples/Python/django-models-base.py
|
92
|
from __future__ import unicode_literals
import copy
import sys
from functools import update_wrapper
from future_builtins import zip
import django.db.models.manager # Imported to register signal handler.
from django.conf import settings
from django.core.exceptions import (ObjectDoesNotExist,
MultipleObjectsReturned, FieldError, ValidationError, NON_FIELD_ERRORS)
from django.core import validators
from django.db.models.fields import AutoField, FieldDoesNotExist
from django.db.models.fields.related import (ManyToOneRel,
OneToOneField, add_lazy_relation)
from django.db import (router, transaction, DatabaseError,
DEFAULT_DB_ALIAS)
from django.db.models.query import Q
from django.db.models.query_utils import DeferredAttribute
from django.db.models.deletion import Collector
from django.db.models.options import Options
from django.db.models import signals
from django.db.models.loading import register_models, get_model
from django.utils.translation import ugettext_lazy as _
from django.utils.functional import curry
from django.utils.encoding import smart_str, force_unicode
from django.utils.text import get_text_list, capfirst
class ModelBase(type):
"""
Metaclass for all models.
"""
def __new__(cls, name, bases, attrs):
super_new = super(ModelBase, cls).__new__
parents = [b for b in bases if isinstance(b, ModelBase)]
if not parents:
# If this isn't a subclass of Model, don't do anything special.
return super_new(cls, name, bases, attrs)
# Create the class.
module = attrs.pop('__module__')
new_class = super_new(cls, name, bases, {'__module__': module})
attr_meta = attrs.pop('Meta', None)
abstract = getattr(attr_meta, 'abstract', False)
if not attr_meta:
meta = getattr(new_class, 'Meta', None)
else:
meta = attr_meta
base_meta = getattr(new_class, '_meta', None)
if getattr(meta, 'app_label', None) is None:
# Figure out the app_label by looking one level up.
# For 'django.contrib.sites.models', this would be 'sites'.
model_module = sys.modules[new_class.__module__]
kwargs = {"app_label": model_module.__name__.split('.')[-2]}
else:
kwargs = {}
new_class.add_to_class('_meta', Options(meta, **kwargs))
if not abstract:
new_class.add_to_class('DoesNotExist', subclass_exception(b'DoesNotExist',
tuple(x.DoesNotExist
for x in parents if hasattr(x, '_meta') and not x._meta.abstract)
or (ObjectDoesNotExist,), module))
new_class.add_to_class('MultipleObjectsReturned', subclass_exception(b'MultipleObjectsReturned',
tuple(x.MultipleObjectsReturned
for x in parents if hasattr(x, '_meta') and not x._meta.abstract)
or (MultipleObjectsReturned,), module))
if base_meta and not base_meta.abstract:
# Non-abstract child classes inherit some attributes from their
# non-abstract parent (unless an ABC comes before it in the
# method resolution order).
if not hasattr(meta, 'ordering'):
new_class._meta.ordering = base_meta.ordering
if not hasattr(meta, 'get_latest_by'):
new_class._meta.get_latest_by = base_meta.get_latest_by
is_proxy = new_class._meta.proxy
if getattr(new_class, '_default_manager', None):
if not is_proxy:
# Multi-table inheritance doesn't inherit default manager from
# parents.
new_class._default_manager = None
new_class._base_manager = None
else:
# Proxy classes do inherit parent's default manager, if none is
# set explicitly.
new_class._default_manager = new_class._default_manager._copy_to_model(new_class)
new_class._base_manager = new_class._base_manager._copy_to_model(new_class)
# Bail out early if we have already created this class.
m = get_model(new_class._meta.app_label, name,
seed_cache=False, only_installed=False)
if m is not None:
return m
# Add all attributes to the class.
for obj_name, obj in attrs.items():
new_class.add_to_class(obj_name, obj)
# All the fields of any type declared on this model
new_fields = new_class._meta.local_fields + \
new_class._meta.local_many_to_many + \
new_class._meta.virtual_fields
field_names = set([f.name for f in new_fields])
# Basic setup for proxy models.
if is_proxy:
base = None
for parent in [cls for cls in parents if hasattr(cls, '_meta')]:
if parent._meta.abstract:
if parent._meta.fields:
raise TypeError("Abstract base class containing model fields not permitted for proxy model '%s'." % name)
else:
continue
if base is not None:
raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name)
else:
base = parent
if base is None:
raise TypeError("Proxy model '%s' has no non-abstract model base class." % name)
if (new_class._meta.local_fields or
new_class._meta.local_many_to_many):
raise FieldError("Proxy model '%s' contains model fields." % name)
new_class._meta.setup_proxy(base)
new_class._meta.concrete_model = base._meta.concrete_model
else:
new_class._meta.concrete_model = new_class
# Do the appropriate setup for any model parents.
o2o_map = dict([(f.rel.to, f) for f in new_class._meta.local_fields
if isinstance(f, OneToOneField)])
for base in parents:
original_base = base
if not hasattr(base, '_meta'):
# Things without _meta aren't functional models, so they're
# uninteresting parents.
continue
parent_fields = base._meta.local_fields + base._meta.local_many_to_many
# Check for clashes between locally declared fields and those
# on the base classes (we cannot handle shadowed fields at the
# moment).
for field in parent_fields:
if field.name in field_names:
raise FieldError('Local field %r in class %r clashes '
'with field of similar name from '
'base class %r' %
(field.name, name, base.__name__))
if not base._meta.abstract:
# Concrete classes...
base = base._meta.concrete_model
if base in o2o_map:
field = o2o_map[base]
elif not is_proxy:
attr_name = '%s_ptr' % base._meta.module_name
field = OneToOneField(base, name=attr_name,
auto_created=True, parent_link=True)
new_class.add_to_class(attr_name, field)
else:
field = None
new_class._meta.parents[base] = field
else:
# .. and abstract ones.
for field in parent_fields:
new_class.add_to_class(field.name, copy.deepcopy(field))
# Pass any non-abstract parent classes onto child.
new_class._meta.parents.update(base._meta.parents)
# Inherit managers from the abstract base classes.
new_class.copy_managers(base._meta.abstract_managers)
# Proxy models inherit the non-abstract managers from their base,
# unless they have redefined any of them.
if is_proxy:
new_class.copy_managers(original_base._meta.concrete_managers)
# Inherit virtual fields (like GenericForeignKey) from the parent
# class
for field in base._meta.virtual_fields:
if base._meta.abstract and field.name in field_names:
raise FieldError('Local field %r in class %r clashes '\
'with field of similar name from '\
'abstract base class %r' % \
(field.name, name, base.__name__))
new_class.add_to_class(field.name, copy.deepcopy(field))
if abstract:
# Abstract base models can't be instantiated and don't appear in
# the list of models for an app. We do the final setup for them a
# little differently from normal models.
attr_meta.abstract = False
new_class.Meta = attr_meta
return new_class
new_class._prepare()
register_models(new_class._meta.app_label, new_class)
# Because of the way imports happen (recursively), we may or may not be
# the first time this model tries to register with the framework. There
# should only be one class for each model, so we always return the
# registered version.
return get_model(new_class._meta.app_label, name,
seed_cache=False, only_installed=False)
def copy_managers(cls, base_managers):
# This is in-place sorting of an Options attribute, but that's fine.
base_managers.sort()
for _, mgr_name, manager in base_managers:
val = getattr(cls, mgr_name, None)
if not val or val is manager:
new_manager = manager._copy_to_model(cls)
cls.add_to_class(mgr_name, new_manager)
def add_to_class(cls, name, value):
if hasattr(value, 'contribute_to_class'):
value.contribute_to_class(cls, name)
else:
setattr(cls, name, value)
def _prepare(cls):
"""
Creates some methods once self._meta has been populated.
"""
opts = cls._meta
opts._prepare(cls)
if opts.order_with_respect_to:
cls.get_next_in_order = curry(cls._get_next_or_previous_in_order, is_next=True)
cls.get_previous_in_order = curry(cls._get_next_or_previous_in_order, is_next=False)
# defer creating accessors on the foreign class until we are
# certain it has been created
def make_foreign_order_accessors(field, model, cls):
setattr(
field.rel.to,
'get_%s_order' % cls.__name__.lower(),
curry(method_get_order, cls)
)
setattr(
field.rel.to,
'set_%s_order' % cls.__name__.lower(),
curry(method_set_order, cls)
)
add_lazy_relation(
cls,
opts.order_with_respect_to,
opts.order_with_respect_to.rel.to,
make_foreign_order_accessors
)
# Give the class a docstring -- its definition.
if cls.__doc__ is None:
cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join([f.attname for f in opts.fields]))
if hasattr(cls, 'get_absolute_url'):
cls.get_absolute_url = update_wrapper(curry(get_absolute_url, opts, cls.get_absolute_url),
cls.get_absolute_url)
signals.class_prepared.send(sender=cls)
class ModelState(object):
"""
A class for storing instance state
"""
def __init__(self, db=None):
self.db = db
# If true, uniqueness validation checks will consider this a new, as-yet-unsaved object.
# Necessary for correct validation of new instances of objects with explicit (non-auto) PKs.
# This impacts validation only; it has no effect on the actual save.
self.adding = True
class Model(object):
__metaclass__ = ModelBase
_deferred = False
def __init__(self, *args, **kwargs):
signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)
# Set up the storage for instance state
self._state = ModelState()
        # There is a rather weird disparity here; if kwargs, it's set, then
        # args overrides it. It should be one or the other; don't duplicate
        # the work. The reason for the kwargs check is that the standard
        # iterator passes in by args, and instantiation for iteration is
        # 33% faster.
args_len = len(args)
if args_len > len(self._meta.fields):
# Daft, but matches old exception sans the err msg.
raise IndexError("Number of args exceeds number of fields")
fields_iter = iter(self._meta.fields)
if not kwargs:
            # The ordering of the zip calls matters - zip raises
            # StopIteration when an iterator raises it. So if the first
            # iterator raises it, the second is *not* consumed. We rely on
            # this, so don't change the order without changing the logic.
for val, field in zip(args, fields_iter):
setattr(self, field.attname, val)
else:
# Slower, kwargs-ready version.
for val, field in zip(args, fields_iter):
setattr(self, field.attname, val)
kwargs.pop(field.name, None)
# Maintain compatibility with existing calls.
if isinstance(field.rel, ManyToOneRel):
kwargs.pop(field.attname, None)
# Now we're left with the unprocessed fields that *must* come from
# keywords, or default.
for field in fields_iter:
is_related_object = False
# This slightly odd construct is so that we can access any
# data-descriptor object (DeferredAttribute) without triggering its
# __get__ method.
if (field.attname not in kwargs and
isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute)):
# This field will be populated on request.
continue
if kwargs:
if isinstance(field.rel, ManyToOneRel):
try:
# Assume object instance was passed in.
rel_obj = kwargs.pop(field.name)
is_related_object = True
except KeyError:
try:
# Object instance wasn't passed in -- must be an ID.
val = kwargs.pop(field.attname)
except KeyError:
val = field.get_default()
else:
# Object instance was passed in. Special case: You can
# pass in "None" for related objects if it's allowed.
if rel_obj is None and field.null:
val = None
else:
try:
val = kwargs.pop(field.attname)
except KeyError:
# This is done with an exception rather than the
# default argument on pop because we don't want
# get_default() to be evaluated, and then not used.
# Refs #12057.
val = field.get_default()
else:
val = field.get_default()
if is_related_object:
# If we are passed a related instance, set it using the
# field.name instead of field.attname (e.g. "user" instead of
# "user_id") so that the object gets properly cached (and type
# checked) by the RelatedObjectDescriptor.
setattr(self, field.name, rel_obj)
else:
setattr(self, field.attname, val)
if kwargs:
for prop in kwargs.keys():
try:
if isinstance(getattr(self.__class__, prop), property):
setattr(self, prop, kwargs.pop(prop))
except AttributeError:
pass
if kwargs:
raise TypeError("'%s' is an invalid keyword argument for this function" % kwargs.keys()[0])
super(Model, self).__init__()
signals.post_init.send(sender=self.__class__, instance=self)
def __repr__(self):
try:
u = unicode(self)
except (UnicodeEncodeError, UnicodeDecodeError):
u = '[Bad Unicode data]'
return smart_str('<%s: %s>' % (self.__class__.__name__, u))
def __str__(self):
if hasattr(self, '__unicode__'):
return force_unicode(self).encode('utf-8')
return '%s object' % self.__class__.__name__
def __eq__(self, other):
return isinstance(other, self.__class__) and self._get_pk_val() == other._get_pk_val()
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self._get_pk_val())
def __reduce__(self):
"""
Provides pickling support. Normally, this just dispatches to Python's
standard handling. However, for models with deferred field loading, we
need to do things manually, as they're dynamically created classes and
only module-level classes can be pickled by the default path.
"""
data = self.__dict__
model = self.__class__
# The obvious thing to do here is to invoke super().__reduce__()
# for the non-deferred case. Don't do that.
# On Python 2.4, there is something weird with __reduce__,
# and as a result, the super call will cause an infinite recursion.
# See #10547 and #12121.
defers = []
if self._deferred:
from django.db.models.query_utils import deferred_class_factory
factory = deferred_class_factory
for field in self._meta.fields:
if isinstance(self.__class__.__dict__.get(field.attname),
DeferredAttribute):
defers.append(field.attname)
model = self._meta.proxy_for_model
else:
factory = simple_class_factory
return (model_unpickle, (model, defers, factory), data)
def _get_pk_val(self, meta=None):
if not meta:
meta = self._meta
return getattr(self, meta.pk.attname)
def _set_pk_val(self, value):
return setattr(self, self._meta.pk.attname, value)
pk = property(_get_pk_val, _set_pk_val)
def serializable_value(self, field_name):
"""
Returns the value of the field name for this instance. If the field is
a foreign key, returns the id value, instead of the object. If there's
no Field object with this name on the model, the model attribute's
value is returned directly.
Used to serialize a field's value (in the serializer, or form output,
for example). Normally, you would just access the attribute directly
and not use this method.
"""
try:
field = self._meta.get_field_by_name(field_name)[0]
except FieldDoesNotExist:
return getattr(self, field_name)
return getattr(self, field.attname)
def save(self, force_insert=False, force_update=False, using=None,
update_fields=None):
"""
Saves the current instance. Override this in a subclass if you want to
control the saving process.
The 'force_insert' and 'force_update' parameters can be used to insist
that the "save" must be an SQL insert or update (or equivalent for
non-SQL backends), respectively. Normally, they should not be set.
"""
if force_insert and (force_update or update_fields):
raise ValueError("Cannot force both insert and updating in model saving.")
if update_fields is not None:
# If update_fields is empty, skip the save. We do also check for
# no-op saves later on for inheritance cases. This bailout is
# still needed for skipping signal sending.
if len(update_fields) == 0:
return
update_fields = frozenset(update_fields)
field_names = set([field.name for field in self._meta.fields
if not field.primary_key])
non_model_fields = update_fields.difference(field_names)
if non_model_fields:
raise ValueError("The following fields do not exist in this "
"model or are m2m fields: %s"
% ', '.join(non_model_fields))
self.save_base(using=using, force_insert=force_insert,
force_update=force_update, update_fields=update_fields)
save.alters_data = True
def save_base(self, raw=False, cls=None, origin=None, force_insert=False,
force_update=False, using=None, update_fields=None):
"""
Does the heavy-lifting involved in saving. Subclasses shouldn't need to
override this method. It's separate from save() in order to hide the
need for overrides of save() to pass around internal-only parameters
('raw', 'cls', and 'origin').
"""
using = using or router.db_for_write(self.__class__, instance=self)
assert not (force_insert and (force_update or update_fields))
assert update_fields is None or len(update_fields) > 0
if cls is None:
cls = self.__class__
meta = cls._meta
if not meta.proxy:
origin = cls
else:
meta = cls._meta
if origin and not meta.auto_created:
signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using,
update_fields=update_fields)
# If we are in a raw save, save the object exactly as presented.
# That means that we don't try to be smart about saving attributes
# that might have come from the parent class - we just save the
# attributes we have been given to the class we have been given.
# We also go through this process to defer the save of proxy objects
# to their actual underlying model.
if not raw or meta.proxy:
if meta.proxy:
org = cls
else:
org = None
for parent, field in meta.parents.items():
# At this point, parent's primary key field may be unknown
                # (for example, from an administration form which doesn't fill
# this field). If so, fill it.
if field and getattr(self, parent._meta.pk.attname) is None and getattr(self, field.attname) is not None:
setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
self.save_base(cls=parent, origin=org, using=using,
update_fields=update_fields)
if field:
setattr(self, field.attname, self._get_pk_val(parent._meta))
if meta.proxy:
return
if not meta.proxy:
non_pks = [f for f in meta.local_fields if not f.primary_key]
if update_fields:
non_pks = [f for f in non_pks if f.name in update_fields]
# First, try an UPDATE. If that doesn't update anything, do an INSERT.
pk_val = self._get_pk_val(meta)
pk_set = pk_val is not None
record_exists = True
manager = cls._base_manager
if pk_set:
# Determine if we should do an update (pk already exists, forced update,
# no force_insert)
if ((force_update or update_fields) or (not force_insert and
manager.using(using).filter(pk=pk_val).exists())):
if force_update or non_pks:
values = [(f, None, (raw and getattr(self, f.attname) or f.pre_save(self, False))) for f in non_pks]
if values:
rows = manager.using(using).filter(pk=pk_val)._update(values)
if force_update and not rows:
raise DatabaseError("Forced update did not affect any rows.")
if update_fields and not rows:
raise DatabaseError("Save with update_fields did not affect any rows.")
else:
record_exists = False
if not pk_set or not record_exists:
if meta.order_with_respect_to:
                    # If this is a model with an order_with_respect_to,
                    # autopopulate the _order field
field = meta.order_with_respect_to
order_value = manager.using(using).filter(**{field.name: getattr(self, field.attname)}).count()
self._order = order_value
fields = meta.local_fields
if not pk_set:
if force_update or update_fields:
raise ValueError("Cannot force an update in save() with no primary key.")
fields = [f for f in fields if not isinstance(f, AutoField)]
record_exists = False
update_pk = bool(meta.has_auto_field and not pk_set)
result = manager._insert([self], fields=fields, return_id=update_pk, using=using, raw=raw)
if update_pk:
setattr(self, meta.pk.attname, result)
transaction.commit_unless_managed(using=using)
# Store the database on which the object was saved
self._state.db = using
# Once saved, this is no longer a to-be-added instance.
self._state.adding = False
# Signal that the save is complete
if origin and not meta.auto_created:
signals.post_save.send(sender=origin, instance=self, created=(not record_exists),
update_fields=update_fields, raw=raw, using=using)
save_base.alters_data = True
def delete(self, using=None):
using = using or router.db_for_write(self.__class__, instance=self)
assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
collector = Collector(using=using)
collector.collect([self])
collector.delete()
delete.alters_data = True
def _get_FIELD_display(self, field):
value = getattr(self, field.attname)
return force_unicode(dict(field.flatchoices).get(value, value), strings_only=True)
def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
if not self.pk:
raise ValueError("get_next/get_previous cannot be used on unsaved objects.")
        op = 'gt' if is_next else 'lt'
        order = '' if is_next else '-'
param = smart_str(getattr(self, field.attname))
q = Q(**{'%s__%s' % (field.name, op): param})
q = q|Q(**{field.name: param, 'pk__%s' % op: self.pk})
qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by('%s%s' % (order, field.name), '%spk' % order)
try:
return qs[0]
except IndexError:
raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name)
def _get_next_or_previous_in_order(self, is_next):
cachename = "__%s_order_cache" % is_next
if not hasattr(self, cachename):
            op = 'gt' if is_next else 'lt'
            order = '_order' if is_next else '-_order'
order_field = self._meta.order_with_respect_to
obj = self._default_manager.filter(**{
order_field.name: getattr(self, order_field.attname)
}).filter(**{
'_order__%s' % op: self._default_manager.values('_order').filter(**{
self._meta.pk.name: self.pk
})
}).order_by(order)[:1].get()
setattr(self, cachename, obj)
return getattr(self, cachename)
def prepare_database_save(self, unused):
return self.pk
def clean(self):
"""
Hook for doing any extra model-wide validation after clean() has been
called on every field by self.clean_fields. Any ValidationError raised
by this method will not be associated with a particular field; it will
have a special-case association with the field defined by NON_FIELD_ERRORS.
"""
pass
def validate_unique(self, exclude=None):
"""
Checks unique constraints on the model and raises ``ValidationError``
if any failed.
"""
unique_checks, date_checks = self._get_unique_checks(exclude=exclude)
errors = self._perform_unique_checks(unique_checks)
date_errors = self._perform_date_checks(date_checks)
for k, v in date_errors.items():
errors.setdefault(k, []).extend(v)
if errors:
raise ValidationError(errors)
def _get_unique_checks(self, exclude=None):
"""
Gather a list of checks to perform. Since validate_unique could be
called from a ModelForm, some fields may have been excluded; we can't
perform a unique check on a model that is missing fields involved
in that check.
Fields that did not validate should also be excluded, but they need
to be passed in via the exclude argument.
"""
if exclude is None:
exclude = []
unique_checks = []
unique_togethers = [(self.__class__, self._meta.unique_together)]
for parent_class in self._meta.parents.keys():
if parent_class._meta.unique_together:
unique_togethers.append((parent_class, parent_class._meta.unique_together))
for model_class, unique_together in unique_togethers:
for check in unique_together:
for name in check:
# If this is an excluded field, don't add this check.
if name in exclude:
break
else:
unique_checks.append((model_class, tuple(check)))
# These are checks for the unique_for_<date/year/month>.
date_checks = []
# Gather a list of checks for fields declared as unique and add them to
# the list of checks.
fields_with_class = [(self.__class__, self._meta.local_fields)]
for parent_class in self._meta.parents.keys():
fields_with_class.append((parent_class, parent_class._meta.local_fields))
for model_class, fields in fields_with_class:
for f in fields:
name = f.name
if name in exclude:
continue
if f.unique:
unique_checks.append((model_class, (name,)))
if f.unique_for_date and f.unique_for_date not in exclude:
date_checks.append((model_class, 'date', name, f.unique_for_date))
if f.unique_for_year and f.unique_for_year not in exclude:
date_checks.append((model_class, 'year', name, f.unique_for_year))
if f.unique_for_month and f.unique_for_month not in exclude:
date_checks.append((model_class, 'month', name, f.unique_for_month))
return unique_checks, date_checks
def _perform_unique_checks(self, unique_checks):
errors = {}
for model_class, unique_check in unique_checks:
# Try to look up an existing object with the same values as this
            # object's values for all the unique fields.
lookup_kwargs = {}
for field_name in unique_check:
f = self._meta.get_field(field_name)
lookup_value = getattr(self, f.attname)
if lookup_value is None:
# no value, skip the lookup
continue
if f.primary_key and not self._state.adding:
# no need to check for unique primary key when editing
continue
lookup_kwargs[str(field_name)] = lookup_value
# some fields were skipped, no reason to do the check
if len(unique_check) != len(lookup_kwargs.keys()):
continue
qs = model_class._default_manager.filter(**lookup_kwargs)
# Exclude the current object from the query if we are editing an
# instance (as opposed to creating a new one)
# Note that we need to use the pk as defined by model_class, not
# self.pk. These can be different fields because model inheritance
            # allows a single model to have effectively multiple primary keys.
# Refs #17615.
model_class_pk = self._get_pk_val(model_class._meta)
if not self._state.adding and model_class_pk is not None:
qs = qs.exclude(pk=model_class_pk)
if qs.exists():
if len(unique_check) == 1:
key = unique_check[0]
else:
key = NON_FIELD_ERRORS
errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check))
return errors
def _perform_date_checks(self, date_checks):
errors = {}
for model_class, lookup_type, field, unique_for in date_checks:
lookup_kwargs = {}
            # There's a ticket to add a date lookup; we can remove this special
            # case if that makes its way in.
date = getattr(self, unique_for)
if date is None:
continue
if lookup_type == 'date':
lookup_kwargs['%s__day' % unique_for] = date.day
lookup_kwargs['%s__month' % unique_for] = date.month
lookup_kwargs['%s__year' % unique_for] = date.year
else:
lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type)
lookup_kwargs[field] = getattr(self, field)
qs = model_class._default_manager.filter(**lookup_kwargs)
# Exclude the current object from the query if we are editing an
# instance (as opposed to creating a new one)
if not self._state.adding and self.pk is not None:
qs = qs.exclude(pk=self.pk)
if qs.exists():
errors.setdefault(field, []).append(
self.date_error_message(lookup_type, field, unique_for)
)
return errors
def date_error_message(self, lookup_type, field, unique_for):
opts = self._meta
return _("%(field_name)s must be unique for %(date_field)s %(lookup)s.") % {
'field_name': unicode(capfirst(opts.get_field(field).verbose_name)),
'date_field': unicode(capfirst(opts.get_field(unique_for).verbose_name)),
'lookup': lookup_type,
}
def unique_error_message(self, model_class, unique_check):
opts = model_class._meta
model_name = capfirst(opts.verbose_name)
# A unique field
if len(unique_check) == 1:
field_name = unique_check[0]
field = opts.get_field(field_name)
field_label = capfirst(field.verbose_name)
            # Return the message for a single unique field; the caller is
            # responsible for inserting it into the errors dict.
return field.error_messages['unique'] % {
'model_name': unicode(model_name),
'field_label': unicode(field_label)
}
# unique_together
else:
field_labels = map(lambda f: capfirst(opts.get_field(f).verbose_name), unique_check)
field_labels = get_text_list(field_labels, _('and'))
return _("%(model_name)s with this %(field_label)s already exists.") % {
'model_name': unicode(model_name),
'field_label': unicode(field_labels)
}
def full_clean(self, exclude=None):
"""
        Calls clean_fields(), clean(), and validate_unique() on the model,
        and raises a ``ValidationError`` for any errors that occurred.
"""
errors = {}
if exclude is None:
exclude = []
try:
self.clean_fields(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
# Form.clean() is run even if other validation fails, so do the
# same with Model.clean() for consistency.
try:
self.clean()
except ValidationError as e:
errors = e.update_error_dict(errors)
# Run unique checks, but only for fields that passed validation.
for name in errors.keys():
if name != NON_FIELD_ERRORS and name not in exclude:
exclude.append(name)
try:
self.validate_unique(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
if errors:
raise ValidationError(errors)
def clean_fields(self, exclude=None):
"""
Cleans all fields and raises a ValidationError containing message_dict
of all validation errors if any occur.
"""
if exclude is None:
exclude = []
errors = {}
for f in self._meta.fields:
if f.name in exclude:
continue
# Skip validation for empty fields with blank=True. The developer
# is responsible for making sure they have a valid value.
raw_value = getattr(self, f.attname)
if f.blank and raw_value in validators.EMPTY_VALUES:
continue
try:
setattr(self, f.attname, f.clean(raw_value, self))
except ValidationError as e:
errors[f.name] = e.messages
if errors:
raise ValidationError(errors)
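# A hedged usage sketch (not part of this module): full_clean() above is the
# entry point that chains clean_fields(), clean() and validate_unique(), and
# save() does not invoke it implicitly, so callers validate explicitly.
# ``instance`` may be any Model subclass instance.
def _example_full_clean_then_save(instance):
    from django.core.exceptions import ValidationError
    try:
        instance.full_clean()
    except ValidationError as e:
        # message_dict maps field names (or NON_FIELD_ERRORS) to messages.
        return e.message_dict
    instance.save()
    return None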
############################################
# HELPER FUNCTIONS (CURRIED MODEL METHODS) #
############################################
# ORDERING METHODS #########################
def method_set_order(ordered_obj, self, id_list, using=None):
if using is None:
using = DEFAULT_DB_ALIAS
rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.rel.field_name)
order_name = ordered_obj._meta.order_with_respect_to.name
# FIXME: It would be nice if there was an "update many" version of update
# for situations like this.
for i, j in enumerate(id_list):
ordered_obj.objects.filter(**{'pk': j, order_name: rel_val}).update(_order=i)
transaction.commit_unless_managed(using=using)
def method_get_order(ordered_obj, self):
rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.rel.field_name)
order_name = ordered_obj._meta.order_with_respect_to.name
pk_name = ordered_obj._meta.pk.name
return [r[pk_name] for r in
ordered_obj.objects.filter(**{order_name: rel_val}).values(pk_name)]
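# A hedged usage sketch for the two ordering helpers above: Django curries
# them onto the *related* object as get_<model>_order()/set_<model>_order().
# The Answer model and ``question`` instance are hypothetical and assume
# ``class Meta: order_with_respect_to = 'question'`` on Answer.
def _example_reorder(question):
    pks = question.get_answer_order()
    question.set_answer_order(list(reversed(pks)))
    return question.get_answer_order()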
##############################################
# HELPER FUNCTIONS (CURRIED MODEL FUNCTIONS) #
##############################################
def get_absolute_url(opts, func, self, *args, **kwargs):
return settings.ABSOLUTE_URL_OVERRIDES.get('%s.%s' % (opts.app_label, opts.module_name), func)(self, *args, **kwargs)
########
# MISC #
########
class Empty(object):
pass
def simple_class_factory(model, attrs):
"""Used to unpickle Models without deferred fields.
We need to do this the hard way, rather than just using
the default __reduce__ implementation, because of a
__deepcopy__ problem in Python 2.4
"""
return model
def model_unpickle(model, attrs, factory):
"""
Used to unpickle Model subclasses with deferred fields.
"""
cls = factory(model, attrs)
return cls.__new__(cls)
model_unpickle.__safe_for_unpickle__ = True
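# A hedged round-trip sketch: because Model.__reduce__ dispatches to
# model_unpickle above, instances of dynamically created deferred classes
# survive pickling like plain instances. ``obj`` is any saved model
# instance, possibly loaded via .defer()/.only().
def _example_pickle_roundtrip(obj):
    import pickle
    clone = pickle.loads(pickle.dumps(obj))
    assert clone.pk == obj.pk
    return clone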
def subclass_exception(name, parents, module):
return type(name, parents, {'__module__': module})
|
isandlaTech/cohorte-devtools
|
refs/heads/master
|
org.cohorte.eclipse.runner.basic/files/jython/Lib/email/encoders.py
|
263
|
# Copyright (C) 2001-2006 Python Software Foundation
# Author: Barry Warsaw
# Contact: email-sig@python.org
"""Encodings and related functions."""
__all__ = [
'encode_7or8bit',
'encode_base64',
'encode_noop',
'encode_quopri',
]
import base64
from quopri import encodestring as _encodestring
def _qencode(s):
enc = _encodestring(s, quotetabs=True)
# Must encode spaces, which quopri.encodestring() doesn't do
return enc.replace(' ', '=20')
def _bencode(s):
# We can't quite use base64.encodestring() since it tacks on a "courtesy
# newline". Blech!
if not s:
return s
hasnewline = (s[-1] == '\n')
value = base64.encodestring(s)
if not hasnewline and value[-1] == '\n':
return value[:-1]
return value
def encode_base64(msg):
"""Encode the message's payload in Base64.
Also, add an appropriate Content-Transfer-Encoding header.
"""
orig = msg.get_payload()
encdata = _bencode(orig)
msg.set_payload(encdata)
msg['Content-Transfer-Encoding'] = 'base64'
def encode_quopri(msg):
"""Encode the message's payload in quoted-printable.
Also, add an appropriate Content-Transfer-Encoding header.
"""
orig = msg.get_payload()
encdata = _qencode(orig)
msg.set_payload(encdata)
msg['Content-Transfer-Encoding'] = 'quoted-printable'
def encode_7or8bit(msg):
"""Set the Content-Transfer-Encoding header to 7bit or 8bit."""
orig = msg.get_payload()
if orig is None:
# There's no payload. For backwards compatibility we use 7bit
msg['Content-Transfer-Encoding'] = '7bit'
return
# We play a trick to make this go fast. If encoding to ASCII succeeds, we
# know the data must be 7bit, otherwise treat it as 8bit.
try:
orig.encode('ascii')
except UnicodeError:
msg['Content-Transfer-Encoding'] = '8bit'
else:
msg['Content-Transfer-Encoding'] = '7bit'
def encode_noop(msg):
"""Do nothing."""
|
fatadama/mavlink-vscl
|
refs/heads/master
|
MAVProxy-master/modules/lib/libchecklist.py
|
5
|
#!/usr/bin/env python
"""
MAVProxy checklist, implemented in a child process
Created by Stephen Dade (stephen_dade@hotmail.com)
"""
class CheckItem():
'''Checklist item used for information transfer
between threads/processes/pipes'''
def __init__(self, name, state):
self.name = name
self.state = state
class UI():
'''
a UI for the MAVProxy checklist
'''
def __init__(self):
import multiprocessing
        self.parent_pipe, self.child_pipe = multiprocessing.Pipe()
self.close_event = multiprocessing.Event()
self.close_event.clear()
self.child = multiprocessing.Process(target=self.child_task)
self.child.start()
def child_task(self):
'''child process - this holds all the GUI elements'''
import Tkinter as tk
        '''curStep tracks which step in the checklist we're up to; it
        increments by 1 for each completed list and equals the column
        number of the current checklist section'''
self.curStep = 0
self.root = tk.Tk()
self.root.title("MAVProxy: Checklist")
self.root.grid()
self.createLists()
self.createWidgets(self.root)
self.on_timer()
self.root.mainloop()
def createLists(self):
'''Generate the checklists. Note that:
0,1 = off/on for auto-ticked items
2,3 = off/on for manually ticked items'''
self.beforeAssemblyList = {
'Confirm batteries charged':2,
'No physical damage to airframe':2,
'All electronics present and connected':2,
'Joe placed':2,
'CoG of UAV correct':2,
'Ground station operational':2
}
self.beforeEngineList = {
'APM Booted':0,
'Pandaboard Booted':2,
'Cameras calibrated and capturing':2,
'GPS lock':0,
'Altitude lock':0,
'Flight mode MANUAL':0,
'Trim set from controller':0,
'Avionics Battery':0,
'Compass Calibrated':0,
'Accelerometers and Gyros Calibrated':0,
'UAV Level':0,
'Aircraft Params Loaded':2,
'Radio Links > 6db margin':0,
'Waypoints Loaded':0
}
self.beforeTakeoffList = {
'Flight control surfaces responsive':2,
'Engine throttle responsive':2,
'Runway clear':2,
'Compass active':0,
'IMU OK':2,
'Set flight timer and alarm':2
}
self.beforeCruiseList = {
'Airspeed > 10 m/s':0,
'Altitude > 30 m':0,
'< 100 degrees to 1st Waypoint':2
}
self.bottleDropList = {
'Joe found':2,
'Joe waypoint laid in':2,
'< 100m to Joe waypoint':2,
'Bottle drop mechanism activated':2
}
self.beforeLandingList = {
'APM set to MANUAL mode':2,
'< 100m from airfield home':2
}
self.beforeShutdownList = {
'Engine cutoff':2,
'Data downloaded':2
}
def createWidgets(self, frame):
'''Create the controls on the UI'''
import Tkinter as tk
'''Group Labels'''
AssemblyLabel = tk.Label(frame, text="During Assembly")
EngineLabel = tk.Label(frame, text="Before Engine Start")
BootLabel = tk.Label(frame, text="Before Takeoff")
FlightLabel = tk.Label(frame, text="Before Cruise/AUTO")
'''BottleLabel = tk.Label(frame, text="Bottle Drop")'''
'''LandLabel = tk.Label(frame, text="Before Landing")'''
'''ShutdownLabel = tk.Label(frame, text="Before Shutdown")'''
AssemblyLabel.grid(row=0, column=0)
EngineLabel.grid(row=0, column=1)
BootLabel.grid(row=0, column=2)
FlightLabel.grid(row=0, column=3)
'''BottleLabel.grid(row=0, column=4)'''
'''LandLabel.grid(row=0, column=5)'''
'''ShutdownLabel.grid(row=0, column=6)'''
'''before assembly checklist'''
i = 1
for key in self.beforeAssemblyList:
if self.beforeAssemblyList[key] == 0:
self.beforeAssemblyList[key] = tk.IntVar()
aCheckButton = tk.Checkbutton(text=key, variable=self.beforeAssemblyList[key], state="disabled", wraplength=170, justify='left', onvalue=1, offvalue=0)
aCheckButton.grid(row = i, column=0, sticky='w')
if self.beforeAssemblyList[key] == 2:
self.beforeAssemblyList[key] = tk.IntVar()
aCheckButton = tk.Checkbutton(text=key, variable=self.beforeAssemblyList[key], wraplength=170, justify='left', onvalue=3, offvalue=2)
aCheckButton.grid(row = i, column=0, sticky='w')
i = i+1
self.beforeAssemblyButton = tk.Button(text='Close final hatches', state="active", command=self.beforeAssemblyListCheck)
self.beforeAssemblyButton.grid(row = i, column=0, sticky='w')
'''before Engine Start checklist'''
i = 1
for key in self.beforeEngineList:
if self.beforeEngineList[key] == 0:
self.beforeEngineList[key] = tk.IntVar()
aCheckButton = tk.Checkbutton(text=key, variable=self.beforeEngineList[key], state="disabled", wraplength=170, justify='left', onvalue=1, offvalue=0)
aCheckButton.grid(row = i, column=1, sticky='w')
if self.beforeEngineList[key] == 2:
self.beforeEngineList[key] = tk.IntVar()
aCheckButton = tk.Checkbutton(text=key, variable=self.beforeEngineList[key], wraplength=170, justify='left', onvalue=3, offvalue=2)
aCheckButton.grid(row = i, column=1, sticky='w')
i = i+1
self.beforeEngineButton = tk.Button(text='Ready for Engine start', state="disabled", command=self.beforeEngineCheck)
self.beforeEngineButton.grid(row = i, column=1, sticky='w')
'''before takeoff checklist'''
i = 1
for key in self.beforeTakeoffList:
if self.beforeTakeoffList[key] == 0:
self.beforeTakeoffList[key] = tk.IntVar()
aCheckButton = tk.Checkbutton(text=key, variable=self.beforeTakeoffList[key], state="disabled", wraplength=170, justify='left', onvalue=1, offvalue=0)
aCheckButton.grid(row = i, column=2, sticky='w')
if self.beforeTakeoffList[key] == 2:
self.beforeTakeoffList[key] = tk.IntVar()
aCheckButton = tk.Checkbutton(text=key, variable=self.beforeTakeoffList[key], wraplength=170, justify='left', onvalue=3, offvalue=2)
aCheckButton.grid(row = i, column=2, sticky='w')
i = i+1
self.beforeTakeoffButton = tk.Button(text='Ready for Takeoff', state="disabled", command=self.beforeTakeoffCheck)
self.beforeTakeoffButton.grid(row = i, column=2, sticky='w')
'''After takeoff'''
i=1
for key in self.beforeCruiseList:
if self.beforeCruiseList[key] == 0:
self.beforeCruiseList[key] = tk.IntVar()
aCheckButton = tk.Checkbutton(text=key, variable=self.beforeCruiseList[key], state="disabled", wraplength=170, justify='left', onvalue=1, offvalue=0)
aCheckButton.grid(row = i, column=3, sticky='w')
if self.beforeCruiseList[key] == 2:
self.beforeCruiseList[key] = tk.IntVar()
aCheckButton = tk.Checkbutton(text=key, variable=self.beforeCruiseList[key], wraplength=170, justify='left', onvalue=3, offvalue=2)
aCheckButton.grid(row = i, column=3, sticky='w')
i = i+1
self.beforeCruiseButton = tk.Button(text='Ready for Cruise', state="disabled", command=self.beforeCruiseCheck)
self.beforeCruiseButton.grid(row = i, column=3, sticky='w')
'''Before bottle drop'''
'''i=1
for key in self.bottleDropList:
if self.bottleDropList[key] == 0:
self.bottleDropList[key] = tk.IntVar()
aCheckButton = tk.Checkbutton(text=key, variable=self.bottleDropList[key], state="disabled", wraplength=170, justify='left', onvalue=1, offvalue=0)
aCheckButton.grid(row = i, column=4, sticky='w')
if self.bottleDropList[key] == 2:
self.bottleDropList[key] = tk.IntVar()
aCheckButton = tk.Checkbutton(text=key, variable=self.bottleDropList[key], wraplength=170, justify='left', onvalue=3, offvalue=2)
aCheckButton.grid(row = i, column=4, sticky='w')
i = i+1
self.bottleDropButton = tk.Button(text='Bottle drop completed', state="disabled", command=self.bottleDropCheck)
self.bottleDropButton.grid(row = i, column=4, sticky='w')'''
'''Before landing'''
'''i=1
for key in self.beforeLandingList:
if self.beforeLandingList[key] == 0:
self.beforeLandingList[key] = tk.IntVar()
aCheckButton = tk.Checkbutton(text=key, variable=self.beforeLandingList[key], state="disabled", wraplength=170, justify='left', onvalue=1, offvalue=0)
aCheckButton.grid(row = i, column=5, sticky='w')
if self.beforeLandingList[key] == 2:
self.beforeLandingList[key] = tk.IntVar()
aCheckButton = tk.Checkbutton(text=key, variable=self.beforeLandingList[key], wraplength=170, justify='left', onvalue=3, offvalue=2)
aCheckButton.grid(row = i, column=5, sticky='w')
i = i+1
self.beforeLandingButton = tk.Button(text='Ready for landing', state="disabled", command=self.beforeLandingCheck)
self.beforeLandingButton.grid(row = i, column=5, sticky='w')'''
'''before shutdown checklist'''
'''i = 1
for key in self.beforeShutdownList:
if self.beforeShutdownList[key] == 0:
self.beforeShutdownList[key] = tk.IntVar()
aCheckButton = tk.Checkbutton(text=key, variable=self.beforeShutdownList[key], state="disabled", wraplength=170, justify='left', onvalue=1, offvalue=0)
aCheckButton.grid(row = i, column=6, sticky='w')
if self.beforeShutdownList[key] == 2:
self.beforeShutdownList[key] = tk.IntVar()
aCheckButton = tk.Checkbutton(text=key, variable=self.beforeShutdownList[key], wraplength=170, justify='left', onvalue=3, offvalue=2)
aCheckButton.grid(row = i, column=6, sticky='w')
i = i+1
self.beforeShutdownButton = tk.Button(text='Shutdown', state="disabled", command=self.beforeShutdownCheck)
self.beforeShutdownButton.grid(row = i, column=6, sticky='w')'''
def beforeAssemblyListCheck(self):
'''Event for the "Checklist Complete" button for the Before Assembly section'''
import Tkinter as tk
import tkMessageBox
'''Check all of the checklist for ticks'''
for key, value in self.beforeAssemblyList.items():
state = value.get()
if state == 0 or state == 2:
tkMessageBox.showinfo("Error", "Item not ticked: " + key)
return
'''disable all checkboxes in this column'''
for child in self.root.winfo_children():
if isinstance(child, tk.Checkbutton) and int(child.grid_info()['column']) == self.curStep:
child.config(state="disabled")
'''if we made it here, the checklist is OK'''
self.beforeEngineButton.config(state="normal")
self.beforeAssemblyButton.config(text='Checklist Completed', state="disabled")
self.curStep = 1
def beforeEngineCheck(self):
'''Event for the "Checklist Complete" button for the Before Engine Start section'''
import Tkinter as tk
import tkMessageBox
'''Check all of the checklist for ticks'''
for key, value in self.beforeEngineList.items():
state = value.get()
if state == 0 or state == 2:
tkMessageBox.showinfo("Error", "Item not ticked: " + key)
return
'''disable all checkboxes in this column'''
for child in self.root.winfo_children():
if isinstance(child, tk.Checkbutton) and int(child.grid_info()['column']) == self.curStep:
child.config(state="disabled")
'''if we made it here, the checklist is OK'''
self.beforeTakeoffButton.config(state="normal")
self.beforeEngineButton.config(text='Checklist Completed', state="disabled")
self.curStep = 2
def beforeTakeoffCheck(self):
'''Event for the "Checklist Complete" button for the Before Takeoff section'''
import Tkinter as tk
import tkMessageBox
'''Check all of the checklist for ticks'''
for key, value in self.beforeTakeoffList.items():
state = value.get()
if state == 0 or state == 2:
tkMessageBox.showinfo("Error", "Item not ticked: " + key)
return
'''disable all checkboxes in this column'''
for child in self.root.winfo_children():
if isinstance(child, tk.Checkbutton) and int(child.grid_info()['column']) == self.curStep:
child.config(state="disabled")
'''if we made it here, the checklist is OK'''
self.beforeCruiseButton.config(state="normal")
self.beforeTakeoffButton.config(text='Checklist Completed', state="disabled")
self.curStep = 3
def beforeCruiseCheck(self):
'''Event for the "Checklist Complete" button for the Before Cruise/AUTO section'''
import Tkinter as tk
import tkMessageBox
'''Check all of the checklist for ticks'''
for key, value in self.beforeCruiseList.items():
state = value.get()
if state == 0 or state == 2:
tkMessageBox.showinfo("Error", "Item not ticked: " + key)
return
'''disable all checkboxes in this column'''
for child in self.root.winfo_children():
if isinstance(child, tk.Checkbutton) and int(child.grid_info()['column']) == self.curStep:
child.config(state="disabled")
'''if we made it here, the checklist is OK'''
'''self.bottleDropButton.config(state="normal")'''
tkMessageBox.showinfo("Information", "Checklist Completed!")
self.beforeCruiseButton.config(text='Checklist Completed', state="disabled")
self.curStep = 4
def bottleDropCheck(self):
'''Event for the "Checklist Complete" button for the Before Bottle Drop section'''
import Tkinter as tk
import tkMessageBox
'''Check all of the checklist for ticks'''
for key, value in self.bottleDropList.items():
state = value.get()
if state == 0 or state == 2:
tkMessageBox.showinfo("Error", "Item not ticked: " + key)
return
'''disable all checkboxes in this column'''
for child in self.root.winfo_children():
if isinstance(child, tk.Checkbutton) and int(child.grid_info()['column']) == self.curStep:
child.config(state="disabled")
'''if we made it here, the checklist is OK'''
self.beforeLandingButton.config(state="normal")
self.bottleDropButton.config(text='Checklist Completed', state="disabled")
self.curStep = 5
def beforeLandingCheck(self):
'''Event for the "Checklist Complete" button for the Before Landing section'''
import Tkinter as tk
import tkMessageBox
'''Check all of the checklist for ticks'''
for key, value in self.beforeLandingList.items():
state = value.get()
if state == 0 or state == 2:
tkMessageBox.showinfo("Error", "Item not ticked: " + key)
return
'''disable all checkboxes in this column'''
for child in self.root.winfo_children():
if isinstance(child, tk.Checkbutton) and int(child.grid_info()['column']) == self.curStep:
child.config(state="disabled")
'''if we made it here, the checklist is OK'''
self.beforeShutdownButton.config(state="normal")
self.beforeLandingButton.config(text='Checklist Completed', state="disabled")
self.curStep = 6
def beforeShutdownCheck(self):
'''Event for the "Checklist Complete" button for the Before Landing section'''
import Tkinter as tk
import tkMessageBox
'''Check all of the checklist for ticks'''
for key, value in self.beforeShutdownList.items():
state = value.get()
if state == 0 or state == 2:
tkMessageBox.showinfo("Error", "Item not ticked: " + key)
return
'''disable all checkboxes in this column'''
for child in self.root.winfo_children():
if isinstance(child, tk.Checkbutton) and int(child.grid_info()['column']) == self.curStep:
child.config(state="disabled")
'''if we made it here, the checklist is OK'''
self.beforeShutdownButton.config(text='Checklist Completed', state="disabled")
tkMessageBox.showinfo("Information", "Checklist Completed!")
self.curStep = 7
def close(self):
'''close the console'''
self.close_event.set()
if self.is_alive():
self.child.join(2)
def is_alive(self):
'''check if child is still going'''
return self.child.is_alive()
def on_timer(self):
'''this timer periodically checks the inter-process pipe
for any updated checklist items'''
import Tkinter as tk
if self.close_event.wait(0.001):
            # Tear down the Tk mainloop; the wx-style calls carried over
            # here (self.timer.Stop()/self.Destroy()) don't exist on this class.
            self.root.destroy()
return
while self.child_pipe.poll():
obj = self.child_pipe.recv()
if isinstance(obj, CheckItem):
# request to set a checklist item
'''Go through all the controls in the main window'''
for child in self.root.winfo_children():
'''If the control is a checkbutton and it's name matches and we're in the right checklist step, update it'''
if isinstance(child, tk.Checkbutton) and obj.name == child.cget('text') and int(child.grid_info()['column']) == self.curStep:
if obj.state == 1:
child.select()
else:
child.deselect()
'''print("in here")'''
self.root.after(500, self.on_timer)
def set_status(self, name, status):
'''set a status value'''
if self.child.is_alive():
self.parent_pipe.send(CheckItem(name, status))
if __name__ == "__main__":
# test the console
import time
checklist = UI()
while checklist.is_alive():
checklist.set_status("Compass Offsets", 1)
time.sleep(0.5)
|
aponxi/libmysqlpp
|
refs/heads/master
|
bakefile-0.2.9/src/formats.py
|
1
|
#
# This file is part of Bakefile (http://www.bakefile.org)
#
# Copyright (C) 2003,2004 Vaclav Slavik
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# $Id: formats.py 1089 2007-11-03 21:27:39Z vaclavslavik $
#
# Reading, parsing and checking against FORMATS.bkmanifest manifest
# files.
#
import xmlparser, errors
import config
import os.path
formats = {}
class FormatInfo:
def __init__(self):
self.name = None
self.desc = None
self.defaultFile = None
def loadManifestFile(filename):
"""Loads manifest from file 'filename'."""
manifest = xmlparser.parseFile(filename, xmlparser.NS_FORMATS_MANIFEST)
if manifest.name != 'bakefile-manifest':
raise errors.ReaderError(manifest, 'invalid manifest file')
for fmt in manifest.children:
if fmt.name != 'format':
raise errors.ReaderError(fmt, 'invalid manifest file')
info = FormatInfo()
info.name = fmt.props['id']
for node in fmt.children:
if node.name == 'description':
info.desc = node.value
elif node.name == 'default-filename':
info.defaultFile = node.value
else:
raise errors.ReaderError(node, 'invalid format description')
    if info.name is None or info.desc is None or info.defaultFile is None:
raise errors.ReaderError(fmt, 'format not fully described')
formats[info.name] = info
def loadFormats():
"""Find all format specification in search paths."""
for path in config.searchPath:
manifest = os.path.join(path, 'FORMATS.bkmanifest')
if os.path.isfile(manifest):
try:
loadManifestFile(manifest)
except xmlparser.ParsingError:
raise errors.Error("malformed format manifest file %s" % manifest)
def isValidFormat(f):
return f in formats
def showFormats():
if len(formats) == 0:
loadFormats()
help = "available formats are:\n"
maxlen = 0
for f in formats:
if len(f) > maxlen: maxlen = len(f)
outfmt = ' %%-%is %%s\n' % maxlen
keys = formats.keys()
keys.sort()
for f in keys:
help += outfmt % (f, formats[f].desc)
help += '\n'
return help
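# A hedged usage sketch (not part of the original module); it assumes
# config.searchPath has already been populated with directories that
# contain FORMATS.bkmanifest files.
if __name__ == '__main__':
    loadFormats()
    for name in sorted(formats.keys()):
        print '%s: %s (default file: %s)' % (
            name, formats[name].desc, formats[name].defaultFile)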
|
tethysplatform/tethys
|
refs/heads/master
|
tests/unit_tests/test_tethys_gizmos/test_gizmo_options/test_range_slider.py
|
2
|
import unittest
import tethys_gizmos.gizmo_options.range_slider as gizmo_range_slider
class TestRangeSlider(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_RangeSlider(self):
name = 'Test Range Slider'
min = 0
max = 100
initial = 50
step = 1
result = gizmo_range_slider.RangeSlider(name=name, min=min, max=max, initial=initial, step=step)
# Check Result
self.assertEqual(name, result['name'])
self.assertEqual(min, result['min'])
self.assertEqual(max, result['max'])
self.assertEqual(initial, result['initial'])
self.assertEqual(step, result['step'])
self.assertIn('.js', gizmo_range_slider.RangeSlider.get_gizmo_js()[0])
self.assertNotIn('.css', gizmo_range_slider.RangeSlider.get_gizmo_js()[0])
|
ashhher3/cvxpy
|
refs/heads/master
|
cvxpy/constraints/int_constr.py
|
12
|
"""
Copyright 2013 Steven Diamond
This file is part of CVXPY.
CVXPY is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CVXPY is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CVXPY. If not, see <http://www.gnu.org/licenses/>.
"""
import cvxpy.settings as s
from cvxpy.constraints.bool_constr import BoolConstr
class IntConstr(BoolConstr):
"""
An integer constraint:
X_{ij} in Z for all i,j.
Attributes:
noncvx_var: A variable constrained to be elementwise integral.
lin_op: The linear operator equal to the noncvx_var.
"""
CONSTR_TYPE = s.INT_IDS
def __str__(self):
return "IntConstr(%s)" % self.lin_op
|
Acehaidrey/incubator-airflow
|
refs/heads/master
|
tests/models/test_taskinstance.py
|
1
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import datetime
import os
import time
import unittest
import urllib
from typing import List, Optional, Union, cast
from unittest import mock
from unittest.mock import call, mock_open, patch
import pendulum
import pytest
from freezegun import freeze_time
from parameterized import param, parameterized
from sqlalchemy.orm.session import Session
from airflow import models, settings
from airflow.exceptions import AirflowException, AirflowFailException, AirflowSkipException
from airflow.jobs.scheduler_job import SchedulerJob
from airflow.models import (
DAG,
DagModel,
DagRun,
Pool,
RenderedTaskInstanceFields,
TaskInstance as TI,
TaskReschedule,
Variable,
)
from airflow.operators.bash import BashOperator
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.python import PythonOperator
from airflow.sensors.base_sensor_operator import BaseSensorOperator
from airflow.sensors.python import PythonSensor
from airflow.serialization.serialized_objects import SerializedBaseOperator
from airflow.stats import Stats
from airflow.ti_deps.dependencies_deps import REQUEUEABLE_DEPS, RUNNING_DEPS
from airflow.ti_deps.dependencies_states import RUNNABLE_STATES
from airflow.ti_deps.deps.base_ti_dep import TIDepStatus
from airflow.ti_deps.deps.trigger_rule_dep import TriggerRuleDep
from airflow.utils import timezone
from airflow.utils.session import create_session, provide_session
from airflow.utils.state import State
from airflow.utils.types import DagRunType
from airflow.version import version
from tests.models import DEFAULT_DATE
from tests.test_utils import db
from tests.test_utils.asserts import assert_queries_count
from tests.test_utils.config import conf_vars
class CallbackWrapper:
task_id: Optional[str] = None
dag_id: Optional[str] = None
execution_date: Optional[datetime.datetime] = None
task_state_in_callback: Optional[str] = None
callback_ran = False
def wrap_task_instance(self, ti):
self.task_id = ti.task_id
self.dag_id = ti.dag_id
self.execution_date = ti.execution_date
self.task_state_in_callback = ""
self.callback_ran = False
def success_handler(self, context): # pylint: disable=unused-argument
self.callback_ran = True
session = settings.Session()
temp_instance = (
session.query(TI)
.filter(TI.task_id == self.task_id)
.filter(TI.dag_id == self.dag_id)
.filter(TI.execution_date == self.execution_date)
.one()
)
self.task_state_in_callback = temp_instance.state
class TestTaskInstance(unittest.TestCase):
@staticmethod
def clean_db():
db.clear_db_dags()
db.clear_db_pools()
db.clear_db_runs()
db.clear_db_task_fail()
db.clear_rendered_ti_fields()
db.clear_db_task_reschedule()
def setUp(self):
self.clean_db()
with create_session() as session:
test_pool = Pool(pool='test_pool', slots=1)
session.add(test_pool)
session.commit()
def tearDown(self):
self.clean_db()
def test_set_task_dates(self):
"""
Test that tasks properly take start/end dates from DAGs
"""
dag = DAG('dag', start_date=DEFAULT_DATE, end_date=DEFAULT_DATE + datetime.timedelta(days=10))
op1 = DummyOperator(task_id='op_1', owner='test')
self.assertTrue(op1.start_date is None and op1.end_date is None)
# dag should assign its dates to op1 because op1 has no dates
dag.add_task(op1)
self.assertTrue(op1.start_date == dag.start_date and op1.end_date == dag.end_date)
op2 = DummyOperator(
task_id='op_2',
owner='test',
start_date=DEFAULT_DATE - datetime.timedelta(days=1),
end_date=DEFAULT_DATE + datetime.timedelta(days=11),
)
        # dag should assign its dates to op2 because the dag's dates are more restrictive
dag.add_task(op2)
self.assertTrue(op2.start_date == dag.start_date and op2.end_date == dag.end_date)
op3 = DummyOperator(
task_id='op_3',
owner='test',
start_date=DEFAULT_DATE + datetime.timedelta(days=1),
end_date=DEFAULT_DATE + datetime.timedelta(days=9),
)
# op3 should keep its dates because they are more restrictive
dag.add_task(op3)
self.assertTrue(op3.start_date == DEFAULT_DATE + datetime.timedelta(days=1))
self.assertTrue(op3.end_date == DEFAULT_DATE + datetime.timedelta(days=9))
def test_timezone_awareness(self):
naive_datetime = DEFAULT_DATE.replace(tzinfo=None)
# check ti without dag (just for bw compat)
op_no_dag = DummyOperator(task_id='op_no_dag')
ti = TI(task=op_no_dag, execution_date=naive_datetime)
self.assertEqual(ti.execution_date, DEFAULT_DATE)
# check with dag without localized execution_date
dag = DAG('dag', start_date=DEFAULT_DATE)
op1 = DummyOperator(task_id='op_1')
dag.add_task(op1)
ti = TI(task=op1, execution_date=naive_datetime)
self.assertEqual(ti.execution_date, DEFAULT_DATE)
# with dag and localized execution_date
tzinfo = pendulum.timezone("Europe/Amsterdam")
execution_date = timezone.datetime(2016, 1, 1, 1, 0, 0, tzinfo=tzinfo)
utc_date = timezone.convert_to_utc(execution_date)
ti = TI(task=op1, execution_date=execution_date)
self.assertEqual(ti.execution_date, utc_date)
def test_task_naive_datetime(self):
naive_datetime = DEFAULT_DATE.replace(tzinfo=None)
op_no_dag = DummyOperator(
task_id='test_task_naive_datetime', start_date=naive_datetime, end_date=naive_datetime
)
self.assertTrue(op_no_dag.start_date.tzinfo)
self.assertTrue(op_no_dag.end_date.tzinfo)
def test_set_dag(self):
"""
Test assigning Operators to Dags, including deferred assignment
"""
dag = DAG('dag', start_date=DEFAULT_DATE)
dag2 = DAG('dag2', start_date=DEFAULT_DATE)
op = DummyOperator(task_id='op_1', owner='test')
# no dag assigned
self.assertFalse(op.has_dag())
self.assertRaises(AirflowException, getattr, op, 'dag')
# no improper assignment
with self.assertRaises(TypeError):
op.dag = 1
op.dag = dag
# no reassignment
with self.assertRaises(AirflowException):
op.dag = dag2
# but assigning the same dag is ok
op.dag = dag
self.assertIs(op.dag, dag)
self.assertIn(op, dag.tasks)
def test_infer_dag(self):
dag = DAG('dag', start_date=DEFAULT_DATE)
dag2 = DAG('dag2', start_date=DEFAULT_DATE)
op1 = DummyOperator(task_id='test_op_1', owner='test')
op2 = DummyOperator(task_id='test_op_2', owner='test')
op3 = DummyOperator(task_id='test_op_3', owner='test', dag=dag)
op4 = DummyOperator(task_id='test_op_4', owner='test', dag=dag2)
# double check dags
self.assertEqual([i.has_dag() for i in [op1, op2, op3, op4]], [False, False, True, True])
# can't combine operators with no dags
self.assertRaises(AirflowException, op1.set_downstream, op2)
# op2 should infer dag from op1
op1.dag = dag
op1.set_downstream(op2)
self.assertIs(op2.dag, dag)
# can't assign across multiple DAGs
self.assertRaises(AirflowException, op1.set_downstream, op4)
self.assertRaises(AirflowException, op1.set_downstream, [op3, op4])
def test_bitshift_compose_operators(self):
dag = DAG('dag', start_date=DEFAULT_DATE)
with dag:
op1 = DummyOperator(task_id='test_op_1', owner='test')
op2 = DummyOperator(task_id='test_op_2', owner='test')
op3 = DummyOperator(task_id='test_op_3', owner='test')
op1 >> op2 << op3
# op2 should be downstream of both
self.assertIn(op2, op1.downstream_list)
self.assertIn(op2, op3.downstream_list)
@patch.object(DAG, 'get_concurrency_reached')
def test_requeue_over_dag_concurrency(self, mock_concurrency_reached):
mock_concurrency_reached.return_value = True
dag = DAG(
dag_id='test_requeue_over_dag_concurrency',
start_date=DEFAULT_DATE,
max_active_runs=1,
concurrency=2,
)
task = DummyOperator(task_id='test_requeue_over_dag_concurrency_op', dag=dag)
ti = TI(task=task, execution_date=timezone.utcnow(), state=State.QUEUED)
# TI.run() will sync from DB before validating deps.
with create_session() as session:
session.add(ti)
session.commit()
ti.run()
self.assertEqual(ti.state, State.NONE)
def test_requeue_over_task_concurrency(self):
dag = DAG(
dag_id='test_requeue_over_task_concurrency',
start_date=DEFAULT_DATE,
max_active_runs=1,
concurrency=2,
)
task = DummyOperator(task_id='test_requeue_over_task_concurrency_op', dag=dag, task_concurrency=0)
ti = TI(task=task, execution_date=timezone.utcnow(), state=State.QUEUED)
# TI.run() will sync from DB before validating deps.
with create_session() as session:
session.add(ti)
session.commit()
ti.run()
self.assertEqual(ti.state, State.NONE)
def test_requeue_over_pool_concurrency(self):
dag = DAG(
dag_id='test_requeue_over_pool_concurrency',
start_date=DEFAULT_DATE,
max_active_runs=1,
concurrency=2,
)
task = DummyOperator(task_id='test_requeue_over_pool_concurrency_op', dag=dag, task_concurrency=0)
ti = TI(task=task, execution_date=timezone.utcnow(), state=State.QUEUED)
# TI.run() will sync from DB before validating deps.
with create_session() as session:
pool = session.query(Pool).filter(Pool.pool == 'test_pool').one()
pool.slots = 0
session.add(ti)
session.commit()
ti.run()
self.assertEqual(ti.state, State.NONE)
def test_not_requeue_non_requeueable_task_instance(self):
dag = models.DAG(dag_id='test_not_requeue_non_requeueable_task_instance')
        # Use BaseSensorOperator because sensors get one
        # additional dep in BaseSensorOperator().deps
task = BaseSensorOperator(
task_id='test_not_requeue_non_requeueable_task_instance_op',
dag=dag,
pool='test_pool',
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=timezone.utcnow(), state=State.QUEUED)
with create_session() as session:
session.add(ti)
session.commit()
all_deps = RUNNING_DEPS | task.deps
all_non_requeueable_deps = all_deps - REQUEUEABLE_DEPS
patch_dict = {}
for dep in all_non_requeueable_deps:
class_name = dep.__class__.__name__
dep_patch = patch(f'{dep.__module__}.{class_name}.{dep._get_dep_statuses.__name__}')
method_patch = dep_patch.start()
method_patch.return_value = iter([TIDepStatus('mock_' + class_name, True, 'mock')])
patch_dict[class_name] = (dep_patch, method_patch)
for class_name, (dep_patch, method_patch) in patch_dict.items():
method_patch.return_value = iter([TIDepStatus('mock_' + class_name, False, 'mock')])
ti.run()
self.assertEqual(ti.state, State.QUEUED)
dep_patch.return_value = TIDepStatus('mock_' + class_name, True, 'mock')
for (dep_patch, method_patch) in patch_dict.values():
dep_patch.stop()
def test_mark_non_runnable_task_as_success(self):
"""
        Test that running a task with the mark_success param updates the
        task state to SUCCESS without actually running it, even though it
        fails dependency checks.
"""
non_runnable_state = (set(State.task_states) - RUNNABLE_STATES - set(State.SUCCESS)).pop()
dag = models.DAG(dag_id='test_mark_non_runnable_task_as_success')
task = DummyOperator(
task_id='test_mark_non_runnable_task_as_success_op',
dag=dag,
pool='test_pool',
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=timezone.utcnow(), state=non_runnable_state)
# TI.run() will sync from DB before validating deps.
with create_session() as session:
session.add(ti)
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
session=session,
)
session.commit()
ti.run(mark_success=True)
self.assertEqual(ti.state, State.SUCCESS)
def test_run_pooling_task(self):
"""
        Test that running a task in an existing pool updates the task state to SUCCESS.
"""
dag = models.DAG(dag_id='test_run_pooling_task')
task = DummyOperator(
task_id='test_run_pooling_task_op',
dag=dag,
pool='test_pool',
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=timezone.utcnow())
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
ti.run()
db.clear_db_pools()
self.assertEqual(ti.state, State.SUCCESS)
def test_pool_slots_property(self):
"""
        Test that trying to create a task with pool_slots less than 1 raises an exception.
"""
def create_task_instance():
dag = models.DAG(dag_id='test_run_pooling_task')
task = DummyOperator(
task_id='test_run_pooling_task_op',
dag=dag,
pool='test_pool',
pool_slots=0,
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
return TI(task=task, execution_date=timezone.utcnow())
self.assertRaises(AirflowException, create_task_instance)
@provide_session
def test_ti_updates_with_task(self, session=None):
"""
        Test that updating the executor_config propagates to the TaskInstance in the DB.
"""
with models.DAG(dag_id='test_run_pooling_task') as dag:
task = DummyOperator(
task_id='test_run_pooling_task_op',
owner='airflow',
executor_config={'foo': 'bar'},
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=timezone.utcnow())
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
session=session,
)
ti.run(session=session)
tis = dag.get_task_instances()
self.assertEqual({'foo': 'bar'}, tis[0].executor_config)
with models.DAG(dag_id='test_run_pooling_task') as dag:
task2 = DummyOperator(
task_id='test_run_pooling_task_op',
owner='airflow',
executor_config={'bar': 'baz'},
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task2, execution_date=timezone.utcnow())
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
session=session,
)
ti.run(session=session)
tis = dag.get_task_instances()
self.assertEqual({'bar': 'baz'}, tis[1].executor_config)
session.rollback()
def test_run_pooling_task_with_mark_success(self):
"""
        Test that running a task in an existing pool with the mark_success
        param updates the task state to SUCCESS without actually running it,
        even though it fails dependency checks.
"""
dag = models.DAG(dag_id='test_run_pooling_task_with_mark_success')
task = DummyOperator(
task_id='test_run_pooling_task_with_mark_success_op',
dag=dag,
pool='test_pool',
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=timezone.utcnow())
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
ti.run(mark_success=True)
self.assertEqual(ti.state, State.SUCCESS)
def test_run_pooling_task_with_skip(self):
"""
        Test that running a task whose callable raises AirflowSkipException
        ends up in a SKIPPED state.
"""
def raise_skip_exception():
raise AirflowSkipException
dag = models.DAG(dag_id='test_run_pooling_task_with_skip')
task = PythonOperator(
task_id='test_run_pooling_task_with_skip',
dag=dag,
python_callable=raise_skip_exception,
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=timezone.utcnow())
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
ti.run()
self.assertEqual(State.SKIPPED, ti.state)
def test_retry_delay(self):
"""
Test that retry delays are respected
"""
dag = models.DAG(dag_id='test_retry_handling')
task = BashOperator(
task_id='test_retry_handling_op',
bash_command='exit 1',
retries=1,
retry_delay=datetime.timedelta(seconds=3),
dag=dag,
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
def run_with_error(ti):
try:
ti.run()
except AirflowException:
pass
ti = TI(task=task, execution_date=timezone.utcnow())
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
self.assertEqual(ti.try_number, 1)
# first run -- up for retry
run_with_error(ti)
self.assertEqual(ti.state, State.UP_FOR_RETRY)
self.assertEqual(ti.try_number, 2)
# second run -- still up for retry because retry_delay hasn't expired
run_with_error(ti)
self.assertEqual(ti.state, State.UP_FOR_RETRY)
# third run -- failed
time.sleep(3)
run_with_error(ti)
self.assertEqual(ti.state, State.FAILED)
def test_retry_handling(self):
"""
Test that task retries are handled properly
"""
expected_rendered_ti_fields = {'env': None, 'bash_command': 'echo test_retry_handling; exit 1'}
dag = models.DAG(dag_id='test_retry_handling')
task = BashOperator(
task_id='test_retry_handling_op',
bash_command='echo {{dag.dag_id}}; exit 1',
retries=1,
retry_delay=datetime.timedelta(seconds=0),
dag=dag,
owner='test_pool',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
def run_with_error(ti):
try:
ti.run()
except AirflowException:
pass
ti = TI(task=task, execution_date=timezone.utcnow())
self.assertEqual(ti.try_number, 1)
# first run -- up for retry
run_with_error(ti)
self.assertEqual(ti.state, State.UP_FOR_RETRY)
self.assertEqual(ti._try_number, 1)
self.assertEqual(ti.try_number, 2)
# second run -- fail
run_with_error(ti)
self.assertEqual(ti.state, State.FAILED)
self.assertEqual(ti._try_number, 2)
self.assertEqual(ti.try_number, 3)
# Clear the TI state since you can't run a task with a FAILED state without
# clearing it first
dag.clear()
# third run -- up for retry
run_with_error(ti)
self.assertEqual(ti.state, State.UP_FOR_RETRY)
self.assertEqual(ti._try_number, 3)
self.assertEqual(ti.try_number, 4)
# fourth run -- fail
run_with_error(ti)
ti.refresh_from_db()
self.assertEqual(ti.state, State.FAILED)
self.assertEqual(ti._try_number, 4)
self.assertEqual(ti.try_number, 5)
self.assertEqual(RenderedTaskInstanceFields.get_templated_fields(ti), expected_rendered_ti_fields)
def test_next_retry_datetime(self):
delay = datetime.timedelta(seconds=30)
max_delay = datetime.timedelta(minutes=60)
dag = models.DAG(dag_id='fail_dag')
task = BashOperator(
task_id='task_with_exp_backoff_and_max_delay',
bash_command='exit 1',
retries=3,
retry_delay=delay,
retry_exponential_backoff=True,
max_retry_delay=max_delay,
dag=dag,
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=DEFAULT_DATE)
ti.end_date = pendulum.instance(timezone.utcnow())
date = ti.next_retry_datetime()
        # between 30 * 2^-1 and 30 * 2^0 (15 and 30)
period = ti.end_date.add(seconds=30) - ti.end_date.add(seconds=15)
self.assertTrue(date in period)
ti.try_number = 3
date = ti.next_retry_datetime()
# between 30 * 2^2 and 30 * 2^3 (120 and 240)
period = ti.end_date.add(seconds=240) - ti.end_date.add(seconds=120)
self.assertTrue(date in period)
ti.try_number = 5
date = ti.next_retry_datetime()
# between 30 * 2^4 and 30 * 2^5 (480 and 960)
period = ti.end_date.add(seconds=960) - ti.end_date.add(seconds=480)
self.assertTrue(date in period)
ti.try_number = 9
date = ti.next_retry_datetime()
self.assertEqual(date, ti.end_date + max_delay)
ti.try_number = 50
date = ti.next_retry_datetime()
self.assertEqual(date, ti.end_date + max_delay)
def test_next_retry_datetime_short_intervals(self):
delay = datetime.timedelta(seconds=1)
max_delay = datetime.timedelta(minutes=60)
dag = models.DAG(dag_id='fail_dag')
task = BashOperator(
task_id='task_with_exp_backoff_and_short_time_interval',
bash_command='exit 1',
retries=3,
retry_delay=delay,
retry_exponential_backoff=True,
max_retry_delay=max_delay,
dag=dag,
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=DEFAULT_DATE)
ti.end_date = pendulum.instance(timezone.utcnow())
date = ti.next_retry_datetime()
        # date should fall between 1 and 15 seconds after end_date
period = ti.end_date.add(seconds=15) - ti.end_date.add(seconds=1)
self.assertTrue(date in period)
def test_reschedule_handling(self):
"""
Test that task reschedules are handled properly
"""
# Return values of the python sensor callable, modified during tests
done = False
fail = False
def func():
if fail:
raise AirflowException()
return done
dag = models.DAG(dag_id='test_reschedule_handling')
task = PythonSensor(
task_id='test_reschedule_handling_sensor',
poke_interval=0,
mode='reschedule',
python_callable=func,
retries=1,
retry_delay=datetime.timedelta(seconds=0),
dag=dag,
owner='airflow',
pool='test_pool',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=timezone.utcnow())
self.assertEqual(ti._try_number, 0)
self.assertEqual(ti.try_number, 1)
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
def run_ti_and_assert(
run_date,
expected_start_date,
expected_end_date,
expected_duration,
expected_state,
expected_try_number,
expected_task_reschedule_count,
):
with freeze_time(run_date):
try:
ti.run()
except AirflowException:
if not fail:
raise
ti.refresh_from_db()
self.assertEqual(ti.state, expected_state)
self.assertEqual(ti._try_number, expected_try_number)
self.assertEqual(ti.try_number, expected_try_number + 1)
self.assertEqual(ti.start_date, expected_start_date)
self.assertEqual(ti.end_date, expected_end_date)
self.assertEqual(ti.duration, expected_duration)
trs = TaskReschedule.find_for_task_instance(ti) # pylint: disable=no-value-for-parameter
self.assertEqual(len(trs), expected_task_reschedule_count)
date1 = timezone.utcnow()
date2 = date1 + datetime.timedelta(minutes=1)
date3 = date2 + datetime.timedelta(minutes=1)
date4 = date3 + datetime.timedelta(minutes=1)
# Run with multiple reschedules.
# During reschedule the try number remains the same, but each reschedule is recorded.
# The start date is expected to remain the initial date, hence the duration increases.
# When finished the try number is incremented and there is no reschedule expected
# for this try.
done, fail = False, False
run_ti_and_assert(date1, date1, date1, 0, State.UP_FOR_RESCHEDULE, 0, 1)
done, fail = False, False
run_ti_and_assert(date2, date1, date2, 60, State.UP_FOR_RESCHEDULE, 0, 2)
done, fail = False, False
run_ti_and_assert(date3, date1, date3, 120, State.UP_FOR_RESCHEDULE, 0, 3)
done, fail = True, False
run_ti_and_assert(date4, date1, date4, 180, State.SUCCESS, 1, 0)
# Clear the task instance.
dag.clear()
ti.refresh_from_db()
self.assertEqual(ti.state, State.NONE)
self.assertEqual(ti._try_number, 1)
# Run again after clearing with reschedules and a retry.
# The retry increments the try number, and for that try no reschedule is expected.
# After the retry the start date is reset, hence the duration is also reset.
done, fail = False, False
run_ti_and_assert(date1, date1, date1, 0, State.UP_FOR_RESCHEDULE, 1, 1)
done, fail = False, True
run_ti_and_assert(date2, date1, date2, 60, State.UP_FOR_RETRY, 2, 0)
done, fail = False, False
run_ti_and_assert(date3, date3, date3, 0, State.UP_FOR_RESCHEDULE, 2, 1)
done, fail = True, False
run_ti_and_assert(date4, date3, date4, 60, State.SUCCESS, 3, 0)
def test_reschedule_handling_clear_reschedules(self):
"""
Test that task reschedules clearing are handled properly
"""
# Return values of the python sensor callable, modified during tests
done = False
fail = False
def func():
if fail:
raise AirflowException()
return done
dag = models.DAG(dag_id='test_reschedule_handling')
task = PythonSensor(
task_id='test_reschedule_handling_sensor',
poke_interval=0,
mode='reschedule',
python_callable=func,
retries=1,
retry_delay=datetime.timedelta(seconds=0),
dag=dag,
owner='airflow',
pool='test_pool',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=timezone.utcnow())
self.assertEqual(ti._try_number, 0)
self.assertEqual(ti.try_number, 1)
def run_ti_and_assert(
run_date,
expected_start_date,
expected_end_date,
expected_duration,
expected_state,
expected_try_number,
expected_task_reschedule_count,
):
with freeze_time(run_date):
try:
ti.run()
except AirflowException:
if not fail:
raise
ti.refresh_from_db()
self.assertEqual(ti.state, expected_state)
self.assertEqual(ti._try_number, expected_try_number)
self.assertEqual(ti.try_number, expected_try_number + 1)
self.assertEqual(ti.start_date, expected_start_date)
self.assertEqual(ti.end_date, expected_end_date)
self.assertEqual(ti.duration, expected_duration)
trs = TaskReschedule.find_for_task_instance(ti) # pylint: disable=no-value-for-parameter
self.assertEqual(len(trs), expected_task_reschedule_count)
date1 = timezone.utcnow()
done, fail = False, False
run_ti_and_assert(date1, date1, date1, 0, State.UP_FOR_RESCHEDULE, 0, 1)
# Clear the task instance.
dag.clear()
ti.refresh_from_db()
self.assertEqual(ti.state, State.NONE)
self.assertEqual(ti._try_number, 0)
# Check that reschedules for ti have also been cleared.
trs = TaskReschedule.find_for_task_instance(ti) # pylint: disable=no-value-for-parameter
self.assertFalse(trs)
def test_depends_on_past(self):
dag = DAG(dag_id='test_depends_on_past', start_date=DEFAULT_DATE)
task = DummyOperator(
task_id='test_dop_task',
dag=dag,
depends_on_past=True,
)
dag.clear()
run_date = task.start_date + datetime.timedelta(days=5)
dag.create_dagrun(
execution_date=run_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
ti = TI(task, run_date)
# depends_on_past prevents the run
task.run(start_date=run_date, end_date=run_date, ignore_first_depends_on_past=False)
ti.refresh_from_db()
self.assertIs(ti.state, None)
# ignore first depends_on_past to allow the run
task.run(start_date=run_date, end_date=run_date, ignore_first_depends_on_past=True)
ti.refresh_from_db()
self.assertEqual(ti.state, State.SUCCESS)
# Parameterized tests to check for the correct firing
# of the trigger_rule under various circumstances
# Numeric fields are in order:
# successes, skipped, failed, upstream_failed, done
@parameterized.expand(
[
#
# Tests for all_success
#
['all_success', 5, 0, 0, 0, 0, True, None, True],
['all_success', 2, 0, 0, 0, 0, True, None, False],
['all_success', 2, 0, 1, 0, 0, True, State.UPSTREAM_FAILED, False],
['all_success', 2, 1, 0, 0, 0, True, State.SKIPPED, False],
#
# Tests for one_success
#
['one_success', 5, 0, 0, 0, 5, True, None, True],
['one_success', 2, 0, 0, 0, 2, True, None, True],
['one_success', 2, 0, 1, 0, 3, True, None, True],
['one_success', 2, 1, 0, 0, 3, True, None, True],
#
# Tests for all_failed
#
['all_failed', 5, 0, 0, 0, 5, True, State.SKIPPED, False],
['all_failed', 0, 0, 5, 0, 5, True, None, True],
['all_failed', 2, 0, 0, 0, 2, True, State.SKIPPED, False],
['all_failed', 2, 0, 1, 0, 3, True, State.SKIPPED, False],
['all_failed', 2, 1, 0, 0, 3, True, State.SKIPPED, False],
#
# Tests for one_failed
#
['one_failed', 5, 0, 0, 0, 0, True, None, False],
['one_failed', 2, 0, 0, 0, 0, True, None, False],
['one_failed', 2, 0, 1, 0, 0, True, None, True],
['one_failed', 2, 1, 0, 0, 3, True, None, False],
['one_failed', 2, 3, 0, 0, 5, True, State.SKIPPED, False],
#
# Tests for done
#
['all_done', 5, 0, 0, 0, 5, True, None, True],
['all_done', 2, 0, 0, 0, 2, True, None, False],
['all_done', 2, 0, 1, 0, 3, True, None, False],
['all_done', 2, 1, 0, 0, 3, True, None, False],
]
)
def test_check_task_dependencies(
self,
trigger_rule,
successes,
skipped,
failed,
upstream_failed,
done,
flag_upstream_failed,
expect_state,
expect_completed,
):
start_date = timezone.datetime(2016, 2, 1, 0, 0, 0)
dag = models.DAG('test-dag', start_date=start_date)
downstream = DummyOperator(task_id='downstream', dag=dag, owner='airflow', trigger_rule=trigger_rule)
for i in range(5):
task = DummyOperator(task_id=f'runme_{i}', dag=dag, owner='airflow')
task.set_downstream(downstream)
run_date = task.start_date + datetime.timedelta(days=5)
ti = TI(downstream, run_date)
dep_results = TriggerRuleDep()._evaluate_trigger_rule( # pylint: disable=no-value-for-parameter
ti=ti,
successes=successes,
skipped=skipped,
failed=failed,
upstream_failed=upstream_failed,
done=done,
flag_upstream_failed=flag_upstream_failed,
)
completed = all(dep.passed for dep in dep_results)
self.assertEqual(completed, expect_completed)
self.assertEqual(ti.state, expect_state)
def test_respects_prev_dagrun_dep(self):
with DAG(dag_id='test_dag'):
task = DummyOperator(task_id='task', start_date=DEFAULT_DATE)
ti = TI(task, DEFAULT_DATE)
failing_status = [TIDepStatus('test fail status name', False, 'test fail reason')]
passing_status = [TIDepStatus('test pass status name', True, 'test passing reason')]
with patch(
'airflow.ti_deps.deps.prev_dagrun_dep.PrevDagrunDep.get_dep_statuses', return_value=failing_status
):
self.assertFalse(ti.are_dependencies_met())
with patch(
'airflow.ti_deps.deps.prev_dagrun_dep.PrevDagrunDep.get_dep_statuses', return_value=passing_status
):
self.assertTrue(ti.are_dependencies_met())
@parameterized.expand(
[
(State.SUCCESS, True),
(State.SKIPPED, True),
(State.RUNNING, False),
(State.FAILED, False),
(State.NONE, False),
]
)
def test_are_dependents_done(self, downstream_ti_state, expected_are_dependents_done):
with DAG(dag_id='test_dag'):
task = DummyOperator(task_id='task', start_date=DEFAULT_DATE)
downstream_task = DummyOperator(task_id='downstream_task', start_date=DEFAULT_DATE)
task >> downstream_task
ti = TI(task, DEFAULT_DATE)
downstream_ti = TI(downstream_task, DEFAULT_DATE)
downstream_ti.set_state(downstream_ti_state)
self.assertEqual(ti.are_dependents_done(), expected_are_dependents_done)
def test_xcom_pull(self):
"""
Test xcom_pull, using different filtering methods.
"""
dag = models.DAG(
dag_id='test_xcom',
schedule_interval='@monthly',
start_date=timezone.datetime(2016, 6, 1, 0, 0, 0),
)
exec_date = timezone.utcnow()
# Push a value
task1 = DummyOperator(task_id='test_xcom_1', dag=dag, owner='airflow')
ti1 = TI(task=task1, execution_date=exec_date)
ti1.xcom_push(key='foo', value='bar')
# Push another value with the same key (but by a different task)
task2 = DummyOperator(task_id='test_xcom_2', dag=dag, owner='airflow')
ti2 = TI(task=task2, execution_date=exec_date)
ti2.xcom_push(key='foo', value='baz')
# Pull with no arguments
result = ti1.xcom_pull()
self.assertEqual(result, None)
# Pull the value pushed most recently by any task.
result = ti1.xcom_pull(key='foo')
        self.assertEqual(result, 'baz')
# Pull the value pushed by the first task
result = ti1.xcom_pull(task_ids='test_xcom_1', key='foo')
self.assertEqual(result, 'bar')
# Pull the value pushed by the second task
result = ti1.xcom_pull(task_ids='test_xcom_2', key='foo')
self.assertEqual(result, 'baz')
# Pull the values pushed by both tasks
result = ti1.xcom_pull(task_ids=['test_xcom_1', 'test_xcom_2'], key='foo')
self.assertEqual(result, ['baz', 'bar'])
def test_xcom_pull_after_success(self):
"""
tests xcom set/clear relative to a task in a 'success' rerun scenario
"""
key = 'xcom_key'
value = 'xcom_value'
dag = models.DAG(dag_id='test_xcom', schedule_interval='@monthly')
task = DummyOperator(
task_id='test_xcom',
dag=dag,
pool='test_xcom',
owner='airflow',
start_date=timezone.datetime(2016, 6, 2, 0, 0, 0),
)
exec_date = timezone.utcnow()
ti = TI(task=task, execution_date=exec_date)
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
ti.run(mark_success=True)
ti.xcom_push(key=key, value=value)
self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), value)
ti.run()
        # The second run and assertion are here to handle AIRFLOW-131 (don't
        # clear on prior success)
self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), value)
# Test AIRFLOW-703: Xcom shouldn't be cleared if the task doesn't
# execute, even if dependencies are ignored
ti.run(ignore_all_deps=True, mark_success=True)
self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), value)
# Xcom IS finally cleared once task has executed
ti.run(ignore_all_deps=True)
self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), None)
def test_xcom_pull_different_execution_date(self):
"""
tests xcom fetch behavior with different execution dates, using
both xcom_pull with "include_prior_dates" and without
"""
key = 'xcom_key'
value = 'xcom_value'
dag = models.DAG(dag_id='test_xcom', schedule_interval='@monthly')
task = DummyOperator(
task_id='test_xcom',
dag=dag,
pool='test_xcom',
owner='airflow',
start_date=timezone.datetime(2016, 6, 2, 0, 0, 0),
)
exec_date = timezone.utcnow()
ti = TI(task=task, execution_date=exec_date)
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
ti.run(mark_success=True)
ti.xcom_push(key=key, value=value)
self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), value)
ti.run()
exec_date += datetime.timedelta(days=1)
ti = TI(task=task, execution_date=exec_date)
ti.run()
        # We have set a new execution date (and did not pass
        # 'include_prior_dates'), which means this task should now have a
        # cleared xcom value
self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), None)
# We *should* get a value using 'include_prior_dates'
self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key, include_prior_dates=True), value)
def test_xcom_push_flag(self):
"""
Tests the option for Operators to push XComs
"""
value = 'hello'
task_id = 'test_no_xcom_push'
dag = models.DAG(dag_id='test_xcom')
# nothing saved to XCom
task = PythonOperator(
task_id=task_id,
dag=dag,
python_callable=lambda: value,
do_xcom_push=False,
owner='airflow',
start_date=datetime.datetime(2017, 1, 1),
)
ti = TI(task=task, execution_date=datetime.datetime(2017, 1, 1))
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
ti.run()
self.assertEqual(ti.xcom_pull(task_ids=task_id, key=models.XCOM_RETURN_KEY), None)
def test_post_execute_hook(self):
"""
Test that post_execute hook is called with the Operator's result.
The result ('error') will cause an error to be raised and trapped.
"""
class TestError(Exception):
pass
class TestOperator(PythonOperator):
def post_execute(self, context, result=None):
if result == 'error':
raise TestError('expected error.')
dag = models.DAG(dag_id='test_post_execute_dag')
task = TestOperator(
task_id='test_operator',
dag=dag,
python_callable=lambda: 'error',
owner='airflow',
start_date=timezone.datetime(2017, 2, 1),
)
ti = TI(task=task, execution_date=timezone.utcnow())
with self.assertRaises(TestError):
ti.run()
def test_check_and_change_state_before_execution(self):
dag = models.DAG(dag_id='test_check_and_change_state_before_execution')
task = DummyOperator(task_id='task', dag=dag, start_date=DEFAULT_DATE)
ti = TI(task=task, execution_date=timezone.utcnow())
self.assertEqual(ti._try_number, 0)
self.assertTrue(ti.check_and_change_state_before_execution())
# State should be running, and try_number column should be incremented
self.assertEqual(ti.state, State.RUNNING)
self.assertEqual(ti._try_number, 1)
def test_check_and_change_state_before_execution_dep_not_met(self):
dag = models.DAG(dag_id='test_check_and_change_state_before_execution')
task = DummyOperator(task_id='task', dag=dag, start_date=DEFAULT_DATE)
task2 = DummyOperator(task_id='task2', dag=dag, start_date=DEFAULT_DATE)
task >> task2
ti = TI(task=task2, execution_date=timezone.utcnow())
self.assertFalse(ti.check_and_change_state_before_execution())
def test_try_number(self):
"""
Test the try_number accessor behaves in various running states
"""
dag = models.DAG(dag_id='test_check_and_change_state_before_execution')
task = DummyOperator(task_id='task', dag=dag, start_date=DEFAULT_DATE)
ti = TI(task=task, execution_date=timezone.utcnow())
self.assertEqual(1, ti.try_number)
ti.try_number = 2
ti.state = State.RUNNING
self.assertEqual(2, ti.try_number)
ti.state = State.SUCCESS
self.assertEqual(3, ti.try_number)
def test_get_num_running_task_instances(self):
session = settings.Session()
dag = models.DAG(dag_id='test_get_num_running_task_instances')
dag2 = models.DAG(dag_id='test_get_num_running_task_instances_dummy')
task = DummyOperator(task_id='task', dag=dag, start_date=DEFAULT_DATE)
task2 = DummyOperator(task_id='task', dag=dag2, start_date=DEFAULT_DATE)
ti1 = TI(task=task, execution_date=DEFAULT_DATE)
ti2 = TI(task=task, execution_date=DEFAULT_DATE + datetime.timedelta(days=1))
ti3 = TI(task=task2, execution_date=DEFAULT_DATE)
ti1.state = State.RUNNING
ti2.state = State.QUEUED
ti3.state = State.RUNNING
session.add(ti1)
session.add(ti2)
session.add(ti3)
session.commit()
self.assertEqual(1, ti1.get_num_running_task_instances(session=session))
self.assertEqual(1, ti2.get_num_running_task_instances(session=session))
self.assertEqual(1, ti3.get_num_running_task_instances(session=session))
def test_log_url(self):
dag = DAG('dag', start_date=DEFAULT_DATE)
task = DummyOperator(task_id='op', dag=dag)
ti = TI(task=task, execution_date=datetime.datetime(2018, 1, 1))
expected_url = (
'http://localhost:8080/log?'
'execution_date=2018-01-01T00%3A00%3A00%2B00%3A00'
'&task_id=op'
'&dag_id=dag'
)
self.assertEqual(ti.log_url, expected_url)
def test_mark_success_url(self):
now = pendulum.now('Europe/Brussels')
dag = DAG('dag', start_date=DEFAULT_DATE)
task = DummyOperator(task_id='op', dag=dag)
ti = TI(task=task, execution_date=now)
query = urllib.parse.parse_qs(
urllib.parse.urlparse(ti.mark_success_url).query, keep_blank_values=True, strict_parsing=True
)
self.assertEqual(query['dag_id'][0], 'dag')
self.assertEqual(query['task_id'][0], 'op')
self.assertEqual(pendulum.parse(query['execution_date'][0]), now)
def test_overwrite_params_with_dag_run_conf(self):
task = DummyOperator(task_id='op')
ti = TI(task=task, execution_date=datetime.datetime.now())
dag_run = DagRun()
dag_run.conf = {"override": True}
params = {"override": False}
ti.overwrite_params_with_dag_run_conf(params, dag_run)
self.assertEqual(True, params["override"])
def test_overwrite_params_with_dag_run_none(self):
task = DummyOperator(task_id='op')
ti = TI(task=task, execution_date=datetime.datetime.now())
params = {"override": False}
ti.overwrite_params_with_dag_run_conf(params, None)
self.assertEqual(False, params["override"])
def test_overwrite_params_with_dag_run_conf_none(self):
task = DummyOperator(task_id='op')
ti = TI(task=task, execution_date=datetime.datetime.now())
params = {"override": False}
dag_run = DagRun()
ti.overwrite_params_with_dag_run_conf(params, dag_run)
self.assertEqual(False, params["override"])
@patch('airflow.models.taskinstance.send_email')
def test_email_alert(self, mock_send_email):
dag = models.DAG(dag_id='test_failure_email')
task = BashOperator(
task_id='test_email_alert', dag=dag, bash_command='exit 1', start_date=DEFAULT_DATE, email='to'
)
ti = TI(task=task, execution_date=timezone.utcnow())
try:
ti.run()
except AirflowException:
pass
(email, title, body), _ = mock_send_email.call_args
self.assertEqual(email, 'to')
self.assertIn('test_email_alert', title)
self.assertIn('test_email_alert', body)
self.assertIn('Try 1', body)
@conf_vars(
{
('email', 'subject_template'): '/subject/path',
('email', 'html_content_template'): '/html_content/path',
}
)
@patch('airflow.models.taskinstance.send_email')
def test_email_alert_with_config(self, mock_send_email):
dag = models.DAG(dag_id='test_failure_email')
task = BashOperator(
task_id='test_email_alert_with_config',
dag=dag,
bash_command='exit 1',
start_date=DEFAULT_DATE,
email='to',
)
ti = TI(task=task, execution_date=timezone.utcnow())
opener = mock_open(read_data='template: {{ti.task_id}}')
with patch('airflow.models.taskinstance.open', opener, create=True):
try:
ti.run()
except AirflowException:
pass
(email, title, body), _ = mock_send_email.call_args
self.assertEqual(email, 'to')
self.assertEqual('template: test_email_alert_with_config', title)
self.assertEqual('template: test_email_alert_with_config', body)
def test_set_duration(self):
task = DummyOperator(task_id='op', email='test@test.test')
ti = TI(
task=task,
execution_date=datetime.datetime.now(),
)
ti.start_date = datetime.datetime(2018, 10, 1, 1)
ti.end_date = datetime.datetime(2018, 10, 1, 2)
ti.set_duration()
self.assertEqual(ti.duration, 3600)
def test_set_duration_empty_dates(self):
task = DummyOperator(task_id='op', email='test@test.test')
ti = TI(task=task, execution_date=datetime.datetime.now())
ti.set_duration()
self.assertIsNone(ti.duration)
def test_success_callback_no_race_condition(self):
callback_wrapper = CallbackWrapper()
dag = DAG(
'test_success_callback_no_race_condition',
start_date=DEFAULT_DATE,
end_date=DEFAULT_DATE + datetime.timedelta(days=10),
)
task = DummyOperator(
task_id='op',
email='test@test.test',
on_success_callback=callback_wrapper.success_handler,
dag=dag,
)
ti = TI(task=task, execution_date=datetime.datetime.now())
ti.state = State.RUNNING
session = settings.Session()
session.merge(ti)
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
session=session,
)
session.commit()
callback_wrapper.wrap_task_instance(ti)
ti._run_raw_task()
self.assertTrue(callback_wrapper.callback_ran)
self.assertEqual(callback_wrapper.task_state_in_callback, State.RUNNING)
ti.refresh_from_db()
self.assertEqual(ti.state, State.SUCCESS)
@staticmethod
def _test_previous_dates_setup(
schedule_interval: Union[str, datetime.timedelta, None], catchup: bool, scenario: List[str]
) -> list:
dag_id = 'test_previous_dates'
dag = models.DAG(dag_id=dag_id, schedule_interval=schedule_interval, catchup=catchup)
task = DummyOperator(task_id='task', dag=dag, start_date=DEFAULT_DATE)
def get_test_ti(session, execution_date: pendulum.DateTime, state: str) -> TI:
dag.create_dagrun(
run_type=DagRunType.SCHEDULED,
state=state,
execution_date=execution_date,
start_date=pendulum.now('UTC'),
session=session,
)
ti = TI(task=task, execution_date=execution_date)
ti.set_state(state=State.SUCCESS, session=session)
return ti
with create_session() as session: # type: Session
date = cast(pendulum.DateTime, pendulum.parse('2019-01-01T00:00:00+00:00'))
ret = []
for idx, state in enumerate(scenario):
new_date = date.add(days=idx)
ti = get_test_ti(session, new_date, state)
ret.append(ti)
return ret
_prev_dates_param_list = (
        param('cron/catchup', '0 0 * * *', True),
param('cron/no-catchup', '0 0 * * *', False),
param('no-sched/catchup', None, True),
param('no-sched/no-catchup', None, False),
param('timedelta/catchup', datetime.timedelta(days=1), True),
param('timedelta/no-catchup', datetime.timedelta(days=1), False),
)
@parameterized.expand(_prev_dates_param_list)
def test_previous_ti(self, _, schedule_interval, catchup) -> None:
scenario = [State.SUCCESS, State.FAILED, State.SUCCESS]
ti_list = self._test_previous_dates_setup(schedule_interval, catchup, scenario)
self.assertIsNone(ti_list[0].get_previous_ti())
self.assertEqual(ti_list[2].get_previous_ti().execution_date, ti_list[1].execution_date)
self.assertNotEqual(ti_list[2].get_previous_ti().execution_date, ti_list[0].execution_date)
@parameterized.expand(_prev_dates_param_list)
def test_previous_ti_success(self, _, schedule_interval, catchup) -> None:
scenario = [State.FAILED, State.SUCCESS, State.FAILED, State.SUCCESS]
ti_list = self._test_previous_dates_setup(schedule_interval, catchup, scenario)
self.assertIsNone(ti_list[0].get_previous_ti(state=State.SUCCESS))
self.assertIsNone(ti_list[1].get_previous_ti(state=State.SUCCESS))
self.assertEqual(
ti_list[3].get_previous_ti(state=State.SUCCESS).execution_date, ti_list[1].execution_date
)
self.assertNotEqual(
ti_list[3].get_previous_ti(state=State.SUCCESS).execution_date, ti_list[2].execution_date
)
@parameterized.expand(_prev_dates_param_list)
def test_previous_execution_date_success(self, _, schedule_interval, catchup) -> None:
scenario = [State.FAILED, State.SUCCESS, State.FAILED, State.SUCCESS]
ti_list = self._test_previous_dates_setup(schedule_interval, catchup, scenario)
self.assertIsNone(ti_list[0].get_previous_execution_date(state=State.SUCCESS))
self.assertIsNone(ti_list[1].get_previous_execution_date(state=State.SUCCESS))
self.assertEqual(
ti_list[3].get_previous_execution_date(state=State.SUCCESS), ti_list[1].execution_date
)
self.assertNotEqual(
ti_list[3].get_previous_execution_date(state=State.SUCCESS), ti_list[2].execution_date
)
@parameterized.expand(_prev_dates_param_list)
def test_previous_start_date_success(self, _, schedule_interval, catchup) -> None:
scenario = [State.FAILED, State.SUCCESS, State.FAILED, State.SUCCESS]
ti_list = self._test_previous_dates_setup(schedule_interval, catchup, scenario)
self.assertIsNone(ti_list[0].get_previous_start_date(state=State.SUCCESS))
self.assertIsNone(ti_list[1].get_previous_start_date(state=State.SUCCESS))
self.assertEqual(
ti_list[3].get_previous_start_date(state=State.SUCCESS),
ti_list[1].start_date,
)
self.assertNotEqual(
ti_list[3].get_previous_start_date(state=State.SUCCESS),
ti_list[2].start_date,
)
def test_pendulum_template_dates(self):
dag = models.DAG(
dag_id='test_pendulum_template_dates',
schedule_interval='0 12 * * *',
start_date=timezone.datetime(2016, 6, 1, 0, 0, 0),
)
task = DummyOperator(task_id='test_pendulum_template_dates_task', dag=dag)
ti = TI(task=task, execution_date=timezone.utcnow())
template_context = ti.get_template_context()
self.assertIsInstance(template_context["execution_date"], pendulum.DateTime)
self.assertIsInstance(template_context["next_execution_date"], pendulum.DateTime)
self.assertIsInstance(template_context["prev_execution_date"], pendulum.DateTime)
@parameterized.expand(
[
('{{ var.value.a_variable }}', 'a test value'),
('{{ var.value.get("a_variable") }}', 'a test value'),
('{{ var.value.get("a_variable", "unused_fallback") }}', 'a test value'),
('{{ var.value.get("missing_variable", "fallback") }}', 'fallback'),
]
)
def test_template_with_variable(self, content, expected_output):
"""
Test the availability of variables in templates
"""
Variable.set('a_variable', 'a test value')
with DAG('test-dag', start_date=DEFAULT_DATE):
task = DummyOperator(task_id='op1')
ti = TI(task=task, execution_date=DEFAULT_DATE)
context = ti.get_template_context()
result = task.render_template(content, context)
self.assertEqual(result, expected_output)
def test_template_with_variable_missing(self):
"""
Test the availability of variables in templates
"""
with DAG('test-dag', start_date=DEFAULT_DATE):
task = DummyOperator(task_id='op1')
ti = TI(task=task, execution_date=DEFAULT_DATE)
context = ti.get_template_context()
with self.assertRaises(KeyError):
task.render_template('{{ var.value.get("missing_variable") }}', context)
@parameterized.expand(
[
('{{ var.value.a_variable }}', '{\n "a": {\n "test": "value"\n }\n}'),
('{{ var.json.a_variable["a"]["test"] }}', 'value'),
('{{ var.json.get("a_variable")["a"]["test"] }}', 'value'),
('{{ var.json.get("a_variable", {"a": {"test": "unused_fallback"}})["a"]["test"] }}', 'value'),
('{{ var.json.get("missing_variable", {"a": {"test": "fallback"}})["a"]["test"] }}', 'fallback'),
]
)
def test_template_with_json_variable(self, content, expected_output):
"""
Test the availability of variables in templates
"""
Variable.set('a_variable', {'a': {'test': 'value'}}, serialize_json=True)
with DAG('test-dag', start_date=DEFAULT_DATE):
task = DummyOperator(task_id='op1')
ti = TI(task=task, execution_date=DEFAULT_DATE)
context = ti.get_template_context()
result = task.render_template(content, context)
self.assertEqual(result, expected_output)
def test_template_with_json_variable_missing(self):
with DAG('test-dag', start_date=DEFAULT_DATE):
task = DummyOperator(task_id='op1')
ti = TI(task=task, execution_date=DEFAULT_DATE)
context = ti.get_template_context()
with self.assertRaises(KeyError):
task.render_template('{{ var.json.get("missing_variable") }}', context)
def test_execute_callback(self):
called = False
def on_execute_callable(context):
nonlocal called
called = True
            self.assertEqual(context['dag_run'].dag_id, 'test_execute_callback')
dag = DAG(
'test_execute_callback',
start_date=DEFAULT_DATE,
end_date=DEFAULT_DATE + datetime.timedelta(days=10),
)
task = DummyOperator(
task_id='op', email='test@test.test', on_execute_callback=on_execute_callable, dag=dag
)
ti = TI(task=task, execution_date=datetime.datetime.now())
ti.state = State.RUNNING
session = settings.Session()
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
session=session,
)
session.merge(ti)
session.commit()
ti._run_raw_task()
assert called
ti.refresh_from_db()
assert ti.state == State.SUCCESS
def test_handle_failure(self):
start_date = timezone.datetime(2016, 6, 1)
dag = models.DAG(dag_id="test_handle_failure", schedule_interval=None, start_date=start_date)
mock_on_failure_1 = mock.MagicMock()
mock_on_retry_1 = mock.MagicMock()
task1 = DummyOperator(
task_id="test_handle_failure_on_failure",
on_failure_callback=mock_on_failure_1,
on_retry_callback=mock_on_retry_1,
dag=dag,
)
ti1 = TI(task=task1, execution_date=start_date)
ti1.state = State.FAILED
ti1.handle_failure("test failure handling")
context_arg_1 = mock_on_failure_1.call_args[0][0]
assert context_arg_1 and "task_instance" in context_arg_1
mock_on_retry_1.assert_not_called()
mock_on_failure_2 = mock.MagicMock()
mock_on_retry_2 = mock.MagicMock()
task2 = DummyOperator(
task_id="test_handle_failure_on_retry",
on_failure_callback=mock_on_failure_2,
on_retry_callback=mock_on_retry_2,
retries=1,
dag=dag,
)
ti2 = TI(task=task2, execution_date=start_date)
ti2.state = State.FAILED
ti2.handle_failure("test retry handling")
mock_on_failure_2.assert_not_called()
context_arg_2 = mock_on_retry_2.call_args[0][0]
assert context_arg_2 and "task_instance" in context_arg_2
# test the scenario where normally we would retry but have been asked to fail
mock_on_failure_3 = mock.MagicMock()
mock_on_retry_3 = mock.MagicMock()
task3 = DummyOperator(
task_id="test_handle_failure_on_force_fail",
on_failure_callback=mock_on_failure_3,
on_retry_callback=mock_on_retry_3,
retries=1,
dag=dag,
)
ti3 = TI(task=task3, execution_date=start_date)
ti3.state = State.FAILED
ti3.handle_failure("test force_fail handling", force_fail=True)
context_arg_3 = mock_on_failure_3.call_args[0][0]
assert context_arg_3 and "task_instance" in context_arg_3
mock_on_retry_3.assert_not_called()
def test_does_not_retry_on_airflow_fail_exception(self):
def fail():
raise AirflowFailException("hopeless")
dag = models.DAG(dag_id='test_does_not_retry_on_airflow_fail_exception')
task = PythonOperator(
task_id='test_raise_airflow_fail_exception',
dag=dag,
python_callable=fail,
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
retries=1,
)
ti = TI(task=task, execution_date=timezone.utcnow())
try:
ti.run()
except AirflowFailException:
pass # expected
self.assertEqual(State.FAILED, ti.state)
def test_retries_on_other_exceptions(self):
def fail():
raise AirflowException("maybe this will pass?")
dag = models.DAG(dag_id='test_retries_on_other_exceptions')
task = PythonOperator(
task_id='test_raise_other_exception',
dag=dag,
python_callable=fail,
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
retries=1,
)
ti = TI(task=task, execution_date=timezone.utcnow())
try:
ti.run()
except AirflowException:
pass # expected
self.assertEqual(State.UP_FOR_RETRY, ti.state)
def _env_var_check_callback(self):
self.assertEqual('test_echo_env_variables', os.environ['AIRFLOW_CTX_DAG_ID'])
self.assertEqual('hive_in_python_op', os.environ['AIRFLOW_CTX_TASK_ID'])
self.assertEqual(DEFAULT_DATE.isoformat(), os.environ['AIRFLOW_CTX_EXECUTION_DATE'])
self.assertEqual(
DagRun.generate_run_id(DagRunType.MANUAL, DEFAULT_DATE), os.environ['AIRFLOW_CTX_DAG_RUN_ID']
)
def test_echo_env_variables(self):
dag = DAG(
'test_echo_env_variables',
start_date=DEFAULT_DATE,
end_date=DEFAULT_DATE + datetime.timedelta(days=10),
)
op = PythonOperator(
task_id='hive_in_python_op', dag=dag, python_callable=self._env_var_check_callback
)
dag.create_dagrun(
run_type=DagRunType.MANUAL,
execution_date=DEFAULT_DATE,
start_date=DEFAULT_DATE,
state=State.RUNNING,
external_trigger=False,
)
ti = TI(task=op, execution_date=DEFAULT_DATE)
ti.state = State.RUNNING
session = settings.Session()
session.merge(ti)
session.commit()
ti._run_raw_task()
ti.refresh_from_db()
self.assertEqual(ti.state, State.SUCCESS)
@patch.object(Stats, 'incr')
def test_task_stats(self, stats_mock):
dag = DAG(
'test_task_start_end_stats',
start_date=DEFAULT_DATE,
end_date=DEFAULT_DATE + datetime.timedelta(days=10),
)
op = DummyOperator(task_id='dummy_op', dag=dag)
dag.create_dagrun(
run_id='manual__' + DEFAULT_DATE.isoformat(),
execution_date=DEFAULT_DATE,
start_date=DEFAULT_DATE,
state=State.RUNNING,
external_trigger=False,
)
ti = TI(task=op, execution_date=DEFAULT_DATE)
ti.state = State.RUNNING
session = settings.Session()
session.merge(ti)
session.commit()
ti._run_raw_task()
ti.refresh_from_db()
stats_mock.assert_called_with(f'ti.finish.{dag.dag_id}.{op.task_id}.{ti.state}')
self.assertIn(call(f'ti.start.{dag.dag_id}.{op.task_id}'), stats_mock.mock_calls)
self.assertEqual(stats_mock.call_count, 5)
def test_generate_command_default_param(self):
dag_id = 'test_generate_command_default_param'
task_id = 'task'
assert_command = ['airflow', 'tasks', 'run', dag_id, task_id, DEFAULT_DATE.isoformat()]
generate_command = TI.generate_command(dag_id=dag_id, task_id=task_id, execution_date=DEFAULT_DATE)
assert assert_command == generate_command
def test_generate_command_specific_param(self):
dag_id = 'test_generate_command_specific_param'
task_id = 'task'
assert_command = [
'airflow',
'tasks',
'run',
dag_id,
task_id,
DEFAULT_DATE.isoformat(),
'--mark-success',
]
generate_command = TI.generate_command(
dag_id=dag_id, task_id=task_id, execution_date=DEFAULT_DATE, mark_success=True
)
assert assert_command == generate_command
def test_get_rendered_template_fields(self):
with DAG('test-dag', start_date=DEFAULT_DATE):
task = BashOperator(task_id='op1', bash_command="{{ task.task_id }}")
ti = TI(task=task, execution_date=DEFAULT_DATE)
with create_session() as session:
session.add(RenderedTaskInstanceFields(ti))
# Create new TI for the same Task
with DAG('test-dag', start_date=DEFAULT_DATE):
new_task = BashOperator(task_id='op1', bash_command="{{ task.task_id }}")
new_ti = TI(task=new_task, execution_date=DEFAULT_DATE)
new_ti.get_rendered_template_fields()
self.assertEqual("op1", ti.task.bash_command)
# CleanUp
with create_session() as session:
session.query(RenderedTaskInstanceFields).delete()
@patch("airflow.models.renderedtifields.IS_K8S_OR_K8SCELERY_EXECUTOR", new=True)
def test_get_rendered_k8s_spec(self):
with DAG('test_get_rendered_k8s_spec', start_date=DEFAULT_DATE):
task = BashOperator(task_id='op1', bash_command="{{ task.task_id }}")
ti = TI(task=task, execution_date=DEFAULT_DATE)
expected_pod_spec = {
'metadata': {
'annotations': {
'dag_id': 'test_get_rendered_k8s_spec',
'execution_date': '2016-01-01T00:00:00+00:00',
'task_id': 'op1',
'try_number': '1',
},
'labels': {
'airflow-worker': 'worker-config',
'airflow_version': version,
'dag_id': 'test_get_rendered_k8s_spec',
'execution_date': '2016-01-01T00_00_00_plus_00_00',
'kubernetes_executor': 'True',
'task_id': 'op1',
'try_number': '1',
},
'name': mock.ANY,
'namespace': 'default',
},
'spec': {
'containers': [
{
'command': [
'airflow',
'tasks',
'run',
'test_get_rendered_k8s_spec',
'op1',
'2016-01-01T00:00:00+00:00',
],
'image': ':',
'name': 'base',
}
]
},
}
with create_session() as session:
rtif = RenderedTaskInstanceFields(ti)
session.add(rtif)
self.assertEqual(rtif.k8s_pod_yaml, expected_pod_spec)
# Create new TI for the same Task
with DAG('test_get_rendered_k8s_spec', start_date=DEFAULT_DATE):
new_task = BashOperator(task_id='op1', bash_command="{{ task.task_id }}")
new_ti = TI(task=new_task, execution_date=DEFAULT_DATE)
pod_spec = new_ti.get_rendered_k8s_spec()
self.assertEqual(expected_pod_spec, pod_spec)
# CleanUp
with create_session() as session:
session.query(RenderedTaskInstanceFields).delete()
def validate_ti_states(self, dag_run, ti_state_mapping, error_message):
for task_id, expected_state in ti_state_mapping.items():
task_instance = dag_run.get_task_instance(task_id=task_id)
self.assertEqual(task_instance.state, expected_state, error_message)
@parameterized.expand(
[
(
{('scheduler', 'schedule_after_task_execution'): 'True'},
{'A': 'B', 'B': 'C'},
{'A': State.QUEUED, 'B': State.NONE, 'C': State.NONE},
{'A': State.SUCCESS, 'B': State.SCHEDULED, 'C': State.NONE},
{'A': State.SUCCESS, 'B': State.SUCCESS, 'C': State.SCHEDULED},
"A -> B -> C, with fast-follow ON when A runs, B should be QUEUED. Same for B and C.",
),
(
{('scheduler', 'schedule_after_task_execution'): 'False'},
{'A': 'B', 'B': 'C'},
{'A': State.QUEUED, 'B': State.NONE, 'C': State.NONE},
{'A': State.SUCCESS, 'B': State.NONE, 'C': State.NONE},
None,
"A -> B -> C, with fast-follow OFF, when A runs, B shouldn't be QUEUED.",
),
(
{('scheduler', 'schedule_after_task_execution'): 'True'},
{'A': 'B', 'C': 'B', 'D': 'C'},
{'A': State.QUEUED, 'B': State.NONE, 'C': State.NONE, 'D': State.NONE},
{'A': State.SUCCESS, 'B': State.NONE, 'C': State.NONE, 'D': State.NONE},
None,
"D -> C -> B & A -> B, when A runs but C isn't QUEUED yet, B shouldn't be QUEUED.",
),
(
{('scheduler', 'schedule_after_task_execution'): 'True'},
{'A': 'C', 'B': 'C'},
{'A': State.QUEUED, 'B': State.FAILED, 'C': State.NONE},
{'A': State.SUCCESS, 'B': State.FAILED, 'C': State.UPSTREAM_FAILED},
None,
"A -> C & B -> C, when A is QUEUED but B has FAILED, C is marked UPSTREAM_FAILED.",
),
]
)
def test_fast_follow(
self, conf, dependencies, init_state, first_run_state, second_run_state, error_message
):
with conf_vars(conf):
session = settings.Session()
dag = DAG('test_dagrun_fast_follow', start_date=DEFAULT_DATE)
dag_model = DagModel(
dag_id=dag.dag_id,
next_dagrun=dag.start_date,
is_active=True,
)
session.add(dag_model)
session.flush()
python_callable = lambda: True
with dag:
task_a = PythonOperator(task_id='A', python_callable=python_callable)
task_b = PythonOperator(task_id='B', python_callable=python_callable)
task_c = PythonOperator(task_id='C', python_callable=python_callable)
if 'D' in init_state:
task_d = PythonOperator(task_id='D', python_callable=python_callable)
for upstream, downstream in dependencies.items():
dag.set_dependency(upstream, downstream)
scheduler = SchedulerJob()
scheduler.dagbag.bag_dag(dag, root_dag=dag)
dag_run = dag.create_dagrun(run_id='test_dagrun_fast_follow', state=State.RUNNING)
task_instance_a = dag_run.get_task_instance(task_id=task_a.task_id)
task_instance_a.task = task_a
task_instance_a.set_state(init_state['A'])
task_instance_b = dag_run.get_task_instance(task_id=task_b.task_id)
task_instance_b.task = task_b
task_instance_b.set_state(init_state['B'])
task_instance_c = dag_run.get_task_instance(task_id=task_c.task_id)
task_instance_c.task = task_c
task_instance_c.set_state(init_state['C'])
if 'D' in init_state:
task_instance_d = dag_run.get_task_instance(task_id=task_d.task_id)
task_instance_d.task = task_d
task_instance_d.state = init_state['D']
session.commit()
task_instance_a.run()
self.validate_ti_states(dag_run, first_run_state, error_message)
if second_run_state:
scheduler._critical_section_execute_task_instances(session=session)
task_instance_b.run()
self.validate_ti_states(dag_run, second_run_state, error_message)
@pytest.mark.parametrize("pool_override", [None, "test_pool2"])
def test_refresh_from_task(pool_override):
task = DummyOperator(
task_id="dummy",
queue="test_queue",
pool="test_pool1",
pool_slots=3,
priority_weight=10,
run_as_user="test",
retries=30,
executor_config={"KubernetesExecutor": {"image": "myCustomDockerImage"}},
)
ti = TI(task, execution_date=pendulum.datetime(2020, 1, 1))
ti.refresh_from_task(task, pool_override=pool_override)
assert ti.queue == task.queue
if pool_override:
assert ti.pool == pool_override
else:
assert ti.pool == task.pool
assert ti.pool_slots == task.pool_slots
assert ti.priority_weight == task.priority_weight_total
assert ti.run_as_user == task.run_as_user
assert ti.max_tries == task.retries
assert ti.executor_config == task.executor_config
assert ti.operator == DummyOperator.__name__
class TestRunRawTaskQueriesCount(unittest.TestCase):
"""
These tests are designed to detect changes in the number of queries executed
when calling _run_raw_task
"""
@staticmethod
def _clean():
db.clear_db_runs()
db.clear_db_pools()
db.clear_db_dags()
db.clear_db_sla_miss()
db.clear_db_errors()
def setUp(self) -> None:
self._clean()
def tearDown(self) -> None:
self._clean()
@parameterized.expand(
[
# Expected queries, mark_success
(10, False),
(5, True),
]
)
def test_execute_queries_count(self, expected_query_count, mark_success):
with create_session() as session:
dag = DAG('test_queries', start_date=DEFAULT_DATE)
task = DummyOperator(task_id='op', dag=dag)
ti = TI(task=task, execution_date=datetime.datetime.now())
ti.state = State.RUNNING
session.merge(ti)
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
session=session,
)
with assert_queries_count(expected_query_count):
ti._run_raw_task(mark_success=mark_success)
def test_execute_queries_count_store_serialized(self):
with create_session() as session:
dag = DAG('test_queries', start_date=DEFAULT_DATE)
task = DummyOperator(task_id='op', dag=dag)
ti = TI(task=task, execution_date=datetime.datetime.now())
ti.state = State.RUNNING
session.merge(ti)
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
session=session,
)
with assert_queries_count(10):
ti._run_raw_task()
def test_operator_field_with_serialization(self):
dag = DAG('test_queries', start_date=DEFAULT_DATE)
task = DummyOperator(task_id='op', dag=dag)
self.assertEqual(task.task_type, 'DummyOperator')
# Verify that ti.operator field renders correctly "without" Serialization
ti = TI(task=task, execution_date=datetime.datetime.now())
self.assertEqual(ti.operator, "DummyOperator")
serialized_op = SerializedBaseOperator.serialize_operator(task)
deserialized_op = SerializedBaseOperator.deserialize_operator(serialized_op)
self.assertEqual(deserialized_op.task_type, 'DummyOperator')
# Verify that ti.operator field renders correctly "with" Serialization
ser_ti = TI(task=deserialized_op, execution_date=datetime.datetime.now())
self.assertEqual(ser_ti.operator, "DummyOperator")
|
mjtamlyn/django
|
refs/heads/master
|
tests/migrations/test_migrations_squashed_erroneous/6_auto.py
|
266
|
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("migrations", "5_auto")]
operations = [
migrations.RunPython(migrations.RunPython.noop)
]
|
Bleyddyn/malpi
|
refs/heads/master
|
exp/temp/captioning_solver.py
|
19
|
import numpy as np
from cs231n import optim
from cs231n.coco_utils import sample_coco_minibatch
class CaptioningSolver(object):
"""
A CaptioningSolver encapsulates all the logic necessary for training
image captioning models. The CaptioningSolver performs stochastic gradient
descent using different update rules defined in optim.py.
    The solver accepts both training and validation data and labels so it can
periodically check classification accuracy on both training and validation
data to watch out for overfitting.
To train a model, you will first construct a CaptioningSolver instance,
passing the model, dataset, and various options (learning rate, batch size,
etc) to the constructor. You will then call the train() method to run the
optimization procedure and train the model.
After the train() method returns, model.params will contain the parameters
that performed best on the validation set over the course of training.
In addition, the instance variable solver.loss_history will contain a list
of all losses encountered during training and the instance variables
solver.train_acc_history and solver.val_acc_history will be lists containing
the accuracies of the model on the training and validation set at each epoch.
Example usage might look something like this:
data = load_coco_data()
model = MyAwesomeModel(hidden_dim=100)
solver = CaptioningSolver(model, data,
update_rule='sgd',
optim_config={
'learning_rate': 1e-3,
},
lr_decay=0.95,
num_epochs=10, batch_size=100,
print_every=100)
solver.train()
A CaptioningSolver works on a model object that must conform to the following
API:
- model.params must be a dictionary mapping string parameter names to numpy
arrays containing parameter values.
- model.loss(features, captions) must be a function that computes
training-time loss and gradients, with the following inputs and outputs:
Inputs:
      - features: Array giving a minibatch of features for images, of shape (N, D)
- captions: Array of captions for those images, of shape (N, T) where
each element is in the range (0, V].
Returns:
- loss: Scalar giving the loss
- grads: Dictionary with the same keys as self.params mapping parameter
names to gradients of the loss with respect to those parameters.
"""
def __init__(self, model, data, **kwargs):
"""
Construct a new CaptioningSolver instance.
Required arguments:
- model: A model object conforming to the API described above
- data: A dictionary of training and validation data from load_coco_data
Optional arguments:
- update_rule: A string giving the name of an update rule in optim.py.
Default is 'sgd'.
- optim_config: A dictionary containing hyperparameters that will be
passed to the chosen update rule. Each update rule requires different
hyperparameters (see optim.py) but all update rules require a
'learning_rate' parameter so that should always be present.
- lr_decay: A scalar for learning rate decay; after each epoch the learning
rate is multiplied by this value.
- batch_size: Size of minibatches used to compute loss and gradient during
training.
- num_epochs: The number of epochs to run for during training.
- print_every: Integer; training losses will be printed every print_every
iterations.
- verbose: Boolean; if set to false then no output will be printed during
training.
"""
self.model = model
self.data = data
# Unpack keyword arguments
self.update_rule = kwargs.pop('update_rule', 'sgd')
self.optim_config = kwargs.pop('optim_config', {})
self.lr_decay = kwargs.pop('lr_decay', 1.0)
self.batch_size = kwargs.pop('batch_size', 100)
self.num_epochs = kwargs.pop('num_epochs', 10)
self.print_every = kwargs.pop('print_every', 10)
self.verbose = kwargs.pop('verbose', True)
# Throw an error if there are extra keyword arguments
if len(kwargs) > 0:
extra = ', '.join('"%s"' % k for k in kwargs.keys())
raise ValueError('Unrecognized arguments %s' % extra)
# Make sure the update rule exists, then replace the string
# name with the actual function
if not hasattr(optim, self.update_rule):
raise ValueError('Invalid update_rule "%s"' % self.update_rule)
self.update_rule = getattr(optim, self.update_rule)
self._reset()
def _reset(self):
"""
Set up some book-keeping variables for optimization. Don't call this
manually.
"""
# Set up some variables for book-keeping
self.epoch = 0
self.best_val_acc = 0
self.best_params = {}
self.loss_history = []
self.train_acc_history = []
self.val_acc_history = []
# Make a deep copy of the optim_config for each parameter
self.optim_configs = {}
for p in self.model.params:
d = {k: v for k, v in self.optim_config.iteritems()}
self.optim_configs[p] = d
def _step(self):
"""
Make a single gradient update. This is called by train() and should not
be called manually.
"""
# Make a minibatch of training data
minibatch = sample_coco_minibatch(self.data,
batch_size=self.batch_size,
split='train')
captions, features, urls = minibatch
# Compute loss and gradient
loss, grads = self.model.loss(features, captions)
self.loss_history.append(loss)
# Perform a parameter update
for p, w in self.model.params.iteritems():
dw = grads[p]
config = self.optim_configs[p]
next_w, next_config = self.update_rule(w, dw, config)
self.model.params[p] = next_w
self.optim_configs[p] = next_config
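    # Hedged sketch of the update-rule contract _step relies on: every rule
    # in optim.py takes (w, dw, config) and returns (next_w, next_config).
    # The body below follows the cs231n 'sgd' convention and is illustrative
    # only.
    @staticmethod
    def _sgd_example(w, dw, config):
        config.setdefault('learning_rate', 1e-2)
        next_w = w - config['learning_rate'] * dw
        return next_w, config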
# TODO: This does nothing right now; maybe implement BLEU?
def check_accuracy(self, X, y, num_samples=None, batch_size=100):
"""
Check accuracy of the model on the provided data.
Inputs:
- X: Array of data, of shape (N, d_1, ..., d_k)
- y: Array of labels, of shape (N,)
- num_samples: If not None, subsample the data and only test the model
on num_samples datapoints.
- batch_size: Split X and y into batches of this size to avoid using too
much memory.
Returns:
- acc: Scalar giving the fraction of instances that were correctly
classified by the model.
"""
        # NOTE: accuracy checking is effectively stubbed out for captioning;
        # everything past this early return is unreachable until a real
        # caption metric (e.g. BLEU) is implemented.
        return 0.0
# Maybe subsample the data
N = X.shape[0]
if num_samples is not None and N > num_samples:
mask = np.random.choice(N, num_samples)
N = num_samples
X = X[mask]
y = y[mask]
# Compute predictions in batches
num_batches = N / batch_size
if N % batch_size != 0:
num_batches += 1
y_pred = []
for i in xrange(num_batches):
start = i * batch_size
end = (i + 1) * batch_size
scores = self.model.loss(X[start:end])
y_pred.append(np.argmax(scores, axis=1))
y_pred = np.hstack(y_pred)
acc = np.mean(y_pred == y)
return acc
def train(self):
"""
Run optimization to train the model.
"""
num_train = self.data['train_captions'].shape[0]
iterations_per_epoch = max(num_train / self.batch_size, 1)
num_iterations = self.num_epochs * iterations_per_epoch
for t in xrange(num_iterations):
self._step()
# Maybe print training loss
if self.verbose and t % self.print_every == 0:
print '(Iteration %d / %d) loss: %f' % (
t + 1, num_iterations, self.loss_history[-1])
# At the end of every epoch, increment the epoch counter and decay the
# learning rate.
epoch_end = (t + 1) % iterations_per_epoch == 0
if epoch_end:
self.epoch += 1
for k in self.optim_configs:
self.optim_configs[k]['learning_rate'] *= self.lr_decay
# Check train and val accuracy on the first iteration, the last
# iteration, and at the end of each epoch.
# TODO: Implement some logic to check Bleu on validation set periodically
# At the end of training swap the best params into the model
# self.model.params = self.best_params
|
jrper/fluidity
|
refs/heads/master
|
examples/stokes_square_convection/Plot_RMS.py
|
4
|
#!/usr/bin/python
# This script plots the RMS velocity for both the 24x24 and 48x48 cases
import pylab
from fluidity_tools import stat_parser as stat
# Stat files:
statfile24="stokes-sc-Ra1e5-24.stat"
statfile48="stokes-sc-Ra1e5-48.stat"
# First plot 24x24 case:
pylab.plot(stat(statfile24)["CoordinateMesh"]["nodes"][-1],
stat(statfile24)["Fluid"]["Velocity%magnitude"]["l2norm"][-1],
linestyle='None', marker='o', markerfacecolor='0.15')
# Next plot 48x48 case:
pylab.plot(stat(statfile48)["CoordinateMesh"]["nodes"][-1],
stat(statfile48)["Fluid"]["Velocity%magnitude"]["l2norm"][-1],
linestyle='None', marker='o', markerfacecolor='0.15')
# Plot benchmark value as line for comparison:
pylab.plot([100,8e4],[193.214,193.214],'k--',lw=0.6)
pylab.xlabel(r"Vertices")
pylab.ylabel(r"RMS Velocity")
pylab.xlim(100,1e4)
pylab.ylim(192.0,195.0)
pylab.savefig("RMS_1e5.png")
|
Nolski/airmozilla
|
refs/heads/master
|
airmozilla/new/cron.py
|
14
|
import cronjobs
from airmozilla.cronlogger.decorators import capture
from . import eventemails
@cronjobs.register
@capture
def send_new_event_emails():
eventemails.send_new_event_emails(verbose=True)
|
andersonsilvade/python_C
|
refs/heads/master
|
Python32/web2py/applications/admin/controllers/mercurial.py
|
4
|
from gluon.fileutils import read_file, write_file
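# NOTE: this is a web2py controller, so names such as DEMO_MODE,
# MULTI_USER_MODE, have_mercurial, apath, and the mercurial ``ui``/``hg``/
# ``addremove`` helpers are expected to be injected by the admin app's
# execution environment rather than imported here.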
if DEMO_MODE or MULTI_USER_MODE:
session.flash = T('disabled in demo mode')
redirect(URL('default', 'site'))
if not have_mercurial:
session.flash = T("Sorry, could not find mercurial installed")
redirect(URL('default', 'design', args=request.args(0)))
_hgignore_content = """\
syntax: glob
*~
*.pyc
*.pyo
*.bak
*.bak2
cache/*
private/*
uploads/*
databases/*
sessions/*
errors/*
"""
def hg_repo(path):
import os
uio = ui.ui()
uio.quiet = True
if not os.environ.get('HGUSER') and not uio.config("ui", "username"):
os.environ['HGUSER'] = 'web2py@localhost'
try:
repo = hg.repository(ui=uio, path=path)
except:
repo = hg.repository(ui=uio, path=path, create=True)
hgignore = os.path.join(path, '.hgignore')
if not os.path.exists(hgignore):
write_file(hgignore, _hgignore_content)
return repo
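# Hedged usage sketch for hg_repo (the path below is hypothetical): it opens
# the repository at ``path``, creating it (and seeding a default .hgignore)
# on first use.
#
#     repo = hg_repo('/path/to/app')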
def commit():
app = request.args(0)
path = apath(app, r=request)
repo = hg_repo(path)
form = FORM('Comment:', INPUT(_name='comment', requires=IS_NOT_EMPTY()),
INPUT(_type='submit', _value=T('Commit')))
if form.accepts(request.vars, session):
oldid = repo[repo.lookup('.')]
addremove(repo)
repo.commit(text=form.vars.comment)
if repo[repo.lookup('.')] == oldid:
response.flash = 'no changes'
try:
files = TABLE(*[TR(file) for file in repo[repo.lookup('.')].files()])
changes = TABLE(TR(TH('revision'), TH('description')))
for change in repo.changelog:
ctx = repo.changectx(change)
revision, description = ctx.rev(), ctx.description()
changes.append(TR(A(revision, _href=URL('revision',
args=(app, revision))),
description))
except:
files = []
changes = []
return dict(form=form, files=files, changes=changes, repo=repo)
def revision():
app = request.args(0)
path = apath(app, r=request)
repo = hg_repo(path)
revision = request.args(1)
ctx = repo.changectx(revision)
form = FORM(INPUT(_type='submit', _value=T('Revert')))
if form.accepts(request.vars):
hg.update(repo, revision)
session.flash = "reverted to revision %s" % ctx.rev()
redirect(URL('default', 'design', args=app))
return dict(
files=ctx.files(),
rev=str(ctx.rev()),
desc=ctx.description(),
form=form
)
|
decisive/api-demo-python
|
refs/heads/master
|
sample_scripts/target_keywords.py
|
1
|
import sys # NOTE: for exiting
import requests
import datetime
import pprint
import ujson as json # NOTE: faster json
API_KEY = '' # TODO: input api key here!!!
if not API_KEY:
sys.exit('Please insert your Decisive API key')
print
print 'Creating session to always add API key...'
# NOTE: you can also use decisive.DecisiveApiClient
session = requests.Session()
session.auth = (API_KEY,'')
API_HOST = 'https://ads.decisive.is'
def to_uri(*paths, **get_args):
path = '/'.join(p.strip('/') if isinstance(p,(str,unicode)) else unicode(p) for p in paths)
args = '&'.join('{}={}'.format(*i) for i in get_args.items())
return '{}/{}?{}'.format(API_HOST, path, args)
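# For example (hypothetical values), to_uri builds host-qualified URIs:
#
#     to_uri('ads', 123, limit=5)
#     # -> 'https://ads.decisive.is/ads/123?limit=5'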
def get(*paths, **get_args):
uri = to_uri(*paths, **get_args)
response = session.get(uri)
response.raise_for_status()
return response.json()
def put(updated_ad):
uri = to_uri('ads',updated_ad['ad_id'])
response = session.put(uri, data=json.dumps(updated_ad))
response.raise_for_status()
return True
print
print 'Selecting ad...'
ads = get('ads', offset=1, limit=5, approved='true')
print [a['ad_id'] for a in ads]
ad = ads[0]
print 'selected', ad['ad_id']
pprint.pprint(ad['targeting'])
print
print 'Targeting keywords...'
ad['targeting']['keywords'] = ['game','test','music']
print put(ad)
|
TylerTemp/tomorrow
|
refs/heads/master
|
lib/hdlr/base.py
|
1
|
'''The basic handler of all tornado request
provide some convenient methods'''
import tornado.web
import tornado.locale
import tornado.escape
import logging
import functools
import json
import os
try:
from urllib.parse import quote, urlsplit, urlunsplit, urljoin
except ImportError:
from urllib import quote
from urlparse import urlsplit, urlunsplit, urljoin
from lib.tool.tracemore import get_exc_plus
from lib.config.base import Config
from lib import Log
class BaseHandler(tornado.web.RequestHandler, Log):
config = Config()
logger = logging.getLogger()
error_template = 'error.html'
def get(self, *a, **k):
splited = urlsplit(self.request.uri)
if not splited.path.endswith('/'):
to_list = list(splited)
to_list[2] = splited.path + '/'
return self.redirect(urlunsplit(to_list), True)
raise tornado.web.HTTPError(404)
def post(self, *a, **k):
if self.is_ajax():
self.clear()
self.set_status(405)
self.write({'code': -1, 'message': 'Method Not Allowed',
'error': -1})
return
raise tornado.web.HTTPError(405, 'Method Not Allowed')
def render(self, template_name, **kwargs):
kwargs.setdefault('JOLLA_HOST', self.config.jolla_host)
kwargs.setdefault('TOMORROW_HOST', self.config.tomorrow_host)
return super(BaseHandler, self).render(
template_name,
**kwargs
)
def is_ajax(self):
return (self.request.headers.get('X-Requested-With', None) ==
"XMLHttpRequest")
def is_ssl(self):
return (self.request.protocol == 'https')
def _list_path(self, path):
if not os.path.exists(path):
return []
for dirpath, dirnames, filenames in os.walk(path):
return list(filenames)
def get_user_locale(self):
arg = self.get_argument('lang', None)
if arg is not None:
return tornado.locale.get(arg)
cookie_lang = self.get_cookie('lang')
if cookie_lang:
return tornado.locale.get(cookie_lang)
return None
def write_error(self, status_code, **kwargs):
r = self.request
self.debug('%s - %s' % (r.remote_ip, r.host))
self.error('%s' % get_exc_plus())
self.clear()
self.set_status(status_code)
message = self.get_error(status_code, **kwargs)
if self.is_ajax():
self.debug('render error of ajax')
self.write({'code': -1, 'message': message, 'error': -1})
return
self.debug('render error of html')
return self.render(
self.error_template,
code=status_code,
msg=message,
)
def get_error(self, status_code, **kwargs):
msg = 'Unknown Error'
if self.settings['debug']:
if self.is_ajax():
exc_info = kwargs['exc_info']
return (getattr(exc_info[1], 'log_message', None) or
str(exc_info[1]))
return get_exc_plus()
elif status_code == 404:
msg = 'Page Not Found'
if 'exc_info' in kwargs:
exc_info = kwargs['exc_info']
if exc_info and len(exc_info) >= 2:
msg = getattr(exc_info[1], 'log_message', None) or msg
return msg
class EnsureSsl(object):
def __init__(self, permanent=False):
self._prem = permanent
def __call__(self, func):
@functools.wraps(func)
def wrapper(ins, *a, **k):
if (ins.request.protocol != 'https'):
return ins.redirect(
'https://%s%s' % (ins.request.host, ins.request.uri),
self._prem)
return func(ins, *a, **k)
return wrapper
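# Usage sketch for EnsureSsl (LoginHandler is a hypothetical subclass):
#
# class LoginHandler(BaseHandler):
#     @EnsureSsl(permanent=True)
#     def get(self):
#         ...  # only ever served over https; plain http gets a 301 redirect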
class StaticFileHandler(tornado.web.StaticFileHandler):
def get(self):
path, file = os.path.split(self.root)
self.root = path
return super(StaticFileHandler, self).get(path=file)
class RedirectHandler(BaseHandler):
def initialize(self, to, permanently=False):
self._to = to
self._permanently = permanently
def get(self, *a, **k):
return self.redirect(self._to, self._permanently)
post = get
|
edsuom/sAsync
|
refs/heads/master
|
sasync/test/test_items.py
|
1
|
# sAsync:
# An enhancement to the SQLAlchemy package that provides persistent
# item-value stores, arrays, and dictionaries, and an access broker for
# conveniently managing database access, table setup, and
# transactions. Everything can be run in an asynchronous fashion using
# the Twisted framework and its deferred processing capabilities.
#
# Copyright (C) 2006, 2015 by Edwin A. Suominen, http://edsuom.com
#
# See edsuom.com for API documentation as well as information about
# Ed's background and other projects, software and otherwise.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS
# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language
# governing permissions and limitations under the License.
"""
Unit tests for sasync.items.py.
"""
from twisted.internet.defer import Deferred, DeferredList
from sqlalchemy import *
from sasync.database import transact, AccessBroker
import sasync.items as items
from sasync.test.testbase import MockThing, TestCase
GROUP_ID = 123
VERBOSE = False
db = 'items.db'
class TestableItemsTransactor(items.Transactor):
@transact
def pre(self):
# Group 123
self.sasync_items.insert().execute(
group_id=123, name='foo', value='OK')
# Set up an experienced MockThing to have pickled
thing = MockThing()
thing.method(1)
self.sasync_items.insert().execute(
group_id=123, name='bar', value=thing)
# Group 124
self.sasync_items.insert().execute(
group_id=124, name='foo', value='bogus')
self.sasync_items.insert().execute(
group_id=124, name='invalid', value='bogus')
@transact
def post(self):
self.sasync_items.delete().execute()
class ItemsMixin:
def tearDown(self):
def _tearDown():
si = self.i.t.sasync_items
si.delete(si.c.group_id == GROUP_ID).execute()
d = self.i.t.deferToQueue(_tearDown, niceness=20)
d.addCallback(lambda _: self.i.shutdown())
return d
class TestItemsTransactor(ItemsMixin, TestCase):
def setUp(self):
url = "sqlite:///%s" % db
self.i = items.Items(GROUP_ID, url)
self.i.t = TestableItemsTransactor(self.i.groupID, url)
return self.i.t.pre()
def tearDown(self):
return self.i.t.post()
def test_load(self):
def gotValue(value, name):
if name == 'foo':
self.failUnlessEqual(value, 'OK')
else:
self.failUnless(
isinstance(value, MockThing),
"Item 'bar' is a '%s', not an instance of 'MockThing'" \
% value)
self.failUnless(
value.beenThereDoneThat,
"Class instance wasn't properly persisted with its state")
self.failUnlessEqual(
value.method(2.5), 5.0,
"Class instance wasn't properly persisted with its method")
dList = []
for name in ('foo', 'bar'):
dList.append(self.i.t.load(name).addCallback(gotValue, name))
return DeferredList(dList)
def test_loadAbsent(self):
def gotValue(value):
self.failUnless(
isinstance(value, items.Missing),
"Should have returned 'Missing' object, not '%s'!" % \
str(value))
def gotExpectedError(failure):
self.fail("Shouldn't have raised error on missing value")
return self.i.t.load('invalid').addCallbacks(
gotValue, gotExpectedError)
def test_loadAll(self):
def loaded(items):
itemKeys = items.keys()
itemKeys.sort()
self.failUnlessEqual(itemKeys, ['bar', 'foo'])
return self.i.t.loadAll().addCallback(loaded)
def insertLots(self, callback):
noviceThing = MockThing()
experiencedThing = MockThing()
experiencedThing.method(0)
self.whatToInsert = {
'alpha':5937341,
'bravo':'abc',
'charlie':-3.1415,
'delta':(1,2,3),
'echo':True,
'foxtrot':False,
'golf':noviceThing,
'hotel':experiencedThing,
'india':MockThing
}
dList = []
for name, value in self.whatToInsert.iteritems():
dList.append(self.i.t.insert(name, value))
return DeferredList(dList).addCallback(
callback, self.whatToInsert.copy())
def test_insert(self):
def done(null, items):
def check():
table = self.i.t.sasync_items
for name, inserted in items.iteritems():
value = table.select(
and_(table.c.group_id == 123,
table.c.name == name)
).execute().fetchone()['value']
msg = "Inserted '{}:{}' ".format(name, inserted) +\
"but read '{}' back from the database!".format(value)
self.failUnlessEqual(value, inserted, msg)
for otherName, otherValue in items.iteritems():
if otherName != name and value == otherValue:
self.fail(
"Inserted item '%s' is equal to item '%s'" % \
(name, otherName))
return self.i.t.deferToQueue(check)
return self.insertLots(done)
def test_deleteOne(self):
def gotOriginal(value):
self.failUnlessEqual(value, 'OK')
return self.i.t.delete('foo').addCallback(getAfterDeleted)
def getAfterDeleted(null):
return self.i.t.load('foo').addCallback(checkIfDeleted)
def checkIfDeleted(value):
self.failUnless(isinstance(value, items.Missing))
return self.i.t.load('foo').addCallback(gotOriginal)
def test_deleteMultiple(self):
def getAfterDeleted(null):
return self.i.t.loadAll().addCallback(checkIfDeleted)
def checkIfDeleted(values):
self.failUnlessEqual(values, {})
return self.i.t.delete('foo', 'bar').addCallback(getAfterDeleted)
def test_namesFew(self):
def got(names):
names.sort()
self.failUnlessEqual(names, ['bar', 'foo'])
return self.i.t.names().addCallback(got)
def test_namesMany(self):
def get(null, items):
return self.i.t.names().addCallback(got, items.keys())
def got(names, shouldHave):
shouldHave += ['foo', 'bar']
names.sort()
shouldHave.sort()
self.failUnlessEqual(names, shouldHave)
return self.insertLots(get)
def test_update(self):
def update(null, items):
return DeferredList([
self.i.t.update('alpha', 1),
self.i.t.update('bravo', 2),
self.i.t.update('charlie', 3)
]).addCallback(check, items)
def check(null, items):
return self.i.t.loadAll().addCallback(loaded, items)
def loaded(loadedItems, controlItems):
controlItems.update({'alpha':1, 'bravo':2, 'charlie':3})
for name, value in controlItems.iteritems():
self.failUnlessEqual(
value, loadedItems.get(name, 'Impossible Value'))
return self.insertLots(update)
class TestItems(ItemsMixin, TestCase):
def setUp(self):
self.i = items.Items(GROUP_ID, "sqlite:///%s" % db)
def test_insertAndLoad(self):
nouns = ('lamp', 'rug', 'chair')
def first(null):
return self.i.loadAll().addCallback(second)
def second(items):
self.failUnlessEqual(items['Nouns'], nouns)
return self.i.insert('Nouns', nouns).addCallback(first)
def test_insertAndDelete(self):
items = {'a':0, 'b':1, 'c':2, 'd':3, 'e':4}
def first(null):
return self.i.delete('c').addCallback(second)
def second(null):
return self.i.names().addCallback(third)
def third(nameList):
desiredList = [x for x in items.keys() if x != 'c']
desiredList.sort()
nameList.sort()
self.failUnlessEqual(nameList, desiredList)
dL = []
for name, value in items.iteritems():
dL.append(self.i.insert(name, value))
return DeferredList(dL).addCallback(first)
def test_insertAndLoadAll(self):
items = {'a':0, 'b':1, 'c':2, 'd':3, 'e':4}
def first(null):
return self.i.loadAll().addCallback(second)
def second(loadedItems):
self.failUnlessEqual(loadedItems, items)
dL = []
for name, value in items.iteritems():
dL.append(self.i.insert(name, value))
return DeferredList(dL).addCallback(first)
def test_insertAndUpdate(self):
items = {'a':0, 'b':1, 'c':2, 'd':3, 'e':4}
def first(null):
return self.i.update('b', 10).addCallback(second)
def second(null):
return self.i.loadAll().addCallback(third)
def third(loadedItems):
expectedItems = {'a':0, 'b':10, 'c':2, 'd':3, 'e':4}
self.failUnlessEqual(loadedItems, expectedItems)
dL = []
for name, value in items.iteritems():
dL.append(self.i.insert(name, value))
return DeferredList(dL).addCallback(first)
class TestItemsIntegerNames(ItemsMixin, TestCase):
def setUp(self):
self.items = {'1':'a', 2:'b', 3:'c', '04':'d'}
self.i = items.Items(GROUP_ID, "sqlite:///%s" % db, nameType=int)
def insertStuff(self):
dL = []
for name, value in self.items.iteritems():
dL.append(self.i.insert(name, value))
return DeferredList(dL)
def test_names(self):
def first(null):
return self.i.names().addCallback(second)
def second(names):
names.sort()
self.failUnlessEqual(names, [1, 2, 3, 4])
return self.insertStuff().addCallback(first)
def test_loadAll(self):
def first(null):
return self.i.loadAll().addCallback(second)
def second(loaded):
self.failUnlessEqual(loaded, {1:'a', 2:'b', 3:'c', 4:'d'})
return self.insertStuff().addCallback(first)
class TestItemsStringNames(ItemsMixin, TestCase):
def setUp(self):
self.items = {'1':'a', 2:'b', u'3':'c', "4":'d'}
self.i = items.Items(GROUP_ID, "sqlite:///%s" % db, nameType=str)
def insertStuff(self):
dL = []
for name, value in self.items.iteritems():
dL.append(self.i.insert(name, value))
return DeferredList(dL)
def test_names(self):
def first(null):
return self.i.names().addCallback(second)
def second(names):
names.sort()
self.failUnlessEqual(names, ['1', '2', '3', '4'])
return self.insertStuff().addCallback(first)
def test_loadAll(self):
def first(null):
return self.i.loadAll().addCallback(second)
def second(loaded):
self.failUnlessEqual(loaded, {'1':'a', '2':'b', '3':'c', '4':'d'})
return self.insertStuff().addCallback(first)
|
discosultan/quake-console
|
refs/heads/master
|
Samples/Sandbox/Lib/pickletools.py
|
41
|
'''"Executable documentation" for the pickle module.
Extensive comments about the pickle protocols and pickle-machine opcodes
can be found here. Some functions meant for external use:
genops(pickle)
Generate all the opcodes in a pickle, as (opcode, arg, position) triples.
dis(pickle, out=None, memo=None, indentlevel=4)
Print a symbolic disassembly of a pickle.
'''
__all__ = ['dis', 'genops', 'optimize']
# Other ideas:
#
# - A pickle verifier: read a pickle and check it exhaustively for
# well-formedness. dis() does a lot of this already.
#
# - A protocol identifier: examine a pickle and return its protocol number
# (== the highest .proto attr value among all the opcodes in the pickle).
# dis() already prints this info at the end.
#
# - A pickle optimizer: for example, tuple-building code is sometimes more
# elaborate than necessary, catering for the possibility that the tuple
# is recursive. Or lots of times a PUT is generated that's never accessed
# by a later GET.
"""
"A pickle" is a program for a virtual pickle machine (PM, but more accurately
called an unpickling machine). It's a sequence of opcodes, interpreted by the
PM, building an arbitrarily complex Python object.
For the most part, the PM is very simple: there are no looping, testing, or
conditional instructions, no arithmetic and no function calls. Opcodes are
executed once each, from first to last, until a STOP opcode is reached.
The PM has two data areas, "the stack" and "the memo".
Many opcodes push Python objects onto the stack; e.g., INT pushes a Python
integer object on the stack, whose value is gotten from a decimal string
literal immediately following the INT opcode in the pickle bytestream. Other
opcodes take Python objects off the stack. The result of unpickling is
whatever object is left on the stack when the final STOP opcode is executed.
The memo is simply an array of objects, or it can be implemented as a dict
mapping little integers to objects. The memo serves as the PM's "long term
memory", and the little integers indexing the memo are akin to variable
names. Some opcodes pop a stack object into the memo at a given index,
and others push a memo object at a given index onto the stack again.
At heart, that's all the PM has. Subtleties arise for these reasons:
+ Object identity. Objects can be arbitrarily complex, and subobjects
may be shared (for example, the list [a, a] refers to the same object a
twice). It can be vital that unpickling recreate an isomorphic object
graph, faithfully reproducing sharing.
+ Recursive objects. For example, after "L = []; L.append(L)", L is a
list, and L[0] is the same list. This is related to the object identity
point, and some sequences of pickle opcodes are subtle in order to
get the right result in all cases.
+ Things pickle doesn't know everything about. Examples of things pickle
does know everything about are Python's builtin scalar and container
types, like ints and tuples. They generally have opcodes dedicated to
them. For things like module references and instances of user-defined
classes, pickle's knowledge is limited. Historically, many enhancements
have been made to the pickle protocol in order to do a better (faster,
and/or more compact) job on those.
+ Backward compatibility and micro-optimization. As explained below,
pickle opcodes never go away, not even when better ways to do a thing
get invented. The repertoire of the PM just keeps growing over time.
For example, protocol 0 had two opcodes for building Python integers (INT
and LONG), protocol 1 added three more for more-efficient pickling of short
integers, and protocol 2 added two more for more-efficient pickling of
long integers (before protocol 2, the only ways to pickle a Python long
took time quadratic in the number of digits, for both pickling and
unpickling). "Opcode bloat" isn't so much a subtlety as a source of
wearying complication.
Pickle protocols:
For compatibility, the meaning of a pickle opcode never changes. Instead new
pickle opcodes get added, and each version's unpickler can handle all the
pickle opcodes in all protocol versions to date. So old pickles continue to
be readable forever. The pickler can generally be told to restrict itself to
the subset of opcodes available under previous protocol versions too, so that
users can create pickles under the current version readable by older
versions. However, a pickle does not contain its version number embedded
within it. If an older unpickler tries to read a pickle using a later
protocol, the result is most likely an exception due to seeing an unknown (in
the older unpickler) opcode.
The original pickle used what's now called "protocol 0", and what was called
"text mode" before Python 2.3. The entire pickle bytestream is made up of
printable 7-bit ASCII characters, plus the newline character, in protocol 0.
That's why it was called text mode. Protocol 0 is small and elegant, but
sometimes painfully inefficient.
The second major set of additions is now called "protocol 1", and was called
"binary mode" before Python 2.3. This added many opcodes with arguments
consisting of arbitrary bytes, including NUL bytes and unprintable "high bit"
bytes. Binary mode pickles can be substantially smaller than equivalent
text mode pickles, and sometimes faster too; e.g., BININT represents a 4-byte
int as 4 bytes following the opcode, which is cheaper to unpickle than the
(perhaps) 11-character decimal string attached to INT. Protocol 1 also added
a number of opcodes that operate on many stack elements at once (like APPENDS
and SETITEMS), and "shortcut" opcodes (like EMPTY_DICT and EMPTY_TUPLE).
The third major set of additions came in Python 2.3, and is called "protocol
2". This added:
- A better way to pickle instances of new-style classes (NEWOBJ).
- A way for a pickle to identify its protocol (PROTO).
- Time- and space- efficient pickling of long ints (LONG{1,4}).
- Shortcuts for small tuples (TUPLE{1,2,3}).
- Dedicated opcodes for bools (NEWTRUE, NEWFALSE).
- The "extension registry", a vector of popular objects that can be pushed
efficiently by index (EXT{1,2,4}). This is akin to the memo and GET, but
the registry contents are predefined (there's nothing akin to the memo's
PUT).
Another independent change with Python 2.3 is the abandonment of any
pretense that it might be safe to load pickles received from untrusted
parties -- no sufficient security analysis has been done to guarantee
this and there isn't a use case that warrants the expense of such an
analysis.
To this end, all tests for __safe_for_unpickling__ or for
copy_reg.safe_constructors are removed from the unpickling code.
References to these variables in the descriptions below are to be seen
as describing unpickling in Python 2.2 and before.
"""
# Meta-rule: Descriptions are stored in instances of descriptor objects,
# with plain constructors. No meta-language is defined from which
# descriptors could be constructed. If you want, e.g., XML, write a little
# program to generate XML from the objects.
##############################################################################
# Some pickle opcodes have an argument, following the opcode in the
# bytestream. An argument is of a specific type, described by an instance
# of ArgumentDescriptor. These are not to be confused with arguments taken
# off the stack -- ArgumentDescriptor applies only to arguments embedded in
# the opcode stream, immediately following an opcode.
# Represents the number of bytes consumed by an argument delimited by the
# next newline character.
UP_TO_NEWLINE = -1
# Represents the number of bytes consumed by a two-argument opcode where
# the first argument gives the number of bytes in the second argument.
TAKEN_FROM_ARGUMENT1 = -2 # num bytes is 1-byte unsigned int
TAKEN_FROM_ARGUMENT4 = -3 # num bytes is 4-byte signed little-endian int
class ArgumentDescriptor(object):
__slots__ = (
# name of descriptor record, also a module global name; a string
'name',
# length of argument, in bytes; an int; UP_TO_NEWLINE and
# TAKEN_FROM_ARGUMENT{1,4} are negative values for variable-length
# cases
'n',
# a function taking a file-like object, reading this kind of argument
# from the object at the current position, advancing the current
# position by n bytes, and returning the value of the argument
'reader',
# human-readable docs for this arg descriptor; a string
'doc',
)
def __init__(self, name, n, reader, doc):
assert isinstance(name, str)
self.name = name
assert isinstance(n, int) and (n >= 0 or
n in (UP_TO_NEWLINE,
TAKEN_FROM_ARGUMENT1,
TAKEN_FROM_ARGUMENT4))
self.n = n
self.reader = reader
assert isinstance(doc, str)
self.doc = doc
from struct import unpack as _unpack
def read_uint1(f):
r"""
>>> import StringIO
>>> read_uint1(StringIO.StringIO('\xff'))
255
"""
data = f.read(1)
if data:
return ord(data)
raise ValueError("not enough data in stream to read uint1")
uint1 = ArgumentDescriptor(
name='uint1',
n=1,
reader=read_uint1,
doc="One-byte unsigned integer.")
def read_uint2(f):
r"""
>>> import StringIO
>>> read_uint2(StringIO.StringIO('\xff\x00'))
255
>>> read_uint2(StringIO.StringIO('\xff\xff'))
65535
"""
data = f.read(2)
if len(data) == 2:
return _unpack("<H", data)[0]
raise ValueError("not enough data in stream to read uint2")
uint2 = ArgumentDescriptor(
name='uint2',
n=2,
reader=read_uint2,
doc="Two-byte unsigned integer, little-endian.")
def read_int4(f):
r"""
>>> import StringIO
>>> read_int4(StringIO.StringIO('\xff\x00\x00\x00'))
255
>>> read_int4(StringIO.StringIO('\x00\x00\x00\x80')) == -(2**31)
True
"""
data = f.read(4)
if len(data) == 4:
return _unpack("<i", data)[0]
raise ValueError("not enough data in stream to read int4")
int4 = ArgumentDescriptor(
name='int4',
n=4,
reader=read_int4,
doc="Four-byte signed integer, little-endian, 2's complement.")
def read_stringnl(f, decode=True, stripquotes=True):
r"""
>>> import StringIO
>>> read_stringnl(StringIO.StringIO("'abcd'\nefg\n"))
'abcd'
>>> read_stringnl(StringIO.StringIO("\n"))
Traceback (most recent call last):
...
ValueError: no string quotes around ''
>>> read_stringnl(StringIO.StringIO("\n"), stripquotes=False)
''
>>> read_stringnl(StringIO.StringIO("''\n"))
''
>>> read_stringnl(StringIO.StringIO('"abcd"'))
Traceback (most recent call last):
...
ValueError: no newline found when trying to read stringnl
Embedded escapes are undone in the result.
>>> read_stringnl(StringIO.StringIO(r"'a\n\\b\x00c\td'" + "\n'e'"))
'a\n\\b\x00c\td'
"""
data = f.readline()
if not data.endswith('\n'):
raise ValueError("no newline found when trying to read stringnl")
data = data[:-1] # lose the newline
if stripquotes:
for q in "'\"":
if data.startswith(q):
if not data.endswith(q):
raise ValueError("strinq quote %r not found at both "
"ends of %r" % (q, data))
data = data[1:-1]
break
else:
raise ValueError("no string quotes around %r" % data)
# I'm not sure when 'string_escape' was added to the std codecs; it's
# crazy not to use it if it's there.
if decode:
data = data.decode('string_escape')
return data
stringnl = ArgumentDescriptor(
name='stringnl',
n=UP_TO_NEWLINE,
reader=read_stringnl,
doc="""A newline-terminated string.
This is a repr-style string, with embedded escapes, and
bracketing quotes.
""")
def read_stringnl_noescape(f):
return read_stringnl(f, decode=False, stripquotes=False)
stringnl_noescape = ArgumentDescriptor(
name='stringnl_noescape',
n=UP_TO_NEWLINE,
reader=read_stringnl_noescape,
doc="""A newline-terminated string.
This is a str-style string, without embedded escapes,
or bracketing quotes. It should consist solely of
printable ASCII characters.
""")
def read_stringnl_noescape_pair(f):
r"""
>>> import StringIO
>>> read_stringnl_noescape_pair(StringIO.StringIO("Queue\nEmpty\njunk"))
'Queue Empty'
"""
return "%s %s" % (read_stringnl_noescape(f), read_stringnl_noescape(f))
stringnl_noescape_pair = ArgumentDescriptor(
name='stringnl_noescape_pair',
n=UP_TO_NEWLINE,
reader=read_stringnl_noescape_pair,
doc="""A pair of newline-terminated strings.
These are str-style strings, without embedded
escapes, or bracketing quotes. They should
consist solely of printable ASCII characters.
The pair is returned as a single string, with
a single blank separating the two strings.
""")
def read_string4(f):
r"""
>>> import StringIO
>>> read_string4(StringIO.StringIO("\x00\x00\x00\x00abc"))
''
>>> read_string4(StringIO.StringIO("\x03\x00\x00\x00abcdef"))
'abc'
>>> read_string4(StringIO.StringIO("\x00\x00\x00\x03abcdef"))
Traceback (most recent call last):
...
ValueError: expected 50331648 bytes in a string4, but only 6 remain
"""
n = read_int4(f)
if n < 0:
raise ValueError("string4 byte count < 0: %d" % n)
data = f.read(n)
if len(data) == n:
return data
raise ValueError("expected %d bytes in a string4, but only %d remain" %
(n, len(data)))
string4 = ArgumentDescriptor(
name="string4",
n=TAKEN_FROM_ARGUMENT4,
reader=read_string4,
doc="""A counted string.
The first argument is a 4-byte little-endian signed int giving
the number of bytes in the string, and the second argument is
that many bytes.
""")
def read_string1(f):
r"""
>>> import StringIO
>>> read_string1(StringIO.StringIO("\x00"))
''
>>> read_string1(StringIO.StringIO("\x03abcdef"))
'abc'
"""
n = read_uint1(f)
assert n >= 0
data = f.read(n)
if len(data) == n:
return data
raise ValueError("expected %d bytes in a string1, but only %d remain" %
(n, len(data)))
string1 = ArgumentDescriptor(
name="string1",
n=TAKEN_FROM_ARGUMENT1,
reader=read_string1,
doc="""A counted string.
The first argument is a 1-byte unsigned int giving the number
of bytes in the string, and the second argument is that many
bytes.
""")
def read_unicodestringnl(f):
r"""
>>> import StringIO
>>> read_unicodestringnl(StringIO.StringIO("abc\uabcd\njunk"))
u'abc\uabcd'
"""
data = f.readline()
if not data.endswith('\n'):
raise ValueError("no newline found when trying to read "
"unicodestringnl")
data = data[:-1] # lose the newline
return unicode(data, 'raw-unicode-escape')
unicodestringnl = ArgumentDescriptor(
name='unicodestringnl',
n=UP_TO_NEWLINE,
reader=read_unicodestringnl,
doc="""A newline-terminated Unicode string.
This is raw-unicode-escape encoded, so consists of
printable ASCII characters, and may contain embedded
escape sequences.
""")
def read_unicodestring4(f):
r"""
# bug 24549
#>>> import StringIO
#>>> s = u'abcd\uabcd'
#>>> enc = s.encode('utf-8')
#>>> enc
#'abcd\xea\xaf\x8d'
#>>> n = chr(len(enc)) + chr(0) * 3 # little-endian 4-byte length
#>>> t = read_unicodestring4(StringIO.StringIO(n + enc + 'junk'))
#>>> s == t
#True
#
#>>> read_unicodestring4(StringIO.StringIO(n + enc[:-1]))
#Traceback (most recent call last):
#...
#ValueError: expected 7 bytes in a unicodestring4, but only 6 remain
"""
n = read_int4(f)
if n < 0:
raise ValueError("unicodestring4 byte count < 0: %d" % n)
data = f.read(n)
if len(data) == n:
return unicode(data, 'utf-8')
raise ValueError("expected %d bytes in a unicodestring4, but only %d "
"remain" % (n, len(data)))
unicodestring4 = ArgumentDescriptor(
name="unicodestring4",
n=TAKEN_FROM_ARGUMENT4,
reader=read_unicodestring4,
doc="""A counted Unicode string.
The first argument is a 4-byte little-endian signed int
giving the number of bytes in the string, and the second
argument-- the UTF-8 encoding of the Unicode string --
contains that many bytes.
""")
def read_decimalnl_short(f):
r"""
>>> import StringIO
>>> read_decimalnl_short(StringIO.StringIO("1234\n56"))
1234
>>> read_decimalnl_short(StringIO.StringIO("1234L\n56"))
Traceback (most recent call last):
...
ValueError: trailing 'L' not allowed in '1234L'
"""
s = read_stringnl(f, decode=False, stripquotes=False)
if s.endswith("L"):
raise ValueError("trailing 'L' not allowed in %r" % s)
# It's not necessarily true that the result fits in a Python short int:
# the pickle may have been written on a 64-bit box. There's also a hack
# for True and False here.
if s == "00":
return False
elif s == "01":
return True
try:
return int(s)
except OverflowError:
return long(s)
def read_decimalnl_long(f):
r"""
>>> import StringIO
>>> read_decimalnl_long(StringIO.StringIO("1234\n56"))
Traceback (most recent call last):
...
ValueError: trailing 'L' required in '1234'
Someday the trailing 'L' will probably go away from this output.
>>> read_decimalnl_long(StringIO.StringIO("1234L\n56"))
1234L
>>> read_decimalnl_long(StringIO.StringIO("123456789012345678901234L\n6"))
123456789012345678901234L
"""
s = read_stringnl(f, decode=False, stripquotes=False)
if not s.endswith("L"):
raise ValueError("trailing 'L' required in %r" % s)
return long(s)
decimalnl_short = ArgumentDescriptor(
name='decimalnl_short',
n=UP_TO_NEWLINE,
reader=read_decimalnl_short,
doc="""A newline-terminated decimal integer literal.
This never has a trailing 'L', and the integer fit
in a short Python int on the box where the pickle
was written -- but there's no guarantee it will fit
in a short Python int on the box where the pickle
is read.
""")
decimalnl_long = ArgumentDescriptor(
name='decimalnl_long',
n=UP_TO_NEWLINE,
reader=read_decimalnl_long,
doc="""A newline-terminated decimal integer literal.
This has a trailing 'L', and can represent integers
of any size.
""")
def read_floatnl(f):
r"""
>>> import StringIO
>>> read_floatnl(StringIO.StringIO("-1.25\n6"))
-1.25
"""
s = read_stringnl(f, decode=False, stripquotes=False)
return float(s)
floatnl = ArgumentDescriptor(
name='floatnl',
n=UP_TO_NEWLINE,
reader=read_floatnl,
doc="""A newline-terminated decimal floating literal.
In general this requires 17 significant digits for roundtrip
identity, and pickling then unpickling infinities, NaNs, and
minus zero doesn't work across boxes, or on some boxes even
on itself (e.g., Windows can't read the strings it produces
for infinities or NaNs).
""")
def read_float8(f):
r"""
#>>> import StringIO, struct
#>>> raw = struct.pack(">d", -1.25)
#>>> raw
#'\xbf\xf4\x00\x00\x00\x00\x00\x00'
#>>> read_float8(StringIO.StringIO(raw + "\n"))
#-1.25
"""
data = f.read(8)
if len(data) == 8:
return _unpack(">d", data)[0]
raise ValueError("not enough data in stream to read float8")
float8 = ArgumentDescriptor(
name='float8',
n=8,
reader=read_float8,
doc="""An 8-byte binary representation of a float, big-endian.
The format is unique to Python, and shared with the struct
module (format string '>d') "in theory" (the struct and cPickle
implementations don't share the code -- they should). It's
strongly related to the IEEE-754 double format, and, in normal
cases, is in fact identical to the big-endian 754 double format.
On other boxes the dynamic range is limited to that of a 754
double, and "add a half and chop" rounding is used to reduce
the precision to 53 bits. However, even on a 754 box,
infinities, NaNs, and minus zero may not be handled correctly
(may not survive roundtrip pickling intact).
""")
# Protocol 2 formats
from pickle import decode_long
def read_long1(f):
r"""
>>> import StringIO
>>> read_long1(StringIO.StringIO("\x00"))
0L
>>> read_long1(StringIO.StringIO("\x02\xff\x00"))
255L
>>> read_long1(StringIO.StringIO("\x02\xff\x7f"))
32767L
>>> read_long1(StringIO.StringIO("\x02\x00\xff"))
-256L
>>> read_long1(StringIO.StringIO("\x02\x00\x80"))
-32768L
"""
n = read_uint1(f)
data = f.read(n)
if len(data) != n:
raise ValueError("not enough data in stream to read long1")
return decode_long(data)
long1 = ArgumentDescriptor(
name="long1",
n=TAKEN_FROM_ARGUMENT1,
reader=read_long1,
doc="""A binary long, little-endian, using 1-byte size.
This first reads one byte as an unsigned size, then reads that
many bytes and interprets them as a little-endian 2's-complement long.
If the size is 0, that's taken as a shortcut for the long 0L.
""")
def read_long4(f):
r"""
>>> import StringIO
>>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\xff\x00"))
255L
>>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\xff\x7f"))
32767L
>>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\x00\xff"))
-256L
>>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\x00\x80"))
-32768L
>>> read_long4(StringIO.StringIO("\x00\x00\x00\x00"))
0L
"""
n = read_int4(f)
if n < 0:
raise ValueError("long4 byte count < 0: %d" % n)
data = f.read(n)
if len(data) != n:
raise ValueError("not enough data in stream to read long4")
return decode_long(data)
long4 = ArgumentDescriptor(
name="long4",
n=TAKEN_FROM_ARGUMENT4,
reader=read_long4,
doc="""A binary representation of a long, little-endian.
This first reads four bytes as a signed size (but requires the
size to be >= 0), then reads that many bytes and interprets them
as a little-endian 2's-complement long. If the size is 0, that's taken
as a shortcut for the long 0L, although LONG1 should really be used
then instead (and in any case where # of bytes < 256).
""")
##############################################################################
# Object descriptors. The stack used by the pickle machine holds objects,
# and in the stack_before and stack_after attributes of OpcodeInfo
# descriptors we need names to describe the various types of objects that can
# appear on the stack.
class StackObject(object):
__slots__ = (
# name of descriptor record, for info only
'name',
# type of object, or tuple of type objects (meaning the object can
# be of any type in the tuple)
'obtype',
# human-readable docs for this kind of stack object; a string
'doc',
)
def __init__(self, name, obtype, doc):
assert isinstance(name, str)
self.name = name
assert isinstance(obtype, type) or isinstance(obtype, tuple)
if isinstance(obtype, tuple):
for contained in obtype:
assert isinstance(contained, type)
self.obtype = obtype
assert isinstance(doc, str)
self.doc = doc
def __repr__(self):
return self.name
pyint = StackObject(
name='int',
obtype=int,
doc="A short (as opposed to long) Python integer object.")
pylong = StackObject(
name='long',
obtype=long,
doc="A long (as opposed to short) Python integer object.")
pyinteger_or_bool = StackObject(
name='int_or_bool',
obtype=(int, long, bool),
doc="A Python integer object (short or long), or "
"a Python bool.")
pybool = StackObject(
name='bool',
obtype=(bool,),
doc="A Python bool object.")
pyfloat = StackObject(
name='float',
obtype=float,
doc="A Python float object.")
pystring = StackObject(
name='str',
obtype=str,
doc="A Python string object.")
pyunicode = StackObject(
name='unicode',
obtype=unicode,
doc="A Python Unicode string object.")
pynone = StackObject(
name="None",
obtype=type(None),
doc="The Python None object.")
pytuple = StackObject(
name="tuple",
obtype=tuple,
doc="A Python tuple object.")
pylist = StackObject(
name="list",
obtype=list,
doc="A Python list object.")
pydict = StackObject(
name="dict",
obtype=dict,
doc="A Python dict object.")
anyobject = StackObject(
name='any',
obtype=object,
doc="Any kind of object whatsoever.")
markobject = StackObject(
name="mark",
obtype=StackObject,
doc="""'The mark' is a unique object.
Opcodes that operate on a variable number of objects
generally don't embed the count of objects in the opcode,
or pull it off the stack. Instead the MARK opcode is used
to push a special marker object on the stack, and then
some other opcodes grab all the objects from the top of
the stack down to (but not including) the topmost marker
object.
""")
stackslice = StackObject(
name="stackslice",
obtype=StackObject,
doc="""An object representing a contiguous slice of the stack.
This is used in conjunction with markobject, to represent all
of the stack following the topmost markobject. For example,
the POP_MARK opcode changes the stack from
[..., markobject, stackslice]
to
[...]
No matter how many objects are on the stack after the topmost
markobject, POP_MARK gets rid of all of them (including the
topmost markobject too).
""")
##############################################################################
# Descriptors for pickle opcodes.
class OpcodeInfo(object):
__slots__ = (
# symbolic name of opcode; a string
'name',
# the code used in a bytestream to represent the opcode; a
# one-character string
'code',
# If the opcode has an argument embedded in the byte string, an
# instance of ArgumentDescriptor specifying its type. Note that
# arg.reader(s) can be used to read and decode the argument from
# the bytestream s, and arg.doc documents the format of the raw
# argument bytes. If the opcode doesn't have an argument embedded
# in the bytestream, arg should be None.
'arg',
# what the stack looks like before this opcode runs; a list
'stack_before',
# what the stack looks like after this opcode runs; a list
'stack_after',
# the protocol number in which this opcode was introduced; an int
'proto',
# human-readable docs for this opcode; a string
'doc',
)
def __init__(self, name, code, arg,
stack_before, stack_after, proto, doc):
assert isinstance(name, str)
self.name = name
assert isinstance(code, str)
assert len(code) == 1
self.code = code
assert arg is None or isinstance(arg, ArgumentDescriptor)
self.arg = arg
assert isinstance(stack_before, list)
for x in stack_before:
assert isinstance(x, StackObject)
self.stack_before = stack_before
assert isinstance(stack_after, list)
for x in stack_after:
assert isinstance(x, StackObject)
self.stack_after = stack_after
assert isinstance(proto, int) and 0 <= proto <= 2
self.proto = proto
assert isinstance(doc, str)
self.doc = doc
I = OpcodeInfo
opcodes = [
# Ways to spell integers.
I(name='INT',
code='I',
arg=decimalnl_short,
stack_before=[],
stack_after=[pyinteger_or_bool],
proto=0,
doc="""Push an integer or bool.
The argument is a newline-terminated decimal literal string.
The intent may have been that this always fit in a short Python int,
but INT can be generated in pickles written on a 64-bit box that
require a Python long on a 32-bit box. The difference between this
and LONG then is that INT skips a trailing 'L', and produces a short
int whenever possible.
Another difference is due to that, when bool was introduced as a
distinct type in 2.3, builtin names True and False were also added to
2.2.2, mapping to ints 1 and 0. For compatibility in both directions,
True gets pickled as INT + "I01\\n", and False as INT + "I00\\n".
Leading zeroes are never produced for a genuine integer. The 2.3
(and later) unpicklers special-case these and return bool instead;
earlier unpicklers ignore the leading "0" and return the int.
"""),
I(name='BININT',
code='J',
arg=int4,
stack_before=[],
stack_after=[pyint],
proto=1,
doc="""Push a four-byte signed integer.
This handles the full range of Python (short) integers on a 32-bit
box, directly as binary bytes (1 for the opcode and 4 for the integer).
If the integer is non-negative and fits in 1 or 2 bytes, pickling via
BININT1 or BININT2 saves space.
"""),
I(name='BININT1',
code='K',
arg=uint1,
stack_before=[],
stack_after=[pyint],
proto=1,
doc="""Push a one-byte unsigned integer.
This is a space optimization for pickling very small non-negative ints,
in range(256).
"""),
I(name='BININT2',
code='M',
arg=uint2,
stack_before=[],
stack_after=[pyint],
proto=1,
doc="""Push a two-byte unsigned integer.
This is a space optimization for pickling small positive ints, in
range(256, 2**16). Integers in range(256) can also be pickled via
BININT2, but BININT1 instead saves a byte.
"""),
I(name='LONG',
code='L',
arg=decimalnl_long,
stack_before=[],
stack_after=[pylong],
proto=0,
doc="""Push a long integer.
The same as INT, except that the literal ends with 'L', and always
unpickles to a Python long. There doesn't seem to be a real purpose to
the trailing 'L'.
Note that LONG takes time quadratic in the number of digits when
unpickling (this is simply due to the nature of decimal->binary
conversion). Proto 2 added linear-time (in C; still quadratic-time
in Python) LONG1 and LONG4 opcodes.
"""),
I(name="LONG1",
code='\x8a',
arg=long1,
stack_before=[],
stack_after=[pylong],
proto=2,
doc="""Long integer using one-byte length.
A more efficient encoding of a Python long; the long1 encoding
says it all."""),
I(name="LONG4",
code='\x8b',
arg=long4,
stack_before=[],
stack_after=[pylong],
proto=2,
doc="""Long integer using found-byte length.
A more efficient encoding of a Python long; the long4 encoding
says it all."""),
# Ways to spell strings (8-bit, not Unicode).
I(name='STRING',
code='S',
arg=stringnl,
stack_before=[],
stack_after=[pystring],
proto=0,
doc="""Push a Python string object.
The argument is a repr-style string, with bracketing quote characters,
and perhaps embedded escapes. The argument extends until the next
newline character.
"""),
I(name='BINSTRING',
code='T',
arg=string4,
stack_before=[],
stack_after=[pystring],
proto=1,
doc="""Push a Python string object.
There are two arguments: the first is a 4-byte little-endian signed int
giving the number of bytes in the string, and the second is that many
bytes, which are taken literally as the string content.
"""),
I(name='SHORT_BINSTRING',
code='U',
arg=string1,
stack_before=[],
stack_after=[pystring],
proto=1,
doc="""Push a Python string object.
There are two arguments: the first is a 1-byte unsigned int giving
the number of bytes in the string, and the second is that many bytes,
which are taken literally as the string content.
"""),
# Ways to spell None.
I(name='NONE',
code='N',
arg=None,
stack_before=[],
stack_after=[pynone],
proto=0,
doc="Push None on the stack."),
# Ways to spell bools, starting with proto 2. See INT for how this was
# done before proto 2.
I(name='NEWTRUE',
code='\x88',
arg=None,
stack_before=[],
stack_after=[pybool],
proto=2,
doc="""True.
Push True onto the stack."""),
I(name='NEWFALSE',
code='\x89',
arg=None,
stack_before=[],
stack_after=[pybool],
proto=2,
doc="""True.
Push False onto the stack."""),
# Ways to spell Unicode strings.
I(name='UNICODE',
code='V',
arg=unicodestringnl,
stack_before=[],
stack_after=[pyunicode],
proto=0, # this may be pure-text, but it's a later addition
doc="""Push a Python Unicode string object.
The argument is a raw-unicode-escape encoding of a Unicode string,
and so may contain embedded escape sequences. The argument extends
until the next newline character.
"""),
I(name='BINUNICODE',
code='X',
arg=unicodestring4,
stack_before=[],
stack_after=[pyunicode],
proto=1,
doc="""Push a Python Unicode string object.
There are two arguments: the first is a 4-byte little-endian signed int
giving the number of bytes in the string. The second is that many
bytes, and is the UTF-8 encoding of the Unicode string.
"""),
# Ways to spell floats.
I(name='FLOAT',
code='F',
arg=floatnl,
stack_before=[],
stack_after=[pyfloat],
proto=0,
doc="""Newline-terminated decimal float literal.
The argument is repr(a_float), and in general requires 17 significant
digits for roundtrip conversion to be an identity (this is so for
IEEE-754 double precision values, which is what Python float maps to
on most boxes).
In general, FLOAT cannot be used to transport infinities, NaNs, or
minus zero across boxes (or even on a single box, if the platform C
library can't read the strings it produces for such things -- Windows
is like that), but may do less damage than BINFLOAT on boxes with
greater precision or dynamic range than IEEE-754 double.
"""),
I(name='BINFLOAT',
code='G',
arg=float8,
stack_before=[],
stack_after=[pyfloat],
proto=1,
doc="""Float stored in binary form, with 8 bytes of data.
This generally requires less than half the space of FLOAT encoding.
In general, BINFLOAT cannot be used to transport infinities, NaNs, or
minus zero, raises an exception if the exponent exceeds the range of
an IEEE-754 double, and retains no more than 53 bits of precision (if
there are more than that, "add a half and chop" rounding is used to
cut it back to 53 significant bits).
"""),
# Ways to build lists.
I(name='EMPTY_LIST',
code=']',
arg=None,
stack_before=[],
stack_after=[pylist],
proto=1,
doc="Push an empty list."),
I(name='APPEND',
code='a',
arg=None,
stack_before=[pylist, anyobject],
stack_after=[pylist],
proto=0,
doc="""Append an object to a list.
Stack before: ... pylist anyobject
Stack after: ... pylist+[anyobject]
although pylist is really extended in-place.
"""),
I(name='APPENDS',
code='e',
arg=None,
stack_before=[pylist, markobject, stackslice],
stack_after=[pylist],
proto=1,
doc="""Extend a list by a slice of stack objects.
Stack before: ... pylist markobject stackslice
Stack after: ... pylist+stackslice
although pylist is really extended in-place.
"""),
I(name='LIST',
code='l',
arg=None,
stack_before=[markobject, stackslice],
stack_after=[pylist],
proto=0,
doc="""Build a list out of the topmost stack slice, after markobject.
All the stack entries following the topmost markobject are placed into
a single Python list, which single list object replaces all of the
stack from the topmost markobject onward. For example,
Stack before: ... markobject 1 2 3 'abc'
Stack after: ... [1, 2, 3, 'abc']
"""),
# Ways to build tuples.
I(name='EMPTY_TUPLE',
code=')',
arg=None,
stack_before=[],
stack_after=[pytuple],
proto=1,
doc="Push an empty tuple."),
I(name='TUPLE',
code='t',
arg=None,
stack_before=[markobject, stackslice],
stack_after=[pytuple],
proto=0,
doc="""Build a tuple out of the topmost stack slice, after markobject.
All the stack entries following the topmost markobject are placed into
a single Python tuple, which single tuple object replaces all of the
stack from the topmost markobject onward. For example,
Stack before: ... markobject 1 2 3 'abc'
Stack after: ... (1, 2, 3, 'abc')
"""),
I(name='TUPLE1',
code='\x85',
arg=None,
stack_before=[anyobject],
stack_after=[pytuple],
proto=2,
doc="""Build a one-tuple out of the topmost item on the stack.
This code pops one value off the stack and pushes a tuple of
length 1 whose one item is that value back onto it. In other
words:
stack[-1] = tuple(stack[-1:])
"""),
I(name='TUPLE2',
code='\x86',
arg=None,
stack_before=[anyobject, anyobject],
stack_after=[pytuple],
proto=2,
doc="""Build a two-tuple out of the top two items on the stack.
This code pops two values off the stack and pushes a tuple of
length 2 whose items are those values back onto it. In other
words:
stack[-2:] = [tuple(stack[-2:])]
"""),
I(name='TUPLE3',
code='\x87',
arg=None,
stack_before=[anyobject, anyobject, anyobject],
stack_after=[pytuple],
proto=2,
doc="""Build a three-tuple out of the top three items on the stack.
This code pops three values off the stack and pushes a tuple of
length 3 whose items are those values back onto it. In other
words:
stack[-3:] = [tuple(stack[-3:])]
"""),
# Ways to build dicts.
I(name='EMPTY_DICT',
code='}',
arg=None,
stack_before=[],
stack_after=[pydict],
proto=1,
doc="Push an empty dict."),
I(name='DICT',
code='d',
arg=None,
stack_before=[markobject, stackslice],
stack_after=[pydict],
proto=0,
doc="""Build a dict out of the topmost stack slice, after markobject.
All the stack entries following the topmost markobject are placed into
a single Python dict, which single dict object replaces all of the
stack from the topmost markobject onward. The stack slice alternates
key, value, key, value, .... For example,
Stack before: ... markobject 1 2 3 'abc'
Stack after: ... {1: 2, 3: 'abc'}
"""),
I(name='SETITEM',
code='s',
arg=None,
stack_before=[pydict, anyobject, anyobject],
stack_after=[pydict],
proto=0,
doc="""Add a key+value pair to an existing dict.
Stack before: ... pydict key value
Stack after: ... pydict
where pydict has been modified via pydict[key] = value.
"""),
I(name='SETITEMS',
code='u',
arg=None,
stack_before=[pydict, markobject, stackslice],
stack_after=[pydict],
proto=1,
doc="""Add an arbitrary number of key+value pairs to an existing dict.
The slice of the stack following the topmost markobject is taken as
an alternating sequence of keys and values, added to the dict
immediately under the topmost markobject. Everything at and after the
topmost markobject is popped, leaving the mutated dict at the top
of the stack.
Stack before: ... pydict markobject key_1 value_1 ... key_n value_n
Stack after: ... pydict
where pydict has been modified via pydict[key_i] = value_i for i in
1, 2, ..., n, and in that order.
"""),
# Stack manipulation.
I(name='POP',
code='0',
arg=None,
stack_before=[anyobject],
stack_after=[],
proto=0,
doc="Discard the top stack item, shrinking the stack by one item."),
I(name='DUP',
code='2',
arg=None,
stack_before=[anyobject],
stack_after=[anyobject, anyobject],
proto=0,
doc="Push the top stack item onto the stack again, duplicating it."),
I(name='MARK',
code='(',
arg=None,
stack_before=[],
stack_after=[markobject],
proto=0,
doc="""Push markobject onto the stack.
markobject is a unique object, used by other opcodes to identify a
region of the stack containing a variable number of objects for them
to work on. See markobject.doc for more detail.
"""),
I(name='POP_MARK',
code='1',
arg=None,
stack_before=[markobject, stackslice],
stack_after=[],
proto=1,
doc="""Pop all the stack objects at and above the topmost markobject.
When an opcode using a variable number of stack objects is done,
POP_MARK is used to remove those objects, and to remove the markobject
that delimited their starting position on the stack.
"""),
# Memo manipulation. There are really only two operations (get and put),
# each in all-text, "short binary", and "long binary" flavors.
I(name='GET',
code='g',
arg=decimalnl_short,
stack_before=[],
stack_after=[anyobject],
proto=0,
doc="""Read an object from the memo and push it on the stack.
The index of the memo object to push is given by the newline-terminated
decimal string following. BINGET and LONG_BINGET are space-optimized
versions.
"""),
I(name='BINGET',
code='h',
arg=uint1,
stack_before=[],
stack_after=[anyobject],
proto=1,
doc="""Read an object from the memo and push it on the stack.
The index of the memo object to push is given by the 1-byte unsigned
integer following.
"""),
I(name='LONG_BINGET',
code='j',
arg=int4,
stack_before=[],
stack_after=[anyobject],
proto=1,
doc="""Read an object from the memo and push it on the stack.
The index of the memo object to push is given by the 4-byte signed
little-endian integer following.
"""),
I(name='PUT',
code='p',
arg=decimalnl_short,
stack_before=[],
stack_after=[],
proto=0,
doc="""Store the stack top into the memo. The stack is not popped.
The index of the memo location to write into is given by the newline-
terminated decimal string following. BINPUT and LONG_BINPUT are
space-optimized versions.
"""),
I(name='BINPUT',
code='q',
arg=uint1,
stack_before=[],
stack_after=[],
proto=1,
doc="""Store the stack top into the memo. The stack is not popped.
The index of the memo location to write into is given by the 1-byte
unsigned integer following.
"""),
I(name='LONG_BINPUT',
code='r',
arg=int4,
stack_before=[],
stack_after=[],
proto=1,
doc="""Store the stack top into the memo. The stack is not popped.
The index of the memo location to write into is given by the 4-byte
signed little-endian integer following.
"""),
# Access the extension registry (predefined objects). Akin to the GET
# family.
I(name='EXT1',
code='\x82',
arg=uint1,
stack_before=[],
stack_after=[anyobject],
proto=2,
doc="""Extension code.
This code and the similar EXT2 and EXT4 allow using a registry
of popular objects that are pickled by name, typically classes.
It is envisioned that through a global negotiation and
registration process, third parties can set up a mapping between
ints and object names.
In order to guarantee pickle interchangeability, the extension
code registry ought to be global, although a range of codes may
be reserved for private use.
EXT1 has a 1-byte integer argument. This is used to index into the
extension registry, and the object at that index is pushed on the stack.
"""),
I(name='EXT2',
code='\x83',
arg=uint2,
stack_before=[],
stack_after=[anyobject],
proto=2,
doc="""Extension code.
See EXT1. EXT2 has a two-byte integer argument.
"""),
I(name='EXT4',
code='\x84',
arg=int4,
stack_before=[],
stack_after=[anyobject],
proto=2,
doc="""Extension code.
See EXT1. EXT4 has a four-byte integer argument.
"""),
# Push a class object, or module function, on the stack, via its module
# and name.
I(name='GLOBAL',
code='c',
arg=stringnl_noescape_pair,
stack_before=[],
stack_after=[anyobject],
proto=0,
doc="""Push a global object (module.attr) on the stack.
Two newline-terminated strings follow the GLOBAL opcode. The first is
taken as a module name, and the second as a class name. The class
object module.class is pushed on the stack. More accurately, the
object returned by self.find_class(module, class) is pushed on the
stack, so unpickling subclasses can override this form of lookup.
"""),
# Ways to build objects of classes pickle doesn't know about directly
# (user-defined classes). I despair of documenting this accurately
# and comprehensibly -- you really have to read the pickle code to
# find all the special cases.
I(name='REDUCE',
code='R',
arg=None,
stack_before=[anyobject, anyobject],
stack_after=[anyobject],
proto=0,
doc="""Push an object built from a callable and an argument tuple.
The opcode is named to remind of the __reduce__() method.
Stack before: ... callable pytuple
Stack after: ... callable(*pytuple)
The callable and the argument tuple are the first two items returned
by a __reduce__ method. Applying the callable to the argtuple is
supposed to reproduce the original object, or at least get it started.
If the __reduce__ method returns a 3-tuple, the last component is an
argument to be passed to the object's __setstate__, and then the REDUCE
opcode is followed by code to create setstate's argument, and then a
BUILD opcode to apply __setstate__ to that argument.
If type(callable) is not ClassType, REDUCE complains unless the
callable has been registered with the copy_reg module's
safe_constructors dict, or the callable has a magic
'__safe_for_unpickling__' attribute with a true value. I'm not sure
why it does this, but I've sure seen this complaint often enough when
I didn't want to <wink>.
"""),
I(name='BUILD',
code='b',
arg=None,
stack_before=[anyobject, anyobject],
stack_after=[anyobject],
proto=0,
doc="""Finish building an object, via __setstate__ or dict update.
Stack before: ... anyobject argument
Stack after: ... anyobject
where anyobject may have been mutated, as follows:
If the object has a __setstate__ method,
anyobject.__setstate__(argument)
is called.
Else the argument must be a dict, the object must have a __dict__, and
the object is updated via
anyobject.__dict__.update(argument)
This may raise RuntimeError in restricted execution mode (which
disallows access to __dict__ directly); in that case, the object
is updated instead via
for k, v in argument.items():
anyobject[k] = v
"""),
I(name='INST',
code='i',
arg=stringnl_noescape_pair,
stack_before=[markobject, stackslice],
stack_after=[anyobject],
proto=0,
doc="""Build a class instance.
This is the protocol 0 version of protocol 1's OBJ opcode.
INST is followed by two newline-terminated strings, giving a
module and class name, just as for the GLOBAL opcode (and see
GLOBAL for more details about that). self.find_class(module, name)
is used to get a class object.
In addition, all the objects on the stack following the topmost
markobject are gathered into a tuple and popped (along with the
topmost markobject), just as for the TUPLE opcode.
Now it gets complicated. If all of these are true:
+ The argtuple is empty (markobject was at the top of the stack
at the start).
+ It's an old-style class object (the type of the class object is
ClassType).
+ The class object does not have a __getinitargs__ attribute.
then we want to create an old-style class instance without invoking
its __init__() method (pickle has waffled on this over the years; not
calling __init__() is current wisdom). In this case, an instance of
an old-style dummy class is created, and then we try to rebind its
__class__ attribute to the desired class object. If this succeeds,
the new instance object is pushed on the stack, and we're done. In
restricted execution mode it can fail (assignment to __class__ is
disallowed), and I'm not really sure what happens then -- it looks
like the code ends up calling the class object's __init__ anyway,
via falling into the next case.
Else (the argtuple is not empty, it's not an old-style class object,
or the class object does have a __getinitargs__ attribute), the code
first insists that the class object have a __safe_for_unpickling__
attribute. Unlike as for the __safe_for_unpickling__ check in REDUCE,
it doesn't matter whether this attribute has a true or false value, it
only matters whether it exists (XXX this is a bug; cPickle
requires the attribute to be true). If __safe_for_unpickling__
doesn't exist, UnpicklingError is raised.
Else (the class object does have a __safe_for_unpickling__ attr),
the class object obtained from INST's arguments is applied to the
argtuple obtained from the stack, and the resulting instance object
is pushed on the stack.
NOTE: checks for __safe_for_unpickling__ went away in Python 2.3.
"""),
I(name='OBJ',
code='o',
arg=None,
stack_before=[markobject, anyobject, stackslice],
stack_after=[anyobject],
proto=1,
doc="""Build a class instance.
This is the protocol 1 version of protocol 0's INST opcode, and is
very much like it. The major difference is that the class object
is taken off the stack, allowing it to be retrieved from the memo
repeatedly if several instances of the same class are created. This
can be much more efficient (in both time and space) than repeatedly
embedding the module and class names in INST opcodes.
Unlike INST, OBJ takes no arguments from the opcode stream. Instead
the class object is taken off the stack, immediately above the
topmost markobject:
Stack before: ... markobject classobject stackslice
Stack after: ... new_instance_object
As for INST, the remainder of the stack above the markobject is
gathered into an argument tuple, and then the logic seems identical,
except that no __safe_for_unpickling__ check is done (XXX this is
a bug; cPickle does test __safe_for_unpickling__). See INST for
the gory details.
NOTE: In Python 2.3, INST and OBJ are identical except for how they
get the class object. That was always the intent; the implementations
had diverged for accidental reasons.
"""),
I(name='NEWOBJ',
code='\x81',
arg=None,
stack_before=[anyobject, anyobject],
stack_after=[anyobject],
proto=2,
doc="""Build an object instance.
The stack before should be thought of as containing a class
object followed by an argument tuple (the tuple being the stack
top). Call these cls and args. They are popped off the stack,
and the value returned by cls.__new__(cls, *args) is pushed back
onto the stack.
"""),
# Machine control.
I(name='PROTO',
code='\x80',
arg=uint1,
stack_before=[],
stack_after=[],
proto=2,
doc="""Protocol version indicator.
For protocol 2 and above, a pickle must start with this opcode.
The argument is the protocol version, an int in range(2, 256).
"""),
I(name='STOP',
code='.',
arg=None,
stack_before=[anyobject],
stack_after=[],
proto=0,
doc="""Stop the unpickling machine.
Every pickle ends with this opcode. The object at the top of the stack
is popped, and that's the result of unpickling. The stack should be
empty then.
"""),
# Ways to deal with persistent IDs.
I(name='PERSID',
code='P',
arg=stringnl_noescape,
stack_before=[],
stack_after=[anyobject],
proto=0,
doc="""Push an object identified by a persistent ID.
The pickle module doesn't define what a persistent ID means. PERSID's
argument is a newline-terminated str-style (no embedded escapes, no
bracketing quote characters) string, which *is* "the persistent ID".
The unpickler passes this string to self.persistent_load(). Whatever
object that returns is pushed on the stack. There is no implementation
of persistent_load() in Python's unpickler: it must be supplied by an
unpickler subclass.
"""),
I(name='BINPERSID',
code='Q',
arg=None,
stack_before=[anyobject],
stack_after=[anyobject],
proto=1,
doc="""Push an object identified by a persistent ID.
Like PERSID, except the persistent ID is popped off the stack (instead
of being a string embedded in the opcode bytestream). The persistent
ID is passed to self.persistent_load(), and whatever object that
returns is pushed on the stack. See PERSID for more detail.
"""),
]
del I
# Verify uniqueness of .name and .code members.
name2i = {}
code2i = {}
for i, d in enumerate(opcodes):
if d.name in name2i:
raise ValueError("repeated name %r at indices %d and %d" %
(d.name, name2i[d.name], i))
if d.code in code2i:
raise ValueError("repeated code %r at indices %d and %d" %
(d.code, code2i[d.code], i))
name2i[d.name] = i
code2i[d.code] = i
del name2i, code2i, i, d
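# Illustrative sketch, not part of the original module: a class whose
# pickling exercises the REDUCE and BUILD opcodes described in the
# docstrings above.  __reduce__ returns (callable, args, state); the
# unpickler applies the callable to args (REDUCE), then feeds state to
# __setstate__ (BUILD).
class _ReduceBuildExample(object):
    def __init__(self, value=None):
        self.value = value
    def __reduce__(self):
        # (callable, argtuple, argument for __setstate__)
        return (_ReduceBuildExample, (), {'value': self.value})
    def __setstate__(self, state):
        self.value = state['value']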
##############################################################################
# Build a code2op dict, mapping opcode characters to OpcodeInfo records.
# Also ensure we've got the same stuff as pickle.py, although the
# introspection here is dicey.
code2op = {}
for d in opcodes:
code2op[d.code] = d
del d
def assure_pickle_consistency(verbose=False):
import pickle, re
copy = code2op.copy()
for name in pickle.__all__:
if not re.match("[A-Z][A-Z0-9_]+$", name):
if verbose:
print "skipping %r: it doesn't look like an opcode name" % name
continue
picklecode = getattr(pickle, name)
if not isinstance(picklecode, str) or len(picklecode) != 1:
if verbose:
print ("skipping %r: value %r doesn't look like a pickle "
"code" % (name, picklecode))
continue
if picklecode in copy:
if verbose:
print "checking name %r w/ code %r for consistency" % (
name, picklecode)
d = copy[picklecode]
if d.name != name:
raise ValueError("for pickle code %r, pickle.py uses name %r "
"but we're using name %r" % (picklecode,
name,
d.name))
# Forget this one. Any left over in copy at the end are a problem
# of a different kind.
del copy[picklecode]
else:
raise ValueError("pickle.py appears to have a pickle opcode with "
"name %r and code %r, but we don't" %
(name, picklecode))
if copy:
msg = ["we appear to have pickle opcodes that pickle.py doesn't have:"]
for code, d in copy.items():
msg.append(" name %r with code %r" % (d.name, code))
raise ValueError("\n".join(msg))
assure_pickle_consistency()
del assure_pickle_consistency
##############################################################################
# A pickle opcode generator.
def genops(pickle):
"""Generate all the opcodes in a pickle.
'pickle' is a file-like object, or string, containing the pickle.
Each opcode in the pickle is generated, from the current pickle position,
stopping after a STOP opcode is delivered. A triple is generated for
each opcode:
opcode, arg, pos
opcode is an OpcodeInfo record, describing the current opcode.
If the opcode has an argument embedded in the pickle, arg is its decoded
value, as a Python object. If the opcode doesn't have an argument, arg
is None.
If the pickle has a tell() method, pos was the value of pickle.tell()
before reading the current opcode. If the pickle is a string object,
it's wrapped in a StringIO object, and the latter's tell() result is
used. Else (the pickle doesn't have a tell(), and it's not obvious how
to query its current position) pos is None.
"""
import cStringIO as StringIO
if isinstance(pickle, str):
pickle = StringIO.StringIO(pickle)
if hasattr(pickle, "tell"):
getpos = pickle.tell
else:
getpos = lambda: None
while True:
pos = getpos()
code = pickle.read(1)
opcode = code2op.get(code)
if opcode is None:
if code == "":
raise ValueError("pickle exhausted before seeing STOP")
else:
raise ValueError("at position %s, opcode %r unknown" % (
pos is None and "<unknown>" or pos,
code))
if opcode.arg is None:
arg = None
else:
arg = opcode.arg.reader(pickle)
yield opcode, arg, pos
if code == '.':
assert opcode.name == 'STOP'
break
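# Illustrative sketch, not part of the original module: genops() makes it
# easy to compute simple statistics over a pickle, e.g. how often each
# opcode occurs.
def _count_opcodes(pickled):
    """Return a dict mapping opcode name to occurrence count."""
    counts = {}
    for opcode, arg, pos in genops(pickled):
        counts[opcode.name] = counts.get(opcode.name, 0) + 1
    return counts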
##############################################################################
# A pickle optimizer.
def optimize(p):
'Optimize a pickle string by removing unused PUT opcodes'
gets = set() # set of args used by a GET opcode
puts = [] # (arg, startpos, stoppos) for the PUT opcodes
prevpos = None # set to pos if previous opcode was a PUT
for opcode, arg, pos in genops(p):
if prevpos is not None:
puts.append((prevarg, prevpos, pos))
prevpos = None
if 'PUT' in opcode.name:
prevarg, prevpos = arg, pos
elif 'GET' in opcode.name:
gets.add(arg)
# Copy the pickle string except for PUTS without a corresponding GET
s = []
i = 0
for arg, start, stop in puts:
j = stop if (arg in gets) else start
s.append(p[i:j])
i = stop
s.append(p[i:])
return ''.join(s)
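# Illustrative sketch, not part of the original module: optimize() should
# shrink (or at worst preserve) a pickle without changing the object it
# decodes to.  The sample list below is an arbitrary assumption.
def _optimize_demo():
    import pickle
    original = pickle.dumps([1, 2, 3] * 2, 1)
    slimmed = optimize(original)
    assert pickle.loads(slimmed) == pickle.loads(original)
    return len(original), len(slimmed)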
##############################################################################
# A symbolic pickle disassembler.
def dis(pickle, out=None, memo=None, indentlevel=4):
"""Produce a symbolic disassembly of a pickle.
'pickle' is a file-like object, or string, containing at least one
pickle. The pickle is disassembled from the current position, through
the first STOP opcode encountered.
Optional arg 'out' is a file-like object to which the disassembly is
printed. It defaults to sys.stdout.
Optional arg 'memo' is a Python dict, used as the pickle's memo. It
may be mutated by dis(), if the pickle contains PUT or BINPUT opcodes.
Passing the same memo object to another dis() call then allows disassembly
to proceed across multiple pickles that were all created by the same
pickler with the same memo. Ordinarily you don't need to worry about this.
Optional arg indentlevel is the number of blanks by which to indent
a new MARK level. It defaults to 4.
In addition to printing the disassembly, some sanity checks are made:
+ All embedded opcode arguments "make sense".
+ Explicit and implicit pop operations have enough items on the stack.
+ When an opcode implicitly refers to a markobject, a markobject is
actually on the stack.
+ A memo entry isn't referenced before it's defined.
+ The markobject isn't stored in the memo.
+ A memo entry isn't redefined.
"""
# Most of the hair here is for sanity checks, but most of it is needed
# anyway to detect when a protocol 0 POP takes a MARK off the stack
# (which in turn is needed to indent MARK blocks correctly).
stack = [] # crude emulation of unpickler stack
if memo is None:
memo = {} # crude emulation of unpickler memo
maxproto = -1 # max protocol number seen
markstack = [] # bytecode positions of MARK opcodes
indentchunk = ' ' * indentlevel
errormsg = None
for opcode, arg, pos in genops(pickle):
if pos is not None:
print >> out, "%5d:" % pos,
line = "%-4s %s%s" % (repr(opcode.code)[1:-1],
indentchunk * len(markstack),
opcode.name)
maxproto = max(maxproto, opcode.proto)
before = opcode.stack_before # don't mutate
after = opcode.stack_after # don't mutate
numtopop = len(before)
# See whether a MARK should be popped.
markmsg = None
if markobject in before or (opcode.name == "POP" and
stack and
stack[-1] is markobject):
assert markobject not in after
if __debug__:
if markobject in before:
assert before[-1] is stackslice
if markstack:
markpos = markstack.pop()
if markpos is None:
markmsg = "(MARK at unknown opcode offset)"
else:
markmsg = "(MARK at %d)" % markpos
# Pop everything at and after the topmost markobject.
while stack[-1] is not markobject:
stack.pop()
stack.pop()
# Stop later code from popping too much.
try:
numtopop = before.index(markobject)
except ValueError:
assert opcode.name == "POP"
numtopop = 0
else:
errormsg = markmsg = "no MARK exists on stack"
# Check for correct memo usage.
if opcode.name in ("PUT", "BINPUT", "LONG_BINPUT"):
assert arg is not None
if arg in memo:
errormsg = "memo key %r already defined" % arg
elif not stack:
errormsg = "stack is empty -- can't store into memo"
elif stack[-1] is markobject:
errormsg = "can't store markobject in the memo"
else:
memo[arg] = stack[-1]
elif opcode.name in ("GET", "BINGET", "LONG_BINGET"):
if arg in memo:
assert len(after) == 1
after = [memo[arg]] # for better stack emulation
else:
errormsg = "memo key %r has never been stored into" % arg
if arg is not None or markmsg:
# make a mild effort to align arguments
line += ' ' * (10 - len(opcode.name))
if arg is not None:
line += ' ' + repr(arg)
if markmsg:
line += ' ' + markmsg
print >> out, line
if errormsg:
# Note that we delayed complaining until the offending opcode
# was printed.
raise ValueError(errormsg)
# Emulate the stack effects.
if len(stack) < numtopop:
raise ValueError("tries to pop %d items from stack with "
"only %d items" % (numtopop, len(stack)))
if numtopop:
del stack[-numtopop:]
if markobject in after:
assert markobject not in before
markstack.append(pos)
stack.extend(after)
print >> out, "highest protocol among opcodes =", maxproto
if stack:
raise ValueError("stack not empty after STOP: %r" % stack)
# For use in the doctest, simply as an example of a class to pickle.
class _Example:
def __init__(self, value):
self.value = value
_dis_test = r"""
>>> import pickle
>>> x = [1, 2, (3, 4), {'abc': u"def"}]
>>> pkl = pickle.dumps(x, 0)
#http://www.codeplex.com/IronPython/WorkItem/View.aspx?WorkItemId=21116
#>>> dis(pkl)
# 0: ( MARK
# 1: l LIST (MARK at 0)
# 2: p PUT 0
# 5: I INT 1
# 8: a APPEND
# 9: I INT 2
# 12: a APPEND
# 13: ( MARK
# 14: I INT 3
# 17: I INT 4
# 20: t TUPLE (MARK at 13)
# 21: p PUT 1
# 24: a APPEND
# 25: ( MARK
# 26: d DICT (MARK at 25)
# 27: p PUT 2
# 30: S STRING 'abc'
# 37: p PUT 3
# 40: V UNICODE u'def'
# 45: p PUT 4
# 48: s SETITEM
# 49: a APPEND
# 50: . STOP
#highest protocol among opcodes = 0
#
#Try again with a "binary" pickle.
#
>>> pkl = pickle.dumps(x, 1)
#http://www.codeplex.com/IronPython/WorkItem/View.aspx?WorkItemId=21116
#>>> dis(pkl)
# 0: ] EMPTY_LIST
# 1: q BINPUT 0
# 3: ( MARK
# 4: K BININT1 1
# 6: K BININT1 2
# 8: ( MARK
# 9: K BININT1 3
# 11: K BININT1 4
# 13: t TUPLE (MARK at 8)
# 14: q BINPUT 1
# 16: } EMPTY_DICT
# 17: q BINPUT 2
# 19: U SHORT_BINSTRING 'abc'
# 24: q BINPUT 3
# 26: X BINUNICODE u'def'
# 34: q BINPUT 4
# 36: s SETITEM
# 37: e APPENDS (MARK at 3)
# 38: . STOP
#highest protocol among opcodes = 1
#
#Exercise the INST/OBJ/BUILD family.
#
>>> import random
#http://www.codeplex.com/IronPython/WorkItem/View.aspx?WorkItemId=21116
#>>> dis(pickle.dumps(pickletools.dis, 0))
# 0: c GLOBAL 'pickletools dis'
# 17: p PUT 0
# 20: . STOP
#highest protocol among opcodes = 0
#
>>> from pickletools import _Example
>>> x = [_Example(42)] * 2
#http://www.codeplex.com/IronPython/WorkItem/View.aspx?WorkItemId=21116
#>>> dis(pickle.dumps(x, 0))
# 0: ( MARK
# 1: l LIST (MARK at 0)
# 2: p PUT 0
# 5: ( MARK
# 6: i INST 'pickletools _Example' (MARK at 5)
# 28: p PUT 1
# 31: ( MARK
# 32: d DICT (MARK at 31)
# 33: p PUT 2
# 36: S STRING 'value'
# 45: p PUT 3
# 48: I INT 42
# 52: s SETITEM
# 53: b BUILD
# 54: a APPEND
# 55: g GET 1
# 58: a APPEND
# 59: . STOP
#highest protocol among opcodes = 0
#
#http://www.codeplex.com/IronPython/WorkItem/View.aspx?WorkItemId=21116
#>>> dis(pickle.dumps(x, 1))
# 0: ] EMPTY_LIST
# 1: q BINPUT 0
# 3: ( MARK
# 4: ( MARK
# 5: c GLOBAL 'pickletools _Example'
# 27: q BINPUT 1
# 29: o OBJ (MARK at 4)
# 30: q BINPUT 2
# 32: } EMPTY_DICT
# 33: q BINPUT 3
# 35: U SHORT_BINSTRING 'value'
# 42: q BINPUT 4
# 44: K BININT1 42
# 46: s SETITEM
# 47: b BUILD
# 48: h BINGET 2
# 50: e APPENDS (MARK at 3)
# 51: . STOP
#highest protocol among opcodes = 1
#
Try "the canonical" recursive-object test.
>>> L = []
>>> T = L,
>>> L.append(T)
>>> L[0] is T
True
>>> T[0] is L
True
>>> L[0][0] is L
True
>>> T[0][0] is T
True
#http://www.codeplex.com/IronPython/WorkItem/View.aspx?WorkItemId=21116
#>>> dis(pickle.dumps(L, 0))
# 0: ( MARK
# 1: l LIST (MARK at 0)
# 2: p PUT 0
# 5: ( MARK
# 6: g GET 0
# 9: t TUPLE (MARK at 5)
# 10: p PUT 1
# 13: a APPEND
# 14: . STOP
#highest protocol among opcodes = 0
#
#http://www.codeplex.com/IronPython/WorkItem/View.aspx?WorkItemId=21116
#>>> dis(pickle.dumps(L, 1))
# 0: ] EMPTY_LIST
# 1: q BINPUT 0
# 3: ( MARK
# 4: h BINGET 0
# 6: t TUPLE (MARK at 3)
# 7: q BINPUT 1
# 9: a APPEND
# 10: . STOP
#highest protocol among opcodes = 1
#
Note that, in the protocol 0 pickle of the recursive tuple, the disassembler
has to emulate the stack in order to realize that the POP opcode at 16 gets
rid of the MARK at 0.
#http://www.codeplex.com/IronPython/WorkItem/View.aspx?WorkItemId=21116
#>>> dis(pickle.dumps(T, 0))
# 0: ( MARK
# 1: ( MARK
# 2: l LIST (MARK at 1)
# 3: p PUT 0
# 6: ( MARK
# 7: g GET 0
# 10: t TUPLE (MARK at 6)
# 11: p PUT 1
# 14: a APPEND
# 15: 0 POP
# 16: 0 POP (MARK at 0)
# 17: g GET 1
# 20: . STOP
#highest protocol among opcodes = 0
#
#http://www.codeplex.com/IronPython/WorkItem/View.aspx?WorkItemId=21116
#>>> dis(pickle.dumps(T, 1))
# 0: ( MARK
# 1: ] EMPTY_LIST
# 2: q BINPUT 0
# 4: ( MARK
# 5: h BINGET 0
# 7: t TUPLE (MARK at 4)
# 8: q BINPUT 1
# 10: a APPEND
# 11: 1 POP_MARK (MARK at 0)
# 12: h BINGET 1
# 14: . STOP
#highest protocol among opcodes = 1
#
Try protocol 2.
#http://www.codeplex.com/IronPython/WorkItem/View.aspx?WorkItemId=21116
#>>> dis(pickle.dumps(L, 2))
# 0: \x80 PROTO 2
# 2: ] EMPTY_LIST
# 3: q BINPUT 0
# 5: h BINGET 0
# 7: \x85 TUPLE1
# 8: q BINPUT 1
# 10: a APPEND
# 11: . STOP
#highest protocol among opcodes = 2
#
#http://www.codeplex.com/IronPython/WorkItem/View.aspx?WorkItemId=21116
#>>> dis(pickle.dumps(T, 2))
# 0: \x80 PROTO 2
# 2: ] EMPTY_LIST
# 3: q BINPUT 0
# 5: h BINGET 0
# 7: \x85 TUPLE1
# 8: q BINPUT 1
# 10: a APPEND
# 11: 0 POP
# 12: h BINGET 1
# 14: . STOP
#highest protocol among opcodes = 2
"""
_memo_test = r"""
>>> import pickle
>>> from StringIO import StringIO
>>> f = StringIO()
>>> p = pickle.Pickler(f, 2)
>>> x = [1, 2, 3]
>>> p.dump(x)
>>> p.dump(x)
>>> f.seek(0)
>>> memo = {}
#http://www.codeplex.com/IronPython/WorkItem/View.aspx?WorkItemId=21116
#>>> dis(f, memo=memo)
# 0: \x80 PROTO 2
# 2: ] EMPTY_LIST
# 3: q BINPUT 0
# 5: ( MARK
# 6: K BININT1 1
# 8: K BININT1 2
# 10: K BININT1 3
# 12: e APPENDS (MARK at 5)
# 13: . STOP
#highest protocol among opcodes = 2
#http://www.codeplex.com/IronPython/WorkItem/View.aspx?WorkItemId=21116
#>>> dis(f, memo=memo)
# 14: \x80 PROTO 2
# 16: h BINGET 0
# 18: . STOP
#highest protocol among opcodes = 2
"""
__test__ = {'disassembler_test': _dis_test,
'disassembler_memo_test': _memo_test,
}
def _test():
import doctest
return doctest.testmod()
if __name__ == "__main__":
_test()
|
toby82/kolla
|
refs/heads/master
|
docker/swift/swift-base/build-swift-ring.py
|
5
|
#!/usr/bin/env python
# Copyright 2015 Paul Bourke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This script is a simple wrapper used to create and rebalance Swift ring files.
"""
import argparse
import subprocess
import sys
def setup_args():
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--ring-file",
required=True,
help="Path to ring file")
parser.add_argument("-p", "--part-power",
required=True,
help="Part power")
parser.add_argument("-r", "--num-replicas",
required=True,
help="Number of replicas")
parser.add_argument("-m", "--min-part-hours",
required=True,
help="Min part hours")
parser.add_argument("-H", "--hosts",
required=True,
help="Hosts in the ring, comma delimited")
parser.add_argument("-w", "--weights",
required=True,
help="Weight of each host, comma delimited")
parser.add_argument("-d", "--devices",
required=True,
help="Device on each host, comma delimited")
parser.add_argument("-z", "--zones",
required=True,
help="Zone of each host, comma delimited")
return parser.parse_args()
def run_cmd(cmd):
print(' '.join(cmd))
subprocess.call(cmd)
def run(args):
hosts = args.hosts.split(',')
weights = args.weights.split(',')
zones = args.zones.split(',')
devices = args.devices.split(',')
if not (len(hosts) == len(weights) == len(zones) == len(devices)):
print('Error: an equal number of hosts, devices, weights, '
'and zones is required')
sys.exit(1)
run_cmd(['swift-ring-builder',
args.ring_file,
'create',
args.part_power,
args.num_replicas,
args.min_part_hours])
for i in range(len(hosts)):
run_cmd(['swift-ring-builder',
args.ring_file,
'add',
'z{}-{}/{}'.format(zones[i], hosts[i], devices[i]),
weights[i]])
run_cmd(['swift-ring-builder', args.ring_file, 'rebalance'])
def main():
args = setup_args()
run(args)
if __name__ == "__main__":
main()
|
xuxiao19910803/edx-platform
|
refs/heads/master
|
cms/startup.py
|
88
|
"""
Module with code executed during Studio startup
"""
from django.conf import settings
# Force settings to run so that the python path is modified
settings.INSTALLED_APPS # pylint: disable=pointless-statement
from openedx.core.lib.django_startup import autostartup
from monkey_patch import django_utils_translation
def run():
"""
Executed during django startup
"""
django_utils_translation.patch()
autostartup()
add_mimetypes()
if settings.FEATURES.get('USE_CUSTOM_THEME', False):
enable_theme()
def add_mimetypes():
"""
Add extra mimetypes. Used in xblock_resource.
If you add a mimetype here, be sure to also add it in lms/startup.py.
"""
import mimetypes
mimetypes.add_type('application/vnd.ms-fontobject', '.eot')
mimetypes.add_type('application/x-font-opentype', '.otf')
mimetypes.add_type('application/x-font-ttf', '.ttf')
mimetypes.add_type('application/font-woff', '.woff')
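# For example, once the types above are registered,
# mimetypes.guess_type('font.woff') returns ('application/font-woff', None)
# rather than (None, None).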
def enable_theme():
"""
Enable the settings for a custom theme, whose files should be stored
in ENV_ROOT/themes/THEME_NAME (e.g., edx_all/themes/stanford).
At the moment this is just a fix for collectstatic
(see https://openedx.atlassian.net/browse/TNL-726),
but could be extended into full theming support for Studio
in the future (see lms.startup)
"""
# Workaround for setting THEME_NAME to an empty
# string which is the default due to this ansible
# bug: https://github.com/ansible/ansible/issues/4812
if settings.THEME_NAME == "":
settings.THEME_NAME = None
return
assert settings.FEATURES['USE_CUSTOM_THEME']
settings.FAVICON_PATH = 'themes/{name}/images/favicon.ico'.format(
name=settings.THEME_NAME
)
# Calculate the location of the theme's files
theme_root = settings.ENV_ROOT / "themes" / settings.THEME_NAME
# Namespace the theme's static files to 'themes/<theme_name>' to
# avoid collisions with default edX static files
settings.STATICFILES_DIRS.append(
(u'themes/{}'.format(settings.THEME_NAME), theme_root / 'static')
)
|
TylerTemp/tomorrow
|
refs/heads/master
|
lib/tool/command/upload.py
|
1
|
"""
Usage:
upload [options] (<dir> | <file>)...
Options:
-p, --prefix=<name> format: <bucket>/<dir>/
[default: jolla]
-k, --key=<key>
-s, --secret=<secret>
"""
import qiniu
import qiniu.config
import logging
logger = logging.getLogger('upload')
CLIENT = None
KEY = None
SECRET = None
BUCKET = 'jolla'
def get_client():
global CLIENT
if CLIENT is None:
CLIENT = qiniu.Auth(KEY, SECRET)
return CLIENT
def set_client(key, secret):
global KEY
global SECRET
global CLIENT
KEY = key
SECRET = secret
CLIENT = qiniu.Auth(KEY, SECRET)
return CLIENT
def upload(data, name=None, bucket=None):
if bucket is None:
bucket = BUCKET
client = get_client()
token = client.upload_token(bucket, name, 3600)
ret, info = qiniu.put_data(token, name, data)
return ret['key']
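# Illustrative usage sketch, not part of the original script; the key,
# secret and file path are placeholder assumptions.
def example_upload(path, key, secret, bucket='jolla'):
    import os
    set_client(key, secret)
    with open(path, 'rb') as f:
        return upload(f.read(), name=os.path.basename(path), bucket=bucket)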
if __name__ == '__main__':
import json
import os
from docpie import docpie
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('docpie').setLevel(logging.CRITICAL)
args = docpie(__doc__)
key = args['--key']
sec = args['--secret']
bucket_with_dir = args['--prefix']
assert bucket_with_dir
if not (key and sec):
conf = os.path.normpath(os.path.join(__file__, '..', 'config.conf'))
with open(conf, 'r', encoding='utf-8') as f:
config = json.load(f)['qiniu']
key = config['key']
sec = config['secret']
logger.debug('key: %s; secret: %s', key, sec)
set_client(key, sec)
bucket, _, prefix = bucket_with_dir.partition('/')
f_list = set()
for each in args['<dir>']:
if os.path.isdir(each):
logger.info('find dir %s', each)
base, dirs, files = next(os.walk(each))
f_list.update(os.path.join(base, x) for x in files)
else:
logger.info('find file %s', each)
f_list.add(each)
logger.info(f_list)
finished = []
for each in f_list:
logger.info('uploading %s', each)
name = prefix + os.path.split(each)[-1]
with open(each, 'rb') as f:
key = upload(f.read(), name, bucket)
logger.info(key)
if bucket == 'jolla':
finished.append('https://dn-jolla.qbox.me/%s' % key)
logger.info(finished[-1])
for link in finished:
print(link)
|
haoyuchen1992/CourseBuilder
|
refs/heads/master
|
tests/functional/controllers_review.py
|
4
|
# coding: utf-8
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for controllers pertaining to peer review assessments."""
__author__ = 'Sean Lip'
import actions
from actions import assert_contains
from actions import assert_does_not_contain
from actions import assert_equals
from controllers import sites
from models import transforms
# The unit id for the peer review assignment in the default course.
LEGACY_REVIEW_UNIT_ID = 'ReviewAssessmentExample'
def get_review_step_key(response):
"""Returns the review step key in a request query parameter."""
request_query_string = response.request.environ['QUERY_STRING']
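    # For example, a QUERY_STRING of 'unit=Foo&key=agxkZXZ' yields
    # 'agxkZXZ' -- everything after the first 'key=' (the key is assumed
    # to be the final query parameter).  If 'key=' is absent, find()
    # returns -1 and the slice silently starts at index 3.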
return request_query_string[request_query_string.find('key=') + 4:]
def get_review_payload(identifier, is_draft=False):
"""Returns a sample review payload."""
review = transforms.dumps([
{'index': 0, 'type': 'choices', 'value': '0', 'correct': False},
{'index': 1, 'type': 'regex', 'value': identifier, 'correct': True}
])
return {
'answers': review,
'is_draft': 'true' if is_draft else 'false',
}
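# For example, get_review_payload('R2for1') produces (illustratively):
#   {'answers': '[{"index": 0, ...}, {"index": 1, ..., "value": "R2for1", ...}]',
#    'is_draft': 'false'}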
class PeerReviewControllerTest(actions.TestBase):
"""Test peer review from the Student perspective."""
def test_submit_assignment(self):
"""Test submission of peer-reviewed assignments."""
# Override course.yaml settings by patching app_context.
get_environ_old = sites.ApplicationContext.get_environ
def get_environ_new(self):
environ = get_environ_old(self)
environ['course']['browsable'] = False
return environ
sites.ApplicationContext.get_environ = get_environ_new
email = 'test_peer_reviewed_assignment_submission@google.com'
name = 'Test Peer Reviewed Assignment Submission'
submission = transforms.dumps([
{'index': 0, 'type': 'regex', 'value': 'First answer to Q1',
'correct': True},
{'index': 1, 'type': 'choices', 'value': 3, 'correct': False},
{'index': 2, 'type': 'regex', 'value': 'First answer to Q3',
'correct': True},
])
second_submission = transforms.dumps([
{'index': 0, 'type': 'regex', 'value': 'Second answer to Q1',
'correct': True},
{'index': 1, 'type': 'choices', 'value': 3, 'correct': False},
{'index': 2, 'type': 'regex', 'value': 'Second answer to Q3',
'correct': True},
])
# Check that the sample peer-review assignment shows up in the preview
# page.
response = actions.view_preview(self)
assert_contains('Sample peer review assignment', response.body)
assert_does_not_contain('Review peer assignments', response.body)
actions.login(email)
actions.register(self, name)
# Check that the sample peer-review assignment shows up in the course
# page and that it can be visited.
response = actions.view_course(self)
assert_contains('Sample peer review assignment', response.body)
assert_contains('Review peer assignments', response.body)
assert_contains(
'<a href="assessment?name=%s">' % LEGACY_REVIEW_UNIT_ID,
response.body)
assert_contains('<span> Review peer assignments </span>', response.body,
collapse_whitespace=True)
assert_does_not_contain('<a href="reviewdashboard', response.body,
collapse_whitespace=True)
# Check that the progress circle for this assignment is unfilled.
assert_contains(
'progress-notstarted-%s' % LEGACY_REVIEW_UNIT_ID, response.body)
assert_does_not_contain(
'progress-completed-%s' % LEGACY_REVIEW_UNIT_ID, response.body)
# Try to access an invalid assignment.
response = self.get(
'assessment?name=FakeAssessment', expect_errors=True)
assert_equals(response.status_int, 404)
# The student should not be able to see others' reviews because he/she
# has not submitted an assignment yet.
response = self.get('assessment?name=%s' % LEGACY_REVIEW_UNIT_ID)
assert_does_not_contain('Submitted assignment', response.body)
assert_contains('Due date for this assignment', response.body)
assert_does_not_contain('Reviews received', response.body)
# The student should not be able to access the review dashboard because
# he/she has not submitted the assignment yet.
response = self.get(
'reviewdashboard?unit=%s' % LEGACY_REVIEW_UNIT_ID,
expect_errors=True)
assert_contains('You must submit the assignment for', response.body)
# The student submits the assignment.
response = actions.submit_assessment(
self,
LEGACY_REVIEW_UNIT_ID,
{'answers': submission, 'assessment_type': LEGACY_REVIEW_UNIT_ID}
)
assert_contains(
'Thank you for completing this assignment', response.body)
assert_contains('Review peer assignments', response.body)
# The student views the submitted assignment, which is now read-only.
response = self.get('assessment?name=%s' % LEGACY_REVIEW_UNIT_ID)
assert_contains('First answer to Q1', response.body)
assert_contains('Submitted assignment', response.body)
# The student tries to re-submit the same assignment. This should fail.
response = actions.submit_assessment(
self,
LEGACY_REVIEW_UNIT_ID,
{'answers': second_submission,
'assessment_type': LEGACY_REVIEW_UNIT_ID},
presubmit_checks=False
)
assert_contains(
'You have already submitted this assignment.', response.body)
assert_contains('Review peer assignments', response.body)
# The student views the submitted assignment. The new answers have not
# been saved.
response = self.get('assessment?name=%s' % LEGACY_REVIEW_UNIT_ID)
assert_contains('First answer to Q1', response.body)
assert_does_not_contain('Second answer to Q1', response.body)
# The student checks the course page and sees that the progress
# circle for this assignment has been filled, and that the 'Review
# peer assignments' link is now available.
response = actions.view_course(self)
assert_contains(
'progress-completed-%s' % LEGACY_REVIEW_UNIT_ID, response.body)
assert_does_not_contain(
'<span> Review peer assignments </span>', response.body,
collapse_whitespace=True)
assert_contains(
'<a href="reviewdashboard?unit=%s">' % LEGACY_REVIEW_UNIT_ID,
response.body, collapse_whitespace=True)
# The student should also be able to now view the review dashboard.
response = self.get('reviewdashboard?unit=%s' % LEGACY_REVIEW_UNIT_ID)
assert_contains('Assignments for your review', response.body)
assert_contains('Review a new assignment', response.body)
actions.logout()
# Clean up app_context.
sites.ApplicationContext.get_environ = get_environ_old
def test_handling_of_fake_review_step_key(self):
"""Test that bad keys result in the appropriate responses."""
email = 'student1@google.com'
name = 'Student 1'
submission = transforms.dumps([
{'index': 0, 'type': 'regex', 'value': 'S1-1', 'correct': True},
{'index': 1, 'type': 'choices', 'value': 3, 'correct': False},
{'index': 2, 'type': 'regex', 'value': 'is-S1', 'correct': True},
])
payload = {
'answers': submission, 'assessment_type': LEGACY_REVIEW_UNIT_ID}
actions.login(email)
actions.register(self, name)
actions.submit_assessment(self, LEGACY_REVIEW_UNIT_ID, payload)
actions.view_review(
self, LEGACY_REVIEW_UNIT_ID, 'Fake key',
expected_status_code=404)
actions.logout()
def test_not_enough_assignments_to_allocate(self):
"""Test for the case when there are too few assignments in the pool."""
email = 'student1@google.com'
name = 'Student 1'
submission = transforms.dumps([
{'index': 0, 'type': 'regex', 'value': 'S1-1', 'correct': True},
{'index': 1, 'type': 'choices', 'value': 3, 'correct': False},
{'index': 2, 'type': 'regex', 'value': 'is-S1', 'correct': True},
])
payload = {
'answers': submission, 'assessment_type': LEGACY_REVIEW_UNIT_ID}
actions.login(email)
actions.register(self, name)
response = actions.submit_assessment(
self, LEGACY_REVIEW_UNIT_ID, payload)
# The student goes to the review dashboard and requests an assignment
# to review -- but there is nothing to review.
response = actions.request_new_review(
self, LEGACY_REVIEW_UNIT_ID, expected_status_code=200)
assert_does_not_contain('Assignment to review', response.body)
assert_contains('Sorry, there are no new submissions ', response.body)
assert_contains('disabled="true"', response.body)
actions.logout()
def test_reviewer_cannot_impersonate_another_reviewer(self):
"""Test that one reviewer cannot use another's review step key."""
email1 = 'student1@google.com'
name1 = 'Student 1'
submission1 = transforms.dumps([
{'index': 0, 'type': 'regex', 'value': 'S1-1', 'correct': True},
{'index': 1, 'type': 'choices', 'value': 3, 'correct': False},
{'index': 2, 'type': 'regex', 'value': 'is-S1', 'correct': True},
])
payload1 = {
'answers': submission1, 'assessment_type': LEGACY_REVIEW_UNIT_ID}
email2 = 'student2@google.com'
name2 = 'Student 2'
submission2 = transforms.dumps([
{'index': 0, 'type': 'regex', 'value': 'S2-1', 'correct': True},
{'index': 1, 'type': 'choices', 'value': 3, 'correct': False},
{'index': 2, 'type': 'regex', 'value': 'not-S1', 'correct': True},
])
payload2 = {
'answers': submission2, 'assessment_type': LEGACY_REVIEW_UNIT_ID}
email3 = 'student3@google.com'
name3 = 'Student 3'
submission3 = transforms.dumps([
{'index': 0, 'type': 'regex', 'value': 'S3-1', 'correct': True},
{'index': 1, 'type': 'choices', 'value': 3, 'correct': False},
{'index': 2, 'type': 'regex', 'value': 'not-S1', 'correct': True},
])
payload3 = {
'answers': submission3, 'assessment_type': LEGACY_REVIEW_UNIT_ID}
# Student 1 submits the assignment.
actions.login(email1)
actions.register(self, name1)
response = actions.submit_assessment(
self, LEGACY_REVIEW_UNIT_ID, payload1)
actions.logout()
# Student 2 logs in and submits the assignment.
actions.login(email2)
actions.register(self, name2)
response = actions.submit_assessment(
self, LEGACY_REVIEW_UNIT_ID, payload2)
# Student 2 requests a review, and is given Student 1's assignment.
response = actions.request_new_review(self, LEGACY_REVIEW_UNIT_ID)
review_step_key_2_for_1 = get_review_step_key(response)
assert_contains('S1-1', response.body)
actions.logout()
# Student 3 logs in, and submits the assignment.
actions.login(email3)
actions.register(self, name3)
response = actions.submit_assessment(
self, LEGACY_REVIEW_UNIT_ID, payload3)
# Student 3 tries to view Student 1's assignment using Student 2's
# review step key, but is not allowed to.
response = actions.view_review(
self, LEGACY_REVIEW_UNIT_ID, review_step_key_2_for_1,
expected_status_code=404)
# Student 3 logs out.
actions.logout()
def test_student_cannot_see_reviews_prematurely(self):
"""Test that students cannot see others' reviews prematurely."""
email = 'student1@google.com'
name = 'Student 1'
submission = transforms.dumps([
{'index': 0, 'type': 'regex', 'value': 'S1-1', 'correct': True},
{'index': 1, 'type': 'choices', 'value': 3, 'correct': False},
{'index': 2, 'type': 'regex', 'value': 'is-S1', 'correct': True},
])
payload = {
'answers': submission, 'assessment_type': LEGACY_REVIEW_UNIT_ID}
actions.login(email)
actions.register(self, name)
response = actions.submit_assessment(
self, LEGACY_REVIEW_UNIT_ID, payload)
# Student 1 cannot see the reviews for his assignment yet, because he
# has not submitted the two required reviews.
response = self.get('assessment?name=%s' % LEGACY_REVIEW_UNIT_ID)
assert_equals(response.status_int, 200)
assert_contains('Due date for this assignment', response.body)
assert_contains(
'After you have completed the required number of peer reviews',
response.body)
actions.logout()
def test_draft_review_behaviour(self):
"""Test correctness of draft review visibility."""
email1 = 'student1@google.com'
name1 = 'Student 1'
submission1 = transforms.dumps([
{'index': 0, 'type': 'regex', 'value': 'S1-1', 'correct': True},
{'index': 1, 'type': 'choices', 'value': 3, 'correct': False},
{'index': 2, 'type': 'regex', 'value': 'is-S1', 'correct': True},
])
payload1 = {
'answers': submission1, 'assessment_type': LEGACY_REVIEW_UNIT_ID}
email2 = 'student2@google.com'
name2 = 'Student 2'
submission2 = transforms.dumps([
{'index': 0, 'type': 'regex', 'value': 'S2-1', 'correct': True},
{'index': 1, 'type': 'choices', 'value': 3, 'correct': False},
{'index': 2, 'type': 'regex', 'value': 'not-S1', 'correct': True},
])
payload2 = {
'answers': submission2, 'assessment_type': LEGACY_REVIEW_UNIT_ID}
email3 = 'student3@google.com'
name3 = 'Student 3'
submission3 = transforms.dumps([
{'index': 0, 'type': 'regex', 'value': 'S3-1', 'correct': True},
{'index': 1, 'type': 'choices', 'value': 3, 'correct': False},
{'index': 2, 'type': 'regex', 'value': 'not-S1', 'correct': True},
])
payload3 = {
'answers': submission3, 'assessment_type': LEGACY_REVIEW_UNIT_ID}
# Student 1 submits the assignment.
actions.login(email1)
actions.register(self, name1)
response = actions.submit_assessment(
self, LEGACY_REVIEW_UNIT_ID, payload1)
actions.logout()
# Student 2 logs in and submits the assignment.
actions.login(email2)
actions.register(self, name2)
response = actions.submit_assessment(
self, LEGACY_REVIEW_UNIT_ID, payload2)
# Student 2 requests a review, and is given Student 1's assignment.
response = actions.request_new_review(self, LEGACY_REVIEW_UNIT_ID)
review_step_key_2_for_1 = get_review_step_key(response)
assert_contains('S1-1', response.body)
# Student 2 saves her review as a draft.
review_2_for_1_payload = get_review_payload(
'R2for1', is_draft=True)
response = actions.submit_review(
self, LEGACY_REVIEW_UNIT_ID, review_step_key_2_for_1,
review_2_for_1_payload)
assert_contains('Your review has been saved.', response.body)
response = self.get('reviewdashboard?unit=%s' % LEGACY_REVIEW_UNIT_ID)
assert_equals(response.status_int, 200)
assert_contains('(Draft)', response.body)
# Student 2's draft is still changeable.
response = actions.view_review(
self, LEGACY_REVIEW_UNIT_ID, review_step_key_2_for_1)
assert_contains('Submit Review', response.body)
response = actions.submit_review(
self, LEGACY_REVIEW_UNIT_ID, review_step_key_2_for_1,
review_2_for_1_payload)
assert_contains('Your review has been saved.', response.body)
# Student 2 logs out.
actions.logout()
# Student 3 submits the assignment.
actions.login(email3)
actions.register(self, name3)
response = actions.submit_assessment(
self, LEGACY_REVIEW_UNIT_ID, payload3)
actions.logout()
# Student 1 logs in and requests two assignments to review.
actions.login(email1)
response = self.get('/reviewdashboard?unit=%s' % LEGACY_REVIEW_UNIT_ID)
response = actions.request_new_review(self, LEGACY_REVIEW_UNIT_ID)
assert_contains('Assignment to review', response.body)
assert_contains('not-S1', response.body)
review_step_key_1_for_someone = get_review_step_key(response)
response = actions.request_new_review(self, LEGACY_REVIEW_UNIT_ID)
assert_contains('Assignment to review', response.body)
assert_contains('not-S1', response.body)
review_step_key_1_for_someone_else = get_review_step_key(response)
response = self.get('reviewdashboard?unit=%s' % LEGACY_REVIEW_UNIT_ID)
assert_equals(response.status_int, 200)
assert_contains('disabled="true"', response.body)
# Student 1 submits both reviews, fulfilling his quota.
review_1_for_other_payload = get_review_payload('R1for')
response = actions.submit_review(
self, LEGACY_REVIEW_UNIT_ID, review_step_key_1_for_someone,
review_1_for_other_payload)
assert_contains(
'Your review has been submitted successfully', response.body)
response = actions.submit_review(
self, LEGACY_REVIEW_UNIT_ID, review_step_key_1_for_someone_else,
review_1_for_other_payload)
assert_contains(
'Your review has been submitted successfully', response.body)
response = self.get('/reviewdashboard?unit=%s' % LEGACY_REVIEW_UNIT_ID)
assert_contains('(Completed)', response.body)
assert_does_not_contain('(Draft)', response.body)
# Although Student 1 has submitted 2 reviews, he cannot view Student
# 2's review because it is still in Draft status.
response = self.get('assessment?name=%s' % LEGACY_REVIEW_UNIT_ID)
assert_equals(response.status_int, 200)
assert_contains(
'You have not received any peer reviews yet.', response.body)
assert_does_not_contain('R2for1', response.body)
# Student 1 logs out.
actions.logout()
# Student 2 submits her review for Student 1's assignment.
actions.login(email2)
response = self.get('review?unit=%s&key=%s' % (
LEGACY_REVIEW_UNIT_ID, review_step_key_2_for_1))
assert_does_not_contain('Submitted review', response.body)
response = actions.submit_review(
self, LEGACY_REVIEW_UNIT_ID, review_step_key_2_for_1,
get_review_payload('R2for1'))
assert_contains(
'Your review has been submitted successfully', response.body)
# Her review is now read-only.
response = self.get('review?unit=%s&key=%s' % (
LEGACY_REVIEW_UNIT_ID, review_step_key_2_for_1))
assert_contains('Submitted review', response.body)
assert_contains('R2for1', response.body)
# Student 2 logs out.
actions.logout()
# Now Student 1 can see the review he has received from Student 2.
actions.login(email1)
response = self.get('assessment?name=%s' % LEGACY_REVIEW_UNIT_ID)
assert_equals(response.status_int, 200)
assert_contains('R2for1', response.body)
def test_independence_of_draft_reviews(self):
"""Test that draft reviews do not interfere with each other."""
email1 = 'student1@google.com'
name1 = 'Student 1'
submission1 = transforms.dumps([
{'index': 0, 'type': 'regex', 'value': 'S1-1', 'correct': True},
{'index': 1, 'type': 'choices', 'value': 3, 'correct': False},
{'index': 2, 'type': 'regex', 'value': 'is-S1', 'correct': True},
])
payload1 = {
'answers': submission1, 'assessment_type': LEGACY_REVIEW_UNIT_ID}
email2 = 'student2@google.com'
name2 = 'Student 2'
submission2 = transforms.dumps([
{'index': 0, 'type': 'regex', 'value': 'S2-1', 'correct': True},
{'index': 1, 'type': 'choices', 'value': 3, 'correct': False},
{'index': 2, 'type': 'regex', 'value': 'not-S1', 'correct': True},
])
payload2 = {
'answers': submission2, 'assessment_type': LEGACY_REVIEW_UNIT_ID}
email3 = 'student3@google.com'
name3 = 'Student 3'
submission3 = transforms.dumps([
{'index': 0, 'type': 'regex', 'value': 'S3-1', 'correct': True},
{'index': 1, 'type': 'choices', 'value': 3, 'correct': False},
{'index': 2, 'type': 'regex', 'value': 'not-S1', 'correct': True},
])
payload3 = {
'answers': submission3, 'assessment_type': LEGACY_REVIEW_UNIT_ID}
# Student 1 submits the assignment.
actions.login(email1)
actions.register(self, name1)
response = actions.submit_assessment(
self, LEGACY_REVIEW_UNIT_ID, payload1)
actions.logout()
# Student 2 logs in and submits the assignment.
actions.login(email2)
actions.register(self, name2)
response = actions.submit_assessment(
self, LEGACY_REVIEW_UNIT_ID, payload2)
actions.logout()
# Student 3 logs in and submits the assignment.
actions.login(email3)
actions.register(self, name3)
response = actions.submit_assessment(
self, LEGACY_REVIEW_UNIT_ID, payload3)
actions.logout()
# Student 1 logs in and requests two assignments to review.
actions.login(email1)
response = self.get('/reviewdashboard?unit=%s' % LEGACY_REVIEW_UNIT_ID)
response = actions.request_new_review(self, LEGACY_REVIEW_UNIT_ID)
assert_equals(response.status_int, 200)
assert_contains('Assignment to review', response.body)
assert_contains('not-S1', response.body)
review_step_key_1_for_someone = get_review_step_key(response)
response = actions.request_new_review(self, LEGACY_REVIEW_UNIT_ID)
assert_equals(response.status_int, 200)
assert_contains('Assignment to review', response.body)
assert_contains('not-S1', response.body)
review_step_key_1_for_someone_else = get_review_step_key(response)
self.assertNotEqual(
review_step_key_1_for_someone, review_step_key_1_for_someone_else)
# Student 1 submits two draft reviews.
response = actions.submit_review(
self, LEGACY_REVIEW_UNIT_ID, review_step_key_1_for_someone,
get_review_payload('R1forFirst', is_draft=True))
assert_contains('Your review has been saved.', response.body)
response = actions.submit_review(
self, LEGACY_REVIEW_UNIT_ID, review_step_key_1_for_someone_else,
get_review_payload('R1forSecond', is_draft=True))
assert_contains('Your review has been saved.', response.body)
# The two draft reviews should still be different when subsequently
# accessed.
response = self.get('review?unit=%s&key=%s' % (
LEGACY_REVIEW_UNIT_ID, review_step_key_1_for_someone))
assert_contains('R1forFirst', response.body)
response = self.get('review?unit=%s&key=%s' % (
LEGACY_REVIEW_UNIT_ID, review_step_key_1_for_someone_else))
assert_contains('R1forSecond', response.body)
# Student 1 logs out.
actions.logout()
class PeerReviewDashboardTest(actions.TestBase):
"""Test peer review from the Admin perspective."""
def test_add_reviewer(self):
"""Test that admin can add a reviewer, and cannot re-add reviewers."""
email = 'test_add_reviewer@google.com'
name = 'Test Add Reviewer'
submission = transforms.dumps([
{'index': 0, 'type': 'regex', 'value': 'First answer to Q1',
'correct': True},
{'index': 1, 'type': 'choices', 'value': 3, 'correct': False},
{'index': 2, 'type': 'regex', 'value': 'First answer to Q3',
'correct': True},
])
payload = {
'answers': submission, 'assessment_type': LEGACY_REVIEW_UNIT_ID}
actions.login(email)
actions.register(self, name)
response = actions.submit_assessment(
self, LEGACY_REVIEW_UNIT_ID, payload)
# There is nothing to review on the review dashboard.
response = actions.request_new_review(
self, LEGACY_REVIEW_UNIT_ID, expected_status_code=200)
assert_does_not_contain('Assignment to review', response.body)
assert_contains('Sorry, there are no new submissions ', response.body)
actions.logout()
# The admin assigns the student to review his own work.
actions.login(email, is_admin=True)
response = actions.add_reviewer(
self, LEGACY_REVIEW_UNIT_ID, email, email)
assert_equals(response.status_int, 302)
response = self.get(response.location)
assert_does_not_contain(
'Error 412: The reviewer is already assigned', response.body)
assert_contains('First answer to Q1', response.body)
assert_contains(
'Review 1 from test_add_reviewer@google.com', response.body)
# The admin repeats the 'add reviewer' action. This should fail.
response = actions.add_reviewer(
self, LEGACY_REVIEW_UNIT_ID, email, email)
assert_equals(response.status_int, 302)
response = self.get(response.location)
assert_contains(
'Error 412: The reviewer is already assigned', response.body)
|
alaski/nova
|
refs/heads/master
|
nova/tests/unit/objects/test_service.py
|
4
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_utils import timeutils
from oslo_versionedobjects import base as ovo_base
from oslo_versionedobjects import exception as ovo_exc
from nova.compute import manager as compute_manager
from nova import context
from nova import db
from nova import exception
from nova import objects
from nova.objects import aggregate
from nova.objects import service
from nova import test
from nova.tests.unit.objects import test_compute_node
from nova.tests.unit.objects import test_objects
NOW = timeutils.utcnow().replace(microsecond=0)
def _fake_service(**kwargs):
fake_service = {
'created_at': NOW,
'updated_at': None,
'deleted_at': None,
'deleted': False,
'id': 123,
'host': 'fake-host',
'binary': 'nova-fake',
'topic': 'fake-service-topic',
'report_count': 1,
'forced_down': False,
'disabled': False,
'disabled_reason': None,
'last_seen_up': None,
'version': service.SERVICE_VERSION,
}
fake_service.update(kwargs)
return fake_service
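# Keyword overrides produce record variants, e.g. _fake_service(disabled=True)
# is the same record with the 'disabled' flag flipped; several tests below
# rely on this.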
fake_service = _fake_service()
OPTIONAL = ['availability_zone', 'compute_node']
class _TestServiceObject(object):
def supported_hv_specs_comparator(self, expected, obj_val):
obj_val = [inst.to_list() for inst in obj_val]
self.assertJsonEqual(expected, obj_val)
def pci_device_pools_comparator(self, expected, obj_val):
obj_val = obj_val.obj_to_primitive()
self.assertJsonEqual(expected, obj_val)
def comparators(self):
return {'stats': self.assertJsonEqual,
'host_ip': self.assertJsonEqual,
'supported_hv_specs': self.supported_hv_specs_comparator,
'pci_device_pools': self.pci_device_pools_comparator}
def subs(self):
return {'supported_hv_specs': 'supported_instances',
'pci_device_pools': 'pci_stats'}
def _test_query(self, db_method, obj_method, *args, **kwargs):
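        # Stub the named db API method to return fake_service (or raise
        # the supplied db_exception), invoke the corresponding Service
        # classmethod, and verify the result: None on error, otherwise an
        # object matching fake_service.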
self.mox.StubOutWithMock(db, db_method)
db_exception = kwargs.pop('db_exception', None)
if db_exception:
getattr(db, db_method)(self.context, *args, **kwargs).AndRaise(
db_exception)
else:
getattr(db, db_method)(self.context, *args, **kwargs).AndReturn(
fake_service)
self.mox.ReplayAll()
obj = getattr(service.Service, obj_method)(self.context, *args,
**kwargs)
if db_exception:
self.assertIsNone(obj)
else:
self.compare_obj(obj, fake_service, allow_missing=OPTIONAL)
def test_get_by_id(self):
self._test_query('service_get', 'get_by_id', 123)
def test_get_by_host_and_topic(self):
self._test_query('service_get_by_host_and_topic',
'get_by_host_and_topic', 'fake-host', 'fake-topic')
def test_get_by_host_and_binary(self):
self._test_query('service_get_by_host_and_binary',
'get_by_host_and_binary', 'fake-host', 'fake-binary')
def test_get_by_host_and_binary_raises(self):
self._test_query('service_get_by_host_and_binary',
'get_by_host_and_binary', 'fake-host', 'fake-binary',
db_exception=exception.HostBinaryNotFound(
host='fake-host', binary='fake-binary'))
def test_get_by_compute_host(self):
self._test_query('service_get_by_compute_host', 'get_by_compute_host',
'fake-host')
def test_get_by_args(self):
self._test_query('service_get_by_host_and_binary', 'get_by_args',
'fake-host', 'fake-binary')
def test_create(self):
self.mox.StubOutWithMock(db, 'service_create')
db.service_create(self.context, {'host': 'fake-host',
'version': fake_service['version']}
).AndReturn(fake_service)
self.mox.ReplayAll()
service_obj = service.Service(context=self.context)
service_obj.host = 'fake-host'
service_obj.create()
self.assertEqual(fake_service['id'], service_obj.id)
self.assertEqual(service.SERVICE_VERSION, service_obj.version)
def test_recreate_fails(self):
self.mox.StubOutWithMock(db, 'service_create')
db.service_create(self.context, {'host': 'fake-host',
'version': fake_service['version']}
).AndReturn(fake_service)
self.mox.ReplayAll()
service_obj = service.Service(context=self.context)
service_obj.host = 'fake-host'
service_obj.create()
self.assertRaises(exception.ObjectActionError, service_obj.create)
def test_save(self):
self.mox.StubOutWithMock(db, 'service_update')
db.service_update(self.context, 123,
{'host': 'fake-host',
'version': fake_service['version']}
).AndReturn(fake_service)
self.mox.ReplayAll()
service_obj = service.Service(context=self.context)
service_obj.id = 123
service_obj.host = 'fake-host'
service_obj.save()
self.assertEqual(service.SERVICE_VERSION, service_obj.version)
@mock.patch.object(db, 'service_create',
return_value=fake_service)
def test_set_id_failure(self, db_mock):
service_obj = service.Service(context=self.context,
binary='nova-compute')
service_obj.create()
self.assertRaises(ovo_exc.ReadOnlyFieldError, setattr,
service_obj, 'id', 124)
def _test_destroy(self):
self.mox.StubOutWithMock(db, 'service_destroy')
db.service_destroy(self.context, 123)
self.mox.ReplayAll()
service_obj = service.Service(context=self.context)
service_obj.id = 123
service_obj.destroy()
def test_destroy(self):
# The test harness needs db.service_destroy to work,
# so avoid leaving it broken here after we're done
orig_service_destroy = db.service_destroy
try:
self._test_destroy()
finally:
db.service_destroy = orig_service_destroy
def test_get_by_topic(self):
self.mox.StubOutWithMock(db, 'service_get_all_by_topic')
db.service_get_all_by_topic(self.context, 'fake-topic').AndReturn(
[fake_service])
self.mox.ReplayAll()
services = service.ServiceList.get_by_topic(self.context, 'fake-topic')
self.assertEqual(1, len(services))
self.compare_obj(services[0], fake_service, allow_missing=OPTIONAL)
@mock.patch('nova.db.service_get_all_by_binary')
def test_get_by_binary(self, mock_get):
mock_get.return_value = [fake_service]
services = service.ServiceList.get_by_binary(self.context,
'fake-binary')
self.assertEqual(1, len(services))
mock_get.assert_called_once_with(self.context,
'fake-binary',
include_disabled=False)
@mock.patch('nova.db.service_get_all_by_binary')
def test_get_by_binary_disabled(self, mock_get):
mock_get.return_value = [_fake_service(disabled=True)]
services = service.ServiceList.get_by_binary(self.context,
'fake-binary',
include_disabled=True)
self.assertEqual(1, len(services))
mock_get.assert_called_once_with(self.context,
'fake-binary',
include_disabled=True)
@mock.patch('nova.db.service_get_all_by_binary')
def test_get_by_binary_both(self, mock_get):
mock_get.return_value = [_fake_service(),
_fake_service(disabled=True)]
services = service.ServiceList.get_by_binary(self.context,
'fake-binary',
include_disabled=True)
self.assertEqual(2, len(services))
mock_get.assert_called_once_with(self.context,
'fake-binary',
include_disabled=True)
def test_get_by_host(self):
self.mox.StubOutWithMock(db, 'service_get_all_by_host')
db.service_get_all_by_host(self.context, 'fake-host').AndReturn(
[fake_service])
self.mox.ReplayAll()
services = service.ServiceList.get_by_host(self.context, 'fake-host')
self.assertEqual(1, len(services))
self.compare_obj(services[0], fake_service, allow_missing=OPTIONAL)
def test_get_all(self):
self.mox.StubOutWithMock(db, 'service_get_all')
db.service_get_all(self.context, disabled=False).AndReturn(
[fake_service])
self.mox.ReplayAll()
services = service.ServiceList.get_all(self.context, disabled=False)
self.assertEqual(1, len(services))
self.compare_obj(services[0], fake_service, allow_missing=OPTIONAL)
def test_get_all_with_az(self):
self.mox.StubOutWithMock(db, 'service_get_all')
self.mox.StubOutWithMock(aggregate.AggregateList,
'get_by_metadata_key')
db.service_get_all(self.context, disabled=None).AndReturn(
[dict(fake_service, topic='compute')])
agg = aggregate.Aggregate(context=self.context)
agg.name = 'foo'
agg.metadata = {'availability_zone': 'test-az'}
agg.create()
agg.hosts = [fake_service['host']]
aggregate.AggregateList.get_by_metadata_key(self.context,
'availability_zone', hosts=set(agg.hosts)).AndReturn([agg])
self.mox.ReplayAll()
services = service.ServiceList.get_all(self.context, set_zones=True)
self.assertEqual(1, len(services))
self.assertEqual('test-az', services[0].availability_zone)
def test_compute_node(self):
fake_compute_node = objects.ComputeNode._from_db_object(
self.context, objects.ComputeNode(),
test_compute_node.fake_compute_node)
self.mox.StubOutWithMock(objects.ComputeNodeList, 'get_all_by_host')
objects.ComputeNodeList.get_all_by_host(
self.context, 'fake-host').AndReturn(
[fake_compute_node])
self.mox.ReplayAll()
service_obj = service.Service(id=123, host="fake-host",
binary="nova-compute")
service_obj._context = self.context
self.assertEqual(service_obj.compute_node,
fake_compute_node)
# Make sure it doesn't re-fetch this
service_obj.compute_node
@mock.patch.object(db, 'service_get_all_computes_by_hv_type')
def test_get_all_computes_by_hv_type(self, mock_get_all):
mock_get_all.return_value = [fake_service]
services = service.ServiceList.get_all_computes_by_hv_type(
self.context, 'hv-type')
self.assertEqual(1, len(services))
self.compare_obj(services[0], fake_service, allow_missing=OPTIONAL)
mock_get_all.assert_called_once_with(self.context, 'hv-type',
include_disabled=False)
def test_load_when_orphaned(self):
service_obj = service.Service()
service_obj.id = 123
self.assertRaises(exception.OrphanedObjectError,
getattr, service_obj, 'compute_node')
@mock.patch.object(objects.ComputeNodeList, 'get_all_by_host')
def test_obj_make_compatible_for_compute_node(self, get_all_by_host):
service_obj = objects.Service(context=self.context)
fake_service_dict = fake_service.copy()
fake_compute_obj = objects.ComputeNode(host=fake_service['host'],
service_id=fake_service['id'])
get_all_by_host.return_value = [fake_compute_obj]
versions = ovo_base.obj_tree_get_versions('Service')
versions['ComputeNode'] = '1.10'
service_obj.obj_make_compatible_from_manifest(fake_service_dict, '1.9',
versions)
self.assertEqual(
fake_compute_obj.obj_to_primitive(target_version='1.10',
version_manifest=versions),
fake_service_dict['compute_node'])
@mock.patch('nova.db.service_get_minimum_version')
def test_get_minimum_version_none(self, mock_get):
mock_get.return_value = None
self.assertEqual(0,
objects.Service.get_minimum_version(self.context,
'nova-compute'))
mock_get.assert_called_once_with(self.context, ['nova-compute'])
@mock.patch('nova.db.service_get_minimum_version')
def test_get_minimum_version(self, mock_get):
mock_get.return_value = {'nova-compute': 123}
self.assertEqual(123,
objects.Service.get_minimum_version(self.context,
'nova-compute'))
mock_get.assert_called_once_with(self.context, ['nova-compute'])
@mock.patch('nova.db.service_get_minimum_version')
@mock.patch('nova.objects.service.LOG')
def test_get_minimum_version_checks_binary(self, mock_log, mock_get):
mock_get.return_value = None
self.assertEqual(0,
objects.Service.get_minimum_version(self.context,
'nova-compute'))
self.assertFalse(mock_log.warning.called)
self.assertRaises(exception.ObjectActionError,
objects.Service.get_minimum_version,
self.context,
'compute')
self.assertTrue(mock_log.warning.called)
@mock.patch('nova.db.service_get_minimum_version')
def test_get_minimum_version_with_caching(self, mock_get):
objects.Service.enable_min_version_cache()
mock_get.return_value = {'nova-compute': 123}
self.assertEqual(123,
objects.Service.get_minimum_version(self.context,
'nova-compute'))
self.assertEqual({"nova-compute": 123},
objects.Service._MIN_VERSION_CACHE)
self.assertEqual(123,
objects.Service.get_minimum_version(self.context,
'nova-compute'))
mock_get.assert_called_once_with(self.context, ['nova-compute'])
objects.Service._SERVICE_VERSION_CACHING = False
objects.Service.clear_min_version_cache()
@mock.patch('nova.db.service_get_minimum_version')
def test_get_min_version_multiple_with_old(self, mock_gmv):
mock_gmv.return_value = {'nova-api': None,
'nova-scheduler': 2,
'nova-conductor': 3}
binaries = ['nova-api', 'nova-api', 'nova-conductor',
'nova-conductor', 'nova-api']
minimum = objects.Service.get_minimum_version_multi(self.context,
binaries)
self.assertEqual(0, minimum)
@mock.patch('nova.db.service_get_minimum_version')
def test_get_min_version_multiple(self, mock_gmv):
mock_gmv.return_value = {'nova-api': 1,
'nova-scheduler': 2,
'nova-conductor': 3}
binaries = ['nova-api', 'nova-api', 'nova-conductor',
'nova-conductor', 'nova-api']
minimum = objects.Service.get_minimum_version_multi(self.context,
binaries)
self.assertEqual(1, minimum)
@mock.patch('nova.db.service_get_minimum_version',
return_value={'nova-compute': 2})
def test_create_above_minimum(self, mock_get):
with mock.patch('nova.objects.service.SERVICE_VERSION',
new=3):
objects.Service(context=self.context,
binary='nova-compute').create()
@mock.patch('nova.db.service_get_minimum_version',
return_value={'nova-compute': 2})
def test_create_equal_to_minimum(self, mock_get):
with mock.patch('nova.objects.service.SERVICE_VERSION',
new=2):
objects.Service(context=self.context,
binary='nova-compute').create()
@mock.patch('nova.db.service_get_minimum_version',
return_value={'nova-compute': 2})
def test_create_below_minimum(self, mock_get):
with mock.patch('nova.objects.service.SERVICE_VERSION',
new=1):
self.assertRaises(exception.ServiceTooOld,
objects.Service(context=self.context,
binary='nova-compute',
).create)
class TestServiceObject(test_objects._LocalTest,
_TestServiceObject):
pass
class TestRemoteServiceObject(test_objects._RemoteTest,
_TestServiceObject):
pass
class TestServiceVersion(test.TestCase):
def setUp(self):
self.ctxt = context.get_admin_context()
super(TestServiceVersion, self).setUp()
def _collect_things(self):
data = {
'compute_rpc': compute_manager.ComputeManager.target.version,
}
return data
def test_version(self):
calculated = self._collect_things()
self.assertEqual(
len(service.SERVICE_VERSION_HISTORY), service.SERVICE_VERSION + 1,
'Service version %i has no history. Please update '
'nova.objects.service.SERVICE_VERSION_HISTORY '
'and add %s to it' % (service.SERVICE_VERSION, repr(calculated)))
current = service.SERVICE_VERSION_HISTORY[service.SERVICE_VERSION]
self.assertEqual(
current, calculated,
'Changes detected that require a SERVICE_VERSION change. Please '
            'increment nova.objects.service.SERVICE_VERSION, and make sure it '
            'is equal to nova.compute.manager.ComputeManager.target.version.')
def test_version_in_init(self):
self.assertRaises(exception.ObjectActionError,
objects.Service,
version=123)
def test_version_set_on_init(self):
self.assertEqual(service.SERVICE_VERSION,
objects.Service().version)
def test_version_loaded_from_db(self):
fake_version = fake_service['version'] + 1
fake_different_service = dict(fake_service)
fake_different_service['version'] = fake_version
obj = objects.Service()
obj._from_db_object(self.ctxt, obj, fake_different_service)
self.assertEqual(fake_version, obj.version)
|
pjryan126/solid-start-careers
|
refs/heads/master
|
store/api/glassdoor/venv/lib/python2.7/site-packages/requests/packages/chardet/hebrewprober.py
|
2928
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Shy Shalom
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .constants import eNotMe, eDetecting
from .compat import wrap_ord
# This prober doesn't actually recognize a language or a charset.
# It is a helper prober for the use of the Hebrew model probers.
### General ideas of the Hebrew charset recognition ###
#
# Four main charsets exist in Hebrew:
# "ISO-8859-8" - Visual Hebrew
# "windows-1255" - Logical Hebrew
# "ISO-8859-8-I" - Logical Hebrew
# "x-mac-hebrew" - ?? Logical Hebrew ??
#
# Both "ISO" charsets use a completely identical set of code points, whereas
# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
# these code points. windows-1255 defines additional characters in the range
# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
# x-mac-hebrew defines similar additional code points but with a different
# mapping.
#
# As far as an average Hebrew text with no diacritics is concerned, all four
# charsets are identical with respect to code points. Meaning that for the
# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
# (including final letters).
#
# The dominant difference between these charsets is their directionality.
# "Visual" directionality means that the text is ordered as if the renderer is
# not aware of a BIDI rendering algorithm. The renderer sees the text and
# draws it from left to right. The text itself when ordered naturally is read
# backwards. A buffer of Visual Hebrew generally looks like so:
# "[last word of first line spelled backwards] [whole line ordered backwards
# and spelled backwards] [first word of first line spelled backwards]
# [end of line] [last word of second line] ... etc' "
# Adding punctuation marks, numbers and English text to visual text is
# naturally also "visual" and from left to right.
#
# "Logical" directionality means the text is ordered "naturally" according to
# the order it is read. It is the responsibility of the renderer to display
# the text from right to left. A BIDI algorithm is used to place general
# punctuation marks, numbers and English text in the text.
#
# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
# what little evidence I could find, it seems that its general directionality
# is Logical.
#
# To sum up all of the above, the Hebrew probing mechanism knows about two
# charsets:
# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
# backwards while line order is natural. For charset recognition purposes
# the line order is unimportant (In fact, for this implementation, even
# word order is unimportant).
# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
#
# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
# specifically identified.
# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
# that contains special punctuation marks or diacritics is displayed with
# some unconverted characters showing as question marks. This problem might
# be corrected using another model prober for x-mac-hebrew. Due to the fact
# that x-mac-hebrew texts are so rare, writing another model prober isn't
# worth the effort and performance hit.
#
#### The Prober ####
#
# The prober is divided between two SBCharSetProbers and a HebrewProber,
# all of which are managed, created, fed data, inquired and deleted by the
# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
# fact some kind of Hebrew, Logical or Visual. The final decision about which
# one it is is made by the HebrewProber by combining final-letter scores
# with the scores of the two SBCharSetProbers to produce a final answer.
#
# The SBCSGroupProber is responsible for stripping the original text of HTML
# tags, English characters, numbers, low-ASCII punctuation characters, spaces
# and new lines. It reduces any sequence of such characters to a single space.
# The buffer fed to each prober in the SBCS group prober is pure text in
# high-ASCII.
# The two SBCharSetProbers (model probers) share the same language model:
# Win1255Model.
# The first SBCharSetProber uses the model normally as any other
# SBCharSetProber does, to recognize windows-1255, upon which this model was
# built. The second SBCharSetProber is told to make the pair-of-letter
# lookup in the language model backwards. This in practice exactly simulates
# a visual Hebrew model using the windows-1255 logical Hebrew model.
#
# The HebrewProber is not using any language model. All it does is look for
# final-letter evidence suggesting the text is either logical Hebrew or visual
# Hebrew. Disjointed from the model probers, the results of the HebrewProber
# alone are meaningless. HebrewProber always returns 0.00 as confidence
# since it never identifies a charset by itself. Instead, the pointer to the
# HebrewProber is passed to the model probers as a helper "Name Prober".
# When the Group prober receives a positive identification from any prober,
# it asks for the name of the charset identified. If the prober queried is a
# Hebrew model prober, the model prober forwards the call to the
# HebrewProber to make the final decision. In the HebrewProber, the
# decision is made according to the final-letter scores maintained and both
# model probers' scores. The answer is returned in the form of the name of the
# charset identified, either "windows-1255" or "ISO-8859-8".
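# Illustrative sketch (not part of the original module): the core of the
# final-letter heuristic described above, demonstrated on ASCII stand-ins
# instead of the real windows-1255 code points. A multi-letter word that
# ends with a final form votes "logical"; one that starts with a final
# form votes "visual". HebrewProber.feed() below does this byte-by-byte.
def _demo_final_letter_votes(words, final_forms):
    logical = visual = 0
    for word in words:
        if len(word) > 1 and word[-1] in final_forms:
            logical += 1
        if len(word) > 1 and word[0] in final_forms:
            visual += 1
    return logical, visual
# _demo_final_letter_votes(['xaK', 'Kax'], 'K') returns (1, 1).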
# windows-1255 / ISO-8859-8 code points of interest
FINAL_KAF = 0xea
NORMAL_KAF = 0xeb
FINAL_MEM = 0xed
NORMAL_MEM = 0xee
FINAL_NUN = 0xef
NORMAL_NUN = 0xf0
FINAL_PE = 0xf3
NORMAL_PE = 0xf4
FINAL_TSADI = 0xf5
NORMAL_TSADI = 0xf6
# Minimum Visual vs Logical final letter score difference.
# If the difference is below this, don't rely solely on the final letter score
# distance.
MIN_FINAL_CHAR_DISTANCE = 5
# Minimum Visual vs Logical model score difference.
# If the difference is below this, don't rely at all on the model score
# distance.
MIN_MODEL_DISTANCE = 0.01
VISUAL_HEBREW_NAME = "ISO-8859-8"
LOGICAL_HEBREW_NAME = "windows-1255"
class HebrewProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mLogicalProber = None
self._mVisualProber = None
self.reset()
def reset(self):
self._mFinalCharLogicalScore = 0
self._mFinalCharVisualScore = 0
# The two last characters seen in the previous buffer,
# mPrev and mBeforePrev are initialized to space in order to simulate
# a word delimiter at the beginning of the data
self._mPrev = ' '
self._mBeforePrev = ' '
# These probers are owned by the group prober.
def set_model_probers(self, logicalProber, visualProber):
self._mLogicalProber = logicalProber
self._mVisualProber = visualProber
def is_final(self, c):
return wrap_ord(c) in [FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
FINAL_TSADI]
def is_non_final(self, c):
# The normal Tsadi is not a good Non-Final letter due to words like
# 'lechotet' (to chat) containing an apostrophe after the tsadi. This
# apostrophe is converted to a space in FilterWithoutEnglishLetters
# causing the Non-Final tsadi to appear at an end of a word even
# though this is not the case in the original text.
        # The letters Pe and Kaf occasionally show a similar problem of not
        # being good Non-Final letters: words like 'Pop', 'Winamp' and
        # 'Mubarak' legally end with a Non-Final Pe or Kaf. However, the
# benefit of these letters as Non-Final letters outweighs the damage
# since these words are quite rare.
return wrap_ord(c) in [NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE]
def feed(self, aBuf):
# Final letter analysis for logical-visual decision.
# Look for evidence that the received buffer is either logical Hebrew
# or visual Hebrew.
# The following cases are checked:
# 1) A word longer than 1 letter, ending with a final letter. This is
# an indication that the text is laid out "naturally" since the
# final letter really appears at the end. +1 for logical score.
# 2) A word longer than 1 letter, ending with a Non-Final letter. In
# normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
# should not end with the Non-Final form of that letter. Exceptions
        # to this rule are mentioned above in is_non_final(). This is an
# indication that the text is laid out backwards. +1 for visual
# score
# 3) A word longer than 1 letter, starting with a final letter. Final
# letters should not appear at the beginning of a word. This is an
# indication that the text is laid out backwards. +1 for visual
# score.
#
# The visual score and logical score are accumulated throughout the
        # text and are finally checked against each other in get_charset_name().
# No checking for final letters in the middle of words is done since
# that case is not an indication for either Logical or Visual text.
#
# We automatically filter out all 7-bit characters (replace them with
# spaces) so the word boundary detection works properly. [MAP]
if self.get_state() == eNotMe:
# Both model probers say it's not them. No reason to continue.
return eNotMe
aBuf = self.filter_high_bit_only(aBuf)
for cur in aBuf:
if cur == ' ':
# We stand on a space - a word just ended
if self._mBeforePrev != ' ':
# next-to-last char was not a space so self._mPrev is not a
# 1 letter word
if self.is_final(self._mPrev):
# case (1) [-2:not space][-1:final letter][cur:space]
self._mFinalCharLogicalScore += 1
elif self.is_non_final(self._mPrev):
# case (2) [-2:not space][-1:Non-Final letter][
# cur:space]
self._mFinalCharVisualScore += 1
else:
# Not standing on a space
if ((self._mBeforePrev == ' ') and
(self.is_final(self._mPrev)) and (cur != ' ')):
# case (3) [-2:space][-1:final letter][cur:not space]
self._mFinalCharVisualScore += 1
self._mBeforePrev = self._mPrev
self._mPrev = cur
# Forever detecting, till the end or until both model probers return
# eNotMe (handled above)
return eDetecting
def get_charset_name(self):
# Make the decision: is it Logical or Visual?
# If the final letter score distance is dominant enough, rely on it.
finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
if finalsub >= MIN_FINAL_CHAR_DISTANCE:
return LOGICAL_HEBREW_NAME
if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
return VISUAL_HEBREW_NAME
# It's not dominant enough, try to rely on the model scores instead.
modelsub = (self._mLogicalProber.get_confidence()
- self._mVisualProber.get_confidence())
if modelsub > MIN_MODEL_DISTANCE:
return LOGICAL_HEBREW_NAME
if modelsub < -MIN_MODEL_DISTANCE:
return VISUAL_HEBREW_NAME
# Still no good, back to final letter distance, maybe it'll save the
# day.
if finalsub < 0.0:
return VISUAL_HEBREW_NAME
# (finalsub > 0 - Logical) or (don't know what to do) default to
# Logical.
return LOGICAL_HEBREW_NAME
def get_state(self):
# Remain active as long as any of the model probers are active.
if (self._mLogicalProber.get_state() == eNotMe) and \
(self._mVisualProber.get_state() == eNotMe):
return eNotMe
return eDetecting
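# Minimal usage sketch (an assumption for illustration; in real use the
# SBCSGroupProber performs this wiring). The stub probers stand in for the
# two windows-1255 SBCharSetProbers described at the top of this module.
class _StubModelProber(object):
    def __init__(self, confidence):
        self._confidence = confidence
    def get_confidence(self):
        return self._confidence
    def get_state(self):
        return eDetecting
def _demo_hebrew_prober():
    prober = HebrewProber()
    prober.set_model_probers(_StubModelProber(0.6), _StubModelProber(0.2))
    prober.feed(b'\xf9\xec\xe5\xed ')  # one word ending in FINAL_MEM
    # finalsub is only 1 (< MIN_FINAL_CHAR_DISTANCE), so the decision falls
    # through to the model-score distance (0.4), giving "windows-1255".
    return prober.get_charset_name()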
|
jemandez/creaturas-magicas
|
refs/heads/master
|
Configuraciones bรกsicas/scripts/addons/blendertools-1.0.0/maketarget/pose.py
|
1
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
**Project Name:** MakeHuman
**Product Home Page:** http://www.makehuman.org/
**Code Home Page:** http://code.google.com/p/makehuman/
**Authors:** Thomas Larsson
**Copyright(c):** MakeHuman Team 2001-2014
**Licensing:** AGPL3 (see also http://www.makehuman.org/node/318)
**Coding Standards:** See http://www.makehuman.org/node/165
Abstract
--------
Load a saved pose
"""
import bpy
import os
import sys
import math
from mathutils import Vector, Quaternion, Matrix
from bpy.props import *
from bpy_extras.io_utils import ExportHelper, ImportHelper
from . import mh
from .error import MHError, handleMHError
from . import utils
from .utils import round, setObjectMode
#----------------------------------------------------------
# saveMhpFile(context, filepath):
# loadMhpFile(context, filepath):
#----------------------------------------------------------
def saveMhpFile(context, filepath):
ob = context.object
if ob.type == 'ARMATURE':
rig = ob
else:
rig = ob.parent
scn = context.scene
if rig and rig.type == 'ARMATURE':
roots = rigRoots(rig)
if len(roots) > 1:
raise MHError("Armature %s has multiple roots: %s" % (rig.name, roots))
(pname, ext) = os.path.splitext(filepath)
mhppath = pname + ".mhp"
fp = open(mhppath, "w", encoding="utf-8", newline="\n")
root = rig.pose.bones[roots[0]]
writeMhpBones(fp, root, None)
fp.close()
print("Mhp file %s saved" % mhppath)
def writeMhpBones(fp, pb, log):
b = pb.bone
if pb.parent:
mat = b.matrix_local.inverted() * b.parent.matrix_local * pb.parent.matrix.inverted() * pb.matrix
else:
mat = pb.matrix.copy()
maty = mat[1].copy()
matz = mat[2].copy()
mat[1] = matz
mat[2] = -maty
diff = mat - Matrix()
nonzero = False
for i in range(4):
if abs(diff[i].length) > 5e-3:
nonzero = True
break
if nonzero:
fp.write("%s\tmatrix" % pb.name)
for i in range(4):
row = mat[i]
fp.write("\t%s\t%s\t%s\t%s" % (round(row[0]), round(row[1]), round(row[2]), round(row[3])))
fp.write("\n")
"""
t,q,s = mat.decompose()
magn = math.sqrt(q.x*q.x + q.y*q.y + q.z*q.z)
if magn > 1e-5:
fp.write("%s\t%s\t%s\t%s\t%s\t%s\n" % (pb.name, string, round(q.w), round(q.x), round(q.y), round(q.z)))
s -= Vector((1,1,1))
if s.length > 1e-3 and isMuscleBone(pb):
fp.write("%s\t%s\t%s\t%s\t%s\n" % (pb.name, "scale", round(s[0]), round(s[1]), round(s[2])))
#log.write("%s %s\n%s\n" % (pb.name, s, m))
"""
for child in pb.children:
writeMhpBones(fp, child, log)
def isMuscleBone(pb):
for cns in pb.constraints:
if (cns.type == 'STRETCH_TO' or
cns.type == 'TRANSFORM' or
cns.type == 'TRACK_TO' or
cns.type == 'COPY_ROTATION'):
return True
return False
def loadMhpFile(context, filepath):
ob = context.object
if ob.type == 'ARMATURE':
rig = ob
else:
rig = ob.parent
unit = Matrix()
for pb in rig.pose.bones:
pb.matrix_basis = unit
scn = context.scene
if rig and rig.type == 'ARMATURE':
(pname, ext) = os.path.splitext(filepath)
mhppath = pname + ".mhp"
fp = open(mhppath, "rU")
for line in fp:
words = line.split()
if len(words) < 4:
continue
try:
pb = rig.pose.bones[words[0]]
except KeyError:
continue
if isMuscleBone(pb):
pass
elif words[1] == "quat":
q = Quaternion((float(words[2]), float(words[3]), float(words[4]), float(words[5])))
mat = q.to_matrix().to_4x4()
pb.matrix_basis = mat
elif words[1] == "gquat":
q = Quaternion((float(words[2]), float(words[3]), float(words[4]), float(words[5])))
mat = q.to_matrix().to_4x4()
maty = mat[1].copy()
matz = mat[2].copy()
mat[1] = -matz
mat[2] = maty
pb.matrix_basis = pb.bone.matrix_local.inverted() * mat
elif words[1] == "matrix":
rows = []
n = 2
for i in range(4):
rows.append((float(words[n]), float(words[n+1]), float(words[n+2]), float(words[n+3])))
n += 4
mat = Matrix(rows)
if pb.parent:
pb.matrix_basis = mat
else:
maty = mat[1].copy()
matz = mat[2].copy()
mat[1] = -matz
mat[2] = maty
pb.matrix_basis = pb.bone.matrix_local.inverted() * mat
elif words[1] == "scale":
pass
else:
raise MHError("Unknown line in mcp file:\n%s" % line)
fp.close()
print("Mhp file %s loaded" % mhppath)
class VIEW3D_OT_LoadMhpButton(bpy.types.Operator):
bl_idname = "mh.load_mhp"
bl_label = "Load MHP File"
bl_description = "Load a pose in MHP format"
bl_options = {'UNDO'}
filename_ext = ".mhp"
filter_glob = StringProperty(default="*.mhp", options={'HIDDEN'})
filepath = bpy.props.StringProperty(
name="File Path",
description="File path used for mhp file",
maxlen= 1024, default= "")
@classmethod
def poll(self, context):
return context.object
def execute(self, context):
from .maketarget import makeBaseObj
setObjectMode(context)
try:
loadMhpFile(context, self.properties.filepath)
makeBaseObj(context)
except MHError:
handleMHError(context)
return {'FINISHED'}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
class VIEW3D_OT_SaveasMhpFileButton(bpy.types.Operator, ExportHelper):
bl_idname = "mh.saveas_mhp"
bl_label = "Save MHP File"
bl_description = "Save current pose in MHP format"
bl_options = {'UNDO'}
filename_ext = ".mhp"
filter_glob = StringProperty(default="*.mhp", options={'HIDDEN'})
filepath = bpy.props.StringProperty(
name="File Path",
description="File path used for mhp file",
maxlen= 1024, default= "")
@classmethod
def poll(self, context):
return context.object
def execute(self, context):
setObjectMode(context)
try:
saveMhpFile(context, self.properties.filepath)
except MHError:
handleMHError(context)
return {'FINISHED'}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
#----------------------------------------------------------
# saveBvhFile(context, filepath):
# loadBvhFile(context, filepath):
#----------------------------------------------------------
import io_anim_bvh
from io_anim_bvh import export_bvh, import_bvh
def saveBvhFile(context, filepath):
ob = context.object
rig = ob.parent
scn = context.scene
if rig and rig.type == 'ARMATURE':
roots = rigRoots(rig)
if len(roots) > 1:
raise MHError("Armature %s has multiple roots: %s" % (rig.name, roots))
scn.objects.active = rig
(pname, ext) = os.path.splitext(filepath)
bvhpath = pname + ".bvh"
export_bvh.write_armature(context, bvhpath,
frame_start = scn.frame_current,
frame_end = scn.frame_current,
global_scale = 1.0,
rotate_mode = scn.MhExportRotateMode,
root_transform_only = True
)
scn.objects.active = ob
print("Saved %s" % bvhpath)
return True
else:
return False
def rigRoots(rig):
roots = []
for bone in rig.data.bones:
if not bone.parent:
roots.append(bone.name)
return roots
def loadBvhFile(context, filepath):
ob = context.object
rig = ob.parent
scn = context.scene
if rig and rig.type == 'ARMATURE':
(pname, ext) = os.path.splitext(filepath)
bvhpath = pname + ".bvh"
bvh_nodes = import_bvh.read_bvh(context, bvhpath,
rotate_mode=scn.MhImportRotateMode,
global_scale=1.0)
frame_orig = context.scene.frame_current
bvh_name = bpy.path.display_name_from_filepath(bvhpath)
import_bvh.bvh_node_dict2armature(context, bvh_name, bvh_nodes,
rotate_mode = scn.MhImportRotateMode,
frame_start = scn.frame_current,
IMPORT_LOOP = False,
global_matrix = rig.matrix_world,
)
context.scene.frame_set(frame_orig)
tmp = context.object
bpy.ops.object.mode_set(mode='POSE')
scn.objects.active = rig
bpy.ops.object.mode_set(mode='POSE')
copyPose(tmp, rig)
scn.objects.active = ob
scn.objects.unlink(tmp)
del tmp
print("Loaded %s" % bvhpath)
return True
else:
return False
def copyPose(src, trg):
for name,srcBone in src.pose.bones.items():
trgBone = trg.pose.bones[srcBone.name]
s = srcBone.matrix_basis
t = trgBone.matrix_basis.copy()
for i in range(3):
for j in range(3):
t[i][j] = s[i][j]
trgBone.matrix_basis = t
class VIEW3D_OT_LoadBvhButton(bpy.types.Operator):
bl_idname = "mh.load_bvh"
bl_label = "Load BVH File"
bl_description = "Load a pose in BVH format"
bl_options = {'UNDO'}
filename_ext = ".bvh"
filter_glob = StringProperty(default="*.bvh", options={'HIDDEN'})
filepath = bpy.props.StringProperty(
name="File Path",
description="File path used for bvh file",
maxlen= 1024, default= "")
@classmethod
def poll(self, context):
return context.object
def execute(self, context):
try:
setObjectMode(context)
loadBvhFile(context, self.properties.filepath)
except MHError:
handleMHError(context)
return {'FINISHED'}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
class VIEW3D_OT_SaveasBvhFileButton(bpy.types.Operator, ExportHelper):
bl_idname = "mh.saveas_bvh"
bl_label = "Save BVH File"
bl_description = "Save current pose in BVH format"
bl_options = {'UNDO'}
filename_ext = ".bvh"
filter_glob = StringProperty(default="*.bvh", options={'HIDDEN'})
filepath = bpy.props.StringProperty(
name="File Path",
description="File path used for bvh file",
maxlen= 1024, default= "")
@classmethod
def poll(self, context):
return context.object
def execute(self, context):
try:
setObjectMode(context)
saveBvhFile(context, self.properties.filepath)
except MHError:
handleMHError(context)
return {'FINISHED'}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
#----------------------------------------------------------
# Convert weights
#----------------------------------------------------------
def readWeights(filepath, nVerts):
weights = {}
for n in range(nVerts):
weights[n] = []
bone = None
fp = open(filepath, "rU")
for line in fp:
words = line.split()
if len(words) < 2:
pass
elif words[0] == "#":
if words[1] == "weights":
bone = words[2]
else:
bone = None
elif bone:
vn = int(words[0])
if vn < mh.NBodyVerts:
weights[vn].append( (bone, float(words[1])) )
fp.close()
normedWeights = {}
for vn,data in weights.items():
wsum = 0.0
for bone,w in data:
wsum += w
ndata = []
for bone,w in data:
ndata.append((bone,w/wsum))
normedWeights[vn] = ndata
return normedWeights
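# Worked sketch (illustrative): the normalization pass above rescales each
# vertex's (bone, weight) list so the weights sum to 1.0, e.g.
# [('a', 1.0), ('b', 3.0)] becomes [('a', 0.25), ('b', 0.75)].
def _demoNormalizeWeights(data):
    wsum = sum(w for _, w in data)
    return [(bone, w / wsum) for bone, w in data]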
def defineMatrices(rig):
mats = {}
for pb in rig.pose.bones:
mats[pb.name] = pb.matrix * pb.bone.matrix_local.inverted()
return mats
def getPoseLocs(mats, restLocs, weights, nVerts):
locs = {}
for n in range(nVerts):
if weights[n]:
mat = getMatrix(mats, weights[n])
locs[n] = mat * restLocs[n]
else:
locs[n] = restLocs[n]
return locs
def getRestLocs(mats, poseLocs, weights, nVerts):
locs = {}
for n in range(nVerts):
if weights[n]:
mat = getMatrix(mats, weights[n])
locs[n] = mat.inverted() * poseLocs[n]
else:
locs[n] = poseLocs[n]
return locs
def getMatrix(mats, weight):
mat = Matrix()
mat.zero()
for bname,w in weight:
mat += w * mats[bname]
return mat
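# Worked sketch (illustrative, runs inside Blender where mathutils is
# available): getMatrix() above is a plain linear blend, M = sum(w_i * M_i).
def _demoBlend():
    mats = {'a': Matrix(), 'b': 2.0 * Matrix()}  # identity and 2*identity
    # 0.25 * I + 0.75 * 2I = 1.75 * I
    return getMatrix(mats, [('a', 0.25), ('b', 0.75)])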
def getShapeLocs(ob, nVerts):
locs = {}
filename = "test"
for n in range(nVerts):
locs[n] = Vector((0,0,0))
for skey in ob.data.shape_keys.key_blocks:
if skey.name == "Basis":
continue
filename = skey.name
for n,v in enumerate(skey.data):
bv = ob.data.vertices[n]
vec = v.co - bv.co
locs[n] += skey.value*vec
return locs, filename
def addLocs(locs1, locs2, nVerts):
locs = {}
for n in range(nVerts):
locs[n] = locs1[n] + locs2[n]
return locs
def subLocs(locs1, locs2, nVerts):
locs = {}
for n in range(nVerts):
locs[n] = locs1[n] - locs2[n]
return locs
Epsilon = 1e-4  # assumed save threshold; not defined in the original file
def saveNewTarget(filepath, locs, nVerts):
fp = open(filepath, "w", encoding="utf-8", newline="\n")
locList = list(locs.items())
locList.sort()
for (n, dr) in locList:
if dr.length > Epsilon:
fp.write("%d %s %s %s\n" % (n, round(dr[0]), round(dr[2]), round(-dr[1])))
fp.close()
return
class VIEW3D_OT_ConvertRigButton(bpy.types.Operator):
bl_idname = "mh.convert_rig"
bl_label = "Convert to rig"
bl_description = ""
bl_options = {'UNDO'}
@classmethod
def poll(self, context):
return context.object
def execute(self, context):
setObjectMode(context)
scn = context.scene
ob = context.object
rig = ob.parent
nVerts = len(ob.data.vertices)
oldWeights = readWeights(os.path.join(scn.MhProgramPath, "data/rigs", scn.MhSourceRig+".rig"), nVerts)
newWeights = readWeights(os.path.join(scn.MhProgramPath, "data/rigs",scn.MhTargetRig+".rig"), nVerts)
mats = defineMatrices(rig)
restLocs = {}
for n in range(nVerts):
restLocs[n] = ob.data.vertices[n].co
oldShapeDiffs, filename = getShapeLocs(ob, nVerts)
oldRestLocs = addLocs(restLocs, oldShapeDiffs, nVerts)
globalLocs = getPoseLocs(mats, oldRestLocs, oldWeights, nVerts)
newRestLocs = getRestLocs(mats, globalLocs, newWeights, nVerts)
newShapeDiffs = subLocs(newRestLocs, restLocs, nVerts)
saveNewTarget(os.path.join(scn.MhProgramPath, "data/poses", scn.MhPoseTargetDir, filename + ".target"), newShapeDiffs, nVerts)
        return {'FINISHED'}
#----------------------------------------------------------
# Init
#----------------------------------------------------------
def init():
bpy.types.Scene.MhSourceRig = StringProperty(default = "rigid")
bpy.types.Scene.MhTargetRig = StringProperty(default = "soft1")
bpy.types.Scene.MhPoseTargetDir = StringProperty(default = "dance1-soft1")
bpy.types.Scene.MhImportRotateMode = EnumProperty(
name="Rotation",
description="Rotation conversion",
items=(('QUATERNION', "Quaternion",
"Convert rotations to quaternions"),
('NATIVE', "Euler (Native)", ("Use the rotation order "
"defined in the BVH file")),
('XYZ', "Euler (XYZ)", "Convert rotations to euler XYZ"),
('XZY', "Euler (XZY)", "Convert rotations to euler XZY"),
('YXZ', "Euler (YXZ)", "Convert rotations to euler YXZ"),
('YZX', "Euler (YZX)", "Convert rotations to euler YZX"),
('ZXY', "Euler (ZXY)", "Convert rotations to euler ZXY"),
('ZYX', "Euler (ZYX)", "Convert rotations to euler ZYX"),
),
default='NATIVE',
)
bpy.types.Scene.MhExportRotateMode = EnumProperty(
name="Rotation",
description="Rotation conversion",
items=(('NATIVE', "Euler (Native)",
"Use the rotation order defined in the BVH file"),
('XYZ', "Euler (XYZ)", "Convert rotations to euler XYZ"),
('XZY', "Euler (XZY)", "Convert rotations to euler XZY"),
('YXZ', "Euler (YXZ)", "Convert rotations to euler YXZ"),
('YZX', "Euler (YZX)", "Convert rotations to euler YZX"),
('ZXY', "Euler (ZXY)", "Convert rotations to euler ZXY"),
('ZYX', "Euler (ZYX)", "Convert rotations to euler ZYX"),
),
default='ZYX',
)
|
patilsangram/erpnext
|
refs/heads/develop
|
erpnext/accounts/doctype/bank_statement_settings_item/bank_statement_settings_item.py
|
17
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018, sathishpy@gmail.com and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class BankStatementSettingsItem(Document):
pass
|
mattwthompson/mdtraj
|
refs/heads/master
|
tests/test_neighbors.py
|
7
|
import numpy as np
import mdtraj as md
from mdtraj.testing import eq
random = np.random.RandomState(0)
def compute_neighbors_reference(traj, cutoff, query_indices, haystack_indices=None):
if haystack_indices is None:
haystack_indices = range(traj.n_atoms)
# explicitly enumerate the pairs of query-haystack indices we need to
# check
pairs = np.array([(q, i) for i in haystack_indices for q in query_indices if i != q])
dists = md.compute_distances(traj, pairs)
# some of the haystack might be within cutoff of more than one of the
# query atoms, so we need unique
reference = [np.unique(pairs[dists[i] < cutoff, 1]) for i in range(traj.n_frames)]
return reference
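# A tiny worked case of the pair/unique trick above (illustrative): with
# query atom 0 and haystack atoms {1, 2}, masking the pair list by distance
# and taking np.unique of the haystack column yields the neighbor ids.
def _demo_pair_unique():
    pairs = np.array([(0, 1), (0, 2)])
    dists = np.array([[0.5, 3.0]])  # one frame, one distance per pair
    return np.unique(pairs[dists[0] < 1.0, 1])  # array([1])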
def test_compute_neighbors_1():
n_frames = 2
n_atoms = 20
cutoff = 2
xyz = random.randn(n_frames, n_atoms, 3)
traj = md.Trajectory(xyz=xyz, topology=None)
query_indices = [0, 1]
value = md.compute_neighbors(traj, cutoff, query_indices)
reference = compute_neighbors_reference(traj, cutoff, query_indices)
for i in range(n_frames):
eq(value[i], reference[i])
def test_compute_neighbors_2(get_fn):
traj = md.load(get_fn('4ZUO.pdb'))
query_indices = traj.top.select('residue 1')
cutoff = 1.0
value = md.compute_neighbors(traj, cutoff, query_indices)
reference = compute_neighbors_reference(traj, cutoff, query_indices)
for i in range(traj.n_frames):
eq(value[i], reference[i])
def test_compute_neighbors_3(get_fn):
traj = md.load(get_fn('test_good.nc'), top=get_fn('test.parm7'))
query_indices = traj.top.select('residue 1')
cutoff = 1.0
value = md.compute_neighbors(traj, cutoff, query_indices)
reference = compute_neighbors_reference(traj, cutoff, query_indices)
for i in range(traj.n_frames):
eq(value[i], reference[i])
|
openstack/nova
|
refs/heads/master
|
nova/scheduler/filters/isolated_hosts_filter.py
|
7
|
# Copyright (c) 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import nova.conf
from nova.scheduler import filters
CONF = nova.conf.CONF
class IsolatedHostsFilter(filters.BaseHostFilter):
"""Keep specified images to selected hosts."""
# The configuration values do not change within a request
run_filter_once_per_request = True
RUN_ON_REBUILD = True
def host_passes(self, host_state, spec_obj):
"""Result Matrix with 'restrict_isolated_hosts_to_isolated_images' set
to True::
| | isolated_image | non_isolated_image
| -------------+----------------+-------------------
| iso_host | True | False
| non_iso_host | False | True
Result Matrix with 'restrict_isolated_hosts_to_isolated_images' set
to False::
| | isolated_image | non_isolated_image
| -------------+----------------+-------------------
| iso_host | True | True
| non_iso_host | False | True
"""
# If the configuration does not list any hosts, the filter will always
# return True, assuming a configuration error, so letting all hosts
# through.
isolated_hosts = CONF.filter_scheduler.isolated_hosts
isolated_images = CONF.filter_scheduler.isolated_images
restrict_isolated_hosts_to_isolated_images = (
CONF.filter_scheduler.restrict_isolated_hosts_to_isolated_images)
if not isolated_images:
# As there are no images to match, return True if the filter is
# not restrictive otherwise return False if the host is in the
# isolation list.
return ((not restrict_isolated_hosts_to_isolated_images) or
(host_state.host not in isolated_hosts))
# Check to see if the image id is set since volume-backed instances
# can be created without an imageRef in the server create request.
image_ref = spec_obj.image.id \
if spec_obj.image and 'id' in spec_obj.image else None
image_isolated = image_ref in isolated_images
host_isolated = host_state.host in isolated_hosts
if restrict_isolated_hosts_to_isolated_images:
return (image_isolated == host_isolated)
else:
return (not image_isolated) or host_isolated
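# Illustrative sketch (not part of the filter): the predicate above reduced
# to a pure function, reproducing the two result matrices in the docstring.
def _demo_host_passes(image_isolated, host_isolated, restrict=True):
    if restrict:
        return image_isolated == host_isolated
    return (not image_isolated) or host_isolated
# With restrict=True only matching pairs pass; with restrict=False the only
# failing case is an isolated image on a non-isolated host.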
|
davidharrigan/django
|
refs/heads/master
|
tests/urlpatterns_reverse/included_urls.py
|
452
|
from django.conf.urls import url
from .views import empty_view
urlpatterns = [
url(r'^$', empty_view, name="inner-nothing"),
url(r'^extra/(?P<extra>\w+)/$', empty_view, name="inner-extra"),
url(r'^(?P<one>[0-9]+)|(?P<two>[0-9]+)/$', empty_view, name="inner-disjunction"),
]
|
popazerty/e2_sh4
|
refs/heads/master
|
lib/python/Components/FileList.py
|
6
|
import os
import re
from MenuList import MenuList
from Components.Harddisk import harddiskmanager
from Tools.Directories import SCOPE_ACTIVE_SKIN, resolveFilename, fileExists, pathExists
from enigma import RT_HALIGN_LEFT, eListboxPythonMultiContent, \
eServiceReference, eServiceCenter, gFont
from Tools.LoadPixmap import LoadPixmap
EXTENSIONS = {
"m4a": "music",
"mp2": "music",
"mp3": "music",
"wav": "music",
"ogg": "music",
"wma": "music",
"flac": "music",
"jpg": "picture",
"jpeg": "picture",
"png": "picture",
"bmp": "picture",
"ts": "movie",
"avi": "movie",
"divx": "movie",
"m4v": "movie",
"mpg": "movie",
"mpeg": "movie",
"mkv": "movie",
"mp4": "movie",
"mov": "movie",
"m2ts": "movie",
"3gp": "movie",
"3g2": "movie",
"asf": "movie",
"wmv": "movie",
}
def FileEntryComponent(name, absolute = None, isDir = False):
res = [(absolute, isDir), (eListboxPythonMultiContent.TYPE_TEXT, 35, 1, 470, 20, 0, RT_HALIGN_LEFT, name)]
if isDir:
png = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "extensions/directory.png"))
else:
extension = name.split('.')
extension = extension[-1].lower()
		if extension in EXTENSIONS:
png = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "extensions/" + EXTENSIONS[extension] + ".png"))
else:
png = None
if png is not None:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 10, 2, 20, 20, png))
return res
class FileList(MenuList):
def __init__(self, directory, showDirectories = True, showFiles = True, showMountpoints = True, matchingPattern = None, useServiceRef = False, inhibitDirs = False, inhibitMounts = False, isTop = False, enableWrapAround = False, additionalExtensions = None):
MenuList.__init__(self, list, enableWrapAround, eListboxPythonMultiContent)
self.additional_extensions = additionalExtensions
self.mountpoints = []
self.current_directory = None
self.current_mountpoint = None
self.useServiceRef = useServiceRef
self.showDirectories = showDirectories
self.showMountpoints = showMountpoints
self.showFiles = showFiles
self.isTop = isTop
# example: matching .nfi and .ts files: "^.*\.(nfi|ts)"
if matchingPattern:
self.matchingPattern = re.compile(matchingPattern)
else:
self.matchingPattern = None
self.inhibitDirs = inhibitDirs or []
self.inhibitMounts = inhibitMounts or []
self.refreshMountpoints()
self.changeDir(directory)
self.l.setFont(0, gFont("Regular", 18))
self.l.setItemHeight(23)
self.serviceHandler = eServiceCenter.getInstance()
def refreshMountpoints(self):
self.mountpoints = [os.path.join(p.mountpoint, "") for p in harddiskmanager.getMountedPartitions()]
self.mountpoints.sort(reverse = True)
def getMountpoint(self, file):
file = os.path.join(os.path.realpath(file), "")
for m in self.mountpoints:
if file.startswith(m):
return m
return False
def getMountpointLink(self, file):
if os.path.realpath(file) == file:
return self.getMountpoint(file)
else:
if file[-1] == "/":
file = file[:-1]
mp = self.getMountpoint(file)
last = file
file = os.path.dirname(file)
while last != "/" and mp == self.getMountpoint(file):
last = file
file = os.path.dirname(file)
return os.path.join(last, "")
def getSelection(self):
if self.l.getCurrentSelection() is None:
return None
return self.l.getCurrentSelection()[0]
def getCurrentEvent(self):
l = self.l.getCurrentSelection()
		if not l or l[0][1]:
return None
else:
return self.serviceHandler.info(l[0][0]).getEvent(l[0][0])
def getFileList(self):
return self.list
def inParentDirs(self, dir, parents):
dir = os.path.realpath(dir)
for p in parents:
if dir.startswith(p):
return True
return False
def changeDir(self, directory, select = None):
self.list = []
# if we are just entering from the list of mount points:
if self.current_directory is None:
if directory and self.showMountpoints:
self.current_mountpoint = self.getMountpointLink(directory)
else:
self.current_mountpoint = None
self.current_directory = directory
directories = []
files = []
if directory is None and self.showMountpoints: # present available mountpoints
for p in harddiskmanager.getMountedPartitions():
path = os.path.join(p.mountpoint, "")
if path not in self.inhibitMounts and not self.inParentDirs(path, self.inhibitDirs):
self.list.append(FileEntryComponent(name = p.description, absolute = path, isDir = True))
files = [ ]
directories = [ ]
elif directory is None:
files = [ ]
directories = [ ]
elif self.useServiceRef:
# we should not use the 'eServiceReference(string)' constructor, because it doesn't allow ':' in the directoryname
root = eServiceReference(2, 0, directory)
if self.additional_extensions:
root.setName(self.additional_extensions)
serviceHandler = eServiceCenter.getInstance()
list = serviceHandler.list(root)
while 1:
s = list.getNext()
if not s.valid():
del list
break
if s.flags & s.mustDescent:
directories.append(s.getPath())
else:
files.append(s)
directories.sort()
files.sort()
else:
if fileExists(directory):
try:
files = os.listdir(directory)
except:
files = []
files.sort()
tmpfiles = files[:]
for x in tmpfiles:
if os.path.isdir(directory + x):
directories.append(directory + x + "/")
files.remove(x)
if directory is not None and self.showDirectories and not self.isTop:
if directory == self.current_mountpoint and self.showMountpoints:
self.list.append(FileEntryComponent(name = "<" +_("List of storage devices") + ">", absolute = None, isDir = True))
elif (directory != "/") and not (self.inhibitMounts and self.getMountpoint(directory) in self.inhibitMounts):
self.list.append(FileEntryComponent(name = "<" +_("Parent directory") + ">", absolute = '/'.join(directory.split('/')[:-2]) + '/', isDir = True))
if self.showDirectories:
for x in directories:
if not (self.inhibitMounts and self.getMountpoint(x) in self.inhibitMounts) and not self.inParentDirs(x, self.inhibitDirs):
name = x.split('/')[-2]
self.list.append(FileEntryComponent(name = name, absolute = x, isDir = True))
if self.showFiles:
for x in files:
if self.useServiceRef:
path = x.getPath()
name = path.split('/')[-1]
else:
path = directory + x
name = x
if (self.matchingPattern is None) or self.matchingPattern.search(path):
self.list.append(FileEntryComponent(name = name, absolute = x , isDir = False))
if self.showMountpoints and len(self.list) == 0:
self.list.append(FileEntryComponent(name = _("nothing connected"), absolute = None, isDir = False))
self.l.setList(self.list)
if select is not None:
i = 0
self.moveToIndex(0)
for x in self.list:
p = x[0][0]
if isinstance(p, eServiceReference):
p = p.getPath()
if p == select:
self.moveToIndex(i)
i += 1
def getCurrentDirectory(self):
return self.current_directory
def canDescent(self):
if self.getSelection() is None:
return False
return self.getSelection()[1]
def descent(self):
if self.getSelection() is None:
return
self.changeDir(self.getSelection()[0], select = self.current_directory)
def getFilename(self):
if self.getSelection() is None:
return None
x = self.getSelection()[0]
if isinstance(x, eServiceReference):
x = x.getPath()
return x
def getServiceRef(self):
if self.getSelection() is None:
return None
x = self.getSelection()[0]
if isinstance(x, eServiceReference):
return x
return None
def execBegin(self):
harddiskmanager.on_partition_list_change.append(self.partitionListChanged)
def execEnd(self):
harddiskmanager.on_partition_list_change.remove(self.partitionListChanged)
def refresh(self):
self.changeDir(self.current_directory, self.getFilename())
def partitionListChanged(self, action, device):
self.refreshMountpoints()
if self.current_directory is None:
self.refresh()
def MultiFileSelectEntryComponent(name, absolute = None, isDir = False, selected = False):
res = [(absolute, isDir, selected, name), (eListboxPythonMultiContent.TYPE_TEXT, 55, 1, 470, 20, 0, RT_HALIGN_LEFT, name)]
if isDir:
png = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "extensions/directory.png"))
else:
extension = name.split('.')
extension = extension[-1].lower()
		if extension in EXTENSIONS:
png = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "extensions/" + EXTENSIONS[extension] + ".png"))
else:
png = None
if png is not None:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 30, 2, 20, 20, png))
if not name.startswith('<'):
if selected:
icon = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "icons/lock_on.png"))
else:
icon = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "icons/lock_off.png"))
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 2, 0, 25, 25, icon))
return res
class MultiFileSelectList(FileList):
def __init__(self, preselectedFiles, directory, showMountpoints = False, matchingPattern = None, showDirectories = True, showFiles = True, useServiceRef = False, inhibitDirs = False, inhibitMounts = False, isTop = False, enableWrapAround = False, additionalExtensions = None):
if preselectedFiles is None:
self.selectedFiles = []
else:
self.selectedFiles = preselectedFiles
FileList.__init__(self, directory, showMountpoints = showMountpoints, matchingPattern = matchingPattern, showDirectories = showDirectories, showFiles = showFiles, useServiceRef = useServiceRef, inhibitDirs = inhibitDirs, inhibitMounts = inhibitMounts, isTop = isTop, enableWrapAround = enableWrapAround, additionalExtensions = additionalExtensions)
self.changeDir(directory)
self.l.setItemHeight(25)
self.l.setFont(0, gFont("Regular", 20))
self.onSelectionChanged = [ ]
def selectionChanged(self):
for f in self.onSelectionChanged:
f()
def changeSelectionState(self):
if len(self.list):
idx = self.l.getCurrentSelectionIndex()
newList = self.list[:]
x = self.list[idx]
if not x[0][3].startswith('<'):
if x[0][1] is True:
realPathname = x[0][0]
else:
realPathname = self.current_directory + x[0][0]
if x[0][2]:
SelectState = False
try:
self.selectedFiles.remove(realPathname)
except:
try:
self.selectedFiles.remove(os.path.normpath(realPathname))
except:
print "Couldn't remove:", realPathname
else:
SelectState = True
if (realPathname not in self.selectedFiles) and (os.path.normpath(realPathname) not in self.selectedFiles):
self.selectedFiles.append(realPathname)
newList[idx] = MultiFileSelectEntryComponent(name = x[0][3], absolute = x[0][0], isDir = x[0][1], selected = SelectState)
self.list = newList
self.l.setList(self.list)
def getSelectedList(self):
selectedFilesExist = []
for x in self.selectedFiles:
if pathExists(x):
selectedFilesExist.append(x)
return selectedFilesExist
def changeDir(self, directory, select = None):
self.list = []
# if we are just entering from the list of mount points:
if self.current_directory is None:
if directory and self.showMountpoints:
self.current_mountpoint = self.getMountpointLink(directory)
else:
self.current_mountpoint = None
self.current_directory = directory
directories = []
files = []
if directory is None and self.showMountpoints: # present available mountpoints
for p in harddiskmanager.getMountedPartitions():
path = os.path.join(p.mountpoint, "")
if path not in self.inhibitMounts and not self.inParentDirs(path, self.inhibitDirs):
self.list.append(MultiFileSelectEntryComponent(name = p.description, absolute = path, isDir = True))
files = [ ]
directories = [ ]
elif directory is None:
files = [ ]
directories = [ ]
elif self.useServiceRef:
root = eServiceReference("2:0:1:0:0:0:0:0:0:0:" + directory)
if self.additional_extensions:
root.setName(self.additional_extensions)
serviceHandler = eServiceCenter.getInstance()
list = serviceHandler.list(root)
while 1:
s = list.getNext()
if not s.valid():
del list
break
if s.flags & s.mustDescent:
directories.append(s.getPath())
else:
files.append(s)
directories.sort()
files.sort()
else:
if fileExists(directory):
try:
files = os.listdir(directory)
except:
files = []
files.sort()
tmpfiles = files[:]
for x in tmpfiles:
if os.path.isdir(directory + x):
directories.append(directory + x + "/")
files.remove(x)
if directory is not None and self.showDirectories and not self.isTop:
if directory == self.current_mountpoint and self.showMountpoints:
self.list.append(MultiFileSelectEntryComponent(name = "<" +_("List of storage devices") + ">", absolute = None, isDir = True))
elif (directory != "/") and not (self.inhibitMounts and self.getMountpoint(directory) in self.inhibitMounts):
self.list.append(MultiFileSelectEntryComponent(name = "<" +_("Parent directory") + ">", absolute = '/'.join(directory.split('/')[:-2]) + '/', isDir = True))
if self.showDirectories:
for x in directories:
if not (self.inhibitMounts and self.getMountpoint(x) in self.inhibitMounts) and not self.inParentDirs(x, self.inhibitDirs):
name = x.split('/')[-2]
alreadySelected = (x in self.selectedFiles) or (os.path.normpath(x) in self.selectedFiles)
self.list.append(MultiFileSelectEntryComponent(name = name, absolute = x, isDir = True, selected = alreadySelected))
if self.showFiles:
for x in files:
if self.useServiceRef:
path = x.getPath()
name = path.split('/')[-1]
else:
path = directory + x
name = x
if (self.matchingPattern is None) or self.matchingPattern.search(path):
alreadySelected = False
for entry in self.selectedFiles:
if os.path.basename(entry) == x:
alreadySelected = True
self.list.append(MultiFileSelectEntryComponent(name = name, absolute = x , isDir = False, selected = alreadySelected))
self.l.setList(self.list)
if select is not None:
i = 0
self.moveToIndex(0)
for x in self.list:
p = x[0][0]
if isinstance(p, eServiceReference):
p = p.getPath()
if p == select:
self.moveToIndex(i)
i += 1
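# Illustrative sketch (not part of the original component): why
# refreshMountpoints() sorts the mountpoint list in reverse. When one
# mountpoint is a prefix of another, the deeper one sorts later, so reverse
# order checks it first and the startswith() scan in getMountpoint() picks
# the most specific match.
def _demoDeepestMountpoint(path, mountpoints):
	for m in sorted(mountpoints, reverse=True):
		if path.startswith(m):
			return m
	return False
# _demoDeepestMountpoint("/media/hdd/movie/x.ts", ["/", "/media/hdd/"])
# returns "/media/hdd/".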
|
AndKe/MAVProxy
|
refs/heads/master
|
MAVProxy/modules/mavproxy_misseditor/button_renderer.py
|
10
|
#!/usr/bin/env python
'''
Custom button render class for use inside a wx.grid
(ported from http://forums.wxwidgets.org/viewtopic.php?t=14403 )
Michael Day
June 2014
'''
from ..lib.wx_loader import wx
from wx import grid
import copy
class ButtonRenderer(wx.grid.PyGridCellRenderer):
def __init__(self,label,width=75,height=25):
self.label = label
self.width = width
self.height = height
wx.grid.PyGridCellRenderer.__init__(self)
def Clone(self):
return copy.copy(self)
def GetBestSize(self, grid, dc, row, col):
return wx.Size(self.width,self.height)
def Draw(self, grid, attr, dc, rect, row, col, isSelected):
dc.SetBrush(wx.Brush(wx.SystemSettings.GetColour(
wx.SYS_COLOUR_BTNFACE)))
dc.DrawRectangle( rect.GetX(), rect.GetY(), rect.GetWidth(), rect.GetHeight())
#draw a shaded rectangle to emulate a button
#(taken from src/generic/renderg.cpp)
strength = 1
pen1 = wx.Pen(wx.WHITE, strength)
dc.SetPen(pen1)
dc.DrawLine(rect.GetLeft()+strength-1, rect.GetTop()+strength-1,
rect.GetLeft()+strength-1, rect.GetBottom()-strength+1)
dc.DrawLine(rect.GetLeft()+strength-1, rect.GetTop()+strength-1,
rect.GetRight()-strength, rect.GetTop()+strength-1)
pen2 = wx.Pen(wx.BLACK, strength)
dc.SetPen(pen2)
        dc.DrawLine(rect.GetRight()-strength, rect.GetTop(),
                    rect.GetRight()-strength, rect.GetBottom())
        dc.DrawLine(rect.GetLeft(), rect.GetBottom(),
                    rect.GetRight() - strength, rect.GetBottom())
'''
#another drawing routine
#(taken from src/generic/renderg.cpp)
#Could port this later for animating the button when clicking
const wxCoord x = rect.x,
y = rect.y,
w = rect.width,
h = rect.height;
dc.SetBrush(*wxTRANSPARENT_BRUSH);
wxPen pen(*wxBLACK, 1);
dc.SetPen(pen);
dc.DrawLine( x+w, y, x+w, y+h ); // right (outer)
dc.DrawRectangle( x, y+h, w+1, 1 ); // bottom (outer)
pen.SetColour(wxColour(wxT("DARK GREY")));
dc.SetPen(pen);
dc.DrawLine( x+w-1, y, x+w-1, y+h ); // right (inner)
dc.DrawRectangle( x+1, y+h-1, w-2, 1 ); // bottom (inner)
pen.SetColour(*wxWHITE);
dc.SetPen(pen);
dc.DrawRectangle( x, y, w, 1 ); // top (outer)
dc.DrawRectangle( x, y, 1, h ); // left (outer)
dc.DrawLine( x, y+h-1, x+1, y+h-1 );
dc.DrawLine( x+w-1, y, x+w-1, y+1 );
'''
# draw the button-label
dc.SetBackgroundMode(wx.TRANSPARENT )
dc.SetTextForeground(attr.GetTextColour() )
dc.SetFont( attr.GetFont() )
#dc.DrawLabel( wxT("Delete"), rect,
dc.DrawLabel( self.label, rect,
wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_CENTER_HORIZONTAL)
|
pigeonflight/strider-plone
|
refs/heads/master
|
docker/appengine/lib/django-1.3/django/contrib/gis/maps/google/__init__.py
|
603
|
"""
This module houses the GoogleMap object, used for generating
the needed javascript to embed Google Maps in a Web page.
Google(R) is a registered trademark of Google, Inc. of Mountain View, California.
Example:
* In the view:
return render_to_response('template.html', {'google' : GoogleMap(key="abcdefg")})
* In the template:
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
{{ google.xhtml }}
<head>
<title>Google Maps via GeoDjango</title>
{{ google.style }}
{{ google.scripts }}
</head>
{{ google.body }}
<div id="{{ google.dom_id }}" style="width:600px;height:400px;"></div>
</body>
</html>
Note: If you want to be more explicit in your templates, the following are
equivalent:
{{ google.body }} => "<body {{ google.onload }} {{ google.onunload }}>"
{{ google.xhtml }} => "<html xmlns="http://www.w3.org/1999/xhtml" {{ google.xmlns }}>"
{{ google.style }} => "<style>{{ google.vml_css }}</style>"
Explanation:
- The `xhtml` property provides the correct XML namespace needed for
Google Maps to operate in IE using XHTML. Google Maps on IE uses
VML to draw polylines. Returns, by default:
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:v="urn:schemas-microsoft-com:vml">
- The `style` property provides the correct style tag for the CSS
properties required by Google Maps on IE:
<style type="text/css">v\:* {behavior:url(#default#VML);}</style>
- The `scripts` property provides the necessary <script> tags for
including the Google Maps javascript, as well as including the
generated javascript.
- The `body` property provides the correct attributes for the
body tag to load the generated javascript. By default, returns:
<body onload="gmap_load()" onunload="GUnload()">
- The `dom_id` property returns the DOM id for the map. Defaults to "map".
The following attributes may be set or customized in your local settings:
* GOOGLE_MAPS_API_KEY: String of your Google Maps API key. These are tied to
to a domain. May be obtained from http://www.google.com/apis/maps/
* GOOGLE_MAPS_API_VERSION (optional): Defaults to using "2.x"
* GOOGLE_MAPS_URL (optional): Must have a substitution ('%s') for the API
version.
"""
from django.contrib.gis.maps.google.gmap import GoogleMap, GoogleMapSet
from django.contrib.gis.maps.google.overlays import GEvent, GIcon, GMarker, GPolygon, GPolyline
from django.contrib.gis.maps.google.zoom import GoogleZoom
|
tuenti/Diamond
|
refs/heads/master
|
src/collectors/celerymon/celerymon.py
|
60
|
# coding=utf-8
"""
Collects simple task stats out of a running celerymon process
#### Dependencies
* celerymon connected to celery broker
Example config file CelerymonCollector.conf
```
enabled=True
host=celerymon.example.com
port=16379
```
"""
import diamond.collector
import urllib2
import time
try:
import json
except ImportError:
import simplejson as json
class CelerymonCollector(diamond.collector.Collector):
LastCollectTime = None
def get_default_config_help(self):
config_help = super(CelerymonCollector, self).get_default_config_help()
config_help.update({
'path': 'celerymon',
'host': 'A single hostname to get metrics from',
'port': 'The celerymon port'
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(CelerymonCollector, self).get_default_config()
config.update({
'host': 'localhost',
'port': '8989'
})
return config
def collect(self):
"""
Overrides the Collector.collect method
"""
# Handle collection time intervals correctly
CollectTime = int(time.time())
time_delta = float(self.config['interval'])
if not self.LastCollectTime:
self.LastCollectTime = CollectTime - time_delta
host = self.config['host']
port = self.config['port']
celerymon_url = "http://%s:%s/api/task/?since=%i" % (
host, port, self.LastCollectTime)
response = urllib2.urlopen(celerymon_url)
body = response.read()
celery_data = json.loads(body)
results = dict()
total_messages = 0
for data in celery_data:
name = str(data[1]['name'])
if name not in results:
results[name] = dict()
state = str(data[1]['state'])
if state not in results[name]:
results[name][state] = 1
else:
results[name][state] += 1
total_messages += 1
# Publish Metric
self.publish('total_messages', total_messages)
for result in results:
for state in results[result]:
metric_value = results[result][state]
metric_name = "%s.%s" % (result, state)
self.publish(metric_name, metric_value)
self.LastCollectTime = CollectTime
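# Illustrative sketch (not part of the collector): the aggregation loop in
# collect() above, applied to a hypothetical two-event celerymon payload.
def _demo_aggregate(celery_data):
    results = {}
    for data in celery_data:
        name, state = str(data[1]['name']), str(data[1]['state'])
        results.setdefault(name, {}).setdefault(state, 0)
        results[name][state] += 1
    return results
# _demo_aggregate([(1, {'name': 'tasks.add', 'state': 'SUCCESS'}),
#                  (2, {'name': 'tasks.add', 'state': 'SUCCESS'})])
# returns {'tasks.add': {'SUCCESS': 2}}, published as 'tasks.add.SUCCESS'.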
|
mbanderson/headtracker
|
refs/heads/master
|
src/data/sampler/recorder.py
|
1
|
#!/usr/bin/env python
"""Collects magnetometer/IMU data measurements sent over serial."""
import serial
import sys
import numpy as np
import argparse
import time
# Scale count values by initialized sensor sensitivities.
def mag_count_to_gauss(x):
COUNT_RES = 0.92
mG = x * COUNT_RES
return mG / 1000.0
def gyro_count_to_dps(x):
FS_SEL_SCALE = 131.0
return x / FS_SEL_SCALE
def accel_count_to_g(x):
AFS_SEL_SCALE = 16384.0
return x / AFS_SEL_SCALE
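# Worked examples of the scale factors above (values follow directly from
# the constants): 1000 mag counts * 0.92 mG/count = 920 mG = 0.92 gauss;
# 131 gyro counts / 131.0 = 1.0 deg/s; 16384 accel counts / 16384.0 = 1 g.
assert abs(mag_count_to_gauss(1000) - 0.92) < 1e-9
assert gyro_count_to_dps(131) == 1.0
assert accel_count_to_g(16384) == 1.0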
def main(args):
"""Samples measurements from sampler.ino."""
# Serial set-up
port = args.port
baud = args.baud
ser = serial.Serial(port, baud)
# Log data to .txt file
with open(args.filename, 'w') as f:
try:
print "Recording started"
ctr = 0
while True:
# Grab serial data
valstr = ser.readline().rstrip()
# Ignore initial diagnostics
if 'connection' in valstr:
continue
vals = valstr.split('\t')
if '' in vals or len(vals) < 10:
continue
try:
vals = [int(val) for val in vals]
except ValueError:
continue
# Parse into t, mag, imu data, convert from counts
t = np.array([vals[0]])
magvals = mag_count_to_gauss(np.array(vals[1:4]))
accvals = accel_count_to_g(np.array(vals[4:7]))
gyrovals = gyro_count_to_dps(np.array(vals[7:10]))
# Repackage for file write
to_str = lambda x: [str(v) for v in x]
t = to_str(t)
magvals = to_str(magvals)
accvals = to_str(accvals)
gyrovals = to_str(gyrovals)
write_str = '\t'.join(t + magvals + accvals + gyrovals) + '\n'
f.write(write_str)
ctr += 1
if ctr % 1000 == 0:
print '.',
except serial.SerialTimeoutException as e:
print e
sys.exit(0)
except KeyboardInterrupt:
print "Recording ended"
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--filename', help='output file name',
type=str)
    parser.add_argument('--port', help='serial port name',
                        type=str, default='COM3')
    parser.add_argument('--baud', help='serial baud rate',
                        type=int, default=38400)
args = parser.parse_args()
if args.filename:
        if '.txt' not in args.filename:
args.filename += '.txt'
else:
tformat = '%m%d%y%H%M%S'
tstr = time.strftime(tformat)
args.filename = 'recorder_' + tstr + '.txt'
main(args)
|
aperigault/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/vmware/vmware_target_canonical_facts.py
|
2
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Joseph Callen <jcallen () csc.com>
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vmware_target_canonical_facts
short_description: Return canonical (NAA) from an ESXi host system
description:
- This module can be used to gather facts about the canonical (NAA) name from an ESXi host, based on a SCSI target ID.
version_added: "2.0"
author:
- Joseph Callen (@jcpowermac)
- Abhijeet Kasurde (@Akasurde)
notes:
- Tested on vSphere 5.5 and 6.5
requirements:
- PyVmomi installed
options:
target_id:
description:
    - The target ID, based on the order of the SCSI devices.
    - From version 2.6 onwards, this parameter is optional.
required: False
type: int
cluster_name:
description:
- Name of the cluster.
    - Facts about all SCSI devices for all host systems in the given cluster are returned.
- This parameter is required, if C(esxi_hostname) is not provided.
version_added: 2.6
type: str
esxi_hostname:
description:
- Name of the ESXi host system.
    - Facts about all SCSI devices for the given ESXi host system are returned.
- This parameter is required, if C(cluster_name) is not provided.
version_added: 2.6
type: str
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
- name: Get canonical name of a particular target on a particular ESXi host system
vmware_target_canonical_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
target_id: 7
esxi_hostname: esxi_hostname
delegate_to: localhost
- name: Get canonical names of all targets on a particular ESXi host system
vmware_target_canonical_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
esxi_hostname: '{{ esxi_hostname }}'
delegate_to: localhost
- name: Get canonical names for all ESXi hosts in a particular cluster
vmware_target_canonical_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
cluster_name: '{{ cluster_name }}'
delegate_to: localhost
'''
RETURN = r"""
canonical:
description: metadata about SCSI Target device
    returned: if host system and target id are given
type: str
sample: "mpx.vmhba0:C0:T0:L0"
scsi_tgt_facts:
description: metadata about all SCSI Target devices
returned: if host system or cluster is given
type: dict
sample: {
"DC0_C0_H0": {
"scsilun_canonical": {
"key-vim.host.ScsiDisk-0000000000766d686261303a303a30": "mpx.vmhba0:C0:T0:L0",
"key-vim.host.ScsiLun-0005000000766d686261313a303a30": "mpx.vmhba1:C0:T0:L0"
},
"target_lun_uuid": {
"0": "key-vim.host.ScsiDisk-0000000000766d686261303a303a30"
}
},
"DC0_C0_H1": {
"scsilun_canonical": {
"key-vim.host.ScsiDisk-0000000000766d686261303a303a30": "mpx.vmhba0:C0:T0:L0",
"key-vim.host.ScsiLun-0005000000766d686261313a303a30": "mpx.vmhba1:C0:T0:L0"
},
"target_lun_uuid": {
"0": "key-vim.host.ScsiDisk-0000000000766d686261303a303a30"
}
},
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import PyVmomi, vmware_argument_spec
class ScsiTargetFactsManager(PyVmomi):
def __init__(self, module):
super(ScsiTargetFactsManager, self).__init__(module)
cluster_name = self.module.params.get('cluster_name')
self.esxi_hostname = self.module.params.get('esxi_hostname')
self.hosts = self.get_all_host_objs(cluster_name=cluster_name, esxi_host_name=self.esxi_hostname)
def gather_scsi_device_facts(self):
"""
Function to gather facts about SCSI target devices
"""
        scsi_tgt_facts = {}
        target_id = self.module.params['target_id']
        for host in self.hosts:
            # Use fresh per-host dicts so one host's facts do not leak into another's
            target_lun_uuid = {}
            scsilun_canonical = {}
# Associate the scsiLun key with the canonicalName (NAA)
for scsilun in host.config.storageDevice.scsiLun:
scsilun_canonical[scsilun.key] = scsilun.canonicalName
# Associate target number with LUN uuid
for target in host.config.storageDevice.scsiTopology.adapter[0].target:
for lun in target.lun:
target_lun_uuid[target.target] = lun.scsiLun
scsi_tgt_facts[host.name] = dict(scsilun_canonical=scsilun_canonical,
target_lun_uuid=target_lun_uuid)
if target_id is not None and self.esxi_hostname is not None:
            canonical = ''
            # Guard the host lookup first so a missing host cannot raise KeyError
            if self.esxi_hostname in scsi_tgt_facts:
                temp_lun_data = scsi_tgt_facts[self.esxi_hostname]['target_lun_uuid']
                if target_id in temp_lun_data:
                    temp_scsi_data = scsi_tgt_facts[self.esxi_hostname]['scsilun_canonical']
                    temp_target = temp_lun_data[target_id]
                    canonical = temp_scsi_data[temp_target]
self.module.exit_json(changed=False, canonical=canonical)
self.module.exit_json(changed=False, scsi_tgt_facts=scsi_tgt_facts)
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(
dict(
target_id=dict(required=False, type='int'),
cluster_name=dict(type='str', required=False),
esxi_hostname=dict(type='str', required=False),
)
)
module = AnsibleModule(
argument_spec=argument_spec,
required_one_of=[
['cluster_name', 'esxi_hostname'],
],
supports_check_mode=True,
)
scsi_tgt_manager = ScsiTargetFactsManager(module)
scsi_tgt_manager.gather_scsi_device_facts()
if __name__ == '__main__':
main()
|
hackultura/procult
|
refs/heads/master
|
procult/authentication/managers.py
|
1
|
# -*- coding: utf-8 -*-
import operator
from functools import reduce
from django.db import models
from django.db.models import Q
from django.contrib.auth.models import BaseUserManager
class UserManager(BaseUserManager):
def create_user(self, email, password=None, **kwargs):
if not email:
raise ValueError("Usuario precisa de um email valido.")
if not kwargs.get('name'):
raise ValueError("Usuario precisa do nome completo.")
user = self.model(
email=self.normalize_email(email),
name=kwargs.get('name')
)
user.set_password(password)
user.save()
return user
def create_superuser(self, email, password, **kwargs):
user = self.create_user(email, password, **kwargs)
user.is_admin = True
user.save()
return user
def admins(self):
return self.exclude(is_admin=False)
class EntesManager(models.Manager):
def get_queryset(self):
return super(EntesManager, self).get_queryset().exclude(
cpf__exact='', cnpj__exact='')
def with_cpf(self):
return self.exclude(cpf__exact='')
def with_cnpj(self):
return self.exclude(cnpj__exact='')
def is_created(self, **kwargs):
query = []
if 'cpf' in kwargs:
query.append(Q(cpf=kwargs.pop('cpf', '')))
if 'cnpj' in kwargs:
query.append(Q(cnpj=kwargs.pop('cnpj', '')))
if 'ceac' in kwargs:
query.append(Q(ceac=kwargs.pop('ceac', '')))
return self.filter(reduce(operator.or_, query)).exists()
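# Illustrative use (model name assumed): if a model `Ente` uses EntesManager, then
#   Ente.objects.is_created(cpf='12345678901', ceac='XYZ')
# filters with Q(cpf='12345678901') | Q(ceac='XYZ') and returns True on any match.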
|
diorcety/intellij-community
|
refs/heads/master
|
python/testData/refactoring/changeSignature/duplicateParam.before.py
|
83
|
def some_funct<caret>ion_name(argument_1, opt1 = None, opt2 = None, **extra_info):
print locals()
class A(object):
def someMethod(self):
self.url = some_function_name(0, extra1=True, extra2=2)
|
h3llrais3r/SickRage
|
refs/heads/master
|
lib/ndg/httpsclient/urllib2_build_opener.py
|
66
|
"""urllib2 style build opener integrates with HTTPSConnection class from this
package.
"""
__author__ = "P J Kershaw"
__date__ = "21/12/10"
__copyright__ = "(C) 2011 Science and Technology Facilities Council"
__license__ = "BSD - see LICENSE file in top-level directory"
__contact__ = "Philip.Kershaw@stfc.ac.uk"
__revision__ = '$Id$'
import logging
from urllib2 import (ProxyHandler, UnknownHandler, HTTPDefaultErrorHandler,
FTPHandler, FileHandler, HTTPErrorProcessor, HTTPHandler,
OpenerDirector, HTTPRedirectHandler)
from ndg.httpsclient.https import HTTPSContextHandler
log = logging.getLogger(__name__)
# Copied from urllib2 with modifications for ssl
def build_opener(*handlers, **kw):
"""Create an opener object from a list of handlers.
The opener will use several default handlers, including support
for HTTP and FTP.
If any of the handlers passed as arguments are subclasses of the
default handlers, the default handlers will not be used.
"""
import types
def isclass(obj):
return isinstance(obj, types.ClassType) or hasattr(obj, "__bases__")
opener = OpenerDirector()
default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
HTTPDefaultErrorHandler, HTTPRedirectHandler,
FTPHandler, FileHandler, HTTPErrorProcessor]
check_classes = list(default_classes)
check_classes.append(HTTPSContextHandler)
skip = []
for klass in check_classes:
for check in handlers:
if isclass(check):
if issubclass(check, klass):
skip.append(klass)
elif isinstance(check, klass):
skip.append(klass)
for klass in default_classes:
if klass not in skip:
opener.add_handler(klass())
# Pick up SSL context from keyword settings
ssl_context = kw.get('ssl_context')
# Add the HTTPS handler with ssl_context
if HTTPSContextHandler not in skip:
opener.add_handler(HTTPSContextHandler(ssl_context))
for h in handlers:
if isclass(h):
h = h()
opener.add_handler(h)
return opener
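# A minimal usage sketch (the SSL.Context import reflects ndg.httpsclient's
# pyOpenSSL dependency and is an assumption, not taken from this file):
#
#   from OpenSSL import SSL
#   ctx = SSL.Context(SSL.SSLv23_METHOD)
#   opener = build_opener(ssl_context=ctx)
#   response = opener.open('https://example.org/')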
|
motion2015/edx-platform
|
refs/heads/master
|
lms/djangoapps/open_ended_grading/open_ended_notifications.py
|
66
|
import datetime
import json
import logging
from django.conf import settings
from xmodule.open_ended_grading_classes import peer_grading_service
from xmodule.open_ended_grading_classes.controller_query_service import ControllerQueryService
from courseware.access import has_access
from edxmako.shortcuts import render_to_string
from student.models import unique_id_for_user
from util.cache import cache
from .staff_grading_service import StaffGradingService
log = logging.getLogger(__name__)
NOTIFICATION_CACHE_TIME = 300
KEY_PREFIX = "open_ended_"
NOTIFICATION_TYPES = (
('student_needs_to_peer_grade', 'peer_grading', 'Peer Grading'),
('staff_needs_to_grade', 'staff_grading', 'Staff Grading'),
('new_student_grading_to_view', 'open_ended_problems', 'Problems you have submitted'),
('flagged_submissions_exist', 'open_ended_flagged_problems', 'Flagged Submissions')
)
def staff_grading_notifications(course, user):
staff_gs = StaffGradingService(settings.OPEN_ENDED_GRADING_INTERFACE)
pending_grading = False
img_path = ""
course_id = course.id
student_id = unique_id_for_user(user)
notification_type = "staff"
success, notification_dict = get_value_from_cache(student_id, course_id, notification_type)
if success:
return notification_dict
try:
notifications = json.loads(staff_gs.get_notifications(course_id))
if notifications['success']:
if notifications['staff_needs_to_grade']:
pending_grading = True
    except Exception:
        # Non-catastrophic error, so no real action
        notifications = {}
        # This is a dev_facing_error
log.info(
"Problem with getting notifications from staff grading service for course {0} user {1}.".format(course_id,
student_id))
if pending_grading:
img_path = "/static/images/grading_notification.png"
notification_dict = {'pending_grading': pending_grading, 'img_path': img_path, 'response': notifications}
set_value_in_cache(student_id, course_id, notification_type, notification_dict)
return notification_dict
def peer_grading_notifications(course, user):
peer_gs = peer_grading_service.PeerGradingService(settings.OPEN_ENDED_GRADING_INTERFACE, render_to_string)
pending_grading = False
img_path = ""
course_id = course.id
student_id = unique_id_for_user(user)
notification_type = "peer"
success, notification_dict = get_value_from_cache(student_id, course_id, notification_type)
if success:
return notification_dict
try:
notifications = json.loads(peer_gs.get_notifications(course_id, student_id))
if notifications['success']:
if notifications['student_needs_to_peer_grade']:
pending_grading = True
    except Exception:
        # Non-catastrophic error, so no real action
        notifications = {}
        # This is a dev_facing_error
log.info(
"Problem with getting notifications from peer grading service for course {0} user {1}.".format(course_id,
student_id))
if pending_grading:
img_path = "/static/images/grading_notification.png"
notification_dict = {'pending_grading': pending_grading, 'img_path': img_path, 'response': notifications}
set_value_in_cache(student_id, course_id, notification_type, notification_dict)
return notification_dict
def combined_notifications(course, user):
"""
Show notifications to a given user for a given course. Get notifications from the cache if possible,
or from the grading controller server if not.
@param course: The course object for which we are getting notifications
@param user: The user object for which we are getting notifications
@return: A dictionary with boolean pending_grading (true if there is pending grading), img_path (for notification
image), and response (actual response from grading controller server).
"""
#Set up return values so that we can return them for error cases
pending_grading = False
img_path = ""
notifications = {}
notification_dict = {'pending_grading': pending_grading, 'img_path': img_path, 'response': notifications}
#We don't want to show anonymous users anything.
if not user.is_authenticated():
return notification_dict
#Initialize controller query service using our mock system
controller_qs = ControllerQueryService(settings.OPEN_ENDED_GRADING_INTERFACE, render_to_string)
student_id = unique_id_for_user(user)
user_is_staff = bool(has_access(user, 'staff', course))
course_id = course.id
notification_type = "combined"
#See if we have a stored value in the cache
success, notification_dict = get_value_from_cache(student_id, course_id, notification_type)
if success:
return notification_dict
#Get the time of the last login of the user
last_login = user.last_login
last_time_viewed = last_login - datetime.timedelta(seconds=(NOTIFICATION_CACHE_TIME + 60))
try:
#Get the notifications from the grading controller
notifications = controller_qs.check_combined_notifications(
course.id,
student_id,
user_is_staff,
last_time_viewed,
)
if notifications.get('success'):
if (notifications.get('staff_needs_to_grade') or
notifications.get('student_needs_to_peer_grade')):
pending_grading = True
    except Exception:
        # Non-catastrophic error, so no real action
        # This is a dev_facing_error
log.exception(
u"Problem with getting notifications from controller query service for course {0} user {1}.".format(
course_id, student_id))
if pending_grading:
img_path = "/static/images/grading_notification.png"
notification_dict = {'pending_grading': pending_grading, 'img_path': img_path, 'response': notifications}
#Store the notifications in the cache
set_value_in_cache(student_id, course_id, notification_type, notification_dict)
return notification_dict
def get_value_from_cache(student_id, course_id, notification_type):
key_name = create_key_name(student_id, course_id, notification_type)
success, value = _get_value_from_cache(key_name)
return success, value
def set_value_in_cache(student_id, course_id, notification_type, value):
key_name = create_key_name(student_id, course_id, notification_type)
_set_value_in_cache(key_name, value)
def create_key_name(student_id, course_id, notification_type):
key_name = u"{prefix}{type}_{course}_{student}".format(
prefix=KEY_PREFIX,
type=notification_type,
course=course_id,
student=student_id,
)
return key_name
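# For example (ids illustrative), create_key_name('student1', 'org/course/run', 'combined')
# returns u"open_ended_combined_org/course/run_student1".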
def _get_value_from_cache(key_name):
value = cache.get(key_name)
success = False
if value is None:
return success, value
try:
value = json.loads(value)
success = True
    except Exception:
pass
return success, value
def _set_value_in_cache(key_name, value):
cache.set(key_name, json.dumps(value), NOTIFICATION_CACHE_TIME)
|
chromium2014/src
|
refs/heads/master
|
tools/cr/cr/commands/install.py
|
113
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module for the install command."""
import cr
class InstallCommand(cr.Command):
"""The implementation of the install command.
This first uses Builder.Build to bring the target up to date, and then
installs it using Installer.Reinstall.
  The builder registers its command line arguments, and you can use those to
select which builder is used. Selecting the skip builder
(using --builder=skip) bypasses the build stage.
"""
def __init__(self):
super(InstallCommand, self).__init__()
self.help = 'Install a binary'
def AddArguments(self, subparsers):
parser = super(InstallCommand, self).AddArguments(subparsers)
cr.Builder.AddArguments(self, parser)
cr.Installer.AddArguments(self, parser)
cr.Target.AddArguments(self, parser, allow_multiple=True)
self.ConsumeArgs(parser, 'the installer')
return parser
def Run(self):
targets = cr.Target.GetTargets()
if not cr.Installer.Skipping():
cr.Builder.Build(targets, [])
cr.Installer.Reinstall(targets, cr.context.remains)
|
hzruandd/WebSocket-for-Python
|
refs/heads/master
|
test/test_websocket.py
|
6
|
# -*- coding: utf-8 -*-
import unittest
import os
import socket
import struct
from mock import MagicMock, call, patch
from ws4py.framing import Frame, \
OPCODE_CONTINUATION, OPCODE_TEXT, \
OPCODE_BINARY, OPCODE_CLOSE, OPCODE_PING, OPCODE_PONG
from ws4py.websocket import WebSocket
from ws4py.messaging import TextMessage, BinaryMessage, \
CloseControlMessage, PingControlMessage, PongControlMessage
from ws4py.compat import *
class WSWebSocketTest(unittest.TestCase):
def test_get_ipv4_addresses(self):
m = MagicMock()
m.getsockname.return_value = ('127.0.0.1', 52300)
m.getpeername.return_value = ('127.0.0.1', 4800)
ws = WebSocket(sock=m)
self.assertEqual(ws.local_address, ('127.0.0.1', 52300))
self.assertEqual(ws.peer_address, ('127.0.0.1', 4800))
def test_get_ipv6_addresses(self):
m = MagicMock()
m.getsockname.return_value = ('127.0.0.1', 52300, None, None)
m.getpeername.return_value = ('127.0.0.1', 4800, None, None)
ws = WebSocket(sock=m)
self.assertEqual(ws.local_address, ('127.0.0.1', 52300))
self.assertEqual(ws.peer_address, ('127.0.0.1', 4800))
def test_get_underlying_connection(self):
m = MagicMock()
ws = WebSocket(sock=m)
self.assertEqual(ws.connection, m)
def test_close_connection(self):
m = MagicMock()
ws = WebSocket(sock=m)
ws.close_connection()
m.shutdown.assert_called_once_with(socket.SHUT_RDWR)
m.close.assert_called_once_with()
self.assertIsNone(ws.connection)
m = MagicMock()
m.close = MagicMock(side_effect=RuntimeError)
ws = WebSocket(sock=m)
ws.close_connection()
self.assertIsNone(ws.connection)
def test_terminate_with_closing(self):
m = MagicMock()
s = MagicMock()
c = MagicMock()
cc = MagicMock()
ws = WebSocket(sock=m)
with patch.multiple(ws, closed=c, close_connection=cc):
ws.stream = s
ws.stream.closing = CloseControlMessage(code=1000, reason='test closing')
ws.terminate()
self.assertTrue(ws.client_terminated)
self.assertTrue(ws.server_terminated)
self.assertTrue(ws.terminated)
c.assert_called_once_with(1000, b'test closing')
cc.assert_called_once_with()
self.assertIsNone(ws.stream)
self.assertIsNone(ws.environ)
def test_terminate_without_closing(self):
m = MagicMock()
s = MagicMock()
c = MagicMock()
cc = MagicMock()
ws = WebSocket(sock=m)
with patch.multiple(ws, closed=c, close_connection=cc):
ws.stream = s
ws.stream.closing = None
ws.terminate()
self.assertTrue(ws.client_terminated)
self.assertTrue(ws.server_terminated)
self.assertTrue(ws.terminated)
c.assert_called_once_with(1006, "Going away")
cc.assert_called_once_with()
self.assertIsNone(ws.stream)
self.assertIsNone(ws.environ)
def test_cannot_process_more_data_when_stream_is_terminated(self):
m = MagicMock()
ws = WebSocket(sock=m)
ws.client_terminated = True
ws.server_terminated = True
self.assertFalse(ws.once())
def test_socket_error_on_receiving_more_bytes(self):
m = MagicMock()
m.recv = MagicMock(side_effect=socket.error)
ws = WebSocket(sock=m)
self.assertFalse(ws.once())
def test_no_bytes_were_read(self):
m = MagicMock()
m.recv.return_value = b''
ws = WebSocket(sock=m)
self.assertFalse(ws.once())
def test_send_bytes_without_masking(self):
tm = TextMessage(b'hello world').single()
m = MagicMock()
ws = WebSocket(sock=m)
ws.send(b'hello world')
m.sendall.assert_called_once_with(tm)
def test_send_bytes_with_masking(self):
tm = TextMessage(b'hello world').single(mask=True)
m = MagicMock()
ws = WebSocket(sock=m)
ws.stream = MagicMock()
ws.stream.always_mask = True
ws.stream.text_message.return_value.single.return_value = tm
ws.send(b'hello world')
m.sendall.assert_called_once_with(tm)
def test_send_message_without_masking(self):
tm = TextMessage(b'hello world')
m = MagicMock()
ws = WebSocket(sock=m)
ws.send(tm)
m.sendall.assert_called_once_with(tm.single())
def test_send_generator_without_masking(self):
tm0 = b'hello'
tm1 = b'world'
def datasource():
yield tm0
yield tm1
gen = datasource()
m = MagicMock()
ws = WebSocket(sock=m)
ws.send(gen)
self.assertEqual(m.sendall.call_count, 2)
self.assertRaises(StopIteration, next, gen)
def test_sending_unknown_datetype(self):
m = MagicMock()
ws = WebSocket(sock=m)
self.assertRaises(ValueError, ws.send, 123)
def test_closing_message_received(self):
s = MagicMock()
m = MagicMock()
c = MagicMock()
ws = WebSocket(sock=m)
with patch.multiple(ws, close=c):
ws.stream = s
ws.stream.closing = CloseControlMessage(code=1000, reason='test closing')
ws.process(b'unused for this test')
c.assert_called_once_with(1000, b'test closing')
def test_sending_ping(self):
tm = PingControlMessage("hello").single(mask=False)
m = MagicMock()
ws = WebSocket(sock=m)
ws.ping("hello")
m.sendall.assert_called_once_with(tm)
if __name__ == '__main__':
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for testcase in [WSWebSocketTest]:
tests = loader.loadTestsFromTestCase(testcase)
suite.addTests(tests)
unittest.TextTestRunner(verbosity=2).run(suite)
|
rotofly/odoo
|
refs/heads/master
|
addons/purchase_double_validation/__init__.py
|
441
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import purchase_double_validation_installer
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
HyperBaton/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/nxos/nxos_file_copy.py
|
10
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_file_copy
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Copy a file to a remote NXOS device.
description:
- This module supports two different workflows for copying a file
to flash (or bootflash) on NXOS devices. Files can either be (1) pushed
from the Ansible controller to the device or (2) pulled from a remote SCP
file server to the device. File copies are initiated from the NXOS
device to the remote SCP server. This module only supports the
use of connection C(network_cli) or C(Cli) transport with connection C(local).
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
- Rewritten as a plugin by (@mikewiebe)
notes:
- Tested against NXOS 7.0(3)I2(5), 7.0(3)I4(6), 7.0(3)I5(3),
7.0(3)I6(1), 7.0(3)I7(3), 6.0(2)A8(8), 7.0(3)F3(4), 7.3(0)D1(1),
8.3(0), 9.2, 9.3
- When pushing files (file_pull is False) to the NXOS device,
feature scp-server must be enabled.
- When pulling files (file_pull is True) to the NXOS device,
feature scp-server is not required.
- When pulling files (file_pull is True) to the NXOS device,
no transfer will take place if the file is already present.
- Check mode will tell you if the file would be copied.
requirements:
- paramiko (required when file_pull is False)
- SCPClient (required when file_pull is False)
- pexpect (required when file_pull is True)
options:
local_file:
description:
- When (file_pull is False) this is the path to the local file on the Ansible controller.
The local directory must exist.
- When (file_pull is True) this is the target file name on the NXOS device.
remote_file:
description:
- When (file_pull is False) this is the remote file path on the NXOS device.
If omitted, the name of the local file will be used.
The remote directory must exist.
- When (file_pull is True) this is the full path to the file on the remote SCP
server to be copied to the NXOS device.
file_system:
description:
- The remote file system on the nxos device. If omitted,
devices that support a I(file_system) parameter will use
their default values.
default: "bootflash:"
connect_ssh_port:
description:
- SSH server port used for file transfer.
default: 22
version_added: "2.5"
file_pull:
description:
- When (False) file is copied from the Ansible controller to the NXOS device.
- When (True) file is copied from a remote SCP server to the NXOS device.
In this mode, the file copy is initiated from the NXOS device.
- If the file is already present on the device it will be overwritten and
therefore the operation is NOT idempotent.
type: bool
default: False
version_added: "2.7"
file_pull_compact:
description:
- When file_pull is True, this is used to compact nxos image files.
This option can only be used with nxos image files.
- When (file_pull is False), this is not used.
type: bool
default: False
version_added: "2.9"
file_pull_kstack:
description:
- When file_pull is True, this can be used to speed up file copies when
the nxos running image supports the use-kstack option.
- When (file_pull is False), this is not used.
type: bool
default: False
version_added: "2.9"
local_file_directory:
description:
- When (file_pull is True) file is copied from a remote SCP server to the NXOS device,
and written to this directory on the NXOS device. If the directory does not exist, it
will be created under the file_system. This is an optional parameter.
- When (file_pull is False), this is not used.
version_added: "2.7"
file_pull_timeout:
description:
- Use this parameter to set timeout in seconds, when transferring
large files or when the network is slow.
- When (file_pull is False), this is not used.
default: 300
version_added: "2.7"
remote_scp_server:
description:
- The remote scp server address when file_pull is True.
This is required if file_pull is True.
- When (file_pull is False), this is not used.
version_added: "2.7"
remote_scp_server_user:
description:
- The remote scp server username when file_pull is True.
This is required if file_pull is True.
- When (file_pull is False), this is not used.
version_added: "2.7"
remote_scp_server_password:
description:
- The remote scp server password when file_pull is True.
This is required if file_pull is True.
- When (file_pull is False), this is not used.
version_added: "2.7"
vrf:
description:
    - The VRF used to pull the file. Useful when no vrf management is defined.
default: "management"
version_added: "2.9"
'''
EXAMPLES = '''
# File copy from ansible controller to nxos device
- name: "copy from server to device"
nxos_file_copy:
local_file: "./test_file.txt"
remote_file: "test_file.txt"
# Initiate file copy from the nxos device to transfer file from an SCP server back to the nxos device
- name: "initiate file copy from device"
nxos_file_copy:
file_pull: True
local_file: "xyz"
local_file_directory: "dir1/dir2/dir3"
remote_file: "/mydir/abc"
remote_scp_server: "192.168.0.1"
remote_scp_server_user: "myUser"
remote_scp_server_password: "myPassword"
vrf: "management"
'''
RETURN = '''
transfer_status:
description: Whether a file was transferred to the nxos device.
returned: success
type: str
sample: 'Sent'
local_file:
description: The path of the local file.
returned: success
type: str
sample: '/path/to/local/file'
remote_file:
description: The path of the remote file.
returned: success
type: str
sample: '/path/to/remote/file'
remote_scp_server:
description: The name of the scp server when file_pull is True.
returned: success
type: str
sample: 'fileserver.example.com'
changed:
description: Indicates whether or not the file was copied.
returned: success
type: bool
sample: true
'''
|
willingc/oh-mainline
|
refs/heads/master
|
vendor/packages/twisted/twisted/test/test_stateful.py
|
18
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test cases for twisted.protocols.stateful
"""
from twisted.test import test_protocols
from twisted.protocols.stateful import StatefulProtocol
from struct import pack, unpack, calcsize
class MyInt32StringReceiver(StatefulProtocol):
"""
A stateful Int32StringReceiver.
"""
MAX_LENGTH = 99999
structFormat = "!I"
prefixLength = calcsize(structFormat)
def getInitialState(self):
return self._getHeader, 4
def lengthLimitExceeded(self, length):
self.transport.loseConnection()
def _getHeader(self, msg):
length, = unpack("!i", msg)
if length > self.MAX_LENGTH:
self.lengthLimitExceeded(length)
return
return self._getString, length
def _getString(self, msg):
self.stringReceived(msg)
return self._getHeader, 4
def stringReceived(self, msg):
"""
Override this.
"""
raise NotImplementedError
def sendString(self, data):
"""
Send an int32-prefixed string to the other end of the connection.
"""
self.transport.write(pack(self.structFormat, len(data)) + data)
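# StatefulProtocol drives the receiver above with (callback, byte_count) pairs:
# each callback consumes exactly byte_count bytes and returns the next pair, so
# the 4-byte "!I" header reader hands off to a body reader of the decoded length.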
class TestInt32(MyInt32StringReceiver):
def connectionMade(self):
self.received = []
def stringReceived(self, s):
self.received.append(s)
MAX_LENGTH = 50
closed = 0
def connectionLost(self, reason):
self.closed = 1
class Int32TestCase(test_protocols.Int32TestCase):
protocol = TestInt32
def test_bigReceive(self):
r = self.getProtocol()
big = ""
for s in self.strings * 4:
big += pack("!i", len(s)) + s
r.dataReceived(big)
self.assertEquals(r.received, self.strings * 4)
|
DougFirErickson/qgisSpaceSyntaxToolkit
|
refs/heads/master
|
esstoolkit/external/pyqtgraph/graphicsItems/ButtonItem.py
|
52
|
from ..Qt import QtGui, QtCore
from .GraphicsObject import GraphicsObject
__all__ = ['ButtonItem']
class ButtonItem(GraphicsObject):
"""Button graphicsItem displaying an image."""
clicked = QtCore.Signal(object)
def __init__(self, imageFile=None, width=None, parentItem=None, pixmap=None):
self.enabled = True
GraphicsObject.__init__(self)
if imageFile is not None:
self.setImageFile(imageFile)
elif pixmap is not None:
self.setPixmap(pixmap)
if width is not None:
s = float(width) / self.pixmap.width()
self.scale(s, s)
if parentItem is not None:
self.setParentItem(parentItem)
self.setOpacity(0.7)
def setImageFile(self, imageFile):
self.setPixmap(QtGui.QPixmap(imageFile))
def setPixmap(self, pixmap):
self.pixmap = pixmap
self.update()
def mouseClickEvent(self, ev):
if self.enabled:
self.clicked.emit(self)
def mouseHoverEvent(self, ev):
if not self.enabled:
return
if ev.isEnter():
self.setOpacity(1.0)
else:
self.setOpacity(0.7)
def disable(self):
self.enabled = False
self.setOpacity(0.4)
def enable(self):
self.enabled = True
self.setOpacity(0.7)
def paint(self, p, *args):
p.setRenderHint(p.Antialiasing)
p.drawPixmap(0, 0, self.pixmap)
def boundingRect(self):
return QtCore.QRectF(self.pixmap.rect())
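# Minimal usage sketch (parent item, slot, and icon path are illustrative):
#
#   btn = ButtonItem(imageFile='icons/home.png', width=14, parentItem=plot_item)
#   btn.clicked.connect(on_button_clicked)   # the slot receives the ButtonItem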
|
dimtruck/magnum
|
refs/heads/master
|
magnum/common/pythonk8sclient/swagger_client/models/v1_replication_controller.py
|
5
|
# coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
class V1ReplicationController(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
Swagger model
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'kind': 'str',
'api_version': 'str',
'metadata': 'V1ObjectMeta',
'spec': 'V1ReplicationControllerSpec',
'status': 'V1ReplicationControllerStatus'
}
self.attribute_map = {
'kind': 'kind',
'api_version': 'apiVersion',
'metadata': 'metadata',
'spec': 'spec',
'status': 'status'
}
self._kind = None
self._api_version = None
self._metadata = None
self._spec = None
self._status = None
@property
def kind(self):
"""
Gets the kind of this V1ReplicationController.
kind of object, in CamelCase; cannot be updated; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#types-kinds
:return: The kind of this V1ReplicationController.
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""
Sets the kind of this V1ReplicationController.
kind of object, in CamelCase; cannot be updated; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#types-kinds
:param kind: The kind of this V1ReplicationController.
:type: str
"""
self._kind = kind
@property
def api_version(self):
"""
Gets the api_version of this V1ReplicationController.
version of the schema the object should have; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#resources
:return: The api_version of this V1ReplicationController.
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""
Sets the api_version of this V1ReplicationController.
version of the schema the object should have; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#resources
:param api_version: The api_version of this V1ReplicationController.
:type: str
"""
self._api_version = api_version
@property
def metadata(self):
"""
Gets the metadata of this V1ReplicationController.
standard object metadata; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#metadata
:return: The metadata of this V1ReplicationController.
:rtype: V1ObjectMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""
Sets the metadata of this V1ReplicationController.
standard object metadata; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#metadata
:param metadata: The metadata of this V1ReplicationController.
:type: V1ObjectMeta
"""
self._metadata = metadata
@property
def spec(self):
"""
Gets the spec of this V1ReplicationController.
specification of the desired behavior of the replication controller; http://releases.k8s.io/v1.0.4/docs/api-conventions.md#spec-and-status
:return: The spec of this V1ReplicationController.
:rtype: V1ReplicationControllerSpec
"""
return self._spec
@spec.setter
def spec(self, spec):
"""
Sets the spec of this V1ReplicationController.
specification of the desired behavior of the replication controller; http://releases.k8s.io/v1.0.4/docs/api-conventions.md#spec-and-status
:param spec: The spec of this V1ReplicationController.
:type: V1ReplicationControllerSpec
"""
self._spec = spec
@property
def status(self):
"""
Gets the status of this V1ReplicationController.
most recently observed status of the replication controller; populated by the system, read-only; http://releases.k8s.io/v1.0.4/docs/api-conventions.md#spec-and-status
:return: The status of this V1ReplicationController.
:rtype: V1ReplicationControllerStatus
"""
return self._status
@status.setter
def status(self, status):
"""
Sets the status of this V1ReplicationController.
most recently observed status of the replication controller; populated by the system, read-only; http://releases.k8s.io/v1.0.4/docs/api-conventions.md#spec-and-status
:param status: The status of this V1ReplicationController.
:type: V1ReplicationControllerStatus
"""
self._status = status
def to_dict(self):
"""
Return model properties dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
else:
result[attr] = value
return result
def to_str(self):
"""
Return model properties str
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
|
edxzw/edx-platform
|
refs/heads/master
|
lms/djangoapps/oauth2_handler/tests.py
|
57
|
# pylint: disable=missing-docstring
from django.core.cache import cache
from django.test.utils import override_settings
from lang_pref import LANGUAGE_KEY
from xmodule.modulestore.tests.factories import (check_mongo_calls, CourseFactory)
from student.models import anonymous_id_for_user
from student.models import UserProfile
from student.roles import (CourseInstructorRole, CourseStaffRole, GlobalStaff,
OrgInstructorRole, OrgStaffRole)
from student.tests.factories import UserFactory, UserProfileFactory
from openedx.core.djangoapps.user_api.preferences.api import set_user_preference
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
# Will also run default tests for IDTokens and UserInfo
from oauth2_provider.tests import IDTokenTestCase, UserInfoTestCase
class BaseTestMixin(ModuleStoreTestCase):
profile = None
def setUp(self):
super(BaseTestMixin, self).setUp()
self.course_key = CourseFactory.create(emit_signals=True).id
self.course_id = unicode(self.course_key)
self.user_factory = UserFactory
self.set_user(self.make_user())
def set_user(self, user):
super(BaseTestMixin, self).set_user(user)
self.profile = UserProfileFactory(user=self.user)
class IDTokenTest(BaseTestMixin, IDTokenTestCase):
def setUp(self):
super(IDTokenTest, self).setUp()
# CourseAccessHandler uses the application cache.
cache.clear()
def test_sub_claim(self):
scopes, claims = self.get_id_token_values('openid')
self.assertIn('openid', scopes)
sub = claims['sub']
expected_sub = anonymous_id_for_user(self.user, None)
self.assertEqual(sub, expected_sub)
def test_user_name_claim(self):
_scopes, claims = self.get_id_token_values('openid profile')
claim_name = claims['name']
user_profile = UserProfile.objects.get(user=self.user)
user_name = user_profile.name
self.assertEqual(claim_name, user_name)
@override_settings(LANGUAGE_CODE='en')
def test_user_without_locale_claim(self):
scopes, claims = self.get_id_token_values('openid profile')
self.assertIn('profile', scopes)
self.assertEqual(claims['locale'], 'en')
def test_user_with_locale_claim(self):
language = 'en'
set_user_preference(self.user, LANGUAGE_KEY, language)
scopes, claims = self.get_id_token_values('openid profile')
self.assertIn('profile', scopes)
locale = claims['locale']
self.assertEqual(language, locale)
def test_no_special_course_access(self):
with check_mongo_calls(0):
scopes, claims = self.get_id_token_values('openid course_instructor course_staff')
self.assertNotIn('course_staff', scopes)
self.assertNotIn('staff_courses', claims)
self.assertNotIn('course_instructor', scopes)
self.assertNotIn('instructor_courses', claims)
def test_course_staff_courses(self):
CourseStaffRole(self.course_key).add_users(self.user)
with check_mongo_calls(0):
scopes, claims = self.get_id_token_values('openid course_staff')
self.assertIn('course_staff', scopes)
self.assertNotIn('staff_courses', claims) # should not return courses in id_token
def test_course_instructor_courses(self):
with check_mongo_calls(0):
CourseInstructorRole(self.course_key).add_users(self.user)
scopes, claims = self.get_id_token_values('openid course_instructor')
self.assertIn('course_instructor', scopes)
self.assertNotIn('instructor_courses', claims) # should not return courses in id_token
def test_course_staff_courses_with_claims(self):
CourseStaffRole(self.course_key).add_users(self.user)
course_id = unicode(self.course_key)
nonexistent_course_id = 'some/other/course'
claims = {
'staff_courses': {
'values': [course_id, nonexistent_course_id],
'essential': True,
}
}
with check_mongo_calls(0):
scopes, claims = self.get_id_token_values(scope='openid course_staff', claims=claims)
self.assertIn('course_staff', scopes)
self.assertIn('staff_courses', claims)
self.assertEqual(len(claims['staff_courses']), 1)
self.assertIn(course_id, claims['staff_courses'])
self.assertNotIn(nonexistent_course_id, claims['staff_courses'])
def test_permissions_scope(self):
scopes, claims = self.get_id_token_values('openid profile permissions')
self.assertIn('permissions', scopes)
self.assertFalse(claims['administrator'])
self.user.is_staff = True
self.user.save()
_scopes, claims = self.get_id_token_values('openid profile permissions')
self.assertTrue(claims['administrator'])
class UserInfoTest(BaseTestMixin, UserInfoTestCase):
def setUp(self):
super(UserInfoTest, self).setUp()
# create another course in the DB that only global staff have access to
CourseFactory.create(emit_signals=True)
def token_for_scope(self, scope):
full_scope = 'openid %s' % scope
self.set_access_token_scope(full_scope)
token = self.access_token.token # pylint: disable=no-member
return full_scope, token
def get_with_scope(self, scope):
scope, token = self.token_for_scope(scope)
result, claims = self.get_userinfo(token, scope)
self.assertEqual(result.status_code, 200)
return claims
def get_with_claim_value(self, scope, claim, values):
_full_scope, token = self.token_for_scope(scope)
result, claims = self.get_userinfo(
token,
claims={claim: {'values': values}}
)
self.assertEqual(result.status_code, 200)
return claims
def _assert_role_using_scope(self, scope, claim, assert_one_course=True):
with check_mongo_calls(0):
claims = self.get_with_scope(scope)
self.assertEqual(len(claims), 2)
courses = claims[claim]
self.assertIn(self.course_id, courses)
if assert_one_course:
self.assertEqual(len(courses), 1)
def test_request_global_staff_courses_using_scope(self):
GlobalStaff().add_users(self.user)
self._assert_role_using_scope('course_staff', 'staff_courses', assert_one_course=False)
def test_request_org_staff_courses_using_scope(self):
OrgStaffRole(self.course_key.org).add_users(self.user)
self._assert_role_using_scope('course_staff', 'staff_courses')
def test_request_org_instructor_courses_using_scope(self):
OrgInstructorRole(self.course_key.org).add_users(self.user)
self._assert_role_using_scope('course_instructor', 'instructor_courses')
def test_request_staff_courses_using_scope(self):
CourseStaffRole(self.course_key).add_users(self.user)
self._assert_role_using_scope('course_staff', 'staff_courses')
def test_request_instructor_courses_using_scope(self):
CourseInstructorRole(self.course_key).add_users(self.user)
self._assert_role_using_scope('course_instructor', 'instructor_courses')
def _assert_role_using_claim(self, scope, claim):
values = [self.course_id, 'some_invalid_course']
with check_mongo_calls(0):
claims = self.get_with_claim_value(scope, claim, values)
self.assertEqual(len(claims), 2)
courses = claims[claim]
self.assertIn(self.course_id, courses)
self.assertEqual(len(courses), 1)
def test_request_global_staff_courses_with_claims(self):
GlobalStaff().add_users(self.user)
self._assert_role_using_claim('course_staff', 'staff_courses')
def test_request_org_staff_courses_with_claims(self):
OrgStaffRole(self.course_key.org).add_users(self.user)
self._assert_role_using_claim('course_staff', 'staff_courses')
def test_request_org_instructor_courses_with_claims(self):
OrgInstructorRole(self.course_key.org).add_users(self.user)
self._assert_role_using_claim('course_instructor', 'instructor_courses')
def test_request_staff_courses_with_claims(self):
CourseStaffRole(self.course_key).add_users(self.user)
self._assert_role_using_claim('course_staff', 'staff_courses')
def test_request_instructor_courses_with_claims(self):
CourseInstructorRole(self.course_key).add_users(self.user)
self._assert_role_using_claim('course_instructor', 'instructor_courses')
def test_permissions_scope(self):
claims = self.get_with_scope('permissions')
self.assertIn('administrator', claims)
self.assertFalse(claims['administrator'])
self.user.is_staff = True
self.user.save()
claims = self.get_with_scope('permissions')
self.assertTrue(claims['administrator'])
|
thesuperzapper/tensorflow
|
refs/heads/master
|
tensorflow/contrib/sparsemax/python/ops/sparsemax_loss.py
|
103
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Sparsemax Loss op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.util import loader
from tensorflow.python.platform import resource_loader
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
__all__ = ["sparsemax_loss"]
def sparsemax_loss(logits, sparsemax, labels, name=None):
"""Computes sparsemax loss function [1].
[1]: https://arxiv.org/abs/1602.02068
Args:
logits: A `Tensor`. Must be one of the following types: `half`, `float32`,
`float64`.
sparsemax: A `Tensor`. Must have the same type as `logits`.
labels: A `Tensor`. Must have the same type as `logits`.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `logits`.
"""
with ops.name_scope(name, "sparsemax_loss",
[logits, sparsemax, labels]) as name:
logits = ops.convert_to_tensor(logits, name="logits")
sparsemax = ops.convert_to_tensor(sparsemax, name="sparsemax")
labels = ops.convert_to_tensor(labels, name="labels")
shifted_logits = logits - \
math_ops.reduce_mean(logits, axis=1)[:, array_ops.newaxis]
# sum over support
support = math_ops.cast(sparsemax > 0, sparsemax.dtype)
sum_s = support * sparsemax * (shifted_logits - 0.5 * sparsemax)
# - z_k + ||q||^2
q_part = labels * (0.5 * labels - shifted_logits)
return math_ops.reduce_sum(sum_s + q_part, axis=1)
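# A usage sketch (pairs this loss with the companion sparsemax() op from the
# same contrib package; variable names are illustrative):
#
#   probs = sparsemax(logits)                     # projection onto the simplex
#   loss = sparsemax_loss(logits, probs, labels)  # per-example loss, shape [batch]
#   total_loss = tf.reduce_mean(loss)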
|
shakamunyi/tensorflow
|
refs/heads/master
|
tensorflow/contrib/linear_optimizer/python/sdca_estimator.py
|
27
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Linear Estimators."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib import layers
from tensorflow.contrib.framework.python.ops import variables as contrib_variables
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import head as head_lib
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.contrib.linear_optimizer.python import sdca_optimizer
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.training import session_run_hook
def _head_is_valid_for_sdca(head):
"""Returns true if the provided head is supported by SDCAOptimizer."""
# pylint: disable=protected-access
return isinstance(head, head_lib._BinaryLogisticHead) or isinstance(
head, head_lib._BinarySvmHead) or isinstance(head,
head_lib._RegressionHead)
# pylint: enable=protected-access
def _add_bias_column(feature_columns, columns_to_tensors, bias_variable,
columns_to_variables):
"""Adds a fake bias feature column filled with all 1s."""
# TODO(b/31008490): Move definition to a common constants place.
bias_column_name = "tf_virtual_bias_column"
  # Compare by equality, not identity: `is` on strings only works by accident
  if any(col.name == bias_column_name for col in feature_columns):
raise ValueError("%s is a reserved column name." % bias_column_name)
if not feature_columns:
raise ValueError("feature_columns can't be empty.")
# Loop through input tensors until we can figure out batch_size.
batch_size = None
for column in columns_to_tensors.values():
if isinstance(column, tuple):
column = column[0]
if isinstance(column, sparse_tensor.SparseTensor):
shape = tensor_util.constant_value(column.dense_shape)
if shape is not None:
batch_size = shape[0]
break
else:
batch_size = array_ops.shape(column)[0]
break
if batch_size is None:
raise ValueError("Could not infer batch size from input features.")
bias_column = layers.real_valued_column(bias_column_name)
columns_to_tensors[bias_column] = array_ops.ones(
[batch_size, 1], dtype=dtypes.float32)
columns_to_variables[bias_column] = [bias_variable]
def sdca_model_fn(features, labels, mode, params, config=None):
"""A model_fn for linear models that use the SDCA optimizer.
Args:
features: A dict of `Tensor` keyed by column name.
labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of
dtype `int32` or `int64` with values in the set {0, 1}.
mode: Defines whether this is training, evaluation or prediction.
See `ModeKeys`.
params: A dict of hyperparameters.
The following hyperparameters are expected:
* head: A `Head` instance. Type must be one of `_BinarySvmHead`,
`_RegressionHead` or `_BinaryLogisticHead`.
* feature_columns: An iterable containing all the feature columns used by
the model.
* l1_regularization: Global (across all examples) L1-regularization
parameter.
* l2_regularization: Global (across all examples) L2-regularization
parameter.
* num_loss_partitions: Number of partitions of the global loss function
optimized by `SDCAOptimizer`.
* weight_column_name: A string defining the weight feature column, or
None if there are no weights.
* update_weights_hook: A `SessionRunHook` object or None. Used to update
model weights.
config: `RunConfig` object to configure the runtime settings.
Returns:
A `ModelFnOps` instance.
Raises:
ValueError: If the type of head is not one of `_BinarySvmHead`,
`_RegressionHead` or `_MultiClassHead`.
ValueError: If mode is not any of the `ModeKeys`.
"""
head = params["head"]
feature_columns = params["feature_columns"]
example_id_column = params["example_id_column"]
l1_regularization = params["l1_regularization"]
l2_regularization = params["l2_regularization"]
num_loss_partitions = params["num_loss_partitions"]
weight_column_name = params["weight_column_name"]
update_weights_hook = params.get("update_weights_hook", None)
loss_type = None
if isinstance(head, head_lib._BinarySvmHead): # pylint: disable=protected-access
loss_type = "hinge_loss"
elif isinstance(head, head_lib._BinaryLogisticHead): # pylint: disable=protected-access
loss_type = "logistic_loss"
elif isinstance(head, head_lib._RegressionHead): # pylint: disable=protected-access
loss_type = "squared_loss"
else:
raise ValueError("Unsupported head type: {}".format(type(head)))
assert head.logits_dimension == 1, (
"SDCA only applies to logits_dimension=1.")
# Update num_loss_partitions based on number of workers.
n_loss_partitions = num_loss_partitions or max(1, config.num_worker_replicas)
optimizer = sdca_optimizer.SDCAOptimizer(
example_id_column=example_id_column,
num_loss_partitions=n_loss_partitions,
symmetric_l1_regularization=l1_regularization,
symmetric_l2_regularization=l2_regularization)
parent_scope = "linear"
with variable_scope.variable_op_scope(features.values(),
parent_scope) as scope:
features = features.copy()
features.update(layers.transform_features(features, feature_columns))
logits, columns_to_variables, bias = (
layers.weighted_sum_from_feature_columns(
columns_to_tensors=features,
feature_columns=feature_columns,
num_outputs=1,
scope=scope))
_add_bias_column(feature_columns, features, bias, columns_to_variables)
def _train_op_fn(unused_loss):
global_step = contrib_variables.get_global_step()
sdca_model, train_op = optimizer.get_train_step(
columns_to_variables, weight_column_name, loss_type, features, labels,
global_step)
if update_weights_hook is not None:
update_weights_hook.set_parameters(sdca_model, train_op)
return train_op
model_fn_ops = head.create_model_fn_ops(
features=features,
labels=labels,
mode=mode,
train_op_fn=_train_op_fn,
logits=logits)
if update_weights_hook is not None:
return model_fn_ops._replace(training_chief_hooks=(
model_fn_ops.training_chief_hooks + [update_weights_hook]))
return model_fn_ops
class _SdcaUpdateWeightsHook(session_run_hook.SessionRunHook):
"""SessionRunHook to update and shrink SDCA model weights."""
def __init__(self):
pass
def set_parameters(self, sdca_model, train_op):
self._sdca_model = sdca_model
self._train_op = train_op
def begin(self):
"""Construct the update_weights op.
The op is implicitly added to the default graph.
"""
self._update_op = self._sdca_model.update_weights(self._train_op)
def before_run(self, run_context):
"""Return the update_weights op so that it is executed during this run."""
return session_run_hook.SessionRunArgs(self._update_op)
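  # Returning SessionRunArgs(self._update_op) from before_run above makes the
  # monitored session fetch the update op together with the training fetches
  # on every step, so the weight update/shrink runs alongside the train op.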
class _SDCAEstimator(estimator.Estimator):
"""Base estimator class for linear models using the SDCA optimizer.
This class should not be used directly. Rather, users should call one of the
derived estimators.
"""
def __init__(self,
example_id_column,
feature_columns,
weight_column_name=None,
model_dir=None,
head=None,
l1_regularization=0.0,
l2_regularization=1.0,
num_loss_partitions=None,
config=None,
feature_engineering_fn=None):
"""Construct a `_SDCAEstimator` estimator object.
Args:
example_id_column: A string defining the feature column name representing
example ids. Used to initialize the underlying SDCA optimizer.
feature_columns: An iterable containing all the feature columns used by
the model. All items in the set should be instances of classes derived
from `FeatureColumn`.
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
model_dir: Directory to save model parameters, graph etc. This can also be
used to load checkpoints from the directory into an estimator to
continue training a previously saved model.
head: type of head. Currently, _BinaryLogisticHead and _BinarySvmHead are
supported for classification and _RegressionHead for regression. It
should be a subclass of _SingleHead.
l1_regularization: L1-regularization parameter. Refers to global L1
regularization (across all examples).
l2_regularization: L2-regularization parameter. Refers to global L2
regularization (across all examples).
num_loss_partitions: number of partitions of the (global) loss function
optimized by the underlying optimizer (SDCAOptimizer).
config: `RunConfig` object to configure the runtime settings.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and returns features and
labels which will be fed into the model.
Returns:
A `_SDCAEstimator` estimator.
Raises:
ValueError: if head is not supported by SDCA.
"""
self._feature_columns = tuple(feature_columns or [])
assert self._feature_columns
if not _head_is_valid_for_sdca(head):
raise ValueError(
"head type: {} is not supported. Supported head types: "
"_BinaryLogisticHead, _BinarySvmHead and _RegressionHead.".format(
type(head)))
assert head.logits_dimension == 1
params = {
"head": head,
"feature_columns": feature_columns,
"example_id_column": example_id_column,
"num_loss_partitions": num_loss_partitions,
"l1_regularization": l1_regularization,
"l2_regularization": l2_regularization,
"weight_column_name": weight_column_name,
"update_weights_hook": _SdcaUpdateWeightsHook(),
}
super(_SDCAEstimator, self).__init__(
model_fn=sdca_model_fn,
model_dir=model_dir,
config=config,
params=params,
feature_engineering_fn=feature_engineering_fn)
class SDCALogisticClassifier(_SDCAEstimator):
"""Logistic regression binary classifier using the SDCA optimizer.
Example usage:
```python
sparse_column_a = sparse_column_with_hash_bucket(...)
sparse_column_b = sparse_column_with_hash_bucket(...)
sparse_feature_a_x_sparse_feature_b = crossed_column(...)
classifier = SDCALogisticClassifier(
example_id_column='example_id',
feature_columns=[sparse_column_a, sparse_feature_a_x_sparse_feature_b],
weight_column_name=...,
l2_regularization=...,
num_loss_partitions=...,
)
# Input builders
# returns x, y (where y is the label Tensor with 0/1 values)
def input_fn_{train, eval}:
# returns x (features dict)
def input_fn_test:
...
classifier.fit(input_fn=input_fn_train)
classifier.evaluate(input_fn=input_fn_eval)
# Returns predicted classes.
classifier.predict_classes(input_fn=input_fn_test)
# Returns predicted probabilities.
classifier.predict_proba(input_fn=input_fn_test)
```
The input_fn provided to `fit`, `evaluate` and predict_* methods should return
the following features, otherwise there will be a `KeyError`:
* A feature with `key=example_id_column` whose value is a `Tensor` of dtype
string.
* If `weight_column_name` is not `None`, a feature with
`key=weight_column_name` whose value is a `Tensor`.
* For each `column` in `feature_columns`:
- if `column` is a `SparseColumn`, a feature with `key=column.name` whose
`value` is a `SparseTensor`
- if `column` is a `RealValuedColumn`, a feature with `key=column.name`
whose `value` is a `Tensor`
- if `column` is a `WeightedSparseColumn`, two features: the first with
`key` the id column name, the second with `key` the weight column name.
Both features' `value` must be a `SparseTensor`
"""
def __init__(self,
example_id_column,
feature_columns,
weight_column_name=None,
model_dir=None,
l1_regularization=0.0,
l2_regularization=1.0,
num_loss_partitions=None,
config=None,
feature_engineering_fn=None):
"""Construct a `SDCALogisticClassifier` object.
Args:
example_id_column: A string defining the feature column name representing
example ids. Used to initialize the underlying SDCA optimizer.
feature_columns: An iterable containing all the feature columns used by
the model. All items in the iterable should derive from `FeatureColumn`.
Note that the order of the items is ignored at model construction time.
weight_column_name: A string defining feature column name representing
weights. It is used to downweight or boost examples during training. It
will be multiplied by the loss of the example.
model_dir: Directory to save model parameters, graph etc. This can also be
used to load checkpoints from the directory into an estimator to
continue training a previously saved model.
l1_regularization: L1-regularization parameter. Refers to global L1
regularization (across all examples).
l2_regularization: L2-regularization parameter. Refers to global L2
regularization (across all examples).
num_loss_partitions: Number of partitions of the global loss function
optimized by the underlying optimizer (SDCAOptimizer).
config: `RunConfig` object to configure the runtime settings.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and returns features and
labels which will be fed into the model.
Returns:
A `SDCALogisticClassifier` estimator.
"""
super(SDCALogisticClassifier, self).__init__(
example_id_column=example_id_column,
feature_columns=feature_columns,
weight_column_name=weight_column_name,
model_dir=model_dir,
head=head_lib.multi_class_head(
n_classes=2, weight_column_name=weight_column_name),
l1_regularization=l1_regularization,
l2_regularization=l2_regularization,
num_loss_partitions=num_loss_partitions,
config=config,
feature_engineering_fn=feature_engineering_fn)
def predict_classes(self, input_fn=None):
"""Runs inference to determine the predicted class.
Args:
input_fn: The input function providing features.
Returns:
A generator of predicted classes for the features provided by input_fn.
"""
key = prediction_key.PredictionKey.CLASSES
predictions = super(SDCALogisticClassifier, self).predict(
input_fn=input_fn, outputs=[key])
return (pred[key] for pred in predictions)
def predict_proba(self, input_fn=None):
"""Runs inference to determine the class probability predictions.
Args:
input_fn: The input function providing features.
Returns:
A generator of predicted class probabilities for the features provided by
input_fn.
"""
key = prediction_key.PredictionKey.PROBABILITIES
predictions = super(SDCALogisticClassifier, self).predict(
input_fn=input_fn, outputs=[key])
return (pred[key] for pred in predictions)
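# Hedged, illustrative sketch (not part of the original module): a minimal
# input_fn for SDCALogisticClassifier. Feature names and values below are
# hypothetical; SDCA additionally requires the example-id feature to be a
# string Tensor. The helper is defined but never called, and it reuses the
# module-level `layers` import for the feature column.
def _example_sdca_logistic_usage():
  import tensorflow as tf
  def input_fn_train():
    features = {
        'example_id': tf.constant(['1', '2', '3']),
        'age': tf.constant([[20.0], [35.0], [50.0]]),
    }
    labels = tf.constant([[0], [1], [1]])
    return features, labels
  classifier = SDCALogisticClassifier(
      example_id_column='example_id',
      feature_columns=[layers.real_valued_column('age')])
  classifier.fit(input_fn=input_fn_train, steps=10)
  return list(classifier.predict_classes(input_fn=input_fn_train))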
class SDCALinearRegressor(_SDCAEstimator):
"""Linear regression model using SDCA to solve the underlying optimization.
Example usage:
```python
real_column_a = real_valued_column(...)
sparse_column_b = sparse_column_with_hash_bucket(...)
regressor = SDCALinearRegressor(
example_id_column='example_id',
feature_columns=[real_column_a, sparse_column_b],
weight_column_name=...,
l2_regularization=...,
num_loss_partitions=...,
)
# Input builders
# returns x, y (where y is the label Tensor)
def input_fn_{train, eval}:
# returns x (features dict)
def input_fn_test:
...
regressor.fit(input_fn=input_fn_train)
regressor.evaluate(input_fn=input_fn_eval)
regressor.predict_scores(input_fn=input_fn_test) # returns predicted scores.
```
The input_fn provided to `fit`, `evaluate` and predict_* methods should return
the following features, otherwise there will be a `KeyError`:
* A feature with `key=example_id_column` whose value is a `Tensor` of dtype
string.
* If `weight_column_name` is not `None`, a feature with
`key=weight_column_name` whose value is a `Tensor`.
* For each `column` in `feature_columns`:
- if `column` is a `SparseColumn`, a feature with `key=column.name` whose
`value` is a `SparseTensor`
- if `column` is a `RealValuedColumn`, a feature with `key=column.name`
whose `value` is a `Tensor`
- if `column` is a `WeightedSparseColumn`, two features: the first with
`key` the id column name, the second with `key` the weight column name.
Both features' `value` must be a `SparseTensor`
"""
def __init__(self,
example_id_column,
feature_columns,
weight_column_name=None,
model_dir=None,
l1_regularization=0.0,
l2_regularization=1.0,
num_loss_partitions=None,
config=None,
feature_engineering_fn=None):
"""Construct a `SDCALinearRegressor` estimator object.
Args:
example_id_column: A string defining the feature column name representing
example ids. Used to initialize the underlying SDCA optimizer.
feature_columns: An iterable containing all the feature columns used by
the model. All items in the iterable should derive from `FeatureColumn`.
Note that the order of the items is ignored at model construction time.
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
model_dir: Directory to save model parameters, graph etc. This can also be
used to load checkpoints from the directory into an estimator to
continue training a previously saved model.
l1_regularization: L1-regularization parameter. Refers to global L1
regularization (across all examples).
l2_regularization: L2-regularization parameter. Refers to global L2
regularization (across all examples).
num_loss_partitions: number of partitions of the (global) loss function
optimized by the underlying optimizer (SDCAOptimizer).
config: `RunConfig` object to configure the runtime settings.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and returns features and
labels which will be fed into the model.
Returns:
A `SDCALinearRegressor` estimator.
"""
super(SDCALinearRegressor, self).__init__(
example_id_column=example_id_column,
feature_columns=feature_columns,
weight_column_name=weight_column_name,
model_dir=model_dir,
head=head_lib.regression_head(weight_column_name=weight_column_name),
l1_regularization=l1_regularization,
l2_regularization=l2_regularization,
num_loss_partitions=num_loss_partitions,
config=config,
feature_engineering_fn=feature_engineering_fn)
def predict_scores(self, input_fn):
"""Returns predicted scores for given features.
Args:
input_fn: The input function providing features.
Returns:
A generator of predicted scores for the features provided by input_fn.
"""
key = prediction_key.PredictionKey.SCORES
predictions = super(SDCALinearRegressor, self).predict(
input_fn=input_fn, outputs=[key])
return (pred[key] for pred in predictions)
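# Hedged, illustrative sketch (not part of the original module): the regressor
# follows the same input contract, but with real-valued labels. Names and
# values are hypothetical; the helper is defined but never called.
def _example_sdca_regressor_usage():
  import tensorflow as tf
  def input_fn_train():
    features = {
        'example_id': tf.constant(['1', '2', '3']),
        'x': tf.constant([[1.0], [2.0], [3.0]]),
    }
    labels = tf.constant([[1.5], [3.1], [4.4]])
    return features, labels
  regressor = SDCALinearRegressor(
      example_id_column='example_id',
      feature_columns=[layers.real_valued_column('x')])
  regressor.fit(input_fn=input_fn_train, steps=10)
  return list(regressor.predict_scores(input_fn=input_fn_train))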
|
bartosh/layerindex-web
|
refs/heads/master
|
layerindex/views.py
|
1
|
# layerindex-web - view definitions
#
# Copyright (C) 2013-2014 Intel Corporation
#
# Licensed under the MIT license, see COPYING.MIT for details
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseForbidden, Http404
from django.core.urlresolvers import reverse, reverse_lazy, resolve
from django.core.exceptions import PermissionDenied
from django.template import RequestContext
from layerindex.models import Branch, LayerItem, LayerMaintainer, LayerBranch, LayerDependency, LayerNote, Recipe, Machine, BBClass, BBAppend, RecipeChange, RecipeChangeset, ClassicRecipe
from datetime import datetime
from django.views.generic import TemplateView, DetailView, ListView
from django.views.generic.edit import CreateView, DeleteView, UpdateView
from django.views.generic.base import RedirectView
from layerindex.forms import EditLayerForm, LayerMaintainerFormSet, EditNoteForm, EditProfileForm, RecipeChangesetForm, AdvancedRecipeSearchForm, BulkChangeEditFormSet, ClassicRecipeForm, ClassicRecipeSearchForm
from django.db import transaction
from django.contrib.auth.models import User, Permission
from django.db.models import Q, Count
from django.core.mail import EmailMessage
from django.template.loader import get_template
from django.template import Context
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from reversion.models import Revision
import simplesearch
import settings
from django.dispatch import receiver
import reversion
def edit_layernote_view(request, template_name, slug, pk=None):
layeritem = get_object_or_404(LayerItem, name=slug)
if layeritem.classic:
raise Http404
if not (request.user.is_authenticated() and (request.user.has_perm('layerindex.publish_layer') or layeritem.user_can_edit(request.user))):
raise PermissionDenied
if pk:
# Edit mode
layernote = get_object_or_404(LayerNote, pk=pk)
else:
# Add mode
layernote = LayerNote()
layernote.layer = layeritem
if request.method == 'POST':
form = EditNoteForm(request.POST, instance=layernote)
if form.is_valid():
form.save()
return HttpResponseRedirect(layeritem.get_absolute_url())
else:
form = EditNoteForm(instance=layernote)
return render(request, template_name, {
'form': form,
})
def delete_layernote_view(request, template_name, slug, pk):
layeritem = get_object_or_404(LayerItem, name=slug)
if layeritem.classic:
raise Http404
if not (request.user.is_authenticated() and (request.user.has_perm('layerindex.publish_layer') or layeritem.user_can_edit(request.user))):
raise PermissionDenied
layernote = get_object_or_404(LayerNote, pk=pk)
if request.method == 'POST':
layernote.delete()
return HttpResponseRedirect(layeritem.get_absolute_url())
else:
return render(request, template_name, {
'object': layernote,
'object_type': layernote._meta.verbose_name,
'cancel_url': layeritem.get_absolute_url()
})
def delete_layer_view(request, template_name, slug):
layeritem = get_object_or_404(LayerItem, name=slug)
if layeritem.classic:
raise Http404
if not (request.user.is_authenticated() and request.user.has_perm('layerindex.publish_layer') and layeritem.status == 'N'):
raise PermissionDenied
if request.method == 'POST':
layeritem.delete()
return HttpResponseRedirect(reverse('layer_list', args=('master',)))
else:
return render(request, template_name, {
'object': layeritem,
'object_type': layeritem._meta.verbose_name,
'cancel_url': layeritem.get_absolute_url()
})
def edit_layer_view(request, template_name, branch='master', slug=None):
return_url = None
branchobj = Branch.objects.filter(name=branch)[:1].get()
if slug:
# Edit mode
layeritem = get_object_or_404(LayerItem, name=slug)
if layeritem.classic:
raise Http404
if not (request.user.is_authenticated() and (request.user.has_perm('layerindex.publish_layer') or layeritem.user_can_edit(request.user))):
raise PermissionDenied
layerbranch = get_object_or_404(LayerBranch, layer=layeritem, branch=branchobj)
deplistlayers = LayerItem.objects.exclude(id=layeritem.id).order_by('name')
returnto = request.GET.get('returnto', 'layer_item')
if returnto:
if returnto == 'layer_review':
return_url = reverse_lazy(returnto, args=(layeritem.name,))
else:
return_url = reverse_lazy(returnto, args=(branch, layeritem.name))
else:
# Submit mode
layeritem = LayerItem()
layerbranch = LayerBranch(layer=layeritem, branch=branchobj)
deplistlayers = LayerItem.objects.filter(classic=False).order_by('name')
if request.method == 'POST':
last_vcs_url = layeritem.vcs_url
form = EditLayerForm(request.user, layerbranch, request.POST, instance=layeritem)
maintainerformset = LayerMaintainerFormSet(request.POST, instance=layerbranch)
if form.is_valid() and maintainerformset.is_valid():
with transaction.commit_on_success():
reset_last_rev = False
form.save()
layerbranch.layer = layeritem
new_subdir = form.cleaned_data['vcs_subdir']
if layerbranch.vcs_subdir != new_subdir:
layerbranch.vcs_subdir = new_subdir
reset_last_rev = True
layerbranch.save()
maintainerformset.save()
if slug:
new_deps = form.cleaned_data['deps']
existing_deps = [deprec.dependency for deprec in layerbranch.dependencies_set.all()]
reset_last_rev = False
for dep in new_deps:
if dep not in existing_deps:
deprec = LayerDependency()
deprec.layerbranch = layerbranch
deprec.dependency = dep
deprec.save()
reset_last_rev = True
for dep in existing_deps:
if dep not in new_deps:
layerbranch.dependencies_set.filter(dependency=dep).delete()
reset_last_rev = True
if layeritem.vcs_url != last_vcs_url:
reset_last_rev = True
if reset_last_rev:
layerbranch.vcs_last_rev = ''
layerbranch.save()
else:
# Save dependencies
for dep in form.cleaned_data['deps']:
deprec = LayerDependency()
deprec.layerbranch = layerbranch
deprec.dependency = dep
deprec.save()
# Send email
plaintext = get_template('layerindex/submitemail.txt')
perm = Permission.objects.get(codename='publish_layer')
users = User.objects.filter(Q(groups__permissions=perm) | Q(user_permissions=perm) ).distinct()
for user in users:
if user.first_name:
user_name = user.first_name
else:
user_name = user.username
d = Context({
'user_name': user_name,
'layer_name': layeritem.name,
'layer_url': request.build_absolute_uri(reverse('layer_review', args=(layeritem.name,))),
})
subject = '%s - %s' % (settings.SUBMIT_EMAIL_SUBJECT, layeritem.name)
from_email = settings.SUBMIT_EMAIL_FROM
to_email = user.email
text_content = plaintext.render(d)
msg = EmailMessage(subject, text_content, from_email, [to_email])
msg.send()
return HttpResponseRedirect(reverse('submit_layer_thanks'))
messages.success(request, 'Layer %s saved successfully.' % layeritem.name)
if return_url:
return HttpResponseRedirect(return_url)
else:
form = EditLayerForm(request.user, layerbranch, instance=layeritem)
maintainerformset = LayerMaintainerFormSet(instance=layerbranch)
return render(request, template_name, {
'form': form,
'maintainerformset': maintainerformset,
'deplistlayers': deplistlayers,
'return_url': return_url,
})
def bulk_change_edit_view(request, template_name, pk):
changeset = get_object_or_404(RecipeChangeset, pk=pk)
if request.method == 'POST':
formset = BulkChangeEditFormSet(request.POST, queryset=changeset.recipechange_set.all())
if formset.is_valid():
for form in formset:
form.clear_same_values()
formset.save()
return HttpResponseRedirect(reverse('bulk_change_review', args=(changeset.id,)))
else:
formset = BulkChangeEditFormSet(queryset=changeset.recipechange_set.all())
return render(request, template_name, {
'formset': formset,
})
def bulk_change_patch_view(request, pk):
import os
import os.path
import utils
changeset = get_object_or_404(RecipeChangeset, pk=pk)
# FIXME this couples the web server and machine running the update script together,
# but given that it's a separate script the way is open to decouple them in future
try:
ret = utils.runcmd('python bulkchange.py %d %s' % (int(pk), settings.TEMP_BASE_DIR), os.path.dirname(__file__))
if ret:
fn = ret.splitlines()[-1]
if os.path.exists(fn):
if fn.endswith('.tar.gz'):
mimetype = 'application/x-gzip'
else:
mimetype = 'text/x-diff'
response = HttpResponse(mimetype=mimetype)
response['Content-Disposition'] = 'attachment; filename="%s"' % os.path.basename(fn)
with open(fn, "rb") as f:
data = f.read()
response.write(data)
os.remove(fn)
return response
return HttpResponse('No patch data generated', content_type='text/plain')
except Exception as e:
output = getattr(e, 'output', None)
if output:
if 'timeout' in output:
return HttpResponse('Failed to generate patches: timed out waiting for lock. Please try again shortly.', content_type='text/plain')
return HttpResponse('Failed to generate patches: %s' % e, content_type='text/plain')
# FIXME better error handling
def _check_url_branch(kwargs):
branchname = kwargs['branch']
if branchname:
if branchname == 'oe-classic':
raise Http404
branch = get_object_or_404(Branch, name=branchname)
def publish(request, name):
if not (request.user.is_authenticated() and request.user.has_perm('layerindex.publish_layer')):
raise PermissionDenied
return _statuschange(request, name, 'P')
def _statuschange(request, name, newstatus):
w = get_object_or_404(LayerItem, name=name)
if w.classic:
raise Http404
if w.status != newstatus:
w.change_status(newstatus, request.user.username)
w.save()
return HttpResponseRedirect(w.get_absolute_url())
class RedirectParamsView(RedirectView):
def get_redirect_url(self, *args, **kwargs):
redirect_name = kwargs.pop('redirect_name')
return reverse_lazy(redirect_name, args=args, kwargs=kwargs)
class LayerListView(ListView):
context_object_name = 'layerbranch_list'
def get_queryset(self):
_check_url_branch(self.kwargs)
return LayerBranch.objects.filter(branch__name=self.kwargs['branch']).filter(layer__status='P').order_by('layer__layer_type', 'layer__name')
def get_context_data(self, **kwargs):
context = super(LayerListView, self).get_context_data(**kwargs)
context['url_branch'] = self.kwargs['branch']
context['this_url_name'] = resolve(self.request.path_info).url_name
context['layer_type_choices'] = LayerItem.LAYER_TYPE_CHOICES
return context
class LayerReviewListView(ListView):
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
if not request.user.has_perm('layerindex.publish_layer'):
raise PermissionDenied
return super(LayerReviewListView, self).dispatch(request, *args, **kwargs)
def get_queryset(self):
return LayerBranch.objects.filter(branch__name='master').filter(layer__status='N').order_by('layer__name')
class LayerDetailView(DetailView):
model = LayerItem
slug_field = 'name'
# This is a bit of a mess. Surely there has to be a better way to handle this...
def dispatch(self, request, *args, **kwargs):
self.user = request.user
res = super(LayerDetailView, self).dispatch(request, *args, **kwargs)
l = self.get_object()
if l:
if l.classic:
raise Http404
if l.status == 'N':
if not (request.user.is_authenticated() and request.user.has_perm('layerindex.publish_layer')):
raise PermissionDenied
return res
def get_context_data(self, **kwargs):
_check_url_branch(self.kwargs)
context = super(LayerDetailView, self).get_context_data(**kwargs)
layer = context['layeritem']
context['useredit'] = layer.user_can_edit(self.user)
layerbranch = layer.get_layerbranch(self.kwargs['branch'])
if layerbranch:
context['layerbranch'] = layerbranch
context['machines'] = layerbranch.machine_set.order_by('name')
context['appends'] = layerbranch.bbappend_set.order_by('filename')
context['classes'] = layerbranch.bbclass_set.order_by('name')
context['url_branch'] = self.kwargs['branch']
context['this_url_name'] = resolve(self.request.path_info).url_name
return context
class LayerReviewDetailView(LayerDetailView):
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
if not request.user.has_perm('layerindex.publish_layer'):
raise PermissionDenied
return super(LayerReviewDetailView, self).dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
self.kwargs['branch'] = 'master'
context = super(LayerReviewDetailView, self).get_context_data(**kwargs)
return context
def recipes_preferred_count(qs):
# Add extra column so we can show "duplicate" recipes from other layers de-emphasised
# (it's a bit crude having to do this using SQL but I couldn't find a better way...)
return qs.extra(
select={
'preferred_count': """SELECT COUNT(1)
FROM layerindex_recipe AS recipe2
, layerindex_layerbranch as branch2
, layerindex_layeritem as layer1
, layerindex_layeritem as layer2
WHERE branch2.id = recipe2.layerbranch_id
AND layer2.id = branch2.layer_id
AND layer2.layer_type in ('S', 'A')
AND branch2.branch_id = layerindex_layerbranch.branch_id
AND recipe2.pn = layerindex_recipe.pn
AND recipe2.layerbranch_id <> layerindex_recipe.layerbranch_id
AND layer1.id = layerindex_layerbranch.layer_id
AND layer2.index_preference > layer1.index_preference
"""
},
)
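# Illustrative note (not part of the original module): the synthesized
# 'preferred_count' column counts same-named recipes that live in another
# layer of type 'S' (software) or 'A' (appliance) on the same branch and whose
# layer has a higher index_preference. A template can therefore de-emphasise a
# recipe whenever recipe.preferred_count > 0, e.g. by adding a "muted" CSS
# class to that row.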
class RecipeSearchView(ListView):
context_object_name = 'recipe_list'
paginate_by = 50
def get_queryset(self):
_check_url_branch(self.kwargs)
query_string = self.request.GET.get('q', '')
init_qs = Recipe.objects.filter(layerbranch__branch__name=self.kwargs['branch'])
if query_string.strip():
entry_query = simplesearch.get_query(query_string, ['pn', 'summary', 'description', 'filename'])
qs = init_qs.filter(entry_query).order_by('pn', 'layerbranch__layer')
else:
if 'q' in self.request.GET:
qs = init_qs.order_by('pn', 'layerbranch__layer')
else:
# It's a bit too slow to return all records by default, and most people
# won't actually want that (if they do they can just hit the search button
# with no query string)
return Recipe.objects.none()
return recipes_preferred_count(qs)
def get_context_data(self, **kwargs):
context = super(RecipeSearchView, self).get_context_data(**kwargs)
searchval = self.request.GET.get('q', '')
context['search_keyword'] = searchval
context['url_branch'] = self.kwargs['branch']
context['this_url_name'] = resolve(self.request.path_info).url_name
if searchval:
context['extra_url_param'] = '?q=%s' % searchval
return context
class DuplicatesView(TemplateView):
def get_recipes(self, layer_ids):
init_qs = Recipe.objects.filter(layerbranch__branch__name=self.kwargs['branch'])
if layer_ids:
init_qs = init_qs.filter(layerbranch__layer__in=layer_ids)
dupes = init_qs.values('pn').annotate(Count('layerbranch', distinct=True)).filter(layerbranch__count__gt=1)
qs = init_qs.all().filter(pn__in=[item['pn'] for item in dupes]).order_by('pn', 'layerbranch__layer', '-pv')
return recipes_preferred_count(qs)
def get_classes(self, layer_ids):
init_qs = BBClass.objects.filter(layerbranch__branch__name=self.kwargs['branch'])
if layer_ids:
init_qs = init_qs.filter(layerbranch__layer__in=layer_ids)
dupes = init_qs.values('name').annotate(Count('layerbranch', distinct=True)).filter(layerbranch__count__gt=1)
qs = init_qs.all().filter(name__in=[item['name'] for item in dupes]).order_by('name', 'layerbranch__layer')
return qs
def get_context_data(self, **kwargs):
layer_ids = [int(i) for i in self.request.GET.getlist('l')]
context = super(DuplicatesView, self).get_context_data(**kwargs)
context['recipes'] = self.get_recipes(layer_ids)
context['classes'] = self.get_classes(layer_ids)
context['url_branch'] = self.kwargs['branch']
context['this_url_name'] = resolve(self.request.path_info).url_name
context['layers'] = LayerBranch.objects.filter(branch__name=self.kwargs['branch']).filter(layer__status='P').order_by( 'layer__name')
context['showlayers'] = layer_ids
return context
class AdvancedRecipeSearchView(ListView):
context_object_name = 'recipe_list'
paginate_by = 50
def get_queryset(self):
field = self.request.GET.get('field', '')
if field:
search_form = AdvancedRecipeSearchForm(self.request.GET)
if not search_form.is_valid():
return Recipe.objects.none()
match_type = self.request.GET.get('match_type', '')
if match_type == 'B':
value = ''
else:
value = self.request.GET.get('value', '')
if value or match_type == 'B':
if match_type == 'C' or match_type == 'N':
query = Q(**{"%s__icontains" % field: value})
else:
query = Q(**{"%s" % field: value})
queryset = Recipe.objects.filter(layerbranch__branch__name='master')
layer = self.request.GET.get('layer', '')
if layer:
queryset = queryset.filter(layerbranch__layer=layer)
if match_type == 'N':
# Exclude blank as well
queryset = queryset.exclude(Q(**{"%s" % field: ''})).exclude(query)
else:
queryset = queryset.filter(query)
return queryset.order_by('pn', 'layerbranch__layer')
return Recipe.objects.none()
def get_context_data(self, **kwargs):
context = super(AdvancedRecipeSearchView, self).get_context_data(**kwargs)
if self.request.GET.get('field', ''):
searched = True
search_form = AdvancedRecipeSearchForm(self.request.GET)
else:
searched = False
search_form = AdvancedRecipeSearchForm()
context['search_form'] = search_form
context['searched'] = searched
return context
class BulkChangeView(CreateView):
model = RecipeChangeset
form_class = RecipeChangesetForm
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
return super(BulkChangeView, self).dispatch(request, *args, **kwargs)
def form_valid(self, form):
if not self.request.user.is_authenticated():
raise PermissionDenied
obj = form.save(commit=False)
obj.user = self.request.user
obj.save()
return HttpResponseRedirect(reverse('bulk_change_search', args=(obj.id,)))
def get_context_data(self, **kwargs):
context = super(BulkChangeView, self).get_context_data(**kwargs)
context['changesets'] = RecipeChangeset.objects.filter(user=self.request.user)
return context
class BulkChangeSearchView(AdvancedRecipeSearchView):
def get(self, request, *args, **kwargs):
self.changeset = get_object_or_404(RecipeChangeset, pk=kwargs['pk'])
if self.changeset.user != request.user:
raise PermissionDenied
return super(BulkChangeSearchView, self).get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
if not request.user.is_authenticated():
raise PermissionDenied
changeset = get_object_or_404(RecipeChangeset, pk=kwargs['pk'])
if changeset.user != request.user:
raise PermissionDenied
def add_recipes(recipes):
for recipe in recipes:
if not changeset.recipechange_set.filter(recipe=recipe):
change = RecipeChange()
change.changeset = changeset
change.recipe = recipe
change.save()
if 'add_selected' in request.POST:
id_list = request.POST.getlist('selecteditems')
id_list = [int(i) for i in id_list if i.isdigit()]
recipes = Recipe.objects.filter(id__in=id_list)
add_recipes(recipes)
elif 'add_all' in request.POST:
add_recipes(self.get_queryset())
elif 'remove_all' in request.POST:
changeset.recipechange_set.all().delete()
return self.get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(BulkChangeSearchView, self).get_context_data(**kwargs)
context['changeset'] = self.changeset
context['current_branch'] = 'master'
return context
class BaseDeleteView(DeleteView):
def get_context_data(self, **kwargs):
context = super(BaseDeleteView, self).get_context_data(**kwargs)
obj = context.get('object', None)
if obj:
context['object_type'] = obj._meta.verbose_name
cancel = self.request.GET.get('cancel', '')
if cancel:
context['cancel_url'] = reverse_lazy(cancel, args=(obj.pk,))
return context
class BulkChangeDeleteView(BaseDeleteView):
model = RecipeChangeset
success_url = reverse_lazy('bulk_change')
def get_queryset(self):
qs = super(BulkChangeDeleteView, self).get_queryset()
return qs.filter(user=self.request.user)
class MachineSearchView(ListView):
context_object_name = 'machine_list'
paginate_by = 50
def get_queryset(self):
_check_url_branch(self.kwargs)
query_string = self.request.GET.get('q', '')
init_qs = Machine.objects.filter(layerbranch__branch__name=self.kwargs['branch'])
if query_string.strip():
entry_query = simplesearch.get_query(query_string, ['name', 'description'])
return init_qs.filter(entry_query).order_by('name', 'layerbranch__layer')
else:
if 'q' in self.request.GET:
return init_qs.order_by('name', 'layerbranch__layer')
else:
# Be consistent with RecipeSearchView
return Machine.objects.none()
def get_context_data(self, **kwargs):
context = super(MachineSearchView, self).get_context_data(**kwargs)
context['search_keyword'] = self.request.GET.get('q', '')
context['url_branch'] = self.kwargs['branch']
context['this_url_name'] = resolve(self.request.path_info).url_name
return context
class PlainTextListView(ListView):
def render_to_response(self, context):
"Returns a plain text response rendering of the template"
template = get_template(self.template_name)
return HttpResponse(template.render(Context(context)),
content_type='text/plain')
class HistoryListView(ListView):
context_object_name = "revisions"
paginate_by = 50
def get_queryset(self):
return Revision.objects.all().order_by('-date_created')
class EditProfileFormView(UpdateView):
form_class = EditProfileForm
def dispatch(self, request, *args, **kwargs):
self.user = request.user
return super(EditProfileFormView, self).dispatch(request, *args, **kwargs)
def get_object(self, queryset=None):
return self.user
def get_success_url(self):
return reverse('frontpage')
@receiver(reversion.pre_revision_commit)
def annotate_revision(sender, **kwargs):
ignorefields = ['vcs_last_rev', 'vcs_last_fetch', 'vcs_last_commit', 'updated']
versions = kwargs.pop('versions')
instances = kwargs.pop('instances')
changelist = []
for ver, inst in zip(versions, instances):
currentVersion = ver.field_dict
modelmeta = ver.content_type.model_class()._meta
if ver.type == reversion.models.VERSION_DELETE:
changelist.append("Deleted %s: %s" % (modelmeta.verbose_name.lower(), ver.object_repr))
else:
pastver = reversion.get_for_object(inst)
if pastver and ver.type != reversion.models.VERSION_ADD:
pastVersion = pastver[0].field_dict
changes = set(currentVersion.items()) - set(pastVersion.items())
changedVars = [var[0] for var in changes]
fieldchanges = []
for field in changedVars:
if field not in ignorefields:
modelfield = modelmeta.get_field(field)
newvalue = currentVersion[field]
if modelfield.choices:
for v in modelfield.choices:
if v[0] == newvalue:
newvalue = v[1]
break
fieldchanges.append("%s to '%s'" % (modelfield.verbose_name.lower(), newvalue))
if fieldchanges:
changelist.append("Changed %s %s %s" % (modelmeta.verbose_name.lower(), ver.object_repr, ", ".join(fieldchanges)))
else:
changelist.append("Added %s: %s" % (modelmeta.verbose_name.lower(), ver.object_repr))
comment = '\n'.join(changelist)
if not comment:
comment = 'No changes'
revision = kwargs.pop('revision')
revision.comment = comment
revision.save()
kwargs['revision'] = revision
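# Illustrative note (assumes django-reversion's pre_revision_commit signal as
# used above): the receiver composes a revision comment such as
# "Changed layer meta-example summary to 'Example BSP layer'" (hypothetical
# values) by diffing each version's field_dict against its most recent prior
# version, skipping the volatile fields listed in ignorefields.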
class RecipeDetailView(DetailView):
model = Recipe
def get_context_data(self, **kwargs):
context = super(RecipeDetailView, self).get_context_data(**kwargs)
recipe = self.get_object()
if recipe:
verappendprefix = recipe.filename.split('.bb')[0]
appendprefix = verappendprefix.split('_')[0]
#context['verappends'] = BBAppend.objects.filter(layerbranch__branch=recipe.layerbranch.branch).filter(filename='%s.bbappend' % verappendprefix)
context['appends'] = BBAppend.objects.filter(layerbranch__branch=recipe.layerbranch.branch).filter(filename__regex=r'%s(_[^_]*)?\.bbappend' % appendprefix)
verappends = []
for append in context['appends']:
if append.matches_recipe(recipe):
verappends.append(append)
context['verappends'] = verappends
return context
class ClassicRecipeSearchView(RecipeSearchView):
def get_queryset(self):
self.kwargs['branch'] = 'oe-classic'
query_string = self.request.GET.get('q', '')
cover_status = self.request.GET.get('cover_status', None)
cover_verified = self.request.GET.get('cover_verified', None)
category = self.request.GET.get('category', None)
init_qs = ClassicRecipe.objects.filter(layerbranch__branch__name='oe-classic')
if cover_status:
if cover_status == '!':
init_qs = init_qs.filter(cover_status__in=['U', 'N'])
else:
init_qs = init_qs.filter(cover_status=cover_status)
if cover_verified:
init_qs = init_qs.filter(cover_verified=(cover_verified=='1'))
if category:
init_qs = init_qs.filter(classic_category__icontains=category)
if query_string.strip():
entry_query = simplesearch.get_query(query_string, ['pn', 'summary', 'description', 'filename'])
qs = init_qs.filter(entry_query).order_by('pn', 'layerbranch__layer')
else:
if 'q' in self.request.GET:
qs = init_qs.order_by('pn', 'layerbranch__layer')
else:
# It's a bit too slow to return all records by default, and most people
# won't actually want that (if they do they can just hit the search button
# with no query string)
return Recipe.objects.none()
return qs
def get_context_data(self, **kwargs):
context = super(ClassicRecipeSearchView, self).get_context_data(**kwargs)
context['multi_classic_layers'] = LayerItem.objects.filter(classic=True).count() > 1
if 'q' in self.request.GET:
searched = True
search_form = ClassicRecipeSearchForm(self.request.GET)
else:
searched = False
search_form = ClassicRecipeSearchForm()
context['search_form'] = search_form
context['searched'] = searched
return context
class ClassicRecipeDetailView(UpdateView):
model = ClassicRecipe
form_class = ClassicRecipeForm
context_object_name = 'recipe'
def _can_edit(self):
if self.request.user.is_authenticated():
if not self.request.user.has_perm('layerindex.edit_classic'):
user_email = self.request.user.email.strip().lower()
if not LayerMaintainer.objects.filter(email__iexact=user_email):
return False
else:
return False
return True
def post(self, request, *args, **kwargs):
if not self._can_edit():
raise PermissionDenied
return super(ClassicRecipeDetailView, self).post(request, *args, **kwargs)
def get_success_url(self):
return reverse_lazy('classic_recipe_search')
def get_context_data(self, **kwargs):
context = super(ClassicRecipeDetailView, self).get_context_data(**kwargs)
context['can_edit'] = self._can_edit()
return context
class ClassicRecipeStatsView(TemplateView):
def get_context_data(self, **kwargs):
context = super(ClassicRecipeStatsView, self).get_context_data(**kwargs)
# *** Cover status chart ***
statuses = []
status_counts = {}
for choice, desc in ClassicRecipe.COVER_STATUS_CHOICES:
statuses.append(desc)
status_counts[desc] = ClassicRecipe.objects.filter(cover_status=choice).count()
statuses = sorted(statuses, key=lambda status: status_counts[status], reverse=True)
chartdata = {'x': statuses, 'y': [status_counts[k] for k in statuses]}
context['charttype_status'] = 'pieChart'
context['chartdata_status'] = chartdata
context['extra_status'] = {
'x_is_date': False,
'x_axis_format': '',
'tag_script_js': True,
'jquery_on_ready': False,
}
# *** Categories chart ***
categories = ['obsoletedir', 'nonworkingdir']
uniquevals = ClassicRecipe.objects.exclude(classic_category='').values_list('classic_category', flat=True).distinct()
for value in uniquevals:
cats = value.split()
for cat in cats:
if cat not in categories:
categories.append(cat)
categories.append('none')
catcounts = dict.fromkeys(categories, 0)
unmigrated = ClassicRecipe.objects.filter(cover_status='U')
catcounts['none'] = unmigrated.filter(classic_category='').count()
values = unmigrated.exclude(classic_category='').values_list('classic_category', flat=True)
# We gather data this way because an item might be in more than one category, thus
# the categories list must be in priority order
for value in values:
recipecats = value.split()
foundcat = 'none'
for cat in categories:
if cat in recipecats:
foundcat = cat
break
catcounts[foundcat] += 1
# Eliminate categories with zero count
categories = [cat for cat in categories if catcounts[cat] > 0]
categories = sorted(categories, key=lambda cat: catcounts[cat], reverse=True)
chartdata_category = {'x': categories, 'y': [catcounts[k] for k in categories]}
context['charttype_category'] = 'pieChart'
context['chartdata_category'] = chartdata_category
context['extra_category'] = {
'x_is_date': False,
'x_axis_format': '',
'tag_script_js': True,
'jquery_on_ready': False,
}
return context
|
BMCV/galaxy-image-analysis
|
refs/heads/master
|
tools/2d_histogram_equalization/histogram_equalization.py
|
1
|
import argparse
import os
import sys
import skimage.io
import skimage.exposure
import skimage.util
hOptions = {
'default' : lambda img_raw: skimage.exposure.equalize_hist(img_raw),
'clahe' : lambda img_raw: skimage.exposure.equalize_adapthist(img_raw)
}
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Histogram equalization')
parser.add_argument('input_file', type=argparse.FileType('r'), default=sys.stdin, help='input file')
parser.add_argument('out_file', type=argparse.FileType('w'), default=sys.stdout, help='out file (TIFF)')
parser.add_argument('h_type', choices=hOptions.keys(), help='histogram equalization method')
args = parser.parse_args()
img_in = skimage.io.imread(args.input_file.name)
res = hOptions[args.h_type](img_in)
res = skimage.util.img_as_uint(res)
skimage.io.imsave(args.out_file.name, res, plugin="tifffile")
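# Hedged usage sketch (file names are hypothetical):
#   python histogram_equalization.py input.tiff output.tiff clahe
# reads input.tiff, applies adaptive histogram equalization (CLAHE), converts
# the result to 16-bit unsigned integers, and writes output.tiff via tifffile.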
|
keyboard-k/youtube-dl-pet
|
refs/heads/master
|
youtube_dl/extractor/daum.py
|
3
|
# encoding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
)
class DaumIE(InfoExtractor):
_VALID_URL = r'https?://(?:m\.)?tvpot\.daum\.net/(?:v/|.*?clipid=)(?P<id>[^?#&]+)'
IE_NAME = 'daum.net'
_TESTS = [{
'url': 'http://tvpot.daum.net/clip/ClipView.do?clipid=52554690',
'info_dict': {
'id': '52554690',
'ext': 'mp4',
'title': 'DOTA 2GETHER ์์ฆ2 6ํ - 2๋ถ',
'description': 'DOTA 2GETHER ์์ฆ2 6ํ - 2๋ถ',
'upload_date': '20130831',
'duration': 3868,
},
}, {
# Test for https://github.com/rg3/youtube-dl/issues/7949
'url': 'http://tvpot.daum.net/mypot/View.do?ownerid=M1O35s8HPOo0&clipid=73147290',
'md5': 'c92d78bcee4424451f1667f275c1dc97',
'info_dict': {
'id': '73147290',
'ext': 'mp4',
'title': '์ธ์ด - ๋ํ๋ฐ์ง [์ ํฌ์ด์ ์ค์ผ์น๋ถ] 299ํ 20151218',
'description': '์ธ์ด - ๋ํ๋ฐ์ง',
'upload_date': '20151219',
'duration': 232,
},
}, {
'url': 'http://tvpot.daum.net/v/vab4dyeDBysyBssyukBUjBz',
'only_matching': True,
}, {
'url': 'http://tvpot.daum.net/v/07dXWRka62Y%24',
'only_matching': True,
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
canonical_url = 'http://tvpot.daum.net/v/%s' % video_id
webpage = self._download_webpage(canonical_url, video_id)
og_url = self._og_search_url(webpage, default=None) or self._search_regex(
r'<link[^>]+rel=(["\'])canonical\1[^>]+href=(["\'])(?P<url>.+?)\2',
webpage, 'canonical url', group='url')
full_id = self._search_regex(
r'tvpot\.daum\.net/v/([^/]+)', og_url, 'full id')
query = compat_urllib_parse.urlencode({'vid': full_id})
info = self._download_xml(
'http://tvpot.daum.net/clip/ClipInfoXml.do?' + query, video_id,
'Downloading video info')
urls = self._download_xml(
'http://videofarm.daum.net/controller/api/open/v1_2/MovieData.apixml?' + query,
video_id, 'Downloading video formats info')
formats = []
for format_el in urls.findall('result/output_list/output_list'):
profile = format_el.attrib['profile']
format_query = compat_urllib_parse.urlencode({
'vid': full_id,
'profile': profile,
})
url_doc = self._download_xml(
'http://videofarm.daum.net/controller/api/open/v1_2/MovieLocation.apixml?' + format_query,
video_id, note='Downloading video data for %s format' % profile)
format_url = url_doc.find('result/url').text
formats.append({
'url': format_url,
'format_id': profile,
})
return {
'id': video_id,
'title': info.find('TITLE').text,
'formats': formats,
'thumbnail': self._og_search_thumbnail(webpage),
'description': info.find('CONTENTS').text,
'duration': int(info.find('DURATION').text),
'upload_date': info.find('REGDTTM').text[:8],
}
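# Illustrative flow summary (not part of the original extractor): extraction
# is a three-step XML API sequence -- ClipInfoXml.do supplies the clip
# metadata, MovieData.apixml lists the available output profiles, and one
# MovieLocation.apixml request per profile resolves the actual media URL that
# ends up in each entry of `formats`.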
|
youdonghai/intellij-community
|
refs/heads/master
|
python/testData/refactoring/inlinelocal/methodCallInlinedAsQualifier.after.py
|
79
|
a.method().strip()
|
AstroTech/workshop-python
|
refs/heads/master
|
django/src/django-rest-views.py
|
1
|
import json
from django.http import JsonResponse, HttpResponse
from django.views import View
from contact.models import Contact
class ContactAPI(View):
http_method_names = ['get', 'post', 'options']
def options(self, request, *args, **kwargs):
response = HttpResponse(status=200)
response['Access-Control-Allow-Origin'] = '*'
response['Access-Control-Allow-Methods'] = ', '.join(self.http_method_names).upper()
response['Access-Control-Allow-Headers'] = 'Content-Type'
return response
def get(self, *args, **kwargs):
result = {'contacts': list(Contact.objects.all().values())}
return JsonResponse(status=200, data=result, safe=False)
def post(self, request, *args, **kwargs):
data = json.loads(request.body.decode('utf-8'))
try:
contact, created = Contact.objects.update_or_create(**data)
if created:
return JsonResponse(status=201, data={'status': 'Created'}, safe=False)
else:
return JsonResponse(status=200, data={'status': 'Updated'}, safe=False)
except Exception:
return JsonResponse(status=400, data={'status': 'Bad Request'}, safe=False)
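# Hedged usage sketch (the endpoint URL and field names depend on the
# project's urls.py and Contact model, and are assumptions here):
#   curl -X POST http://localhost:8000/contact/ \
#        -H 'Content-Type: application/json' \
#        -d '{"first_name": "Jan", "last_name": "Kowalski"}'
# returns 201 {"status": "Created"} when a new row is inserted and
# 200 {"status": "Updated"} when an existing one matches.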
|
kbrebanov/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/facts/system/cmdline.py
|
137
|
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import shlex
from ansible.module_utils.facts.utils import get_file_content
from ansible.module_utils.facts.collector import BaseFactCollector
class CmdLineFactCollector(BaseFactCollector):
name = 'cmdline'
_fact_ids = set()
def _get_proc_cmdline(self):
return get_file_content('/proc/cmdline')
def _parse_proc_cmdline(self, data):
cmdline_dict = {}
try:
for piece in shlex.split(data, posix=False):
item = piece.split('=', 1)
if len(item) == 1:
cmdline_dict[item[0]] = True
else:
cmdline_dict[item[0]] = item[1]
except ValueError:
pass
return cmdline_dict
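# Illustrative example (hypothetical kernel command line): parsing
#   'BOOT_IMAGE=/vmlinuz-4.15 ro quiet splash'
# yields {'BOOT_IMAGE': '/vmlinuz-4.15', 'ro': True, 'quiet': True,
# 'splash': True} -- bare tokens map to True, key=value tokens keep the value.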
def collect(self, module=None, collected_facts=None):
cmdline_facts = {}
data = self._get_proc_cmdline()
if not data:
return cmdline_facts
cmdline_facts['cmdline'] = self._parse_proc_cmdline(data)
return cmdline_facts
|